├── .github
│   └── workflows
│       ├── build.yml
│       └── publish.yml
├── .gitignore
├── LICENSE
├── README.md
├── build.gradle
├── gradle.properties
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
└── src
    ├── main
    │   ├── java
    │   │   └── com
    │   │       └── datamountaineer
    │   │           └── streamreactor
    │   │               └── connect
    │   │                   └── json
    │   │                       └── SimpleJsonConverter.java
    │   └── scala
    │       └── com
    │           └── datamountaineer
    │               └── streamreactor
    │                   └── connect
    │                       ├── concurrent
    │                       │   ├── ExecutorExtension.scala
    │                       │   └── FutureAwaitWithFailFastFn.scala
    │                       ├── config
    │                       │   ├── Helpers.scala
    │                       │   ├── Routes.scala
    │                       │   ├── SSLConfigContext.scala
    │                       │   └── base
    │                       │       ├── const
    │                       │       │   └── TraitConfigConst.scala
    │                       │       └── traits
    │                       │           ├── AllowParallelizationSettings.scala
    │                       │           ├── BaseConfig.scala
    │                       │           ├── BaseSettings.scala
    │                       │           ├── BatchSizeSettings.scala
    │                       │           ├── ConnectionSettings.scala
    │                       │           ├── ConsistencyLevelSettings.scala
    │                       │           ├── DatabaseSettings.scala
    │                       │           ├── ErrorPolicySettings.scala
    │                       │           ├── KcqlSettings.scala
    │                       │           ├── NumberRetriesSettings.scala
    │                       │           ├── RetryIntervalSettings.scala
    │                       │           ├── SSLSettings.scala
    │                       │           ├── ThreadPoolSettings.scala
    │                       │           ├── UserSettings.scala
    │                       │           └── WriteTimeoutSettings.scala
    │                       ├── converters
    │                       │   ├── FieldConverter.scala
    │                       │   ├── MsgKey.scala
    │                       │   ├── Transform.scala
    │                       │   ├── sink
    │                       │   │   ├── AvroConverter.scala
    │                       │   │   ├── BytesConverter.scala
    │                       │   │   ├── Converter.scala
    │                       │   │   └── SinkRecordToJson.scala
    │                       │   └── source
    │                       │       ├── AvroConverter.scala
    │                       │       ├── BytesConverter.scala
    │                       │       ├── Converter.scala
    │                       │       ├── JsonConverterWithSchemaEvolution.scala
    │                       │       ├── JsonOptNullConverter.scala
    │                       │       ├── JsonPassThroughConverter.scala
    │                       │       ├── JsonResilientConverter.scala
    │                       │       ├── JsonSimpleConverter.scala
    │                       │       └── KeyExtractor.scala
    │                       ├── errors
    │                       │   ├── ErrorHandler.scala
    │                       │   └── ErrorPolicy.scala
    │                       ├── offsets
    │                       │   └── OffsetHandler.scala
    │                       ├── queues
    │                       │   └── QueueHelpers.scala
    │                       ├── rowkeys
    │                       │   ├── RowKeyBuilderString.scala
    │                       │   └── RowKeyModeEnums.scala
    │                       ├── schemas
    │                       │   ├── ConverterUtil.scala
    │                       │   ├── PayloadFields.scala
    │                       │   ├── SchemaHelper.scala
    │                       │   ├── SchemaRegistry.scala
    │                       │   ├── StructFieldExtractor.scala
    │                       │   └── StructHelper.scala
    │                       ├── serialization
    │                       │   └── AvroSerializer.scala
    │                       ├── sink
    │                       │   └── DbWriter.scala
    │                       ├── source
    │                       │   ├── ExponentialBackOff.scala
    │                       │   └── ExponentialBackOffHandler.scala
    │                       └── utils
    │                           ├── JarManifest.scala
    │                           └── ProgressCounter.scala
    └── test
        ├── java
        │   └── com
        │       └── datamountaineer
        │           └── streamreactor
        │               └── connect
        │                   └── json
        │                       └── SimpleJsonConverterTest.java
        ├── resources
        │   ├── cts_keystore.jks
        │   ├── cts_truststore.jks
        │   ├── log4j.properties
        │   ├── stc_keystore.jks
        │   ├── stc_truststore.jks
        │   ├── sts_keystore.jks
        │   └── sts_truststore.jks
        └── scala
            └── com
                └── datamountaineer
                    └── streamreactor
                        └── connect
                            ├── TestUtilsBase.scala
                            ├── concurrent
                            │   └── FutureAwaitWithFailFastFnTest.scala
                            ├── config
                            │   ├── KcqlSettingsTest.scala
                            │   ├── TestHelpers.scala
                            │   └── TestSSLConfigContext.scala
                            ├── converters
                            │   ├── sink
                            │   │   ├── AvroConverterTest.scala
                            │   │   └── BytesConverterTest.scala
                            │   └── source
                            │       ├── AvroConverterTest.scala
                            │       ├── BytesConverterTest.scala
                            │       ├── JacksonJson.scala
                            │       ├── JsonConverterWithSchemaEvolutionTest.scala
                            │       ├── JsonPassThroughConverterTest.scala
                            │       └── JsonSimpleConverterTest.scala
                            ├── errors
                            │   ├── TestErrorHandlerNoop.scala
                            │   ├── TestErrorHandlerRetry.scala
                            │   └── TestErrorHandlerThrow.scala
                            ├── offsets
                            │   └── TestOffsetHandler.scala
                            ├── schemas
                            │   ├── StructFieldExtractorTest.scala
                            │   └── TestConverterUtil.scala
                            ├── serialization
                            │   └── AvroSerializerTest.scala
                            └── sink
                                ├── StringGenericRowKeyBuilderTest.scala
                                ├── StringSinkRecordKeyBuilderTest.scala
                                └── StringStructFieldsStringKeyBuilderTest.scala
/.github/workflows/build.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on: [push]
4 |
5 | jobs:
6 | build:
7 |
8 | runs-on: ubuntu-latest
9 |
10 | steps:
11 | - uses: actions/checkout@v2
12 | - name: Set up JDK 1.8
13 | uses: actions/setup-java@v1
14 | with:
15 | java-version: 1.8
16 | - name: Download gradle
17 | run: ./gradlew --version
18 | - name: Run tests
19 | run: ./gradlew test
20 | - name: Run compile
21 | run: ./gradlew compile
22 | - name: Run build
23 | run: ./gradlew build
24 |
25 |
--------------------------------------------------------------------------------
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
1 | name: Publish
2 |
3 | on:
4 | push:
5 | tags:
6 | - '*'
7 |
8 | jobs:
9 | build:
10 |
11 | runs-on: ubuntu-latest
12 |
13 | steps:
14 | - uses: actions/checkout@v2
15 | - name: Set up JDK 1.8
16 | uses: actions/setup-java@v1
17 | with:
18 | java-version: 1.8
19 | - shell: bash
20 | env:
21 | GRADLE_PROPERTIES: ${{ secrets.GRADLE_PROPERTIES }}
22 | SIGNING_GPG_KEY: ${{ secrets.SIGNING_GPG_KEY }}
23 | run: |
24 | base64 -d <<< "$SIGNING_GPG_KEY" > /tmp/secring.gpg
25 | echo "$GRADLE_PROPERTIES" > gradle.properties
26 | - name: Get the tag
27 | id: get_tag
28 | run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//}
29 | - name: Upload archive
30 | run: ./gradlew -Prelease -Pversion=${{ steps.get_tag.outputs.VERSION }} signArchives uploadArchives
31 | - name: Release archive
32 | run: ./gradlew -Prelease -Pversion=${{ steps.get_tag.outputs.VERSION }} closeAndReleaseRepository
33 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /.gradle
2 | /build
3 | /.idea
4 | # sbt specific
5 | # Scala-IDE specific
6 | /.settings/
7 | /.classpath
8 | /.project
9 | /out/
10 | /.vscode/
11 | /bin/
12 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | [Maven Central](http://search.maven.org/#search%7Cga%7C1%7Cg%3A%22com.datamountaineer%22%20AND%20a%3A%22kafka-connect-common%22)
4 | Kafka Connect Common is available on Maven Central; include it as a dependency in your connector.
5 |
6 |
7 | # Releases
8 |
9 |
10 | | Version | Confluent Version | Kafka | Kcql Version | Scala Version |
11 | | ------- | ----------------- | ----- | ------------ | ------------- |
12 | |2.0.5|5.4.0|2.4.0|2.8.7|2.12|
13 | |2.0.4|5.4.0|2.4.0|2.8.7|2.12|
14 | |2.0.3|5.4.0|2.4.0|2.8.6|2.12|
15 | |2.0.2|5.4.0|2.4.0|2.8.5|2.12|
16 | |2.0.1|5.4.0|2.4.0|2.8.4|2.12|
17 | |2.0.0|5.4.0|2.4.0|2.8.4|2.12|
18 | |1.1.9|5.0.0|1.1.0|2.8.4|2.11|
19 | |1.1.8|5.0.0|1.1.0|2.8.4|2.11|
20 | |1.1.5|5.0.0|1.1.0|2.8.2|2.11|
21 | |1.1.5|4.1.0|1.1.0|2.8.2|2.11|
22 | |1.1.4|4.1.0|1.1.0|2.8.2|2.11|
23 | |1.1.3|4.1.0|1.1.0|2.8|2.11|
24 | |1.1.2|4.1.0|1.1.0|2.7|2.11|
25 | |1.1.1|4.1.0|1.1.0|2.5.1|2.11|
26 | |1.1.0|4.1.0|1.1.0|2.5.1|2.11|
27 | |1.0.9|4.0.0|1.0.0|2.5.1|2.11|
28 | |1.0.8|4.0.0|1.0.0|2.5.1|2.11|
29 | |1.0.7|4.0.0|1.0.0|2.5.1|2.11|
30 | |1.0.6|4.0.0|1.0.0|2.5.1|2.11|
31 | |1.0.5|4.0.0|1.0.0|2.5.1|2.11|
32 | |1.0.4|4.0.0|1.0.0|2.5.1|2.11|
33 | |1.0.3|4.0.0|1.0.0|2.4|2.11|
34 | |1.0.2|4.0.0|1.0.0|2.4|2.11|
35 | |1.0.1|4.0.0|1.0.0|2.4|2.11|
36 | |1.0.0|4.0.0|1.0.0|2.4|2.11|
39 |
40 | ```xml
41 | <!-- Maven -->
42 | <dependency>
43 |     <groupId>com.datamountaineer</groupId>
44 |     <artifactId>kafka-connect-common</artifactId>
45 |     <version>LATEST</version>
46 | </dependency>
47 | ```
48 |
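If your connector is built with sbt rather than Maven, the equivalent coordinates would look like the following sketch; prefer pinning one of the versions from the table above instead of `LATEST`:

```scala
// build.sbt (sketch) – 2.0.5 is the most recent version in the release table
libraryDependencies += "com.datamountaineer" % "kafka-connect-common" % "2.0.5"
```
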
49 | # kafka-connect-common
50 | Common components used across the DataMountaineer Kafka Connect connectors.
51 |
52 | ## Packages
53 |
54 | ### Config
55 |
56 | #### SSLConfigContext
57 | Contains a class for building an SSLContext from the supplied trust store and key store.
58 |
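A minimal usage sketch, based on `SSLConfigContext.scala`; the paths and passwords below are placeholders:

```scala
import com.datamountaineer.streamreactor.connect.config.{SSLConfig, SSLConfigContext}
import javax.net.ssl.SSLContext

// Trust store only; supply a key store and useClientCert = true for client certificate authentication.
val sslConfig = SSLConfig(
  trustStorePath = "/tmp/truststore.jks",
  trustStorePass = "changeit",
  keyStorePath   = None,
  keyStorePass   = None
)
val sslContext: SSLContext = SSLConfigContext(sslConfig)
```
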
59 | ### Offsets
60 |
61 | The offset handler retrieves the stored offset map for each source partition from Kafka.
62 |
63 | ### Queues
64 |
65 | Helper methods to drain LinkedBlockingQueues.
66 |
67 | ### Sink
68 |
69 | Contains Writer and KeyBuilder classes.
70 |
71 | #### DbWriter
72 |
73 | Defines the contract for inserting a new row for a Connect sink record.
74 |
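A minimal sketch of a writer implementing this contract; the exact trait shape (a `write(records: Seq[SinkRecord])` method plus `close()`) is an assumption here rather than taken from `DbWriter.scala`:

```scala
import com.datamountaineer.streamreactor.connect.sink.DbWriter
import org.apache.kafka.connect.sink.SinkRecord

// Hypothetical writer: the write/close signatures are assumed, check DbWriter.scala for the authoritative trait.
class LoggingDbWriter extends DbWriter {
  override def write(records: Seq[SinkRecord]): Unit =
    records.foreach(r => println(s"inserting ${r.topic()}/${r.kafkaPartition()}@${r.kafkaOffset()}"))

  override def close(): Unit = ()
}
```
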
75 | #### KeyBuilder
76 |
77 | * Builds a new record key for a given Connect SinkRecord.
78 | * Builds a new key from the specified payload fields (a short sketch follows).
79 |
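A sketch of choosing a key builder, mirroring what `KcqlSettings.getRowKeyBuilders` does; the `build` method name on the resulting builder is an assumption:

```scala
import com.datamountaineer.streamreactor.connect.rowkeys.{StringGenericRowKeyBuilder, StringKeyBuilder, StringStructFieldsStringKeyBuilder}

// With primary keys, build the key from the named struct fields; otherwise fall back to topic|partition|offset.
val primaryKeys = Seq("id", "created_at")   // example field names
val keyBuilder: StringKeyBuilder =
  if (primaryKeys.nonEmpty) StringStructFieldsStringKeyBuilder(primaryKeys)
  else new StringGenericRowKeyBuilder()
// keyBuilder.build(sinkRecord) would then yield the key for a SinkRecord (method name assumed).
```
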
80 | ### Schemas
81 |
82 | * RestService to integrate with the Schema Registry
83 |
84 | #### PayloadFields
85 | Works out the fields and their mappings to be used when inserting a new row.
86 |
87 | #### ConverterUtil
88 |
89 | Converts source and sink records to and from JSON and Avro.
90 |
91 | #### StructFieldsExtractor
92 |
93 | Extracts fields from a SinkRecord Struct based on a provided set of columns.
94 |
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | version=2.0.5
2 |
3 | ossrhUsername=me
4 | ossrhPassword=you
5 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lensesio/kafka-connect-common/a2c540fea19c1409a7aec6dd88dac1433ad85dee/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionBase=GRADLE_USER_HOME
2 | distributionPath=wrapper/dists
3 | distributionUrl=https\://services.gradle.org/distributions/gradle-6.3-bin.zip
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | #
4 | # Copyright 2015 the original author or authors.
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | # https://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | #
18 |
19 | ##############################################################################
20 | ##
21 | ## Gradle start up script for UN*X
22 | ##
23 | ##############################################################################
24 |
25 | # Attempt to set APP_HOME
26 | # Resolve links: $0 may be a link
27 | PRG="$0"
28 | # Need this for relative symlinks.
29 | while [ -h "$PRG" ] ; do
30 | ls=`ls -ld "$PRG"`
31 | link=`expr "$ls" : '.*-> \(.*\)$'`
32 | if expr "$link" : '/.*' > /dev/null; then
33 | PRG="$link"
34 | else
35 | PRG=`dirname "$PRG"`"/$link"
36 | fi
37 | done
38 | SAVED="`pwd`"
39 | cd "`dirname \"$PRG\"`/" >/dev/null
40 | APP_HOME="`pwd -P`"
41 | cd "$SAVED" >/dev/null
42 |
43 | APP_NAME="Gradle"
44 | APP_BASE_NAME=`basename "$0"`
45 |
46 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
47 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
48 |
49 | # Use the maximum available, or set MAX_FD != -1 to use that value.
50 | MAX_FD="maximum"
51 |
52 | warn () {
53 | echo "$*"
54 | }
55 |
56 | die () {
57 | echo
58 | echo "$*"
59 | echo
60 | exit 1
61 | }
62 |
63 | # OS specific support (must be 'true' or 'false').
64 | cygwin=false
65 | msys=false
66 | darwin=false
67 | nonstop=false
68 | case "`uname`" in
69 | CYGWIN* )
70 | cygwin=true
71 | ;;
72 | Darwin* )
73 | darwin=true
74 | ;;
75 | MINGW* )
76 | msys=true
77 | ;;
78 | NONSTOP* )
79 | nonstop=true
80 | ;;
81 | esac
82 |
83 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
84 |
85 | # Determine the Java command to use to start the JVM.
86 | if [ -n "$JAVA_HOME" ] ; then
87 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
88 | # IBM's JDK on AIX uses strange locations for the executables
89 | JAVACMD="$JAVA_HOME/jre/sh/java"
90 | else
91 | JAVACMD="$JAVA_HOME/bin/java"
92 | fi
93 | if [ ! -x "$JAVACMD" ] ; then
94 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
95 |
96 | Please set the JAVA_HOME variable in your environment to match the
97 | location of your Java installation."
98 | fi
99 | else
100 | JAVACMD="java"
101 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
102 |
103 | Please set the JAVA_HOME variable in your environment to match the
104 | location of your Java installation."
105 | fi
106 |
107 | # Increase the maximum file descriptors if we can.
108 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
109 | MAX_FD_LIMIT=`ulimit -H -n`
110 | if [ $? -eq 0 ] ; then
111 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
112 | MAX_FD="$MAX_FD_LIMIT"
113 | fi
114 | ulimit -n $MAX_FD
115 | if [ $? -ne 0 ] ; then
116 | warn "Could not set maximum file descriptor limit: $MAX_FD"
117 | fi
118 | else
119 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
120 | fi
121 | fi
122 |
123 | # For Darwin, add options to specify how the application appears in the dock
124 | if $darwin; then
125 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
126 | fi
127 |
128 | # For Cygwin or MSYS, switch paths to Windows format before running java
129 | if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
130 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
131 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
132 | JAVACMD=`cygpath --unix "$JAVACMD"`
133 |
134 | # We build the pattern for arguments to be converted via cygpath
135 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
136 | SEP=""
137 | for dir in $ROOTDIRSRAW ; do
138 | ROOTDIRS="$ROOTDIRS$SEP$dir"
139 | SEP="|"
140 | done
141 | OURCYGPATTERN="(^($ROOTDIRS))"
142 | # Add a user-defined pattern to the cygpath arguments
143 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
144 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
145 | fi
146 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
147 | i=0
148 | for arg in "$@" ; do
149 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
150 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
151 |
152 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
153 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
154 | else
155 | eval `echo args$i`="\"$arg\""
156 | fi
157 | i=`expr $i + 1`
158 | done
159 | case $i in
160 | 0) set -- ;;
161 | 1) set -- "$args0" ;;
162 | 2) set -- "$args0" "$args1" ;;
163 | 3) set -- "$args0" "$args1" "$args2" ;;
164 | 4) set -- "$args0" "$args1" "$args2" "$args3" ;;
165 | 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
166 | 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
167 | 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
168 | 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
169 | 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
170 | esac
171 | fi
172 |
173 | # Escape application args
174 | save () {
175 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
176 | echo " "
177 | }
178 | APP_ARGS=`save "$@"`
179 |
180 | # Collect all arguments for the java command, following the shell quoting and substitution rules
181 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
182 |
183 | exec "$JAVACMD" "$@"
184 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @rem
2 | @rem Copyright 2015 the original author or authors.
3 | @rem
4 | @rem Licensed under the Apache License, Version 2.0 (the "License");
5 | @rem you may not use this file except in compliance with the License.
6 | @rem You may obtain a copy of the License at
7 | @rem
8 | @rem https://www.apache.org/licenses/LICENSE-2.0
9 | @rem
10 | @rem Unless required by applicable law or agreed to in writing, software
11 | @rem distributed under the License is distributed on an "AS IS" BASIS,
12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | @rem See the License for the specific language governing permissions and
14 | @rem limitations under the License.
15 | @rem
16 |
17 | @if "%DEBUG%" == "" @echo off
18 | @rem ##########################################################################
19 | @rem
20 | @rem Gradle startup script for Windows
21 | @rem
22 | @rem ##########################################################################
23 |
24 | @rem Set local scope for the variables with windows NT shell
25 | if "%OS%"=="Windows_NT" setlocal
26 |
27 | set DIRNAME=%~dp0
28 | if "%DIRNAME%" == "" set DIRNAME=.
29 | set APP_BASE_NAME=%~n0
30 | set APP_HOME=%DIRNAME%
31 |
32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter.
33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
34 |
35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
37 |
38 | @rem Find java.exe
39 | if defined JAVA_HOME goto findJavaFromJavaHome
40 |
41 | set JAVA_EXE=java.exe
42 | %JAVA_EXE% -version >NUL 2>&1
43 | if "%ERRORLEVEL%" == "0" goto init
44 |
45 | echo.
46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
47 | echo.
48 | echo Please set the JAVA_HOME variable in your environment to match the
49 | echo location of your Java installation.
50 |
51 | goto fail
52 |
53 | :findJavaFromJavaHome
54 | set JAVA_HOME=%JAVA_HOME:"=%
55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
56 |
57 | if exist "%JAVA_EXE%" goto init
58 |
59 | echo.
60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
61 | echo.
62 | echo Please set the JAVA_HOME variable in your environment to match the
63 | echo location of your Java installation.
64 |
65 | goto fail
66 |
67 | :init
68 | @rem Get command-line arguments, handling Windows variants
69 |
70 | if not "%OS%" == "Windows_NT" goto win9xME_args
71 |
72 | :win9xME_args
73 | @rem Slurp the command line arguments.
74 | set CMD_LINE_ARGS=
75 | set _SKIP=2
76 |
77 | :win9xME_args_slurp
78 | if "x%~1" == "x" goto execute
79 |
80 | set CMD_LINE_ARGS=%*
81 |
82 | :execute
83 | @rem Setup the command line
84 |
85 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
86 |
87 | @rem Execute Gradle
88 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
89 |
90 | :end
91 | @rem End local scope for the variables with windows NT shell
92 | if "%ERRORLEVEL%"=="0" goto mainEnd
93 |
94 | :fail
95 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
96 | rem the _cmd.exe /c_ return code!
97 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
98 | exit /b 1
99 |
100 | :mainEnd
101 | if "%OS%"=="Windows_NT" endlocal
102 |
103 | :omega
104 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/concurrent/ExecutorExtension.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datamountaineer.streamreactor.connect.concurrent
17 |
18 | import java.util.concurrent.Executor
19 |
20 | import scala.concurrent.{Future, Promise}
21 |
22 | object ExecutorExtension {
23 |
24 | implicit class RunnableWrapper(val executor: Executor) extends AnyVal {
25 | def submit[T](thunk: => T): Future[T] = {
26 | val promise = Promise[T]()
27 | executor.execute(new Runnable {
28 | override def run(): Unit = {
29 | try {
30 | val t = thunk
31 | promise.success(t)
32 | } catch {
33 | case t: Throwable => promise.failure(t)
34 | }
35 | }
36 | })
37 | promise.future
38 | }
39 | }
40 |
41 | }
42 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/concurrent/FutureAwaitWithFailFastFn.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.concurrent
18 |
19 | import java.util.concurrent.{ExecutorService, TimeUnit}
20 |
21 | import com.typesafe.scalalogging.StrictLogging
22 |
23 | import scala.concurrent.ExecutionContext.Implicits.global
24 | import scala.concurrent.duration._
25 | import scala.concurrent.{Await, Future, Promise}
26 | import scala.util.Failure
27 |
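/**
  * Awaits a collection of futures running on the supplied ExecutorService and fails fast:
  * the first failed future triggers an immediate shutdown of the pool and its error is rethrown.
  *
  * Minimal usage sketch (`items` and `process` are placeholders, not part of this library):
  * {{{
  *   val pool    = java.util.concurrent.Executors.newFixedThreadPool(4)
  *   val futures = items.map(i => Future(process(i)))
  *   val results = FutureAwaitWithFailFastFn(pool, futures, 5.minutes)
  * }}}
  */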
28 | object FutureAwaitWithFailFastFn extends StrictLogging {
29 |
30 | def apply(executorService: ExecutorService, futures: Seq[Future[Unit]], duration: Duration): Unit = {
31 | //make sure we ask the executor to shutdown to ensure the process exits
32 | executorService.shutdown()
33 |
34 | val promise = Promise[Boolean]()
35 |
36 | //stop on the first failure
37 | futures.foreach { f =>
38 | f.failed.foreach { case t =>
39 | if (promise.tryFailure(t)) {
40 | executorService.shutdownNow()
41 | }
42 | }
43 | }
44 |
45 | val fut = Future.sequence(futures)
46 | fut.foreach { case t =>
47 | if (promise.trySuccess(true)) {
48 | val failed = executorService.shutdownNow()
49 | if (failed.size() > 0) {
50 | logger.error(s"${failed.size()} tasks have failed.")
51 | }
52 | }
53 | }
54 |
55 | Await.ready(promise.future, duration).value match {
56 | case Some(Failure(t)) =>
57 | executorService.awaitTermination(1, TimeUnit.MINUTES)
58 | //throw the underlying error
59 | throw t
60 |
61 | case _ =>
62 | executorService.awaitTermination(1, TimeUnit.MINUTES)
63 | }
64 | }
65 |
66 | def apply[T](executorService: ExecutorService, futures: Seq[Future[T]], duration: Duration = 1.hours): Seq[T] = {
67 | //make sure we ask the executor to shutdown to ensure the process exits
68 | executorService.shutdown()
69 |
70 | val promise = Promise[Boolean]()
71 |
72 | //stop on the first failure
73 | futures.foreach { f =>
74 | f.failed.foreach { case t =>
75 | if (promise.tryFailure(t)) {
76 | executorService.shutdownNow()
77 | }
78 | }
79 | }
80 |
81 | val fut = Future.sequence(futures)
82 | fut.foreach { case t =>
83 | if (promise.trySuccess(true)) {
84 | val failed = executorService.shutdownNow()
85 | if (failed.size() > 0) {
86 | logger.error(s"${failed.size()} tasks have failed.")
87 | }
88 | }
89 | }
90 |
91 | Await.ready(promise.future, duration).value match {
92 | case Some(Failure(t)) =>
93 | executorService.awaitTermination(1, TimeUnit.MINUTES)
94 | //throw the underlying error
95 | throw t
96 |
97 | case _ =>
98 | executorService.awaitTermination(1, TimeUnit.MINUTES)
99 | //return the result from each of the futures
100 | Await.result(Future.sequence(futures), 1.minute)
101 | }
102 | }
103 | }
104 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/config/Helpers.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config
18 |
19 | import com.datamountaineer.kcql.Kcql
20 | import com.typesafe.scalalogging.StrictLogging
21 | import org.apache.kafka.common.config.ConfigException
22 |
23 | /**
24 | * Created by andrew@datamountaineer.com on 13/05/16.
25 | * kafka-connect-common
26 | */
27 |
28 | object Helpers extends StrictLogging {
29 |
30 | /**
31 | * Build a mapping of table to topic
32 | * filtering on the assigned tables.
33 | *
34 | * @param input The raw input string to parse, e.g. table1:topic1,table2:topic2.
35 | * @param filterTable The tables to filter for.
36 | * @return a Map of table->topic.
37 | * */
38 | def buildRouteMaps(input: String, filterTable: List[String]) : Map[String, String] = {
39 | tableTopicParser(input).filter({ case (k, v) => filterTable.contains(k)})
40 | }
41 |
42 |
43 | // Parses primary-key definitions of the form {table1:f1,f2},{table2:f3,f4} into a Map of table -> key fields
44 | def pKParser(input : String) : Map[String, List[String]] = {
45 | val mappings = input.split("\\}")
46 | .toList
47 | .map(s => s.replace(",{", "").replace("{", "").replace("}", "").trim())
48 |
49 | mappings.map(
50 | m => {
51 | val colon = m.indexOf(":")
52 | if (colon >= 0) {
53 | val topic = m.substring(0, colon)
54 | val fields = m.substring(colon + 1, m.length).split(",").toList
55 | (topic, fields)
56 | } else {
57 | throw new ConfigException(s"Invalid format for PKs. Received $input. Format should be {topic:f1,2}," +
58 | s"{topic2:f3,f3}....")
59 | }
60 | }
61 | ).toMap
62 | }
63 |
64 | /**
65 | * Break a comma and colon separated string into a map of table to topic or topic to table
66 | *
67 | * If no value is found after the delimiter, the value before it is used.
68 | *
69 | * @param input The input string to parse.
70 | * @return a Map of table->topic or topic->table.
71 | * */
72 | def splitter(input: String, delimiter: String) : Map[String, String] = {
73 | input.split(",")
74 | .toList
75 | .map(c => c.split(delimiter))
76 | .map(a => {if (a.length == 1) (a(0), a(0)) else (a(0), a(1)) }).toMap
77 | }
78 |
79 | /**
80 | * Break a comma and colon separated string into a map of table to topic or topic to table
81 | *
82 | * If no value is found after the colon, the value before it is used.
83 | *
84 | * @param input The input string to parse.
85 | * @return a Map of table->topic or topic->table.
86 | * */
87 | def tableTopicParser(input: String) : Map[String, String] = {
88 | input.split(",")
89 | .toList
90 | .map(c => c.split(":"))
91 | .map(a => {if (a.length == 1) (a(0), a(0)) else (a(0), a(1)) }).toMap
92 | }
93 |
94 |
95 | def checkInputTopics(kcqlConstant: String, props: Map[String, String]) = {
96 | val topics = props.get("topics").get.split(",").map(t => t.trim).toSet
97 | val raw = props.get(kcqlConstant).get
98 | if (raw.isEmpty) {
99 | throw new ConfigException(s"Missing $kcqlConstant")
100 | }
101 | val kcql = raw.split(";").map(r => Kcql.parse(r)).toSet
102 | val sources = kcql.map(k => k.getSource)
103 | val res = topics.subsetOf(sources)
104 |
105 | if (!res) {
106 | val missing = topics.diff(sources)
107 | throw new ConfigException(s"Mandatory `topics` configuration contains topics not set in $kcqlConstant: ${missing}, kcql contains $sources")
108 | }
109 |
110 | val res1 = sources.subsetOf(topics)
111 |
112 | if (!res1) {
113 | val missing = sources.diff(topics)
114 | throw new ConfigException(s"$kcqlConstant configuration contains topics not set in the mandatory `topics` configuration: ${missing}, kcql contains $sources")
115 | }
116 |
117 | true
118 | }
119 | }
120 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/config/Routes.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config
18 |
19 | /**
20 | * Created by andrew@datamountaineer.com on 19/05/16.
21 | * kafka-connect-common
22 | */
23 | case class RouteMapping(source: String,
24 | target: String,
25 | allFields : Boolean,
26 | fieldMappings: List[Field] = List.empty[Field])
27 |
28 | case class Field(name: String, target: String, isPrimaryKey: Boolean = false)
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/config/SSLConfigContext.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config
18 |
19 | import java.io.FileInputStream
20 | import java.security.{KeyStore, SecureRandom}
21 | import javax.net.ssl.{KeyManager, KeyManagerFactory, SSLContext, TrustManagerFactory}
22 |
23 | /**
24 | * Created by andrew@datamountaineer.com on 14/04/16.
25 | * stream-reactor
26 | */
27 | object SSLConfigContext {
28 | def apply(config: SSLConfig) = {
29 | getSSLContext(config)
30 | }
31 |
32 | /**
33 | * Get an SSLContext for a given set of credentials
34 | *
35 | * @param config An SSLConfig containing key and truststore credentials
36 | * @return a SSLContext
37 | **/
38 | def getSSLContext(config: SSLConfig) = {
39 | val useClientCertAuth = config.useClientCert
40 |
41 | //is client certification authentication set
42 | val keyManagers: Array[KeyManager] = useClientCertAuth match {
43 | case true => getKeyManagers(config)
44 | case false => Array[KeyManager]()
45 | }
46 |
47 | val ctx: SSLContext = SSLContext.getInstance("SSL")
48 | val trustManagers = getTrustManagers(config)
49 | ctx.init(keyManagers, trustManagers, new SecureRandom())
50 | ctx
51 | }
52 |
53 | /**
54 | * Get an array of Trust Managers
55 | *
56 | * @param config An SSLConfig containing key and truststore credentials
57 | * @return An Array of TrustManagers
58 | **/
59 | def getTrustManagers(config: SSLConfig) = {
60 | val tsf = new FileInputStream(config.trustStorePath)
61 | val ts = KeyStore.getInstance(config.trustStoreType)
62 | ts.load(tsf, config.trustStorePass.toCharArray)
63 | val tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm)
64 | tmf.init(ts)
65 | tmf.getTrustManagers
66 | }
67 |
68 | /**
69 | * Get an array of Key Managers
70 | *
71 | * @param config An SSLConfig containing key and truststore credentials
72 | * @return An Array of KeyManagers
73 | **/
74 | def getKeyManagers(config: SSLConfig): Array[KeyManager] = {
75 | require(config.keyStorePath.nonEmpty, "Key store path is not set!")
76 | require(config.keyStorePass.nonEmpty, "Key store password is not set!")
77 | val ksf = new FileInputStream(config.keyStorePath.get)
78 | val ks = KeyStore.getInstance(config.keyStoreType)
79 | ks.load(ksf, config.keyStorePass.get.toCharArray)
80 | val kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm)
81 | kmf.init(ks, config.keyStorePass.get.toCharArray)
82 | kmf.getKeyManagers
83 | }
84 |
85 | }
86 |
87 | /**
88 | * Class for holding key and truststore settings
89 | **/
90 | case class SSLConfig(trustStorePath: String,
91 | trustStorePass: String,
92 | keyStorePath: Option[String],
93 | keyStorePass: Option[String],
94 | useClientCert: Boolean = false,
95 | keyStoreType: String = "JKS",
96 | trustStoreType: String = "JKS")
97 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/config/base/const/TraitConfigConst.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config.base.const
18 |
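/**
  * Property-key suffixes shared by the settings traits under config/base/traits.
  * A connector's full key is its own prefix plus one of these suffixes, e.g. a
  * (hypothetical) prefix "connect.mysink" combined with KCQL_PROP_SUFFIX yields
  * the property "connect.mysink.kcql".
  */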
19 | object TraitConfigConst {
20 | val KCQL_PROP_SUFFIX = "kcql"
21 | val ERROR_POLICY_PROP_SUFFIX = "error.policy"
22 | val RETRY_INTERVAL_PROP_SUFFIX = "retry.interval"
23 | val MAX_RETRIES_PROP_SUFFIX = "max.retries"
24 | val BATCH_SIZE_PROP_SUFFIX = "batch.size"
25 | val DATABASE_PROP_SUFFIX = "db"
26 | val THREAD_POLL_PROP_SUFFIX = "threadpool.size"
27 | val ALLOW_PARALLEL_WRITE_PROP_SUFFIX = "parallel.write"
28 | val CONSISTENCY_LEVEL_PROP_SUFFIX = "consistency.level"
29 | val USERNAME_SUFFIX = "username"
30 | val PASSWORD_SUFFIX = "password"
31 | val AUTH_MECH_SUFFIX = "auth.mechanism"
32 | val TRUSTSTORE_PASS_SUFFIX = "truststore.pass"
33 | val TRUSTSTORE_PATH_SUFFIX = "truststore.path"
34 | val KEYSTORE_PASS_SUFFIX = "keystore.pass"
35 | val KEYSTORE_PATH_SUFFIX = "keystore.path"
36 | val CERTIFICATES_SUFFIX = "certs"
37 | val CERTIFICATE_KEY_CHAIN_SUFFIX = "cert.chain.key"
38 | val CERT_KEY="cert.key"
39 |
40 | val PROGRESS_ENABLED_CONST = "connect.progress.enabled"
41 | val CONNECT_ERROR_POLICY_CONST = "connect.error.policy"
42 | val URI_SUFFIX = "uri"
43 | val URL_SUFFIX ="url"
44 | val CLUSTER_NAME_SUFFIX = "cluster.name"
45 | val CONNECTION_HOST_SUFFIX = "host"
46 | val CONNECTION_HOSTS_SUFFIX = "hosts"
47 | val CONNECTION_PORT_SUFFIX = "port"
48 | val CONNECTION_PORTS_SUFFIX = "ports"
49 | val WRITE_TIMEOUT_SUFFIX = "write.timeout"
50 | val SCHEMA_REGISTRY_SUFFIX = "schema.registry.url"
51 |
52 | }
53 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/config/base/traits/AllowParallelizationSettings.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config.base.traits
18 |
19 | import com.datamountaineer.streamreactor.connect.config.base.const.TraitConfigConst.ALLOW_PARALLEL_WRITE_PROP_SUFFIX
20 |
21 | trait AllowParallelizationSettings extends BaseSettings {
22 | val allowParallelConstant: String = s"$connectorPrefix.$ALLOW_PARALLEL_WRITE_PROP_SUFFIX"
23 |
24 | def getAllowParallel: java.lang.Boolean = getBoolean(allowParallelConstant)
25 | }
26 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/config/base/traits/BaseConfig.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config.base.traits
18 |
19 | import java.util
20 |
21 | import org.apache.kafka.common.config.{AbstractConfig, ConfigDef}
22 |
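/**
  * Base class for connector configurations: feeds the ConfigDef and the user-supplied
  * properties into Kafka's AbstractConfig and exposes the connector prefix consumed by
  * the settings traits. A sketch of a hypothetical concrete config:
  * {{{
  *   class MySinkConfig(props: util.Map[String, String])
  *     extends BaseConfig("connect.mysink", MySinkConfig.configDef, props)
  *       with KcqlSettings
  *       with ErrorPolicySettings
  * }}}
  */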
23 | abstract class BaseConfig(connectorPrefixStr: String, confDef: ConfigDef, props: util.Map[String, String])
24 | extends AbstractConfig(confDef, props) {
25 | val connectorPrefix: String = connectorPrefixStr
26 | }
27 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/config/base/traits/BaseSettings.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config.base.traits
18 |
19 | import java.util
20 |
21 | import org.apache.kafka.common.config.types.Password
22 |
23 | trait BaseSettings {
24 | def connectorPrefix: String
25 |
26 | def getString(key: String): String
27 |
28 | def getInt(key: String): Integer
29 |
30 | def getBoolean(key: String): java.lang.Boolean
31 |
32 | def getPassword(key: String): Password
33 |
34 | def getList(key: String): util.List[String]
35 | }
36 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/config/base/traits/BatchSizeSettings.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config.base.traits
18 |
19 | import com.datamountaineer.streamreactor.connect.config.base.const.TraitConfigConst.BATCH_SIZE_PROP_SUFFIX
20 |
21 | trait BatchSizeSettings extends BaseSettings {
22 | val batchSizeConstant: String = s"$connectorPrefix.$BATCH_SIZE_PROP_SUFFIX"
23 |
24 | def getBatchSize: Int = getInt(batchSizeConstant)
25 | }
26 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/config/base/traits/ConnectionSettings.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config.base.traits
18 |
19 | /**
20 | * Created by andrew@datamountaineer.com on 31/07/2017.
21 | * stream-reactor
22 | */
23 |
24 | import com.datamountaineer.streamreactor.connect.config.base.const.TraitConfigConst._
25 | import org.apache.kafka.common.config.ConfigException
26 |
27 | trait ConnectionSettings extends BaseSettings {
28 | val uriConst = s"$connectorPrefix.$URI_SUFFIX"
29 | val schemaRegistryConst = s"$connectorPrefix.$SCHEMA_REGISTRY_SUFFIX"
30 | val urlConst = s"$connectorPrefix.$URL_SUFFIX"
31 | val hostConst = s"$connectorPrefix.$CONNECTION_HOST_SUFFIX"
32 | val hostsConst = s"$connectorPrefix.$CONNECTION_HOSTS_SUFFIX"
33 | val portConst = s"$connectorPrefix.$CONNECTION_PORT_SUFFIX"
34 | val portsConst = s"$connectorPrefix.$CONNECTION_PORTS_SUFFIX"
35 |
36 | def getPort = getInt(portConst)
37 | def getUri = getString(uriConst)
38 | def getSchemaRegistryUrl = getString(schemaRegistryConst)
39 |
40 | def getUrl : String = {
41 | val url = getString(urlConst)
42 | if (url == null || url.trim.length == 0) {
43 | throw new ConfigException(s"$urlConst has not been set")
44 | }
45 | url
46 | }
47 |
48 | def getHosts : String = {
49 | val connection = getString(hostsConst)
50 |
51 | if (connection == null || connection.trim.isEmpty) {
52 | throw new ConfigException(s"$hostsConst is not provided!")
53 | }
54 | connection
55 | }
56 |
57 | def getHost : String = {
58 | val connection = getString(hostConst)
59 |
60 | if (connection == null || connection.trim.isEmpty) {
61 | throw new ConfigException(s"$hostConst is not provided!")
62 | }
63 | connection
64 | }
65 | }
66 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/config/base/traits/ConsistencyLevelSettings.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config.base.traits
18 |
19 | import com.datamountaineer.streamreactor.connect.config.base.const.TraitConfigConst._
20 | import org.apache.kafka.common.config.ConfigException
21 |
22 | import scala.reflect.ClassTag
23 | import scala.util.{Failure, Success, Try}
24 |
25 | trait ConsistencyLevelSettings[T <: Enum[T]] extends BaseSettings {
26 | def consistencyLevelConstant: String = s"$connectorPrefix.$CONSISTENCY_LEVEL_PROP_SUFFIX"
27 |
28 | def getConsistencyLevel(implicit ct: ClassTag[T]): Option[T] = {
29 |
30 | val enum: Class[T] = ct.runtimeClass.asInstanceOf[Class[T]]
31 |
32 | val consistencyLevel = getString(consistencyLevelConstant) match {
33 | case "" => None
34 | case other =>
35 | Try(Enum.valueOf[T](enum, other)) match {
36 | case Failure(_) => throw new ConfigException(s"'$other' is not a valid $consistencyLevelConstant. " +
37 | s"Available values are:${enum.getEnumConstants.map(_.toString).mkString(",")}")
38 | case Success(cl) => Some(cl)
39 | }
40 | }
41 |
42 | consistencyLevel
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/config/base/traits/DatabaseSettings.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config.base.traits
18 |
19 | import com.datamountaineer.streamreactor.connect.config.base.const.TraitConfigConst.DATABASE_PROP_SUFFIX
20 |
21 | trait DatabaseSettings extends BaseSettings {
22 | val databaseConstant: String = s"$connectorPrefix.$DATABASE_PROP_SUFFIX"
23 |
24 | def getDatabase: String = getString(databaseConstant)
25 | }
26 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/config/base/traits/ErrorPolicySettings.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config.base.traits
18 |
19 | import com.datamountaineer.streamreactor.connect.errors.{ErrorPolicy, ErrorPolicyEnum}
20 | import com.datamountaineer.streamreactor.connect.config.base.const.TraitConfigConst.ERROR_POLICY_PROP_SUFFIX
21 |
22 | trait ErrorPolicySettings extends BaseSettings {
23 | def errorPolicyConst = s"$connectorPrefix.$ERROR_POLICY_PROP_SUFFIX"
24 |
25 | def getErrorPolicy: ErrorPolicy = ErrorPolicy(ErrorPolicyEnum.withName(getString(errorPolicyConst).toUpperCase))
26 | }
27 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/config/base/traits/KcqlSettings.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config.base.traits
18 |
19 |
20 | import com.datamountaineer.kcql.{Field, FormatType, Kcql, WriteModeEnum}
21 | import com.datamountaineer.streamreactor.connect.rowkeys.{StringGenericRowKeyBuilder, StringKeyBuilder, StringStructFieldsStringKeyBuilder}
22 | import com.datamountaineer.streamreactor.connect.config.base.const.TraitConfigConst.KCQL_PROP_SUFFIX
23 | import org.apache.kafka.common.config.ConfigException
24 | import scala.collection.JavaConverters._
25 | import scala.collection.immutable.ListSet
26 |
27 | trait KcqlSettings extends BaseSettings {
28 | val kcqlConstant: String = s"$connectorPrefix.$KCQL_PROP_SUFFIX"
29 |
30 | def getKCQL: Set[Kcql] = {
31 | val raw = getString(kcqlConstant)
32 | if (raw.isEmpty) {
33 | throw new ConfigException(s"Missing $kcqlConstant")
34 | }
35 | raw.split(";").map(r => Kcql.parse(r)).toSet
36 | }
37 |
38 | def getKCQLRaw: Array[String] = {
39 | val raw = getString(kcqlConstant)
40 | if (raw.isEmpty) {
41 | throw new ConfigException(s"Missing $kcqlConstant")
42 | }
43 | raw.split(";")
44 | }
45 |
46 | def getFieldsMap(kcql: Set[Kcql] = getKCQL): Map[String, Map[String, String]] = {
47 | kcql.toList.map(rm =>
48 | (rm.getSource, rm.getFields.asScala.map(fa => (fa.toString, fa.getAlias)).toMap)
49 | ).toMap
50 | }
51 |
52 | def getFields(kcql: Set[Kcql] = getKCQL): Map[String, Seq[Field]] = {
53 | kcql.toList.map(rm => (rm.getSource, rm.getFields.asScala)).toMap
54 | }
55 |
56 | def getIgnoreFields(kcql: Set[Kcql] = getKCQL): Map[String, Seq[Field]] = {
57 | kcql.toList.map(rm => (rm.getSource, rm.getIgnoredFields.asScala)).toMap
58 | }
59 |
60 | def getFieldsAliases(kcql: Set[Kcql] = getKCQL): List[Map[String, String]] = {
61 | kcql.toList.map(rm => rm.getFields.asScala.map(fa => (fa.getName, fa.getAlias)).toMap)
62 | }
63 |
64 | def getIgnoreFieldsMap(kcql: Set[Kcql] = getKCQL): Map[String, Set[String]] = {
65 | kcql.toList.map(r => (r.getSource, r.getIgnoredFields.asScala.map(f => f.getName).toSet)).toMap
66 | }
67 |
68 | def getPrimaryKeys(kcql: Set[Kcql] = getKCQL): Map[String, Set[String]] = {
69 | kcql.toList.map{r =>
70 | val names: Seq[String] = r.getPrimaryKeys.asScala.map(f => f.getName)
71 | val set: Set[String] = ListSet(names.reverse:_*)
72 | (r.getSource, set)
73 | }.toMap
74 | }
75 |
76 | def getTableTopic(kcql: Set[Kcql] = getKCQL): Map[String, String] = {
77 | kcql.toList.map(r => (r.getSource, r.getTarget)).toMap
78 | }
79 |
80 | def getFormat(formatType: FormatType => FormatType, kcql: Set[Kcql] = getKCQL): Map[String, FormatType] = {
81 | kcql.toList.map(r => (r.getSource, formatType(r.getFormatType))).toMap
82 | }
83 |
84 | def getTTL(kcql: Set[Kcql] = getKCQL): Map[String, Long] = {
85 | kcql.toList.map(r => (r.getSource, r.getTTL)).toMap
86 | }
87 |
88 | // def getIncrementalMode(kcql: Set[Kcql] = getKCQL): Map[String, String] = {
89 | // kcql.toList.map(r => (r.getSource, r.getIncrementalMode)).toMap
90 | // }
91 |
92 | def getBatchSize(kcql: Set[Kcql] = getKCQL, defaultBatchSize: Int): Map[String, Int] = {
93 | kcql.toList.map(r => (r.getSource, Option(r.getBatchSize).getOrElse(defaultBatchSize))).toMap
94 | }
95 |
96 | def getBucketSize(kcql: Set[Kcql] = getKCQL): Map[String, Int] = {
97 | kcql.toList.map(r => (r.getSource, r.getBucketing.getBucketsNumber)).toMap
98 | }
99 |
100 | def getWriteMode(kcql: Set[Kcql] = getKCQL) : Map[String, WriteModeEnum] = {
101 | kcql.toList.map(r => (r.getSource, r.getWriteMode)).toMap
102 | }
103 |
104 | def getAutoCreate(kcql: Set[Kcql] = getKCQL) : Map[String, Boolean] = {
105 | kcql.toList.map(r => (r.getSource, r.isAutoCreate)).toMap
106 | }
107 |
108 | def getAutoEvolve(kcql: Set[Kcql] = getKCQL) : Map[String, Boolean] = {
109 | kcql.toList.map(r => (r.getSource, r.isAutoEvolve)).toMap
110 | }
111 |
112 | /** Get all the upsert keys
113 | *
114 | * @param kcql The parsed KCQL statements; defaults to getKCQL.
115 | * @param preserveFullKeys (default false) If true, keys that
116 | * have parents will return the full
117 | * key (ie. "A.B.C" rather than just
118 | * "C")
119 | * @return map of topic to set of keys
120 | */
121 | def getUpsertKeys(
122 | kcql: Set[Kcql] = getKCQL,
123 | preserveFullKeys: Boolean = false):
124 | Map[String, Set[String]] = {
125 |
126 | kcql
127 | .filter(c => c.getWriteMode == WriteModeEnum.UPSERT)
128 | .map { r =>
129 | val keys: Set[String] = ListSet(r.getPrimaryKeys.asScala.map(key =>
130 | preserveFullKeys match {
131 | case false => key.getName
132 | case true => key.toString
133 | }
134 | ).reverse:_*)
135 | if (keys.isEmpty) throw new ConfigException(s"${r.getTarget} is set up with upsert, you need primary keys setup")
136 | (r.getSource, keys)
137 | }.toMap
138 | }
139 |
140 | def getUpsertKey(kcql: Set[Kcql] = getKCQL): Map[String, String] = {
141 | kcql
142 | .filter(c => c.getWriteMode == WriteModeEnum.UPSERT)
143 | .map { r =>
144 | val keyList: List[Field] = r.getPrimaryKeys().asScala.toList
145 | val keys: Set[Field] = ListSet( keyList.reverse:_* )
146 | if (keys.isEmpty) throw new ConfigException(s"${r.getTarget} is set up with upsert, you need primary keys setup")
147 | (r.getSource, keys.head.getName)
148 | }.toMap
149 | }
150 |
151 | def getRowKeyBuilders(kcql: Set[Kcql] = getKCQL): List[StringKeyBuilder] = {
152 | kcql.toList.map { k =>
153 | val keys = k.getPrimaryKeys.asScala.map(k => k.getName)
154 | // No PK => 'topic|par|offset' builder else generic-builder
155 | if (keys.nonEmpty) StringStructFieldsStringKeyBuilder(keys) else new StringGenericRowKeyBuilder()
156 | }
157 | }
158 |
159 | def getPrimaryKeyCols(kcql: Set[Kcql] = getKCQL): Map[String, Set[String]] = {
160 | kcql.toList.map(k =>
161 | (k.getSource, ListSet(k.getPrimaryKeys.asScala.map(p => p.getName).reverse:_*).toSet)
162 | ).toMap
163 | }
164 |
165 | def getIncrementalMode(routes: Set[Kcql]): Map[String, String] = {
166 | routes.toList.map(r => (r.getSource, r.getIncrementalMode)).toMap
167 | }
168 |
169 | def checkInputTopics(props: Map[String, String]) = {
170 | val topics = props.get("topics").get.split(",").toSet
171 | val raw = props.get(kcqlConstant).get
172 | if (raw.isEmpty) {
173 | throw new ConfigException(s"Missing $kcqlConstant")
174 | }
175 | val kcql = raw.split(";").map(r => Kcql.parse(r)).toSet
176 | val sources = kcql.map(k => k.getSource)
177 |
178 | val res = topics.subsetOf(sources)
179 |
180 | if (!res) {
181 | throw new ConfigException(s"Mandatory `topics` configuration contains topics not set in $kcqlConstant")
182 | }
183 |
184 | val res1 = sources.subsetOf(topics)
185 |
186 | if (!res1) {
187 | throw new ConfigException(s"$kcqlConstant configuration contains topics not set in the mandatory `topics` configuration")
188 | }
189 |
190 | true
191 | }
192 | }
193 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/config/base/traits/NumberRetriesSettings.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config.base.traits
18 |
19 | import com.datamountaineer.streamreactor.connect.config.base.const.TraitConfigConst.MAX_RETRIES_PROP_SUFFIX
20 |
21 | trait NumberRetriesSettings extends BaseSettings {
22 | def numberRetriesConstant: String = s"$connectorPrefix.$MAX_RETRIES_PROP_SUFFIX"
23 |
24 | def getNumberRetries: Int = getInt(numberRetriesConstant)
25 | }
26 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/config/base/traits/RetryIntervalSettings.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config.base.traits
18 |
19 | import com.datamountaineer.streamreactor.connect.config.base.const.TraitConfigConst.RETRY_INTERVAL_PROP_SUFFIX
20 |
21 | trait RetryIntervalSettings extends BaseSettings {
22 | val retryIntervalConstant: String = s"$connectorPrefix.$RETRY_INTERVAL_PROP_SUFFIX"
23 |
24 | def getRetryInterval: Int = getInt(retryIntervalConstant)
25 | }
26 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/config/base/traits/SSLSettings.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config.base.traits
18 |
19 | import com.datamountaineer.streamreactor.connect.config.base.const.TraitConfigConst._
20 |
21 | /**
22 | * Created by andrew@datamountaineer.com on 31/07/2017.
23 | * stream-reactor
24 | */
25 | trait SSLSettings extends BaseSettings {
26 | val trustStorePath: String = s"$connectorPrefix.$TRUSTSTORE_PATH_SUFFIX"
27 | val trustStorePass: String = s"$connectorPrefix.$TRUSTSTORE_PASS_SUFFIX"
28 | val keyStorePath: String = s"$connectorPrefix.$KEYSTORE_PATH_SUFFIX"
29 | val keyStorePass: String = s"$connectorPrefix.$KEYSTORE_PASS_SUFFIX"
30 | val certificates: String = s"$connectorPrefix.$CERTIFICATES_SUFFIX"
31 | val certificateKeyChain: String = s"$connectorPrefix.$CERTIFICATE_KEY_CHAIN_SUFFIX"
32 |
33 | def getTrustStorePath = getString(trustStorePath)
34 | def getTrustStorePass = getPassword(trustStorePass)
35 |
36 | def getKeyStorePath = getString(keyStorePath)
37 | def getKeyStorePass = getPassword(keyStorePass)
38 |
39 | def getCertificates = getList(certificates)
40 | def getCertificateKeyChain = getString(certificateKeyChain)
41 |
42 | }
43 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/config/base/traits/ThreadPoolSettings.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config.base.traits
18 |
19 | import com.datamountaineer.streamreactor.connect.config.base.const.TraitConfigConst.THREAD_POLL_PROP_SUFFIX
20 |
21 | trait ThreadPoolSettings extends BaseSettings {
22 | def threadPoolConstant: String = s"$connectorPrefix.$THREAD_POLL_PROP_SUFFIX"
23 |
24 | def getThreadPoolSize: Int = {
25 | val threads = getInt(threadPoolConstant)
26 | if (threads <= 0) 4 * Runtime.getRuntime.availableProcessors()
27 | else threads
28 | }
29 |
30 | }
31 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/config/base/traits/UserSettings.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config.base.traits
18 |
19 | import com.datamountaineer.streamreactor.connect.config.base.const.TraitConfigConst.{PASSWORD_SUFFIX, USERNAME_SUFFIX}
20 |
21 | /**
22 | * Created by andrew@datamountaineer.com on 29/07/2017.
23 | * stream-reactor
24 | */
25 | trait UserSettings extends BaseSettings {
26 | val passwordConst = s"$connectorPrefix.$PASSWORD_SUFFIX"
27 | val usernameConst = s"$connectorPrefix.$USERNAME_SUFFIX"
28 |
29 | def getSecret = getPassword(passwordConst)
30 | def getUsername = getString(usernameConst)
31 | }
32 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/config/base/traits/WriteTimeoutSettings.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config.base.traits
18 |
19 | import com.datamountaineer.streamreactor.connect.config.base.const.TraitConfigConst.WRITE_TIMEOUT_SUFFIX
20 |
21 | /**
22 | * Created by andrew@datamountaineer.com on 31/07/2017.
23 | * stream-reactor
24 | */
25 | trait WriteTimeoutSettings extends BaseSettings {
26 | val writeTimeoutSettingsConst = s"$connectorPrefix.$WRITE_TIMEOUT_SUFFIX"
27 |
28 | def getWriteTimeout = getInt(writeTimeoutSettingsConst)
29 |
30 | }
31 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/converters/FieldConverter.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.converters
18 |
19 | import com.datamountaineer.kcql.Field
20 |
21 | import scala.collection.JavaConverters._
22 |
23 | object FieldConverter {
24 | def apply(field: Field): com.landoop.sql.Field = {
25 | com.landoop.sql.Field(
26 | field.getName,
27 | field.getAlias,
28 | Option(field.getParentFields).map(_.asScala.toVector).orNull)
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/converters/MsgKey.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datamountaineer.streamreactor.connect.converters
17 |
18 | import com.sksamuel.avro4s.{RecordFormat, SchemaFor}
19 | import io.confluent.connect.avro.AvroData
20 |
21 | case class MsgKey(topic: String, id: String)
22 |
23 | object MsgKey {
24 | private val recordFormat = RecordFormat[MsgKey]
25 | private val avroSchema = SchemaFor[MsgKey]()
26 | private val avroData = new AvroData(1)
27 | val schema = avroData.toConnectSchema(avroSchema)
28 |
29 | def getStruct(topic: String, id: String) = avroData.toConnectData(avroSchema, recordFormat.to(MsgKey(topic, id))).value()
30 | }
31 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/converters/Transform.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.converters
18 |
19 | import java.nio.ByteBuffer
20 |
21 | import com.datamountaineer.streamreactor.connect.json.SimpleJsonConverter
22 | import com.fasterxml.jackson.databind.JsonNode
23 | import com.landoop.connect.sql.StructSql._
24 | import com.landoop.json.sql.JsonSql._
25 | import com.landoop.json.sql.JacksonJson
26 | import com.landoop.sql.Field
27 | import com.typesafe.scalalogging.StrictLogging
28 | import org.apache.kafka.connect.data.{Schema, Struct}
29 |
30 | import scala.util.{Failure, Success, Try}
31 |
32 | object Transform extends StrictLogging {
33 | lazy val simpleJsonConverter = new SimpleJsonConverter()
34 |
35 | def apply(fields: Seq[Field],
36 | ignoredFields: Seq[Field],
37 | schema: Schema,
38 | value: Any,
39 | withStructure:Boolean): String = {
40 | def raiseException(msg: String, t: Throwable) = throw new IllegalArgumentException(msg, t)
41 |
42 | if (value == null) {
43 | if (schema == null || !schema.isOptional) {
44 | raiseException("Null value is not allowed.", null)
45 | }
46 | else null
47 | } else {
48 | if (schema != null) {
49 | schema.`type`() match {
50 | case Schema.Type.BYTES =>
51 |               //we expect the payload to be json
52 | val array = value match {
53 | case a: Array[Byte] => a
54 | case b: ByteBuffer => b.array()
55 |               case other => raiseException(s"Invalid payload:$other for schema Schema.BYTES.", null)
56 | }
57 |
58 | Try(JacksonJson.mapper.readTree(array)) match {
59 | case Failure(e) => raiseException("Invalid json.", e)
60 | case Success(json) =>
61 | Try(json.sql(fields, !withStructure)) match {
62 | case Failure(e) => raiseException(s"A KCQL exception occurred. ${e.getMessage}", e)
63 | case Success(jn) => jn.toString
64 | }
65 | }
66 |
67 | case Schema.Type.STRING =>
68 |             //we expect the payload to be json
69 | Try(JacksonJson.asJson(value.asInstanceOf[String])) match {
70 | case Failure(e) => raiseException("Invalid json", e)
71 | case Success(json) =>
72 | Try(json.sql(fields, !withStructure)) match {
73 | case Success(jn) => jn.toString
74 | case Failure(e) => raiseException(s"A KCQL exception occurred.${e.getMessage}", e)
75 | }
76 | }
77 |
78 | case Schema.Type.STRUCT =>
79 | val struct = value.asInstanceOf[Struct]
80 | Try(struct.sql(fields, !withStructure)) match {
81 | case Success(s) => simpleJsonConverter.fromConnectData(s.schema(), s).toString
82 |
83 | case Failure(e) => raiseException(s"A KCQL error occurred.${e.getMessage}", e)
84 | }
85 |
86 |           case other => raiseException(s"Can't transform Schema type:$other.", null)
87 | }
88 | } else {
89 | //we can handle java.util.Map (this is what JsonConverter can spit out)
90 | value match {
91 | case m: java.util.Map[_, _] =>
92 | val map = m.asInstanceOf[java.util.Map[String, Any]]
93 | val jsonNode: JsonNode = JacksonJson.mapper.valueToTree(map)
94 | Try(jsonNode.sql(fields, !withStructure)) match {
95 | case Success(j) => j.toString
96 | case Failure(e) => raiseException(s"A KCQL exception occurred.${e.getMessage}", e)
97 | }
98 | case s: String =>
99 |             Try(JacksonJson.asJson(s)) match {
100 | case Failure(e) => raiseException("Invalid json.", e)
101 | case Success(json) =>
102 | Try(json.sql(fields, !withStructure)) match {
103 | case Success(jn) => jn.toString
104 | case Failure(e) => raiseException(s"A KCQL exception occurred.${e.getMessage}", e)
105 | }
106 | }
107 |
108 | case b: Array[Byte] =>
109 | Try(JacksonJson.mapper.readTree(b)) match {
110 | case Failure(e) => raiseException("Invalid json.", e)
111 | case Success(json) =>
112 | Try(json.sql(fields, !withStructure)) match {
113 | case Failure(e) => raiseException(s"A KCQL exception occurred. ${e.getMessage}", e)
114 | case Success(jn) => jn.toString
115 | }
116 | }
117 |             //anything else is not supported
118 | case other => raiseException(s"Value:$other is not handled!", null)
119 | }
120 | }
121 | }
122 | }
123 | }
124 |
125 |
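As a rough usage sketch of the object above (the schema, KCQL statement and expected output are illustrative assumptions, not taken from this project's tests), Transform projects a Connect value through a set of KCQL fields and returns the resulting JSON string:

    import com.datamountaineer.kcql.Kcql
    import com.datamountaineer.streamreactor.connect.converters.{FieldConverter, Transform}
    import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}

    import scala.collection.JavaConverters._

    object TransformExample extends App {
      val schema = SchemaBuilder.struct().name("user")
        .field("name", Schema.STRING_SCHEMA)
        .field("age", Schema.INT32_SCHEMA)
        .build()
      val value = new Struct(schema).put("name", "alice").put("age", 30)

      val kcql = Kcql.parse("INSERT INTO target SELECT name FROM source")
      val json = Transform(
        kcql.getFields.asScala.map(FieldConverter.apply),
        kcql.getIgnoredFields.asScala.map(FieldConverter.apply),
        schema,
        value,
        withStructure = false)
      println(json) // expected to print something like {"name":"alice"}
    }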
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/converters/sink/AvroConverter.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datamountaineer.streamreactor.connect.converters.sink
17 |
18 | import com.datamountaineer.streamreactor.connect.converters.MsgKey
19 | import io.confluent.connect.avro.AvroData
20 | import java.io.ByteArrayOutputStream
21 | import java.io.File
22 | import org.apache.avro.{Schema => AvroSchema}
23 | import org.apache.avro.generic.GenericRecord
24 | import org.apache.avro.io.EncoderFactory
25 | import org.apache.avro.reflect.ReflectDatumWriter
26 | import org.apache.kafka.connect.sink.SinkRecord
27 | import org.apache.zookeeper.server.quorum.QuorumPeerConfig.ConfigException
28 |
29 |
30 | class AvroConverter extends Converter {
31 | private val avroData = new AvroData(8)
32 | private var sinkToSchemaMap: Map[String, AvroSchema] = Map.empty
33 | private var avroWritersMap: Map[String, ReflectDatumWriter[Object]] = Map.empty
34 |
35 | override def convert(sinkTopic: String,
36 | data: SinkRecord): SinkRecord = {
37 | Option(data) match {
38 | case None =>
39 | new SinkRecord(
40 | sinkTopic,
41 | 0,
42 | null,
43 | null,
44 | avroData.toConnectSchema(sinkToSchemaMap(sinkTopic)),
45 | null,
46 | 0
47 | )
48 | case Some(_) =>
49 | val kafkaTopic = data.topic()
50 |         val writer = avroWritersMap.getOrElse(kafkaTopic.toLowerCase, throw new ConfigException(s"${AvroConverter.SCHEMA_CONFIG} is not configured for $kafkaTopic"))
51 | 
52 |         val output = new ByteArrayOutputStream()
53 |         val encoder = EncoderFactory.get().binaryEncoder(output, null)
54 |         output.reset()
55 | 
56 |         //convert the Connect value to Avro and serialize it to bytes
57 |         val avro = avroData.fromConnectData(data.valueSchema(), data.value())
58 | 
59 |         writer.write(avro, encoder)
60 |         encoder.flush()
61 |         val arr = output.toByteArray
62 |
63 | new SinkRecord(
64 | kafkaTopic,
65 | data.kafkaPartition(),
66 | MsgKey.schema,
67 | MsgKey.getStruct(sinkTopic, data.key().toString()),
68 | data.valueSchema(),
69 | arr,
70 | 0
71 | )
72 |
73 |
74 | }
75 | }
76 |
77 | override def initialize(config: Map[String, String]): Unit = {
78 | sinkToSchemaMap = AvroConverter.getSchemas(config)
79 | avroWritersMap = sinkToSchemaMap.map { case (key, schema) =>
80 | key -> new ReflectDatumWriter[Object](schema)
81 | }
82 | }
83 | }
84 |
85 | object AvroConverter {
86 | val SCHEMA_CONFIG = "connect.converter.avro.schemas"
87 |
88 | def getSchemas(config: Map[String, String]): Map[String, AvroSchema] = {
89 | config.getOrElse(SCHEMA_CONFIG, throw new ConfigException(s"$SCHEMA_CONFIG is not provided"))
90 | .toString
91 | .split(';')
92 | .filter(_.trim.nonEmpty)
93 | .map(_.split("="))
94 | .map {
95 | case Array(sink, path) =>
96 | val file = new File(path)
97 | if (!file.exists()) {
98 | throw new ConfigException(s"Invalid $SCHEMA_CONFIG. The file $path doesn't exist!")
99 | }
100 | val s = sink.trim.toLowerCase()
101 | if (s.isEmpty) {
102 | throw new ConfigException(s"Invalid $SCHEMA_CONFIG. The topic is not valid for entry containing $path")
103 | }
104 | s -> new AvroSchema.Parser().parse(file)
105 |         case other => throw new ConfigException(s"$SCHEMA_CONFIG is not properly set. The expected format is 'sink_topic=path_to_avro_schema' entries separated by ';'")
106 | }.toMap
107 | }
108 | }
109 |
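The value of SCHEMA_CONFIG, as implied by getSchemas above, is a ';'-separated list of topic=avro-schema-file pairs. A minimal, illustrative initialization (topic names and paths are placeholders):

    val converter = new AvroConverter()
    converter.initialize(Map(
      AvroConverter.SCHEMA_CONFIG -> "orders=/etc/schemas/orders.avsc;payments=/etc/schemas/payments.avsc"
    ))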
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/converters/sink/BytesConverter.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datamountaineer.streamreactor.connect.converters.sink
17 |
18 | import com.datamountaineer.streamreactor.connect.converters.MsgKey
19 | import org.apache.kafka.connect.data.Schema
20 | import org.apache.kafka.connect.sink.SinkRecord
21 |
22 | class BytesConverter extends Converter {
23 | override def convert(sinkTopic: String,
24 | data: SinkRecord): SinkRecord = {
25 | Option(data) match {
26 | case None =>
27 | new SinkRecord(
28 | sinkTopic,
29 | 0,
30 | null,
31 | null,
32 | Schema.BYTES_SCHEMA,
33 | null,
34 | 0
35 | )
36 | case Some(_) =>
37 | new SinkRecord(
38 | data.topic(),
39 | data.kafkaPartition(),
40 | MsgKey.schema,
41 | MsgKey.getStruct(sinkTopic, data.key().toString()),
42 | Schema.BYTES_SCHEMA,
43 | data.value(),
44 | 0
45 | )
46 | }
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/converters/sink/Converter.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datamountaineer.streamreactor.connect.converters.sink
17 |
18 | import org.apache.kafka.connect.sink.SinkRecord
19 |
20 | /**
21 |   * Provides the interface for converting a SinkRecord into the payload expected by the target sink (JMS, MQTT, etc)
22 | */
23 | trait Converter {
24 | def initialize(map: Map[String, String]): Unit = {}
25 |
26 | def convert(sinkTopic: String, data: SinkRecord): SinkRecord
27 | }
28 |
29 |
30 | object Converter {
31 | val TopicKey = "topic"
32 | }
33 |
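A minimal sketch of an implementation of the trait above; the class name is hypothetical and a simple pass-through is assumed to be sufficient:

    import org.apache.kafka.connect.sink.SinkRecord

    class NoOpSinkConverter extends Converter {
      // Hands the record back unchanged for the given sink topic.
      override def convert(sinkTopic: String, data: SinkRecord): SinkRecord = data
    }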
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/converters/sink/SinkRecordToJson.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.converters.source
18 |
19 | import com.datamountaineer.streamreactor.connect.schemas.ConverterUtil
20 | import com.fasterxml.jackson.databind.ObjectMapper
21 | import com.landoop.json.sql.JacksonJson
22 | import org.apache.kafka.connect.data.Schema
23 | import org.apache.kafka.connect.sink.SinkRecord
24 | import org.json4s.jackson.JsonMethods._
25 |
26 | import scala.util.Try
27 |
28 | /**
29 | * Created by andrew@datamountaineer.com on 29/12/2016.
30 | * kafka-connect-common
31 | */
32 | object SinkRecordToJson extends ConverterUtil {
33 |
34 | private val mapper = new ObjectMapper()
35 |
36 | def apply(record: SinkRecord,
37 | fields: Map[String, Map[String, String]],
38 | ignoreFields: Map[String, Set[String]]): String = {
39 |
40 | val schema = record.valueSchema()
41 | val value = record.value()
42 |
43 | if (schema == null) {
44 | if(value == null){
45 |         throw new IllegalArgumentException(s"The sink record value is null. (topic=${record.topic()} partition=${record.kafkaPartition()} offset=${record.kafkaOffset()})")
46 | }
47 |       //schemaless: we expect a java.util.Map (what the JsonConverter produces)
48 | value match {
49 | case map: java.util.Map[_, _] =>
50 | val extracted = convertSchemalessJson(record,
51 | fields.getOrElse(record.topic(), Map.empty),
52 | ignoreFields.getOrElse(record.topic(), Set.empty))
53 | .asInstanceOf[java.util.Map[String, Any]]
54 | //not ideal; but the implementation is hashmap anyway
55 | mapper.writeValueAsString(extracted)
56 |
57 | case other => sys.error(
58 | s"""
59 |              |For schemaless records only Map values are supported. Class=${Option(other).map(_.getClass.getCanonicalName).getOrElse("unknown(null value)")}
60 | |Record info:
61 | |topic=${record.topic()} partition=${record.kafkaPartition()} offset=${record.kafkaOffset()}
62 | |${Try(JacksonJson.toJson(value)).getOrElse("")}""".stripMargin)
63 | }
64 | } else {
65 | schema.`type`() match {
66 | case Schema.Type.STRING =>
67 | val extracted = convertStringSchemaAndJson(record,
68 | fields.getOrElse(record.topic(), Map.empty),
69 | ignoreFields.getOrElse(record.topic(), Set.empty))
70 | compact(render(extracted))
71 | case Schema.Type.STRUCT =>
72 | val extracted = convert(record,
73 | fields.getOrElse(record.topic(), Map.empty),
74 | ignoreFields.getOrElse(record.topic(), Set.empty))
75 |
76 | simpleJsonConverter.fromConnectData(extracted.valueSchema(), extracted.value()).toString
77 |
78 | case other => sys.error(s"$other schema is not supported")
79 | }
80 | }
81 | }
82 | }
83 |
84 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/converters/source/AvroConverter.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datamountaineer.streamreactor.connect.converters.source
17 |
18 | import java.io.File
19 | import java.util.Collections
20 |
21 | import com.datamountaineer.streamreactor.connect.converters.MsgKey
22 | import io.confluent.connect.avro.AvroData
23 | import org.apache.avro.generic.{GenericDatumReader, GenericRecord}
24 | import org.apache.avro.io.DecoderFactory
25 | import org.apache.avro.{Schema => AvroSchema}
26 | import org.apache.kafka.connect.data.{Schema, Struct}
27 | import org.apache.kafka.connect.source.SourceRecord
28 | import org.apache.zookeeper.server.quorum.QuorumPeerConfig.ConfigException
29 |
30 |
31 | class AvroConverter extends Converter {
32 | private val avroData = new AvroData(8)
33 | private var sourceToSchemaMap: Map[String, AvroSchema] = Map.empty
34 | private var avroReadersMap: Map[String, GenericDatumReader[GenericRecord]] = Map.empty
35 |
36 | override def convert(kafkaTopic: String,
37 | sourceTopic: String,
38 | messageId: String,
39 | bytes: Array[Byte],
40 | keys: Seq[String] = Seq.empty,
41 | keyDelimiter: String = ".",
42 | properties: Map[String, String] = Map.empty): SourceRecord = {
43 | Option(bytes) match {
44 | case None =>
45 | new SourceRecord(Collections.singletonMap(Converter.TopicKey, sourceTopic),
46 | null,
47 | kafkaTopic,
48 | avroData.toConnectSchema(sourceToSchemaMap(sourceTopic)),
49 | null)
50 | case Some(_) =>
51 |         val reader = avroReadersMap.getOrElse(sourceTopic.toLowerCase, throw new ConfigException(s"${AvroConverter.SCHEMA_CONFIG} is not configured for $sourceTopic"))
52 | val decoder = DecoderFactory.get().binaryDecoder(bytes, null)
53 | val record = reader.read(null, decoder)
54 | val schemaAndValue = avroData.toConnectData(sourceToSchemaMap(sourceTopic.toLowerCase), record)
55 | val value = schemaAndValue.value()
56 | value match {
57 | case s: Struct if keys.nonEmpty =>
58 | val keysValue = keys.flatMap { key =>
59 | Option(KeyExtractor.extract(s, key.split('.').toVector)).map(_.toString)
60 | }.mkString(keyDelimiter)
61 | new SourceRecord(
62 | Collections.singletonMap(Converter.TopicKey, sourceTopic),
63 | null,
64 | kafkaTopic,
65 | Schema.STRING_SCHEMA,
66 | keysValue,
67 | schemaAndValue.schema(),
68 | schemaAndValue.value())
69 | case _ =>
70 | new SourceRecord(
71 | Collections.singletonMap(Converter.TopicKey, sourceTopic),
72 | null,
73 | kafkaTopic,
74 | MsgKey.schema,
75 | MsgKey.getStruct(sourceTopic, messageId),
76 | schemaAndValue.schema(),
77 | schemaAndValue.value())
78 | }
79 |
80 | }
81 | }
82 |
83 | override def initialize(config: Map[String, String]): Unit = {
84 | sourceToSchemaMap = AvroConverter.getSchemas(config)
85 | avroReadersMap = sourceToSchemaMap.map { case (key, schema) =>
86 | key -> new GenericDatumReader[GenericRecord](schema)
87 | }
88 | }
89 | }
90 |
91 | object AvroConverter {
92 | val SCHEMA_CONFIG = "connect.source.converter.avro.schemas"
93 |
94 | def getSchemas(config: Map[String, String]): Map[String, AvroSchema] = {
95 | config.getOrElse(SCHEMA_CONFIG, throw new ConfigException(s"$SCHEMA_CONFIG is not provided"))
96 | .toString
97 | .split(';')
98 | .filter(_.trim.nonEmpty)
99 | .map(_.split("="))
100 | .map {
101 | case Array(source, path) =>
102 | val file = new File(path)
103 | if (!file.exists()) {
104 | throw new ConfigException(s"Invalid $SCHEMA_CONFIG. The file $path doesn't exist!")
105 | }
106 | val s = source.trim.toLowerCase()
107 | if (s.isEmpty) {
108 | throw new ConfigException(s"Invalid $SCHEMA_CONFIG. The topic is not valid for entry containing $path")
109 | }
110 | s -> new AvroSchema.Parser().parse(file)
111 |         case other => throw new ConfigException(s"$SCHEMA_CONFIG is not properly set. The expected format is 'source_topic=path_to_avro_schema' entries separated by ';'")
112 | }.toMap
113 | }
114 | }
115 |
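As with the sink-side AvroConverter, the schema mapping is a ';'-separated list of topic=path pairs, only under the source property name; the topic and path below are placeholders:

    val sourceConverter = new AvroConverter()
    sourceConverter.initialize(Map(
      AvroConverter.SCHEMA_CONFIG -> "mqtt_sensors=/etc/schemas/sensors.avsc"
    ))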
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/converters/source/BytesConverter.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datamountaineer.streamreactor.connect.converters.source
17 |
18 | import java.util.Collections
19 |
20 | import com.datamountaineer.streamreactor.connect.converters.MsgKey
21 | import org.apache.kafka.connect.data.Schema
22 | import org.apache.kafka.connect.source.SourceRecord
23 |
24 | class BytesConverter extends Converter {
25 | override def convert(kafkaTopic: String,
26 | sourceTopic: String,
27 | messageId: String,
28 | bytes: Array[Byte],
29 | keys: Seq[String] = Seq.empty,
30 | keyDelimiter: String = ".",
31 | properties: Map[String, String] = Map.empty): SourceRecord = {
32 | new SourceRecord(Collections.singletonMap(Converter.TopicKey, sourceTopic),
33 | null,
34 | kafkaTopic,
35 | MsgKey.schema,
36 | MsgKey.getStruct(sourceTopic, messageId),
37 | Schema.BYTES_SCHEMA,
38 | bytes)
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/converters/source/Converter.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.datamountaineer.streamreactor.connect.converters.source
17 |
18 | import org.apache.kafka.connect.source.SourceRecord
19 |
20 | /**
21 | * Provides the interface for converting a Connect source payload (JMS, MQTT, etc) to a SourceRecord
22 | */
23 | trait Converter {
24 | def initialize(map: Map[String, String]): Unit = {}
25 |
26 | def convert(kafkaTopic: String,
27 | sourceTopic: String,
28 | messageId: String,
29 | bytes: Array[Byte],
30 | keys: Seq[String] = Seq.empty,
31 | keyDelimiter: String = ".",
32 | properties: Map[String, String] = Map.empty): SourceRecord
33 | }
34 |
35 | object Converter {
36 | val TopicKey = "topic"
37 | }
38 |
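A minimal sketch of a source-side implementation (hypothetical class name), mirroring how the BytesConverter above assembles its SourceRecord; Converter.TopicKey is used as the source-partition key:

    import java.util.Collections

    import org.apache.kafka.connect.data.Schema
    import org.apache.kafka.connect.source.SourceRecord

    class RawStringConverter extends Converter {
      override def convert(kafkaTopic: String,
                           sourceTopic: String,
                           messageId: String,
                           bytes: Array[Byte],
                           keys: Seq[String] = Seq.empty,
                           keyDelimiter: String = ".",
                           properties: Map[String, String] = Map.empty): SourceRecord =
        new SourceRecord(
          Collections.singletonMap(Converter.TopicKey, sourceTopic), // source partition
          null,                                                      // no source offset
          kafkaTopic,
          Schema.STRING_SCHEMA,
          messageId,
          Schema.STRING_SCHEMA,
          new String(bytes, "UTF-8"))
    }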
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/converters/source/JsonConverterWithSchemaEvolution.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.converters.source
18 |
19 | import java.nio.charset.Charset
20 | import java.util
21 | import java.util.Collections
22 |
23 | import com.datamountaineer.streamreactor.connect.converters.MsgKey
24 | import io.confluent.connect.avro.AvroData
25 | import org.apache.kafka.connect.data._
26 | import org.apache.kafka.connect.source.SourceRecord
27 |
28 | /**
29 | * Experimental
30 | */
31 | class JsonConverterWithSchemaEvolution extends Converter {
32 | private val avroData = new AvroData(4)
33 | implicit private var latestSchema: Option[Schema] = None
34 |
35 |
36 | override def convert(kafkaTopic: String,
37 | mqttSource: String,
38 | messageId: String,
39 | bytes: Array[Byte],
40 | keys: Seq[String] = Seq.empty,
41 | keyDelimiter: String = ".",
42 | properties: Map[String, String] = Map.empty): SourceRecord = {
43 | require(bytes != null, s"Invalid $bytes parameter")
44 | val json = new String(bytes, Charset.defaultCharset)
45 | val schemaAndValue = JsonConverterWithSchemaEvolution.convert(mqttSource, json)
46 | latestSchema = Some(schemaAndValue.schema())
47 |
48 | val value = schemaAndValue.value()
49 | value match {
50 | case s: Struct if keys.nonEmpty =>
51 | val keysValue = keys.flatMap { key =>
52 | Option(KeyExtractor.extract(s, key.split('.').toVector)).map(_.toString)
53 | }.mkString(keyDelimiter)
54 |
55 | new SourceRecord(null,
56 | Collections.singletonMap(JsonConverterWithSchemaEvolution.ConfigKey, latestSchema.map(avroData.fromConnectSchema(_).toString).orNull),
57 | kafkaTopic,
58 | Schema.STRING_SCHEMA,
59 | keysValue,
60 | schemaAndValue.schema(),
61 | schemaAndValue.value())
62 |
63 | case _ =>
64 | new SourceRecord(null,
65 | Collections.singletonMap(JsonConverterWithSchemaEvolution.ConfigKey, latestSchema.map(avroData.fromConnectSchema(_).toString).orNull),
66 | kafkaTopic,
67 | MsgKey.schema,
68 | MsgKey.getStruct(mqttSource, messageId),
69 | schemaAndValue.schema(),
70 | schemaAndValue.value())
71 | }
72 |
73 | }
74 | }
75 |
76 | object JsonConverterWithSchemaEvolution {
77 |
78 | val ConfigKey = "JsonConverterWithSchemaEvolution.Schema"
79 |
80 | import org.json4s._
81 | import org.json4s.native.JsonMethods._
82 |
83 | def convert(name: String, str: String)(implicit schema: Option[Schema]): SchemaAndValue = convert(name, parse(str))
84 |
85 | def convert(name: String, value: JValue)(implicit aggregatedSchema: Option[Schema]): SchemaAndValue = {
86 | value match {
87 | case JArray(arr) =>
88 | val values = new util.ArrayList[AnyRef]()
89 | val prevSchema = aggregatedSchema.map(_.field(name)).map(_.schema)
90 | val sv = convert(name, arr.head)(prevSchema)
91 | values.add(sv.value())
92 | arr.tail.foreach { v => values.add(convert(name, v)(prevSchema).value()) }
93 |
94 | val schema = SchemaBuilder.array(sv.schema()).optional().build()
95 | new SchemaAndValue(schema, values)
96 | case JBool(b) => new SchemaAndValue(Schema.OPTIONAL_BOOLEAN_SCHEMA, b)
97 | case JDecimal(d) =>
98 | val schema = Decimal.builder(d.scale).optional().build()
99 | new SchemaAndValue(schema, Decimal.fromLogical(schema, d.bigDecimal))
100 | case JDouble(d) => new SchemaAndValue(Schema.OPTIONAL_FLOAT64_SCHEMA, d)
101 | case JInt(i) => new SchemaAndValue(Schema.OPTIONAL_INT64_SCHEMA, i.toLong) //on purpose! LONG (we might get later records with long entries)
102 | case JLong(l) => new SchemaAndValue(Schema.OPTIONAL_INT64_SCHEMA, l)
103 | case JNull | JNothing => new SchemaAndValue(Schema.OPTIONAL_STRING_SCHEMA, null)
104 | case JString(s) => new SchemaAndValue(Schema.OPTIONAL_STRING_SCHEMA, s)
105 | case JObject(values) =>
106 | val builder = SchemaBuilder.struct().name(name)
107 |
108 | val fields = values.map { case (n, v) =>
109 | val prevSchema = aggregatedSchema.map(_.field(n)).map(_.schema())
110 | val schemaAndValue = convert(n, v)(prevSchema)
111 | builder.field(n, schemaAndValue.schema())
112 | n -> schemaAndValue.value()
113 | }.toMap
114 |         import scala.collection.JavaConverters._
115 |         //carry over fields seen in earlier messages that are missing from this one, then build the schema
116 |         aggregatedSchema
117 |           .foreach { aggSchema =>
118 |             aggSchema.fields().asScala
119 |               .withFilter(f => !fields.contains(f.name()))
120 |               .foreach { f =>
121 |                 builder.field(f.name(), f.schema())
122 |               }
123 |           }
124 | 
125 |         val schema = builder.build()
126 |         val struct = new Struct(schema)
127 | fields.foreach { case (field, v) => struct.put(field, v) }
128 |
129 | new SchemaAndValue(schema, struct)
130 | }
131 | }
132 | }
133 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/converters/source/JsonOptNullConverter.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.converters.source
18 |
19 | import java.nio.charset.Charset
20 | import java.util
21 | import java.util.Collections
22 |
23 | import com.datamountaineer.streamreactor.connect.converters.MsgKey
24 | import org.apache.kafka.connect.data._
25 | import org.apache.kafka.connect.source.SourceRecord
26 |
27 |
28 | class JsonOptNullConverter extends Converter {
29 | override def convert(kafkaTopic: String,
30 | sourceTopic: String,
31 | messageId: String,
32 | bytes: Array[Byte],
33 | keys:Seq[String] = Seq.empty,
34 | keyDelimiter:String = ".",
35 | properties: Map[String, String] = Map.empty): SourceRecord = {
36 | require(bytes != null, s"Invalid $bytes parameter")
37 | val json = new String(bytes, Charset.defaultCharset)
38 | val schemaAndValue = JsonOptNullConverter.convert(sourceTopic, json)
39 | val value = schemaAndValue.value()
40 | value match {
41 | case s:Struct if keys.nonEmpty =>
42 | val keysValue = keys.flatMap { key =>
43 | Option(KeyExtractor.extract(s, key.split('.').toVector)).map(_.toString)
44 | }.mkString(keyDelimiter)
45 |
46 | new SourceRecord(Collections.singletonMap(Converter.TopicKey, sourceTopic),
47 | null,
48 | kafkaTopic,
49 | Schema.STRING_SCHEMA,
50 | keysValue,
51 | schemaAndValue.schema(),
52 | schemaAndValue.value())
53 | case _=>
54 | new SourceRecord(Collections.singletonMap(Converter.TopicKey, sourceTopic),
55 | null,
56 | kafkaTopic,
57 | MsgKey.schema,
58 | MsgKey.getStruct(sourceTopic, messageId),
59 | schemaAndValue.schema(),
60 | schemaAndValue.value())
61 | }
62 |
63 | }
64 | }
65 |
66 | object JsonOptNullConverter {
67 |
68 | import org.json4s._
69 | import org.json4s.native.JsonMethods._
70 |
71 | def convert(name: String, str: String): SchemaAndValue = convert(name, parse(str))
72 |
73 | def convert(name: String, value: JValue): SchemaAndValue = {
74 | value match {
75 | case JArray(arr) =>
76 | val values = new util.ArrayList[AnyRef]()
77 | val sv = convert(name, arr.head)
78 | values.add(sv.value())
79 | arr.tail.foreach { v => values.add(convert(name, v).value()) }
80 |
81 | val schema = SchemaBuilder.array(sv.schema()).optional().build()
82 | new SchemaAndValue(schema, values)
83 | case JBool(b) => new SchemaAndValue(Schema.BOOLEAN_SCHEMA, b)
84 | case JDecimal(d) =>
85 | val schema = Decimal.builder(d.scale).optional().build()
86 | new SchemaAndValue(schema, Decimal.fromLogical(schema, d.bigDecimal))
87 | case JDouble(d) => new SchemaAndValue(Schema.FLOAT64_SCHEMA, d)
88 | case JInt(i) => new SchemaAndValue(Schema.INT64_SCHEMA, i.toLong) //on purpose! LONG (we might get later records with long entries)
89 | case JLong(l) => new SchemaAndValue(Schema.INT64_SCHEMA, l)
90 | case JNull | JNothing => new SchemaAndValue(Schema.OPTIONAL_STRING_SCHEMA, null)
91 | case JString(s) => new SchemaAndValue(Schema.STRING_SCHEMA, s)
92 | case JObject(values) =>
93 | val builder = SchemaBuilder.struct().name(name.replace("/", "_"))
94 |
95 | val fields = values.map { case (n, v) =>
96 | val schemaAndValue = convert(n, v)
97 | builder.field(n, schemaAndValue.schema())
98 | n -> schemaAndValue.value()
99 | }.toMap
100 | val schema = builder.build()
101 |
102 | val struct = new Struct(schema)
103 | fields.foreach { case (field, v) => struct.put(field, v) }
104 |
105 | new SchemaAndValue(schema, struct)
106 | }
107 | }
108 | }
109 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/converters/source/JsonPassThroughConverter.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.converters.source
18 |
19 | import java.util.Collections
20 |
21 | import com.landoop.json.sql.JacksonJson
22 | import org.apache.kafka.connect.source.SourceRecord
23 |
24 |
25 | class JsonPassThroughConverter extends Converter {
26 | override def convert(kafkaTopic: String,
27 | sourceTopic: String,
28 | messageId: String,
29 | bytes: Array[Byte],
30 | keys: Seq[String] = Seq.empty,
31 | keyDelimiter: String = ".",
32 | properties: Map[String, String] = Map.empty): SourceRecord = {
33 | require(bytes != null, s"Invalid $bytes parameter")
34 |
35 | val json = new String(bytes, "utf-8")
36 | val jsonNode = JacksonJson.asJson(json)
37 | var keysValue = keys.flatMap { key =>
38 | Option(KeyExtractor.extract(jsonNode, key.split('.').toVector)).map(_.toString)
39 | }.mkString(keyDelimiter)
40 |
41 | // If keys are not provided, default one will be constructed
42 | if (keysValue == "") {
43 | keysValue = s"$sourceTopic$keyDelimiter$messageId"
44 | }
45 |
46 | new SourceRecord(Collections.singletonMap(Converter.TopicKey, sourceTopic),
47 | null,
48 | kafkaTopic,
49 | null,
50 | keysValue,
51 | null,
52 | json)
53 | }
54 | }
55 |
56 |
57 |
58 |
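To illustrate the key handling described in the comments above (the JSON payload, topics and message id are invented):

    // bytes = """{"user":{"id":42},"action":"login"}"""
    // convert("kafka-topic", "mqtt-src", "msg-1", bytes, keys = Seq("user.id"))
    //   -> key "42", value is the original JSON string, key/value schemas are null
    // convert("kafka-topic", "mqtt-src", "msg-1", bytes)
    //   -> default key "mqtt-src.msg-1" (sourceTopic + keyDelimiter + messageId)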
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/converters/source/JsonResilientConverter.scala:
--------------------------------------------------------------------------------
1 | package com.datamountaineer.streamreactor.connect.converters.source
2 |
3 | import java.util
4 |
5 | import org.apache.kafka.connect.data.Schema
6 | import org.apache.kafka.connect.data.SchemaAndValue
7 | import org.apache.kafka.connect.json.JsonConverter
8 |
9 | /**
10 | * A Json converter built with resilience, meaning that malformed Json messages are now ignored
11 | */
12 | class JsonResilientConverter extends JsonConverter {
13 |
14 |   override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {
15 | super.configure(configs, isKey)
16 | }
17 |
18 | override def fromConnectData(topic: String, schema: Schema, value: Object): Array[Byte] = {
19 | try {
20 | super.fromConnectData(topic, schema, value)
21 | } catch {
22 | case t: Throwable =>
23 | t.printStackTrace()
24 | // Ignore exceptions
25 | null
26 | }
27 | }
28 |
29 | override def toConnectData(topic: String, value: Array[Byte]): SchemaAndValue = {
30 | try {
31 | super.toConnectData(topic, value)
32 | } catch {
33 | case t: Throwable =>
34 | t.printStackTrace()
35 | // Ignore exceptions
36 | SchemaAndValue.NULL
37 | }
38 | }
39 |
40 | }
41 |
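Because it extends Kafka's JsonConverter, the class can be used wherever a converter class is configured; a typical, illustrative worker/connector setting would be:

    value.converter=com.datamountaineer.streamreactor.connect.converters.source.JsonResilientConverter
    value.converter.schemas.enable=false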
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/converters/source/JsonSimpleConverter.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.converters.source
18 |
19 | import java.nio.charset.Charset
20 | import java.util
21 | import java.util.Collections
22 |
23 | import com.datamountaineer.streamreactor.connect.converters.MsgKey
24 | import org.apache.kafka.connect.data._
25 | import org.apache.kafka.connect.source.SourceRecord
26 |
27 |
28 | class JsonSimpleConverter extends Converter {
29 | override def convert(kafkaTopic: String,
30 | sourceTopic: String,
31 | messageId: String,
32 | bytes: Array[Byte],
33 | keys:Seq[String] = Seq.empty,
34 | keyDelimiter:String = ".",
35 | properties: Map[String, String] = Map.empty): SourceRecord = {
36 | require(bytes != null, s"Invalid $bytes parameter")
37 | val json = new String(bytes, Charset.defaultCharset)
38 | val schemaAndValue = JsonSimpleConverter.convert(sourceTopic, json)
39 | val value = schemaAndValue.value()
40 | value match {
41 | case s:Struct if keys.nonEmpty =>
42 | val keysValue = keys.flatMap { key =>
43 | Option(KeyExtractor.extract(s, key.split('.').toVector)).map(_.toString)
44 | }.mkString(keyDelimiter)
45 |
46 | new SourceRecord(Collections.singletonMap(Converter.TopicKey, sourceTopic),
47 | null,
48 | kafkaTopic,
49 | Schema.STRING_SCHEMA,
50 | keysValue,
51 | schemaAndValue.schema(),
52 | schemaAndValue.value())
53 | case _=>
54 | new SourceRecord(Collections.singletonMap(Converter.TopicKey, sourceTopic),
55 | null,
56 | kafkaTopic,
57 | MsgKey.schema,
58 | MsgKey.getStruct(sourceTopic, messageId),
59 | schemaAndValue.schema(),
60 | schemaAndValue.value())
61 | }
62 |
63 | }
64 | }
65 |
66 | object JsonSimpleConverter {
67 |
68 | import org.json4s._
69 | import org.json4s.native.JsonMethods._
70 |
71 | def convert(name: String, str: String): SchemaAndValue = convert(name, parse(str))
72 |
73 | def convert(name: String, value: JValue): SchemaAndValue = {
74 | value match {
75 | case JArray(arr) =>
76 | val values = new util.ArrayList[AnyRef]()
77 | val sv = convert(name, arr.head)
78 | values.add(sv.value())
79 | arr.tail.foreach { v => values.add(convert(name, v).value()) }
80 |
81 | val schema = SchemaBuilder.array(sv.schema()).optional().build()
82 | new SchemaAndValue(schema, values)
83 | case JBool(b) => new SchemaAndValue(Schema.BOOLEAN_SCHEMA, b)
84 | case JDecimal(d) =>
85 | val schema = Decimal.builder(d.scale).optional().build()
86 | new SchemaAndValue(schema, Decimal.fromLogical(schema, d.bigDecimal))
87 | case JDouble(d) => new SchemaAndValue(Schema.FLOAT64_SCHEMA, d)
88 | case JInt(i) => new SchemaAndValue(Schema.INT64_SCHEMA, i.toLong) //on purpose! LONG (we might get later records with long entries)
89 | case JLong(l) => new SchemaAndValue(Schema.INT64_SCHEMA, l)
90 | case JNull | JNothing => new SchemaAndValue(Schema.STRING_SCHEMA, null)
91 | case JString(s) => new SchemaAndValue(Schema.STRING_SCHEMA, s)
92 | case JObject(values) =>
93 | val builder = SchemaBuilder.struct().name(name.replace("/", "_"))
94 |
95 | val fields = values.map { case (n, v) =>
96 | val schemaAndValue = convert(n, v)
97 | builder.field(n, schemaAndValue.schema())
98 | n -> schemaAndValue.value()
99 | }.toMap
100 | val schema = builder.build()
101 |
102 | val struct = new Struct(schema)
103 | fields.foreach { case (field, v) => struct.put(field, v) }
104 |
105 | new SchemaAndValue(schema, struct)
106 | }
107 | }
108 | }
109 |
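A small illustration of the mapping implemented by JsonSimpleConverter.convert (the payload is invented): JSON integers become INT64, floating-point numbers FLOAT64, strings STRING, and objects nested structs:

    // JsonSimpleConverter.convert("src", """{"name":"alice","age":30,"score":1.5}""")
    //   -> Struct{name=alice,age=30,score=1.5}
    //      with schema struct "src" { name: STRING, age: INT64, score: FLOAT64 }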
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/converters/source/KeyExtractor.scala:
--------------------------------------------------------------------------------
1 | package com.datamountaineer.streamreactor.connect.converters.source
2 |
3 | import com.fasterxml.jackson.databind.JsonNode
4 | import com.fasterxml.jackson.databind.node._
5 | import org.apache.kafka.connect.data._
6 | import scala.collection.JavaConverters._
7 |
8 | import scala.annotation.tailrec
9 |
10 | object KeyExtractor {
11 | def extract(node: JsonNode, path: Vector[String]): Any = {
12 | @tailrec
13 | def innerExtract(n: JsonNode, p: Vector[String]): Any = {
14 | def checkValidPath() = {
15 | if (p.nonEmpty) {
16 | throw new IllegalArgumentException(s"Invalid field selection for '${path.mkString(".")}'. It doesn't resolve to a primitive field")
17 | }
18 | }
19 |
20 | n match {
21 | case null => null
22 | case bn: BinaryNode =>
23 | checkValidPath()
24 | n.binaryValue()
25 |
26 | case _: BooleanNode =>
27 | checkValidPath()
28 | n.booleanValue()
29 |
30 | case _: BigIntegerNode =>
31 | checkValidPath()
32 | n.bigIntegerValue()
33 | case _: DecimalNode =>
34 | checkValidPath()
35 | n.decimalValue()
36 | case _: DoubleNode =>
37 | checkValidPath()
38 | n.doubleValue()
39 | case _: FloatNode =>
40 | checkValidPath()
41 | n.floatValue()
42 | case _: IntNode =>
43 | checkValidPath()
44 | n.intValue()
45 | case _: LongNode =>
46 | checkValidPath()
47 | n.longValue()
48 | case _: ShortNode =>
49 | checkValidPath()
50 | n.shortValue()
51 | case _: TextNode =>
52 | checkValidPath()
53 | n.textValue()
54 | case _: NullNode =>
55 | checkValidPath()
56 | null
57 | case _: MissingNode =>
58 | checkValidPath()
59 | null
60 |
61 | case node: ObjectNode =>
62 | if (p.isEmpty) {
63 | throw new IllegalArgumentException(s"Invalid field selection for '${path.mkString(".")}'. The path is not resolving to a primitive field")
64 | }
65 | val childNode = Option(node.get(p.head)).getOrElse {
66 | throw new IllegalArgumentException(s"Invalid field selection for '${path.mkString(".")}'. Can't find ${p.head} field. Field found are:${node.fieldNames().asScala.mkString(",")}")
67 | }
68 |
69 | innerExtract(childNode, p.tail)
70 | case array: ArrayNode =>
71 | throw new IllegalArgumentException(s"Invalid field selection for '${path.mkString(".")}'. The path is involving an array structure")
72 |
73 | case other =>
74 | throw new IllegalArgumentException(s"Invalid field selection for '${path.mkString(".")}'. $other is not handled")
75 | }
76 | }
77 |
78 | if (node == null) {
79 | throw new NullPointerException("Invalid parameter 'node'")
80 | }
81 | innerExtract(node, path)
82 | }
83 |
84 |
85 | def extract(struct: Struct, path: Vector[String]): Any = {
86 | // @tailrec
87 | def innerExtract(field: Field, value: AnyRef, p: Vector[String]): Any = {
88 | def checkValidPath() = {
89 | if (p.nonEmpty) {
90 | throw new IllegalArgumentException(s"Invalid field selection for '${path.mkString(".")}'. It doesn't resolve to a primitive field")
91 | }
92 | }
93 |
94 |
95 | if (value == null) {
96 | throw new IllegalArgumentException(s"Invalid field selection for '${path.mkString(".")}'. Field '${field.name()}' is null")
97 | }
98 | Option(field.schema().name()).collect {
99 | case Decimal.LOGICAL_NAME =>
100 | value match {
101 |           case bd: java.math.BigDecimal =>
102 | checkValidPath()
103 | bd
104 | case array: Array[Byte] =>
105 | checkValidPath()
106 | Decimal.toLogical(field.schema, value.asInstanceOf[Array[Byte]])
107 | }
108 | case Date.LOGICAL_NAME =>
109 | value.asInstanceOf[Any] match {
110 | case d: java.util.Date =>
111 | checkValidPath()
112 | d
113 | case i: Int =>
114 | checkValidPath()
115 | Date.toLogical(field.schema, i)
116 | case _ => throw new IllegalArgumentException(s"Can't convert $value to Date for schema:${field.schema().`type`()}")
117 | }
118 | case Time.LOGICAL_NAME =>
119 | value.asInstanceOf[Any] match {
120 | case i: Int =>
121 | checkValidPath()
122 | Time.toLogical(field.schema, value.asInstanceOf[Int])
123 | case d: java.util.Date =>
124 | checkValidPath()
125 | d
126 | case _ => throw new IllegalArgumentException(s"Can't convert $value to Date for schema:${field.schema().`type`()}")
127 | }
128 | case Timestamp.LOGICAL_NAME =>
129 | value.asInstanceOf[Any] match {
130 | case l: Long =>
131 | checkValidPath()
132 | Timestamp.toLogical(field.schema, l)
133 | case d: java.util.Date =>
134 | checkValidPath()
135 | d
136 | case _ => throw new IllegalArgumentException(s"Can't convert $value to Date for schema:${field.schema().`type`()}")
137 | }
138 | }.getOrElse {
139 | val v = field.schema().`type`() match {
140 | case Schema.Type.BOOLEAN =>
141 | checkValidPath()
142 | value.asInstanceOf[Boolean]
143 | case Schema.Type.BYTES =>
144 | checkValidPath()
145 | value.asInstanceOf[Array[Byte]]
146 | case Schema.Type.FLOAT32 =>
147 | checkValidPath()
148 | value.asInstanceOf[Float]
149 | case Schema.Type.FLOAT64 =>
150 | checkValidPath()
151 | value.asInstanceOf[Double]
152 | case Schema.Type.INT8 =>
153 | checkValidPath()
154 | value.asInstanceOf[Byte]
155 | case Schema.Type.INT16 =>
156 | checkValidPath()
157 | value.asInstanceOf[Short]
158 | case Schema.Type.INT32 =>
159 | checkValidPath()
160 | value.asInstanceOf[Int]
161 | case Schema.Type.INT64 =>
162 | checkValidPath()
163 | value.asInstanceOf[Long]
164 | case Schema.Type.STRING =>
165 | checkValidPath()
166 | value.toString
167 |
168 | case Schema.Type.MAP =>
169 | if (p.isEmpty) {
170 | throw new IllegalArgumentException(s"Invalid field selection for '${path.mkString(".")}'. It doesn't resolve to a primitive field. It resolves to:${field.schema()}")
171 | }
172 | val map = value.asInstanceOf[java.util.Map[String, AnyRef]]
173 | val f = new Field(p.head, 0, field.schema().valueSchema())
174 |
175 | innerExtract(f, map.get(p.head), p.tail)
176 |
177 | case Schema.Type.STRUCT =>
178 | if (p.isEmpty) {
179 | throw new IllegalArgumentException(s"Invalid field selection for '${path.mkString(".")}'. It doesn't resolve to a primitive field. It resolves to:${field.schema()}")
180 | }
181 | val s = value.asInstanceOf[Struct]
182 | val childField = Option(s.schema().field(p.head))
183 | .getOrElse {
184 | throw new IllegalArgumentException(s"Invalid field selection for '${path.mkString(".")}'. Can't find field '${p.head}'. Fields available:${s.schema().fields().asScala.map(_.name()).mkString(",")}")
185 | }
186 |
187 | innerExtract(childField, s.get(childField), p.tail)
188 | case other => sys.error(s"$other is not a recognized schema")
189 | }
190 | v
191 | }
192 | }
193 |
194 | val field = Option(struct.schema().field(path.head)).getOrElse {
195 | throw new IllegalArgumentException(s"Couldn't find field '${path.head}' in the schema:${struct.schema().fields().asScala.map(_.name()).mkString(",")}")
196 | }
197 |
198 | innerExtract(field, struct.get(field), path.tail)
199 | }
200 | }
201 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/errors/ErrorHandler.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.errors
18 |
19 | import java.text.SimpleDateFormat
20 | import java.util.Date
21 |
22 | import com.typesafe.scalalogging.StrictLogging
23 |
24 | import scala.util.{Failure, Success, Try}
25 |
26 | /**
27 | * Created by andrew@datamountaineer.com on 29/05/16.
28 | * stream-reactor-maven
29 | */
30 | trait ErrorHandler extends StrictLogging {
31 | var errorTracker: Option[ErrorTracker] = None
32 | private val dateFormatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS'Z'")
33 |
34 | def initialize(maxRetries: Int, errorPolicy: ErrorPolicy): Unit = {
35 | errorTracker = Some(ErrorTracker(maxRetries, maxRetries, "", new Date(), errorPolicy))
36 | }
37 |
38 | def getErrorTrackerRetries() : Int = {
39 | errorTracker.get.retries
40 | }
41 |
42 | def errored() : Boolean = {
43 | errorTracker.get.retries != errorTracker.get.maxRetries
44 | }
45 |
46 | def handleTry[A](t : Try[A]) : Option[A] = {
47 | require(errorTracker.isDefined, "ErrorTracker is not set. Call initialize() first.")
48 | t
49 | match {
50 | case Success(s) => {
51 | //success, check if we had previous errors.
52 | if (errorTracker.get.retries != errorTracker.get.maxRetries) {
53 | logger.info(s"Recovered from error ${errorTracker.get.lastErrorMessage} at " +
54 | s"${dateFormatter.format(errorTracker.get.lastErrorTimestamp)}")
55 | }
56 | //cleared error
57 | resetErrorTracker()
58 | Some(s)
59 | }
60 | case Failure(f) =>
61 | //decrement the retry count
62 | logger.error(s"Encountered error ${f.getMessage}", f)
63 | this.errorTracker = Some(decrementErrorTracker(errorTracker.get, f.getMessage))
64 | handleError(f, errorTracker.get.retries, errorTracker.get.policy)
65 | None
66 | }
67 | }
68 |
69 | def resetErrorTracker() = {
70 | errorTracker = Some(ErrorTracker(errorTracker.get.maxRetries, errorTracker.get.maxRetries, "", new Date(),
71 | errorTracker.get.policy))
72 | }
73 |
74 | private def decrementErrorTracker(errorTracker: ErrorTracker, msg: String): ErrorTracker = {
75 | if (errorTracker.maxRetries == -1) {
76 | ErrorTracker(errorTracker.retries, errorTracker.maxRetries, msg, new Date(), errorTracker.policy)
77 | } else {
78 | ErrorTracker(errorTracker.retries - 1, errorTracker.maxRetries, msg, new Date(), errorTracker.policy)
79 | }
80 | }
81 |
82 | private def handleError(f: Throwable, retries: Int, policy: ErrorPolicy): Unit = {
83 | policy.handle(f, true, retries)
84 | }
85 | }
86 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/errors/ErrorPolicy.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.errors
18 |
19 | import java.util.Date
20 |
21 | import com.datamountaineer.streamreactor.connect.errors.ErrorPolicyEnum.ErrorPolicyEnum
22 | import com.typesafe.scalalogging.StrictLogging
23 | import org.apache.kafka.connect.errors.RetriableException
24 |
25 | /**
26 | * Created by andrew@datamountaineer.com on 19/05/16.
27 | * kafka-connect-common
28 | */
29 | object ErrorPolicyEnum extends Enumeration {
30 | type ErrorPolicyEnum = Value
31 | val NOOP, THROW, RETRY = Value
32 | }
33 |
34 | case class ErrorTracker(retries: Int, maxRetries: Int, lastErrorMessage: String, lastErrorTimestamp: Date, policy: ErrorPolicy)
35 |
36 | trait ErrorPolicy extends StrictLogging {
37 | def handle(error: Throwable, sink: Boolean = true, retryCount: Int = 0)
38 | }
39 |
40 | object ErrorPolicy extends StrictLogging {
41 | def apply(policy: ErrorPolicyEnum): ErrorPolicy = {
42 | policy match {
43 | case ErrorPolicyEnum.NOOP => NoopErrorPolicy()
44 | case ErrorPolicyEnum.THROW => ThrowErrorPolicy()
45 | case ErrorPolicyEnum.RETRY => RetryErrorPolicy()
46 | }
47 | }
48 | }
49 |
50 | case class NoopErrorPolicy() extends ErrorPolicy {
51 | override def handle(error: Throwable, sink: Boolean = true, retryCount: Int = 0){
52 | logger.warn(s"Error policy NOOP: ${error.getMessage}. Processing continuing.")
53 | }
54 | }
55 |
56 | case class ThrowErrorPolicy() extends ErrorPolicy {
57 | override def handle(error: Throwable, sink: Boolean = true, retryCount: Int = 0){
58 | throw new RuntimeException(error)
59 | }
60 | }
61 |
62 | case class RetryErrorPolicy() extends ErrorPolicy {
63 |
64 | override def handle(error: Throwable, sink: Boolean = true, retryCount: Int) = {
65 | if (retryCount == 0) {
66 | throw new RuntimeException(error)
67 | }
68 | else {
69 | logger.warn(s"Error policy set to RETRY. Remaining attempts $retryCount")
70 | throw new RetriableException(error)
71 | }
72 | }
73 | }
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/offsets/OffsetHandler.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.offsets
18 |
19 | import java.util
20 | import java.util.Collections
21 | import org.apache.kafka.connect.source.SourceTaskContext
22 |
23 | import scala.collection.JavaConverters._
24 |
25 | /**
26 | * Created by andrew@datamountaineer.com on 25/04/16.
27 | * stream-reactor
28 | */
29 | object OffsetHandler {
30 |
31 | /**
32 | * Recover the offsets
33 | *
34 | * @param lookupPartitionKey A partition key for the offset map
35 | * @param sourcePartition A list of datasets, i.e. tables, to get the partition offsets for
36 | * @param context The Source task context to get the offsets from
37 | * @return a List of partition offsets for the datasets
38 | * */
39 | def recoverOffsets(lookupPartitionKey: String, sourcePartition: util.List[String], context: SourceTaskContext) = {
40 | val partitions = sourcePartition.asScala.map(t => Collections.singletonMap(lookupPartitionKey, t)).asJava
41 | context.offsetStorageReader().offsets(partitions)
42 | }
43 |
44 | /**
45 | * Returns a last stored offset for the partitionKeyValue
46 | *
47 | * @param offsets The offsets to search through.
48 | * @param lookupPartitionKey The key for this partition, i.e. cassandra.assigned.tables.
49 | * @param partitionKeyValue The value for the partition, i.e. table1.
50 | * @param lookupOffsetCol The offset column to look for, for example the timestamp column from table1.
51 | * @return The last stored value for the partition, if any, as an Option[T].
52 | * */
53 | def recoverOffset[T](offsets: util.Map[util.Map[String, String],util.Map[String, Object]],
54 | lookupPartitionKey: String,
55 | partitionKeyValue: String,
56 | lookupOffsetCol: String
57 | ) : Option[T] = {
58 | val partition = Collections.singletonMap(lookupPartitionKey, partitionKeyValue)
59 | val offset = offsets.get(partition)
60 | if (offset != null && offset.get(lookupOffsetCol) != null) {
61 | Some(offset.get(lookupOffsetCol).asInstanceOf[T])
62 | } else {
63 | None
64 | }
65 | }
66 | }
67 |
68 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/queues/QueueHelpers.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.queues
18 |
19 | import java.util
20 | import java.util.concurrent.{LinkedBlockingQueue, TimeUnit}
21 |
22 | import com.google.common.collect.Queues
23 | import com.typesafe.scalalogging.StrictLogging
24 | import org.apache.kafka.connect.source.SourceRecord
25 |
26 | /**
27 | * Created by r on 3/1/16.
28 | */
29 | object QueueHelpers extends StrictLogging {
30 |
31 | implicit class LinkedBlockingQueueExtension[T](val lbq: LinkedBlockingQueue[T]) extends AnyVal {
32 | def drainWithTimeoutTo(collection: util.Collection[_ >: T], maxElements: Int, timeout: Long, unit: TimeUnit): Int = {
33 | Queues.drain[T](lbq, collection, maxElements, timeout, unit)
34 | }
35 | }
36 |
37 | def drainWithTimeoutNoGauva(records: util.ArrayList[SourceRecord], batchSize: Int, lingerTimeout: Long, queue: LinkedBlockingQueue[SourceRecord]) = {
38 | var added = 0
39 | val deadline = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(lingerTimeout) //lingerTimeout is expected in milliseconds
40 |
41 | //wait for batch size or linger timeout, whichever comes first
42 | while (added < batchSize) {
43 | added += queue.drainTo(records, batchSize - added)
44 | //still not at batch size, poll with timeout
45 | if (added < batchSize) {
46 | val record = queue.poll(deadline - System.nanoTime(), TimeUnit.NANOSECONDS)
47 | record match {
48 | case s: SourceRecord =>
49 | records.add(s)
50 | added += 1
51 | case _ => added = batchSize
52 | }
53 | }
54 | }
55 | }
56 |
57 | /**
58 | * Drain the queue with timeout
59 | *
60 | * @param queue The queue to drain
61 | * @param batchSize Batch size to take
62 | * @param timeOut Timeout to take the batch
63 | * @return ArrayList of T
64 | * */
65 | def drainQueueWithTimeOut[T](queue: LinkedBlockingQueue[T], batchSize: Int, timeOut: Long) = {
66 | val l = new util.ArrayList[T]()
67 | logger.debug(s"Found ${queue.size()}. Draining entries to batchSize ${batchSize}.")
68 | queue.drainWithTimeoutTo(l, batchSize, timeOut, TimeUnit.MILLISECONDS)
69 | l
70 | }
71 |
72 | /**
73 | * Drain the queue
74 | *
75 | * @param queue The queue to drain
76 | * @param batchSize Batch size to take
77 | * @return ArrayList of T
78 | * */
79 | def drainQueue[T](queue: LinkedBlockingQueue[T], batchSize: Int) = {
80 | val l = new util.ArrayList[T]()
81 | logger.debug(s"Found ${queue.size()}. Draining entries to batchSize ${batchSize}.")
82 | queue.drainTo(l, batchSize)
83 | l
84 | }
85 | }
86 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/rowkeys/RowKeyBuilderString.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.rowkeys
18 |
19 | import org.apache.kafka.connect.data.{Schema, Struct}
20 | import org.apache.kafka.connect.sink.SinkRecord
21 |
22 | import scala.collection.JavaConverters._
23 |
24 | /**
25 | * Builds the new record key for the given connect SinkRecord.
26 | */
27 | trait StringKeyBuilder {
28 | def build(record: SinkRecord): String
29 | }
30 |
31 | /**
32 | * Uses the connect record coordinates (topic, partition, offset) to build a unique key
33 | *
34 | * @param keyDelimiter Row key delimiter
35 | * @return a unique string for the message identified by: <topic>|<partition>|<offset>
36 | */
37 | class StringGenericRowKeyBuilder(keyDelimiter: String = "|") extends StringKeyBuilder {
38 |
39 | override def build(record: SinkRecord): String = {
40 | Seq(record.topic(), record.kafkaPartition(), record.kafkaOffset().toString).mkString(keyDelimiter)
41 | }
42 | }
43 |
44 | /**
45 | * Creates a key based on the connect SinkRecord instance key. Only connect Schema primitive types are handled
46 | */
47 | class StringSinkRecordKeyBuilder extends StringKeyBuilder {
48 | override def build(record: SinkRecord): String = {
49 | val `type` = record.keySchema().`type`()
50 | require(`type`.isPrimitive, "The SinkRecord key schema is not a primitive type")
51 |
52 | `type`.name() match {
53 | case "INT8" | "INT16" | "INT32" | "INT64" | "FLOAT32" | "FLOAT64" | "BOOLEAN" | "STRING" | "BYTES" => record.key().toString
54 | case other => throw new IllegalArgumentException(s"$other is not supported by the ${getClass.getName}")
55 | }
56 | }
57 | }
58 |
59 | /**
60 | * Builds a new key from the payload fields specified
61 | *
62 | * @param keys The payload field names used to build the key
63 | * @param keyDelimiter Row key delimiter
64 | */
65 | case class StringStructFieldsStringKeyBuilder(keys: Seq[String],
66 | keyDelimiter: String = ".") extends StringKeyBuilder {
67 | private val availableSchemaTypes = Set(
68 | Schema.Type.BOOLEAN,
69 | Schema.Type.BYTES,
70 | Schema.Type.FLOAT32,
71 | Schema.Type.FLOAT64,
72 | Schema.Type.INT8,
73 | Schema.Type.INT16,
74 | Schema.Type.INT32,
75 | Schema.Type.INT64,
76 | Schema.Type.STRING
77 | )
78 |
79 | require(keys.nonEmpty, "Invalid keys provided")
80 |
81 | /**
82 | * Builds a row key for a record
83 | *
84 | * @param record a SinkRecord to build the key for
85 | * @return A row key string
86 | * */
87 | override def build(record: SinkRecord): String = {
88 | val struct = record.value().asInstanceOf[Struct]
89 | val schema = struct.schema
90 |
91 | val availableFields = schema.fields().asScala.map(_.name).toSet
92 | val missingKeys = keys.filterNot(availableFields.contains)
93 | require(missingKeys.isEmpty, s"${missingKeys.mkString(",")} keys are not present in the SinkRecord payload:${availableFields.mkString(",")}")
94 |
95 | keys.flatMap { case key =>
96 | val field = schema.field(key)
97 | val value = struct.get(field)
98 |
99 | require(value != null, s"$key field value is null. A non-null value is required for the fields creating the HBase row key")
100 | if (availableSchemaTypes.contains(field.schema().`type`())) Some(value.toString)
101 | else None
102 | }.mkString(keyDelimiter)
103 | }
104 | }
105 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/rowkeys/RowKeyModeEnums.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.rowkeys
18 |
19 | /**
20 | * Created by andrew@datamountaineer.com on 27/05/16.
21 | * kafka-connect-common
22 | */
23 | object RowKeyModeEnum extends Enumeration {
24 | type RowKeyModeEnum = Value
25 | val FIELDS, GENERIC, SINKRECORD, AVRO = Value
26 | }
27 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/schemas/PayloadFields.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.schemas
18 |
19 | import org.apache.kafka.common.config.ConfigException
20 |
21 | /**
22 | * Contains the SinkConnect payload fields to consider and/or their mappings
23 | *
24 | * @param includeAllFields Boolean flag to indicate if all fields are considered
25 | * @param fieldsMappings Field mappings from SinkRecord to HBase
26 | */
27 | case class PayloadFields(includeAllFields: Boolean,
28 | fieldsMappings: Map[String, String])
29 |
30 | object PayloadFields {
31 | /**
32 | * Works out the fields and their mappings to be used when inserting a new row
33 | *
34 | * @param setting - The configuration specifying the fields and their mappings
35 | * @return A dictionary of fields and their mappings alongside a flag specifying if all fields should be used. If no mapping has been specified, the field name is used as the mapping
36 | */
37 | def apply(setting: Option[String]): PayloadFields = {
38 | setting match {
39 | case None => PayloadFields(includeAllFields = true, Map.empty[String, String])
40 | case Some(c) =>
41 |
42 | val mappings = c.split(",").map { case f =>
43 | f.trim.split("=").toSeq match {
44 | case Seq(field) =>
45 | field -> field
46 | case Seq(field, alias) =>
47 | field -> alias
48 | case _ => throw new ConfigException(s"$c is not valid. Need to set the fields and mappings like: field1,field2,field3=alias3,[field4, field5=alias5]")
49 | }
50 | }.toMap
51 |
52 | PayloadFields(mappings.contains("*"), mappings - "*")
53 | }
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/schemas/SchemaHelper.scala:
--------------------------------------------------------------------------------
1 | package com.datamountaineer.streamreactor.connect.schemas
2 |
3 | import org.apache.kafka.connect.data.Field
4 | import org.apache.kafka.connect.data.Schema
5 |
6 | import scala.collection.JavaConverters._
7 |
8 | object SchemaHelper {
9 | implicit final class SchemaExtensions(val schema: Schema) extends AnyVal {
10 | def extractSchema(path: String): Either[FieldSchemaExtractionError, Schema] = {
11 | val fields = path.split('.')
12 | val start: Either[FieldSchemaExtractionError, State] = Right(State(schema, Vector.empty))
13 | fields.foldLeft(start) {
14 | case (l@Left(_), _) => l
15 | case (Right(state), field) =>
16 | state.schema.`type`() match {
17 | case Schema.Type.STRUCT | Schema.Type.MAP =>
18 | state.schema.extractField(field) match {
19 | case Some(value) => Right(state.copy(schema = value.schema(), path = state.path :+ field))
20 | case None =>
21 | val path = (state.path :+ field).mkString(".")
22 | val msg = s"Field [$path] does not exist. Schema is ${state.schema.`type`()}. Available Fields are [${state.schema.fields().asScala.map(_.name()).mkString(",")}]"
23 | Left(FieldSchemaExtractionError(path, msg))
24 | }
25 | case other=>
26 | val path = state.path.mkString(".")
27 | Left(FieldSchemaExtractionError(path, s"Expecting a schema to be a structure but found [${other.getName}]."))
28 | }
29 | }.map(_.schema)
30 | }
31 |
32 | def extractField(field: String): Option[Field] = {
33 | Option(schema.field(field))
34 | }
35 | }
36 |
37 | private final case class State(schema: Schema, path: Vector[String])
38 | }
39 |
40 | case class FieldSchemaExtractionError(path: String, msg: String)
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/schemas/SchemaRegistry.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.schemas
18 |
19 | import com.typesafe.scalalogging.StrictLogging
20 | import io.confluent.kafka.schemaregistry.client.rest.RestService
21 |
22 | import scala.util.{Failure, Success, Try}
23 | import scala.collection.JavaConverters._
24 |
25 | /**
26 | * Created by andrew@datamountaineer.com on 13/06/16.
27 | * kafka-connect-common
28 | */
29 | object SchemaRegistry extends StrictLogging {
30 |
31 | /**
32 | * Get a schema for a given subject
33 | *
34 | * @param url The url of the schema registry
35 | * @param subject The subject to get the schema for
36 | * @return The schema for the subject
37 | * */
38 | def getSchema(url : String, subject : String) : String = {
39 | val registry = new RestService(url)
40 |
41 | Try(registry.getLatestVersion(subject).getSchema) match {
42 | case Success(s) => {
43 | logger.info(s"Found schema for $subject")
44 | s
45 | }
46 | case Failure(f) => {
47 | logger.warn("Unable to connect to the Schema registry. An attempt will be made to create the table" +
48 | " on receipt of the first records.")
49 | ""
50 | }
51 | }
52 | }
53 |
54 | /**
55 | * Get a list of subjects from the registry
56 | *
57 | * @param url The url to the schema registry
58 | * @return A list of subjects/topics
59 | * */
60 | def getSubjects(url: String) : List[String] = {
61 | val registry = new RestService(url)
62 | val schemas: List[String] = Try(registry.getAllSubjects.asScala.toList) match {
63 | case Success(s) => s
64 | case Failure(f) => {
65 | logger.warn("Unable to connect to the Schema registry. An attempt will be made to create the table" +
66 | " on receipt of the first records.")
67 | List.empty[String]
68 | }
69 | }
70 |
71 | schemas.foreach(s => logger.info(s"Found subject $s"))
72 | schemas
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/schemas/StructFieldExtractor.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.schemas
18 |
19 | /**
20 | * Created by andrew@datamountaineer.com on 29/05/16.
21 | * kafka-connect-common
22 | */
23 |
24 | import java.text.SimpleDateFormat
25 | import java.util.TimeZone
26 |
27 | import org.apache.kafka.connect.data._
28 |
29 | import scala.collection.JavaConverters._
30 |
31 | trait StructFieldsValuesExtractor {
32 | def get(struct: Struct): Seq[(String, Any)]
33 | }
34 |
35 | /**
36 | * Extracts fields from a SinkRecord Struct based on the provided set of columns.
37 | *
38 | * @param includeAllFields Boolean indicating if all the fields from the SinkRecord are to be written to the sink
39 | * @param fieldsAliasMap A map of fields and their aliases, if provided, to extract from the SinkRecord
40 | **/
41 | case class StructFieldsExtractor(includeAllFields: Boolean, fieldsAliasMap: Map[String, String]) extends StructFieldsValuesExtractor {
42 |
43 | /**
44 | * Get a sequence of columns names to column values for a given struct
45 | *
46 | * @param struct A SinkRecord struct
47 | * @return a Sequence of column names and values
48 | **/
49 | def get(struct: Struct): Seq[(String, AnyRef)] = {
50 | val schema = struct.schema()
51 | val fields: Seq[Field] = if (includeAllFields) schema.fields().asScala
52 | else schema.fields().asScala.filter(f => fieldsAliasMap.contains(f.name()))
53 |
54 | val fieldsAndValues = fields.flatMap { case field =>
55 | getFieldValue(field, struct).map(value => fieldsAliasMap.getOrElse(field.name(), field.name()) -> value)
56 | }
57 | fieldsAndValues
58 | }
59 |
60 | /**
61 | * For a field in a struct return the value
62 | *
63 | * @param field A field to return the value for
64 | * @param struct A struct to extract the field from
65 | * @return an optional value for the field
66 | **/
67 | private def getFieldValue(field: Field, struct: Struct): Option[AnyRef] = {
68 | Option(struct.get(field)) match {
69 | case None => None
70 | case Some(value) =>
71 | val fieldName = field.name()
72 | Option(field.schema().name()).collect {
73 | case Decimal.LOGICAL_NAME =>
74 | value match {
75 | case bd: BigDecimal => bd
76 | case array: Array[Byte] => Decimal.toLogical(field.schema, value.asInstanceOf[Array[Byte]])
77 | }
78 | case Date.LOGICAL_NAME =>
79 | value.asInstanceOf[Any] match {
80 | case d: java.util.Date => d
81 | case i: Int => Date.toLogical(field.schema, i)
82 | case _ => throw new IllegalArgumentException(s"Can't convert $value to Date for schema:${field.schema().`type`()}")
83 | }
84 | case Time.LOGICAL_NAME =>
85 | value.asInstanceOf[Any] match {
86 | case i: Int => Time.toLogical(field.schema, value.asInstanceOf[Int])
87 | case d: java.util.Date => d
88 | case _ => throw new IllegalArgumentException(s"Can't convert $value to Date for schema:${field.schema().`type`()}")
89 | }
90 | case Timestamp.LOGICAL_NAME =>
91 | value.asInstanceOf[Any] match {
92 | case l: Long => Timestamp.toLogical(field.schema, l)
93 | case d: java.util.Date => d
94 | case _ => throw new IllegalArgumentException(s"Can't convert $value to Date for schema:${field.schema().`type`()}")
95 | }
96 | }.orElse {
97 | val v = field.schema().`type`() match {
98 | case Schema.Type.BOOLEAN => struct.getBoolean(fieldName)
99 | case Schema.Type.BYTES => struct.getBytes(fieldName)
100 | case Schema.Type.FLOAT32 => struct.getFloat32(fieldName)
101 | case Schema.Type.FLOAT64 => struct.getFloat64(fieldName)
102 | case Schema.Type.INT8 => struct.getInt8(fieldName)
103 | case Schema.Type.INT16 => struct.getInt16(fieldName)
104 | case Schema.Type.INT32 => struct.getInt32(fieldName)
105 | case Schema.Type.INT64 => struct.getInt64(fieldName)
106 | case Schema.Type.STRING => struct.getString(fieldName)
107 | case other => sys.error(s"$other is not a recognized schema")
108 | }
109 | Some(v)
110 | }
111 | }
112 | }
113 | }
114 |
115 |
116 | object StructFieldsExtractor {
117 | val DateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
118 | val TimeFormat: SimpleDateFormat = new SimpleDateFormat("HH:mm:ss.SSSZ")
119 | DateFormat.setTimeZone(TimeZone.getTimeZone("UTC"))
120 | }
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/schemas/StructHelper.scala:
--------------------------------------------------------------------------------
1 | package com.datamountaineer.streamreactor.connect.schemas
2 |
3 | import org.apache.kafka.connect.data.Struct
4 |
5 | import scala.collection.JavaConverters._
6 |
7 | object StructHelper {
8 |
9 | implicit final class StructExtension(val struct: Struct) extends AnyVal {
10 | def extractValueFromPath(path: String): Either[FieldValueExtractionError, Option[AnyRef]] = {
11 | val fields = path.split('.')
12 | val start: Either[FieldValueExtractionError, State] = Right(State(Some(struct), Vector.empty))
13 |
14 | fields.foldLeft(start) {
15 | case (l@Left(_), _) => l
16 | case (s@Right(state), field) =>
17 | state.value.fold(s.asInstanceOf[Either[FieldValueExtractionError, State]]) {
18 | case s: Struct =>
19 | s.fieldValue(field) match {
20 | case Some(value) =>
21 | Right.apply[FieldValueExtractionError, State](state.copy(value = Some(value), path = state.path :+ field))
22 | case None =>
23 | val path = (state.path :+ field).mkString(".")
24 | val msg = s"Field [$path] does not exist. Available Fields are [${s.schema().fields().asScala.map(_.name()).mkString(",")}]"
25 | Left.apply[FieldValueExtractionError, State](FieldValueExtractionError(path, msg))
26 | }
27 | case other =>
28 | val path = state.path.mkString(".")
29 | Left.apply[FieldValueExtractionError, State](FieldValueExtractionError(path, s"Expecting a structure but found [$other]."))
30 | }
31 | }
32 | .map(_.value)
33 | }
34 |
35 | def fieldValue(field: String): Option[AnyRef] = {
36 | Option(struct.schema().field(field)).map { _ =>
37 | struct.get(field)
38 | }
39 | }
40 | }
41 |
42 | private final case class State(value: Option[AnyRef], path: Vector[String])
43 |
44 | }
45 |
46 | case class FieldValueExtractionError(path: String, msg: String)
47 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/serialization/AvroSerializer.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.serialization
18 |
19 | import java.io.{ByteArrayOutputStream, InputStream, OutputStream}
20 |
21 | import com.sksamuel.avro4s.{RecordFormat, SchemaFor}
22 | import org.apache.avro.Schema
23 | import org.apache.avro.generic.{GenericDatumReader, GenericDatumWriter, GenericRecord}
24 | import org.apache.avro.io.{DecoderFactory, EncoderFactory}
25 |
26 | object AvroSerializer {
27 | def write[T <: Product](t: T)(implicit os: OutputStream, formatter: RecordFormat[T], schemaFor: SchemaFor[T]): Unit = write(apply(t), schemaFor())
28 |
29 | def write(record: GenericRecord, schema: Schema)(implicit os: OutputStream) = {
30 | val writer = new GenericDatumWriter[GenericRecord](schema)
31 | val encoder = EncoderFactory.get().binaryEncoder(os, null)
32 |
33 | writer.write(record, encoder)
34 | encoder.flush()
35 | os.flush()
36 | }
37 |
38 | def getBytes[T <: Product](t: T)(implicit recordFormat: RecordFormat[T], schemaFor: SchemaFor[T]): Array[Byte] = getBytes(recordFormat.to(t), schemaFor())
39 |
40 | def getBytes(record: GenericRecord, schema: Schema): Array[Byte] = {
41 | implicit val output = new ByteArrayOutputStream()
42 | write(record, schema)
43 | output.toByteArray
44 | }
45 |
46 | def read(is: InputStream, schema: Schema): GenericRecord = {
47 | val reader = new GenericDatumReader[GenericRecord](schema)
48 | val decoder = DecoderFactory.get().binaryDecoder(is, null)
49 | reader.read(null, decoder)
50 | }
51 |
52 | def read[T <: Product](is: InputStream)(implicit schemaFor: SchemaFor[T], recordFormat: RecordFormat[T]): T = recordFormat.from(read(is, schemaFor()))
53 |
54 | def apply[T <: Product](t: T)(implicit formatter: RecordFormat[T]): GenericRecord = formatter.to(t)
55 | }
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/sink/DbWriter.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.sink
18 |
19 | import org.apache.kafka.connect.sink.SinkRecord
20 |
21 |
22 | /**
23 | * Defines the construct for inserting a new row for the connect sink record
24 | */
25 | trait DbWriter extends AutoCloseable {
26 | def write(records: Seq[SinkRecord]): Unit
27 | }
28 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/source/ExponentialBackOff.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.source
18 |
19 | /**
20 | * Created by andrew@datamountaineer.com on 03/03/2017.
21 | * kafka-connect-common
22 | */
23 |
24 | import java.time.{Clock, Duration, Instant}
25 |
26 | class ExponentialBackOff(step: Duration, cap: Duration, iteration: Int = 0, clock: Clock = Clock.systemUTC(), first: Boolean = true) {
27 | def now = Instant.now(clock)
28 | val endTime: Instant = now.plus(exponentialInterval(iteration))
29 |
30 | def remaining: Duration = Duration.between(now, endTime)
31 |
32 | def passed: Boolean = now.isAfter(this.endTime)
33 |
34 | def nextSuccess(): ExponentialBackOff = {
35 | new ExponentialBackOff(step, cap, 0, clock, false)
36 | }
37 |
38 | def nextFailure(): ExponentialBackOff = {
39 | new ExponentialBackOff(step, cap, iteration + 1, clock, false)
40 | }
41 |
42 | private def exponentialInterval(i: Int) = {
43 | if (first) Duration.ofMillis(-1) else Duration.ofMillis(Math.min(cap.toMillis, step.toMillis * Math.pow(2, i).toLong))
44 | }
45 | }
46 |
47 |
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/source/ExponentialBackOffHandler.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.source
18 |
19 | import java.time.Duration
20 |
21 | import com.typesafe.scalalogging.StrictLogging
22 |
23 | /**
24 | * Created by andrew@datamountaineer.com on 03/03/2017.
25 | * kafka-connect-common
26 | */
27 | class ExponentialBackOffHandler(name: String, step: Duration, cap: Duration) extends StrictLogging {
28 | private var backoff = new ExponentialBackOff(step, cap)
29 |
30 | def ready = backoff.passed
31 |
32 | def failure = {
33 | backoff = backoff.nextFailure
34 | logger.info(s"$name: Next poll will be around ${backoff.endTime}")
35 | }
36 |
37 | def success = {
38 | backoff = backoff.nextSuccess
39 | logger.info(s"$name: Backing off. Next poll will be around ${backoff.endTime}")
40 | }
41 |
42 | def update(status: Boolean): Unit = {
43 | if (status) {
44 | success
45 | } else {
46 | failure
47 | }
48 | }
49 |
50 | def remaining = backoff.remaining
51 | }
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/utils/JarManifest.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.utils
18 |
19 | import java.io.File
20 | import java.net.URL
21 | import java.util.jar.JarFile
22 |
23 | import scala.collection.mutable
24 |
25 | case class JarManifest(location: URL) {
26 |
27 | val map = mutable.Map.empty[String, String]
28 |
29 | var msg = "unknown"
30 | try {
31 | val file = new File(location.toURI)
32 | if (file.isFile) {
33 | val jarFile = new JarFile(file)
34 | val manifest = jarFile.getManifest
35 | val attributes = manifest.getMainAttributes
36 | map += "StreamReactor-Version" -> attributes.getValue("StreamReactor-Version")
37 | map += "Kafka-Version" -> attributes.getValue("Kafka-Version")
38 | map += "Git-Repo" -> attributes.getValue("Git-Repo")
39 | map += "Git-Commit-Hash" -> attributes.getValue("Git-Commit-Hash")
40 | map += "Git-Tag" -> attributes.getValue("Git-Tag")
41 | map += "StreamReactor-Docs" -> attributes.getValue("StreamReactor-Docs")
42 | }
43 | }
44 | catch {
45 | case t: Throwable => msg = t.getMessage
46 | }
47 |
48 | def version(): String = map.getOrElse("StreamReactor-Version", "")
49 |
50 | def gitRepo(): String = map.getOrElse("Git-Repo", "")
51 |
52 | def gitCommit(): String = map.getOrElse("Git-Commit-Hash", "")
53 |
54 | def gitTag(): String = map.getOrElse("Git-Tag", "")
55 |
56 | def printManifest(): String = {
57 | val msg = "unknown"
58 |
59 | s"""
60 | |StreamReactor-Version: ${map.getOrElse("StreamReactor-Version", msg)}
61 | |Kafka-Version: ${map.getOrElse("Kafka-Version", msg)}
62 | |Git-Repo: ${map.getOrElse("Git-Repo", msg)}
63 | |Git-Commit-Hash: ${map.getOrElse("Git-Commit-Hash", msg)}
64 | |Git-Tag: ${map.getOrElse("Git-Tag", msg)}
65 | |StreamReactor-Docs: ${map.getOrElse("StreamReactor-Docs", msg)}
66 | """.
67 | stripMargin
68 | }
69 | }
--------------------------------------------------------------------------------
/src/main/scala/com/datamountaineer/streamreactor/connect/utils/ProgressCounter.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.utils
18 |
19 | import java.text.SimpleDateFormat
20 | import java.util.Date
21 |
22 | import com.typesafe.scalalogging.StrictLogging
23 | import org.apache.kafka.connect.connector.ConnectRecord
24 |
25 | import scala.collection.immutable.Seq
26 | import scala.collection.mutable
27 |
28 | /**
29 | * Created by andrew@datamountaineer.com on 03/03/2017.
30 | * kafka-connect-common
31 | */
32 | case class ProgressCounter(periodMillis: Int = 60000) extends StrictLogging {
33 | private val startTime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date())
34 | private var timestamp: Long = 0
35 | private val counter = mutable.Map.empty[String, Long]
36 |
37 | def update[T <: ConnectRecord[T]](records: Seq[ConnectRecord[T]]): Unit = {
38 | val newTimestamp = System.currentTimeMillis()
39 |
40 | records.foreach(r => counter.put(r.topic(), counter.getOrElse(r.topic(), 0L) + 1L))
41 |
42 | if ((newTimestamp - timestamp) >= periodMillis && records.nonEmpty) {
43 | counter.foreach({ case (k, v) => logger.info(s"Delivered $v records for $k since $startTime") })
44 | counter.clear() //reset the per-topic counts after logging; counter.empty would be a no-op
45 | timestamp = newTimestamp
46 | }
47 | }
48 |
49 | def empty(): Unit = counter.clear()
50 |
51 | }
52 |
--------------------------------------------------------------------------------
/src/test/resources/cts_keystore.jks:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lensesio/kafka-connect-common/a2c540fea19c1409a7aec6dd88dac1433ad85dee/src/test/resources/cts_keystore.jks
--------------------------------------------------------------------------------
/src/test/resources/cts_truststore.jks:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lensesio/kafka-connect-common/a2c540fea19c1409a7aec6dd88dac1433ad85dee/src/test/resources/cts_truststore.jks
--------------------------------------------------------------------------------
/src/test/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | #
2 | # Copyright 2017 Datamountaineer.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | #
16 |
17 | # suppress inspection "UnusedProperty" for whole file
18 | log4j.rootLogger=INFO,stdout
19 |
20 | #stdout
21 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
22 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
23 | log4j.appender.stdout.layout.conversionPattern=%d{ISO8601} %-5p [%t] [%c] [%M:%L] %m%n
24 |
25 | #Turn down cassandra logging
26 | log4j.logger.com.datastax.driver.core=WARN
27 | log4j.logger.org.apache.cassandra.db=WARN
28 | log4j.logger.org.apache.cassandra.transport.Server=WARN
29 | log4j.logger.org.apache.cassandra.utils.SigarLibrary=WARN
30 | log4j.logger.org.apache.cassandra.config=WARN
31 | log4j.logger.org.apache.cassandra.service=WARN
32 | log4j.logger.com.datastax.driver.core.Cluster=ERROR
33 |
--------------------------------------------------------------------------------
/src/test/resources/stc_keystore.jks:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lensesio/kafka-connect-common/a2c540fea19c1409a7aec6dd88dac1433ad85dee/src/test/resources/stc_keystore.jks
--------------------------------------------------------------------------------
/src/test/resources/stc_truststore.jks:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lensesio/kafka-connect-common/a2c540fea19c1409a7aec6dd88dac1433ad85dee/src/test/resources/stc_truststore.jks
--------------------------------------------------------------------------------
/src/test/resources/sts_keystore.jks:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lensesio/kafka-connect-common/a2c540fea19c1409a7aec6dd88dac1433ad85dee/src/test/resources/sts_keystore.jks
--------------------------------------------------------------------------------
/src/test/resources/sts_truststore.jks:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lensesio/kafka-connect-common/a2c540fea19c1409a7aec6dd88dac1433ad85dee/src/test/resources/sts_truststore.jks
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/TestUtilsBase.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect
18 |
19 | import java.util
20 | import java.util.Collections
21 |
22 | import com.sksamuel.avro4s.RecordFormat
23 | import org.apache.avro.generic.GenericData
24 | import org.apache.avro.generic.GenericRecord
25 | import org.apache.kafka.connect.data.Schema
26 | import org.apache.kafka.connect.data.SchemaBuilder
27 | import org.apache.kafka.connect.data.Struct
28 | import org.apache.kafka.connect.sink.SinkRecord
29 | import org.apache.kafka.connect.source.SourceTaskContext
30 | import org.apache.kafka.connect.storage.OffsetStorageReader
31 | import org.mockito.MockitoSugar
32 | import org.scalatest.BeforeAndAfter
33 | import org.scalatest.matchers.should.Matchers
34 | import org.scalatest.wordspec.AnyWordSpec
35 |
36 | import scala.collection.JavaConverters._
37 |
38 | /**
39 | * Created by andrew@datamountaineer.com on 29/02/16.
40 | * stream-reactor
41 | */
42 |
43 | trait TestUtilsBase extends AnyWordSpec with Matchers with BeforeAndAfter with MockitoSugar {
44 | val TOPIC = "sink_test"
45 | val VALUE_JSON_STRING="{\"id\":\"sink_test-1-1\",\"int_field\":1,\"long_field\":1,\"string_field\":\"foo\"}"
46 | val KEY="topic_key_1"
47 | val ID = "sink_test-1-1"
48 | val AVRO_SCHEMA_LITERAL = "{\n\t\"type\": \"record\",\n\t\"name\": \"myrecord\",\n\t\"fields\": [{\n\t\t\"name\": \"id\",\n\t\t\"type\": \"string\"\n\t}, {\n\t\t\"name\": \"int_field\",\n\t\t\"type\": \"int\"\n\t}, {\n\t\t\"name\": \"long_field\",\n\t\t\"type\": \"long\"\n\t}, {\n\t\t\"name\": \"string_field\",\n\t\t\"type\": \"string\"\n\t}]\n}"
49 | val AVRO_SCHEMA : org.apache.avro.Schema = new org.apache.avro.Schema.Parser().parse(AVRO_SCHEMA_LITERAL)
50 |
51 | def buildAvro() : GenericRecord = {
52 | val avro = new GenericData.Record(AVRO_SCHEMA)
53 | avro.put("id", ID)
54 | avro.put("int_field", 1)
55 | avro.put("long_field", 1L)
56 | avro.put("string_field", "foo")
57 | avro
58 | }
59 |
60 | def buildNestedAvro() : GenericRecord = {
61 | val recordFormat = RecordFormat[WithNested]
62 | val record = WithNested(1.1, Nested("abc", 100))
63 | recordFormat.to(record)
64 | }
65 |
66 | //build a test record schema
67 | def createSchema: Schema = {
68 | SchemaBuilder.struct.name("record")
69 | .version(1)
70 | .field("id", Schema.STRING_SCHEMA)
71 | .field("int_field", Schema.INT32_SCHEMA)
72 | .field("long_field", Schema.INT64_SCHEMA)
73 | .field("string_field", Schema.STRING_SCHEMA)
74 | .build
75 | }
76 |
77 | //build a test record
78 | def createRecord(schema: Schema, id: String): Struct = {
79 | new Struct(schema)
80 | .put("id", id)
81 | .put("int_field", 1)
82 | .put("long_field", 1L)
83 | .put("string_field", "foo")
84 | }
85 |
86 | //generate some test records
87 | def getTestRecord: SinkRecord= {
88 | val schema = createSchema
89 | val record: Struct = createRecord(schema, ID)
90 | new SinkRecord(TOPIC, 1, Schema.STRING_SCHEMA, KEY.toString, schema, record, 1)
91 | }
92 |
93 | def getSourceTaskContext(lookupPartitionKey: String, offsetValue: String, offsetColumn : String, table : String) = {
94 | /**
95 | * offset holds a map of map[string, something],map[identifier, value]
96 | *
97 | * map(map(assign.import.table->table1) -> map("my_timeuuid"->"2013-01-01 00:05+0000")
98 | */
99 |
100 | //set up partition
101 | val partition: util.Map[String, String] = Collections.singletonMap(lookupPartitionKey, table)
102 | //as a list to search for
103 | val partitionList: util.List[util.Map[String, String]] = List(partition).asJava
104 | //set up the offset
105 | val offset: util.Map[String, Object] = Collections.singletonMap(offsetColumn, offsetValue)
106 | //create offsets to initialize from
107 | val offsets :util.Map[util.Map[String, String],util.Map[String, Object]] = Map(partition -> offset).asJava
108 |
109 | //mock out reader and task context
110 | val taskContext = mock[SourceTaskContext]
111 | val reader = mock[OffsetStorageReader]
112 | when(reader.offsets(partitionList)).thenReturn(offsets)
113 | when(taskContext.offsetStorageReader()).thenReturn(reader)
114 |
115 | taskContext
116 | }
117 | }
118 |
119 |
120 | case class WithNested(x:Double, y:Nested)
121 | case class Nested(a:String, b:Int)
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/concurrent/FutureAwaitWithFailFastFnTest.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.concurrent
18 |
19 | import java.util.concurrent.Executors
20 |
21 | import com.datamountaineer.streamreactor.connect.concurrent.ExecutorExtension._
22 | import org.scalactic.source.Position
23 | import org.scalatest.concurrent.{Eventually, TimeLimits}
24 | import org.scalatest.matchers.should.Matchers
25 | import org.scalatest.time.{Millis, Span}
26 | import org.scalatest.wordspec.AnyWordSpec
27 |
28 | import scala.util.{Failure, Try}
29 |
30 | /**
31 | * Created by stepi on 22/06/16.
32 | */
33 | class FutureAwaitWithFailFastFnTest extends AnyWordSpec with Matchers with Eventually with TimeLimits {
34 |
35 |
36 | "FutureAwaitWithFailFastFn" should {
37 | "return when all the futures have completed" in {
38 | val exec = Executors.newFixedThreadPool(10)
39 | val futures = (1 to 5).map(i => exec.submit {
40 | Thread.sleep(300)
41 | i
42 | })
43 | eventually {
44 | val result = FutureAwaitWithFailFastFn(exec, futures)
45 | exec.isTerminated shouldBe true
46 | result shouldBe Seq(1, 2, 3, 4, 5)
47 | }
48 | }
49 |
50 | "stop when the first futures times out" in {
51 | val exec = Executors.newFixedThreadPool(6)
52 | val futures = for (i <- 1 to 10) yield {
53 | exec.submit {
54 | if (i == 4) {
55 | Thread.sleep(1000)
56 | sys.error("this task failed.")
57 | } else {
58 | Thread.sleep(50000)
59 | }
60 | }
61 | }
62 |
63 | eventually {
64 | val t = Try(FutureAwaitWithFailFastFn(exec, futures))
65 | t.isFailure shouldBe true
66 | t.asInstanceOf[Failure[_]].exception.getMessage shouldBe "this task failed."
67 | exec.isTerminated shouldBe true
68 | }
69 | }
70 | }
71 |
72 | }
73 |
74 |
75 |
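The two specs above pin down the contract of FutureAwaitWithFailFastFn: it blocks until every future completes, returns the results in order, shuts the executor down, and rethrows the first failure without waiting for the remaining tasks. A minimal usage sketch, assuming only the API exercised above (the FailFastExample object and its fake "batches" are illustrative):

    import java.util.concurrent.Executors

    import com.datamountaineer.streamreactor.connect.concurrent.ExecutorExtension._
    import com.datamountaineer.streamreactor.connect.concurrent.FutureAwaitWithFailFastFn

    object FailFastExample extends App {
      val executor = Executors.newFixedThreadPool(4)

      // Four independent "writes"; if any of them threw, FutureAwaitWithFailFastFn
      // would rethrow that failure immediately instead of waiting on the rest.
      val tasks = (1 to 4).map { i =>
        executor.submit {
          Thread.sleep(100L * i) // simulate some I/O
          s"batch-$i written"
        }
      }

      // Blocks until all tasks finish (or one fails); the pool is shut down afterwards,
      // exactly as the first spec asserts with exec.isTerminated.
      val results = FutureAwaitWithFailFastFn(executor, tasks)
      results.foreach(println)
    }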
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/config/KcqlSettingsTest.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config
18 |
19 | import java.util
20 |
21 | import com.datamountaineer.streamreactor.connect.config.base.traits.KcqlSettings
22 | import org.apache.kafka.common.config.types.Password
23 | import org.scalatest.matchers.should.Matchers
24 | import org.scalatest.wordspec.AnyWordSpec
25 |
26 |
27 | class KcqlSettingsTest extends AnyWordSpec with Matchers {
28 |
29 | import scala.collection.JavaConverters._
30 |
31 | case class KS(kcql: String) extends KcqlSettings {
32 |
33 | override def connectorPrefix: String = "66686723939"
34 | override def getString(key: String): String = key match {
35 | case `kcqlConstant` => kcql
36 | case _ => null
37 | }
38 | override def getInt(key: String): Integer = 0
39 | override def getBoolean(key: String): java.lang.Boolean = false
40 | override def getPassword(key: String): Password = null
41 | override def getList(key: String): util.List[String] = List.empty[String].asJava
42 | }
43 |
44 | def testUpsertKeys(
45 | kcql: String,
46 | expectedKeys: Set[String],
47 | topic: String = "t",
48 | preserve: Boolean = false) = {
49 | val keys = KS(kcql).getUpsertKeys(preserveFullKeys=preserve)(topic)
50 | // get rid of ListSet to avoid ordering issues:
51 | keys.toList.toSet shouldBe expectedKeys
52 | }
53 |
54 | "KcqlSettings.getUpsertKeys()" should {
55 |
56 | "return 'basename' of key by default" in {
57 |
58 | testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK a", Set("a"))
59 | testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK a, b.m.x", Set("a", "x"))
60 | testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK b.m.x", Set("x"))
61 | }
62 |
63 | "return full keys if requested" in {
64 |
65 | testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK a", Set("a"), preserve=true)
66 | testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK a, b.m", Set("a", "b.m"), preserve=true)
67 | testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK a, b.m, b.n.x", Set("a", "b.m", "b.n.x"), preserve=true)
68 | testUpsertKeys("UPSERT INTO coll SELECT * FROM t PK b.m.x", Set("b.m.x"), preserve=true)
69 | }
70 |
71 | "return keys in the expected order - as listed in the PK clause" in {
72 |
73 | val kcql = "UPSERT INTO coll SELECT * FROM t PK a,b,c,d"
74 | val expectedKeys = List("a","b","c","d")
75 | val keys = KS(kcql).getUpsertKeys(preserveFullKeys=true)("t").toList.sorted
76 | // SCALA 2.12 WARNING: If this fails when you upgrade to 2.12, you need to
77 | // modify KcqlSettings to remove all the reverse() calls when constructing
78 | // the ListSets.
79 | keys shouldBe expectedKeys
80 | }
81 |
82 | }
83 |
84 | }
85 |
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/config/TestHelpers.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config
18 |
19 | import com.datamountaineer.streamreactor.connect.TestUtilsBase
20 | import org.apache.kafka.common.config.ConfigException
21 |
22 | /**
23 | * Created by andrew@datamountaineer.com on 23/08/2017.
24 | * kafka-connect-common
25 | */
26 | class TestHelpers extends TestUtilsBase {
27 |
28 | val kcqlConstant: String = "myconnector.kcql"
29 |
30 | "should throw exception if topics not specified in connector props" in {
31 | val props = Map("topics" -> "t1",
32 | s"$kcqlConstant" -> "insert into table select * from t1;insert into table2 select * from t2"
33 | )
34 |
35 | intercept[ConfigException] {
36 | Helpers.checkInputTopics(kcqlConstant, props)
37 | }
38 |
39 | }
40 |
41 | "should throw exception if topics not specified in kcql" in {
42 | val props = Map("topics" -> "t1,t2",
43 | s"$kcqlConstant" -> "insert into table select * from t1"
44 | )
45 |
46 | intercept[ConfigException] {
47 | Helpers.checkInputTopics(kcqlConstant, props)
48 | }
49 | }
50 |
51 | "should not throw exception if all good" in {
52 | val props = Map("topics" -> "t1,t2",
53 | s"$kcqlConstant" -> "insert into table select * from t1;insert into table2 select * from t2"
54 | )
55 |
56 | val res = Helpers.checkInputTopics(kcqlConstant, props)
57 | res shouldBe true
58 | }
59 |
60 | "should add topics involved in kcql error to message" in {
61 | val props = Map("topics" -> "topic1",
62 | s"$kcqlConstant" -> "insert into table select time,c1,c2 from topic1 WITH TIMESTAMP time"
63 | )
64 |
65 | val e = intercept[ConfigException] {
66 | Helpers.checkInputTopics(kcqlConstant, props)
67 | }
68 |
69 | e.getMessage.contains("topic1WITHTIMESTAMPtime") shouldBe true
70 | }
71 | }
72 |
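For reference, a configuration that passes the check looks like the sketch below; the property key mirrors the test's myconnector.kcql constant, and the topic and table names are illustrative. Every topic in "topics" must appear as a KCQL source and vice versa, otherwise a ConfigException is raised:

    import com.datamountaineer.streamreactor.connect.config.Helpers

    val props = Map(
      "topics" -> "orders,payments",
      "myconnector.kcql" -> "insert into orders_tbl select * from orders;insert into payments_tbl select * from payments"
    )

    // Returns true when the two sides agree; a missing topic on either side raises ConfigException.
    Helpers.checkInputTopics("myconnector.kcql", props)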
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/config/TestSSLConfigContext.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.config
18 |
19 | import javax.net.ssl.{KeyManager, SSLContext, TrustManager}
20 | import org.scalatest.wordspec.AnyWordSpec
21 | import org.scalatest.BeforeAndAfter
22 | import org.scalatest.matchers.should.Matchers
23 |
24 | /**
25 | * Created by andrew@datamountaineer.com on 19/04/16.
26 | * stream-reactor
27 | */
28 | class TestSSLConfigContext extends AnyWordSpec with Matchers with BeforeAndAfter {
29 | var sslConfig : SSLConfig = null
30 | var sslConfigNoClient : SSLConfig = null
31 |
32 | before {
33 | val trustStorePath = System.getProperty("truststore")
34 | val trustStorePassword ="erZHDS9Eo0CcNo"
35 | val keystorePath = System.getProperty("keystore")
36 | val keystorePassword ="8yJQLUnGkwZxOw"
37 | sslConfig = SSLConfig(trustStorePath, trustStorePassword , Some(keystorePath), Some(keystorePassword), true)
38 | sslConfigNoClient = SSLConfig(trustStorePath, trustStorePassword , Some(keystorePath), Some(keystorePassword), false)
39 | }
40 |
41 | "SSLConfigContext" should {
42 | "should return an Array of KeyManagers" in {
43 | val keyManagers = SSLConfigContext.getKeyManagers(sslConfig)
44 | keyManagers.length shouldBe 1
45 | val entry = keyManagers.head
46 | entry shouldBe a [KeyManager]
47 | }
48 |
49 | "should return an Array of TrustManagers" in {
50 | val trustManager = SSLConfigContext.getTrustManagers(sslConfig)
51 | trustManager.length shouldBe 1
52 | val entry = trustManager.head
53 | entry shouldBe a [TrustManager]
54 | }
55 |
56 | "should return a SSLContext" in {
57 | val context = SSLConfigContext(sslConfig)
58 | context.getProtocol shouldBe "SSL"
59 | context shouldBe a [SSLContext]
60 | }
61 | }
62 | }
63 |
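A sketch of the intended call sequence, using the same positional SSLConfig arguments as the before block above (truststore path, truststore password, optional keystore path, optional keystore password, client-certificate flag). The paths and passwords here are placeholders:

    import com.datamountaineer.streamreactor.connect.config.{SSLConfig, SSLConfigContext}

    object SslExample extends App {
      val config = SSLConfig(
        "/etc/secrets/truststore.jks", "truststore-password",
        Some("/etc/secrets/keystore.jks"), Some("keystore-password"),
        true // present a client certificate
      )

      // Builds a javax.net.ssl.SSLContext (protocol "SSL", as the spec asserts);
      // its socket factory can be handed to any TLS client.
      val sslContext = SSLConfigContext(config)
      val socketFactory = sslContext.getSocketFactory
    }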
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/converters/sink/AvroConverterTest.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.converters.sink
18 |
19 | import java.io.{BufferedWriter, File, FileWriter}
20 | import java.nio.file.Paths
21 | import java.util.UUID
22 |
23 | import com.datamountaineer.streamreactor.connect.converters.MsgKey
24 | import com.sksamuel.avro4s.RecordFormat
25 | import io.confluent.connect.avro.AvroData
26 | import org.apache.avro.{Schema, SchemaBuilder}
27 | import org.apache.kafka.connect.data.{Struct, Schema => KafkaSchema, SchemaBuilder => KafkaSchemaBuilder}
28 | import org.apache.kafka.connect.errors.DataException
29 | import org.apache.kafka.connect.sink.SinkRecord
30 | import org.scalatest.wordspec.AnyWordSpec
31 | import org.scalatest.BeforeAndAfterAll
32 | import org.scalatest.matchers.should.Matchers
33 |
34 | import scala.reflect.io.Path
35 |
36 | class AvroConverterTest extends AnyWordSpec with Matchers with BeforeAndAfterAll {
37 | private val sinkTopic = "somesink"
38 | private val folder = new File(UUID.randomUUID().toString)
39 | folder.mkdir()
40 | val path = Path(folder.getAbsolutePath)
41 |
42 | val kafkaSchema = KafkaSchemaBuilder.struct.name("record")
43 | .version(1)
44 | .field("id", KafkaSchema.STRING_SCHEMA)
45 | .field("amount", KafkaSchema.FLOAT64_SCHEMA)
46 | .field("timestamp", KafkaSchema.INT64_SCHEMA)
47 | .build
48 |
49 | override def beforeAll() = {
50 |
51 | }
52 |
53 | override def afterAll() = {
54 | path.deleteRecursively()
55 | }
56 |
57 | private def initializeConverter(converter: AvroConverter, schema: Schema) = {
58 | def writeSchema(schema: Schema): File = {
59 | val schemaFile = Paths.get(folder.getName, UUID.randomUUID().toString)
60 | val bw = new BufferedWriter(new FileWriter(schemaFile.toFile))
61 | bw.write(schema.toString)
62 | bw.close()
63 |
64 | schemaFile.toFile
65 | }
66 |
67 | converter.initialize(Map(
68 | AvroConverter.SCHEMA_CONFIG -> s"$sinkTopic=${writeSchema(schema)}"
69 | ))
70 |
71 | }
72 |
73 | "Sink AvroConverter" should {
74 | "handle null payloads" in {
75 | val converter = new AvroConverter()
76 | val schema = SchemaBuilder.builder().stringType()
77 | initializeConverter(converter, schema)
78 |
79 | val sinkRecord = converter.convert(sinkTopic, null)
80 |
81 | sinkRecord.key() shouldBe null
82 | sinkRecord.keySchema() shouldBe null
83 | sinkRecord.value() shouldBe null
84 | }
85 |
86 | "throw an exception if the payload contains a wrong type" in {
87 | intercept[DataException] {
88 | val recordFormat = RecordFormat[Transaction]
89 | val transaction = Transaction("test", 2354.99, System.currentTimeMillis())
90 | val avro = recordFormat.to(transaction)
91 |
92 | val converter = new AvroConverter
93 | initializeConverter(converter, avro.getSchema)
94 |
95 | val payload = new Struct(kafkaSchema)
96 | .put("id", 15)
97 | .put("amount", 2354.99)
98 | .put("timestamp", 1578467749572L)
99 | val data = new SinkRecord(sinkTopic, 0, null, "keyA", kafkaSchema, payload, 0)
100 | val sinkRecord = converter.convert(sinkTopic, data)
101 | }
102 | }
103 |
104 | "create avro records" in {
105 | val recordFormat = RecordFormat[Transaction]
106 | val transaction = Transaction("test", 2354.99, 1578467749572L)
107 | val avro = recordFormat.to(transaction)
108 |
109 | val converter = new AvroConverter
110 | initializeConverter(converter, avro.getSchema)
111 |
112 | val payload = new Struct(kafkaSchema)
113 | .put("id", "test")
114 | .put("amount", 2354.99)
115 | .put("timestamp", 1578467749572L)
116 | val data = new SinkRecord(sinkTopic, 0, null, "keyA", kafkaSchema, payload, 0)
117 | val sinkRecord = converter.convert(sinkTopic, data)
118 |
119 | sinkRecord.key() shouldBe MsgKey.getStruct(sinkTopic, "keyA")
120 | sinkRecord.keySchema() shouldBe MsgKey.schema
121 |
122 | val avroData = new AvroData(4)
123 | sinkRecord.valueSchema() shouldBe kafkaSchema
124 |
125 | sinkRecord.value() shouldBe Array(8, 116, 101, 115, 116, 20, -82, 71, -31,
126 | -6, 101, -94, 64, -120, -69, -109, -64,
127 | -16, 91).map(_.toByte)
128 | }
129 | }
130 | }
131 |
132 |
133 | case class Transaction(id: String, amount: Double, timestamp: Long)
134 |
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/converters/sink/BytesConverterTest.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.converters.sink
18 |
19 | import com.datamountaineer.streamreactor.connect.converters.MsgKey
20 | import org.apache.kafka.connect.data.Schema
21 | import org.apache.kafka.connect.sink.SinkRecord
22 | import org.scalatest.matchers.should.Matchers
23 | import org.scalatest.wordspec.AnyWordSpec
24 |
25 | class BytesConverterTest extends AnyWordSpec with Matchers {
26 | private val converter = new BytesConverter()
27 | private val topic = "topicA"
28 |
29 | "Sink BytesConverter" should {
30 | "handle null payloads" in {
31 | val sinkRecord = converter.convert(topic, null)
32 |
33 | sinkRecord.keySchema() shouldBe null
34 | sinkRecord.key() shouldBe null
35 | sinkRecord.valueSchema() shouldBe Schema.BYTES_SCHEMA
36 | sinkRecord.value() shouldBe null
37 | }
38 |
39 | "handle non-null payloads" in {
40 | val expectedPayload: Array[Byte] = Array(245, 2, 10, 200, 22, 0, 0, 11).map(_.toByte)
41 | val data = new SinkRecord(topic, 0, null, "keyA", null, expectedPayload, 0)
42 | val sinkRecord = converter.convert(topic, data)
43 |
44 | sinkRecord.keySchema() shouldBe MsgKey.schema
45 | sinkRecord.key() shouldBe MsgKey.getStruct("topicA", "keyA")
46 | sinkRecord.valueSchema() shouldBe Schema.BYTES_SCHEMA
47 | sinkRecord.value() shouldBe expectedPayload
48 | }
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/converters/source/AvroConverterTest.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.converters.source
18 |
19 | import java.io.{BufferedWriter, File, FileWriter}
20 | import java.nio.ByteBuffer
21 | import java.nio.file.Paths
22 | import java.util.UUID
23 |
24 | import com.datamountaineer.streamreactor.connect.converters.MsgKey
25 | import com.sksamuel.avro4s._
26 | import io.confluent.connect.avro.AvroData
27 | import org.apache.avro.generic.GenericRecord
28 | import org.apache.avro.io.EncoderFactory
29 | import org.apache.avro.specific.SpecificDatumWriter
30 | import org.apache.avro.{AvroRuntimeException, Schema, SchemaBuilder}
31 | import org.apache.kafka.common.utils.ByteBufferOutputStream
32 | import org.scalatest.BeforeAndAfterAll
33 | import org.scalatest.matchers.should.Matchers
34 | import org.scalatest.wordspec.AnyWordSpec
35 |
36 | import scala.reflect.io.Path
37 |
38 | class AvroConverterTest extends AnyWordSpec with Matchers with BeforeAndAfterAll {
39 | private val topic = "topicA"
40 | private val sourceTopic = "somesource"
41 | private val folder = new File(UUID.randomUUID().toString)
42 | folder.mkdir()
43 | val path = Path(folder.getAbsolutePath)
44 |
45 | override def beforeAll() = {
46 |
47 | }
48 |
49 | override def afterAll() = {
50 | path.deleteRecursively()
51 | }
52 |
53 | private def initializeConverter(converter: AvroConverter, schema: Schema, converterTopic: String) = {
54 | def writeSchema(schema: Schema): File = {
55 | val schemaFile = Paths.get(folder.getName, UUID.randomUUID().toString)
56 | val bw = new BufferedWriter(new FileWriter(schemaFile.toFile))
57 | bw.write(schema.toString)
58 | bw.close()
59 |
60 | schemaFile.toFile
61 | }
62 |
63 | converter.initialize(Map(
64 | AvroConverter.SCHEMA_CONFIG -> s"$converterTopic=${writeSchema(schema)}"
65 | ))
66 |
67 | }
68 |
69 | private def write(record: GenericRecord): Array[Byte] = {
70 | val byteBuffer = ByteBuffer.wrap(new Array[Byte](128))
71 | val writer = new SpecificDatumWriter[GenericRecord](record.getSchema)
72 | val encoder = EncoderFactory.get().directBinaryEncoder(new ByteBufferOutputStream(byteBuffer), null)
73 |
74 | writer.write(record, encoder)
75 |
76 | byteBuffer.flip()
77 | byteBuffer.array()
78 | }
79 |
80 |
81 | "AvroConverter" should {
82 | "handle null payloads" in {
83 | val converter = new AvroConverter()
84 | val schema = SchemaBuilder.builder().stringType()
85 | initializeConverter(converter, schema, sourceTopic)
86 |
87 | val sourceRecord = converter.convert(topic, sourceTopic, "100", null)
88 |
89 | sourceRecord.key() shouldBe null
90 | sourceRecord.keySchema() shouldBe null
91 | sourceRecord.value() shouldBe null
92 | }
93 |
94 | "throw an exception if it can't parse the payload" in {
95 | intercept[AvroRuntimeException] {
96 | val recordFormat = RecordFormat[Transaction]
97 | val transaction = Transaction("test", 2354.99, System.currentTimeMillis())
98 | val avro = recordFormat.to(transaction)
99 |
100 | val converter = new AvroConverter
101 | initializeConverter(converter, avro.getSchema, sourceTopic)
102 |
103 | val sourceRecord = converter.convert(topic, sourceTopic, "1001", write(avro).map(b => (b + 1) % 255).map(_.toByte))
104 |
105 | sourceRecord.key() shouldBe null
106 | sourceRecord.keySchema() shouldBe null
107 |
108 | val avroData = new AvroData(4)
109 |
110 | sourceRecord.value() shouldBe avroData.toConnectData(avro.getSchema, avro).value()
111 |
112 | sourceRecord.valueSchema() shouldBe avroData.toConnectSchema(avro.getSchema)
113 | }
114 | }
115 |
116 | "handle avro records" in {
117 | val recordFormat = RecordFormat[Transaction]
118 | val transaction = Transaction("test", 2354.99, System.currentTimeMillis())
119 | val avro = recordFormat.to(transaction)
120 |
121 | val converter = new AvroConverter
122 | initializeConverter(converter, avro.getSchema, sourceTopic)
123 |
124 | val sourceRecord = converter.convert(topic, sourceTopic, "1001", write(avro))
125 |
126 | sourceRecord.key() shouldBe MsgKey.getStruct(sourceTopic, "1001")
127 | sourceRecord.keySchema() shouldBe MsgKey.schema
128 |
129 | val avroData = new AvroData(4)
130 | sourceRecord.valueSchema() shouldBe avroData.toConnectSchema(avro.getSchema)
131 |
132 | sourceRecord.value() shouldBe avroData.toConnectData(avro.getSchema, avro).value()
133 | }
134 |
135 | "handle avro records when the source topic name contains \"+\"" in {
136 | val sourceTopicWithPlus = "somesource+"
137 | val recordFormat = RecordFormat[Transaction]
138 | val transaction = Transaction("test", 2354.99, System.currentTimeMillis())
139 | val avro = recordFormat.to(transaction)
140 |
141 | val converter = new AvroConverter
142 | initializeConverter(converter, avro.getSchema, sourceTopicWithPlus)
143 |
144 | val sourceRecord = converter.convert(topic, sourceTopicWithPlus, "1001", write(avro))
145 |
146 | sourceRecord.key() shouldBe MsgKey.getStruct(sourceTopicWithPlus, "1001")
147 | sourceRecord.keySchema() shouldBe MsgKey.schema
148 |
149 | val avroData = new AvroData(4)
150 | sourceRecord.valueSchema() shouldBe avroData.toConnectSchema(avro.getSchema)
151 |
152 | sourceRecord.value() shouldBe avroData.toConnectData(avro.getSchema, avro).value()
153 | }
154 |
155 | }
156 | }
157 |
158 |
159 | case class Transaction(id: String, amount: Double, timestamp: Long)
160 |
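The initializeConverter helper above also documents the configuration format: AvroConverter.SCHEMA_CONFIG takes a "sourceTopic=schemaFile" mapping. A hedged sketch of wiring the source converter outside of a test, with placeholder topic and schema-file names:

    import com.datamountaineer.streamreactor.connect.converters.source.AvroConverter

    val converter = new AvroConverter()
    converter.initialize(Map(
      AvroConverter.SCHEMA_CONFIG -> "telemetry=/etc/schemas/telemetry.avsc" // sourceTopic=pathToAvroSchema
    ))

    // The Kafka topic, source topic, message id and raw Avro bytes come from the source system:
    // val sourceRecord = converter.convert("kafka-topic", "telemetry", "msg-1", rawAvroBytes)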
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/converters/source/BytesConverterTest.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.converters.source
18 |
19 | import com.datamountaineer.streamreactor.connect.converters.MsgKey
20 | import org.apache.kafka.connect.data.Schema
21 | import org.scalatest.matchers.should.Matchers
22 | import org.scalatest.wordspec.AnyWordSpec
23 |
24 | class BytesConverterTest extends AnyWordSpec with Matchers {
25 | private val converter = new BytesConverter()
26 | private val topic = "topicA"
27 |
28 | "BytesConverter" should {
29 | "handle null payloads" in {
30 | val sourceRecord = converter.convert(topic, "somesource", "100", null)
31 |
32 | sourceRecord.keySchema() shouldBe MsgKey.schema
33 | sourceRecord.key() shouldBe MsgKey.getStruct("somesource", "100")
34 | sourceRecord.valueSchema() shouldBe Schema.BYTES_SCHEMA
35 | sourceRecord.value() shouldBe null
36 | }
37 |
38 | "handle non-null payloads" in {
39 | val expectedPayload: Array[Byte] = Array(245, 2, 10, 200, 22, 0, 0, 11).map(_.toByte)
40 | val sourceRecord = converter.convert(topic, "somesource", "1001", expectedPayload)
41 |
42 | sourceRecord.keySchema() shouldBe MsgKey.schema
43 | sourceRecord.key() shouldBe MsgKey.getStruct("somesource", "1001")
44 | sourceRecord.valueSchema() shouldBe Schema.BYTES_SCHEMA
45 | sourceRecord.value() shouldBe expectedPayload
46 | }
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/converters/source/JacksonJson.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.converters.source
18 |
19 | import org.json4s._
20 | import org.json4s.jackson.Serialization
21 | import org.json4s.jackson.Serialization.write
22 |
23 | object JacksonJson {
24 |
25 | //implicit val formats: DefaultFormats.type = DefaultFormats
26 | implicit val formats: Formats = Serialization.formats(NoTypeHints)
27 |
28 | /*def toJson(value: Map[Symbol, Any]): String = {
29 | toJson(value map { case (k,v) => k.name -> v})
30 | }*/
31 |
32 | def toJson[T<:AnyRef](value: T): String = {
33 | write(value)
34 | }
35 | }
36 |
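A quick illustration of what the helper produces for a small case class (Pixel is made up for this example); json4s writes the fields in declaration order:

    case class Pixel(x: Int, y: Int, on: Boolean)

    JacksonJson.toJson(Pixel(3, 7, on = true)) // {"x":3,"y":7,"on":true}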
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/converters/source/JsonConverterWithSchemaEvolutionTest.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.converters.source
18 |
19 | import java.util.Collections
20 |
21 | import com.datamountaineer.streamreactor.connect.converters.MsgKey
22 | import com.sksamuel.avro4s.{RecordFormat, SchemaFor}
23 | import io.confluent.connect.avro.AvroData
24 | import org.apache.avro.Schema
25 | import org.apache.kafka.connect.data.Struct
26 | import org.scalatest.matchers.should.Matchers
27 | import org.scalatest.wordspec.AnyWordSpec
28 |
29 | class JsonConverterWithSchemaEvolutionTest extends AnyWordSpec with Matchers {
30 | val topic = "the_real_topic"
31 | val sourceTopic = "source_topic"
32 | val avroData = new AvroData(4)
33 |
34 | "JsonConverter" should {
35 | "throw IllegalArgumentException if payload is null" in {
36 | intercept[IllegalArgumentException] {
37 | val converter = new JsonConverterWithSchemaEvolution
38 | val record = converter.convert("topic", "somesource", "1000", null)
39 | }
40 | }
41 |
42 | "handle a simple json" in {
43 | val json = JacksonJson.toJson(Car("LaFerrari", "Ferrari", 2015, 963, 0.0001))
44 | val converter = new JsonConverterWithSchemaEvolution
45 | val record = converter.convert(topic, sourceTopic, "100", json.getBytes)
46 | record.keySchema() shouldBe MsgKey.schema
47 | record.key().asInstanceOf[Struct].getString("topic") shouldBe sourceTopic
48 | record.key().asInstanceOf[Struct].getString("id") shouldBe "100"
49 |
50 | val schema =
51 | new Schema.Parser().parse(
52 | SchemaFor[CarOptional]().toString
53 | .replace("\"name\":\"CarOptional\"", s"""\"name\":\"$sourceTopic\"""")
54 | .replace(s""",\"namespace\":\"${getClass.getCanonicalName.dropRight(getClass.getSimpleName.length+1)}\"""", "")
55 | )
56 | val format = RecordFormat[CarOptional]
57 | val carOptional = format.to(CarOptional(Option("LaFerrari"), Option("Ferrari"), Option(2015), Option(963), Option(0.0001)))
58 |
59 | record.valueSchema() shouldBe avroData.toConnectSchema(schema)
60 |
61 | record.value() shouldBe avroData.toConnectData(schema, carOptional).value()
62 | record.sourcePartition() shouldBe null
63 | record.sourceOffset() shouldBe Collections.singletonMap(JsonConverterWithSchemaEvolution.ConfigKey, avroData.fromConnectSchema(avroData.toConnectSchema(schema)).toString())
64 | }
65 | }
66 | }
67 |
68 |
69 | case class Car(name: String,
70 | manufacturer: String,
71 | model: Long,
72 | bhp: Long,
73 | price: Double)
74 |
75 |
76 | case class CarOptional(name: Option[String],
77 | manufacturer: Option[String],
78 | model: Option[Long],
79 | bhp: Option[Long],
80 | price: Option[Double])
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/converters/source/JsonPassThroughConverterTest.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.converters.source
18 |
19 | import java.util.Collections
20 |
21 | import org.scalatest.matchers.should.Matchers
22 | import org.scalatest.wordspec.AnyWordSpec
23 |
24 | class JsonPassThroughConverterTest extends AnyWordSpec with Matchers {
25 | val topic = "the_real_topic"
26 | val sourceTopic = "source_topic"
27 |
28 | "JsonPassThroughConverter" should {
29 | "pass single message with no key through as json" in {
30 | val car = Car("LaFerrari", "Ferrari", 2015, 963, 0.0001)
31 | val json = JacksonJson.toJson(car)
32 | val converter = new JsonPassThroughConverter
33 | val record = converter.convert(topic, sourceTopic, "100", json.getBytes)
34 | record.keySchema() shouldBe null
35 | record.key() shouldBe "source_topic.100"
36 |
37 | record.valueSchema() shouldBe null
38 |
39 | record.value() shouldBe json
40 | record.sourcePartition() shouldBe Collections.singletonMap(Converter.TopicKey, sourceTopic)
41 | record.sourceOffset() shouldBe null
42 | }
43 |
44 | "pass single message with key through as json" in {
45 | val car = Car("LaFerrari", "Ferrari", 2015, 963, 0.0001)
46 | val json = JacksonJson.toJson(car)
47 | val converter = new JsonPassThroughConverter
48 | val keys = List("name", "manufacturer")
49 | val record = converter.convert(topic, sourceTopic, "100", json.getBytes, keys)
50 | record.keySchema() shouldBe null
51 | record.key() shouldBe "LaFerrari.Ferrari"
52 |
53 | record.valueSchema() shouldBe null
54 |
55 | record.value() shouldBe json
56 | record.sourcePartition() shouldBe Collections.singletonMap(Converter.TopicKey, sourceTopic)
57 | record.sourceOffset() shouldBe null
58 | }
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/converters/source/JsonSimpleConverterTest.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.converters.source
18 |
19 | import java.util.Collections
20 |
21 | import com.datamountaineer.streamreactor.connect.converters.MsgKey
22 | import com.sksamuel.avro4s.{RecordFormat, SchemaFor}
23 | import io.confluent.connect.avro.AvroData
24 | import org.apache.avro.Schema
25 | import org.scalatest.matchers.should.Matchers
26 | import org.scalatest.wordspec.AnyWordSpec
27 |
28 | class JsonSimpleConverterTest extends AnyWordSpec with Matchers {
29 | val topic = "the_real_topic"
30 | val sourceTopic = "source_topic"
31 | val avroData = new AvroData(4)
32 |
33 | "JsonSimpleConverter" should {
34 | "convert from json to the struct" in {
35 | val car = Car("LaFerrari", "Ferrari", 2015, 963, 0.0001)
36 | val json = JacksonJson.toJson(car)
37 | val converter = new JsonSimpleConverter
38 | val record = converter.convert(topic, sourceTopic, "100", json.getBytes)
39 | record.keySchema() shouldBe MsgKey.schema
40 | record.key() shouldBe MsgKey.getStruct(sourceTopic, "100")
41 |
42 | val schema = new Schema.Parser().parse(
43 | SchemaFor[Car]().toString
44 | .replace("\"name\":\"Car\"", s"""\"name\":\"$sourceTopic\"""")
45 | .replace(s"""\"namespace\":\"${getClass.getCanonicalName.dropRight(getClass.getSimpleName.length+1)}\",""", "")
46 | )
47 | val format = RecordFormat[Car]
48 | val avro = format.to(car)
49 |
50 | record.valueSchema() shouldBe avroData.toConnectSchema(schema)
51 |
52 | record.value() shouldBe avroData.toConnectData(schema, avro).value()
53 | record.sourcePartition() shouldBe Collections.singletonMap(Converter.TopicKey, sourceTopic)
54 | record.sourceOffset() shouldBe null
55 | }
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/errors/TestErrorHandlerNoop.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.errors
18 |
19 | import com.datamountaineer.streamreactor.connect.TestUtilsBase
20 | import org.apache.kafka.connect.errors.RetriableException
21 |
22 | import scala.util.Failure
23 |
24 | /**
25 | * Created by andrew@datamountaineer.com on 24/08/2017.
26 | * kafka-connect-common
27 | */
28 | class TestErrorHandlerNoop extends TestUtilsBase with ErrorHandler {
29 |
30 | initialize(10, ErrorPolicy(ErrorPolicyEnum.NOOP))
31 |
32 | "should continue with noop" in {
33 |
34 | {
35 | try {
36 | 1 / 0
37 | } catch {
38 | case t: Throwable => {
39 | handleTry(Failure(t))
40 | }
41 | }
42 | }
43 |
44 | getErrorTrackerRetries shouldBe 9
45 | }
46 |
47 | }
48 |
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/errors/TestErrorHandlerRetry.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.errors
18 |
19 | import com.datamountaineer.streamreactor.connect.TestUtilsBase
20 | import org.apache.kafka.connect.errors.RetriableException
21 |
22 | import scala.util.{Failure, Try}
23 |
24 | /**
25 | * Created by andrew@datamountaineer.com on 24/08/2017.
26 | * kafka-connect-common
27 | */
28 | class TestErrorHandlerRetry extends TestUtilsBase with ErrorHandler {
29 |
30 | initialize(10, ErrorPolicy(ErrorPolicyEnum.RETRY))
31 |
32 | "should reduce number of retries" in {
33 |
34 | intercept[RetriableException] {
35 | try {
36 | 1 / 0
37 | } catch {
38 | case t: Throwable => {
39 | handleTry(Failure(t))
40 | }
41 | }
42 | }
43 |
44 | getErrorTrackerRetries shouldBe 9
45 | }
46 |
47 | }
48 |
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/errors/TestErrorHandlerThrow.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.errors
18 |
19 | import com.datamountaineer.streamreactor.connect.TestUtilsBase
20 | import org.apache.kafka.connect.errors.RetriableException
21 |
22 | import scala.util.Failure
23 |
24 | /**
25 | * Created by andrew@datamountaineer.com on 24/08/2017.
26 | * kafka-connect-common
27 | */
28 | class TestErrorHandlerThrow extends TestUtilsBase with ErrorHandler {
29 |
30 | initialize(10, ErrorPolicy(ErrorPolicyEnum.THROW))
31 |
32 | "should throw" in {
33 |
34 | intercept[RuntimeException] {
35 | try {
36 | 1 / 0
37 | } catch {
38 | case t: Throwable => {
39 | handleTry(Failure(t))
40 | }
41 | }
42 | }
43 | }
44 |
45 | }
46 |
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/offsets/TestOffsetHandler.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.offsets
18 |
19 | import com.datamountaineer.streamreactor.connect.TestUtilsBase
20 | import org.mockito.MockitoSugar
21 | import org.scalatest.matchers.should.Matchers
22 | import org.scalatest.wordspec.AnyWordSpec
23 |
24 | import scala.collection.JavaConverters._
25 |
26 | /**
27 | * Created by andrew@datamountaineer.com on 28/04/16.
28 | * kafka-connect-common
29 | */
30 | /**
31 | * Created by andrew@datamountaineer.com on 27/04/16.
32 | * stream-reactor
33 | */
34 | class TestOffsetHandler extends AnyWordSpec with Matchers with MockitoSugar with TestUtilsBase {
35 | "should return an offset" in {
36 | val lookupPartitionKey = "test_lk_key"
37 | val offsetValue = "2013-01-01 00:05+0000"
38 | val offsetColumn = "my_timeuuid_col"
39 | val table = "testTable"
40 | val taskContext = getSourceTaskContext(lookupPartitionKey, offsetValue,offsetColumn, table)
41 |
42 | //check we can read it back
43 | val tables = List(table)
44 | val offsetsRecovered = OffsetHandler.recoverOffsets(lookupPartitionKey, tables.asJava, taskContext)
45 | val offsetRecovered = OffsetHandler.recoverOffset[String](offsetsRecovered, lookupPartitionKey, table, offsetColumn)
46 | offsetRecovered.get shouldBe (offsetValue)
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/schemas/StructFieldExtractorTest.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.schemas
18 |
19 | import org.apache.kafka.connect.data.{Date, Schema, SchemaBuilder, Struct}
20 | import org.scalatest.matchers.should.Matchers
21 | import org.scalatest.wordspec.AnyWordSpec
22 |
23 | class StructFieldExtractorTest extends AnyWordSpec with Matchers {
24 | "StructFieldExtractor" should {
25 | "return all the fields and their bytes value" in {
26 | val schema = SchemaBuilder.struct().name("com.example.Person")
27 | .field("firstName", Schema.STRING_SCHEMA)
28 | .field("lastName", Schema.STRING_SCHEMA)
29 | .field("age", Schema.INT32_SCHEMA)
30 | .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build()
31 |
32 | val struct = new Struct(schema)
33 | .put("firstName", "Alex")
34 | .put("lastName", "Smith")
35 | .put("age", 30)
36 |
37 | val map = new StructFieldsExtractor(true, Map.empty).get(struct).toMap
38 |
39 | map.get("firstName").get shouldBe "Alex"
40 | map.get("lastName").get shouldBe "Smith"
41 | map.get("age").get shouldBe 30
42 | }
43 |
44 | "return all fields and apply the mapping" in {
45 | val schema = SchemaBuilder.struct().name("com.example.Person")
46 | .field("firstName", Schema.STRING_SCHEMA)
47 | .field("lastName", Schema.STRING_SCHEMA)
48 | .field("age", Schema.INT32_SCHEMA)
49 | .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build()
50 |
51 | val struct = new Struct(schema)
52 | .put("firstName", "Alex")
53 | .put("lastName", "Smith")
54 | .put("age", 30)
55 |
56 | val map = new StructFieldsExtractor(true, Map("lastName" -> "Name", "age" -> "a")).get(struct).toMap
57 |
58 | map.get("firstName").get shouldBe "Alex"
59 | map.get("Name").get shouldBe "Smith"
60 | map.get("a").get shouldBe 30
61 |
62 | }
63 |
64 | "return only the specified fields" in {
65 | val schema = SchemaBuilder.struct().name("com.example.Person")
66 | .field("firstName", Schema.STRING_SCHEMA)
67 | .field("lastName", Schema.STRING_SCHEMA)
68 | .field("age", Schema.INT32_SCHEMA)
69 | .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build()
70 |
71 | val struct = new Struct(schema)
72 | .put("firstName", "Alex")
73 | .put("lastName", "Smith")
74 | .put("age", 30)
75 |
76 | val map = new StructFieldsExtractor(false, Map("lastName" -> "Name", "age" -> "age")).get(struct).toMap
77 |
78 | map.get("Name").get shouldBe "Smith"
79 | map.get("age").get shouldBe 30
80 |
81 | map.size shouldBe 2
82 | }
83 | }
84 |
85 | "handle Date fieldds" in {
86 | val dateSchema = Date.builder().build()
87 | val schema = SchemaBuilder.struct().name("com.example.Person")
88 | .field("firstName", Schema.STRING_SCHEMA)
89 | .field("lastName", Schema.STRING_SCHEMA)
90 | .field("age", Schema.INT32_SCHEMA)
91 | .field("date", dateSchema).build()
92 |
93 | val date = java.sql.Date.valueOf("2017-04-25")
94 | val struct = new Struct(schema)
95 | .put("firstName", "Alex")
96 | .put("lastName", "Smith")
97 | .put("age", 30)
98 | .put("date", date)
99 |
100 | val map1 = new StructFieldsExtractor(false, Map("date" -> "date")).get(struct).toMap
101 | map1.get("date").get shouldBe date
102 | map1.size shouldBe 1
103 |
104 | val d = Date.toLogical(dateSchema, 10000)
105 | struct.put("date", d)
106 |
107 | val map2 = new StructFieldsExtractor(false, Map("date" -> "date")).get(struct).toMap
108 | map2.get("date").get shouldBe d
109 | map2.size shouldBe 1
110 |
111 | }
112 |
113 | }
114 |
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/serialization/AvroSerializerTest.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.serialization
18 |
19 | import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
20 |
21 | import org.scalatest.matchers.should.Matchers
22 | import org.scalatest.wordspec.AnyWordSpec
23 |
24 |
25 | class AvroSerializerTest extends AnyWordSpec with Matchers {
26 | "AvroSerializer" should {
27 | "read and write from and to Avro" in {
28 | val book = Book("On Intelligence", Author("Jeff", "Hawkins", 1957), "0805078533", 273, 14.72)
29 |
30 | implicit val os = new ByteArrayOutputStream()
31 | AvroSerializer.write(book)
32 |
33 | implicit val is = new ByteArrayInputStream(os.toByteArray)
34 |
35 | val actualBook = AvroSerializer.read[Book](is)
36 |
37 | actualBook shouldBe book
38 | os.toByteArray shouldBe AvroSerializer.getBytes(book)
39 | }
40 | }
41 |
42 |
43 | case class Author(firstName: String, lastName: String, yearBorn: Int)
44 |
45 | case class Book(title: String, author: Author, isbn: String, pages: Int, price: Double)
46 |
47 | }
48 |
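The same round trip without managing the output stream by hand, using the getBytes convenience the spec compares against; the Sensor case class is illustrative and relies on avro4s deriving its schema, just as it does for Book above:

    import java.io.ByteArrayInputStream

    import com.datamountaineer.streamreactor.connect.serialization.AvroSerializer

    case class Sensor(id: String, temperature: Double)

    val original = Sensor("probe-1", 21.5)
    val bytes = AvroSerializer.getBytes(original)                               // Avro-encode the case class
    val restored = AvroSerializer.read[Sensor](new ByteArrayInputStream(bytes)) // and decode it again
    assert(restored == original)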
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/sink/StringGenericRowKeyBuilderTest.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.sink
18 |
19 | import com.datamountaineer.streamreactor.connect.rowkeys.StringGenericRowKeyBuilder
20 | import org.apache.kafka.connect.data.Schema
21 | import org.apache.kafka.connect.sink.SinkRecord
22 | import org.scalatest.matchers.should.Matchers
23 | import org.scalatest.wordspec.AnyWordSpec
24 |
25 |
26 | class StringGenericRowKeyBuilderTest extends AnyWordSpec with Matchers {
27 | "StringGenericRowKeyBuilder" should {
28 | "use the topic, partition and offset to make the key" in {
29 |
30 | val topic = "sometopic"
31 | val partition = 2
32 | val offset = 1243L
33 | val sinkRecord = new SinkRecord(topic, partition, Schema.INT32_SCHEMA, 345, Schema.STRING_SCHEMA, "", offset)
34 |
35 | val keyBuilder = new StringGenericRowKeyBuilder()
36 | val expected = Seq(topic, partition, offset).mkString("|")
37 | keyBuilder.build(sinkRecord) shouldBe expected
38 | }
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/sink/StringSinkRecordKeyBuilderTest.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.sink
18 |
19 | import com.datamountaineer.streamreactor.connect.rowkeys.StringSinkRecordKeyBuilder
20 | import org.apache.kafka.connect.data.Schema
21 | import org.apache.kafka.connect.sink.SinkRecord
22 | import org.scalatest.matchers.should.Matchers
23 | import org.scalatest.wordspec.AnyWordSpec
24 |
25 |
26 | class StringSinkRecordKeyBuilderTest extends AnyWordSpec with Matchers {
27 | val keyRowKeyBuilder = new StringSinkRecordKeyBuilder()
28 |
29 | "SinkRecordKeyStringKeyBuilder" should {
30 |
31 | "create the right key from the Schema key value - Byte" in {
32 | val b = 123.toByte
33 | val sinkRecord = new SinkRecord("", 1, Schema.INT8_SCHEMA, b, Schema.FLOAT64_SCHEMA, Nil, 0)
34 |
35 | keyRowKeyBuilder.build(sinkRecord) shouldBe "123"
36 |
37 | }
38 | "create the right key from the Schema key value - String" in {
39 | val s = "somekey"
40 | val sinkRecord = new SinkRecord("", 1, Schema.STRING_SCHEMA, s, Schema.FLOAT64_SCHEMA, Nil, 0)
41 |
42 | keyRowKeyBuilder.build(sinkRecord) shouldBe s
43 | }
44 |
45 | "create the right key from the Schema key value - Bytes" in {
46 | val bArray = Array(23.toByte, 24.toByte, 242.toByte)
47 | val sinkRecord = new SinkRecord("", 1, Schema.BYTES_SCHEMA, bArray, Schema.FLOAT64_SCHEMA, Nil, 0)
48 | keyRowKeyBuilder.build(sinkRecord) shouldBe bArray.toString
49 | }
50 | "create the right key from the Schema key value - Boolean" in {
51 | val bool = true
52 | val sinkRecord = new SinkRecord("", 1, Schema.BOOLEAN_SCHEMA, bool, Schema.FLOAT64_SCHEMA, Nil, 0)
53 |
54 | keyRowKeyBuilder.build(sinkRecord) shouldBe "true"
55 |
56 | }
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/src/test/scala/com/datamountaineer/streamreactor/connect/sink/StringStructFieldsStringKeyBuilderTest.scala:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Datamountaineer.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.datamountaineer.streamreactor.connect.sink
18 |
19 | import com.datamountaineer.streamreactor.connect.rowkeys.StringStructFieldsStringKeyBuilder
20 | import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}
21 | import org.apache.kafka.connect.sink.SinkRecord
22 | import org.scalatest.matchers.should.Matchers
23 | import org.scalatest.wordspec.AnyWordSpec
24 |
25 |
26 | class StringStructFieldsStringKeyBuilderTest extends AnyWordSpec with Matchers {
27 | "StructFieldsStringKeyBuilder" should {
28 | "raise an exception if the field is not present in the struct" in {
29 | intercept[IllegalArgumentException] {
30 | val schema = SchemaBuilder.struct().name("com.example.Person")
31 | .field("firstName", Schema.STRING_SCHEMA)
32 | .field("age", Schema.INT32_SCHEMA)
33 | .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build()
34 |
35 | val struct = new Struct(schema).put("firstName", "Alex").put("age", 30)
36 |
37 | val sinkRecord = new SinkRecord("sometopic", 1, null, null, schema, struct, 1)
38 | StringStructFieldsStringKeyBuilder(Seq("threshold")).build(sinkRecord)
39 | }
40 | }
41 |
42 | "create the row key based on one single field in the struct" in {
43 | val schema = SchemaBuilder.struct().name("com.example.Person")
44 | .field("firstName", Schema.STRING_SCHEMA)
45 | .field("age", Schema.INT32_SCHEMA)
46 | .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build()
47 |
48 | val struct = new Struct(schema).put("firstName", "Alex").put("age", 30)
49 |
50 | val sinkRecord = new SinkRecord("sometopic", 1, null, null, schema, struct, 1)
51 | StringStructFieldsStringKeyBuilder(Seq("firstName")).build(sinkRecord) shouldBe "Alex"
52 | }
53 |
54 | "create the row key based on one single field with doc in the struct" in {
55 | val firstNameSchema = SchemaBuilder.`type`(Schema.Type.STRING).doc("first name")
56 | val schema = SchemaBuilder.struct().name("com.example.Person")
57 | .field("firstName", firstNameSchema)
58 | .field("age", Schema.INT32_SCHEMA)
59 | .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build()
60 |
61 | val struct = new Struct(schema).put("firstName", "Alex").put("age", 30)
62 |
63 | val sinkRecord = new SinkRecord("sometopic", 1, null, null, schema, struct, 1)
64 | StringStructFieldsStringKeyBuilder(Seq("firstName")).build(sinkRecord) shouldBe "Alex"
65 | }
66 |
67 | "create the row key based on more thant one field in the struct" in {
68 | val schema = SchemaBuilder.struct().name("com.example.Person")
69 | .field("firstName", Schema.STRING_SCHEMA)
70 | .field("age", Schema.INT32_SCHEMA)
71 | .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA).build()
72 |
73 | val struct = new Struct(schema).put("firstName", "Alex").put("age", 30)
74 |
75 | val sinkRecord = new SinkRecord("sometopic", 1, null, null, schema, struct, 1)
76 | StringStructFieldsStringKeyBuilder(Seq("firstName", "age")).build(sinkRecord) shouldBe "Alex.30"
77 | }
78 | }
79 | }
--------------------------------------------------------------------------------