├── .gitignore
├── LICENSE
├── Makefile
├── README.md
├── bin
│   ├── activator
│   └── activator.bat
├── build.sbt
├── configuration
│   ├── clickhouse-sink.properties
│   └── connect-standalone.properties
├── libexec
│   └── activator-launch-1.3.12.jar
├── project
│   ├── build.properties
│   └── plugins.sbt
└── src
    ├── main
    │   └── java
    │       └── com
    │           └── grabds
    │               └── kafka
    │                   └── connect
    │                       ├── ClickhouseSink.java
    │                       ├── ClickhouseTask.java
    │                       ├── Constants.java
    │                       ├── clickhouse
    │                       │   ├── ClickhouseSvcImpl.java
    │                       │   └── ClickhouseTypeInfo.java
    │                       ├── decoders
    │                       │   └── JsonDecoder.java
    │                       └── exceptions
    │                           └── MissedFieldException.java
    └── test
        └── java
            └── com
                └── grabds
                    └── kafka
                        └── connect
                            ├── ClickhouseTest.java
                            └── JsonDecoderTest.java
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /RUNNING_PID
2 | /logs/
3 | /project/*-shim.sbt
4 | /project/project/
5 | /project/target/
6 | /target/
7 | .idea
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2014 Typesafe, Inc.
2 |
3 | Licensed under the Apache License, Version 2.0 (the "License");
4 | you may not use this file except in compliance with the License.
5 | You may obtain a copy of the License at
6 |
7 | http://www.apache.org/licenses/LICENSE-2.0
8 |
9 | Unless required by applicable law or agreed to in writing, software
10 | distributed under the License is distributed on an "AS IS" BASIS,
11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | See the License for the specific language governing permissions and
13 | limitations under the License.
14 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 |
2 | build:
3 | 	sbt compile
4 |
5 | uber-jar:
6 | 	sbt assembly
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # clickhouse-kafka-connect
2 |
3 | Ingests data from a Kafka topic into a ClickHouse table (JSON format) with the help of Kafka Connect.
4 |
5 | Schema mapping is based on ClickHouse table metadata.
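6 |
7 | For illustration, the sink reads column metadata via `DESCRIBE <table>` and builds a positional insert from it (see `ClickhouseSvcImpl.generateInsertSql`); incoming JSON fields are matched to columns by name, and non-`Nullable` columns are required. A sketch of the mapping for the `Table10` schema used in the quickstart below:
8 |
9 | ```sql
10 | -- DESCRIBE DB01.Table10 yields (name, type) rows such as:
11 | --   UpdateDate Date, GeoHash String, NChecked UInt32, NBooked Nullable(UInt32)
12 | -- from which the connector generates:
13 | INSERT INTO Table10 (UpdateDate, GeoHash, NChecked, NBooked) VALUES (?, ?, ?, ?)
14 | ```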
15 |
16 | ## Quickstart
17 |
18 | Run a Kafka broker
19 |
20 | ```bash
21 |
22 | # run zookeeper
23 | docker run -itd --name zookeeper -p 2181:2181 -m 512m zookeeper
24 |
25 | # run kafka
26 | docker run -itd \
27 | --name kafka \
28 | --rm \
29 | -m 2048m \
30 | --hostname localhost \
31 | --link zookeeper:zookeeper \
32 | -p 9092:9092 \
33 | --env KAFKA_ADVERTISED_HOST_NAME=127.0.0.1 \
34 | --env ZOOKEEPER_IP=127.0.0.1 \
35 | --env ZOOKEEPER_CHROOT=/kafka_v_0_10 \
36 | --env KAFKA_DEFAULT_REPLICATION_FACTOR=1 \
37 | dmitryb/kafka-0.10.2.1:1.0
38 |
39 |
40 | # create kafka topic
41 | kafka-topics.sh --create --zookeeper localhost:2181/kafka_v_0_10 --replication-factor 1 --partitions 3 --topic table10-json
42 |
43 | ```
44 |
45 | Run a ClickHouse server
46 |
47 | ```bash
48 |
49 | # run clickhouse server
50 | docker run -itd --name clickhouse -p 8123:8123 -p 9000:9000 dmitryb/clickhouse-server:latest
51 |
52 | # open clickhouse client
53 |
54 | docker run -it --rm --net=host yandex/clickhouse-client -h localhost
55 |
56 | # create db & table
57 | CREATE DATABASE IF NOT EXISTS DB01;
58 |
59 | USE DB01;
60 |
61 | CREATE TABLE IF NOT EXISTS DB01.Table10 ON CLUSTER default_cluster
62 | (
63 | UpdateDate Date,
64 | GeoHash String,
65 | NChecked UInt32,
66 | NBooked Nullable(UInt32)
67 | ) ENGINE = MergeTree(UpdateDate, (GeoHash, UpdateDate), 8192);
68 |
69 | # create a distributed table (if using a cluster config)
70 | CREATE TABLE IF NOT EXISTS DB01.Table10_c ON CLUSTER default_cluster as DB01.Table10
71 | ENGINE = Distributed(default_cluster, DB01, Table10, cityHash64(GeoHash));
72 |
73 | ```
74 |
75 | Run Kafka Connect
76 |
77 | ```bash
78 |
79 | # download and install kafka connect (or use a docker image)
80 | # https://www.confluent.io/download/
81 |
82 | # run connect
83 | ./bin/connect-standalone ./configuration/connect-standalone.properties ./configuration/clickhouse-sink.properties
84 |
85 | # send data to the kafka topic
86 | echo '{"UpdateDate": "2018-04-12", "GeoHash": "geo01", "NChecked": 10, "NBooked": 3}' | kafka-console-producer.sh --broker-list localhost:9092 --topic table10-json
87 |
88 | # query the clickhouse table
89 | docker run -it --rm --net=host yandex/clickhouse-client -h localhost --query "SELECT * FROM DB01.Table10"
90 | ```
--------------------------------------------------------------------------------
/bin/activator:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | ### ------------------------------- ###
4 | ### Helper methods for BASH scripts ###
5 | ### ------------------------------- ###
6 |
7 | realpath () {
8 | (
9 | TARGET_FILE="$1"
10 | FIX_CYGPATH="$2"
11 |
12 | cd "$(dirname "$TARGET_FILE")"
13 | TARGET_FILE=$(basename "$TARGET_FILE")
14 |
15 | COUNT=0
16 | while [ -L "$TARGET_FILE" -a $COUNT -lt 100 ]
17 | do
18 | TARGET_FILE=$(readlink "$TARGET_FILE")
19 | cd "$(dirname "$TARGET_FILE")"
20 | TARGET_FILE=$(basename "$TARGET_FILE")
21 | COUNT=$(($COUNT + 1))
22 | done
23 |
24 | # make sure we grab the actual windows path, instead of cygwin's path.
25 | if [[ "x$FIX_CYGPATH" != "x" ]]; then
26 | echo "$(cygwinpath "$(pwd -P)/$TARGET_FILE")"
27 | else
28 | echo "$(pwd -P)/$TARGET_FILE"
29 | fi
30 | )
31 | }
32 |
33 |
34 | # Uses uname to detect if we're in the odd cygwin environment.
35 | is_cygwin() {
36 | local os=$(uname -s)
37 | case "$os" in
38 | CYGWIN*) return 0 ;;
39 | *) return 1 ;;
40 | esac
41 | }
42 |
43 | # TODO - Use nicer bash-isms here.
44 | CYGWIN_FLAG=$(if is_cygwin; then echo true; else echo false; fi)
45 |
46 |
47 | # This can fix cygwin style /cygdrive paths so we get the
48 | # windows style paths.
49 | cygwinpath() { 50 | local file="$1" 51 | if [[ "$CYGWIN_FLAG" == "true" ]]; then 52 | echo $(cygpath -w $file) 53 | else 54 | echo $file 55 | fi 56 | } 57 | 58 | # Make something URI friendly 59 | make_url() { 60 | url="$1" 61 | local nospaces=${url// /%20} 62 | if is_cygwin; then 63 | echo "/${nospaces//\\//}" 64 | else 65 | echo "$nospaces" 66 | fi 67 | } 68 | 69 | declare -a residual_args 70 | declare -a java_args 71 | declare -a scalac_args 72 | declare -a sbt_commands 73 | declare java_cmd=java 74 | declare java_version 75 | declare -r real_script_path="$(realpath "$0")" 76 | declare -r sbt_home="$(realpath "$(dirname "$(dirname "$real_script_path")")")" 77 | declare -r sbt_bin_dir="$(dirname "$real_script_path")" 78 | declare -r app_version="1.3.12" 79 | 80 | declare -r script_name=activator 81 | declare -r java_opts=( "${ACTIVATOR_OPTS[@]}" "${SBT_OPTS[@]}" "${JAVA_OPTS[@]}" "${java_opts[@]}" ) 82 | userhome="$HOME" 83 | if is_cygwin; then 84 | # cygwin sets home to something f-d up, set to real windows homedir 85 | userhome="$USERPROFILE" 86 | fi 87 | declare -r activator_user_home_dir="${userhome}/.activator" 88 | declare -r java_opts_config_home="${activator_user_home_dir}/activatorconfig.txt" 89 | declare -r java_opts_config_version="${activator_user_home_dir}/${app_version}/activatorconfig.txt" 90 | 91 | echoerr () { 92 | echo 1>&2 "$@" 93 | } 94 | vlog () { 95 | [[ $verbose || $debug ]] && echoerr "$@" 96 | } 97 | dlog () { 98 | [[ $debug ]] && echoerr "$@" 99 | } 100 | 101 | jar_file () { 102 | echo "$(cygwinpath "${sbt_home}/libexec/activator-launch-${app_version}.jar")" 103 | } 104 | 105 | acquire_sbt_jar () { 106 | sbt_jar="$(jar_file)" 107 | 108 | if [[ ! -f "$sbt_jar" ]]; then 109 | echoerr "Could not find launcher jar: $sbt_jar" 110 | exit 2 111 | fi 112 | } 113 | 114 | execRunner () { 115 | # print the arguments one to a line, quoting any containing spaces 116 | [[ $verbose || $debug ]] && echo "# Executing command line:" && { 117 | for arg; do 118 | if printf "%s\n" "$arg" | grep -q ' '; then 119 | printf "\"%s\"\n" "$arg" 120 | else 121 | printf "%s\n" "$arg" 122 | fi 123 | done 124 | echo "" 125 | } 126 | 127 | # THis used to be exec, but we loose the ability to re-hook stty then 128 | # for cygwin... Maybe we should flag the feature here... 129 | "$@" 130 | } 131 | 132 | addJava () { 133 | dlog "[addJava] arg = '$1'" 134 | java_args=( "${java_args[@]}" "$1" ) 135 | } 136 | addSbt () { 137 | dlog "[addSbt] arg = '$1'" 138 | sbt_commands=( "${sbt_commands[@]}" "$1" ) 139 | } 140 | addResidual () { 141 | dlog "[residual] arg = '$1'" 142 | residual_args=( "${residual_args[@]}" "$1" ) 143 | } 144 | addDebugger () { 145 | addJava "-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=$1" 146 | } 147 | addConfigOpts () { 148 | dlog "[addConfigOpts] arg = '$*'" 149 | for item in $* 150 | do 151 | addJava "$item" 152 | done 153 | } 154 | 155 | get_mem_opts () { 156 | # if we detect any of these settings in ${JAVA_OPTS} we need to NOT output our settings. 157 | # The reason is the Xms/Xmx, if they don't line up, cause errors. 158 | if [[ "${JAVA_OPTS}" == *-Xmx* ]] || [[ "${JAVA_OPTS}" == *-Xms* ]] || [[ "${JAVA_OPTS}" == *-XX:MaxPermSize* ]] || [[ "${JAVA_OPTS}" == *-XX:MaxMetaspaceSize* ]] || [[ "${JAVA_OPTS}" == *-XX:ReservedCodeCacheSize* ]]; then 159 | echo "" 160 | else 161 | # a ham-fisted attempt to move some memory settings in concert 162 | # so they need not be messed around with individually. 
163 | local mem=${1:-1024} 164 | local codecache=$(( $mem / 8 )) 165 | (( $codecache > 128 )) || codecache=128 166 | (( $codecache < 512 )) || codecache=512 167 | local class_metadata_size=$(( $codecache * 2 )) 168 | local class_metadata_opt=$([[ "$java_version" < "1.8" ]] && echo "MaxPermSize" || echo "MaxMetaspaceSize") 169 | 170 | echo "-Xms${mem}m -Xmx${mem}m -XX:ReservedCodeCacheSize=${codecache}m -XX:${class_metadata_opt}=${class_metadata_size}m" 171 | fi 172 | } 173 | 174 | require_arg () { 175 | local type="$1" 176 | local opt="$2" 177 | local arg="$3" 178 | if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then 179 | echo "$opt requires <$type> argument" 180 | exit 1 181 | fi 182 | } 183 | 184 | is_function_defined() { 185 | declare -f "$1" > /dev/null 186 | } 187 | 188 | # If we're *not* running in a terminal, and we don't have any arguments, then we need to add the 'ui' parameter 189 | detect_terminal_for_ui() { 190 | [[ ! -t 0 ]] && [[ "${#residual_args}" == "0" ]] && { 191 | addResidual "ui" 192 | } 193 | # SPECIAL TEST FOR MAC 194 | [[ "$(uname)" == "Darwin" ]] && [[ "$HOME" == "$PWD" ]] && [[ "${#residual_args}" == "0" ]] && { 195 | echo "Detected MAC OSX launched script...." 196 | echo "Swapping to UI" 197 | addResidual "ui" 198 | } 199 | } 200 | 201 | process_args () { 202 | while [[ $# -gt 0 ]]; do 203 | case "$1" in 204 | -h|-help) usage; exit 1 ;; 205 | -v|-verbose) verbose=1 && shift ;; 206 | -d|-debug) debug=1 && shift ;; 207 | 208 | -ivy) require_arg path "$1" "$2" && addJava "-Dsbt.ivy.home=$2" && shift 2 ;; 209 | -mem) require_arg integer "$1" "$2" && sbt_mem="$2" && shift 2 ;; 210 | -jvm-debug) require_arg port "$1" "$2" && addDebugger $2 && shift 2 ;; 211 | -batch) exec &1 | awk -F '"' '/version/ {print $2}') 230 | vlog "[process_args] java_version = '$java_version'" 231 | } 232 | 233 | # Detect that we have java installed. 234 | checkJava() { 235 | local required_version="$1" 236 | # Now check to see if it's a good enough version 237 | if [[ "$java_version" == "" ]]; then 238 | echo 239 | echo No java installations was detected. 240 | echo Please go to http://www.java.com/getjava/ and download 241 | echo 242 | exit 1 243 | elif [[ ! "$java_version" > "$required_version" ]]; then 244 | echo 245 | echo The java installation you have is not up to date 246 | echo $script_name requires at least version $required_version+, you have 247 | echo version $java_version 248 | echo 249 | echo Please go to http://www.java.com/getjava/ and download 250 | echo a valid Java Runtime and install before running $script_name. 251 | echo 252 | exit 1 253 | fi 254 | } 255 | 256 | 257 | run() { 258 | # no jar? download it. 259 | [[ -f "$sbt_jar" ]] || acquire_sbt_jar "$sbt_version" || { 260 | # still no jar? uh-oh. 261 | echo "Download failed. Obtain the sbt-launch.jar manually and place it at $sbt_jar" 262 | exit 1 263 | } 264 | 265 | # process the combined args, then reset "$@" to the residuals 266 | process_args "$@" 267 | detect_terminal_for_ui 268 | set -- "${residual_args[@]}" 269 | argumentCount=$# 270 | 271 | # TODO - java check should be configurable... 
272 | checkJava "1.6" 273 | 274 | #If we're in cygwin, we should use the windows config, and terminal hacks 275 | if [[ "$CYGWIN_FLAG" == "true" ]]; then 276 | stty -icanon min 1 -echo > /dev/null 2>&1 277 | addJava "-Djline.terminal=jline.UnixTerminal" 278 | addJava "-Dsbt.cygwin=true" 279 | fi 280 | 281 | # run sbt 282 | execRunner "$java_cmd" \ 283 | "-Dactivator.home=$(make_url "$sbt_home")" \ 284 | ${SBT_OPTS:-$default_sbt_opts} \ 285 | $(get_mem_opts $sbt_mem) \ 286 | ${JAVA_OPTS} \ 287 | ${java_args[@]} \ 288 | -jar "$sbt_jar" \ 289 | "${sbt_commands[@]}" \ 290 | "${residual_args[@]}" 291 | 292 | exit_code=$? 293 | 294 | # Clean up the terminal from cygwin hacks. 295 | if [[ "$CYGWIN_FLAG" == "true" ]]; then 296 | stty icanon echo > /dev/null 2>&1 297 | fi 298 | exit $exit_code 299 | } 300 | 301 | 302 | declare -r noshare_opts="-Dsbt.global.base=project/.sbtboot -Dsbt.boot.directory=project/.boot -Dsbt.ivy.home=project/.ivy" 303 | declare -r sbt_opts_file=".sbtopts" 304 | declare -r etc_sbt_opts_file="${sbt_home}/conf/sbtopts" 305 | declare -r win_sbt_opts_file="${sbt_home}/conf/sbtconfig.txt" 306 | 307 | usage() { 308 | cat < path to global settings/plugins directory (default: ~/.sbt) 323 | -sbt-boot path to shared boot directory (default: ~/.sbt/boot in 0.11 series) 324 | -ivy path to local Ivy repository (default: ~/.ivy2) 325 | -mem set memory options (default: $sbt_mem, which is $(get_mem_opts $sbt_mem)) 326 | -no-share use all local caches; no sharing 327 | -no-global uses global caches, but does not use global ~/.sbt directory. 328 | -jvm-debug Turn on JVM debugging, open at the given port. 329 | -batch Disable interactive mode 330 | 331 | # sbt version (default: from project/build.properties if present, else latest release) 332 | -sbt-version use the specified version of sbt 333 | -sbt-jar use the specified jar as the sbt launcher 334 | -sbt-rc use an RC version of sbt 335 | -sbt-snapshot use a snapshot version of sbt 336 | 337 | # java version (default: java from PATH, currently $(java -version 2>&1 | grep version)) 338 | -java-home alternate JAVA_HOME 339 | 340 | # jvm options and output control 341 | JAVA_OPTS environment variable, if unset uses "$java_opts" 342 | SBT_OPTS environment variable, if unset uses "$default_sbt_opts" 343 | ACTIVATOR_OPTS Environment variable, if unset uses "" 344 | .sbtopts if this file exists in the current directory, it is 345 | prepended to the runner args 346 | /etc/sbt/sbtopts if this file exists, it is prepended to the runner args 347 | -Dkey=val pass -Dkey=val directly to the java runtime 348 | -J-X pass option -X directly to the java runtime 349 | (-J is stripped) 350 | -S-X add -X to sbt's scalacOptions (-S is stripped) 351 | 352 | In the case of duplicated or conflicting options, the order above 353 | shows precedence: JAVA_OPTS lowest, command line options highest. 
354 | EOM 355 | } 356 | 357 | 358 | 359 | process_my_args () { 360 | while [[ $# -gt 0 ]]; do 361 | case "$1" in 362 | -no-colors) addJava "-Dsbt.log.noformat=true" && shift ;; 363 | -no-share) addJava "$noshare_opts" && shift ;; 364 | -no-global) addJava "-Dsbt.global.base=$(pwd)/project/.sbtboot" && shift ;; 365 | -sbt-boot) require_arg path "$1" "$2" && addJava "-Dsbt.boot.directory=$2" && shift 2 ;; 366 | -sbt-dir) require_arg path "$1" "$2" && addJava "-Dsbt.global.base=$2" && shift 2 ;; 367 | -debug-inc) addJava "-Dxsbt.inc.debug=true" && shift ;; 368 | -batch) exec ^&1') do ( 109 | if %%~j==java set JAVAINSTALLED=1 110 | if %%~j==openjdk set JAVAINSTALLED=1 111 | ) 112 | 113 | rem Detect the same thing about javac 114 | if "%_JAVACCMD%"=="" ( 115 | if not "%JAVA_HOME%"=="" ( 116 | if exist "%JAVA_HOME%\bin\javac.exe" set "_JAVACCMD=%JAVA_HOME%\bin\javac.exe" 117 | ) 118 | ) 119 | if "%_JAVACCMD%"=="" set _JAVACCMD=javac 120 | for /F %%j in ('"%_JAVACCMD%" -version 2^>^&1') do ( 121 | if %%~j==javac set JAVACINSTALLED=1 122 | ) 123 | 124 | rem BAT has no logical or, so we do it OLD SCHOOL! Oppan Redmond Style 125 | set JAVAOK=true 126 | if not defined JAVAINSTALLED set JAVAOK=false 127 | if not defined JAVACINSTALLED set JAVAOK=false 128 | 129 | if "%JAVAOK%"=="false" ( 130 | echo. 131 | echo A Java JDK is not installed or can't be found. 132 | if not "%JAVA_HOME%"=="" ( 133 | echo JAVA_HOME = "%JAVA_HOME%" 134 | ) 135 | echo. 136 | echo Please go to 137 | echo http://www.oracle.com/technetwork/java/javase/downloads/index.html 138 | echo and download a valid Java JDK and install before running Activator. 139 | echo. 140 | echo If you think this message is in error, please check 141 | echo your environment variables to see if "java.exe" and "javac.exe" are 142 | echo available via JAVA_HOME or PATH. 143 | echo. 144 | if defined DOUBLECLICKED pause 145 | exit /B 1 146 | ) 147 | 148 | rem Check what Java version is being used to determine what memory options to use 149 | for /f "tokens=3" %%g in ('java -version 2^>^&1 ^| findstr /i "version"') do ( 150 | set JAVA_VERSION=%%g 151 | ) 152 | 153 | rem Strips away the " characters 154 | set JAVA_VERSION=%JAVA_VERSION:"=% 155 | 156 | rem TODO Check if there are existing mem settings in JAVA_OPTS/CFG_OPTS and use those instead of the below 157 | for /f "delims=. tokens=1-3" %%v in ("%JAVA_VERSION%") do ( 158 | set MAJOR=%%v 159 | set MINOR=%%w 160 | set BUILD=%%x 161 | 162 | set META_SIZE=-XX:MetaspaceSize=64M -XX:MaxMetaspaceSize=256M 163 | if "!MINOR!" LSS "8" ( 164 | set META_SIZE=-XX:PermSize=64M -XX:MaxPermSize=256M 165 | ) 166 | 167 | set MEM_OPTS=!META_SIZE! 168 | ) 169 | 170 | rem We use the value of the JAVA_OPTS environment variable if defined, rather than the config. 171 | set _JAVA_OPTS=%JAVA_OPTS% 172 | if "%_JAVA_OPTS%"=="" set _JAVA_OPTS=%CFG_OPTS% 173 | 174 | set DEBUG_OPTS= 175 | 176 | rem Loop through the arguments, building remaining args in args variable 177 | set args= 178 | :argsloop 179 | if not "%~1"=="" ( 180 | rem Checks if the argument contains "-D" and if true, adds argument 1 with 2 and puts an equal sign between them. 181 | rem This is done since batch considers "=" to be a delimiter so we need to circumvent this behavior with a small hack. 
182 | set arg1=%~1
183 | if "!arg1:~0,2!"=="-D" (
184 | set "args=%args% "%~1"="%~2""
185 | shift
186 | shift
187 | goto argsloop
188 | )
189 |
190 | if "%~1"=="-jvm-debug" (
191 | if not "%~2"=="" (
192 | rem This piece of magic somehow checks that an argument is a number
193 | for /F "delims=0123456789" %%i in ("%~2") do (
194 | set var="%%i"
195 | )
196 | if defined var (
197 | rem Not a number, assume no argument given and default to 9999
198 | set JPDA_PORT=9999
199 | ) else (
200 | rem Port was given, shift arguments
201 | set JPDA_PORT=%~2
202 | shift
203 | )
204 | ) else (
205 | set JPDA_PORT=9999
206 | )
207 | shift
208 |
209 | set DEBUG_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=!JPDA_PORT!
210 | goto argsloop
211 | )
212 | rem else
213 | set "args=%args% "%~1""
214 | shift
215 | goto argsloop
216 | )
217 |
218 | :run
219 |
220 | if "!args!"=="" (
221 | if defined DOUBLECLICKED (
222 | set CMDS="ui"
223 | ) else set CMDS=!args!
224 | ) else set CMDS=!args!
225 |
226 | rem We add a / in front, so we get file:///C: instead of file://C:
227 | rem Java considers the latter a UNC path.
228 | rem We also attempt a solid effort at making it URI friendly.
229 | rem We don't even bother with UNC paths.
230 | set JAVA_FRIENDLY_HOME_1=/!ACTIVATOR_HOME:\=/!
231 | set JAVA_FRIENDLY_HOME=/!JAVA_FRIENDLY_HOME_1: =%%20!
232 |
233 | rem Checks if the command contains spaces to know if it should be wrapped in quotes or not
234 | set NON_SPACED_CMD=%_JAVACMD: =%
235 | if "%_JAVACMD%"=="%NON_SPACED_CMD%" %_JAVACMD% %DEBUG_OPTS% %MEM_OPTS% %ACTIVATOR_OPTS% %SBT_OPTS% %_JAVA_OPTS% "-Dactivator.home=%JAVA_FRIENDLY_HOME%" -jar "%ACTIVATOR_HOME%\libexec\%ACTIVATOR_LAUNCH_JAR%" %CMDS%
236 | if NOT "%_JAVACMD%"=="%NON_SPACED_CMD%" "%_JAVACMD%" %DEBUG_OPTS% %MEM_OPTS% %ACTIVATOR_OPTS% %SBT_OPTS% %_JAVA_OPTS% "-Dactivator.home=%JAVA_FRIENDLY_HOME%" -jar "%ACTIVATOR_HOME%\libexec\%ACTIVATOR_LAUNCH_JAR%" %CMDS%
237 |
238 | if ERRORLEVEL 1 goto error
239 | goto end
240 |
241 | :error
242 | set ERROR_CODE=1
243 |
244 | :end
245 |
246 | @endlocal
247 |
248 | exit /B %ERROR_CODE%
249 |
--------------------------------------------------------------------------------
/build.sbt:
--------------------------------------------------------------------------------
1 | name := """clickhouse-kafka-connect"""
2 |
3 | version := "1.0"
4 |
5 | scalaVersion := "2.11.6"
6 |
7 | libraryDependencies ++= Seq(
8 | "org.apache.kafka" % "connect-api" % "0.10.0.0" % "provided",
9 | "com.google.code.gson" % "gson" % "2.8.0",
10 |
11 | "ru.yandex.clickhouse" % "clickhouse-jdbc" % "0.1.39",
12 |
13 | "org.apache.logging.log4j" % "log4j-api" % "2.8.2",
14 | "org.apache.logging.log4j" % "log4j-core" % "2.8.2",
15 |
16 | "junit" % "junit" % "4.12" % "test",
17 | "com.novocode" % "junit-interface" % "0.11" % "test"
18 | )
19 |
20 | // sbt-pack plugin
21 | packAutoSettings
22 |
23 | // uber-jar plugin (sbt-assembly)
24 |
25 | // skip tests during assembly
26 | test in assembly := {}
27 |
28 | // merge strategy
29 | assemblyMergeStrategy in assembly := {
30 | case PathList("META-INF", xs @ _*) => MergeStrategy.discard
31 | case x => MergeStrategy.first
32 | }
--------------------------------------------------------------------------------
/configuration/clickhouse-sink.properties:
--------------------------------------------------------------------------------
1 | # clickhouse-sink.properties config file
2 |
3 | name=clickhouse-sink
4 | connector.class=com.grabds.kafka.connect.ClickhouseSink
5 | tasks.max=1
6 | topics=table10-json
7 |
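8 | # target ClickHouse instance (keys are defined in com.grabds.kafka.connect.Constants):
9 | # clickhouse.server is a JDBC URI; clickhouse.db / clickhouse.table name the destination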
10 | clickhouse.server=jdbc:clickhouse://internal-af67bdeb33cac11e8a4310671e315a7c-1771144458.ap-southeast-1.elb.amazonaws.com:8123
11 | clickhouse.db=DB01
12 | clickhouse.table=Table10_c
--------------------------------------------------------------------------------
/configuration/connect-standalone.properties:
--------------------------------------------------------------------------------
1 | # my-standalone.properties worker config file
2 |
3 | # bootstrap kafka servers
4 | bootstrap.servers=localhost:9092
5 |
6 | # specify input data format
7 | key.converter=org.apache.kafka.connect.storage.StringConverter
8 | value.converter=org.apache.kafka.connect.storage.StringConverter
9 |
10 | # The internal converters used for offsets; most deployments will want the built-in defaults
11 | internal.key.converter=org.apache.kafka.connect.json.JsonConverter
12 | internal.value.converter=org.apache.kafka.connect.json.JsonConverter
13 | internal.key.converter.schemas.enable=false
14 | internal.value.converter.schemas.enable=false
15 |
16 | # local file storing offsets and config data
17 | offset.storage.file.filename=/tmp/connect.offsets
18 |
19 | # search for plugins
20 | plugin.path=/Users/dmitry.bezyazychnyy/workspace/clickhouse-kafka-connect/target/scala-2.11
--------------------------------------------------------------------------------
/libexec/activator-launch-1.3.12.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DmitryBe/clickhouse-kafka-connect/63bc8afdc14c9f062cae1f9d98b97e05ab132c0b/libexec/activator-launch-1.3.12.jar
--------------------------------------------------------------------------------
/project/build.properties:
--------------------------------------------------------------------------------
1 | #Activator-generated Properties
2 | #Wed Apr 11 13:23:14 SGT 2018
3 | template.uuid=9cbaa284-7f8c-4ef2-b48e-fc3ee73a11eb
4 | sbt.version=0.13.8
--------------------------------------------------------------------------------
/project/plugins.sbt:
--------------------------------------------------------------------------------
1 | addSbtPlugin("org.xerial.sbt" % "sbt-pack" % "0.8.2")
2 | addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.5")
--------------------------------------------------------------------------------
/src/main/java/com/grabds/kafka/connect/ClickhouseSink.java:
--------------------------------------------------------------------------------
1 | package com.grabds.kafka.connect;
2 |
3 | import org.apache.kafka.connect.sink.SinkConnector;
4 | import org.apache.kafka.connect.connector.Task;
5 | import org.apache.kafka.common.config.ConfigDef;
6 | import org.apache.kafka.common.config.ConfigDef.Type;
7 | import org.apache.kafka.common.config.ConfigDef.Importance;
8 |
9 | import java.util.List;
10 | import java.util.LinkedList;
11 | import java.util.Map;
12 | import java.util.HashMap;
13 |
14 | public class ClickhouseSink extends SinkConnector {
15 |
16 | private String clickhouseUri;
17 | private String clickhouseDb;
18 | private String clickhouseTable;
19 |
20 | @Override
21 | public String version() {
22 | return Constants.VERSION;
23 | }
24 |
25 | @Override
26 | public void start(Map<String, String> props) {
27 | clickhouseUri = props.get(Constants.CLICKHOUSE_URI);
28 | clickhouseDb = props.get(Constants.CLICKHOUSE_DB);
29 | clickhouseTable = props.get(Constants.CLICKHOUSE_TABLE);
30 | }
31 |
32 | @Override
33 | public Class<? extends Task> taskClass() {
34 | return ClickhouseTask.class;
35 | }
36 |
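37 | // each task created below receives an identical copy of the connector settings,
38 | // so tasks.max simply controls how many parallel sink tasks write to the same table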
39 | @Override
40 | public List<Map<String, String>> taskConfigs(int maxTasks) {
41 |
42 | final List<Map<String, String>> configs = new LinkedList<>();
43 |
44 | for (int i = 0; i < maxTasks; i++) {
45 | final Map<String, String> config = new HashMap<>();
46 | config.put(Constants.CLICKHOUSE_URI, clickhouseUri);
47 | config.put(Constants.CLICKHOUSE_DB, clickhouseDb);
48 | config.put(Constants.CLICKHOUSE_TABLE, clickhouseTable);
49 |
50 | configs.add(config);
51 | }
52 |
53 | return configs;
54 |
55 | }
56 |
57 | @Override
58 | public void stop() {
59 |
60 | }
61 |
62 | @Override
63 | public ConfigDef config() {
64 |
65 | final ConfigDef configDef = new ConfigDef();
66 | configDef.define(Constants.CLICKHOUSE_URI, Type.STRING, "jdbc:clickhouse://localhost:8123", Importance.HIGH, "Clickhouse uri (jdbc:clickhouse://<host>:<port>)");
67 | configDef.define(Constants.CLICKHOUSE_DB, Type.STRING, Importance.HIGH, "Database name");
68 | configDef.define(Constants.CLICKHOUSE_TABLE, Type.STRING, Importance.HIGH, "Table name");
69 |
70 | return configDef;
71 | }
72 | }
--------------------------------------------------------------------------------
/src/main/java/com/grabds/kafka/connect/ClickhouseTask.java:
--------------------------------------------------------------------------------
1 | package com.grabds.kafka.connect;
2 |
3 | import com.google.gson.*;
4 | import com.grabds.kafka.connect.clickhouse.ClickhouseSvcImpl;
5 | import com.grabds.kafka.connect.decoders.JsonDecoder;
6 | import org.apache.kafka.connect.sink.SinkTask;
7 | import org.apache.kafka.connect.sink.SinkRecord;
8 | import org.apache.kafka.common.TopicPartition;
9 | import org.apache.kafka.clients.consumer.OffsetAndMetadata;
10 | import org.apache.logging.log4j.LogManager;
11 | import org.apache.logging.log4j.Logger;
12 |
13 | import java.sql.SQLException;
14 | import java.util.Collection;
15 | import java.util.Map;
16 | import java.util.stream.Collectors;
17 |
18 | public class ClickhouseTask extends SinkTask {
19 |
20 | private static Logger log = LogManager.getLogger(ClickhouseTask.class);
21 | private JsonDecoder decoder;
22 | private ClickhouseSvcImpl svc;
23 |
24 | @Override
25 | public String version() {
26 | return Constants.VERSION;
27 | }
28 |
29 | @Override
30 | public void start(Map<String, String> props) {
31 |
32 | log.info("ClickhouseTask is starting");
33 |
34 | // init json decoder
35 | decoder = new JsonDecoder();
36 |
37 | // init clickhouse service
38 | try {
39 | String clickhouseUri = props.get(Constants.CLICKHOUSE_URI);
40 | String clickhouseDb = props.get(Constants.CLICKHOUSE_DB);
41 | String clickhouseTable = props.get(Constants.CLICKHOUSE_TABLE);
42 | svc = new ClickhouseSvcImpl(clickhouseUri, clickhouseDb, clickhouseTable);
43 | log.info("clickhouse svc created");
44 | } catch (SQLException e) {
45 | log.error("error initializing clickhouse service", e);
46 | throw new org.apache.kafka.connect.errors.RetriableException(e);
47 | } catch (Exception e) {
48 | // fail the task instead of leaving svc uninitialized (put() would NPE otherwise)
49 | log.error("error initializing clickhouse service", e);
50 | throw new org.apache.kafka.connect.errors.ConnectException(e);
51 | }
52 | }
53 |
54 | @Override
55 | public void put(Collection<SinkRecord> records) {
56 |
57 | // the expected SinkRecord value type is String
58 | Collection<String> strRecords = records.stream()
59 | .map(r -> r.value().toString())
60 | .collect(Collectors.toList());
61 |
62 | // str -> jsonObject
63 | Collection<JsonObject> jsonRecords = decoder.tryDecode(strRecords);
64 |
65 | Integer processedCount = this.svc.batchProcess(jsonRecords);
66 | log.info(String.format("processed: %d", processedCount));
67 | }
68 |
69 | @Override
70 | public void flush(Map<TopicPartition, OffsetAndMetadata> offsets) {}
71 |
72 | @Override
73 | public void stop() {
log.info("ClickhouseTask is stopping"); 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /src/main/java/com/grabds/kafka/connect/Constants.java: -------------------------------------------------------------------------------- 1 | package com.grabds.kafka.connect; 2 | 3 | public final class Constants { 4 | 5 | public static final String VERSION = "0.1"; 6 | public static final String CLICKHOUSE_URI = "clickhouse.server"; 7 | public static final String CLICKHOUSE_DB = "clickhouse.db"; 8 | public static final String CLICKHOUSE_TABLE = "clickhouse.table"; 9 | 10 | public Constants() { 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /src/main/java/com/grabds/kafka/connect/clickhouse/ClickhouseSvcImpl.java: -------------------------------------------------------------------------------- 1 | package com.grabds.kafka.connect.clickhouse; 2 | 3 | import com.google.gson.*; 4 | import org.apache.kafka.connect.errors.RetriableException; 5 | import ru.yandex.clickhouse.ClickHouseDataSource; 6 | import ru.yandex.clickhouse.settings.ClickHouseProperties; 7 | import org.apache.logging.log4j.LogManager; 8 | import org.apache.logging.log4j.Logger; 9 | import scala.Tuple2; 10 | import com.grabds.kafka.connect.exceptions.*; 11 | import java.sql.*; 12 | import java.util.ArrayList; 13 | import java.util.Arrays; 14 | import java.util.Collection; 15 | import java.util.List; 16 | import java.util.regex.Matcher; 17 | import java.util.regex.Pattern; 18 | import java.util.stream.Collectors; 19 | import java.util.stream.IntStream; 20 | 21 | public class ClickhouseSvcImpl { 22 | 23 | private static Pattern clickhouseTypePattern = Pattern.compile("^Nullable\\(([\\d\\w]+)\\)"); 24 | private static Logger log = LogManager.getLogger(ClickhouseSvcImpl.class); 25 | private ClickHouseDataSource dataSource; 26 | private String tableName; 27 | private List schema; 28 | private String insertSql; 29 | 30 | public List getSchema() { 31 | return schema; 32 | } 33 | 34 | public ClickhouseSvcImpl(String serverUri, String dbName, String table) throws SQLException { 35 | 36 | ClickHouseProperties properties = new ClickHouseProperties(); 37 | properties.setDatabase(dbName); 38 | 39 | this.dataSource = new ClickHouseDataSource(serverUri, properties); 40 | this.tableName = table; 41 | 42 | // retrieve schema info 43 | try(Connection connection = this.dataSource.getConnection()) { 44 | this.schema = this.getSchema(connection, table); 45 | this.insertSql = this.generateInsertSql(); 46 | } 47 | 48 | } 49 | 50 | public String generateInsertSql() { 51 | 52 | String fieldsStr = schema.stream() 53 | .map(f -> f.fieldName) 54 | .collect(Collectors.joining(", ")); 55 | 56 | String valsStr = IntStream.range(0, schema.size()).boxed() 57 | .collect(Collectors.toList()) 58 | .stream() 59 | .map(x -> "?") 60 | .collect(Collectors.joining(", ")); 61 | 62 | return String.format("INSERT INTO %s (%s) VALUES (%s)", this.tableName, fieldsStr, valsStr); 63 | } 64 | 65 | public Integer batchProcess(Collection recordsAsJson) { 66 | 67 | try(Connection connection = dataSource.getConnection()) { 68 | 69 | // create batch insert statment 70 | PreparedStatement statement = connection.prepareStatement(this.insertSql); 71 | populateInsertStatement(recordsAsJson, statement); 72 | return Arrays.stream(statement.executeBatch()).sum(); 73 | 74 | } catch (SQLException e){ 75 | // by throwing RetriableException we expect that Task will be restarted 76 | throw new 
75 | }
76 | }
77 |
78 | public List<ClickhouseTypeInfo> getSchema(Connection connection, String tableName) throws SQLException {
79 | /*
80 | get schema information for the provided table
81 | */
82 |
83 | String sql = String.format("DESCRIBE %s", tableName);
84 | PreparedStatement schemaStmnt = connection.prepareStatement(sql);
85 | ResultSet rs = schemaStmnt.executeQuery();
86 |
87 | List<ClickhouseTypeInfo> schema = new ArrayList<>();
88 |
89 | Integer counter = 1;
90 | while (rs.next()){
91 | ClickhouseTypeInfo typeInfo = extractTypeInfo(rs, counter);
92 | schema.add(typeInfo);
93 | counter += 1;
94 | }
95 |
96 | return schema;
97 | }
98 |
99 | private void populateInsertStatement(Collection<JsonObject> recordsAsJson, PreparedStatement statement) {
100 | /*
101 | for every row (if it passes schema validation) add it to the statement
102 | */
103 |
104 | recordsAsJson.stream().forEach(rec -> {
105 |
106 | try{
107 |
108 | prepInsertStatementForRec(rec).forEach(r -> {
109 |
110 | try{
111 | statement.setObject(r._1(), r._2());
112 | }catch (SQLException e) {
113 | log.error("statement setObject error", e);
114 | }
115 |
116 | });
117 | statement.addBatch();
118 |
119 | }
120 | catch (MissedFieldException e){
121 | log.error("row schema validation failed", e);
122 | }
123 | catch (Exception e) {
124 | log.error("general error", e);
125 | }
126 | });
127 | }
128 |
129 | private Collection<Tuple2<Integer, Object>> prepInsertStatementForRec(JsonObject rec) throws RuntimeException {
130 |
131 | return schema.stream().map(field -> {
132 |
133 | JsonElement jsonElem = rec.get(field.fieldName);
134 | if(jsonElem == null && !field.isNullable) {
135 | String msg = String.format("rec: '%s' doesn't contain required field: '%s'", rec.toString(), field.fieldName);
136 | throw new MissedFieldException(msg);
137 | } else {
138 | Object val = null;
139 | if(jsonElem != null)
140 | val = jsonElem.getAsString();
141 | return new Tuple2<>(field.idx, val);
142 | }
143 |
144 | }).collect(Collectors.toList());
145 | }
146 |
147 | private ClickhouseTypeInfo extractTypeInfo(ResultSet rs, Integer idx) throws SQLException {
148 | /*
149 | gets clickhouse column info for the current row (from rs),
150 | extracts the column name and type (str),
151 | checks whether the type is Nullable,
152 | returns ClickhouseTypeInfo(column_idx, name, type_name, isNullable)
153 | */
154 |
155 | String cName = rs.getString("name");
156 | String cType = rs.getString("type");
157 |
158 | Matcher m = clickhouseTypePattern.matcher(cType);
159 | if(m.matches()) {
160 | String typeName = m.group(1);
161 | return new ClickhouseTypeInfo(idx, cName, typeName, true);
162 | }
163 |
164 | return new ClickhouseTypeInfo(idx, cName, cType, false);
165 | }
166 |
167 | private void logSchemaInfo(){
168 | log.info("schema information");
169 | this.schema.stream()
170 | .forEach(f -> {
171 | String msg = String.format("%s: %s %s [is nullable: %s]", f.idx, f.fieldName, f.strType, f.isNullable);
172 | log.info(msg);
173 | });
174 | }
175 | }
--------------------------------------------------------------------------------
/src/main/java/com/grabds/kafka/connect/clickhouse/ClickhouseTypeInfo.java:
--------------------------------------------------------------------------------
1 | package com.grabds.kafka.connect.clickhouse;
2 |
3 | class ClickhouseTypeInfo
4 | {
5 | public Integer idx;
6 | public String fieldName;
7 | public String strType;
8 | public Boolean isNullable;
9 |
10 | public ClickhouseTypeInfo(Integer idx, String fieldName, String strType, Boolean isNullable) {
11 | this.idx = idx;
12 | this.fieldName = fieldName;
13 | this.strType = strType;
14 | this.isNullable = isNullable;
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/src/main/java/com/grabds/kafka/connect/decoders/JsonDecoder.java:
--------------------------------------------------------------------------------
1 | package com.grabds.kafka.connect.decoders;
2 |
3 | import com.google.gson.*;
4 | import org.apache.logging.log4j.LogManager;
5 | import org.apache.logging.log4j.Logger;
6 | import java.util.Collection;
7 | import java.util.Optional;
8 | import java.util.stream.Collectors;
9 |
10 | public class JsonDecoder {
11 |
12 | private static Logger log = LogManager.getLogger(JsonDecoder.class);
13 | private Gson gson;
14 |
15 | public JsonDecoder() {
16 |
17 | gson = new GsonBuilder()
18 | .setFieldNamingPolicy(FieldNamingPolicy.UPPER_CAMEL_CASE)
19 | .setDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS")
20 | .create();
21 | }
22 |
23 | public JsonObject decode(String str){
24 | return gson.fromJson(str, JsonObject.class);
25 | }
26 |
27 | public Collection<JsonObject> tryDecode(Collection<String> strList){
28 | /*
29 | return the collection of successfully decoded records
30 | */
31 |
32 | Collection<JsonObject> jsonRecords = strList.stream()
33 | .map(r -> {
34 |
35 | Optional<JsonObject> oJson;
36 | try{
37 | oJson = Optional.of(this.decode(r));
38 | }catch (JsonSyntaxException e){
39 | String msg = String.format("incorrect json format for record: '%s'", r);
40 | log.error(msg, e);
41 | oJson = Optional.empty();
42 | }
43 |
44 | return oJson;
45 | })
46 | .filter(Optional::isPresent)
47 | .map(Optional::get)
48 | .collect(Collectors.toList());
49 |
50 | return jsonRecords;
51 | }
52 | }
--------------------------------------------------------------------------------
/src/main/java/com/grabds/kafka/connect/exceptions/MissedFieldException.java:
--------------------------------------------------------------------------------
1 | package com.grabds.kafka.connect.exceptions;
2 |
3 | public class MissedFieldException extends RuntimeException {
4 |
5 | public MissedFieldException(String message) {
6 | super(message);
7 | }
8 | }
9 |
10 |
--------------------------------------------------------------------------------
/src/test/java/com/grabds/kafka/connect/ClickhouseTest.java:
--------------------------------------------------------------------------------
1 | package com.grabds.kafka.connect;
2 |
3 | import com.google.gson.FieldNamingPolicy;
4 | import com.google.gson.Gson;
5 | import com.google.gson.GsonBuilder;
6 | import com.google.gson.JsonObject;
7 | import com.grabds.kafka.connect.clickhouse.ClickhouseSvcImpl;
8 | import org.junit.Test;
9 | import ru.yandex.clickhouse.ClickHouseDataSource;
10 | import ru.yandex.clickhouse.settings.ClickHouseProperties;
11 |
12 | import java.sql.Connection;
13 | import java.sql.Date;
14 | import java.sql.PreparedStatement;
15 | import java.util.*;
16 | import java.util.stream.Collectors;
17 |
18 | import static org.junit.Assert.assertTrue;
19 |
20 |
21 | public class ClickhouseTest {
22 |
23 | String serverName = "internal-af67bdeb33cac11e8a4310671e315a7c-1771144458.ap-southeast-1.elb.amazonaws.com";
24 | Integer serverPort = 8123;
25 | String serverUri = String.format("jdbc:clickhouse://%s:%s", serverName, serverPort);
26 | String dbName = "DB01";
27 |
28 |
29 | @Test
30 | public void clickhouseSvc() throws Exception {
31 |
32 | Gson gson = new GsonBuilder()
33 | .setFieldNamingPolicy(FieldNamingPolicy.UPPER_CAMEL_CASE)
34 | .setDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS")
35 | .create();
36 |
37 | List<String> records = Arrays.asList(
38 | "{\"UpdateDate\": \"2017-11-08\", \"GeoHash\":\"geo1000\", \"NBooked\":\"101\", \"N1\": 10 }"
39 | );
40 |
41 | List<JsonObject> jsonRecs = records.stream().map(rec -> gson.fromJson(rec, JsonObject.class)).collect(Collectors.toList());
42 |
43 |
44 | ClickhouseSvcImpl svc = new ClickhouseSvcImpl(serverUri, dbName, "Table2");
45 | Integer r = svc.batchProcess(jsonRecs);
46 |
47 | assertTrue(r >= 0);
48 | }
49 |
50 | @Test
51 | public void testInsert() throws Exception {
52 |
53 | ClickHouseProperties properties = new ClickHouseProperties();
54 | properties.setDatabase(dbName);
55 |
56 | ClickHouseDataSource dataSource = new ClickHouseDataSource(serverUri, properties);
57 | Connection connection = dataSource.getConnection();
58 |
59 | PreparedStatement statement = connection.prepareStatement("INSERT INTO Table1_c (UpdateDate, GeoHash, NBooked) VALUES (?, ?, ?)");
60 |
61 | statement.setObject(1, Date.valueOf("2017-11-08"));
62 | statement.setObject(2, "geo200");
63 | statement.setObject(3, 21);
64 | statement.addBatch();
65 |
66 | int[] r = statement.executeBatch();
67 |
68 | assertTrue(r.length == 1);
69 | }
70 |
71 | }
--------------------------------------------------------------------------------
/src/test/java/com/grabds/kafka/connect/JsonDecoderTest.java:
--------------------------------------------------------------------------------
1 | package com.grabds.kafka.connect;
2 |
3 | import static org.junit.Assert.assertTrue;
4 |
5 | import com.google.gson.*;
6 | import com.grabds.kafka.connect.decoders.JsonDecoder;
7 | import org.junit.Test;
8 | import java.util.*;
9 |
10 | public class JsonDecoderTest {
11 |
12 | @Test
13 | public void testJsonDecode() throws Exception {
14 |
15 | JsonDecoder decoder = new JsonDecoder();
16 |
17 | String recordStr = "{\"name\": \"user100\"}";
18 | JsonObject json = decoder.decode(recordStr);
19 | assertTrue(json != null);
20 | }
21 |
22 | @Test
23 | public void testJsonDecodeList() throws Exception {
24 | JsonDecoder decoder = new JsonDecoder();
25 |
26 | Collection<String> records = Arrays.asList(
27 | "{\"name user100\"}",
28 | "{\"name\": \"user100\"}"
29 | );
30 |
31 | Collection<JsonObject> jsonRecords = decoder.tryDecode(records);
32 |
33 | // one record has an incorrect format and is dropped
34 | assertTrue(jsonRecords.size() == 1);
35 | }
36 | }
--------------------------------------------------------------------------------