├── .gitignore ├── LICENSE ├── README.md ├── bin ├── create-topics.sh └── createInteractiveQueryTopics.sh ├── build.gradle ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── libs ├── connect │ └── kafka-connect-jdbc-h2.jar └── kafka_2.12-1.0.0.tgz ├── settings.gradle └── src ├── main ├── java │ └── bbejeck │ │ ├── chapter_2 │ │ ├── consumer │ │ │ └── ThreadedConsumerExample.java │ │ ├── partitioner │ │ │ └── PurchaseKeyPartitioner.java │ │ └── producer │ │ │ └── SimpleProducer.java │ │ ├── chapter_3 │ │ ├── KafkaStreamsYellingApp.java │ │ ├── ZMartKafkaStreamsAdvancedReqsApp.java │ │ ├── ZMartKafkaStreamsApp.java │ │ └── service │ │ │ └── SecurityDBService.java │ │ ├── chapter_4 │ │ ├── KafkaStreamsJoinsApp.java │ │ ├── ZMartKafkaStreamsAddStateApp.java │ │ ├── joiner │ │ │ └── PurchaseJoiner.java │ │ ├── partitioner │ │ │ └── RewardsStreamPartitioner.java │ │ ├── timestamp_extractor │ │ │ └── TransactionTimestampExtractor.java │ │ └── transformer │ │ │ └── PurchaseRewardTransformer.java │ │ ├── chapter_5 │ │ ├── AggregationsAndReducingExample.java │ │ ├── CountingWindowingAndKtableJoinExample.java │ │ ├── GlobalKTableExample.java │ │ ├── KStreamVsKTableExample.java │ │ └── timestamp_extractor │ │ │ └── StockTransactionTimestampExtractor.java │ │ ├── chapter_6 │ │ ├── CoGroupingApplication.java │ │ ├── PopsHopsApplication.java │ │ ├── StockPerformanceApplication.java │ │ ├── StockPerformanceStreamsAndProcessorApplication.java │ │ ├── StockPerformanceStreamsAndProcessorMultipleValuesApplication.java │ │ ├── ZMartProcessorApp.java │ │ ├── cancellation │ │ │ └── StockPerformanceCancellingProcessor.java │ │ ├── processor │ │ │ ├── BeerPurchaseProcessor.java │ │ │ ├── KStreamPrinter.java │ │ │ ├── MapValueProcessor.java │ │ │ ├── StockPerformanceProcessor.java │ │ │ ├── UpperCaseProcessor.java │ │ │ └── cogrouping │ │ │ │ ├── ClickEventProcessor.java │ │ │ │ ├── CogroupingMethodHandleProcessor.java │ │ │ │ ├── 
CogroupingProcessor.java │ │ │ │ ├── CogroupingPunctuator.java │ │ │ │ ├── CogroupingSystemTimeProcessor.java │ │ │ │ └── StockTransactionProcessor.java │ │ ├── punctuator │ │ │ └── StockPerformancePunctuator.java │ │ └── transformer │ │ │ ├── StockPerformanceMultipleValuesTransformer.java │ │ │ └── StockPerformanceTransformer.java │ │ ├── chapter_7 │ │ ├── CoGroupingListeningExampleApplication.java │ │ ├── StockPerformanceStreamsAndProcessorMetricsApplication.java │ │ ├── ZMartKafkaStreamsAdvancedReqsMetricsApp.java │ │ ├── interceptors │ │ │ ├── StockTransactionConsumerInterceptor.java │ │ │ └── ZMartProducerInterceptor.java │ │ ├── restore │ │ │ └── LoggingStateRestoreListener.java │ │ └── transformer │ │ │ └── StockPerformanceMetricsTransformer.java │ │ ├── chapter_8 │ │ ├── StockPerformanceStreamsProcessorTopology.java │ │ └── ZMartTopology.java │ │ ├── chapter_9 │ │ ├── DeserializerErrorHandler.java │ │ ├── StockCountsStreamsConnectIntegrationApplication.java │ │ ├── StockPerformanceInteractiveQueryApplication.java │ │ ├── StockPerformanceInteractiveQueryDataProducer.java │ │ └── restore │ │ │ └── StateRestoreHttpReporter.java │ │ ├── clients │ │ ├── consumer │ │ │ ├── ConsumerProperties.java │ │ │ └── TypedConsoleConsumer.java │ │ └── producer │ │ │ ├── KeyValueMultiTopicConsoleProducer.java │ │ │ └── MockDataProducer.java │ │ ├── collectors │ │ ├── FixedSizePriorityQueue.java │ │ └── StockTransactionCollector.java │ │ ├── model │ │ ├── BeerPurchase.java │ │ ├── ClickEvent.java │ │ ├── CorrelatedPurchase.java │ │ ├── Currency.java │ │ ├── CustomerTransactions.java │ │ ├── FinancialNews.java │ │ ├── PublicTradedCompany.java │ │ ├── Purchase.java │ │ ├── PurchaseKey.java │ │ ├── PurchasePattern.java │ │ ├── RewardAccumulator.java │ │ ├── ShareVolume.java │ │ ├── StockPerformance.java │ │ ├── StockTickerData.java │ │ ├── StockTransaction.java │ │ ├── StockTransactionSummary.java │ │ ├── TransactionCount.java │ │ ├── TransactionSummary.java │ │ └── Tweet.java │ 
│ ├── util │ │ ├── Topics.java │ │ ├── collection │ │ │ └── Tuple.java │ │ ├── datagen │ │ │ ├── CustomDateGenerator.java │ │ │ └── DataGenerator.java │ │ ├── db │ │ │ └── DBServer.java │ │ ├── serde │ │ │ └── StreamsSerdes.java │ │ └── serializer │ │ │ ├── FixedSizePriorityQueueAdapter.java │ │ │ ├── JsonDeserializer.java │ │ │ └── JsonSerializer.java │ │ └── webserver │ │ └── InteractiveQueryServer.java └── resources │ ├── conf │ ├── connect-standalone.properties │ └── connector-jdbc.properties │ ├── ksql │ ├── create_stream.txt │ ├── create_table.txt │ └── stock_performance_query.ksql │ ├── log4j.properties │ └── webserver │ ├── interactiveQueriesApplication.html │ ├── jquery-3.2.1.min.js │ ├── jquery-ui.min.js │ ├── jquery-ui.theme.css │ ├── jquery.color-2.1.2.js │ └── jquery.js └── test ├── java └── bbejeck │ ├── MockKeyValueStore.java │ ├── chapter_3 │ └── KafkaStreamsYellingIntegrationTest.java │ ├── chapter_4 │ └── TransformerTest.java │ ├── chapter_6 │ └── processor │ │ └── cogrouping │ │ └── CogroupingMethodHandleProcessorTest.java │ ├── chapter_8 │ ├── StockPerformanceStreamsProcessorTopologyTest.java │ └── ZMartTopologyTest.java │ └── util │ └── serializer │ ├── EventTransactionTupleSerdeTest.java │ └── PurchaseKeySerdeTest.java └── resources └── log4j.properties /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | ### Gradle template 3 | .gradle 4 | /build/ 5 | 6 | /src/main/java/bbejeck/holding_pen 7 | streaming-workflows 8 | 9 | .idea 10 | 11 | 12 | 13 | # Ignore Gradle GUI config 14 | gradle-app.setting 15 | 16 | # Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored) 17 | !gradle-wrapper.jar 18 | 19 | # Cache of project 20 | .gradletasknamecache 21 | 22 | # # Work around https://youtrack.jetbrains.com/issue/IDEA-116898 23 | # gradle/wrapper/gradle-wrapper.properties 24 | ### JetBrains template 25 | # Covers JetBrains IDEs: IntelliJ, 
RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm 26 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 27 | *.ipr 28 | # User-specific stuff: 29 | .idea/**/workspace.xml 30 | .idea/**/tasks.xml 31 | 32 | 33 | 34 | # Sensitive or high-churn files: 35 | .idea/**/dataSources/ 36 | .idea/**/dataSources.ids 37 | .idea/**/dataSources.xml 38 | .idea/**/dataSources.local.xml 39 | .idea/**/sqlDataSources.xml 40 | .idea/**/dynamic.xml 41 | .idea/**/uiDesigner.xml 42 | 43 | # Gradle: 44 | .idea/**/gradle.xml 45 | .idea/**/libraries 46 | 47 | # Mongo Explorer plugin: 48 | .idea/**/mongoSettings.xml 49 | 50 | ## File-based project format: 51 | *.iws 52 | 53 | ## Plugin-specific files: 54 | 55 | # IntelliJ 56 | /out/ 57 | 58 | # mpeltonen/sbt-idea plugin 59 | .idea_modules/ 60 | 61 | # JIRA plugin 62 | atlassian-ide-plugin.xml 63 | 64 | # Crashlytics plugin (for Android Studio and IntelliJ) 65 | com_crashlytics_export_strings.xml 66 | crashlytics.properties 67 | crashlytics-build.properties 68 | fabric.properties 69 | ### Java template 70 | 71 | # Log file 72 | *.log 73 | 74 | # BlueJ files 75 | *.ctxt 76 | 77 | # Mobile Tools for Java (J2ME) 78 | .mtj.tmp/ 79 | 80 | # Package Files # 81 | *.war 82 | *.ear 83 | *.zip 84 | *.tar.gz 85 | *.rar 86 | 87 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml 88 | hs_err_pid* 89 | -------------------------------------------------------------------------------- /bin/create-topics.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | 4 | KAFKA_HOME=$1 5 | ZK_HOST=$2 6 | ZK_PORT=$3 7 | 8 | topics="src-topic patterns rewards purchases stocks stocks-out transaction-summary coffee electronics transactions customer_transactions events cogrouped-results stock-performance stock-transactions stock-ticker-table stock-ticker-stream stock-volume-by-company companies clients financial-news 
pops-hops-purchases international-sales domestic-sales stock-counts transaction-count session-transactions sector-transaction-counts" 9 | 10 | for topic in ${topics}; do 11 | echo "attempting to create topic ${topic}" 12 | ${KAFKA_HOME}/bin/kafka-topics.sh --create --topic ${topic} --partitions 1 --replication-factor 1 --zookeeper ${ZK_HOST}:${ZK_PORT} 13 | done 14 | -------------------------------------------------------------------------------- /bin/createInteractiveQueryTopics.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bbejeck/kafka-streams-in-action/75dff32abcf107ca2dfd4fc9313c96f8b4411efb/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | #Sat Jul 16 21:02:57 EDT 2016 2 | distributionBase=GRADLE_USER_HOME 3 | distributionPath=wrapper/dists 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | distributionUrl=https\://services.gradle.org/distributions/gradle-2.11-all.zip 7 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ############################################################################## 4 | ## 5 | ## Gradle start up script for UN*X 6 | ## 7 | ############################################################################## 8 | 9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 
10 | DEFAULT_JVM_OPTS="" 11 | 12 | APP_NAME="Gradle" 13 | APP_BASE_NAME=`basename "$0"` 14 | 15 | # Use the maximum available, or set MAX_FD != -1 to use that value. 16 | MAX_FD="maximum" 17 | 18 | warn ( ) { 19 | echo "$*" 20 | } 21 | 22 | die ( ) { 23 | echo 24 | echo "$*" 25 | echo 26 | exit 1 27 | } 28 | 29 | # OS specific support (must be 'true' or 'false'). 30 | cygwin=false 31 | msys=false 32 | darwin=false 33 | case "`uname`" in 34 | CYGWIN* ) 35 | cygwin=true 36 | ;; 37 | Darwin* ) 38 | darwin=true 39 | ;; 40 | MINGW* ) 41 | msys=true 42 | ;; 43 | esac 44 | 45 | # Attempt to set APP_HOME 46 | # Resolve links: $0 may be a link 47 | PRG="$0" 48 | # Need this for relative symlinks. 49 | while [ -h "$PRG" ] ; do 50 | ls=`ls -ld "$PRG"` 51 | link=`expr "$ls" : '.*-> \(.*\)$'` 52 | if expr "$link" : '/.*' > /dev/null; then 53 | PRG="$link" 54 | else 55 | PRG=`dirname "$PRG"`"/$link" 56 | fi 57 | done 58 | SAVED="`pwd`" 59 | cd "`dirname \"$PRG\"`/" >/dev/null 60 | APP_HOME="`pwd -P`" 61 | cd "$SAVED" >/dev/null 62 | 63 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 64 | 65 | # Determine the Java command to use to start the JVM. 66 | if [ -n "$JAVA_HOME" ] ; then 67 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 68 | # IBM's JDK on AIX uses strange locations for the executables 69 | JAVACMD="$JAVA_HOME/jre/sh/java" 70 | else 71 | JAVACMD="$JAVA_HOME/bin/java" 72 | fi 73 | if [ ! -x "$JAVACMD" ] ; then 74 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 75 | 76 | Please set the JAVA_HOME variable in your environment to match the 77 | location of your Java installation." 78 | fi 79 | else 80 | JAVACMD="java" 81 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 82 | 83 | Please set the JAVA_HOME variable in your environment to match the 84 | location of your Java installation." 85 | fi 86 | 87 | # Increase the maximum file descriptors if we can. 
88 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then 89 | MAX_FD_LIMIT=`ulimit -H -n` 90 | if [ $? -eq 0 ] ; then 91 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 92 | MAX_FD="$MAX_FD_LIMIT" 93 | fi 94 | ulimit -n $MAX_FD 95 | if [ $? -ne 0 ] ; then 96 | warn "Could not set maximum file descriptor limit: $MAX_FD" 97 | fi 98 | else 99 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 100 | fi 101 | fi 102 | 103 | # For Darwin, add options to specify how the application appears in the dock 104 | if $darwin; then 105 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 106 | fi 107 | 108 | # For Cygwin, switch paths to Windows format before running java 109 | if $cygwin ; then 110 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 111 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 112 | JAVACMD=`cygpath --unix "$JAVACMD"` 113 | 114 | # We build the pattern for arguments to be converted via cygpath 115 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 116 | SEP="" 117 | for dir in $ROOTDIRSRAW ; do 118 | ROOTDIRS="$ROOTDIRS$SEP$dir" 119 | SEP="|" 120 | done 121 | OURCYGPATTERN="(^($ROOTDIRS))" 122 | # Add a user-defined pattern to the cygpath arguments 123 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 124 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 125 | fi 126 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 127 | i=0 128 | for arg in "$@" ; do 129 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 130 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 131 | 132 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 133 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 134 | else 135 | eval `echo args$i`="\"$arg\"" 136 | fi 137 | i=$((i+1)) 138 | done 139 | case $i in 140 | (0) set -- ;; 141 | (1) set -- "$args0" ;; 142 | (2) set -- "$args0" "$args1" ;; 143 | (3) set -- "$args0" "$args1" 
"$args2" ;; 144 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;; 145 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 146 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 147 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 148 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 149 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 150 | esac 151 | fi 152 | 153 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules 154 | function splitJvmOpts() { 155 | JVM_OPTS=("$@") 156 | } 157 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS 158 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" 159 | 160 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" 161 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @if "%DEBUG%" == "" @echo off 2 | @rem ########################################################################## 3 | @rem 4 | @rem Gradle startup script for Windows 5 | @rem 6 | @rem ########################################################################## 7 | 8 | @rem Set local scope for the variables with windows NT shell 9 | if "%OS%"=="Windows_NT" setlocal 10 | 11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 12 | set DEFAULT_JVM_OPTS= 13 | 14 | set DIRNAME=%~dp0 15 | if "%DIRNAME%" == "" set DIRNAME=. 16 | set APP_BASE_NAME=%~n0 17 | set APP_HOME=%DIRNAME% 18 | 19 | @rem Find java.exe 20 | if defined JAVA_HOME goto findJavaFromJavaHome 21 | 22 | set JAVA_EXE=java.exe 23 | %JAVA_EXE% -version >NUL 2>&1 24 | if "%ERRORLEVEL%" == "0" goto init 25 | 26 | echo. 
27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 28 | echo. 29 | echo Please set the JAVA_HOME variable in your environment to match the 30 | echo location of your Java installation. 31 | 32 | goto fail 33 | 34 | :findJavaFromJavaHome 35 | set JAVA_HOME=%JAVA_HOME:"=% 36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 37 | 38 | if exist "%JAVA_EXE%" goto init 39 | 40 | echo. 41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 42 | echo. 43 | echo Please set the JAVA_HOME variable in your environment to match the 44 | echo location of your Java installation. 45 | 46 | goto fail 47 | 48 | :init 49 | @rem Get command-line arguments, handling Windows variants 50 | 51 | if not "%OS%" == "Windows_NT" goto win9xME_args 52 | if "%@eval[2+2]" == "4" goto 4NT_args 53 | 54 | :win9xME_args 55 | @rem Slurp the command line arguments. 56 | set CMD_LINE_ARGS= 57 | set _SKIP=2 58 | 59 | :win9xME_args_slurp 60 | if "x%~1" == "x" goto execute 61 | 62 | set CMD_LINE_ARGS=%* 63 | goto execute 64 | 65 | :4NT_args 66 | @rem Get arguments from the 4NT Shell from JP Software 67 | set CMD_LINE_ARGS=%$ 68 | 69 | :execute 70 | @rem Setup the command line 71 | 72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 73 | 74 | @rem Execute Gradle 75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% 76 | 77 | :end 78 | @rem End local scope for the variables with windows NT shell 79 | if "%ERRORLEVEL%"=="0" goto mainEnd 80 | 81 | :fail 82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 83 | rem the _cmd.exe /c_ return code! 
package bbejeck.chapter_2.consumer;

import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.util.Collections;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

/**
 * Runs one KafkaConsumer per partition, each on its own thread.
 * This example expects a topic "test-topic" to exist with 2 partitions.
 */
public class ThreadedConsumerExample {

    // Checked by every poll loop; volatile so the write in stopConsuming()
    // is visible across the consumer threads.
    private volatile boolean doneConsuming = false;
    private final int numberPartitions;
    private ExecutorService executorService;

    public ThreadedConsumerExample(int numberPartitions) {
        this.numberPartitions = numberPartitions;
    }

    /** Starts one consumer thread per partition on a fixed-size pool. */
    public void startConsuming() {
        executorService = Executors.newFixedThreadPool(numberPartitions);
        Properties properties = getConsumerProps();

        for (int i = 0; i < numberPartitions; i++) {
            executorService.submit(getConsumerThread(properties));
        }
    }

    /**
     * Builds the poll loop run by each thread. The consumer is created inside
     * the Runnable because KafkaConsumer is not thread-safe.
     */
    private Runnable getConsumerThread(Properties properties) {
        return () -> {
            Consumer<String, String> consumer = null;
            try {
                consumer = new KafkaConsumer<>(properties);
                consumer.subscribe(Collections.singletonList("test-topic"));
                while (!doneConsuming) {
                    ConsumerRecords<String, String> records = consumer.poll(5000);
                    for (ConsumerRecord<String, String> record : records) {
                        String message = String.format("Consumed: key = %s value = %s with offset = %d partition = %d",
                                record.key(), record.value(), record.offset(), record.partition());
                        System.out.println(message);
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                if (consumer != null) {
                    consumer.close(); // releases network resources and leaves the group cleanly
                }
            }
        };
    }

    /**
     * Signals the poll loops to exit and shuts the pool down.
     *
     * Fix: the original called awaitTermination() BEFORE shutting the pool
     * down, so the pool could never reach the terminated state and the call
     * always blocked for the full timeout. shutdown() must come first; only
     * if the workers fail to finish in time do we force-stop them.
     *
     * @throws InterruptedException if interrupted while waiting for the pool
     */
    public void stopConsuming() throws InterruptedException {
        doneConsuming = true;
        executorService.shutdown();
        if (!executorService.awaitTermination(10000, TimeUnit.MILLISECONDS)) {
            executorService.shutdownNow(); // stragglers did not exit in time
        }
    }

    /** Consumer settings for a localhost broker with String key/value deserializers. */
    private Properties getConsumerProps() {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", "localhost:9092");
        properties.put("group.id", "simple-consumer-example");
        properties.put("auto.offset.reset", "earliest");
        properties.put("enable.auto.commit", "true");
        properties.put("auto.commit.interval.ms", "3000");
        properties.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        return properties;
    }

    /**
     * Change the constructor arg to match the actual number of partitions.
     */
    public static void main(String[] args) throws InterruptedException {
        ThreadedConsumerExample consumerExample = new ThreadedConsumerExample(2);
        consumerExample.startConsuming();
        Thread.sleep(60000); //Run for one minute
        consumerExample.stopConsuming();
    }
}
package bbejeck.chapter_2.producer;

import bbejeck.chapter_2.partitioner.PurchaseKeyPartitioner;
import bbejeck.model.PurchaseKey;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Date;
import java.util.Properties;

/**
 * Example of a simple producer, not meant to run as a stand alone example.
 *
 * To run this example against a live broker, change the ProducerRecord below
 * to use a real topic name and remove the "partitioner.class" property (the
 * original comment referenced "line #33", which has drifted out of date).
 */
public class SimpleProducer {

    public static void main(String[] args) {

        Properties properties = new Properties();
        properties.put("bootstrap.servers", "localhost:9092");
        // NOTE(review): the key is a PurchaseKey but the key serializer is
        // StringSerializer — this example is illustrative only and would fail
        // serialization against a real broker; confirm before reuse.
        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("acks", "1");                // wait for the partition leader only
        properties.put("retries", "3");
        properties.put("compression.type", "snappy");
        //This line is in for demonstration purposes only
        properties.put("partitioner.class", PurchaseKeyPartitioner.class.getName());

        PurchaseKey key = new PurchaseKey("12334568", new Date());

        // try-with-resources flushes pending records and closes the producer
        try (Producer<PurchaseKey, String> producer = new KafkaProducer<>(properties)) {
            ProducerRecord<PurchaseKey, String> record = new ProducerRecord<>("some-topic", key, "value");

            // Surfaces asynchronous send failures; the returned Future is
            // intentionally ignored (the original bound it to an unused local).
            Callback callback = (metadata, exception) -> {
                if (exception != null) {
                    exception.printStackTrace();
                }
            };

            producer.send(record, callback);
        }
    }
}
/*
 * Copyright 2016 Bill Bejeck
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package bbejeck.chapter_3;

import bbejeck.clients.producer.MockDataProducer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.Consumed;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Printed;
import org.apache.kafka.streams.kstream.Produced;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Properties;

/**
 * "Hello world" Kafka Streams application: reads text from "src-topic",
 * upper-cases every value, and writes the result to "out-topic" while also
 * printing it to stdout.
 */
public class KafkaStreamsYellingApp {

    private static final Logger LOG = LoggerFactory.getLogger(KafkaStreamsYellingApp.class);

    public static void main(String[] args) throws Exception {

        //Used only to produce data for this application, not typical usage
        MockDataProducer.produceRandomTextData();

        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "yelling_app_id");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        StreamsConfig streamsConfig = new StreamsConfig(props);

        Serde<String> stringSerde = Serdes.String();
        StreamsBuilder builder = new StreamsBuilder();

        // Source -> upper-case transform in a single chain; same topology as
        // declaring the source and mapped streams separately.
        KStream<String, String> shoutingStream =
                builder.stream("src-topic", Consumed.with(stringSerde, stringSerde))
                       .mapValues(String::toUpperCase);

        shoutingStream.to("out-topic", Produced.with(stringSerde, stringSerde));
        shoutingStream.print(Printed.<String, String>toSysOut().withLabel("Yelling App"));

        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
        LOG.info("Hello World Yelling App Started");
        kafkaStreams.start();
        // Demo app: run for a fixed window, then shut everything down.
        Thread.sleep(35000);
        LOG.info("Shutting down the Yelling APP now");
        kafkaStreams.close();
        MockDataProducer.shutdown();
    }
}
/*
 * Copyright 2016 Bill Bejeck
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package bbejeck.chapter_3;

import bbejeck.clients.producer.MockDataProducer;
import bbejeck.model.Purchase;
import bbejeck.model.PurchasePattern;
import bbejeck.model.RewardAccumulator;
import bbejeck.util.serde.StreamsSerdes;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.Consumed;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Printed;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.processor.WallclockTimestampExtractor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Properties;

/**
 * ZMart's first Kafka Streams application: consumes purchase transactions,
 * masks credit card numbers, and fans the masked stream out into purchase
 * patterns, reward accumulations, and the raw (masked) purchases topic.
 */
public class ZMartKafkaStreamsApp {

    private static final Logger LOG = LoggerFactory.getLogger(ZMartKafkaStreamsApp.class);

    public static void main(String[] args) throws Exception {

        StreamsConfig streamsConfig = new StreamsConfig(getProperties());

        Serde<String> stringSerde = Serdes.String();
        Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
        Serde<PurchasePattern> purchasePatternSerde = StreamsSerdes.PurchasePatternSerde();
        Serde<RewardAccumulator> rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde();

        StreamsBuilder builder = new StreamsBuilder();

        // Mask credit card numbers immediately so no downstream node ever
        // sees the raw card data.
        KStream<String, Purchase> maskedPurchases = builder
                .stream("transactions", Consumed.with(stringSerde, purchaseSerde))
                .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

        // Branch 1: purchase patterns (what/where), for trend analysis.
        KStream<String, PurchasePattern> patterns =
                maskedPurchases.mapValues(purchase -> PurchasePattern.builder(purchase).build());
        patterns.print(Printed.<String, PurchasePattern>toSysOut().withLabel("patterns"));
        patterns.to("patterns", Produced.with(stringSerde, purchasePatternSerde));

        // Branch 2: customer reward accumulation.
        KStream<String, RewardAccumulator> rewards =
                maskedPurchases.mapValues(purchase -> RewardAccumulator.builder(purchase).build());
        rewards.print(Printed.<String, RewardAccumulator>toSysOut().withLabel("rewards"));
        rewards.to("rewards", Produced.with(stringSerde, rewardAccumulatorSerde));

        // Branch 3: the masked purchases themselves, for storage/audit.
        maskedPurchases.print(Printed.<String, Purchase>toSysOut().withLabel("purchases"));
        maskedPurchases.to("purchases", Produced.with(stringSerde, purchaseSerde));

        // used only to produce data for this application, not typical usage
        MockDataProducer.producePurchaseData();

        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
        LOG.info("ZMart First Kafka Streams Application Started");
        kafkaStreams.start();
        // Demo app: run for a fixed window, then shut everything down.
        Thread.sleep(65000);
        LOG.info("Shutting down the Kafka Streams Application now");
        kafkaStreams.close();
        MockDataProducer.shutdown();
    }

    /** Streams configuration for a single local broker. */
    private static Properties getProperties() {
        Properties props = new Properties();
        props.put(StreamsConfig.CLIENT_ID_CONFIG, "FirstZmart-Kafka-Streams-Client");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "zmart-purchases");
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "FirstZmart-Kafka-Streams-App");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, 1);
        props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, WallclockTimestampExtractor.class);
        return props;
    }
}
void saveRecord(Date date, String employeeId, String item) { 9 | System.out.println("Warning!! Found potential problem !! Saving transaction on "+date+" for "+employeeId+" item "+ item); 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/chapter_4/ZMartKafkaStreamsAddStateApp.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016 Bill Bejeck 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
 */

package bbejeck.chapter_4;

import bbejeck.chapter_4.partitioner.RewardsStreamPartitioner;
import bbejeck.chapter_4.transformer.PurchaseRewardTransformer;
import bbejeck.clients.producer.MockDataProducer;
import bbejeck.model.Purchase;
import bbejeck.model.PurchasePattern;
import bbejeck.model.RewardAccumulator;
import bbejeck.util.serde.StreamsSerdes;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.Consumed;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.Printed;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.processor.WallclockTimestampExtractor;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Properties;


/**
 * Chapter 4 demo: the ZMart purchase topology extended with a state store so
 * reward points accumulate per customer across purchases. The stream is first
 * re-partitioned by customer id (via {@code through} + a custom partitioner)
 * so that each customer's records are processed by the task owning its state.
 *
 * NOTE(review): generic type parameters (e.g. KStream&lt;String, Purchase&gt;,
 * StoreBuilder&lt;KeyValueStore&lt;String, Integer&gt;&gt;) appear to have been
 * stripped from this file by extraction -- restore before compiling.
 */
public class ZMartKafkaStreamsAddStateApp {

    private static final Logger LOG = LoggerFactory.getLogger(ZMartKafkaStreamsAddStateApp.class);

    public static void main(String[] args) throws Exception {

        StreamsConfig streamsConfig = new StreamsConfig(getProperties());

        Serde purchaseSerde = StreamsSerdes.PurchaseSerde();
        Serde purchasePatternSerde = StreamsSerdes.PurchasePatternSerde();
        Serde rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde();
        Serde stringSerde = Serdes.String();

        StreamsBuilder builder = new StreamsBuilder();

        // Mask credit card numbers before any downstream node sees the record.
        KStream purchaseKStream = builder.stream( "transactions", Consumed.with(stringSerde, purchaseSerde))
                .mapValues(p -> Purchase.builder(p).maskCreditCard().build());

        KStream patternKStream = purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build());

        patternKStream.print(Printed.toSysOut().withLabel("patterns"));
        patternKStream.to("patterns", Produced.with(stringSerde, purchasePatternSerde));



        // adding State to processor
        String rewardsStateStoreName = "rewardsPointsStore";
        RewardsStreamPartitioner streamPartitioner = new RewardsStreamPartitioner();

        // In-memory store holding the running reward-point total per customer.
        KeyValueBytesStoreSupplier storeSupplier = Stores.inMemoryKeyValueStore(rewardsStateStoreName);
        StoreBuilder> storeBuilder = Stores.keyValueStoreBuilder(storeSupplier, Serdes.String(), Serdes.Integer());

        builder.addStateStore(storeBuilder);

        // Re-partition by customer id so state lookups are local to one task.
        KStream transByCustomerStream = purchaseKStream.through( "customer_transactions", Produced.with(stringSerde, purchaseSerde, streamPartitioner));


        KStream statefulRewardAccumulator = transByCustomerStream.transformValues(() -> new PurchaseRewardTransformer(rewardsStateStoreName),
                rewardsStateStoreName);

        statefulRewardAccumulator.print(Printed.toSysOut().withLabel("rewards"));
        statefulRewardAccumulator.to("rewards", Produced.with(stringSerde, rewardAccumulatorSerde));



        // used only to produce data for this application, not typical usage
        MockDataProducer.producePurchaseData();


        LOG.info("Starting Adding State Example");
        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(),streamsConfig);
        LOG.info("ZMart Adding State Application Started");
        kafkaStreams.cleanUp();
        kafkaStreams.start();
        // Demo app: run for a fixed 65 seconds, then shut down.
        Thread.sleep(65000);
        LOG.info("Shutting down the Add State Application now");
        kafkaStreams.close();
        MockDataProducer.shutdown();
    }




    /**
     * Streams configuration: application/client ids, local broker,
     * replication factor 1, wall-clock record timestamps.
     */
    private static Properties getProperties() {
        Properties props = new Properties();
        props.put(StreamsConfig.CLIENT_ID_CONFIG, "AddingStateConsumer");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "AddingStateGroupId");
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "AddingStateAppId");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, 1);
        props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, WallclockTimestampExtractor.class);
        return props;
    }

}
-------------------------------------------------------------------------------- /src/main/java/bbejeck/chapter_4/joiner/PurchaseJoiner.java: --------------------------------------------------------------------------------
package bbejeck.chapter_4.joiner;

import bbejeck.model.CorrelatedPurchase;
import bbejeck.model.Purchase;
import org.apache.kafka.streams.kstream.ValueJoiner;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;


/**
 * Combines two (possibly null, for outer joins) purchases into a single
 * CorrelatedPurchase, tolerating either side being absent.
 */
public class PurchaseJoiner implements ValueJoiner {

    @Override
    public CorrelatedPurchase apply(Purchase purchase, Purchase otherPurchase) {

        CorrelatedPurchase.Builder builder = CorrelatedPurchase.newBuilder();

        // Either side may be null when this joiner is used in an outer join.
        Date purchaseDate = purchase != null ? purchase.getPurchaseDate() : null;
        Double price = purchase != null ? purchase.getPrice() : 0.0;
        String itemPurchased = purchase != null ? purchase.getItemPurchased() : null;

        Date otherPurchaseDate = otherPurchase != null ? otherPurchase.getPurchaseDate() : null;
        Double otherPrice = otherPurchase != null ? otherPurchase.getPrice() : 0.0;
        String otherItemPurchased = otherPurchase != null ?
otherPurchase.getItemPurchased() : null; 26 | 27 | List purchasedItems = new ArrayList<>(); 28 | 29 | if (itemPurchased != null) { 30 | purchasedItems.add(itemPurchased); 31 | } 32 | 33 | if (otherItemPurchased != null) { 34 | purchasedItems.add(otherItemPurchased); 35 | } 36 | 37 | String customerId = purchase != null ? purchase.getCustomerId() : null; 38 | String otherCustomerId = otherPurchase != null ? otherPurchase.getCustomerId() : null; 39 | 40 | builder.withCustomerId(customerId != null ? customerId : otherCustomerId) 41 | .withFirstPurchaseDate(purchaseDate) 42 | .withSecondPurchaseDate(otherPurchaseDate) 43 | .withItemsPurchased(purchasedItems) 44 | .withTotalAmount(price + otherPrice); 45 | 46 | return builder.build(); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/chapter_4/partitioner/RewardsStreamPartitioner.java: -------------------------------------------------------------------------------- 1 | package bbejeck.chapter_4.partitioner; 2 | 3 | import bbejeck.model.Purchase; 4 | import org.apache.kafka.streams.processor.StreamPartitioner; 5 | 6 | 7 | public class RewardsStreamPartitioner implements StreamPartitioner { 8 | 9 | @Override 10 | public Integer partition(String key, Purchase value, int numPartitions) { 11 | return value.getCustomerId().hashCode() % numPartitions; 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/chapter_4/timestamp_extractor/TransactionTimestampExtractor.java: -------------------------------------------------------------------------------- 1 | package bbejeck.chapter_4.timestamp_extractor; 2 | 3 | import bbejeck.model.Purchase; 4 | import org.apache.kafka.clients.consumer.ConsumerRecord; 5 | import org.apache.kafka.streams.processor.TimestampExtractor; 6 | 7 | 8 | public class TransactionTimestampExtractor implements TimestampExtractor { 9 | 10 | @Override 11 | public long 
extract(ConsumerRecord record, long previousTimestamp) { 12 | Purchase purchasePurchaseTransaction = (Purchase) record.value(); 13 | return purchasePurchaseTransaction.getPurchaseDate().getTime(); 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/chapter_4/transformer/PurchaseRewardTransformer.java: -------------------------------------------------------------------------------- 1 | package bbejeck.chapter_4.transformer; 2 | 3 | import bbejeck.model.Purchase; 4 | import bbejeck.model.RewardAccumulator; 5 | import org.apache.kafka.streams.kstream.ValueTransformer; 6 | import org.apache.kafka.streams.processor.ProcessorContext; 7 | import org.apache.kafka.streams.state.KeyValueStore; 8 | 9 | import java.util.Objects; 10 | 11 | 12 | public class PurchaseRewardTransformer implements ValueTransformer { 13 | 14 | private KeyValueStore stateStore; 15 | private final String storeName; 16 | private ProcessorContext context; 17 | 18 | public PurchaseRewardTransformer(String storeName) { 19 | Objects.requireNonNull(storeName,"Store Name can't be null"); 20 | this.storeName = storeName; 21 | } 22 | 23 | @Override 24 | @SuppressWarnings("unchecked") 25 | public void init(ProcessorContext context) { 26 | this.context = context; 27 | stateStore = (KeyValueStore) this.context.getStateStore(storeName); 28 | } 29 | 30 | @Override 31 | public RewardAccumulator transform(Purchase value) { 32 | RewardAccumulator rewardAccumulator = RewardAccumulator.builder(value).build(); 33 | Integer accumulatedSoFar = stateStore.get(rewardAccumulator.getCustomerId()); 34 | 35 | if (accumulatedSoFar != null) { 36 | rewardAccumulator.addRewardPoints(accumulatedSoFar); 37 | } 38 | stateStore.put(rewardAccumulator.getCustomerId(), rewardAccumulator.getTotalRewardPoints()); 39 | 40 | return rewardAccumulator; 41 | 42 | } 43 | 44 | @Override 45 | @SuppressWarnings("deprecation") 46 | public RewardAccumulator punctuate(long timestamp) 
{ 47 | return null; //no-op null values not forwarded. 48 | } 49 | 50 | @Override 51 | public void close() { 52 | //no-op 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/chapter_5/KStreamVsKTableExample.java: -------------------------------------------------------------------------------- 1 | package bbejeck.chapter_5; 2 | 3 | 4 | import bbejeck.clients.producer.MockDataProducer; 5 | import bbejeck.model.StockTickerData; 6 | import bbejeck.util.serde.StreamsSerdes; 7 | import org.apache.kafka.clients.consumer.ConsumerConfig; 8 | import org.apache.kafka.common.serialization.Serdes; 9 | import org.apache.kafka.streams.KafkaStreams; 10 | import org.apache.kafka.streams.StreamsBuilder; 11 | import org.apache.kafka.streams.StreamsConfig; 12 | import org.apache.kafka.streams.kstream.KStream; 13 | import org.apache.kafka.streams.kstream.KTable; 14 | import org.apache.kafka.streams.kstream.Printed; 15 | import org.apache.kafka.streams.processor.WallclockTimestampExtractor; 16 | import org.slf4j.Logger; 17 | import org.slf4j.LoggerFactory; 18 | 19 | import java.util.Properties; 20 | 21 | import static bbejeck.clients.producer.MockDataProducer.STOCK_TICKER_STREAM_TOPIC; 22 | import static bbejeck.clients.producer.MockDataProducer.STOCK_TICKER_TABLE_TOPIC; 23 | 24 | public class KStreamVsKTableExample { 25 | 26 | private static final Logger LOG = LoggerFactory.getLogger(KStreamVsKTableExample.class); 27 | 28 | public static void main(String[] args) throws Exception { 29 | 30 | StreamsConfig streamsConfig = new StreamsConfig(getProperties()); 31 | 32 | StreamsBuilder builder = new StreamsBuilder(); 33 | 34 | 35 | KTable stockTickerTable = builder.table(STOCK_TICKER_TABLE_TOPIC); 36 | KStream stockTickerStream = builder.stream(STOCK_TICKER_STREAM_TOPIC); 37 | 38 | stockTickerTable.toStream().print(Printed.toSysOut().withLabel("Stocks-KTable")); 39 | stockTickerStream.print(Printed.toSysOut().withLabel( 
"Stocks-KStream")); 40 | 41 | int numberCompanies = 3; 42 | int iterations = 3; 43 | 44 | MockDataProducer.produceStockTickerData(numberCompanies, iterations); 45 | 46 | KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig); 47 | LOG.info("KTable vs KStream output started"); 48 | kafkaStreams.cleanUp(); 49 | kafkaStreams.start(); 50 | Thread.sleep(15000); 51 | LOG.info("Shutting down KTable vs KStream Application now"); 52 | kafkaStreams.close(); 53 | MockDataProducer.shutdown(); 54 | 55 | } 56 | 57 | private static Properties getProperties() { 58 | Properties props = new Properties(); 59 | props.put(StreamsConfig.APPLICATION_ID_CONFIG, "KStreamVSKTable_app"); 60 | props.put(ConsumerConfig.GROUP_ID_CONFIG, "KStreamVSKTable_group"); 61 | props.put(ConsumerConfig.CLIENT_ID_CONFIG, "KStreamVSKTable_client"); 62 | props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest"); 63 | props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "30000"); 64 | props.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, "15000"); 65 | //props.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG,"0"); 66 | props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); 67 | props.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, "1"); 68 | props.put(ConsumerConfig.METADATA_MAX_AGE_CONFIG, "10000"); 69 | props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName()); 70 | props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, StreamsSerdes.StockTickerSerde().getClass().getName()); 71 | props.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, 1); 72 | props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, WallclockTimestampExtractor.class); 73 | return props; 74 | 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/chapter_5/timestamp_extractor/StockTransactionTimestampExtractor.java: 
-------------------------------------------------------------------------------- 1 | package bbejeck.chapter_5.timestamp_extractor; 2 | 3 | 4 | import bbejeck.model.StockTransaction; 5 | import bbejeck.util.datagen.DataGenerator; 6 | import org.apache.kafka.clients.consumer.ConsumerRecord; 7 | import org.apache.kafka.streams.processor.TimestampExtractor; 8 | 9 | import java.util.Date; 10 | 11 | public class StockTransactionTimestampExtractor implements TimestampExtractor { 12 | 13 | @Override 14 | public long extract(ConsumerRecord consumerRecord, long l) { 15 | 16 | if(! (consumerRecord.value() instanceof StockTransaction)) { 17 | return System.currentTimeMillis(); 18 | } 19 | 20 | StockTransaction stockTransaction = (StockTransaction) consumerRecord.value(); 21 | Date transactionDate = stockTransaction.getTransactionTimestamp(); 22 | return (transactionDate != null) ? transactionDate.getTime() : consumerRecord.timestamp(); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/chapter_6/CoGroupingApplication.java: -------------------------------------------------------------------------------- 1 | package bbejeck.chapter_6; 2 | 3 | 4 | import bbejeck.chapter_6.processor.KStreamPrinter; 5 | import bbejeck.chapter_6.processor.cogrouping.ClickEventProcessor; 6 | import bbejeck.chapter_6.processor.cogrouping.CogroupingProcessor; 7 | import bbejeck.chapter_6.processor.cogrouping.StockTransactionProcessor; 8 | import bbejeck.clients.producer.MockDataProducer; 9 | import bbejeck.model.ClickEvent; 10 | import bbejeck.model.StockTransaction; 11 | import bbejeck.util.collection.Tuple; 12 | import bbejeck.util.serde.StreamsSerdes; 13 | import org.apache.kafka.clients.consumer.ConsumerConfig; 14 | import org.apache.kafka.common.serialization.Deserializer; 15 | import org.apache.kafka.common.serialization.Serde; 16 | import org.apache.kafka.common.serialization.Serdes; 17 | import 
org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.processor.WallclockTimestampExtractor;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import static bbejeck.chapter_6.processor.cogrouping.CogroupingProcessor.TUPLE_STORE_NAME;

/**
 * Processor-API demo: co-groups stock transactions and click events by symbol
 * into tuples via a shared persistent state store.
 *
 * NOTE(review): generic type parameters (e.g.
 * Serde&lt;Tuple&lt;List&lt;ClickEvent&gt;, List&lt;StockTransaction&gt;&gt;&gt;) appear to
 * have been stripped from this file by extraction -- restore before compiling.
 */
public class CoGroupingApplication {

    public static void main(String[] args) throws Exception {


        StreamsConfig streamsConfig = new StreamsConfig(getProperties());
        Deserializer stringDeserializer = Serdes.String().deserializer();
        Serializer stringSerializer = Serdes.String().serializer();
        Serde, List>> eventPerformanceTuple = StreamsSerdes.EventTransactionTupleSerde();
        Serializer, List>> tupleSerializer = eventPerformanceTuple.serializer();
        Serde stockTransactionSerde = StreamsSerdes.StockTransactionSerde();
        Deserializer stockTransactionDeserializer = stockTransactionSerde.deserializer();

        Serde clickEventSerde = StreamsSerdes.ClickEventSerde();
        Deserializer clickEventDeserializer = clickEventSerde.deserializer();


        Topology topology = new Topology();
        // Changelog topic tuning for the tuple store: bounded retention plus compaction.
        Map changeLogConfigs = new HashMap<>();
        changeLogConfigs.put("retention.ms", "120000");
        changeLogConfigs.put("cleanup.policy", "compact,delete");


        KeyValueBytesStoreSupplier storeSupplier = Stores.persistentKeyValueStore(TUPLE_STORE_NAME);
        StoreBuilder, List>>> storeBuilder =
                Stores.keyValueStoreBuilder(storeSupplier,
                        Serdes.String(),
                        eventPerformanceTuple).withLoggingEnabled(changeLogConfigs);

        // Two sources feed two processors whose outputs converge on the
        // CoGrouping-Processor (which owns the tuple store) and then one sink.
        topology.addSource("Txn-Source", stringDeserializer, stockTransactionDeserializer, "stock-transactions")
                .addSource("Events-Source", stringDeserializer, clickEventDeserializer, "events")
                .addProcessor("Txn-Processor", StockTransactionProcessor::new, "Txn-Source")
                .addProcessor("Events-Processor", ClickEventProcessor::new, "Events-Source")
                .addProcessor("CoGrouping-Processor", CogroupingProcessor::new, "Txn-Processor", "Events-Processor")
                .addStateStore(storeBuilder, "CoGrouping-Processor")
                .addSink("Tuple-Sink", "cogrouped-results", stringSerializer, tupleSerializer, "CoGrouping-Processor");

        // Extra child that prints the co-grouped results to stdout.
        topology.addProcessor("Print", new KStreamPrinter("Co-Grouping"), "CoGrouping-Processor");


        // used only to produce data for this application, not typical usage
        MockDataProducer.produceStockTransactionsAndDayTradingClickEvents(50, 100, 100, StockTransaction::getSymbol);

        KafkaStreams kafkaStreams = new KafkaStreams(topology, streamsConfig);
        System.out.println("Co-Grouping App Started");
        kafkaStreams.cleanUp();
        kafkaStreams.start();
        // Demo app: run for a fixed 70 seconds, then shut down.
        Thread.sleep(70000);
        System.out.println("Shutting down the Co-Grouping App now");
        kafkaStreams.close();
        MockDataProducer.shutdown();
    }


    /** Streams configuration: local broker, reset to latest, String serdes, wall-clock time. */
    private static Properties getProperties() {
        Properties props = new Properties();
        props.put(StreamsConfig.CLIENT_ID_CONFIG, "cogrouping-client");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "cogrouping-group");
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "cogrouping-appid");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        props.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, 1);
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, WallclockTimestampExtractor.class);
        return props;
    }


}
-------------------------------------------------------------------------------- /src/main/java/bbejeck/chapter_6/PopsHopsApplication.java: --------------------------------------------------------------------------------
package bbejeck.chapter_6;


import bbejeck.chapter_6.processor.BeerPurchaseProcessor;
import bbejeck.chapter_6.processor.KStreamPrinter;
import bbejeck.clients.producer.MockDataProducer;
import bbejeck.model.BeerPurchase;
import bbejeck.util.Topics;
import bbejeck.util.serializer.JsonDeserializer;
import bbejeck.util.serializer.JsonSerializer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.processor.UsePreviousTimeOnInvalidTimestamp;
import org.apache.kafka.streams.processor.WallclockTimestampExtractor;

import java.util.Properties;

import static org.apache.kafka.streams.Topology.AutoOffsetReset.LATEST;

public class PopsHopsApplication {


    public static void main(String[] args) throws Exception {

        StreamsConfig streamsConfig = new StreamsConfig(getProperties());
        Deserializer beerPurchaseDeserializer = new JsonDeserializer<>(BeerPurchase.class);
        Serde stringSerde = Serdes.String();
        Deserializer stringDeserializer = stringSerde.deserializer();
        Serializer stringSerializer = stringSerde.serializer();
        Serializer beerPurchaseSerializer = new
JsonSerializer<>();

        // NOTE(review): variable name typo ("toplogy") kept as-is to avoid code changes.
        Topology toplogy = new Topology();

        String domesticSalesSink = "domestic-beer-sales";
        String internationalSalesSink = "international-beer-sales";
        String purchaseSourceNodeName = "beer-purchase-source";
        String purchaseProcessor = "purchase-processor";


        // Routes each purchase to either the domestic or the international child node.
        BeerPurchaseProcessor beerProcessor = new BeerPurchaseProcessor(domesticSalesSink, internationalSalesSink);

        toplogy.addSource(LATEST,
                purchaseSourceNodeName,
                new UsePreviousTimeOnInvalidTimestamp(),
                stringDeserializer,
                beerPurchaseDeserializer,
                Topics.POPS_HOPS_PURCHASES.topicName())
                .addProcessor(purchaseProcessor,
                        () -> beerProcessor,
                        purchaseSourceNodeName);

        //Uncomment these two lines and comment out the printer lines for writing to topics
        // .addSink(internationalSalesSink,"international-sales", stringSerializer, beerPurchaseSerializer, purchaseProcessor)
        // .addSink(domesticSalesSink,"domestic-sales", stringSerializer, beerPurchaseSerializer, purchaseProcessor);

        //You'll have to comment these lines out if you want to write to topics as they have the same node names
        toplogy.addProcessor(domesticSalesSink,
                new KStreamPrinter("domestic-sales"),
                purchaseProcessor );

        toplogy.addProcessor(internationalSalesSink,
                new KStreamPrinter("international-sales"),
                purchaseProcessor );

        KafkaStreams kafkaStreams = new KafkaStreams(toplogy, streamsConfig);
        // used only to produce data for this application, not typical usage
        MockDataProducer.produceBeerPurchases(5);
        System.out.println("Starting Pops-Hops Application now");
        kafkaStreams.cleanUp();
        kafkaStreams.start();
        // Demo app: run for a fixed 70 seconds, then shut down.
        Thread.sleep(70000);
        System.out.println("Shutting down Pops-Hops Application now");
        kafkaStreams.close();
        MockDataProducer.shutdown();
    }


    /** Streams configuration: local broker, reset to latest, String serdes, wall-clock time. */
    private static Properties getProperties() {
        Properties props = new Properties();
        props.put(StreamsConfig.CLIENT_ID_CONFIG, "beer-app-client");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "beer-app-group");
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "beer-app-appid");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, 1);
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, WallclockTimestampExtractor.class);
        return props;
    }
}
-------------------------------------------------------------------------------- /src/main/java/bbejeck/chapter_6/StockPerformanceApplication.java: --------------------------------------------------------------------------------
package bbejeck.chapter_6;


import bbejeck.chapter_6.processor.KStreamPrinter;
import bbejeck.chapter_6.processor.StockPerformanceProcessor;
import bbejeck.clients.producer.MockDataProducer;
import bbejeck.model.StockPerformance;
import bbejeck.model.StockTransaction;
import bbejeck.util.serde.StreamsSerdes;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.processor.WallclockTimestampExtractor;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores; 23 | 24 | import java.util.Properties; 25 | 26 | public class StockPerformanceApplication { 27 | 28 | 29 | public static void main(String[] args) throws Exception { 30 | 31 | 32 | StreamsConfig streamsConfig = new StreamsConfig(getProperties()); 33 | Deserializer stringDeserializer = Serdes.String().deserializer(); 34 | Serializer stringSerializer = Serdes.String().serializer(); 35 | Serde stockPerformanceSerde = StreamsSerdes.StockPerformanceSerde(); 36 | Serializer stockPerformanceSerializer = stockPerformanceSerde.serializer(); 37 | Serde stockTransactionSerde = StreamsSerdes.StockTransactionSerde(); 38 | Deserializer stockTransactionDeserializer = stockTransactionSerde.deserializer(); 39 | 40 | 41 | Topology topology = new Topology(); 42 | String stocksStateStore = "stock-performance-store"; 43 | double differentialThreshold = 0.02; 44 | 45 | KeyValueBytesStoreSupplier storeSupplier = Stores.inMemoryKeyValueStore(stocksStateStore); 46 | StoreBuilder> storeBuilder = Stores.keyValueStoreBuilder(storeSupplier, Serdes.String(), stockPerformanceSerde); 47 | 48 | 49 | topology.addSource("stocks-source", stringDeserializer, stockTransactionDeserializer,"stock-transactions") 50 | .addProcessor("stocks-processor", () -> new StockPerformanceProcessor(stocksStateStore, differentialThreshold), "stocks-source") 51 | .addStateStore(storeBuilder,"stocks-processor") 52 | .addSink("stocks-sink", "stock-performance", stringSerializer, stockPerformanceSerializer, "stocks-processor"); 53 | 54 | 55 | topology.addProcessor("stocks-printer", new KStreamPrinter("StockPerformance"), "stocks-processor"); 56 | 57 | KafkaStreams kafkaStreams = new KafkaStreams(topology, streamsConfig); 58 | MockDataProducer.produceStockTransactionsWithKeyFunction(50,50, 25, StockTransaction::getSymbol); 59 | System.out.println("Stock Analysis App Started"); 60 | kafkaStreams.cleanUp(); 61 | kafkaStreams.start(); 62 | Thread.sleep(70000); 63 | 
// ---- /src/main/java/bbejeck/chapter_6/StockPerformanceStreamsAndProcessorApplication.java ----
package bbejeck.chapter_6;


import bbejeck.chapter_6.transformer.StockPerformanceTransformer;
import bbejeck.clients.producer.MockDataProducer;
import bbejeck.model.StockPerformance;
import bbejeck.model.StockTransaction;
import bbejeck.util.serde.StreamsSerdes;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.Consumed;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Printed;
import org.apache.kafka.streams.processor.WallclockTimestampExtractor;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;

import java.util.Properties;

/**
 * Hybrid DSL + Processor API demo: the KStream DSL drives the topology while a
 * {@link StockPerformanceTransformer} (Processor API) keeps per-symbol stats in
 * an LRU-bounded state store and emits updates, which are printed to stdout.
 */
public class StockPerformanceStreamsAndProcessorApplication {


    public static void main(String[] args) throws Exception {

        StreamsConfig streamsConfig = new StreamsConfig(getProperties());
        // Parameterized serdes (the raw types in the original defeat compile-time checks).
        Serde<String> stringSerde = Serdes.String();
        Serde<StockPerformance> stockPerformanceSerde = StreamsSerdes.StockPerformanceSerde();
        Serde<StockTransaction> stockTransactionSerde = StreamsSerdes.StockTransactionSerde();


        StreamsBuilder builder = new StreamsBuilder();

        String stocksStateStore = "stock-performance-store";
        double differentialThreshold = 0.02;

        // LRU-bounded store: at most 100 symbols tracked at once.
        KeyValueBytesStoreSupplier storeSupplier = Stores.lruMap(stocksStateStore, 100);
        StoreBuilder<KeyValueStore<String, StockPerformance>> storeBuilder =
                Stores.keyValueStoreBuilder(storeSupplier, Serdes.String(), stockPerformanceSerde);

        builder.addStateStore(storeBuilder);

        builder.stream("stock-transactions", Consumed.with(stringSerde, stockTransactionSerde))
               .transform(() -> new StockPerformanceTransformer(stocksStateStore, differentialThreshold), stocksStateStore)
               .print(Printed.toSysOut().withLabel("StockPerformance"));

        //Uncomment this line and comment out the line above for writing to a topic
        //.to(stringSerde, stockPerformanceSerde, "stock-performance");


        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
        MockDataProducer.produceStockTransactionsWithKeyFunction(50, 50, 25, StockTransaction::getSymbol);
        System.out.println("Stock Analysis KStream/Process API App Started");
        kafkaStreams.cleanUp();
        kafkaStreams.start();
        try {
            Thread.sleep(70000);
        } finally {
            // Always close the Streams instance and the mock producer, even on interrupt.
            System.out.println("Shutting down the Stock KStream/Process API Analysis App now");
            kafkaStreams.close();
            MockDataProducer.shutdown();
        }
    }

    /** Builds the Streams/consumer configuration for this demo (local single broker). */
    private static Properties getProperties() {
        Properties props = new Properties();
        props.put(StreamsConfig.CLIENT_ID_CONFIG, "ks-papi-stock-analysis-client");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "ks-papi-stock-analysis-group");
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "ks-stock-analysis-appid");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, 1);
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, WallclockTimestampExtractor.class);
        return props;
    }
}
// ---- /src/main/java/bbejeck/chapter_6/StockPerformanceStreamsAndProcessorMultipleValuesApplication.java ----
package bbejeck.chapter_6;


import bbejeck.chapter_6.transformer.StockPerformanceMultipleValuesTransformer;
import bbejeck.clients.producer.MockDataProducer;
import bbejeck.model.StockPerformance;
import bbejeck.model.StockTransaction;
import bbejeck.util.serde.StreamsSerdes;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.Consumed;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Printed;
import org.apache.kafka.streams.kstream.TransformerSupplier;
import org.apache.kafka.streams.processor.WallclockTimestampExtractor;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;

import java.util.List;
import java.util.Properties;

/**
 * Variant of the hybrid DSL/Processor API demo whose transformer emits a LIST of
 * key/value results per punctuation; the list is flattened back into individual
 * records with flatMap before printing.
 */
public class StockPerformanceStreamsAndProcessorMultipleValuesApplication {


    public static void main(String[] args) throws Exception {

        StreamsConfig streamsConfig = new StreamsConfig(getProperties());
        // Parameterized serdes (the raw types in the original defeat compile-time checks).
        Serde<String> stringSerde = Serdes.String();
        Serde<StockPerformance> stockPerformanceSerde = StreamsSerdes.StockPerformanceSerde();
        Serde<StockTransaction> stockTransactionSerde = StreamsSerdes.StockTransactionSerde();


        StreamsBuilder builder = new StreamsBuilder();

        String stocksStateStore = "stock-performance-store";
        double differentialThreshold = 0.05;

        // The transformer turns one input record into a (possibly empty) batch of results.
        TransformerSupplier<String, StockTransaction, KeyValue<String, List<KeyValue<String, StockPerformance>>>> transformerSupplier =
                () -> new StockPerformanceMultipleValuesTransformer(stocksStateStore, differentialThreshold);

        // LRU-bounded store: at most 100 symbols tracked at once.
        KeyValueBytesStoreSupplier storeSupplier = Stores.lruMap(stocksStateStore, 100);
        StoreBuilder<KeyValueStore<String, StockPerformance>> storeBuilder =
                Stores.keyValueStoreBuilder(storeSupplier, Serdes.String(), stockPerformanceSerde);

        builder.addStateStore(storeBuilder);

        builder.stream("stock-transactions", Consumed.with(stringSerde, stockTransactionSerde))
               .transform(transformerSupplier, stocksStateStore)
               // The key produced by the transformer is a placeholder; each element of
               // the list already carries its own key, so just flatten the list out.
               .flatMap((dummyKey, valueList) -> valueList)
               .print(Printed.toSysOut().withLabel("StockPerformance"));
        //.to(stringSerde, stockPerformanceSerde, "stock-performance");


        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
        MockDataProducer.produceStockTransactionsWithKeyFunction(50, 50, 25, StockTransaction::getSymbol);
        System.out.println("Stock Analysis KStream/Process API App Started");
        kafkaStreams.cleanUp();
        kafkaStreams.start();
        try {
            Thread.sleep(70000);
        } finally {
            // Always close the Streams instance and the mock producer, even on interrupt.
            System.out.println("Shutting down the Stock KStream/Process API Analysis App now");
            kafkaStreams.close();
            MockDataProducer.shutdown();
        }
    }

    /** Builds the Streams/consumer configuration for this demo (local single broker). */
    private static Properties getProperties() {
        Properties props = new Properties();
        props.put(StreamsConfig.CLIENT_ID_CONFIG, "ks-papi-stock-analysis-client");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "ks-papi-stock-analysis-group");
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "ks-stock-analysis-appid");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, 1);
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, WallclockTimestampExtractor.class);
        return props;
    }
}
// ---- /src/main/java/bbejeck/chapter_6/ZMartProcessorApp.java ----
package bbejeck.chapter_6;


import bbejeck.chapter_6.processor.KStreamPrinter;
import bbejeck.chapter_6.processor.MapValueProcessor;
import bbejeck.clients.producer.MockDataProducer;
import bbejeck.model.Purchase;
import bbejeck.model.PurchasePattern;
import bbejeck.model.RewardAccumulator;
import bbejeck.util.serde.StreamsSerdes;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.processor.WallclockTimestampExtractor;

import java.util.Properties;

/**
 * Processor API version of the ZMart topology: one source feeding three
 * independent branches (credit-card masking, rewards, purchase patterns),
 * each with its own sink topic and a printing side branch.
 */
public class ZMartProcessorApp {


    public static void main(String[] args) throws Exception {
        MockDataProducer.producePurchaseData();

        StreamsConfig streamsConfig = new StreamsConfig(getProperties());
        // Parameterized serdes (the raw types in the original defeat compile-time checks).
        Deserializer<String> stringDeserializer = Serdes.String().deserializer();
        Serializer<String> stringSerializer = Serdes.String().serializer();
        Serde<Purchase> purchaseSerde = StreamsSerdes.PurchaseSerde();
        Deserializer<Purchase> purchaseDeserializer = purchaseSerde.deserializer();
        Serializer<Purchase> purchaseSerializer = purchaseSerde.serializer();
        Serializer<PurchasePattern> patternSerializer = StreamsSerdes.PurchasePatternSerde().serializer();
        Serializer<RewardAccumulator> rewardsSerializer = StreamsSerdes.RewardAccumulatorSerde().serializer();

        Topology topology = new Topology();

        // All three processors are siblings fed directly by the source node.
        topology.addSource("txn-source", stringDeserializer, purchaseDeserializer, "transactions")
                .addProcessor("masking-processor",
                        () -> new MapValueProcessor<String, Purchase, Purchase>(p -> Purchase.builder(p).maskCreditCard().build()), "txn-source")
                .addProcessor("rewards-processor",
                        () -> new MapValueProcessor<String, Purchase, RewardAccumulator>(purchase -> RewardAccumulator.builder(purchase).build()), "txn-source")
                .addProcessor("patterns-processor",
                        () -> new MapValueProcessor<String, Purchase, PurchasePattern>(purchase -> PurchasePattern.builder(purchase).build()), "txn-source")
                .addSink("purchase-sink", "purchases", stringSerializer, purchaseSerializer, "masking-processor")
                .addSink("rewards-sink", "rewards", stringSerializer, rewardsSerializer, "rewards-processor")
                .addSink("patterns-sink", "patterns", stringSerializer, patternSerializer, "patterns-processor");

        // Printing side branches for demo visibility.
        topology.addProcessor("purchase-printer", new KStreamPrinter("purchase"), "masking-processor")
                .addProcessor("rewards-printer", new KStreamPrinter("rewards"), "rewards-processor")
                .addProcessor("patterns-printer", new KStreamPrinter("pattens"), "patterns-processor");

        KafkaStreams kafkaStreams = new KafkaStreams(topology, streamsConfig);
        System.out.println("ZMart Processor App Started");
        kafkaStreams.start();
        try {
            Thread.sleep(35000);
        } finally {
            // Always close the Streams instance and the mock producer, even on interrupt.
            System.out.println("Shutting down the ZMart Processor App now");
            kafkaStreams.close();
            MockDataProducer.shutdown();
        }
    }

    /** Builds the Streams/consumer configuration for this demo (local single broker). */
    private static Properties getProperties() {
        Properties props = new Properties();
        props.put(StreamsConfig.CLIENT_ID_CONFIG, "zmart-processor-client");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "zmart-processor-group");
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "zmart-processor-appid");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, 1);
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, WallclockTimestampExtractor.class);
        return props;
    }
}
// ---- /src/main/java/bbejeck/chapter_6/cancellation/StockPerformanceCancellingProcessor.java ----
package bbejeck.chapter_6.cancellation;


import bbejeck.chapter_6.punctuator.StockPerformancePunctuator;
import bbejeck.model.StockPerformance;
import bbejeck.model.StockTransaction;
import org.apache.kafka.streams.processor.AbstractProcessor;
import org.apache.kafka.streams.processor.Cancellable;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.PunctuationType;
import org.apache.kafka.streams.state.KeyValueStore;

import java.time.Duration;
import java.time.Instant;

/**
 * Simple class demonstrating how to cancel a punctuation processing. In this case
 * the punctuation is stopped after 15 minutes.
 *
 * This is a arbitrary example but demonstrates how we can cancel the punctuation. After
 * cancelling, you could reschedule for a different time or schedule a different Punctuator
 * to run.
 */
public class StockPerformanceCancellingProcessor extends AbstractProcessor<String, StockTransaction> {

    /** Wall-clock minutes after construction at which punctuation is cancelled. */
    private static final long MAX_ELAPSED_MINUTES_FOR_PUNCTUATION = 15;

    private KeyValueStore<String, StockPerformance> keyValueStore;
    private final String stateStoreName;
    private final double differentialThreshold;
    // Handle for the scheduled punctuation; null once it has been cancelled.
    private Cancellable cancellable;
    private final Instant startInstant = Instant.now();


    public StockPerformanceCancellingProcessor(String stateStoreName, double differentialThreshold) {
        this.stateStoreName = stateStoreName;
        this.differentialThreshold = differentialThreshold;
    }

    @SuppressWarnings("unchecked")
    @Override
    public void init(ProcessorContext processorContext) {
        super.init(processorContext);
        keyValueStore = (KeyValueStore) context().getStateStore(stateStoreName);
        StockPerformancePunctuator punctuator = new StockPerformancePunctuator(differentialThreshold,
                                                                               context(),
                                                                               keyValueStore);
        // Punctuate every 10 seconds of wall-clock time.
        cancellable = context().schedule(10000, PunctuationType.WALL_CLOCK_TIME, punctuator);
    }

    @Override
    public void process(String symbol, StockTransaction transaction) {

        long elapsedTime = Duration.between(startInstant, Instant.now()).toMinutes();

        // Cancels punctuation after 15 minutes. Nulling the handle afterwards avoids
        // re-invoking cancel() on every subsequent record (the original did).
        if (cancellable != null && elapsedTime >= MAX_ELAPSED_MINUTES_FOR_PUNCTUATION) {
            cancellable.cancel();
            cancellable = null;
        }

        if (symbol != null) {
            StockPerformance stockPerformance = keyValueStore.get(symbol);

            if (stockPerformance == null) {
                stockPerformance = new StockPerformance();
            }

            stockPerformance.updatePriceStats(transaction.getSharePrice());
            stockPerformance.updateVolumeStats(transaction.getShares());
            stockPerformance.setLastUpdateSent(Instant.now());

            keyValueStore.put(symbol, stockPerformance);
        }
    }
}
// ---- /src/main/java/bbejeck/chapter_6/processor/BeerPurchaseProcessor.java ----
package bbejeck.chapter_6.processor;

import bbejeck.model.BeerPurchase;
import bbejeck.model.Currency;
import org.apache.kafka.streams.processor.AbstractProcessor;

import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.Locale;

import static bbejeck.model.Currency.DOLLARS;

/**
 * Routes beer purchases to one of two child nodes: international sales are
 * converted to dollars (rounded to two decimals) and forwarded to the
 * international node; dollar sales pass through to the domestic node.
 */
public class BeerPurchaseProcessor extends AbstractProcessor<String, BeerPurchase> {

    private final String domesticSalesNode;
    private final String internationalSalesNode;
    // Pin the symbols to Locale.US so format() always emits '.' as the decimal
    // separator; the original's default-locale DecimalFormat could emit ',' and
    // make the Double.parseDouble below throw NumberFormatException.
    // One instance per processor (not static): DecimalFormat is not thread-safe,
    // but each processor instance is confined to a single stream thread.
    private final DecimalFormat decimalFormat =
            new DecimalFormat("###.##", DecimalFormatSymbols.getInstance(Locale.US));

    public BeerPurchaseProcessor(String domesticSalesNode, String internationalSalesNode) {
        this.domesticSalesNode = domesticSalesNode;
        this.internationalSalesNode = internationalSalesNode;
    }

    @Override
    public void process(String key, BeerPurchase beerPurchase) {

        Currency transactionCurrency = beerPurchase.getCurrency();
        if (transactionCurrency != DOLLARS) {
            BeerPurchase.Builder builder = BeerPurchase.newBuilder(beerPurchase);
            double internationalSaleAmount = beerPurchase.getTotalSale();
            builder.currency(DOLLARS);
            builder.totalSale(Double.parseDouble(decimalFormat.format(transactionCurrency.convertToDollars(internationalSaleAmount))));
            BeerPurchase dollarBeerPurchase = builder.build();
            context().forward(key, dollarBeerPurchase, internationalSalesNode);
        } else {
            context().forward(key, beerPurchase, domesticSalesNode);
        }
    }
}
// ---- /src/main/java/bbejeck/chapter_6/processor/KStreamPrinter.java ----
package bbejeck.chapter_6.processor;

import org.apache.kafka.streams.processor.AbstractProcessor;
import org.apache.kafka.streams.processor.Processor;
import org.apache.kafka.streams.processor.ProcessorSupplier;

/**
 * ProcessorSupplier whose processors print every record with a label and then
 * forward it downstream unchanged — a pass-through debugging aid.
 */
public class KStreamPrinter implements ProcessorSupplier {

    private final String name;

    public KStreamPrinter(String name) {
        this.name = name;
    }

    @Override
    public Processor get() {
        return new PrintingProcessor(this.name);
    }

    /** Prints "[label] Key [k] Value[v]" and forwards the record as-is. */
    private class PrintingProcessor extends AbstractProcessor {
        private final String name;

        PrintingProcessor(String name) {
            this.name = name;
        }

        @Override
        public void process(Object key, Object value) {
            System.out.println(String.format("[%s] Key [%s] Value[%s]", name, key, value));
            this.context().forward(key, value);
        }
    }
}
// ---- /src/main/java/bbejeck/chapter_6/processor/MapValueProcessor.java ----
package bbejeck.chapter_6.processor;

import org.apache.kafka.streams.processor.AbstractProcessor;

import java.util.function.Function;

/**
 * Processor API analog of KStream.mapValues: applies a value-mapping function
 * to each record and forwards the original key with the mapped value.
 *
 * @param <K>  key type (passed through unchanged)
 * @param <V>  incoming value type
 * @param <VR> mapped value type
 */
public class MapValueProcessor<K, V, VR> extends AbstractProcessor<K, V> {

    // It is assumed the valueMapper is stateless
    private final Function<V, VR> valueMapper;

    public MapValueProcessor(Function<V, VR> valueMapper) {
        this.valueMapper = valueMapper;
    }

    @Override
    public void process(K key, V value) {
        VR newValue = valueMapper.apply(value);
        this.context().forward(key, newValue);
    }
}
// ---- /src/main/java/bbejeck/chapter_6/processor/StockPerformanceProcessor.java ----
package bbejeck.chapter_6.processor;


import bbejeck.chapter_6.punctuator.StockPerformancePunctuator;
import bbejeck.model.StockPerformance;
import bbejeck.model.StockTransaction;
import org.apache.kafka.streams.processor.AbstractProcessor;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.PunctuationType;
import org.apache.kafka.streams.state.KeyValueStore;

import java.time.Instant;

/**
 * Accumulates running price/volume statistics per stock symbol in a state
 * store; a wall-clock punctuator (scheduled in init) periodically scans the
 * store and forwards symbols whose differential exceeds the threshold.
 */
public class StockPerformanceProcessor extends AbstractProcessor<String, StockTransaction> {

    private KeyValueStore<String, StockPerformance> keyValueStore;
    private final String stateStoreName;
    private final double differentialThreshold;

    public StockPerformanceProcessor(String stateStoreName, double differentialThreshold) {
        this.stateStoreName = stateStoreName;
        this.differentialThreshold = differentialThreshold;
    }

    @SuppressWarnings("unchecked")
    @Override
    public void init(ProcessorContext processorContext) {
        super.init(processorContext);
        keyValueStore = (KeyValueStore) context().getStateStore(stateStoreName);
        StockPerformancePunctuator punctuator = new StockPerformancePunctuator(differentialThreshold,
                                                                               context(),
                                                                               keyValueStore);
        // Punctuate every 10 seconds of wall-clock time.
        context().schedule(10000, PunctuationType.WALL_CLOCK_TIME, punctuator);
    }

    @Override
    public void process(String symbol, StockTransaction transaction) {
        // Records with a null key cannot be attributed to a symbol; skip them.
        if (symbol != null) {
            StockPerformance stockPerformance = keyValueStore.get(symbol);

            if (stockPerformance == null) {
                stockPerformance = new StockPerformance();
            }

            stockPerformance.updatePriceStats(transaction.getSharePrice());
            stockPerformance.updateVolumeStats(transaction.getShares());
            stockPerformance.setLastUpdateSent(Instant.now());

            keyValueStore.put(symbol, stockPerformance);
        }
    }
}
// ---- /src/main/java/bbejeck/chapter_6/processor/UpperCaseProcessor.java ----
package bbejeck.chapter_6.processor;


import org.apache.kafka.streams.processor.AbstractProcessor;

/** Forwards each record with its value upper-cased; the key is unchanged. */
public class UpperCaseProcessor extends AbstractProcessor<String, String> {

    @Override
    public void process(String key, String value) {
        context().forward(key, value.toUpperCase());
    }
}
// ---- /src/main/java/bbejeck/chapter_6/processor/cogrouping/ClickEventProcessor.java ----
package bbejeck.chapter_6.processor.cogrouping;


import bbejeck.model.ClickEvent;
import bbejeck.model.StockTransaction;
import bbejeck.util.collection.Tuple;
import org.apache.kafka.streams.processor.AbstractProcessor;

/**
 * Wraps each ClickEvent in a Tuple with an empty StockTransaction slot and
 * forwards it, so a downstream cogrouping processor can merge both streams.
 * (Removed: a spurious io.dropwizard import, several unused imports, and a
 * no-op init() override that only called super.)
 */
public class ClickEventProcessor extends AbstractProcessor<String, ClickEvent> {

    @Override
    public void process(String key, ClickEvent clickEvent) {
        // Null keys cannot be cogrouped downstream; drop them.
        if (key != null) {
            Tuple<ClickEvent, StockTransaction> tuple = Tuple.of(clickEvent, null);
            context().forward(key, tuple);
        }
    }
}
// ---- /src/main/java/bbejeck/chapter_6/processor/cogrouping/CogroupingMethodHandleProcessor.java ----
package bbejeck.chapter_6.processor.cogrouping;


import bbejeck.model.ClickEvent;
import bbejeck.model.StockTransaction;
import bbejeck.util.collection.Tuple;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.processor.AbstractProcessor;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.KeyValueStore;

import java.util.ArrayList;
import java.util.List;

import static org.apache.kafka.streams.processor.PunctuationType.STREAM_TIME;

/**
 * This class provides the same functionality as the
 * {@link CogroupingProcessor} but the {@link org.apache.kafka.streams.processor.Punctuator}
 * is provided via a method handle versus a concrete instance.
 */
public class CogroupingMethodHandleProcessor extends AbstractProcessor<String, Tuple<ClickEvent, StockTransaction>> {

    private KeyValueStore<String, Tuple<List<ClickEvent>, List<StockTransaction>>> tupleStore;
    public static final String TUPLE_STORE_NAME = "tupleCoGroupStore";


    @Override
    @SuppressWarnings("unchecked")
    public void init(ProcessorContext context) {
        super.init(context);
        tupleStore = (KeyValueStore) context().getStateStore(TUPLE_STORE_NAME);
        // Method reference used as the Punctuator; fires every 15s of stream time.
        context().schedule(15000L, STREAM_TIME, this::cogroup);
    }

    @Override
    public void process(String key, Tuple<ClickEvent, StockTransaction> value) {

        Tuple<List<ClickEvent>, List<StockTransaction>> cogroupedTuple = tupleStore.get(key);
        if (cogroupedTuple == null) {
            cogroupedTuple = Tuple.of(new ArrayList<>(), new ArrayList<>());
        }

        // Each incoming Tuple carries exactly one populated side; accumulate it.
        if (value._1 != null) {
            cogroupedTuple._1.add(value._1);
        }

        if (value._2 != null) {
            cogroupedTuple._2.add(value._2);
        }

        tupleStore.put(key, cogroupedTuple);
    }

    /**
     * Punctuation callback: forwards each key's accumulated events, then clears
     * the accumulators in the store.
     */
    public void cogroup(long timestamp) {
        // try-with-resources: the original's bare iterator.close() leaked the
        // iterator if forward() or store access threw mid-loop.
        try (KeyValueIterator<String, Tuple<List<ClickEvent>, List<StockTransaction>>> iterator = tupleStore.all()) {
            while (iterator.hasNext()) {
                KeyValue<String, Tuple<List<ClickEvent>, List<StockTransaction>>> cogrouping = iterator.next();

                if (cogrouping.value != null && (!cogrouping.value._1.isEmpty() || !cogrouping.value._2.isEmpty())) {
                    // Forward copies so downstream holds stable snapshots...
                    List<ClickEvent> clickEvents = new ArrayList<>(cogrouping.value._1);
                    List<StockTransaction> stockTransactions = new ArrayList<>(cogrouping.value._2);

                    context().forward(cogrouping.key, Tuple.of(clickEvents, stockTransactions));
                    // ...then reset the stored accumulators for the next window.
                    cogrouping.value._1.clear();
                    cogrouping.value._2.clear();
                    tupleStore.put(cogrouping.key, cogrouping.value);
                }
            }
        }
    }
}
// ---- /src/main/java/bbejeck/chapter_6/processor/cogrouping/CogroupingProcessor.java ----
package bbejeck.chapter_6.processor.cogrouping;


import bbejeck.model.ClickEvent;
import bbejeck.model.StockTransaction;
import bbejeck.util.collection.Tuple;
import org.apache.kafka.streams.processor.AbstractProcessor;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.state.KeyValueStore;

import java.util.ArrayList;
import java.util.List;

import static org.apache.kafka.streams.processor.PunctuationType.STREAM_TIME;

/**
 * Cogroups two record streams by key: each incoming Tuple carries either a
 * ClickEvent or a StockTransaction, which is appended to the matching list in
 * the per-key accumulator held in the state store. A stream-time punctuator
 * ({@link CogroupingPunctuator}) periodically flushes the accumulators.
 *
 * User: Bill Bejeck
 * Date: 8/12/17
 * Time: 10:54 AM
 */
public class CogroupingProcessor extends AbstractProcessor<String, Tuple<ClickEvent, StockTransaction>> {

    public static final String TUPLE_STORE_NAME = "tupleCoGroupStore";

    private KeyValueStore<String, Tuple<List<ClickEvent>, List<StockTransaction>>> tupleStore;


    @Override
    @SuppressWarnings("unchecked")
    public void init(ProcessorContext context) {
        super.init(context);
        tupleStore = (KeyValueStore) context().getStateStore(TUPLE_STORE_NAME);
        // Flush accumulated pairs every 15 seconds of stream time.
        context().schedule(15000L, STREAM_TIME,
                           new CogroupingPunctuator(tupleStore, context()));
    }

    @Override
    public void process(String key, Tuple<ClickEvent, StockTransaction> value) {
        Tuple<List<ClickEvent>, List<StockTransaction>> accumulated = tupleStore.get(key);
        if (accumulated == null) {
            // First record for this key: start with two empty accumulators.
            accumulated = Tuple.of(new ArrayList<>(), new ArrayList<>());
        }

        ClickEvent click = value._1;
        StockTransaction transaction = value._2;

        // Exactly one side of the incoming tuple is populated; append it.
        if (click != null) {
            accumulated._1.add(click);
        }
        if (transaction != null) {
            accumulated._2.add(transaction);
        }

        tupleStore.put(key, accumulated);
    }

}
// ---- /src/main/java/bbejeck/chapter_6/processor/cogrouping/CogroupingPunctuator.java ----
package bbejeck.chapter_6.processor.cogrouping;


import bbejeck.model.ClickEvent;
import bbejeck.model.StockTransaction;
import bbejeck.util.collection.Tuple;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.Punctuator;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.KeyValueStore;

import java.util.ArrayList;
import java.util.List;

/**
 * Punctuator that flushes the cogrouping state store: for every key with at
 * least one accumulated ClickEvent or StockTransaction it forwards a snapshot
 * downstream and resets the stored accumulators.
 */
public class CogroupingPunctuator implements Punctuator {

    private final KeyValueStore<String, Tuple<List<ClickEvent>, List<StockTransaction>>> tupleStore;
    private final ProcessorContext context;

    /**
     * @param tupleStore per-key accumulators maintained by the cogrouping processor
     * @param context    processor context used to forward flushed results
     */
    public CogroupingPunctuator(KeyValueStore<String, Tuple<List<ClickEvent>, List<StockTransaction>>> tupleStore,
                                ProcessorContext context) {
        this.tupleStore = tupleStore;
        this.context = context;
    }

    @Override
    public void punctuate(long timestamp) {
        // try-with-resources: the original's bare iterator.close() leaked the
        // iterator if forward() or store access threw mid-loop.
        try (KeyValueIterator<String, Tuple<List<ClickEvent>, List<StockTransaction>>> iterator = tupleStore.all()) {
            while (iterator.hasNext()) {
                KeyValue<String, Tuple<List<ClickEvent>, List<StockTransaction>>> cogrouped = iterator.next();
                // if either list contains values forward results
                if (cogrouped.value != null && (!cogrouped.value._1.isEmpty() || !cogrouped.value._2.isEmpty())) {
                    // Forward defensive copies so downstream sees a stable snapshot.
                    List<ClickEvent> clickEvents = new ArrayList<>(cogrouped.value._1);
                    List<StockTransaction> stockTransactions = new ArrayList<>(cogrouped.value._2);

                    context.forward(cogrouped.key, Tuple.of(clickEvents, stockTransactions));
                    // empty out the current cogrouped results
                    cogrouped.value._1.clear();
                    cogrouped.value._2.clear();
                    tupleStore.put(cogrouped.key, cogrouped.value);
                }
            }
        }
    }
}
// File: src/main/java/bbejeck/chapter_6/processor/cogrouping/CogroupingSystemTimeProcessor.java
package bbejeck.chapter_6.processor.cogrouping;

import bbejeck.model.ClickEvent;
import bbejeck.model.StockTransaction;
import bbejeck.util.collection.Tuple;
import org.apache.kafka.streams.processor.AbstractProcessor;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.state.KeyValueStore;

import java.util.ArrayList;
import java.util.List;

import static org.apache.kafka.streams.processor.PunctuationType.WALL_CLOCK_TIME;

/**
 * Variant of {@code CogroupingProcessor} that schedules its punctuation on
 * WALL_CLOCK_TIME: results are flushed every 15 seconds of system time regardless
 * of whether new records (and hence stream time) are advancing.
 *
 * User: Bill Bejeck
 * Date: 8/12/17
 * Time: 10:54 AM
 */
public class CogroupingSystemTimeProcessor extends AbstractProcessor<String, Tuple<ClickEvent, StockTransaction>> {

    // Per-key accumulator of (click events, stock transactions) since the last punctuation.
    private KeyValueStore<String, Tuple<List<ClickEvent>, List<StockTransaction>>> tupleStore;
    public static final String TUPLE_STORE_NAME = "tupleCoGroupStore";

    @Override
    @SuppressWarnings("unchecked")
    public void init(ProcessorContext context) {
        super.init(context);
        // Raw cast required: getStateStore returns the untyped StateStore interface.
        tupleStore = (KeyValueStore) context().getStateStore(TUPLE_STORE_NAME);
        CogroupingPunctuator punctuator = new CogroupingPunctuator(tupleStore, context());
        // WALL_CLOCK_TIME: fires on system time, even with no incoming records.
        context().schedule(15000L, WALL_CLOCK_TIME, punctuator);
    }

    /**
     * Appends the non-null side of {@code value} to the key's accumulated lists.
     */
    @Override
    public void process(String key, Tuple<ClickEvent, StockTransaction> value) {
        Tuple<List<ClickEvent>, List<StockTransaction>> cogroupedTuple = tupleStore.get(key);
        if (cogroupedTuple == null) {
            cogroupedTuple = Tuple.of(new ArrayList<>(), new ArrayList<>());
        }

        if (value._1 != null) {
            cogroupedTuple._1.add(value._1);
        }

        if (value._2 != null) {
            cogroupedTuple._2.add(value._2);
        }

        tupleStore.put(key, cogroupedTuple);
    }
}
// File: src/main/java/bbejeck/chapter_6/processor/cogrouping/StockTransactionProcessor.java
package bbejeck.chapter_6.processor.cogrouping;

import bbejeck.model.ClickEvent;
import bbejeck.model.StockTransaction;
import bbejeck.util.collection.Tuple;
import org.apache.kafka.streams.processor.AbstractProcessor;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.state.KeyValueStore;

import java.util.ArrayList;
import java.util.List;

/**
 * Adapts a raw {@link StockTransaction} stream for the co-grouping processor by wrapping
 * each keyed transaction into the "right side" of a (ClickEvent, StockTransaction) tuple.
 * Records with a null key are dropped. The redundant no-op init() override (and its
 * misleading @SuppressWarnings("unchecked")) has been removed — AbstractProcessor's
 * default behavior is identical.
 */
public class StockTransactionProcessor extends AbstractProcessor<String, StockTransaction> {

    @Override
    public void process(String key, StockTransaction value) {
        if (key != null) {
            // Left side (_1) is null: downstream CogroupingProcessor appends only the non-null side.
            Tuple<ClickEvent, StockTransaction> tuple = Tuple.of(null, value);
            context().forward(key, tuple);
        }
    }
}

// ------------------------------------------------------------------------------------
// File: src/main/java/bbejeck/chapter_6/punctuator/StockPerformancePunctuator.java
package bbejeck.chapter_6.punctuator;

import bbejeck.model.StockPerformance;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.Punctuator;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.KeyValueStore;

/**
 * On each punctuation, scans the stock-performance store and forwards every symbol whose
 * price or volume differential meets the configured threshold.
 *
 * User: Bill Bejeck
 * Date: 8/14/17
 * Time: 7:06 PM
 */
public class StockPerformancePunctuator implements Punctuator {

    private final double differentialThreshold;
    private final ProcessorContext context;
    private final KeyValueStore<String, StockPerformance> keyValueStore;

    public StockPerformancePunctuator(double differentialThreshold,
                                      ProcessorContext context,
                                      KeyValueStore<String, StockPerformance> keyValueStore) {
        this.differentialThreshold = differentialThreshold;
        this.context = context;
        this.keyValueStore = keyValueStore;
    }

    @Override
    public void punctuate(long timestamp) {
        KeyValueIterator<String, StockPerformance> performanceIterator = keyValueStore.all();
        // Close the store iterator when done — the original leaked it on every punctuation.
        try {
            while (performanceIterator.hasNext()) {
                KeyValue<String, StockPerformance> keyValue = performanceIterator.next();
                String key = keyValue.key;
                StockPerformance stockPerformance = keyValue.value;

                if (stockPerformance != null
                        && (stockPerformance.priceDifferential() >= differentialThreshold
                            || stockPerformance.volumeDifferential() >= differentialThreshold)) {
                    context.forward(key, stockPerformance);
                }
            }
        } finally {
            performanceIterator.close();
        }
    }
}
// File: src/main/java/bbejeck/chapter_6/transformer/StockPerformanceMultipleValuesTransformer.java
package bbejeck.chapter_6.transformer;

import bbejeck.model.StockPerformance;
import bbejeck.model.StockTransaction;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.Transformer;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.PunctuationType;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.KeyValueStore;

import java.time.Instant;
import java.util.ArrayList;
import java.util.List;

/**
 * Accumulates per-symbol stock performance stats in a state store; transform() itself
 * emits nothing (returns null). The scheduled (deprecated-style) punctuate() gathers
 * every symbol exceeding the differential threshold and emits them as a single batch
 * under a null key.
 */
public class StockPerformanceMultipleValuesTransformer
        implements Transformer<String, StockTransaction, KeyValue<String, List<KeyValue<String, StockPerformance>>>> {

    private final String stateStoreName;
    private final double differentialThreshold;
    private ProcessorContext processorContext;
    private KeyValueStore<String, StockPerformance> keyValueStore;

    public StockPerformanceMultipleValuesTransformer(String stateStoreName, double differentialThreshold) {
        this.stateStoreName = stateStoreName;
        this.differentialThreshold = differentialThreshold;
    }

    @SuppressWarnings("unchecked")
    @Override
    public void init(ProcessorContext processorContext) {
        this.processorContext = processorContext;
        keyValueStore = (KeyValueStore) this.processorContext.getStateStore(stateStoreName);
        // Route the stream-time punctuation to the (deprecated) punctuate method below.
        this.processorContext.schedule(15000, PunctuationType.STREAM_TIME, this::punctuate);
    }

    /**
     * Updates running price/volume statistics for {@code symbol}; always returns null
     * because results are emitted from punctuate() instead.
     */
    @Override
    public KeyValue<String, List<KeyValue<String, StockPerformance>>> transform(String symbol, StockTransaction transaction) {
        if (symbol != null) {
            StockPerformance stockPerformance = keyValueStore.get(symbol);

            if (stockPerformance == null) {
                stockPerformance = new StockPerformance();
            }

            stockPerformance.updatePriceStats(transaction.getSharePrice());
            stockPerformance.updateVolumeStats(transaction.getShares());
            stockPerformance.setLastUpdateSent(Instant.now());

            keyValueStore.put(symbol, stockPerformance);
        }
        return null;
    }

    /**
     * Collects all symbols whose price or volume differential crosses the threshold.
     *
     * @return a single batch KeyValue (null key) of qualifying entries, or null if none
     */
    @Override
    @SuppressWarnings("deprecation")
    public KeyValue<String, List<KeyValue<String, StockPerformance>>> punctuate(long timestamp) {
        List<KeyValue<String, StockPerformance>> stockPerformanceList = new ArrayList<>();
        KeyValueIterator<String, StockPerformance> performanceIterator = keyValueStore.all();
        // Close the store iterator when done — the original leaked it on every punctuation.
        try {
            while (performanceIterator.hasNext()) {
                KeyValue<String, StockPerformance> keyValue = performanceIterator.next();
                StockPerformance stockPerformance = keyValue.value;

                if (stockPerformance != null
                        && (stockPerformance.priceDifferential() >= differentialThreshold
                            || stockPerformance.volumeDifferential() >= differentialThreshold)) {
                    stockPerformanceList.add(keyValue);
                }
            }
        } finally {
            performanceIterator.close();
        }
        return stockPerformanceList.isEmpty() ? null : KeyValue.pair(null, stockPerformanceList);
    }

    @Override
    public void close() {
        // no-op: state store lifecycle is managed by Kafka Streams
    }
}
// File: src/main/java/bbejeck/chapter_6/transformer/StockPerformanceTransformer.java
package bbejeck.chapter_6.transformer;

import bbejeck.chapter_6.punctuator.StockPerformancePunctuator;
import bbejeck.model.StockPerformance;
import bbejeck.model.StockTransaction;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.Transformer;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.PunctuationType;
import org.apache.kafka.streams.state.KeyValueStore;

import java.time.Instant;

/**
 * Accumulates per-symbol stock performance stats in a state store; emission is delegated
 * to a {@link StockPerformancePunctuator} scheduled on stream time every 15 seconds.
 * transform() always returns null — downstream records originate from the punctuator.
 */
public class StockPerformanceTransformer implements Transformer<String, StockTransaction, KeyValue<String, StockPerformance>> {

    private final String stateStoreName;
    private final double differentialThreshold;
    private KeyValueStore<String, StockPerformance> keyValueStore;

    public StockPerformanceTransformer(String stateStoreName, double differentialThreshold) {
        this.stateStoreName = stateStoreName;
        this.differentialThreshold = differentialThreshold;
    }

    @SuppressWarnings("unchecked")
    @Override
    public void init(ProcessorContext processorContext) {
        keyValueStore = (KeyValueStore) processorContext.getStateStore(stateStoreName);
        StockPerformancePunctuator punctuator =
                new StockPerformancePunctuator(differentialThreshold, processorContext, keyValueStore);
        processorContext.schedule(15000, PunctuationType.STREAM_TIME, punctuator);
    }

    /**
     * Updates running price/volume statistics for {@code symbol}; null-keyed records are ignored.
     */
    @Override
    public KeyValue<String, StockPerformance> transform(String symbol, StockTransaction transaction) {
        if (symbol != null) {
            StockPerformance stockPerformance = keyValueStore.get(symbol);

            if (stockPerformance == null) {
                stockPerformance = new StockPerformance();
            }

            stockPerformance.updatePriceStats(transaction.getSharePrice());
            stockPerformance.updateVolumeStats(transaction.getShares());
            stockPerformance.setLastUpdateSent(Instant.now());

            keyValueStore.put(symbol, stockPerformance);
        }
        return null;
    }

    /**
     * Deprecated Transformer.punctuate is intentionally unsupported: scheduling uses the
     * Punctuator-based API instead.
     */
    @Override
    @SuppressWarnings("deprecation")
    public KeyValue<String, StockPerformance> punctuate(long l) {
        throw new UnsupportedOperationException("Should use the punctuate method on Punctuator");
    }

    @Override
    public void close() {
        // no-op
    }
}
// File: src/main/java/bbejeck/chapter_7/StockPerformanceStreamsAndProcessorMetricsApplication.java
package bbejeck.chapter_7;

import bbejeck.chapter_7.transformer.StockPerformanceMetricsTransformer;
import bbejeck.clients.producer.MockDataProducer;
import bbejeck.model.StockPerformance;
import bbejeck.model.StockTransaction;
import bbejeck.util.serde.StreamsSerdes;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.Consumed;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.processor.WallclockTimestampExtractor;
import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.StoreBuilder;
import org.apache.kafka.streams.state.Stores;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Collections;
import java.util.Map;
import java.util.Properties;

/**
 * Demo app: runs the stock-performance topology with DEBUG metrics recording enabled,
 * drives it with mock transactions for ~70 seconds, then dumps all non-trivial metrics
 * before shutting down.
 */
public class StockPerformanceStreamsAndProcessorMetricsApplication {

    private static final Logger LOG = LoggerFactory.getLogger(StockPerformanceStreamsAndProcessorMetricsApplication.class);

    public static void main(String[] args) throws Exception {

        StreamsConfig streamsConfig = new StreamsConfig(getProperties());
        Serde<String> stringSerde = Serdes.String();
        Serde<StockPerformance> stockPerformanceSerde = StreamsSerdes.StockPerformanceSerde();
        Serde<StockTransaction> stockTransactionSerde = StreamsSerdes.StockTransactionSerde();

        StreamsBuilder builder = new StreamsBuilder();

        String stocksStateStore = "stock-performance-store";
        double differentialThreshold = 0.05;

        // Bounded in-memory LRU store (100 entries) backing the transformer.
        KeyValueBytesStoreSupplier storeSupplier = Stores.lruMap(stocksStateStore, 100);
        StoreBuilder<KeyValueStore<String, StockPerformance>> storeBuilder =
                Stores.keyValueStoreBuilder(storeSupplier, Serdes.String(), stockPerformanceSerde);

        builder.addStateStore(storeBuilder);

        builder.stream("stock-transactions", Consumed.with(stringSerde, stockTransactionSerde))
               .transform(() -> new StockPerformanceMetricsTransformer(stocksStateStore, differentialThreshold), stocksStateStore)
               .peek((k, v) -> LOG.info("[stock-performance] key: {} value: {}", k, v))
               .to("stock-performance", Produced.with(stringSerde, stockPerformanceSerde));

        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig);
        MockDataProducer.produceStockTransactionsWithKeyFunction(50, 50, 25, StockTransaction::getSymbol);
        LOG.info("Stock Analysis KStream/Process API Metrics App Started");
        kafkaStreams.cleanUp();
        kafkaStreams.start();

        // try/finally so the streams instance and mock producer are always released,
        // even if the sleep is interrupted or metric dumping throws.
        try {
            Thread.sleep(70000);

            LOG.info("Shutting down the Stock KStream/Process API Analysis Metrics App now");
            for (Map.Entry<MetricName, ? extends Metric> metricNameEntry : kafkaStreams.metrics().entrySet()) {
                Metric metric = metricNameEntry.getValue();
                MetricName metricName = metricNameEntry.getKey();
                // Skip metrics that never recorded anything.
                if (!metric.metricValue().equals(0.0) && !metric.metricValue().equals(Double.NEGATIVE_INFINITY)) {
                    LOG.info("MetricName {}", metricName.name());
                    LOG.info(" = {}", metric.metricValue());
                }
            }
        } finally {
            kafkaStreams.close();
            MockDataProducer.shutdown();
        }
    }

    private static Properties getProperties() {
        Properties props = new Properties();
        props.put(StreamsConfig.CLIENT_ID_CONFIG, "ks-stats-stock-analysis-client");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "ks-stats-stock-analysis-group");
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "ks-stats-stock-analysis-appid");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, 1);
        // DEBUG recording level is required for the node-level latency/throughput sensors.
        props.put(StreamsConfig.METRICS_RECORDING_LEVEL_CONFIG, "DEBUG");
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, WallclockTimestampExtractor.class);
        props.put(StreamsConfig.consumerPrefix(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG),
                  Collections.singletonList(bbejeck.chapter_7.interceptors.StockTransactionConsumerInterceptor.class));
        return props;
    }
}
// File: src/main/java/bbejeck/chapter_7/interceptors/StockTransactionConsumerInterceptor.java
package bbejeck.chapter_7.interceptors;

import org.apache.kafka.clients.consumer.ConsumerInterceptor;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Iterator;
import java.util.Map;

/**
 * Bare-bones implementation of a ConsumerInterceptor that simply logs results.
 *
 * Using Object, Object as we'll get byte[] for the keys and values, hence we won't inspect the
 * messages. If you want to inspect the messages you'll need to deserialize - inspect - serialize the
 * messages before returning.
 */
public class StockTransactionConsumerInterceptor implements ConsumerInterceptor<Object, Object> {

    private static final Logger LOG = LoggerFactory.getLogger(StockTransactionConsumerInterceptor.class);

    public StockTransactionConsumerInterceptor() {
        LOG.info("Built StockTransactionConsumerInterceptor");
    }

    /** Logs every intercepted batch and passes it through unchanged. */
    @Override
    public ConsumerRecords<Object, Object> onConsume(ConsumerRecords<Object, Object> consumerRecords) {
        LOG.info("Intercepted ConsumerRecords {}", buildMessage(consumerRecords.iterator()));
        return consumerRecords;
    }

    @Override
    public void onCommit(Map<TopicPartition, OffsetAndMetadata> map) {
        LOG.info("Commit information {}", map);
    }

    @Override
    public void close() {
        // no resources to release
    }

    @Override
    public void configure(Map<String, ?> map) {
        // no configuration needed
    }

    // Concatenates record toString()s for a single log line.
    private String buildMessage(Iterator<ConsumerRecord<Object, Object>> consumerRecords) {
        StringBuilder builder = new StringBuilder();
        while (consumerRecords.hasNext()) {
            builder.append(consumerRecords.next());
        }
        return builder.toString();
    }
}
// File: src/main/java/bbejeck/chapter_7/interceptors/ZMartProducerInterceptor.java
package bbejeck.chapter_7.interceptors;

import org.apache.kafka.clients.producer.ProducerInterceptor;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Map;

/**
 * Logging-only ProducerInterceptor: logs each outgoing record and each broker
 * acknowledgement (or failure); never modifies records.
 */
public class ZMartProducerInterceptor implements ProducerInterceptor<Object, Object> {

    private static final Logger LOG = LoggerFactory.getLogger(ZMartProducerInterceptor.class);

    @Override
    public ProducerRecord<Object, Object> onSend(ProducerRecord<Object, Object> record) {
        LOG.info("ProducerRecord being sent out {} ", record);
        return record;
    }

    @Override
    public void onAcknowledgement(RecordMetadata metadata, Exception exception) {
        if (exception != null) {
            // Pass the throwable as the trailing argument (no placeholder) so SLF4J
            // logs the full stack trace instead of just toString().
            LOG.warn("Exception encountered producing record", exception);
        } else {
            LOG.info("record has been acknowledged {} ", metadata);
        }
    }

    @Override
    public void close() {
        // no resources to release
    }

    @Override
    public void configure(Map<String, ?> configs) {
        // no configuration needed
    }
}
// File: src/main/java/bbejeck/chapter_7/restore/LoggingStateRestoreListener.java
package bbejeck.chapter_7.restore;

import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.streams.processor.StateRestoreListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Logs state-store restoration progress per topic-partition, including percentage
 * complete per restored batch. Concurrent maps because restore callbacks can arrive
 * from multiple stream threads.
 *
 * User: Bill Bejeck
 * Date: 8/19/17
 * Time: 7:54 PM
 */
public class LoggingStateRestoreListener implements StateRestoreListener {

    private static final Logger LOG = LoggerFactory.getLogger(LoggingStateRestoreListener.class);
    private final Map<TopicPartition, Long> totalToRestore = new ConcurrentHashMap<>();
    private final Map<TopicPartition, Long> restoredSoFar = new ConcurrentHashMap<>();

    @Override
    public void onRestoreStart(TopicPartition topicPartition, String store, long start, long end) {
        long toRestore = end - start;
        totalToRestore.put(topicPartition, toRestore);
        LOG.info("Starting restoration for {} on topic-partition {} total to restore {}", store, topicPartition, toRestore);
    }

    @Override
    public void onBatchRestored(TopicPartition topicPartition, String store, long start, long batchCompleted) {
        // DecimalFormat is not thread-safe, so it is created per call rather than shared.
        NumberFormat formatter = new DecimalFormat("#.##");

        long currentProgress = batchCompleted + restoredSoFar.getOrDefault(topicPartition, 0L);
        double percentComplete = (double) currentProgress / totalToRestore.get(topicPartition);

        LOG.info("Completed {} for {}% of total restoration for {} on {}",
                 batchCompleted, formatter.format(percentComplete * 100.00), store, topicPartition);
        restoredSoFar.put(topicPartition, currentProgress);
    }

    @Override
    public void onRestoreEnd(TopicPartition topicPartition, String store, long totalRestored) {
        LOG.info("Restoration completed for {} on topic-partition {}", store, topicPartition);
        // Reset so a subsequent restoration of the same partition starts from zero.
        restoredSoFar.put(topicPartition, 0L);
    }
}
// File: src/main/java/bbejeck/chapter_7/transformer/StockPerformanceMetricsTransformer.java
package bbejeck.chapter_7.transformer;

import bbejeck.model.StockPerformance;
import bbejeck.model.StockTransaction;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.Transformer;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.PunctuationType;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.KeyValueStore;

import java.time.Instant;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Stock-performance transformer instrumented with a custom latency/throughput sensor
 * around the stats-update calculation. Emission happens from the wall-clock punctuation
 * (doPunctuate); transform() always returns null.
 */
public class StockPerformanceMetricsTransformer implements Transformer<String, StockTransaction, KeyValue<String, StockPerformance>> {

    private String stocksStateStore = "stock-performance-store";
    private KeyValueStore<String, StockPerformance> keyValueStore;
    private double differentialThreshold = 0.05;
    private ProcessorContext processorContext;
    private Sensor metricsSensor;
    // Gives each transformer instance a unique sensor/node name across stream threads.
    private static final AtomicInteger count = new AtomicInteger(1);

    public StockPerformanceMetricsTransformer(String stocksStateStore, double differentialThreshold) {
        this.stocksStateStore = stocksStateStore;
        this.differentialThreshold = differentialThreshold;
    }

    @Override
    @SuppressWarnings("unchecked")
    public void init(final ProcessorContext processorContext) {
        keyValueStore = (KeyValueStore) processorContext.getStateStore(stocksStateStore);
        this.processorContext = processorContext;

        this.processorContext.schedule(5000, PunctuationType.WALL_CLOCK_TIME, this::doPunctuate);

        final String tagKey = "task-id";
        final String tagValue = processorContext.taskId().toString();
        final String nodeName = "StockPerformanceProcessor_" + count.getAndIncrement();
        // DEBUG-level sensor: only records when metrics.recording.level=DEBUG.
        metricsSensor = processorContext.metrics().addLatencyAndThroughputSensor("transformer-node",
                nodeName, "stock-performance-calculation",
                Sensor.RecordingLevel.DEBUG,
                tagKey,
                tagValue);
    }

    /**
     * Updates per-symbol stats, recording the calculation latency on the custom sensor.
     * Always returns null; output is produced by doPunctuate.
     */
    @Override
    public KeyValue<String, StockPerformance> transform(String symbol, StockTransaction stockTransaction) {
        if (symbol != null) {
            StockPerformance stockPerformance = keyValueStore.get(symbol);

            if (stockPerformance == null) {
                stockPerformance = new StockPerformance();
            }

            long start = System.nanoTime();
            stockPerformance.updatePriceStats(stockTransaction.getSharePrice());
            stockPerformance.updateVolumeStats(stockTransaction.getShares());
            stockPerformance.setLastUpdateSent(Instant.now());
            long end = System.nanoTime();

            processorContext.metrics().recordLatency(metricsSensor, start, end);

            keyValueStore.put(symbol, stockPerformance);
        }
        return null;
    }

    // Forwards every symbol crossing the differential threshold on each wall-clock tick.
    private void doPunctuate(long timestamp) {
        KeyValueIterator<String, StockPerformance> performanceIterator = keyValueStore.all();
        // Close the store iterator when done — the original leaked it on every punctuation.
        try {
            while (performanceIterator.hasNext()) {
                KeyValue<String, StockPerformance> keyValue = performanceIterator.next();
                String key = keyValue.key;
                StockPerformance stockPerformance = keyValue.value;

                if (stockPerformance != null
                        && (stockPerformance.priceDifferential() >= differentialThreshold
                            || stockPerformance.volumeDifferential() >= differentialThreshold)) {
                    processorContext.forward(key, stockPerformance);
                }
            }
        } finally {
            performanceIterator.close();
        }
    }

    /** Deprecated API intentionally unsupported; scheduling uses doPunctuate instead. */
    @Override
    @SuppressWarnings("deprecation")
    public KeyValue<String, StockPerformance> punctuate(long l) {
        throw new UnsupportedOperationException("Should not call punctuate");
    }

    @Override
    public void close() {
        // no resources to release
    }
}
10 | import org.apache.kafka.streams.StreamsBuilder; 11 | import org.apache.kafka.streams.Topology; 12 | import org.apache.kafka.streams.kstream.Produced; 13 | import org.apache.kafka.streams.state.KeyValueBytesStoreSupplier; 14 | import org.apache.kafka.streams.state.KeyValueStore; 15 | import org.apache.kafka.streams.state.StoreBuilder; 16 | import org.apache.kafka.streams.state.Stores; 17 | 18 | /** 19 | * User: Bill Bejeck 20 | * Date: 9/10/17 21 | * Time: 3:54 PM 22 | */ 23 | public class StockPerformanceStreamsProcessorTopology { 24 | 25 | public static Topology build() { 26 | 27 | Serde stringSerde = Serdes.String(); 28 | Serde stockPerformanceSerde = StreamsSerdes.StockPerformanceSerde(); 29 | Serde stockTransactionSerde = StreamsSerdes.StockTransactionSerde(); 30 | 31 | 32 | StreamsBuilder builder = new StreamsBuilder(); 33 | 34 | String stocksStateStore = "stock-performance-store"; 35 | double differentialThreshold = 0.02; 36 | 37 | KeyValueBytesStoreSupplier storeSupplier = Stores.lruMap(stocksStateStore, 100); 38 | StoreBuilder> storeBuilder = Stores.keyValueStoreBuilder(storeSupplier, Serdes.String(), stockPerformanceSerde); 39 | 40 | builder.addStateStore(storeBuilder); 41 | 42 | builder.stream("stock-transactions", Consumed.with(stringSerde, stockTransactionSerde)) 43 | .transform(() -> new StockPerformanceTransformer(stocksStateStore, differentialThreshold), stocksStateStore) 44 | .to("stock-performance", Produced.with(stringSerde, stockPerformanceSerde)); 45 | 46 | return builder.build(); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/chapter_8/ZMartTopology.java: -------------------------------------------------------------------------------- 1 | package bbejeck.chapter_8; 2 | 3 | import bbejeck.model.Purchase; 4 | import bbejeck.model.PurchasePattern; 5 | import bbejeck.model.RewardAccumulator; 6 | import bbejeck.util.serde.StreamsSerdes; 7 | import 
org.apache.kafka.common.serialization.Serde; 8 | import org.apache.kafka.common.serialization.Serdes; 9 | import org.apache.kafka.streams.Consumed; 10 | import org.apache.kafka.streams.StreamsBuilder; 11 | import org.apache.kafka.streams.Topology; 12 | import org.apache.kafka.streams.kstream.KStream; 13 | import org.apache.kafka.streams.kstream.Produced; 14 | 15 | /** 16 | * User: Bill Bejeck 17 | * Date: 9/9/17 18 | * Time: 9:52 AM 19 | */ 20 | public class ZMartTopology { 21 | 22 | public static Topology build() { 23 | 24 | Serde purchaseSerde = StreamsSerdes.PurchaseSerde(); 25 | Serde purchasePatternSerde = StreamsSerdes.PurchasePatternSerde(); 26 | Serde rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde(); 27 | Serde stringSerde = Serdes.String(); 28 | 29 | StreamsBuilder streamsBuilder = new StreamsBuilder(); 30 | 31 | KStream purchaseKStream = streamsBuilder.stream("transactions", Consumed.with(stringSerde, purchaseSerde)) 32 | .mapValues(p -> Purchase.builder(p).maskCreditCard().build()); 33 | 34 | KStream patternKStream = purchaseKStream.mapValues(purchase -> PurchasePattern.builder(purchase).build()); 35 | 36 | patternKStream.to("patterns", Produced.with(stringSerde,purchasePatternSerde)); 37 | 38 | 39 | KStream rewardsKStream = purchaseKStream.mapValues(purchase -> RewardAccumulator.builder(purchase).build()); 40 | 41 | 42 | rewardsKStream.to("rewards", Produced.with(stringSerde,rewardAccumulatorSerde)); 43 | 44 | purchaseKStream.to("purchases", Produced.with(Serdes.String(),purchaseSerde)); 45 | 46 | return streamsBuilder.build(); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/chapter_9/DeserializerErrorHandler.java: -------------------------------------------------------------------------------- 1 | package bbejeck.chapter_9; 2 | 3 | import org.apache.kafka.clients.consumer.ConsumerRecord; 4 | import org.apache.kafka.streams.errors.DeserializationExceptionHandler; 5 
// File: src/main/java/bbejeck/chapter_9/DeserializerErrorHandler.java
package bbejeck.chapter_9;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.streams.errors.DeserializationExceptionHandler;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Map;

/**
 * Deserialization exception handler that logs the failing record and tells Streams
 * to skip it and CONTINUE processing, rather than shutting the application down.
 */
public class DeserializerErrorHandler implements DeserializationExceptionHandler {

    private static final Logger LOG = LoggerFactory.getLogger(DeserializerErrorHandler.class);

    @Override
    public DeserializationHandlerResponse handle(ProcessorContext context, ConsumerRecord<byte[], byte[]> record, Exception exception) {
        LOG.error("Received a deserialize error for {} cause {}", record, exception);
        // Skip the poison record; do not fail the stream thread.
        return DeserializationHandlerResponse.CONTINUE;
    }

    @Override
    public void configure(Map<String, ?> map) {
        // no configuration needed
    }
}
private static final Logger LOG = LoggerFactory.getLogger(StockCountsStreamsConnectIntegrationApplication.class); 27 | 28 | public static void main(String[] args) throws Exception { 29 | 30 | 31 | StreamsConfig streamsConfig = new StreamsConfig(getProperties()); 32 | Serde stringSerde = Serdes.String(); 33 | Serde stockTransactionSerde = StreamsSerdes.StockTransactionSerde(); 34 | Serde longSerde = Serdes.Long(); 35 | 36 | StreamsBuilder builder = new StreamsBuilder(); 37 | 38 | 39 | 40 | builder.stream("dbTxnTRANSACTIONS", Consumed.with(stringSerde, stockTransactionSerde)) 41 | .peek((k, v)-> LOG.info("transactions from database key {} value {}",k, v)) 42 | .groupByKey(Serialized.with(stringSerde, stockTransactionSerde)) 43 | .aggregate(()-> 0L,(symb, stockTxn, numShares) -> numShares + stockTxn.getShares(), 44 | Materialized.with(stringSerde, longSerde)).toStream() 45 | .peek((k,v) -> LOG.info("Aggregated stock sales for {} {}",k, v)) 46 | .to( "stock-counts", Produced.with(stringSerde, longSerde)); 47 | 48 | 49 | KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), streamsConfig); 50 | CountDownLatch doneSignal = new CountDownLatch(1); 51 | 52 | Runtime.getRuntime().addShutdownHook(new Thread(()-> { 53 | doneSignal.countDown(); 54 | LOG.info("Shutting down the Stock Analysis KStream Connect App Started now"); 55 | kafkaStreams.close(); 56 | })); 57 | 58 | 59 | LOG.info("Stock Analysis KStream Connect App Started"); 60 | kafkaStreams.cleanUp(); 61 | kafkaStreams.start(); 62 | doneSignal.await(); 63 | 64 | } 65 | 66 | private static Properties getProperties() { 67 | Properties props = new Properties(); 68 | props.put(StreamsConfig.CLIENT_ID_CONFIG, "ks-connect-stock-analysis-client"); 69 | props.put(ConsumerConfig.GROUP_ID_CONFIG, "ks-connect-stock-analysis-group"); 70 | props.put(StreamsConfig.APPLICATION_ID_CONFIG, "ks-connect-stock-analysis-appid"); 71 | props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); 72 | 
props.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, 1); 73 | props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest"); 74 | props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName()); 75 | props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName()); 76 | props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, WallclockTimestampExtractor.class); 77 | props.put(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG, DeserializerErrorHandler.class); 78 | props.put(StreamsConfig.consumerPrefix(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG), Collections.singletonList(bbejeck.chapter_7.interceptors.StockTransactionConsumerInterceptor.class)); 79 | return props; 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/chapter_9/StockPerformanceInteractiveQueryDataProducer.java: -------------------------------------------------------------------------------- 1 | package bbejeck.chapter_9; 2 | 3 | 4 | import bbejeck.clients.producer.MockDataProducer; 5 | 6 | public class StockPerformanceInteractiveQueryDataProducer { 7 | 8 | public static void main(String[] args) { 9 | MockDataProducer.produceStockTransactionsForIQ(); 10 | Runtime.getRuntime().addShutdownHook(new Thread(MockDataProducer::shutdown)); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/chapter_9/restore/StateRestoreHttpReporter.java: -------------------------------------------------------------------------------- 1 | package bbejeck.chapter_9.restore; 2 | 3 | import bbejeck.webserver.InteractiveQueryServer; 4 | import org.apache.kafka.common.TopicPartition; 5 | import org.apache.kafka.streams.processor.StateRestoreListener; 6 | 7 | public class StateRestoreHttpReporter implements StateRestoreListener { 8 | 9 | private final InteractiveQueryServer interactiveQueryServer; 10 | 11 | 
/**
 * Immutable value object holding the settings needed to configure a Kafka
 * consumer. Instances are created through {@link #newBuilder()}; the builder
 * defaults offsetReset to "latest" and bootstrapServers to "localhost:9092".
 */
public class ConsumerProperties {

    private String keyDeserializer;
    private String valueDeserializer;
    private String topics;
    private Pattern topicPattern;
    private String offsetReset;
    private String bootstrapServers;
    private String groupId;

    private ConsumerProperties(Builder builder) {
        this.keyDeserializer = builder.keyDeserializer;
        this.valueDeserializer = builder.valueDeserializer;
        this.topics = builder.topics;
        this.topicPattern = builder.topicPattern;
        this.offsetReset = builder.offsetReset;
        this.bootstrapServers = builder.bootstrapServers;
        this.groupId = builder.groupId;
    }

    public static Builder newBuilder() {
        return new Builder();
    }

    public String getKeyDeserializer() {
        return keyDeserializer;
    }

    public String getValueDeserializer() {
        return valueDeserializer;
    }

    /** Comma-separated topic names, exactly as supplied to the builder. */
    public String getTopics() {
        return topics;
    }

    public Pattern getTopicPattern() {
        return topicPattern;
    }

    public String getOffsetReset() {
        return offsetReset;
    }

    public String getBootstrapServers() {
        return bootstrapServers;
    }

    public String getGroupId() {
        return groupId;
    }

    /**
     * Translates these settings into the {@link Properties} format that
     * KafkaConsumer expects; auto-commit is always enabled at a 3 s interval.
     */
    public Properties getProperties() {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", bootstrapServers);
        properties.put("group.id", groupId);
        properties.put("auto.offset.reset", offsetReset);
        properties.put("enable.auto.commit", "true");
        properties.put("auto.commit.interval.ms", "3000");
        properties.put("key.deserializer", keyDeserializer);
        properties.put("value.deserializer", valueDeserializer);
        return properties;
    }

    /** Fluent builder for {@link ConsumerProperties}. */
    public static final class Builder {
        private String keyDeserializer;
        private String valueDeserializer;
        private String topics;
        private Pattern topicPattern;
        private String offsetReset = "latest";
        private String bootstrapServers = "localhost:9092";
        private String groupId;

        private Builder() {
        }

        public Builder withKeyDeserializer(String val) {
            this.keyDeserializer = val;
            return this;
        }

        public Builder withValueDeserializer(String val) {
            this.valueDeserializer = val;
            return this;
        }

        public Builder withTopics(String val) {
            this.topics = val;
            return this;
        }

        public Builder withTopicPattern(Pattern val) {
            this.topicPattern = val;
            return this;
        }

        public Builder withOffsetReset(String val) {
            this.offsetReset = val;
            return this;
        }

        public Builder withBootstrapServers(String val) {
            this.bootstrapServers = val;
            return this;
        }

        public Builder withGroupId(String val) {
            this.groupId = val;
            return this;
        }

        public ConsumerProperties build() {
            return new ConsumerProperties(this);
        }
    }
}
-------------------------------------------------------------------------------- /src/main/java/bbejeck/clients/consumer/TypedConsoleConsumer.java: -------------------------------------------------------------------------------- 1 | package bbejeck.clients.consumer; 2 | 3 | 4 | import org.apache.kafka.clients.consumer.Consumer; 5 | import org.apache.kafka.clients.consumer.ConsumerRecord; 6 | import org.apache.kafka.clients.consumer.ConsumerRecords; 7 | import org.apache.kafka.clients.consumer.KafkaConsumer; 8 | 9 | import java.util.Arrays; 10 | import java.util.concurrent.ExecutorService; 11 | import java.util.concurrent.Executors; 12 | import java.util.concurrent.TimeUnit; 13 | 14 | 15 | public class TypedConsoleConsumer { 16 | 17 | private Consumer consumer; 18 | private ConsumerProperties consumerProperties; 19 | private volatile boolean keepConsuming = true; 20 | private ExecutorService executorService = Executors.newSingleThreadExecutor(); 21 | 22 | public TypedConsoleConsumer(ConsumerProperties consumerProperties) { 23 | this.consumerProperties = consumerProperties; 24 | } 25 | 26 | 27 | public TypedConsoleConsumer buildConsumer() { 28 | consumer = new KafkaConsumer<>(consumerProperties.getProperties()); 29 | String[] topics = consumerProperties.getTopics().split(","); 30 | consumer.subscribe(Arrays.asList(topics)); 31 | 32 | return this; 33 | } 34 | 35 | public void start() { 36 | System.out.println("Consumer starting..."); 37 | Runnable run = () -> { 38 | while (keepConsuming) { 39 | ConsumerRecords records = consumer.poll(5000); 40 | for (ConsumerRecord record : records) { 41 | String message = String.format("TypedConsoleConsumer: key = %s value = %s with offset = %d partition = %d", 42 | record.key(), record.value(), record.offset(), record.partition()); 43 | System.out.println(message); 44 | } 45 | } 46 | consumer.close(); 47 | }; 48 | executorService.submit(run); 49 | } 50 | 51 | 52 | public void stop() throws Exception { 53 | 
System.out.println("Starting shutdown of console consumer"); 54 | keepConsuming = false; 55 | executorService.awaitTermination(5, TimeUnit.SECONDS); 56 | executorService.shutdownNow(); 57 | System.out.println("Shut down now"); 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/clients/producer/KeyValueMultiTopicConsoleProducer.java: -------------------------------------------------------------------------------- 1 | package bbejeck.clients.producer; 2 | 3 | import org.apache.kafka.clients.producer.Callback; 4 | import org.apache.kafka.clients.producer.KafkaProducer; 5 | import org.apache.kafka.clients.producer.Producer; 6 | import org.apache.kafka.clients.producer.ProducerRecord; 7 | import org.slf4j.Logger; 8 | import org.slf4j.LoggerFactory; 9 | 10 | import java.io.BufferedReader; 11 | import java.io.InputStreamReader; 12 | import java.util.Arrays; 13 | import java.util.Properties; 14 | 15 | 16 | /** 17 | * This class is used to specifically publish key-value pairs to the 18 | * specified topic(s) via a command line argument. This Producer only sends String keys and 19 | * String values. 20 | * 21 | * This class uses a ':' to split the key value pair entered on the command line. 22 | * 23 | * In the book this class was used in the start of Chapter 5 to demonstrate difference in 24 | * processing between a KTable and a KStream 25 | * 26 | * To stop, type quit. 
27 | */ 28 | public class KeyValueMultiTopicConsoleProducer { 29 | 30 | private static final Logger LOG = LoggerFactory.getLogger(KeyValueMultiTopicConsoleProducer.class); 31 | 32 | public static void main(String[] args) throws Exception { 33 | 34 | if (args.length == 0) { 35 | LOG.info("Please specify a topic or comma separated list of topics"); 36 | System.exit(1); 37 | } 38 | 39 | String[] topics = args[0].split(","); 40 | 41 | Properties properties = new Properties(); 42 | properties.put("bootstrap.servers", "localhost:9092"); 43 | properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); 44 | properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); 45 | properties.put("acks", "1"); 46 | properties.put("retries", "3"); 47 | 48 | try(Producer producer = new KafkaProducer<>(properties)) { 49 | 50 | Callback callback = (metadata, exception) -> { 51 | if (exception != null) { 52 | LOG.error("Error producing message", exception); 53 | } 54 | }; 55 | 56 | LOG.info("This is a key-value command line producer"); 57 | LOG.info("Enter messages in key:value format, type 'quit' to exit"); 58 | LOG.info("Sending messages to topics {}", Arrays.toString(topics)); 59 | 60 | BufferedReader reader = new BufferedReader(new InputStreamReader(System.in)); 61 | 62 | String line = reader.readLine(); 63 | 64 | while (!(line.equalsIgnoreCase("quit"))) { 65 | 66 | String[] keyValue = line.split(":"); 67 | String key = keyValue[0]; 68 | String value = keyValue[1]; 69 | 70 | for (String topic : topics) { 71 | ProducerRecord record = new ProducerRecord<>(topic, key, value); 72 | producer.send(record, callback); 73 | } 74 | line = reader.readLine(); 75 | } 76 | } 77 | } 78 | 79 | 80 | } 81 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/collectors/FixedSizePriorityQueue.java: -------------------------------------------------------------------------------- 1 | 
/**
 * A priority queue with a fixed capacity. Once maxSize elements are held,
 * each further add evicts the comparator-greatest element (which may be the
 * element just added), so the queue retains the "top" maxSize elements seen.
 *
 * Backed by a TreeSet, so elements that compare equal are deduplicated.
 *
 * @param <T> element type; ordering comes entirely from the comparator
 */
public class FixedSizePriorityQueue<T> {

    private TreeSet<T> inner;
    private int maxSize;

    public FixedSizePriorityQueue(Comparator<T> comparator, int maxSize) {
        this.inner = new TreeSet<>(comparator);
        this.maxSize = maxSize;
    }

    /**
     * Adds element, then trims back to capacity by dropping the greatest
     * element if the add pushed the size over maxSize.
     *
     * @return this, for chaining
     */
    public FixedSizePriorityQueue<T> add(T element) {
        inner.add(element);
        if (inner.size() > maxSize) {
            inner.pollLast();  // evict the comparator-greatest element
        }
        return this;
    }

    /**
     * Removes element if present; a no-op otherwise.
     *
     * @return this, for chaining
     */
    public FixedSizePriorityQueue<T> remove(T element) {
        if (inner.contains(element)) {
            inner.remove(element);
        }
        return this;
    }

    /** Iterates the retained elements in ascending comparator order. */
    public Iterator<T> iterator() {
        return inner.iterator();
    }

    @Override
    public String toString() {
        // Bug fix: the closing '}' was missing, leaving the output unbalanced.
        return "FixedSizePriorityQueue{" +
                "QueueContents=" + inner +
                '}';
    }
}
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package bbejeck.collectors; 18 | 19 | 20 | import bbejeck.model.StockTransaction; 21 | 22 | public class StockTransactionCollector { 23 | 24 | private double amount; 25 | private String tickerSymbol; 26 | private int sharesPurchased; 27 | private int sharesSold; 28 | 29 | public StockTransactionCollector add(StockTransaction transaction){ 30 | if(tickerSymbol == null){ 31 | tickerSymbol = transaction.getSymbol(); 32 | } 33 | 34 | this.amount += transaction.getSharePrice(); 35 | if(transaction.getSector().equalsIgnoreCase("purchase")){ 36 | this.sharesPurchased += transaction.getShares(); 37 | } else{ 38 | this.sharesSold += transaction.getShares(); 39 | } 40 | return this; 41 | } 42 | 43 | @Override 44 | public String toString() { 45 | return "StockTransactionCollector{" + 46 | "amount=" + amount + 47 | ", tickerSymbol='" + tickerSymbol + '\'' + 48 | ", sharesPurchased=" + sharesPurchased + 49 | ", sharesSold=" + sharesSold + 50 | '}'; 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/model/BeerPurchase.java: -------------------------------------------------------------------------------- 1 | package bbejeck.model; 2 | 3 | 4 | public class BeerPurchase { 5 | 6 | private Currency currency; 7 | private double totalSale; 8 | private int numberCases; 9 | private String beerType; 10 | 11 | private BeerPurchase(Builder builder) { 12 | currency = builder.currency; 13 | totalSale = builder.totalSale; 14 | numberCases = builder.numberCases; 15 | beerType = builder.beerType; 16 | } 17 | 18 | public static Builder newBuilder() { 19 | return new Builder(); 20 | } 21 | 22 | public static Builder newBuilder(BeerPurchase copy) { 23 | Builder builder = new Builder(); 24 | builder.currency = copy.currency; 25 | builder.totalSale = copy.totalSale; 26 | builder.numberCases = 
copy.numberCases; 27 | builder.beerType = copy.beerType; 28 | return builder; 29 | } 30 | 31 | 32 | public Currency getCurrency() { 33 | return currency; 34 | } 35 | 36 | public double getTotalSale() { 37 | return totalSale; 38 | } 39 | 40 | public int getNumberCases() { 41 | return numberCases; 42 | } 43 | 44 | public String getBeerType() { 45 | return beerType; 46 | } 47 | 48 | @Override 49 | public String toString() { 50 | return "BeerPurchase{" + 51 | "currency=" + currency + 52 | ", totalSale=" + totalSale + 53 | ", numberCases=" + numberCases + 54 | ", beerType='" + beerType + '\'' + 55 | '}'; 56 | } 57 | 58 | 59 | public static final class Builder { 60 | private Currency currency; 61 | private double totalSale; 62 | private int numberCases; 63 | private String beerType; 64 | 65 | private Builder() { 66 | } 67 | 68 | public Builder currency(Currency val) { 69 | currency = val; 70 | return this; 71 | } 72 | 73 | public Builder totalSale(double val) { 74 | totalSale = val; 75 | return this; 76 | } 77 | 78 | public Builder numberCases(int val) { 79 | numberCases = val; 80 | return this; 81 | } 82 | 83 | public Builder beerType(String val) { 84 | beerType = val; 85 | return this; 86 | } 87 | 88 | public BeerPurchase build() { 89 | return new BeerPurchase(this); 90 | } 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/model/ClickEvent.java: -------------------------------------------------------------------------------- 1 | package bbejeck.model; 2 | 3 | import java.time.Instant; 4 | 5 | 6 | public class ClickEvent { 7 | 8 | private String symbol; 9 | private String link; 10 | private Instant timestamp; 11 | 12 | public ClickEvent(String symbol, String link, Instant timestamp) { 13 | this.symbol = symbol; 14 | this.link = link; 15 | this.timestamp = timestamp; 16 | } 17 | 18 | public String getSymbol() { 19 | return symbol; 20 | } 21 | 22 | public String getLink() { 23 | return link; 24 | } 
25 | 26 | public Instant getTimestamp() { 27 | return timestamp; 28 | } 29 | 30 | @Override 31 | public String toString() { 32 | return "ClickEvent{" + 33 | "symbol='" + symbol + '\'' + 34 | ", link='" + link + '\'' + 35 | ", timestamp=" + timestamp + 36 | '}'; 37 | } 38 | 39 | @Override 40 | public boolean equals(Object o) { 41 | if (this == o) return true; 42 | if (!(o instanceof ClickEvent)) return false; 43 | 44 | ClickEvent that = (ClickEvent) o; 45 | 46 | if (symbol != null ? !symbol.equals(that.symbol) : that.symbol != null) return false; 47 | if (link != null ? !link.equals(that.link) : that.link != null) return false; 48 | return timestamp != null ? timestamp.equals(that.timestamp) : that.timestamp == null; 49 | } 50 | 51 | @Override 52 | public int hashCode() { 53 | int result = symbol != null ? symbol.hashCode() : 0; 54 | result = 31 * result + (link != null ? link.hashCode() : 0); 55 | result = 31 * result + (timestamp != null ? timestamp.hashCode() : 0); 56 | return result; 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/model/CorrelatedPurchase.java: -------------------------------------------------------------------------------- 1 | package bbejeck.model; 2 | 3 | import java.util.Date; 4 | import java.util.List; 5 | 6 | /** 7 | * User: Bill Bejeck 8 | * Date: 11/20/16 9 | * Time: 7:37 PM 10 | */ 11 | public class CorrelatedPurchase { 12 | 13 | private String customerId; 14 | private List itemsPurchased; 15 | private double totalAmount; 16 | private Date firstPurchaseTime; 17 | private Date secondPurchaseTime; 18 | 19 | private CorrelatedPurchase(Builder builder) { 20 | customerId = builder.customerId; 21 | itemsPurchased = builder.itemsPurchased; 22 | totalAmount = builder.totalAmount; 23 | firstPurchaseTime = builder.firstPurchasedItem; 24 | secondPurchaseTime = builder.secondPurchasedItem; 25 | } 26 | 27 | public static Builder newBuilder() { 28 | return new Builder(); 29 | 
} 30 | 31 | public String getCustomerId() { 32 | return customerId; 33 | } 34 | 35 | public List getItemsPurchased() { 36 | return itemsPurchased; 37 | } 38 | 39 | public double getTotalAmount() { 40 | return totalAmount; 41 | } 42 | 43 | public Date getFirstPurchaseTime() { 44 | return firstPurchaseTime; 45 | } 46 | 47 | public Date getSecondPurchaseTime() { 48 | return secondPurchaseTime; 49 | } 50 | 51 | 52 | @Override 53 | public String toString() { 54 | return "CorrelatedPurchase{" + 55 | "customerId='" + customerId + '\'' + 56 | ", itemsPurchased=" + itemsPurchased + 57 | ", totalAmount=" + totalAmount + 58 | ", firstPurchaseTime=" + firstPurchaseTime + 59 | ", secondPurchaseTime=" + secondPurchaseTime + 60 | '}'; 61 | } 62 | 63 | public static final class Builder { 64 | private String customerId; 65 | private List itemsPurchased; 66 | private double totalAmount; 67 | private Date firstPurchasedItem; 68 | private Date secondPurchasedItem; 69 | 70 | private Builder() { 71 | } 72 | 73 | public Builder withCustomerId(String val) { 74 | customerId = val; 75 | return this; 76 | } 77 | 78 | public Builder withItemsPurchased(List val) { 79 | itemsPurchased = val; 80 | return this; 81 | } 82 | 83 | public Builder withTotalAmount(double val) { 84 | totalAmount = val; 85 | return this; 86 | } 87 | 88 | public Builder withFirstPurchaseDate(Date val) { 89 | firstPurchasedItem = val; 90 | return this; 91 | } 92 | 93 | public Builder withSecondPurchaseDate(Date val) { 94 | secondPurchasedItem = val; 95 | return this; 96 | } 97 | 98 | public CorrelatedPurchase build() { 99 | return new CorrelatedPurchase(this); 100 | } 101 | } 102 | } 103 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/model/Currency.java: -------------------------------------------------------------------------------- 1 | package bbejeck.model; 2 | 3 | 4 | public enum Currency { 5 | 6 | EURO(1.09), 7 | 8 | POUNDS(1.2929), 9 | 10 | DOLLARS(1.0); 11 | 
12 | 13 | Currency(double conversionRate) { 14 | this.conversionRate = conversionRate; 15 | } 16 | 17 | private double conversionRate; 18 | 19 | 20 | public double convertToDollars(double internationalAmount) { 21 | return internationalAmount/conversionRate; 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/model/CustomerTransactions.java: -------------------------------------------------------------------------------- 1 | package bbejeck.model; 2 | 3 | public class CustomerTransactions { 4 | 5 | private String sessionInfo; 6 | private double totalPrice = 0; 7 | private long totalShares = 0; 8 | 9 | 10 | public CustomerTransactions update(StockTransaction stockTransaction) { 11 | totalShares += stockTransaction.getShares(); 12 | totalPrice += stockTransaction.getSharePrice() * stockTransaction.getShares(); 13 | 14 | return this; 15 | } 16 | 17 | @Override 18 | public String toString() { 19 | return "avg txn=" + totalPrice / totalShares + " sessionInfo='" + sessionInfo; 20 | } 21 | 22 | public void setSessionInfo(String sessionInfo) { 23 | this.sessionInfo = sessionInfo; 24 | } 25 | 26 | public CustomerTransactions merge(CustomerTransactions other) { 27 | this.totalShares += other.totalShares; 28 | this.totalPrice += other.totalPrice; 29 | this.sessionInfo = other.sessionInfo; 30 | return this; 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/model/FinancialNews.java: -------------------------------------------------------------------------------- 1 | package bbejeck.model; 2 | 3 | /** 4 | * User: Bill Bejeck 5 | * Date: 3/21/17 6 | * Time: 11:14 PM 7 | */ 8 | public class FinancialNews { 9 | 10 | private String industry; 11 | private String news; 12 | 13 | public String getIndustry() { 14 | return industry; 15 | } 16 | 17 | public String getNews() { 18 | return news; 19 | } 20 | } 21 | 
/**
 * Mutable model of a publicly traded company whose price performs a random
 * walk: each call to {@link #updateStockPrice()} perturbs the price by a
 * random amount proportional to the configured volatility.
 */
public class PublicTradedCompany {
    private double volatility;
    private double lastSold;
    private String symbol;
    private String name;
    private String sector;
    private String industry;
    private double price;
    // NOTE(review): DecimalFormat uses the default locale; in comma-decimal
    // locales Double.parseDouble on its output would throw — confirm the
    // runtime locale before deploying.
    private NumberFormat formatter = new DecimalFormat("#0.00");

    /**
     * Bug fix: the volatility parameter was misspelled "voltility", so the
     * statement "this.volatility = volatility" read the (zero-initialized)
     * field and the supplied volatility was silently discarded, freezing the
     * random walk. The parameter is now spelled correctly and actually stored.
     */
    public PublicTradedCompany(double volatility, double lastSold, String symbol, String name, String sector, String industry) {
        this.volatility = volatility;
        this.lastSold = lastSold;
        this.symbol = symbol.toUpperCase();  // symbols are normalized to upper case
        this.name = name;
        this.sector = sector;
        this.industry = industry;
        this.price = lastSold;               // the walk starts from the last sold price
    }

    public String getSymbol() {
        return symbol;
    }

    public String getName() {
        return name;
    }

    public String getSector() {
        return sector;
    }

    public String getIndustry() {
        return industry;
    }

    /** Current price, rounded to two decimals via the formatter. */
    public double getPrice() {
        return Double.parseDouble(formatter.format(price));
    }

    /**
     * Perturbs the price by a random amount in [-price*volatility,
     * price*volatility + 1) and returns the new two-decimal price.
     */
    public double updateStockPrice() {
        double min = (price * -volatility);
        double max = (price * volatility);
        double randomNum = ThreadLocalRandom.current().nextDouble(min, max + 1);
        price = price + randomNum;
        return Double.parseDouble(formatter.format(price));
    }

}
/**
 * Composite key pairing a customer id with a purchase timestamp; equality
 * and hashing are value-based over both fields.
 *
 * NOTE(review): java.util.Date is mutable and is exposed directly by
 * getTransactionDate(); mutating it after the key is stored in a hashed
 * collection would corrupt lookups — confirm callers treat it as read-only.
 */
public class PurchaseKey {

    private String customerId;
    private Date transactionDate;

    public PurchaseKey(String customerId, Date transactionDate) {
        this.customerId = customerId;
        this.transactionDate = transactionDate;
    }

    public String getCustomerId() {
        return customerId;
    }

    public Date getTransactionDate() {
        return transactionDate;
    }

    /** Keys are equal when both the customer id and the date match (null-safe). */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof PurchaseKey)) {
            return false;
        }
        PurchaseKey other = (PurchaseKey) o;
        return Objects.equals(customerId, other.customerId)
                && Objects.equals(transactionDate, other.transactionDate);
    }

    @Override
    public int hashCode() {
        return Objects.hash(customerId, transactionDate);
    }
}
/**
 * Immutable projection of a Purchase used for purchase-pattern analysis:
 * where (zipCode), what (item), when (date), and the extended amount
 * (price * quantity). Instances are created only via the builders.
 *
 * User: Bill Bejeck
 * Date: 2/21/16
 * Time: 3:36 PM
 */
public class PurchasePattern {

    private String zipCode;
    private String item;
    private Date date;
    private double amount;


    private PurchasePattern(Builder builder) {
        zipCode = builder.zipCode;
        item = builder.item;
        date = builder.date;
        amount = builder.amount;

    }

    /** Empty builder; populate via the fluent setter methods. */
    public static Builder newBuilder() {
        return new Builder();
    }

    /** Builder pre-populated from a Purchase (amount = price * quantity). */
    public static Builder builder(Purchase purchase){
        return new Builder(purchase);

    }
    public String getZipCode() {
        return zipCode;
    }

    public String getItem() {
        return item;
    }

    public Date getDate() {
        return date;
    }

    public double getAmount() {
        return amount;
    }

    @Override
    public String toString() {
        return "PurchasePattern{" +
                "zipCode='" + zipCode + '\'' +
                ", item='" + item + '\'' +
                ", date=" + date +
                ", amount=" + amount +
                '}';
    }

    // NOTE(review): equals and hashCode both omit the date field, so two
    // patterns with different dates compare equal (they are at least
    // consistent with each other) — confirm this exclusion is intentional.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof PurchasePattern)) return false;

        PurchasePattern that = (PurchasePattern) o;

        if (Double.compare(that.amount, amount) != 0) return false;
        if (zipCode != null ? !zipCode.equals(that.zipCode) : that.zipCode != null) return false;
        return item != null ? item.equals(that.item) : that.item == null;
    }

    @Override
    public int hashCode() {
        int result;
        long temp;
        result = zipCode != null ? zipCode.hashCode() : 0;
        result = 31 * result + (item != null ? item.hashCode() : 0);
        // Double is folded in via its raw bit pattern, matching Double.hashCode.
        temp = Double.doubleToLongBits(amount);
        result = 31 * result + (int) (temp ^ (temp >>> 32));
        return result;
    }

    /** Fluent builder for PurchasePattern. */
    public static final class Builder {
        private String zipCode;
        private String item;
        private Date date;
        private double amount;

        private Builder() {
        }

        // Copies the relevant Purchase fields; amount is the extended total.
        private Builder(Purchase purchase) {
            this.zipCode = purchase.getZipCode();
            this.item = purchase.getItemPurchased();
            this.date = purchase.getPurchaseDate();
            this.amount = purchase.getPrice() * purchase.getQuantity();
        }

        public Builder zipCode(String val) {
            zipCode = val;
            return this;
        }

        public Builder item(String val) {
            item = val;
            return this;
        }

        public Builder date(Date val) {
            date = val;
            return this;
        }

        public Builder amount(double amount) {
            this.amount = amount;
            return this;
        }

        public PurchasePattern build() {
            return new PurchasePattern(this);
        }
    }
}
15 | */ 16 | 17 | package bbejeck.model; 18 | 19 | /** 20 | * User: Bill Bejeck 21 | * Date: 2/20/16 22 | * Time: 9:55 AM 23 | */ 24 | public class RewardAccumulator { 25 | 26 | private String customerId; 27 | private double purchaseTotal; 28 | private int totalRewardPoints; 29 | private int currentRewardPoints; 30 | private int daysFromLastPurchase; 31 | 32 | private RewardAccumulator(String customerId, double purchaseTotal, int rewardPoints) { 33 | this.customerId = customerId; 34 | this.purchaseTotal = purchaseTotal; 35 | this.currentRewardPoints = rewardPoints; 36 | this.totalRewardPoints = rewardPoints; 37 | } 38 | 39 | public String getCustomerId() { 40 | return customerId; 41 | } 42 | 43 | public double getPurchaseTotal() { 44 | return purchaseTotal; 45 | } 46 | 47 | public int getCurrentRewardPoints() { 48 | return currentRewardPoints; 49 | } 50 | 51 | public int getTotalRewardPoints() { 52 | return totalRewardPoints; 53 | } 54 | 55 | public void addRewardPoints(int previousTotalPoints) { 56 | this.totalRewardPoints += previousTotalPoints; 57 | } 58 | 59 | @Override 60 | public String toString() { 61 | return "RewardAccumulator{" + 62 | "customerId='" + customerId + '\'' + 63 | ", purchaseTotal=" + purchaseTotal + 64 | ", totalRewardPoints=" + totalRewardPoints + 65 | ", currentRewardPoints=" + currentRewardPoints + 66 | ", daysFromLastPurchase=" + daysFromLastPurchase + 67 | '}'; 68 | } 69 | 70 | @Override 71 | public boolean equals(Object o) { 72 | if (this == o) return true; 73 | if (!(o instanceof RewardAccumulator)) return false; 74 | 75 | RewardAccumulator that = (RewardAccumulator) o; 76 | 77 | if (Double.compare(that.purchaseTotal, purchaseTotal) != 0) return false; 78 | if (totalRewardPoints != that.totalRewardPoints) return false; 79 | if (currentRewardPoints != that.currentRewardPoints) return false; 80 | if (daysFromLastPurchase != that.daysFromLastPurchase) return false; 81 | return customerId != null ? 
customerId.equals(that.customerId) : that.customerId == null; 82 | } 83 | 84 | @Override 85 | public int hashCode() { 86 | int result; 87 | long temp; 88 | result = customerId != null ? customerId.hashCode() : 0; 89 | temp = Double.doubleToLongBits(purchaseTotal); 90 | result = 31 * result + (int) (temp ^ (temp >>> 32)); 91 | result = 31 * result + totalRewardPoints; 92 | result = 31 * result + currentRewardPoints; 93 | result = 31 * result + daysFromLastPurchase; 94 | return result; 95 | } 96 | 97 | public static Builder builder(Purchase purchase){return new Builder(purchase);} 98 | 99 | public static final class Builder { 100 | private String customerId; 101 | private double purchaseTotal; 102 | private int daysFromLastPurchase; 103 | private int rewardPoints; 104 | 105 | private Builder(Purchase purchase){ 106 | this.customerId = purchase.getLastName()+","+purchase.getFirstName(); 107 | this.purchaseTotal = purchase.getPrice() * (double) purchase.getQuantity(); 108 | this.rewardPoints = (int) purchaseTotal; 109 | } 110 | 111 | 112 | public RewardAccumulator build(){ 113 | return new RewardAccumulator(customerId, purchaseTotal, rewardPoints); 114 | } 115 | 116 | } 117 | } 118 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/model/ShareVolume.java: -------------------------------------------------------------------------------- 1 | package bbejeck.model; 2 | 3 | 4 | import java.text.NumberFormat; 5 | 6 | public class ShareVolume { 7 | 8 | private String symbol; 9 | private int shares; 10 | private String industry; 11 | 12 | 13 | private ShareVolume(Builder builder) { 14 | symbol = builder.symbol; 15 | shares = builder.shares; 16 | industry = builder.industry; 17 | } 18 | 19 | 20 | public String getIndustry() { 21 | return industry; 22 | } 23 | 24 | public String getSymbol() { 25 | return symbol; 26 | } 27 | 28 | public int getShares() { 29 | return shares; 30 | } 31 | 32 | 33 | @Override 34 | public String 
toString() { 35 | NumberFormat numberFormat = NumberFormat.getInstance(); 36 | return "ShareVolume{" + 37 | "symbol='" + symbol + '\'' + 38 | ", shares=" + numberFormat.format(shares) + 39 | '}'; 40 | } 41 | 42 | public static ShareVolume sum(ShareVolume csv1, ShareVolume csv2) { 43 | Builder builder = newBuilder(csv1); 44 | builder.shares = csv1.shares + csv2.shares; 45 | return builder.build(); 46 | } 47 | 48 | public static Builder newBuilder() { 49 | return new Builder(); 50 | } 51 | 52 | public static Builder newBuilder(StockTransaction stockTransaction) { 53 | Builder builder = new Builder(); 54 | builder.symbol = stockTransaction.getSymbol(); 55 | builder.shares = stockTransaction.getShares(); 56 | builder.industry = stockTransaction.getIndustry(); 57 | return builder; 58 | } 59 | 60 | public static Builder newBuilder(ShareVolume copy) { 61 | Builder builder = new Builder(); 62 | builder.symbol = copy.symbol; 63 | builder.shares = copy.shares; 64 | builder.industry = copy.industry; 65 | return builder; 66 | } 67 | 68 | 69 | public static final class Builder { 70 | private String symbol; 71 | private int shares; 72 | private String industry; 73 | 74 | private Builder() { 75 | } 76 | 77 | public Builder withSymbol(String val) { 78 | symbol = val; 79 | return this; 80 | } 81 | 82 | public Builder withShares(int val) { 83 | shares = val; 84 | return this; 85 | } 86 | 87 | public Builder withIndustry(String val) { 88 | industry = val; 89 | return this; 90 | } 91 | 92 | public ShareVolume build() { 93 | return new ShareVolume(this); 94 | } 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/model/StockPerformance.java: -------------------------------------------------------------------------------- 1 | package bbejeck.model; 2 | 3 | 4 | import java.text.DecimalFormat; 5 | import java.time.Instant; 6 | import java.util.ArrayDeque; 7 | 8 | public class StockPerformance { 9 | 10 | private Instant 
import java.text.DecimalFormat;
import java.time.Instant;
import java.util.ArrayDeque;

/**
 * Mutable accumulator of rolling statistics for a single stock: the latest
 * price and share volume, a {@value #MAX_LOOK_BACK}-sample rolling average of
 * each, and the percentage differential of the newest observation from that
 * average.
 *
 * <p>Until {@value #MAX_LOOK_BACK} samples have been collected, the averages
 * keep the sentinel {@link Double#MIN_VALUE} and the differentials are 0.0.
 * This class is serialized with Gson elsewhere in the project; the
 * {@code decimalFormat} field is {@code transient} so it is skipped.
 * Not thread-safe.
 */
public class StockPerformance {

    private static final int MAX_LOOK_BACK = 20;

    private Instant lastUpdateSent;
    private double currentPrice = 0.0;
    private double priceDifferential = 0.0;
    private double shareDifferential = 0.0;
    private int currentShareVolume = 0;
    // Double.MIN_VALUE is a sentinel for "not enough samples yet".
    private double currentAveragePrice = Double.MIN_VALUE;
    private double currentAverageVolume = Double.MIN_VALUE;
    // FIX: these were declared as raw ArrayDeque; they hold boxed doubles.
    private ArrayDeque<Double> shareVolumeLookback = new ArrayDeque<>(MAX_LOOK_BACK);
    private ArrayDeque<Double> sharePriceLookback = new ArrayDeque<>(MAX_LOOK_BACK);
    private transient DecimalFormat decimalFormat = new DecimalFormat("#.00");

    public void setLastUpdateSent(Instant lastUpdateSent) {
        this.lastUpdateSent = lastUpdateSent;
    }

    /**
     * Records a new price: updates the differential against the current
     * rolling average (before the new sample is folded in), then the average.
     */
    public void updatePriceStats(double currentPrice) {
        this.currentPrice = currentPrice;
        priceDifferential = calculateDifferentialFromAverage(currentPrice, currentAveragePrice);
        currentAveragePrice = calculateNewAverage(currentPrice, currentAveragePrice, sharePriceLookback);
    }

    /**
     * Records a new share volume: updates the differential against the
     * current rolling average, then the average itself.
     */
    public void updateVolumeStats(int currentShareVolume) {
        this.currentShareVolume = currentShareVolume;
        shareDifferential = calculateDifferentialFromAverage((double) currentShareVolume, currentAverageVolume);
        currentAverageVolume = calculateNewAverage(currentShareVolume, currentAverageVolume, shareVolumeLookback);
    }

    /**
     * Percentage deviation of {@code value} from {@code average};
     * 0.0 while the average is still the "unset" sentinel.
     */
    private double calculateDifferentialFromAverage(double value, double average) {
        return average != Double.MIN_VALUE ? ((value / average) - 1) * 100.0 : 0.0;
    }

    /**
     * Maintains a {@value #MAX_LOOK_BACK}-sample rolling mean. During warm-up
     * the sample is only queued; the mean is first computed when the window
     * fills, and thereafter updated incrementally as
     * {@code avg + new/N - oldest/N}.
     */
    private double calculateNewAverage(double newValue, double currentAverage, ArrayDeque<Double> deque) {
        if (deque.size() < MAX_LOOK_BACK) {
            deque.add(newValue);

            if (deque.size() == MAX_LOOK_BACK) {
                currentAverage = deque.stream().reduce(0.0, Double::sum) / MAX_LOOK_BACK;
            }

        } else {
            double oldestValue = deque.poll();
            deque.add(newValue);
            currentAverage = (currentAverage + (newValue / MAX_LOOK_BACK)) - oldestValue / MAX_LOOK_BACK;
        }
        return currentAverage;
    }

    public double priceDifferential() {
        return priceDifferential;
    }

    public double volumeDifferential() {
        return shareDifferential;
    }

    public double getCurrentPrice() {
        return currentPrice;
    }

    public int getCurrentShareVolume() {
        return currentShareVolume;
    }

    public double getCurrentAveragePrice() {
        return currentAveragePrice;
    }

    public double getCurrentAverageVolume() {
        return currentAverageVolume;
    }

    public Instant getLastUpdateSent() {
        return lastUpdateSent;
    }

    @Override
    public String toString() {
        return "StockPerformance{" +
                "lastUpdateSent= " + lastUpdateSent +
                ", currentPrice= " + decimalFormat.format(currentPrice) +
                ", currentAveragePrice= " + decimalFormat.format(currentAveragePrice) +
                ", percentage difference= " + decimalFormat.format(priceDifferential) +
                ", currentShareVolume= " + decimalFormat.format(currentShareVolume) +
                ", currentAverageVolume= " + decimalFormat.format(currentAverageVolume) +
                ", shareDifferential= " + decimalFormat.format(shareDifferential) +
                '}';
    }
}
package bbejeck.model; 2 | 3 | 4 | public class StockTickerData { 5 | 6 | private double price; 7 | private String symbol; 8 | 9 | public double getPrice() { 10 | return price; 11 | } 12 | 13 | public String getSymbol() { 14 | return symbol; 15 | } 16 | 17 | public StockTickerData(double price, String symbol) { 18 | this.price = price; 19 | this.symbol = symbol; 20 | } 21 | 22 | @Override 23 | public String toString() { 24 | return "StockTickerData{" + 25 | "price=" + price + 26 | ", symbol='" + symbol + '\'' + 27 | '}'; 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/model/StockTransactionSummary.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016 Bill Bejeck 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package bbejeck.model; 18 | 19 | /** 20 | * User: Bill Bejeck 21 | * Date: 2/6/16 22 | * Time: 3:32 PM 23 | */ 24 | public class StockTransactionSummary { 25 | 26 | public double amount; 27 | public String tickerSymbol; 28 | public int sharesPurchased; 29 | public int sharesSold; 30 | private long lastUpdatedTime; 31 | 32 | 33 | 34 | public void update(StockTransaction transaction){ 35 | this.amount += transaction.getSharePrice(); 36 | if(transaction.getSector().equalsIgnoreCase("purchase")){ 37 | this.sharesPurchased += transaction.getShares(); 38 | } else{ 39 | this.sharesSold += transaction.getShares(); 40 | } 41 | this.lastUpdatedTime = System.currentTimeMillis(); 42 | } 43 | 44 | public boolean updatedWithinLastMillis(long currentTime, long limit){ 45 | return currentTime - this.lastUpdatedTime <= limit; 46 | } 47 | 48 | public static StockTransactionSummary fromTransaction(StockTransaction transaction){ 49 | StockTransactionSummary summary = new StockTransactionSummary(); 50 | summary.tickerSymbol = transaction.getSymbol(); 51 | summary.update(transaction); 52 | return summary; 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/model/TransactionCount.java: -------------------------------------------------------------------------------- 1 | package bbejeck.model; 2 | 3 | /** 4 | * User: Bill Bejeck 5 | * Date: 3/21/17 6 | * Time: 11:04 PM 7 | */ 8 | public class TransactionCount { 9 | 10 | long count; 11 | String symbol; 12 | 13 | private TransactionCount(Builder builder) { 14 | count = builder.count; 15 | symbol = builder.symbol; 16 | } 17 | 18 | public static Builder newBuilder() { 19 | return new Builder(); 20 | } 21 | 22 | public long getCount() { 23 | return count; 24 | } 25 | 26 | public String getSymbol() { 27 | return symbol; 28 | } 29 | 30 | 31 | public static final class Builder { 32 | private long count; 33 | private String symbol; 34 | 35 | private Builder() { 
36 | } 37 | 38 | public Builder withCount(long val) { 39 | count = val; 40 | return this; 41 | } 42 | 43 | public Builder withSymbol(String val) { 44 | symbol = val; 45 | return this; 46 | } 47 | 48 | 49 | public TransactionCount build() { 50 | return new TransactionCount(this); 51 | } 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/model/TransactionSummary.java: -------------------------------------------------------------------------------- 1 | package bbejeck.model; 2 | 3 | 4 | public class TransactionSummary { 5 | 6 | private String customerId; 7 | private String stockTicker; 8 | private String industry; 9 | private long summaryCount; 10 | private String customerName; 11 | private String companyName; 12 | 13 | 14 | public TransactionSummary(String customerId, String stockTicker, String industry) { 15 | this.customerId = customerId; 16 | this.stockTicker = stockTicker; 17 | this.industry = industry; 18 | } 19 | 20 | public void setSummaryCount(long summaryCount){ 21 | this.summaryCount = summaryCount; 22 | } 23 | 24 | public String getCustomerId() { 25 | return customerId; 26 | } 27 | 28 | public String getStockTicker() { 29 | return stockTicker; 30 | } 31 | 32 | public String getIndustry() { 33 | return industry; 34 | } 35 | 36 | public long getSummaryCount() { 37 | return summaryCount; 38 | } 39 | 40 | public String getCustomerName() { 41 | return customerName; 42 | } 43 | 44 | public TransactionSummary withCustomerName(String customerName) { 45 | this.customerName = customerName; 46 | return this; 47 | } 48 | 49 | public String getCompmanyName() { 50 | return companyName; 51 | } 52 | 53 | public TransactionSummary withCompanyName(String companyName) { 54 | this.companyName = companyName; 55 | return this; 56 | } 57 | 58 | public static TransactionSummary from(StockTransaction transaction){ 59 | return new TransactionSummary(transaction.getCustomerId(), transaction.getSymbol(), 
transaction.getIndustry()); 60 | } 61 | 62 | @Override 63 | public String toString() { 64 | return "TransactionSummary{" + 65 | "customerId='" + customerId + '\'' + 66 | ", stockTicker='" + stockTicker + '\'' + 67 | ", customerName='" + customerName + '\'' + 68 | ", companyName='" + companyName + '\'' + 69 | '}'; 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/model/Tweet.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016 Bill Bejeck 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package bbejeck.model; 18 | 19 | /** 20 | * User: Bill Bejeck 21 | * Date: 4/21/16 22 | * Time: 8:38 PM 23 | */ 24 | public class Tweet { 25 | 26 | private String id; 27 | private String text; 28 | private String language; 29 | 30 | public String getId() { 31 | return id; 32 | } 33 | 34 | public void setId(String id) { 35 | this.id = id; 36 | } 37 | 38 | public String getText() { 39 | return text; 40 | } 41 | 42 | public void setText(String text) { 43 | this.text = text; 44 | } 45 | 46 | public String getLanguage() { 47 | return language; 48 | } 49 | 50 | public void setLanguage(String language) { 51 | this.language = language; 52 | } 53 | 54 | 55 | @Override 56 | public String toString() { 57 | return "Tweet{" + 58 | "id='" + id + '\'' + 59 | ", text='" + text + '\'' + 60 | ", language='" + language + '\'' + 61 | '}'; 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/util/Topics.java: -------------------------------------------------------------------------------- 1 | package bbejeck.util; 2 | 3 | 4 | public enum Topics { 5 | 6 | COMPANIES { 7 | @Override 8 | public String toString() { 9 | return "companies"; 10 | } 11 | }, 12 | CLIENTS { 13 | @Override 14 | public String toString() { 15 | return "clients"; 16 | } 17 | }, 18 | FINANCIAL_NEWS { 19 | @Override 20 | public String toString() { 21 | return "financial-news"; 22 | } 23 | }, 24 | POPS_HOPS_PURCHASES { 25 | @Override 26 | public String topicName() { 27 | return "pops-hops-purchases"; 28 | } 29 | }; 30 | 31 | 32 | public String topicName(){ 33 | return this.toString(); 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/util/collection/Tuple.java: -------------------------------------------------------------------------------- 1 | package bbejeck.util.collection; 2 | 3 | 4 | public class Tuple { 5 | 6 | public final L _1; 7 | public final R _2; 8 | 
9 | public Tuple(L _1, R _2) { 10 | this._1 = _1; 11 | this._2 = _2; 12 | } 13 | 14 | public static Tuple of (L left, R right) { 15 | return new Tuple<>(left, right); 16 | 17 | } 18 | 19 | @Override 20 | public boolean equals(Object o) { 21 | if (this == o) return true; 22 | if (!(o instanceof Tuple)) return false; 23 | 24 | Tuple tuple = (Tuple) o; 25 | 26 | if (_1 != null ? !_1.equals(tuple._1) : tuple._1 != null) return false; 27 | return _2 != null ? _2.equals(tuple._2) : tuple._2 == null; 28 | } 29 | 30 | @Override 31 | public int hashCode() { 32 | int result = _1 != null ? _1.hashCode() : 0; 33 | result = 31 * result + (_2 != null ? _2.hashCode() : 0); 34 | return result; 35 | } 36 | 37 | @Override 38 | public String toString() { 39 | return "Tuple{" + 40 | "_1=" + _1 + 41 | ", _2=" + _2 + 42 | '}'; 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/util/datagen/CustomDateGenerator.java: -------------------------------------------------------------------------------- 1 | package bbejeck.util.datagen; 2 | 3 | import java.time.Duration; 4 | import java.time.Instant; 5 | import java.util.Date; 6 | 7 | /** 8 | * Generates dates either with uniform increases in time 9 | * or within random periods of time with increases and occasional 10 | * late arrival data 11 | */ 12 | public class CustomDateGenerator { 13 | 14 | private Instant instant = Instant.now(); 15 | private Duration increaseDuration; 16 | 17 | private CustomDateGenerator(Duration increaseDuration) { 18 | this.increaseDuration = increaseDuration; 19 | } 20 | 21 | public static CustomDateGenerator withTimestampsIncreasingBy(Duration increaseDuration) { 22 | return new CustomDateGenerator(increaseDuration); 23 | } 24 | 25 | public Date get() { 26 | Date date = Date.from(instant); 27 | instant = instant.plus(increaseDuration); 28 | return date; 29 | } 30 | } 31 | 
-------------------------------------------------------------------------------- /src/main/java/bbejeck/util/serializer/FixedSizePriorityQueueAdapter.java: -------------------------------------------------------------------------------- 1 | package bbejeck.util.serializer; 2 | 3 | 4 | import bbejeck.collectors.FixedSizePriorityQueue; 5 | import bbejeck.model.ShareVolume; 6 | import com.google.gson.Gson; 7 | import com.google.gson.TypeAdapter; 8 | import com.google.gson.stream.JsonReader; 9 | import com.google.gson.stream.JsonWriter; 10 | 11 | import java.io.IOException; 12 | import java.util.ArrayList; 13 | import java.util.Comparator; 14 | import java.util.Iterator; 15 | import java.util.List; 16 | 17 | public class FixedSizePriorityQueueAdapter extends TypeAdapter> { 18 | 19 | private Gson gson = new Gson(); 20 | 21 | @Override 22 | public void write(JsonWriter writer, FixedSizePriorityQueue value) throws IOException { 23 | 24 | if (value == null) { 25 | writer.nullValue(); 26 | return; 27 | } 28 | 29 | 30 | Iterator iterator = value.iterator(); 31 | List list = new ArrayList<>(); 32 | while (iterator.hasNext()) { 33 | ShareVolume stockTransaction = iterator.next(); 34 | if (stockTransaction != null) { 35 | list.add(stockTransaction); 36 | } 37 | } 38 | writer.beginArray(); 39 | for (ShareVolume transaction : list) { 40 | writer.value(gson.toJson(transaction)); 41 | } 42 | writer.endArray(); 43 | } 44 | 45 | @Override 46 | public FixedSizePriorityQueue read(JsonReader reader) throws IOException { 47 | List list = new ArrayList<>(); 48 | reader.beginArray(); 49 | while (reader.hasNext()) { 50 | list.add(gson.fromJson(reader.nextString(), ShareVolume.class)); 51 | } 52 | reader.endArray(); 53 | 54 | Comparator c = (c1, c2) -> c2.getShares() - c1.getShares(); 55 | FixedSizePriorityQueue fixedSizePriorityQueue = new FixedSizePriorityQueue<>(c, 5); 56 | 57 | for (ShareVolume transaction : list) { 58 | fixedSizePriorityQueue.add(transaction); 59 | } 60 | 61 | return 
fixedSizePriorityQueue; 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/util/serializer/JsonDeserializer.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016 Bill Bejeck 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | package bbejeck.util.serializer; 18 | 19 | import bbejeck.collectors.FixedSizePriorityQueue; 20 | import com.google.gson.Gson; 21 | import com.google.gson.GsonBuilder; 22 | import org.apache.kafka.common.serialization.Deserializer; 23 | 24 | import java.lang.reflect.Type; 25 | import java.util.Map; 26 | 27 | /** 28 | * User: Bill Bejeck 29 | * Date: 2/14/16 30 | * Time: 3:26 PM 31 | */ 32 | 33 | public class JsonDeserializer implements Deserializer { 34 | 35 | private Gson gson; 36 | private Class deserializedClass; 37 | private Type reflectionTypeToken; 38 | 39 | public JsonDeserializer(Class deserializedClass) { 40 | this.deserializedClass = deserializedClass; 41 | init(); 42 | 43 | } 44 | 45 | public JsonDeserializer(Type reflectionTypeToken) { 46 | this.reflectionTypeToken = reflectionTypeToken; 47 | init(); 48 | } 49 | 50 | private void init () { 51 | GsonBuilder builder = new GsonBuilder(); 52 | builder.registerTypeAdapter(FixedSizePriorityQueue.class, new FixedSizePriorityQueueAdapter().nullSafe()); 53 | gson = builder.create(); 54 | } 55 | 56 | public 
JsonDeserializer() { 57 | } 58 | 59 | @Override 60 | @SuppressWarnings("unchecked") 61 | public void configure(Map map, boolean b) { 62 | if(deserializedClass == null) { 63 | deserializedClass = (Class) map.get("serializedClass"); 64 | } 65 | } 66 | 67 | @Override 68 | public T deserialize(String s, byte[] bytes) { 69 | if(bytes == null){ 70 | return null; 71 | } 72 | 73 | Type deserializeFrom = deserializedClass != null ? deserializedClass : reflectionTypeToken; 74 | 75 | return gson.fromJson(new String(bytes),deserializeFrom); 76 | 77 | } 78 | 79 | @Override 80 | public void close() { 81 | 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /src/main/java/bbejeck/util/serializer/JsonSerializer.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016 Bill Bejeck 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | package bbejeck.util.serializer; 18 | 19 | import bbejeck.collectors.FixedSizePriorityQueue; 20 | import com.google.gson.Gson; 21 | import com.google.gson.GsonBuilder; 22 | import org.apache.kafka.common.serialization.Serializer; 23 | 24 | import java.nio.charset.Charset; 25 | import java.util.Map; 26 | 27 | /** 28 | * User: Bill Bejeck 29 | * Date: 2/14/16 30 | * Time: 2:37 PM 31 | */ 32 | public class JsonSerializer implements Serializer { 33 | 34 | private Gson gson; 35 | 36 | public JsonSerializer() { 37 | GsonBuilder builder = new GsonBuilder(); 38 | builder.registerTypeAdapter(FixedSizePriorityQueue.class, new FixedSizePriorityQueueAdapter().nullSafe()); 39 | gson = builder.create(); 40 | } 41 | 42 | @Override 43 | public void configure(Map map, boolean b) { 44 | 45 | } 46 | 47 | @Override 48 | public byte[] serialize(String topic, T t) { 49 | return gson.toJson(t).getBytes(Charset.forName("UTF-8")); 50 | } 51 | 52 | @Override 53 | public void close() { 54 | 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /src/main/resources/conf/connect-standalone.properties: -------------------------------------------------------------------------------- 1 | # Licensed to the Apache Software Foundation (ASF) under one or more 2 | # contributor license agreements. See the NOTICE file distributed with 3 | # this work for additional information regarding copyright ownership. 4 | # The ASF licenses this file to You under the Apache License, Version 2.0 5 | # (the "License"); you may not use this file except in compliance with 6 | # the License. You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | # These are defaults. This file just demonstrates how to override some settings. 17 | bootstrap.servers=localhost:9092 18 | 19 | # The converters specify the format of data in Kafka and how to translate it into Connect data. Every Connect user will 20 | # need to configure these based on the format they want their data in when loaded from or stored into Kafka 21 | key.converter=org.apache.kafka.connect.storage.StringConverter 22 | value.converter=org.apache.kafka.connect.json.JsonConverter 23 | 24 | # Converter-specific settings can be passed in by prefixing the Converter's setting with the converter we want to apply 25 | # it to 26 | key.converter.schemas.enable=false 27 | value.converter.schemas.enable=false 28 | 29 | # The internal converter used for offsets and config data is configurable and must be specified, but most users will 30 | # always want to use the built-in default. Offset and config data is never visible outside of Kafka Connect in this format. 31 | internal.key.converter=org.apache.kafka.connect.json.JsonConverter 32 | internal.value.converter=org.apache.kafka.connect.json.JsonConverter 33 | internal.key.converter.schemas.enable=false 34 | internal.value.converter.schemas.enable=false 35 | 36 | offset.storage.file.filename=/tmp/connect.offsets 37 | # Flush much faster than normal, which is useful for testing/debugging 38 | offset.flush.interval.ms=10000 39 | 40 | # Set to a list of filesystem paths separated by commas (,) to enable class loading isolation for plugins 41 | # (connectors, converters, transformations). 
The list should consist of top level directories that include 42 | # any combination of: 43 | # a) directories immediately containing jars with plugins and their dependencies 44 | # b) uber-jars with plugins and their dependencies 45 | # c) directories immediately containing the package directory structure of classes of plugins and their dependencies 46 | # Note: symlinks will be followed to discover dependencies or plugins. 47 | # Examples: 48 | # plugin.path=/usr/local/share/java,/usr/local/share/kafka/plugins,/opt/connectors, 49 | 50 | # Need to update to reflect where code installed then copy this file to 51 | # /config 52 | plugin.path=/ks_in_action_code/libs/connect/ -------------------------------------------------------------------------------- /src/main/resources/conf/connector-jdbc.properties: -------------------------------------------------------------------------------- 1 | # copy this file to /config 2 | 3 | name=stock-transaction-connector 4 | connector.class=io.confluent.connect.jdbc.JdbcSourceConnector 5 | tasks.max=1 6 | connection.url=jdbc:h2:tcp://localhost:9989/~/findata 7 | mode=incrementing 8 | incrementing.column.name=TXN_ID 9 | topic.prefix=dbTxn 10 | 11 | transforms=ConvertDate,Rename,ExtractKey,FlattenStruct 12 | transforms.ConvertDate.type=org.apache.kafka.connect.transforms.TimestampConverter$Value 13 | transforms.ConvertDate.field=TXNTS 14 | transforms.ConvertDate.target.type=string 15 | transforms.ConvertDate.format=yyyy-MM-dd'T'HH:mm:ss.SSS-0400 16 | transforms.Rename.type=org.apache.kafka.connect.transforms.ReplaceField$Value 17 | transforms.Rename.renames=SMBL:symbol, SCTR:sector, INDSTY:industry, SHRS:shares, SHRPRC:sharePrice, CSTMRID:customerId, TXNTS:transactionTimestamp 18 | transforms.ExtractKey.type=org.apache.kafka.connect.transforms.ValueToKey 19 | transforms.ExtractKey.fields=symbol 20 | transforms.FlattenStruct.type=org.apache.kafka.connect.transforms.ExtractField$Key 21 | transforms.FlattenStruct.field=symbol 22 | 23 | 
-------------------------------------------------------------------------------- /src/main/resources/ksql/create_stream.txt: -------------------------------------------------------------------------------- 1 | CREATE STREAM stock_txn_stream (symbol VARCHAR, sector VARCHAR, \ 2 | industry VARCHAR, shares BIGINT, sharePrice DOUBLE, \ 3 | customerId VARCHAR, transactionTimestamp STRING, purchase BOOLEAN) \ 4 | WITH (VALUE_FORMAT = 'JSON', KAFKA_TOPIC = 'stock-transactions'); -------------------------------------------------------------------------------- /src/main/resources/ksql/create_table.txt: -------------------------------------------------------------------------------- 1 | CREATE TABLE stock_txn_table (symbol VARCHAR, sector VARCHAR, \ 2 | industry VARCHAR, shares BIGINT, sharePrice DOUBLE, \ 3 | customerId VARCHAR, transactionTimestamp STRING, purchase BOOLEAN) \ 4 | WITH (KEY = 'symbol', VALUE_FORMAT = 'JSON', KAFKA_TOPIC = 'stock-transactions'); -------------------------------------------------------------------------------- /src/main/resources/ksql/stock_performance_query.ksql: -------------------------------------------------------------------------------- 1 | SELECT symbol, sum(shares) FROM stock_txn_stream WINDOW TUMBLING (SIZE 10 SECONDS) GROUP BY symbol; -------------------------------------------------------------------------------- /src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | 2 | 3 | # stdout Appender 4 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender 5 | log4j.appender.stdout.Target=System.out 6 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout 7 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n 8 | 9 | # Producer Interceptor Logs 10 | log4j.appender.producerInterceptor=org.apache.log4j.FileAppender 11 | log4j.appender.producerInterceptor.File=logs/producer_interceptor.log 12 | 
log4j.appender.producerInterceptor.Append=false 13 | log4j.appender.producerInterceptor.layout=org.apache.log4j.PatternLayout 14 | log4j.appender.producerInterceptor.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n 15 | 16 | # Consumer Interceptor Logs 17 | log4j.appender.consumerInterceptor=org.apache.log4j.FileAppender 18 | log4j.appender.consumerInterceptor.File=logs/consumer_interceptor.log 19 | log4j.appender.consumerInterceptor.Append=false 20 | log4j.appender.consumerInterceptor.layout=org.apache.log4j.PatternLayout 21 | log4j.appender.consumerInterceptor.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n 22 | 23 | 24 | # State Restore Listener Logs 25 | log4j.appender.restoreListener=org.apache.log4j.FileAppender 26 | log4j.appender.restoreListener.File=logs/state_restore_listener.log 27 | log4j.appender.restoreListener.Append=false 28 | log4j.appender.restoreListener.layout=org.apache.log4j.PatternLayout 29 | log4j.appender.restoreListener.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n 30 | 31 | 32 | # Kafka Logs 33 | log4j.appender.kafkaLog=org.apache.log4j.FileAppender 34 | log4j.appender.kafkaLog.File=logs/kafka_streams_in_action.log 35 | log4j.appender.kafkaLog.Append=false 36 | log4j.appender.kafkaLog.layout=org.apache.log4j.PatternLayout 37 | log4j.appender.kafkaLog.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n 38 | 39 | 40 | 41 | log4j.logger.bbejeck.chapter_7.interceptors.StockTransactionConsumerInterceptor=INFO, consumerInterceptor 42 | log4j.additivity.bbejeck.chapter_7.interceptors.StockTransactionConsumerInterceptor=false 43 | 44 | log4j.logger.bbejeck.chapter_7.interceptors.ZMartProducerInterceptor=INFO, producerInterceptor 45 | log4j.additivity.bbejeck.chapter_7.interceptors.ZMartProducerInterceptor=false 46 | 47 | 48 | log4j.logger.bbejeck.chapter_7.restore.LoggingStateRestoreListener=INFO, restoreListener 49 | 
log4j.additivity.bbejeck.chapter_7.restore.LoggingStateRestoreListener=false 50 | 51 | log4j.logger.org.apache.kafka=INFO, kafkaLog 52 | log4j.logger.bbejeck=INFO, stdout 53 | log4j.additivity.bbejeck= false 54 | log4j.rootLogger=INFO, stdout 55 | 56 | 57 | 58 | -------------------------------------------------------------------------------- /src/main/resources/webserver/interactiveQueriesApplication.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Title 7 | 8 | 9 | 10 | 11 | 12 | 13 | 27 | 28 | 72 | 73 | 74 | 75 | 76 | 77 |

Kafka Streams Equities Dashboard Application

78 | 79 |
80 |

Total Transactions by Market Sector Table

81 | 82 | 83 |
SectorNumberShares
84 | 85 |
86 | 87 |
88 | 89 |
90 |

Number of Stock Transaction by Stock Symbol Table

91 | 92 | 93 |
Stock SymbolShares Per Transaction Window
94 |
95 | 96 |
97 | 98 |
99 |

Customer Session Equity Activity Table

100 | 101 | 102 |
Customer IdAverage Equity Transaction Spent Per Session
103 |
104 | 105 | 106 | -------------------------------------------------------------------------------- /src/test/java/bbejeck/MockKeyValueStore.java: -------------------------------------------------------------------------------- 1 | package bbejeck; 2 | 3 | import org.apache.kafka.streams.KeyValue; 4 | import org.apache.kafka.streams.processor.ProcessorContext; 5 | import org.apache.kafka.streams.processor.StateStore; 6 | import org.apache.kafka.streams.state.KeyValueIterator; 7 | import org.apache.kafka.streams.state.KeyValueStore; 8 | import org.apache.kafka.test.KeyValueIteratorStub; 9 | 10 | import java.util.ArrayList; 11 | import java.util.HashMap; 12 | import java.util.List; 13 | import java.util.Map; 14 | 15 | 16 | public class MockKeyValueStore implements KeyValueStore { 17 | 18 | protected Map inner = new HashMap<>(); 19 | private String name = "mockStore"; 20 | private boolean open = true; 21 | 22 | public MockKeyValueStore () {} 23 | 24 | 25 | @Override 26 | public void put(K key, V value) { 27 | inner.put(key, value); 28 | } 29 | 30 | @Override 31 | public V putIfAbsent(K key, V value) { 32 | if (!inner.containsKey(key)) { 33 | inner.put(key, value); 34 | } 35 | return value; 36 | } 37 | 38 | @Override 39 | public void putAll(List> entries) { 40 | for (KeyValue entry : entries) { 41 | inner.put(entry.key, entry.value); 42 | } 43 | } 44 | 45 | @Override 46 | public V delete(K key) { 47 | return inner.remove(key); 48 | } 49 | 50 | @Override 51 | public String name() { 52 | return name; 53 | } 54 | 55 | @Override 56 | public void init(ProcessorContext context, StateStore root) { 57 | 58 | } 59 | 60 | @Override 61 | public void flush() { 62 | 63 | } 64 | 65 | @Override 66 | public void close() { 67 | open = false; 68 | } 69 | 70 | @Override 71 | public boolean persistent() { 72 | return false; 73 | } 74 | 75 | @Override 76 | public boolean isOpen() { 77 | return open; 78 | } 79 | 80 | @Override 81 | public V get(K key) { 82 | return inner.get(key); 83 | } 84 
| 85 | @Override 86 | public KeyValueIterator range(K from, K to) { 87 | return null; 88 | } 89 | 90 | @Override 91 | public KeyValueIterator all() { 92 | List> entryList = new ArrayList<>(); 93 | for (Map.Entry tupleEntry : inner.entrySet()) { 94 | entryList.add(KeyValue.pair(tupleEntry.getKey(),tupleEntry.getValue())); 95 | } 96 | return new KeyValueIteratorStub<>(entryList.iterator()); 97 | } 98 | 99 | @Override 100 | public long approximateNumEntries() { 101 | return inner.size(); 102 | } 103 | 104 | public Map innerStore() { 105 | return inner; 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /src/test/java/bbejeck/chapter_3/KafkaStreamsYellingIntegrationTest.java: -------------------------------------------------------------------------------- 1 | package bbejeck.chapter_3; 2 | 3 | import org.apache.kafka.common.serialization.Serdes; 4 | import org.apache.kafka.common.serialization.StringDeserializer; 5 | import org.apache.kafka.common.serialization.StringSerializer; 6 | import org.apache.kafka.common.utils.Time; 7 | import org.apache.kafka.streams.KafkaStreams; 8 | import org.apache.kafka.streams.StreamsBuilder; 9 | import org.apache.kafka.streams.StreamsConfig; 10 | import org.apache.kafka.streams.integration.utils.EmbeddedKafkaCluster; 11 | import org.apache.kafka.streams.integration.utils.IntegrationTestUtils; 12 | import org.apache.kafka.test.StreamsTestUtils; 13 | import org.apache.kafka.test.TestUtils; 14 | import org.junit.After; 15 | import org.junit.Before; 16 | import org.junit.BeforeClass; 17 | import org.junit.ClassRule; 18 | import org.junit.Test; 19 | 20 | import java.util.Arrays; 21 | import java.util.List; 22 | import java.util.Properties; 23 | import java.util.regex.Pattern; 24 | import java.util.stream.Collectors; 25 | 26 | import static org.hamcrest.CoreMatchers.equalTo; 27 | import static org.junit.Assert.assertThat; 28 | 29 | 30 | public class KafkaStreamsYellingIntegrationTest { 31 | 
32 | private static final int NUM_BROKERS = 1; 33 | private static final String STRING_SERDE_CLASSNAME = Serdes.String().getClass().getName(); 34 | private final Time mockTime = Time.SYSTEM; 35 | 36 | private KafkaStreams kafkaStreams; 37 | private StreamsConfig streamsConfig; 38 | private Properties producerConfig; 39 | private Properties consumerConfig; 40 | 41 | 42 | private static final String YELL_A_TOPIC = "yell-A-topic"; 43 | private static final String YELL_B_TOPIC = "yell-B-topic"; 44 | private static final String OUT_TOPIC = "out-topic"; 45 | 46 | 47 | @ClassRule 48 | public static final EmbeddedKafkaCluster EMBEDDED_KAFKA = new EmbeddedKafkaCluster(NUM_BROKERS); 49 | 50 | @BeforeClass 51 | public static void setUpAll() throws Exception { 52 | EMBEDDED_KAFKA.createTopic(YELL_A_TOPIC); 53 | EMBEDDED_KAFKA.createTopic(OUT_TOPIC); 54 | } 55 | 56 | 57 | @Before 58 | public void setUp() { 59 | Properties properties = StreamsTestUtils.getStreamsConfig("integrationTest", 60 | EMBEDDED_KAFKA.bootstrapServers(), 61 | STRING_SERDE_CLASSNAME, 62 | STRING_SERDE_CLASSNAME, 63 | new Properties()); 64 | properties.put(IntegrationTestUtils.INTERNAL_LEAVE_GROUP_ON_CLOSE, true); 65 | 66 | streamsConfig = new StreamsConfig(properties); 67 | 68 | producerConfig = TestUtils.producerConfig(EMBEDDED_KAFKA.bootstrapServers(), 69 | StringSerializer.class, 70 | StringSerializer.class); 71 | 72 | consumerConfig = TestUtils.consumerConfig(EMBEDDED_KAFKA.bootstrapServers(), 73 | StringDeserializer.class, 74 | StringDeserializer.class); 75 | } 76 | 77 | @After 78 | public void tearDown() { 79 | if (kafkaStreams != null) { 80 | kafkaStreams.close(); 81 | } 82 | } 83 | 84 | 85 | @Test 86 | public void shouldYellFromMultipleTopics() throws Exception { 87 | 88 | StreamsBuilder streamsBuilder = new StreamsBuilder(); 89 | 90 | streamsBuilder.stream(Pattern.compile("yell.*")) 91 | .mapValues(String::toUpperCase) 92 | .to(OUT_TOPIC); 93 | 94 | kafkaStreams = new 
KafkaStreams(streamsBuilder.build(), streamsConfig); 95 | kafkaStreams.start(); 96 | 97 | List valuesToSendList = Arrays.asList("this", "should", "yell", "at", "you"); 98 | List expectedValuesList = valuesToSendList.stream() 99 | .map(String::toUpperCase) 100 | .collect(Collectors.toList()); 101 | 102 | IntegrationTestUtils.produceValuesSynchronously(YELL_A_TOPIC, 103 | valuesToSendList, 104 | producerConfig, 105 | mockTime); 106 | int expectedNumberOfRecords = 5; 107 | List actualValues = IntegrationTestUtils.waitUntilMinValuesRecordsReceived(consumerConfig, 108 | OUT_TOPIC, 109 | expectedNumberOfRecords); 110 | 111 | assertThat(actualValues, equalTo(expectedValuesList)); 112 | 113 | EMBEDDED_KAFKA.createTopic(YELL_B_TOPIC); 114 | 115 | valuesToSendList = Arrays.asList("yell", "at", "you", "too"); 116 | IntegrationTestUtils.produceValuesSynchronously(YELL_B_TOPIC, 117 | valuesToSendList, 118 | producerConfig, 119 | mockTime); 120 | 121 | expectedValuesList = valuesToSendList.stream().map(String::toUpperCase).collect(Collectors.toList()); 122 | 123 | expectedNumberOfRecords = 4; 124 | actualValues = IntegrationTestUtils.waitUntilMinValuesRecordsReceived(consumerConfig, 125 | OUT_TOPIC, 126 | expectedNumberOfRecords); 127 | 128 | assertThat(actualValues, equalTo(expectedValuesList)); 129 | 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /src/test/java/bbejeck/chapter_4/TransformerTest.java: -------------------------------------------------------------------------------- 1 | package bbejeck.chapter_4; 2 | 3 | import org.apache.kafka.test.ProcessorTopologyTestDriver; 4 | import org.junit.Test; 5 | 6 | 7 | public class TransformerTest { 8 | 9 | ProcessorTopologyTestDriver testDriver = null; 10 | 11 | 12 | @Test 13 | public void shouldTransform() { 14 | 15 | 16 | } 17 | 18 | } 19 | -------------------------------------------------------------------------------- 
/src/test/java/bbejeck/chapter_6/processor/cogrouping/CogroupingMethodHandleProcessorTest.java: -------------------------------------------------------------------------------- 1 | package bbejeck.chapter_6.processor.cogrouping; 2 | 3 | import bbejeck.MockKeyValueStore; 4 | import bbejeck.model.ClickEvent; 5 | import bbejeck.model.StockTransaction; 6 | import bbejeck.util.collection.Tuple; 7 | import org.apache.kafka.streams.processor.ProcessorContext; 8 | import org.apache.kafka.streams.processor.Punctuator; 9 | import org.junit.jupiter.api.DisplayName; 10 | import org.junit.jupiter.api.Test; 11 | 12 | import java.time.Instant; 13 | import java.util.ArrayList; 14 | import java.util.List; 15 | 16 | import static bbejeck.chapter_6.processor.cogrouping.CogroupingMethodHandleProcessor.TUPLE_STORE_NAME; 17 | import static org.apache.kafka.streams.processor.PunctuationType.STREAM_TIME; 18 | import static org.hamcrest.CoreMatchers.equalTo; 19 | import static org.hamcrest.MatcherAssert.assertThat; 20 | import static org.mockito.Mockito.*; 21 | 22 | public class CogroupingMethodHandleProcessorTest { 23 | 24 | private ProcessorContext processorContext = mock(ProcessorContext.class); 25 | private CogroupingMethodHandleProcessor processor = new CogroupingMethodHandleProcessor(); 26 | private MockKeyValueStore, List>> keyValueStore = new MockKeyValueStore<>(); 27 | private ClickEvent clickEvent = new ClickEvent("ABC", "http://somelink.com", Instant.now()); 28 | private StockTransaction transaction = StockTransaction.newBuilder().withSymbol("ABC").build(); 29 | 30 | @Test 31 | @DisplayName("Processor should initialize correctly") 32 | public void testInitializeCorrectly() { 33 | processor.init(processorContext); 34 | verify(processorContext).schedule(eq(15000L), eq(STREAM_TIME), isA(Punctuator.class)); 35 | verify(processorContext).getStateStore(TUPLE_STORE_NAME); 36 | } 37 | 38 | @Test 39 | @DisplayName("Process method should store results") 40 | public void 
testProcessCorrectly() { 41 | 42 | when(processorContext.getStateStore(TUPLE_STORE_NAME)).thenReturn(keyValueStore); 43 | 44 | processor.init(processorContext); 45 | 46 | processor.process("ABC", Tuple.of(clickEvent, null)); 47 | 48 | Tuple,List> tuple = keyValueStore.innerStore().get("ABC"); 49 | 50 | assertThat(tuple._1.get(0), equalTo(clickEvent)); 51 | assertThat(tuple._2.isEmpty(), equalTo(true)); 52 | 53 | processor.process("ABC", Tuple.of(null, transaction)); 54 | 55 | assertThat(tuple._1.get(0), equalTo(clickEvent)); 56 | assertThat(tuple._2.get(0), equalTo(transaction)); 57 | 58 | assertThat(tuple._1.size(), equalTo(1)); 59 | assertThat(tuple._2.size(), equalTo(1)); 60 | 61 | } 62 | 63 | @Test 64 | @DisplayName("Punctuate should forward records") 65 | public void testPunctuateProcess(){ 66 | when(processorContext.getStateStore(TUPLE_STORE_NAME)).thenReturn(keyValueStore); 67 | 68 | processor.init(processorContext); 69 | processor.process("ABC", Tuple.of(clickEvent, null)); 70 | processor.process("ABC", Tuple.of(null, transaction)); 71 | 72 | Tuple,List> tuple = keyValueStore.innerStore().get("ABC"); 73 | List clickEvents = new ArrayList<>(tuple._1); 74 | List stockTransactions = new ArrayList<>(tuple._2); 75 | 76 | processor.cogroup(124722348947L); 77 | 78 | verify(processorContext).forward("ABC", Tuple.of(clickEvents, stockTransactions)); 79 | 80 | assertThat(tuple._1.size(), equalTo(0)); 81 | assertThat(tuple._2.size(), equalTo(0)); 82 | } 83 | 84 | } -------------------------------------------------------------------------------- /src/test/java/bbejeck/chapter_8/StockPerformanceStreamsProcessorTopologyTest.java: -------------------------------------------------------------------------------- 1 | package bbejeck.chapter_8; 2 | 3 | import bbejeck.model.StockPerformance; 4 | import bbejeck.model.StockTransaction; 5 | import bbejeck.util.datagen.DataGenerator; 6 | import bbejeck.util.serde.StreamsSerdes; 7 | import 
org.apache.kafka.clients.consumer.ConsumerConfig; 8 | import org.apache.kafka.common.serialization.Serde; 9 | import org.apache.kafka.common.serialization.Serdes; 10 | import org.apache.kafka.streams.StreamsConfig; 11 | import org.apache.kafka.streams.Topology; 12 | import org.apache.kafka.streams.state.KeyValueStore; 13 | import org.apache.kafka.test.ProcessorTopologyTestDriver; 14 | import org.junit.jupiter.api.BeforeEach; 15 | import org.junit.jupiter.api.DisplayName; 16 | import org.junit.jupiter.api.Test; 17 | 18 | import java.util.Properties; 19 | 20 | import static org.hamcrest.CoreMatchers.equalTo; 21 | import static org.hamcrest.CoreMatchers.notNullValue; 22 | import static org.hamcrest.MatcherAssert.assertThat; 23 | 24 | /** 25 | * User: Bill Bejeck 26 | * Date: 9/10/17 27 | * Time: 4:36 PM 28 | */ 29 | public class StockPerformanceStreamsProcessorTopologyTest { 30 | 31 | private ProcessorTopologyTestDriver topologyTestDriver; 32 | 33 | @BeforeEach 34 | public void setUp() { 35 | Properties props = new Properties(); 36 | props.put(StreamsConfig.CLIENT_ID_CONFIG, "ks-papi-stock-analysis-client"); 37 | props.put(ConsumerConfig.GROUP_ID_CONFIG, "ks-papi-stock-analysis-group"); 38 | props.put(StreamsConfig.APPLICATION_ID_CONFIG, "ks-stock-analysis-appid"); 39 | props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); 40 | props.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, 1); 41 | props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest"); 42 | props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName()); 43 | props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName()); 44 | 45 | 46 | StreamsConfig streamsConfig = new StreamsConfig(props); 47 | 48 | Topology topology = StockPerformanceStreamsProcessorTopology.build(); 49 | 50 | topologyTestDriver = new ProcessorTopologyTestDriver(streamsConfig, topology); 51 | } 52 | 53 | 54 | @Test 55 | @DisplayName("Checking State Store for 
Value") 56 | public void shouldStorePerformanceObjectInStore() { 57 | 58 | Serde stringSerde = Serdes.String(); 59 | Serde stockTransactionSerde = StreamsSerdes.StockTransactionSerde(); 60 | 61 | StockTransaction stockTransaction = DataGenerator.generateStockTransaction(); 62 | 63 | topologyTestDriver.process("stock-transactions", 64 | stockTransaction.getSymbol(), 65 | stockTransaction, 66 | stringSerde.serializer(), 67 | stockTransactionSerde.serializer()); 68 | 69 | KeyValueStore store = topologyTestDriver.getKeyValueStore("stock-performance-store"); 70 | 71 | assertThat(store.get(stockTransaction.getSymbol()), notNullValue()); 72 | 73 | StockPerformance stockPerformance = store.get(stockTransaction.getSymbol()); 74 | 75 | assertThat(stockPerformance.getCurrentShareVolume(), equalTo(stockTransaction.getShares())); 76 | assertThat(stockPerformance.getCurrentPrice(), equalTo(stockTransaction.getSharePrice())); 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /src/test/java/bbejeck/chapter_8/ZMartTopologyTest.java: -------------------------------------------------------------------------------- 1 | package bbejeck.chapter_8; 2 | 3 | import bbejeck.model.Purchase; 4 | import bbejeck.model.PurchasePattern; 5 | import bbejeck.model.RewardAccumulator; 6 | import bbejeck.util.datagen.DataGenerator; 7 | import bbejeck.util.serde.StreamsSerdes; 8 | import org.apache.kafka.clients.consumer.ConsumerConfig; 9 | import org.apache.kafka.clients.producer.ProducerRecord; 10 | import org.apache.kafka.common.serialization.Serde; 11 | import org.apache.kafka.common.serialization.Serdes; 12 | import org.apache.kafka.streams.StreamsConfig; 13 | import org.apache.kafka.streams.Topology; 14 | import org.apache.kafka.streams.processor.WallclockTimestampExtractor; 15 | import org.apache.kafka.test.ProcessorTopologyTestDriver; 16 | import org.junit.jupiter.api.BeforeEach; 17 | import org.junit.jupiter.api.DisplayName; 18 | import 
org.junit.jupiter.api.Test; 19 | 20 | import java.util.Properties; 21 | 22 | import static org.hamcrest.CoreMatchers.equalTo; 23 | import static org.hamcrest.MatcherAssert.assertThat; 24 | 25 | /** 26 | * User: Bill Bejeck 27 | * Date: 9/9/17 28 | * Time: 2:39 PM 29 | */ 30 | public class ZMartTopologyTest { 31 | 32 | private ProcessorTopologyTestDriver topologyTestDriver; 33 | 34 | @BeforeEach 35 | public void setUp() { 36 | Properties props = new Properties(); 37 | props.put(StreamsConfig.CLIENT_ID_CONFIG, "FirstZmart-Kafka-Streams-Client"); 38 | props.put(ConsumerConfig.GROUP_ID_CONFIG, "zmart-purchases"); 39 | props.put(StreamsConfig.APPLICATION_ID_CONFIG, "FirstZmart-Kafka-Streams-App"); 40 | props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); 41 | props.put(StreamsConfig.REPLICATION_FACTOR_CONFIG, 1); 42 | props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, WallclockTimestampExtractor.class); 43 | 44 | StreamsConfig streamsConfig = new StreamsConfig(props); 45 | Topology topology = ZMartTopology.build(); 46 | 47 | topologyTestDriver = new ProcessorTopologyTestDriver(streamsConfig, topology); 48 | } 49 | 50 | 51 | @Test 52 | @DisplayName("Testing the ZMart Topology Flow") 53 | public void testZMartTopology() { 54 | 55 | Serde purchaseSerde = StreamsSerdes.PurchaseSerde(); 56 | Serde purchasePatternSerde = StreamsSerdes.PurchasePatternSerde(); 57 | Serde rewardAccumulatorSerde = StreamsSerdes.RewardAccumulatorSerde(); 58 | Serde stringSerde = Serdes.String(); 59 | 60 | Purchase purchase = DataGenerator.generatePurchase(); 61 | 62 | topologyTestDriver.process("transactions", 63 | null, 64 | purchase, 65 | stringSerde.serializer(), 66 | purchaseSerde.serializer()); 67 | 68 | ProducerRecord record = topologyTestDriver.readOutput("purchases", 69 | stringSerde.deserializer(), 70 | purchaseSerde.deserializer()); 71 | 72 | Purchase expectedPurchase = Purchase.builder(purchase).maskCreditCard().build(); 73 | assertThat(record.value(), 
equalTo(expectedPurchase)); 74 | 75 | 76 | RewardAccumulator expectedRewardAccumulator = RewardAccumulator.builder(expectedPurchase).build(); 77 | 78 | ProducerRecord accumulatorProducerRecord = topologyTestDriver.readOutput("rewards", 79 | stringSerde.deserializer(), 80 | rewardAccumulatorSerde.deserializer()); 81 | 82 | assertThat(accumulatorProducerRecord.value(), equalTo(expectedRewardAccumulator)); 83 | 84 | PurchasePattern expectedPurchasePattern = PurchasePattern.builder(expectedPurchase).build(); 85 | 86 | ProducerRecord purchasePatternProducerRecord = topologyTestDriver.readOutput("patterns", 87 | stringSerde.deserializer(), 88 | purchasePatternSerde.deserializer()); 89 | 90 | assertThat(purchasePatternProducerRecord.value(), equalTo(expectedPurchasePattern)); 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /src/test/java/bbejeck/util/serializer/EventTransactionTupleSerdeTest.java: -------------------------------------------------------------------------------- 1 | package bbejeck.util.serializer; 2 | 3 | 4 | import bbejeck.model.ClickEvent; 5 | import bbejeck.model.StockTransaction; 6 | import bbejeck.util.collection.Tuple; 7 | import bbejeck.util.serde.StreamsSerdes; 8 | import org.apache.kafka.common.serialization.Serde; 9 | import org.junit.Before; 10 | import org.junit.Test; 11 | 12 | import java.time.Instant; 13 | import java.util.ArrayList; 14 | import java.util.List; 15 | 16 | import static org.hamcrest.CoreMatchers.is; 17 | import static org.junit.Assert.assertEquals; 18 | import static org.junit.Assert.assertThat; 19 | 20 | public class EventTransactionTupleSerdeTest { 21 | 22 | private StockTransaction transaction; 23 | private ClickEvent clickEvent; 24 | private Tuple, List> eventTuple; 25 | 26 | private Serde, List>> tupleSerde = StreamsSerdes.EventTransactionTupleSerde(); 27 | 28 | @Before 29 | public void setUp() { 30 | 31 | transaction = StockTransaction.newBuilder() 32 | 
.withCustomerId("custId") 33 | .withIndustry("foo") 34 | .withPurchase(false) 35 | .withSector("sector") 36 | .withSharePrice(25.25) 37 | .withShares(500) 38 | .withSymbol("XYZ").build(); 39 | 40 | clickEvent = new ClickEvent("XYZ", "http://link", Instant.now()); 41 | List eventList = new ArrayList<>(); 42 | List transactionList = new ArrayList<>(); 43 | 44 | eventList.add(clickEvent); 45 | transactionList.add(transaction); 46 | 47 | eventTuple = Tuple.of(eventList, transactionList); 48 | } 49 | 50 | 51 | @Test 52 | public void testSerializeDeserialize() throws Exception { 53 | 54 | byte[] bytes = tupleSerde.serializer().serialize("topic", eventTuple); 55 | 56 | Tuple, List> deserializedTuple = tupleSerde.deserializer().deserialize("topic", bytes); 57 | 58 | List deserializedEvts = deserializedTuple._1; 59 | List deserializedTxns = deserializedTuple._2; 60 | 61 | assertThat(deserializedEvts.size(), is(1)); 62 | assertThat(deserializedTxns.size(), is(1)); 63 | assertEquals(deserializedEvts.get(0), clickEvent); 64 | assertEquals(deserializedTxns.get(0), transaction); 65 | } 66 | 67 | } 68 | -------------------------------------------------------------------------------- /src/test/java/bbejeck/util/serializer/PurchaseKeySerdeTest.java: -------------------------------------------------------------------------------- 1 | package bbejeck.util.serializer; 2 | 3 | import bbejeck.model.PurchaseKey; 4 | import bbejeck.util.serde.StreamsSerdes; 5 | import org.apache.kafka.common.serialization.Serde; 6 | import org.junit.Before; 7 | import org.junit.Test; 8 | 9 | import java.util.Date; 10 | 11 | import static org.mockito.internal.matchers.Equality.areEqual; 12 | 13 | 14 | /** 15 | * User: Bill Bejeck 16 | * Date: 3/25/18 17 | * Time: 6:50 PM 18 | */ 19 | public class PurchaseKeySerdeTest { 20 | 21 | private PurchaseKey purchaseKey; 22 | private Serde purchaseKeySerde = StreamsSerdes.purchaseKeySerde(); 23 | 24 | @Before 25 | public void setUp() { 26 | purchaseKey = new 
PurchaseKey("123345", new Date()); 27 | } 28 | 29 | 30 | @Test 31 | public void testSerializePurchaseKey() { 32 | byte[] serialized = purchaseKeySerde.serializer().serialize("topic", purchaseKey); 33 | PurchaseKey deserialized = purchaseKeySerde.deserializer().deserialize("topic", serialized); 34 | areEqual(purchaseKey, deserialized); 35 | 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/test/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | 2 | 3 | # stdout Appender 4 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender 5 | log4j.appender.stdout.Target=System.out 6 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout 7 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n 8 | 9 | # Producer Interceptor Logs 10 | log4j.appender.producerInterceptor=org.apache.log4j.FileAppender 11 | log4j.appender.producerInterceptor.File=logs/producer_interceptor.log 12 | log4j.appender.producerInterceptor.Append=false 13 | log4j.appender.producerInterceptor.layout=org.apache.log4j.PatternLayout 14 | log4j.appender.producerInterceptor.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n 15 | 16 | # Consumer Interceptor Logs 17 | log4j.appender.consumerInterceptor=org.apache.log4j.FileAppender 18 | log4j.appender.consumerInterceptor.File=logs/consumer_interceptor.log 19 | log4j.appender.consumerInterceptor.Append=false 20 | log4j.appender.consumerInterceptor.layout=org.apache.log4j.PatternLayout 21 | log4j.appender.consumerInterceptor.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n 22 | 23 | 24 | # State Restore Listener Logs 25 | log4j.appender.restoreListener=org.apache.log4j.FileAppender 26 | log4j.appender.restoreListener.File=logs/state_restore_listener.log 27 | log4j.appender.restoreListener.Append=false 28 | 
log4j.appender.restoreListener.layout=org.apache.log4j.PatternLayout 29 | log4j.appender.restoreListener.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n 30 | 31 | 32 | # Kafka Logs 33 | log4j.appender.kafkaLog=org.apache.log4j.FileAppender 34 | log4j.appender.kafkaLog.File=logs/kafka_streams_in_action.log 35 | log4j.appender.kafkaLog.Append=false 36 | log4j.appender.kafkaLog.layout=org.apache.log4j.PatternLayout 37 | log4j.appender.kafkaLog.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n 38 | 39 | 40 | 41 | log4j.logger.bbejeck.chapter_7.interceptors.StockTransactionConsumerInterceptor=INFO, consumerInterceptor 42 | log4j.additivity.bbejeck.chapter_7.interceptors.StockTransactionConsumerInterceptor=false 43 | 44 | log4j.logger.bbejeck.chapter_7.interceptors.ZMartProducerInterceptor=INFO, producerInterceptor 45 | log4j.additivity.bbejeck.chapter_7.interceptors.ZMartProducerInterceptor=false 46 | 47 | 48 | log4j.logger.bbejeck.chapter_7.restore.LoggingStateRestoreListener=INFO, restoreListener 49 | log4j.additivity.bbejeck.chapter_7.restore.LoggingStateRestoreListener=false 50 | 51 | log4j.logger.org.apache.kafka=INFO, kafkaLog 52 | log4j.logger.bbejeck=INFO, stdout 53 | 54 | 55 | 56 | --------------------------------------------------------------------------------