├── .gitignore
├── README.md
├── build.gradle
├── cleanup-kafka.sh
├── gradle
│   └── wrapper
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
└── src
    └── main
        ├── java
        │   └── co
        │       └── paralleluniverse
        │           └── actors
        │               └── integration
        │                   └── examples
        │                       ├── Msg.java
        │                       ├── Util.java
        │                       ├── direct
        │                       │   ├── kafka
        │                       │   │   └── Main.java
        │                       │   └── zeromq
        │                       │       └── Main.java
        │                       └── proxied
        │                           ├── ConsumerActor.java
        │                           ├── ProducerActor.java
        │                           ├── kafka
        │                           │   ├── KafkaProxies.java
        │                           │   └── Main.java
        │                           └── zeromq
        │                               ├── Main.java
        │                               └── ZeroMQProxies.java
        └── resources
            └── logback.xml

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
*.class

# Mobile Tools for Java (J2ME)
.mtj.tmp/

# Package Files #
*.jar
*.war
*.ear

# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
hs_err_pid*

.classpath
.project
.settings
/target/
/build/

*.iml

/.idea/
/.gradle/

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# _quasar-actors-integration-examples_

## ZeroMQ

Clone (or download sources from a release) and [install ZeroMQ 4.1](https://raw.githubusercontent.com/zeromq/zeromq4-1/master/INSTALL) (also available on Linux Brew), [as well as jzmq-jni](https://github.com/zeromq/jzmq/tree/master/jzmq-jni) (not available on Linux Brew as of 2016-04-10); then select your main class at the end of `build.gradle` and run `./gradlew`.

## Apache Kafka

[Download and uncompress the Kafka 0.9.0.1 distribution](http://kafka.apache.org/downloads.html), then start ZooKeeper on port 2181 and Kafka on port 9092 in two separate terminals:

```
bin/zookeeper-server-start.sh config/zookeeper.properties
```

```
bin/kafka-server-start.sh config/server.properties
```

Then, in another terminal, create the two topics:

```
bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 2 --topic kafkadirect
bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 2 --topic kafkaproxied
```

Finally, select your main class at the end of `build.gradle` and run `./gradlew`.

You can use `. cleanup-kafka.sh` to remove all ZooKeeper and Kafka data files if you want to start from a clean slate.
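The main-class switch mentioned above sits at the very end of `build.gradle` (copied here for convenience); uncomment exactly one `mainClassName` line (the proxied ZeroMQ example is enabled by default):

```
// mainClassName = 'co.paralleluniverse.actors.integration.examples.direct.zeromq.Main'
mainClassName = 'co.paralleluniverse.actors.integration.examples.proxied.zeromq.Main'
// mainClassName = 'co.paralleluniverse.actors.integration.examples.direct.kafka.Main'
// mainClassName = 'co.paralleluniverse.actors.integration.examples.proxied.kafka.Main'

defaultTasks 'run'
```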
-------------------------------------------------------------------------------- /build.gradle: -------------------------------------------------------------------------------- 1 | apply plugin: 'java' 2 | apply plugin: 'application' 3 | 4 | group = 'co.paralleluniverse' 5 | version = '0.1.0-SNAPSHOT' 6 | status = 'integration' 7 | description = 'Quasar Actors with ZeroMQ and Kafka' 8 | 9 | ext.classifier = ':jdk8' // ':' 10 | sourceCompatibility = 1.8 // 1.7 11 | targetCompatibility = 1.8 // 1.7 12 | 13 | ext.quasarVer = '0.7.5-SNAPSHOT' 14 | ext.comsatVer = '0.7.0' 15 | ext.jzmqVer = '3.1.0' 16 | ext.slf4jSimpleVer = '1.7.21' 17 | ext.junitVer = '4.12' 18 | ext.kafkaClientsVer = '0.9.0.1' 19 | 20 | [compileJava, compileTestJava]*.options*.encoding = 'UTF-8' 21 | 22 | configurations { 23 | quasar 24 | } 25 | 26 | configurations.all { 27 | resolutionStrategy { 28 | failOnVersionConflict() 29 | 30 | force "org.slf4j:slf4j-api:${slf4jSimpleVer}" 31 | force "org.apache.kafka:kafka-clients:$kafkaClientsVer" 32 | } 33 | } 34 | 35 | repositories { 36 | mavenLocal() 37 | mavenCentral() 38 | maven { url 'https://oss.sonatype.org/content/repositories/releases' } 39 | maven { url 'https://oss.sonatype.org/content/repositories/snapshots' } 40 | } 41 | 42 | dependencies { 43 | compile "co.paralleluniverse:quasar-core:${quasarVer}${classifier}" 44 | compile "co.paralleluniverse:quasar-actors:${quasarVer}" 45 | 46 | compile "org.zeromq:jzmq:${jzmqVer}" 47 | 48 | compile "co.paralleluniverse:comsat-kafka:${comsatVer}" 49 | 50 | quasar "co.paralleluniverse:quasar-core:${quasarVer}${classifier}@jar" 51 | 52 | runtime "org.slf4j:slf4j-simple:${slf4jSimpleVer}" 53 | 54 | testCompile "junit:junit:$junitVer" 55 | } 56 | 57 | applicationDefaultJvmArgs = [ 58 | '-Dco.paralleluniverse.fibers.verifyInstrumentation=true', 59 | '-Dco.paralleluniverse.fibers.detectRunawayFibers=false', 60 | "-javaagent:${configurations.quasar.singleFile}" // =v, =d 61 | ] 62 | 63 | task wrapper(type: Wrapper) { 64 | gradleVersion = '2.12' 65 | } 66 | 67 | // mainClassName = 'co.paralleluniverse.actors.integration.examples.direct.zeromq.Main' 68 | mainClassName = 'co.paralleluniverse.actors.integration.examples.proxied.zeromq.Main' 69 | // mainClassName = 'co.paralleluniverse.actors.integration.examples.direct.kafka.Main' 70 | // mainClassName = 'co.paralleluniverse.actors.integration.examples.proxied.kafka.Main' 71 | 72 | defaultTasks 'run' 73 | -------------------------------------------------------------------------------- /cleanup-kafka.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | sudo rm -rf /tmp/zookeeper 3 | sudo rm -rf /tmp/kafka-logs 4 | -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | #Sun Apr 03 11:40:17 IDT 2016 2 | distributionBase=GRADLE_USER_HOME 3 | distributionPath=wrapper/dists 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | distributionUrl=https\://services.gradle.org/distributions/gradle-2.12-bin.zip 7 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ############################################################################## 4 | ## 5 | ## Gradle start up script for UN*X 6 | ## 7 | 
############################################################################## 8 | 9 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 10 | DEFAULT_JVM_OPTS="" 11 | 12 | APP_NAME="Gradle" 13 | APP_BASE_NAME=`basename "$0"` 14 | 15 | # Use the maximum available, or set MAX_FD != -1 to use that value. 16 | MAX_FD="maximum" 17 | 18 | warn ( ) { 19 | echo "$*" 20 | } 21 | 22 | die ( ) { 23 | echo 24 | echo "$*" 25 | echo 26 | exit 1 27 | } 28 | 29 | # OS specific support (must be 'true' or 'false'). 30 | cygwin=false 31 | msys=false 32 | darwin=false 33 | case "`uname`" in 34 | CYGWIN* ) 35 | cygwin=true 36 | ;; 37 | Darwin* ) 38 | darwin=true 39 | ;; 40 | MINGW* ) 41 | msys=true 42 | ;; 43 | esac 44 | 45 | # Attempt to set APP_HOME 46 | # Resolve links: $0 may be a link 47 | PRG="$0" 48 | # Need this for relative symlinks. 49 | while [ -h "$PRG" ] ; do 50 | ls=`ls -ld "$PRG"` 51 | link=`expr "$ls" : '.*-> \(.*\)$'` 52 | if expr "$link" : '/.*' > /dev/null; then 53 | PRG="$link" 54 | else 55 | PRG=`dirname "$PRG"`"/$link" 56 | fi 57 | done 58 | SAVED="`pwd`" 59 | cd "`dirname \"$PRG\"`/" >/dev/null 60 | APP_HOME="`pwd -P`" 61 | cd "$SAVED" >/dev/null 62 | 63 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 64 | 65 | # Determine the Java command to use to start the JVM. 66 | if [ -n "$JAVA_HOME" ] ; then 67 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 68 | # IBM's JDK on AIX uses strange locations for the executables 69 | JAVACMD="$JAVA_HOME/jre/sh/java" 70 | else 71 | JAVACMD="$JAVA_HOME/bin/java" 72 | fi 73 | if [ ! -x "$JAVACMD" ] ; then 74 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 75 | 76 | Please set the JAVA_HOME variable in your environment to match the 77 | location of your Java installation." 78 | fi 79 | else 80 | JAVACMD="java" 81 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 82 | 83 | Please set the JAVA_HOME variable in your environment to match the 84 | location of your Java installation." 85 | fi 86 | 87 | # Increase the maximum file descriptors if we can. 88 | if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then 89 | MAX_FD_LIMIT=`ulimit -H -n` 90 | if [ $? -eq 0 ] ; then 91 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 92 | MAX_FD="$MAX_FD_LIMIT" 93 | fi 94 | ulimit -n $MAX_FD 95 | if [ $? 
-ne 0 ] ; then 96 | warn "Could not set maximum file descriptor limit: $MAX_FD" 97 | fi 98 | else 99 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 100 | fi 101 | fi 102 | 103 | # For Darwin, add options to specify how the application appears in the dock 104 | if $darwin; then 105 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 106 | fi 107 | 108 | # For Cygwin, switch paths to Windows format before running java 109 | if $cygwin ; then 110 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 111 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 112 | JAVACMD=`cygpath --unix "$JAVACMD"` 113 | 114 | # We build the pattern for arguments to be converted via cygpath 115 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 116 | SEP="" 117 | for dir in $ROOTDIRSRAW ; do 118 | ROOTDIRS="$ROOTDIRS$SEP$dir" 119 | SEP="|" 120 | done 121 | OURCYGPATTERN="(^($ROOTDIRS))" 122 | # Add a user-defined pattern to the cygpath arguments 123 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 124 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 125 | fi 126 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 127 | i=0 128 | for arg in "$@" ; do 129 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 130 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 131 | 132 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 133 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 134 | else 135 | eval `echo args$i`="\"$arg\"" 136 | fi 137 | i=$((i+1)) 138 | done 139 | case $i in 140 | (0) set -- ;; 141 | (1) set -- "$args0" ;; 142 | (2) set -- "$args0" "$args1" ;; 143 | (3) set -- "$args0" "$args1" "$args2" ;; 144 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;; 145 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 146 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 147 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 148 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 149 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 150 | esac 151 | fi 152 | 153 | # Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules 154 | function splitJvmOpts() { 155 | JVM_OPTS=("$@") 156 | } 157 | eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS 158 | JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" 159 | 160 | exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" 161 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @if "%DEBUG%" == "" @echo off 2 | @rem ########################################################################## 3 | @rem 4 | @rem Gradle startup script for Windows 5 | @rem 6 | @rem ########################################################################## 7 | 8 | @rem Set local scope for the variables with windows NT shell 9 | if "%OS%"=="Windows_NT" setlocal 10 | 11 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 12 | set DEFAULT_JVM_OPTS= 13 | 14 | set DIRNAME=%~dp0 15 | if "%DIRNAME%" == "" set DIRNAME=. 
16 | set APP_BASE_NAME=%~n0 17 | set APP_HOME=%DIRNAME% 18 | 19 | @rem Find java.exe 20 | if defined JAVA_HOME goto findJavaFromJavaHome 21 | 22 | set JAVA_EXE=java.exe 23 | %JAVA_EXE% -version >NUL 2>&1 24 | if "%ERRORLEVEL%" == "0" goto init 25 | 26 | echo. 27 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 28 | echo. 29 | echo Please set the JAVA_HOME variable in your environment to match the 30 | echo location of your Java installation. 31 | 32 | goto fail 33 | 34 | :findJavaFromJavaHome 35 | set JAVA_HOME=%JAVA_HOME:"=% 36 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 37 | 38 | if exist "%JAVA_EXE%" goto init 39 | 40 | echo. 41 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 42 | echo. 43 | echo Please set the JAVA_HOME variable in your environment to match the 44 | echo location of your Java installation. 45 | 46 | goto fail 47 | 48 | :init 49 | @rem Get command-line arguments, handling Windows variants 50 | 51 | if not "%OS%" == "Windows_NT" goto win9xME_args 52 | if "%@eval[2+2]" == "4" goto 4NT_args 53 | 54 | :win9xME_args 55 | @rem Slurp the command line arguments. 56 | set CMD_LINE_ARGS= 57 | set _SKIP=2 58 | 59 | :win9xME_args_slurp 60 | if "x%~1" == "x" goto execute 61 | 62 | set CMD_LINE_ARGS=%* 63 | goto execute 64 | 65 | :4NT_args 66 | @rem Get arguments from the 4NT Shell from JP Software 67 | set CMD_LINE_ARGS=%$ 68 | 69 | :execute 70 | @rem Setup the command line 71 | 72 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 73 | 74 | @rem Execute Gradle 75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% 76 | 77 | :end 78 | @rem End local scope for the variables with windows NT shell 79 | if "%ERRORLEVEL%"=="0" goto mainEnd 80 | 81 | :fail 82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 83 | rem the _cmd.exe /c_ return code! 
84 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 85 | exit /b 1 86 | 87 | :mainEnd 88 | if "%OS%"=="Windows_NT" endlocal 89 | 90 | :omega 91 | -------------------------------------------------------------------------------- /src/main/java/co/paralleluniverse/actors/integration/examples/Msg.java: -------------------------------------------------------------------------------- 1 | package co.paralleluniverse.actors.integration.examples; 2 | 3 | import java.io.Serializable; 4 | 5 | public final class Msg implements Serializable { 6 | private static final long serialVersionUID = 0L; 7 | 8 | @SuppressWarnings("WeakerAccess") 9 | public int id; 10 | 11 | @SuppressWarnings("unused") 12 | public Msg() {} // For serialization 13 | 14 | public Msg(int id) { 15 | this.id = id; 16 | } 17 | 18 | @Override 19 | public String toString() { 20 | return "Msg(" + id + ")"; 21 | } 22 | 23 | @Override 24 | public boolean equals(Object o) { 25 | if (this == o) return true; 26 | if (o == null || getClass() != o.getClass()) return false; 27 | 28 | final Msg msg = (Msg) o; 29 | 30 | return id == msg.id; 31 | 32 | } 33 | 34 | @Override 35 | public int hashCode() { 36 | return id; 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /src/main/java/co/paralleluniverse/actors/integration/examples/Util.java: -------------------------------------------------------------------------------- 1 | package co.paralleluniverse.actors.integration.examples; 2 | 3 | import co.paralleluniverse.common.util.CheckedCallable; 4 | import co.paralleluniverse.fibers.SuspendExecution; 5 | import co.paralleluniverse.fibers.Suspendable; 6 | 7 | import java.util.concurrent.Callable; 8 | import java.util.concurrent.ExecutorService; 9 | 10 | import static co.paralleluniverse.fibers.FiberAsync.runBlocking; 11 | 12 | public final class Util { 13 | public static final Msg EXIT = new Msg(-1); 14 | 15 | public static final int MSGS = 1000; 16 | 17 | public static void exec(ExecutorService es, Runnable r) throws InterruptedException, SuspendExecution { 18 | try { 19 | runBlocking(es, (CheckedCallable) () -> { 20 | r.run(); 21 | return null; 22 | }); 23 | } catch (final Exception e) { 24 | throw new RuntimeException(e); 25 | } 26 | } 27 | 28 | public static V call(ExecutorService es, Callable c) throws InterruptedException, SuspendExecution { 29 | try { 30 | //noinspection RedundantCast 31 | return runBlocking(es, (CheckedCallable) c::call); 32 | } catch (final Exception e) { 33 | throw new RuntimeException(e); 34 | } 35 | } 36 | 37 | private Util() {} 38 | } 39 | -------------------------------------------------------------------------------- /src/main/java/co/paralleluniverse/actors/integration/examples/direct/kafka/Main.java: -------------------------------------------------------------------------------- 1 | package co.paralleluniverse.actors.integration.examples.direct.kafka; 2 | 3 | import co.paralleluniverse.actors.Actor; 4 | import co.paralleluniverse.actors.MailboxConfig; 5 | import co.paralleluniverse.actors.integration.examples.Msg; 6 | import co.paralleluniverse.common.util.CheckedCallable; 7 | import co.paralleluniverse.fibers.FiberAsync; 8 | import co.paralleluniverse.fibers.SuspendExecution; 9 | import co.paralleluniverse.fibers.kafka.FiberKafkaProducer; 10 | import com.google.common.base.Charsets; 11 | import org.apache.kafka.clients.consumer.*; 12 | import org.apache.kafka.clients.producer.KafkaProducer; 13 | import org.apache.kafka.clients.producer.ProducerRecord; 14 | 15 | import 
java.io.*; 16 | import java.util.Arrays; 17 | import java.util.Collections; 18 | import java.util.Properties; 19 | import java.util.concurrent.ExecutionException; 20 | import java.util.concurrent.ExecutorService; 21 | import java.util.concurrent.Executors; 22 | 23 | import static co.paralleluniverse.actors.integration.examples.Util.*; 24 | 25 | public final class Main { 26 | private static final ExecutorService e = Executors.newFixedThreadPool(2); 27 | 28 | public static void main(String[] args) throws ExecutionException, InterruptedException { 29 | try { 30 | final ProducerActor pa = Actor.newActor(ProducerActor.class); 31 | final ConsumerActor ca = Actor.newActor(ConsumerActor.class); 32 | pa.spawn(); 33 | System.err.println("Producer started"); 34 | ca.spawn(); 35 | System.err.println("Consumer started"); 36 | pa.join(); 37 | System.err.println("Producer finished"); 38 | ca.join(); 39 | System.err.println("Consumer finished"); 40 | } finally { 41 | e.shutdown(); 42 | } 43 | } 44 | 45 | private static final MailboxConfig DEFAULT_MBC = new MailboxConfig(); 46 | private static final String BOOTSTRAP = "localhost:9092"; 47 | private static final String TOPIC = "kafkadirect"; 48 | 49 | private final static Properties producerConfig; 50 | 51 | static { 52 | producerConfig = new Properties(); 53 | producerConfig.put("bootstrap.servers", BOOTSTRAP); 54 | producerConfig.put("client.id", "DemoProducer"); 55 | producerConfig.put("key.serializer", "org.apache.kafka.common.serialization.IntegerSerializer"); 56 | producerConfig.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer"); 57 | } 58 | 59 | private static final class ProducerActor extends Actor { 60 | public ProducerActor() { 61 | super("producer", DEFAULT_MBC); 62 | } 63 | 64 | @Override 65 | protected final Void doRun() throws InterruptedException, SuspendExecution { 66 | try (final FiberKafkaProducer producer = new FiberKafkaProducer<>(new KafkaProducer<>(producerConfig))) { 67 | //noinspection InfiniteLoopStatement 68 | for (int i = 0; i < MSGS ; i++) { 69 | final Msg m = new Msg(i); 70 | try (final ByteArrayOutputStream baos = new ByteArrayOutputStream(1024) ; 71 | final ObjectOutputStream oos = new ObjectOutputStream(baos)) { 72 | oos.writeObject(m); 73 | oos.flush(); 74 | baos.flush(); 75 | System.err.printf("PRODUCER: topic = %s, key = %d, value = %s\n", TOPIC, i, m); 76 | producer.send(new ProducerRecord<>(TOPIC, i, baos.toByteArray())); 77 | producer.flush(); 78 | } catch (final IOException e) { 79 | e.printStackTrace(); 80 | throw new RuntimeException(e); 81 | } 82 | } 83 | System.err.println("PRODUCER: sending EXIT"); 84 | producer.send(new ProducerRecord<>(TOPIC, null, "exit".getBytes(Charsets.US_ASCII))); 85 | System.err.println("PRODUCER: exiting"); 86 | return null; 87 | } 88 | } 89 | } 90 | 91 | private final static Properties consumerConfig; 92 | static { 93 | consumerConfig = new Properties(); 94 | consumerConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP); 95 | consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, "DemoConsumer"); 96 | consumerConfig.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); 97 | consumerConfig.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000"); 98 | consumerConfig.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000"); 99 | consumerConfig.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.IntegerDeserializer"); 100 | consumerConfig.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, 
"org.apache.kafka.common.serialization.ByteArrayDeserializer"); 101 | } 102 | 103 | private static final class ConsumerActor extends Actor { 104 | public ConsumerActor() { 105 | super("consumer", DEFAULT_MBC); 106 | } 107 | 108 | @Override 109 | protected final Void doRun() throws InterruptedException, SuspendExecution { 110 | try (final Consumer consumer = new KafkaConsumer<>(consumerConfig)) { 111 | consumer.subscribe(Collections.singletonList(TOPIC)); 112 | for (;;) { 113 | System.err.println("CONSUMER: polling"); 114 | final ConsumerRecords records = call(e, () -> consumer.poll(1000L)); 115 | System.err.println("CONSUMER: received"); 116 | for (final ConsumerRecord record : records) { 117 | final byte[] v = record.value(); 118 | if (Arrays.equals("exit".getBytes(Charsets.US_ASCII), v)) { 119 | System.err.printf("CONSUMER: exiting\n"); 120 | return null; 121 | } 122 | 123 | try (final ByteArrayInputStream bis = new ByteArrayInputStream(v); 124 | final ObjectInputStream ois = new ObjectInputStream(bis)) { 125 | System.err.printf("CONSUMER: topic = %s, offset = %d, key = %d, value = %s\n", record.topic(), record.offset(), record.key(), ois.readObject()); 126 | } catch (final IOException | ClassNotFoundException e) { 127 | e.printStackTrace(); 128 | throw new RuntimeException(e); 129 | } 130 | } 131 | } 132 | } 133 | } 134 | } 135 | 136 | private Main() {} 137 | } 138 | -------------------------------------------------------------------------------- /src/main/java/co/paralleluniverse/actors/integration/examples/direct/zeromq/Main.java: -------------------------------------------------------------------------------- 1 | package co.paralleluniverse.actors.integration.examples.direct.zeromq; 2 | 3 | import co.paralleluniverse.actors.Actor; 4 | import co.paralleluniverse.actors.BasicActor; 5 | import co.paralleluniverse.actors.integration.examples.Msg; 6 | import co.paralleluniverse.fibers.SuspendExecution; 7 | import com.google.common.base.Charsets; 8 | import org.zeromq.ZMQ; 9 | 10 | import java.io.*; 11 | import java.util.Arrays; 12 | import java.util.concurrent.ExecutionException; 13 | import java.util.concurrent.ExecutorService; 14 | import java.util.concurrent.Executors; 15 | 16 | import static co.paralleluniverse.actors.integration.examples.Util.*; 17 | 18 | public final class Main { 19 | public static void main(String[] args) throws ExecutionException, InterruptedException { 20 | try { 21 | final ProducerActor pa = Actor.newActor(ProducerActor.class); 22 | final ConsumerActor ca = Actor.newActor(ConsumerActor.class); 23 | pa.spawn(); 24 | System.err.println("Producer started"); 25 | ca.spawn(); 26 | System.err.println("Consumer started"); 27 | pa.join(); 28 | System.err.println("Producer finished"); 29 | ca.join(); 30 | System.err.println("Consumer finished"); 31 | } finally { 32 | ep.shutdown(); 33 | ec.shutdown(); 34 | } 35 | } 36 | 37 | private static final String TARGET_ADDR = "tcp://localhost:8000"; 38 | 39 | private static final ExecutorService ep = Executors.newFixedThreadPool(2); 40 | 41 | @SuppressWarnings("WeakerAccess") 42 | public static final class ProducerActor extends BasicActor { 43 | @Override 44 | protected final Void doRun() throws InterruptedException, SuspendExecution { 45 | try (final ZMQ.Context zmq = ZMQ.context(1 /* IO threads */); 46 | final ZMQ.Socket trgt = zmq.socket(ZMQ.REQ)) { 47 | System.err.println("PRODUCER: connecting to " + TARGET_ADDR); 48 | exec(ep, () -> trgt.connect(TARGET_ADDR)); 49 | for (int i = 0; i < MSGS; i++) { 50 | final Msg m = new 
Msg(i); 51 | try (final ByteArrayOutputStream baos = new ByteArrayOutputStream(1024) ; 52 | final ObjectOutputStream oos = new ObjectOutputStream(baos)) { 53 | oos.writeObject(m); 54 | oos.flush(); 55 | baos.flush(); 56 | System.err.printf("PRODUCER: sending %s\n", m); 57 | call(ep, () -> trgt.send(baos.toByteArray(), 0)); 58 | System.err.println("PRODUCER: ACK received"); 59 | call(ep, trgt::recv); // ACK 60 | } catch (final IOException e) { 61 | e.printStackTrace(); 62 | throw new RuntimeException(e); 63 | } 64 | } 65 | 66 | System.err.println("PRODUCER: sending exit"); 67 | call(ep, () -> trgt.send("exit".getBytes(Charsets.US_ASCII), 0)); 68 | System.err.println("PRODUCER: exiting"); 69 | return null; 70 | } 71 | } 72 | } 73 | 74 | private static final ExecutorService ec = Executors.newFixedThreadPool(2); 75 | 76 | private static final String SRC_BIND_ENDPOINT = "tcp://*:8000"; 77 | 78 | @SuppressWarnings("WeakerAccess") 79 | public static final class ConsumerActor extends BasicActor { 80 | @Override 81 | protected final Void doRun() throws InterruptedException, SuspendExecution { 82 | try (final ZMQ.Context zmq = ZMQ.context(1 /* IO threads */); 83 | final ZMQ.Socket src = zmq.socket(ZMQ.REP)) { 84 | System.err.println("CONSUMER: binding src"); 85 | exec(ec, () -> src.bind(SRC_BIND_ENDPOINT)); 86 | //noinspection InfiniteLoopStatement 87 | for (;;) { 88 | System.err.println("CONSUMER: receiving"); 89 | final byte[] v = call(ec, src::recv); 90 | System.err.println("CONSUMER: ACKing"); 91 | exec(ec, () -> src.send(ACK)); 92 | if (Arrays.equals("exit".getBytes(Charsets.US_ASCII), v)) { 93 | System.err.println("CONSUMER: exiting"); 94 | return null; 95 | } 96 | 97 | try (final ByteArrayInputStream bis = new ByteArrayInputStream(v); 98 | final ObjectInputStream ois = new ObjectInputStream(bis)) { 99 | System.err.printf("CONSUMER: received %s\n", ois.readObject()); 100 | } catch (final IOException | ClassNotFoundException e) { 101 | e.printStackTrace(); 102 | throw new RuntimeException(e); 103 | } 104 | } 105 | } 106 | } 107 | } 108 | 109 | private static final String ACK = "ACK"; 110 | private Main() {} 111 | } 112 | -------------------------------------------------------------------------------- /src/main/java/co/paralleluniverse/actors/integration/examples/proxied/ConsumerActor.java: -------------------------------------------------------------------------------- 1 | package co.paralleluniverse.actors.integration.examples.proxied; 2 | 3 | import co.paralleluniverse.actors.BasicActor; 4 | import co.paralleluniverse.actors.integration.examples.Msg; 5 | import co.paralleluniverse.fibers.SuspendExecution; 6 | 7 | import static co.paralleluniverse.actors.integration.examples.Util.*; 8 | 9 | public final class ConsumerActor extends BasicActor { 10 | @Override 11 | protected final Void doRun() throws InterruptedException, SuspendExecution { 12 | for (; ; ) { 13 | final Msg m = receive(); 14 | System.err.println("USER CONSUMER: " + m); 15 | if (EXIT.equals(m)) 16 | return null; 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /src/main/java/co/paralleluniverse/actors/integration/examples/proxied/ProducerActor.java: -------------------------------------------------------------------------------- 1 | package co.paralleluniverse.actors.integration.examples.proxied; 2 | 3 | import co.paralleluniverse.actors.ActorRef; 4 | import co.paralleluniverse.actors.BasicActor; 5 | import co.paralleluniverse.actors.integration.examples.Msg; 6 | import 
co.paralleluniverse.fibers.SuspendExecution; 7 | 8 | import static co.paralleluniverse.actors.integration.examples.Util.*; 9 | 10 | public final class ProducerActor extends BasicActor { 11 | private final ActorRef target; 12 | 13 | public ProducerActor(ActorRef target) { 14 | this.target = target; 15 | } 16 | 17 | @Override 18 | protected final Void doRun() throws InterruptedException, SuspendExecution { 19 | for (int i = 0; i < MSGS; i++) { 20 | final Msg m = new Msg(i); 21 | System.err.println("USER PRODUCER: " + m); 22 | target.send(m); 23 | } 24 | System.err.println("USER PRODUCER: " + EXIT); 25 | target.send(EXIT); 26 | return null; 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /src/main/java/co/paralleluniverse/actors/integration/examples/proxied/kafka/KafkaProxies.java: -------------------------------------------------------------------------------- 1 | package co.paralleluniverse.actors.integration.examples.proxied.kafka; 2 | 3 | import co.paralleluniverse.actors.Actor; 4 | import co.paralleluniverse.actors.ActorRef; 5 | import co.paralleluniverse.actors.ActorSpec; 6 | import co.paralleluniverse.actors.BasicActor; 7 | import co.paralleluniverse.fibers.FiberUtil; 8 | import co.paralleluniverse.fibers.SuspendExecution; 9 | import co.paralleluniverse.fibers.kafka.FiberKafkaProducer; 10 | import org.apache.kafka.clients.consumer.ConsumerRecord; 11 | import org.apache.kafka.clients.consumer.ConsumerRecords; 12 | import org.apache.kafka.clients.consumer.KafkaConsumer; 13 | import org.apache.kafka.clients.producer.KafkaProducer; 14 | import org.apache.kafka.clients.producer.ProducerRecord; 15 | 16 | import java.io.*; 17 | import java.util.*; 18 | import java.util.concurrent.*; 19 | import java.util.concurrent.atomic.AtomicBoolean; 20 | 21 | import static co.paralleluniverse.actors.integration.examples.Util.call; 22 | 23 | @SuppressWarnings("WeakerAccess") 24 | public final class KafkaProxies implements AutoCloseable { 25 | /** 26 | * Creates a new kafka proxy actors factory specific to a cluster and a topic. 27 | * 28 | * @param bootstrap The Kafka bootstrap server(s) 29 | * @param topic The Kafka topic 30 | */ 31 | public KafkaProxies(String bootstrap, String topic) { 32 | this(bootstrap, topic, null, null); 33 | } 34 | 35 | /** 36 | * Creates a new kafka proxy actors factory specific to a cluster and a topic. 37 | * 38 | * @param bootstrap The Kafka bootstrap server(s) 39 | * @param topic The Kafka topic 40 | * @param kafkaProducerPropertiesOverride Overrides Kafka producer properties (can be {@code null}) 41 | * @param kafkaConsumerPropertiesOverride Overrides Kafka consumer properties (can be {@code null}) 42 | */ 43 | public KafkaProxies(String bootstrap, String topic, Properties kafkaProducerPropertiesOverride, Properties kafkaConsumerPropertiesOverride) { 44 | this.topic = topic; 45 | producer = buildProducer(bootstrap, kafkaProducerPropertiesOverride); 46 | startConsumers(bootstrap, kafkaConsumerPropertiesOverride); 47 | } 48 | 49 | /** 50 | * Creates and returns a new Kafka proxy actor. 
51 | * 52 | * @param actorID The ID of the target actor 53 | * @param The base type of the message (must be serializable by Kryo) 54 | */ 55 | public final ActorRef create(String actorID) { 56 | ensureOpen(); 57 | 58 | final ActorRef a; 59 | try { 60 | //noinspection unchecked 61 | a = Actor.newActor ( 62 | new ActorSpec<>( 63 | ProducerActor.class.getConstructor(KafkaProxies.class, String.class), 64 | new Object[] { this, actorID } 65 | ) 66 | ).spawn(); 67 | } catch (final NoSuchMethodException e) { 68 | throw new AssertionError(e); 69 | } 70 | tos.add(a); 71 | return a; 72 | } 73 | 74 | /** 75 | * Releases a Kafka proxy actor. 76 | * 77 | * @param ref The proxy actor to release 78 | */ 79 | @SuppressWarnings("unused") 80 | public final void drop(ActorRef ref) throws ExecutionException, InterruptedException { 81 | ensureOpen(); 82 | 83 | //noinspection unchecked 84 | FiberUtil.runInFiber(() -> ref.send(EXIT)); 85 | tos.remove(ref); 86 | } 87 | 88 | /** 89 | * Subscribes a Kafka consumer actor. 90 | * 91 | * @param consumer The actor that will receive Kafka incoming messages 92 | * @param actorID The actor's remote ID 93 | * @param The message type 94 | */ 95 | public final void subscribe(ActorRef consumer, String actorID) { 96 | ensureOpen(); 97 | subscribers.compute(actorID, (s, actorRefs) -> { 98 | final List l = actorRefs != null ? actorRefs : new ArrayList<>(); 99 | l.add(consumer); 100 | return l; 101 | }); 102 | } 103 | 104 | /** 105 | * Unsubscribes a Kafka consumer actor. 106 | * 107 | * @param consumer The actor that will receive Kafka incoming messages 108 | * @param actorID The actor's remote ID 109 | */ 110 | @SuppressWarnings("unused") 111 | public final void unsubscribe(ActorRef consumer, String actorID) { 112 | ensureOpen(); 113 | subscribers.compute(actorID, (s, actorRefs) -> { 114 | final List l = actorRefs != null ? actorRefs : new ArrayList<>(); 115 | l.remove(consumer); 116 | return l; 117 | }); 118 | } 119 | 120 | /** 121 | * Closes all the actors and Kafka objects produced by this factory. 
122 | * 123 | * @throws Exception 124 | */ 125 | @Override 126 | public final void close() throws Exception { 127 | if (!closed.compareAndSet(false, true)) 128 | throw new IllegalStateException("closed"); 129 | 130 | try { 131 | FiberUtil.runInFiber(() -> { 132 | for (final ActorRef c : consumers) 133 | //noinspection unchecked 134 | c.send(EXIT); 135 | for (final ActorRef c : tos) 136 | //noinspection unchecked 137 | c.send(EXIT); 138 | consumers.clear(); 139 | tos.clear(); 140 | }); 141 | } finally { 142 | producer.close(); 143 | es.shutdown(); 144 | } 145 | } 146 | 147 | public final class ProducerActor extends BasicActor { 148 | private final String actorID; 149 | 150 | public ProducerActor(String actorID) { 151 | this.actorID = actorID; 152 | } 153 | 154 | @Override 155 | protected final Void doRun() throws InterruptedException, SuspendExecution { 156 | //noinspection InfiniteLoopStatement 157 | for(;;) { 158 | final M m = receive(); 159 | if (EXIT.equals(m)) 160 | return null; 161 | 162 | try (final ByteArrayOutputStream baos = new ByteArrayOutputStream(); 163 | final ObjectOutputStream oos = new ObjectOutputStream(baos)) { 164 | oos.writeObject(new ProxiedMsg(actorID, m)); 165 | oos.flush(); 166 | baos.flush(); 167 | producer.send(new ProducerRecord<>(topic, null, baos.toByteArray())); 168 | producer.flush(); 169 | } catch (final IOException e) { 170 | e.printStackTrace(); 171 | throw new RuntimeException(e); 172 | } 173 | } 174 | } 175 | } 176 | 177 | public final class ConsumerActor extends BasicActor { 178 | @Override 179 | protected Void doRun() throws InterruptedException, SuspendExecution { 180 | //noinspection InfiniteLoopStatement 181 | for (;;) { 182 | // Try extracting from queue 183 | final Object msg = tryReceive((Object m) -> { 184 | if (EXIT.equals(m)) 185 | return EXIT; 186 | if (m != null) { 187 | //noinspection unchecked 188 | final ProxiedMsg rmsg = (ProxiedMsg) m; 189 | final List l = subscribers.get(rmsg.actorID); 190 | if (l != null) { 191 | boolean sent = false; 192 | for (final ActorRef r : l) { 193 | //noinspection unchecked 194 | r.send(rmsg.payload); 195 | sent = true; 196 | } 197 | if (sent) // Someone was listening, remove from queue 198 | return m; 199 | } 200 | } 201 | return null; // No subscribers (leave in queue) or no messages 202 | }); 203 | // Something from queue 204 | if (msg != null) { 205 | if (EXIT.equals(msg)) { 206 | return null; 207 | } 208 | continue; // Go to next cycle -> precedence to queue 209 | } 210 | 211 | // Try receiving 212 | //noinspection Convert2Lambda 213 | final ConsumerRecords records = call(es, () -> 214 | consumer.get().poll(100L) 215 | ); 216 | for (final ConsumerRecord record : records) { 217 | final byte[] v = record.value(); 218 | try (final ByteArrayInputStream bis = new ByteArrayInputStream(v); 219 | final ObjectInputStream ois = new ObjectInputStream(bis)) { 220 | 221 | //noinspection unchecked 222 | final ProxiedMsg rmsg = (ProxiedMsg) ois.readObject(); 223 | final List l = subscribers.get(rmsg.actorID); 224 | if (l != null && l.size() > 0) { 225 | for (final ActorRef r : l) { 226 | //noinspection unchecked 227 | r.send(rmsg.payload); 228 | } 229 | } else { 230 | ref().send(rmsg); // Enqueue 231 | } 232 | } catch (final IOException | ClassNotFoundException e) { 233 | e.printStackTrace(); 234 | throw new RuntimeException(e); 235 | } 236 | } 237 | } 238 | } 239 | } 240 | 241 | private static final Object EXIT = new Object(); 242 | 243 | private final static Properties baseProducerConfig; 244 | static { 245 | 
baseProducerConfig = new Properties(); 246 | baseProducerConfig.put("acks", "all"); 247 | baseProducerConfig.put("retries", 10); 248 | baseProducerConfig.put("batch.size", 1); 249 | baseProducerConfig.put("linger.ms", 1); 250 | } 251 | private final static Properties baseConsumerConfig; 252 | static { 253 | baseConsumerConfig = new Properties(); 254 | baseConsumerConfig.put("enable.auto.commit", "true"); 255 | baseConsumerConfig.put("auto.commit.interval.ms", "10"); 256 | baseConsumerConfig.put("session.timeout.ms", "30000"); 257 | } 258 | 259 | private final Map> subscribers = new ConcurrentHashMap<>(); 260 | private final List tos = new ArrayList<>(); 261 | private final List consumers = new ArrayList<>(); 262 | private final AtomicBoolean closed = new AtomicBoolean(false); 263 | 264 | private final String topic; 265 | private final FiberKafkaProducer producer; 266 | 267 | private ThreadLocal> consumer; 268 | private ExecutorService es; 269 | 270 | private static final class ProxiedMsg implements Serializable { 271 | private static final long serialVersionUID = 0L; 272 | 273 | public String actorID; 274 | public Object payload; 275 | 276 | @SuppressWarnings("unused") 277 | public ProxiedMsg() {} // For serialization 278 | public ProxiedMsg(String actorID, Object payload) { 279 | this.actorID = actorID; 280 | this.payload = payload; 281 | } 282 | } 283 | 284 | private void ensureOpen() { 285 | if (closed.get()) 286 | throw new IllegalStateException("closed"); 287 | } 288 | 289 | private FiberKafkaProducer buildProducer(String bootstrap, Properties kafkaProducerPropertiesOverride) { 290 | final Properties p = new Properties(baseProducerConfig); 291 | if (kafkaProducerPropertiesOverride != null) 292 | p.putAll(kafkaProducerPropertiesOverride); 293 | if (bootstrap != null) 294 | p.put("bootstrap.servers", bootstrap); 295 | p.put("key.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer"); 296 | p.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer"); 297 | return new FiberKafkaProducer<>(new KafkaProducer<>(p)); 298 | } 299 | 300 | private void startConsumers(String bootstrap, Properties kafkaConsumerPropertiesOverride) { 301 | final Properties p = new Properties(baseConsumerConfig); 302 | if (kafkaConsumerPropertiesOverride != null) 303 | p.putAll(kafkaConsumerPropertiesOverride); 304 | if (bootstrap != null) 305 | p.put("bootstrap.servers", bootstrap); 306 | p.put("group.id", "group"); 307 | p.put("key.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer"); 308 | p.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer"); 309 | 310 | final KafkaConsumer tmp = new KafkaConsumer<>(p); 311 | tmp.subscribe(Collections.singletonList(topic)); 312 | int partitions = tmp.partitionsFor(topic).size(); 313 | tmp.close(); 314 | 315 | es = Executors.newFixedThreadPool(partitions); 316 | consumer = new ThreadLocal>() { 317 | @Override 318 | protected KafkaConsumer initialValue() { 319 | final KafkaConsumer r = new KafkaConsumer<>(p); 320 | r.subscribe(Collections.singletonList(topic)); 321 | return r; 322 | } 323 | }; 324 | 325 | for (int i = 0 ; i < partitions ; i++) { 326 | try { 327 | consumers.add ( 328 | Actor.newActor ( 329 | new ActorSpec<> ( 330 | ConsumerActor.class.getConstructor(KafkaProxies.class), 331 | new Object[] { this } 332 | ) 333 | ).spawn() 334 | ); 335 | } catch (final NoSuchMethodException e) { 336 | throw new AssertionError(e); 337 | } 338 | } 339 | } 340 | } 341 | 
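The factory above is driven from a plain Quasar actor program: `create(actorID)` returns a producer-side proxy that a user actor can treat as an ordinary `ActorRef`, and `subscribe(ref, actorID)` routes incoming `ProxiedMsg` envelopes with that ID to a user consumer. A minimal sketch of that wiring follows (it mirrors the `proxied.kafka.Main` class reproduced next; the class name `UsageSketch` is only for illustration, and the bootstrap address and topic are the ones used elsewhere in this repository):

```
package co.paralleluniverse.actors.integration.examples.proxied.kafka;

import co.paralleluniverse.actors.Actor;
import co.paralleluniverse.actors.integration.examples.proxied.ConsumerActor;
import co.paralleluniverse.actors.integration.examples.proxied.ProducerActor;

public final class UsageSketch {
    public static void main(String[] args) throws Exception {
        // The factory owns one Kafka producer plus one consumer actor per topic partition.
        try (final KafkaProxies proxies = new KafkaProxies("localhost:9092", "kafkaproxied")) {
            // "consumer" is the logical actor ID carried inside each ProxiedMsg envelope.
            final ProducerActor pa = Actor.newActor(ProducerActor.class, proxies.create("consumer"));
            final ConsumerActor ca = Actor.newActor(ConsumerActor.class);
            pa.spawn();                                 // user producer -> Kafka proxy actor
            proxies.subscribe(ca.spawn(), "consumer");  // Kafka consumer actors -> user consumer
            pa.join();                                  // producer sends MSGS messages, then EXIT
            ca.join();                                  // consumer returns once it receives EXIT
        }
    }
}
```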
-------------------------------------------------------------------------------- /src/main/java/co/paralleluniverse/actors/integration/examples/proxied/kafka/Main.java: -------------------------------------------------------------------------------- 1 | package co.paralleluniverse.actors.integration.examples.proxied.kafka; 2 | 3 | import co.paralleluniverse.actors.Actor; 4 | import co.paralleluniverse.actors.integration.examples.proxied.ConsumerActor; 5 | import co.paralleluniverse.actors.integration.examples.proxied.ProducerActor; 6 | 7 | import java.util.concurrent.ExecutionException; 8 | 9 | public final class Main { 10 | public static void main(String[] args) throws ExecutionException, InterruptedException { 11 | try (final KafkaProxies proxies = new KafkaProxies("localhost:9092", "kafkaproxied")) { 12 | final ProducerActor pa = Actor.newActor(ProducerActor.class, proxies.create("consumer")); 13 | final ConsumerActor ca = Actor.newActor(ConsumerActor.class); 14 | pa.spawn(); 15 | System.err.println("USER PRODUCER started"); 16 | proxies.subscribe(ca.spawn(), "consumer"); 17 | System.err.println("USER CONSUMER started"); 18 | pa.join(); 19 | System.err.println("USER PRODUCER finished"); 20 | ca.join(); 21 | System.err.println("USER CONSUMER finished"); 22 | } catch (final Exception e) { 23 | e.printStackTrace(); 24 | } 25 | } 26 | 27 | private Main() {} 28 | } 29 | -------------------------------------------------------------------------------- /src/main/java/co/paralleluniverse/actors/integration/examples/proxied/zeromq/Main.java: -------------------------------------------------------------------------------- 1 | package co.paralleluniverse.actors.integration.examples.proxied.zeromq; 2 | 3 | import co.paralleluniverse.actors.Actor; 4 | import co.paralleluniverse.actors.integration.examples.proxied.ConsumerActor; 5 | import co.paralleluniverse.actors.integration.examples.proxied.ProducerActor; 6 | 7 | import java.util.concurrent.ExecutionException; 8 | 9 | public final class Main { 10 | public static void main(String[] args) throws ExecutionException, InterruptedException { 11 | try (final ZeroMQProxies proxies = new ZeroMQProxies(1)) { 12 | final ProducerActor pa = Actor.newActor(ProducerActor.class, proxies.to("tcp://localhost:8000")); 13 | final ConsumerActor ca = Actor.newActor(ConsumerActor.class); 14 | pa.spawn(); 15 | System.err.println("USER PRODUCER started"); 16 | proxies.subscribe(ca.spawn(), "tcp://*:8000"); 17 | System.err.println("USER CONSUMER started"); 18 | pa.join(); 19 | System.err.println("USER PRODUCER finished"); 20 | ca.join(); 21 | System.err.println("USER CONSUMER finished"); 22 | } catch (final Exception e) { 23 | e.printStackTrace(); 24 | } 25 | } 26 | 27 | private Main() {} 28 | } 29 | -------------------------------------------------------------------------------- /src/main/java/co/paralleluniverse/actors/integration/examples/proxied/zeromq/ZeroMQProxies.java: -------------------------------------------------------------------------------- 1 | package co.paralleluniverse.actors.integration.examples.proxied.zeromq; 2 | 3 | import co.paralleluniverse.actors.*; 4 | import co.paralleluniverse.actors.integration.examples.Util; 5 | import co.paralleluniverse.fibers.FiberUtil; 6 | import co.paralleluniverse.fibers.SuspendExecution; 7 | import org.zeromq.ZMQ; 8 | 9 | import java.io.*; 10 | import java.util.*; 11 | import java.util.concurrent.*; 12 | import java.util.concurrent.atomic.AtomicBoolean; 13 | 14 | @SuppressWarnings("WeakerAccess") 15 | public final class 
ZeroMQProxies implements AutoCloseable { 16 | private final ZMQ.Context zmq; 17 | 18 | /** 19 | * Creates a new ZeroMQ proxy actors factory. 20 | * 21 | * @param ioThreads The number of IO threads to be used. 22 | */ 23 | public ZeroMQProxies(int ioThreads) { 24 | zmq = ZMQ.context(ioThreads); 25 | } 26 | 27 | /** 28 | * Creates and returns a new ZeroMQ proxy actor. 29 | * 30 | * @param trgtZMQAddress The ZeroMQ address of the target actor. 31 | */ 32 | public final ActorRef to(String trgtZMQAddress) { 33 | if (trgtZMQAddress == null) 34 | throw new IllegalArgumentException("`trgtZMQAddress` must be non-null"); 35 | 36 | ensureOpen(); 37 | 38 | //noinspection UnnecessaryLocalVariable 39 | final ActorRef a = producerProxies 40 | .computeIfAbsent(trgtZMQAddress, (k) -> { 41 | try { 42 | //noinspection unchecked 43 | return Actor.newActor ( 44 | new ActorSpec<> ( 45 | ProducerActor.class.getConstructor(ZeroMQProxies.class, String.class), 46 | new Object[] { this, trgtZMQAddress } 47 | ) 48 | ).spawn(); 49 | } catch (final NoSuchMethodException e) { 50 | throw new AssertionError(e); 51 | } 52 | }); 53 | 54 | //noinspection unchecked 55 | return a; 56 | } 57 | 58 | /** 59 | * Releases a ZeroMQ proxy actor. 60 | * 61 | * @param trgtZMQAddress The ZeroMQ address of the target actor. 62 | */ 63 | @SuppressWarnings("unused") 64 | public final void drop(String trgtZMQAddress) throws ExecutionException, InterruptedException { 65 | ensureOpen(); 66 | 67 | //noinspection unchecked 68 | FiberUtil.runInFiber(() -> producerProxies.get(trgtZMQAddress).send(EXIT)); 69 | producerProxies.remove(trgtZMQAddress); 70 | } 71 | 72 | /** 73 | * Subscribes a consumer actor to a ZeroMQ endpoint. 74 | * 75 | * @param consumer The consumer actor. 76 | * @param srcZMQEndpoint The ZeroMQ endpoint. 77 | */ 78 | public final void subscribe(ActorRef consumer, String srcZMQEndpoint) { 79 | if (consumer == null || srcZMQEndpoint == null) 80 | throw new IllegalArgumentException("`consumer` and `srcZMQEndpoint` must be non-null"); 81 | 82 | ensureOpen(); 83 | 84 | producerProxies 85 | .computeIfAbsent(srcZMQEndpoint, (k) -> { 86 | try { 87 | //noinspection unchecked 88 | return Actor.newActor ( 89 | new ActorSpec<> ( 90 | ConsumerActor.class.getConstructor(ZeroMQProxies.class, String.class), 91 | new Object[] { this, srcZMQEndpoint } 92 | ) 93 | ).spawn(); 94 | } catch (final NoSuchMethodException e) { 95 | throw new AssertionError(e); 96 | } 97 | }); 98 | 99 | subscribers.computeIfAbsent(srcZMQEndpoint, (k) -> new ArrayList<>()).add(consumer); 100 | } 101 | 102 | /** 103 | * Unsubscribes a consumer actor from a ZeroMQ endpoint. 104 | * 105 | * @param consumer The consumer actor. 106 | * @param srcZMQEndpoint The ZeroMQ endpoint. 107 | */ 108 | @SuppressWarnings("unused") 109 | public final void unsubscribe(ActorRef consumer, String srcZMQEndpoint) { 110 | if (srcZMQEndpoint == null) 111 | throw new IllegalArgumentException("`srcZMQEndpoint` must be non-null"); 112 | 113 | ensureOpen(); 114 | 115 | subscribers.compute(srcZMQEndpoint, (s, actorRefs) -> { 116 | if (actorRefs != null) 117 | actorRefs.remove(consumer); 118 | return actorRefs; 119 | }); 120 | } 121 | 122 | /** 123 | * Closes all the actors and ZeroMQ objects produced by this factory. 
124 | * 125 | * @throws Exception 126 | */ 127 | @Override 128 | public final void close() throws Exception { 129 | if (!closed.compareAndSet(false, true)) 130 | throw new IllegalStateException("closed"); 131 | 132 | try { 133 | FiberUtil.runInFiber(() -> { 134 | for (final ActorRef a : producerProxies.values()) 135 | //noinspection unchecked 136 | a.send(EXIT); 137 | producerProxies.clear(); 138 | for (final ActorRef a : consumerProxies.values()) 139 | //noinspection unchecked 140 | a.send(EXIT); 141 | consumerProxies.clear(); 142 | subscribers.clear(); 143 | }); 144 | } finally { 145 | e.shutdown(); 146 | } 147 | } 148 | 149 | public final class ProducerActor extends BasicActor { 150 | private final String trgtZMQAddress; 151 | 152 | public ProducerActor(String trgtZMQAddress) { 153 | this.trgtZMQAddress = trgtZMQAddress; 154 | } 155 | 156 | @Override 157 | protected final Void doRun() throws InterruptedException, SuspendExecution { 158 | try (final ZMQ.Socket trgt = zmq.socket(ZMQ.REQ)) { 159 | System.err.printf("PROXY PRODUCER: connecting to %s\n", trgtZMQAddress); 160 | Util.exec(e, () -> trgt.connect(trgtZMQAddress)); 161 | //noinspection InfiniteLoopStatement 162 | for (;;) { 163 | System.err.println("PROXY PRODUCER: receiving from the mailbox"); 164 | final Object m = receive(); 165 | if (m == null || EXIT.equals(m)) { 166 | System.err.println("PROXY PRODUCER: exiting"); 167 | return null; 168 | } else { 169 | System.err.printf("PROXY PRODUCER: forwarding %s\n", m); 170 | try (final ByteArrayOutputStream baos = new ByteArrayOutputStream(); 171 | final ObjectOutputStream oos = new ObjectOutputStream(baos)) { 172 | oos.writeObject(m); 173 | oos.flush(); 174 | baos.flush(); 175 | Util.exec(e, () -> trgt.send(baos.toByteArray(), 0)); 176 | System.err.println("PROXY PRODUCER: waiting for ACK"); 177 | Util.exec(e, trgt::recv); // ACK 178 | } catch (final IOException e) { 179 | e.printStackTrace(); 180 | throw new RuntimeException(e); 181 | } 182 | } 183 | } 184 | } 185 | } 186 | } 187 | 188 | public final class ConsumerActor extends BasicActor { 189 | private final String srcZMQEndpoint; 190 | 191 | public ConsumerActor(String srcZMQEndpoint) { 192 | this.srcZMQEndpoint = srcZMQEndpoint; 193 | } 194 | 195 | @Override 196 | protected Void doRun() throws InterruptedException, SuspendExecution { 197 | try(final ZMQ.Socket src = zmq.socket(ZMQ.REP)) { 198 | System.err.printf("PROXY CONSUMER: binding %s\n", srcZMQEndpoint); 199 | Util.exec(e, () -> src.bind(srcZMQEndpoint)); 200 | src.setReceiveTimeOut(100); 201 | //noinspection InfiniteLoopStatement 202 | for (;;) { 203 | // Try extracting from queue 204 | final Object m = tryReceive((Object o) -> { 205 | if (EXIT.equals(o)) 206 | return EXIT; 207 | if (o != null) { 208 | //noinspection unchecked 209 | final List l = subscribers.get(srcZMQEndpoint); 210 | if (l != null) { 211 | boolean sent = false; 212 | for (final ActorRef r : l) { 213 | //noinspection unchecked 214 | r.send(o); 215 | sent = true; 216 | } 217 | if (sent) // Someone was listening, remove from queue 218 | return o; 219 | } 220 | } 221 | return null; // No subscribers (leave in queue) or no messages 222 | }); 223 | // Something processable is there 224 | if (m != null) { 225 | if (EXIT.equals(m)) { 226 | return null; 227 | } 228 | continue; // Go to next cycle -> precedence to queue 229 | } 230 | 231 | System.err.println("PROXY CONSUMER: receiving"); 232 | final byte[] msg = Util.call(e, src::recv); 233 | if (msg != null) { 234 | System.err.println("PROXY CONSUMER: ACKing"); 
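// A REP socket must reply to the current request before its REQ peer can send again, so the proxy ACKs immediately and only then deserializes and dispatches the received payload.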
235 | Util.exec(e, () -> src.send(ACK)); 236 | final Object o; 237 | try (final ByteArrayInputStream bis = new ByteArrayInputStream(msg); 238 | final ObjectInputStream ois = new ObjectInputStream(bis)) { 239 | o = ois.readObject(); 240 | } catch (final IOException | ClassNotFoundException e) { 241 | e.printStackTrace(); 242 | throw new RuntimeException(e); 243 | } 244 | System.err.printf("PROXY CONSUMER: distributing '%s' to %d subscribers\n", o, subscribers.size()); 245 | //noinspection unchecked 246 | for (final ActorRef s : subscribers.getOrDefault(srcZMQEndpoint, (List) Collections.EMPTY_LIST)) 247 | //noinspection unchecked 248 | s.send(o); 249 | } else { 250 | System.err.println("PROXY CONSUMER: receive timeout"); 251 | } 252 | } 253 | } 254 | } 255 | } 256 | 257 | private void ensureOpen() { 258 | if (closed.get()) 259 | throw new IllegalStateException("Already closed."); 260 | } 261 | 262 | private static final ExecutorService e = Executors.newFixedThreadPool(2); 263 | 264 | private static final String ACK = "ACK"; 265 | private static final Object EXIT = new Object(); 266 | 267 | private final Map producerProxies = new ConcurrentHashMap<>(); 268 | 269 | private final Map consumerProxies = new ConcurrentHashMap<>(); 270 | private final Map> subscribers = new ConcurrentHashMap<>(); 271 | 272 | private final AtomicBoolean closed = new AtomicBoolean(false); 273 | } 274 | -------------------------------------------------------------------------------- /src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | %d{ISO8601} %-5level [%thread] %logger{0}: %msg%n 8 | 9 | 10 | 11 | 12 | 1024 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | --------------------------------------------------------------------------------
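The `logback.xml` listing above lost its XML tags during text extraction; only the layout pattern and a `1024` queue size survive. A plausible reconstruction is sketched below, assuming the usual console appender wrapped in an async appender; the element structure and the root level are assumptions, not the original file:

```
<configuration>
    <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>%d{ISO8601} %-5level [%thread] %logger{0}: %msg%n</pattern>
        </encoder>
    </appender>

    <appender name="ASYNC" class="ch.qos.logback.classic.AsyncAppender">
        <queueSize>1024</queueSize>
        <appender-ref ref="CONSOLE"/>
    </appender>

    <root level="INFO">
        <appender-ref ref="ASYNC"/>
    </root>
</configuration>
```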