(NATS_STREAMING_LOCALHOST_URL, "nSub", DEFAULT_SUBJECT, CLUSTER_ID, getUniqueClientName() + "_SUB", data, Integer.class);
48 | NatsStreamingPublisher np = new NatsStreamingPublisher("nPub", CLUSTER_ID, getUniqueClientName() + "_PUB", NATS_STREAMING_LOCALHOST_URL, DEFAULT_SUBJECT, data.size());
49 |
50 | ExecutorService executor = Executors.newFixedThreadPool(1);
51 | // start the subscriber app
52 | executor.execute(ns);
53 | // wait for the subscriber to be ready.
54 | ns.waitUntilReady();
55 |
56 | Thread.sleep(500);
57 | np.publishMessages();
58 |
59 | Thread.sleep(5000);
60 | ns.waitForCompletion();
61 | }
62 |
63 | static String getUniqueClientName() {
64 | return CLIENT_ID + NatsSparkUtilities.generateUniqueID();
65 | }
66 | }
67 |
--------------------------------------------------------------------------------
/src/main/java/com/logimethods/connector/nats/to_spark/StandardNatsToKeyValueSparkConnectorImpl.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.nats.to_spark;
9 |
10 | import java.util.Collection;
11 | import java.util.Properties;
12 | import java.util.function.Function;
13 |
14 | import org.apache.spark.storage.StorageLevel;
15 | import org.slf4j.Logger;
16 | import org.slf4j.LoggerFactory;
17 |
18 | import io.nats.client.Message;
19 | import io.nats.client.MessageHandler;
20 | import scala.Tuple2;
21 |
22 | /**
23 | * A NATS to a Key/Value Spark Stream Connector.
24 | *
25 | * It will transfer messages received from NATS into Spark data.
26 | *
27 | * This class extends {@link com.logimethods.connector.nats.to_spark.NatsToSparkConnector}&lt;T,R,V&gt;.
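*
* <p>A minimal usage sketch, based on the calls exercised by the unit tests of this
* repository (the NATS URL and subject are placeholders, and {@code ssc} is an
* existing {@code JavaStreamingContext}):
* <pre>{@code
* JavaPairDStream<String, String> messages =
*     NatsToSparkConnector
*         .receiveFromNats(String.class, StorageLevel.MEMORY_ONLY())
*         .withNatsURL("nats://localhost:4222")
*         .withSubjects("mySubject")
*         .asStreamOfKeyValue(ssc);
* }</pre>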
28 | */
29 | public class StandardNatsToKeyValueSparkConnectorImpl<V>
30 | extends OmnipotentStandardNatsToSparkConnector<StandardNatsToKeyValueSparkConnectorImpl<V>, Tuple2<String, V>, V> {
31 |
32 | /**
33 | *
34 | */
35 | private static final long serialVersionUID = 1L;
36 |
37 | static final Logger logger = LoggerFactory.getLogger(StandardNatsToKeyValueSparkConnectorImpl.class);
38 |
39 | protected Properties enrichedProperties;
40 |
41 | protected StandardNatsToKeyValueSparkConnectorImpl(Class<V> type, StorageLevel storageLevel, Collection<String> subjects, Properties properties,
42 | String queue, String natsUrl, Function<byte[], V> dataDecoder, scala.Function1<byte[], V> scalaDataDecoder) {
43 | super(type, storageLevel, subjects, properties, queue, natsUrl);
44 | this.dataDecoder = dataDecoder;
45 | this.scalaDataDecoder = scalaDataDecoder;
46 | }
47 |
48 | protected MessageHandler getMessageHandler() {
49 | return new MessageHandler() {
50 | @Override
51 | public void onMessage(Message m) {
52 | final Tuple2<String, V> s = decodeTuple(m);
53 |
54 | if (logger.isTraceEnabled()) {
55 | logger.trace("Received by {} on Subject '{}': {}.", StandardNatsToKeyValueSparkConnectorImpl.this, m.getSubject(), s);
56 | }
57 |
58 | store(s);
59 | }
60 | };
61 | }
62 | }
63 |
64 |
--------------------------------------------------------------------------------
/src/main/java/com/logimethods/connector/nats/spark/test/TestClient.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2012, 2016 Apcera Inc.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.nats.spark.test;
9 |
10 | import org.slf4j.Logger;
11 | import org.slf4j.LoggerFactory;
12 |
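/**
* Base class for the test publishers and subscribers. It exposes two latches, "ready"
* and "complete", so that a test can block until the client is fully set up and until
* the expected number of messages has been processed.
*/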
13 | public abstract class TestClient {
14 | protected static Logger logger = LoggerFactory.getLogger(TestClient.class);
15 |
16 | Object readyLock = new Object();
17 | boolean isReady = false;
18 |
19 | protected String id = "";
20 |
21 | Object completeLock = new Object();
22 | boolean isComplete = false;
23 |
24 | protected int testCount = 0;
25 |
26 | int msgCount = 0;
27 |
28 | public int tallyMessage()
29 | {
30 | return (++msgCount);
31 | }
32 |
33 | public int getMessageCount()
34 | {
35 | return msgCount;
36 | }
37 |
38 | public TestClient(String id, int testCount)
39 | {
40 | this.id = id;
41 | this.testCount = testCount;
42 | }
43 |
44 | public void setReady()
45 | {
46 | logger.debug("Client ({}) is ready.", id);
47 | synchronized (readyLock)
48 | {
49 | if (isReady)
50 | return;
51 |
52 | isReady = true;
53 | readyLock.notifyAll();
54 | }
55 | }
56 |
57 | public void waitUntilReady()
58 | {
59 | synchronized (readyLock)
60 | {
61 | while (!isReady) {
62 | try {
63 | readyLock.wait();
64 | } catch (InterruptedException e) {
65 | e.printStackTrace();
66 | }
67 | }
68 | }
69 | logger.debug("Done waiting for Client ({}) to be ready.", id);
70 | }
71 |
72 | public void setComplete()
73 | {
74 | logger.debug("Client ({}) has completed.", id);
75 |
76 | synchronized(completeLock)
77 | {
78 | if (isComplete)
79 | return;
80 |
81 | isComplete = true;
82 | completeLock.notifyAll();
83 | }
84 | }
85 |
86 | public void waitForCompletion()
87 | {
88 | synchronized (completeLock)
89 | {
90 | while (!isComplete)
91 | {
92 | try {
93 | completeLock.wait();
94 | } catch (InterruptedException e) {
95 | e.printStackTrace();
96 | }
97 | }
98 | }
99 | logger.debug("Done waiting for Client ({}) to complete.", id);
100 | }
101 |
102 | }
103 |
--------------------------------------------------------------------------------
/src/main/java/com/logimethods/connector/nats/spark/test/SparkToNatsValidator.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.nats.spark.test;
9 |
10 | import java.io.Serializable;
11 |
12 | import org.apache.spark.api.java.JavaRDD;
13 | import org.apache.spark.api.java.function.Function;
14 | import org.apache.spark.api.java.function.PairFunction;
15 | import org.apache.spark.streaming.api.java.JavaDStream;
16 | import org.apache.spark.streaming.api.java.JavaPairDStream;
17 | import org.apache.spark.streaming.api.java.JavaStreamingContext;
18 |
19 | import scala.Tuple2;
20 |
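/**
* Helper functions used by the tests to shape Spark streams: parsing String records
* into Integers, and pairing each record with the NATS subject it should be published to.
*/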
21 | public class SparkToNatsValidator implements Serializable {
22 |
23 | /**
24 | *
25 | */
26 | private static final long serialVersionUID = 1L;
27 |
28 | public static java.util.function.Function<String, byte[]> getBytes = (java.util.function.Function<String, byte[]> & Serializable) str -> str.getBytes();
29 |
30 |
31 | /**
32 | * @return the given stream of numeric strings, with each record parsed as an Integer
33 | */
34 | public static JavaDStream<Integer> generateIntegers(JavaDStream<String> lines) {
35 | return lines.map(str -> Integer.parseInt(str));
36 | }
37 |
38 | /**
39 | * @param subject1 the NATS subject to pair with each record
40 | * @param rdd the RDD of records to tag
41 | * @return an RDD of (subject, record) tuples
42 | */
43 | public static JavaRDD<Tuple2<String, String>> newSubjectStringTuple(String subject1, JavaRDD<String> rdd) {
44 | return rdd.map((Function<String, Tuple2<String, String>>) str -> {
45 | return new Tuple2<String, String>(subject1, str);
46 | });
47 | }
48 |
49 | /**
50 | * @param subject1 the subject prefix; the record itself is appended as an extra subject token
51 | * @param rdd the RDD of records to tag
52 | * @return an RDD of (subject1.record, record) tuples
53 | */
54 | public static JavaRDD<Tuple2<String, String>> newSubjectDotStringTuple(String subject1, JavaRDD<String> rdd) {
55 | return rdd.map((Function<String, Tuple2<String, String>>) str -> {
56 | return new Tuple2<String, String>(subject1 + "." + str, str);
57 | });
58 | }
59 |
60 | public static JavaPairDStream<String, String> getJavaPairDStream(final JavaDStream<String> lines, final JavaStreamingContext ssc, final String subject1) {
61 | //- final JavaDStream<String> lines = ssc.textFileStream(tempDir.getAbsolutePath());
62 | return lines.mapToPair((PairFunction<String, String, String>) str -> {
63 | return new Tuple2<String, String>(subject1 + "." + str, str);
64 | });
65 | }
66 | }
67 |
--------------------------------------------------------------------------------
/src/main/java/com/logimethods/connector/spark/to_nats/AbstractSparkToNatsStreamingConnectorPool.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.spark.to_nats;
9 |
10 | import io.nats.streaming.Options;
11 |
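/**
* A pool of Spark-to-NATS-Streaming connectors that share the same cluster ID and
* connection options.
*/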
12 | public abstract class AbstractSparkToNatsStreamingConnectorPool<T> extends SparkToNatsConnectorPool<T> {
13 |
14 | /**
15 | *
16 | */
17 | private static final long serialVersionUID = 1L;
18 | protected String clusterID;
19 | // TODO No more static, needs to be checked on a cluster
20 | protected Options.Builder optionsBuilder;
21 |
22 | /**
23 | * @param clusterID the ID of the NATS Streaming cluster to connect to
24 | */
25 | protected AbstractSparkToNatsStreamingConnectorPool(String clusterID) {
26 | super();
27 | this.clusterID = clusterID;
28 | }
29 |
30 | /**
31 | * @return a new SparkToNatsStreamingConnectorImpl built from this pool's settings
32 | * @throws Exception if the connector cannot be created
33 | */
34 | @Override
35 | public SparkToNatsStreamingConnectorImpl newSparkToNatsConnector() throws Exception {
36 | return new SparkToNatsStreamingConnectorImpl( clusterID,
37 | getNatsURL(),
38 | getProperties(),
39 | getConnectionTimeout(),
40 | getOptionsBuilder(),
41 | getDefinedSubjects(),
42 | isStoredAsKeyValue());
43 | }
44 |
45 | /**
46 | * @return the optionsBuilder
47 | */
48 | protected Options.Builder getOptionsBuilder() {
49 | return optionsBuilder;
50 | }
51 |
52 | /**
53 | * @param optionsBuilder the optionsBuilder to set
54 | */
55 | protected void setOptionsBuilder(Options.Builder optionsBuilder) {
56 | this.optionsBuilder = optionsBuilder;
57 | }
58 |
59 | @Override
60 | protected int computeConnectionSignature() {
61 | return sparkToNatsStreamingConnectionSignature(natsURL, properties, subjects, connectionTimeout, clusterID);
62 | }
63 |
64 | /* (non-Javadoc)
65 | * @see java.lang.Object#toString()
66 | */
67 | @Override
68 | public String toString() {
69 | return "SparkToNatsStreamingConnectorPool ["
70 | + (optionsBuilder != null ? "optionsBuilder=" + optionsBuilder + ", " : "")
71 | + (properties != null ? "properties=" + properties + ", " : "")
72 | + (subjects != null ? "subjects=" + subjects + ", " : "")
73 | + (natsURL != null ? "natsURL=" + natsURL + ", " : "")
74 | + ("connectorsPoolMap=" + connectorsPoolMap) + "]";
75 | }
76 | }
77 |
--------------------------------------------------------------------------------
/src/test/java/com/logimethods/connector/spark/to_nats/SparkToNatsConnectorTest.java:
--------------------------------------------------------------------------------
1 | package com.logimethods.connector.spark.to_nats;
2 |
3 | import static com.logimethods.connector.spark.to_nats.SparkToNatsConnector.combineSubjects;
4 | import static org.junit.Assert.assertEquals;
5 | import static org.junit.Assert.assertTrue;
6 |
7 | import java.io.IOException;
8 | import java.util.Arrays;
9 | import java.util.Collection;
10 | import java.util.Properties;
11 |
12 | import org.apache.commons.lang3.SerializationUtils;
13 | import org.junit.Test;
14 |
15 | public class SparkToNatsConnectorTest {
16 |
17 | private static final String natsURL = "nats://123.123.123.123:4444";
18 | private static final Properties properties = new Properties();
19 | private static final Collection<String> subjects = Arrays.asList("Hello", "World!");
20 | private static final boolean isStoredAsKeyValue = true;
21 |
22 | @Test
23 | public void testCombineSubjectsNoSubstitution() {
24 | final String subA = "subA";
25 | final String subB = "subB";
26 |
27 | assertEquals(subA, combineSubjects("", subA));
28 | assertEquals(subA+subB, combineSubjects(subA,subB));
29 | }
30 |
31 | @Test
32 | public void testCombineSubjectsWithSubstitution() {
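// Pattern syntax, as inferred from the assertions below: each pattern has the form
// "match=>replacement"; the match part is applied as a first-occurrence substitution
// on the subject, with "*" standing for one subject token and "^" anchoring the
// match to the start of the subject.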
33 | assertEquals("A.C", combineSubjects("*. =>A.", "B.C"));
34 | assertEquals("A.C.D", combineSubjects("*. => A.", "B.C.D"));
35 | assertEquals("B.C.D", combineSubjects("X.=>A.", "B.C.D"));
36 | assertEquals("A.D", combineSubjects("*.*.=>A.", "B.C.D"));
37 | assertEquals("A.B.D", combineSubjects("*.C=>A.B", "B.C.D"));
38 | assertTrue(SparkToNatsConnector.subjectPatternMap.toString(), SparkToNatsConnector.subjectPatternMap.containsKey("*.C=>A.B"));
39 | assertEquals("B.C.D", combineSubjects("*.X.*=>A.B", "B.C.D"));
40 | assertEquals("A.b.C.D", combineSubjects("B=>b", "A.B.C.D"));
41 | assertEquals("A.B.C.D", combineSubjects("^B=>b", "A.B.C.D"));
42 | assertEquals("A.b.B.D", combineSubjects("B=>b", "A.B.B.D"));
43 | }
44 |
45 | @Test
46 | // @See https://github.com/Logimethods/nats-connector-spark/pull/3
47 | // @See https://github.com/nats-io/java-nats-streaming/issues/51
48 | public void testSparkToStandardNatsConnectorImpl_Serialization() throws IOException, ClassNotFoundException {
49 | Long duration = 111L;
50 | SparkToStandardNatsConnectorImpl source =
51 | new SparkToStandardNatsConnectorImpl(natsURL, properties, duration, subjects, isStoredAsKeyValue);
52 |
53 | SparkToStandardNatsConnectorImpl target = SerializationUtils.clone(source);
54 |
55 | assertEquals(source.getNatsURL(), target.getNatsURL());
56 | assertEquals(source.getProperties(), target.getProperties());
57 | assertEquals(source.getSubjects(), target.getSubjects());
58 | assertEquals(duration, target.connectionTimeout);
59 | assertEquals(isStoredAsKeyValue, target.isStoredAsKeyValue());
60 | }
61 |
62 | }
63 |
--------------------------------------------------------------------------------
/src/main/java/com/logimethods/connector/nats/to_spark/NatsStreamingToKeyValueSparkConnectorImpl.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.nats.to_spark;
9 |
10 | import java.util.Collection;
11 | import java.util.Properties;
12 | import java.util.function.Function;
13 |
14 | import org.apache.spark.storage.StorageLevel;
15 | import org.slf4j.Logger;
16 | import org.slf4j.LoggerFactory;
17 |
18 | import io.nats.streaming.Message;
19 | import io.nats.streaming.MessageHandler;
20 | import io.nats.streaming.SubscriptionOptions;
21 | import scala.Tuple2;
22 |
23 | /**
24 | * A NATS Streaming to a Key/Value Spark Stream Connector.
25 | *
26 | * It will transfer messages received from NATS into Spark data.
27 | *
28 | * This class extends {@link com.logimethods.connector.nats.to_spark.NatsToSparkConnector}&lt;T,R,V&gt;.
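*
* <p>A minimal usage sketch; it assumes the {@code receiveFromNatsStreaming(type,
* storageLevel, clusterID)} factory of {@code NatsToSparkConnector}, with placeholder
* cluster ID and subject values, and {@code ssc} an existing {@code JavaStreamingContext}:
* <pre>{@code
* JavaPairDStream<String, String> messages =
*     NatsToSparkConnector
*         .receiveFromNatsStreaming(String.class, StorageLevel.MEMORY_ONLY(), "test-cluster")
*         .withSubjects("mySubject")
*         .asStreamOfKeyValue(ssc);
* }</pre>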
29 | */
30 | public class NatsStreamingToKeyValueSparkConnectorImpl<V>
31 | extends OmnipotentNatsStreamingToSparkConnector<NatsStreamingToKeyValueSparkConnectorImpl<V>, Tuple2<String, V>, V> {
32 |
33 | private static final long serialVersionUID = 1L;
34 |
35 | protected static final Logger logger = LoggerFactory.getLogger(NatsStreamingToKeyValueSparkConnectorImpl.class);
36 |
37 | protected NatsStreamingToKeyValueSparkConnectorImpl(Class<V> type, StorageLevel storageLevel, Collection<String> subjects,
38 | Properties properties, String queue, String natsUrl, String clusterID, String clientID,
39 | SubscriptionOptions subscriptionOpts, SubscriptionOptions.Builder subscriptionOptsBuilder, Function<byte[], V> dataDecoder, scala.Function1<byte[], V> scalaDataDecoder) {
40 | super(type, storageLevel, clusterID, clientID);
41 | this.subjects = subjects;
42 | this.properties = properties;
43 | this.natsQueue = queue;
44 | this.natsUrl = natsUrl;
45 | this.subscriptionOpts = subscriptionOpts;
46 | this.subscriptionOptsBuilder = subscriptionOptsBuilder;
47 | this.dataDecoder = dataDecoder;
48 | this.scalaDataDecoder = scalaDataDecoder;
49 | }
50 |
51 | @Override
52 | protected MessageHandler getMessageHandler() {
53 | return new MessageHandler() {
54 | @Override
55 | public void onMessage(Message m) {
56 | final Tuple2<String, V> s = decodeTuple(m);
57 |
58 | if (logger.isTraceEnabled()) {
59 | logger.trace("Received by {} on Subject '{}': {}.", NatsStreamingToKeyValueSparkConnectorImpl.this,
60 | m.getSubject(), s);
61 | }
62 |
63 | store(s);
64 | }
65 | };
66 | }
67 | }
68 |
69 |
--------------------------------------------------------------------------------
/src/test/java/com/logimethods/connector/spark/to_nats/DataSourceSocket.java:
--------------------------------------------------------------------------------
1 | /**
2 | *
3 | */
4 | package com.logimethods.connector.spark.to_nats;
5 |
6 | import java.io.IOException;
7 | import java.io.OutputStreamWriter;
8 | import java.io.PrintWriter;
9 | import java.net.Socket;
10 | import java.util.List;
11 |
12 | import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
13 | import org.apache.spark.streaming.api.java.JavaDStream;
14 | import org.apache.spark.streaming.api.java.JavaStreamingContext;
15 | import org.slf4j.Logger;
16 | import org.slf4j.LoggerFactory;
17 |
18 | /**
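* A {@link DataSource} that writes test data to one TCP endpoint and reads it back as
* a Spark stream (via {@code socketTextStream}) from another.
*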
19 | * @author laugimethods
20 | *
21 | */
22 | public class DataSourceSocket extends DataSource {
23 |
24 | protected static Logger logger = LoggerFactory.getLogger(DataSourceSocket.class);
25 |
26 | /**
27 | *
28 | */
29 | private static final long serialVersionUID = 1L;
30 | PrintWriter writer;
31 | private String hostnameWrite, hostnameRead;
32 | private int portWrite;
33 | private int portRead;
34 | private Socket socket;
35 |
36 | /**
37 | * @param hostnameWrite the host (with portWrite) that test data is written to
38 | * @param hostnameRead the host (with portRead) that the Spark stream reads from
39 | */
40 | public DataSourceSocket(String hostnameWrite, int portWrite, String hostnameRead, int portRead) {
41 | super();
42 | this.hostnameWrite = hostnameWrite;
43 | this.portWrite = portWrite;
44 | this.hostnameRead = hostnameRead;
45 | this.portRead = portRead;
46 | }
47 |
48 | @Override
49 | public void write(String str) {
50 | writer.println(str);
51 | }
52 |
53 | @Override
54 | public void write(List<?> data) {
55 | for(Object str: data) {
56 | writer.println(str.toString());
57 | }
58 | writer.flush();
59 | }
60 |
61 | /* (non-Javadoc)
62 | * @see com.logimethods.connector.spark.to_nats.DataSource#dataStream(org.apache.spark.streaming.api.java.JavaStreamingContext)
63 | */
64 | @Override
65 | public JavaDStream<String> dataStream(JavaStreamingContext ssc) {
66 | return ssc.socketTextStream(hostnameRead, portRead);
67 | }
68 |
69 | /* (non-Javadoc)
70 | * @see com.logimethods.connector.spark.to_nats.DataSource#setup()
71 | */
72 | @Override
73 | public void setup() throws IOException {
74 | try {
75 | logger.info("new Socket({}, {}) TENTATIVE", hostnameWrite, portWrite);
76 | socket = new Socket(hostnameWrite, portWrite);
77 | } catch (IOException e) {
78 | logger.error("new Socket({}, {}) PRODUCES {}", hostnameWrite, portWrite, e.getMessage());
79 | throw(e);
80 | }
81 | }
82 |
83 | /* (non-Javadoc)
84 | * @see com.logimethods.connector.spark.to_nats.DataSource#open()
85 | */
86 | @Override
87 | public void open() throws IOException {
88 | writer = new PrintWriter( new OutputStreamWriter(socket.getOutputStream()));
89 | }
90 |
91 | /* (non-Javadoc)
92 | * @see com.logimethods.connector.spark.to_nats.DataSource#close()
93 | */
94 | @Override
95 | public void close() throws IOException {
96 | writer.close();
97 | socket.close();
98 | }
99 |
100 | }
101 |
--------------------------------------------------------------------------------
/src/main/java/com/logimethods/connector/nats/spark/test/NatsStreamingPublisher.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2012, 2016 Apcera Inc.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.nats.spark.test;
9 |
10 | import java.io.IOException;
11 | import java.util.concurrent.TimeoutException;
12 |
13 | import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
14 |
15 | import com.logimethods.connector.nats_spark.IncompleteException;
16 |
17 | import io.nats.streaming.NatsStreaming;
18 | import io.nats.streaming.Options;
19 | import io.nats.streaming.StreamingConnection;
20 |
21 | public class NatsStreamingPublisher extends NatsPublisher {
22 |
23 | protected final String clusterID, clientID;
24 |
25 | /**
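* Publishes the given number of test messages to a NATS Streaming subject, using the
* provided cluster and client IDs.
*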
26 | * @param id
27 | * @param clusterID
28 | * @param clientID
29 | * @param natsUrl
30 | * @param subject
31 | * @param count
32 | */
33 | public NatsStreamingPublisher(String id, String clusterID, String clientID, String natsUrl, String subject, int count) {
34 | super(id, natsUrl, subject, count);
35 | this.clusterID = clusterID;
36 | this.clientID = clientID;
37 | }
38 |
39 | @Override
40 | public void run() {
41 |
42 | try {
43 | publishMessages();
44 | }
45 | catch (Exception ex)
46 | {
47 | ex.printStackTrace();
48 | }
49 | }
50 |
51 | /**
52 | * Connects to NATS Streaming and publishes the configured number of test messages.
53 | * @throws IOException if the connection cannot be established
54 | * @throws InterruptedException if the connection attempt is interrupted
55 | * @throws TimeoutException if the connection attempt times out
56 | */
57 | public void publishMessages() throws IOException, InterruptedException, TimeoutException {
58 | logger.debug("NATS Publisher ({}): Starting", id);
59 |
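// Build the connection options; when no URL is supplied, the client library's
// default NATS URL is used.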
60 | final Options.Builder optionsBuilder = new Options.Builder();
61 | if (natsUrl != null) {
62 | optionsBuilder.natsUrl(natsUrl);
63 | }
64 | StreamingConnection c;
65 | final Options options = optionsBuilder.build();
66 | try {
67 | c = NatsStreaming.connect(clusterID, clientID, options);
68 | } catch (Exception e) {
69 | logger.error("NatsStreaming.connect({}) PRODUCES {}", clusterID, clientID, ReflectionToStringBuilder.toString(options), e.getMessage());
70 | throw(new IOException(String.format("NatsStreaming.connect(%s, %s, %s)", clusterID, clientID, ReflectionToStringBuilder.toString(options)), e));
71 | }
72 |
73 | logger.debug("A NATS Connection to '{}' has been created.", c);
74 |
75 | setReady();
76 |
77 | for (int i = 0; i < testCount; i++) {
78 | final String payload = NATS_PAYLOAD + INCR.getAndIncrement();
79 | c.publish(subject, payload.getBytes());
80 | logger.trace("Publish '{}' to '{}'.", payload, subject);
81 | tallyMessage();
82 | }
83 |
84 | logger.debug("NATS Publisher ({}): Published {} messages.", id, testCount);
85 |
86 | setComplete();
87 | }
88 |
89 | }
90 |
--------------------------------------------------------------------------------
/src/test/java/com/logimethods/connector/spark/to_nats/SparkToNatsConnectorPoolTest.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.spark.to_nats;
9 |
10 | import static org.junit.Assert.*;
11 |
12 | import java.util.Properties;
13 |
14 | import org.junit.After;
15 | import org.junit.AfterClass;
16 | import org.junit.Before;
17 | import org.junit.BeforeClass;
18 | import org.junit.Test;
19 |
20 | import com.logimethods.connector.nats_spark.IncompleteException;
21 | import com.logimethods.connector.spark.to_nats.SparkToNatsConnector;
22 | import com.logimethods.connector.spark.to_nats.SparkToNatsConnectorPool;
23 |
24 | import static com.logimethods.connector.nats_spark.Constants.*;
25 | import static io.nats.client.Options.*;
26 |
27 | public class SparkToNatsConnectorPoolTest {
28 |
29 | private static final String SUBJECTS = "SUB, SUB";
30 | private static final String URL = "nats://localhost:4333";
31 | static final String clusterID = "test-cluster"; //"my_test_cluster";
32 | protected static final String DEFAULT_SUBJECT = "spark2natsStreamingSubject";
33 |
34 | @BeforeClass
35 | public static void setUpBeforeClass() throws Exception {
36 | }
37 |
38 | @AfterClass
39 | public static void tearDownAfterClass() throws Exception {
40 | }
41 |
42 | @Before
43 | public void setUp() throws Exception {
44 | }
45 |
46 | @After
47 | public void tearDown() throws Exception {
48 | }
49 |
50 | @Test(expected=IncompleteException.class)
51 | public void testEmptySetProperties() throws Exception {
52 | final Properties properties = new Properties();
53 | SparkToNatsConnectorPool.newPool().withProperties(properties).getConnector();
54 | }
55 |
56 | @Test
57 | public void testSetProperties() throws Exception {
58 | final Properties properties = new Properties();
59 | properties.setProperty(PROP_SUBJECTS, SUBJECTS);
60 | properties.setProperty(PROP_URL, URL);
61 | final SparkToNatsConnector<?> connector = SparkToNatsConnectorPool.newPool().withProperties(properties).getConnector();
62 |
63 | assertEquals(2, connector.getSubjects().size());
64 | assertEquals("SUB", connector.getSubjects().toArray()[0]);
65 |
66 | assertEquals(URL, connector.getNatsURL());
67 | }
68 |
69 | @Test()
70 | public void testStreamingSparkToNatsWithFilledPropertiesPublish() throws Exception {
71 | final Properties properties = new Properties();
72 | properties.setProperty(PROP_SUBJECTS, "sub1,"+DEFAULT_SUBJECT+" , sub2");
73 | final SparkToNatsConnectorPool<?> connectorPool = SparkToNatsConnectorPool.newStreamingPool(clusterID).withProperties(properties);
74 | final SparkToNatsConnector<?> connector = connectorPool.getConnector();
75 | assertEquals(3, connector.getSubjects().size());
76 | }
77 |
78 | }
79 |
--------------------------------------------------------------------------------
/src/main/java/com/logimethods/connector/nats/spark/test/NATSServer.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Apcera Inc.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.nats.spark.test;
9 |
10 | import java.io.File;
11 | import java.io.IOException;
12 | import java.util.ArrayList;
13 | import java.util.Arrays;
14 | import java.util.List;
15 |
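/**
* Runs a local 'gnatsd' server process for integration tests; the gnatsd binary is
* expected to be available on the PATH.
*/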
16 | class NATSServer implements AutoCloseable
17 | {
18 | final static String GNATSD = "gnatsd";
19 |
20 | // Enable this for additional server debugging info.
21 | boolean debug = false;
22 |
23 | ProcessBuilder pb;
24 | Process p;
25 | ProcessStartInfo psInfo;
26 |
27 | class ProcessStartInfo {
28 | List<String> arguments = new ArrayList<String>();
29 |
30 | public ProcessStartInfo(String command) {
31 | this.arguments.add(command);
32 | }
33 |
34 | public void addArgument(String arg)
35 | {
36 | this.arguments.addAll(Arrays.asList(arg.split("\\s+")));
37 | }
38 |
39 | String[] getArgsAsArray() {
40 | return arguments.toArray(new String[arguments.size()]);
41 | }
42 |
43 | String getArgsAsString() {
44 | String stringVal = new String();
45 | for (String s : arguments)
46 | stringVal = stringVal.concat(s+" ");
47 | return stringVal.trim();
48 | }
49 |
50 | public String toString() {
51 | return getArgsAsString();
52 | }
53 | }
54 |
55 | /* public NATSServer()
56 | {
57 | this(-1);
58 | }*/
59 |
60 | public NATSServer(int port)
61 | {
62 | psInfo = this.createProcessStartInfo();
63 |
64 | if (port > 1023) {
65 | psInfo.addArgument("-p " + String.valueOf(port));
66 | }
67 | // psInfo.addArgument("-m 8222");
68 |
69 | start();
70 | }
71 |
72 | private String buildConfigFileName(String configFile)
73 | {
74 | return configFile;
75 | }
76 |
77 | public NATSServer(String configFile)
78 | {
79 | psInfo = this.createProcessStartInfo();
80 | psInfo.addArgument("-config " + buildConfigFileName(configFile));
81 | start();
82 | }
83 |
84 | private ProcessStartInfo createProcessStartInfo()
85 | {
86 | psInfo = new ProcessStartInfo(GNATSD);
87 |
88 | if (debug)
89 | {
90 | psInfo.addArgument("-DV");
91 | // psInfo.addArgument("-l gnatsd.log");
92 | }
93 |
94 | return psInfo;
95 | }
96 |
97 | public void start()
98 | {
99 | try {
100 | pb = new ProcessBuilder(psInfo.arguments);
101 | //pb.directory(new File("src/test"));
102 | if (debug)
103 | pb.inheritIO();
104 | else {
105 | pb.redirectError(new File("/dev/null"));
106 | pb.redirectOutput(new File("/dev/null"));
107 | }
108 | p = pb.start();
109 | if (debug)
110 | System.out.println("Started [" + psInfo + "]");
111 | } catch (IOException e) {
112 | // e.printStackTrace();
113 | }
114 | }
115 |
116 | public void shutdown()
117 | {
118 | if (p == null)
119 | return;
120 |
121 | p.destroy();
122 | if (debug)
123 | System.out.println("Stopped [" + psInfo + "]");
124 |
125 | p = null;
126 | }
127 |
128 | @Override
129 | public void close() {
130 | this.shutdown();
131 | }
132 | }
133 |
--------------------------------------------------------------------------------
/src/test/java/com/logimethods/connector/spark/to_nats/AbstractSparkToNatsConnectorTest.java:
--------------------------------------------------------------------------------
1 | package com.logimethods.connector.spark.to_nats;
2 |
3 | import static com.logimethods.connector.nats.spark.test.UnitTestUtilities.*;
4 |
5 | import java.io.File;
6 | import java.io.FileNotFoundException;
7 | import java.io.IOException;
8 | import java.io.PrintWriter;
9 | import java.io.Serializable;
10 | import java.io.UnsupportedEncodingException;
11 | import java.util.List;
12 |
13 | import org.apache.spark.SparkConf;
14 | import org.apache.spark.streaming.Durations;
15 | import org.apache.spark.streaming.api.java.JavaStreamingContext;
16 | import org.junit.After;
17 | import org.junit.AfterClass;
18 | import org.junit.Before;
19 | import org.slf4j.Logger;
20 |
21 | import com.google.common.io.Files;
22 | import com.logimethods.connector.nats.spark.test.UnitTestUtilities;
23 | import com.logimethods.connector.nats_spark.NatsSparkUtilities;
24 |
25 | public class AbstractSparkToNatsConnectorTest implements Serializable {
26 |
27 | /**
28 | *
29 | */
30 | private static final long serialVersionUID = 1L;
31 |
32 | protected static final String DEFAULT_SUBJECT = "spark2natsSubject";
33 | protected static JavaStreamingContext ssc;
34 | protected static Logger logger = null;
35 | // protected File tempDir;
36 | protected static DataSource dataSource;
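// Choose the DataSource implementation from the "data_source" test property:
// file-based by default, or socket-based with configurable read/write endpoints.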
37 | static {
38 | switch (UnitTestUtilities.getProperty("data_source", "file")) {
39 | case "file": dataSource = new DataSourceFile() ;
40 | break;
41 | case "socket": dataSource =
42 | new DataSourceSocket(UnitTestUtilities.getProperty("socket_hostname_write", "localhost"),
43 | UnitTestUtilities.getIntProperty("socket_port_write", 9998),
44 | UnitTestUtilities.getProperty("socket_hostname_read", "localhost"),
45 | UnitTestUtilities.getIntProperty("socket_port_read", 9999)) ;
46 | break;
47 | }
48 | }
49 | protected int fileTmpIncr = 0;
50 |
51 | /**
52 | * @throws java.lang.Exception
53 | */
54 | @AfterClass
55 | public static void tearDownAfterClass() throws Exception {
56 | UnitTestUtilities.stopDefaultServer();
57 | }
58 |
59 | /**
60 | * @throws java.lang.Exception
61 | */
62 | @Before
63 | public void setUp() throws Exception {
64 | // To avoid "Only one StreamingContext may be started in this JVM. Currently running StreamingContext was started at .../..."
65 | Thread.sleep(500);
66 |
67 | // Create a local StreamingContext with two working threads and a batch interval of 1 second
68 | SparkConf conf =
69 | UnitTestUtilities.newSparkConf()
70 | .setAppName("AbstractSparkToNatsConnector");
71 | ssc = new JavaStreamingContext(conf, Durations.seconds(1));
72 |
73 | // tempDir = Files.createTempDir();
74 | // tempDir.deleteOnExit();
75 | dataSource.setup();
76 | }
77 |
78 | /**
79 | * @throws java.lang.Exception
80 | */
81 | @After
82 | public void tearDown() throws Exception {
83 | if (ssc != null) {
84 | ssc.stop();
85 | ssc = null;
86 | }
87 | }
88 |
89 | protected void writeFullData(final List<Integer> data) throws IOException {
90 | /* final File tmpFile = new File(tempDir.getAbsolutePath(), "tmp" + fileTmpIncr++ +".txt");
91 | final PrintWriter writer = new PrintWriter(tmpFile, "UTF-8");
92 | for(Integer str: data) {
93 | writer.println(str);
94 | }
95 | writer.close();*/
96 | dataSource.open();
97 | dataSource.write(data);
98 | dataSource.close();
99 | }
100 | }
101 |
--------------------------------------------------------------------------------
/src/test/java/com/logimethods/connector/nats/to_spark/StandardNatsToSparkWithAttributesTest.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.nats.to_spark;
9 |
10 | import static com.logimethods.connector.nats_spark.Constants.PROP_SUBJECTS;
11 | import static io.nats.client.Options.PROP_URL;
12 | import static org.junit.Assert.assertEquals;
13 | import static org.junit.Assert.assertTrue;
14 |
15 | import java.io.IOException;
16 | import java.util.Properties;
17 |
18 | import org.apache.spark.storage.StorageLevel;
19 | import org.junit.Test;
20 |
21 | import com.logimethods.connector.nats_spark.IncompleteException;
22 |
23 | import static com.logimethods.connector.nats.spark.test.UnitTestUtilities.*;
24 |
25 | public class StandardNatsToSparkWithAttributesTest {
26 | protected final static String CLUSTER_ID = "CLUSTER_ID";
27 | protected final static String DURABLE_NAME = "$DURABLE_NAME";
28 | protected final static Properties PROPERTIES = new Properties();
29 |
30 | {
31 | PROPERTIES.setProperty(PROP_SUBJECTS, "sub1,sub3 , sub2");
32 | PROPERTIES.setProperty(PROP_URL, NATS_URL);
33 | }
34 |
35 | @Test
36 | public void testNatsStandardToSparkConnectorImpl_0() throws IncompleteException {
37 | StandardNatsToSparkConnectorImpl<String> connector =
38 | NatsToSparkConnector
39 | .receiveFromNats(String.class, StorageLevel.MEMORY_ONLY())
40 | .withProperties(PROPERTIES);
41 | assertTrue(connector instanceof StandardNatsToSparkConnectorImpl);
42 | assertEquals(NATS_URL, connector.getEnrichedProperties().getProperty(PROP_URL));
43 | assertEquals(3, connector.getSubjects().size());
44 | }
45 |
46 | @Test
47 | public void testNatsStandardToSparkConnectorImpl_1() throws IncompleteException {
48 | StandardNatsToSparkConnectorImpl<String> connector =
49 | NatsToSparkConnector
50 | .receiveFromNats(String.class, StorageLevel.MEMORY_ONLY())
51 | .withProperties(PROPERTIES)
52 | .withSubjects("SUBJECT");
53 | assertTrue(connector instanceof StandardNatsToSparkConnectorImpl);
54 | assertEquals(NATS_URL, connector.getEnrichedProperties().getProperty(PROP_URL));
55 | assertEquals(1, connector.getSubjects().size());
56 | }
57 |
58 | @Test
59 | public void testNatsStandardToSparkConnectorImpl_2() throws IncompleteException {
60 | StandardNatsToSparkConnectorImpl<String> connector =
61 | NatsToSparkConnector
62 | .receiveFromNats(String.class, StorageLevel.MEMORY_ONLY())
63 | .withSubjects("SUBJECT")
64 | .withProperties(PROPERTIES);
65 | assertTrue(connector instanceof StandardNatsToSparkConnectorImpl);
66 | assertEquals(NATS_URL, connector.getEnrichedProperties().getProperty(PROP_URL));
67 | assertEquals(1, connector.getSubjects().size());
68 | }
69 |
70 | /**
71 | * Test method for {@link com.logimethods.connector.nats.to_spark.NatsToSparkConnector#receiveFromNats(java.lang.String, int, java.lang.String)}.
72 | * @throws Exception
73 | */
74 | @Test(timeout=360000, expected=Exception.class)
75 | public void testNatsToSparkConnectorWITHOUTSubjects() throws Exception {
76 | NatsToSparkConnector.receiveFromNats(String.class, StorageLevel.MEMORY_ONLY()).withNatsURL(NATS_URL).receive();
77 | }
78 | }
79 |
--------------------------------------------------------------------------------
/src/test/java/com/logimethods/connector/nats/to_spark/StandardNatsToKeyValueSparkWithAttributesTest.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.nats.to_spark;
9 |
10 | import static com.logimethods.connector.nats_spark.Constants.PROP_SUBJECTS;
11 | import static io.nats.client.Options.PROP_URL;
12 | import static org.junit.Assert.assertEquals;
13 | import static org.junit.Assert.assertTrue;
14 |
15 | import java.io.IOException;
16 | import java.util.Properties;
17 |
18 | import org.apache.spark.storage.StorageLevel;
19 | import org.junit.Test;
20 |
21 | import com.logimethods.connector.nats_spark.IncompleteException;
22 | import static com.logimethods.connector.nats.spark.test.UnitTestUtilities.*;
23 |
24 | public class StandardNatsToKeyValueSparkWithAttributesTest {
25 | protected final static String CLUSTER_ID = "CLUSTER_ID";
26 | protected final static String DURABLE_NAME = "$DURABLE_NAME";
27 | protected final static Properties PROPERTIES = new Properties();
28 |
29 | {
30 | PROPERTIES.setProperty(PROP_SUBJECTS, "sub1,sub3 , sub2");
31 | PROPERTIES.setProperty(PROP_URL, NATS_URL);
32 | }
33 |
34 | @Test
35 | public void testNatsStandardToSparkConnectorImpl_0() throws IncompleteException {
36 | StandardNatsToKeyValueSparkConnectorImpl<String> connector =
37 | NatsToSparkConnector
38 | .receiveFromNats(String.class, StorageLevel.MEMORY_ONLY())
39 | .withProperties(PROPERTIES)
40 | .storedAsKeyValue();
41 | assertTrue(connector instanceof StandardNatsToKeyValueSparkConnectorImpl);
42 | assertEquals(NATS_URL, connector.getEnrichedProperties().getProperty(PROP_URL));
43 | assertEquals(3, connector.getSubjects().size());
44 | }
45 |
46 | @Test
47 | public void testNatsStandardToSparkConnectorImpl_1() throws IncompleteException {
48 | StandardNatsToKeyValueSparkConnectorImpl<String> connector =
49 | NatsToSparkConnector
50 | .receiveFromNats(String.class, StorageLevel.MEMORY_ONLY())
51 | .withProperties(PROPERTIES).withSubjects("SUBJECT")
52 | .storedAsKeyValue();
53 | assertTrue(connector instanceof StandardNatsToKeyValueSparkConnectorImpl);
54 | assertEquals(NATS_URL, connector.getEnrichedProperties().getProperty(PROP_URL));
55 | assertEquals(1, connector.getSubjects().size());
56 | }
57 |
58 | @Test
59 | public void testNatsStandardToSparkConnectorImpl_2() throws IncompleteException {
60 | StandardNatsToKeyValueSparkConnectorImpl<String> connector =
61 | NatsToSparkConnector
62 | .receiveFromNats(String.class, StorageLevel.MEMORY_ONLY())
63 | .withSubjects("SUBJECT")
64 | .withProperties(PROPERTIES)
65 | .storedAsKeyValue();
66 | assertTrue(connector instanceof StandardNatsToKeyValueSparkConnectorImpl);
67 | assertEquals(NATS_URL, connector.getEnrichedProperties().getProperty(PROP_URL));
68 | assertEquals(1, connector.getSubjects().size());
69 | }
70 |
71 | /**
72 | * Test method for {@link com.logimethods.connector.nats.to_spark.NatsToSparkConnector#receiveFromNats(java.lang.String, int, java.lang.String)}.
73 | * @throws Exception
74 | */
75 | @Test(timeout=360000, expected=Exception.class)
76 | public void testNatsToSparkConnectorWITHOUTSubjects() throws Exception {
77 | NatsToSparkConnector.receiveFromNats(String.class, StorageLevel.MEMORY_ONLY()).withNatsURL(NATS_URL).receive();
78 | }
79 | }
80 |
--------------------------------------------------------------------------------
/src/main/java/com/logimethods/connector/nats/to_spark/NatsStreamingToSparkConnectorImpl.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.nats.to_spark;
9 |
10 | import org.apache.spark.storage.StorageLevel;
11 | import org.apache.spark.streaming.StreamingContext;
12 | import org.apache.spark.streaming.api.java.JavaPairDStream;
13 | import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
14 | import org.apache.spark.streaming.api.java.JavaStreamingContext;
15 | import org.apache.spark.streaming.dstream.ReceiverInputDStream;
16 | import org.slf4j.Logger;
17 | import org.slf4j.LoggerFactory;
18 |
19 | import io.nats.streaming.Message;
20 | import io.nats.streaming.MessageHandler;
21 | import scala.Tuple2;
22 |
23 | /**
24 | * A NATS Streaming to Spark Connector.
25 | *
26 | * It will transfer messages received from NATS into Spark data.
27 | *
28 | * This class extends {@link com.logimethods.connector.nats.to_spark.NatsToSparkConnector}&lt;T,R,V&gt;.
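*
* <p>A minimal usage sketch; it assumes the {@code receiveFromNatsStreaming(type,
* storageLevel, clusterID)} factory of {@code NatsToSparkConnector}, with placeholder
* URL, cluster ID and subject values, and {@code ssc} an existing {@code JavaStreamingContext}:
* <pre>{@code
* JavaReceiverInputDStream<String> messages =
*     NatsToSparkConnector
*         .receiveFromNatsStreaming(String.class, StorageLevel.MEMORY_ONLY(), "test-cluster")
*         .withNatsURL("nats://localhost:4222")
*         .withSubjects("mySubject")
*         .asStreamOf(ssc);
* }</pre>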
29 | */
30 | public class NatsStreamingToSparkConnectorImpl<R> extends OmnipotentNatsStreamingToSparkConnector<NatsStreamingToSparkConnectorImpl<R>, R, R> {
31 |
32 | /**
33 | *
34 | */
35 | private static final long serialVersionUID = 1L;
36 |
37 | static final Logger logger = LoggerFactory.getLogger(NatsStreamingToSparkConnectorImpl.class);
38 |
39 | /* Constructors with subjects provided by the environment */
40 |
41 | protected NatsStreamingToSparkConnectorImpl(Class<R> type, StorageLevel storageLevel, String clusterID, String clientID) {
42 | super(type, storageLevel, clusterID, clientID);
43 | }
44 |
45 | /**
46 | * @param ssc, the (Java based) Spark Streaming Context
47 | * @return a Spark Stream, belonging to the provided Context, that will collect NATS Messages
48 | */
49 | public JavaReceiverInputDStream<R> asStreamOf(JavaStreamingContext ssc) {
50 | return ssc.receiverStream(this);
51 | }
52 |
53 | /**
54 | * @param ssc, the (Scala based) Spark Streaming Context
55 | * @return a Spark Stream, belonging to the provided Context, that will collect NATS Messages
56 | */
57 | public ReceiverInputDStream<R> asStreamOf(StreamingContext ssc) {
58 | return ssc.receiverStream(this, scala.reflect.ClassTag$.MODULE$.apply(String.class));
59 | }
60 |
61 | /**
62 | * @param ssc, the (Java based) Spark Streaming Context
63 | * @return a Spark Stream, belonging to the provided Context,
64 | * that will collect NATS Messages as Key (the NATS Subject) / Value (the NATS Payload)
65 | */
66 | public JavaPairDStream<String, R> asStreamOfKeyValue(JavaStreamingContext ssc) {
67 | return ssc.receiverStream(this.storedAsKeyValue()).mapToPair(tuple -> tuple);
68 | }
69 |
70 | /**
71 | * @param ssc, the (Scala based) Spark Streaming Context
72 | * @return a Spark Stream, belonging to the provided Context,
73 | * that will collect NATS Messages as Tuples of (the NATS Subject) / (the NATS Payload)
74 | */
75 | public ReceiverInputDStream<Tuple2<String, R>> asStreamOfKeyValue(StreamingContext ssc) {
76 | return ssc.receiverStream(this.storedAsKeyValue(), scala.reflect.ClassTag$.MODULE$.apply(Tuple2.class));
77 | }
78 |
79 | @Override
80 | protected MessageHandler getMessageHandler() {
81 | return new MessageHandler() {
82 | @Override
83 | public void onMessage(Message m) {
84 | R s = decodeData(m);
85 | if (logger.isTraceEnabled()) {
86 | logger.trace("Received by {} on Subject '{}': {}.", NatsStreamingToSparkConnectorImpl.this,
87 | m.getSubject(), s);
88 | }
89 | store(s);
90 | }
91 | };
92 | }
93 | }
94 |
95 |
--------------------------------------------------------------------------------
/src/main/java/com/logimethods/connector/nats/spark/test/NatsStreamingSubscriber.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Apcera Inc.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.nats.spark.test;
9 |
10 | import java.io.IOException;
11 | import java.util.Collection;
12 | import java.util.LinkedList;
13 | import java.util.concurrent.TimeoutException;
14 |
15 | import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
16 |
17 | import com.logimethods.connector.nats_spark.NatsSparkUtilities;
18 |
19 | import io.nats.streaming.Message;
20 | import io.nats.streaming.MessageHandler;
21 | import io.nats.streaming.NatsStreaming;
22 | import io.nats.streaming.Options;
23 | import io.nats.streaming.StreamingConnection;
24 | import io.nats.streaming.Subscription;
25 |
26 | public class NatsStreamingSubscriber<V> extends NatsSubscriber {
27 |
28 | private String clusterName;
29 | private String clientName;
30 | private Class<V> type;
31 | private Collection<V> data;
32 |
33 | /**
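* Subscribes to a NATS Streaming subject and signals completion once the expected
* number of messages (the size of the provided data collection) has been received.
*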
34 | * @param natsUrl
35 | * @param id
36 | * @param subject
37 | * @param clusterName
38 | * @param clientName
39 | * @param data
40 | * @param type
41 | */
42 | public NatsStreamingSubscriber(String natsUrl, String id, String subject, String clusterName, String clientName, Collection<V> data, Class<V> type) {
43 | super(natsUrl, id, subject, data.size());
44 | this.type = type;
45 | this.clusterName = clusterName;
46 | this.clientName = clientName;
47 | this.data = new LinkedList<V>(data);
48 | }
49 |
50 | @Override
51 | public void run() {
52 | try {
53 | logger.info("NATS Subscriber ({}): Subscribing to subject: {}", id, subject); //trace
54 |
55 | final Options.Builder optionsBuilder = new Options.Builder();
56 | if (natsUrl != null) {
57 | optionsBuilder.natsUrl(natsUrl);
58 | }
59 | StreamingConnection c;
60 | final Options options = optionsBuilder.build();
61 | try {
62 | c = NatsStreaming.connect(clusterName, clientName, options);
63 | } catch (Exception e) {
64 | logger.error("NatsStreaming.connect({}, {}, {}) PRODUCES {}", clusterName, clientName, ReflectionToStringBuilder.toString(options), e.getMessage());
65 | throw(e);
66 | }
67 |
68 | // AsyncSubscription s = c.subscribeAsync(subject, this);
69 | // s.start();
70 | Subscription sub = c.subscribe(subject, new MessageHandler() {
71 | public void onMessage(Message m) {
72 | final int tallyMessage = tallyMessage();
73 | logger.info("NATS Subscriber ({}) Received {}/{} message(s).", clientName, tallyMessage, testCount);
74 |
75 | //System.out.printf("Received a message: %s\n", m.getData());
76 | final V obj = NatsSparkUtilities.decodeData(type, m.getData());
77 | logger.info("Received a message ({}) on subject: {}", obj, subject);
78 | /* if (! data.remove(obj)) {
79 | throw new RuntimeException(data.toString() + " does not contain " + obj);
80 | }*/
81 |
82 | if (tallyMessage == testCount)
83 | {
84 | logger.info("NATS Subscriber ({}) Received {} messages. Completed.", clientName, testCount);
85 | setComplete();
86 | }
87 | }
88 | });
89 |
90 | setReady();
91 |
92 | logger.info("NATS Subscriber waiting for Completion"); // debug
93 |
94 | waitForCompletion();
95 |
96 | // Unsubscribe
97 | sub.unsubscribe();
98 |
99 | // Close connection
100 | c.close();
101 |
102 | logger.info("NATS Subscriber ({}): Exiting.", id); // debug
103 | }
104 | catch (Exception ex)
105 | {
106 | ex.printStackTrace();
107 | }
108 | }
109 |
110 | public String getClusterName() {
111 | return clusterName;
112 | }
113 |
114 | public String getClientName() {
115 | return clientName;
116 | }
117 |
118 | public Collection<V> getData() {
119 | return data;
120 | }
121 |
122 | }
123 |
--------------------------------------------------------------------------------
/src/test/java/com/logimethods/connector/nats/to_spark/api/StandardNatsToSparkConnectorTest.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.nats.to_spark.api;
9 |
10 | import static com.logimethods.connector.nats.spark.test.UnitTestUtilities.NATS_URL;
11 | import static com.logimethods.connector.nats.spark.test.UnitTestUtilities.NATS_LOCALHOST_URL;
12 | import static com.logimethods.connector.nats_spark.Constants.PROP_SUBJECTS;
13 | import static io.nats.client.Options.PROP_URL;
14 |
15 | import java.util.Properties;
16 |
17 | import org.apache.spark.storage.StorageLevel;
18 | import org.apache.spark.streaming.Duration;
19 | import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
20 | import org.apache.spark.streaming.api.java.JavaStreamingContext;
21 | import org.junit.Test;
22 |
23 | import com.logimethods.connector.nats.spark.test.NatsPublisher;
24 | import com.logimethods.connector.nats.spark.test.StandardNatsPublisher;
25 | import com.logimethods.connector.nats.to_spark.AbstractNatsToSparkTest;
26 | import com.logimethods.connector.nats.to_spark.NatsToSparkConnector;
27 | import com.logimethods.connector.nats.to_spark.StandardNatsToSparkConnectorImpl;
28 |
29 | public class StandardNatsToSparkConnectorTest extends AbstractNatsToSparkTest {
30 |
31 | @Override
32 | protected NatsPublisher getNatsPublisher(final int nbOfMessages) {
33 | return new StandardNatsPublisher("np", NATS_LOCALHOST_URL, DEFAULT_SUBJECT, nbOfMessages);
34 | }
35 |
36 | @Test(timeout=360000)
37 | public void testNatsToSparkConnectorWithAdditionalPropertiesAndSubjects() throws InterruptedException {
38 |
39 | JavaStreamingContext ssc = new JavaStreamingContext(sc, new Duration(200));
40 |
41 | final Properties properties = new Properties();
42 | properties.setProperty(PROP_URL, NATS_URL);
43 | final JavaReceiverInputDStream<String> messages =
44 | NatsToSparkConnector
45 | .receiveFromNats(String.class, StorageLevel.MEMORY_ONLY())
46 | .withProperties(properties)
47 | .withSubjects(DEFAULT_SUBJECT)
48 | .asStreamOf(ssc);
49 |
50 | validateTheReceptionOfMessages(ssc, messages);
51 | }
52 |
53 | @Test(timeout=360000)
54 | public void testNatsToSparkConnectorWithAdditionalSubjects() throws InterruptedException {
55 |
56 | JavaStreamingContext ssc = new JavaStreamingContext(sc, new Duration(200));
57 |
58 | final JavaReceiverInputDStream<String> messages =
59 | NatsToSparkConnector
60 | .receiveFromNats(String.class, StorageLevel.MEMORY_ONLY())
61 | .withNatsURL(NATS_URL)
62 | .withSubjects(DEFAULT_SUBJECT)
63 | .asStreamOf(ssc);
64 |
65 | validateTheReceptionOfMessages(ssc, messages);
66 | }
67 |
68 | @Test(timeout=360000)
69 | public void testNatsToSparkConnectorWithAdditionalPropertiesAndMultipleSubjects() throws InterruptedException {
70 |
71 | JavaStreamingContext ssc = new JavaStreamingContext(sc, new Duration(200));
72 |
73 | final Properties properties = new Properties();
74 | final JavaReceiverInputDStream<String> messages =
75 | NatsToSparkConnector
76 | .receiveFromNats(String.class, StorageLevel.MEMORY_ONLY())
77 | .withNatsURL(NATS_URL)
78 | .withProperties(properties)
79 | .withSubjects(DEFAULT_SUBJECT, "EXTRA_SUBJECT")
80 | .asStreamOf(ssc);
81 |
82 | validateTheReceptionOfMessages(ssc, messages);
83 | }
84 |
85 | @Test(timeout=360000)
86 | public void testNatsToSparkConnectorWithAdditionalProperties() throws InterruptedException {
87 |
88 | JavaStreamingContext ssc = new JavaStreamingContext(sc, new Duration(200));
89 |
90 | final Properties properties = new Properties();
91 | properties.setProperty(PROP_SUBJECTS, "sub1,"+DEFAULT_SUBJECT+" , sub2");
92 | properties.setProperty(PROP_URL, NATS_URL);
93 | final JavaReceiverInputDStream<String> messages =
94 | NatsToSparkConnector
95 | .receiveFromNats(String.class, StorageLevel.MEMORY_ONLY())
96 | .withProperties(properties)
97 | .asStreamOf(ssc);
98 |
99 | validateTheReceptionOfMessages(ssc, messages);
100 | }
101 | }
--------------------------------------------------------------------------------
/src/test/java/com/logimethods/connector/nats/to_spark/api/StandardNatsToSparkKeyValueConnectorTest.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.nats.to_spark.api;
9 |
10 | import static com.logimethods.connector.nats.spark.test.UnitTestUtilities.NATS_URL;
11 | import static com.logimethods.connector.nats.spark.test.UnitTestUtilities.NATS_LOCALHOST_URL;
12 | import static com.logimethods.connector.nats_spark.Constants.PROP_SUBJECTS;
13 | import static io.nats.client.Options.PROP_URL;
14 |
15 | import java.io.Serializable;
16 | import java.util.Properties;
17 |
18 | import org.apache.spark.storage.StorageLevel;
19 | import org.apache.spark.streaming.Duration;
20 | import org.apache.spark.streaming.api.java.JavaPairDStream;
21 | import org.apache.spark.streaming.api.java.JavaStreamingContext;
22 | import org.junit.Test;
23 |
24 | import com.logimethods.connector.nats.spark.test.NatsPublisher;
25 | import com.logimethods.connector.nats.spark.test.StandardNatsPublisher;
26 | import com.logimethods.connector.nats.to_spark.AbstractNatsToSparkTest;
27 | import com.logimethods.connector.nats.to_spark.NatsToSparkConnector;
28 |
29 | public class StandardNatsToSparkKeyValueConnectorTest extends AbstractNatsToSparkTest implements Serializable {
30 |
31 | @Override
32 | protected NatsPublisher getNatsPublisher(final int nbOfMessages) {
33 | return new StandardNatsPublisher("np", NATS_LOCALHOST_URL, DEFAULT_SUBJECT, nbOfMessages);
34 | }
35 |
36 | @Test(timeout=360000)
37 | public void testNatsToSparkConnectorWithAdditionalPropertiesAndSubjects() throws InterruptedException {
38 |
39 | JavaStreamingContext ssc = new JavaStreamingContext(sc, new Duration(200));
40 |
41 | final Properties properties = new Properties();
42 | properties.setProperty(PROP_URL, NATS_URL);
43 |
44 | final JavaPairDStream<String, String> messages =
45 | NatsToSparkConnector
46 | .receiveFromNats(String.class, StorageLevel.MEMORY_ONLY())
47 | .withProperties(properties)
48 | .withSubjects(DEFAULT_SUBJECT)
49 | .asStreamOfKeyValue(ssc);
50 |
51 | validateTheReceptionOfMessages(ssc, messages);
52 | }
53 |
54 | @Test(timeout=360000)
55 | public void testNatsToSparkConnectorWithAdditionalSubjects() throws InterruptedException {
56 |
57 | JavaStreamingContext ssc = new JavaStreamingContext(sc, new Duration(200));
58 |
59 | final JavaPairDStream<String, String> messages =
60 | NatsToSparkConnector
61 | .receiveFromNats(String.class, StorageLevel.MEMORY_ONLY())
62 | .withNatsURL(NATS_URL)
63 | .withSubjects(DEFAULT_SUBJECT)
64 | .asStreamOfKeyValue(ssc);
65 |
66 | validateTheReceptionOfMessages(ssc, messages);
67 | }
68 |
69 | @Test(timeout=360000)
70 | public void testNatsToSparkConnectorWithAdditionalPropertiesAndMultipleSubjects() throws InterruptedException {
71 |
72 | JavaStreamingContext ssc = new JavaStreamingContext(sc, new Duration(200));
73 |
74 | final Properties properties = new Properties();
75 | final JavaPairDStream<String, String> messages =
76 | NatsToSparkConnector
77 | .receiveFromNats(String.class, StorageLevel.MEMORY_ONLY())
78 | .withNatsURL(NATS_URL)
79 | .withProperties(properties)
80 | .withSubjects(DEFAULT_SUBJECT, "EXTRA_SUBJECT")
81 | .asStreamOfKeyValue(ssc);
82 |
83 | validateTheReceptionOfMessages(ssc, messages);
84 | }
85 |
86 | @Test(timeout=360000)
87 | public void testNatsToSparkConnectorWithAdditionalProperties() throws InterruptedException {
88 |
89 | JavaStreamingContext ssc = new JavaStreamingContext(sc, new Duration(200));
90 |
91 | final Properties properties = new Properties();
92 | properties.setProperty(PROP_SUBJECTS, "sub1,"+DEFAULT_SUBJECT+" , sub2");
93 | properties.setProperty(PROP_URL, NATS_URL);
94 | final JavaPairDStream<String, String> messages =
95 | NatsToSparkConnector
96 | .receiveFromNats(String.class, StorageLevel.MEMORY_ONLY())
97 | .withProperties(properties)
98 | .asStreamOfKeyValue(ssc);
99 |
100 | validateTheReceptionOfMessages(ssc, messages);
101 | }
102 | }
103 |
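
A note on the PROP_SUBJECTS value used in the last test above: the connector splits the comma-separated property and trims every entry, so the irregular spacing in "sub1,"+DEFAULT_SUBJECT+" , sub2" is harmless and yields three clean subjects. A minimal sketch of that parsing, reusing NatsSparkUtilities.extractCollection(...) from this repository ("mySubject" is an illustrative stand-in for DEFAULT_SUBJECT):

    import java.util.Collection;
    import com.logimethods.connector.nats_spark.NatsSparkUtilities;

    public class SubjectsParsingSketch {
        public static void main(String[] args) {
            // Same shape as the value built in testNatsToSparkConnectorWithAdditionalProperties().
            Collection<String> subjects = NatsSparkUtilities.extractCollection("sub1,mySubject , sub2");
            System.out.println(subjects); // [sub1, mySubject, sub2] -- each entry is trimmed
        }
    }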
--------------------------------------------------------------------------------
/src/test/java/com/logimethods/connector/spark/to_nats/AbstractSparkToStandardNatsConnectorLifecycleTest.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.spark.to_nats;
9 |
10 | import static com.logimethods.connector.nats.spark.test.UnitTestUtilities.*;
11 | import static org.junit.Assert.assertEquals;
12 | import static org.junit.Assert.assertTrue;
13 |
14 | import java.time.Duration;
15 | import java.util.List;
16 | import java.util.concurrent.TimeUnit;
17 |
18 | import org.apache.log4j.Level;
19 | import org.apache.spark.streaming.api.java.JavaDStream;
20 | import org.junit.BeforeClass;
21 | import org.junit.Test;
22 | import org.slf4j.LoggerFactory;
23 |
24 | import com.logimethods.connector.nats.spark.test.StandardNatsSubscriber;
25 | import com.logimethods.connector.nats.spark.test.TestClient;
26 | import com.logimethods.connector.nats.spark.test.UnitTestUtilities;
27 |
28 | //@Ignore
29 | @SuppressWarnings("serial")
30 | public abstract class AbstractSparkToStandardNatsConnectorLifecycleTest extends AbstractSparkToNatsConnectorTest {
31 |
32 | /**
33 | * @throws java.lang.Exception
34 | */
35 | @BeforeClass
36 | public static void setUpBeforeClass() throws Exception {
37 | // Enable tracing for debugging as necessary.
38 | Level level = Level.WARN;
39 | UnitTestUtilities.setLogLevel(SparkToNatsConnectorPool.class, level);
40 | UnitTestUtilities.setLogLevel(SparkToNatsConnector.class, level);
41 | UnitTestUtilities.setLogLevel(SparkToStandardNatsConnectorImpl.class, level);
42 | UnitTestUtilities.setLogLevel(AbstractSparkToStandardNatsConnectorLifecycleTest.class, level);
43 | UnitTestUtilities.setLogLevel(TestClient.class, level);
44 | UnitTestUtilities.setLogLevel("org.apache.spark", Level.WARN);
45 | UnitTestUtilities.setLogLevel("org.spark-project", Level.WARN);
46 |
47 | logger = LoggerFactory.getLogger(AbstractSparkToStandardNatsConnectorLifecycleTest.class);
48 |
49 | UnitTestUtilities.startDefaultServer();
50 | }
51 |
52 | // @Test(timeout=360000) TODO
53 | public void testStaticSparkToNatsWithConnectionLifecycle() throws Exception {
54 | final long poolSize = SparkToStandardNatsConnectorPool.poolSize();
55 |
56 | final List<String> data = UnitTestUtilities.getData();
57 |
58 | final String subject1 = "subject1";
59 |
60 | final String subject2 = "subject2";
61 |
62 | final int partitionsNb = 3;
63 | publishToNats(subject1, subject2, partitionsNb);
64 |
65 | ssc.start();
66 |
67 | TimeUnit.SECONDS.sleep(1);
68 |
69 | final StandardNatsSubscriber ns1 = UnitTestUtilities.getStandardNatsSubscriber(data, subject1, NATS_LOCALHOST_URL);
70 | final StandardNatsSubscriber ns2 = UnitTestUtilities.getStandardNatsSubscriber(data, subject2, NATS_LOCALHOST_URL);
71 | writeFullData(data);
72 | // wait for the subscribers to complete.
73 | ns1.waitForCompletion();
74 | ns2.waitForCompletion();
75 |
76 | TimeUnit.MILLISECONDS.sleep(100);
77 | assertEquals("The connections Pool size should be the same as the number of Spark partitions",
78 | poolSize + partitionsNb, SparkToStandardNatsConnectorPool.poolSize());
79 |
80 | final StandardNatsSubscriber ns1p = UnitTestUtilities.getStandardNatsSubscriber(data, subject1, NATS_LOCALHOST_URL);
81 | final StandardNatsSubscriber ns2p = UnitTestUtilities.getStandardNatsSubscriber(data, subject2, NATS_LOCALHOST_URL);
82 |
83 | writeFullData(data);
84 | // wait for the subscribers to complete.
85 | ns1p.waitForCompletion();
86 | ns2p.waitForCompletion();
87 | TimeUnit.MILLISECONDS.sleep(100);
88 | assertEquals("The connections Pool size should be the same as the number of Spark partitions",
89 | poolSize + partitionsNb, SparkToStandardNatsConnectorPool.poolSize());
90 |
91 | ssc.stop();
92 | ssc = null;
93 |
94 | logger.debug("Spark Context Stopped");
95 |
96 | TimeUnit.SECONDS.sleep(5);
97 | logger.debug("After 5 sec delay");
98 |
99 | assertTrue("The poolSize() of " + SparkToStandardNatsConnectorPool.connectorsPoolMap + " should have been reverted to its original value",
100 | SparkToStandardNatsConnectorPool.poolSize() == poolSize);
101 | }
102 |
103 | protected abstract void publishToNats(final String subject1, final String subject2, final int partitionsNb);
104 | }
105 |
--------------------------------------------------------------------------------
/src/main/java/com/logimethods/connector/nats/spark/test/STANServer.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2015-2016 Apcera Inc.
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 |
9 | package com.logimethods.connector.nats.spark.test;
10 |
11 | import java.io.File;
12 | import java.io.IOException;
13 | import java.util.ArrayList;
14 | import java.util.Arrays;
15 | import java.util.List;
16 |
17 | public class STANServer implements Runnable, AutoCloseable {
18 | static final String STAN_SERVER = "nats-streaming-server";
19 | // Enable this for additional server debugging info.
20 | boolean debug = false;
21 |
22 | ProcessBuilder pb;
23 | Process proc;
24 | ProcessStartInfo psInfo;
25 |
26 | class ProcessStartInfo {
27 | List<String> arguments = new ArrayList<String>();
28 |
29 | public ProcessStartInfo(String command) {
30 | this.arguments.add(command);
31 | }
32 |
33 | public void addArgument(String arg) {
34 | this.arguments.addAll(Arrays.asList(arg.split("\\s+")));
35 | }
36 |
37 | String[] getArgsAsArray() {
38 | return arguments.toArray(new String[arguments.size()]);
39 | }
40 |
41 | String getArgsAsString() {
42 | String stringVal = "";
43 | for (String s : arguments) {
44 | stringVal = stringVal.concat(s + " ");
45 | }
46 | return stringVal.trim();
47 | }
48 |
49 | public String toString() {
50 | return getArgsAsString();
51 | }
52 | }
53 |
54 | public STANServer() {
55 | this(null, -1, false);
56 | }
57 |
58 | public STANServer(String id) {
59 | this(id, -1, false);
60 | }
61 |
62 | public STANServer(boolean debug) {
63 | this(null, -1, debug);
64 | }
65 |
66 | public STANServer(int port) {
67 | this(null, port, false);
68 | }
69 |
70 | public STANServer(String id, boolean debug) {
71 | this(id, -1, debug);
72 | }
73 |
74 | public STANServer(String id, int port, boolean debug) {
75 | this.debug = debug;
76 | psInfo = this.createProcessStartInfo();
77 |
78 | if (id != null) {
79 | psInfo.addArgument("-id " + id);
80 | }
81 | if (port > 1023) {
82 | psInfo.addArgument("-p " + String.valueOf(port));
83 | }
84 | start();
85 | }
86 |
87 | // private String buildConfigFileName(String configFile) {
88 | // return new String("../src/test/resources/" + configFile);
89 | // }
90 |
91 | // public STANServer(String configFile, boolean debug)
92 | // {
93 | // this.debug = debug;
94 | // psInfo = this.createProcessStartInfo();
95 | // psInfo.addArgument("-config " + buildConfigFileName(configFile));
96 | // start();
97 | // }
98 |
99 | private ProcessStartInfo createProcessStartInfo() {
100 | psInfo = new ProcessStartInfo(STAN_SERVER);
101 |
102 | if (debug) {
103 | // TODO
104 | // psInfo.addArgument("-DV");
105 | }
106 |
107 | return psInfo;
108 | }
109 |
110 | public void start() {
111 | try {
112 | pb = new ProcessBuilder(psInfo.arguments);
113 | pb.directory(new File("target"));
114 | if (debug) {
115 | System.err.println("Inheriting IO, psInfo =" + psInfo);
116 | pb.inheritIO();
117 | } else {
118 | pb.redirectError(new File("/dev/null"));
119 | pb.redirectOutput(new File("/dev/null"));
120 | }
121 | proc = pb.start();
122 | if (debug) {
123 | System.out.println("Started [" + psInfo + "]");
124 | }
125 | } catch (IOException e) { e.printStackTrace(); // don't swallow startup failures silently
126 | }
127 | }
128 |
129 | public void shutdown() {
130 | if (proc == null) {
131 | return;
132 | }
133 |
134 | proc.destroy();
135 | if (debug) {
136 | System.out.println("Stopped [" + psInfo + "]");
137 | }
138 |
139 | proc = null;
140 | }
141 |
142 | @Override
143 | public void run() {
144 | // TODO Auto-generated method stub
145 |
146 | }
147 |
148 | @Override
149 | public void close() {
150 | this.shutdown();
151 | }
152 | }
153 |
154 |
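Since STANServer implements AutoCloseable and wraps an external nats-streaming-server process, callers can scope the server with try-with-resources. A minimal usage sketch (assumes the nats-streaming-server binary is on the PATH and a target/ directory exists, as start() requires):

    // Hypothetical caller; not part of the class above.
    try (STANServer server = new STANServer("test-cluster")) {
        // The streaming server process is running here; exercise NATS Streaming clients...
    } // close() delegates to shutdown(), destroying the process even if the block throws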
--------------------------------------------------------------------------------
/src/test/java/com/logimethods/connector/nats/to_spark/NatsToSparkConnectorTest.java:
--------------------------------------------------------------------------------
1 | package com.logimethods.connector.nats.to_spark;
2 |
3 | import static org.junit.Assert.assertEquals;
4 |
5 | import java.io.ByteArrayInputStream;
6 | import java.io.ByteArrayOutputStream;
7 | import java.io.IOException;
8 | import java.io.ObjectInput;
9 | import java.io.ObjectInputStream;
10 | import java.io.ObjectOutput;
11 | import java.io.ObjectOutputStream;
12 | import java.io.Serializable;
13 | import java.util.Properties;
14 | import java.util.function.Function;
15 |
16 | import org.apache.commons.lang3.SerializationUtils;
17 | import org.apache.spark.storage.StorageLevel;
18 | import org.junit.Rule;
19 | import org.junit.Test;
20 | import org.junit.rules.ExpectedException;
21 |
22 | import com.logimethods.connector.nats_spark.NatsSparkUtilities;
23 | import static com.logimethods.connector.nats.spark.test.UnitTestUtilities.NATS_URL;
24 |
25 | @SuppressWarnings("serial")
26 | public class NatsToSparkConnectorTest implements Serializable {
27 |
28 | @Rule
29 | public ExpectedException thrown = ExpectedException.none();
30 |
31 | @Test
32 | public void testExtractDataByteArray_String() {
33 | StandardNatsToSparkConnectorImpl<String> connector =
34 | NatsToSparkConnector
35 | .receiveFromNats(String.class, StorageLevel.MEMORY_ONLY());
36 |
37 | String str = "A small piece of Text!";
38 | byte[] bytes = NatsSparkUtilities.encodeData(str);
39 | assertEquals(str, connector.decodeData(bytes));
40 | }
41 |
42 | @Test
43 | public void testExtractDataByteArray_Float() {
44 | StandardNatsToSparkConnectorImpl<Float> connector =
45 | NatsToSparkConnector
46 | .receiveFromNats(Float.class, StorageLevel.MEMORY_ONLY());
47 |
48 | Float f = 1234324234.34f;
49 | byte[] bytes = NatsSparkUtilities.encodeData(f);
50 | assertEquals(f, connector.decodeData(bytes));
51 | }
52 |
53 | @Test
54 | public void testPublicExtractDataByteArray_Float() {
55 | Float f = 1234324234.34f;
56 | byte[] bytes = NatsSparkUtilities.encodeData(f);
57 | assertEquals(f, NatsSparkUtilities.decodeData(float.class, bytes));
58 | }
59 |
60 | @Test
61 | public void testExtractDataByteArray_Exception() {
62 | thrown.expect(UnsupportedOperationException.class);
63 |
64 | StandardNatsToSparkConnectorImpl<NatsToSparkConnectorTest> connector =
65 | NatsToSparkConnector
66 | .receiveFromNats(NatsToSparkConnectorTest.class, StorageLevel.MEMORY_ONLY());
67 |
68 | byte[] bytes = "xxxx".getBytes();
69 | connector.decodeData(bytes);
70 | }
71 |
72 | @Test
73 | public void testExtractDataByteArray_DataExtractor() throws IOException {
74 | final Function<byte[], Dummy> dataExtractor = bytes -> {
75 | ByteArrayInputStream bis = new ByteArrayInputStream(bytes);
76 | ObjectInput in = null;
77 | Object o = null;
78 | try {
79 | try {
80 | in = new ObjectInputStream(bis);
81 | } catch (IOException e) {
82 | // TODO Auto-generated catch block
83 | e.printStackTrace();
84 | }
85 | o = in.readObject();
86 | } catch (ClassNotFoundException | IOException e) {
87 | // TODO Auto-generated catch block
88 | e.printStackTrace();
89 | } finally {
90 | try {
91 | if (in != null) {
92 | in.close();
93 | }
94 | } catch (IOException ex) {
95 | // ignore close exception
96 | }
97 | }
98 | return (Dummy) o;
99 |
100 | };
101 | StandardNatsToSparkConnectorImpl<Dummy> connector =
102 | NatsToSparkConnector
103 | .receiveFromNats(Dummy.class, StorageLevel.MEMORY_ONLY())
104 | .withDataDecoder(dataExtractor);
105 |
106 | Dummy dummy = new Dummy("Name");
107 | ByteArrayOutputStream bos = new ByteArrayOutputStream();
108 | ObjectOutput out = new ObjectOutputStream(bos);
109 | out.writeObject(dummy);
110 | out.flush();
111 | byte[] bytes = bos.toByteArray();
112 | bos.close();
113 |
114 | assertEquals(dummy, connector.decodeData(bytes));
115 | }
116 |
117 | @Test
118 | // @See https://github.com/Logimethods/nats-connector-spark/pull/3
119 | // @See https://github.com/nats-io/java-nats-streaming/issues/51
120 | public void testNatsToSparkConnectorImpl_Serialization() throws IOException, ClassNotFoundException {
121 | final Properties properties = new Properties();
122 | properties.setProperty(com.logimethods.connector.nats_spark.Constants.PROP_SUBJECTS, "SubjectA,SubjectB , SubjectC");
123 | StandardNatsToSparkConnectorImpl<String> connector =
124 | NatsToSparkConnector
125 | .receiveFromNats(String.class, StorageLevel.MEMORY_ONLY())
126 | .withProperties(properties)
127 | .withNatsURL(NATS_URL)
128 | .withSubjects("DEFAULT_SUBJECT");
129 |
130 | @SuppressWarnings("unchecked")
131 | final StandardNatsToSparkConnectorImpl<String> newConnector = (StandardNatsToSparkConnectorImpl<String>) SerializationUtils.clone(connector);
132 |
133 | assertEquals(NATS_URL, newConnector.getNatsUrl());
134 | }
135 | }
136 |
137 |
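The hand-rolled ObjectInputStream decoder in testExtractDataByteArray_DataExtractor can be written far more compactly with the SerializationUtils class this test already imports; a sketch, equivalent under the same assumption (the byte array holds a serialized Dummy). Unlike the verbose version, a corrupt payload surfaces as an unchecked SerializationException rather than a printed stack trace and a null result:

    final Function<byte[], Dummy> dataExtractor =
            bytes -> SerializationUtils.deserialize(bytes);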
--------------------------------------------------------------------------------
/src/main/java/com/logimethods/connector/nats/to_spark/StandardNatsToSparkConnectorImpl.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.nats.to_spark;
9 |
10 | import java.util.Properties;
11 |
12 | import org.apache.spark.storage.StorageLevel;
13 | import org.apache.spark.streaming.StreamingContext;
14 | import org.apache.spark.streaming.api.java.JavaPairDStream;
15 | import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
16 | import org.apache.spark.streaming.api.java.JavaStreamingContext;
17 | import org.apache.spark.streaming.dstream.ReceiverInputDStream;
18 | import org.slf4j.Logger;
19 | import org.slf4j.LoggerFactory;
20 |
21 | import io.nats.client.Message;
22 | import io.nats.client.MessageHandler;
23 | import scala.Tuple2;
24 |
25 | /**
26 | * A NATS to Spark Connector.
27 | *
28 | * It will transfer messages received from NATS into Spark data.
29 | *
30 | * That class extends {@link com.logimethods.connector.nats.to_spark.NatsToSparkConnector}<T,R,V>.
31 | */
32 | public class StandardNatsToSparkConnectorImpl<R> extends OmnipotentStandardNatsToSparkConnector<StandardNatsToSparkConnectorImpl<R>, R, R> {
33 |
34 | /**
35 | *
36 | */
37 | private static final long serialVersionUID = 1L;
38 |
39 | static final Logger logger = LoggerFactory.getLogger(StandardNatsToSparkConnectorImpl.class);
40 |
41 | protected StandardNatsToSparkConnectorImpl(Class<R> type, Properties properties, StorageLevel storageLevel, String... subjects) {
42 | super(type, storageLevel, subjects);
43 | logger.debug("CREATE NatsToSparkConnector {} with Properties '{}', Storage Level {} and NATS Subjects '{}'.", this, properties, storageLevel, subjects);
44 | }
45 |
46 | protected StandardNatsToSparkConnectorImpl(Class<R> type, StorageLevel storageLevel, String... subjects) {
47 | super(type, storageLevel, subjects);
48 | logger.debug("CREATE NatsToSparkConnector {} with Storage Level {} and NATS Subjects '{}'.", this, storageLevel, subjects);
49 | }
50 |
51 | protected StandardNatsToSparkConnectorImpl(Class<R> type, Properties properties, StorageLevel storageLevel) {
52 | super(type, storageLevel);
53 | logger.debug("CREATE NatsToSparkConnector {} with Properties '{}' and Storage Level {}.", this, properties, storageLevel);
54 | }
55 |
56 | protected StandardNatsToSparkConnectorImpl(Class<R> type, StorageLevel storageLevel) {
57 | super(type, storageLevel);
58 | logger.debug("CREATE NatsToSparkConnector {} with Storage Level {}.", this, storageLevel);
59 | }
60 |
61 | /**
62 | * @param ssc, the (Java based) Spark Streaming Context
63 | * @return a Spark Stream, belonging to the provided Context, that will collect NATS Messages
64 | */
65 | public JavaReceiverInputDStream<R> asStreamOf(JavaStreamingContext ssc) {
66 | return ssc.receiverStream(this);
67 | }
68 |
69 | /**
70 | * @param ssc, the (Scala based) Spark Streaming Context
71 | * @return a Spark Stream, belonging to the provided Context, that will collect NATS Messages
72 | */
73 | public ReceiverInputDStream<R> asStreamOf(StreamingContext ssc) {
74 | return ssc.receiverStream(this, scala.reflect.ClassTag$.MODULE$.apply(String.class));
75 | }
76 |
77 | /**
78 | * @param ssc, the (Java based) Spark Streaming Context
79 | * @return a Spark Stream, belonging to the provided Context,
80 | * that will collect NATS Messages as Key (the NATS Subject) / Value (the NATS Payload)
81 | */
82 | public JavaPairDStream<String, R> asStreamOfKeyValue(JavaStreamingContext ssc) {
83 | return ssc.receiverStream(this.storedAsKeyValue()).mapToPair(tuple -> tuple);
84 | }
85 |
86 | /**
87 | * @param ssc, the (Scala based) Spark Streaming Context
88 | * @return a Spark Stream, belonging to the provided Context,
89 | * that will collect NATS Messages as Tuples of (the NATS Subject) / (the NATS Payload)
90 | */
91 | public ReceiverInputDStream<Tuple2<String, R>> asStreamOfKeyValue(StreamingContext ssc) {
92 | return ssc.receiverStream(this.storedAsKeyValue(), scala.reflect.ClassTag$.MODULE$.apply(Tuple2.class));
93 | }
94 |
95 | protected MessageHandler getMessageHandler() {
96 | return new MessageHandler() {
97 | @Override
98 | public void onMessage(Message m) {
99 | R s = decodeData(m);
100 | if (logger.isTraceEnabled()) {
101 | logger.trace("Received by {} on Subject '{}' sharing Queue '{}': {}.", StandardNatsToSparkConnectorImpl.this, m.getSubject(), natsQueue, s);
102 | }
103 | store(s);
104 | }
105 | };
106 | }
107 | }
108 |
109 |
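To make the two asStreamOf / asStreamOfKeyValue entry points above concrete, a minimal driver sketch (assumes an existing JavaSparkContext sc; the URL, subject and batch duration are illustrative values, not defaults):

    JavaStreamingContext ssc = new JavaStreamingContext(sc, new Duration(200));

    // A stream of decoded payloads only:
    JavaReceiverInputDStream<String> payloads = NatsToSparkConnector
            .receiveFromNats(String.class, StorageLevel.MEMORY_ONLY())
            .withNatsURL("nats://localhost:4222")
            .withSubjects("sensor.>")
            .asStreamOf(ssc);

    // Or keyed by the originating NATS Subject:
    JavaPairDStream<String, String> bySubject = NatsToSparkConnector
            .receiveFromNats(String.class, StorageLevel.MEMORY_ONLY())
            .withNatsURL("nats://localhost:4222")
            .withSubjects("sensor.>")
            .asStreamOfKeyValue(ssc);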
--------------------------------------------------------------------------------
/src/main/java/com/logimethods/connector/nats_spark/NatsSparkUtilities.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.nats_spark;
9 |
10 | import java.nio.ByteBuffer;
11 | import java.util.ArrayList;
12 | import java.util.Collection;
13 | import java.util.List;
14 |
15 | /**
16 | * A collection of Static Methods used by the NATS / Spark Connectors.
17 | * @author Laurent Magnin
18 | */
19 | public class NatsSparkUtilities {
20 |
21 | /**
22 | * @param elements, an arbitrary number of Strings
23 | * @return a list of all of those Strings
24 | */
25 | public static List<String> transformIntoAList(String... elements) {
26 | ArrayList<String> list = new ArrayList<String>(elements.length);
27 | for (String element: elements){
28 | list.add(element.trim());
29 | }
30 | return list;
31 | }
32 |
33 | /**
34 | * @param obj, any type of Java Object
35 | * @return a unique ID associated with that object
36 | */
37 | public static long generateUniqueID(Object obj) {
38 | return System.identityHashCode(obj) + Thread.currentThread().getId() + java.lang.System.currentTimeMillis();
39 | }
40 |
41 | /**
42 | * @return a unique ID
43 | */
44 | public static long generateUniqueID() {
45 | return Thread.currentThread().getId() + java.lang.System.currentTimeMillis();
46 | }
47 |
48 | /**
49 | * @param str, a String representing substrings separated by ','
50 | * @return a collection of all of those substrings
51 | */
52 | public static Collection<String> extractCollection(String str) {
53 | final String[] subjectsArray = str.split(",");
54 | return transformIntoAList(subjectsArray);
55 | }
56 |
57 | /**
58 | * @param obj, any kind of Object
59 | * @return an array of bytes encoding that object (only for the number types)
60 | * or the String representation of it (through the toString() method)
61 | * @see java.nio.ByteBuffer
62 | */
63 | public static byte[] encodeData(Object obj) {
64 | if (obj instanceof String) {
65 | return ((String) obj).getBytes();
66 | }
67 | if (obj instanceof Double) {
68 | return ByteBuffer.allocate(Double.BYTES).putDouble((Double) obj).array();
69 | }
70 | if (obj instanceof Float) {
71 | return ByteBuffer.allocate(Float.BYTES).putFloat((Float) obj).array();
72 | }
73 | if (obj instanceof Integer) {
74 | return ByteBuffer.allocate(Integer.BYTES).putInt((Integer) obj).array();
75 | }
76 | if (obj instanceof Long) {
77 | return ByteBuffer.allocate(Long.BYTES).putLong((Long) obj).array();
78 | }
79 | if (obj instanceof Byte) {
80 | return ByteBuffer.allocate(Byte.BYTES).put((Byte) obj).array();
81 | }
82 | if (obj instanceof Character) {
83 | return ByteBuffer.allocate(Character.BYTES).putChar((Character) obj).array();
84 | }
85 | if (obj instanceof Short) {
86 | return ByteBuffer.allocate(Short.BYTES).putShort((Short) obj).array();
87 | }
88 | return obj.toString().getBytes();
89 | }
90 |
91 | /**
92 | * @param type, the class of the object to decode
93 | * @param bytes, the content that represent the object to decode
94 | * @return the extracted object
95 | * @throws UnsupportedOperationException , raised when the expected type of the object is not a Number or a String
96 | * @see java.nio.ByteBuffer
97 | */
98 | @SuppressWarnings("unchecked")
99 | public static <X> X decodeData(Class<X> type, byte[] bytes) throws UnsupportedOperationException {
100 | if (type == String.class) {
101 | return (X) new String(bytes);
102 | }
103 | if ((type == Double.class) || (type == double.class)){
104 | final ByteBuffer buffer = ByteBuffer.wrap(bytes);
105 | return (X) new Double(buffer.getDouble());
106 | }
107 | if ((type == Float.class) || (type == float.class)){
108 | final ByteBuffer buffer = ByteBuffer.wrap(bytes);
109 | return (X) new Float(buffer.getFloat());
110 | }
111 | if ((type == Integer.class) || (type == int.class)){
112 | final ByteBuffer buffer = ByteBuffer.wrap(bytes);
113 | return (X) new Integer(buffer.getInt());
114 | }
115 | if ((type == Long.class) || (type == long.class)){
116 | final ByteBuffer buffer = ByteBuffer.wrap(bytes);
117 | return (X) new Long(buffer.getLong());
118 | }
119 | if ((type == Byte.class) || (type == byte.class)){
120 | final ByteBuffer buffer = ByteBuffer.wrap(bytes);
121 | return (X) new Byte(buffer.get());
122 | }
123 | if ((type == Character.class) || (type == char.class)){
124 | final ByteBuffer buffer = ByteBuffer.wrap(bytes);
125 | return (X) new Character(buffer.getChar());
126 | }
127 | if ((type == Short.class) || (type == short.class)){
128 | final ByteBuffer buffer = ByteBuffer.wrap(bytes);
129 | return (X) new Short(buffer.getShort());
130 | }
131 | throw new UnsupportedOperationException("It is not possible to extract Data of type " + type);
132 | }
133 | }
134 |
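A round-trip sketch of the two codec methods above. Only String and the number/character wrapper types are supported; any other class makes decodeData throw an UnsupportedOperationException:

    byte[] encoded = NatsSparkUtilities.encodeData(3.14d);            // an 8-byte big-endian double
    Double d = NatsSparkUtilities.decodeData(Double.class, encoded);  // 3.14

    byte[] text = NatsSparkUtilities.encodeData("payload");           // platform default charset, per String.getBytes()
    String s = NatsSparkUtilities.decodeData(String.class, text);     // "payload"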
--------------------------------------------------------------------------------
/src/main/java/com/logimethods/connector/nats/spark/test/NatsToSparkValidator.java:
--------------------------------------------------------------------------------
1 | package com.logimethods.connector.nats.spark.test;
2 |
3 | import java.io.Serializable;
4 | import java.util.concurrent.atomic.AtomicInteger;
5 |
6 | import org.apache.spark.api.java.JavaPairRDD;
7 | import org.apache.spark.api.java.JavaSparkContext;
8 | import org.apache.spark.api.java.function.PairFunction;
9 | import org.apache.spark.api.java.function.VoidFunction;
10 | import org.apache.spark.streaming.api.java.JavaDStream;
11 | import org.apache.spark.streaming.api.java.JavaPairDStream;
12 | import org.apache.spark.streaming.api.java.JavaStreamingContext;
13 | import org.apache.spark.util.LongAccumulator;
14 | import org.slf4j.Logger;
15 | import org.slf4j.LoggerFactory;
16 |
17 | import scala.Tuple2;
18 |
19 | public class NatsToSparkValidator implements Serializable {
20 | private static final long serialVersionUID = 1L;
21 |
22 | protected static String DEFAULT_SUBJECT_ROOT = "nats2sparkSubject";
23 | protected static int DEFAULT_SUBJECT_INR = 0;
24 | protected static String DEFAULT_SUBJECT;
25 | protected static JavaSparkContext sc;
26 | public static AtomicInteger TOTAL_COUNT = new AtomicInteger();
27 | protected static final Logger logger = LoggerFactory.getLogger(NatsToSparkValidator.class);
28 | protected static Boolean rightNumber = true;
29 | protected static Boolean atLeastSomeData = false;
30 | protected static String payload = null;
31 |
32 | /* protected void validateTheReceptionOfMessages(JavaStreamingContext ssc,
33 | JavaReceiverInputDStream<String> stream) throws InterruptedException {
34 | JavaDStream<String> messages = stream.repartition(3);
35 |
36 | ExecutorService executor = Executors.newFixedThreadPool(6);
37 |
38 | final int nbOfMessages = 5;
39 | NatsPublisher np = getNatsPublisher(nbOfMessages);
40 |
41 | if (logger.isDebugEnabled()) {
42 | messages.print();
43 | }
44 |
45 | messages.foreachRDD(new VoidFunction<JavaRDD<String>>() {
46 | private static final long serialVersionUID = 1L;
47 |
48 | @Override
49 | public void call(JavaRDD<String> rdd) throws Exception {
50 | logger.debug("RDD received: {}", rdd.collect());
51 |
52 | final long count = rdd.count();
53 | if ((count != 0) && (count != nbOfMessages)) {
54 | rightNumber = false;
55 | logger.error("The number of messages received should have been {} instead of {}.", nbOfMessages, count);
56 | }
57 |
58 | TOTAL_COUNT.getAndAdd((int) count);
59 |
60 | atLeastSomeData = atLeastSomeData || (count > 0);
61 |
62 | for (String str :rdd.collect()) {
63 | if (! str.startsWith(NatsPublisher.NATS_PAYLOAD)) {
64 | payload = str;
65 | }
66 | }
67 | }
68 | });
69 |
70 | closeTheValidation(ssc, executor, nbOfMessages, np);
71 | }
72 |
73 |
74 | protected void validateTheReceptionOfIntegerMessages(JavaStreamingContext ssc,
75 | JavaReceiverInputDStream<Integer> stream) throws InterruptedException {
76 | JavaDStream<Integer> messages = stream.repartition(3);
77 |
78 | ExecutorService executor = Executors.newFixedThreadPool(6);
79 |
80 | final int nbOfMessages = 5;
81 | NatsPublisher np = getNatsPublisher(nbOfMessages);
82 |
83 | // if (logger.isDebugEnabled()) {
84 | messages.print();
85 | // }
86 |
87 | messages.foreachRDD(new VoidFunction<JavaRDD<Integer>>() {
88 | private static final long serialVersionUID = 1L;
89 |
90 | @Override
91 | public void call(JavaRDD<Integer> rdd) throws Exception {
92 | logger.debug("RDD received: {}", rdd.collect());
93 | System.out.println("RDD received: " + rdd.collect());
94 | final long count = rdd.count();
95 | if ((count != 0) && (count != nbOfMessages)) {
96 | rightNumber = false;
97 | logger.error("The number of messages received should have been {} instead of {}.", nbOfMessages, count);
98 | }
99 |
100 | TOTAL_COUNT.getAndAdd((int) count);
101 |
102 | atLeastSomeData = atLeastSomeData || (count > 0);
103 |
104 | for (Integer value :rdd.collect()) {
105 | if (value < NatsPublisher.NATS_PAYLOAD_INT) {
106 | payload = value.toString();
107 | }
108 | }
109 | }
110 | });
111 |
112 | closeTheValidation(ssc, executor, nbOfMessages, np);
113 | }
114 | */
115 | public static void validateTheReceptionOfIntegerMessages(final JavaDStream<Integer> messages, final LongAccumulator count) {
116 | messages.count().foreachRDD(rdd -> rdd.foreach(n -> count.add(n)));
117 | }
118 |
119 | public static void validateTheReceptionOfMessages(final JavaPairDStream<String, String> messages, final LongAccumulator count) {
120 | messages.count().foreachRDD(rdd -> rdd.foreach(n -> count.add(n)));
121 | }
122 |
123 | public static void validateTheReceptionOfPairMessages(final JavaPairDStream<String, String> messages, final LongAccumulator accum) {
124 | // messages.print();
125 |
126 | JavaPairDStream<String, Integer> pairs = messages.mapToPair(s -> new Tuple2<>(s._1, 1));
127 | JavaPairDStream<String, Integer> counts = pairs.reduceByKey((a, b) -> a + b);
128 |
129 | // counts.print();
130 |
131 | counts.foreachRDD((VoidFunction<JavaPairRDD<String, Integer>>) pairRDD -> {
132 | pairRDD.foreach((VoidFunction<Tuple2<String, Integer>>) tuple -> {
133 | logger.info("{} RECEIVED", tuple);
134 | final long count = tuple._2;
135 | accum.add(count);
136 | });
137 | });
138 | }
139 |
140 |
141 | }
142 |
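The accumulator-based validators above are wired from the driver side of a test; a sketch (assumes an existing JavaStreamingContext ssc and a JavaDStream<Integer> named messages):

    // A LongAccumulator is obtained from the underlying SparkContext.
    LongAccumulator count = ssc.sparkContext().sc().longAccumulator("received");
    NatsToSparkValidator.validateTheReceptionOfIntegerMessages(messages, count);
    ssc.start();
    // ... after a few batches, count.value() holds the total number of received elements.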
--------------------------------------------------------------------------------
/src/test/java/com/logimethods/connector/spark/to_nats/SparkToNatsStreamingConnectorLifecycleTest.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.spark.to_nats;
9 |
10 | import static com.logimethods.connector.nats.spark.test.UnitTestUtilities.*;
11 | import static com.logimethods.connector.nats.spark.test.UnitTestUtilities.startStreamingServer;
12 | import static io.nats.client.Options.PROP_URL;
13 | import static org.junit.Assert.assertEquals;
14 | import static org.junit.Assert.assertTrue;
15 |
16 | import java.time.Duration;
17 | import java.util.List;
18 | import java.util.Properties;
19 | import java.util.concurrent.TimeUnit;
20 |
21 | import org.apache.log4j.Level;
22 | import org.apache.spark.streaming.api.java.JavaDStream;
23 | import org.junit.BeforeClass;
24 | import org.junit.Test;
25 | import org.slf4j.LoggerFactory;
26 |
27 | import com.logimethods.connector.nats.spark.test.NatsStreamingSubscriber;
28 | import com.logimethods.connector.nats.spark.test.SparkToNatsValidator;
29 | import com.logimethods.connector.nats.spark.test.TestClient;
30 | import com.logimethods.connector.nats.spark.test.UnitTestUtilities;
31 | import com.logimethods.connector.nats_spark.NatsSparkUtilities;
32 |
33 | //@Ignore
34 | @SuppressWarnings("serial")
35 | public class SparkToNatsStreamingConnectorLifecycleTest extends AbstractSparkToNatsConnectorTest {
36 |
37 | static final String clusterID = "test-cluster"; //"my_test_cluster";
38 |
39 | /**
40 | * @throws java.lang.Exception
41 | */
42 | @BeforeClass
43 | public static void setUpBeforeClass() throws Exception {
44 | // Enable tracing for debugging as necessary.
45 | Level level = Level.WARN;
46 | UnitTestUtilities.setLogLevel(SparkToNatsConnectorPool.class, level);
47 | UnitTestUtilities.setLogLevel(SparkToNatsConnector.class, level);
48 | UnitTestUtilities.setLogLevel(SparkToNatsStreamingConnectorImpl.class, level);
49 | UnitTestUtilities.setLogLevel(SparkToNatsStreamingConnectorLifecycleTest.class, level);
50 | UnitTestUtilities.setLogLevel(TestClient.class, level);
51 | UnitTestUtilities.setLogLevel("org.apache.spark", Level.WARN);
52 | UnitTestUtilities.setLogLevel("org.spark-project", Level.WARN);
53 |
54 | logger = LoggerFactory.getLogger(SparkToNatsStreamingConnectorLifecycleTest.class);
55 | }
56 |
57 | // @Test(timeout=360000) TODO
58 | public void testStaticSparkToNatsWithConnectionLifecycle() throws Exception {
59 | startStreamingServer(clusterID, false);
60 |
61 | long poolSize = SparkToNatsStreamingConnectorPool.poolSize();
62 |
63 | final List<String> data = UnitTestUtilities.getData();
64 |
65 | final String subject1 = "subject1";
66 |
67 | final String subject2 = "subject2";
68 |
69 | final int partitionsNb = 3;
70 | final JavaDStream<String> lines = dataSource.dataStream(ssc).repartition(partitionsNb);
71 | final JavaDStream<Integer> integers = SparkToNatsValidator.generateIntegers(lines);
72 | //- lines.map(str -> Integer.parseInt(str));
73 |
74 | final Properties properties = new Properties();
75 | properties.setProperty(PROP_URL, NATS_STREAMING_URL);
76 | SparkToNatsConnectorPool
77 | .newStreamingPool(clusterID)
78 | .withProperties(properties)
79 | .withConnectionTimeout(Duration.ofSeconds(2))
80 | .withSubjects(DEFAULT_SUBJECT, subject1, subject2)
81 | .publishToNats(integers);
82 |
83 | ssc.start();
84 |
85 | TimeUnit.SECONDS.sleep(1);
86 |
87 | final NatsStreamingSubscriber ns1 = UnitTestUtilities.getNatsStreamingSubscriber(data, subject1, clusterID, getUniqueClientName() + "_SUB1", NATS_STREAMING_LOCALHOST_URL);
88 | final NatsStreamingSubscriber ns2 = UnitTestUtilities.getNatsStreamingSubscriber(data, subject2, clusterID, getUniqueClientName() + "_SUB2", NATS_STREAMING_LOCALHOST_URL);
89 |
90 | dataSource.open();
91 |
92 | dataSource.write(data);
93 | // wait for the subscribers to complete.
94 | ns1.waitForCompletion();
95 | ns2.waitForCompletion();
96 |
97 | TimeUnit.MILLISECONDS.sleep(200);
98 | assertEquals("The connections Pool size should be the same as the number of Spark partitions",
99 | poolSize + partitionsNb, SparkToNatsStreamingConnectorPool.poolSize());
100 |
101 | final NatsStreamingSubscriber ns1p = UnitTestUtilities.getNatsStreamingSubscriber(data, subject1, clusterID, getUniqueClientName() + "_SUB1", NATS_STREAMING_LOCALHOST_URL);
102 | final NatsStreamingSubscriber ns2p = UnitTestUtilities.getNatsStreamingSubscriber(data, subject2, clusterID, getUniqueClientName() + "_SUB2", NATS_STREAMING_LOCALHOST_URL);
103 | dataSource.write(data);
104 | // wait for the subscribers to complete.
105 | ns1p.waitForCompletion();
106 | ns2p.waitForCompletion();
107 | TimeUnit.MILLISECONDS.sleep(800);
108 | assertEquals("The connections Pool size should be the same as the number of Spark partitions",
109 | poolSize + partitionsNb, SparkToNatsStreamingConnectorPool.poolSize());
110 |
111 | ssc.stop();
112 | ssc = null;
113 |
114 | dataSource.close();
115 |
116 | logger.debug("Spark Context Stopped");
117 |
118 | TimeUnit.SECONDS.sleep(5);
119 | logger.debug("After 5 sec delay");
120 |
121 | assertTrue("The poolSize() of " + SparkToNatsStreamingConnectorPool.connectorsPoolMap + " should have been reverted to its original value",
122 | SparkToNatsStreamingConnectorPool.poolSize() == poolSize);
123 | }
124 |
125 | static String getUniqueClientName() {
126 | return "clientName_" + NatsSparkUtilities.generateUniqueID();
127 | }
128 | }
129 |
--------------------------------------------------------------------------------
/src/main/java/com/logimethods/connector/nats/to_spark/OmnipotentStandardNatsToSparkConnector.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.nats.to_spark;
9 |
10 | import static io.nats.client.Options.PROP_URL;
11 |
12 | import java.io.IOException;
13 | import java.util.Collection;
14 | import java.util.Properties;
15 | import java.util.concurrent.TimeoutException;
16 |
17 | import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
18 | import org.apache.spark.storage.StorageLevel;
19 |
20 | import com.logimethods.connector.nats_spark.IncompleteException;
21 |
22 | import io.nats.client.Connection;
23 | import io.nats.client.Dispatcher;
24 | import io.nats.client.MessageHandler;
25 | import io.nats.client.Nats;
26 | import io.nats.client.Options;
27 |
28 | /**
29 | * A NATS to Spark Connector.
30 | *
31 | * It will transfer messages received from NATS into Spark data.
32 | *
33 | * That class extends {@link com.logimethods.connector.nats.to_spark.NatsToSparkConnector}<T,R,V>.
34 | */
35 | @SuppressWarnings("serial")
36 | public abstract class OmnipotentStandardNatsToSparkConnector<T extends OmnipotentStandardNatsToSparkConnector<T, R, V>, R, V> extends NatsToSparkConnector<T, R, V> {
37 |
38 | protected OmnipotentStandardNatsToSparkConnector(Class<V> type, Properties properties, StorageLevel storageLevel, String... subjects) {
39 | super(type, storageLevel, subjects);
40 | this.properties = properties;
41 | setNatsQueue();
42 | }
43 |
44 | protected OmnipotentStandardNatsToSparkConnector(Class<V> type, StorageLevel storageLevel, String... subjects) {
45 | super(type, storageLevel, subjects);
46 | setNatsQueue();
47 | }
48 |
49 | protected OmnipotentStandardNatsToSparkConnector(Class<V> type, Properties properties, StorageLevel storageLevel) {
50 | super(type, storageLevel);
51 | this.properties = properties;
52 | setNatsQueue();
53 | }
54 |
55 | protected OmnipotentStandardNatsToSparkConnector(Class<V> type, StorageLevel storageLevel) {
56 | super(type, storageLevel);
57 | setNatsQueue();
58 | }
59 |
60 | protected OmnipotentStandardNatsToSparkConnector(Class<V> type, StorageLevel storageLevel, Collection<String> subjects, Properties properties, String queue, String natsUrl) {
61 | super(type, storageLevel, subjects, properties, queue, natsUrl);
62 | }
63 |
64 | /**
65 | * @return a Key (the NATS Subject) / Value (the payload) flavour of this connector */
66 | protected StandardNatsToKeyValueSparkConnectorImpl<V> storedAsKeyValue() {
67 | return new StandardNatsToKeyValueSparkConnectorImpl<V>(type, storageLevel(), subjects, properties, natsQueue, natsUrl, dataDecoder, scalaDataDecoder);
68 | }
69 |
70 | protected Properties enrichedProperties;
71 |
72 | /** Create a socket connection and receive data until receiver is stopped
73 | * @throws IncompleteException
74 | * @throws TimeoutException
75 | * @throws IOException
76 | * @throws InterruptedException
77 | * @throws IllegalArgumentException
78 | * @throws IllegalStateException **/
79 | protected void receive() throws IncompleteException, IOException, TimeoutException, IllegalStateException, IllegalArgumentException, InterruptedException {
80 |
81 | // Make connection and initialize streams
82 | Connection connection;
83 | final Options options = new Options.Builder(getEnrichedProperties()).build();
84 | try {
85 | connection = Nats.connect(options);
86 | } catch (Exception e) {
87 | logger.error("Nats.connect({}) PRODUCES {}", ReflectionToStringBuilder.toString(options), e.getMessage());
88 | throw(e);
89 | }
90 | logger.info("A NATS from '{}' to Spark Connection has been created for '{}', sharing Queue '{}'.", connection.getConnectedUrl(), this, natsQueue);
91 |
92 | Runtime.getRuntime().addShutdownHook(new Thread(new Runnable(){
93 | @Override
94 | public void run() {
95 | logger.debug("Caught CTRL-C, shutting down gracefully..." + connection);
96 | try {
97 | Thread.sleep(500); // To allow the `dispatcher.unsubscribe(subject);` to be called
98 | if (connection != null ) connection.close();
99 | } catch (InterruptedException e) {
100 | logger.debug(e.getMessage());
101 | }
102 | }
103 | }));
104 |
105 | for (String subject: getSubjects()) {
106 | final Dispatcher dispatcher = connection.createDispatcher(getMessageHandler()).subscribe(subject, natsQueue);
107 | logger.info("Listening on {}.", subject);
108 |
109 | Runtime.getRuntime().addShutdownHook(new Thread(new Runnable(){
110 | @Override
111 | public void run() {
112 | try {
113 | logger.debug("Caught CTRL-C, shutting down gracefully..." + dispatcher);
114 | dispatcher.unsubscribe(subject);
115 | connection.closeDispatcher(dispatcher);
116 | } catch (IllegalStateException e) {
117 | if (logger.isDebugEnabled()) {
118 | logger.error("Exception while unsubscribing " + e.toString());
119 | }
120 | }
121 | }
122 | }));
123 | }
124 | }
125 |
126 | protected Properties getEnrichedProperties() throws IncompleteException {
127 | if (enrichedProperties == null) {
128 | enrichedProperties = getProperties();
129 | if (enrichedProperties == null) {
130 | enrichedProperties = new Properties();
131 | }
132 | if (natsUrl != null) {
133 | enrichedProperties.setProperty(PROP_URL, natsUrl);
134 | }
135 | }
136 | return enrichedProperties;
137 | }
138 |
139 | abstract protected MessageHandler getMessageHandler();
140 | }
141 |
142 |
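Note the precedence implemented by getEnrichedProperties() above: the explicit URL set through withNatsURL(...) overwrites whatever PROP_URL value the user-supplied Properties carry, and the merge happens only once (the result is cached in enrichedProperties). A sketch of that behaviour (URLs are illustrative):

    Properties props = new Properties();
    props.setProperty(Options.PROP_URL, "nats://fallback:4222");

    // For a connector built with .withProperties(props).withNatsURL("nats://explicit:4222"),
    // getEnrichedProperties().getProperty(Options.PROP_URL) yields "nats://explicit:4222".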
--------------------------------------------------------------------------------
/src/main/java/com/logimethods/connector/spark/to_nats/AbstractSparkToNatsConnector.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.spark.to_nats;
9 |
10 | import java.io.Serializable;
11 | import java.time.Duration;
12 | import java.util.ArrayList;
13 | import java.util.Arrays;
14 | import java.util.Collection;
15 | import java.util.Properties;
16 |
17 | import org.slf4j.Logger;
18 |
19 | import com.logimethods.connector.nats_spark.IncompleteException;
20 | import com.logimethods.connector.nats_spark.NatsSparkUtilities;
21 |
22 | import static com.logimethods.connector.nats_spark.Constants.*;
23 |
24 | abstract class AbstractSparkToNatsConnector<T> implements Serializable {
25 |
26 | /**
27 | *
28 | */
29 | private static final long serialVersionUID = 1L;
30 | protected transient Integer connectionSignature;
31 |
32 | /**
33 | *
34 | */
35 | protected AbstractSparkToNatsConnector() {
36 | super();
37 | }
38 |
39 | protected AbstractSparkToNatsConnector(String natsURL, Properties properties, Long connectionTimeout, String... subjects) {
40 | super();
41 | setProperties(properties);
42 | setSubjects(NatsSparkUtilities.transformIntoAList(subjects));
43 | setNatsURL(natsURL);
44 | setConnectionTimeout(connectionTimeout);
45 | }
46 |
47 | protected AbstractSparkToNatsConnector(String natsURL, Properties properties, Long connectionTimeout, Collection<String> subjects) {
48 | super();
49 | setProperties(properties);
50 | setSubjects(subjects);
51 | setNatsURL(natsURL);
52 | setConnectionTimeout(connectionTimeout);
53 | }
54 |
55 | protected abstract Logger getLogger();
56 |
57 | /**
58 | * @param properties the properties to set
59 | */
60 | protected abstract void setProperties(Properties properties);
61 | protected abstract Properties getProperties();
62 |
63 | /**
64 | * @param subjects the subjects to set
65 | */
66 | protected abstract void setSubjects(Collection<String> subjects);
67 | protected abstract Collection<String> getSubjects();
68 |
69 | /**
70 | * @param subjects the subjects to set
71 | */
72 | protected abstract void setNatsURL(String natsURL);
73 | protected abstract String getNatsURL();
74 |
75 | /**
76 | * @return the connectionTimeout
77 | */
78 | protected abstract Long getConnectionTimeout();
79 | /**
80 | * @param connectionTimeout the connectionTimeout to set
81 | */
82 | protected abstract void setConnectionTimeout(Long connectionTimeout);
83 |
84 | /**
85 | * @return the storedAsKeyValue
86 | */
87 | protected abstract boolean isStoredAsKeyValue();
88 | /**
89 | * @param storedAsKeyValue the storedAsKeyValue to set
90 | */
91 | protected abstract void setStoredAsKeyValue(boolean storedAsKeyValue);
92 |
93 | /**
94 | * @param properties, the properties to set
95 | * @return the connector itself
96 | */
97 | @SuppressWarnings("unchecked")
98 | public T withProperties(Properties properties) {
99 | setProperties(properties);
100 | return (T)this;
101 | }
102 |
103 | /**
104 | * @param subjects, the subjects to set
105 | * @return the connector itself
106 | */
107 | @SuppressWarnings("unchecked")
108 | public T withSubjects(String... subjects) {
109 | setSubjects(NatsSparkUtilities.transformIntoAList(subjects));
110 | return (T)this;
111 | }
112 |
113 | /**
114 | * @param natsURL, the NATS URL to set
115 | * @return the connector itself
116 | */
117 | @SuppressWarnings("unchecked")
118 | public T withNatsURL(String natsURL) {
119 | setNatsURL(natsURL);
120 | return (T)this;
121 | }
122 |
123 | /**
124 | * @param duration, the duration to set
125 | * @return the connector itself
126 | */
127 | @SuppressWarnings("unchecked")
128 | public T withConnectionTimeout(Duration duration) {
129 | setConnectionTimeout(duration.toNanos());
130 | return (T)this;
131 | }
132 |
133 | protected Collection<String> getDefinedSubjects() throws IncompleteException {
134 | if ((getSubjects() == null) || (getSubjects().size() == 0)) {
135 | final String subjectsStr = getProperties() != null ?
136 | getProperties().getProperty(PROP_SUBJECTS) :
137 | null;
138 | if (subjectsStr == null) {
139 | if (isStoredAsKeyValue()) {
140 | setSubjects(new ArrayList<String>(Arrays.asList("")));
141 | } else {
142 | throw new IncompleteException("" + this + " needs at least one NATS Subject.");
143 | }
144 | } else {
145 | setSubjects(NatsSparkUtilities.extractCollection(subjectsStr));
146 | getLogger().debug("Subject provided by the Properties: '{}'", getSubjects());
147 | }
148 | }
149 | return getSubjects();
150 | }
151 |
152 | protected int sparkToStandardNatsConnectionSignature(String natsURL, Properties properties, Collection<String> subjects, Long connectionTimeout) {
153 | final int prime = 31;
154 | int result = 1;
155 | result = prime * result + ((natsURL == null) ? 0 : natsURL.hashCode());
156 | result = prime * result + ((properties == null) ? 0 : properties.hashCode());
157 | result = prime * result + ((subjects == null) ? 0 : subjects.hashCode());
158 | result = prime * result + ((connectionTimeout == null) ? 0 : connectionTimeout.hashCode());
159 | return result;
160 | }
161 |
162 | protected int sparkToNatsStreamingConnectionSignature(String natsURL, Properties properties, Collection<String> subjects, Long connectionTimeout, String clusterID) {
163 | final int prime = 31;
164 | int result = 1 + sparkToStandardNatsConnectionSignature(natsURL, properties, subjects, connectionTimeout);
165 | result = prime * result + ((clusterID == null) ? 0 : clusterID.hashCode());
166 | return result;
167 | }
168 |
169 | protected abstract int computeConnectionSignature();
170 |
171 | /**
172 | * @return the connectionSignature
173 | */
174 | protected Integer getConnectionSignature() {
175 | if (connectionSignature == null) {
176 | connectionSignature = computeConnectionSignature();
177 | }
178 | return connectionSignature;
179 | }
180 |
181 | /**
182 | * @param connectionSignature the connectionSignature to set
183 | */
184 | protected void setConnectionSignature(Integer connectionSignature) {
185 | this.connectionSignature = connectionSignature;
186 | }
187 |
188 | }
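The two connectionSignature helpers above are what the connector pools key on: connectors configured with the same URL, Properties, subjects and timeout (plus, for streaming, the same cluster ID) hash to the same signature and can therefore share pooled connections. Note that withConnectionTimeout(Duration) stores the value via duration.toNanos(). A configuration sketch, using the fluent API the same way the lifecycle tests do (URL and subjects are illustrative):

    SparkToNatsConnectorPool
        .newStreamingPool("test-cluster")
        .withNatsURL("nats://localhost:4222")
        .withConnectionTimeout(Duration.ofSeconds(2)) // kept internally as 2_000_000_000L nanoseconds
        .withSubjects("subject1", "subject2")
        .publishToNats(integers);                     // integers: a JavaDStream<Integer>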
--------------------------------------------------------------------------------
/src/test/java/com/logimethods/connector/nats/to_spark/NatsStreamingToSparkWithAttributesTest.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.nats.to_spark;
9 |
10 | import static com.logimethods.connector.nats_spark.Constants.PROP_SUBJECTS;
11 | import static io.nats.client.Options.PROP_URL;
12 | import static org.junit.Assert.*;
13 |
14 | import java.time.Instant;
15 | import java.time.temporal.ChronoUnit;
16 | import java.util.Properties;
17 |
18 | import org.apache.spark.storage.StorageLevel;
19 | import org.junit.Test;
20 |
21 | import com.logimethods.connector.nats_spark.IncompleteException;
22 |
23 | import io.nats.streaming.SubscriptionOptions;
24 |
25 | import static com.logimethods.connector.nats.spark.test.UnitTestUtilities.* ;
26 |
27 | public class NatsStreamingToSparkWithAttributesTest {
28 | protected final static String CLUSTER_ID = "CLUSTER_ID";
29 | //- private static final String STAN_URL = "nats://localhost:" + NATS_STREAMING_PORT;
30 | private static final String ALT_NATS_STREAMING_URL = "nats://1.1.1.1:" + NATS_STREAMING_PORT;
31 | protected final static String DURABLE_NAME = "$DURABLE_NAME";
32 | protected final static Properties PROPERTIES = new Properties();
33 |
34 | static {
35 | PROPERTIES.setProperty(PROP_SUBJECTS, "sub1,sub3 , sub2");
36 | PROPERTIES.setProperty(PROP_URL, NATS_STREAMING_URL);
37 | }
38 |
39 | @Test
40 | public void testNatsStreamingToSparkConnectorImpl_0() throws IncompleteException {
41 | NatsStreamingToSparkConnectorImpl<String> connector =
42 | NatsToSparkConnector
43 | .receiveFromNatsStreaming(String.class, StorageLevel.MEMORY_ONLY(), CLUSTER_ID)
44 | .withProperties(PROPERTIES);
45 | assertTrue(connector instanceof NatsStreamingToSparkConnectorImpl);
46 | assertFalse(connector.keepConnectionDurable());
47 | assertEquals(connector.getNatsUrl().toString(), NATS_STREAMING_URL, connector.getNatsUrl());
48 | assertEquals(connector.getSubjects().toString(), 3, connector.getSubjects().size());
49 | }
50 |
51 | @Test
52 | public void testNatsStreamingToSparkConnectorImpl_1() throws IncompleteException {
53 | NatsStreamingToSparkConnectorImpl<String> connector =
54 | NatsToSparkConnector
55 | .receiveFromNatsStreaming(String.class, StorageLevel.MEMORY_ONLY(), CLUSTER_ID)
56 | .withNatsURL(ALT_NATS_STREAMING_URL)
57 | .withSubjects("sub1", "sub2");
58 | assertTrue(connector instanceof NatsStreamingToSparkConnectorImpl);
59 | assertEquals(ALT_NATS_STREAMING_URL, connector.natsUrl);
60 | assertEquals(connector.getSubjects().toString(), 2, connector.getSubjects().size());
61 | }
62 |
63 | @Test
64 | public void testNatsStreamingToSparkConnectorImpl_1_1() throws IncompleteException {
65 | NatsStreamingToSparkConnectorImpl<String> connector =
66 | NatsToSparkConnector
67 | .receiveFromNatsStreaming(String.class, StorageLevel.MEMORY_ONLY(), CLUSTER_ID)
68 | .withNatsURL(ALT_NATS_STREAMING_URL)
69 | .withSubjects("sub1", "sub2")
70 | .withProperties(PROPERTIES);
71 | assertTrue(connector instanceof NatsStreamingToSparkConnectorImpl);
72 | assertEquals(ALT_NATS_STREAMING_URL, connector.natsUrl);
73 | assertEquals(connector.getSubjects().toString(), 2, connector.getSubjects().size());
74 | }
75 |
76 | @Test
77 | public void testNatsStreamingToSparkConnectorImpl_2() {
78 | SubscriptionOptions.Builder optsBuilder = new SubscriptionOptions.Builder().durableName(DURABLE_NAME);
79 | NatsStreamingToSparkConnectorImpl<String> connector =
80 | NatsToSparkConnector
81 | .receiveFromNatsStreaming(String.class, StorageLevel.MEMORY_ONLY(), CLUSTER_ID)
82 | .withNatsURL(NATS_STREAMING_URL)
83 | .subscriptionOptionsBuilder(optsBuilder)
84 | .withSubjects("SUBJECT");
85 | assertTrue(connector instanceof NatsStreamingToSparkConnectorImpl);
86 | assertEquals(DURABLE_NAME, connector.getSubscriptionOptions().getDurableName());
87 | }
88 |
89 | @Test
90 | public void testNatsStreamingToSparkConnectorImpl_3() {
91 | NatsStreamingToSparkConnectorImpl<String> connector =
92 | NatsToSparkConnector
93 | .receiveFromNatsStreaming(String.class, StorageLevel.MEMORY_ONLY(), CLUSTER_ID)
94 | .withNatsURL(NATS_STREAMING_URL)
95 | .startWithLastReceived()
96 | .durableName(DURABLE_NAME)
97 | .withSubjects("SUBJECT");
98 | assertTrue(connector instanceof NatsStreamingToSparkConnectorImpl);
99 | assertTrue(connector.keepConnectionDurable());
100 | assertEquals(DURABLE_NAME, connector.getSubscriptionOptions().getDurableName());
101 | }
102 |
103 | @Test
104 | public void testNatsStreamingToSparkConnectorImpl_4() {
105 | final Instant start = Instant.now().minus(30, ChronoUnit.MINUTES);
106 | SubscriptionOptions.Builder optsBuilder = new SubscriptionOptions.Builder().durableName(DURABLE_NAME).startAtTime(start);
107 | final String newName = "NEW NAME";
108 | NatsStreamingToSparkConnectorImpl<String> connector =
109 | NatsToSparkConnector
110 | .receiveFromNatsStreaming(String.class, StorageLevel.MEMORY_ONLY(), CLUSTER_ID)
111 | .withNatsURL(NATS_STREAMING_URL)
112 | //.withProperties(PROPERTIES)
113 | .subscriptionOptionsBuilder(optsBuilder)
114 | .durableName(newName)
115 | .withSubjects("SUBJECT");
116 | assertTrue(connector instanceof NatsStreamingToSparkConnectorImpl);
117 | assertTrue(connector.keepConnectionDurable());
118 | assertEquals(newName, connector.getSubscriptionOptions().getDurableName());
119 | assertEquals(start, connector.getSubscriptionOptions().getStartTime());
120 | }
121 |
122 | @Test
123 | public void testNatsStreamingToSparkConnectorImpl_5() {
124 | final Instant start = Instant.now().minus(30, ChronoUnit.MINUTES);
125 | NatsStreamingToSparkConnectorImpl<String> connector =
126 | NatsToSparkConnector
127 | .receiveFromNatsStreaming(String.class, StorageLevel.MEMORY_ONLY(), CLUSTER_ID)
128 | .withNatsURL(NATS_STREAMING_URL)
129 | //.withProperties(PROPERTIES)
130 | .durableName(DURABLE_NAME)
131 | .startAtTime(start)
132 | .withSubjects("SUBJECT");
133 | assertTrue(connector instanceof NatsStreamingToSparkConnectorImpl);
134 | assertEquals(DURABLE_NAME, connector.getSubscriptionOptions().getDurableName());
135 | assertEquals(start, connector.getSubscriptionOptions().getStartTime());
136 | }
137 | }
138 |
--------------------------------------------------------------------------------
/src/test/java/com/logimethods/connector/nats/to_spark/NatsStreamingToKeyValueSparkWithAttributesTest.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.nats.to_spark;
9 |
10 | import static com.logimethods.connector.nats_spark.Constants.PROP_SUBJECTS;
11 | import static com.logimethods.connector.nats.spark.test.UnitTestUtilities.*;
12 | import static io.nats.client.Options.PROP_URL;
13 | import static org.junit.Assert.assertEquals;
14 | import static org.junit.Assert.assertTrue;
15 |
16 | import java.time.Instant;
17 | import java.time.temporal.ChronoUnit;
18 | import java.util.Properties;
19 |
20 | import org.apache.spark.storage.StorageLevel;
21 | import org.junit.Test;
22 |
23 | import com.logimethods.connector.nats_spark.IncompleteException;
24 |
25 | import io.nats.streaming.SubscriptionOptions;
26 |
27 | import static com.logimethods.connector.nats.spark.test.UnitTestUtilities.NATS_STREAMING_PORT;
28 |
29 | public class NatsStreamingToKeyValueSparkWithAttributesTest {
30 | protected final static String CLUSTER_ID = "CLUSTER_ID";
31 | // private static final String STAN_URL = "nats://localhost:" + NATS_STREAMING_PORT;
32 | private static final String ALT_NATS_STREAMING_URL = "nats://1.1.1.1:" + NATS_STREAMING_PORT;
33 | protected final static String DURABLE_NAME = "$DURABLE_NAME";
34 | protected final static Properties PROPERTIES = new Properties();
35 |
36 | static {
37 | PROPERTIES.setProperty(PROP_SUBJECTS, "sub1,sub3 , sub2");
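   | // Note: the subject list is comma-separated and surrounding whitespace is trimmed,
   | // so "sub1,sub3 , sub2" is expected to yield 3 subjects (see the size == 3 assertion below).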
38 | PROPERTIES.setProperty(PROP_URL, NATS_STREAMING_URL);
39 | }
40 |
41 | @Test
42 | public void testNatsStreamingToKeyValueSparkConnectorImpl_0() throws IncompleteException {
43 | NatsStreamingToKeyValueSparkConnectorImpl<String> connector =
44 | NatsToSparkConnector
45 | .receiveFromNatsStreaming(String.class, StorageLevel.MEMORY_ONLY(), CLUSTER_ID)
46 | .withProperties(PROPERTIES)
47 | .storedAsKeyValue();
48 | assertTrue(connector instanceof NatsStreamingToKeyValueSparkConnectorImpl);
49 | assertEquals(connector.getNatsUrl().toString(), NATS_STREAMING_URL, connector.getNatsUrl());
50 | assertEquals(connector.getSubjects().toString(), 3, connector.getSubjects().size());
51 | }
52 |
53 | @Test
54 | public void testNatsStreamingToKeyValueSparkConnectorImpl_1() throws IncompleteException {
55 | NatsStreamingToKeyValueSparkConnectorImpl<String> connector =
56 | NatsToSparkConnector
57 | .receiveFromNatsStreaming(String.class, StorageLevel.MEMORY_ONLY(), CLUSTER_ID)
58 | .withNatsURL(ALT_NATS_STREAMING_URL)
59 | .withSubjects("sub1", "sub2")
60 | .storedAsKeyValue();
61 | assertTrue(connector instanceof NatsStreamingToKeyValueSparkConnectorImpl);
62 | assertEquals(ALT_NATS_STREAMING_URL, connector.natsUrl);
63 | assertEquals(connector.getSubjects().toString(), 2, connector.getSubjects().size());
64 | }
65 |
66 | @Test
67 | public void testNatsStreamingToKeyValueSparkConnectorImpl_1_1() throws IncompleteException {
68 | NatsStreamingToKeyValueSparkConnectorImpl<String> connector =
69 | NatsToSparkConnector
70 | .receiveFromNatsStreaming(String.class, StorageLevel.MEMORY_ONLY(), CLUSTER_ID)
71 | .withNatsURL(ALT_NATS_STREAMING_URL)
72 | .withSubjects("sub1", "sub2")
73 | .withProperties(PROPERTIES)
74 | .storedAsKeyValue();
75 | assertTrue(connector instanceof NatsStreamingToKeyValueSparkConnectorImpl);
76 | assertEquals(ALT_NATS_STREAMING_URL, connector.natsUrl);
77 | assertEquals(connector.getSubjects().toString(), 2, connector.getSubjects().size());
78 | }
79 |
80 | @Test
81 | public void testNatsStreamingToKeyValueSparkConnectorImpl_2() {
82 | SubscriptionOptions.Builder optsBuilder = new SubscriptionOptions.Builder().durableName(DURABLE_NAME);
83 | NatsStreamingToKeyValueSparkConnectorImpl<String> connector =
84 | NatsToSparkConnector
85 | .receiveFromNatsStreaming(String.class, StorageLevel.MEMORY_ONLY(), CLUSTER_ID)
86 | .withNatsURL(NATS_STREAMING_URL)
87 | .subscriptionOptionsBuilder(optsBuilder)
88 | .withSubjects("SUBJECT")
89 | .storedAsKeyValue();
90 | assertTrue(connector instanceof NatsStreamingToKeyValueSparkConnectorImpl);
91 | assertEquals(DURABLE_NAME, connector.getSubscriptionOptions().getDurableName());
92 | }
93 |
94 | @Test
95 | public void testNatsStreamingToKeyValueSparkConnectorImpl_3() {
96 | NatsStreamingToKeyValueSparkConnectorImpl<String> connector =
97 | NatsToSparkConnector
98 | .receiveFromNatsStreaming(String.class, StorageLevel.MEMORY_ONLY(), CLUSTER_ID)
99 | .withNatsURL(NATS_STREAMING_URL)
100 | .startWithLastReceived()
101 | .durableName(DURABLE_NAME)
102 | .withSubjects("SUBJECT")
103 | .storedAsKeyValue();
104 | assertTrue(connector instanceof NatsStreamingToKeyValueSparkConnectorImpl);
105 | assertEquals(DURABLE_NAME, connector.getSubscriptionOptions().getDurableName());
106 | }
107 |
108 | @Test
109 | public void testNatsStreamingToKeyValueSparkConnectorImpl_4() {
110 | final Instant start = Instant.now().minus(30, ChronoUnit.MINUTES);
111 | SubscriptionOptions.Builder optsBuilder = new SubscriptionOptions.Builder().durableName(DURABLE_NAME).startAtTime(start);
112 | final String newName = "NEW NAME";
113 | NatsStreamingToKeyValueSparkConnectorImpl<String> connector =
114 | NatsToSparkConnector
115 | .receiveFromNatsStreaming(String.class, StorageLevel.MEMORY_ONLY(), CLUSTER_ID)
116 | .withNatsURL(NATS_STREAMING_URL)
117 | //.withProperties(PROPERTIES)
118 | .subscriptionOptionsBuilder(optsBuilder)
119 | .durableName(newName)
120 | .withSubjects("SUBJECT")
121 | .storedAsKeyValue();
122 | assertTrue(connector instanceof NatsStreamingToKeyValueSparkConnectorImpl);
123 | assertEquals(newName, connector.getSubscriptionOptions().getDurableName());
124 | assertEquals(start, connector.getSubscriptionOptions().getStartTime());
125 | }
126 |
127 | @Test
128 | public void testNatsStreamingToKeyValueSparkConnectorImpl_5() {
129 | final Instant start = Instant.now().minus(30, ChronoUnit.MINUTES);
130 | NatsStreamingToKeyValueSparkConnectorImpl<String> connector =
131 | NatsToSparkConnector
132 | .receiveFromNatsStreaming(String.class, StorageLevel.MEMORY_ONLY(), CLUSTER_ID)
133 | .withNatsURL(NATS_STREAMING_URL)
134 | //.withProperties(PROPERTIES)
135 | .durableName(DURABLE_NAME)
136 | .startAtTime(start)
137 | .withSubjects("SUBJECT")
138 | .storedAsKeyValue();
139 | assertTrue(connector instanceof NatsStreamingToKeyValueSparkConnectorImpl);
140 | assertEquals(DURABLE_NAME, connector.getSubscriptionOptions().getDurableName());
141 | assertEquals(start, connector.getSubscriptionOptions().getStartTime());
142 | }
143 | }
144 |
--------------------------------------------------------------------------------
/src/main/java/com/logimethods/connector/spark/to_nats/SparkToStandardNatsConnectorImpl.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.spark.to_nats;
9 |
10 | import static io.nats.client.Options.PROP_URL;
11 |
12 | import java.io.IOException;
13 | import java.util.Collection;
14 | import java.util.Date;
15 | import java.util.Properties;
16 | import java.util.concurrent.TimeoutException;
17 |
18 | import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
19 | import org.slf4j.Logger;
20 | import org.slf4j.LoggerFactory;
21 |
22 | import io.nats.client.Connection;
23 | import io.nats.client.Message;
24 | import io.nats.client.Nats;
25 | import io.nats.client.Options;
26 |
27 | class SparkToStandardNatsConnectorImpl extends SparkToNatsConnector<SparkToStandardNatsConnectorImpl> {
28 |
29 | /**
30 | *
31 | */
32 | private static final long serialVersionUID = 1L;
33 | protected static final Logger logger = LoggerFactory.getLogger(SparkToStandardNatsConnectorImpl.class);
34 | protected transient Connection connection;
35 |
36 | /**
37 |  * Default constructor: the connection parameters (URL, properties, subjects)
38 |  * are expected to be set afterwards.
39 |  */
40 | protected SparkToStandardNatsConnectorImpl() {
41 | super();
42 | }
43 |
44 | /**
45 |  * @param natsURL the NATS server URL
46 |  * @param properties the connection properties
47 |  * @param isStoredAsKeyValue true when the Spark records are (subject, payload) tuples
48 |  */
49 | protected SparkToStandardNatsConnectorImpl(String natsURL, Properties properties, Long connectionTimeout,
50 | Collection<String> subjects, boolean isStoredAsKeyValue) {
51 | super(natsURL, properties, connectionTimeout, subjects);
52 | setStoredAsKeyValue(isStoredAsKeyValue);
53 | }
54 |
55 | /**
56 |  * @param natsURL the NATS server URL
57 |  * @param subjects the NATS subjects to publish to
58 |  */
59 | protected SparkToStandardNatsConnectorImpl(String natsURL, Properties properties, Long connectionTimeout,
60 | String... subjects) {
61 | super(natsURL, properties, connectionTimeout, subjects);
62 | }
63 |
64 | /**
65 |  * Publishes the provided payload to NATS through each of the defined subjects.
66 |  * @param payload the bytes to publish to NATS.
67 |  * @throws Exception when no Connection or Subject is defined.
68 |  */
69 | @Override
70 | protected void publishToNats(byte[] payload) throws Exception {
71 | resetClosingTimeout();
72 |
73 | final Connection localConnection = getConnection();
74 | for (String subject : getDefinedSubjects()) {
75 | localConnection.publish(subject, payload);
76 |
77 | logger.trace("Send '{}' from Spark to NATS ({})", payload, subject);
78 | }
79 | }
80 |
81 | /**
82 |  * Publishes the payload to NATS on each defined subject combined with the post-subject.
83 |  * @param postSubject the subject suffix extracted from the Spark (key, value) record
84 |  * @param payload the bytes to publish to NATS
85 |  * @throws Exception when no Connection or Subject is defined
86 |  */
87 | @Override
88 | protected void publishToNats(String postSubject, byte[] payload) throws Exception {
89 | resetClosingTimeout();
90 |
91 | final Connection localConnection = getConnection();
92 | for (String preSubject : getDefinedSubjects()) {
93 | final String subject = combineSubjects(preSubject, postSubject);
94 | localConnection.publish(subject, payload);
95 |
96 | logger.trace("Send '{}' from Spark to NATS ({})", payload, subject);
97 | }
98 | }
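   | // An illustration of the pre/post subject combination above, as exercised by the
   | // key-value tests in this repository: with a defined subject of "ROOT." and a Spark
   | // key of "subject1.key1", combineSubjects() is expected to produce "ROOT.subject1.key1",
   | // which a subscriber listening to "ROOT.subject1.>" would receive.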
99 |
100 | protected synchronized Connection getConnection() throws Exception {
101 | if (connection == null) {
102 | connection = createConnection();
103 | }
104 | return connection;
105 | }
106 |
107 | protected Connection createConnection() throws IOException, TimeoutException, Exception {
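   | // Connection fallback order: explicit Properties first, then the configured NATS URL,
   | // and finally the NATS client's default URL (io.nats.client.Options.DEFAULT_URL).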
108 | final Connection newConnection =
109 | (getProperties() != null) ? natsConnect(new Options.Builder(getProperties()).build()) :
110 | (getNatsURL() != null ) ? natsConnect(getNatsURL()) :
111 | natsConnect();
112 |
113 | logger.debug("A NATS Connection {} has been created for {}", newConnection, this);
114 |
115 | Runtime.getRuntime().addShutdownHook(new Thread(new Runnable(){
116 | @Override
117 | public void run() {
118 | logger.debug("Caught CTRL-C, shutting down gracefully... " + this);
119 | try {
120 | newConnection.close();
121 | } catch (InterruptedException e) {
122 | logger.warn(e.getMessage());
123 | }
124 | }
125 | }));
126 | return newConnection;
127 | }
128 |
129 | private Connection natsConnect() throws IOException, InterruptedException {
130 | try {
131 | return Nats.connect();
132 | } catch (Exception e) {
133 | logger.error("Nats.connect({}) failed: {}", io.nats.client.Options.DEFAULT_URL, e.getMessage());
134 | throw(e);
135 | }
136 | }
137 |
138 | private Connection natsConnect(Options options) throws IOException, InterruptedException {
139 | try {
140 | return Nats.connect(options);
141 | } catch (Exception e) {
142 | logger.error("Nats.connect({}) failed: {}", ReflectionToStringBuilder.toString(options), e.getMessage());
143 | throw(e);
144 | }
145 | }
146 |
147 | private Connection natsConnect(String url) throws IOException, InterruptedException {
148 | try {
149 | return Nats.connect(url);
150 | } catch (Exception e) {
151 | logger.error("Nats.connect({}) failed: {}", url, e.getMessage());
152 | throw(e);
153 | }
154 | }
155 |
156 | @Override
157 | protected synchronized void closeConnection() {
158 | logger.debug("At {}, ready to close '{}' by {}", new Date().getTime(), connection, super.toString());
159 | removeFromPool();
160 |
161 | if (connection != null) {
162 | try {
163 | connection.close();
164 | } catch (InterruptedException e) {
165 | logger.warn(e.getMessage());
166 | }
167 | logger.debug("{} has been CLOSED by {}", connection, super.toString());
168 | connection = null;
169 | }
170 | }
171 |
172 | @Override
173 | protected void removeFromPool() {
174 | SparkToStandardNatsConnectorPool.removeConnectorFromPool(this);
175 | }
176 |
177 | protected String getsNatsUrlKey() {
178 | return PROP_URL;
179 | }
180 |
181 | @Override
182 | protected int computeConnectionSignature() {
183 | return sparkToStandardNatsConnectionSignature(natsURL, properties, subjects, connectionTimeout);
184 | }
185 |
186 | /* (non-Javadoc)
187 | * @see java.lang.Object#toString()
188 | */
189 | @Override
190 | public String toString() {
191 | return "SparkToStandardNatsConnectorImpl ["
192 | + internalId + " / "
193 | + super.toString() + " : "
194 | + "connection=" + connection + ", "
195 | + (properties != null ? "properties=" + properties + ", " : "")
196 | + (subjects != null ? "subjects=" + subjects + ", " : "")
197 | + (natsURL != null ? "natsURL=" + natsURL + ", " : "")
198 | + "storedAsKeyValue=" + storedAsKeyValue + "]";
199 | }
200 | }
201 |
--------------------------------------------------------------------------------
/src/test/java/com/logimethods/connector/spark/to_nats/SparkToStandardNatsConnectorPoolTest.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.spark.to_nats;
9 |
10 | import static com.logimethods.connector.nats.spark.test.UnitTestUtilities.*;
11 | import static com.logimethods.connector.nats_spark.Constants.PROP_SUBJECTS;
12 | import static com.logimethods.connector.nats_spark.NatsSparkUtilities.encodeData;
13 | import static io.nats.client.Options.PROP_URL;
14 |
15 | import java.io.File;
16 | import java.io.Serializable;
17 | import java.util.List;
18 | import java.util.Properties;
19 |
20 | import org.apache.log4j.Level;
21 | import org.apache.spark.api.java.JavaRDD;
22 | import org.apache.spark.api.java.function.Function;
23 | import org.apache.spark.api.java.function.PairFunction;
24 | import org.apache.spark.streaming.api.java.JavaDStream;
25 | import org.apache.spark.streaming.api.java.JavaPairDStream;
26 | import org.apache.spark.streaming.api.java.JavaStreamingContext;
27 | import org.junit.BeforeClass;
28 | import org.junit.Test;
29 | import org.slf4j.LoggerFactory;
30 |
31 | import com.logimethods.connector.nats.spark.test.SparkToNatsValidator;
32 | import com.logimethods.connector.nats.spark.test.StandardNatsSubscriber;
33 | import com.logimethods.connector.nats.spark.test.TestClient;
34 | import com.logimethods.connector.nats.spark.test.UnitTestUtilities;
35 | import com.logimethods.connector.nats_spark.NatsSparkUtilities;
36 | import com.logimethods.connector.spark.to_nats.SparkToNatsConnector;
37 | import com.logimethods.connector.spark.to_nats.SparkToNatsConnectorPool;
38 | import com.logimethods.connector.spark.to_nats.SparkToStandardNatsConnectorImpl;
39 |
40 | import scala.Tuple2;
41 |
42 | //@Ignore
43 | @SuppressWarnings("serial")
44 | public class SparkToStandardNatsConnectorPoolTest extends AbstractSparkToNatsConnectorTest {
45 |
46 | /**
47 | * @throws java.lang.Exception
48 | */
49 | @BeforeClass
50 | public static void setUpBeforeClass() throws Exception {
51 | // Enable tracing for debugging as necessary.
52 | Level level = Level.WARN;
53 | UnitTestUtilities.setLogLevel(SparkToNatsConnectorPool.class, level);
54 | UnitTestUtilities.setLogLevel(SparkToNatsConnector.class, level);
55 | UnitTestUtilities.setLogLevel(SparkToStandardNatsConnectorImpl.class, level);
56 | UnitTestUtilities.setLogLevel(SparkToStandardNatsConnectorPoolTest.class, level);
57 | UnitTestUtilities.setLogLevel(TestClient.class, level);
58 | UnitTestUtilities.setLogLevel("org.apache.spark", Level.WARN);
59 | UnitTestUtilities.setLogLevel("org.spark-project", Level.WARN);
60 |
61 | logger = LoggerFactory.getLogger(SparkToStandardNatsConnectorPoolTest.class);
62 |
63 | UnitTestUtilities.startDefaultServer();
64 | }
65 |
66 | @Test(timeout=360000)
67 | public void testStaticSparkToNatsIncludingMultipleSubjects() throws Exception {
68 | final List<String> data = UnitTestUtilities.getData();
69 |
70 | final String subject1 = "subject1";
71 | final StandardNatsSubscriber ns1 = UnitTestUtilities.getStandardNatsSubscriber(data, subject1, NATS_LOCALHOST_URL);
72 |
73 | final String subject2 = "subject2";
74 | final StandardNatsSubscriber ns2 = UnitTestUtilities.getStandardNatsSubscriber(data, subject2, NATS_LOCALHOST_URL);
75 |
76 | final JavaDStream<String> lines = dataSource.dataStream(ssc);
77 |
78 | SparkToNatsConnectorPool.newPool().withSubjects(DEFAULT_SUBJECT, subject1, subject2).withNatsURL(NATS_URL).publishToNats(lines);
79 |
80 | ssc.start();
81 |
82 | Thread.sleep(2000);
83 |
84 | writeFullData(data);
85 |
86 | // wait for the subscribers to complete.
87 | ns1.waitForCompletion();
88 | ns2.waitForCompletion();
89 | }
90 |
91 | @Test(timeout=360000)
92 | public void testStaticSparkStoredToNatsAsKeyValue() throws Exception {
93 | final List<String> data = UnitTestUtilities.getData();
94 |
95 | final String subject1 = "subject1";
96 | final StandardNatsSubscriber ns1 = UnitTestUtilities.getStandardNatsSubscriber(data, subject1 + ".>", NATS_LOCALHOST_URL);
97 |
98 | JavaPairDStream<String, String> keyValues = SparkToNatsValidator.getJavaPairDStream(dataSource.dataStream(ssc), ssc, subject1);
99 |
100 | SparkToNatsConnectorPool.newPool()
101 | .withNatsURL(NATS_URL)
102 | .publishToNatsAsKeyValue(keyValues, SparkToNatsValidator.getBytes);
103 | // .publishToNatsAsKeyValue(keyValues, (java.util.function.Function<String, byte[]> & Serializable) str -> str.getBytes());
104 |
105 | ssc.start();
106 |
107 | Thread.sleep(2000);
108 |
109 | writeFullData(data);
110 |
111 | // wait for the subscribers to complete.
112 | ns1.waitForCompletion();
113 | }
114 |
115 | @Test(timeout=360000)
116 | public void testStaticSparkToNatsWithMultipleSubjects() throws Exception {
117 | final List<String> data = UnitTestUtilities.getData();
118 |
119 | final String subject1 = "subject1";
120 | final StandardNatsSubscriber ns1 = UnitTestUtilities.getStandardNatsSubscriber(data, subject1, NATS_LOCALHOST_URL);
121 |
122 | final String subject2 = "subject2";
123 | final StandardNatsSubscriber ns2 = UnitTestUtilities.getStandardNatsSubscriber(data, subject2, NATS_LOCALHOST_URL);
124 |
125 | final JavaDStream<String> lines = dataSource.dataStream(ssc);
126 |
127 | SparkToNatsConnectorPool.newPool().withSubjects(DEFAULT_SUBJECT, subject1, subject2).withNatsURL(NATS_URL).publishToNats(lines);
128 |
129 | ssc.start();
130 |
131 | Thread.sleep(2000);
132 |
133 | writeFullData(data);
134 |
135 | // wait for the subscribers to complete.
136 | ns1.waitForCompletion();
137 | ns2.waitForCompletion();
138 | }
139 |
140 | @Test(timeout=360000)
141 | public void testStaticSparkToNatsWithMultipleProperties() throws Exception {
142 | final List<String> data = UnitTestUtilities.getData();
143 |
144 | final String subject1 = "subject1";
145 | final StandardNatsSubscriber ns1 = UnitTestUtilities.getStandardNatsSubscriber(data, subject1, NATS_LOCALHOST_URL);
146 |
147 | final String subject2 = "subject2";
148 | final StandardNatsSubscriber ns2 = UnitTestUtilities.getStandardNatsSubscriber(data, subject2, NATS_LOCALHOST_URL);
149 |
150 | final JavaDStream<String> lines = dataSource.dataStream(ssc);
151 |
152 | final Properties properties = new Properties();
153 | properties.setProperty(PROP_URL, NATS_URL);
154 | properties.setProperty(PROP_SUBJECTS, subject1+","+DEFAULT_SUBJECT+" , "+subject2);
155 |
156 | SparkToNatsConnectorPool.newPool().withProperties(properties).publishToNats(lines);
157 |
158 | ssc.start();
159 |
160 | Thread.sleep(2000);
161 |
162 | writeFullData(data);
163 |
164 | // wait for the subscribers to complete.
165 | ns1.waitForCompletion();
166 | ns2.waitForCompletion();
167 | }
168 | }
169 |
--------------------------------------------------------------------------------
/src/main/java/com/logimethods/connector/spark/to_nats/SparkToNatsStreamingConnectorImpl.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.spark.to_nats;
9 |
10 | import static io.nats.client.Options.PROP_URL;
11 |
12 | import java.io.IOException;
13 | import java.util.Collection;
14 | import java.util.Date;
15 | import java.util.Properties;
16 | import java.util.concurrent.TimeoutException;
17 |
18 | import org.slf4j.Logger;
19 | import org.slf4j.LoggerFactory;
20 |
21 | import com.logimethods.connector.nats_spark.NatsSparkUtilities;
22 |
23 | import io.nats.streaming.AckHandler;
24 | import io.nats.streaming.NatsStreaming;
25 | import io.nats.streaming.Options;
26 | import io.nats.streaming.StreamingConnection;
27 |
28 | class SparkToNatsStreamingConnectorImpl extends SparkToNatsConnector<SparkToNatsStreamingConnectorImpl> implements AckHandler {
29 |
30 | /**
31 | *
32 | */
33 | private static final long serialVersionUID = 1L;
34 | protected static final Logger logger = LoggerFactory.getLogger(SparkToNatsStreamingConnectorImpl.class);
35 | protected final String clusterID;
36 | protected final static String CLIENT_ID_ROOT = "SparkToNatsStreamingConnector_";
37 | protected transient String clientID;
38 | protected Options.Builder optionsBuilder;
39 | protected transient StreamingConnection connection;
40 |
41 | /**
42 | *
43 | protected SparkToNatsStreamingConnectorImpl(String clusterID) {
44 | super();
45 | this.clusterID = clusterID;
46 | }
47 | */
48 |
49 | /**
50 |  * @param clusterID the NATS Streaming cluster ID
51 |  * @param natsURL the NATS server URL
52 |  * @param isStoredAsKeyValue true when the Spark records are (subject, payload) tuples
53 |  */
54 | protected SparkToNatsStreamingConnectorImpl(String clusterID, String natsURL, Properties properties,
55 | Long connectionTimeout, Options.Builder optionsBuilder, Collection<String> subjects, boolean isStoredAsKeyValue) {
56 | super(natsURL, properties, connectionTimeout, subjects);
57 | this.optionsBuilder = optionsBuilder;
58 | this.clusterID = clusterID;
59 | setStoredAsKeyValue(isStoredAsKeyValue);
60 | }
61 |
62 | /**
63 |  * @param clusterID the NATS Streaming cluster ID
64 |  * @param subjects the NATS subjects to publish to
65 |  */
66 | protected SparkToNatsStreamingConnectorImpl(String clusterID, String natsURL, Properties properties, Long connectionTimeout, Options.Builder optionsBuilder, String... subjects) {
67 | super(natsURL, properties, connectionTimeout, subjects);
68 | this.optionsBuilder = optionsBuilder;
69 | this.clusterID = clusterID;
70 | }
71 |
72 | /**
73 | * @return the clientID
74 | */
75 | protected String getClientID() {
76 | if (clientID == null ) {
77 | clientID = CLIENT_ID_ROOT + NatsSparkUtilities.generateUniqueID(this);
78 | }
79 | return clientID;
80 | }
81 |
82 | @Override
83 | protected void publishToNats(byte[] payload) throws Exception {
84 | resetClosingTimeout();
85 |
86 | final StreamingConnection localConnection = getConnection();
87 | for (String subject : getDefinedSubjects()) {
88 | localConnection.publish(subject, payload);
89 |
90 | logger.trace("Publish '{}' from Spark to NATS STREAMING ({})", payload, subject);
91 | }
92 | }
93 |
94 | @Override
95 | protected void publishToNats(String postSubject, byte[] payload) throws Exception {
96 | resetClosingTimeout();
97 |
98 | logger.debug("Received '{}' from Spark with '{}' Subject", payload, postSubject);
99 |
100 | final StreamingConnection localConnection = getConnection();
101 | for (String preSubject : getDefinedSubjects()) {
102 | final String subject = combineSubjects(preSubject, postSubject);
103 | localConnection.publish(subject, payload);
104 |
105 | logger.trace("Publish '{}' from Spark to NATS STREAMING ({})", payload, subject);
106 | }
107 | }
108 |
109 | // The ack handler will be invoked when a publish acknowledgement is received
110 | // @See https://github.com/nats-io/java-nats-streaming#asynchronous-publishing
111 | public void onAck(String guid, Exception err) {
112 | if (err != null) {
113 | logger.error("Error publishing msg id %s: %s\n", guid, err.getMessage());
114 | } else {
115 | logger.trace("Received ack for msg id %s\n", guid);
116 | }
117 | }
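    | /* A minimal sketch (not part of the original flow) of how this AckHandler could be
    |  * wired into an asynchronous publish: StreamingConnection.publish(subject, payload, ackHandler)
    |  * returns the message GUID immediately, and onAck() is invoked once the server acknowledges.
    |  *
    |  *   String guid = getConnection().publish("some.subject", payload, this);
    |  *   // ... onAck(guid, err) will fire later on the connection's callback thread.
    |  */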
118 |
119 | protected synchronized StreamingConnection getConnection() throws Exception {
120 | if (connection == null) {
121 | connection = createConnection();
122 | }
123 | return connection;
124 | }
125 |
126 | protected Options.Builder getOptionsBuilder() throws Exception {
127 | if (optionsBuilder == null) {
128 | optionsBuilder = new Options.Builder().natsUrl(getNatsURL());
129 | }
130 | return optionsBuilder;
131 | }
132 |
133 | protected StreamingConnection createConnection() throws IOException, TimeoutException, Exception {
134 | StreamingConnection newConnection;
135 | final Options options = getOptionsBuilder().build();
136 | try {
137 | newConnection = NatsStreaming.connect(clusterID, getClientID(), options);
138 | } catch (Exception e) {
139 | logger.error("NatsStreaming.connect({}, {}, {}) PRODUCES {}", clusterID, getClientID(), options, e.getMessage());
140 | throw(e);
141 | }
142 | logger.debug("A NATS Connection {} has been created for {}", newConnection, this);
143 |
144 | Runtime.getRuntime().addShutdownHook(new Thread(new Runnable(){
145 | @Override
146 | public void run() {
147 | logger.debug("Caught CTRL-C, shutting down gracefully..." + this);
148 | try {
149 | newConnection.close();
150 | } catch (IOException | TimeoutException | InterruptedException e) {
151 | if (logger.isDebugEnabled()) {
152 | logger.error("Exception while unsubscribing " + e.toString());
153 | }
154 | }
155 | }
156 | }));
157 | return newConnection;
158 | }
159 |
160 | @Override
161 | protected synchronized void closeConnection() {
162 | logger.debug("At {}, ready to close '{}' by {}", new Date().getTime(), connection, super.toString());
163 | removeFromPool();
164 |
165 | if (connection != null) {
166 | try {
167 | connection.close();
168 | logger.debug("{} has been CLOSED by {}", connection, super.toString());
169 | } catch (IOException | TimeoutException | InterruptedException e) {
170 | if (logger.isDebugEnabled()) {
171 | logger.error("Exception while closing the connection: {} by {}", e, this);
172 | }
173 | }
174 | connection = null;
175 | }
176 | }
177 |
178 | @Override
179 | protected void removeFromPool() {
180 | SparkToNatsStreamingConnectorPool.removeConnectorFromPool(this);
181 | }
182 |
183 | protected String getsNatsUrlKey() {
184 | return PROP_URL;
185 | }
186 |
187 | @Override
188 | protected int computeConnectionSignature() {
189 | return sparkToNatsStreamingConnectionSignature(natsURL, properties, subjects, connectionTimeout, clusterID);
190 | }
191 |
192 | /* (non-Javadoc)
193 | * @see java.lang.Object#toString()
194 | */
195 | @Override
196 | public String toString() {
197 | return "SparkToNatsStreamingConnectorImpl [" + Integer.toHexString(hashCode()) + " : "
198 | + (clusterID != null ? "clusterID=" + clusterID + ", " : "")
199 | + (clientID != null ? "clientID=" + clientID + ", " : "")
200 | + (optionsBuilder != null ? "optionsBuilder=" + optionsBuilder + ", " : "")
201 | + (connection != null ? "connection=" + connection + ", " : "")
202 | + (properties != null ? "properties=" + properties + ", " : "")
203 | + (subjects != null ? "subjects=" + subjects + ", " : "")
204 | + (natsURL != null ? "natsURL=" + natsURL + ", " : "")
205 | + "storedAsKeyValue=" + storedAsKeyValue + "]";
206 | }
207 | }
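
/* A hedged usage sketch, based on the pool API exercised by this repository's tests:
 * the connector above is typically obtained through a SparkToNatsConnectorPool rather
 * than instantiated directly, e.g.
 *
 *   SparkToNatsConnectorPool
 *       .newStreamingPool("test-cluster")
 *       .withNatsURL(NATS_STREAMING_URL)
 *       .withSubjects("subject1")
 *       .publishToNats(stream);   // stream: a JavaDStream<String>
 */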
208 |
--------------------------------------------------------------------------------
/src/test/java/org/apache/spark/examples/streaming/JavaCustomReceiver.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Licensed to the Apache Software Foundation (ASF) under one or more
3 | * contributor license agreements. See the NOTICE file distributed with
4 | * this work for additional information regarding copyright ownership.
5 | * The ASF licenses this file to You under the Apache License, Version 2.0
6 | * (the "License"); you may not use this file except in compliance with
7 | * the License. You may obtain a copy of the License at
8 | *
9 | * http://www.apache.org/licenses/LICENSE-2.0
10 | *
11 | * Unless required by applicable law or agreed to in writing, software
12 | * distributed under the License is distributed on an "AS IS" BASIS,
13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | * See the License for the specific language governing permissions and
15 | * limitations under the License.
16 | */
17 |
18 | package org.apache.spark.examples.streaming;
19 |
20 | import com.google.common.io.Closeables;
21 |
22 | import org.apache.log4j.Level;
23 | import org.apache.log4j.Logger;
24 | import org.apache.spark.SparkConf;
25 | import org.apache.spark.api.java.JavaPairRDD;
26 | import org.apache.spark.api.java.function.FlatMapFunction;
27 | import org.apache.spark.api.java.function.Function2;
28 | import org.apache.spark.api.java.function.PairFunction;
29 | import org.apache.spark.storage.StorageLevel;
30 | import org.apache.spark.streaming.Duration;
31 | import org.apache.spark.streaming.api.java.JavaDStream;
32 | import org.apache.spark.streaming.api.java.JavaPairDStream;
33 | import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
34 | import org.apache.spark.streaming.api.java.JavaStreamingContext;
35 | import org.apache.spark.streaming.receiver.Receiver;
36 | import scala.Tuple2;
37 |
38 | import java.io.BufferedReader;
39 | import java.io.InputStreamReader;
40 | import java.net.ConnectException;
41 | import java.net.Socket;
42 | import java.nio.ByteBuffer;
43 | import java.nio.charset.StandardCharsets;
44 | import java.util.Arrays;
45 | import java.util.Iterator;
46 | import java.util.regex.Pattern;
47 |
48 | /**
49 | * Custom Receiver that receives data over a socket. Received bytes are interpreted as
50 | * text and \n delimited lines are considered as records. They are then counted and printed.
51 | *
52 | * Usage: JavaCustomReceiver <master> <hostname> <port>
53 | *   <master> is the Spark master URL. In local mode, <master> should be 'local[n]' with n > 1.
54 | *   <hostname> and <port> of the TCP server that Spark Streaming would connect to receive data.
55 | *
56 | * To run this on your local machine, you need to first run a Netcat server
57 | * `$ nc -lk 9999`
58 | * and then run the example
59 | * `$ bin/run-example org.apache.spark.examples.streaming.JavaCustomReceiver localhost 9999`
60 | */
61 |
62 | @Deprecated
63 | public class JavaCustomReceiver extends Receiver<Tuple2<String, Long>> {
64 | private static final Pattern SPACE = Pattern.compile(" ");
65 |
66 | public static void main(String[] args) throws Exception {
67 | if (args.length < 2) {
68 | System.err.println("Usage: JavaCustomReceiver ");
69 | System.exit(1);
70 | }
71 |
72 | // StreamingExamples.setStreamingLogLevels();
73 | // https://github.com/apache/spark/blob/39e2bad6a866d27c3ca594d15e574a1da3ee84cc/examples/src/main/scala/org/apache/spark/examples/streaming/StreamingExamples.scala
74 | boolean log4jInitialized = Logger.getRootLogger().getAllAppenders().hasMoreElements();
75 | if (!log4jInitialized) {
76 | // We first log something to initialize Spark's default logging, then we override the
77 | // logging level.
78 | /// logInfo("Setting log level to [WARN] for streaming example." +
79 | /// " To override add a custom log4j.properties to the classpath.")
80 | Logger.getRootLogger().setLevel(Level.WARN);
81 | }
82 |
83 | // Create the context with a 5 second batch interval
84 | SparkConf sparkConf = new SparkConf().setAppName("JavaCustomReceiver").setMaster("local[*]").set("spark.driver.host", "localhost"); // https://issues.apache.org/jira/browse/
85 | JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, new Duration(5000));
86 |
87 | // Create an input stream with the custom receiver on target ip:port and count the
88 | // words in input stream of \n delimited text (eg. generated by 'nc')
89 | final JavaReceiverInputDStream<Tuple2<String, Long>> receiverStream = ssc.receiverStream(new JavaCustomReceiver(args[0], Integer.parseInt(args[1])));
90 | PairFunction<Tuple2<String, Long>, String, Long> mapFunction = new PairFunction<Tuple2<String, Long>, String, Long>() {
91 |
92 | @Override
93 | public Tuple2<String, Long> call(Tuple2<String, Long> arg0) throws Exception {
94 | return arg0;
95 | }
96 |
97 | };
98 | final JavaPairDStream<String, Long> keyValues = receiverStream.mapToPair(mapFunction);
99 |
100 | JavaPairDStream<String, Long> byKeys = keyValues.reduceByKey((a, b) -> a + b);
101 | byKeys.print();
102 |
103 | /* JavaDStream words = lines.flatMap(new FlatMapFunction() {
104 | @Override
105 | public Iterable call(String x) {
106 | // return Arrays.asList(SPACE.split(x)).iterator();
107 | return Arrays.asList(SPACE.split(x));
108 | }
109 | });
110 | JavaPairDStream wordCounts = words.mapToPair(
111 | new PairFunction() {
112 | @Override public Tuple2 call(String s) {
113 | return new Tuple2<>(s, 1);
114 | }
115 | }).reduceByKey(new Function2() {
116 | @Override
117 | public Integer call(Integer i1, Integer i2) {
118 | return i1 + i2;
119 | }
120 | });
121 |
122 | wordCounts.print();*/
123 | ssc.start();
124 | ssc.awaitTermination();
125 | }
126 |
127 | // ============= Receiver code that receives data over a socket ==============
128 |
129 | String host = null;
130 | int port = -1;
131 |
132 | public JavaCustomReceiver(String host_ , int port_) {
133 | super(StorageLevel.MEMORY_AND_DISK_2());
134 | host = host_;
135 | port = port_;
136 | }
137 |
138 | public void onStart() {
139 | // Start the thread that receives data over a connection
140 | new Thread() {
141 | @Override public void run() {
142 | receive();
143 | }
144 | }.start();
145 | }
146 |
147 | public void onStop() {
148 | // There is nothing much to do as the thread calling receive()
149 | // is designed to stop by itself once isStopped() returns true
150 | }
151 |
152 | /** Create a socket connection and receive data until receiver is stopped */
153 | private void receive() {
154 | try {
155 | Socket socket = null;
156 | BufferedReader reader = null;
157 | String userInput = null;
158 | try {
159 | // connect to the server
160 | socket = new Socket(host, port);
161 | reader = new BufferedReader(
162 | new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8));
163 | // Until stopped or connection broken continue reading
164 | while (!isStopped() && (userInput = reader.readLine()) != null) {
165 | System.out.println("Received data '" + userInput + "'");
166 | // store(userInput);
167 |
168 | /** --------------------------------- **/
169 | //final ByteBuffer bytes = ByteBuffer.wrap(userInput.getBytes());
170 | //final ByteBuffer bytes = ByteBuffer.allocateDirect(userInput.length());
171 | //bytes.put(userInput.getBytes());
172 | store(new Tuple2<String, Long>(userInput, 1L));
173 | }
174 | } finally {
175 | Closeables.close(reader, /* swallowIOException = */ true);
176 | Closeables.close(socket, /* swallowIOException = */ true);
177 | }
178 | // Restart in an attempt to connect again when server is active again
179 | restart("Trying to connect again");
180 | } catch(ConnectException ce) {
181 | // restart if could not connect to server
182 | restart("Could not connect", ce);
183 | } catch(Throwable t) {
184 | restart("Error receiving data", t);
185 | }
186 | }
187 | }
--------------------------------------------------------------------------------
/src/test/java/com/logimethods/connector/nats/to_spark/AbstractNatsToSparkTest.java:
--------------------------------------------------------------------------------
1 | package com.logimethods.connector.nats.to_spark;
2 |
3 | import static org.junit.Assert.assertEquals;
4 |
5 | import java.util.concurrent.ExecutorService;
6 | import java.util.concurrent.Executors;
7 |
8 | import org.apache.log4j.Level;
9 | import org.apache.spark.SparkConf;
10 | import org.apache.spark.api.java.JavaRDD;
11 | import org.apache.spark.api.java.JavaSparkContext;
12 | import org.apache.spark.api.java.function.VoidFunction;
13 | import org.apache.spark.streaming.api.java.JavaDStream;
14 | import org.apache.spark.streaming.api.java.JavaPairDStream;
15 | import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
16 | import org.apache.spark.streaming.api.java.JavaStreamingContext;
17 | import org.apache.spark.util.LongAccumulator;
18 | import org.junit.After;
19 | import org.junit.AfterClass;
20 | import org.junit.Before;
21 | import org.junit.BeforeClass;
22 | import org.slf4j.Logger;
23 | import org.slf4j.LoggerFactory;
24 |
25 | import com.logimethods.connector.nats.spark.test.NatsPublisher;
26 | import com.logimethods.connector.nats.spark.test.NatsToSparkValidator;
27 | import com.logimethods.connector.nats.spark.test.TestClient;
28 | import com.logimethods.connector.nats.spark.test.UnitTestUtilities;
29 | import com.logimethods.connector.nats.to_spark.api.StandardNatsToSparkConnectorTest;
30 |
31 | public abstract class AbstractNatsToSparkTest {
32 |
33 | protected static String DEFAULT_SUBJECT_ROOT = "nats2sparkSubject";
34 | protected static int DEFAULT_SUBJECT_INR = 0;
35 | protected static String DEFAULT_SUBJECT;
36 | protected static JavaSparkContext sc;
37 | // protected static AtomicInteger TOTAL_COUNT = new AtomicInteger();
38 | protected static Logger logger = null;
39 | protected static Boolean rightNumber = true;
40 | protected static Boolean atLeastSomeData = false;
41 | protected static String payload = null;
42 |
43 | /**
44 | * @throws java.lang.Exception
45 | */
46 | @BeforeClass
47 | public static void setUpBeforeClass() throws Exception {
48 | // Enable tracing for debugging as necessary.
49 | Level level = Level.WARN;
50 | UnitTestUtilities.setLogLevel(NatsToSparkConnector.class, level);
51 | UnitTestUtilities.setLogLevel(StandardNatsToSparkConnectorTest.class, level);
52 | UnitTestUtilities.setLogLevel(TestClient.class, level);
53 | UnitTestUtilities.setLogLevel("org.apache.spark", level);
54 | UnitTestUtilities.setLogLevel("org.spark-project", level);
55 |
56 | logger = LoggerFactory.getLogger(StandardNatsToSparkConnectorTest.class);
57 |
58 | UnitTestUtilities.startDefaultServer();
59 | }
60 |
61 | /**
62 | * @throws java.lang.Exception
63 | */
64 | @AfterClass
65 | public static void tearDownAfterClass() throws Exception {
66 | UnitTestUtilities.stopDefaultServer();
67 | }
68 |
69 | /**
70 | * @throws java.lang.Exception
71 | */
72 | @Before
73 | public void setUp() throws Exception {
74 | // assertTrue(logger.isDebugEnabled());
75 | // assertTrue(LoggerFactory.getLogger(NatsToSparkConnector.class).isTraceEnabled());
76 |
77 | // To avoid "Only one StreamingContext may be started in this JVM. Currently running StreamingContext was started at .../..."
78 | Thread.sleep(500);
79 |
80 | DEFAULT_SUBJECT = DEFAULT_SUBJECT_ROOT + (DEFAULT_SUBJECT_INR++);
81 | NatsToSparkValidator.TOTAL_COUNT.set(0);
82 |
83 | rightNumber = true;
84 | atLeastSomeData = false;
85 |
86 | // https://stackoverflow.com/questions/41864985/hadoop-ioexception-failure-to-login
87 | // UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser("sparkuser"));
88 |
89 | SparkConf sparkConf =
90 | UnitTestUtilities.newSparkConf()
91 | .setAppName("My Spark Job");
92 | sc = new JavaSparkContext(sparkConf);
93 | }
94 |
95 | /**
96 | * @throws java.lang.Exception
97 | */
98 | @After
99 | public void tearDown() throws Exception {
100 | if (sc != null)
101 | sc.stop();
102 | }
103 |
104 | protected void validateTheReceptionOfMessages(JavaStreamingContext ssc,
105 | JavaReceiverInputDStream<String> stream) throws InterruptedException {
106 | JavaDStream<String> messages = stream.repartition(3);
107 |
108 | ExecutorService executor = Executors.newFixedThreadPool(6);
109 |
110 | final int nbOfMessages = 5;
111 | NatsPublisher np = getNatsPublisher(nbOfMessages);
112 |
113 | if (logger.isDebugEnabled()) {
114 | messages.print();
115 | }
116 |
117 | messages.foreachRDD(new VoidFunction<JavaRDD<String>>() {
118 | private static final long serialVersionUID = 1L;
119 |
120 | @Override
121 | public void call(JavaRDD<String> rdd) throws Exception {
122 | logger.debug("RDD received: {}", rdd.collect());
123 |
124 | final long count = rdd.count();
125 | if ((count != 0) && (count != nbOfMessages)) {
126 | rightNumber = false;
127 | logger.error("The number of messages received should have been {} instead of {}.", nbOfMessages, count);
128 | }
129 |
130 | NatsToSparkValidator.TOTAL_COUNT.getAndAdd((int) count);
131 |
132 | atLeastSomeData = atLeastSomeData || (count > 0);
133 |
134 | for (String str : rdd.collect()) {
135 | if (! str.startsWith(NatsPublisher.NATS_PAYLOAD)) {
136 | payload = str;
137 | }
138 | }
139 | }
140 | });
141 |
142 | closeTheValidation(ssc, executor, nbOfMessages, np);
143 | }
144 |
145 |
146 | protected void validateTheReceptionOfIntegerMessages(JavaStreamingContext ssc,
147 | JavaReceiverInputDStream<Integer> stream) throws InterruptedException {
148 | JavaDStream<Integer> messages = stream.repartition(3);
149 |
150 | ExecutorService executor = Executors.newFixedThreadPool(6);
151 |
152 | final int nbOfMessages = 5;
153 | NatsPublisher np = getNatsPublisher(nbOfMessages);
154 |
155 | // if (logger.isDebugEnabled()) {
156 | messages.print();
157 | // }
158 |
159 | /* messages.foreachRDD(new VoidFunction>() {
160 | private static final long serialVersionUID = 1L;
161 |
162 | @Override
163 | public void call(JavaRDD rdd) throws Exception {
164 | logger.debug("RDD received: {}", rdd.collect());
165 | System.out.println("RDD received: " + rdd.collect());
166 | final long count = rdd.count();
167 | if ((count != 0) && (count != nbOfMessages)) {
168 | rightNumber = false;
169 | logger.error("The number of messages received should have been {} instead of {}.", nbOfMessages, count);
170 | }
171 |
172 | NatsToSparkValidator.TOTAL_COUNT.getAndAdd((int) count);
173 |
174 | atLeastSomeData = atLeastSomeData || (count > 0);
175 |
176 | for (Integer value :rdd.collect()) {
177 | if (value < NatsPublisher.NATS_PAYLOAD_INT) {
178 | payload = value.toString();
179 | }
180 | }
181 | }
182 | });*/
183 |
184 | final LongAccumulator count = ssc.sparkContext().sc().longAccumulator();
185 | NatsToSparkValidator.validateTheReceptionOfIntegerMessages(messages, count);
186 |
187 | closeTheValidation(ssc, executor, nbOfMessages, np);
188 | assertEquals(nbOfMessages, count.sum());
189 | }
190 |
191 | protected void validateTheReceptionOfMessages(final JavaStreamingContext ssc,
192 | final JavaPairDStream<String, String> messages) throws InterruptedException {
193 |
194 | ExecutorService executor = Executors.newFixedThreadPool(6);
195 |
196 | final int nbOfMessages = 5;
197 | NatsPublisher np = getNatsPublisher(nbOfMessages);
198 |
199 | if (logger.isDebugEnabled()) {
200 | messages.print();
201 | }
202 |
203 | final LongAccumulator count = ssc.sparkContext().sc().longAccumulator();
204 | NatsToSparkValidator.validateTheReceptionOfMessages(messages, count);
205 |
206 | closeTheValidation(ssc, executor, nbOfMessages, np);
207 | assertEquals(nbOfMessages, count.sum());
208 | }
209 |
210 | protected void closeTheValidation(JavaStreamingContext ssc, ExecutorService executor, final int nbOfMessages,
211 | NatsPublisher np) throws InterruptedException {
212 | ssc.start();
213 | Thread.sleep(1000);
214 | // start the publisher
215 | executor.execute(np);
216 | np.waitUntilReady();
217 | Thread.sleep(2000);
218 | ssc.close();
219 | Thread.sleep(2000);
220 | }
221 |
222 | protected abstract NatsPublisher getNatsPublisher(final int nbOfMessages);
223 | }
224 |
--------------------------------------------------------------------------------
/src/test/java/com/logimethods/connector/spark/to_nats/SparkToStandardNatsConnectorTest.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.spark.to_nats;
9 |
10 | import static com.logimethods.connector.nats.spark.test.UnitTestUtilities.*;
11 |
12 | import static com.logimethods.connector.nats_spark.Constants.PROP_SUBJECTS;
13 | import static io.nats.client.Options.PROP_URL;
14 | import static org.junit.Assert.fail;
15 |
16 | import java.util.Arrays;
17 | import java.util.List;
18 | import java.util.Properties;
19 |
20 | import org.apache.log4j.Level;
21 | import org.apache.spark.SparkConf;
22 | import org.apache.spark.api.java.JavaRDD;
23 | import org.apache.spark.api.java.JavaSparkContext;
24 | import org.apache.spark.api.java.function.Function;
25 | import org.apache.spark.api.java.function.VoidFunction;
26 | import org.junit.After;
27 | import org.junit.AfterClass;
28 | import org.junit.Before;
29 | import org.junit.BeforeClass;
30 | //import org.junit.Ignore;
31 | import org.junit.Test;
32 | import org.slf4j.Logger;
33 | import org.slf4j.LoggerFactory;
34 |
35 | import com.logimethods.connector.nats.spark.test.SparkToNatsValidator;
36 | import com.logimethods.connector.nats.spark.test.StandardNatsSubscriber;
37 | import com.logimethods.connector.nats.spark.test.TestClient;
38 | import com.logimethods.connector.nats.spark.test.UnitTestUtilities;
39 | import com.logimethods.connector.spark.to_nats.SparkToNatsConnector;
40 | import com.logimethods.connector.spark.to_nats.SparkToStandardNatsConnectorImpl;
41 |
42 | import scala.Tuple2;
43 |
44 |
45 |
46 | //@Ignore
47 | public class SparkToStandardNatsConnectorTest {
48 |
49 | protected static final String DEFAULT_SUBJECT = "spark2natsSubject";
50 | protected static JavaSparkContext sc;
51 | static Logger logger = null;
52 |
53 | /**
54 | * @throws java.lang.Exception
55 | */
56 | @BeforeClass
57 | public static void setUpBeforeClass() throws Exception {
58 | // Enable tracing for debugging as necessary.
59 | Level level = Level.WARN;
60 | UnitTestUtilities.setLogLevel(SparkToNatsConnector.class, level);
61 | UnitTestUtilities.setLogLevel(SparkToStandardNatsConnectorImpl.class, level);
62 | UnitTestUtilities.setLogLevel(SparkToStandardNatsConnectorTest.class, level);
63 | UnitTestUtilities.setLogLevel(TestClient.class, level);
64 | UnitTestUtilities.setLogLevel("org.apache.spark", level);
65 | UnitTestUtilities.setLogLevel("org.spark-project", level);
66 |
67 | logger = LoggerFactory.getLogger(SparkToStandardNatsConnectorTest.class);
68 |
69 | SparkConf sparkConf =
70 | UnitTestUtilities.newSparkConf()
71 | .setAppName("SparkToStandardNatsConnector");
72 | sc = new JavaSparkContext(sparkConf);
73 |
74 | UnitTestUtilities.startDefaultServer();
75 | }
76 |
77 | /**
78 | * @throws java.lang.Exception
79 | */
80 | @AfterClass
81 | public static void tearDownAfterClass() throws Exception {
82 | UnitTestUtilities.stopDefaultServer();
83 | sc.stop();
84 | }
85 |
86 | /**
87 | * @throws java.lang.Exception
88 | */
89 | @Before
90 | public void setUp() throws Exception {
91 | }
92 |
93 | /**
94 | * @throws java.lang.Exception
95 | */
96 | @After
97 | public void tearDown() throws Exception {
98 | }
99 |
100 | /**
101 | * @param data
102 | * @return
103 | */
104 | /* protected StandardNatsSubscriber getStandardNatsSubscriber(final List data, String subject) {
105 | ExecutorService executor = Executors.newFixedThreadPool(1);
106 |
107 | StandardNatsSubscriber ns1 = new StandardNatsSubscriber(NATS_SERVER_URL, subject + "_id", subject, data.size());
108 |
109 | // start the subscribers apps
110 | executor.execute(ns1);
111 |
112 | // wait for subscribers to be ready.
113 | ns1.waitUntilReady();
114 | return ns1;
115 | }*/
116 |
117 | @Test(timeout=360000)
118 | public void testStaticSparkToNatsNoSubjects() throws Exception {
119 | JavaRDD<String> rdd = UnitTestUtilities.getJavaRDD(sc);
120 |
121 | try {
122 | SparkToNatsConnector.newConnection().withNatsURL(NATS_URL).publishToNats(rdd);
123 | } catch (Exception e) {
124 | if (e.getMessage().contains("needs at least one NATS Subject"))
125 | return;
126 | else
127 | throw e;
128 | }
129 |
130 | fail("An Exception(\"SparkToNatsConnector needs at least one Subject\") should have been raised.");
131 | }
132 |
133 | @Test(timeout=360000)
134 | public void testStaticKeyValueSparkToNatsNoSubjects() throws Exception {
135 | String subject1 = "subject1";
136 |
137 | JavaRDD<Tuple2<String, String>> stream = getKeyValueStream(subject1);
138 | SparkToNatsConnector.newConnection().withNatsURL(NATS_URL).publishToNatsAsKeyValue(stream);
139 | }
140 |
141 | protected JavaRDD<Tuple2<String, String>> getKeyValueStream(String subject1) {
142 | final List<String> data = UnitTestUtilities.getData();
143 | JavaRDD<String> rdd = sc.parallelize(data);
144 |
145 | JavaRDD<Tuple2<String, String>> stream =
146 | SparkToNatsValidator.newSubjectStringTuple(subject1, rdd);
147 | return stream;
148 | }
149 |
150 | @Test(timeout=360000)
151 | public void testStaticSparkToNatsWithMultipleSubjects() throws Exception {
152 | final List<String> data = UnitTestUtilities.getData();
153 |
154 | String subject1 = "subject1";
155 | StandardNatsSubscriber ns1 = UnitTestUtilities.getStandardNatsSubscriber(data, subject1, NATS_LOCALHOST_URL);
156 |
157 | String subject2 = "subject2";
158 | StandardNatsSubscriber ns2 = UnitTestUtilities.getStandardNatsSubscriber(data, subject2, NATS_LOCALHOST_URL);
159 |
160 | JavaRDD<String> rdd = sc.parallelize(data);
161 |
162 | SparkToNatsConnector
163 | .newConnection()
164 | .withNatsURL(NATS_URL)
165 | .withSubjects(DEFAULT_SUBJECT, subject1, subject2)
166 | .publishToNats(rdd);
167 |
168 | // wait for the subscribers to complete.
169 | ns1.waitForCompletion();
170 | ns2.waitForCompletion();
171 | }
172 |
173 | @Test(timeout=360000)
174 | public void testStaticKeyValueSparkToNatsWithMultipleSubjects() throws Exception {
175 | final List<String> data = UnitTestUtilities.getData();
176 |
177 | final String rootSubject = "ROOT";
178 |
179 | String subject1 = "subject1";
180 | StandardNatsSubscriber ns1 = UnitTestUtilities.getStandardNatsSubscriber(data, rootSubject + "." + subject1 + ".>", NATS_LOCALHOST_URL);
181 |
182 | String subject2 = "subject2";
183 | StandardNatsSubscriber ns2 = UnitTestUtilities.getStandardNatsSubscriber(data, rootSubject + "." + subject2 + ".>", NATS_LOCALHOST_URL);
184 |
185 | JavaRDD<String> rdd = sc.parallelize(data);
186 | JavaRDD<Tuple2<String, String>> stream1 =
187 | SparkToNatsValidator.newSubjectDotStringTuple(subject1, rdd);
188 | JavaRDD<Tuple2<String, String>> stream2 =
189 | SparkToNatsValidator.newSubjectDotStringTuple(subject2, rdd);
190 | JavaRDD<Tuple2<String, String>> stream = stream1.union(stream2);
191 |
192 | SparkToNatsConnector
193 | .newConnection()
194 | .withNatsURL(NATS_URL)
195 | .withSubjects(rootSubject + ".")
196 | .publishToNatsAsKeyValue(stream);
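   | // Each (key, value) tuple should be published to the root subject combined with its key,
   | // e.g. a key of "subject1.x" should land on "ROOT.subject1.x", matching the
   | // "ROOT.subject1.>" and "ROOT.subject2.>" subscriptions above.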
197 |
198 | // wait for the subscribers to complete.
199 | ns1.waitForCompletion();
200 | ns2.waitForCompletion();
201 | }
202 |
203 | @Test(timeout=360000)
204 | public void testStaticSparkToNatsWithProperties() throws Exception {
205 | final List<String> data = UnitTestUtilities.getData();
206 |
207 | StandardNatsSubscriber ns1 = UnitTestUtilities.getStandardNatsSubscriber(data, DEFAULT_SUBJECT, NATS_LOCALHOST_URL);
208 |
209 | JavaRDD<String> rdd = sc.parallelize(data);
210 |
211 | final Properties properties = new Properties();
212 | properties.setProperty(PROP_URL, NATS_URL);
213 | properties.setProperty(PROP_SUBJECTS, "sub1,"+DEFAULT_SUBJECT+" , sub2");
214 |
215 | // rdd.foreach(SparkToNatsConnector.newConnection().withProperties(properties).publishToNats());
216 | SparkToNatsConnector.newConnection().withProperties(properties).publishToNats(rdd);
217 |
218 | // wait for the subscribers to complete.
219 | ns1.waitForCompletion();
220 | }
221 | }
222 |
--------------------------------------------------------------------------------
/src/test/java/com/logimethods/connector/spark/to_nats/SparkToNatsStreamingConnectorPoolTest.java:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | * Copyright (c) 2016 Logimethods
3 | * All rights reserved. This program and the accompanying materials
4 | * are made available under the terms of the MIT License (MIT)
5 | * which accompanies this distribution, and is available at
6 | * http://opensource.org/licenses/MIT
7 | *******************************************************************************/
8 | package com.logimethods.connector.spark.to_nats;
9 |
10 | import static com.logimethods.connector.nats.spark.test.UnitTestUtilities.NATS_STREAMING_LOCALHOST_URL;
11 | import static com.logimethods.connector.nats.spark.test.UnitTestUtilities.NATS_STREAMING_URL;
12 | import static com.logimethods.connector.nats.spark.test.UnitTestUtilities.startStreamingServer;
13 | import static com.logimethods.connector.nats_spark.Constants.PROP_SUBJECTS;
14 | import static io.nats.client.Options.PROP_URL;
15 | import static org.junit.Assert.fail;
16 |
17 | import java.io.IOException;
18 | import java.util.List;
19 | import java.util.Properties;
20 | import java.util.concurrent.TimeoutException;
21 |
22 | import org.apache.log4j.Level;
23 | import org.apache.spark.streaming.api.java.JavaDStream;
24 | import org.junit.BeforeClass;
25 | import org.junit.Test;
26 | import org.slf4j.LoggerFactory;
27 |
28 | import com.logimethods.connector.nats.spark.test.NatsStreamingSubscriber;
29 | import com.logimethods.connector.nats.spark.test.STANServer;
30 | import com.logimethods.connector.nats.spark.test.SparkToNatsValidator;
31 | import com.logimethods.connector.nats.spark.test.TestClient;
32 | import com.logimethods.connector.nats.spark.test.UnitTestUtilities;
33 | import com.logimethods.connector.nats.to_spark.NatsToSparkConnector;
34 | import com.logimethods.connector.nats_spark.IncompleteException;
35 | import com.logimethods.connector.nats_spark.NatsSparkUtilities;
36 |
37 | import io.nats.streaming.NatsStreaming;
38 | import io.nats.streaming.Options;
39 | import io.nats.streaming.StreamingConnection;
40 |
41 | // Call first $ nats-streaming-server -m 8222 -p 4223
42 | public class SparkToNatsStreamingConnectorPoolTest extends AbstractSparkToNatsConnectorTest {
43 |
44 | /**
45 | *
46 | */
47 | private static final long serialVersionUID = 1L;
48 | static final String clusterID = "test-cluster"; //"my_test_cluster";
49 |
50 | /**
51 | * @throws java.lang.Exception
52 | */
53 | @BeforeClass
54 | public static void setUpBeforeClass() throws Exception {
55 | // Enable tracing for debugging as necessary.
56 | Level level = Level.DEBUG;
57 | UnitTestUtilities.setLogLevel(SparkToNatsConnectorPool.class, level);
58 | UnitTestUtilities.setLogLevel(NatsToSparkConnector.class, level);
59 | UnitTestUtilities.setLogLevel(SparkToNatsStreamingConnectorPoolTest.class, level);
60 | UnitTestUtilities.setLogLevel(SparkToNatsStreamingConnectorImpl.class, level);
61 | UnitTestUtilities.setLogLevel(SparkToNatsConnector.class, level);
62 | UnitTestUtilities.setLogLevel(TestClient.class, level);
63 | UnitTestUtilities.setLogLevel("org.apache.spark", Level.WARN);
64 | UnitTestUtilities.setLogLevel("org.spark-project", Level.WARN);
65 |
66 | logger = LoggerFactory.getLogger(SparkToNatsStreamingConnectorPoolTest.class);
67 | }
68 |
69 | // @Test(timeout=360000)
70 | public void testBasicPublish() {
71 | // Run a STAN server
72 | try (STANServer s = UnitTestUtilities.startStreamingServer(clusterID, false)) {
73 | Options options = new Options.Builder().natsUrl(NATS_STREAMING_LOCALHOST_URL).build();
74 | try ( StreamingConnection sc =
75 | NatsStreaming.connect(clusterID, getUniqueClientName(), options)) {
76 | sc.publish("foo", "Hello World!".getBytes());
77 | } catch (IOException | TimeoutException | InterruptedException e) {
78 | System.err.println(options);
79 | fail(e.getMessage());
80 | }
81 | }
82 | }
83 |
84 | @Test(timeout=360000)
85 | public void testStreamingSparkToNatsPublish() throws InterruptedException, IOException, TimeoutException {
86 | String subject1 = "subject1";
87 | String subject2 = "subject2";
88 | final SparkToNatsConnectorPool<?> connectorPool =
89 | SparkToNatsConnectorPool.newStreamingPool(clusterID).withSubjects(DEFAULT_SUBJECT, subject1, subject2).withNatsURL(NATS_STREAMING_URL);
90 |
91 | validateConnectorPool(subject1, subject2, connectorPool);
92 | }
93 |
94 | @Test(expected=IncompleteException.class)
95 | public void testEmptyStreamingSparkToNatsPublish() throws Exception {
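   | // Neither subjects nor a NATS URL have been set, so obtaining a connector must fail.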
96 | final SparkToNatsConnectorPool<?> connectorPool = SparkToNatsConnectorPool.newStreamingPool(clusterID);
97 | connectorPool.getConnector();
98 | }
99 |
100 | @Test(expected=IncompleteException.class)
101 | public void testEmptyStreamingSparkToNatsWithEmptyPropertiesPublish() throws Exception {
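   | // An empty Properties object supplies neither subjects nor a URL, so getConnector() must still fail.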
102 | final Properties properties = new Properties();
103 | final SparkToNatsConnectorPool<?> connectorPool = SparkToNatsConnectorPool.newStreamingPool(clusterID).withProperties(properties);
104 | connectorPool.getConnector();
105 | }
106 |
107 | @Test(timeout=360000)
108 | public void testStreamingSparkToNatsWithPROP_URLPropertiesPublish() throws InterruptedException, IOException, TimeoutException {
109 | String subject1 = "subject1";
110 | String subject2 = "subject2";
111 | final Properties properties = new Properties();
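   | // The NATS URL can be supplied through a property instead of the withNatsURL() builder call.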
112 | properties.setProperty(PROP_URL, NATS_STREAMING_URL);
113 | final SparkToNatsConnectorPool<?> connectorPool =
114 | SparkToNatsConnectorPool.newStreamingPool(clusterID).withProperties(properties).withSubjects(DEFAULT_SUBJECT, subject1, subject2);
115 |
116 | validateConnectorPool(subject1, subject2, connectorPool);
117 | }
118 |
119 | @Test(timeout=360000)
120 | public void testStreamingSparkToNatsWithFullPropertiesPublish() throws InterruptedException, IOException, TimeoutException {
121 | String subject1 = "subject1";
122 | String subject2 = "subject2";
123 | final Properties properties = new Properties();
124 | properties.setProperty(PROP_URL, NATS_STREAMING_URL);
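   | // The irregular spacing around the commas is deliberate: the connector is expected to trim the subjects list.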
125 | properties.setProperty(PROP_SUBJECTS, subject1 + "," + DEFAULT_SUBJECT + " , " + subject2);
126 | final SparkToNatsConnectorPool<?> connectorPool =
127 | SparkToNatsConnectorPool.newStreamingPool(clusterID).withProperties(properties);
128 |
129 | validateConnectorPool(subject1, subject2, connectorPool);
130 | }
131 |
132 | /**
   |  * Publishes the shared test data through a Spark stream wired to the given
   |  * connector pool, then verifies that subscribers on both subjects receive every record.
133 |  * @param subject1 first additional NATS subject to validate
134 |  * @param subject2 second additional NATS subject to validate
135 |  * @param connectorPool the pool under test, already configured with subjects and URL
136 |  * @throws InterruptedException
137 |  * @throws TimeoutException
138 |  * @throws IOException
139 |  */
140 | protected void validateConnectorPool(String subject1, String subject2,
141 | final SparkToNatsConnectorPool<?> connectorPool) throws InterruptedException, IOException, TimeoutException {
142 |
143 | // Run a STAN server; it stays up for the whole validation (the handle is not closed here).
144 | startStreamingServer(clusterID, false);
149 | final List<Integer> data = UnitTestUtilities.getData();
150 |
151 | NatsStreamingSubscriber ns1 = UnitTestUtilities.getNatsStreamingSubscriber(data, subject1, clusterID, getUniqueClientName() + "_SUB1", NATS_STREAMING_LOCALHOST_URL);
152 | logger.debug("ns1 NatsStreamingSubscriber ready");
153 |
154 | NatsStreamingSubscriber ns2 = UnitTestUtilities.getNatsStreamingSubscriber(data, subject2, clusterID, getUniqueClientName() + "_SUB2", NATS_STREAMING_LOCALHOST_URL);
155 | logger.debug("ns2 NatsStreamingSubscriber ready");
156 |
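   | // Derive the stream of integers from the shared test data source and hand it to the
   | // pool, which publishes every element to all configured subjects.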
158 | JavaDStream<Integer> integers = SparkToNatsValidator.generateIntegers(dataSource.dataStream(ssc));
159 | integers.print();
160 | connectorPool.publishToNats(integers);
161 |
162 | ssc.start();
163 |
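   | // Give the Spark streaming context a moment to start before any data is written.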
164 | Thread.sleep(1000);
165 |
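   | // Push the test records through the data source; the pool should forward them to NATS Streaming.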
173 | dataSource.open();
174 | dataSource.write(data);
175 |
176 | // Wait for both subscribers to receive the complete data set (each call blocks until done or the test times out).
177 | ns1.waitForCompletion();
178 | ns2.waitForCompletion();
179 |
180 | dataSource.close();
181 | }
182 |
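   | // NATS Streaming requires client IDs to be unique within a cluster, hence the generated suffix.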
183 | static String getUniqueClientName() {
184 | return "clientName_" + NatsSparkUtilities.generateUniqueID();
185 | }
186 | }
187 |
--------------------------------------------------------------------------------