Producer<String, IoTData> producer = new Producer<>(new ProducerConfig(properties));
32 | IoTDataProducer iotProducer = new IoTDataProducer(producer);
33 | iotProducer.generateIoTEvent(properties.getProperty("kafka.topic"));
34 | }
35 |
36 | /**
37 | * Method runs in a while loop and generates random IoT data in JSON with the format below.
38 | *
39 | * {"vehicleId":"52f08f03-cd14-411a-8aef-ba87c9a99997","vehicleType":"Public Transport","routeId":"route-43","latitude":",-85.583435","longitude":"38.892395","timestamp":1465471124373,"speed":80.0,"fuelLevel":28.0}
40 | *
41 | * @throws InterruptedException
42 | */
43 | private void generateIoTEvent(String topic) throws InterruptedException {
44 | List<String> routeList = Arrays.asList(
45 | new String[]{"Route-37", "Route-43", "Route-82"}
46 | );
47 | List<String> vehicleTypeList = Arrays.asList(
48 | new String[]{"Large Truck", "Small Truck", "Private Car", "Bus", "Taxi"}
49 | );
50 | Random rand = new Random();
51 | logger.info("Sending events");
52 |
53 | while (true) {
54 | List<IoTData> events = generateVehicleWithPositions(routeList, vehicleTypeList, rand);
55 | for (IoTData event : events) {
56 | producer.send(new KeyedMessage<>(topic, event));
57 | }
58 | Thread.sleep(rand.nextInt(3000) + 2000); // random delay of 2 to 5 seconds
59 | }
60 | }
61 |
62 | private List<IoTData> generateVehicleWithPositions(
63 | final List<String> routeList,
64 | final List<String> vehicleTypeList,
65 | final Random rand
66 | ) {
67 | List<IoTData> eventList = new ArrayList<>();
68 | String vehicleId = UUID.randomUUID().toString();
69 | String vehicleType = vehicleTypeList.get(rand.nextInt(vehicleTypeList.size()));
70 | String routeId = routeList.get(rand.nextInt(routeList.size()));
71 | Date timestamp = new Date();
72 | double speed = rand.nextInt(80) + 20; // random speed between 20 and 99
73 | double fuelLevel = rand.nextInt(30) + 10; // random fuel level between 10 and 39
74 | float[] coords = getCoordinates();
75 | for (int i = 0; i < 5; i++) {// Add 5 events for each vehicle (moving)
76 | coords[0] = coords[0] + 0.0001f;
77 | coords[1] = coords[1] + 0.0001f;
78 | IoTData event = new IoTData(
79 | vehicleId,
80 | vehicleType,
81 | routeId,
82 | String.valueOf(coords[0]),
83 | String.valueOf(coords[1]),
84 | timestamp,
85 | speed,
86 | fuelLevel
87 | );
88 | eventList.add(event);
89 | }
90 | return eventList;
91 | }
92 |
93 |
94 | /**
95 | * Method to generate random latitude and longitude for routes
96 | * @return a float array: [latitude, longitude]
97 | */
98 | private float[] getCoordinates() {
99 | Random rand = new Random();
100 | int latPrefix = rand.nextInt(3) + 52; // integer part of latitude: 52-54
101 | int longPrefix = rand.nextInt(3) + 7; // integer part of longitude: 7-9 (negated below)
102 | float latitude = latPrefix + rand.nextFloat(); // latitude in [52, 55)
103 | float longitude = longPrefix + rand.nextFloat(); // longitude in [7, 10), becomes (-10, -7]
104 | longitude = longitude * -1;
105 | return new float[]{latitude, longitude};
106 | }
107 | }
108 |
--------------------------------------------------------------------------------
/iot-kafka-producer/src/main/java/com/apssouza/kafka/PropertyFileReader.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.kafka;
2 |
3 | import java.io.IOException;
4 | import java.io.InputStream;
5 | import java.util.Properties;
6 |
7 | import org.apache.log4j.Logger;
8 |
9 | /**
10 | * Utility class to read property file
11 | *
12 | * @author abaghel
13 | *
14 | */
15 | public class PropertyFileReader {
16 | private static final Logger logger = Logger.getLogger(PropertyFileReader.class);
17 | private static Properties prop = new Properties();
18 | public static Properties readPropertyFile() throws Exception {
19 | if (prop.isEmpty()) {
20 | InputStream input = PropertyFileReader.class.getClassLoader().getResourceAsStream("iot-kafka.properties");
21 | try {
22 | prop.load(input);
23 | } catch (IOException ex) {
24 | logger.error(ex);
25 | throw ex;
26 | } finally {
27 | if (input != null) {
28 | input.close();
29 | }
30 | }
31 | }
32 | return prop;
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/iot-kafka-producer/src/main/resources/iot-kafka.properties:
--------------------------------------------------------------------------------
1 | # Kafka properties
2 | #### If running from the host, the Kafka port is 29092; when running inside Docker, it is 9092.
3 | zookeeper.connect=localhost:2181
4 | metadata.broker.list=localhost:29092
5 | request.required.acks=1
6 | serializer.class=com.apssouza.kafka.IoTDataEncoder
7 | kafka.topic=iot-data-event
8 |
--------------------------------------------------------------------------------
/iot-kafka-producer/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # Root logger option
2 | log4j.rootLogger=INFO, file, stdout
3 |
4 | # Direct log messages to a log file
5 | log4j.appender.file=org.apache.log4j.RollingFileAppender
6 | log4j.appender.file.File=/tmp/iot-kafka.log
7 | log4j.appender.file.MaxFileSize=10MB
8 | log4j.appender.file.MaxBackupIndex=10
9 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
10 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
11 |
12 | # Direct log messages to stdout
13 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
14 | log4j.appender.stdout.Target=System.out
15 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
16 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
--------------------------------------------------------------------------------
/iot-spark-processor/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <modelVersion>4.0.0</modelVersion>
6 |
7 |     <groupId>com.apssouza.iot</groupId>
8 |     <artifactId>iot-spark-processor</artifactId>
9 |     <version>1.0.0</version>
10 |     <name>IoT Spark Processor</name>
11 |
12 |     <properties>
13 |         <spark-version>3.4.0</spark-version>
14 |         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
15 |         <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
16 |     </properties>
17 |
18 |     <dependencies>
19 |         <dependency>
20 |             <groupId>org.apache.spark</groupId>
21 |             <artifactId>spark-core_2.12</artifactId>
22 |             <version>${spark-version}</version>
23 |         </dependency>
24 |         <dependency>
25 |             <groupId>org.apache.spark</groupId>
26 |             <artifactId>spark-streaming_2.12</artifactId>
27 |             <version>${spark-version}</version>
28 |         </dependency>
29 |         <dependency>
30 |             <groupId>org.apache.spark</groupId>
31 |             <artifactId>spark-sql_2.12</artifactId>
32 |             <version>${spark-version}</version>
33 |         </dependency>
34 |         <dependency>
35 |             <groupId>org.apache.spark</groupId>
36 |             <artifactId>spark-mllib_2.12</artifactId>
37 |             <version>${spark-version}</version>
38 |         </dependency>
39 |         <dependency>
40 |             <groupId>org.apache.spark</groupId>
41 |             <artifactId>spark-streaming-kafka-0-10_2.12</artifactId>
42 |             <version>${spark-version}</version>
43 |         </dependency>
44 |
45 |         <dependency>
46 |             <groupId>com.datastax.spark</groupId>
47 |             <artifactId>spark-cassandra-connector_2.12</artifactId>
48 |             <version>3.0.0</version>
49 |         </dependency>
50 |         <dependency>
51 |             <groupId>joda-time</groupId>
52 |             <artifactId>joda-time</artifactId>
53 |             <version>2.10.9</version>
54 |         </dependency>
55 |         <dependency>
56 |             <groupId>log4j</groupId>
57 |             <artifactId>log4j</artifactId>
58 |             <version>1.2.17</version>
59 |         </dependency>
60 |     </dependencies>
61 |
62 |     <build>
63 |         <resources>
64 |             <resource>
65 |                 <directory>${basedir}/src/main/resources</directory>
66 |             </resource>
67 |         </resources>
68 |         <plugins>
69 |             <plugin>
70 |                 <groupId>org.apache.maven.plugins</groupId>
71 |                 <artifactId>maven-compiler-plugin</artifactId>
72 |                 <version>3.1</version>
73 |                 <configuration>
74 |                     <source>11</source>
75 |                     <target>11</target>
76 |                 </configuration>
77 |             </plugin>
78 |             <plugin>
79 |                 <groupId>org.apache.maven.plugins</groupId>
80 |                 <artifactId>maven-shade-plugin</artifactId>
81 |                 <version>2.4.3</version>
82 |                 <executions>
83 |                     <execution>
84 |                         <phase>package</phase>
85 |                         <goals>
86 |                             <goal>shade</goal>
87 |                         </goals>
88 |                         <configuration>
89 |                             <filters>
90 |                                 <filter>
91 |                                     <artifact>*:*</artifact>
92 |                                     <excludes>
93 |                                         <exclude>META-INF/*.SF</exclude>
94 |                                         <exclude>META-INF/*.DSA</exclude>
95 |                                         <exclude>META-INF/*.RSA</exclude>
96 |                                     </excludes>
97 |                                 </filter>
98 |                             </filters>
99 |                             <transformers>
100 |                                 <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
101 |                                     <resource>reference.conf</resource>
102 |                                 </transformer>
103 |                                 <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
104 |                                     <mainClass>com.apssouza.iot.streaming.StreamingProcessor</mainClass>
105 |                                 </transformer>
106 |                             </transformers>
107 |                         </configuration>
108 |                     </execution>
109 |                 </executions>
110 |             </plugin>
111 |         </plugins>
112 |     </build>
113 | </project>
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/StreamTesting.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot;
2 |
3 | import org.apache.spark.SparkConf;
4 | import org.apache.spark.streaming.Durations;
5 | import org.apache.spark.streaming.api.java.JavaDStream;
6 | import org.apache.spark.streaming.api.java.JavaPairDStream;
7 | import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
8 | import org.apache.spark.streaming.api.java.JavaStreamingContext;
9 |
10 | import java.util.Arrays;
11 |
12 | import scala.Tuple2;
13 |
14 | public class StreamTesting {
15 |
16 | public static void main(String[] s) throws InterruptedException {
17 | SparkConf conf = new SparkConf()
18 | .setAppName(StreamTesting.class.getName())
19 | .setMaster("local[*]")
20 | .set("spark.driver.bindAddress", "127.0.0.1");
21 | JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(30));
22 |
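// Requires a text source on localhost:9999; e.g. run "nc -lk 9999" in a terminal and type lines into it.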
23 | JavaReceiverInputDStream<String> lines = jssc.socketTextStream("localhost", 9999);
24 | lines.print();
25 |
26 | JavaDStream<Integer> length = lines.map(x -> x.length());
27 | length.print();
28 |
29 | JavaDStream<String> words = lines.flatMap(x -> Arrays.asList(x.split(" ")).iterator());
30 | words.print();
31 |
32 | JavaDStream<String> reduce = words.reduce((a, b) -> a.length() >= b.length() ? a : b);
33 | reduce.print();
34 |
35 | JavaDStream<String> filter = words.filter(x -> !x.equals("hello"));
36 | filter.print();
37 |
38 | JavaPairDStream<String, Integer> pairs = words.mapToPair(x -> new Tuple2<>(x, 1));
39 | pairs.print();
40 |
41 | JavaPairDStream<String, Integer> sum = pairs.reduceByKey((a, b) -> a + b);
42 | sum.print();
43 |
44 | JavaPairDStream<String, Long> countByValue = words.countByValue();
45 | countByValue.print();
46 |
47 | JavaPairDStream<Long, String> swap = countByValue.mapToPair(x -> x.swap());
48 | swap.print();
49 |
50 | JavaPairDStream<Long, String> sort = swap.transformToPair(x -> x.sortByKey(false));
51 | sort.print();
52 |
53 | jssc.start();
54 | jssc.awaitTermination();
55 | jssc.stop();
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/batch/BatchHeatMapProcessor.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.batch;
2 |
3 | import com.apssouza.iot.common.dto.Measurement;
4 | import com.apssouza.iot.common.entity.HeatMapData;
5 | import com.apssouza.iot.common.TimestampComparator;
6 | import com.datastax.spark.connector.japi.CassandraJavaUtil;
7 | import com.apssouza.iot.common.dto.Coordinate;
8 | import com.apssouza.iot.common.dto.IoTData;
9 |
10 | import org.apache.spark.api.java.JavaPairRDD;
11 | import org.apache.spark.api.java.JavaRDD;
12 |
13 | import scala.Tuple2;
14 |
15 | import java.io.IOException;
16 | import java.util.Calendar;
17 | import java.util.Date;
18 | import java.util.HashMap;
19 | import java.util.Map;
20 | import java.util.concurrent.TimeUnit;
21 |
22 |
23 | /**
24 | * Process the batch heat map calculation
25 | *
26 | * @author apssouza22
27 | */
28 | public class BatchHeatMapProcessor {
29 |
30 | public static void processHeatMap(JavaRDD<IoTData> dataFrame) throws IOException {
31 | JavaRDD<Measurement> measurements = transformToMeasurements(dataFrame);
32 | JavaRDD<Measurement> roundedCoordinates = roundCoordinates(measurements);
33 | Date minTimestamp = measurements.min(new TimestampComparator()).getTimestamp();
34 | Date maxTimestamp = measurements.max(new TimestampComparator()).getTimestamp();
35 | long diffInMillies = Math.abs(minTimestamp.getTime() - maxTimestamp.getTime());
36 | long diffInDays = TimeUnit.DAYS.convert(diffInMillies, TimeUnit.MILLISECONDS);
37 |
38 | Calendar c = Calendar.getInstance();
39 | c.setTime(minTimestamp);
40 | c.set(Calendar.HOUR_OF_DAY, 0);
41 | c.set(Calendar.MINUTE, 0);
42 | Date start = c.getTime();
43 |
44 | for (int i = 0; i < diffInDays; i++) {
45 | c.setTime(start);
46 | c.add(Calendar.DATE, 1);
47 | Date end = c.getTime();
48 | processInterval(roundedCoordinates, start, end);
49 | start = end;
50 | }
51 | }
52 |
53 | private static void processInterval(
54 | JavaRDD<Measurement> roundedCoordinates,
55 | Date start,
56 | Date end
57 | ) {
58 | JavaRDD<Measurement> measurementsFilteredByTime = filterByTime(roundedCoordinates, start, end);
59 | JavaPairRDD<Coordinate, Integer> counts = countPerGridBox(measurementsFilteredByTime);
60 | JavaRDD<HeatMapData> countInArea = getCountInArea(counts, start);
61 | save(countInArea);
62 | }
63 |
64 | private static void save(JavaRDD<HeatMapData> mapDataJavaRDD) {
65 | Map<String, String> columnNameMappings = new HashMap<>();
66 | columnNameMappings.put("latitude", "latitude");
67 | columnNameMappings.put("longitude", "longitude");
68 | columnNameMappings.put("totalCount", "totalcount");
69 | columnNameMappings.put("timeStamp", "timestamp");
70 |
71 | CassandraJavaUtil.javaFunctions(mapDataJavaRDD).writerBuilder(
72 | "traffickeyspace",
73 | "heat_map_batch",
74 | CassandraJavaUtil.mapToRow(HeatMapData.class, columnNameMappings)
75 | ).saveToCassandra();
76 | }
77 |
78 |
79 | private static JavaRDD<HeatMapData> getCountInArea(JavaPairRDD<Coordinate, Integer> tuples, Date day) {
80 | return tuples.map(tuple -> {
81 | Coordinate coordinate = tuple._1();
82 | Integer count = tuple._2();
83 | return new HeatMapData(coordinate.getLatitude(), coordinate.getLongitude(), count, day);
84 | });
85 | }
86 |
87 |
88 | /**
89 | * Converts each row from the iotData to a Measurement
90 | *
91 | * @param iotData | RDD of IoTData records
92 | * @return A set containing all the IoT data as Measurements
93 | */
94 | private static JavaRDD<Measurement> transformToMeasurements(JavaRDD<IoTData> iotData) {
95 | return iotData.map(row -> {
96 | Coordinate coordinate = new Coordinate(
97 | Double.valueOf(row.getLatitude()),
98 | Double.valueOf(row.getLongitude())
99 | );
100 | return new Measurement(coordinate, row.getTimestamp());
101 | });
102 | }
103 |
104 |
105 | /**
106 | * Maps the measurements by rounding the coordinate. The world is defined by a grid of boxes, each box has a size of 0.0005 by 0.0005. Every mapping will be rounded to the center of the box it is
107 | * part of. Boundary cases will be rounded up, so a coordinate on (-0.00025,0) will be rounded to (0,0), while the coordinate (0.00025,0) will be rounded to (0.0005,0).
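* For example, 52.12342 maps to 52.1235: 52.12342 * 10000 / 5 = 104246.84, which rounds to 104247, and 104247 * 5 / 10000 = 52.1235.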
108 | *
109 | * @param measurements | The dataset of measurements
110 | * @return A set of measurements with rounded coordinates
111 | */
112 | private static JavaRDD<Measurement> roundCoordinates(JavaRDD<Measurement> measurements) {
113 | return measurements.map(measurement -> {
114 | double roundedLatitude = (double) (5 * Math.round((measurement.getCoordinate().getLatitude() * 10000) / 5)) / 10000;
115 | double roundedLongitude = (double) (5 * Math.round((measurement.getCoordinate().getLongitude() * 10000) / 5)) / 10000;
116 | Coordinate roundedCoordinate = new Coordinate(roundedLatitude, roundedLongitude);
117 | measurement.setRoundedCoordinate(roundedCoordinate);
118 | return measurement;
119 | }
120 | );
121 | }
122 |
123 | /**
124 | * Filter the measurements in a given time period
125 | *
126 | * @param measurements | The dataset of measurements
127 | * @param start | Start of the time period
128 | * @param end | End of the time period
129 | * @return A set of measurements in the given time period
130 | */
131 | private static JavaRDD<Measurement> filterByTime(JavaRDD<Measurement> measurements, Date start, Date end) {
132 | return measurements.filter(
133 | measurement -> measurement.getTimestamp().after(start)
134 | && measurement.getTimestamp().before(end)
135 | );
136 | }
137 |
138 | /**
139 | * Reduces the dataset by counting the number of measurements for a specific grid box (rounded coordinate)
140 | *
141 | * @param measurements | The dataset of measurements
142 | * @return A set of tuples linking rounded coordinates to their number of occurrences
143 | */
144 | private static JavaPairRDD<Coordinate, Integer> countPerGridBox(JavaRDD<Measurement> measurements) {
145 | return measurements.mapToPair(
146 | measurement -> new Tuple2<>(measurement.getRoundedCoordinate(), 1)
147 | ).reduceByKey((a, b) -> a + b);
148 | }
149 |
150 | }
151 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/batch/BatchProcessor.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.batch;
2 |
3 | import com.apssouza.iot.common.dto.IoTData;
4 | import com.apssouza.iot.common.dto.POIData;
5 | import com.apssouza.iot.common.ProcessorUtils;
6 | import com.apssouza.iot.common.PropertyFileReader;
7 | import com.datastax.spark.connector.util.JavaApiHelper;
8 |
9 | import org.apache.spark.SparkConf;
10 | import org.apache.spark.sql.Dataset;
11 | import org.apache.spark.sql.Row;
12 | import org.apache.spark.sql.SparkSession;
13 |
14 | import java.util.Properties;
15 |
16 | /**
17 | * Class responsible for starting the batch processing of the parquet file
18 | */
19 | public class BatchProcessor {
20 |
21 |
22 | public static void main(String[] args) throws Exception {
23 | // String file = "iot-spark-local.properties";
24 | String fileProp = "iot-spark.properties";
25 | Properties prop = PropertyFileReader.readPropertyFile(fileProp);
26 | prop.setProperty("com.iot.app.spark.app.name", "Iot Batch Processor");
27 |
28 | var file = prop.getProperty("com.iot.app.hdfs") + "iot-data-parque";
29 | String[] jars = {prop.getProperty("com.iot.app.jar")};
30 | var conf = ProcessorUtils.getSparkConf(prop, "batch-processor");
31 | conf.setJars(jars);
32 | var sparkSession = SparkSession.builder().config(conf).getOrCreate();
33 | //broadcast variables. We will monitor vehicles on Route 37 which are of type Truck.
34 | //Basically we are sending the data to each worker node on the Spark cluster.
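//On the executors the broadcast value is read back with broadcastPOIValues.value() (see BatchTrafficDataProcessor.filterVehicleInPOIRange).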
35 | var classTag = JavaApiHelper.getClassTag(POIData.class);
36 | var broadcastPOIValues = sparkSession
37 | .sparkContext()
38 | .broadcast(getPointOfInterest(), classTag);
39 |
40 | var dataFrame = getDataFrame(sparkSession, file);
41 | var rdd = dataFrame.javaRDD().map(BatchProcessor::transformToIotData);
42 | BatchHeatMapProcessor.processHeatMap(rdd);
43 | BatchTrafficDataProcessor.processPOIData(rdd, broadcastPOIValues);
44 | BatchTrafficDataProcessor.processTotalTrafficData(rdd);
45 | BatchTrafficDataProcessor.processWindowTrafficData(rdd);
46 | sparkSession.close();
47 | // close() also stops the underlying SparkContext, so an extra stop() call is unnecessary
48 | }
49 |
50 | private static POIData getPointOfInterest() {
51 | //poi data
52 | POIData poiData = new POIData();
53 | poiData.setLatitude(33.877495);
54 | poiData.setLongitude(-95.50238);
55 | poiData.setRadius(30);//30 km
56 | poiData.setRoute("Route-37");
57 | poiData.setVehicle("Truck");
58 | return poiData;
59 | }
60 |
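// Column indices below assume the parquet schema stores the IoTData fields in alphabetical order:
// fuelLevel(0), latitude(1), longitude(2), routeId(3), speed(4), timestamp(5), vehicleId(6), vehicleType(7).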
61 | private static IoTData transformToIotData(Row row) {
62 | return new IoTData(
63 | row.getString(6),
64 | row.getString(7),
65 | row.getString(3),
66 | row.getString(1),
67 | row.getString(2),
68 | row.getDate(5),
69 | row.getDouble(4),
70 | row.getDouble(0)
71 | );
72 | }
73 |
74 |
75 | public static Dataset<Row> getDataFrame(SparkSession sqlContext, String file) {
76 | return sqlContext.read()
77 | .parquet(file);
78 | }
79 |
80 |
81 | private static SparkConf getSparkConfig(Properties prop, String[] jars) {
82 | return new SparkConf()
83 | .setAppName(prop.getProperty("com.iot.app.spark.app.name"))
84 | .setMaster(prop.getProperty("com.iot.app.spark.master"))
85 | .set("spark.cassandra.connection.host", prop.getProperty("com.iot.app.cassandra.host"))
86 | .set("spark.cassandra.connection.port", prop.getProperty("com.iot.app.cassandra.port"))
87 | .set("spark.cassandra.auth.username", prop.getProperty("com.iot.app.cassandra.username"))
88 | .set("spark.cassandra.auth.password", prop.getProperty("com.iot.app.cassandra.password"))
89 | .set("spark.cassandra.connection.keep_alive_ms", prop.getProperty("com.iot.app.cassandra.keep_alive"));
90 | }
91 |
92 | }
93 |
94 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/batch/BatchTrafficDataProcessor.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.batch;
2 |
3 | import com.apssouza.iot.common.dto.AggregateKey;
4 | import com.apssouza.iot.common.dto.POIData;
5 | import com.apssouza.iot.common.entity.WindowTrafficData;
6 | import com.apssouza.iot.common.IotDataTimestampComparator;
7 | import com.datastax.spark.connector.japi.CassandraJavaUtil;
8 | import com.apssouza.iot.common.entity.POITrafficData;
9 | import com.apssouza.iot.common.entity.TotalTrafficData;
10 | import com.apssouza.iot.common.GeoDistanceCalculator;
11 | import com.apssouza.iot.common.dto.IoTData;
12 |
13 | import org.apache.log4j.Logger;
14 | import org.apache.spark.api.java.JavaPairRDD;
15 | import org.apache.spark.api.java.JavaRDD;
16 | import org.apache.spark.api.java.function.Function;
17 | import org.apache.spark.broadcast.Broadcast;
18 |
19 | import scala.Tuple2;
20 |
21 | import java.text.SimpleDateFormat;
22 | import java.util.Calendar;
23 | import java.util.Date;
24 | import java.util.HashMap;
25 | import java.util.Map;
26 | import java.util.concurrent.TimeUnit;
27 |
28 | /**
29 | * Class to process IoT data stream and to produce traffic data details.
30 | *
31 | * @author abaghel
32 | */
33 | public class BatchTrafficDataProcessor {
34 | private static final Logger logger = Logger.getLogger(BatchTrafficDataProcessor.class);
35 |
36 | /**
37 | * Method to get total traffic counts of different types of vehicles for each route.
38 | *
39 | * @param filteredIotDataStream IoT data stream
40 | */
41 | public static void processTotalTrafficData(JavaRDD<IoTData> filteredIotDataStream) {
42 | // We need to get the count of vehicles grouped by routeId and vehicleType
43 | JavaPairRDD<AggregateKey, Long> countDStreamPair = filteredIotDataStream
44 | .mapToPair(iot -> new Tuple2<>(
45 | new AggregateKey(iot.getRouteId(), iot.getVehicleType()),
46 | 1L
47 | ))
48 | .reduceByKey((a, b) -> a + b);
49 |
50 | JavaRDD<TotalTrafficData> trafficDStream = countDStreamPair
51 | .map(BatchTrafficDataProcessor::transformToTotalTrafficData);
52 |
53 | persistTotalTraffic(trafficDStream);
54 | }
55 |
56 | private static void persistTotalTraffic(JavaRDD<TotalTrafficData> trafficDStream) {
57 | // Map Cassandra table column
58 | Map<String, String> columnNameMappings = new HashMap<>();
59 | columnNameMappings.put("routeId", "routeid");
60 | columnNameMappings.put("vehicleType", "vehicletype");
61 | columnNameMappings.put("totalCount", "totalcount");
62 | columnNameMappings.put("timeStamp", "timestamp");
63 | columnNameMappings.put("recordDate", "recorddate");
64 |
65 | CassandraJavaUtil.javaFunctions(trafficDStream).writerBuilder(
66 | "traffickeyspace",
67 | "total_traffic_batch",
68 | CassandraJavaUtil.mapToRow(TotalTrafficData.class, columnNameMappings)
69 | ).saveToCassandra();
70 | }
71 |
72 |
73 | /**
74 | * Method to get window traffic counts of different types of vehicles for each route, aggregated per one-day interval.
75 | *
76 | * @param filteredIotDataStream IoT data stream
77 | */
78 | public static void processWindowTrafficData(JavaRDD<IoTData> filteredIotDataStream) {
79 | Date minTimestamp = filteredIotDataStream.min(new IotDataTimestampComparator()).getTimestamp();
80 | Date maxTimestamp = filteredIotDataStream.max(new IotDataTimestampComparator()).getTimestamp();
81 | long diffInMillies = Math.abs(minTimestamp.getTime() - maxTimestamp.getTime());
82 | long diff = TimeUnit.DAYS.convert(diffInMillies, TimeUnit.MILLISECONDS);
83 | Calendar c = Calendar.getInstance();
84 | c.setTime(minTimestamp);
85 | c.set(Calendar.HOUR_OF_DAY, 0);
86 | c.set(Calendar.MINUTE, 0);
87 | Date start = c.getTime();
88 | for (int i = 0; i < diff; i++) {
89 | c.setTime(start);
90 | c.add(Calendar.DATE, 1);
91 | Date end = c.getTime();
92 | processInterval(filteredIotDataStream, start, end);
93 | start = end;
94 | }
95 | }
96 |
97 | private static void processInterval(JavaRDD<IoTData> data, Date start, Date end) {
98 | JavaRDD<IoTData> filteredData = filterByTime(data, start, end);
99 | JavaRDD<WindowTrafficData> trafficDStream = getWindowTrafficData(filteredData);
100 | persistWindowTraffic(trafficDStream);
101 | }
102 |
103 | private static void persistWindowTraffic(JavaRDD<WindowTrafficData> trafficDStream) {
104 | // Map Cassandra table column
105 | Map<String, String> columnNameMappings = new HashMap<>();
106 | columnNameMappings.put("routeId", "routeid");
107 | columnNameMappings.put("vehicleType", "vehicletype");
108 | columnNameMappings.put("totalCount", "totalcount");
109 | columnNameMappings.put("timeStamp", "timestamp");
110 | columnNameMappings.put("recordDate", "recorddate");
111 |
112 | // call CassandraStreamingJavaUtil function to save in DB
113 | CassandraJavaUtil.javaFunctions(trafficDStream).writerBuilder(
114 | "traffickeyspace",
115 | "window_traffic_batch",
116 | CassandraJavaUtil.mapToRow(WindowTrafficData.class, columnNameMappings)
117 | ).saveToCassandra();
118 | }
119 |
120 | private static JavaRDD<WindowTrafficData> getWindowTrafficData(JavaRDD<IoTData> filteredData) {
121 | JavaPairRDD<AggregateKey, Long> javaPairRDD = filteredData.mapToPair(iot -> new Tuple2<>(
122 | new AggregateKey(iot.getRouteId(), iot.getVehicleType()),
123 | 1L
124 | ));
125 |
126 | // Transform to RDD of WindowTrafficData
127 | return javaPairRDD.map(windowTrafficDataFunc);
128 | }
129 |
130 | /**
131 | * Filter the data in a given time period
132 | *
133 | * @param data | The dataset of data
134 | * @param start | Start of the time period
135 | * @param end | End of the time period
136 | * @return A set of data in the given time period
137 | */
138 | private static JavaRDD<IoTData> filterByTime(JavaRDD<IoTData> data, Date start, Date end) {
139 | return data.filter(measurement -> (
140 | measurement.getTimestamp().equals(start) || measurement.getTimestamp().after(start)
141 | ) && measurement.getTimestamp().before(end)
142 | );
143 | }
144 |
145 | /**
146 | * Method to get the vehicles which are in radius of POI and their distance from POI.
147 | *
148 | * @param nonFilteredIotDataStream original IoT data stream
149 | * @param broadcastPOIValues variable containing POI coordinates, route and vehicle types to monitor.
150 | */
151 | public static void processPOIData(JavaRDD<IoTData> nonFilteredIotDataStream, Broadcast<POIData> broadcastPOIValues) {
152 | // Filter by routeId,vehicleType and in POI range
153 | JavaRDD<IoTData> iotDataStreamFiltered = filterVehicleInPOIRange(nonFilteredIotDataStream, broadcastPOIValues);
154 |
155 | // pair with poi
156 | JavaPairRDD<IoTData, POIData> poiDStreamPair = iotDataStreamFiltered.mapToPair(
157 | iot -> new Tuple2<>(iot, broadcastPOIValues.value())
158 | );
159 |
160 | // Transform to RDD of POITrafficData
161 | JavaRDD<POITrafficData> trafficDStream = poiDStreamPair.map(BatchTrafficDataProcessor::transformToPoiTrafficData);
162 | persistPOI(trafficDStream);
163 | }
164 |
165 | private static void persistPOI(JavaRDD<POITrafficData> trafficDStream) {
166 | // Map Cassandra table column
167 | Map<String, String> columnNameMappings = new HashMap<>();
168 | columnNameMappings.put("vehicleId", "vehicleid");
169 | columnNameMappings.put("distance", "distance");
170 | columnNameMappings.put("vehicleType", "vehicletype");
171 | columnNameMappings.put("timeStamp", "timestamp");
172 |
173 | // call CassandraStreamingJavaUtil function to save in DB
174 | CassandraJavaUtil.javaFunctions(trafficDStream)
175 | .writerBuilder(
176 | "traffickeyspace",
177 | "poi_traffic_batch",
178 | CassandraJavaUtil.mapToRow(POITrafficData.class, columnNameMappings)
179 | )
180 | // .withConstantTTL(120)//keeping data for 2 minutes
181 | .saveToCassandra();
182 | }
183 |
184 | private static JavaRDD<IoTData> filterVehicleInPOIRange(JavaRDD<IoTData> nonFilteredIotDataStream, Broadcast<POIData> broadcastPOIValues) {
185 | return nonFilteredIotDataStream
186 | .filter(iot -> (
187 | iot.getRouteId().equals(broadcastPOIValues.value().getRoute())
188 | && iot.getVehicleType().contains(broadcastPOIValues.value().getVehicle())
189 | && GeoDistanceCalculator.isInPOIRadius(
190 | Double.valueOf(iot.getLatitude()),
191 | Double.valueOf(iot.getLongitude()),
192 | broadcastPOIValues.value().getLatitude(),
193 | broadcastPOIValues.value().getLongitude(),
194 | broadcastPOIValues.value().getRadius()
195 | )
196 | ));
197 | }
198 |
199 | //Method to create TotalTrafficData object from IoT data
200 | private static TotalTrafficData transformToTotalTrafficData(Tuple2<AggregateKey, Long> tuple) {
201 | logger.debug("Total Count : " + "key " + tuple._1().getRouteId() + "-" + tuple._1().getVehicleType() + " value " + tuple._2());
202 | TotalTrafficData trafficData = new TotalTrafficData();
203 | trafficData.setRouteId(tuple._1().getRouteId());
204 | trafficData.setVehicleType(tuple._1().getVehicleType());
205 | trafficData.setTotalCount(tuple._2());
206 | trafficData.setTimeStamp(new Date());
207 | trafficData.setRecordDate(new SimpleDateFormat("yyyy-MM-dd").format(new Date()));
208 | return trafficData;
209 | }
210 |
211 | //Function to create WindowTrafficData object from IoT data
212 | private static final Function<Tuple2<AggregateKey, Long>, WindowTrafficData> windowTrafficDataFunc = (tuple -> {
213 | logger.debug("Window Count : " + "key " + tuple._1().getRouteId() + "-" + tuple._1().getVehicleType() + " value " + tuple._2());
214 | WindowTrafficData trafficData = new WindowTrafficData();
215 | trafficData.setRouteId(tuple._1().getRouteId());
216 | trafficData.setVehicleType(tuple._1().getVehicleType());
217 | trafficData.setTotalCount(tuple._2());
218 | trafficData.setTimeStamp(new Date());
219 | trafficData.setRecordDate(new SimpleDateFormat("yyyy-MM-dd").format(new Date()));
220 | return trafficData;
221 | });
222 |
223 | //Method to create POITrafficData object from IoT data
224 | private static POITrafficData transformToPoiTrafficData(Tuple2<IoTData, POIData> tuple) {
225 | POITrafficData poiTraffic = new POITrafficData();
226 | poiTraffic.setVehicleId(tuple._1.getVehicleId());
227 | poiTraffic.setVehicleType(tuple._1.getVehicleType());
228 | poiTraffic.setTimeStamp(new Date());
229 | double distance = GeoDistanceCalculator.getDistance(
230 | Double.parseDouble(tuple._1.getLatitude()),
231 | Double.parseDouble(tuple._1.getLongitude()),
232 | tuple._2.getLatitude(), tuple._2.getLongitude()
233 | );
234 | logger.debug("Distance for " + tuple._1.getLatitude() + "," + tuple._1.getLongitude() + "," + tuple._2.getLatitude() + "," + tuple._2.getLongitude() + " = " + distance);
235 | poiTraffic.setDistance(distance);
236 | return poiTraffic;
237 | }
238 |
239 | }
240 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/batch/LatestOffSetReader.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.batch;
2 |
3 | import org.apache.kafka.common.TopicPartition;
4 | import org.apache.spark.api.java.JavaRDD;
5 | import org.apache.spark.sql.AnalysisException;
6 | import org.apache.spark.sql.Dataset;
7 | import org.apache.spark.sql.Row;
8 | import org.apache.spark.sql.SparkSession;
9 |
10 | import java.util.Map;
11 | import java.util.stream.Collectors;
12 |
13 | import scala.Tuple2;
14 |
15 | /**
16 | * Reads from HDFS the latest processed Kafka offsets.
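* Usage (sketch): Map<TopicPartition, Long> offsets = new LatestOffSetReader(sparkSession, file).read().offsets();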
17 | */
18 | public class LatestOffSetReader {
19 |
20 | private Dataset<Row> parquetData;
21 |
22 | final SparkSession sparkSession;
23 | final String file;
24 |
25 | public LatestOffSetReader(final SparkSession sparkSession, final String file) {
26 | this.sparkSession = sparkSession;
27 | this.file = file;
28 | }
29 |
30 | public LatestOffSetReader read() {
31 | parquetData = sparkSession.read().parquet(file);
32 | return this;
33 | }
34 |
35 | private JavaRDD<Row> query() throws AnalysisException {
36 | parquetData.createTempView("traffic");
37 | return parquetData.sqlContext()
38 | .sql("select max(untilOffset) as untilOffset, topic, kafkaPartition from traffic group by topic, kafkaPartition")
39 | .javaRDD();
40 | }
41 |
42 | public Map<TopicPartition, Long> offsets() throws AnalysisException {
43 | return this.query()
44 | .collect()
45 | .stream()
46 | .map(LatestOffSetReader::mapToPartition)
47 | .collect(Collectors.toMap(Tuple2::_1, Tuple2::_2));
48 | }
49 |
50 | private static Tuple2<TopicPartition, Long> mapToPartition(Row row) {
51 | TopicPartition topicPartition = new TopicPartition(
52 | row.getString(row.fieldIndex("topic")),
53 | row.getInt(row.fieldIndex("kafkaPartition"))
54 | );
55 | Long offSet = Long.valueOf(row.getString(row.fieldIndex("untilOffset")));
56 | return new Tuple2<>(
57 | topicPartition,
58 | offSet
59 | );
60 | }
61 | }
62 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/common/GeoDistanceCalculator.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.common;
2 |
3 | /**
4 | * Class to calculate the distance between two locations on earth using coordinates (latitude and longitude).
5 | * This class uses the "haversine" formula to calculate the great-circle distance between two points on earth.
6 | * http://www.movable-type.co.uk/scripts/latlong.html
7 | *
8 | * @author abaghel
9 | *
10 | */
11 | public class GeoDistanceCalculator {
12 | /**
13 | * Method to get the shortest distance over the earth's surface in kilometers between two locations
14 | *
15 | * @param lat1 latitude of location A
16 | * @param lon1 longitude of location A
17 | * @param lat2 latitude of location B
18 | * @param lon2 longitude of location B
19 | * @return distance between A and B in kilometers
20 | *
21 | */
22 | public static double getDistance(double lat1, double lon1, double lat2, double lon2) {
23 | //Earth radius in KM
24 | final int r = 6371;
25 |
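// Haversine formula: a = sin^2(dLat/2) + cos(lat1) * cos(lat2) * sin^2(dLon/2),
// c = 2 * atan2(sqrt(a), sqrt(1 - a)), distance = r * c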
26 | double latDistance = Math.toRadians(lat2 - lat1);
27 | double lonDistance = Math.toRadians(lon2 - lon1);
28 | double a = Math.sin(latDistance / 2) * Math.sin(latDistance / 2) + Math.cos(Math.toRadians(lat1))
29 | * Math.cos(Math.toRadians(lat2)) * Math.sin(lonDistance / 2) * Math.sin(lonDistance / 2);
30 | double c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
31 | double distance = r * c;
32 |
33 | return distance;
34 | }
35 |
36 | /**
37 | * Method to check if current location is in radius of point of interest (POI) location
38 | *
39 | * @param currentLat latitude of current location
40 | * @param currentLon longitude of current location
41 | * @param poiLat latitude of POI location
42 | * @param poiLon longitude of POI location
43 | * @param radius radius in Kilometer from POI
44 | * @return true if in POI radius otherwise false
45 | *
46 | */
47 | public static boolean isInPOIRadius(double currentLat, double currentLon, double poiLat, double poiLon, double radius) {
48 | double distance = getDistance(currentLat, currentLon, poiLat, poiLon);
49 | return distance <= radius;
50 | }
54 |
55 | }
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/common/IoTDataDeserializer.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.common;
2 |
3 | import com.fasterxml.jackson.databind.ObjectMapper;
4 | import com.apssouza.iot.common.dto.IoTData;
5 |
6 | import org.apache.kafka.common.serialization.Deserializer;
7 |
8 | import java.util.Map;
9 |
10 | /**
11 | * Class to deserialize JSON string to IoTData java object
12 | *
13 | * @author abaghel
14 | *
15 | */
16 | public class IoTDataDeserializer implements Deserializer<IoTData> {
17 |
18 | private static ObjectMapper objectMapper = new ObjectMapper();
19 |
20 | public IoTData fromBytes(byte[] bytes) {
21 | try {
22 | return objectMapper.readValue(bytes, IoTData.class);
23 | } catch (Exception e) {
24 | e.printStackTrace();
25 | }
26 | return null;
27 | }
28 |
29 | @Override
30 | public void configure(Map<String, ?> map, boolean b) {
31 |
32 | }
33 |
34 | @Override
35 | public IoTData deserialize(String s, byte[] bytes) {
36 | return fromBytes(bytes);
37 | }
38 |
39 | @Override
40 | public void close() {
41 |
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/common/IotDataTimestampComparator.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.common;
2 |
3 |
4 | import com.apssouza.iot.common.dto.IoTData;
5 |
6 | import java.io.Serializable;
7 | import java.util.Comparator;
8 |
9 | public class IotDataTimestampComparator implements Comparator<IoTData>, Serializable {
10 |
11 | @Override
12 | public int compare(IoTData o1, IoTData o2) {
13 | if(o1 == null && o2 == null) {
14 | return 0;
15 | } else if(o1 == null || o1.getTimestamp() == null) {
16 | return 1;
17 | } else if(o2 == null || o2.getTimestamp() == null) {
18 | return -1;
19 | } else {
20 | return o1.getTimestamp().compareTo(o2.getTimestamp());
21 | }
22 | }
23 |
24 | }
25 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/common/Mapper.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.common;
2 |
3 | import com.apssouza.iot.common.dto.IoTData;
4 |
5 | import java.text.ParseException;
6 | import java.text.SimpleDateFormat;
7 | import java.util.Date;
8 |
9 | public class Mapper {
10 |
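// Expected column order: [0]=date (yyyy-MM-dd), [1]=vehicleId, [2]=vehicleType, [3]=routeId, [4]=latitude, [5]=longitude, [6]=speed, [7]=fuelLevel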
11 | public static IoTData parseToIotData(Object[] columns) {
12 | Date timestamp1 = null;
13 | try {
14 | timestamp1 = new SimpleDateFormat("yyyy-MM-dd").parse(columns[0].toString());
15 | } catch (ParseException e) {
16 | throw new RuntimeException(e);
17 | }
18 | IoTData ioTData = new IoTData(
19 | columns[1].toString(),
20 | columns[2].toString(),
21 | columns[3].toString(),
22 | columns[4].toString(),
23 | columns[5].toString(),
24 | new java.sql.Date(timestamp1.getTime()),
25 | Double.valueOf(columns[6].toString()),
26 | Double.valueOf(columns[7].toString())
27 | );
28 | return ioTData;
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/common/ProcessorUtils.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.common;
2 |
3 | import org.apache.spark.SparkConf;
4 |
5 | import java.util.Properties;
6 |
7 | public class ProcessorUtils {
8 |
9 | public static SparkConf getSparkConf(Properties prop, String appName) {
10 |
11 | var sparkConf = new SparkConf()
12 | .setAppName(appName)
13 | .setMaster(prop.getProperty("com.iot.app.spark.master"))
14 | .set("spark.cassandra.connection.host", prop.getProperty("com.iot.app.cassandra.host"))
15 | .set("spark.cassandra.connection.port", prop.getProperty("com.iot.app.cassandra.port"))
16 | .set("spark.cassandra.auth.username", prop.getProperty("com.iot.app.cassandra.username"))
17 | .set("spark.cassandra.auth.password", prop.getProperty("com.iot.app.cassandra.password"))
18 | .set("spark.cassandra.connection.keep_alive_ms", prop.getProperty("com.iot.app.cassandra.keep_alive"));
19 |
20 | if ("local".equals(prop.getProperty("com.iot.app.env"))) {
21 | sparkConf.set("spark.driver.bindAddress", "127.0.0.1");
22 | }
23 | return sparkConf;
24 | }
25 |
26 | }
27 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/common/PropertyFileReader.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.common;
2 |
3 | import java.io.IOException;
4 | import java.io.InputStream;
5 | import java.util.Properties;
6 |
7 | import org.apache.log4j.Logger;
8 |
9 | /**
10 | * Utility class to read property file
11 | */
12 | public class PropertyFileReader {
13 |
14 | private static final Logger logger = Logger.getLogger(PropertyFileReader.class);
15 |
16 | private static Properties prop = new Properties();
17 |
18 | public static Properties readPropertyFile(String file) throws Exception {
19 | if (prop.isEmpty()) {
20 | InputStream input = PropertyFileReader.class.getClassLoader().getResourceAsStream(file);
21 | try {
22 | prop.load(input);
23 | } catch (IOException ex) {
24 | logger.error(ex);
25 | throw ex;
26 | } finally {
27 | if (input != null) {
28 | input.close();
29 | }
30 | }
31 | }
32 | return prop;
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/common/TimestampComparator.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.common;
2 |
3 |
4 | import com.apssouza.iot.common.dto.Measurement;
5 |
6 | import java.io.Serializable;
7 | import java.util.Comparator;
8 |
9 | public class TimestampComparator implements Comparator<Measurement>, Serializable {
10 |
11 | @Override
12 | public int compare(Measurement o1, Measurement o2) {
13 | if(o1 == null && o2 == null) {
14 | return 0;
15 | } else if(o1 == null || o1.getTimestamp() == null) {
16 | return 1;
17 | } else if(o2 == null || o2.getTimestamp() == null) {
18 | return -1;
19 | } else {
20 | return o1.getTimestamp().compareTo(o2.getTimestamp());
21 | }
22 | }
23 |
24 | }
25 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/common/dto/AggregateKey.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.common.dto;
2 |
3 | import java.io.Serializable;
4 |
5 | /**
6 | * Key class for calculation
7 | *
8 | * @author abaghel
9 | */
10 | public class AggregateKey implements Serializable {
11 |
12 | private String routeId;
13 | private String vehicleType;
14 |
15 | public AggregateKey(String routeId, String vehicleType) {
16 | super();
17 | this.routeId = routeId;
18 | this.vehicleType = vehicleType;
19 | }
20 |
21 | public String getRouteId() {
22 | return routeId;
23 | }
24 |
25 | public String getVehicleType() {
26 | return vehicleType;
27 | }
28 |
29 | @Override
30 | public int hashCode() {
31 | final int prime = 31;
32 | int result = 1;
33 | result = prime * result + ((routeId == null) ? 0 : routeId.hashCode());
34 | result = prime * result + ((vehicleType == null) ? 0 : vehicleType.hashCode());
35 | return result;
36 | }
37 |
38 | @Override
39 | public boolean equals(Object obj) {
40 | if (obj != null && obj instanceof AggregateKey) {
41 | AggregateKey other = (AggregateKey) obj;
42 | if (other.getRouteId() != null && other.getVehicleType() != null) {
43 | if ((other.getRouteId().equals(this.routeId)) && (other.getVehicleType().equals(this.vehicleType))) {
44 | return true;
45 | }
46 | }
47 | }
48 | return false;
49 | }
50 |
51 |
52 | }
53 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/common/dto/Coordinate.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.common.dto;
2 |
3 | import java.io.Serializable;
4 |
5 | /**
6 | * Coordinate data transfer object
7 | *
8 | * @author apssouza22
9 | */
10 | public class Coordinate implements Serializable {
11 |
12 | private double latitude;
13 |
14 | private double longitude;
15 |
16 | public Coordinate(double latitude, double longitude) {
17 | this.latitude = latitude;
18 | this.longitude = longitude;
19 | }
20 |
21 | public double getLatitude() {
22 | return latitude;
23 | }
24 |
25 | public void setLatitude(double latitude) {
26 | this.latitude = latitude;
27 | }
28 |
29 | public double getLongitude() {
30 | return longitude;
31 | }
32 |
33 | public void setLongitude(double longitude) {
34 | this.longitude = longitude;
35 | }
36 |
37 | @Override
38 | public boolean equals(Object o) {
39 | if (this == o) return true;
40 | if (o == null || getClass() != o.getClass()) return false;
41 |
42 | Coordinate that = (Coordinate) o;
43 |
44 | if (Double.compare(that.latitude, latitude) != 0) return false;
45 | if (Double.compare(that.longitude, longitude) != 0) return false;
46 |
47 | return true;
48 | }
49 |
50 | @Override
51 | public int hashCode() {
52 | int result;
53 | long temp;
54 | temp = Double.doubleToLongBits(latitude);
55 | result = (int) (temp ^ (temp >>> 32));
56 | temp = Double.doubleToLongBits(longitude);
57 | result = 31 * result + (int) (temp ^ (temp >>> 32));
58 | return result;
59 | }
60 |
61 | @Override
62 | public String toString() {
63 | return "Coordinate{" +
64 | "latitude=" + latitude +
65 | ", longitude=" + longitude +
66 | '}';
67 | }
68 | }
69 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/common/dto/IoTData.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.common.dto;
2 |
3 | import java.io.Serializable;
4 | import java.sql.Date;
5 | import java.util.Map;
6 |
7 | import com.fasterxml.jackson.annotation.JsonFormat;
8 |
9 | /**
10 | * Class to represent the IoT vehicle data.
11 | *
12 | */
13 | public class IoTData implements Serializable {
14 |
15 | private String vehicleId;
16 | private String vehicleType;
17 | private String routeId;
18 | private String latitude;
19 | private String longitude;
20 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss", timezone = "IST")
21 | private Date timestamp;
22 | private double speed;
23 | private double fuelLevel;
24 | private Map metaData;
25 |
26 | public IoTData() {
27 |
28 | }
29 |
30 | public IoTData(
31 | String vehicleId,
32 | String vehicleType,
33 | String routeId,
34 | String latitude,
35 | String longitude,
36 | Date timestamp,
37 | double speed,
38 | double fuelLevel
39 | ) {
40 | super();
41 | this.vehicleId = vehicleId;
42 | this.vehicleType = vehicleType;
43 | this.routeId = routeId;
44 | this.longitude = longitude;
45 | this.latitude = latitude;
46 | this.timestamp = timestamp;
47 | this.speed = speed;
48 | this.fuelLevel = fuelLevel;
49 | }
50 |
51 | public String getVehicleId() {
52 | return vehicleId;
53 | }
54 |
55 | public String getVehicleType() {
56 | return vehicleType;
57 | }
58 |
59 | public String getRouteId() {
60 | return routeId;
61 | }
62 |
63 | public String getLongitude() {
64 | return longitude;
65 | }
66 |
67 | public String getLatitude() {
68 | return latitude;
69 | }
70 |
71 | public Date getTimestamp() {
72 | return timestamp;
73 | }
74 |
75 | public double getSpeed() {
76 | return speed;
77 | }
78 |
79 | public double getFuelLevel() {
80 | return fuelLevel;
81 | }
82 |
83 |
84 | public void setMetaData(Map metaData) {
85 | this.metaData = metaData;
86 | }
87 |
88 | public Map getMetaData() {
89 | return metaData;
90 | }
91 | }
92 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/common/dto/Measurement.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.common.dto;
2 |
3 | import java.io.Serializable;
4 | import java.util.Date;
5 |
6 | /**
7 | * Measurement entity
8 | *
9 | * @author apssouza22
10 | */
11 | public class Measurement implements Serializable {
12 |
13 | private Coordinate coordinate;
14 |
15 | private Coordinate roundedCoordinate;
16 |
17 | private Date timestamp;
18 |
19 | public Measurement(Coordinate coordinate, Date timestamp) {
20 | this.coordinate = coordinate;
21 | this.timestamp = timestamp;
22 | }
23 |
24 | public Coordinate getCoordinate() {
25 | return coordinate;
26 | }
27 |
28 | public void setCoordinate(Coordinate coordinate) {
29 | this.coordinate = coordinate;
30 | }
31 |
32 | public Coordinate getRoundedCoordinate() {
33 | return roundedCoordinate;
34 | }
35 |
36 | public void setRoundedCoordinate(Coordinate roundedCoordinate) {
37 | this.roundedCoordinate = roundedCoordinate;
38 | }
39 |
40 | public Date getTimestamp() {
41 | return timestamp;
42 | }
43 |
44 | public void setTimestamp(Date timestamp) {
45 | this.timestamp = timestamp;
46 | }
47 |
48 | @Override
49 | public boolean equals(Object o) {
50 | if (this == o) return true;
51 | if (o == null || getClass() != o.getClass()) return false;
52 |
53 | Measurement that = (Measurement) o;
54 |
55 | if (coordinate != null ? !coordinate.equals(that.coordinate) : that.coordinate != null) return false;
56 | if (timestamp != null ? !timestamp.equals(that.timestamp) : that.timestamp != null) return false;
57 |
58 | return true;
59 | }
60 |
61 | @Override
62 | public int hashCode() {
63 | int result = coordinate != null ? coordinate.hashCode() : 0;
64 | result = 31 * result + (timestamp != null ? timestamp.hashCode() : 0);
65 | return result;
66 | }
67 |
68 | }
69 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/common/dto/POIData.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.common.dto;
2 |
3 | import java.io.Serializable;
4 |
5 | /**
6 | * Class to represent attributes of POI
7 | *
8 | * @author abaghel
9 | */
10 | public class POIData implements Serializable {
11 | private double latitude;
12 | private double longitude;
13 | private double radius;
14 | private String vehicle;
15 | private String route;
16 |
17 | public double getLatitude() {
18 | return latitude;
19 | }
20 |
21 | public void setLatitude(double latitude) {
22 | this.latitude = latitude;
23 | }
24 |
25 | public double getLongitude() {
26 | return longitude;
27 | }
28 |
29 | public void setLongitude(double longitude) {
30 | this.longitude = longitude;
31 | }
32 |
33 | public double getRadius() {
34 | return radius;
35 | }
36 |
37 | public void setRadius(double radius) {
38 | this.radius = radius;
39 | }
40 |
41 | public void setVehicle(final String truck) {
42 | this.vehicle = truck;
43 | }
44 |
45 | public void setRoute(final String route) {
46 | this.route = route;
47 | }
48 |
49 | public String getVehicle() {
50 | return vehicle;
51 | }
52 |
53 | public String getRoute() {
54 | return route;
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/common/entity/HeatMapData.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.common.entity;
2 |
3 | import java.io.Serializable;
4 | import java.util.Date;
5 |
6 | /**
7 | * Heatmap data entity
8 | *
9 | */
10 | public class HeatMapData implements Serializable {
11 |
12 | private double latitude;
13 | private double longitude;
14 | private int totalCount;
15 | private Date timeStamp;
16 |
17 | public HeatMapData(double latitude, double longitude, int count, Date timeStamp) {
18 | this.latitude = latitude;
19 | this.longitude = longitude;
20 | this.totalCount = count;
21 | this.timeStamp = timeStamp;
22 | }
23 |
24 | public double getLatitude() {
25 | return latitude;
26 | }
27 |
28 | public double getLongitude() {
29 | return longitude;
30 | }
31 |
32 | public int getTotalCount() {
33 | return totalCount;
34 | }
35 |
36 | public Date getTimeStamp() {
37 | return timeStamp;
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/common/entity/POITrafficData.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.common.entity;
2 |
3 | import java.io.Serializable;
4 | import java.util.Date;
5 | import com.fasterxml.jackson.annotation.JsonFormat;
6 |
7 | /**
8 | * Class to represent poi_traffic db table
9 | *
10 | */
11 | public class POITrafficData implements Serializable {
12 |
13 | private String vehicleId;
14 | private double distance;
15 | private String vehicleType;
16 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss", timezone="MST")
17 | private Date timeStamp;
18 |
19 | public String getVehicleId() {
20 | return vehicleId;
21 | }
22 |
23 | public void setVehicleId(String vehicleId) {
24 | this.vehicleId = vehicleId;
25 | }
26 |
27 | public double getDistance() {
28 | return distance;
29 | }
30 |
31 | public void setDistance(double distance) {
32 | this.distance = distance;
33 | }
34 |
35 | public String getVehicleType() {
36 | return vehicleType;
37 | }
38 |
39 | public void setVehicleType(String vehicleType) {
40 | this.vehicleType = vehicleType;
41 | }
42 |
43 | public Date getTimeStamp() {
44 | return timeStamp;
45 | }
46 |
47 | public void setTimeStamp(Date timeStamp) {
48 | this.timeStamp = timeStamp;
49 | }
50 |
51 | }
52 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/common/entity/TotalTrafficData.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.common.entity;
2 |
3 | import java.io.Serializable;
4 | import java.util.Date;
5 | import com.fasterxml.jackson.annotation.JsonFormat;
6 |
7 | /**
8 | * Class to represent total_traffic db table
9 | *
10 | *
11 | */
12 | public class TotalTrafficData implements Serializable{
13 |
14 | private String routeId;
15 | private String vehicleType;
16 | private long totalCount;
17 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss", timezone="MST")
18 | private Date timeStamp;
19 | private String recordDate;
20 |
21 | public String getRouteId() {
22 | return routeId;
23 | }
24 | public void setRouteId(String routeId) {
25 | this.routeId = routeId;
26 | }
27 | public String getVehicleType() {
28 | return vehicleType;
29 | }
30 | public void setVehicleType(String vehicleType) {
31 | this.vehicleType = vehicleType;
32 | }
33 | public long getTotalCount() {
34 | return totalCount;
35 | }
36 | public void setTotalCount(long totalCount) {
37 | this.totalCount = totalCount;
38 | }
39 | public Date getTimeStamp() {
40 | return timeStamp;
41 | }
42 | public void setTimeStamp(Date timeStamp) {
43 | this.timeStamp = timeStamp;
44 | }
45 |
46 | public String getRecordDate() {
47 | return recordDate;
48 | }
49 |
50 | public void setRecordDate(String recordDate) {
51 | this.recordDate = recordDate;
52 | }
53 |
54 |
55 | }
56 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/common/entity/WindowTrafficData.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.common.entity;
2 |
3 | import java.io.Serializable;
4 | import java.util.Date;
5 |
6 | import com.fasterxml.jackson.annotation.JsonFormat;
7 |
8 | /**
9 | * Class to represent window_traffic db table
10 | *
11 | */
12 | public class WindowTrafficData implements Serializable {
13 |
14 | private String routeId;
15 | private String vehicleType;
16 | private long totalCount;
17 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss", timezone = "MST")
18 | private Date timeStamp;
19 | private String recordDate;
20 |
21 | public String getRouteId() {
22 | return routeId;
23 | }
24 |
25 | public void setRouteId(String routeId) {
26 | this.routeId = routeId;
27 | }
28 |
29 | public String getVehicleType() {
30 | return vehicleType;
31 | }
32 |
33 | public void setVehicleType(String vehicleType) {
34 | this.vehicleType = vehicleType;
35 | }
36 |
37 | public long getTotalCount() {
38 | return totalCount;
39 | }
40 |
41 | public void setTotalCount(long totalCount) {
42 | this.totalCount = totalCount;
43 | }
44 |
45 | public Date getTimeStamp() {
46 | return timeStamp;
47 | }
48 |
49 | public void setTimeStamp(Date timeStamp) {
50 | this.timeStamp = timeStamp;
51 | }
52 |
53 | public String getRecordDate() {
54 | return recordDate;
55 | }
56 |
57 | public void setRecordDate(String recordDate) {
58 | this.recordDate = recordDate;
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/ml/ModelPipeline.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.ml;
2 |
3 | import org.apache.spark.ml.feature.VectorAssembler;
4 | import org.apache.spark.ml.linalg.Vectors;
5 | import org.apache.spark.ml.regression.LinearRegression;
6 | import org.apache.spark.ml.regression.LinearRegressionModel;
7 | import org.apache.spark.ml.regression.LinearRegressionTrainingSummary;
8 | import org.apache.spark.sql.Dataset;
9 | import org.apache.spark.sql.Row;
10 | import org.apache.spark.sql.SparkSession;
11 | import static org.apache.spark.sql.functions.col;
12 | import static org.apache.spark.sql.functions.concat;
13 | import static org.apache.spark.sql.functions.lit;
14 | import static org.apache.spark.sql.types.DataTypes.IntegerType;
15 |
16 | import java.io.IOException;
17 |
18 | public class ModelPipeline {
19 |
20 | private SparkSession sparkSession;
21 | private Dataset<Row> dataFrame;
22 | private Dataset<Row> trainingData;
23 | private Dataset<Row> testData;
24 | private LinearRegression lr;
25 | private LinearRegressionModel lrModel;
26 | private Dataset<Row> dataset;
27 |
28 | public ModelPipeline(SparkSession sparkSession) {
29 | this.sparkSession = sparkSession;
30 | }
31 |
32 | public static ModelPipeline getInstance(SparkSession sparkSession) {
33 | return new ModelPipeline(sparkSession);
34 | }
35 |
36 | public ModelPipeline loadData(String file) {
37 | this.dataFrame = sparkSession.read().parquet(file);
38 | return this;
39 | }
40 |
41 | public ModelPipeline transformData() {
42 | dataFrame = dataFrame.withColumnRenamed("speed", "label")
43 | .withColumn("grid", concat(col("latitude"), lit('|'), col("longitude")))
44 | .withColumn("lat",dataFrame.col("latitude").cast(IntegerType))
45 | .withColumn("long",dataFrame.col("longitude").cast(IntegerType));
46 |
47 | // Dataset df_sample2 = dataFrame2.withColumn("label", when(col("label").isNotNull().
48 | // and(col("label").equalTo(lit("Yes"))), lit(1)).otherwise(lit(0)));
49 |
50 |         // After VectorAssembler, the training dataset must contain 'label' and 'features' columns.
51 | // https://spark.apache.org/docs/latest/ml-features#vectorassembler
52 |         VectorAssembler assembler = new VectorAssembler()
53 |                 .setInputCols(new String[]{"lat", "long", "dayOfWeek"})
54 |                 .setOutputCol("features");
55 |         this.dataset = assembler.transform(dataFrame);
56 | dataset.show();
57 | dataset.printSchema();
58 | return this;
59 | }
60 |
61 | public ModelPipeline splitData(double training, double test) {
62 | var splits = dataset.randomSplit(new double[] { training, test }, 11L);
63 | this.trainingData = splits[0];
64 | this.testData = splits[1];
65 | return this;
66 | }
67 |
68 | public ModelPipeline createModel() {
69 | this.lr = new LinearRegression()
70 | .setMaxIter(10)
71 | .setRegParam(0.3)
72 | .setElasticNetParam(0.8);
73 | return this;
74 | }
75 |
76 | public ModelPipeline trainModel() {
77 | this.lrModel = lr.fit(trainingData);
78 |
79 | // Print the coefficients and intercept for linear regression.
80 | System.out.println("Coefficients: "
81 | + lrModel.coefficients() + " Intercept: " + lrModel.intercept());
82 |
83 | // Summarize the model over the training set and print out some metrics.
84 | LinearRegressionTrainingSummary trainingSummary = lrModel.summary();
85 | System.out.println("numIterations: " + trainingSummary.totalIterations());
86 | System.out.println("objectiveHistory: " + Vectors.dense(trainingSummary.objectiveHistory()));
87 | System.out.println("Training RMSE: " + trainingSummary.rootMeanSquaredError());
88 | System.out.println("Training r2: " + trainingSummary.r2());
89 | trainingSummary.residuals().show();
90 | return this;
91 | }
92 |
93 | public ModelPipeline evaluateModel() {
94 | var evaluationSummary = lrModel.evaluate(testData);
95 | System.out.println("Test RMSE: " + evaluationSummary.rootMeanSquaredError());
96 | System.out.println("Test R2: " + evaluationSummary.r2());
97 |
98 |         Dataset<Row> predictions = evaluationSummary.predictions();
99 | predictions.select(
100 | predictions.col("features"),
101 | predictions.col("label"),
102 | predictions.col("prediction")
103 | ).show();
104 |
105 | return this;
106 | }
107 |
108 | public ModelPipeline saveModel(String model) throws IOException {
109 | lrModel.save(model);
110 | return this;
111 | }
112 | }
113 |
--------------------------------------------------------------------------------
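Note on the transformData() step above: Spark ML estimators expect a single vector-typed "features" column next to the "label" column, and VectorAssembler is what builds it by packing the listed numeric input columns into one vector. The following minimal sketch (toy rows and a hypothetical class name, but the same Spark ML API) shows the shape of the data that LinearRegression.fit() receives:

import java.util.Arrays;
import java.util.List;

import org.apache.spark.ml.feature.VectorAssembler;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructType;

public class VectorAssemblerSketch {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .master("local[*]").appName("va-sketch").getOrCreate();
        StructType schema = new StructType()
                .add("lat", DataTypes.IntegerType)
                .add("long", DataTypes.IntegerType)
                .add("dayOfWeek", DataTypes.IntegerType)
                .add("label", DataTypes.DoubleType);
        List<Row> rows = Arrays.asList(           // toy values, not real IoT data
                RowFactory.create(53, -6, 2, 65.0),
                RowFactory.create(54, -7, 5, 48.0));
        Dataset<Row> df = spark.createDataFrame(rows, schema);
        // Pack the three numeric columns into a single 'features' vector column,
        // leaving 'label' untouched - exactly the input LinearRegression expects.
        Dataset<Row> assembled = new VectorAssembler()
                .setInputCols(new String[]{"lat", "long", "dayOfWeek"})
                .setOutputCol("features")
                .transform(df);
        assembled.show(); // 'features' holds e.g. [53.0,-6.0,2.0]
        spark.stop();
    }
}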
/iot-spark-processor/src/main/java/com/apssouza/iot/ml/SpeedPrediction.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.ml;
2 |
3 | import com.apssouza.iot.common.PropertyFileReader;
4 |
5 | import org.apache.spark.SparkConf;
6 | import org.apache.spark.ml.linalg.Vectors;
7 | import org.apache.spark.ml.regression.LinearRegressionModel;
8 | import org.apache.spark.sql.SparkSession;
9 |
10 | import java.util.Properties;
11 |
12 | public class SpeedPrediction {
13 |
14 | public static void main(String[] args) throws Exception {
15 | String fileProp = "iot-spark.properties";
16 | Properties prop = PropertyFileReader.readPropertyFile(fileProp);
17 |
18 | var file = prop.getProperty("com.iot.app.hdfs") + "iot-data-parque";
19 | var model = prop.getProperty("com.iot.app.hdfs") + "model-prediction";
20 | String[] jars = {prop.getProperty("com.iot.app.jar")};
21 | prop.setProperty("com.iot.app.spark.app.name", "Iot ML");
22 |
23 | var conf = getSparkConf(prop);
24 | conf.setJars(jars);
25 | var sparkSession = SparkSession.builder().config(conf).getOrCreate();
26 | ModelPipeline
27 | .getInstance(sparkSession)
28 | .loadData(file)
29 | .transformData()
30 | .splitData(0.8, 0.2)
31 | .createModel()
32 | .trainModel()
33 | .evaluateModel()
34 | .saveModel(model)
35 | ;
36 |
37 | LinearRegressionModel sameModel = LinearRegressionModel.load(model);
38 |         var newData = Vectors.dense(new double[]{33.0, 7.0, 3.0}); // lat, long, dayOfWeek: must match the three training features
39 | double prediction = sameModel.predict(newData);
40 | System.out.println("Model Prediction on New Data = " + prediction);
41 |
42 | }
43 | public static SparkConf getSparkConf(Properties prop) {
44 | var sparkConf = new SparkConf()
45 | .setAppName("ml-prediction")
46 | .setMaster(prop.getProperty("com.iot.app.spark.master"));
47 | if ("local".equals(prop.getProperty("com.iot.app.env"))) {
48 | sparkConf.set("spark.driver.bindAddress", "127.0.0.1");
49 | }
50 | return sparkConf;
51 | }
52 |
53 | }
54 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/streaming/PointOfInterestProcessor.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.streaming;
2 |
3 | import com.apssouza.iot.common.dto.IoTData;
4 | import com.apssouza.iot.common.dto.POIData;
5 | import com.apssouza.iot.common.entity.POITrafficData;
6 | import com.apssouza.iot.common.GeoDistanceCalculator;
7 | import com.datastax.spark.connector.japi.CassandraJavaUtil;
8 | import static com.datastax.spark.connector.japi.CassandraStreamingJavaUtil.javaFunctions;
9 |
10 | import org.apache.log4j.Logger;
11 | import org.apache.spark.broadcast.Broadcast;
12 | import org.apache.spark.streaming.api.java.JavaDStream;
13 | import org.joda.time.Duration;
14 |
15 | import java.util.Date;
16 | import java.util.HashMap;
17 | import java.util.Map;
18 |
19 | import scala.Tuple2;
20 |
21 | public class PointOfInterestProcessor {
22 |
23 |     private static final Logger logger = Logger.getLogger(PointOfInterestProcessor.class);
24 |
25 | /**
26 | * Method to get and save vehicles that are in POI radius and their distance from POI.
27 | *
28 | * @param dataStream original IoT data stream
29 | * @param broadcastPOIValues variable containing POI coordinates, route and vehicle types to monitor.
30 | */
31 | public static void processPOIData(
32 |             JavaDStream<IoTData> dataStream,
33 |             Broadcast<POIData> broadcastPOIValues
34 | ) {
35 |
36 |         JavaDStream<POITrafficData> trafficDStream = dataStream
37 | .filter(iot -> filterVehicleInPOI(iot, broadcastPOIValues))
38 | .mapToPair(iot -> new Tuple2<>(iot, broadcastPOIValues.value()))
39 | .map(PointOfInterestProcessor::transformToPOITrafficData);
40 |
41 | saveToCassandra(trafficDStream);
42 | }
43 |
44 |     private static void saveToCassandra(final JavaDStream<POITrafficData> trafficDStream) {
45 | // Map Cassandra table column
46 |         Map<String, String> columnNameMappings = new HashMap<>();
47 | columnNameMappings.put("vehicleId", "vehicleid");
48 | columnNameMappings.put("distance", "distance");
49 | columnNameMappings.put("vehicleType", "vehicletype");
50 | columnNameMappings.put("timeStamp", "timestamp");
51 |
52 | // call CassandraStreamingJavaUtil function to save in DB
53 | javaFunctions(trafficDStream)
54 | .writerBuilder(
55 | "traffickeyspace",
56 | "poi_traffic",
57 | CassandraJavaUtil.mapToRow(POITrafficData.class, columnNameMappings)
58 | )
59 |                 .withConstantTTL(Duration.standardSeconds(120)) // keep each record for 120 seconds
60 | .saveToCassandra();
61 | }
62 |
63 | /**
64 | * Filter vehicles in the point of interest range
65 | * @param iot
66 | * @param broadcastPOIValues
67 | * @return
68 | */
69 |     private static boolean filterVehicleInPOI(IoTData iot, Broadcast<POIData> broadcastPOIValues) {
70 | return GeoDistanceCalculator.isInPOIRadius(
71 | Double.valueOf(iot.getLatitude()),
72 | Double.valueOf(iot.getLongitude()),
73 | broadcastPOIValues.value().getLatitude(),
74 | broadcastPOIValues.value().getLongitude(),
75 | broadcastPOIValues.value().getRadius()
76 | );
77 | }
78 |
79 |     private static POITrafficData transformToPOITrafficData(Tuple2<IoTData, POIData> tuple) {
80 | POITrafficData poiTraffic = new POITrafficData();
81 | poiTraffic.setVehicleId(tuple._1.getVehicleId());
82 | poiTraffic.setVehicleType(tuple._1.getVehicleType());
83 | poiTraffic.setTimeStamp(new Date());
84 | double distance = GeoDistanceCalculator.getDistance(
85 |                 Double.parseDouble(tuple._1.getLatitude()),
86 |                 Double.parseDouble(tuple._1.getLongitude()),
87 | tuple._2.getLatitude(), tuple._2.getLongitude()
88 | );
89 | logger.debug("Distance for " + tuple._1.getLatitude() + "," + tuple._1.getLongitude() + "," +
90 | tuple._2.getLatitude() + "," + tuple._2.getLongitude() + " = " + distance);
91 |
92 | poiTraffic.setDistance(distance);
93 | return poiTraffic;
94 | }
95 | }
96 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/java/com/apssouza/iot/streaming/RealTimeHeatMapProcessor.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.streaming;
2 |
3 | import com.apssouza.iot.common.dto.Coordinate;
4 | import com.apssouza.iot.common.dto.Measurement;
5 | import com.apssouza.iot.common.entity.HeatMapData;
6 | import com.datastax.spark.connector.japi.CassandraJavaUtil;
7 | import com.apssouza.iot.common.dto.IoTData;
8 |
9 | import org.apache.spark.streaming.Durations;
10 | import org.apache.spark.streaming.api.java.JavaDStream;
11 |
12 | import scala.Tuple2;
13 |
14 | import java.io.IOException;
15 | import java.util.Date;
16 | import java.util.HashMap;
17 | import java.util.Map;
18 |
19 | import static com.datastax.spark.connector.japi.CassandraStreamingJavaUtil.javaFunctions;
20 |
21 | /**
22 | * Process the real-time heat map calculation
23 | *
24 | * @author apssouza22
25 | */
26 | public class RealTimeHeatMapProcessor {
27 |
28 |
29 |     public static void processHeatMap(JavaDStream<IoTData> streaming) throws IOException {
30 |         JavaDStream<HeatMapData> heatMapStream = streaming
31 | .map(RealTimeHeatMapProcessor::mapToMeasurement)
32 | .map(RealTimeHeatMapProcessor::roundCoordinates)
33 | .mapToPair(measurement -> new Tuple2<>(measurement.getRoundedCoordinate(), 1))
34 | .reduceByKeyAndWindow((a, b) -> a + b, Durations.seconds(120), Durations.seconds(10))
35 | .map(RealTimeHeatMapProcessor::mapHeatMap);
36 |
37 | save(heatMapStream);
38 | }
39 |
40 | /**
41 |      * Converts an IoT event into a Measurement with a parsed coordinate.
42 |      *
43 |      * @return the event's coordinate and timestamp wrapped as a Measurement
44 | */
45 | private static Measurement mapToMeasurement(IoTData row) {
46 | Coordinate coordinate = new Coordinate(
47 | Double.parseDouble(row.getLatitude()),
48 | Double.parseDouble(row.getLongitude())
49 | );
50 | return new Measurement(coordinate, row.getTimestamp());
51 | }
52 |
53 | /**
54 | * Maps the measurements by rounding the coordinate. The world is defined by a grid of boxes, each box has a size of
55 | * 0.0005 by 0.0005. Every mapping will be rounded to the center of the box it is part of. Boundary cases will be
56 | * rounded up, so a coordinate on (-0.00025,0) will be rounded to (0,0), while the coordinate (0.00025,0) will be
57 | * rounded to (0.0005,0).
58 | *
59 | * @param measurement
60 | * @return A set of measurements with rounded coordinates
61 | */
62 | private static Measurement roundCoordinates(Measurement measurement) {
63 |         double roundedLatitude = 5 * Math.round(measurement.getCoordinate().getLatitude() * 10000 / 5) / 10000.0; // 10000.0 avoids integer division truncating the result
64 |         double roundedLongitude = 5 * Math.round(measurement.getCoordinate().getLongitude() * 10000 / 5) / 10000.0;
65 |
66 | Coordinate roundedCoordinate = new Coordinate(roundedLatitude, roundedLongitude);
67 | measurement.setRoundedCoordinate(roundedCoordinate);
68 | return measurement;
69 | }
70 |
71 |     private static HeatMapData mapHeatMap(Tuple2<Coordinate, Integer> tuple) {
72 | Coordinate coordinate = tuple._1();
73 | return new HeatMapData(coordinate.getLatitude(), coordinate.getLongitude(), tuple._2(), new Date());
74 | }
75 |
76 |
77 |     private static void save(JavaDStream<HeatMapData> heatMapStream) {
78 | // Map Cassandra table column
79 |         Map<String, String> columnNameMappings = new HashMap<>();
80 | columnNameMappings.put("latitude", "latitude");
81 | columnNameMappings.put("longitude", "longitude");
82 | columnNameMappings.put("totalCount", "totalcount");
83 | columnNameMappings.put("timeStamp", "timestamp");
84 |
85 | // call CassandraStreamingJavaUtil function to save in DB
86 | javaFunctions(heatMapStream).writerBuilder(
87 | "traffickeyspace",
88 | "heat_map",
89 | CassandraJavaUtil.mapToRow(HeatMapData.class, columnNameMappings)
90 | ).saveToCassandra();
91 | }
92 |
93 | }
94 |
--------------------------------------------------------------------------------
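The boundary behaviour described in the roundCoordinates() javadoc is easy to check by hand. A tiny standalone sketch (hypothetical class name, plain Java, no Spark required) reproduces the grid math, including the half-box boundary cases from the comment:

public class GridRoundingSketch {

    // Rounds a coordinate to its 0.0005-degree grid box, mirroring roundCoordinates().
    static double roundToGrid(double coordinate) {
        return 5 * Math.round(coordinate * 10000 / 5) / 10000.0;
    }

    public static void main(String[] args) {
        System.out.println(roundToGrid(-0.00025));  // boundary case: rounds up to 0.0
        System.out.println(roundToGrid(0.00025));   // boundary case: rounds up to 5.0E-4
        System.out.println(roundToGrid(53.315853)); // 53.316
    }
}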
/iot-spark-processor/src/main/java/com/apssouza/iot/streaming/RealtimeTrafficDataProcessor.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.streaming;
2 |
3 | import static com.datastax.spark.connector.japi.CassandraStreamingJavaUtil.javaFunctions;
4 |
5 | import java.text.SimpleDateFormat;
6 | import java.util.Date;
7 | import java.util.HashMap;
8 |
9 | import org.apache.log4j.Logger;
10 | import org.apache.spark.streaming.Durations;
11 | import org.apache.spark.streaming.State;
12 | import org.apache.spark.streaming.StateSpec;
13 | import org.apache.spark.streaming.api.java.JavaDStream;
14 |
15 | import com.apssouza.iot.common.dto.AggregateKey;
16 | import com.apssouza.iot.common.entity.WindowTrafficData;
17 | import com.datastax.spark.connector.japi.CassandraJavaUtil;
18 | import com.apssouza.iot.common.entity.TotalTrafficData;
19 | import com.apssouza.iot.common.dto.IoTData;
20 |
21 | import scala.Tuple2;
22 |
23 | /**
24 | * Class to process IoT data stream and to produce traffic data details.
25 | *
26 | * @author abaghel
27 | */
28 | public class RealtimeTrafficDataProcessor {
29 |
30 | private static final Logger logger = Logger.getLogger(RealtimeTrafficDataProcessor.class);
31 |
32 | /**
33 | * Method to get window traffic counts of different type of vehicles for each route. Window duration = 30 seconds
34 | * and Slide interval = 10 seconds
35 | *
36 | * @param filteredIotDataStream IoT data stream
37 | */
38 |     public static void processWindowTrafficData(JavaDStream<IoTData> filteredIotDataStream) {
39 | // reduce by key and window (30 sec window and 10 sec slide).
40 |         JavaDStream<WindowTrafficData> trafficDStream = filteredIotDataStream
41 | .mapToPair(iot -> new Tuple2<>(new AggregateKey(iot.getRouteId(), iot.getVehicleType()), 1L))
42 | .reduceByKeyAndWindow((a, b) -> a + b, Durations.seconds(30), Durations.seconds(10))
43 | .map(RealtimeTrafficDataProcessor::mapToWindowTrafficData);
44 |
45 |         saveWindowTrafficData(trafficDStream);
46 | }
47 |
48 | /**
49 | * Method to get total traffic counts of different type of vehicles for each route.
50 | *
51 | * @param filteredIotDataStream IoT data stream
52 | */
53 |     public static void processTotalTrafficData(JavaDStream<IoTData> filteredIotDataStream) {
54 | // Need to keep state for total count
55 |         StateSpec<AggregateKey, Long, Long, Tuple2<AggregateKey, Long>> stateSpec = StateSpec
56 | .function(RealtimeTrafficDataProcessor::updateState)
57 | .timeout(Durations.seconds(3600));
58 |
59 | // We need to get count of vehicle group by routeId and vehicleType
60 |         JavaDStream<TotalTrafficData> trafficDStream = filteredIotDataStream
61 | .mapToPair(iot -> new Tuple2<>(new AggregateKey(iot.getRouteId(), iot.getVehicleType()), 1L))
62 | .reduceByKey((a, b) -> a + b)
63 | .mapWithState(stateSpec)
64 |                 .map(tuple2 -> tuple2) // identity map narrows the MapWithStateDStream back to a plain JavaDStream
65 | .map(RealtimeTrafficDataProcessor::mapToTrafficData);
66 |
67 | saveTotalTrafficData(trafficDStream);
68 | }
69 |
70 |     private static void saveTotalTrafficData(final JavaDStream<TotalTrafficData> trafficDStream) {
71 | // Map Cassandra table column
72 |         HashMap<String, String> columnNameMappings = new HashMap<>();
73 | columnNameMappings.put("routeId", "routeid");
74 | columnNameMappings.put("vehicleType", "vehicletype");
75 | columnNameMappings.put("totalCount", "totalcount");
76 | columnNameMappings.put("timeStamp", "timestamp");
77 | columnNameMappings.put("recordDate", "recorddate");
78 |
79 | // call CassandraStreamingJavaUtil function to save in DB
80 | javaFunctions(trafficDStream).writerBuilder(
81 | "traffickeyspace",
82 | "total_traffic",
83 | CassandraJavaUtil.mapToRow(TotalTrafficData.class, columnNameMappings)
84 | ).saveToCassandra();
85 | }
86 |
87 |
88 |     private static void saveWindowTrafficData(final JavaDStream<WindowTrafficData> trafficDStream) {
89 | // Map Cassandra table column
90 |         HashMap<String, String> columnNameMappings = new HashMap<>();
91 | columnNameMappings.put("routeId", "routeid");
92 | columnNameMappings.put("vehicleType", "vehicletype");
93 | columnNameMappings.put("totalCount", "totalcount");
94 | columnNameMappings.put("timeStamp", "timestamp");
95 | columnNameMappings.put("recordDate", "recorddate");
96 |
97 | // call CassandraStreamingJavaUtil function to save in DB
98 | javaFunctions(trafficDStream).writerBuilder(
99 | "traffickeyspace",
100 | "window_traffic",
101 | CassandraJavaUtil.mapToRow(WindowTrafficData.class, columnNameMappings)
102 | ).saveToCassandra();
103 | }
104 |
105 | /**
106 |      * Function to create a WindowTrafficData object from an aggregated (route, vehicle type) count
107 | *
108 | * @param tuple
109 | * @return
110 | */
111 |     private static WindowTrafficData mapToWindowTrafficData(Tuple2<AggregateKey, Long> tuple) {
112 | logger.debug("Window Count : " +
113 | "key " + tuple._1().getRouteId() + "-" + tuple._1().getVehicleType() +
114 | " value " + tuple._2());
115 |
116 | WindowTrafficData trafficData = new WindowTrafficData();
117 | trafficData.setRouteId(tuple._1().getRouteId());
118 | trafficData.setVehicleType(tuple._1().getVehicleType());
119 | trafficData.setTotalCount(tuple._2());
120 | trafficData.setTimeStamp(new Date());
121 | trafficData.setRecordDate(new SimpleDateFormat("yyyy-MM-dd").format(new Date()));
122 | return trafficData;
123 | }
124 |
125 |     private static TotalTrafficData mapToTrafficData(Tuple2<AggregateKey, Long> tuple) {
126 | logger.debug(
127 | "Total Count : " + "key " + tuple._1().getRouteId() + "-" + tuple._1().getVehicleType() + " value " +
128 | tuple._2());
129 | TotalTrafficData trafficData = new TotalTrafficData();
130 | trafficData.setRouteId(tuple._1().getRouteId());
131 | trafficData.setVehicleType(tuple._1().getVehicleType());
132 | trafficData.setTotalCount(tuple._2());
133 | trafficData.setTimeStamp(new Date());
134 | trafficData.setRecordDate(new SimpleDateFormat("yyyy-MM-dd").format(new Date()));
135 | return trafficData;
136 | }
137 |
138 |
139 | /**
140 | * Function to get running sum by maintaining the state
141 | *
142 | * @param key
143 | * @param currentSum
144 | * @param state
145 | * @return
146 | */
147 |     private static Tuple2<AggregateKey, Long> updateState(
148 |             AggregateKey key,
149 |             org.apache.spark.api.java.Optional<Long> currentSum,
150 |             State<Long> state
151 |     ) {
152 |         // orElse replaces the original get()/null check on the batch count
153 |         long batchSum = currentSum.orElse(0L);
154 |         long totalSum = batchSum + (state.exists() ? state.get() : 0L);
155 |         Tuple2<AggregateKey, Long> total = new Tuple2<>(key, totalSum);
156 | state.update(totalSum);
157 | return total;
158 | }
159 |
160 | }
161 |
--------------------------------------------------------------------------------
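The mapWithState() running sum in processTotalTrafficData() can look opaque if you have not used stateful streaming before: each micro-batch delivers a partial count per (route, vehicle type) key, updateState() adds it to whatever the state already holds, and the accumulated total is both stored back and emitted downstream. A plain-Java simulation (hypothetical values, no Spark) of two consecutive batches for one key:

import java.util.HashMap;
import java.util.Map;

public class RunningSumSketch {
    public static void main(String[] args) {
        long[] batchCounts = {4L, 7L};              // hypothetical per-batch counts for one key
        Map<String, Long> state = new HashMap<>();  // stands in for Spark's State<Long>
        String key = "Route-37|Bus";

        for (long count : batchCounts) {
            long totalSum = count + state.getOrDefault(key, 0L);
            state.put(key, totalSum);               // state.update(totalSum) in the processor
            System.out.println("emitted total = " + totalSum); // prints 4, then 11
        }
    }
}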
/iot-spark-processor/src/main/java/com/apssouza/iot/streaming/StreamProcessor.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.streaming;
2 |
3 | import com.apssouza.iot.common.dto.IoTData;
4 | import com.apssouza.iot.common.dto.POIData;
5 |
6 | import org.apache.kafka.clients.consumer.ConsumerRecord;
7 | import org.apache.log4j.Logger;
8 | import org.apache.spark.api.java.JavaRDD;
9 | import org.apache.spark.api.java.Optional;
10 | import org.apache.spark.api.java.function.Function2;
11 | import org.apache.spark.broadcast.Broadcast;
12 | import org.apache.spark.sql.Dataset;
13 | import org.apache.spark.sql.Row;
14 | import org.apache.spark.sql.SaveMode;
15 | import org.apache.spark.sql.SparkSession;
16 | import org.apache.spark.streaming.Durations;
17 | import org.apache.spark.streaming.State;
18 | import org.apache.spark.streaming.StateSpec;
19 | import org.apache.spark.streaming.api.java.JavaDStream;
20 | import org.apache.spark.streaming.api.java.JavaMapWithStateDStream;
21 | import org.apache.spark.streaming.api.java.JavaPairDStream;
22 | import org.apache.spark.streaming.kafka010.HasOffsetRanges;
23 | import org.apache.spark.streaming.kafka010.OffsetRange;
24 |
25 | import java.io.IOException;
26 | import java.io.Serializable;
27 | import java.util.ArrayList;
28 | import java.util.HashMap;
29 | import java.util.Iterator;
30 | import java.util.List;
31 | import java.util.Map;
32 |
33 | import scala.Tuple2;
34 |
35 | public class StreamProcessor implements Serializable {
36 |
37 | private static final Logger logger = Logger.getLogger(StreamProcessor.class);
38 |
39 |     final JavaDStream<ConsumerRecord<String, IoTData>> directKafkaStream;
40 |     private JavaDStream<IoTData> transformedStream;
41 |     private JavaDStream<IoTData> filteredStream;
42 |
43 |
44 |     public StreamProcessor(JavaDStream<ConsumerRecord<String, IoTData>> directKafkaStream) {
45 | this.directKafkaStream = directKafkaStream;
46 | }
47 |
48 |     private static JavaRDD<IoTData> transformRecord(JavaRDD<ConsumerRecord<String, IoTData>> item) {
49 | OffsetRange[] offsetRanges;
50 | offsetRanges = ((HasOffsetRanges) item.rdd()).offsetRanges();
51 | return item.mapPartitionsWithIndex(addMetaData(offsetRanges), true);
52 | }
53 |
54 |     private static Function2<Integer, Iterator<ConsumerRecord<String, IoTData>>, Iterator<IoTData>> addMetaData(
55 | final OffsetRange[] offsetRanges
56 | ) {
57 | return (index, items) -> {
58 |             List<IoTData> list = new ArrayList<>();
59 | while (items.hasNext()) {
60 |                 ConsumerRecord<String, IoTData> next = items.next();
61 | IoTData dataItem = next.value();
62 |
63 |                 Map<String, String> meta = new HashMap<>();
64 | meta.put("topic", offsetRanges[index].topic());
65 | meta.put("fromOffset", "" + offsetRanges[index].fromOffset());
66 | meta.put("kafkaPartition", "" + offsetRanges[index].partition());
67 | meta.put("untilOffset", "" + offsetRanges[index].untilOffset());
68 | meta.put("dayOfWeek", "" + dataItem.getTimestamp().toLocalDate().getDayOfWeek().getValue());
69 |
70 | dataItem.setMetaData(meta);
71 | list.add(dataItem);
72 | }
73 | return list.iterator();
74 | };
75 | }
76 |
77 | public StreamProcessor transform() {
78 | this.transformedStream = directKafkaStream.transform(StreamProcessor::transformRecord);
79 | return this;
80 | }
81 |
82 | public StreamProcessor appendToHDFS(final SparkSession sql, final String file) {
83 | transformedStream.foreachRDD(rdd -> {
84 | if (rdd.isEmpty()) {
85 | return;
86 | }
87 |             Dataset<Row> dataFrame = sql.createDataFrame(rdd, IoTData.class);
88 |             Dataset<Row> dfStore = dataFrame.selectExpr(
89 | "fuelLevel", "latitude", "longitude",
90 | "routeId", "speed", "timestamp", "vehicleId", "vehicleType",
91 | "metaData.fromOffset as fromOffset",
92 | "metaData.untilOffset as untilOffset",
93 | "metaData.kafkaPartition as kafkaPartition",
94 | "metaData.topic as topic",
95 | "metaData.dayOfWeek as dayOfWeek"
96 | );
97 | dfStore.printSchema();
98 | dfStore.write()
99 | .partitionBy("topic", "kafkaPartition", "dayOfWeek")
100 | .mode(SaveMode.Append)
101 | .parquet(file);
102 | }
103 | );
104 | return this;
105 | }
106 |
107 |     public StreamProcessor processPOIData(final Broadcast<POIData> broadcastPOIValues) {
108 | PointOfInterestProcessor.processPOIData(transformedStream, broadcastPOIValues);
109 | return this;
110 | }
111 |
112 | public StreamProcessor processTotalTrafficData() {
113 | RealtimeTrafficDataProcessor.processTotalTrafficData(filteredStream);
114 | return this;
115 | }
116 |
117 | public StreamProcessor processWindowTrafficData() {
118 | RealtimeTrafficDataProcessor.processWindowTrafficData(filteredStream);
119 | return this;
120 | }
121 |
122 | public StreamProcessor processHeatMap() throws IOException {
123 | RealTimeHeatMapProcessor.processHeatMap(filteredStream);
124 | return this;
125 | }
126 |
127 |
128 | public StreamProcessor filterVehicle() {
129 |         // We need the filtered stream for the total and window traffic calculations
130 | var map = mapToPair(transformedStream);
131 | var key = reduceByKey(map);
132 | var state = mapWithState(key);
133 | this.filteredStream = filterByState(state).map(tuple -> tuple._1);
134 | return this;
135 | }
136 |
137 |     private JavaDStream<Tuple2<IoTData, Boolean>> filterByState(final JavaMapWithStateDStream<String, IoTData, Boolean, Tuple2<IoTData, Boolean>> state) {
138 |         var dsStream = state.filter(tuple -> tuple._2.equals(Boolean.FALSE));
139 | logger.info("Starting Stream Processing");
140 | dsStream.print();
141 | return dsStream;
142 | }
143 |
144 |     private JavaMapWithStateDStream<String, IoTData, Boolean, Tuple2<IoTData, Boolean>> mapWithState(final JavaPairDStream<String, IoTData> key) {
145 | // Check vehicle Id is already processed
146 |         StateSpec<String, IoTData, Boolean, Tuple2<IoTData, Boolean>> stateFunc = StateSpec
147 | .function(StreamProcessor::updateState)
148 | .timeout(Durations.seconds(3600));//maintain state for one hour
149 |
150 | var dStream = key.mapWithState(stateFunc);
151 | dStream.print();
152 | return dStream;
153 | }
154 |
155 |     private JavaPairDStream<String, IoTData> reduceByKey(final JavaPairDStream<String, IoTData> map) {
156 |         JavaPairDStream<String, IoTData> dStream = map.reduceByKey((a, b) -> a);
157 | dStream.print();
158 | return dStream;
159 | }
160 |
161 |     private JavaPairDStream<String, IoTData> mapToPair(final JavaDStream<IoTData> stream) {
162 | var dStream = stream.mapToPair(iot -> new Tuple2<>(iot.getVehicleId(), iot));
163 | dStream.print();
164 | return dStream;
165 | }
166 |
167 | public StreamProcessor cache() {
168 | this.filteredStream.cache();
169 | return this;
170 | }
171 |
172 | /**
173 |      * Create a tuple (IoTData, boolean); the boolean is set to true when the vehicle already exists in the state
174 | *
175 | * @param str
176 | * @param iot
177 | * @param state
178 | * @return
179 | */
180 |     private static Tuple2<IoTData, Boolean> updateState(String str, Optional<IoTData> iot, State<Boolean> state) {
181 | Tuple2 vehicle = new Tuple2<>(iot.get(), false);
182 | if (state.exists()) {
183 | vehicle = new Tuple2<>(iot.get(), true);
184 | } else {
185 | state.update(Boolean.TRUE);
186 | }
187 | return vehicle;
188 | }
189 |
190 | }
191 |
--------------------------------------------------------------------------------
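The filterVehicle() chain above amounts to first-occurrence de-duplication keyed by vehicleId: updateState() tags a vehicle false the first time it is seen (so it passes filterByState()) and true on every later batch within the one-hour state timeout. A minimal non-Spark sketch of the same rule (hypothetical ids, a HashSet standing in for the keyed state):

import java.util.HashSet;
import java.util.Set;

public class FirstSeenFilterSketch {
    public static void main(String[] args) {
        String[] vehicleIds = {"v1", "v2", "v1", "v3", "v2"}; // hypothetical stream
        Set<String> seen = new HashSet<>();                   // stands in for State<Boolean>

        for (String id : vehicleIds) {
            // Set.add returns false when the id was already present.
            if (seen.add(id)) {
                System.out.println("keep " + id); // keeps v1, v2, v3 once each
            }
        }
    }
}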
/iot-spark-processor/src/main/java/com/apssouza/iot/streaming/StreamingProcessor.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.streaming;
2 |
3 | import com.apssouza.iot.batch.LatestOffSetReader;
4 | import com.apssouza.iot.common.ProcessorUtils;
5 | import com.apssouza.iot.common.dto.IoTData;
6 | import com.apssouza.iot.common.dto.POIData;
7 | import com.apssouza.iot.common.IoTDataDeserializer;
8 | import com.apssouza.iot.common.PropertyFileReader;
9 | import com.datastax.spark.connector.util.JavaApiHelper;
10 |
11 | import org.apache.kafka.clients.consumer.ConsumerConfig;
12 | import org.apache.kafka.clients.consumer.ConsumerRecord;
13 | import org.apache.kafka.clients.consumer.OffsetAndMetadata;
14 | import org.apache.kafka.clients.consumer.OffsetCommitCallback;
15 | import org.apache.kafka.common.TopicPartition;
16 | import org.apache.kafka.common.serialization.StringDeserializer;
17 | import org.apache.log4j.Logger;
18 | import org.apache.spark.SparkConf;
19 | import org.apache.spark.api.java.JavaRDD;
20 | import org.apache.spark.broadcast.Broadcast;
21 | import org.apache.spark.sql.SparkSession;
22 | import org.apache.spark.streaming.Durations;
23 | import org.apache.spark.streaming.api.java.JavaInputDStream;
24 | import org.apache.spark.streaming.api.java.JavaStreamingContext;
25 | import org.apache.spark.streaming.kafka010.CanCommitOffsets;
26 | import org.apache.spark.streaming.kafka010.ConsumerStrategies;
27 | import org.apache.spark.streaming.kafka010.HasOffsetRanges;
28 | import org.apache.spark.streaming.kafka010.KafkaUtils;
29 | import org.apache.spark.streaming.kafka010.LocationStrategies;
30 | import org.apache.spark.streaming.kafka010.OffsetRange;
31 |
32 | import java.io.Serializable;
33 | import java.util.Arrays;
34 | import java.util.HashMap;
35 | import java.util.List;
36 | import java.util.Map;
37 | import java.util.Properties;
38 |
39 | import scala.reflect.ClassTag;
40 |
41 | /**
42 | * This class consumes Kafka IoT messages and creates stream for processing the IoT data.
43 | *
44 | * @author apssouza22
45 | */
46 | public class StreamingProcessor implements Serializable {
47 |
48 | private static final Logger logger = Logger.getLogger(StreamingProcessor.class);
49 | private final Properties prop;
50 |
51 | public StreamingProcessor(Properties properties) {
52 | this.prop = properties;
53 | }
54 |
55 | public static void main(String[] args) throws Exception {
56 | // String file = "iot-spark-local.properties";
57 | String file = "iot-spark.properties";
58 | Properties prop = PropertyFileReader.readPropertyFile(file);
59 | StreamingProcessor streamingProcessor = new StreamingProcessor(prop);
60 | streamingProcessor.start();
61 | }
62 |
63 | private void start() throws Exception {
64 | String parqueFile = prop.getProperty("com.iot.app.hdfs") + "iot-data-parque";
65 | Map kafkaProperties = getKafkaParams(prop);
66 | SparkConf conf = ProcessorUtils.getSparkConf(prop, "streaming-processor");
67 |
68 | //batch interval of 5 seconds for incoming stream
69 | JavaStreamingContext streamingContext = new JavaStreamingContext(conf, Durations.seconds(5));
70 |
71 | //Please note that while data checkpointing is useful for stateful processing, it comes with a latency cost.
72 | // Hence, it's necessary to use this wisely.
73 | // This is necessary because we keep state in some operations.
74 | // We are not using this for fault-tolerance. For that, we use Kafka offset @see commitOffset
75 |
76 | streamingContext.checkpoint(prop.getProperty("com.iot.app.spark.checkpoint.dir"));
77 | SparkSession sparkSession = SparkSession.builder().config(conf).getOrCreate();
78 |         Map<TopicPartition, Long> offsets = getOffsets(parqueFile, sparkSession);
79 |         JavaInputDStream<ConsumerRecord<String, IoTData>> kafkaStream = getKafkaStream(
80 | prop,
81 | streamingContext,
82 | kafkaProperties,
83 | offsets
84 | );
85 |
86 | logger.info("Starting Stream Processing");
87 |
88 | //broadcast variables. We will monitor vehicles on Route 37 which are of type Truck
89 | //Basically we are sending the data to each worker nodes on a Spark cluster.
90 | ClassTag classTag = JavaApiHelper.getClassTag(POIData.class);
91 | Broadcast broadcastPOIValues = sparkSession
92 | .sparkContext()
93 | .broadcast(getPointOfInterest(), classTag);
94 |
95 | StreamProcessor streamProcessor = new StreamProcessor(kafkaStream);
96 | streamProcessor.transform()
97 | .appendToHDFS(sparkSession, parqueFile)
98 | .processPOIData(broadcastPOIValues)
99 | .filterVehicle()
100 | .cache()
101 | .processTotalTrafficData()
102 | .processWindowTrafficData()
103 | .processHeatMap();
104 |
105 | commitOffset(kafkaStream);
106 |
107 | streamingContext.start();
108 | streamingContext.awaitTermination();
109 | }
110 |
111 |     private Map<TopicPartition, Long> getOffsets(final String parqueFile, final SparkSession sparkSession) {
112 | try {
113 | LatestOffSetReader latestOffSetReader = new LatestOffSetReader(sparkSession, parqueFile);
114 | return latestOffSetReader.read().offsets();
115 | } catch (Exception e) {
116 | return new HashMap<>();
117 | }
118 | }
119 |
120 | private POIData getPointOfInterest() {
121 | POIData poiData = new POIData();
122 | poiData.setLatitude(53.877495);
123 | poiData.setLongitude(-6.50238);
124 | poiData.setRadius(100);//100 km
125 | return poiData;
126 | }
127 |
128 | /**
129 |      * Commit the offsets back to Kafka after the batch has been processed.
130 |      * This is our fault-tolerance implementation.
131 | *
132 | * @param directKafkaStream
133 | */
134 |     private void commitOffset(JavaInputDStream<ConsumerRecord<String, IoTData>> directKafkaStream) {
135 |         directKafkaStream.foreachRDD((JavaRDD<ConsumerRecord<String, IoTData>> trafficRdd) -> {
136 | if (!trafficRdd.isEmpty()) {
137 | OffsetRange[] offsetRanges = ((HasOffsetRanges) trafficRdd.rdd()).offsetRanges();
138 |
139 | CanCommitOffsets canCommitOffsets = (CanCommitOffsets) directKafkaStream.inputDStream();
140 | canCommitOffsets.commitAsync(offsetRanges, new TrafficOffsetCommitCallback());
141 | }
142 | });
143 | }
144 |
145 |
146 |     private Map<String, Object> getKafkaParams(Properties prop) {
147 |         Map<String, Object> kafkaProperties = new HashMap<>();
148 | kafkaProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, prop.getProperty("com.iot.app.kafka.brokerlist"));
149 | kafkaProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
150 | kafkaProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, IoTDataDeserializer.class);
151 | kafkaProperties.put(ConsumerConfig.GROUP_ID_CONFIG, prop.getProperty("com.iot.app.kafka.topic"));
152 | kafkaProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, prop.getProperty("com.iot.app.kafka.resetType"));
153 | kafkaProperties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
154 | return kafkaProperties;
155 | }
156 |
157 |
158 |     private JavaInputDStream<ConsumerRecord<String, IoTData>> getKafkaStream(
159 |             Properties prop,
160 |             JavaStreamingContext streamingContext,
161 |             Map<String, Object> kafkaProperties,
162 |             Map<TopicPartition, Long> fromOffsets
163 |     ) {
164 |         List<String> topicSet = Arrays.asList(prop.getProperty("com.iot.app.kafka.topic"));
165 | if (fromOffsets.isEmpty()) {
166 | return KafkaUtils.createDirectStream(
167 | streamingContext,
168 | LocationStrategies.PreferConsistent(),
169 | ConsumerStrategies.Subscribe(topicSet, kafkaProperties)
170 | );
171 | }
172 |
173 | return KafkaUtils.createDirectStream(
174 | streamingContext,
175 | LocationStrategies.PreferConsistent(),
176 | ConsumerStrategies.Subscribe(topicSet, kafkaProperties, fromOffsets)
177 | );
178 | }
179 |
180 |
181 |
182 | }
183 |
184 | final class TrafficOffsetCommitCallback implements OffsetCommitCallback, Serializable {
185 |
186 | private static final Logger log = Logger.getLogger(TrafficOffsetCommitCallback.class);
187 |
188 | @Override
189 |     public void onComplete(Map<TopicPartition, OffsetAndMetadata> offsets, Exception exception) {
190 | log.info("---------------------------------------------------");
191 |         log.info(String.format("%s | %s", offsets, exception)); // String.format uses %s, not MessageFormat's {n}
192 | log.info("---------------------------------------------------");
193 | }
194 | }
195 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/resources/iot-spark-local.properties:
--------------------------------------------------------------------------------
1 | #Kafka properties
2 | com.iot.app.kafka.zookeeper=localhost:2181
3 | com.iot.app.kafka.brokerlist=localhost:29092
4 | com.iot.app.kafka.topic=iot-data-event
5 | com.iot.app.kafka.resetType=earliest
6 |
7 | #Spark properties
8 | com.iot.app.spark.app.name=Iot Data Processor
9 | com.iot.app.spark.master=local[*]
10 | com.iot.app.spark.checkpoint.dir=/tmp/iot-streaming-data
11 | com.iot.app.hdfs=/Users/alexsouza/projects/my/opensource/lambda-arch/iot-spark-processor/data/
12 | com.iot.app.jar=/Users/alexsouza/projects/my/opensource/lambda-arch/iot-spark-processor/target/iot-spark-processor-1.0.0.jar
13 |
14 | # Cassandra properties
15 | com.iot.app.cassandra.host=127.0.0.1
16 | com.iot.app.cassandra.port=9042
17 | com.iot.app.cassandra.keep_alive=10000
18 | com.iot.app.cassandra.username=cassandra
19 | com.iot.app.cassandra.password=cassandra
20 |
21 | # Miscellaneous
22 | com.iot.app.env=local
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/resources/iot-spark.properties:
--------------------------------------------------------------------------------
1 | # Kafka properties
2 | com.iot.app.kafka.zookeeper=zookeeper:2181
3 | com.iot.app.kafka.brokerlist=kafka:9092
4 | com.iot.app.kafka.topic=iot-data-event
5 | com.iot.app.kafka.resetType=earliest
6 |
7 | #Spark properties
8 | com.iot.app.spark.app.name=Iot Data Processor
9 | com.iot.app.spark.master=spark://spark-master:7077
10 | com.iot.app.spark.checkpoint.dir=hdfs://namenode:8020/lambda-arch/checkpoint
11 | com.iot.app.hdfs=hdfs://namenode:8020/lambda-arch/
12 | com.iot.app.jar=/opt/spark-data/iot-spark-processor-1.0.0.jar
13 |
14 | # Cassandra properties
15 | com.iot.app.cassandra.host=172.22.0.6
16 | com.iot.app.cassandra.port=9042
17 | com.iot.app.cassandra.keep_alive=10000
18 | com.iot.app.cassandra.username=cassandra
19 | com.iot.app.cassandra.password=cassandra
20 |
21 | # Miscellaneous
22 | com.iot.app.env=cluster
23 |
--------------------------------------------------------------------------------
/iot-spark-processor/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # Root logger option
2 | log4j.rootLogger=WARN, file, stdout
3 |
4 | # Direct log messages to a log file
5 | log4j.appender.file=org.apache.log4j.RollingFileAppender
6 | log4j.appender.file.File=/tmp/iot-spark.log
7 | log4j.appender.file.MaxFileSize=10MB
8 | log4j.appender.file.MaxBackupIndex=10
9 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
10 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
11 |
12 | # Direct log messages to stdout
13 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
14 | log4j.appender.stdout.Target=System.out
15 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
16 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
--------------------------------------------------------------------------------
/iot-spark-processor/src/test/java/com/apssouza/iot/streaming/RealTimeHeatMapProcessorTest.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.streaming;
2 |
3 | import org.junit.Assert;
4 | import org.junit.Test;
5 | 
6 | public class RealTimeHeatMapProcessorTest {
7 | 
8 |     // Sanity check of the 0.0005-degree grid rounding used by
9 |     // RealTimeHeatMapProcessor#roundCoordinates (assumes JUnit 4 on the test classpath).
10 |     @Test
11 |     public void roundsCoordinatesToGridBox() {
12 |         double roundedLatitude = 5 * Math.round(-5.2036514 * 10000 / 5) / 10000.0;
13 |         double roundedLongitude = 5 * Math.round(53.315853 * 10000 / 5) / 10000.0;
14 |         Assert.assertEquals(-5.2035, roundedLatitude, 1e-9);
15 |         Assert.assertEquals(53.316, roundedLongitude, 1e-9);
16 |     }
17 | }
18 | 
--------------------------------------------------------------------------------
/iot-springboot-dashboard/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0"
3 |          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 |          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 |     <modelVersion>4.0.0</modelVersion>
6 | 
7 |     <parent>
8 |         <groupId>org.springframework.boot</groupId>
9 |         <artifactId>spring-boot-starter-parent</artifactId>
10 |         <version>2.1.2.RELEASE</version>
11 |     </parent>
12 | 
13 |     <groupId>com.apssouza.iot</groupId>
14 |     <artifactId>iot-springboot-dashboard</artifactId>
15 |     <version>1.0.0</version>
16 |     <name>IoT Spring Boot Dashboard</name>
17 | 
18 |     <properties>
19 |         <java.version>1.8</java.version>
20 |     </properties>
21 | 
22 |     <dependencies>
23 |         <dependency>
24 |             <groupId>org.springframework.boot</groupId>
25 |             <artifactId>spring-boot-starter-websocket</artifactId>
26 |             <exclusions>
27 |                 <exclusion>
28 |                     <groupId>org.springframework.boot</groupId>
29 |                     <artifactId>spring-boot-starter-logging</artifactId>
30 |                 </exclusion>
31 |             </exclusions>
32 |         </dependency>
33 |         <dependency>
34 |             <groupId>org.springframework.boot</groupId>
35 |             <artifactId>spring-boot-starter-data-cassandra</artifactId>
36 |         </dependency>
37 |         <dependency>
38 |             <groupId>org.springframework.boot</groupId>
39 |             <artifactId>spring-boot-starter-test</artifactId>
40 |             <scope>test</scope>
41 |         </dependency>
42 |     </dependencies>
43 | 
44 |     <build>
45 |         <plugins>
46 |             <plugin>
47 |                 <groupId>org.springframework.boot</groupId>
48 |                 <artifactId>spring-boot-maven-plugin</artifactId>
49 |             </plugin>
50 |             <plugin>
51 |                 <groupId>org.apache.maven.plugins</groupId>
52 |                 <artifactId>maven-compiler-plugin</artifactId>
53 |                 <configuration>
54 |                     <source>1.8</source>
55 |                     <target>1.8</target>
56 |                 </configuration>
57 |             </plugin>
58 |         </plugins>
59 |     </build>
60 | </project>
--------------------------------------------------------------------------------
/iot-springboot-dashboard/src/main/java/com/apssouza/iot/IoTDataDashboard.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot;
2 |
3 | import org.springframework.boot.SpringApplication;
4 | import org.springframework.boot.autoconfigure.SpringBootApplication;
5 | import org.springframework.data.cassandra.repository.config.EnableCassandraRepositories;
6 | import org.springframework.scheduling.annotation.EnableScheduling;
7 |
8 | /**
9 | * Spring boot application class for Dashboard.
10 | *
11 | * @author abaghel
12 | */
13 | @SpringBootApplication
14 | @EnableScheduling
15 | @EnableCassandraRepositories("com.apssouza.iot.dao")
16 | public class IoTDataDashboard {
17 | public static void main(String[] args) {
18 | SpringApplication.run(IoTDataDashboard.class, args);
19 | }
20 | }
21 |
22 |
--------------------------------------------------------------------------------
/iot-springboot-dashboard/src/main/java/com/apssouza/iot/dao/HeatMapDataRepository.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.dao;
2 |
3 | import com.apssouza.iot.dao.entity.HeatMapData;
5 |
6 | import org.springframework.data.cassandra.repository.CassandraRepository;
7 | import org.springframework.data.cassandra.repository.Query;
8 | import org.springframework.stereotype.Repository;
9 |
10 | import java.util.Date;
11 | import java.util.UUID;
12 |
13 | /**
14 |  * DAO class for heat_map
15 | *
16 | * @author apssouza22
17 | */
18 | @Repository
19 | public interface HeatMapDataRepository extends CassandraRepository<HeatMapData, UUID> {
20 |
21 | @Query("SELECT * FROM traffickeyspace.heat_map WHERE timestamp = ?0 ALLOW FILTERING")
22 |     Iterable<HeatMapData> findHeatMapByDate(Date date);
23 | }
24 |
--------------------------------------------------------------------------------
/iot-springboot-dashboard/src/main/java/com/apssouza/iot/dao/POITrafficDataRepository.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.dao;
2 |
3 | import org.springframework.data.cassandra.repository.CassandraRepository;
4 | import org.springframework.stereotype.Repository;
5 |
6 | import com.apssouza.iot.dao.entity.POITrafficData;
7 |
8 | import java.util.UUID;
9 |
10 | /**
11 | * DAO class for poi_traffic
12 | *
13 | */
14 | @Repository
15 | public interface POITrafficDataRepository extends CassandraRepository<POITrafficData, UUID> {
16 |
17 | }
18 |
--------------------------------------------------------------------------------
/iot-springboot-dashboard/src/main/java/com/apssouza/iot/dao/TotalTrafficDataRepository.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.dao;
2 |
3 | import org.springframework.data.cassandra.repository.CassandraRepository;
4 | import org.springframework.data.cassandra.repository.Query;
5 | import org.springframework.stereotype.Repository;
6 |
7 | import com.apssouza.iot.dao.entity.TotalTrafficData;
8 |
9 | import java.util.UUID;
10 |
11 | /**
12 | * DAO class for total_traffic
13 | *
14 | * @author abaghel
15 | *
16 | */
17 | @Repository
18 | public interface TotalTrafficDataRepository extends CassandraRepository<TotalTrafficData, UUID> {
19 |
20 | @Query("SELECT * FROM traffickeyspace.total_traffic WHERE recorddate = ?0 ALLOW FILTERING")
21 |     Iterable<TotalTrafficData> findTrafficDataByDate(String date);
22 | }
23 |
--------------------------------------------------------------------------------
/iot-springboot-dashboard/src/main/java/com/apssouza/iot/dao/WindowTrafficDataRepository.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.dao;
2 |
3 | import org.springframework.data.cassandra.repository.CassandraRepository;
4 | import org.springframework.data.cassandra.repository.Query;
5 | import org.springframework.stereotype.Repository;
6 |
7 | import com.apssouza.iot.dao.entity.WindowTrafficData;
8 |
9 | import java.util.UUID;
10 |
11 | /**
12 | * DAO class for window_traffic
13 | *
14 | * @author abaghel
15 | *
16 | */
17 | @Repository
18 | public interface WindowTrafficDataRepository extends CassandraRepository<WindowTrafficData, UUID> {
19 |
20 | @Query("SELECT * FROM traffickeyspace.window_traffic WHERE recorddate = ?0 ALLOW FILTERING")
21 |     Iterable<WindowTrafficData> findTrafficDataByDate(String date);
22 |
23 | }
24 |
--------------------------------------------------------------------------------
/iot-springboot-dashboard/src/main/java/com/apssouza/iot/dao/entity/HeatMapData.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.dao.entity;
2 |
3 | import com.fasterxml.jackson.annotation.JsonFormat;
4 | import org.springframework.data.cassandra.core.cql.PrimaryKeyType;
5 | import org.springframework.data.cassandra.core.mapping.Column;
6 | import org.springframework.data.cassandra.core.mapping.PrimaryKeyColumn;
7 | import org.springframework.data.cassandra.core.mapping.Table;
8 |
9 | import java.io.Serializable;
10 | import java.util.Date;
11 |
12 | /**
13 | * Heatmap data entity
14 | * @author apssouza22
15 | */
16 | @Table("heat_map")
17 | public class HeatMapData implements Serializable {
18 | @PrimaryKeyColumn(name = "latitude",ordinal = 0,type = PrimaryKeyType.PARTITIONED)
19 | private double latitude;
20 | @PrimaryKeyColumn(name = "longitude",ordinal = 1,type = PrimaryKeyType.CLUSTERED)
21 | private double longitude;
22 | @Column(value = "totalcount")
23 | private int totalCount;
24 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss", timezone="MST")
25 | @Column(value = "timestamp")
26 | private Date timeStamp;
27 |
28 | public HeatMapData(double latitude, double longitude, int totalCount, Date timeStamp) {
29 | this.latitude = latitude;
30 | this.longitude = longitude;
31 | this.totalCount = totalCount;
32 | this.timeStamp = timeStamp;
33 | }
34 |
35 | public double getLatitude() {
36 | return latitude;
37 | }
38 |
39 | public double getLongitude() {
40 | return longitude;
41 | }
42 |
43 | public int getTotalCount() {
44 | return totalCount;
45 | }
46 |
47 | public Date getTimeStamp() {
48 | return timeStamp;
49 | }
50 |
51 | public void setLatitude(double latitude) {
52 | this.latitude = latitude;
53 | }
54 |
55 | public void setLongitude(double longitude) {
56 | this.longitude = longitude;
57 | }
58 |
59 | public void setTotalCount(int totalCount) {
60 | this.totalCount = totalCount;
61 | }
62 |
63 | public void setTimeStamp(Date timeStamp) {
64 | this.timeStamp = timeStamp;
65 | }
66 | }
67 |
--------------------------------------------------------------------------------
/iot-springboot-dashboard/src/main/java/com/apssouza/iot/dao/entity/POITrafficData.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.dao.entity;
2 |
3 | import com.fasterxml.jackson.annotation.JsonFormat;
4 | import org.springframework.data.cassandra.core.cql.PrimaryKeyType;
5 | import org.springframework.data.cassandra.core.mapping.Column;
6 | import org.springframework.data.cassandra.core.mapping.PrimaryKeyColumn;
7 | import org.springframework.data.cassandra.core.mapping.Table;
8 |
9 | import java.io.Serializable;
10 | import java.util.Date;
11 |
12 |
13 | /**
14 | * Entity class for poi_traffic db table
15 | *
16 | *
17 | */
18 | @Table("poi_traffic")
19 | public class POITrafficData implements Serializable{
20 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss", timezone="MST")
21 | @PrimaryKeyColumn(name = "timeStamp",ordinal = 0,type = PrimaryKeyType.PARTITIONED)
22 | private Date timeStamp;
23 | @PrimaryKeyColumn(name = "recordDate",ordinal = 1,type = PrimaryKeyType.CLUSTERED)
24 | private String recordDate;
25 | @Column(value = "vehicleId")
26 | private String vehicleId;
27 | @Column(value = "distance")
28 | private double distance;
29 | @Column(value = "vehicleType")
30 | private String vehicleType;
31 |
32 | public Date getTimeStamp() {
33 | return timeStamp;
34 | }
35 | public void setTimeStamp(Date timeStamp) {
36 | this.timeStamp = timeStamp;
37 | }
38 | public String getRecordDate() {
39 | return recordDate;
40 | }
41 | public void setRecordDate(String recordDate) {
42 | this.recordDate = recordDate;
43 | }
44 | public String getVehicleId() {
45 | return vehicleId;
46 | }
47 | public void setVehicleId(String vehicleId) {
48 | this.vehicleId = vehicleId;
49 | }
50 | public double getDistance() {
51 | return distance;
52 | }
53 | public void setDistance(double distance) {
54 | this.distance = distance;
55 | }
56 | public String getVehicleType() {
57 | return vehicleType;
58 | }
59 | public void setVehicleType(String vehicleType) {
60 | this.vehicleType = vehicleType;
61 | }
62 |
63 | }
64 |
--------------------------------------------------------------------------------
/iot-springboot-dashboard/src/main/java/com/apssouza/iot/dao/entity/TotalTrafficData.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.dao.entity;
2 |
3 | import com.fasterxml.jackson.annotation.JsonFormat;
4 | import org.springframework.data.cassandra.core.cql.PrimaryKeyType;
5 | import org.springframework.data.cassandra.core.mapping.Column;
6 | import org.springframework.data.cassandra.core.mapping.PrimaryKeyColumn;
7 | import org.springframework.data.cassandra.core.mapping.Table;
8 |
9 | import java.io.Serializable;
10 | import java.util.Date;
11 |
12 | /**
13 | * Entity class for total_traffic db table
14 | *
15 | * @author abaghel
16 | *
17 | */
18 | @Table("total_traffic")
19 | public class TotalTrafficData implements Serializable{
20 | @PrimaryKeyColumn(name = "routeid",ordinal = 0,type = PrimaryKeyType.PARTITIONED)
21 | private String routeId;
22 | @PrimaryKeyColumn(name = "recordDate",ordinal = 1,type = PrimaryKeyType.CLUSTERED)
23 | private String recordDate;
24 | @PrimaryKeyColumn(name = "vehicletype",ordinal = 2,type = PrimaryKeyType.CLUSTERED)
25 | private String vehicleType;
26 | @Column(value = "totalcount")
27 | private long totalCount;
28 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss", timezone="MST")
29 | @Column(value = "timestamp")
30 | private Date timeStamp;
31 |
32 | public String getRouteId() {
33 | return routeId;
34 | }
35 | public void setRouteId(String routeId) {
36 | this.routeId = routeId;
37 | }
38 | public String getRecordDate() {
39 | return recordDate;
40 | }
41 | public void setRecordDate(String recordDate) {
42 | this.recordDate = recordDate;
43 | }
44 | public String getVehicleType() {
45 | return vehicleType;
46 | }
47 | public void setVehicleType(String vehicleType) {
48 | this.vehicleType = vehicleType;
49 | }
50 | public long getTotalCount() {
51 | return totalCount;
52 | }
53 | public void setTotalCount(long totalCount) {
54 | this.totalCount = totalCount;
55 | }
56 | public Date getTimeStamp() {
57 | return timeStamp;
58 | }
59 | public void setTimeStamp(Date timeStamp) {
60 | this.timeStamp = timeStamp;
61 | }
62 | @Override
63 | public String toString() {
64 | return "TrafficData [routeId=" + routeId + ", vehicleType=" + vehicleType + ", totalCount=" + totalCount
65 | + ", timeStamp=" + timeStamp + "]";
66 | }
67 |
68 |
69 | }
70 |
--------------------------------------------------------------------------------
/iot-springboot-dashboard/src/main/java/com/apssouza/iot/dao/entity/WindowTrafficData.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.dao.entity;
2 |
3 |
4 | import com.fasterxml.jackson.annotation.JsonFormat;
5 | import org.springframework.data.cassandra.core.cql.PrimaryKeyType;
6 | import org.springframework.data.cassandra.core.mapping.Column;
7 | import org.springframework.data.cassandra.core.mapping.PrimaryKeyColumn;
8 | import org.springframework.data.cassandra.core.mapping.Table;
9 |
10 | import java.io.Serializable;
11 | import java.util.Date;
12 |
13 | /**
14 | * Entity class for window_traffic db table
15 | *
16 | * @author abaghel
17 | *
18 | */
19 | @Table("window_traffic")
20 | public class WindowTrafficData implements Serializable{
21 | @PrimaryKeyColumn(name = "routeid",ordinal = 0,type = PrimaryKeyType.PARTITIONED)
22 | private String routeId;
23 | @PrimaryKeyColumn(name = "recordDate",ordinal = 1,type = PrimaryKeyType.CLUSTERED)
24 | private String recordDate;
25 | @PrimaryKeyColumn(name = "vehicletype",ordinal = 2,type = PrimaryKeyType.CLUSTERED)
26 | private String vehicleType;
27 | @Column(value = "totalcount")
28 | private long totalCount;
29 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss", timezone="MST")
30 | @Column(value = "timestamp")
31 | private Date timeStamp;
32 |
33 | public String getRouteId() {
34 | return routeId;
35 | }
36 | public void setRouteId(String routeId) {
37 | this.routeId = routeId;
38 | }
39 | public String getRecordDate() {
40 | return recordDate;
41 | }
42 | public void setRecordDate(String recordDate) {
43 | this.recordDate = recordDate;
44 | }
45 | public String getVehicleType() {
46 | return vehicleType;
47 | }
48 | public void setVehicleType(String vehicleType) {
49 | this.vehicleType = vehicleType;
50 | }
51 | public long getTotalCount() {
52 | return totalCount;
53 | }
54 | public void setTotalCount(long totalCount) {
55 | this.totalCount = totalCount;
56 | }
57 | public Date getTimeStamp() {
58 | return timeStamp;
59 | }
60 | public void setTimeStamp(Date timeStamp) {
61 | this.timeStamp = timeStamp;
62 | }
63 | @Override
64 | public String toString() {
65 | return "TrafficData [routeId=" + routeId + ", vehicleType=" + vehicleType + ", totalCount=" + totalCount
66 | + ", timeStamp=" + timeStamp + "]";
67 | }
68 |
69 |
70 | }
71 |
--------------------------------------------------------------------------------
/iot-springboot-dashboard/src/main/java/com/apssouza/iot/dashboard/Response.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.dashboard;
2 |
3 | import java.io.Serializable;
4 | import java.util.List;
5 |
6 | import com.apssouza.iot.dao.entity.HeatMapData;
7 | import com.apssouza.iot.dao.entity.POITrafficData;
8 | import com.apssouza.iot.dao.entity.TotalTrafficData;
9 | import com.apssouza.iot.dao.entity.WindowTrafficData;
10 |
11 | /**
12 | * Response object containing traffic details that will be sent to dashboard.
13 | *
14 | * @author abaghel
15 | */
16 | public class Response implements Serializable {
17 |     private List<TotalTrafficData> totalTraffic;
18 |     private List<WindowTrafficData> windowTraffic;
19 |     private List<POITrafficData> poiTraffic;
20 |     private List<HeatMapData> heatMap;
21 |
22 |     public List<TotalTrafficData> getTotalTraffic() {
23 |         return totalTraffic;
24 |     }
25 | 
26 |     public void setTotalTraffic(List<TotalTrafficData> totalTraffic) {
27 |         this.totalTraffic = totalTraffic;
28 |     }
29 | 
30 |     public List<WindowTrafficData> getWindowTraffic() {
31 |         return windowTraffic;
32 |     }
33 | 
34 |     public void setWindowTraffic(List<WindowTrafficData> windowTraffic) {
35 |         this.windowTraffic = windowTraffic;
36 |     }
37 | 
38 |     public List<POITrafficData> getPoiTraffic() {
39 |         return poiTraffic;
40 |     }
41 | 
42 |     public void setPoiTraffic(List<POITrafficData> poiTraffic) {
43 |         this.poiTraffic = poiTraffic;
44 |     }
45 | 
46 |     public void setHeatMap(List<HeatMapData> heatMap) {
47 |         this.heatMap = heatMap;
48 |     }
49 | 
50 |     public List<HeatMapData> getHeatMap() {
51 |         return heatMap;
52 |     }
53 | }
54 |
--------------------------------------------------------------------------------
/iot-springboot-dashboard/src/main/java/com/apssouza/iot/dashboard/TrafficDataService.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.dashboard;
2 |
3 | import java.text.DateFormat;
4 | import java.text.SimpleDateFormat;
5 | import java.util.ArrayList;
6 | import java.util.Date;
7 | import java.util.List;
8 | import java.util.logging.Logger;
9 |
10 | import com.apssouza.iot.dao.TotalTrafficDataRepository;
11 | import com.apssouza.iot.dao.entity.TotalTrafficData;
12 | import com.apssouza.iot.dao.HeatMapDataRepository;
13 | import com.apssouza.iot.dao.entity.HeatMapData;
14 | import org.springframework.beans.factory.annotation.Autowired;
15 | import org.springframework.messaging.simp.SimpMessagingTemplate;
16 | import org.springframework.scheduling.annotation.Scheduled;
17 | import org.springframework.stereotype.Service;
18 |
19 | import com.apssouza.iot.dao.POITrafficDataRepository;
20 | import com.apssouza.iot.dao.WindowTrafficDataRepository;
21 | import com.apssouza.iot.dao.entity.POITrafficData;
22 | import com.apssouza.iot.dao.entity.WindowTrafficData;
23 |
24 | /**
25 | * Service class to send traffic data messages to dashboard ui at fixed interval using web-socket.
26 | */
27 | @Service
28 | public class TrafficDataService {
29 | private static final Logger logger = Logger.getLogger(TrafficDataService.class.getName());
30 |
31 | @Autowired
32 | private SimpMessagingTemplate template;
33 |
34 | @Autowired
35 | private TotalTrafficDataRepository totalRepository;
36 |
37 | @Autowired
38 | private WindowTrafficDataRepository windowRepository;
39 |
40 | @Autowired
41 | private POITrafficDataRepository poiRepository;
42 |
43 | @Autowired
44 | private HeatMapDataRepository heatMapDataRepository;
45 |
46 | private static DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
47 |
48 |     // Sends the traffic data message to the dashboard every 15 seconds.
49 | @Scheduled(fixedRate = 15000)
50 | public void trigger() {
51 |         List<TotalTrafficData> totalTrafficList = new ArrayList<>();
52 |         List<WindowTrafficData> windowTrafficList = new ArrayList<>();
53 |         List<POITrafficData> poiTrafficList = new ArrayList<>();
54 |         List<HeatMapData> heatmapData = new ArrayList<>();
55 | //Call dao methods
56 | totalRepository.findTrafficDataByDate(sdf.format(new Date())).forEach(e -> totalTrafficList.add(e));
57 | windowRepository.findTrafficDataByDate(sdf.format(new Date())).forEach(e -> windowTrafficList.add(e));
58 | poiRepository.findAll().forEach(e -> poiTrafficList.add(e));
59 | heatMapDataRepository.findAll().forEach(e -> heatmapData.add(e));
60 | //prepare response
61 | Response response = new Response();
62 | response.setTotalTraffic(totalTrafficList);
63 | response.setWindowTraffic(windowTrafficList);
64 | response.setPoiTraffic(poiTrafficList);
65 | response.setHeatMap(heatmapData);
66 | logger.info("Sending to UI " + response);
67 | //send to ui
68 | this.template.convertAndSend("/topic/trafficData", response);
69 | }
70 |
71 | }
72 |
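Note: @Scheduled only fires when scheduling is enabled in the application context. The Spring Boot entry point is not part of this excerpt, so the sketch below only illustrates the requirement — the class name IotDataDashboard is an assumption, not necessarily the repo's actual bootstrap class:

    package com.apssouza.iot.dashboard;

    import org.springframework.boot.SpringApplication;
    import org.springframework.boot.autoconfigure.SpringBootApplication;
    import org.springframework.scheduling.annotation.EnableScheduling;

    // @EnableScheduling is what makes TrafficDataService.trigger() run every 15 seconds.
    @SpringBootApplication
    @EnableScheduling
    public class IotDataDashboard {
        public static void main(String[] args) {
            SpringApplication.run(IotDataDashboard.class, args);
        }
    }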
--------------------------------------------------------------------------------
/iot-springboot-dashboard/src/main/java/com/apssouza/iot/dashboard/WebSocketConfig.java:
--------------------------------------------------------------------------------
1 | package com.apssouza.iot.dashboard;
2 |
3 | import org.springframework.context.annotation.Configuration;
4 | import org.springframework.messaging.simp.config.MessageBrokerRegistry;
5 | import org.springframework.web.socket.config.annotation.AbstractWebSocketMessageBrokerConfigurer;
6 | import org.springframework.web.socket.config.annotation.EnableWebSocketMessageBroker;
7 | import org.springframework.web.socket.config.annotation.StompEndpointRegistry;
8 | import org.springframework.web.socket.config.annotation.WebSocketMessageBrokerConfigurer;
9 |
10 | /**
11 | * WebSocket message broker configuration; data is pushed over SockJS
12 | * to the dashboard HTML page.
13 | *
14 | * @author abaghel
15 | */
16 | @Configuration
17 | @EnableWebSocketMessageBroker
18 | public class WebSocketConfig implements WebSocketMessageBrokerConfigurer {
19 | @Override //SockJS clients connect to the broker through this endpoint
20 | public void registerStompEndpoints(StompEndpointRegistry registry) {
21 | registry.addEndpoint("/stomp").withSockJS();
22 | }
23 |
24 | //configure message broker
25 | @Override
26 | public void configureMessageBroker(MessageBrokerRegistry config) {
27 | config.enableSimpleBroker("/topic");
28 | }
29 | }
30 |
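Note: besides the browser client in script.js, the /stomp endpoint registered above can be smoke-tested from plain Java with Spring's STOMP client. A sketch under stated assumptions: spring-websocket is on the classpath, the dashboard runs on server.port=3000 (see application.properties below), and connectAsync is the Spring 6 method name (older versions call it connect); the class name TrafficTopicSmokeTest is illustrative:

    import java.lang.reflect.Type;
    import java.util.List;

    import org.springframework.messaging.simp.stomp.StompHeaders;
    import org.springframework.messaging.simp.stomp.StompSession;
    import org.springframework.messaging.simp.stomp.StompSessionHandlerAdapter;
    import org.springframework.web.socket.client.standard.StandardWebSocketClient;
    import org.springframework.web.socket.messaging.WebSocketStompClient;
    import org.springframework.web.socket.sockjs.client.SockJsClient;
    import org.springframework.web.socket.sockjs.client.Transport;
    import org.springframework.web.socket.sockjs.client.WebSocketTransport;

    public class TrafficTopicSmokeTest {
        public static void main(String[] args) throws Exception {
            List<Transport> transports = List.of(new WebSocketTransport(new StandardWebSocketClient()));
            WebSocketStompClient stomp = new WebSocketStompClient(new SockJsClient(transports));
            stomp.connectAsync("http://localhost:3000/stomp", new StompSessionHandlerAdapter() {
                @Override
                public void afterConnected(StompSession session, StompHeaders headers) {
                    session.subscribe("/topic/trafficData", new StompSessionHandlerAdapter() {
                        @Override
                        public Type getPayloadType(StompHeaders headers) {
                            return String.class; // receive the raw JSON payload as text
                        }
                        @Override
                        public void handleFrame(StompHeaders headers, Object payload) {
                            System.out.println("traffic update: " + payload);
                        }
                    });
                }
            });
            Thread.sleep(60_000); // keep the JVM alive long enough to see a few pushes
        }
    }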
--------------------------------------------------------------------------------
/iot-springboot-dashboard/src/main/resources/application.properties:
--------------------------------------------------------------------------------
1 | spring.data.cassandra.keyspace-name=traffickeyspace
2 | spring.data.cassandra.port=9042
3 | spring.data.cassandra.password=cassandra
4 | spring.data.cassandra.contact-points=localhost
5 | server.port=3000
6 | spring.data.cassandra.username=cassandra
7 |
--------------------------------------------------------------------------------
/iot-springboot-dashboard/src/main/resources/iot-springboot.properties:
--------------------------------------------------------------------------------
1 | #Cassandra properties
2 | com.iot.app.cassandra.host=127.0.0.1
3 | com.iot.app.cassandra.port=9042
4 | com.iot.app.cassandra.keyspace=traffickeyspace
--------------------------------------------------------------------------------
/iot-springboot-dashboard/src/main/resources/log4j.properties:
--------------------------------------------------------------------------------
1 | # Root logger option
2 | log4j.rootLogger=INFO, file, stdout
3 |
4 | # Direct log messages to a log file
5 | log4j.appender.file=org.apache.log4j.RollingFileAppender
6 | log4j.appender.file.File=/tmp/iot-springboot.log
7 | log4j.appender.file.MaxFileSize=10MB
8 | log4j.appender.file.MaxBackupIndex=10
9 | log4j.appender.file.layout=org.apache.log4j.PatternLayout
10 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
11 |
12 | # Direct log messages to stdout
13 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender
14 | log4j.appender.stdout.Target=System.out
15 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
16 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
--------------------------------------------------------------------------------
/iot-springboot-dashboard/src/main/resources/static/css/style.css:
--------------------------------------------------------------------------------
1 | .outerTable {
2 | table-layout: fixed;
3 | border: 2px solid black;
4 | }
5 |
6 | .outerTable th {
7 | text-align: center;
8 | background-color: #d4d4aa;
9 | color: blue;
10 | }
11 |
12 | .outerTable>thead>tr>th {
13 | border: 2px solid black;
14 | }
15 |
16 | .innerTable {
17 | table-layout: fixed;
18 | }
19 |
20 | .innerTable th {
21 | text-align: center;
22 | background-color: #4CAF50;
23 | color: white;
24 | }
25 |
26 | .innerTable tr {
27 | text-align: center;
28 | }
29 |
30 | .innerTable>tbody>tr>td {
31 | font-family:Arial-Black;
32 | }
33 |
34 | .innerTable>tbody:nth-of-type(odd){
35 | background-color: #B6EA7D;
36 | }
37 |
38 | .innerTable>tbody:nth-of-type(even){
39 | background-color: #BFEAA3;
40 | }
--------------------------------------------------------------------------------
/iot-springboot-dashboard/src/main/resources/static/index.html:
--------------------------------------------------------------------------------
1 | <!DOCTYPE html>
2 | <html>
3 | <head>
4 | <title>IoT Traffic Data Monitoring Dashboard</title>
5 | <link rel="stylesheet" href="css/bootstrap.min.css"/>
6 | <link rel="stylesheet" href="css/style.css"/>
7 | <script src="js/jquery.min.js"></script>
8 | <script src="js/bootstrap.min.js"></script>
9 | <script src="js/sockjs.min.js"></script>
10 | <script src="js/stomp.min.js"></script>
11 | <script src="js/Chart.min.js"></script>
12 | <script src="js/script.js"></script>
13 | </head>
14 | <body>
15 | <div class="container-fluid">
16 | <h2 align="center">IoT Traffic Data Monitoring Dashboard</h2>
17 | <div class="row">
18 | <div class="col-md-6">
19 | <h4 align="center">Total Traffic Chart</h4>
20 | <canvas id="totalTrafficChart"></canvas>
21 | </div>
22 | <div class="col-md-6">
23 | <h4 align="center">Total Traffic Data</h4>
24 | <div id="total_traffic">
25 | <table class="table innerTable"><thead><tr><th>Route</th><th>Vehicle</th><th>Count</th></tr></thead><tbody></tbody></table>
26 | </div>
27 | </div>
28 | </div>
29 | <div class="row">
30 | <div class="col-md-6">
31 | <h4 align="center">Route 37 Traffic Chart</h4>
32 | <canvas id="route37TrafficChart"></canvas>
33 | </div>
34 | <div class="col-md-6">
35 | <h4 align="center">Last 30 Seconds Window</h4>
36 | <div id="window_traffic">
37 | <table class="table innerTable"><thead><tr><th>Route</th><th>Vehicle</th><th>Count</th></tr></thead><tbody></tbody></table>
38 | </div>
39 | </div>
40 | </div>
41 | <div class="row">
42 | <div class="col-md-6">
43 | <h4 align="center">POI Vehicle Chart</h4>
44 | <canvas id="poiTrafficChart"></canvas>
45 | </div>
46 | <div class="col-md-6">
47 | <h4 align="center">Vehicles At a POI (last 2 minutes)</h4>
48 | <div id="poi_traffic">
49 | <table class="table innerTable"><thead><tr><th>Vehicle</th><th>Distance</th></tr></thead><tbody></tbody></table>
50 | </div>
51 | </div>
52 | </div>
53 | <div class="row">
54 | <div class="col-md-8">
55 | <h4 align="center">Qtd of Vehicles per Area (last 2 minutes)</h4>
56 | <div id="map" style="height:400px;"></div>
57 | </div>
58 | <div class="col-md-4">
59 | <table id="gridBoxContent" class="table innerTable">
60 | <thead><tr><th>Grid box center</th><th>Quantity</th></tr></thead>
61 | <tbody></tbody>
62 | </table>
63 | </div>
64 | </div>
65 | </div>
66 | <script async defer src="https://maps.googleapis.com/maps/api/js?key=YOUR_API_KEY&libraries=visualization&callback=initMap"></script>
67 | </body>
68 | </html>
--------------------------------------------------------------------------------
/iot-springboot-dashboard/src/main/resources/static/js/bootstrap.min.js:
--------------------------------------------------------------------------------
1 | /*!
2 | * Bootstrap v3.3.6 (http://getbootstrap.com)
3 | * Copyright 2011-2015 Twitter, Inc.
4 | * Licensed under the MIT license
5 | */
6 | if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires jQuery");+function(a){"use strict";var b=a.fn.jquery.split(" ")[0].split(".");if(b[0]<2&&b[1]<9||1==b[0]&&9==b[1]&&b[2]<1||b[0]>2)throw new Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but lower than version 3")}(jQuery),+function(a){"use strict";function b(){var a=document.createElement("bootstrap"),b={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var c in b)if(void 0!==a.style[c])return{end:b[c]};return!1}a.fn.emulateTransitionEnd=function(b){var c=!1,d=this;a(this).one("bsTransitionEnd",function(){c=!0});var e=function(){c||a(d).trigger(a.support.transition.end)};return setTimeout(e,b),this},a(function(){a.support.transition=b(),a.support.transition&&(a.event.special.bsTransitionEnd={bindType:a.support.transition.end,delegateType:a.support.transition.end,handle:function(b){return a(b.target).is(this)?b.handleObj.handler.apply(this,arguments):void 0}})})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var c=a(this),e=c.data("bs.alert");e||c.data("bs.alert",e=new d(this)),"string"==typeof b&&e[b].call(c)})}var c='[data-dismiss="alert"]',d=function(b){a(b).on("click",c,this.close)};d.VERSION="3.3.6",d.TRANSITION_DURATION=150,d.prototype.close=function(b){function c(){g.detach().trigger("closed.bs.alert").remove()}var e=a(this),f=e.attr("data-target");f||(f=e.attr("href"),f=f&&f.replace(/.*(?=#[^\s]*$)/,""));var g=a(f);b&&b.preventDefault(),g.length||(g=e.closest(".alert")),g.trigger(b=a.Event("close.bs.alert")),b.isDefaultPrevented()||(g.removeClass("in"),a.support.transition&&g.hasClass("fade")?g.one("bsTransitionEnd",c).emulateTransitionEnd(d.TRANSITION_DURATION):c())};var e=a.fn.alert;a.fn.alert=b,a.fn.alert.Constructor=d,a.fn.alert.noConflict=function(){return a.fn.alert=e,this},a(document).on("click.bs.alert.data-api",c,d.prototype.close)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.button"),f="object"==typeof b&&b;e||d.data("bs.button",e=new c(this,f)),"toggle"==b?e.toggle():b&&e.setState(b)})}var c=function(b,d){this.$element=a(b),this.options=a.extend({},c.DEFAULTS,d),this.isLoading=!1};c.VERSION="3.3.6",c.DEFAULTS={loadingText:"loading..."},c.prototype.setState=function(b){var c="disabled",d=this.$element,e=d.is("input")?"val":"html",f=d.data();b+="Text",null==f.resetText&&d.data("resetText",d[e]()),setTimeout(a.proxy(function(){d[e](null==f[b]?this.options[b]:f[b]),"loadingText"==b?(this.isLoading=!0,d.addClass(c).attr(c,c)):this.isLoading&&(this.isLoading=!1,d.removeClass(c).removeAttr(c))},this),0)},c.prototype.toggle=function(){var a=!0,b=this.$element.closest('[data-toggle="buttons"]');if(b.length){var c=this.$element.find("input");"radio"==c.prop("type")?(c.prop("checked")&&(a=!1),b.find(".active").removeClass("active"),this.$element.addClass("active")):"checkbox"==c.prop("type")&&(c.prop("checked")!==this.$element.hasClass("active")&&(a=!1),this.$element.toggleClass("active")),c.prop("checked",this.$element.hasClass("active")),a&&c.trigger("change")}else this.$element.attr("aria-pressed",!this.$element.hasClass("active")),this.$element.toggleClass("active")};var d=a.fn.button;a.fn.button=b,a.fn.button.Constructor=c,a.fn.button.noConflict=function(){return a.fn.button=d,this},a(document).on("click.bs.button.data-api",'[data-toggle^="button"]',function(c){var 
d=a(c.target);d.hasClass("btn")||(d=d.closest(".btn")),b.call(d,"toggle"),a(c.target).is('input[type="radio"]')||a(c.target).is('input[type="checkbox"]')||c.preventDefault()}).on("focus.bs.button.data-api blur.bs.button.data-api",'[data-toggle^="button"]',function(b){a(b.target).closest(".btn").toggleClass("focus",/^focus(in)?$/.test(b.type))})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.carousel"),f=a.extend({},c.DEFAULTS,d.data(),"object"==typeof b&&b),g="string"==typeof b?b:f.slide;e||d.data("bs.carousel",e=new c(this,f)),"number"==typeof b?e.to(b):g?e[g]():f.interval&&e.pause().cycle()})}var c=function(b,c){this.$element=a(b),this.$indicators=this.$element.find(".carousel-indicators"),this.options=c,this.paused=null,this.sliding=null,this.interval=null,this.$active=null,this.$items=null,this.options.keyboard&&this.$element.on("keydown.bs.carousel",a.proxy(this.keydown,this)),"hover"==this.options.pause&&!("ontouchstart"in document.documentElement)&&this.$element.on("mouseenter.bs.carousel",a.proxy(this.pause,this)).on("mouseleave.bs.carousel",a.proxy(this.cycle,this))};c.VERSION="3.3.6",c.TRANSITION_DURATION=600,c.DEFAULTS={interval:5e3,pause:"hover",wrap:!0,keyboard:!0},c.prototype.keydown=function(a){if(!/input|textarea/i.test(a.target.tagName)){switch(a.which){case 37:this.prev();break;case 39:this.next();break;default:return}a.preventDefault()}},c.prototype.cycle=function(b){return b||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(a.proxy(this.next,this),this.options.interval)),this},c.prototype.getItemIndex=function(a){return this.$items=a.parent().children(".item"),this.$items.index(a||this.$active)},c.prototype.getItemForDirection=function(a,b){var c=this.getItemIndex(b),d="prev"==a&&0===c||"next"==a&&c==this.$items.length-1;if(d&&!this.options.wrap)return b;var e="prev"==a?-1:1,f=(c+e)%this.$items.length;return this.$items.eq(f)},c.prototype.to=function(a){var b=this,c=this.getItemIndex(this.$active=this.$element.find(".item.active"));return a>this.$items.length-1||0>a?void 0:this.sliding?this.$element.one("slid.bs.carousel",function(){b.to(a)}):c==a?this.pause().cycle():this.slide(a>c?"next":"prev",this.$items.eq(a))},c.prototype.pause=function(b){return b||(this.paused=!0),this.$element.find(".next, .prev").length&&a.support.transition&&(this.$element.trigger(a.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},c.prototype.next=function(){return this.sliding?void 0:this.slide("next")},c.prototype.prev=function(){return this.sliding?void 0:this.slide("prev")},c.prototype.slide=function(b,d){var e=this.$element.find(".item.active"),f=d||this.getItemForDirection(b,e),g=this.interval,h="next"==b?"left":"right",i=this;if(f.hasClass("active"))return this.sliding=!1;var j=f[0],k=a.Event("slide.bs.carousel",{relatedTarget:j,direction:h});if(this.$element.trigger(k),!k.isDefaultPrevented()){if(this.sliding=!0,g&&this.pause(),this.$indicators.length){this.$indicators.find(".active").removeClass("active");var l=a(this.$indicators.children()[this.getItemIndex(f)]);l&&l.addClass("active")}var m=a.Event("slid.bs.carousel",{relatedTarget:j,direction:h});return a.support.transition&&this.$element.hasClass("slide")?(f.addClass(b),f[0].offsetWidth,e.addClass(h),f.addClass(h),e.one("bsTransitionEnd",function(){f.removeClass([b,h].join(" ")).addClass("active"),e.removeClass(["active",h].join(" 
")),i.sliding=!1,setTimeout(function(){i.$element.trigger(m)},0)}).emulateTransitionEnd(c.TRANSITION_DURATION)):(e.removeClass("active"),f.addClass("active"),this.sliding=!1,this.$element.trigger(m)),g&&this.cycle(),this}};var d=a.fn.carousel;a.fn.carousel=b,a.fn.carousel.Constructor=c,a.fn.carousel.noConflict=function(){return a.fn.carousel=d,this};var e=function(c){var d,e=a(this),f=a(e.attr("data-target")||(d=e.attr("href"))&&d.replace(/.*(?=#[^\s]+$)/,""));if(f.hasClass("carousel")){var g=a.extend({},f.data(),e.data()),h=e.attr("data-slide-to");h&&(g.interval=!1),b.call(f,g),h&&f.data("bs.carousel").to(h),c.preventDefault()}};a(document).on("click.bs.carousel.data-api","[data-slide]",e).on("click.bs.carousel.data-api","[data-slide-to]",e),a(window).on("load",function(){a('[data-ride="carousel"]').each(function(){var c=a(this);b.call(c,c.data())})})}(jQuery),+function(a){"use strict";function b(b){var c,d=b.attr("data-target")||(c=b.attr("href"))&&c.replace(/.*(?=#[^\s]+$)/,"");return a(d)}function c(b){return this.each(function(){var c=a(this),e=c.data("bs.collapse"),f=a.extend({},d.DEFAULTS,c.data(),"object"==typeof b&&b);!e&&f.toggle&&/show|hide/.test(b)&&(f.toggle=!1),e||c.data("bs.collapse",e=new d(this,f)),"string"==typeof b&&e[b]()})}var d=function(b,c){this.$element=a(b),this.options=a.extend({},d.DEFAULTS,c),this.$trigger=a('[data-toggle="collapse"][href="#'+b.id+'"],[data-toggle="collapse"][data-target="#'+b.id+'"]'),this.transitioning=null,this.options.parent?this.$parent=this.getParent():this.addAriaAndCollapsedClass(this.$element,this.$trigger),this.options.toggle&&this.toggle()};d.VERSION="3.3.6",d.TRANSITION_DURATION=350,d.DEFAULTS={toggle:!0},d.prototype.dimension=function(){var a=this.$element.hasClass("width");return a?"width":"height"},d.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var b,e=this.$parent&&this.$parent.children(".panel").children(".in, .collapsing");if(!(e&&e.length&&(b=e.data("bs.collapse"),b&&b.transitioning))){var f=a.Event("show.bs.collapse");if(this.$element.trigger(f),!f.isDefaultPrevented()){e&&e.length&&(c.call(e,"hide"),b||e.data("bs.collapse",null));var g=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[g](0).attr("aria-expanded",!0),this.$trigger.removeClass("collapsed").attr("aria-expanded",!0),this.transitioning=1;var h=function(){this.$element.removeClass("collapsing").addClass("collapse in")[g](""),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!a.support.transition)return h.call(this);var i=a.camelCase(["scroll",g].join("-"));this.$element.one("bsTransitionEnd",a.proxy(h,this)).emulateTransitionEnd(d.TRANSITION_DURATION)[g](this.$element[0][i])}}}},d.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var b=a.Event("hide.bs.collapse");if(this.$element.trigger(b),!b.isDefaultPrevented()){var c=this.dimension();this.$element[c](this.$element[c]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse in").attr("aria-expanded",!1),this.$trigger.addClass("collapsed").attr("aria-expanded",!1),this.transitioning=1;var e=function(){this.transitioning=0,this.$element.removeClass("collapsing").addClass("collapse").trigger("hidden.bs.collapse")};return a.support.transition?void this.$element[c](0).one("bsTransitionEnd",a.proxy(e,this)).emulateTransitionEnd(d.TRANSITION_DURATION):e.call(this)}}},d.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()},d.prototype.getParent=function(){return 
a(this.options.parent).find('[data-toggle="collapse"][data-parent="'+this.options.parent+'"]').each(a.proxy(function(c,d){var e=a(d);this.addAriaAndCollapsedClass(b(e),e)},this)).end()},d.prototype.addAriaAndCollapsedClass=function(a,b){var c=a.hasClass("in");a.attr("aria-expanded",c),b.toggleClass("collapsed",!c).attr("aria-expanded",c)};var e=a.fn.collapse;a.fn.collapse=c,a.fn.collapse.Constructor=d,a.fn.collapse.noConflict=function(){return a.fn.collapse=e,this},a(document).on("click.bs.collapse.data-api",'[data-toggle="collapse"]',function(d){var e=a(this);e.attr("data-target")||d.preventDefault();var f=b(e),g=f.data("bs.collapse"),h=g?"toggle":e.data();c.call(f,h)})}(jQuery),+function(a){"use strict";function b(b){var c=b.attr("data-target");c||(c=b.attr("href"),c=c&&/#[A-Za-z]/.test(c)&&c.replace(/.*(?=#[^\s]*$)/,""));var d=c&&a(c);return d&&d.length?d:b.parent()}function c(c){c&&3===c.which||(a(e).remove(),a(f).each(function(){var d=a(this),e=b(d),f={relatedTarget:this};e.hasClass("open")&&(c&&"click"==c.type&&/input|textarea/i.test(c.target.tagName)&&a.contains(e[0],c.target)||(e.trigger(c=a.Event("hide.bs.dropdown",f)),c.isDefaultPrevented()||(d.attr("aria-expanded","false"),e.removeClass("open").trigger(a.Event("hidden.bs.dropdown",f)))))}))}function d(b){return this.each(function(){var c=a(this),d=c.data("bs.dropdown");d||c.data("bs.dropdown",d=new g(this)),"string"==typeof b&&d[b].call(c)})}var e=".dropdown-backdrop",f='[data-toggle="dropdown"]',g=function(b){a(b).on("click.bs.dropdown",this.toggle)};g.VERSION="3.3.6",g.prototype.toggle=function(d){var e=a(this);if(!e.is(".disabled, :disabled")){var f=b(e),g=f.hasClass("open");if(c(),!g){"ontouchstart"in document.documentElement&&!f.closest(".navbar-nav").length&&a(document.createElement("div")).addClass("dropdown-backdrop").insertAfter(a(this)).on("click",c);var h={relatedTarget:this};if(f.trigger(d=a.Event("show.bs.dropdown",h)),d.isDefaultPrevented())return;e.trigger("focus").attr("aria-expanded","true"),f.toggleClass("open").trigger(a.Event("shown.bs.dropdown",h))}return!1}},g.prototype.keydown=function(c){if(/(38|40|27|32)/.test(c.which)&&!/input|textarea/i.test(c.target.tagName)){var d=a(this);if(c.preventDefault(),c.stopPropagation(),!d.is(".disabled, :disabled")){var e=b(d),g=e.hasClass("open");if(!g&&27!=c.which||g&&27==c.which)return 27==c.which&&e.find(f).trigger("focus"),d.trigger("click");var h=" li:not(.disabled):visible a",i=e.find(".dropdown-menu"+h);if(i.length){var j=i.index(c.target);38==c.which&&j>0&&j--,40==c.which&&jdocument.documentElement.clientHeight;this.$element.css({paddingLeft:!this.bodyIsOverflowing&&a?this.scrollbarWidth:"",paddingRight:this.bodyIsOverflowing&&!a?this.scrollbarWidth:""})},c.prototype.resetAdjustments=function(){this.$element.css({paddingLeft:"",paddingRight:""})},c.prototype.checkScrollbar=function(){var a=window.innerWidth;if(!a){var b=document.documentElement.getBoundingClientRect();a=b.right-Math.abs(b.left)}this.bodyIsOverflowing=document.body.clientWidth
',trigger:"hover focus",title:"",delay:0,html:!1,container:!1,viewport:{selector:"body",padding:0}},c.prototype.init=function(b,c,d){if(this.enabled=!0,this.type=b,this.$element=a(c),this.options=this.getOptions(d),this.$viewport=this.options.viewport&&a(a.isFunction(this.options.viewport)?this.options.viewport.call(this,this.$element):this.options.viewport.selector||this.options.viewport),this.inState={click:!1,hover:!1,focus:!1},this.$element[0]instanceof document.constructor&&!this.options.selector)throw new Error("`selector` option must be specified when initializing "+this.type+" on the window.document object!");for(var e=this.options.trigger.split(" "),f=e.length;f--;){var g=e[f];if("click"==g)this.$element.on("click."+this.type,this.options.selector,a.proxy(this.toggle,this));else if("manual"!=g){var h="hover"==g?"mouseenter":"focusin",i="hover"==g?"mouseleave":"focusout";this.$element.on(h+"."+this.type,this.options.selector,a.proxy(this.enter,this)),this.$element.on(i+"."+this.type,this.options.selector,a.proxy(this.leave,this))}}this.options.selector?this._options=a.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.getOptions=function(b){return b=a.extend({},this.getDefaults(),this.$element.data(),b),b.delay&&"number"==typeof b.delay&&(b.delay={show:b.delay,hide:b.delay}),b},c.prototype.getDelegateOptions=function(){var b={},c=this.getDefaults();return this._options&&a.each(this._options,function(a,d){c[a]!=d&&(b[a]=d)}),b},c.prototype.enter=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);return c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusin"==b.type?"focus":"hover"]=!0),c.tip().hasClass("in")||"in"==c.hoverState?void(c.hoverState="in"):(clearTimeout(c.timeout),c.hoverState="in",c.options.delay&&c.options.delay.show?void(c.timeout=setTimeout(function(){"in"==c.hoverState&&c.show()},c.options.delay.show)):c.show())},c.prototype.isInStateTrue=function(){for(var a in this.inState)if(this.inState[a])return!0;return!1},c.prototype.leave=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);return c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusout"==b.type?"focus":"hover"]=!1),c.isInStateTrue()?void 0:(clearTimeout(c.timeout),c.hoverState="out",c.options.delay&&c.options.delay.hide?void(c.timeout=setTimeout(function(){"out"==c.hoverState&&c.hide()},c.options.delay.hide)):c.hide())},c.prototype.show=function(){var b=a.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){this.$element.trigger(b);var d=a.contains(this.$element[0].ownerDocument.documentElement,this.$element[0]);if(b.isDefaultPrevented()||!d)return;var e=this,f=this.tip(),g=this.getUID(this.type);this.setContent(),f.attr("id",g),this.$element.attr("aria-describedby",g),this.options.animation&&f.addClass("fade");var h="function"==typeof this.options.placement?this.options.placement.call(this,f[0],this.$element[0]):this.options.placement,i=/\s?auto?\s?/i,j=i.test(h);j&&(h=h.replace(i,"")||"top"),f.detach().css({top:0,left:0,display:"block"}).addClass(h).data("bs."+this.type,this),this.options.container?f.appendTo(this.options.container):f.insertAfter(this.$element),this.$element.trigger("inserted.bs."+this.type);var 
k=this.getPosition(),l=f[0].offsetWidth,m=f[0].offsetHeight;if(j){var n=h,o=this.getPosition(this.$viewport);h="bottom"==h&&k.bottom+m>o.bottom?"top":"top"==h&&k.top-mo.width?"left":"left"==h&&k.left-lg.top+g.height&&(e.top=g.top+g.height-i)}else{var j=b.left-f,k=b.left+f+c;jg.right&&(e.left=g.left+g.width-k)}return e},c.prototype.getTitle=function(){var a,b=this.$element,c=this.options;return a=b.attr("data-original-title")||("function"==typeof c.title?c.title.call(b[0]):c.title)},c.prototype.getUID=function(a){do a+=~~(1e6*Math.random());while(document.getElementById(a));return a},c.prototype.tip=function(){if(!this.$tip&&(this.$tip=a(this.options.template),1!=this.$tip.length))throw new Error(this.type+" `template` option must consist of exactly 1 top-level element!");return this.$tip},c.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},c.prototype.enable=function(){this.enabled=!0},c.prototype.disable=function(){this.enabled=!1},c.prototype.toggleEnabled=function(){this.enabled=!this.enabled},c.prototype.toggle=function(b){var c=this;b&&(c=a(b.currentTarget).data("bs."+this.type),c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c))),b?(c.inState.click=!c.inState.click,c.isInStateTrue()?c.enter(c):c.leave(c)):c.tip().hasClass("in")?c.leave(c):c.enter(c)},c.prototype.destroy=function(){var a=this;clearTimeout(this.timeout),this.hide(function(){a.$element.off("."+a.type).removeData("bs."+a.type),a.$tip&&a.$tip.detach(),a.$tip=null,a.$arrow=null,a.$viewport=null})};var d=a.fn.tooltip;a.fn.tooltip=b,a.fn.tooltip.Constructor=c,a.fn.tooltip.noConflict=function(){return a.fn.tooltip=d,this}}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.popover"),f="object"==typeof b&&b;(e||!/destroy|hide/.test(b))&&(e||d.data("bs.popover",e=new c(this,f)),"string"==typeof b&&e[b]())})}var c=function(a,b){this.init("popover",a,b)};if(!a.fn.tooltip)throw new Error("Popover requires tooltip.js");c.VERSION="3.3.6",c.DEFAULTS=a.extend({},a.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:''}),c.prototype=a.extend({},a.fn.tooltip.Constructor.prototype),c.prototype.constructor=c,c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.setContent=function(){var a=this.tip(),b=this.getTitle(),c=this.getContent();a.find(".popover-title")[this.options.html?"html":"text"](b),a.find(".popover-content").children().detach().end()[this.options.html?"string"==typeof c?"html":"append":"text"](c),a.removeClass("fade top bottom left right in"),a.find(".popover-title").html()||a.find(".popover-title").hide()},c.prototype.hasContent=function(){return this.getTitle()||this.getContent()},c.prototype.getContent=function(){var a=this.$element,b=this.options;return a.attr("data-content")||("function"==typeof b.content?b.content.call(a[0]):b.content)},c.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")};var d=a.fn.popover;a.fn.popover=b,a.fn.popover.Constructor=c,a.fn.popover.noConflict=function(){return a.fn.popover=d,this}}(jQuery),+function(a){"use strict";function b(c,d){this.$body=a(document.body),this.$scrollElement=a(a(c).is(document.body)?window:c),this.options=a.extend({},b.DEFAULTS,d),this.selector=(this.options.target||"")+" .nav li > 
a",this.offsets=[],this.targets=[],this.activeTarget=null,this.scrollHeight=0,this.$scrollElement.on("scroll.bs.scrollspy",a.proxy(this.process,this)),this.refresh(),this.process()}function c(c){return this.each(function(){var d=a(this),e=d.data("bs.scrollspy"),f="object"==typeof c&&c;e||d.data("bs.scrollspy",e=new b(this,f)),"string"==typeof c&&e[c]()})}b.VERSION="3.3.6",b.DEFAULTS={offset:10},b.prototype.getScrollHeight=function(){return this.$scrollElement[0].scrollHeight||Math.max(this.$body[0].scrollHeight,document.documentElement.scrollHeight)},b.prototype.refresh=function(){var b=this,c="offset",d=0;this.offsets=[],this.targets=[],this.scrollHeight=this.getScrollHeight(),a.isWindow(this.$scrollElement[0])||(c="position",d=this.$scrollElement.scrollTop()),this.$body.find(this.selector).map(function(){var b=a(this),e=b.data("target")||b.attr("href"),f=/^#./.test(e)&&a(e);return f&&f.length&&f.is(":visible")&&[[f[c]().top+d,e]]||null}).sort(function(a,b){return a[0]-b[0]}).each(function(){b.offsets.push(this[0]),b.targets.push(this[1])})},b.prototype.process=function(){var a,b=this.$scrollElement.scrollTop()+this.options.offset,c=this.getScrollHeight(),d=this.options.offset+c-this.$scrollElement.height(),e=this.offsets,f=this.targets,g=this.activeTarget;if(this.scrollHeight!=c&&this.refresh(),b>=d)return g!=(a=f[f.length-1])&&this.activate(a);if(g&&b=e[a]&&(void 0===e[a+1]||b .dropdown-menu > .active").removeClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!1),b.addClass("active").find('[data-toggle="tab"]').attr("aria-expanded",!0),h?(b[0].offsetWidth,b.addClass("in")):b.removeClass("fade"),b.parent(".dropdown-menu").length&&b.closest("li.dropdown").addClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!0),e&&e()}var g=d.find("> .active"),h=e&&a.support.transition&&(g.length&&g.hasClass("fade")||!!d.find("> .fade").length);g.length&&h?g.one("bsTransitionEnd",f).emulateTransitionEnd(c.TRANSITION_DURATION):f(),g.removeClass("in")};var d=a.fn.tab;a.fn.tab=b,a.fn.tab.Constructor=c,a.fn.tab.noConflict=function(){return a.fn.tab=d,this};var e=function(c){c.preventDefault(),b.call(a(this),"show")};a(document).on("click.bs.tab.data-api",'[data-toggle="tab"]',e).on("click.bs.tab.data-api",'[data-toggle="pill"]',e)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.affix"),f="object"==typeof b&&b;e||d.data("bs.affix",e=new c(this,f)),"string"==typeof b&&e[b]()})}var c=function(b,d){this.options=a.extend({},c.DEFAULTS,d),this.$target=a(this.options.target).on("scroll.bs.affix.data-api",a.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",a.proxy(this.checkPositionWithEventLoop,this)),this.$element=a(b),this.affixed=null,this.unpin=null,this.pinnedOffset=null,this.checkPosition()};c.VERSION="3.3.6",c.RESET="affix affix-top affix-bottom",c.DEFAULTS={offset:0,target:window},c.prototype.getState=function(a,b,c,d){var e=this.$target.scrollTop(),f=this.$element.offset(),g=this.$target.height();if(null!=c&&"top"==this.affixed)return c>e?"top":!1;if("bottom"==this.affixed)return null!=c?e+this.unpin<=f.top?!1:"bottom":a-d>=e+g?!1:"bottom";var h=null==this.affixed,i=h?e:f.top,j=h?g:b;return null!=c&&c>=e?"top":null!=d&&i+j>=a-d?"bottom":!1},c.prototype.getPinnedOffset=function(){if(this.pinnedOffset)return this.pinnedOffset;this.$element.removeClass(c.RESET).addClass("affix");var a=this.$target.scrollTop(),b=this.$element.offset();return 
this.pinnedOffset=b.top-a},c.prototype.checkPositionWithEventLoop=function(){setTimeout(a.proxy(this.checkPosition,this),1)},c.prototype.checkPosition=function(){if(this.$element.is(":visible")){var b=this.$element.height(),d=this.options.offset,e=d.top,f=d.bottom,g=Math.max(a(document).height(),a(document.body).height());"object"!=typeof d&&(f=e=d),"function"==typeof e&&(e=d.top(this.$element)),"function"==typeof f&&(f=d.bottom(this.$element));var h=this.getState(g,b,e,f);if(this.affixed!=h){null!=this.unpin&&this.$element.css("top","");var i="affix"+(h?"-"+h:""),j=a.Event(i+".bs.affix");if(this.$element.trigger(j),j.isDefaultPrevented())return;this.affixed=h,this.unpin="bottom"==h?this.getPinnedOffset():null,this.$element.removeClass(c.RESET).addClass(i).trigger(i.replace("affix","affixed")+".bs.affix")}"bottom"==h&&this.$element.offset({top:g-b-f})}};var d=a.fn.affix;a.fn.affix=b,a.fn.affix.Constructor=c,a.fn.affix.noConflict=function(){return a.fn.affix=d,this},a(window).on("load",function(){a('[data-spy="affix"]').each(function(){var c=a(this),d=c.data();d.offset=d.offset||{},null!=d.offsetBottom&&(d.offset.bottom=d.offsetBottom),null!=d.offsetTop&&(d.offset.top=d.offsetTop),b.call(c,d)})})}(jQuery);
--------------------------------------------------------------------------------
/iot-springboot-dashboard/src/main/resources/static/js/script.js:
--------------------------------------------------------------------------------
1 | /**
2 | * @author apssouza22
3 | **/
4 | var map;
5 | function initMap() {
6 | map = new google.maps.Map(document.getElementById('map'), {
7 | zoom: 6,
8 | center: {lat: 53.36, lng: -6.263},
9 | mapTypeId: 'satellite'
10 | });
11 | }
12 |
13 |
14 | var totalTrafficChartData={
15 | labels : ["Vehicle"],
16 | datasets : [{
17 | label : "Route",
18 | data : [1]
19 | }
20 | ]
21 | };
22 |
23 | var route37TrafficChartData={
24 | labels : ["Vehicle"],
25 | datasets : [{
26 | data : [1]
27 | }
28 | ]
29 | };
30 |
31 | var poiTrafficChartData={
32 | labels : ["Vehicle"],
33 | datasets : [{
34 | data : [1]
35 | }
36 | ]
37 | };
38 |
39 | jQuery(document).ready(function() {
40 | //Charts
41 | var ctx1 = document.getElementById("totalTrafficChart").getContext("2d");
42 | window.tChart = new Chart(ctx1, {
43 | type: 'bar',
44 | data: totalTrafficChartData
45 | });
46 |
47 | var ctx2 = document.getElementById("route37TrafficChart").getContext("2d");
48 | window.wChart = new Chart(ctx2, {
49 | type: 'doughnut',
50 | data: route37TrafficChartData
51 | });
52 |
53 | var ctx3 = document.getElementById("poiTrafficChart").getContext("2d");
54 | window.pChart = new Chart(ctx3, {
55 | type: 'radar',
56 | data: poiTrafficChartData
57 | });
58 |
59 |
60 | //tables
61 | var totalTrafficList = jQuery("#total_traffic");
62 | var windowTrafficList = jQuery("#window_traffic");
63 | var poiTrafficList = jQuery("#poi_traffic");
64 |
65 | //use sockjs
66 | var socket = new SockJS('/stomp');
67 | var stompClient = Stomp.over(socket);
68 |
69 | stompClient.connect({ }, function(frame) {
70 | //subscribe to "/topic/trafficData" messages
71 | stompClient.subscribe("/topic/trafficData", function(data) {
72 | var dataList = data.body;
73 | var resp=jQuery.parseJSON(dataList);
74 | var points = resp.heatMap.map(e => new google.maps.LatLng(e.latitude, e.longitude));
75 | if (window.heatmapLayer) window.heatmapLayer.setMap(null); //drop the previous layer instead of stacking a new one per message
76 | window.heatmapLayer = new google.maps.visualization.HeatmapLayer({
77 | data: points,
78 | map: map
79 | });
80 | //Grid Box total
81 | var gridBoxTotal='';
82 | jQuery.each(resp.heatMap, function(i,vh) {
83 | gridBoxTotal += "<tr><td>"+ vh.latitude+" / "+vh.longitude+"</td><td>"+vh.totalCount+"</td></tr>";
84 | });
85 | jQuery("#gridBoxContent tbody").html(gridBoxTotal);
86 |
87 | //Total traffic
88 | var totalOutput='';
89 | jQuery.each(resp.totalTraffic, function(i,vh) {
90 | totalOutput += "<tr><td>"+ vh.routeId+"</td><td>"+vh.vehicleType+"</td><td>"+vh.totalCount+"</td></tr>";
91 | });
92 | var t_tabl_start = "<table class='table innerTable'><thead><tr><th>Route</th><th>Vehicle</th><th>Count</th></tr></thead><tbody>";
93 | var t_tabl_end = "</tbody></table>";
94 | totalTrafficList.html(t_tabl_start+totalOutput+t_tabl_end);
95 |
96 | //Window traffic
97 | var windowOutput='';
98 | jQuery.each(resp.windowTraffic, function(i,vh) {
99 | windowOutput += "<tr><td>"+ vh.routeId+"</td><td>"+vh.vehicleType+"</td><td>"+vh.totalCount+"</td></tr>";
100 | });
101 | var w_tabl_start = "<table class='table innerTable'><thead><tr><th>Route</th><th>Vehicle</th><th>Count</th></tr></thead><tbody>";
102 | var w_tabl_end = "</tbody></table>";
103 | windowTrafficList.html(w_tabl_start+windowOutput+w_tabl_end);
104 |
105 | //POI data
106 | var poiOutput='';
107 | jQuery.each(resp.poiTraffic, function(i,vh) {
108 | poiOutput += "<tr><td>"+vh.vehicleType+"</td><td>"+vh.distance+"</td></tr>";
109 | });
110 | var p_tabl_start = "<table class='table innerTable'><thead><tr><th>Vehicle</th><th>Distance</th></tr></thead><tbody>";
111 | var p_tabl_end = "</tbody></table>";
112 | poiTrafficList.html(p_tabl_start+poiOutput+p_tabl_end);
113 |
114 | //draw total traffic chart
115 | drawBarChart(resp.totalTraffic,totalTrafficChartData);
116 | window.tChart.update();
117 |
118 | //draw route-37 traffic chart
119 | drawDoughnutChart(resp.totalTraffic,route37TrafficChartData);
120 | window.wChart.update();
121 |
122 | //draw poi chart
123 | drawRadarChart(resp.poiTraffic,poiTrafficChartData);
124 | window.pChart.update();
125 |
126 | });
127 | });
128 | });
129 |
130 | function drawBarChart(trafficDetail,trafficChartData){
131 | //Prepare data for total traffic chart
132 | var chartLabel = [ "Bus","Large Truck", "Private Car","Small Truck", "Taxi"];
133 | var routeName = ["Route-37", "Route-82", "Route-43"];
134 | var chartData0 =[0,0,0,0,0], chartData1 =[0,0,0,0,0], chartData2 =[0,0,0,0,0];
135 |
136 | jQuery.each(trafficDetail, function(i,vh) {
137 |
138 | if(vh.routeId == routeName[0]){
139 | chartData0.splice(chartLabel.indexOf(vh.vehicleType),1,vh.totalCount);
140 | }
141 | if(vh.routeId == routeName[1]){
142 | chartData1.splice(chartLabel.indexOf(vh.vehicleType),1,vh.totalCount);
143 | }
144 | if(vh.routeId == routeName[2]){
145 | chartData2.splice(chartLabel.indexOf(vh.vehicleType),1,vh.totalCount);
146 | }
147 | });
148 |
149 | var trafficData = {
150 | labels : chartLabel,
151 | datasets : [{
152 | label : routeName[0],
153 | borderColor : "#878BB6",
154 | backgroundColor : "#878BB6",
155 | data : chartData0
156 | },
157 | {
158 | label : routeName[1],
159 | borderColor : "#4ACAB4",
160 | backgroundColor : "#4ACAB4",
161 | data : chartData1
162 | },
163 | {
164 | label : routeName[2],
165 | borderColor : "#FFEA88",
166 | backgroundColor : "#FFEA88",
167 | data : chartData2
168 | }
169 |
170 | ]
171 | };
172 | //update chart
173 | trafficChartData.datasets=trafficData.datasets;
174 | trafficChartData.labels=trafficData.labels;
175 | }
176 |
177 | function drawDoughnutChart(trafficDetail,trafficChartData){
178 | //Prepare data for Doughnut chart
179 | var chartData =[];
180 | var chartLabel = [];
181 | jQuery.each(trafficDetail, function(i,vh) {
182 | if(vh.routeId == "Route-37"){
183 | chartLabel.push(vh.vehicleType);
184 | chartData.push(vh.totalCount);
185 | }
186 | });
187 | var pieChartData = {
188 | labels : chartLabel,
189 | datasets : [{
190 | backgroundColor : ["#E81574","#DDE815","#B315E8","#e9967a","#90ee90"],
191 | data : chartData
192 | }]
193 | };
194 |
195 | //update chart
196 | trafficChartData.datasets=pieChartData.datasets;
197 | trafficChartData.labels=pieChartData.labels;
198 | }
199 |
200 |
201 | function drawRadarChart(trafficDetail,trafficChartData){
202 | var vTypeLabel =["Large Truck", "Small Truck"];
203 | var chartLabel = [];
204 | var chartData =[];
205 |
206 | jQuery.each(trafficDetail, function(i,vh) {
207 | chartData.push(vh.distance);
208 | //chartLabel.push(vh.vehicleId);
209 | chartLabel.push('V-'+(i+1));
210 | });
211 |
212 | var radarChartData = {
213 | labels : chartLabel,
214 | datasets : []
215 | };
216 |
217 | for(i=0; i<vTypeLabel.length; i++){
218 | radarChartData.datasets.push({ label : vTypeLabel[i], data : chartData });
219 | }
220 |
221 | //update chart
222 | trafficChartData.datasets=radarChartData.datasets;
223 | trafficChartData.labels=radarChartData.labels;
224 | }
--------------------------------------------------------------------------------
/iot-springboot-dashboard/src/main/resources/static/js/stomp.min.js:
--------------------------------------------------------------------------------
1 | /* stomp.js - minified STOMP over WebSocket JavaScript client used by the dashboard page */
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
3 | <modelVersion>4.0.0</modelVersion>
4 | <groupId>com.iot.app</groupId>
5 | <artifactId>iot-traffic-monitor</artifactId>
6 | <version>1.0.0</version>
7 | <packaging>pom</packaging>
8 | <name>IoT Traffic Monitor</name>
9 |
10 | <modules>
11 | <module>iot-kafka-producer</module>
12 | <module>iot-spark-processor</module>
13 | <module>iot-springboot-dashboard</module>
14 | </modules>
15 |
16 | </project>
--------------------------------------------------------------------------------
/project-orchestrate.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | ########################################################################
4 | # title: Build Complete Project
5 | # author: Alexsandro souza (https://dev.to/apssouza22)
6 | # url: https://github.com/apssouza22
7 | # description: Build the complete big data pipeline
8 | # usage: ./project-orchestrate.sh
9 | ########################################################################
10 |
11 | # Create the Cassandra schema
12 | docker exec cassandra-iot cqlsh --username cassandra --password cassandra -f /schema.cql
13 |
14 | # Create Kafka topic "iot-data-event"
15 | docker exec kafka-iot kafka-topics --create --topic iot-data-event --partitions 1 --replication-factor 1 --if-not-exists --zookeeper zookeeper:2181
16 |
17 | # Create our folders on the Hadoop file system and grant full permissions on them
18 | docker exec namenode hdfs dfs -rm -r -f /lambda-arch
19 | docker exec namenode hdfs dfs -mkdir /lambda-arch
20 | docker exec namenode hdfs dfs -mkdir /lambda-arch/checkpoint
21 | docker exec namenode hdfs dfs -chmod -R 777 /lambda-arch
22 | docker exec namenode hdfs dfs -chmod -R 777 /lambda-arch/checkpoint
23 |
24 | # Install the libc6-compat lib in both Spark containers - not required anymore
25 | #docker exec spark-master apk add --no-cache libc6-compat
26 | #docker exec spark-worker-1 apk add --no-cache libc6-compat
27 |
--------------------------------------------------------------------------------