├── .arcconfig ├── .gitignore ├── Dockerfile ├── LICENSE ├── README.md ├── iot-kafka-producer ├── README.md ├── pom.xml └── src │ └── main │ ├── java │ └── com │ │ └── iot │ │ └── app │ │ └── kafka │ │ ├── producer │ │ └── IoTDataProducer.java │ │ ├── util │ │ ├── IoTDataEncoder.java │ │ └── PropertyFileReader.java │ │ └── vo │ │ └── IoTData.java │ └── resources │ ├── iot-kafka.properties │ └── log4j.properties ├── iot-ksql-processor ├── README.md ├── resources │ ├── kafka.connect.properties │ ├── kafka.ksql.connect.properties │ ├── kubernetes │ │ ├── kafka.connect.properties │ │ ├── kafka.ksql.connect.properties │ │ ├── origin.sink.properties │ │ ├── poi_traffic.sink.properties │ │ ├── setup_yb_connect_sink.sh │ │ ├── total_traffic.sink.properties │ │ └── window_traffic.sink.properties │ ├── origin.sink.properties │ ├── poi_traffic.sink.properties │ ├── total_traffic.sink.properties │ └── window_traffic.sink.properties └── setup_streams.ksql ├── iot-spark-processor ├── README.md ├── pom.xml └── src │ └── main │ ├── java │ └── com │ │ └── iot │ │ └── app │ │ └── spark │ │ ├── entity │ │ ├── POITrafficData.java │ │ ├── TotalTrafficData.java │ │ └── WindowTrafficData.java │ │ ├── processor │ │ ├── IoTDataProcessor.java │ │ └── IoTTrafficDataProcessor.java │ │ ├── util │ │ ├── GeoDistanceCalculator.java │ │ ├── IoTDataDecoder.java │ │ └── PropertyFileReader.java │ │ └── vo │ │ ├── AggregateKey.java │ │ ├── IoTData.java │ │ └── POIData.java │ └── resources │ ├── iot-spark.properties │ └── log4j.properties ├── iot-springboot-dashboard ├── IoTData.cql ├── README.md ├── pom.xml └── src │ └── main │ ├── java │ └── com │ │ └── iot │ │ └── app │ │ └── springboot │ │ ├── dao │ │ ├── CassandraConfig.java │ │ ├── POITrafficDataRepository.java │ │ ├── TotalTrafficDataRepository.java │ │ ├── WindowTrafficDataRepository.java │ │ └── entity │ │ │ ├── POITrafficData.java │ │ │ ├── TotalTrafficData.java │ │ │ └── WindowTrafficData.java │ │ ├── dashboard │ │ ├── IoTDataDashboard.java 
│ │ ├── TrafficDataService.java │ │ └── WebSocketConfig.java │ │ └── vo │ │ └── Response.java │ └── resources │ ├── iot-springboot.properties │ ├── log4j.properties │ └── static │ ├── css │ ├── bootstrap.min.css │ └── style.css │ ├── index.html │ └── js │ ├── Chart.min.js │ ├── bootstrap.min.js │ ├── jquery-1.12.4.min.js │ ├── sockjs-1.1.1.min.js │ └── stomp.min.js ├── kubernetes └── helm │ ├── .helmignore │ ├── README.md │ └── yb-iot-helm │ ├── .helmignore │ ├── Chart.yaml │ ├── templates │ ├── NOTES.txt │ ├── _helpers.tpl │ ├── deployment.yaml │ └── service.yaml │ └── values.yaml ├── pom.xml ├── resources └── IoTData.cql ├── yb-iot-fleet-management-screenshot.png ├── yb-iot-fleet-mgmt-ksql-arch.png └── yb-iot-fleet-mgmt-spark-arch.png /.arcconfig: -------------------------------------------------------------------------------- 1 | { 2 | "phabricator.uri" : "https://phabricator.dev.yugabyte.com", 3 | "repository.callsign": "IoT-Kafka-YugaByte-Spring", 4 | "git:arc.feature.start.default" : "upstream/master", 5 | "arc.land.onto.default": "master", 6 | "arc.feature.start.default" : "master" 7 | } 8 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | build 2 | logs 3 | target 4 | .idea 5 | .idea_modules 6 | /.classpath 7 | /.project 8 | /.settings 9 | /RUNNING_PID 10 | dependency-reduced-pom.xml 11 | 12 | # VIM/emacs stuff 13 | *.swp 14 | *~ 15 | core* 16 | 17 | oprofile_data 18 | .metadata/ 19 | *.iml 20 | 21 | app/.DS_STORE 22 | .DS_Store 23 | app/views/.DS_Store 24 | 25 | 26 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Docker image for https://github.com/YugaByte/yb-iot-fleet-management 2 | FROM openjdk:8-jre-alpine 3 | MAINTAINER YugaByte 4 | ENV container=yugabyte-iot 5 | 6 | # App install directory 7 
| ENV IOT_HOME=/home/yugabyte-iot 8 | WORKDIR $IOT_HOME 9 | 10 | # Copy all the application jars. 11 | COPY ./iot-kafka-producer/target/iot-kafka-producer-1.0.0.jar $IOT_HOME/iot-kafka-producer.jar 12 | COPY ./iot-springboot-dashboard/target/iot-springboot-dashboard-1.0.0.jar $IOT_HOME/iot-springboot-dashboard.jar 13 | 14 | # Expose necessary ports. 15 | EXPOSE 8080 16 | 17 | # 18 | # To build: 19 | # cd yb-iot-fleet-management 20 | # docker build . -t yb-iot 21 | # docker tag yugabytedb/yb-iot: 22 | # docker push yugabytedb/yb-iot:latest 23 | # 24 | # To run: 25 | # docker run -p 8080:8080 --name yb-iot yb-iot 26 | # 27 | # Stop: 28 | # docker stop yb-iot 29 | # docker rm yb-iot 30 | # 31 | # Notes: 32 | # Needs to be able to talk to port 9042(YugaByte DB) and port 9092(Confluent Kafka) 33 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 
22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. 
You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. 
(Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # IoT Fleet Management 2 | 3 | YugaByte DB is world's 1st open source database that is both NoSQL (Cassandra & Redis compatible) and SQL (PostgreSQL compatible) at the same time. It is purpose-built to power fast-growing online services on public, private and hybrid clouds with transactional data integrity, low latency, high throughput and multi-region scalability while also using popular NoSQL and SQL APIs. 4 | 5 | This is a sample application that shows how real-time streaming applications (such as those in the IoT vertical) can leverage YugaByte DB as a highly reliable, elastic operational database. It uses YugaByte DB's Cassandra-compatible YCQL API. 6 | 7 | ## Scenario 8 | 9 | Here is a brief description of the scenario. 
Assume that a fleet management company wants to track its fleet of vehicles, which are of different types (18 wheelers, buses, large trucks, etc.).
40 | 41 | - IoT Spring Boot Dashboard 42 | This app uses the Java Spring Boot framework with its integration for Cassandra as the data layer, using the Cassandra Query Language (CQL) internally. 43 | 44 | ### Architecture with KSQL 45 | ![Architecture with KSQL](https://github.com/YugaByte/yb-iot-fleet-management/blob/master/yb-iot-fleet-mgmt-ksql-arch.png) 46 | 47 | ### Architecture with Apache Spark Streaming 48 | ![Architecture with Apache Spark Streaming](https://github.com/YugaByte/yb-iot-fleet-management/blob/master/yb-iot-fleet-mgmt-spark-arch.png) 49 | 50 | ## Prerequisites 51 | 52 | For building these projects it requires following tools. Please refer README.md files of individual projects for more details. 53 | - JDK - 1.8 + 54 | - Maven - 3.3 + 55 | - Confluent Open Source - 5.0.0 (we assume this is installed in the `~/yb-kafka/confluent-os/confluent-5.0.0` directory). 56 | - YugaByte Connect sink - 1.0.0 (clone this into `~/yb-kafka/yb-kafka-connector`). 57 | 58 | ## Run using kubernetes 59 | Refer [here](https://github.com/YugaByte/yb-iot-fleet-management/tree/master/kubernetes/helm) for the setup and run steps using a helm based kubernetes environment. 60 | 61 | ## Steps to setup local environment 62 | 1. Clone this repository. 63 | ```sh 64 | git clone https://github.com/YugaByte/yb-iot-fleet-management.git ~ 65 | ``` 66 | 67 | 2. Build the required binaries. 68 | ```sh 69 | cd ~/yb-iot-fleet-management 70 | mvn package 71 | ``` 72 | 73 | 3. Download Confluent Open Source from https://www.confluent.io/download/. This is a manual step, since an email id is needed to register (as of Nov 2018). 74 | Unbundle the content of the tar.gz to location `~/yb-kafka/confluent-os/confluent-5.0.0` using these steps. 75 | ``` 76 | mkdir -p ~/yb-kafka/confluent-os 77 | cd ~/yb-kafka/confluent-os 78 | tar -xvf confluent-5.0.0-2.11.tar.gz 79 | ``` 80 | 81 | 4. 
Include dependent components into Kafka connectors: 82 | - Build the jar from this repo and copy it for use by Kafka: 83 | ``` 84 | cd ~/yb-kafka/ 85 | git clone https://github.com/YugaByte/yb-kafka-connector.git 86 | cd ~/yb-kafka/yb-kafka-connector/ 87 | mvn clean install -DskipTests 88 | mkdir ~/yb-kafka/confluent-os/confluent-5.0.0/share/java/kafka-connect-yugabyte/ 89 | cp ~/yb-kafka/yb-kafka-connector/target/yb-kafka-connnector-1.0.0.jar ~/yb-kafka/confluent-os/confluent-5.0.0/share/java/kafka-connect-yugabyte/ 90 | ``` 91 | - Setup the property files for use by Connect Sink. 92 | ``` 93 | cd ~/yb-iot-fleet-management 94 | cp iot-ksql-processor/resources/kafka.*connect.properties ~/yb-kafka/confluent-os/confluent-5.0.0/etc/kafka/ 95 | mkdir -p ~/yb-kafka/confluent-os/confluent-5.0.0/etc/kafka-connect-yugabyte 96 | cp iot-ksql-processor/resources/*.sink.properties ~/yb-kafka/confluent-os/confluent-5.0.0/etc/kafka-connect-yugabyte 97 | ``` 98 | - Download the dependent jars from maven central repository using the following commands. 99 | ``` 100 | cd ~/yb-kafka/confluent-os/confluent-5.0.0/share/java/kafka-connect-yugabyte/ 101 | wget http://central.maven.org/maven2/io/netty/netty-all/4.1.25.Final/netty-all-4.1.25.Final.jar 102 | wget http://central.maven.org/maven2/com/yugabyte/cassandra-driver-core/3.2.0-yb-18/cassandra-driver-core-3.2.0-yb-18.jar 103 | wget http://central.maven.org/maven2/com/codahale/metrics/metrics-core/3.0.1/metrics-core-3.0.1.jar 104 | ``` 105 | 106 | The final list of jars should look like this: 107 | ``` 108 | $ ls -al 109 | -rw-r--r--@ 85449 Oct 27 2013 metrics-core-3.0.1.jar 110 | -rw-r--r--@ 3823147 Oct 27 15:18 netty-all-4.1.25.Final.jar 111 | -rw-r--r-- 1100520 Oct 29 11:18 cassandra-driver-core-3.2.0-yb-18.jar 112 | -rw-r--r-- 14934 Oct 29 11:19 yb-kafka-connnector-1.0.0.jar 113 | ``` 114 | 115 | 5. 
Do the following to run Kafka and related components: 116 | ``` 117 | export PATH=$PATH:~/yb-kafka/confluent-os/confluent-5.0.0/bin 118 | confluent start ksql-server 119 | confluent status 120 | ``` 121 | 122 | The output for the `confluent status` should look like 123 | ``` 124 | control-center is [DOWN] 125 | ksql-server is [UP] 126 | connect is [DOWN] 127 | kafka-rest is [DOWN] 128 | schema-registry is [UP] 129 | kafka is [UP] 130 | zookeeper is [UP] 131 | ``` 132 | *Note*: It is required that the `DOWN` components in this list are not actually enabled. 133 | 134 | 6. Create the origin Kafka topic 135 | ``` 136 | ~/yb-kafka/confluent-os/confluent-5.0.0/bin/kafka-topics --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic iot-data-event 137 | ``` 138 | *Note*: This is needed to be done only the first time. 139 | 140 | 7. Install YugaByte DB. 141 | - [Install YugaByte DB and start a local cluster](https://docs.yugabyte.com/quick-start/install/). 142 | 143 | 8. Create the YugaByte DB tables 144 | - Create the keyspaces and tables by running the following command. You can find `cqlsh` in the `bin` sub-directory located inside the YugaByte installation folder. 145 | ``` 146 | $> cqlsh -f resources/IoTData.cql 147 | ``` 148 | 149 | 9. Run the origin topic YugaByte DB Connect Sink 150 | ``` 151 | cd ~/yb-kafka/confluent-os/confluent-5.0.0 152 | nohup ./bin/connect-standalone ./etc/kafka/kafka.connect.properties ./etc/kafka-connect-yugabyte/origin.sink.properties >& origin_sink.txt & 153 | ``` 154 | This will insert the origin topic data into the YugaByte DB CQL table `TrafficKeySpace.Origin_Table`. 155 | 156 | ## Running the application 157 | From the top level directory of this repo, run the following 158 | 159 | 1. Start the data producer. 160 | ```sh 161 | cd ~/yb-iot-fleet-management 162 | java -jar iot-kafka-producer/target/iot-kafka-producer-1.0.0.jar 163 | ``` 164 | 165 | It should start emitting data points to the Kafka topic. 
You should see something like the following as the output on the console: 166 | ``` 167 | 2017-10-16 12:31:52 INFO IoTDataEncoder:28 - {"vehicleId":"0bf45cac-d1b8-4364-a906-980e1c2bdbcb","vehicleType":"Taxi","routeId":"Route-37","longitude":"-95.255615","latitude":"33.49808","timestamp":"2017-10-16 12:31:03","speed":49.0,"fuelLevel":38.0} 168 | 169 | 2017-10-16 12:31:53 INFO IoTDataEncoder:28 - {"vehicleId":"600863bc-c918-4c8e-a90b-7d66db4958e0","vehicleType":"18 Wheeler","routeId":"Route-43","longitude":"-97.918175","latitude":"35.78791","timestamp":"2017-10-16 12:31:03","speed":59.0,"fuelLevel":12.0} 170 | ``` 171 | 172 | 2. Start the data processing application 173 | Use either of these options: 174 | - Spark 175 | - Run the spark app using this 176 | ```sh 177 | java -jar iot-spark-processor/target/iot-spark-processor-1.0.0.jar 178 | ``` 179 | - KSQL 180 | - Setup the KSQL tables/streams 181 | ``` 182 | ksql <& ksql_sink.txt & 191 | ``` 192 | 193 | 3. Start the UI application. 194 | ```sh 195 | java -jar ~/yb-iot-fleet-management/iot-springboot-dashboard/target/iot-springboot-dashboard-1.0.0.jar 196 | ``` 197 | 198 | 4. Now open the dashboard UI in a web browser. The application will refresh itself periodically. 199 | ``` 200 | http://localhost:8080 201 | ``` 202 | -------------------------------------------------------------------------------- /iot-kafka-producer/README.md: -------------------------------------------------------------------------------- 1 | # IoT Kafka Producer 2 | IoT Kafka Producer is a Maven application for generating IoT Data events using Apache Kafka. This project requires following tools and technologies. 3 | 4 | - JDK - 1.8 5 | - Maven - 3.3.9 6 | - ZooKeeper - 3.4.8 7 | - Kafka - 2.10-0.10.0.0 8 | 9 | You can build and run this application using below commands. Please check resources/iot-kafka.properties for configuration details. 
10 | 11 | ```sh 12 | mvn package 13 | mvn exec:java -Dexec.mainClass="com.iot.app.kafka.producer.IoTDataProducer" 14 | 15 | ``` 16 | 17 | Alternate way to run this application is using the “iot-kafka-producer-1.0.0.jar” file created by maven. Open command prompt, go to target folder and execute below command. 18 | 19 | ```sh 20 | java -jar iot-kafka-producer-1.0.0.jar 21 | ``` -------------------------------------------------------------------------------- /iot-kafka-producer/pom.xml: -------------------------------------------------------------------------------- 1 | 3 | 4.0.0 4 | com.iot.app.kafka 5 | iot-kafka-producer 6 | 1.0.0 7 | IoT Kafka Producer 8 | 9 | 10 | 11 | org.apache.kafka 12 | kafka_2.10 13 | 0.8.1 14 | 15 | 16 | com.fasterxml.jackson.core 17 | jackson-core 18 | 2.6.6 19 | 20 | 21 | com.fasterxml.jackson.core 22 | jackson-databind 23 | 2.6.6 24 | 25 | 26 | com.fasterxml.jackson.core 27 | jackson-annotations 28 | 2.6.6 29 | 30 | 31 | log4j 32 | log4j 33 | 1.2.17 34 | 35 | 36 | junit 37 | junit 38 | 4.12 39 | 40 | 41 | 42 | 43 | 44 | ${basedir}/src/main/resources 45 | 46 | 47 | 48 | 49 | org.apache.maven.plugins 50 | maven-compiler-plugin 51 | 3.1 52 | 53 | 1.8 54 | 1.8 55 | 56 | 57 | 58 | org.apache.maven.plugins 59 | maven-shade-plugin 60 | 2.4.3 61 | 62 | 63 | package 64 | 65 | shade 66 | 67 | 68 | 69 | 71 | com.iot.app.kafka.producer.IoTDataProducer 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | -------------------------------------------------------------------------------- /iot-kafka-producer/src/main/java/com/iot/app/kafka/producer/IoTDataProducer.java: -------------------------------------------------------------------------------- 1 | package com.iot.app.kafka.producer; 2 | 3 | import java.util.ArrayList; 4 | import java.util.Arrays; 5 | import java.util.Collections; 6 | import java.util.Date; 7 | import java.util.List; 8 | import java.util.Properties; 9 | import java.util.Random; 10 | import java.util.UUID; 11 | 12 | import 
org.apache.log4j.Logger; 13 | 14 | import com.iot.app.kafka.util.PropertyFileReader; 15 | import com.iot.app.kafka.vo.IoTData; 16 | 17 | import kafka.javaapi.producer.Producer; 18 | import kafka.producer.KeyedMessage; 19 | import kafka.producer.ProducerConfig; 20 | 21 | /** 22 | * IoT data event producer class which uses Kafka producer for events. 23 | * 24 | * @author abaghel 25 | * 26 | */ 27 | public class IoTDataProducer { 28 | 29 | private static final Logger logger = Logger.getLogger(IoTDataProducer.class); 30 | 31 | public static void main(String[] args) throws Exception { 32 | //read config file 33 | Properties prop = PropertyFileReader.readPropertyFile(); 34 | String zookeeper = prop.getProperty("com.iot.app.kafka.zookeeper"); 35 | if (System.getProperty("com.iot.app.kafka.zookeeper") != null) { 36 | zookeeper = System.getProperty("com.iot.app.kafka.zookeeper"); 37 | } 38 | String brokerList = prop.getProperty("com.iot.app.kafka.brokerlist"); 39 | if (System.getProperty("com.iot.app.kafka.brokerlist") != null) { 40 | brokerList = System.getProperty("com.iot.app.kafka.brokerlist"); 41 | } 42 | String topic = prop.getProperty("com.iot.app.kafka.topic"); 43 | if (System.getProperty("com.iot.app.kafka.topic") != null) { 44 | topic = System.getProperty("com.iot.app.kafka.topic"); 45 | } 46 | logger.info("Using Zookeeper=" + zookeeper + " ,Broker-list=" + brokerList + " and topic " + topic); 47 | 48 | // set producer properties 49 | Properties properties = new Properties(); 50 | properties.put("zookeeper.connect", zookeeper); 51 | properties.put("metadata.broker.list", brokerList); 52 | properties.put("request.required.acks", "1"); 53 | properties.put("serializer.class", "com.iot.app.kafka.util.IoTDataEncoder"); 54 | //generate event 55 | Producer producer = new Producer(new ProducerConfig(properties)); 56 | IoTDataProducer iotProducer = new IoTDataProducer(); 57 | iotProducer.generateIoTEvent(producer,topic); 58 | } 59 | 60 | 61 | /** 62 | * Method runs in 
while loop and generates random IoT data in JSON with below format. 63 | * 64 | * {"vehicleId":"52f08f03-cd14-411a-8aef-ba87c9a99997","vehicleType":"Public Transport","routeId":"route-43","latitude":",-85.583435","longitude":"38.892395","timestamp":1465471124373,"speed":80.0,"fuelLevel":28.0} 65 | * 66 | * @throws InterruptedException 67 | * 68 | * 69 | */ 70 | private void generateIoTEvent(Producer producer, String topic) throws InterruptedException { 71 | List routeList = Arrays.asList(new String[]{"Route-37", "Route-43", "Route-82"}); 72 | List vehicleTypeList = Arrays.asList(new String[]{"Large Truck", "Small Truck", "Van", "18 Wheeler", "Car"}); 73 | Random rand = new Random(); 74 | logger.info("Sending events"); 75 | // generate event in loop 76 | while (true) { 77 | List eventList = new ArrayList(); 78 | for (int i = 0; i < 100; i++) {// create 100 vehicles 79 | String vehicleId = UUID.randomUUID().toString(); 80 | String vehicleType = vehicleTypeList.get(rand.nextInt(5)); 81 | String routeId = routeList.get(rand.nextInt(3)); 82 | double speed = rand.nextInt(100 - 20) + 20;// random speed between 20 to 100 83 | double fuelLevel = rand.nextInt(40 - 10) + 10; 84 | for (int j = 0; j < 5; j++) {// Add 5 events for each vehicle 85 | String coords = getCoordinates(routeId); 86 | String latitude = coords.substring(0, coords.indexOf(",")); 87 | String longitude = coords.substring(coords.indexOf(",") + 1, coords.length()); 88 | // The timestamp field is set during event submission to get different values across events. 
89 | IoTData event = new IoTData(vehicleId, vehicleType, routeId, latitude, longitude, null, speed, fuelLevel); 90 | eventList.add(event); 91 | } 92 | } 93 | Collections.shuffle(eventList);// shuffle for random events 94 | for (IoTData event : eventList) { 95 | event.setTimestamp(new Date()); 96 | KeyedMessage data = new KeyedMessage(topic, event); 97 | producer.send(data); 98 | Thread.sleep(rand.nextInt(1000 - 500) + 500);//random delay of 0.5 to 1 second 99 | } 100 | } 101 | } 102 | 103 | //Method to generate random latitude and longitude for routes 104 | private String getCoordinates(String routeId) { 105 | Random rand = new Random(); 106 | int latPrefix = 0; 107 | int longPrefix = -0; 108 | if (routeId.equals("Route-37")) { 109 | latPrefix = 33; 110 | longPrefix = -96; 111 | } else if (routeId.equals("Route-82")) { 112 | latPrefix = 34; 113 | longPrefix = -97; 114 | } else if (routeId.equals("Route-43")) { 115 | latPrefix = 35; 116 | longPrefix = -98; 117 | } 118 | Float lati = latPrefix + rand.nextFloat(); 119 | Float longi = longPrefix + rand.nextFloat(); 120 | return lati + "," + longi; 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /iot-kafka-producer/src/main/java/com/iot/app/kafka/util/IoTDataEncoder.java: -------------------------------------------------------------------------------- 1 | package com.iot.app.kafka.util; 2 | 3 | import org.apache.log4j.Logger; 4 | 5 | import com.fasterxml.jackson.core.JsonProcessingException; 6 | import com.fasterxml.jackson.databind.ObjectMapper; 7 | import com.iot.app.kafka.vo.IoTData; 8 | 9 | import kafka.serializer.Encoder; 10 | import kafka.utils.VerifiableProperties; 11 | 12 | /** 13 | * Class to convert IoTData java object to JSON String 14 | * 15 | * @author abaghel 16 | * 17 | */ 18 | public class IoTDataEncoder implements Encoder { 19 | 20 | private static final Logger logger = Logger.getLogger(IoTDataEncoder.class); 21 | private static ObjectMapper 
objectMapper = new ObjectMapper(); 22 | public IoTDataEncoder(VerifiableProperties verifiableProperties) { 23 | 24 | } 25 | public byte[] toBytes(IoTData iotEvent) { 26 | try { 27 | String msg = objectMapper.writeValueAsString(iotEvent); 28 | logger.info(msg); 29 | return msg.getBytes(); 30 | } catch (JsonProcessingException e) { 31 | logger.error("Error in Serialization", e); 32 | } 33 | return null; 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /iot-kafka-producer/src/main/java/com/iot/app/kafka/util/PropertyFileReader.java: -------------------------------------------------------------------------------- 1 | package com.iot.app.kafka.util; 2 | 3 | import java.io.IOException; 4 | import java.io.InputStream; 5 | import java.util.Properties; 6 | 7 | import org.apache.log4j.Logger; 8 | 9 | /** 10 | * Utility class to read property file 11 | * 12 | * @author abaghel 13 | * 14 | */ 15 | public class PropertyFileReader { 16 | private static final Logger logger = Logger.getLogger(PropertyFileReader.class); 17 | private static Properties prop = new Properties(); 18 | public static Properties readPropertyFile() throws Exception { 19 | if (prop.isEmpty()) { 20 | InputStream input = PropertyFileReader.class.getClassLoader().getResourceAsStream("iot-kafka.properties"); 21 | try { 22 | prop.load(input); 23 | } catch (IOException ex) { 24 | logger.error(ex); 25 | throw ex; 26 | } finally { 27 | if (input != null) { 28 | input.close(); 29 | } 30 | } 31 | } 32 | return prop; 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /iot-kafka-producer/src/main/java/com/iot/app/kafka/vo/IoTData.java: -------------------------------------------------------------------------------- 1 | package com.iot.app.kafka.vo; 2 | 3 | import java.io.Serializable; 4 | import java.util.Date; 5 | 6 | import com.fasterxml.jackson.annotation.JsonFormat; 7 | 8 | /** 9 | * Class to represent the IoT vehicle 
data. 10 | * 11 | * @author abaghel 12 | * 13 | */ 14 | public class IoTData implements Serializable{ 15 | 16 | private String vehicleId; 17 | private String vehicleType; 18 | private String routeId; 19 | private String longitude; 20 | private String latitude; 21 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss", timezone="MST") 22 | private Date timestamp; 23 | private double speed; 24 | private double fuelLevel; 25 | 26 | public IoTData(){ 27 | 28 | } 29 | 30 | public IoTData(String vehicleId, String vehicleType, String routeId, String latitude, String longitude, 31 | Date timestamp, double speed, double fuelLevel) { 32 | super(); 33 | this.vehicleId = vehicleId; 34 | this.vehicleType = vehicleType; 35 | this.routeId = routeId; 36 | this.longitude = longitude; 37 | this.latitude = latitude; 38 | this.timestamp = timestamp; 39 | this.speed = speed; 40 | this.fuelLevel = fuelLevel; 41 | } 42 | 43 | public String getVehicleId() { 44 | return vehicleId; 45 | } 46 | 47 | public String getVehicleType() { 48 | return vehicleType; 49 | } 50 | 51 | public String getRouteId() { 52 | return routeId; 53 | } 54 | 55 | public String getLongitude() { 56 | return longitude; 57 | } 58 | 59 | public String getLatitude() { 60 | return latitude; 61 | } 62 | 63 | public Date getTimestamp() { 64 | return timestamp; 65 | } 66 | 67 | public void setTimestamp(Date ts) { 68 | timestamp = ts; 69 | } 70 | 71 | public double getSpeed() { 72 | return speed; 73 | } 74 | 75 | public double getFuelLevel() { 76 | return fuelLevel; 77 | } 78 | 79 | } 80 | -------------------------------------------------------------------------------- /iot-kafka-producer/src/main/resources/iot-kafka.properties: -------------------------------------------------------------------------------- 1 | #Kafka properties 2 | com.iot.app.kafka.zookeeper=localhost:2181 3 | com.iot.app.kafka.brokerlist=localhost:9092 4 | com.iot.app.kafka.topic=iot-data-event 
-------------------------------------------------------------------------------- /iot-kafka-producer/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | # Root logger option 2 | log4j.rootLogger=INFO, file, stdout 3 | 4 | # Direct log messages to a log file 5 | log4j.appender.file=org.apache.log4j.RollingFileAppender 6 | log4j.appender.file.File=/tmp/iot-kafka.log 7 | log4j.appender.file.MaxFileSize=10MB 8 | log4j.appender.file.MaxBackupIndex=10 9 | log4j.appender.file.layout=org.apache.log4j.PatternLayout 10 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n 11 | 12 | # Direct log messages to stdout 13 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender 14 | log4j.appender.stdout.Target=System.out 15 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout 16 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n -------------------------------------------------------------------------------- /iot-ksql-processor/README.md: -------------------------------------------------------------------------------- 1 | # IoT KSQL Processor 2 | 3 | IoT KSQL Processor provides a sequence of steps to setup and process Kafka streams. Processed data is persisted in to YugaByte DB. 4 | 5 | ## Prerequisites 6 | This project requires following tools. 7 | - Confluent 5.0.0 or later, for KSQL. Assumed to be installed in `~/yb-kafka/confluent-os/confluent-5.0.0`. 8 | - YugaByte DB, [installed](https://docs.yugabyte.com/quick-start/install/) and local cluster started up. `cqlsh` is present in this install. 9 | - The setup steps from the top-level [README](https://github.com/YugaByte/yb-iot-fleet-management/blob/master/README.md) should have been performed. 10 | 11 | ## Running real-time IoT KSQL processor 12 | Please perform the following steps from the *top level directory* of this repo. 13 | 14 | - Create the KSQL streams/tables. 
15 | ``` 16 | ksql < cqlsh 31 | select count(*) from TrafficKeySpace.Origin_Table; 32 | select count(*) from TrafficKeySpace.Total_Traffic; 33 | select count(*) from TrafficKeySpace.Window_Traffic; 34 | select count(*) from TrafficKeySpace.Poi_Traffic; 35 | ``` 36 | 37 | Once the SpringBoot app is running, one can visualize it at `http://localhost:8080` as well. 38 | -------------------------------------------------------------------------------- /iot-ksql-processor/resources/kafka.connect.properties: -------------------------------------------------------------------------------- 1 | # Sample file with kafka server properties. 2 | 3 | # Parameters for use by (local) kafka server. 4 | bootstrap.servers=localhost:9092 5 | offset.storage.file.filename=/tmp/yb.connect.offsets 6 | offset.flush.interval.ms=10000 7 | 8 | # Format of data in Kafka and how to translate it into YB connect sink data. 9 | key.converter=org.apache.kafka.connect.storage.StringConverter 10 | value.converter=org.apache.kafka.connect.json.JsonConverter 11 | key.converter.schemas.enable=false 12 | value.converter.schemas.enable=false 13 | 14 | # Location of YB sink jar. 15 | plugin.path=share/java 16 | -------------------------------------------------------------------------------- /iot-ksql-processor/resources/kafka.ksql.connect.properties: -------------------------------------------------------------------------------- 1 | # Sample file with kafka server properties. 2 | 3 | # Parameters for use by (local) kafka server. 4 | bootstrap.servers=localhost:9092 5 | offset.storage.file.filename=/tmp/yb.connect.offsets 6 | offset.flush.interval.ms=10000 7 | 8 | # Format of data in Kafka and how to translate it into YB connect sink data. 9 | key.converter=org.apache.kafka.connect.storage.StringConverter 10 | value.converter=org.apache.kafka.connect.json.JsonConverter 11 | key.converter.schemas.enable=false 12 | value.converter.schemas.enable=false 13 | 14 | # Location of YB sink jar. 
15 | plugin.path=share/java 16 | 17 | # Rest port to avoid conflict with origin table connect. 18 | rest.port=8082 19 | -------------------------------------------------------------------------------- /iot-ksql-processor/resources/kubernetes/kafka.connect.properties: -------------------------------------------------------------------------------- 1 | # Sample file with kafka server properties for use in kubernetes env. 2 | 3 | # Parameters for use by (local) kafka server. 4 | bootstrap.servers=kafka-demo-cp-kafka-headless:9092 5 | offset.storage.file.filename=/tmp/yb.connect.offsets 6 | offset.flush.interval.ms=10000 7 | 8 | # Format of data in Kafka and how to translate it into YB connect sink data. 9 | key.converter=org.apache.kafka.connect.storage.StringConverter 10 | value.converter=org.apache.kafka.connect.json.JsonConverter 11 | key.converter.schemas.enable=false 12 | value.converter.schemas.enable=false 13 | 14 | # Location of YB sink jar. 15 | plugin.path=share/java 16 | -------------------------------------------------------------------------------- /iot-ksql-processor/resources/kubernetes/kafka.ksql.connect.properties: -------------------------------------------------------------------------------- 1 | # Sample file with kafka server properties. 2 | 3 | # Parameters for use by (local) kafka server. 4 | bootstrap.servers=kafka-demo-cp-kafka-headless:9092 5 | offset.storage.file.filename=/tmp/yb.connect.offsets 6 | offset.flush.interval.ms=10000 7 | 8 | # Format of data in Kafka and how to translate it into YB connect sink data. 9 | key.converter=org.apache.kafka.connect.storage.StringConverter 10 | value.converter=org.apache.kafka.connect.json.JsonConverter 11 | key.converter.schemas.enable=false 12 | value.converter.schemas.enable=false 13 | 14 | # Location of YB sink jar. 15 | plugin.path=share/java 16 | 17 | # Rest port to avoid conflict with origin table connect. 
18 | rest.port=8082 19 | -------------------------------------------------------------------------------- /iot-ksql-processor/resources/kubernetes/origin.sink.properties: -------------------------------------------------------------------------------- 1 | // Sample file to connect input from original Kafka topic into YugaByte DB. 2 | name=yugabyte-sink 3 | connector.class=com.yb.connect.sink.YBSinkConnector 4 | 5 | topics=iot-data-event 6 | 7 | yugabyte.cql.keyspace=TrafficKeySpace 8 | yugabyte.cql.tablename=Origin_Table 9 | yugabyte.cql.contact.points=yb-tservers:9042 10 | -------------------------------------------------------------------------------- /iot-ksql-processor/resources/kubernetes/poi_traffic.sink.properties: -------------------------------------------------------------------------------- 1 | name=yugabyte-sink-poi 2 | connector.class=com.yb.connect.sink.YBSinkConnector 3 | 4 | topics=poi_traffic 5 | 6 | yugabyte.cql.keyspace=TrafficKeySpace 7 | yugabyte.cql.tablename=Poi_Traffic 8 | yugabyte.cql.contact.points=yb-tservers:9042 9 | 10 | -------------------------------------------------------------------------------- /iot-ksql-processor/resources/kubernetes/setup_yb_connect_sink.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euo pipefail 4 | 5 | print_help() { 6 | cat <<-EOT 7 | This script runs all the setup steps needed for Kafka to YugaByte DB Connect Sink. 8 | Usage: ${0##*/} 9 | Options: 10 | --kafka_helm_name 11 | The name used during the helm install of cp-helm-charts. Required. 12 | EOT 13 | } 14 | 15 | kafka_name="" 16 | 17 | setup_yb_kafka_sink() { 18 | cd ~/code/yb-iot-fleet-management/iot-ksql-processor/resources/ 19 | mkdir -p kafka-connect-yugabyte-deps 20 | cd kafka-connect-yugabyte-deps 21 | echo "Copying dependent jars." 
22 | wget http://central.maven.org/maven2/io/netty/netty-all/4.1.25.Final/netty-all-4.1.25.Final.jar >& /dev/null 23 | wget http://central.maven.org/maven2/com/yugabyte/cassandra-driver-core/3.2.0-yb-18/cassandra-driver-core-3.2.0-yb-18.jar >& /dev/null 24 | wget http://central.maven.org/maven2/com/codahale/metrics/metrics-core/3.0.1/metrics-core-3.0.1.jar >& /dev/null 25 | kubectl cp netty-all-4.1.25.Final.jar $kafka_name-cp-kafka-0:/usr/share/java/kafka -c cp-kafka-broker 26 | kubectl cp cassandra-driver-core-3.2.0-yb-18.jar $kafka_name-cp-kafka-0:/usr/share/java/kafka -c cp-kafka-broker 27 | kubectl cp metrics-core-3.0.1.jar $kafka_name-cp-kafka-0:/usr/share/java/kafka -c cp-kafka-broker 28 | 29 | echo "Copying property files." 30 | cd ~/code/yb-kafka-connector 31 | mvn clean install -DskipTests >& /dev/null 32 | kubectl cp target/yb-kafka-connnector-1.0.0.jar $kafka_name-cp-kafka-0:/usr/share/java/kafka -c cp-kafka-broker 33 | 34 | cd ~/code/yb-iot-fleet-management/iot-ksql-processor/resources 35 | kubectl cp kubernetes/ $kafka_name-cp-kafka-0:/etc/kafka -c cp-kafka-broker 36 | 37 | # cleanup 38 | cd ~/code/yb-iot-fleet-management/iot-ksql-processor/resources/ 39 | rm -rf kafka-connect-yugabyte-deps 40 | } 41 | 42 | while [ $# -gt 0 ]; do 43 | case "$1" in 44 | --kafka_helm_name) 45 | kafka_name="$2" 46 | shift 47 | ;; 48 | -h|--help) 49 | print_help 50 | exit 51 | ;; 52 | *) 53 | echo "Invalid command line arg : $1" 54 | print_help 55 | exit 56 | esac 57 | shift 58 | done 59 | 60 | if [ -z "$kafka_name" ]; then 61 | echo "'kafka_name' needs to be a valid string - name given to the the kafka helm install." >&2 62 | print_help >& 2 63 | exit 1 64 | fi 65 | 66 | setup_yb_kafka_sink 67 | 68 | echo "Setup for Kafka YugaByte DB Connect sink done." 
69 | -------------------------------------------------------------------------------- /iot-ksql-processor/resources/kubernetes/total_traffic.sink.properties: -------------------------------------------------------------------------------- 1 | name=yugabyte-sink-total 2 | connector.class=com.yb.connect.sink.YBSinkConnector 3 | 4 | topics=total_traffic 5 | 6 | yugabyte.cql.keyspace=TrafficKeySpace 7 | yugabyte.cql.tablename=Total_Traffic 8 | yugabyte.cql.contact.points=yb-tservers:9042 9 | 10 | -------------------------------------------------------------------------------- /iot-ksql-processor/resources/kubernetes/window_traffic.sink.properties: -------------------------------------------------------------------------------- 1 | name=yugabyte-sink-window 2 | connector.class=com.yb.connect.sink.YBSinkConnector 3 | 4 | topics=window_traffic 5 | 6 | yugabyte.cql.keyspace=TrafficKeySpace 7 | yugabyte.cql.tablename=Window_Traffic 8 | yugabyte.cql.contact.points=yb-tservers:9042 9 | 10 | -------------------------------------------------------------------------------- /iot-ksql-processor/resources/origin.sink.properties: -------------------------------------------------------------------------------- 1 | // Sample file to connect input from original Kafka topic into YugaByte DB. 
2 | name=yugabyte-sink 3 | connector.class=com.yb.connect.sink.YBSinkConnector 4 | 5 | topics=iot-data-event 6 | 7 | yugabyte.cql.keyspace=TrafficKeySpace 8 | yugabyte.cql.tablename=Origin_Table 9 | yugabyte.cql.contact.points=127.0.0.1:9042,127.0.0.2:9042,127.0.0.3:9042 10 | -------------------------------------------------------------------------------- /iot-ksql-processor/resources/poi_traffic.sink.properties: -------------------------------------------------------------------------------- 1 | name=yugabyte-sink-poi 2 | connector.class=com.yb.connect.sink.YBSinkConnector 3 | 4 | topics=poi_traffic 5 | 6 | yugabyte.cql.keyspace=TrafficKeySpace 7 | yugabyte.cql.tablename=Poi_Traffic 8 | yugabyte.cql.contact.points=127.0.0.1:9042,127.0.0.2:9042,127.0.0.3:9042 9 | -------------------------------------------------------------------------------- /iot-ksql-processor/resources/total_traffic.sink.properties: -------------------------------------------------------------------------------- 1 | name=yugabyte-sink-total 2 | connector.class=com.yb.connect.sink.YBSinkConnector 3 | 4 | topics=total_traffic 5 | 6 | yugabyte.cql.keyspace=TrafficKeySpace 7 | yugabyte.cql.tablename=Total_Traffic 8 | yugabyte.cql.contact.points=127.0.0.1:9042,127.0.0.2:9042,127.0.0.3:9042 9 | -------------------------------------------------------------------------------- /iot-ksql-processor/resources/window_traffic.sink.properties: -------------------------------------------------------------------------------- 1 | name=yugabyte-sink-window 2 | connector.class=com.yb.connect.sink.YBSinkConnector 3 | 4 | topics=window_traffic 5 | 6 | yugabyte.cql.keyspace=TrafficKeySpace 7 | yugabyte.cql.tablename=Window_Traffic 8 | yugabyte.cql.contact.points=127.0.0.1:9042,127.0.0.2:9042,127.0.0.3:9042 9 | -------------------------------------------------------------------------------- /iot-ksql-processor/setup_streams.ksql: -------------------------------------------------------------------------------- 1 | 
CREATE STREAM traffic_stream ( 2 | vehicleId varchar, 3 | vehicleType varchar, 4 | routeId varchar, 5 | timeStamp varchar, 6 | latitude varchar, 7 | longitude varchar) 8 | WITH ( 9 | KAFKA_TOPIC='iot-data-event', 10 | VALUE_FORMAT='json', 11 | TIMESTAMP='timeStamp', 12 | TIMESTAMP_FORMAT='yyyy-MM-dd HH:mm:ss'); 13 | 14 | CREATE TABLE total_traffic 15 | WITH ( PARTITIONS=1, 16 | KAFKA_TOPIC='total_traffic', 17 | TIMESTAMP='timeStamp', 18 | TIMESTAMP_FORMAT='yyyy-MM-dd HH:mm:ss') AS 19 | SELECT routeId, 20 | vehicleType, 21 | count(vehicleId) AS totalCount, 22 | max(rowtime) AS timeStamp, 23 | TIMESTAMPTOSTRING(max(rowtime), 'yyyy-MM-dd') AS recordDate 24 | FROM traffic_stream 25 | GROUP BY routeId, vehicleType; 26 | 27 | CREATE TABLE window_traffic 28 | WITH ( TIMESTAMP='timeStamp', 29 | KAFKA_TOPIC='window_traffic', 30 | TIMESTAMP_FORMAT='yyyy-MM-dd HH:mm:ss', 31 | PARTITIONS=1) AS 32 | SELECT routeId, 33 | vehicleType, 34 | count(vehicleId) AS totalCount, 35 | max(rowtime) AS timeStamp, 36 | TIMESTAMPTOSTRING(max(rowtime), 'yyyy-MM-dd') AS recordDate 37 | FROM traffic_stream 38 | WINDOW HOPPING (SIZE 30 SECONDS, ADVANCE BY 10 SECONDS) 39 | GROUP BY routeId, vehicleType; 40 | 41 | CREATE STREAM poi_traffic 42 | WITH ( PARTITIONS=1, 43 | KAFKA_TOPIC='poi_traffic', 44 | TIMESTAMP='timeStamp', 45 | TIMESTAMP_FORMAT='yyyy-MM-dd HH:mm:ss') AS 46 | SELECT vehicleId, 47 | vehicleType, 48 | cast(GEO_DISTANCE(cast(latitude AS double),cast(longitude AS double),33.877495,-95.50238,'KM') AS bigint) AS distance, 49 | timeStamp 50 | FROM traffic_stream 51 | WHERE GEO_DISTANCE(cast(latitude AS double),cast(longitude AS double),33.877495,-95.50238,'KM') < 30; 52 | -------------------------------------------------------------------------------- /iot-spark-processor/README.md: -------------------------------------------------------------------------------- 1 | # IoT Spark Processor 2 | IoT Spark Processor is a Maven application for processing IoT Data streams using Apache Spark. 
Processed data is persisted in to Cassandra Database. This project requires following tools and technologies. 3 | 4 | - JDK - 1.8 5 | - Maven - 3.3.9 6 | - ZooKeeper - 3.4.8 7 | - Kafka - 2.10-0.10.0.0 8 | - Cassandra - 2.2.6 9 | - Spark - 1.6.2 Pre-built for Hadoop 2.6 10 | 11 | Please refer "IoTData.cql" file to create Keyspace and Tables in Cassandra Database, which are required by this application. 12 | 13 | You can build and run this application using below commands. Please check resources/iot-spark.properties for configuration details. 14 | 15 | ```sh 16 | mvn package 17 | spark-submit --class "com.iot.app.spark.processor.IoTDataProcessor” iot-spark-processor-1.0.0.jar 18 | ``` -------------------------------------------------------------------------------- /iot-spark-processor/pom.xml: -------------------------------------------------------------------------------- 1 | 3 | 4.0.0 4 | com.iot.app.spark 5 | iot-spark-processor 6 | 1.0.0 7 | IoT Spark Processor 8 | 9 | 10 | 11 | 12 | org.apache.spark 13 | spark-core_2.10 14 | 1.6.2 15 | 16 | 17 | org.apache.spark 18 | spark-streaming_2.10 19 | 1.6.2 20 | 21 | 22 | org.apache.spark 23 | spark-streaming-kafka_2.10 24 | 1.6.2 25 | 26 | 27 | org.apache.spark 28 | spark-sql_2.10 29 | 1.6.2 30 | 31 | 32 | 33 | com.datastax.spark 34 | spark-cassandra-connector_2.10 35 | 1.6.0 36 | 37 | 38 | 39 | log4j 40 | log4j 41 | 1.2.17 42 | 43 | 44 | junit 45 | junit 46 | 4.12 47 | 48 | 49 | 50 | 51 | 52 | ${basedir}/src/main/resources 53 | 54 | 55 | 56 | 57 | org.apache.maven.plugins 58 | maven-compiler-plugin 59 | 3.1 60 | 61 | 1.8 62 | 1.8 63 | 64 | 65 | 66 | org.apache.maven.plugins 67 | maven-shade-plugin 68 | 2.4.3 69 | 70 | 71 | package 72 | 73 | shade 74 | 75 | 76 | 77 | 78 | *:* 79 | 80 | META-INF/*.SF 81 | META-INF/*.DSA 82 | META-INF/*.RSA 83 | 84 | 85 | 86 | 87 | 89 | reference.conf 90 | 91 | 93 | com.iot.app.spark.processor.IoTDataProcessor 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 
-------------------------------------------------------------------------------- /iot-spark-processor/src/main/java/com/iot/app/spark/entity/POITrafficData.java: --------------------------------------------------------------------------------
package com.iot.app.spark.entity;

import java.io.Serializable;
import java.util.Date;
import com.fasterxml.jackson.annotation.JsonFormat;

/**
 * Entity mapped to the poi_traffic database table.
 *
 * @author abaghel
 */
public class POITrafficData implements Serializable {

    private String vehicleId;
    private double distance;
    private String vehicleType;
    // Serialized as "yyyy-MM-dd HH:mm:ss" in the MST timezone.
    @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss", timezone="MST")
    private Date timeStamp;

    public String getVehicleId() {
        return vehicleId;
    }

    public void setVehicleId(String vehicleId) {
        this.vehicleId = vehicleId;
    }

    public double getDistance() {
        return distance;
    }

    public void setDistance(double distance) {
        this.distance = distance;
    }

    public String getVehicleType() {
        return vehicleType;
    }

    public void setVehicleType(String vehicleType) {
        this.vehicleType = vehicleType;
    }

    public Date getTimeStamp() {
        return timeStamp;
    }

    public void setTimeStamp(Date timeStamp) {
        this.timeStamp = timeStamp;
    }

}
-------------------------------------------------------------------------------- /iot-spark-processor/src/main/java/com/iot/app/spark/entity/TotalTrafficData.java: --------------------------------------------------------------------------------
package com.iot.app.spark.entity;

import java.io.Serializable;
import java.util.Date;
import com.fasterxml.jackson.annotation.JsonFormat;

/**
 * Entity mapped to the total_traffic database table.
 *
 * @author abaghel
 */
public class TotalTrafficData implements Serializable {

    private String routeId;
    private String vehicleType;
    private long totalCount;
    // Serialized as "yyyy-MM-dd HH:mm:ss" in the MST timezone.
    @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss", timezone="MST")
    private Date timeStamp;
    private String recordDate;

    public String getRouteId() {
        return routeId;
    }

    public void setRouteId(String routeId) {
        this.routeId = routeId;
    }

    public String getVehicleType() {
        return vehicleType;
    }

    public void setVehicleType(String vehicleType) {
        this.vehicleType = vehicleType;
    }

    public long getTotalCount() {
        return totalCount;
    }

    public void setTotalCount(long totalCount) {
        this.totalCount = totalCount;
    }

    public Date getTimeStamp() {
        return timeStamp;
    }

    public void setTimeStamp(Date timeStamp) {
        this.timeStamp = timeStamp;
    }

    public String getRecordDate() {
        return recordDate;
    }

    public void setRecordDate(String recordDate) {
        this.recordDate = recordDate;
    }

}
-------------------------------------------------------------------------------- /iot-spark-processor/src/main/java/com/iot/app/spark/entity/WindowTrafficData.java: --------------------------------------------------------------------------------
package com.iot.app.spark.entity;

import java.io.Serializable;
import java.util.Date;
import com.fasterxml.jackson.annotation.JsonFormat;

/**
 * Entity mapped to the window_traffic database table.
 *
 * @author abaghel
 */
public class WindowTrafficData implements Serializable {

    private String routeId;
    private String vehicleType;
    private long totalCount;
    // Serialized as "yyyy-MM-dd HH:mm:ss" in the MST timezone.
    @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss", timezone="MST")
    private Date timeStamp;
    private String recordDate;

    public String getRouteId() {
        return routeId;
    }

    public void setRouteId(String routeId) {
        this.routeId = routeId;
    }

    public String getVehicleType() {
        return vehicleType;
    }

    public void setVehicleType(String vehicleType) {
        this.vehicleType = vehicleType;
    }

    public long getTotalCount() {
        return totalCount;
    }

    public void setTotalCount(long totalCount) {
        this.totalCount = totalCount;
    }

    public Date getTimeStamp() {
        return timeStamp;
    }

    public void setTimeStamp(Date timeStamp) {
        this.timeStamp = timeStamp;
    }

    public String getRecordDate() {
        return recordDate;
    }

    public void setRecordDate(String recordDate) {
        this.recordDate = recordDate;
    }
}
-------------------------------------------------------------------------------- /iot-spark-processor/src/main/java/com/iot/app/spark/processor/IoTDataProcessor.java: --------------------------------------------------------------------------------
package com.iot.app.spark.processor;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.Function3;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.State;
import org.apache.spark.streaming.StateSpec;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaMapWithStateDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;
import org.apache.spark.streaming.api.java.JavaPairInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;

import com.google.common.base.Optional;
import
com.iot.app.spark.util.IoTDataDecoder;
import com.iot.app.spark.util.PropertyFileReader;
import com.iot.app.spark.vo.IoTData;
import com.iot.app.spark.vo.POIData;

import kafka.serializer.StringDecoder;
import scala.Tuple2;
import scala.Tuple3;

/**
 * Consumes Kafka IoT messages and creates the streams used for processing the IoT data.
 *
 * @author abaghel
 */
public class IoTDataProcessor {

    private static final Logger logger = Logger.getLogger(IoTDataProcessor.class);

    public static void main(String[] args) throws Exception {
        // Read Spark and Cassandra properties; -D system properties override the file values.
        Properties prop = PropertyFileReader.readPropertyFile();
        String cassandraHost = prop.getProperty("com.iot.app.cassandra.host");
        if (System.getProperty("com.iot.app.cassandra.host") != null) {
            cassandraHost = System.getProperty("com.iot.app.cassandra.host");
        }
        String cassandraPort = prop.getProperty("com.iot.app.cassandra.port");
        if (System.getProperty("com.iot.app.cassandra.port") != null) {
            cassandraPort = System.getProperty("com.iot.app.cassandra.port");
        }
        SparkConf conf = new SparkConf()
                .setAppName(prop.getProperty("com.iot.app.spark.app.name"))
                .setMaster(prop.getProperty("com.iot.app.spark.master"))
                .set("spark.cassandra.connection.host", cassandraHost)
                .set("spark.cassandra.connection.port", cassandraPort)
                .set("spark.cassandra.connection.keep_alive_ms", prop.getProperty("com.iot.app.cassandra.keep_alive"));
        // Batch interval of 5 seconds for the incoming stream.
        JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(5));
        // Checkpoint directory is required for the mapWithState calls below.
        jssc.checkpoint(prop.getProperty("com.iot.app.spark.checkpoint.dir"));

        // Read and set Kafka properties.
        Map<String, String> kafkaParams = new HashMap<>();
        kafkaParams.put("zookeeper.connect", prop.getProperty("com.iot.app.kafka.zookeeper"));
        kafkaParams.put("metadata.broker.list", prop.getProperty("com.iot.app.kafka.brokerlist"));
        String topic = prop.getProperty("com.iot.app.kafka.topic");
        Set<String> topicsSet = new HashSet<>();
        topicsSet.add(topic);
        // Create a direct (receiver-less) Kafka stream.
        JavaPairInputDStream<String, IoTData> directKafkaStream = KafkaUtils.createDirectStream(
                jssc,
                String.class,
                IoTData.class,
                StringDecoder.class,
                IoTDataDecoder.class,
                kafkaParams,
                topicsSet
        );
        logger.info("Starting Stream Processing");

        // We need the non-filtered stream for the POI traffic data calculation.
        JavaDStream<IoTData> nonFilteredIotDataStream = directKafkaStream.map(tuple -> tuple._2());

        // We need the filtered stream for total and window traffic data calculation.
        JavaPairDStream<String, IoTData> iotDataPairStream = nonFilteredIotDataStream
                .mapToPair(iot -> new Tuple2<>(iot.getVehicleId(), iot))
                .reduceByKey((a, b) -> a);

        // Check whether each vehicle id has already been processed.
        JavaMapWithStateDStream<String, IoTData, Boolean, Tuple2<IoTData, Boolean>> iotDStreamWithStatePairs =
                iotDataPairStream.mapWithState(
                        StateSpec.function(processedVehicleFunc).timeout(Durations.seconds(3600))); // maintain state for one hour

        // Filter out already-processed vehicle ids and keep the un-processed ones.
        JavaDStream<Tuple2<IoTData, Boolean>> filteredIotDStreams = iotDStreamWithStatePairs.map(tuple2 -> tuple2)
                .filter(tuple -> tuple._2.equals(Boolean.FALSE));

        // Strip the processed flag to get a plain stream of IoTData.
        JavaDStream<IoTData> filteredIotDataStream = filteredIotDStreams.map(tuple -> tuple._1);

        // Cache the stream as it is used in both the total and window based computations.
        filteredIotDataStream.cache();

        // Process data.
        IoTTrafficDataProcessor iotTrafficProcessor = new IoTTrafficDataProcessor();
        iotTrafficProcessor.processTotalTrafficData(filteredIotDataStream);
        iotTrafficProcessor.processWindowTrafficData(filteredIotDataStream);

        // POI data.
        POIData poiData = new POIData();
        poiData.setLatitude(33.877495);
        poiData.setLongitude(-95.50238);
        poiData.setRadius(30); // 30 km

        // Broadcast variables: we will monitor vehicles on Route 37 which are of type Truck.
        Broadcast<Tuple3<POIData, String, String>> broadcastPOIValues =
                jssc.sparkContext().broadcast(new Tuple3<>(poiData, "Route-37", "Truck"));
        // Call the method to process the stream.
        iotTrafficProcessor.processPOIData(nonFilteredIotDataStream, broadcastPOIValues);

        // Start the streaming context.
        jssc.start();
        jssc.awaitTermination();
    }

    // Function that flags vehicle ids already seen within the state timeout window:
    // returns (event, true) when the id exists in state, otherwise records it and returns (event, false).
    private static final Function3<String, Optional<IoTData>, State<Boolean>, Tuple2<IoTData, Boolean>> processedVehicleFunc =
            (vehicleId, iot, state) -> {
                Tuple2<IoTData, Boolean> vehicle = new Tuple2<>(iot.get(), false);
                if (state.exists()) {
                    vehicle = new Tuple2<>(iot.get(), true);
                } else {
                    state.update(Boolean.TRUE);
                }
                return vehicle;
            };

}
-------------------------------------------------------------------------------- /iot-spark-processor/src/main/java/com/iot/app/spark/processor/IoTTrafficDataProcessor.java: --------------------------------------------------------------------------------
package com.iot.app.spark.processor;

import static com.datastax.spark.connector.japi.CassandraStreamingJavaUtil.javaFunctions;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

import org.apache.log4j.Logger;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function3;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.State;
import org.apache.spark.streaming.StateSpec;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaMapWithStateDStream;
import org.apache.spark.streaming.api.java.JavaPairDStream;

import
com.datastax.spark.connector.japi.CassandraJavaUtil; 22 | import com.google.common.base.Optional; 23 | import com.iot.app.spark.entity.POITrafficData; 24 | import com.iot.app.spark.entity.TotalTrafficData; 25 | import com.iot.app.spark.entity.WindowTrafficData; 26 | import com.iot.app.spark.util.GeoDistanceCalculator; 27 | import com.iot.app.spark.vo.AggregateKey; 28 | import com.iot.app.spark.vo.IoTData; 29 | import com.iot.app.spark.vo.POIData; 30 | 31 | import scala.Tuple2; 32 | import scala.Tuple3; 33 | 34 | /** 35 | * Class to process IoT data stream and to produce traffic data details. 36 | * 37 | * @author abaghel 38 | * 39 | */ 40 | public class IoTTrafficDataProcessor { 41 | private static final Logger logger = Logger.getLogger(IoTTrafficDataProcessor.class); 42 | 43 | /** 44 | * Method to get total traffic counts of different type of vehicles for each route. 45 | * 46 | * @param filteredIotDataStream IoT data stream 47 | */ 48 | public void processTotalTrafficData(JavaDStream filteredIotDataStream) { 49 | 50 | // We need to get count of vehicle group by routeId and vehicleType 51 | JavaPairDStream countDStreamPair = filteredIotDataStream 52 | .mapToPair(iot -> new Tuple2<>(new AggregateKey(iot.getRouteId(), iot.getVehicleType()), 1L)) 53 | .reduceByKey((a, b) -> a + b); 54 | 55 | // Need to keep state for total count 56 | JavaMapWithStateDStream> countDStreamWithStatePair = countDStreamPair 57 | .mapWithState(StateSpec.function(totalSumFunc).timeout(Durations.seconds(3600)));//maintain state for one hour 58 | 59 | // Transform to dstream of TrafficData 60 | JavaDStream> countDStream = countDStreamWithStatePair.map(tuple2 -> tuple2); 61 | JavaDStream trafficDStream = countDStream.map(totalTrafficDataFunc); 62 | 63 | // Map Cassandra table column 64 | Map columnNameMappings = new HashMap(); 65 | columnNameMappings.put("routeId", "routeid"); 66 | columnNameMappings.put("vehicleType", "vehicletype"); 67 | columnNameMappings.put("totalCount", "totalcount"); 68 
| columnNameMappings.put("timeStamp", "timestamp"); 69 | columnNameMappings.put("recordDate", "recorddate"); 70 | 71 | // call CassandraStreamingJavaUtil function to save in DB 72 | javaFunctions(trafficDStream).writerBuilder("traffickeyspace", "total_traffic", 73 | CassandraJavaUtil.mapToRow(TotalTrafficData.class, columnNameMappings)).saveToCassandra(); 74 | } 75 | 76 | /** 77 | * Method to get window traffic counts of different type of vehicles for each route. 78 | * Window duration = 30 seconds and Slide interval = 10 seconds 79 | * 80 | * @param filteredIotDataStream IoT data stream 81 | */ 82 | public void processWindowTrafficData(JavaDStream filteredIotDataStream) { 83 | 84 | // reduce by key and window (30 sec window and 10 sec slide). 85 | JavaPairDStream countDStreamPair = filteredIotDataStream 86 | .mapToPair(iot -> new Tuple2<>(new AggregateKey(iot.getRouteId(), iot.getVehicleType()), 1L)) 87 | .reduceByKeyAndWindow((a, b) -> a + b, Durations.seconds(30), Durations.seconds(10)); 88 | 89 | // Transform to dstream of TrafficData 90 | JavaDStream trafficDStream = countDStreamPair.map(windowTrafficDataFunc); 91 | 92 | // Map Cassandra table column 93 | Map columnNameMappings = new HashMap(); 94 | columnNameMappings.put("routeId", "routeid"); 95 | columnNameMappings.put("vehicleType", "vehicletype"); 96 | columnNameMappings.put("totalCount", "totalcount"); 97 | columnNameMappings.put("timeStamp", "timestamp"); 98 | columnNameMappings.put("recordDate", "recorddate"); 99 | 100 | // call CassandraStreamingJavaUtil function to save in DB 101 | javaFunctions(trafficDStream).writerBuilder("traffickeyspace", "window_traffic", 102 | CassandraJavaUtil.mapToRow(WindowTrafficData.class, columnNameMappings)).saveToCassandra(); 103 | } 104 | 105 | /** 106 | * Method to get the vehicles which are in radius of POI and their distance from POI. 
107 | * 108 | * @param nonFilteredIotDataStream original IoT data stream 109 | * @param broadcastPOIValues variable containing POI coordinates, route and vehicle types to monitor. 110 | */ 111 | public void processPOIData(JavaDStream nonFilteredIotDataStream,Broadcast> broadcastPOIValues) { 112 | 113 | // Filter by routeId,vehicleType and in POI range 114 | JavaDStream iotDataStreamFiltered = nonFilteredIotDataStream 115 | .filter(iot -> (iot.getRouteId().equals(broadcastPOIValues.value()._2()) 116 | && iot.getVehicleType().contains(broadcastPOIValues.value()._3()) 117 | && GeoDistanceCalculator.isInPOIRadius(Double.valueOf(iot.getLatitude()), 118 | Double.valueOf(iot.getLongitude()), broadcastPOIValues.value()._1().getLatitude(), 119 | broadcastPOIValues.value()._1().getLongitude(), 120 | broadcastPOIValues.value()._1().getRadius()))); 121 | 122 | // pair with poi 123 | JavaPairDStream poiDStreamPair = iotDataStreamFiltered 124 | .mapToPair(iot -> new Tuple2<>(iot, broadcastPOIValues.value()._1())); 125 | 126 | // Transform to dstream of POITrafficData 127 | JavaDStream trafficDStream = poiDStreamPair.map(poiTrafficDataFunc); 128 | 129 | // Map Cassandra table column 130 | Map columnNameMappings = new HashMap(); 131 | columnNameMappings.put("vehicleId", "vehicleid"); 132 | columnNameMappings.put("distance", "distance"); 133 | columnNameMappings.put("vehicleType", "vehicletype"); 134 | columnNameMappings.put("timeStamp", "timestamp"); 135 | 136 | // call CassandraStreamingJavaUtil function to save in DB 137 | javaFunctions(trafficDStream) 138 | .writerBuilder("traffickeyspace", "poi_traffic",CassandraJavaUtil.mapToRow(POITrafficData.class, columnNameMappings)) 139 | .withConstantTTL(120)//keeping data for 2 minutes 140 | .saveToCassandra(); 141 | } 142 | 143 | //Function to get running sum by maintaining the state 144 | private static final Function3, State,Tuple2> totalSumFunc = (key,currentSum,state) -> { 145 | long totalSum = currentSum.or(0L) + (state.exists() 
? state.get() : 0); 146 | Tuple2 total = new Tuple2<>(key, totalSum); 147 | state.update(totalSum); 148 | return total; 149 | }; 150 | 151 | //Function to create TotalTrafficData object from IoT data 152 | private static final Function, TotalTrafficData> totalTrafficDataFunc = (tuple -> { 153 | logger.debug("Total Count : " + "key " + tuple._1().getRouteId() + "-" + tuple._1().getVehicleType() + " value "+ tuple._2()); 154 | TotalTrafficData trafficData = new TotalTrafficData(); 155 | trafficData.setRouteId(tuple._1().getRouteId()); 156 | trafficData.setVehicleType(tuple._1().getVehicleType()); 157 | trafficData.setTotalCount(tuple._2()); 158 | trafficData.setTimeStamp(new Date()); 159 | trafficData.setRecordDate(new SimpleDateFormat("yyyy-MM-dd").format(new Date())); 160 | return trafficData; 161 | }); 162 | 163 | //Function to create WindowTrafficData object from IoT data 164 | private static final Function, WindowTrafficData> windowTrafficDataFunc = (tuple -> { 165 | logger.debug("Window Count : " + "key " + tuple._1().getRouteId() + "-" + tuple._1().getVehicleType()+ " value " + tuple._2()); 166 | WindowTrafficData trafficData = new WindowTrafficData(); 167 | trafficData.setRouteId(tuple._1().getRouteId()); 168 | trafficData.setVehicleType(tuple._1().getVehicleType()); 169 | trafficData.setTotalCount(tuple._2()); 170 | trafficData.setTimeStamp(new Date()); 171 | trafficData.setRecordDate(new SimpleDateFormat("yyyy-MM-dd").format(new Date())); 172 | return trafficData; 173 | }); 174 | 175 | //Function to create POITrafficData object from IoT data 176 | private static final Function, POITrafficData> poiTrafficDataFunc = (tuple -> { 177 | POITrafficData poiTraffic = new POITrafficData(); 178 | poiTraffic.setVehicleId(tuple._1.getVehicleId()); 179 | poiTraffic.setVehicleType(tuple._1.getVehicleType()); 180 | poiTraffic.setTimeStamp(new Date()); 181 | double distance = GeoDistanceCalculator.getDistance(Double.valueOf(tuple._1.getLatitude()).doubleValue(), 182 | 
Double.valueOf(tuple._1.getLongitude()).doubleValue(), tuple._2.getLatitude(), tuple._2.getLongitude()); 183 | logger.debug("Distance for " + tuple._1.getLatitude() + "," + tuple._1.getLongitude() + ","+ tuple._2.getLatitude() + "," + tuple._2.getLongitude() + " = " + distance); 184 | poiTraffic.setDistance(distance); 185 | return poiTraffic; 186 | }); 187 | 188 | } 189 | -------------------------------------------------------------------------------- /iot-spark-processor/src/main/java/com/iot/app/spark/util/GeoDistanceCalculator.java: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yugabyte/yb-iot-fleet-management/ca4a7aefde82ac971c50bb52dd1e1be82adaefb3/iot-spark-processor/src/main/java/com/iot/app/spark/util/GeoDistanceCalculator.java -------------------------------------------------------------------------------- /iot-spark-processor/src/main/java/com/iot/app/spark/util/IoTDataDecoder.java: -------------------------------------------------------------------------------- 1 | package com.iot.app.spark.util; 2 | 3 | import com.fasterxml.jackson.databind.ObjectMapper; 4 | import com.iot.app.spark.vo.IoTData; 5 | 6 | import kafka.serializer.Decoder; 7 | import kafka.utils.VerifiableProperties; 8 | 9 | /** 10 | * Class to deserialize JSON string to IoTData java object 11 | * 12 | * @author abaghel 13 | * 14 | */ 15 | public class IoTDataDecoder implements Decoder { 16 | 17 | private static ObjectMapper objectMapper = new ObjectMapper(); 18 | 19 | public IoTDataDecoder(VerifiableProperties verifiableProperties) { 20 | 21 | } 22 | public IoTData fromBytes(byte[] bytes) { 23 | try { 24 | return objectMapper.readValue(bytes, IoTData.class); 25 | } catch (Exception e) { 26 | e.printStackTrace(); 27 | } 28 | return null; 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /iot-spark-processor/src/main/java/com/iot/app/spark/util/PropertyFileReader.java: 
-------------------------------------------------------------------------------- 1 | package com.iot.app.spark.util; 2 | 3 | import java.io.IOException; 4 | import java.io.InputStream; 5 | import java.util.Properties; 6 | import org.apache.log4j.Logger; 7 | 8 | /** 9 | * Utility class to read property file 10 | * 11 | * @author abaghel 12 | * 13 | */ 14 | public class PropertyFileReader { 15 | private static final Logger logger = Logger.getLogger(PropertyFileReader.class); 16 | private static Properties prop = new Properties(); 17 | public static Properties readPropertyFile() throws Exception { 18 | if (prop.isEmpty()) { 19 | InputStream input = PropertyFileReader.class.getClassLoader().getResourceAsStream("iot-spark.properties"); 20 | try { 21 | prop.load(input); 22 | } catch (IOException ex) { 23 | logger.error(ex); 24 | throw ex; 25 | } finally { 26 | if (input != null) { 27 | input.close(); 28 | } 29 | } 30 | } 31 | return prop; 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /iot-spark-processor/src/main/java/com/iot/app/spark/vo/AggregateKey.java: -------------------------------------------------------------------------------- 1 | package com.iot.app.spark.vo; 2 | 3 | import java.io.Serializable; 4 | 5 | /** 6 | * Key class for calculation 7 | * 8 | * @author abaghel 9 | * 10 | */ 11 | public class AggregateKey implements Serializable { 12 | 13 | private String routeId; 14 | private String vehicleType; 15 | 16 | public AggregateKey(String routeId, String vehicleType) { 17 | super(); 18 | this.routeId = routeId; 19 | this.vehicleType = vehicleType; 20 | } 21 | 22 | public String getRouteId() { 23 | return routeId; 24 | } 25 | 26 | public String getVehicleType() { 27 | return vehicleType; 28 | } 29 | 30 | @Override 31 | public int hashCode() { 32 | final int prime = 31; 33 | int result = 1; 34 | result = prime * result + ((routeId == null) ? 
0 : routeId.hashCode()); 35 | result = prime * result + ((vehicleType == null) ? 0 : vehicleType.hashCode()); 36 | return result; 37 | } 38 | 39 | @Override 40 | public boolean equals(Object obj) { 41 | if(obj !=null && obj instanceof AggregateKey){ 42 | AggregateKey other = (AggregateKey)obj; 43 | if(other.getRouteId() != null && other.getVehicleType() != null){ 44 | if((other.getRouteId().equals(this.routeId)) && (other.getVehicleType().equals(this.vehicleType))){ 45 | return true; 46 | } 47 | } 48 | } 49 | return false; 50 | } 51 | 52 | 53 | } 54 | -------------------------------------------------------------------------------- /iot-spark-processor/src/main/java/com/iot/app/spark/vo/IoTData.java: -------------------------------------------------------------------------------- 1 | package com.iot.app.spark.vo; 2 | 3 | import java.io.Serializable; 4 | import java.util.Date; 5 | import com.fasterxml.jackson.annotation.JsonFormat; 6 | 7 | /** 8 | * Class to represent the IoT vehicle data. 
/**
 * Value object representing one IoT vehicle event; deserialized from the JSON
 * Kafka payload by IoTDataDecoder.
 *
 * @author abaghel
 */
public class IoTData implements Serializable{

	// Unique vehicle identifier (used as the de-duplication key upstream).
	private String vehicleId;
	// Vehicle category, e.g. "Truck".
	private String vehicleType;
	// Route identifier, e.g. "Route-37".
	private String routeId;
	// Coordinates are carried as strings; consumers parse them with
	// Double.valueOf before distance calculations.
	private String latitude;
	private String longitude;
	// Event time, serialized as "yyyy-MM-dd HH:mm:ss".
	// NOTE(review): timezone="IST" is an ambiguous zone abbreviation —
	// confirm the producer and consumer agree on the intended zone.
	@JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss", timezone="IST")
	private Date timestamp;
	private double speed;
	private double fuelLevel;

	// No-arg constructor required for JSON deserialization.
	public IoTData(){

	}

	public IoTData(String vehicleId, String vehicleType, String routeId, String latitude, String longitude,
			Date timestamp, double speed, double fuelLevel) {
		super();
		this.vehicleId = vehicleId;
		this.vehicleType = vehicleType;
		this.routeId = routeId;
		this.longitude = longitude;
		this.latitude = latitude;
		this.timestamp = timestamp;
		this.speed = speed;
		this.fuelLevel = fuelLevel;
	}

	public String getVehicleId() {
		return vehicleId;
	}

	public String getVehicleType() {
		return vehicleType;
	}

	public String getRouteId() {
		return routeId;
	}

	public String getLongitude() {
		return longitude;
	}

	public String getLatitude() {
		return latitude;
	}

	public Date getTimestamp() {
		return timestamp;
	}

	public double getSpeed() {
		return speed;
	}

	public double getFuelLevel() {
		return fuelLevel;
	}

}
/**
 * Attributes of a Point Of Interest (POI): centre coordinates and a radius.
 * Used to select vehicles within the radius; the radius is expressed in
 * kilometres (set to 30 km by the stream driver).
 *
 * @author abaghel
 */
public class POIData implements Serializable {
	// Centre latitude in decimal degrees.
	private double latitude;
	// Centre longitude in decimal degrees.
	private double longitude;
	// Radius around the centre, in kilometres.
	private double radius;

	public double getLatitude() {
		return latitude;
	}
	public void setLatitude(double latitude) {
		this.latitude = latitude;
	}
	public double getLongitude() {
		return longitude;
	}
	public void setLongitude(double longitude) {
		this.longitude = longitude;
	}
	public double getRadius() {
		return radius;
	}
	public void setRadius(double radius) {
		this.radius = radius;
	}
}
log4j.appender.stdout.Target=System.out 15 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout 16 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n -------------------------------------------------------------------------------- /iot-springboot-dashboard/IoTData.cql: -------------------------------------------------------------------------------- 1 | //Create keyspace 2 | CREATE KEYSPACE IF NOT EXISTS TrafficKeySpace WITH replication = {'class':'SimpleStrategy', 'replication_factor':1}; 3 | 4 | // Drop existing tables 5 | DROP TABLE IF EXISTS TrafficKeySpace.Total_Traffic; 6 | DROP TABLE IF EXISTS TrafficKeySpace.Window_Traffic; 7 | DROP TABLE IF EXISTS TrafficKeySpace.Poi_Traffic; 8 | 9 | //Create table 10 | CREATE TABLE IF NOT EXISTS TrafficKeySpace.Total_Traffic (routeId text , vehicleType text, totalCount bigint, timeStamp timestamp,recordDate text,PRIMARY KEY (routeId,recordDate,vehicleType)); 11 | CREATE TABLE IF NOT EXISTS TrafficKeySpace.Window_Traffic (routeId text , vehicleType text, totalCount bigint, timeStamp timestamp,recordDate text,PRIMARY KEY (routeId,recordDate,vehicleType)); 12 | CREATE TABLE IF NOT EXISTS TrafficKeySpace.Poi_Traffic(vehicleid text , vehicletype text , distance bigint, timeStamp timestamp,PRIMARY KEY (vehicleid)); 13 | 14 | -------------------------------------------------------------------------------- /iot-springboot-dashboard/README.md: -------------------------------------------------------------------------------- 1 | # IoT Spring Boot Dashboard 2 | IoT Spring Boot Dashboard is a Maven application which queries Cassandra Database and pushes data to UI.This application is built using 3 | 4 | - Spring Boot 5 | - jQuery.js 6 | - Bootstrap.js 7 | - Sockjs.js 8 | - Stomp.js 9 | - Chart.js 10 | 11 | This project requires following tools and technologies. 
12 | 13 | - JDK - 1.8 14 | - Maven - 3.3.9 15 | - Cassandra - 2.2.6 16 | 17 | Please refer "resources/IoTData.cql" file to create Keyspace and Tables in Cassandra Database, which are required by this application. This is the same file which is available in "iot-spark-processor" project. 18 | 19 | You can build and run this application using below commands. Please check resources/iot-springboot.properties for configuration details. 20 | 21 | ```sh 22 | mvn package 23 | mvn exec:java -Dexec.mainClass="com.iot.app.springboot.dashboard.IoTDataDashboard" 24 | ``` 25 | 26 | Alternate way to run this application is using the “iot-springboot-dashboard-1.0.0.jar” file created by maven. Open command prompt, go to target folder and execute below command. 27 | 28 | ```sh 29 | java -jar iot-springboot-dashboard-1.0.0.jar 30 | ``` 31 | Open browser and entre http://localhost:8080 to see the Dashboard 32 | -------------------------------------------------------------------------------- /iot-springboot-dashboard/pom.xml: -------------------------------------------------------------------------------- 1 | 3 | 4.0.0 4 | com.iot.app.springboot 5 | iot-springboot-dashboard 6 | 1.0.0 7 | IoT Spring Boot Dashboard 8 | 9 | 10 | 11 | org.springframework.boot 12 | spring-boot-starter-parent 13 | 1.3.5.RELEASE 14 | 15 | 16 | 17 | 18 | 19 | org.springframework.boot 20 | spring-boot-starter-websocket 21 | 22 | 23 | org.springframework.boot 24 | spring-boot-starter-logging 25 | 26 | 27 | 28 | 29 | org.springframework.boot 30 | spring-boot-starter-data-cassandra 31 | 32 | 33 | 34 | log4j 35 | log4j 36 | 37 | 38 | junit 39 | junit 40 | 41 | 42 | 43 | 44 | 45 | 46 | ${basedir}/src/main/resources 47 | 48 | 49 | 50 | 51 | org.apache.maven.plugins 52 | maven-compiler-plugin 53 | 3.1 54 | 55 | 1.8 56 | 1.8 57 | 58 | 59 | 60 | org.springframework.boot 61 | spring-boot-maven-plugin 62 | 1.3.5.RELEASE 63 | 64 | 65 | 66 | repackage 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 
com.iot.app.springboot.dashboard.IoTDataDashboard 75 | 76 | 77 | -------------------------------------------------------------------------------- /iot-springboot-dashboard/src/main/java/com/iot/app/springboot/dao/CassandraConfig.java: -------------------------------------------------------------------------------- 1 | package com.iot.app.springboot.dao; 2 | 3 | import org.springframework.beans.factory.annotation.Autowired; 4 | import org.springframework.context.annotation.Bean; 5 | import org.springframework.context.annotation.Configuration; 6 | import org.springframework.context.annotation.PropertySource; 7 | import org.springframework.core.env.Environment; 8 | import org.springframework.data.cassandra.config.CassandraClusterFactoryBean; 9 | import org.springframework.data.cassandra.config.java.AbstractCassandraConfiguration; 10 | import org.springframework.data.cassandra.mapping.BasicCassandraMappingContext; 11 | import org.springframework.data.cassandra.mapping.CassandraMappingContext; 12 | import org.springframework.data.cassandra.repository.config.EnableCassandraRepositories; 13 | 14 | import org.apache.log4j.Logger; 15 | 16 | /** 17 | * Spring bean configuration for Cassandra db. 
18 | * 19 | * @author abaghel 20 | * 21 | */ 22 | @Configuration 23 | @PropertySource(value = {"classpath:iot-springboot.properties"}) 24 | @EnableCassandraRepositories(basePackages = {"com.iot.app.springboot.dao"}) 25 | public class CassandraConfig extends AbstractCassandraConfiguration { 26 | private static final Logger logger = Logger.getLogger(CassandraConfig.class); 27 | 28 | @Autowired 29 | private Environment environment; 30 | 31 | @Bean 32 | public CassandraClusterFactoryBean cluster() { 33 | CassandraClusterFactoryBean cluster = new CassandraClusterFactoryBean(); 34 | String cassandraHost = environment.getProperty("com.iot.app.cassandra.host"); 35 | if (System.getProperty("com.iot.app.cassandra.host") != null) { 36 | cassandraHost = System.getProperty("com.iot.app.cassandra.host"); 37 | } 38 | String cassandraPort = environment.getProperty("com.iot.app.cassandra.port"); 39 | if (System.getProperty("com.iot.app.cassandra.port") != null) { 40 | cassandraPort = System.getProperty("com.iot.app.cassandra.port"); 41 | } 42 | logger.info("Using cassandra host=" + cassandraHost + " port=" + cassandraPort); 43 | cluster.setContactPoints(cassandraHost); 44 | cluster.setPort(Integer.parseInt(cassandraPort)); 45 | return cluster; 46 | } 47 | 48 | @Bean 49 | public CassandraMappingContext cassandraMapping(){ 50 | return new BasicCassandraMappingContext(); 51 | } 52 | 53 | @Override 54 | @Bean 55 | protected String getKeyspaceName() { 56 | return environment.getProperty("com.iot.app.cassandra.keyspace"); 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /iot-springboot-dashboard/src/main/java/com/iot/app/springboot/dao/POITrafficDataRepository.java: -------------------------------------------------------------------------------- 1 | package com.iot.app.springboot.dao; 2 | 3 | import org.springframework.data.cassandra.repository.CassandraRepository; 4 | import org.springframework.stereotype.Repository; 5 | 6 | import 
com.iot.app.springboot.dao.entity.POITrafficData; 7 | 8 | /** 9 | * DAO class for poi_traffic 10 | * 11 | * @author abaghel 12 | * 13 | */ 14 | @Repository 15 | public interface POITrafficDataRepository extends CassandraRepository{ 16 | 17 | } 18 | -------------------------------------------------------------------------------- /iot-springboot-dashboard/src/main/java/com/iot/app/springboot/dao/TotalTrafficDataRepository.java: -------------------------------------------------------------------------------- 1 | package com.iot.app.springboot.dao; 2 | 3 | import org.springframework.data.cassandra.repository.CassandraRepository; 4 | import org.springframework.data.cassandra.repository.Query; 5 | import org.springframework.stereotype.Repository; 6 | 7 | import com.iot.app.springboot.dao.entity.TotalTrafficData; 8 | 9 | /** 10 | * DAO class for total_traffic 11 | * 12 | * @author abaghel 13 | * 14 | */ 15 | @Repository 16 | public interface TotalTrafficDataRepository extends CassandraRepository{ 17 | 18 | @Query("SELECT * FROM traffickeyspace.total_traffic WHERE recorddate = ?0 ALLOW FILTERING") 19 | Iterable findTrafficDataByDate(String date); 20 | } 21 | -------------------------------------------------------------------------------- /iot-springboot-dashboard/src/main/java/com/iot/app/springboot/dao/WindowTrafficDataRepository.java: -------------------------------------------------------------------------------- 1 | package com.iot.app.springboot.dao; 2 | 3 | import org.springframework.data.cassandra.repository.CassandraRepository; 4 | import org.springframework.data.cassandra.repository.Query; 5 | import org.springframework.stereotype.Repository; 6 | 7 | import com.iot.app.springboot.dao.entity.WindowTrafficData; 8 | 9 | /** 10 | * DAO class for window_traffic 11 | * 12 | * @author abaghel 13 | * 14 | */ 15 | @Repository 16 | public interface WindowTrafficDataRepository extends CassandraRepository{ 17 | 18 | @Query("SELECT * FROM traffickeyspace.window_traffic WHERE 
/**
 * Entity class for the poi_traffic db table.
 *
 * NOTE(review): this mapping disagrees with IoTData.cql, which creates
 * Poi_Traffic with PRIMARY KEY (vehicleid), no recorddate column, and
 * distance as bigint (double here). Confirm against the live schema before
 * relying on the key annotations below.
 *
 * @author abaghel
 */
@Table("poi_traffic")
public class POITrafficData implements Serializable{
	// Event time, serialized as "yyyy-MM-dd HH:mm:ss".
	// NOTE(review): timezone="MST" is an ambiguous zone abbreviation — confirm.
	@JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss", timezone="MST")
	@PrimaryKeyColumn(name = "timeStamp",ordinal = 0,type = PrimaryKeyType.PARTITIONED)
	private Date timeStamp;
	@PrimaryKeyColumn(name = "recordDate",ordinal = 1,type = PrimaryKeyType.CLUSTERED)
	private String recordDate;
	@Column(value = "vehicleId")
	private String vehicleId;
	// Distance from the POI (computed by the Spark processor).
	@Column(value = "distance")
	private double distance;
	@Column(value = "vehicleType")
	private String vehicleType;

	public Date getTimeStamp() {
		return timeStamp;
	}
	public void setTimeStamp(Date timeStamp) {
		this.timeStamp = timeStamp;
	}
	public String getRecordDate() {
		return recordDate;
	}
	public void setRecordDate(String recordDate) {
		this.recordDate = recordDate;
	}
	public String getVehicleId() {
		return vehicleId;
	}
	public void setVehicleId(String vehicleId) {
		this.vehicleId = vehicleId;
	}
	public double getDistance() {
		return distance;
	}
	public void setDistance(double distance) {
		this.distance = distance;
	}
	public String getVehicleType() {
		return vehicleType;
	}
	public void setVehicleType(String vehicleType) {
		this.vehicleType = vehicleType;
	}

}
public double getDistance() { 52 | return distance; 53 | } 54 | public void setDistance(double distance) { 55 | this.distance = distance; 56 | } 57 | public String getVehicleType() { 58 | return vehicleType; 59 | } 60 | public void setVehicleType(String vehicleType) { 61 | this.vehicleType = vehicleType; 62 | } 63 | 64 | } 65 | -------------------------------------------------------------------------------- /iot-springboot-dashboard/src/main/java/com/iot/app/springboot/dao/entity/TotalTrafficData.java: -------------------------------------------------------------------------------- 1 | package com.iot.app.springboot.dao.entity; 2 | 3 | import java.io.Serializable; 4 | import java.util.Date; 5 | 6 | import org.springframework.cassandra.core.PrimaryKeyType; 7 | import org.springframework.data.cassandra.mapping.Column; 8 | import org.springframework.data.cassandra.mapping.PrimaryKeyColumn; 9 | import org.springframework.data.cassandra.mapping.Table; 10 | 11 | import com.fasterxml.jackson.annotation.JsonFormat; 12 | 13 | /** 14 | * Entity class for total_traffic db table 15 | * 16 | * @author abaghel 17 | * 18 | */ 19 | @Table("total_traffic") 20 | public class TotalTrafficData implements Serializable{ 21 | @PrimaryKeyColumn(name = "routeid",ordinal = 0,type = PrimaryKeyType.PARTITIONED) 22 | private String routeId; 23 | @PrimaryKeyColumn(name = "recordDate",ordinal = 1,type = PrimaryKeyType.CLUSTERED) 24 | private String recordDate; 25 | @PrimaryKeyColumn(name = "vehicletype",ordinal = 2,type = PrimaryKeyType.CLUSTERED) 26 | private String vehicleType; 27 | @Column(value = "totalcount") 28 | private long totalCount; 29 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss", timezone="MST") 30 | @Column(value = "timestamp") 31 | private Date timeStamp; 32 | 33 | public String getRouteId() { 34 | return routeId; 35 | } 36 | public void setRouteId(String routeId) { 37 | this.routeId = routeId; 38 | } 39 | public String getRecordDate() { 40 | 
return recordDate; 41 | } 42 | public void setRecordDate(String recordDate) { 43 | this.recordDate = recordDate; 44 | } 45 | public String getVehicleType() { 46 | return vehicleType; 47 | } 48 | public void setVehicleType(String vehicleType) { 49 | this.vehicleType = vehicleType; 50 | } 51 | public long getTotalCount() { 52 | return totalCount; 53 | } 54 | public void setTotalCount(long totalCount) { 55 | this.totalCount = totalCount; 56 | } 57 | public Date getTimeStamp() { 58 | return timeStamp; 59 | } 60 | public void setTimeStamp(Date timeStamp) { 61 | this.timeStamp = timeStamp; 62 | } 63 | @Override 64 | public String toString() { 65 | return "TrafficData [routeId=" + routeId + ", vehicleType=" + vehicleType + ", totalCount=" + totalCount 66 | + ", timeStamp=" + timeStamp + "]"; 67 | } 68 | 69 | 70 | } 71 | -------------------------------------------------------------------------------- /iot-springboot-dashboard/src/main/java/com/iot/app/springboot/dao/entity/WindowTrafficData.java: -------------------------------------------------------------------------------- 1 | package com.iot.app.springboot.dao.entity; 2 | 3 | import java.io.Serializable; 4 | import java.util.Date; 5 | 6 | import org.springframework.cassandra.core.PrimaryKeyType; 7 | import org.springframework.data.cassandra.mapping.Column; 8 | import org.springframework.data.cassandra.mapping.PrimaryKeyColumn; 9 | import org.springframework.data.cassandra.mapping.Table; 10 | 11 | import com.fasterxml.jackson.annotation.JsonFormat; 12 | 13 | /** 14 | * Entity class for window_traffic db table 15 | * 16 | * @author abaghel 17 | * 18 | */ 19 | @Table("window_traffic") 20 | public class WindowTrafficData implements Serializable{ 21 | @PrimaryKeyColumn(name = "routeid",ordinal = 0,type = PrimaryKeyType.PARTITIONED) 22 | private String routeId; 23 | @PrimaryKeyColumn(name = "recordDate",ordinal = 1,type = PrimaryKeyType.CLUSTERED) 24 | private String recordDate; 25 | @PrimaryKeyColumn(name = 
"vehicletype",ordinal = 2,type = PrimaryKeyType.CLUSTERED) 26 | private String vehicleType; 27 | @Column(value = "totalcount") 28 | private long totalCount; 29 | @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss", timezone="MST") 30 | @Column(value = "timestamp") 31 | private Date timeStamp; 32 | 33 | public String getRouteId() { 34 | return routeId; 35 | } 36 | public void setRouteId(String routeId) { 37 | this.routeId = routeId; 38 | } 39 | public String getRecordDate() { 40 | return recordDate; 41 | } 42 | public void setRecordDate(String recordDate) { 43 | this.recordDate = recordDate; 44 | } 45 | public String getVehicleType() { 46 | return vehicleType; 47 | } 48 | public void setVehicleType(String vehicleType) { 49 | this.vehicleType = vehicleType; 50 | } 51 | public long getTotalCount() { 52 | return totalCount; 53 | } 54 | public void setTotalCount(long totalCount) { 55 | this.totalCount = totalCount; 56 | } 57 | public Date getTimeStamp() { 58 | return timeStamp; 59 | } 60 | public void setTimeStamp(Date timeStamp) { 61 | this.timeStamp = timeStamp; 62 | } 63 | @Override 64 | public String toString() { 65 | return "TrafficData [routeId=" + routeId + ", vehicleType=" + vehicleType + ", totalCount=" + totalCount 66 | + ", timeStamp=" + timeStamp + "]"; 67 | } 68 | 69 | 70 | } 71 | -------------------------------------------------------------------------------- /iot-springboot-dashboard/src/main/java/com/iot/app/springboot/dashboard/IoTDataDashboard.java: -------------------------------------------------------------------------------- 1 | package com.iot.app.springboot.dashboard; 2 | 3 | import org.springframework.boot.SpringApplication; 4 | import org.springframework.boot.autoconfigure.SpringBootApplication; 5 | import org.springframework.context.annotation.ComponentScan; 6 | import org.springframework.scheduling.annotation.EnableScheduling; 7 | 8 | /** 9 | * Spring boot application class for Dashboard. 
10 | * 11 | * @author abaghel 12 | * 13 | */ 14 | @SpringBootApplication 15 | @EnableScheduling 16 | @ComponentScan(basePackages = {"com.iot.app.springboot.dashboard", "com.iot.app.springboot.dao"}) 17 | public class IoTDataDashboard { 18 | public static void main(String[] args) { 19 | SpringApplication.run(IoTDataDashboard.class, args); 20 | } 21 | } 22 | 23 | -------------------------------------------------------------------------------- /iot-springboot-dashboard/src/main/java/com/iot/app/springboot/dashboard/TrafficDataService.java: -------------------------------------------------------------------------------- 1 | package com.iot.app.springboot.dashboard; 2 | 3 | import java.text.DateFormat; 4 | import java.text.SimpleDateFormat; 5 | import java.util.ArrayList; 6 | import java.util.Date; 7 | import java.util.List; 8 | import org.apache.log4j.Logger; 9 | 10 | import org.springframework.beans.factory.annotation.Autowired; 11 | import org.springframework.messaging.simp.SimpMessagingTemplate; 12 | import org.springframework.scheduling.annotation.Scheduled; 13 | import org.springframework.stereotype.Service; 14 | 15 | import com.iot.app.springboot.dao.POITrafficDataRepository; 16 | import com.iot.app.springboot.dao.TotalTrafficDataRepository; 17 | import com.iot.app.springboot.dao.WindowTrafficDataRepository; 18 | import com.iot.app.springboot.dao.entity.POITrafficData; 19 | import com.iot.app.springboot.dao.entity.TotalTrafficData; 20 | import com.iot.app.springboot.dao.entity.WindowTrafficData; 21 | import com.iot.app.springboot.vo.Response; 22 | 23 | /** 24 | * Service class to send traffic data messages to dashboard ui at fixed interval using web-socket. 
25 | * 26 | * @author abaghel 27 | * 28 | */ 29 | @Service 30 | public class TrafficDataService { 31 | private static final Logger logger = Logger.getLogger(TrafficDataService.class); 32 | 33 | @Autowired 34 | private SimpMessagingTemplate template; 35 | 36 | @Autowired 37 | private TotalTrafficDataRepository totalRepository; 38 | 39 | @Autowired 40 | private WindowTrafficDataRepository windowRepository; 41 | 42 | @Autowired 43 | private POITrafficDataRepository poiRepository; 44 | 45 | private static DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); 46 | 47 | //Method sends traffic data message in every 5 seconds. 48 | @Scheduled(fixedRate = 5000) 49 | public void trigger() { 50 | List totalTrafficList = new ArrayList(); 51 | List windowTrafficList = new ArrayList(); 52 | List poiTrafficList = new ArrayList(); 53 | //Call dao methods 54 | totalRepository.findTrafficDataByDate(sdf.format(new Date())).forEach(e -> totalTrafficList.add(e)); 55 | windowRepository.findTrafficDataByDate(sdf.format(new Date())).forEach(e -> windowTrafficList.add(e)); 56 | poiRepository.findAll().forEach(e -> poiTrafficList.add(e)); 57 | //prepare response 58 | Response response = new Response(); 59 | response.setTotalTraffic(totalTrafficList); 60 | response.setWindowTraffic(windowTrafficList); 61 | response.setPoiTraffic(poiTrafficList); 62 | logger.info("Sending to UI "+response); 63 | //send to ui 64 | this.template.convertAndSend("/topic/trafficData", response); 65 | } 66 | 67 | } 68 | -------------------------------------------------------------------------------- /iot-springboot-dashboard/src/main/java/com/iot/app/springboot/dashboard/WebSocketConfig.java: -------------------------------------------------------------------------------- 1 | package com.iot.app.springboot.dashboard; 2 | 3 | import org.springframework.context.annotation.Configuration; 4 | import org.springframework.messaging.simp.config.MessageBrokerRegistry; 5 | import 
org.springframework.web.socket.config.annotation.AbstractWebSocketMessageBrokerConfigurer; 6 | import org.springframework.web.socket.config.annotation.EnableWebSocketMessageBroker; 7 | import org.springframework.web.socket.config.annotation.StompEndpointRegistry; 8 | 9 | /** 10 | * Web-Socket message broker configuration class to send data using SockJS 11 | * to dashboard html page. 12 | * 13 | * @author abaghel 14 | * 15 | */ 16 | @Configuration 17 | @EnableWebSocketMessageBroker 18 | public class WebSocketConfig extends AbstractWebSocketMessageBrokerConfigurer { 19 | //sockJS can get message using this endpoint 20 | public void registerStompEndpoints(StompEndpointRegistry registry) { 21 | registry.addEndpoint("/stomp").withSockJS(); 22 | } 23 | //configure message broker 24 | @Override 25 | public void configureMessageBroker(MessageBrokerRegistry config) { 26 | config.enableSimpleBroker("/topic"); 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /iot-springboot-dashboard/src/main/java/com/iot/app/springboot/vo/Response.java: -------------------------------------------------------------------------------- 1 | package com.iot.app.springboot.vo; 2 | 3 | import java.io.Serializable; 4 | import java.util.List; 5 | 6 | import com.iot.app.springboot.dao.entity.POITrafficData; 7 | import com.iot.app.springboot.dao.entity.TotalTrafficData; 8 | import com.iot.app.springboot.dao.entity.WindowTrafficData; 9 | 10 | /** 11 | * Response object containing traffic details that will be sent to dashboard. 
12 | * 13 | * @author abaghel 14 | * 15 | */ 16 | public class Response implements Serializable { 17 | private List totalTraffic; 18 | private List windowTraffic; 19 | private List poiTraffic; 20 | 21 | public List getTotalTraffic() { 22 | return totalTraffic; 23 | } 24 | public void setTotalTraffic(List totalTraffic) { 25 | this.totalTraffic = totalTraffic; 26 | } 27 | public List getWindowTraffic() { 28 | return windowTraffic; 29 | } 30 | public void setWindowTraffic(List windowTraffic) { 31 | this.windowTraffic = windowTraffic; 32 | } 33 | public List getPoiTraffic() { 34 | return poiTraffic; 35 | } 36 | public void setPoiTraffic(List poiTraffic) { 37 | this.poiTraffic = poiTraffic; 38 | } 39 | 40 | } 41 | -------------------------------------------------------------------------------- /iot-springboot-dashboard/src/main/resources/iot-springboot.properties: -------------------------------------------------------------------------------- 1 | #Cassandra properties 2 | com.iot.app.cassandra.host=127.0.0.1 3 | com.iot.app.cassandra.port=9042 4 | com.iot.app.cassandra.keyspace=traffickeyspace -------------------------------------------------------------------------------- /iot-springboot-dashboard/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | # Root logger option 2 | log4j.rootLogger=INFO, file, stdout 3 | 4 | # Direct log messages to a log file 5 | log4j.appender.file=org.apache.log4j.RollingFileAppender 6 | log4j.appender.file.File=/tmp/iot-springboot.log 7 | log4j.appender.file.MaxFileSize=10MB 8 | log4j.appender.file.MaxBackupIndex=10 9 | log4j.appender.file.layout=org.apache.log4j.PatternLayout 10 | log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n 11 | 12 | # Direct log messages to stdout 13 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender 14 | log4j.appender.stdout.Target=System.out 15 | 
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout 16 | log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n -------------------------------------------------------------------------------- /iot-springboot-dashboard/src/main/resources/static/css/style.css: -------------------------------------------------------------------------------- 1 | .main-title { 2 | color: #202951; 3 | font-family: Montserrat, sans-serif; 4 | font-size: 48px; 5 | } 6 | 7 | .main-container { 8 | background-color: #EEE; 9 | padding: 25px; 10 | } 11 | 12 | .outerTable { 13 | table-layout: fixed; 14 | background-color: #FFF; 15 | border: 1px solid #CCC; 16 | } 17 | 18 | .outerTable th { 19 | text-align: center; 20 | background-color: #FFF; 21 | color: #202951; 22 | } 23 | 24 | .outerTable>thead>tr>th { 25 | } 26 | 27 | .innerTable { 28 | table-layout: fixed; 29 | } 30 | 31 | .innerTable th { 32 | text-align: center; 33 | background-color: #202951; 34 | color: white; 35 | } 36 | 37 | .innerTable tr { 38 | text-align: center; 39 | } 40 | 41 | .innerTable>tbody>tr>td { 42 | font-family:Arial; 43 | vertical-align: middle; 44 | } 45 | 46 | .innerTable>tbody:nth-of-type(odd){ 47 | background-color: #FFF; 48 | } 49 | 50 | .innerTable>tbody:nth-of-type(even){ 51 | background-color: #EEE; 52 | } 53 | -------------------------------------------------------------------------------- /iot-springboot-dashboard/src/main/resources/static/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | YB IoT IoT Fleet Management | Dashboard 5 | 6 | 7 | 8 | 9 | 10 |
11 |
12 |
13 |

IoT Fleet Management - Dashboard

14 |
15 |
16 |
17 |
18 |
19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 |
Total Traffic - Vehicle Type and Route (Bar Chart)
31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 50 | 51 | 52 |
Total Traffic - Vehicle Type and Route (Tabular)
40 |
41 | 42 | 43 | 44 | 45 | 46 | 47 |
RouteVehicleCountTime
48 |
49 |
53 |
54 |
55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 |
Route 37 Traffic (Donut Chart)
67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 86 | 87 | 88 |
Last 30 Seconds Window (Tabular)
76 |
77 | 78 | 79 | 80 | 81 | 82 | 83 |
RouteVehicleCountTime
84 |
85 |
89 |
90 |
91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 |
Vehicles At Road Closures (Radar Chart)
103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 122 | 123 | 124 |
Trucks At Road Closures (Tabular)
112 |
113 | 114 | 115 | 116 | 117 | 118 | 119 |
Vehicle IdVehicleDistanceTime
120 |
121 |
125 |
126 |
127 |
128 | 129 | 130 | 131 | 132 | ` 133 | 134 | 135 | 355 | 356 | 357 | -------------------------------------------------------------------------------- /iot-springboot-dashboard/src/main/resources/static/js/bootstrap.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * Bootstrap v3.3.6 (http://getbootstrap.com) 3 | * Copyright 2011-2015 Twitter, Inc. 4 | * Licensed under the MIT license 5 | */ 6 | if("undefined"==typeof jQuery)throw new Error("Bootstrap's JavaScript requires jQuery");+function(a){"use strict";var b=a.fn.jquery.split(" ")[0].split(".");if(b[0]<2&&b[1]<9||1==b[0]&&9==b[1]&&b[2]<1||b[0]>2)throw new Error("Bootstrap's JavaScript requires jQuery version 1.9.1 or higher, but lower than version 3")}(jQuery),+function(a){"use strict";function b(){var a=document.createElement("bootstrap"),b={WebkitTransition:"webkitTransitionEnd",MozTransition:"transitionend",OTransition:"oTransitionEnd otransitionend",transition:"transitionend"};for(var c in b)if(void 0!==a.style[c])return{end:b[c]};return!1}a.fn.emulateTransitionEnd=function(b){var c=!1,d=this;a(this).one("bsTransitionEnd",function(){c=!0});var e=function(){c||a(d).trigger(a.support.transition.end)};return setTimeout(e,b),this},a(function(){a.support.transition=b(),a.support.transition&&(a.event.special.bsTransitionEnd={bindType:a.support.transition.end,delegateType:a.support.transition.end,handle:function(b){return a(b.target).is(this)?b.handleObj.handler.apply(this,arguments):void 0}})})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var c=a(this),e=c.data("bs.alert");e||c.data("bs.alert",e=new d(this)),"string"==typeof b&&e[b].call(c)})}var c='[data-dismiss="alert"]',d=function(b){a(b).on("click",c,this.close)};d.VERSION="3.3.6",d.TRANSITION_DURATION=150,d.prototype.close=function(b){function c(){g.detach().trigger("closed.bs.alert").remove()}var 
e=a(this),f=e.attr("data-target");f||(f=e.attr("href"),f=f&&f.replace(/.*(?=#[^\s]*$)/,""));var g=a(f);b&&b.preventDefault(),g.length||(g=e.closest(".alert")),g.trigger(b=a.Event("close.bs.alert")),b.isDefaultPrevented()||(g.removeClass("in"),a.support.transition&&g.hasClass("fade")?g.one("bsTransitionEnd",c).emulateTransitionEnd(d.TRANSITION_DURATION):c())};var e=a.fn.alert;a.fn.alert=b,a.fn.alert.Constructor=d,a.fn.alert.noConflict=function(){return a.fn.alert=e,this},a(document).on("click.bs.alert.data-api",c,d.prototype.close)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.button"),f="object"==typeof b&&b;e||d.data("bs.button",e=new c(this,f)),"toggle"==b?e.toggle():b&&e.setState(b)})}var c=function(b,d){this.$element=a(b),this.options=a.extend({},c.DEFAULTS,d),this.isLoading=!1};c.VERSION="3.3.6",c.DEFAULTS={loadingText:"loading..."},c.prototype.setState=function(b){var c="disabled",d=this.$element,e=d.is("input")?"val":"html",f=d.data();b+="Text",null==f.resetText&&d.data("resetText",d[e]()),setTimeout(a.proxy(function(){d[e](null==f[b]?this.options[b]:f[b]),"loadingText"==b?(this.isLoading=!0,d.addClass(c).attr(c,c)):this.isLoading&&(this.isLoading=!1,d.removeClass(c).removeAttr(c))},this),0)},c.prototype.toggle=function(){var a=!0,b=this.$element.closest('[data-toggle="buttons"]');if(b.length){var c=this.$element.find("input");"radio"==c.prop("type")?(c.prop("checked")&&(a=!1),b.find(".active").removeClass("active"),this.$element.addClass("active")):"checkbox"==c.prop("type")&&(c.prop("checked")!==this.$element.hasClass("active")&&(a=!1),this.$element.toggleClass("active")),c.prop("checked",this.$element.hasClass("active")),a&&c.trigger("change")}else this.$element.attr("aria-pressed",!this.$element.hasClass("active")),this.$element.toggleClass("active")};var d=a.fn.button;a.fn.button=b,a.fn.button.Constructor=c,a.fn.button.noConflict=function(){return 
a.fn.button=d,this},a(document).on("click.bs.button.data-api",'[data-toggle^="button"]',function(c){var d=a(c.target);d.hasClass("btn")||(d=d.closest(".btn")),b.call(d,"toggle"),a(c.target).is('input[type="radio"]')||a(c.target).is('input[type="checkbox"]')||c.preventDefault()}).on("focus.bs.button.data-api blur.bs.button.data-api",'[data-toggle^="button"]',function(b){a(b.target).closest(".btn").toggleClass("focus",/^focus(in)?$/.test(b.type))})}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.carousel"),f=a.extend({},c.DEFAULTS,d.data(),"object"==typeof b&&b),g="string"==typeof b?b:f.slide;e||d.data("bs.carousel",e=new c(this,f)),"number"==typeof b?e.to(b):g?e[g]():f.interval&&e.pause().cycle()})}var c=function(b,c){this.$element=a(b),this.$indicators=this.$element.find(".carousel-indicators"),this.options=c,this.paused=null,this.sliding=null,this.interval=null,this.$active=null,this.$items=null,this.options.keyboard&&this.$element.on("keydown.bs.carousel",a.proxy(this.keydown,this)),"hover"==this.options.pause&&!("ontouchstart"in document.documentElement)&&this.$element.on("mouseenter.bs.carousel",a.proxy(this.pause,this)).on("mouseleave.bs.carousel",a.proxy(this.cycle,this))};c.VERSION="3.3.6",c.TRANSITION_DURATION=600,c.DEFAULTS={interval:5e3,pause:"hover",wrap:!0,keyboard:!0},c.prototype.keydown=function(a){if(!/input|textarea/i.test(a.target.tagName)){switch(a.which){case 37:this.prev();break;case 39:this.next();break;default:return}a.preventDefault()}},c.prototype.cycle=function(b){return b||(this.paused=!1),this.interval&&clearInterval(this.interval),this.options.interval&&!this.paused&&(this.interval=setInterval(a.proxy(this.next,this),this.options.interval)),this},c.prototype.getItemIndex=function(a){return this.$items=a.parent().children(".item"),this.$items.index(a||this.$active)},c.prototype.getItemForDirection=function(a,b){var 
c=this.getItemIndex(b),d="prev"==a&&0===c||"next"==a&&c==this.$items.length-1;if(d&&!this.options.wrap)return b;var e="prev"==a?-1:1,f=(c+e)%this.$items.length;return this.$items.eq(f)},c.prototype.to=function(a){var b=this,c=this.getItemIndex(this.$active=this.$element.find(".item.active"));return a>this.$items.length-1||0>a?void 0:this.sliding?this.$element.one("slid.bs.carousel",function(){b.to(a)}):c==a?this.pause().cycle():this.slide(a>c?"next":"prev",this.$items.eq(a))},c.prototype.pause=function(b){return b||(this.paused=!0),this.$element.find(".next, .prev").length&&a.support.transition&&(this.$element.trigger(a.support.transition.end),this.cycle(!0)),this.interval=clearInterval(this.interval),this},c.prototype.next=function(){return this.sliding?void 0:this.slide("next")},c.prototype.prev=function(){return this.sliding?void 0:this.slide("prev")},c.prototype.slide=function(b,d){var e=this.$element.find(".item.active"),f=d||this.getItemForDirection(b,e),g=this.interval,h="next"==b?"left":"right",i=this;if(f.hasClass("active"))return this.sliding=!1;var j=f[0],k=a.Event("slide.bs.carousel",{relatedTarget:j,direction:h});if(this.$element.trigger(k),!k.isDefaultPrevented()){if(this.sliding=!0,g&&this.pause(),this.$indicators.length){this.$indicators.find(".active").removeClass("active");var l=a(this.$indicators.children()[this.getItemIndex(f)]);l&&l.addClass("active")}var m=a.Event("slid.bs.carousel",{relatedTarget:j,direction:h});return a.support.transition&&this.$element.hasClass("slide")?(f.addClass(b),f[0].offsetWidth,e.addClass(h),f.addClass(h),e.one("bsTransitionEnd",function(){f.removeClass([b,h].join(" ")).addClass("active"),e.removeClass(["active",h].join(" ")),i.sliding=!1,setTimeout(function(){i.$element.trigger(m)},0)}).emulateTransitionEnd(c.TRANSITION_DURATION)):(e.removeClass("active"),f.addClass("active"),this.sliding=!1,this.$element.trigger(m)),g&&this.cycle(),this}};var 
d=a.fn.carousel;a.fn.carousel=b,a.fn.carousel.Constructor=c,a.fn.carousel.noConflict=function(){return a.fn.carousel=d,this};var e=function(c){var d,e=a(this),f=a(e.attr("data-target")||(d=e.attr("href"))&&d.replace(/.*(?=#[^\s]+$)/,""));if(f.hasClass("carousel")){var g=a.extend({},f.data(),e.data()),h=e.attr("data-slide-to");h&&(g.interval=!1),b.call(f,g),h&&f.data("bs.carousel").to(h),c.preventDefault()}};a(document).on("click.bs.carousel.data-api","[data-slide]",e).on("click.bs.carousel.data-api","[data-slide-to]",e),a(window).on("load",function(){a('[data-ride="carousel"]').each(function(){var c=a(this);b.call(c,c.data())})})}(jQuery),+function(a){"use strict";function b(b){var c,d=b.attr("data-target")||(c=b.attr("href"))&&c.replace(/.*(?=#[^\s]+$)/,"");return a(d)}function c(b){return this.each(function(){var c=a(this),e=c.data("bs.collapse"),f=a.extend({},d.DEFAULTS,c.data(),"object"==typeof b&&b);!e&&f.toggle&&/show|hide/.test(b)&&(f.toggle=!1),e||c.data("bs.collapse",e=new d(this,f)),"string"==typeof b&&e[b]()})}var d=function(b,c){this.$element=a(b),this.options=a.extend({},d.DEFAULTS,c),this.$trigger=a('[data-toggle="collapse"][href="#'+b.id+'"],[data-toggle="collapse"][data-target="#'+b.id+'"]'),this.transitioning=null,this.options.parent?this.$parent=this.getParent():this.addAriaAndCollapsedClass(this.$element,this.$trigger),this.options.toggle&&this.toggle()};d.VERSION="3.3.6",d.TRANSITION_DURATION=350,d.DEFAULTS={toggle:!0},d.prototype.dimension=function(){var a=this.$element.hasClass("width");return a?"width":"height"},d.prototype.show=function(){if(!this.transitioning&&!this.$element.hasClass("in")){var b,e=this.$parent&&this.$parent.children(".panel").children(".in, .collapsing");if(!(e&&e.length&&(b=e.data("bs.collapse"),b&&b.transitioning))){var f=a.Event("show.bs.collapse");if(this.$element.trigger(f),!f.isDefaultPrevented()){e&&e.length&&(c.call(e,"hide"),b||e.data("bs.collapse",null));var 
g=this.dimension();this.$element.removeClass("collapse").addClass("collapsing")[g](0).attr("aria-expanded",!0),this.$trigger.removeClass("collapsed").attr("aria-expanded",!0),this.transitioning=1;var h=function(){this.$element.removeClass("collapsing").addClass("collapse in")[g](""),this.transitioning=0,this.$element.trigger("shown.bs.collapse")};if(!a.support.transition)return h.call(this);var i=a.camelCase(["scroll",g].join("-"));this.$element.one("bsTransitionEnd",a.proxy(h,this)).emulateTransitionEnd(d.TRANSITION_DURATION)[g](this.$element[0][i])}}}},d.prototype.hide=function(){if(!this.transitioning&&this.$element.hasClass("in")){var b=a.Event("hide.bs.collapse");if(this.$element.trigger(b),!b.isDefaultPrevented()){var c=this.dimension();this.$element[c](this.$element[c]())[0].offsetHeight,this.$element.addClass("collapsing").removeClass("collapse in").attr("aria-expanded",!1),this.$trigger.addClass("collapsed").attr("aria-expanded",!1),this.transitioning=1;var e=function(){this.transitioning=0,this.$element.removeClass("collapsing").addClass("collapse").trigger("hidden.bs.collapse")};return a.support.transition?void this.$element[c](0).one("bsTransitionEnd",a.proxy(e,this)).emulateTransitionEnd(d.TRANSITION_DURATION):e.call(this)}}},d.prototype.toggle=function(){this[this.$element.hasClass("in")?"hide":"show"]()},d.prototype.getParent=function(){return a(this.options.parent).find('[data-toggle="collapse"][data-parent="'+this.options.parent+'"]').each(a.proxy(function(c,d){var e=a(d);this.addAriaAndCollapsedClass(b(e),e)},this)).end()},d.prototype.addAriaAndCollapsedClass=function(a,b){var c=a.hasClass("in");a.attr("aria-expanded",c),b.toggleClass("collapsed",!c).attr("aria-expanded",c)};var e=a.fn.collapse;a.fn.collapse=c,a.fn.collapse.Constructor=d,a.fn.collapse.noConflict=function(){return a.fn.collapse=e,this},a(document).on("click.bs.collapse.data-api",'[data-toggle="collapse"]',function(d){var e=a(this);e.attr("data-target")||d.preventDefault();var 
f=b(e),g=f.data("bs.collapse"),h=g?"toggle":e.data();c.call(f,h)})}(jQuery),+function(a){"use strict";function b(b){var c=b.attr("data-target");c||(c=b.attr("href"),c=c&&/#[A-Za-z]/.test(c)&&c.replace(/.*(?=#[^\s]*$)/,""));var d=c&&a(c);return d&&d.length?d:b.parent()}function c(c){c&&3===c.which||(a(e).remove(),a(f).each(function(){var d=a(this),e=b(d),f={relatedTarget:this};e.hasClass("open")&&(c&&"click"==c.type&&/input|textarea/i.test(c.target.tagName)&&a.contains(e[0],c.target)||(e.trigger(c=a.Event("hide.bs.dropdown",f)),c.isDefaultPrevented()||(d.attr("aria-expanded","false"),e.removeClass("open").trigger(a.Event("hidden.bs.dropdown",f)))))}))}function d(b){return this.each(function(){var c=a(this),d=c.data("bs.dropdown");d||c.data("bs.dropdown",d=new g(this)),"string"==typeof b&&d[b].call(c)})}var e=".dropdown-backdrop",f='[data-toggle="dropdown"]',g=function(b){a(b).on("click.bs.dropdown",this.toggle)};g.VERSION="3.3.6",g.prototype.toggle=function(d){var e=a(this);if(!e.is(".disabled, :disabled")){var f=b(e),g=f.hasClass("open");if(c(),!g){"ontouchstart"in document.documentElement&&!f.closest(".navbar-nav").length&&a(document.createElement("div")).addClass("dropdown-backdrop").insertAfter(a(this)).on("click",c);var h={relatedTarget:this};if(f.trigger(d=a.Event("show.bs.dropdown",h)),d.isDefaultPrevented())return;e.trigger("focus").attr("aria-expanded","true"),f.toggleClass("open").trigger(a.Event("shown.bs.dropdown",h))}return!1}},g.prototype.keydown=function(c){if(/(38|40|27|32)/.test(c.which)&&!/input|textarea/i.test(c.target.tagName)){var d=a(this);if(c.preventDefault(),c.stopPropagation(),!d.is(".disabled, :disabled")){var e=b(d),g=e.hasClass("open");if(!g&&27!=c.which||g&&27==c.which)return 27==c.which&&e.find(f).trigger("focus"),d.trigger("click");var h=" li:not(.disabled):visible a",i=e.find(".dropdown-menu"+h);if(i.length){var 
j=i.index(c.target);38==c.which&&j>0&&j--,40==c.which&&jdocument.documentElement.clientHeight;this.$element.css({paddingLeft:!this.bodyIsOverflowing&&a?this.scrollbarWidth:"",paddingRight:this.bodyIsOverflowing&&!a?this.scrollbarWidth:""})},c.prototype.resetAdjustments=function(){this.$element.css({paddingLeft:"",paddingRight:""})},c.prototype.checkScrollbar=function(){var a=window.innerWidth;if(!a){var b=document.documentElement.getBoundingClientRect();a=b.right-Math.abs(b.left)}this.bodyIsOverflowing=document.body.clientWidth
',trigger:"hover focus",title:"",delay:0,html:!1,container:!1,viewport:{selector:"body",padding:0}},c.prototype.init=function(b,c,d){if(this.enabled=!0,this.type=b,this.$element=a(c),this.options=this.getOptions(d),this.$viewport=this.options.viewport&&a(a.isFunction(this.options.viewport)?this.options.viewport.call(this,this.$element):this.options.viewport.selector||this.options.viewport),this.inState={click:!1,hover:!1,focus:!1},this.$element[0]instanceof document.constructor&&!this.options.selector)throw new Error("`selector` option must be specified when initializing "+this.type+" on the window.document object!");for(var e=this.options.trigger.split(" "),f=e.length;f--;){var g=e[f];if("click"==g)this.$element.on("click."+this.type,this.options.selector,a.proxy(this.toggle,this));else if("manual"!=g){var h="hover"==g?"mouseenter":"focusin",i="hover"==g?"mouseleave":"focusout";this.$element.on(h+"."+this.type,this.options.selector,a.proxy(this.enter,this)),this.$element.on(i+"."+this.type,this.options.selector,a.proxy(this.leave,this))}}this.options.selector?this._options=a.extend({},this.options,{trigger:"manual",selector:""}):this.fixTitle()},c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.getOptions=function(b){return b=a.extend({},this.getDefaults(),this.$element.data(),b),b.delay&&"number"==typeof b.delay&&(b.delay={show:b.delay,hide:b.delay}),b},c.prototype.getDelegateOptions=function(){var b={},c=this.getDefaults();return this._options&&a.each(this._options,function(a,d){c[a]!=d&&(b[a]=d)}),b},c.prototype.enter=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);return c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof 
a.Event&&(c.inState["focusin"==b.type?"focus":"hover"]=!0),c.tip().hasClass("in")||"in"==c.hoverState?void(c.hoverState="in"):(clearTimeout(c.timeout),c.hoverState="in",c.options.delay&&c.options.delay.show?void(c.timeout=setTimeout(function(){"in"==c.hoverState&&c.show()},c.options.delay.show)):c.show())},c.prototype.isInStateTrue=function(){for(var a in this.inState)if(this.inState[a])return!0;return!1},c.prototype.leave=function(b){var c=b instanceof this.constructor?b:a(b.currentTarget).data("bs."+this.type);return c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c)),b instanceof a.Event&&(c.inState["focusout"==b.type?"focus":"hover"]=!1),c.isInStateTrue()?void 0:(clearTimeout(c.timeout),c.hoverState="out",c.options.delay&&c.options.delay.hide?void(c.timeout=setTimeout(function(){"out"==c.hoverState&&c.hide()},c.options.delay.hide)):c.hide())},c.prototype.show=function(){var b=a.Event("show.bs."+this.type);if(this.hasContent()&&this.enabled){this.$element.trigger(b);var d=a.contains(this.$element[0].ownerDocument.documentElement,this.$element[0]);if(b.isDefaultPrevented()||!d)return;var e=this,f=this.tip(),g=this.getUID(this.type);this.setContent(),f.attr("id",g),this.$element.attr("aria-describedby",g),this.options.animation&&f.addClass("fade");var h="function"==typeof this.options.placement?this.options.placement.call(this,f[0],this.$element[0]):this.options.placement,i=/\s?auto?\s?/i,j=i.test(h);j&&(h=h.replace(i,"")||"top"),f.detach().css({top:0,left:0,display:"block"}).addClass(h).data("bs."+this.type,this),this.options.container?f.appendTo(this.options.container):f.insertAfter(this.$element),this.$element.trigger("inserted.bs."+this.type);var k=this.getPosition(),l=f[0].offsetWidth,m=f[0].offsetHeight;if(j){var n=h,o=this.getPosition(this.$viewport);h="bottom"==h&&k.bottom+m>o.bottom?"top":"top"==h&&k.top-mo.width?"left":"left"==h&&k.left-lg.top+g.height&&(e.top=g.top+g.height-i)}else{var 
j=b.left-f,k=b.left+f+c;jg.right&&(e.left=g.left+g.width-k)}return e},c.prototype.getTitle=function(){var a,b=this.$element,c=this.options;return a=b.attr("data-original-title")||("function"==typeof c.title?c.title.call(b[0]):c.title)},c.prototype.getUID=function(a){do a+=~~(1e6*Math.random());while(document.getElementById(a));return a},c.prototype.tip=function(){if(!this.$tip&&(this.$tip=a(this.options.template),1!=this.$tip.length))throw new Error(this.type+" `template` option must consist of exactly 1 top-level element!");return this.$tip},c.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".tooltip-arrow")},c.prototype.enable=function(){this.enabled=!0},c.prototype.disable=function(){this.enabled=!1},c.prototype.toggleEnabled=function(){this.enabled=!this.enabled},c.prototype.toggle=function(b){var c=this;b&&(c=a(b.currentTarget).data("bs."+this.type),c||(c=new this.constructor(b.currentTarget,this.getDelegateOptions()),a(b.currentTarget).data("bs."+this.type,c))),b?(c.inState.click=!c.inState.click,c.isInStateTrue()?c.enter(c):c.leave(c)):c.tip().hasClass("in")?c.leave(c):c.enter(c)},c.prototype.destroy=function(){var a=this;clearTimeout(this.timeout),this.hide(function(){a.$element.off("."+a.type).removeData("bs."+a.type),a.$tip&&a.$tip.detach(),a.$tip=null,a.$arrow=null,a.$viewport=null})};var d=a.fn.tooltip;a.fn.tooltip=b,a.fn.tooltip.Constructor=c,a.fn.tooltip.noConflict=function(){return a.fn.tooltip=d,this}}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.popover"),f="object"==typeof b&&b;(e||!/destroy|hide/.test(b))&&(e||d.data("bs.popover",e=new c(this,f)),"string"==typeof b&&e[b]())})}var c=function(a,b){this.init("popover",a,b)};if(!a.fn.tooltip)throw new Error("Popover requires 
tooltip.js");c.VERSION="3.3.6",c.DEFAULTS=a.extend({},a.fn.tooltip.Constructor.DEFAULTS,{placement:"right",trigger:"click",content:"",template:''}),c.prototype=a.extend({},a.fn.tooltip.Constructor.prototype),c.prototype.constructor=c,c.prototype.getDefaults=function(){return c.DEFAULTS},c.prototype.setContent=function(){var a=this.tip(),b=this.getTitle(),c=this.getContent();a.find(".popover-title")[this.options.html?"html":"text"](b),a.find(".popover-content").children().detach().end()[this.options.html?"string"==typeof c?"html":"append":"text"](c),a.removeClass("fade top bottom left right in"),a.find(".popover-title").html()||a.find(".popover-title").hide()},c.prototype.hasContent=function(){return this.getTitle()||this.getContent()},c.prototype.getContent=function(){var a=this.$element,b=this.options;return a.attr("data-content")||("function"==typeof b.content?b.content.call(a[0]):b.content)},c.prototype.arrow=function(){return this.$arrow=this.$arrow||this.tip().find(".arrow")};var d=a.fn.popover;a.fn.popover=b,a.fn.popover.Constructor=c,a.fn.popover.noConflict=function(){return a.fn.popover=d,this}}(jQuery),+function(a){"use strict";function b(c,d){this.$body=a(document.body),this.$scrollElement=a(a(c).is(document.body)?window:c),this.options=a.extend({},b.DEFAULTS,d),this.selector=(this.options.target||"")+" .nav li > a",this.offsets=[],this.targets=[],this.activeTarget=null,this.scrollHeight=0,this.$scrollElement.on("scroll.bs.scrollspy",a.proxy(this.process,this)),this.refresh(),this.process()}function c(c){return this.each(function(){var d=a(this),e=d.data("bs.scrollspy"),f="object"==typeof c&&c;e||d.data("bs.scrollspy",e=new b(this,f)),"string"==typeof c&&e[c]()})}b.VERSION="3.3.6",b.DEFAULTS={offset:10},b.prototype.getScrollHeight=function(){return this.$scrollElement[0].scrollHeight||Math.max(this.$body[0].scrollHeight,document.documentElement.scrollHeight)},b.prototype.refresh=function(){var 
b=this,c="offset",d=0;this.offsets=[],this.targets=[],this.scrollHeight=this.getScrollHeight(),a.isWindow(this.$scrollElement[0])||(c="position",d=this.$scrollElement.scrollTop()),this.$body.find(this.selector).map(function(){var b=a(this),e=b.data("target")||b.attr("href"),f=/^#./.test(e)&&a(e);return f&&f.length&&f.is(":visible")&&[[f[c]().top+d,e]]||null}).sort(function(a,b){return a[0]-b[0]}).each(function(){b.offsets.push(this[0]),b.targets.push(this[1])})},b.prototype.process=function(){var a,b=this.$scrollElement.scrollTop()+this.options.offset,c=this.getScrollHeight(),d=this.options.offset+c-this.$scrollElement.height(),e=this.offsets,f=this.targets,g=this.activeTarget;if(this.scrollHeight!=c&&this.refresh(),b>=d)return g!=(a=f[f.length-1])&&this.activate(a);if(g&&b=e[a]&&(void 0===e[a+1]||b .dropdown-menu > .active").removeClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!1),b.addClass("active").find('[data-toggle="tab"]').attr("aria-expanded",!0),h?(b[0].offsetWidth,b.addClass("in")):b.removeClass("fade"),b.parent(".dropdown-menu").length&&b.closest("li.dropdown").addClass("active").end().find('[data-toggle="tab"]').attr("aria-expanded",!0),e&&e()}var g=d.find("> .active"),h=e&&a.support.transition&&(g.length&&g.hasClass("fade")||!!d.find("> .fade").length);g.length&&h?g.one("bsTransitionEnd",f).emulateTransitionEnd(c.TRANSITION_DURATION):f(),g.removeClass("in")};var d=a.fn.tab;a.fn.tab=b,a.fn.tab.Constructor=c,a.fn.tab.noConflict=function(){return a.fn.tab=d,this};var e=function(c){c.preventDefault(),b.call(a(this),"show")};a(document).on("click.bs.tab.data-api",'[data-toggle="tab"]',e).on("click.bs.tab.data-api",'[data-toggle="pill"]',e)}(jQuery),+function(a){"use strict";function b(b){return this.each(function(){var d=a(this),e=d.data("bs.affix"),f="object"==typeof b&&b;e||d.data("bs.affix",e=new c(this,f)),"string"==typeof b&&e[b]()})}var 
c=function(b,d){this.options=a.extend({},c.DEFAULTS,d),this.$target=a(this.options.target).on("scroll.bs.affix.data-api",a.proxy(this.checkPosition,this)).on("click.bs.affix.data-api",a.proxy(this.checkPositionWithEventLoop,this)),this.$element=a(b),this.affixed=null,this.unpin=null,this.pinnedOffset=null,this.checkPosition()};c.VERSION="3.3.6",c.RESET="affix affix-top affix-bottom",c.DEFAULTS={offset:0,target:window},c.prototype.getState=function(a,b,c,d){var e=this.$target.scrollTop(),f=this.$element.offset(),g=this.$target.height();if(null!=c&&"top"==this.affixed)return c>e?"top":!1;if("bottom"==this.affixed)return null!=c?e+this.unpin<=f.top?!1:"bottom":a-d>=e+g?!1:"bottom";var h=null==this.affixed,i=h?e:f.top,j=h?g:b;return null!=c&&c>=e?"top":null!=d&&i+j>=a-d?"bottom":!1},c.prototype.getPinnedOffset=function(){if(this.pinnedOffset)return this.pinnedOffset;this.$element.removeClass(c.RESET).addClass("affix");var a=this.$target.scrollTop(),b=this.$element.offset();return this.pinnedOffset=b.top-a},c.prototype.checkPositionWithEventLoop=function(){setTimeout(a.proxy(this.checkPosition,this),1)},c.prototype.checkPosition=function(){if(this.$element.is(":visible")){var b=this.$element.height(),d=this.options.offset,e=d.top,f=d.bottom,g=Math.max(a(document).height(),a(document.body).height());"object"!=typeof d&&(f=e=d),"function"==typeof e&&(e=d.top(this.$element)),"function"==typeof f&&(f=d.bottom(this.$element));var h=this.getState(g,b,e,f);if(this.affixed!=h){null!=this.unpin&&this.$element.css("top","");var i="affix"+(h?"-"+h:""),j=a.Event(i+".bs.affix");if(this.$element.trigger(j),j.isDefaultPrevented())return;this.affixed=h,this.unpin="bottom"==h?this.getPinnedOffset():null,this.$element.removeClass(c.RESET).addClass(i).trigger(i.replace("affix","affixed")+".bs.affix")}"bottom"==h&&this.$element.offset({top:g-b-f})}};var d=a.fn.affix;a.fn.affix=b,a.fn.affix.Constructor=c,a.fn.affix.noConflict=function(){return 
a.fn.affix=d,this},a(window).on("load",function(){a('[data-spy="affix"]').each(function(){var c=a(this),d=c.data();d.offset=d.offset||{},null!=d.offsetBottom&&(d.offset.bottom=d.offsetBottom),null!=d.offsetTop&&(d.offset.top=d.offsetTop),b.call(c,d)})})}(jQuery); -------------------------------------------------------------------------------- /iot-springboot-dashboard/src/main/resources/static/js/stomp.min.js: -------------------------------------------------------------------------------- 1 | // Generated by CoffeeScript 1.7.1 2 | /* 3 | Stomp Over WebSocket http://www.jmesnil.net/stomp-websocket/doc/ | Apache License V2.0 4 | 5 | Copyright (C) 2010-2013 [Jeff Mesnil](http://jmesnil.net/) 6 | Copyright (C) 2012 [FuseSource, Inc.](http://fusesource.com) 7 | */ 8 | (function(){var t,e,n,i,r={}.hasOwnProperty,o=[].slice;t={LF:"\n",NULL:"\x00"};n=function(){var e;function n(t,e,n){this.command=t;this.headers=e!=null?e:{};this.body=n!=null?n:""}n.prototype.toString=function(){var e,i,o,s,u;e=[this.command];o=this.headers["content-length"]===false?true:false;if(o){delete this.headers["content-length"]}u=this.headers;for(i in u){if(!r.call(u,i))continue;s=u[i];e.push(""+i+":"+s)}if(this.body&&!o){e.push("content-length:"+n.sizeOfUTF8(this.body))}e.push(t.LF+this.body);return e.join(t.LF)};n.sizeOfUTF8=function(t){if(t){return encodeURI(t).match(/%..|./g).length}else{return 0}};e=function(e){var i,r,o,s,u,a,c,f,h,l,p,d,g,b,m,v,y;s=e.search(RegExp(""+t.LF+t.LF));u=e.substring(0,s).split(t.LF);o=u.shift();a={};d=function(t){return t.replace(/^\s+|\s+$/g,"")};v=u.reverse();for(g=0,m=v.length;gy;c=p<=y?++b:--b){r=e.charAt(c);if(r===t.NULL){break}i+=r}}return new n(o,a,i)};n.unmarshall=function(n){var i,r,o,s;r=n.split(RegExp(""+t.NULL+t.LF+"*"));s={frames:[],partial:""};s.frames=function(){var t,n,o,s;o=r.slice(0,-1);s=[];for(t=0,n=o.length;t>> 
"+r)}while(true){if(r.length>this.maxWebSocketFrameSize){this.ws.send(r.substring(0,this.maxWebSocketFrameSize));r=r.substring(this.maxWebSocketFrameSize);if(typeof this.debug==="function"){this.debug("remaining = "+r.length)}}else{return this.ws.send(r)}}};r.prototype._setupHeartbeat=function(n){var r,o,s,u,a,c;if((a=n.version)!==i.VERSIONS.V1_1&&a!==i.VERSIONS.V1_2){return}c=function(){var t,e,i,r;i=n["heart-beat"].split(",");r=[];for(t=0,e=i.length;t>> PING"):void 0}}(this))}if(!(this.heartbeat.incoming===0||o===0)){s=Math.max(this.heartbeat.incoming,o);if(typeof this.debug==="function"){this.debug("check PONG every "+s+"ms")}return this.ponger=i.setInterval(s,function(t){return function(){var n;n=e()-t.serverActivity;if(n>s*2){if(typeof t.debug==="function"){t.debug("did not receive server activity for the last "+n+"ms")}return t.ws.close()}}}(this))}};r.prototype._parseConnect=function(){var t,e,n,i;t=1<=arguments.length?o.call(arguments,0):[];i={};switch(t.length){case 2:i=t[0],e=t[1];break;case 3:if(t[1]instanceof Function){i=t[0],e=t[1],n=t[2]}else{i.login=t[0],i.passcode=t[1],e=t[2]}break;case 4:i.login=t[0],i.passcode=t[1],e=t[2],n=t[3];break;default:i.login=t[0],i.passcode=t[1],e=t[2],n=t[3],i.host=t[4]}return[i,e,n]};r.prototype.connect=function(){var r,s,u,a;r=1<=arguments.length?o.call(arguments,0):[];a=this._parseConnect.apply(this,r);u=a[0],this.connectCallback=a[1],s=a[2];if(typeof this.debug==="function"){this.debug("Opening Web Socket...")}this.ws.onmessage=function(i){return function(r){var o,u,a,c,f,h,l,p,d,g,b,m,v;c=typeof ArrayBuffer!=="undefined"&&r.data instanceof ArrayBuffer?(o=new Uint8Array(r.data),typeof i.debug==="function"?i.debug("--- got data length: "+o.length):void 0,function(){var t,e,n;n=[];for(t=0,e=o.length;t& kafka_orgin_sink_out.txt 135 | ``` 136 | 137 | Similarly for the other aggregated tables, setup the sink connectors: 138 | ``` 139 | kubectl exec -it kafka-demo-cp-kafka-0 -c cp-kafka-broker /usr/bin/connect-standalone 
-- /etc/kafka/kubernetes/kafka.ksql.connect.properties /etc/kafka/kubernetes/total_traffic.sink.properties /etc/kafka/kubernetes/window_traffic.sink.properties /etc/kafka/kubernetes/poi_traffic.sink.properties >& kafka_ksql_sink_out.txt 140 | ``` 141 | 142 | ## Start the IoT App 143 | 144 | Grab the Kafka, Zookeeper and YB-tserver headless service names: 145 | ``` 146 | $ kubectl get services 147 | NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE 148 | kafka-demo-cp-kafka-headless ClusterIP None 9092/TCP 1h 149 | kafka-demo-cp-zookeeper-headless ClusterIP None 2888/TCP,3888/TCP 1h 150 | yb-tservers ClusterIP None 7100/TCP,9000/TCP,6379/TCP,9042/TCP 2h 151 | ``` 152 | 153 | Use these as the endpoints for services to start the IoT app 154 | ``` 155 | cd ~/code/yb-iot-fleet-management/kubernetes/helm 156 | helm install yb-iot-helm --name iot-demo --set kafkaHostPort=kafka-demo-cp-kafka-headless:9092,zookeeperHostPort=kafka-demo-cp-zookeeper-headless:2181,yugabyteDBHost=yb-tservers --wait 157 | ``` 158 | 159 | The pod for this app looks like 160 | ``` 161 | NAME READY STATUS RESTARTS AGE 162 | iot-demo-yb-iot-helm-fb6b7db6f-rwskv 2/2 Running 0 46s 163 | ``` 164 | 165 | This pod has two containers: 166 | - One ingests data into the origin topic of `iot-data-event`, which also get transformed into other streams/tables via KSQL and published to new streams/tables. 167 | - Other container reads the YCQL tables from YugaByte DB and reports in the springboard based UI. 168 | 169 | ### Check the IoT App UI 170 | When using minikube, run the following to expose the load balancer endpoints for YugaByte DB UI and the app UI respectively. 171 | ``` 172 | minikube service iot-demo-yb-iot-helm --url 173 | ``` 174 | 175 | For non-minikube setups, one can use the `EXTERNAL-IP`:8080 from app's load balancer service to get the visual output of the fleet movement analytics. 
176 | ``` 177 | $ kubectl get services 178 | NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE 179 | iot-demo-yb-iot-helm LoadBalancer 10.7.254.87 104.198.9.175 8080:31557/TCP 1m 180 | ``` 181 | 182 | This page auto refreshes every 5seconds or so, 183 | 184 | ## YugaByte DB Metrics UI 185 | Independent of the app, the user can also monitor the server side IOPS and related metrics at the `yb-master` UI endpoint. 186 | 187 | On minikube setup, the following command returns the url for master UI. 188 | ``` 189 | minikube service yb-master-ui --url 190 | ``` 191 | 192 | For Kubernetes engines, the `EXTERNAL-IP`:7000 will give the same info: 193 | ``` 194 | kubectl get services 195 | NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE 196 | yb-master-ui LoadBalancer 10.7.241.218 35.227.144.242 7000:30362/TCP 1h 197 | ``` 198 | 199 | The `http://EXTERNAL-IP:7000/tablet-servers` page contains per server current read/write metrics as well as resource usage information. 200 | 201 | One can scale the database cluster to spread the load to account for ever growing data size with streaming apps. This command scales the database servers to 4 pods. 202 | ``` 203 | cd ~/code/yugabyte-db/cloud/kubernetes/helm 204 | helm upgrade yb-iot yugabyte --set replicas.tserver=4 --wait 205 | ``` 206 | The read and write load from the app is distributed to the new added pods as well. 207 | 208 | 209 | ## Next Steps: 210 | - Package yb-kafka-sink jars more cleanly to reduce steps. 211 | - Fork cp-helm-charts and add pre/post kubernetes hooks to ease yb sink connection setup into kafka broker. 212 | - Get YB sink into https://github.com/Landoop/kafka-helm-charts/tree/master/charts. 213 | -------------------------------------------------------------------------------- /kubernetes/helm/yb-iot-helm/.helmignore: -------------------------------------------------------------------------------- 1 | # Patterns to ignore when building packages. 
2 | # This supports shell glob matching, relative path matching, and 3 | # negation (prefixed with !). Only one pattern per line. 4 | .DS_Store 5 | # Common VCS dirs 6 | .git/ 7 | .gitignore 8 | .bzr/ 9 | .bzrignore 10 | .hg/ 11 | .hgignore 12 | .svn/ 13 | # Common backup files 14 | *.swp 15 | *.bak 16 | *.tmp 17 | *~ 18 | # Various IDEs 19 | .project 20 | .idea/ 21 | *.tmproj 22 | .vscode/ 23 | -------------------------------------------------------------------------------- /kubernetes/helm/yb-iot-helm/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | appVersion: "1.0" 3 | description: A Helm chart for YB IoT Fleet Management App on Kubernetes 4 | name: yb-iot-helm 5 | version: latest 6 | -------------------------------------------------------------------------------- /kubernetes/helm/yb-iot-helm/templates/NOTES.txt: -------------------------------------------------------------------------------- 1 | 1. Get YB IoT App Pods by running this command: 2 | kubectl --namespace {{ .Release.Namespace }} get pods 3 | 4 | 2. Get list of YB IoT App services that are running: 5 | kubectl --namespace {{ .Release.Namespace }} get services 6 | 7 | 3. Get information about the load balancer services: 8 | kubectl get svc --namespace {{ .Release.Namespace }} 9 | 10 | 4. Cleanup YugaByte Pods 11 | helm delete {{ .Release.Name }} --purge 12 | -------------------------------------------------------------------------------- /kubernetes/helm/yb-iot-helm/templates/_helpers.tpl: -------------------------------------------------------------------------------- 1 | {{/* vim: set filetype=mustache: */}} 2 | {{/* 3 | Expand the name of the chart. 4 | */}} 5 | {{- define "yb-iot-helm.name" -}} 6 | {{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" -}} 7 | {{- end -}} 8 | 9 | {{/* 10 | Create a default fully qualified app name. 
11 | We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). 12 | If release name contains chart name it will be used as a full name. 13 | */}} 14 | {{- define "yb-iot-helm.fullname" -}} 15 | {{- if .Values.fullnameOverride -}} 16 | {{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" -}} 17 | {{- else -}} 18 | {{- $name := default .Chart.Name .Values.nameOverride -}} 19 | {{- if contains $name .Release.Name -}} 20 | {{- .Release.Name | trunc 63 | trimSuffix "-" -}} 21 | {{- else -}} 22 | {{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" -}} 23 | {{- end -}} 24 | {{- end -}} 25 | {{- end -}} 26 | 27 | {{/* 28 | Create chart name and version as used by the chart label. 29 | */}} 30 | {{- define "yb-iot-helm.chart" -}} 31 | {{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" -}} 32 | {{- end -}} 33 | -------------------------------------------------------------------------------- /kubernetes/helm/yb-iot-helm/templates/deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: {{ include "yb-iot-helm.fullname" . }} 5 | labels: 6 | app.kubernetes.io/name: {{ include "yb-iot-helm.name" . }} 7 | helm.sh/chart: {{ include "yb-iot-helm.chart" . }} 8 | app.kubernetes.io/instance: {{ .Release.Name }} 9 | app.kubernetes.io/managed-by: {{ .Release.Service }} 10 | spec: 11 | replicas: {{ .Values.replicaCount }} 12 | selector: 13 | matchLabels: 14 | app.kubernetes.io/name: {{ include "yb-iot-helm.name" . }} 15 | app.kubernetes.io/instance: {{ .Release.Name }} 16 | template: 17 | metadata: 18 | labels: 19 | app.kubernetes.io/name: {{ include "yb-iot-helm.name" . 
}} 20 | app.kubernetes.io/instance: {{ .Release.Name }} 21 | spec: 22 | containers: 23 | - name: {{ .Chart.Name }} 24 | image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}" 25 | imagePullPolicy: {{ .Values.image.pullPolicy }} 26 | ports: 27 | - name: http 28 | containerPort: 80 29 | protocol: TCP 30 | command: 31 | - java 32 | - -Dcom.iot.app.kafka.brokerlist={{ .Values.kafkaHostPort }} 33 | - -Dcom.iot.app.kafka.zookeeper={{ .Values.zookeeperHostPort }} 34 | - -jar 35 | - /home/yugabyte-iot/iot-kafka-producer.jar 36 | - name: {{ .Chart.Name }}-ui 37 | image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}" 38 | imagePullPolicy: {{ .Values.image.pullPolicy }} 39 | ports: 40 | - name: http 41 | containerPort: 8080 42 | protocol: TCP 43 | command: 44 | - java 45 | - -Dcom.iot.app.cassandra.host={{ .Values.yugabyteDBHost }} 46 | - -Dcom.iot.app.cassandra.port={{ .Values.yugabyteDBYCQLPort }} 47 | - -jar 48 | - /home/yugabyte-iot/iot-springboot-dashboard.jar 49 | -------------------------------------------------------------------------------- /kubernetes/helm/yb-iot-helm/templates/service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: {{ include "yb-iot-helm.fullname" . }}-ui 5 | labels: 6 | app.kubernetes.io/name: {{ include "yb-iot-helm.name" . }} 7 | helm.sh/chart: {{ include "yb-iot-helm.chart" . }} 8 | app.kubernetes.io/instance: {{ .Release.Name }} 9 | app.kubernetes.io/managed-by: {{ .Release.Service }} 10 | spec: 11 | type: {{ .Values.service.type }} 12 | ports: 13 | - port: {{ .Values.service.port }} 14 | name: "ui" 15 | selector: 16 | app.kubernetes.io/name: {{ include "yb-iot-helm.name" . 
}} 17 | app.kubernetes.io/instance: {{ .Release.Name }} 18 | -------------------------------------------------------------------------------- /kubernetes/helm/yb-iot-helm/values.yaml: -------------------------------------------------------------------------------- 1 | # Default values for yb-iot-helm. 2 | # This is a YAML-formatted file. 3 | # Declare variables to be passed into your templates. 4 | 5 | replicaCount: 1 6 | 7 | image: 8 | repository: "yugabytedb/yb-iot" 9 | tag: latest 10 | pullPolicy: Always 11 | 12 | nameOverride: "" 13 | fullnameOverride: "" 14 | 15 | service: 16 | type: LoadBalancer 17 | port: 8080 18 | 19 | kafkaHostPort: localhost:9002 20 | zookeeperHostPort: localhost:2181 21 | yugabyteDBHost: localhost 22 | yugabyteDBYCQLPort: 9042 23 | 24 | resources: {} 25 | 26 | nodeSelector: {} 27 | 28 | tolerations: [] 29 | 30 | affinity: {} 31 | -------------------------------------------------------------------------------- /pom.xml: -------------------------------------------------------------------------------- 1 | 3 | 4.0.0 4 | com.iot.app 5 | iot-traffic-monitor 6 | 1.0.0 7 | pom 8 | IoT Traffic Monitor 9 | 10 | 11 | iot-kafka-producer 12 | iot-spark-processor 13 | iot-springboot-dashboard 14 | 15 | 16 | -------------------------------------------------------------------------------- /resources/IoTData.cql: -------------------------------------------------------------------------------- 1 | // Create keyspace 2 | CREATE KEYSPACE IF NOT EXISTS TrafficKeySpace; 3 | 4 | // Create tables 5 | CREATE TABLE IF NOT EXISTS TrafficKeySpace.Origin_Table (vehicleId text, routeId text, vehicleType text, longitude text, latitude text, timeStamp timestamp, speed double, fuelLevel double, PRIMARY KEY ((vehicleId), timeStamp)) WITH default_time_to_live = 3600; 6 | CREATE TABLE IF NOT EXISTS TrafficKeySpace.Total_Traffic (routeId text, vehicleType text, totalCount bigint, timeStamp timestamp, recordDate text, PRIMARY KEY (routeId, recordDate, vehicleType)); 7 | 
CREATE TABLE IF NOT EXISTS TrafficKeySpace.Window_Traffic (routeId text, vehicleType text, totalCount bigint, timeStamp timestamp, recordDate text, PRIMARY KEY (routeId, recordDate, vehicleType)); 8 | CREATE TABLE IF NOT EXISTS TrafficKeySpace.Poi_Traffic(vehicleid text, vehicletype text, distance bigint, timeStamp timestamp, PRIMARY KEY (vehicleid)); 9 | 10 | // Select from the tables 11 | SELECT count(*) FROM TrafficKeySpace.Origin_Table; 12 | SELECT count(*) FROM TrafficKeySpace.Total_Traffic; 13 | SELECT count(*) FROM TrafficKeySpace.Window_Traffic; 14 | SELECT count(*) FROM TrafficKeySpace.Poi_Traffic; 15 | 16 | // Truncate the tables 17 | TRUNCATE TABLE TrafficKeySpace.Origin_Table; 18 | TRUNCATE TABLE TrafficKeySpace.Total_Traffic; 19 | TRUNCATE TABLE TrafficKeySpace.Window_Traffic; 20 | TRUNCATE TABLE TrafficKeySpace.Poi_Traffic; 21 | -------------------------------------------------------------------------------- /yb-iot-fleet-management-screenshot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yugabyte/yb-iot-fleet-management/ca4a7aefde82ac971c50bb52dd1e1be82adaefb3/yb-iot-fleet-management-screenshot.png -------------------------------------------------------------------------------- /yb-iot-fleet-mgmt-ksql-arch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yugabyte/yb-iot-fleet-management/ca4a7aefde82ac971c50bb52dd1e1be82adaefb3/yb-iot-fleet-mgmt-ksql-arch.png -------------------------------------------------------------------------------- /yb-iot-fleet-mgmt-spark-arch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/yugabyte/yb-iot-fleet-management/ca4a7aefde82ac971c50bb52dd1e1be82adaefb3/yb-iot-fleet-mgmt-spark-arch.png --------------------------------------------------------------------------------