├── DeepStream_Analytics_Applications.pdf ├── LICENSE.md ├── README.md ├── analytics_server_docker ├── README.md ├── cassandra │ ├── Dockerfile │ ├── entrypoint-wrap.sh │ └── schema.cql ├── docker-compose.yml ├── elasticsearch │ ├── Dockerfile │ └── config │ │ └── elasticsearch.yml ├── kafka │ ├── Dockerfile │ ├── LICENSE │ ├── broker-list.sh │ ├── create-topics.sh │ ├── docker_push │ ├── download-kafka.sh │ ├── start-kafka-shell.sh │ ├── start-kafka.sh │ └── versions.sh ├── kibana │ ├── Dockerfile │ └── config │ │ └── kibana.yml ├── logstash │ ├── Dockerfile │ ├── config │ │ └── logstash.yml │ └── pipeline │ │ └── logstash.conf ├── nginx-ui │ ├── Dockerfile │ └── ui.zip ├── node-apis │ ├── Dockerfile │ ├── apis.zip │ └── config │ │ ├── config.json │ │ └── parkingSpots.csv ├── nv-schema.json ├── python │ ├── Dockerfile │ └── tracker.zip ├── readme-images │ ├── architecture.png │ ├── index-creation-1.png │ ├── index-creation-2.png │ └── ui.png ├── spark │ ├── LICENSE │ ├── conf │ │ ├── master │ │ │ └── spark-defaults.conf │ │ └── worker │ │ │ └── spark-defaults.conf │ └── data │ │ └── stream-360-1.0-jar-with-dependencies.jar ├── start.sh └── stop.sh ├── apis ├── .gitignore ├── README.md ├── app │ ├── models │ │ ├── es_model.js │ │ ├── stats_model.js │ │ └── ui_config_model.js │ ├── routes │ │ ├── es.js │ │ ├── index.js │ │ ├── stats.js │ │ └── ui-config.js │ └── ws │ │ └── websocket.js ├── config │ ├── config.json │ └── parkingSpots.csv ├── index.js ├── initializers │ └── server.js ├── package-lock.json ├── package.json └── tests │ └── ws-client.js ├── perception_docker ├── README.md └── run.sh ├── readme-images ├── architecture.png └── test.txt ├── stream ├── .classpath ├── .gitignore ├── .project ├── README.md ├── data │ ├── demo-0.json │ ├── demo-1.json │ └── playbackData.json ├── pom.xml ├── readme-images │ ├── anomaly.png │ ├── batch.png │ └── pipeline.png └── src │ ├── main │ ├── resources │ │ ├── blacklist.txt │ │ ├── docker-config.txt │ │ ├── 
local-config.txt │ │ ├── log4j.properties │ │ ├── schema │ │ │ └── day2.json │ │ └── whitelist.txt │ └── scala │ │ └── com │ │ └── nvidia │ │ └── ds │ │ ├── batch │ │ └── BatchAnomaly.scala │ │ ├── stream │ │ ├── CassandraSink.scala │ │ ├── KafkaSink.scala │ │ ├── StateMgmt.scala │ │ ├── StreamProcessor.scala │ │ ├── StreamProcessorLoadTest.scala │ │ └── Trajectory.scala │ │ └── util │ │ ├── Encryptor.scala │ │ ├── KProducer.scala │ │ ├── Message.scala │ │ ├── Playback.scala │ │ ├── Util.scala │ │ ├── UtilSpark.scala │ │ └── ValidateJson.scala │ └── test │ └── scala │ └── com │ └── nvidia │ └── ds │ └── util │ ├── KProducerSpec.scala │ └── package-info.java ├── tracker ├── .gitignore ├── README.md ├── code │ ├── __init__.py │ ├── euclidean │ │ ├── __init__.py │ │ └── euchelper.py │ ├── geo │ │ ├── __init__.py │ │ ├── core │ │ │ ├── __init__.py │ │ │ ├── conversions.py │ │ │ └── spatial.py │ │ └── inout │ │ │ ├── __init__.py │ │ │ └── shapefile.py │ ├── mctrack │ │ ├── __init__.py │ │ ├── constants.py │ │ ├── ioutils.py │ │ ├── mctrackbatch.py │ │ ├── mctracker.py │ │ ├── mctrackstream.py │ │ ├── trackerutils.py │ │ ├── tracklog.py │ │ └── validation.py │ └── network │ │ ├── __init__.py │ │ └── networkhelper.py ├── config │ ├── config_360d.json │ ├── config_360d_stream.json │ └── day2.json ├── docs │ └── mctracker │ │ ├── arch.png │ │ ├── parking.png │ │ ├── parking_coverage.png │ │ └── system.md ├── test │ └── config.json └── usecasecode │ └── 360d │ └── stream_track.py └── ui ├── .gitignore ├── README.md ├── config ├── env.js ├── paths.js ├── polyfills.js ├── webpack.config.dev.js ├── webpack.config.prod.js └── webpackDevServer.config.js ├── package-lock.json ├── package.json ├── public ├── MarkersWorker.js ├── favicon.ico ├── index.html ├── react-datetime.css └── video-react.css ├── readme-images ├── Component-Hierarchy.jpeg ├── Garage-Components.jpeg ├── Garage.PNG └── HomePage.PNG ├── scripts ├── build.js ├── start.js └── test.js └── src ├── App.css ├── App.js 
├── Component ├── Capacity │ ├── Capacity.css │ └── Capacity.js ├── Flip │ ├── Flip.css │ └── Flip.js ├── Footer │ ├── Footer.css │ └── Footer.js ├── Header │ ├── Header.css │ └── Header.js ├── ListPanel │ ├── Item │ │ ├── Item.css │ │ └── Item.js │ ├── ListPanel.css │ ├── ListPanel.js │ └── PanelItem │ │ ├── Item.css │ │ └── Item.js ├── Map │ ├── CameraMarker │ │ └── CameraMarker.js │ ├── CarMarkers │ │ ├── CarMarker │ │ │ ├── CarMarker.css │ │ │ └── CarMarker.js │ │ └── CarMarkers.js │ ├── GroundOverlay │ │ └── GroundOverlay.js │ ├── LocationMarker │ │ └── LocationMarker.js │ └── Map.js ├── Search │ ├── Search.css │ └── Search.js ├── UI │ ├── Button │ │ ├── Button.css │ │ └── Button.js │ ├── Input │ │ └── Input.js │ └── Loader │ │ ├── Loader.css │ │ └── Loader.js ├── Video │ ├── StreamVideo │ │ ├── HLS.js │ │ └── StreamPlayer.js │ ├── Video.css │ └── Video.js └── Widget │ ├── Widget.css │ └── Widget.js ├── Hoc └── Auxiliary │ └── Auxiliary.js ├── PageBuilder ├── HomeMap.css ├── HomeMap.js └── SmartGaragePage │ ├── SmartGaragePage.css │ └── SmartGaragePage.js ├── assets ├── NVLogo-H-White-Small.png ├── Security_Camera-512.png ├── X-StrpP1_simpleConverted.png ├── blue.png ├── green.png └── mm.png ├── index.css ├── index.js ├── logo.svg └── registerServiceWorker.js /DeepStream_Analytics_Applications.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/DeepStream_Analytics_Applications.pdf -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. 
2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ------------------------------------------------------ 2 | # This sample application is no longer maintained 3 | # ------------------------------------------------------ 4 | 5 | # DeepStream 5.0 - 360 Degree Smart Parking Application 6 | 7 | ![Architecture](readme-images/architecture.png?raw=true "Architecture") 8 | 9 | This document describes the full end to end smart parking application that is available with DeepStream 5.0. The above architecture provides a reference to build distributed and scalable DeepStream applications. 
10 | 11 | 12 | 13 | 14 | ## Introduction 15 | 16 | The perception capabilities of a DeepStream application can now seamlessly be augmented with data analytics capabilities to build complete solutions, offering rich data dashboards for actionable insights. This bridging of DeepStream’s perception capabilities with data analytics frameworks is particularly useful for applications requiring long term trend analytics, global situational awareness, and forensic analysis. This also allows leveraging major Internet of Things (IOT) services as the infrastructure backbone. 17 | 18 | The data analytics backbone is connected to DeepStream applications through a distributed messaging fabric. DeepStream 5.0 offers two new plugins, gstnvmsgconv and gstnvmsgbroker, to transform and connect to various messaging protocols. The protocol supported in this release is Kafka. 19 | 20 | 21 | 22 | To build an end to end implementation of the Analytics layer, DeepStream 5.0 uses open source tools and frameworks that can easily be reproduced for deployment on an on-premise server or in the Cloud. 23 | The framework comprises stream and batch processing capabilities. Every component of the Analytics layer, Message Broker, Streaming, NoSQL, and Search Indexer can be horizontally scaled. The streaming analytics pipeline can be used for processes like anomaly detection, alerting, and computation of statistics like traffic flow rate. Batch processing can be used to extract patterns in the data, look for anomalies over a period of time, and build machine learning models. The data is kept in a NoSQL database for state management, e.g. the occupancy of a building, activity in a store, or people movement in a train station. This also provides the capability for forensic analytics, if needed. The data can be indexed for search and time series analytics. Information generated by streaming and batch processing is exposed through a standard API for visualization. 
The API can be accessed through REST, WebSocket, or messaging, based on the use case. The user interface allows the user to consume all the relevant information. 24 | Deployment is based on an open source technology stack. The modules and technology stack are shown with respect to the Streaming Data pipeline. 25 | 26 | ## Getting Started 27 | 28 | To get started, clone this repository by either clicking the download button on top right corner, or using the command 29 | 30 | git clone https://github.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application.git 31 | 32 | To run this application, the user needs to start the following docker containers: 33 | 34 | 1. **[Analytics Server](https://github.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/tree/master/analytics_server_docker)**: Check the README inside `analytics_server_docker` directory and follow the steps to start the docker containers. 35 | 2. **[Perception Server](https://github.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/tree/master/perception_docker)**: Check the README inside `perception_docker` directory and follow the steps to start the docker container. 36 | 3. **[Application note](https://github.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/tree/master/DeepStream_Analytics_Applications.pdf)** 37 | 38 | Other modules included in this application are as follows: 39 | 1. [Apis](https://github.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/tree/master/apis) 40 | 2. [Stream](https://github.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/tree/master/stream) 41 | 3. [Tracker](https://github.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/tree/master/tracker) 42 | 4. 
[Ui](https://github.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/tree/master/ui) 43 | -------------------------------------------------------------------------------- /analytics_server_docker/cassandra/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM cassandra:3.11.2 2 | 3 | WORKDIR /home/cassandra 4 | 5 | COPY entrypoint-wrap.sh . 6 | 7 | COPY schema.cql . 8 | 9 | ENTRYPOINT ["/home/cassandra/entrypoint-wrap.sh"] 10 | 11 | CMD ["cassandra", "-f"] -------------------------------------------------------------------------------- /analytics_server_docker/cassandra/entrypoint-wrap.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | until cqlsh -u cassandra -p cassandra -f /home/cassandra/schema.cql; do 4 | echo "cqlsh: Cassandra is unavailable - retry later" 5 | sleep 2 6 | done & 7 | 8 | exec /docker-entrypoint.sh "$@" -------------------------------------------------------------------------------- /analytics_server_docker/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | services: 3 | 4 | cassandra: 5 | container_name: cassandra 6 | build: 7 | context: cassandra/ 8 | ports: 9 | - '7000:7000' 10 | - '7001:7001' 11 | - '7199:7199' 12 | - '9042:9042' 13 | - '9160:9160' 14 | networks: 15 | - smart-garage-360 16 | 17 | elasticsearch: 18 | build: 19 | context: elasticsearch/ 20 | container_name: elasticsearch 21 | volumes: 22 | - ./elasticsearch/config/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml:ro 23 | ports: 24 | - "9200:9200" 25 | - "9300:9300" 26 | environment: 27 | ES_JAVA_OPTS: "-Xmx1024m -Xms256m" 28 | networks: 29 | - smart-garage-360 30 | 31 | logstash: 32 | build: 33 | context: logstash/ 34 | container_name: logstash 35 | volumes: 36 | - ./logstash/config/logstash.yml:/usr/share/logstash/config/logstash.yml:ro 37 | - 
./logstash/pipeline:/usr/share/logstash/pipeline:ro 38 | ports: 39 | - "5000:5000" 40 | environment: 41 | LS_JAVA_OPTS: "-Xmx1024m -Xms256m" 42 | networks: 43 | - smart-garage-360 44 | depends_on: 45 | - elasticsearch 46 | - cassandra 47 | - kafka 48 | 49 | kibana: 50 | build: 51 | context: kibana/ 52 | container_name: kibana 53 | volumes: 54 | - ./kibana/config/:/usr/share/kibana/config:ro 55 | ports: 56 | - "5601:5601" 57 | networks: 58 | - smart-garage-360 59 | depends_on: 60 | - elasticsearch 61 | 62 | zookeeper: 63 | image: wurstmeister/zookeeper 64 | container_name: zookeeper 65 | ports: 66 | - "2181:2181" 67 | networks: 68 | - smart-garage-360 69 | kafka: 70 | build: 71 | context: kafka/ 72 | container_name: kafka 73 | ports: 74 | - "9092:9092" 75 | networks: 76 | - smart-garage-360 77 | environment: 78 | KAFKA_ADVERTISED_HOST_NAME: ${IP_ADDRESS} 79 | HOSTNAME_COMMAND: "route -n | awk '/UG[ \t]/{print $$2}'" 80 | KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 81 | KAFKA_CREATE_TOPICS: "metromind-start:8:1,metromind-raw:8:1,metromind-anomaly:8:1" 82 | volumes: 83 | - /var/run/docker.sock:/var/run/docker.sock 84 | depends_on: 85 | - zookeeper 86 | 87 | master: 88 | image: gettyimages/spark:2.2.0-hadoop-2.7 89 | container_name: spark-master 90 | command: bin/spark-class org.apache.spark.deploy.master.Master -h master 91 | hostname: master 92 | environment: 93 | MASTER: spark://master:7077 94 | SPARK_CONF_DIR: /conf 95 | SPARK_PUBLIC_DNS: localhost 96 | expose: 97 | - 7001 98 | - 7002 99 | - 7003 100 | - 7004 101 | - 7005 102 | - 7006 103 | - 7077 104 | - 6066 105 | ports: 106 | - 4040:4040 107 | - 6066:6066 108 | - 7077:7077 109 | - 8080:8080 110 | networks: 111 | - smart-garage-360 112 | volumes: 113 | - ./spark/conf/master:/conf 114 | - ./spark/data:/tmp/data 115 | depends_on: 116 | - logstash 117 | 118 | worker: 119 | image: gettyimages/spark:2.2.0-hadoop-2.7 120 | container_name: spark-worker 121 | command: bin/spark-class org.apache.spark.deploy.worker.Worker 
spark://master:7077 122 | hostname: worker 123 | environment: 124 | SPARK_CONF_DIR: /conf 125 | SPARK_WORKER_CORES: 4 126 | SPARK_WORKER_MEMORY: 8g 127 | SPARK_WORKER_PORT: 8881 128 | SPARK_WORKER_WEBUI_PORT: 8081 129 | SPARK_PUBLIC_DNS: localhost 130 | links: 131 | - master 132 | expose: 133 | - 7012 134 | - 7013 135 | - 7014 136 | - 7015 137 | - 7016 138 | - 8881 139 | ports: 140 | - 8081:8081 141 | networks: 142 | - smart-garage-360 143 | volumes: 144 | - ./spark/conf/worker:/conf 145 | - ./spark/data:/tmp/data 146 | depends_on: 147 | - logstash 148 | 149 | apis: 150 | container_name: apis 151 | environment: 152 | IP_ADDRESS: ${IP_ADDRESS} 153 | NODE_PORT: 3000 154 | build: 155 | context: node-apis/ 156 | ports: 157 | - "3000:3000" 158 | depends_on: 159 | - elasticsearch 160 | - cassandra 161 | volumes: 162 | - ./node-apis/config/:/home/node/config:ro 163 | command: npm start 164 | networks: 165 | - smart-garage-360 166 | 167 | ui: 168 | container_name: ui 169 | build: 170 | context: nginx-ui/ 171 | args: 172 | REACT_APP_BACKEND_IP_ADDRESS: ${IP_ADDRESS} 173 | REACT_APP_BACKEND_PORT: 3000 174 | REACT_APP_GOOGLE_MAP_API_KEY: ${GOOGLE_MAP_API_KEY} 175 | ports: 176 | - "80:80" 177 | depends_on: 178 | - apis 179 | networks: 180 | - smart-garage-360 181 | 182 | python-tracker-module: 183 | container_name: python-tracker-module 184 | build: 185 | context: python/ 186 | depends_on: 187 | - logstash 188 | networks: 189 | - smart-garage-360 190 | 191 | networks: 192 | smart-garage-360: 193 | driver: bridge 194 | 195 | 196 | -------------------------------------------------------------------------------- /analytics_server_docker/elasticsearch/Dockerfile: -------------------------------------------------------------------------------- 1 | # https://github.com/elastic/elasticsearch-docker 2 | FROM docker.elastic.co/elasticsearch/elasticsearch-oss:6.4.0 3 | 4 | # Add your elasticsearch plugins setup here 5 | # Example: RUN elasticsearch-plugin install analysis-icu 6 | 
-------------------------------------------------------------------------------- /analytics_server_docker/elasticsearch/config/elasticsearch.yml: -------------------------------------------------------------------------------- 1 | --- 2 | ## Default Elasticsearch configuration from elasticsearch-docker. 3 | ## from https://github.com/elastic/elasticsearch-docker/blob/master/build/elasticsearch/elasticsearch.yml 4 | # 5 | cluster.name: "docker-cluster" 6 | network.host: 0.0.0.0 7 | 8 | # minimum_master_nodes need to be explicitly set when bound on a public IP 9 | # set to 1 to allow single node clusters 10 | # Details: https://github.com/elastic/elasticsearch/pull/17288 11 | discovery.zen.minimum_master_nodes: 1 12 | 13 | ## Use single node discovery in order to disable production mode and avoid bootstrap checks 14 | ## see https://www.elastic.co/guide/en/elasticsearch/reference/current/bootstrap-checks.html 15 | # 16 | discovery.type: single-node 17 | -------------------------------------------------------------------------------- /analytics_server_docker/kafka/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM openjdk:8u171-jre-alpine 2 | 3 | ARG kafka_version=1.1.1 4 | ARG scala_version=2.12 5 | ARG glibc_version=2.27-r0 6 | 7 | MAINTAINER wurstmeister 8 | 9 | ENV KAFKA_VERSION=$kafka_version \ 10 | SCALA_VERSION=$scala_version \ 11 | KAFKA_HOME=/opt/kafka \ 12 | GLIBC_VERSION=$glibc_version 13 | 14 | ENV PATH=${PATH}:${KAFKA_HOME}/bin 15 | 16 | COPY download-kafka.sh start-kafka.sh broker-list.sh create-topics.sh versions.sh /tmp/ 17 | 18 | RUN apk add --no-cache bash curl jq docker \ 19 | && mkdir /opt \ 20 | && chmod a+x /tmp/*.sh \ 21 | && mv /tmp/start-kafka.sh /tmp/broker-list.sh /tmp/create-topics.sh /tmp/versions.sh /usr/bin \ 22 | && sync && /tmp/download-kafka.sh \ 23 | && tar xfz /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz -C /opt \ 24 | && rm /tmp/kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz \ 25 
| && ln -s /opt/kafka_${SCALA_VERSION}-${KAFKA_VERSION} /opt/kafka \ 26 | && rm /tmp/* \ 27 | && wget https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_VERSION}/glibc-${GLIBC_VERSION}.apk \ 28 | && apk add --no-cache --allow-untrusted glibc-${GLIBC_VERSION}.apk \ 29 | && rm glibc-${GLIBC_VERSION}.apk 30 | 31 | 32 | VOLUME ["/kafka"] 33 | 34 | # Use "exec" form so that it runs as PID 1 (useful for graceful shutdown) 35 | CMD ["start-kafka.sh"] 36 | -------------------------------------------------------------------------------- /analytics_server_docker/kafka/broker-list.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | CONTAINERS=$(docker ps | grep 9092 | awk '{print $1}') 4 | BROKERS=$(for CONTAINER in ${CONTAINERS}; do docker port "$CONTAINER" 9092 | sed -e "s/0.0.0.0:/$HOST_IP:/g"; done) 5 | echo "${BROKERS/$'\n'/,}" 6 | -------------------------------------------------------------------------------- /analytics_server_docker/kafka/create-topics.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [[ -z "$KAFKA_CREATE_TOPICS" ]]; then 4 | exit 0 5 | fi 6 | 7 | if [[ -z "$START_TIMEOUT" ]]; then 8 | START_TIMEOUT=600 9 | fi 10 | 11 | start_timeout_exceeded=false 12 | count=0 13 | step=10 14 | while netstat -lnt | awk '$4 ~ /:'"$KAFKA_PORT"'$/ {exit 1}'; do 15 | echo "waiting for kafka to be ready" 16 | sleep $step; 17 | count=$((count + step)) 18 | if [ $count -gt $START_TIMEOUT ]; then 19 | start_timeout_exceeded=true 20 | break 21 | fi 22 | done 23 | 24 | if $start_timeout_exceeded; then 25 | echo "Not able to auto-create topic (waited for $START_TIMEOUT sec)" 26 | exit 1 27 | fi 28 | 29 | # introduced in 0.10. In earlier versions, this will fail because the topic already exists. 
30 | # shellcheck disable=SC1091 31 | source "/usr/bin/versions.sh" 32 | if [[ "$MAJOR_VERSION" == "0" && "$MINOR_VERSION" -gt "9" ]] || [[ "$MAJOR_VERSION" -gt "0" ]]; then 33 | KAFKA_0_10_OPTS="--if-not-exists" 34 | fi 35 | 36 | # Expected format: 37 | # name:partitions:replicas:cleanup.policy 38 | IFS="${KAFKA_CREATE_TOPICS_SEPARATOR-,}"; for topicToCreate in $KAFKA_CREATE_TOPICS; do 39 | echo "creating topics: $topicToCreate" 40 | IFS=':' read -r -a topicConfig <<< "$topicToCreate" 41 | config= 42 | if [ -n "${topicConfig[3]}" ]; then 43 | config="--config=cleanup.policy=${topicConfig[3]}" 44 | fi 45 | 46 | COMMAND="JMX_PORT='' ${KAFKA_HOME}/bin/kafka-topics.sh \\ 47 | --create \\ 48 | --zookeeper ${KAFKA_ZOOKEEPER_CONNECT} \\ 49 | --topic ${topicConfig[0]} \\ 50 | --partitions ${topicConfig[1]} \\ 51 | --replication-factor ${topicConfig[2]} \\ 52 | ${config} \\ 53 | ${KAFKA_0_10_OPTS} &" 54 | eval "${COMMAND}" 55 | done 56 | 57 | wait 58 | -------------------------------------------------------------------------------- /analytics_server_docker/kafka/docker_push: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | BASE_IMAGE="wurstmeister/kafka" 4 | IMAGE_VERSION="$1" 5 | 6 | if [ -z "$IMAGE_VERSION" ]; then 7 | echo "No IMAGE_VERSION var specified" 8 | exit 1 9 | fi 10 | 11 | echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin 12 | TARGET="$BASE_IMAGE:$IMAGE_VERSION" 13 | docker tag "$BASE_IMAGE" "$TARGET" 14 | docker push "$TARGET" 15 | -------------------------------------------------------------------------------- /analytics_server_docker/kafka/download-kafka.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh -e 2 | 3 | # shellcheck disable=SC1091 4 | source "/usr/bin/versions.sh" 5 | 6 | FILENAME="kafka_${SCALA_VERSION}-${KAFKA_VERSION}.tgz" 7 | 8 | url=$(curl --stderr /dev/null 
"https://www.apache.org/dyn/closer.cgi?path=/kafka/${KAFKA_VERSION}/${FILENAME}&as_json=1" | jq -r '"\(.preferred)\(.path_info)"') 9 | 10 | # Test to see if the suggested mirror has this version, currently pre 2.1.1 versions 11 | # do not appear to be actively mirrored. This may also be useful if closer.cgi is down. 12 | if [[ ! $(curl -s -f -I "${url}") ]]; then 13 | echo "Mirror does not have desired version, downloading direct from Apache" 14 | url="https://archive.apache.org/dist/kafka/${KAFKA_VERSION}/${FILENAME}" 15 | fi 16 | 17 | echo "Downloading Kafka from $url" 18 | wget "${url}" -O "/tmp/${FILENAME}" 19 | -------------------------------------------------------------------------------- /analytics_server_docker/kafka/start-kafka-shell.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -e HOST_IP=$1 -e ZK=$2 -i -t wurstmeister/kafka /bin/bash 3 | -------------------------------------------------------------------------------- /analytics_server_docker/kafka/start-kafka.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | # Allow specific kafka versions to perform any unique bootstrap operations 4 | OVERRIDE_FILE="/opt/overrides/${KAFKA_VERSION}.sh" 5 | if [[ -x "$OVERRIDE_FILE" ]]; then 6 | echo "Executing override file $OVERRIDE_FILE" 7 | eval "$OVERRIDE_FILE" 8 | fi 9 | 10 | # Store original IFS config, so we can restore it at various stages 11 | ORIG_IFS=$IFS 12 | 13 | if [[ -z "$KAFKA_ZOOKEEPER_CONNECT" ]]; then 14 | echo "ERROR: missing mandatory config: KAFKA_ZOOKEEPER_CONNECT" 15 | exit 1 16 | fi 17 | 18 | if [[ -z "$KAFKA_PORT" ]]; then 19 | export KAFKA_PORT=9092 20 | fi 21 | 22 | create-topics.sh & 23 | unset KAFKA_CREATE_TOPICS 24 | 25 | if [[ -z "$KAFKA_ADVERTISED_PORT" && \ 26 | -z "$KAFKA_LISTENERS" && \ 27 | -z "$KAFKA_ADVERTISED_LISTENERS" && \ 28 | -S 
/var/run/docker.sock ]]; then 29 | KAFKA_ADVERTISED_PORT=$(docker port "$(hostname)" $KAFKA_PORT | sed -r 's/.*:(.*)/\1/g') 30 | export KAFKA_ADVERTISED_PORT 31 | fi 32 | 33 | if [[ -z "$KAFKA_BROKER_ID" ]]; then 34 | if [[ -n "$BROKER_ID_COMMAND" ]]; then 35 | KAFKA_BROKER_ID=$(eval "$BROKER_ID_COMMAND") 36 | export KAFKA_BROKER_ID 37 | else 38 | # By default auto allocate broker ID 39 | export KAFKA_BROKER_ID=-1 40 | fi 41 | fi 42 | 43 | if [[ -z "$KAFKA_LOG_DIRS" ]]; then 44 | export KAFKA_LOG_DIRS="/kafka/kafka-logs-$HOSTNAME" 45 | fi 46 | 47 | if [[ -n "$KAFKA_HEAP_OPTS" ]]; then 48 | sed -r -i 's/(export KAFKA_HEAP_OPTS)="(.*)"/\1="'"$KAFKA_HEAP_OPTS"'"/g' "$KAFKA_HOME/bin/kafka-server-start.sh" 49 | unset KAFKA_HEAP_OPTS 50 | fi 51 | 52 | if [[ -n "$HOSTNAME_COMMAND" ]]; then 53 | HOSTNAME_VALUE=$(eval "$HOSTNAME_COMMAND") 54 | 55 | # Replace any occurences of _{HOSTNAME_COMMAND} with the value 56 | IFS=$'\n' 57 | for VAR in $(env); do 58 | if [[ $VAR =~ ^KAFKA_ && "$VAR" =~ "_{HOSTNAME_COMMAND}" ]]; then 59 | eval "export ${VAR//_\{HOSTNAME_COMMAND\}/$HOSTNAME_VALUE}" 60 | fi 61 | done 62 | IFS=$ORIG_IFS 63 | fi 64 | 65 | if [[ -n "$PORT_COMMAND" ]]; then 66 | PORT_VALUE=$(eval "$PORT_COMMAND") 67 | 68 | # Replace any occurences of _{PORT_COMMAND} with the value 69 | IFS=$'\n' 70 | for VAR in $(env); do 71 | if [[ $VAR =~ ^KAFKA_ && "$VAR" =~ "_{PORT_COMMAND}" ]]; then 72 | eval "export ${VAR//_\{PORT_COMMAND\}/$PORT_VALUE}" 73 | fi 74 | done 75 | IFS=$ORIG_IFS 76 | fi 77 | 78 | if [[ -n "$RACK_COMMAND" && -z "$KAFKA_BROKER_RACK" ]]; then 79 | KAFKA_BROKER_RACK=$(eval "$RACK_COMMAND") 80 | export KAFKA_BROKER_RACK 81 | fi 82 | 83 | # Try and configure minimal settings or exit with error if there isn't enough information 84 | if [[ -z "$KAFKA_ADVERTISED_HOST_NAME$KAFKA_LISTENERS" ]]; then 85 | if [[ -n "$KAFKA_ADVERTISED_LISTENERS" ]]; then 86 | echo "ERROR: Missing environment variable KAFKA_LISTENERS. 
Must be specified when using KAFKA_ADVERTISED_LISTENERS" 87 | exit 1 88 | elif [[ -z "$HOSTNAME_VALUE" ]]; then 89 | echo "ERROR: No listener or advertised hostname configuration provided in environment." 90 | echo " Please define KAFKA_LISTENERS / (deprecated) KAFKA_ADVERTISED_HOST_NAME" 91 | exit 1 92 | fi 93 | 94 | # Maintain existing behaviour 95 | # If HOSTNAME_COMMAND is provided, set that to the advertised.host.name value if listeners are not defined. 96 | export KAFKA_ADVERTISED_HOST_NAME="$HOSTNAME_VALUE" 97 | fi 98 | 99 | #Issue newline to config file in case there is not one already 100 | echo "" >> "$KAFKA_HOME/config/server.properties" 101 | 102 | ( 103 | function updateConfig() { 104 | key=$1 105 | value=$2 106 | file=$3 107 | 108 | # Omit $value here, in case there is sensitive information 109 | echo "[Configuring] '$key' in '$file'" 110 | 111 | # If config exists in file, replace it. Otherwise, append to file. 112 | if grep -E -q "^#?$key=" "$file"; then 113 | sed -r -i "s@^#?$key=.*@$key=$value@g" "$file" #note that no config values may contain an '@' char 114 | else 115 | echo "$key=$value" >> "$file" 116 | fi 117 | } 118 | 119 | # Fixes #312 120 | # KAFKA_VERSION + KAFKA_HOME + grep -rohe KAFKA[A-Z0-0_]* /opt/kafka/bin | sort | uniq | tr '\n' '|' 121 | EXCLUSIONS="|KAFKA_VERSION|KAFKA_HOME|KAFKA_DEBUG|KAFKA_GC_LOG_OPTS|KAFKA_HEAP_OPTS|KAFKA_JMX_OPTS|KAFKA_JVM_PERFORMANCE_OPTS|KAFKA_LOG|KAFKA_OPTS|" 122 | 123 | # Read in env as a new-line separated array. This handles the case of env variables have spaces and/or carriage returns. See #313 124 | IFS=$'\n' 125 | for VAR in $(env) 126 | do 127 | env_var=$(echo "$VAR" | cut -d= -f1) 128 | if [[ "$EXCLUSIONS" = *"|$env_var|"* ]]; then 129 | echo "Excluding $env_var from broker config" 130 | continue 131 | fi 132 | 133 | if [[ $env_var =~ ^KAFKA_ ]]; then 134 | kafka_name=$(echo "$env_var" | cut -d_ -f2- | tr '[:upper:]' '[:lower:]' | tr _ .) 
135 | updateConfig "$kafka_name" "${!env_var}" "$KAFKA_HOME/config/server.properties" 136 | fi 137 | 138 | if [[ $env_var =~ ^LOG4J_ ]]; then 139 | log4j_name=$(echo "$env_var" | tr '[:upper:]' '[:lower:]' | tr _ .) 140 | updateConfig "$log4j_name" "${!env_var}" "$KAFKA_HOME/config/log4j.properties" 141 | fi 142 | done 143 | ) 144 | 145 | if [[ -n "$CUSTOM_INIT_SCRIPT" ]] ; then 146 | eval "$CUSTOM_INIT_SCRIPT" 147 | fi 148 | 149 | exec "$KAFKA_HOME/bin/kafka-server-start.sh" "$KAFKA_HOME/config/server.properties" 150 | -------------------------------------------------------------------------------- /analytics_server_docker/kafka/versions.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | 3 | MAJOR_VERSION=$(echo "$KAFKA_VERSION" | cut -d. -f1) 4 | export MAJOR_VERSION 5 | 6 | MINOR_VERSION=$(echo "$KAFKA_VERSION" | cut -d. -f2) 7 | export MINOR_VERSION 8 | -------------------------------------------------------------------------------- /analytics_server_docker/kibana/Dockerfile: -------------------------------------------------------------------------------- 1 | # https://github.com/elastic/kibana-docker 2 | FROM docker.elastic.co/kibana/kibana-oss:6.4.0 3 | 4 | # Add your kibana plugins setup here 5 | # Example: RUN kibana-plugin install 6 | -------------------------------------------------------------------------------- /analytics_server_docker/kibana/config/kibana.yml: -------------------------------------------------------------------------------- 1 | --- 2 | ## Default Kibana configuration from kibana-docker. 
3 | ## from https://github.com/elastic/kibana-docker/blob/master/build/kibana/config/kibana.yml 4 | # 5 | server.name: kibana 6 | server.host: "0" 7 | elasticsearch.url: http://elasticsearch:9200 8 | -------------------------------------------------------------------------------- /analytics_server_docker/logstash/Dockerfile: -------------------------------------------------------------------------------- 1 | # https://github.com/elastic/logstash-docker 2 | FROM docker.elastic.co/logstash/logstash-oss:6.4.0 3 | 4 | # Add your logstash plugins setup here 5 | # Example: RUN logstash-plugin install logstash-filter-json 6 | RUN logstash-plugin install logstash-filter-json logstash-output-kafka 7 | RUN logstash-plugin install logstash-filter-json logstash-input-kafka 8 | RUN logstash-plugin install logstash-filter-json logstash-filter-fingerprint -------------------------------------------------------------------------------- /analytics_server_docker/logstash/config/logstash.yml: -------------------------------------------------------------------------------- 1 | --- 2 | ## Default Logstash configuration from logstash-docker. 
3 | ## from https://github.com/elastic/logstash-docker/blob/master/build/logstash/config/logstash-oss.yml 4 | # 5 | http.host: "0.0.0.0" 6 | path.config: /usr/share/logstash/pipeline 7 | -------------------------------------------------------------------------------- /analytics_server_docker/logstash/pipeline/logstash.conf: -------------------------------------------------------------------------------- 1 | input { 2 | tcp { 3 | port => 5000 4 | type => "tcp5000" 5 | } 6 | kafka { 7 | type => "metromind-start" 8 | consumer_threads => 2 9 | topics => ["metromind-start"] 10 | decorate_events => true 11 | codec => "plain" 12 | key_deserializer_class => "org.apache.kafka.common.serialization.StringDeserializer" 13 | bootstrap_servers => "kafka:9092" 14 | } 15 | kafka { 16 | type => "metromind-raw" 17 | consumer_threads => 2 18 | topics => ["metromind-raw"] 19 | decorate_events => true 20 | codec => "plain" 21 | key_deserializer_class => "org.apache.kafka.common.serialization.StringDeserializer" 22 | bootstrap_servers => "kafka:9092" 23 | } 24 | kafka { 25 | type => "metromind-anomaly" 26 | consumer_threads => 1 27 | topics => ["metromind-anomaly"] 28 | decorate_events => true 29 | codec => "plain" 30 | key_deserializer_class => "org.apache.kafka.common.serialization.StringDeserializer" 31 | bootstrap_servers => "kafka:9092" 32 | } 33 | 34 | } 35 | 36 | ## Add your filters / logstash plugins configuration here 37 | 38 | filter { 39 | json { source => "message" } 40 | 41 | if [type] == "metromind-anomaly" { 42 | fingerprint { 43 | method => "SHA1" 44 | key => "HMAC" 45 | source => [ "@timestamp" ] 46 | target => "Id" 47 | } 48 | } 49 | grok { 50 | match => ["@timestamp", "%{YEAR:[@metadata][year]}-%{MONTHNUM:[@metadata][month]}-%{MONTHDAY:[@metadata][day]}T%{GREEDYDATA}"] 51 | } 52 | 53 | mutate { 54 | remove_field => ["kafka", "message"] 55 | } 56 | } 57 | 58 | 59 | output { 60 | if [type] == "tcp5000" { 61 | elasticsearch { 62 | hosts => "elasticsearch:9200" 63 | } 64 
| } 65 | if [type] == "metromind-start" or [type] == "metromind-raw" { 66 | 67 | elasticsearch { 68 | hosts => "elasticsearch:9200" 69 | index => "%{type}-day2-%{[@metadata][year]}-%{[@metadata][month]}" 70 | document_type => "logs" 71 | retry_max_interval => 10 72 | action => "index" 73 | timeout => 60 74 | } 75 | 76 | }else { 77 | elasticsearch { 78 | hosts => "elasticsearch:9200" 79 | index => "%{type}-day2-%{[@metadata][year]}-%{[@metadata][month]}" 80 | document_type => "logs" 81 | retry_max_interval => 10 82 | action => "index" 83 | document_id => "%{Id}" 84 | timeout => 60 85 | } 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /analytics_server_docker/nginx-ui/Dockerfile: -------------------------------------------------------------------------------- 1 | # build environment 2 | FROM node:8 as ui-builder 3 | 4 | LABEL stage=ui-builder 5 | 6 | # Create app directory 7 | WORKDIR /home/ui 8 | 9 | COPY ui.zip . 10 | 11 | RUN apt-get update && apt-get install unzip 12 | 13 | RUN unzip ui.zip && rm ui.zip 14 | 15 | RUN npm install 16 | 17 | ARG REACT_APP_BACKEND_IP_ADDRESS 18 | 19 | ARG REACT_APP_BACKEND_PORT 20 | 21 | ARG REACT_APP_GOOGLE_MAP_API_KEY 22 | 23 | ENV REACT_APP_BACKEND_IP_ADDRESS $REACT_APP_BACKEND_IP_ADDRESS 24 | 25 | ENV REACT_APP_BACKEND_PORT $REACT_APP_BACKEND_PORT 26 | 27 | ENV REACT_APP_GOOGLE_MAP_API_KEY $REACT_APP_GOOGLE_MAP_API_KEY 28 | 29 | RUN npm run build 30 | 31 | # production environment 32 | FROM nginx 33 | 34 | COPY --from=ui-builder /home/ui/build /usr/share/nginx/html 35 | 36 | CMD ["nginx", "-g", "daemon off;"] -------------------------------------------------------------------------------- /analytics_server_docker/nginx-ui/ui.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/analytics_server_docker/nginx-ui/ui.zip 
-------------------------------------------------------------------------------- /analytics_server_docker/node-apis/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:8 2 | 3 | # Create app directory 4 | WORKDIR /home/node 5 | 6 | COPY apis.zip . 7 | 8 | RUN apt-get update && apt-get install unzip 9 | 10 | RUN unzip apis.zip && rm apis.zip 11 | 12 | RUN npm install 13 | -------------------------------------------------------------------------------- /analytics_server_docker/node-apis/apis.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/analytics_server_docker/node-apis/apis.zip -------------------------------------------------------------------------------- /analytics_server_docker/node-apis/config/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "home": { 3 | "name": "Home", 4 | "username_api": "", 5 | "googleMap": { 6 | "defaultCenter": { 7 | "lat": 37.2667081, 8 | "lng": -121.9852038 9 | }, 10 | "defaultZoom": 14, 11 | "maxZoom": 21, 12 | "minZoom": 10, 13 | "mapTypeControl": true, 14 | "mapTypeId": "roadmap" 15 | }, 16 | "locations": [ 17 | { 18 | "name": "garage", 19 | "lat": 37.287535, 20 | "lng": -121.98473 21 | } 22 | ] 23 | }, 24 | "garage": { 25 | "name": "Garage", 26 | "defaults": { 27 | "level": "P1" 28 | }, 29 | "bounds": { 30 | "north": 37.2886489370708, 31 | "south": 37.2864695830171, 32 | "east": -121.983629765596, 33 | "west": -121.986218361030 34 | }, 35 | "googleMap": { 36 | "defaultCenter": { 37 | "lat": 37.287535, 38 | "lng": -121.98473 39 | }, 40 | "defaultZoom": 19, 41 | "maxZoom": 21, 42 | "minZoom": 10, 43 | "mapTypeControl": true, 44 | "mapTypeId": "roadmap" 45 | }, 46 | "groundOverlay": { 47 | "p1GroundImage": "assets/X-StrpP1_simpleConverted.png", 48 | 
"p1Bounds": { 49 | "north": 37.2881998, 50 | "south": 37.2863798, 51 | "east": -121.9838699, 52 | "west": -121.9859025 53 | } 54 | }, 55 | "isLive": false, 56 | "live": { 57 | "webSocket": { 58 | "url": "", 59 | "startTimestamp": "", 60 | "garageId": "endeavor", 61 | "garageLevel": "P1", 62 | "dialogAutoCloseSeconds": 5 63 | }, 64 | "apis": { 65 | "baseurl": "", 66 | "alerts": "/es/alerts", 67 | "events": "/es/events", 68 | "kpi": "/stats/endeavor", 69 | "startTimestamp": "", 70 | "alertEventRefreshIntervalSeconds": 5, 71 | "uiDelaySeconds": 30, 72 | "alertEventListLength": 20 73 | } 74 | }, 75 | "playback": { 76 | "webSocket": { 77 | "url": "", 78 | "startTimestamp": "2018-11-10T01:40:26.000Z", 79 | "garageId": "endeavor", 80 | "garageLevel": "P1", 81 | "dialogAutoCloseSeconds": 5 82 | }, 83 | "apis": { 84 | "baseurl": "", 85 | "alerts": "/es/alerts", 86 | "events": "/es/events", 87 | "kpi": "/stats/endeavor", 88 | "startTimestamp": "2018-11-10T01:40:26.000Z", 89 | "alertEventRefreshIntervalSeconds": 5, 90 | "autoRefreshIntervalMinutes": 30, 91 | "uiDelaySeconds": 0, 92 | "alertEventListLength": 20 93 | } 94 | }, 95 | "backend": { 96 | "cassandraHosts": ["cassandra:9042"], 97 | "cassandraKeyspace": "metromind", 98 | "esHost": "elasticsearch", 99 | "esPort": 9200, 100 | "esAnomalyIndex": "metromind-anomaly-day2*", 101 | "esEventsIndex": "metromind-start-day2*", 102 | "eventCompressionSize":2, 103 | "anomalyEventQuerySize":25, 104 | "parkingSpotConfigFile": "config/parkingSpots.csv", 105 | "sensorType": "Camera", 106 | "garageLevel": "P1", 107 | "webSocketSendPeriodInMs": 500, 108 | "carRemovalPeriodInMs": 5000, 109 | "originLat": 37.287280, 110 | "originLon": -121.9850525 111 | } 112 | } 113 | } -------------------------------------------------------------------------------- /analytics_server_docker/node-apis/config/parkingSpots.csv: -------------------------------------------------------------------------------- 1 | ParkingSpotId 2 | P1-PS-444 3 | P1-PS-443 4 | 
P1-PS-442 5 | P1-PS-441 6 | P1-PS-440 7 | P1-PS-439 8 | P1-PS-438 9 | P1-PS-437 10 | P1-PS-335 11 | P1-PS-334 12 | P1-PS-333 13 | P1-PS-332 14 | P1-PS-331 15 | P1-PS-330 16 | P1-PS-329 17 | P1-PS-328 18 | P1-PS-436 19 | P1-PS-435 20 | P1-PS-434 21 | P1-PS-433 22 | P1-PS-432 23 | P1-PS-431 24 | P1-PS-430 25 | P1-PS-429 26 | P1-PS-327 27 | P1-PS-326 28 | P1-PS-325 29 | P1-PS-324 30 | P1-PS-323 31 | P1-PS-322 32 | P1-PS-321 33 | P1-PS-320 34 | P1-PS-319 35 | P1-PS-318 36 | P1-PS-428 37 | P1-PS-427 38 | P1-PS-426 39 | P1-PS-425 40 | P1-PS-424 41 | P1-PS-423 42 | P1-PS-317 43 | P1-PS-316 44 | P1-PS-315 45 | P1-PS-314 46 | P1-PS-313 47 | P1-PS-312 48 | P1-PS-311 49 | P1-PS-422 50 | P1-PS-421 51 | P1-PS-420 52 | P1-PS-419 53 | P1-PS-418 54 | P1-PS-310 55 | P1-PS-309 56 | P1-PS-308 57 | P1-PS-307 58 | P1-PS-306 59 | P1-PS-305 60 | P1-PS-304 61 | P1-PS-303 62 | P1-PS-52 63 | P1-PS-53 64 | P1-PS-54 65 | P1-PS-55 66 | P1-PS-56 67 | P1-PS-57 68 | P1-PS-58 69 | P1-PS-59 70 | P1-PS-60 71 | P1-PS-61 72 | P1-PS-62 73 | P1-PS-63 74 | P1-PS-64 75 | P1-PS-65 76 | P1-PS-66 77 | P1-PS-67 78 | P1-PS-68 79 | P1-PS-69 80 | P1-PS-70 81 | P1-PS-71 82 | P1-PS-72 83 | P1-PS-73 84 | P1-PS-74 85 | P1-PS-75 86 | P1-PS-76 87 | P1-PS-77 88 | P1-PS-78 89 | P1-PS-79 90 | P1-PS-80 91 | P1-PS-81 92 | P1-PS-82 93 | P1-PS-83 94 | P1-PS-84 95 | P1-PS-85 96 | P1-PS-86 97 | P1-PS-87 98 | P1-PS-88 99 | P1-PS-89 100 | P1-PS-90 101 | P1-PS-91 102 | P1-PS-92 103 | P1-PS-93 104 | P1-PS-213 105 | P1-PS-212 106 | P1-PS-211 107 | P1-PS-210 108 | P1-PS-209 109 | P1-PS-208 110 | P1-PS-207 111 | P1-PS-206 112 | P1-PS-205 113 | P1-PS-204 114 | P1-PS-203 115 | P1-PS-202 116 | P1-PS-201 117 | P1-PS-200 118 | P1-PS-199 119 | P1-PS-198 120 | P1-PS-197 121 | P1-PS-196 122 | P1-PS-195 123 | P1-PS-194 124 | P1-PS-193 125 | P1-PS-192 126 | P1-PS-191 127 | P1-PS-190 128 | P1-PS-189 129 | P1-PS-188 130 | P1-PS-187 131 | P1-PS-186 132 | P1-PS-185 133 | P1-PS-184 134 | P1-PS-183 135 | P1-PS-182 136 | P1-PS-181 137 | P1-PS-51 138 
| P1-PS-50 139 | P1-PS-49 140 | P1-PS-48 141 | P1-PS-47 142 | P1-PS-46 143 | P1-PS-417 144 | P1-PS-416 145 | P1-PS-415 146 | -------------------------------------------------------------------------------- /analytics_server_docker/python/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.6 2 | 3 | WORKDIR /home/python-tracker-module 4 | 5 | COPY tracker.zip . 6 | 7 | RUN apt-get update && apt-get install unzip 8 | 9 | ENV PYTHONPATH /home/python-tracker-module/code 10 | 11 | RUN unzip tracker.zip && rm tracker.zip 12 | 13 | RUN pip install -r requirements.txt 14 | 15 | CMD ["python", "usecasecode/360d/stream_track.py","--sconfig=/home/python-tracker-module/config/config_360d_stream.json","--config=/home/python-tracker-module/config/config_360d.json"] -------------------------------------------------------------------------------- /analytics_server_docker/python/tracker.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/analytics_server_docker/python/tracker.zip -------------------------------------------------------------------------------- /analytics_server_docker/readme-images/architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/analytics_server_docker/readme-images/architecture.png -------------------------------------------------------------------------------- /analytics_server_docker/readme-images/index-creation-1.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/analytics_server_docker/readme-images/index-creation-1.png -------------------------------------------------------------------------------- /analytics_server_docker/readme-images/index-creation-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/analytics_server_docker/readme-images/index-creation-2.png -------------------------------------------------------------------------------- /analytics_server_docker/readme-images/ui.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/analytics_server_docker/readme-images/ui.png -------------------------------------------------------------------------------- /analytics_server_docker/spark/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2015 Getty Images, Inc 2 | 3 | MIT License 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining 6 | a copy of this software and associated documentation files (the 7 | "Software"), to deal in the Software without restriction, including 8 | without limitation the rights to use, copy, modify, merge, publish, 9 | distribute, sublicense, and/or sell copies of the Software, and to 10 | permit persons to whom the Software is furnished to do so, subject to 11 | the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be 14 | included in all copies or substantial portions of the Software. 
15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 19 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 20 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 22 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 23 | -------------------------------------------------------------------------------- /analytics_server_docker/spark/conf/master/spark-defaults.conf: -------------------------------------------------------------------------------- 1 | # Default system properties included when running spark-submit. 2 | # This is useful for setting default environmental settings. 3 | 4 | spark.driver.port 7001 5 | spark.fileserver.port 7002 6 | spark.broadcast.port 7003 7 | spark.replClassServer.port 7004 8 | spark.blockManager.port 7005 9 | spark.executor.port 7006 10 | 11 | spark.broadcast.factory=org.apache.spark.broadcast.HttpBroadcastFactory 12 | spark.port.maxRetries 4 13 | -------------------------------------------------------------------------------- /analytics_server_docker/spark/conf/worker/spark-defaults.conf: -------------------------------------------------------------------------------- 1 | # Default system properties included when running spark-submit. 2 | # This is useful for setting default environmental settings. 
3 | 4 | #spark.driver.port 7101 5 | spark.fileserver.port 7012 6 | spark.broadcast.port 7013 7 | spark.replClassServer.port 7014 8 | spark.blockManager.port 7015 9 | spark.executor.port 7016 10 | 11 | spark.broadcast.factory=org.apache.spark.broadcast.HttpBroadcastFactory 12 | spark.port.maxRetries 4 13 | -------------------------------------------------------------------------------- /analytics_server_docker/spark/data/stream-360-1.0-jar-with-dependencies.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/analytics_server_docker/spark/data/stream-360-1.0-jar-with-dependencies.jar -------------------------------------------------------------------------------- /analytics_server_docker/start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | export IP_ADDRESS=xxx.xxx.xx.xx 4 | 5 | export GOOGLE_MAP_API_KEY= 6 | 7 | sudo -E docker-compose up -d 8 | 9 | sleep 10s 10 | 11 | sudo docker exec -it spark-master ./bin/spark-submit --class com.nvidia.ds.stream.StreamProcessor --master spark://master:7077 --executor-memory 8G --total-executor-cores 4 /tmp/data/stream-360-1.0-jar-with-dependencies.jar 12 | -------------------------------------------------------------------------------- /analytics_server_docker/stop.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | sudo -E docker-compose down --rmi all --volumes --remove-orphans 4 | 5 | sudo docker image prune --filter label=stage=ui-builder --force 6 | -------------------------------------------------------------------------------- /apis/.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/* 2 | .vscode/* 3 | .DS_Store* 
-------------------------------------------------------------------------------- /apis/app/models/ui_config_model.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | // Loading required libraries and config file 4 | var deepcopy = require("deepcopy"); 5 | const config=require('../../config/config.json') 6 | 7 | module.exports = { 8 | /** Reads the config file and sets the url for backend after reading the environment variable and sends the config to UI*/ 9 | getUiConfig: function (req, res, next) { 10 | let configObj = deepcopy(config); 11 | let apiMode=null; 12 | if(configObj.garage.isLive){ 13 | delete configObj.garage.playback; 14 | apiMode="live"; 15 | }else{ 16 | delete configObj.garage.live; 17 | apiMode="playback"; 18 | } 19 | delete configObj.garage.backend; 20 | let hostIpAddress=process.env.IP_ADDRESS; 21 | let backendPort=process.env.NODE_PORT; 22 | configObj.garage[apiMode].webSocket.url="ws://"+hostIpAddress+":"+backendPort; 23 | configObj.garage[apiMode].apis.baseurl="http://"+hostIpAddress+":"+backendPort; 24 | res.json(configObj); 25 | } 26 | } -------------------------------------------------------------------------------- /apis/app/routes/es.js: -------------------------------------------------------------------------------- 1 | var esModel = require('../models/es_model'); 2 | 3 | module.exports = function (router) { 4 | 'use strict'; 5 | 6 | // This will handle the url calls for /es/events-deprecated 7 | router.route('/events-deprecated') 8 | .get(esModel.searchEventsDeprecated) 9 | .post(esModel.searchEventsDeprecated) 10 | ; 11 | 12 | // This will handle the url calls for /es/alerts 13 | router.route('/alerts') 14 | .get(esModel.searchAnomaly) 15 | .post(esModel.searchAnomaly) 16 | ; 17 | 18 | // This will handle the url calls for /es/events 19 | router.route('/events') 20 | .get(esModel.searchEvents) 21 | .post(esModel.searchEvents) 22 | ; 23 | 24 | }; 
-------------------------------------------------------------------------------- /apis/app/routes/index.js: -------------------------------------------------------------------------------- 1 | var changeCase = require('change-case'); 2 | var express = require('express'); 3 | var routes = require('require-dir')(); 4 | var winston = require('winston'); 5 | var logger = winston.createLogger({ 6 | transports: [ 7 | new (winston.transports.Console)({ 'timestamp': true }) 8 | ], 9 | exitOnError: false 10 | }); 11 | 12 | module.exports = function(app) { 13 | 'use strict'; 14 | 15 | logger.info('[ROUTES] Initing routers:' + Object.keys(routes)); 16 | 17 | // Initialize all routes 18 | Object.keys(routes).forEach(function(routeName) { 19 | var router = express.Router(); 20 | // You can add some middleware here 21 | // router.use(someMiddleware); 22 | 23 | logger.info('[ROUTES] Initing router for ' + routeName); 24 | // Initialize the route to add its functionality to router 25 | require('./' + routeName)(router); 26 | 27 | // Add router to the speficied route name in the app 28 | app.use('/' + changeCase.paramCase(routeName), router); 29 | logger.info('[ROUTES] Inited under API call:' + ('/' + changeCase.paramCase(routeName))); 30 | }); 31 | }; -------------------------------------------------------------------------------- /apis/app/routes/stats.js: -------------------------------------------------------------------------------- 1 | var statsModel = require('../models/stats_model'); 2 | 3 | module.exports = function (router) { 4 | 'use strict'; 5 | 6 | // This will handle the url calls for /stats/:garageId 7 | router.route('/:garageId') 8 | .get(statsModel.getStats) 9 | .post(statsModel.getStats) 10 | ; 11 | }; -------------------------------------------------------------------------------- /apis/app/routes/ui-config.js: -------------------------------------------------------------------------------- 1 | var uiConfigModel = require('../models/ui_config_model'); 2 | 3 | 
module.exports = function (router) { 4 | 'use strict'; 5 | 6 | // This will handle the url calls for /ui-config 7 | router.route('/') 8 | .get(uiConfigModel.getUiConfig); 9 | }; -------------------------------------------------------------------------------- /apis/config/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "home": { 3 | "name": "Home", 4 | "username_api": "", 5 | "googleMap": { 6 | "defaultCenter": { 7 | "lat": 37.2667081, 8 | "lng": -121.9852038 9 | }, 10 | "defaultZoom": 14, 11 | "maxZoom": 21, 12 | "minZoom": 10, 13 | "mapTypeControl": true, 14 | "mapTypeId": "roadmap" 15 | }, 16 | "locations": [ 17 | { 18 | "name": "garage", 19 | "lat": 37.287535, 20 | "lng": -121.98473 21 | } 22 | ] 23 | }, 24 | "garage": { 25 | "name": "Garage", 26 | "defaults": { 27 | "level": "P1" 28 | }, 29 | "bounds": { 30 | "north": 37.2886489370708, 31 | "south": 37.2864695830171, 32 | "east": -121.983629765596, 33 | "west": -121.986218361030 34 | }, 35 | "googleMap": { 36 | "defaultCenter": { 37 | "lat": 37.287535, 38 | "lng": -121.98473 39 | }, 40 | "defaultZoom": 19, 41 | "maxZoom": 21, 42 | "minZoom": 10, 43 | "mapTypeControl": true, 44 | "mapTypeId": "roadmap" 45 | }, 46 | "groundOverlay": { 47 | "p1GroundImage": "assets/X-StrpP1_simpleConverted.png", 48 | "p1Bounds": { 49 | "north": 37.2881998, 50 | "south": 37.2863798, 51 | "east": -121.9838699, 52 | "west": -121.9859025 53 | } 54 | }, 55 | "isLive": false, 56 | "live": { 57 | "webSocket": { 58 | "url": "", 59 | "startTimestamp": "", 60 | "garageId": "endeavor", 61 | "garageLevel": "P1", 62 | "dialogAutoCloseSeconds": 5 63 | }, 64 | "apis": { 65 | "baseurl": "", 66 | "alerts": "/es/alerts", 67 | "events": "/es/events", 68 | "kpi": "/stats/endeavor", 69 | "startTimestamp": "", 70 | "alertEventRefreshIntervalSeconds": 5, 71 | "uiDelaySeconds": 30, 72 | "alertEventListLength": 20 73 | } 74 | }, 75 | "playback": { 76 | "webSocket": { 77 | "url": "", 78 | 
"startTimestamp": "2018-11-10T01:40:26.000Z", 79 | "garageId": "endeavor", 80 | "garageLevel": "P1", 81 | "dialogAutoCloseSeconds": 5 82 | }, 83 | "apis": { 84 | "baseurl": "", 85 | "alerts": "/es/alerts", 86 | "events": "/es/events", 87 | "kpi": "/stats/endeavor", 88 | "startTimestamp": "2018-11-10T01:40:26.000Z", 89 | "alertEventRefreshIntervalSeconds": 5, 90 | "autoRefreshIntervalMinutes": 30, 91 | "uiDelaySeconds": 0, 92 | "alertEventListLength": 20 93 | } 94 | }, 95 | "backend": { 96 | "cassandraHosts": ["cassandra:9042"], 97 | "cassandraKeyspace": "metromind", 98 | "esHost": "elasticsearch", 99 | "esPort": 9200, 100 | "esAnomalyIndex": "metromind-anomaly-day2*", 101 | "esEventsIndex": "metromind-start-day2*", 102 | "eventCompressionSize":2, 103 | "anomalyEventQuerySize":25, 104 | "eventApiQueryResultSize":200, 105 | "parkingSpotConfigFile": "config/parkingSpots.csv", 106 | "sensorType": "Camera", 107 | "garageLevel": "P1", 108 | "webSocketSendPeriodInMs": 500, 109 | "carRemovalPeriodInMs": 5000, 110 | "originLat": 37.287280, 111 | "originLon": -121.9850525 112 | } 113 | } 114 | } -------------------------------------------------------------------------------- /apis/config/parkingSpots.csv: -------------------------------------------------------------------------------- 1 | ParkingSpotId 2 | P1-PS-444 3 | P1-PS-443 4 | P1-PS-442 5 | P1-PS-441 6 | P1-PS-440 7 | P1-PS-439 8 | P1-PS-438 9 | P1-PS-437 10 | P1-PS-335 11 | P1-PS-334 12 | P1-PS-333 13 | P1-PS-332 14 | P1-PS-331 15 | P1-PS-330 16 | P1-PS-329 17 | P1-PS-328 18 | P1-PS-436 19 | P1-PS-435 20 | P1-PS-434 21 | P1-PS-433 22 | P1-PS-432 23 | P1-PS-431 24 | P1-PS-430 25 | P1-PS-429 26 | P1-PS-327 27 | P1-PS-326 28 | P1-PS-325 29 | P1-PS-324 30 | P1-PS-323 31 | P1-PS-322 32 | P1-PS-321 33 | P1-PS-320 34 | P1-PS-319 35 | P1-PS-318 36 | P1-PS-428 37 | P1-PS-427 38 | P1-PS-426 39 | P1-PS-425 40 | P1-PS-424 41 | P1-PS-423 42 | P1-PS-317 43 | P1-PS-316 44 | P1-PS-315 45 | P1-PS-314 46 | P1-PS-313 47 | P1-PS-312 48 
| P1-PS-311 49 | P1-PS-422 50 | P1-PS-421 51 | P1-PS-420 52 | P1-PS-419 53 | P1-PS-418 54 | P1-PS-310 55 | P1-PS-309 56 | P1-PS-308 57 | P1-PS-307 58 | P1-PS-306 59 | P1-PS-305 60 | P1-PS-304 61 | P1-PS-303 62 | P1-PS-52 63 | P1-PS-53 64 | P1-PS-54 65 | P1-PS-55 66 | P1-PS-56 67 | P1-PS-57 68 | P1-PS-58 69 | P1-PS-59 70 | P1-PS-60 71 | P1-PS-61 72 | P1-PS-62 73 | P1-PS-63 74 | P1-PS-64 75 | P1-PS-65 76 | P1-PS-66 77 | P1-PS-67 78 | P1-PS-68 79 | P1-PS-69 80 | P1-PS-70 81 | P1-PS-71 82 | P1-PS-72 83 | P1-PS-73 84 | P1-PS-74 85 | P1-PS-75 86 | P1-PS-76 87 | P1-PS-77 88 | P1-PS-78 89 | P1-PS-79 90 | P1-PS-80 91 | P1-PS-81 92 | P1-PS-82 93 | P1-PS-83 94 | P1-PS-84 95 | P1-PS-85 96 | P1-PS-86 97 | P1-PS-87 98 | P1-PS-88 99 | P1-PS-89 100 | P1-PS-90 101 | P1-PS-91 102 | P1-PS-92 103 | P1-PS-93 104 | P1-PS-213 105 | P1-PS-212 106 | P1-PS-211 107 | P1-PS-210 108 | P1-PS-209 109 | P1-PS-208 110 | P1-PS-207 111 | P1-PS-206 112 | P1-PS-205 113 | P1-PS-204 114 | P1-PS-203 115 | P1-PS-202 116 | P1-PS-201 117 | P1-PS-200 118 | P1-PS-199 119 | P1-PS-198 120 | P1-PS-197 121 | P1-PS-196 122 | P1-PS-195 123 | P1-PS-194 124 | P1-PS-193 125 | P1-PS-192 126 | P1-PS-191 127 | P1-PS-190 128 | P1-PS-189 129 | P1-PS-188 130 | P1-PS-187 131 | P1-PS-186 132 | P1-PS-185 133 | P1-PS-184 134 | P1-PS-183 135 | P1-PS-182 136 | P1-PS-181 137 | P1-PS-51 138 | P1-PS-50 139 | P1-PS-49 140 | P1-PS-48 141 | P1-PS-47 142 | P1-PS-46 143 | P1-PS-417 144 | P1-PS-416 145 | P1-PS-415 146 | -------------------------------------------------------------------------------- /apis/index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var server = require('./initializers/server'); 4 | var winston = require('winston'); 5 | var logger = winston.createLogger({ 6 | transports: [ 7 | new (winston.transports.Console)({ 'timestamp': true }) 8 | ], 9 | exitOnError: false 10 | }); 11 | 12 | logger.info('[APP] Starting server initialization'); 13 | 14 | // 
Initialize the server 15 | 16 | server(function(err){ 17 | if (err) { 18 | logger.error('[APP] initialization failed', err); 19 | } else { 20 | logger.info('[APP] initialized SUCCESSFULLY'); 21 | } 22 | }) 23 | -------------------------------------------------------------------------------- /apis/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "360-apis", 3 | "version": "1.0.0", 4 | "description": "This project creates REST API for Metromind (DS-360 use case)", 5 | "main": "index.js", 6 | "dependencies": { 7 | "cassandra-driver": "^3.5.0", 8 | "change-case": "^3.0.2", 9 | "csvtojson": "^2.0.8", 10 | "deepcopy": "^1.0.0", 11 | "elasticsearch": "^15.1.1", 12 | "express": "^4.16.3", 13 | "morgan": "^1.9.1", 14 | "require-dir": "^1.0.0", 15 | "uuid": "^3.3.2", 16 | "winston": "^3.1.0", 17 | "ws": "^6.0.0" 18 | }, 19 | "scripts": { 20 | "start": "node index.js", 21 | "test": "echo \"Error: no test specified\" && exit 1" 22 | }, 23 | "author": "", 24 | "repository": "", 25 | "license": "" 26 | } 27 | -------------------------------------------------------------------------------- /apis/tests/ws-client.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | // A websocket client to test if the websocket is functioning as intended. 4 | // It reads the config from config.json and sends the initial message to server. 5 | // It then console logs the messages that it receives from the websocket. 
6 | 7 | var logger = require('winston'); 8 | var deepcopy = require("deepcopy"); 9 | const config=require('../config/config.json'); 10 | 11 | const hostIpAddress=process.env.IP_ADDRESS; 12 | const backendPort=process.env.NODE_PORT; 13 | 14 | const WS_HOST= "ws://"+hostIpAddress+":"+backendPort; 15 | const webSocket = require('ws'); 16 | 17 | const ws = new webSocket(WS_HOST, { 18 | perMessageDeflate: false 19 | }); 20 | 21 | 22 | ws.on('open', function () { 23 | logger.info("[ON OPEN] Opened conn"); 24 | let configObj = deepcopy(config); 25 | let apiMode=null; 26 | let webSocketStartTimestamp=null; 27 | if(configObj.garage.isLive){ 28 | apiMode="live"; 29 | webSocketStartTimestamp=new Date().toISOString(); 30 | }else{ 31 | apiMode="playback"; 32 | webSocketStartTimestamp=configObj.garage.playback.webSocket.startTimestamp; 33 | } 34 | let garageId=configObj.garage[apiMode].webSocket.garageId; 35 | let garageLevel=configObj.garage[apiMode].webSocket.garageLevel; 36 | let uiDelaySeconds=configObj.garage[apiMode].apis.uiDelaySeconds; 37 | let delayedStartTimestamp=new Date(Date.parse(webSocketStartTimestamp)-(uiDelaySeconds*1000)).toISOString();; 38 | var msg = { startTimestamp: delayedStartTimestamp, garageLevel: garageLevel, garageId: garageId } 39 | ws.send(JSON.stringify(msg), function ack(error) { 40 | if (error) { 41 | logger.info("[RECV UPDATES] Unable to send. 
Closing connection") 42 | } 43 | }); 44 | }); 45 | 46 | ws.on('close', function () { 47 | logger.info('Disconnected'); 48 | }); 49 | 50 | ws.on('error', function (error) { 51 | logger.error("[ON ERR] Sorry connection cannot be opened: " + error); 52 | }); 53 | 54 | ws.on('message', function (message) { 55 | try { 56 | var jsonMessage = JSON.parse(message); 57 | console.log(jsonMessage) 58 | } catch (e) { 59 | logger.error("[MSG ERROR] " + e); 60 | return; 61 | } 62 | }); 63 | -------------------------------------------------------------------------------- /perception_docker/README.md: -------------------------------------------------------------------------------- 1 | ## Setup Instructions 2 | 3 | This document describes the steps involved to run the 360d app using the docker image. 4 | 5 | 6 | 1. Assuming that the application has been cloned from this repository 7 | 8 | $ git clone https://github.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application.git 9 | use the following command to change the current directory. 10 | 11 | $ cd ./deepstream_360_d_smart_parking_application/perception_docker 12 | 13 | Create a directory named 'videos' and change the current directory by executing the command 14 | 15 | $ mkdir videos && cd videos 16 | 17 | Download videos from https://nvidia.app.box.com/s/ezzw0js1ti555vbn3swsggvepcsh3x7e and place it in the 'videos' directory. 18 | 19 | 2. Login to Nvidia container registry (nvcr.io) 20 | 21 | $ docker login nvcr.io 22 | 23 | Enter the username as `$oauthtoken` and copy your NGC APIKey as the password. 24 | Note that the login is sticky and does not have to be repeated every time. 25 | Refer to https://docs.nvidia.com/ngc/ngc-getting-started-guide/index.html for more information. 26 | 27 | 3. Execute the `run.sh` command (sudo maybe required depending on how docker is configured on your system) by going to perception_docker 28 | directory 29 | 30 | 4. 
When the container starts up, edit the config file to set the broker URL (if required). To do so, first install an editor (e.g., nano) 31 | 32 | $ apt-get update 33 | 34 | $ apt-get install nano 35 | 36 | 5. Enable logging (optional) 37 | 38 | $ DeepStream360d_Release/sources/tools/nvds_logger/setup_nvds_logger.sh 39 | 40 | **Note:** the log severity level can be edited in the setup script if desired 41 | -- set it to 7 if logging of entire messages is required 42 | 43 | 6. Run the 360d app
44 | Eg: 45 | 46 | $ deepstream-360d-app -c DeepStream360d_Release/samples/configs/deepstream-360d-app/source10_gpu0.txt 47 | 48 | 49 | **Note:** if there is an error that looks like below, then the broker address is not valid. 50 | 51 | ERROR from nvmsgbroker: Could not configure supporting library. 52 | Debug info: gstnvmsgbroker.c(325): gst_nvmsgbroker_start (): /GstPipeline:pipeline/GstNvMsgBroker:nvmsgbroker: 53 | unable to connect to broker library 54 | -------------------------------------------------------------------------------- /perception_docker/run.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | DOCKERURL=nvcr.io/nvidia/deepstream_360d:5.0-20.08 3 | xhost + 4 | docker pull $DOCKERURL 5 | docker run --gpus all -it --rm -v /tmp/.X11-unix:/tmp/.X11-unix -e DISPLAY=$DISPLAY -w /root nvcr.io/nvidia/deepstream_360d:5.0-20.08 6 | -------------------------------------------------------------------------------- /readme-images/architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/readme-images/architecture.png -------------------------------------------------------------------------------- /readme-images/test.txt: -------------------------------------------------------------------------------- 1 | images folder -------------------------------------------------------------------------------- /stream/.classpath: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | -------------------------------------------------------------------------------- /stream/.gitignore: 
-------------------------------------------------------------------------------- 1 | /target/ 2 | /.settings/ 3 | /checkpoint-flowrate/ 4 | /checkpoint-parked-moving/ 5 | /checkpoint-understay/ 6 | /.cache-main 7 | /.cache-tests 8 | /checkpoint-trajectory/ 9 | /.tmpBin/ 10 | -------------------------------------------------------------------------------- /stream/.project: -------------------------------------------------------------------------------- 1 | 2 | 3 | stream-360 4 | 5 | 6 | 7 | 8 | 9 | org.scala-ide.sdt.core.scalabuilder 10 | 11 | 12 | 13 | 14 | org.eclipse.m2e.core.maven2Builder 15 | 16 | 17 | 18 | 19 | 20 | org.scala-ide.sdt.core.scalanature 21 | org.eclipse.jdt.core.javanature 22 | org.eclipse.m2e.core.maven2Nature 23 | 24 | 25 | -------------------------------------------------------------------------------- /stream/README.md: -------------------------------------------------------------------------------- 1 | 2 | # Streaming Architecture 3 | 4 | ![Streaming Architecture](readme-images/pipeline.png?raw=true "Streaming") 5 | 6 | **Kafka** is used as the message broker in the reference implementation. Processing between module are decoupled using Kafka when needed. 7 | 8 | The stream processing is done using **Apache Spark**. We use a custom python module for “multi-camera-tracking”. The key streaming modules are as below. 9 | 10 | a) The **multi-camera-tracking** is explained in the track project, primarily it is responsible for deduplicating detection of the same object seen by multiple cameras and tracking the object across cameras. 11 | 12 | b) The **preprocessing** module is responsible of validation of every JSON messages that is being send from the perception layer, it maps each JSON message to Strongly typed domain object. 13 | 14 | c) The **anomaly detection** module maintain state of each and every vehicle/object. 
As the trajectory of each vehicle is maintained over time, it is very easy to compute information like speed of vehicle, how long a car has stayed in a particular location, is the car stalled in unexpected location? 15 | 16 | d) **Flowrate** module is used to understand the traffic patterns, flow rate, this involves micro-batching of data over sliding window 17 | 18 | This project implements the last 3 module, multi-camera-tracking is done in python. 19 | 20 | The data is persisted in Cassandra and ElasticSearch. Cassandra is used to maintain the state of the parking garage at a given point of time. The state of the parking Garage comprises of parking spot occupancy, car movement in the aisle, car entry and exit. The application can show the current state of the garage, it also enables playback of event from a given point of time. All data events, anomalies are indexed in ElasticSearch for search, timeseries analytics and dashboard. 21 | 22 | ![Batch Architecture](readme-images/batch.png?raw=true "Batch") 23 | The **Batch** processing is done based on the accumulated data over a period of time. The data ideally should be stored on distributed file system like HDFS or S3, for the Reference application in the docker container as we are not using cloud deployment, and the data is read from the kafka topic itself, the entire data for a given topic is read to do batch processing on it. The output of the batch processing is stored in the persistent store and is consumed by the API. 24 | 25 | ## Anomaly Detection, Stateful stream processing 26 | 27 | The implementation is based on Apache Spark structured streaming, a fault tolerant streaming engine. As maintaining trajectories required advanced stateful operations, it uses *mapGroupsWithState* operation. The API allow maintaining user-defined per-group state between triggers for a steaming dataframe. The timeout is set to cleanup any state which has not seen any activity for a configurable period of time. 
It is important to note that the reference implementation uses processing time, which is based on the clock time; hence it could be affected by changes in the system clock, for example clock skew.
docker container picks up the jar file from spark/data 63 | 64 | ./bin/spark-submit --class com.nvidia.ds.stream.StreamProcessor --master spark://master:7077 --executor-memory 4G --total-executor-cores 4 /tmp/data/stream-360-1.0-jar-with-dependencies.jar 65 | 66 | 67 | 68 | 69 | ### Start Spark batch job, 70 | This detects "overstay" anomaly 71 | 72 | 73 | ./bin/spark-submit --class com.nvidia.ds.batch.BatchAnomaly --master spark://master:7077 --executor-memory 4G --total-executor-cores 4 /tmp/data/stream-360-1.0-jar-with-dependencies.jar 74 | -------------------------------------------------------------------------------- /stream/readme-images/anomaly.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/stream/readme-images/anomaly.png -------------------------------------------------------------------------------- /stream/readme-images/batch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/stream/readme-images/batch.png -------------------------------------------------------------------------------- /stream/readme-images/pipeline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/stream/readme-images/pipeline.png -------------------------------------------------------------------------------- /stream/src/main/resources/blacklist.txt: -------------------------------------------------------------------------------- 1 | A2J044 2 | JQR82M 3 | QFEO7T 4 | J8HIDE 5 | 4I5F8N 6 | EU38H8 7 | TMB7SD 8 | 7JU1CK 9 | F18DCK 10 | IQ64K5 11 | 1V1V95 12 | 5H9S0L 13 | A990MN 14 | 
-------------------------------------------------------------------------------- /stream/src/main/resources/docker-config.txt: -------------------------------------------------------------------------------- 1 | kafka-brokers = kafka:9092 2 | cassandra-hosts = cassandra 3 | cassandra-namespace = metromind 4 | elasticsearch-hosts = localhost 5 | topic = metromind-start 6 | countTopic = metromind-count 7 | puck-topic = metromind-puck 8 | blacklist = /blacklist.txt 9 | whitelist = /whitelist.txt 10 | anomalyTopic = metromind-anomaly 11 | eventsTopic = metromind-events 12 | understay = 5 minutes 13 | overstay = 24 hours -------------------------------------------------------------------------------- /stream/src/main/resources/local-config.txt: -------------------------------------------------------------------------------- 1 | kafka-brokers = kafka1.data.nvidiagrid.net:9092,kafka2.data.nvidiagrid.net:9092,kafka3.data.nvidiagrid.net:9092 2 | cassandra-hosts = cassandra1.data.nvidiagrid.net,cassandra2.data.nvidiagrid.net 3 | cassandra-namespace = metromind 4 | elasticsearch-hosts = localhost 5 | topic = metromind-its-start 6 | blacklist = /blacklist.txt 7 | whitelist = /whitelist.txt 8 | anomalyTopic = metromind-anomaly 9 | eventsTopic = metromind-events 10 | understay = 5 minutes 11 | overstay = 24 hours -------------------------------------------------------------------------------- /stream/src/main/resources/log4j.properties: -------------------------------------------------------------------------------- 1 | # Change this to set Spark log level 2 | log4j.logger.org.apache.spark=WARN 3 | 4 | # Silence akka remoting 5 | log4j.logger.Remoting=WARN 6 | 7 | # Ignore messages below warning level from Jetty, because it's a bit verbose 8 | log4j.logger.org.eclipse.jetty=WARN 9 | 10 | log4j.logger.org.apache.kafka=WARN 11 | 12 | 13 | 14 | 15 | log4j.rootLogger=WARN, stdout 16 | log4j.appender.stdout=org.apache.log4j.ConsoleAppender 17 | log4j.appender.stdout.Target=System.out 
18 | log4j.appender.stdout.layout=org.apache.log4j.PatternLayout 19 | log4j.appender.stdout.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n -------------------------------------------------------------------------------- /stream/src/main/resources/whitelist.txt: -------------------------------------------------------------------------------- 1 | BO9AFE 2 | B94SF6 3 | RL16L6 4 | 4X3128 5 | 26A05Q 6 | ODSR2L 7 | CVXU6B 8 | MIT2XC 9 | 2YMVYW 10 | K6NJ6D 11 | EZFRH6 12 | -------------------------------------------------------------------------------- /stream/src/main/scala/com/nvidia/ds/stream/Trajectory.scala: -------------------------------------------------------------------------------- 1 | package com.nvidia.ds.stream 2 | 3 | import math._ 4 | import com.nvidia.ds.util.Coordinate 5 | import java.sql.Timestamp 6 | import com.nvidia.ds.util.Message 7 | 8 | 9 | 10 | 11 | /** 12 | * trajectory class, based on local coordinates, Euclidean space 13 | */ 14 | 15 | case class Trajectory( 16 | id:String, 17 | start: Timestamp, 18 | end: Timestamp, 19 | records: List[Message]) { 20 | 21 | /** 22 | * trajectory based on local coordinates, Euclidean space 23 | */ 24 | val trajectory = records.map(x => x.`object`.coordinate) 25 | def trajectoryLen = trajectory.length 26 | 27 | val head = trajectory.head 28 | val last = trajectory.last 29 | 30 | /** 31 | * smoothen trajectory based on local coordinates, Euclidean space 32 | */ 33 | def smoothTrajectory = { 34 | 35 | if (trajectoryLen < 100) { 36 | 37 | trajectory 38 | } else { 39 | 40 | val t = trajectory.sliding(5).map { l => 41 | 42 | Coordinate( 43 | l.map(c => c.x).sum / l.length, 44 | l.map(c => c.y).sum / l.length, 45 | l.map(c => c.z).sum / l.length) 46 | 47 | } 48 | 49 | trajectory.head +: t.toList :+ trajectory.last 50 | 51 | } 52 | 53 | } 54 | 55 | def smoothTrajectoryLen = smoothTrajectory.length 56 | 57 | /** 58 | * approximate distance computed based on movement of object for each point in the 
trajectory 59 | */ 60 | def distance = { 61 | 62 | val p1 = trajectory.head 63 | val p2 = trajectory.last 64 | 65 | if (smoothTrajectory.length < 2) { 66 | 0 67 | } else smoothTrajectory.sliding(2).map(x => euclideanDistance(x(0), x(1))).sum 68 | 69 | } 70 | 71 | /** 72 | * kms / hour 73 | */ 74 | def speed = { 75 | 76 | val t = (end.getTime - start.getTime) / 1000 //seconds 77 | 78 | if (t == 0) { 79 | t 80 | } else 81 | distance * 3.6 / t 82 | } 83 | 84 | /** 85 | * straight line distance between to start and end of trajectory 86 | */ 87 | def linearDistance = { 88 | 89 | val p1 = smoothTrajectory.head 90 | val p2 = smoothTrajectory.last 91 | 92 | sqrt(pow(p1.x - p2.x, 2) + pow(p1.y - p2.y, 2)) 93 | 94 | } 95 | 96 | /** 97 | * length of the trajectory in seconds 98 | */ 99 | def timeInterval = { 100 | (end.getTime - start.getTime)/1000.0 101 | } 102 | 103 | /** 104 | * function to compute euclidean distance between two points 105 | */ 106 | private def euclideanDistance(p1: Coordinate, p2: Coordinate) = { 107 | 108 | sqrt(pow(p1.x - p2.x, 2) + pow(p1.y - p2.y, 2)) 109 | 110 | } 111 | 112 | 113 | override def toString() = { 114 | f" moving at $speed%4.2f km/hr, covered $distance%4.2f meters in $timeInterval%4.2f seconds, id = $id" 115 | } 116 | 117 | } 118 | -------------------------------------------------------------------------------- /stream/src/main/scala/com/nvidia/ds/util/Encryptor.scala: -------------------------------------------------------------------------------- 1 | package com.nvidia.ds.util 2 | 3 | import javax.crypto.Cipher; 4 | import javax.crypto.spec.IvParameterSpec; 5 | import javax.crypto.spec.SecretKeySpec; 6 | import java.util.Base64 7 | import javax.crypto.SecretKeyFactory 8 | import javax.crypto.spec.PBEKeySpec 9 | 10 | /** 11 | * 12 | * 13 | * Encryption / Decryption utility 14 | */ 15 | object Encryptor { 16 | 17 | //do not use this keys for production 18 | val key = "1234567890abcdef1234567890abcdef"; // 256 bit key 19 | val initVector 
= "1234567890abcdef"; // 16 bytes IV 20 | 21 | val iv = new IvParameterSpec(initVector.getBytes("UTF-8")); 22 | 23 | val skeySpec = new SecretKeySpec(key.getBytes("UTF-8"), "AES"); 24 | 25 | //val skeySpec1 = generateKey(key) 26 | //val skeySpec2 = generateKey(key) 27 | 28 | //AES/CBC/PKCS5Padding or AES/CBC/PKCS5PADDING 29 | val eCipher = Cipher.getInstance("AES/CBC/PKCS5PADDING"); 30 | eCipher.init(Cipher.ENCRYPT_MODE, skeySpec, iv); 31 | 32 | val dCipher = Cipher.getInstance("AES/CBC/PKCS5PADDING"); 33 | dCipher.init(Cipher.DECRYPT_MODE, skeySpec, iv); 34 | 35 | /** 36 | * returns base 64 encoded string after encryption 37 | */ 38 | def encrypt(plainText: String) = this.synchronized{ 39 | Base64.getEncoder.encodeToString(eCipher.doFinal(plainText.getBytes("UTF-8"))) 40 | 41 | } 42 | 43 | /** 44 | * ' 45 | * returns the de-crypted text 46 | */ 47 | def decrypt(encryptedText: String) = this.synchronized{ 48 | val decoded = Base64.getDecoder.decode(encryptedText.getBytes("UTF-8")) 49 | 50 | new String(dCipher.doFinal(decoded)) 51 | 52 | } 53 | 54 | def generateKey(passphrase: String) = { 55 | val factory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1"); 56 | val spec = new PBEKeySpec(passphrase.toCharArray(), Integer.toHexString(100).getBytes, 1000, 256); 57 | val key = new SecretKeySpec(factory.generateSecret(spec).getEncoded(), "AES"); 58 | key; 59 | 60 | } 61 | 62 | def main(args: Array[String]) { 63 | 64 | for (i <- 1 to 1000000) { 65 | val e = Encryptor.encrypt("MTH7ARA") 66 | println("encrypted : " + e) 67 | 68 | val d = Encryptor.decrypt(e) 69 | println("decrypted : " + d + " " + d.length()) 70 | } 71 | 72 | } 73 | 74 | } -------------------------------------------------------------------------------- /stream/src/main/scala/com/nvidia/ds/util/KProducer.scala: -------------------------------------------------------------------------------- 1 | package com.nvidia.ds.util 2 | 3 | import org.apache.kafka.clients.producer.KafkaProducer 4 | import 
com.google.gson.GsonBuilder 5 | import java.util.Properties 6 | import org.apache.kafka.clients.producer.ProducerRecord 7 | import org.apache.kafka.clients.admin.AdminClient 8 | import org.apache.kafka.clients.admin.ListTopicsOptions 9 | 10 | /** 11 | * wrapper around kafka producer with default properties 12 | */ 13 | class KProducer(val brokers: String) extends Serializable { 14 | 15 | /** 16 | * init the kafka producer properties 17 | */ 18 | val props = new Properties(); 19 | props.put("bootstrap.servers", brokers); 20 | props.put("retries", "0"); 21 | props.put("linger.ms", "1"); 22 | props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); 23 | props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); 24 | 25 | /** 26 | * validate connection 27 | */ 28 | try { 29 | // ... 30 | val adminClient = AdminClient.create(props) 31 | adminClient.listTopics(new ListTopicsOptions().timeoutMs(500).listInternal(true)).names().get 32 | 33 | } catch { 34 | case e: Exception => { 35 | println(f"Invalid connection URL $brokers \n") 36 | 37 | e.printStackTrace() 38 | 39 | System.exit(-1) 40 | } 41 | } 42 | 43 | /** 44 | * create kafka producer 45 | */ 46 | val p = new KafkaProducer[String, String](props) 47 | 48 | /** 49 | * used for testing 50 | */ 51 | private[util] def send(e: Message) = { 52 | 53 | val topic = "metromind-start" 54 | 55 | val gson = new GsonBuilder() 56 | .setDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'") 57 | .create() 58 | 59 | val json = gson.toJson(e).replace("timestamp", "@timestamp") 60 | 61 | val key = if (e.event.`type` == "reset") "reset" else e.sensor.id 62 | //should be based on number of DS box 63 | val partition = key.hashCode() % 8 64 | 65 | p.send(new ProducerRecord[String, String](topic, partition, key, json)) 66 | 67 | } 68 | 69 | /** 70 | * send message to a specified topic 71 | */ 72 | def send(topic: String, partition: Int, key: String, value: String) { 73 | p.send(new 
ProducerRecord[String, String](topic, partition, key, value)) 74 | } 75 | 76 | /** 77 | * close the producer 78 | */ 79 | def close = p.close() 80 | 81 | } -------------------------------------------------------------------------------- /stream/src/main/scala/com/nvidia/ds/util/Playback.scala: -------------------------------------------------------------------------------- 1 | package com.nvidia.ds.util 2 | 3 | object Playback extends App { 4 | 5 | val appName = getClass.getCanonicalName 6 | @transient val sys = System.getProperty("os.name") 7 | 8 | 9 | 10 | val usage = """ 11 | Usage: mvn exec:java -Dexec.mainClass=com.nvidia.ds.util.Playback -Dexec.args = KAFKA_BROKER_IP_ADDRESS:PORT [--input-file inputFile] [--topic-name topic] 12 | """ 13 | 14 | if (args.length == 0) println(usage) 15 | val arglist = args.toList 16 | type OptionMap = Map[String, Any] 17 | 18 | def nextOption(map: OptionMap, list: List[String]): OptionMap = { 19 | def isSwitch(s: String) = (s(0) == '-') 20 | list match { 21 | case Nil => map 22 | case "--input-file" :: value :: tail => 23 | nextOption(map ++ Map("input-file" -> value.trim()), tail) 24 | case "--topic-name" :: value :: tail => 25 | nextOption(map ++ Map("topic-name" -> value.trim()), tail) 26 | case string :: opt2 :: tail if isSwitch(opt2) => 27 | nextOption(map ++ Map("broker-url" -> string), list.tail) 28 | case string :: Nil => nextOption(map ++ Map("broker-url" -> string), list.tail) 29 | case option :: tail => 30 | println("Unknown option " + option) 31 | System.exit(1); Map() 32 | } 33 | } 34 | val options = nextOption(Map(), arglist) 35 | println(usage) 36 | println(options) 37 | 38 | val inputFile = if (options.isDefinedAt("input-file")) options("input-file").toString() else "data/demo-1.json" 39 | val topicName = if (options.isDefinedAt("topic-name")) options("topic-name").toString() else "metromind-start" 40 | 41 | val data = Util.readData(inputFile).toList.sortBy(x => x.timestamp.getTime) 42 | 43 | val config = 
Util.readConfig("/local-config.txt") 44 | 45 | val brokers = options("broker-url").toString() 46 | 47 | val producer = new KProducer(brokers) 48 | 49 | val (newlat, newlon) = (37.371160038718216, -123.9717545322192) 50 | val (oldlat, oldlon) = (37.3498233, -121.9675349) 51 | 52 | val (latOffset, lonOffet) = (newlat - oldlat, newlon - oldlon) 53 | 54 | var tt = data(0).timestamp.getTime 55 | data.foreach { x => 56 | 57 | val m = if (x.event.`type` == "parked" || x.event.`type` == "empty") { 58 | val p = x.place.parkingSpot 59 | val id = if (p.id contains "PS") p.id else p.level + "-PS-" + p.id 60 | val p_ = p.copy(id = id) 61 | 62 | val place = x.place.copy(parkingSpot = p_) 63 | 64 | x.copy(place = place) 65 | 66 | } else x 67 | 68 | println(x.timestamp + " " + m.event.`type` + " " + m.`object`.vehicle.license + " " + m.place.parkingSpot + " " + m.sensor) 69 | 70 | val json = Util.jsonString(m) 71 | val partition = math.abs(m.event.`type`.hashCode() % 8) 72 | 73 | val d = m.timestamp.getTime - tt 74 | Thread.sleep(d) 75 | tt = m.timestamp.getTime 76 | producer.send(topicName, partition, m.sensor.id, json) 77 | 78 | } 79 | 80 | producer.close 81 | 82 | } -------------------------------------------------------------------------------- /stream/src/main/scala/com/nvidia/ds/util/Util.scala: -------------------------------------------------------------------------------- 1 | package com.nvidia.ds.util 2 | 3 | import java.sql.Timestamp 4 | import java.text.SimpleDateFormat 5 | import java.time.LocalDateTime 6 | import java.time.ZoneOffset 7 | import java.util.Calendar 8 | import java.util.TimeZone 9 | 10 | import org.json4s.DefaultFormats 11 | 12 | import com.google.gson.GsonBuilder 13 | 14 | 15 | /** 16 | * util class 17 | */ 18 | 19 | object Util extends Serializable { 20 | 21 | 22 | /** 23 | * default date format 24 | */ 25 | def defaultFormat = 26 | new DefaultFormats { 27 | override def dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'") 28 | } 29 | 
30 | 31 | /** 32 | * GsonBuilder 33 | */ 34 | def gson = new GsonBuilder() 35 | .setDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'") 36 | .create() 37 | 38 | /** 39 | * converts an object to Json string 40 | */ 41 | def jsonString(e: Any) = gson.toJson(e).replace("timestamp", "@timestamp") 42 | 43 | 44 | /** 45 | * read configuration 46 | */ 47 | def readConfig(path: String) = { 48 | val f = scala.io.Source.fromInputStream(getClass.getResourceAsStream(path)) 49 | val lines = f.getLines().map { x => 50 | { 51 | val reg = x.split("=", 2) 52 | 53 | (reg(0).trim(), reg(1).trim()) 54 | } 55 | } 56 | lines.toList.filterNot(p => p._1 == "" || p._2 == "").map(x => x._1 -> x._2).toMap 57 | } 58 | 59 | /** 60 | * use to read set, backlist or whitelist license plate 61 | */ 62 | def readSet(path: String) = { 63 | val f = scala.io.Source.fromInputStream(getClass.getResourceAsStream(path)) 64 | val lines = f.getLines().map { x => 65 | //println(x) 66 | x.trim 67 | 68 | } 69 | lines.toList.toSet 70 | } 71 | 72 | 73 | /** 74 | * safe string to int 75 | */ 76 | def safeStringToInt(str: String): Option[Int] = { 77 | import scala.util.control.Exception._ 78 | catching(classOf[NumberFormatException]) opt str.toInt 79 | } 80 | 81 | 82 | 83 | 84 | val config = readConfig("/local-config.txt") 85 | 86 | 87 | /** 88 | * convert current local time to UTC 89 | */ 90 | def localToGMT() = { 91 | val utc = LocalDateTime.now(ZoneOffset.UTC) 92 | Timestamp.valueOf(utc) 93 | } 94 | 95 | /** 96 | * convert input timestamp to UTC 97 | */ 98 | def to_utc(t: Timestamp) = { 99 | new Timestamp(t.getTime() - Calendar.getInstance().getTimeZone().getOffset(t.getTime())); 100 | } 101 | 102 | 103 | /** 104 | * compute time X hours ago 105 | */ 106 | def timeXhoursAgo(hours: Long) = { 107 | 108 | val utc = LocalDateTime.now(ZoneOffset.UTC) 109 | 110 | val xhours = utc.minusHours(hours) 111 | 112 | Timestamp.valueOf(xhours) 113 | 114 | } 115 | 116 | 117 | /** 118 | * convert string to UTC time 119 | */ 120 | def 
to_utc_from_String(d: String) = { 121 | 122 | val dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'") 123 | 124 | dateFormatter.setTimeZone(TimeZone.getTimeZone("UTC")) 125 | 126 | val date = dateFormatter.parse(d) 127 | 128 | val ts = new Timestamp(date.getTime()); 129 | 130 | Util.to_utc(ts) 131 | 132 | } 133 | 134 | 135 | /** 136 | * read JSON data to list to Message Object 137 | */ 138 | def readData(path: String = "data/traffic.txt") = { 139 | val f = scala.io.Source.fromFile(path) 140 | 141 | //test 142 | import org.json4s._ 143 | import org.json4s.native.JsonMethods._ 144 | 145 | implicit val formats = new DefaultFormats { 146 | override def dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'") 147 | } 148 | 149 | val records = f.getLines().map { x => 150 | 151 | val p = parse(x.replace("@timestamp", "timestamp")) 152 | 153 | val e = p.extract[Message] 154 | 155 | e 156 | 157 | } 158 | 159 | records 160 | 161 | } 162 | 163 | val enableEncrption = false 164 | 165 | /** 166 | * encrypt license plate 167 | */ 168 | def encryptLicense(obj: Object) = { 169 | 170 | if (enableEncrption) { 171 | 172 | val l = obj.vehicle.license 173 | 174 | if (l == null || l.trim() == "") { 175 | obj 176 | } else { 177 | val ll = Encryptor.encrypt(l) 178 | val v = obj.vehicle.copy(license = ll) 179 | val o = obj.copy(vehicle = v) 180 | 181 | o 182 | 183 | } 184 | } else obj 185 | 186 | } 187 | 188 | def uuid = java.util.UUID.randomUUID.toString 189 | 190 | 191 | 192 | 193 | 194 | } 195 | 196 | -------------------------------------------------------------------------------- /stream/src/main/scala/com/nvidia/ds/util/UtilSpark.scala: -------------------------------------------------------------------------------- 1 | package com.nvidia.ds.util 2 | 3 | import org.apache.spark.sql.SparkSession 4 | import org.apache.spark.sql.types.StructType 5 | import org.apache.spark.sql.types.DoubleType 6 | import org.apache.spark.sql.types.ArrayType 7 | 8 | object 
UtilSpark { 9 | 10 | 11 | def uuid = java.util.UUID.randomUUID.toString 12 | 13 | 14 | /** 15 | * schema to convert JSON into Message object 16 | */ 17 | def mesageSchema(spark: SparkSession) = { 18 | 19 | import spark.implicits._ 20 | 21 | val location = new StructType() 22 | .add($"lat".double) 23 | .add($"lon".double) 24 | .add($"alt".double) 25 | 26 | val coordinate = new StructType() 27 | .add($"x".double) 28 | .add($"y".double) 29 | .add($"z".double) 30 | 31 | val vehicleSchema = new StructType() 32 | .add($"type".string) 33 | .add($"license".string) 34 | .add($"licenseState".string) 35 | .add($"color".string) 36 | .add($"confidence".float) 37 | .add($"model".string) 38 | .add($"make".string) 39 | 40 | val event = new StructType() 41 | .add($"id".string) 42 | .add($"type".string) 43 | 44 | .add($"source".string) 45 | .add($"email".string) 46 | 47 | val bbox = new StructType() 48 | .add($"topleftx".double) 49 | .add($"toplefty".double) 50 | .add($"bottomrightx".double) 51 | .add($"bottomrighty".double) 52 | 53 | val analyticsModule = new StructType() 54 | .add($"id".string) 55 | .add($"description".string) 56 | //.add($"confidence".double) 57 | .add($"source".string) 58 | .add($"version".string) 59 | 60 | val sensor = new StructType() 61 | .add($"id".string) 62 | .add($"type".string) 63 | .add("location", location) 64 | .add($"description".string) 65 | .add("coordinate", coordinate) 66 | 67 | val entrance = new StructType() 68 | .add($"name".string) 69 | .add($"lane".string) 70 | .add($"level".string) 71 | .add("coordinate", coordinate) 72 | 73 | val parkingSpot = new StructType() 74 | .add($"id".string) 75 | .add($"type".string) 76 | .add($"level".string) 77 | .add("coordinate", coordinate) 78 | 79 | val aisle = new StructType() 80 | .add($"id".string) 81 | .add($"name".string) 82 | .add($"level".string) 83 | .add("coordinate", coordinate) 84 | 85 | val place = new StructType() 86 | .add($"id".string) 87 | .add($"name".string) 88 | .add($"type".string) 89 | 
.add("location", location) 90 | .add("entrance", entrance) 91 | .add("parkingSpot", parkingSpot) 92 | .add("aisle", aisle) 93 | 94 | val obj = new StructType() 95 | .add($"id".string) 96 | .add("vehicle", vehicleSchema) 97 | .add("bbox", bbox) 98 | .add("signature", ArrayType(DoubleType, false)) 99 | .add($"speed".double) 100 | .add($"direction".double) 101 | .add($"orientation".double) 102 | .add("location", location) 103 | .add("coordinate", coordinate) 104 | 105 | val message = new StructType() 106 | .add($"messageid".string) 107 | .add($"mdsversion".string) 108 | .add($"@timestamp".timestamp) 109 | .add($"used".int) 110 | .add("place", place) 111 | .add("sensor", sensor) 112 | .add("analyticsModule", analyticsModule) 113 | .add("object", obj) 114 | .add("event", event) 115 | .add($"videoPath".string) 116 | 117 | message 118 | 119 | } 120 | 121 | } -------------------------------------------------------------------------------- /stream/src/main/scala/com/nvidia/ds/util/ValidateJson.scala: -------------------------------------------------------------------------------- 1 | package com.nvidia.ds.util 2 | 3 | import java.sql.Timestamp 4 | 5 | import org.apache.spark.sql.SparkSession 6 | import org.apache.spark.sql.functions.udf 7 | import org.apache.spark.sql.streaming.Trigger 8 | 9 | import com.github.fge.jackson.JsonLoader 10 | import com.github.fge.jsonschema.main.JsonSchemaFactory 11 | 12 | /** 13 | * validate json messages based on json schema 14 | */ 15 | object ValidateJson { 16 | 17 | @transient val sys = System.getProperty("os.name") 18 | 19 | val appName = getClass.getCanonicalName 20 | 21 | @transient lazy val log = org.apache.log4j.LogManager.getLogger(getClass.getCanonicalName) 22 | 23 | def main(args: Array[String]) { 24 | 25 | val config = if (sys.startsWith("Mac")) Util.readConfig("/local-config.txt") else Util.readConfig("/docker-config.txt") 26 | val brokers = config("kafka-brokers") //"kafka1.data.nvidiagrid.net:9092" // 27 | val topic = 
config("topic") 28 | 29 | val spark = if (sys.startsWith("Mac")) { 30 | SparkSession 31 | .builder 32 | .appName(appName) 33 | .master("local[8]") 34 | .getOrCreate() 35 | } else { 36 | SparkSession 37 | .builder 38 | .appName(appName) 39 | .getOrCreate() 40 | } 41 | 42 | spark.conf.set("spark.sql.shuffle.partitions", 8) 43 | 44 | import spark.implicits._ 45 | 46 | val validateJson = udf { 47 | 48 | (value: String) => 49 | { 50 | 51 | import scala.collection.JavaConversions._ 52 | 53 | var t = (true, "") 54 | 55 | try { 56 | 57 | val r = ValidateJson.validateJson(value) 58 | 59 | t = (r.isSuccess(), r.iterator().toList.map(x => x.asJson()).mkString("\n")) 60 | 61 | } catch { 62 | case e: Exception => { t = (false, e.getMessage) } 63 | } 64 | 65 | t 66 | 67 | } 68 | 69 | } 70 | 71 | /** 72 | * read event stream 73 | */ 74 | val events = spark.readStream 75 | .format("kafka") 76 | .option("kafka.bootstrap.servers", brokers) 77 | .option("subscribe", topic) 78 | .option("startingOffsets", "latest") 79 | .option("failOnDataLoss", false) 80 | .load() 81 | .selectExpr("timestamp", "CAST(key AS STRING)", "CAST(value AS STRING)") 82 | .select(validateJson($"value").alias("error"), $"value") 83 | .select($"error.*", $"value") 84 | .toDF("flag", "message", "value") 85 | 86 | 87 | /** 88 | * for now print into console, most appropriate is to put into Elasticsearch to history 89 | */ 90 | val consoleQuery = events //.select("event","place")//.filter($"event.type" === "moving") //.withColumn("@timestamp", to_utc_timestamp($"timestamp","PST")) 91 | 92 | .filter(!$"flag") 93 | .writeStream 94 | .trigger(Trigger.ProcessingTime("60 seconds")) 95 | .outputMode("Append") 96 | .format("console") 97 | .option("truncate", "false") 98 | 99 | consoleQuery.start() 100 | 101 | spark.streams.awaitAnyTermination() 102 | 103 | spark.stop() 104 | 105 | } 106 | 107 | /** 108 | * json schema factory 109 | */ 110 | val factory = JsonSchemaFactory.byDefault() 111 | 112 | /** 113 | * json 
schema-string into json object 114 | */ 115 | val schemaJ = load("/schema/day2.json") 116 | 117 | 118 | /** 119 | * json schema object 120 | */ 121 | val schema = factory.getJsonSchema(schemaJ) 122 | 123 | /** 124 | * Load one resource from the current package as a {@link JsonNode} 125 | * 126 | * @param name name of the resource (MUST start with {@code /} 127 | * @return a JSON document 128 | * @throws IOException resource not found 129 | */ 130 | 131 | def load(name: String) = { 132 | JsonLoader.fromResource(name) 133 | } 134 | 135 | /** 136 | * load json string into a json object and validate against the schema 137 | */ 138 | def validateJson(json: String) = { 139 | val j = JsonLoader.fromString(json) 140 | val r = schema.validate(j) 141 | r 142 | } 143 | 144 | } -------------------------------------------------------------------------------- /stream/src/test/scala/com/nvidia/ds/util/KProducerSpec.scala: -------------------------------------------------------------------------------- 1 | package com.nvidia.ds.util 2 | 3 | import org.scalatest.Matchers 4 | import org.scalatest.FlatSpec 5 | 6 | class KProducerSpec extends FlatSpec with Matchers { 7 | 8 | "KProducer" should "send data" in { 9 | 10 | val config = Util.readConfig("/local-config.txt") 11 | 12 | val brokers = config("kafka-brokers") 13 | 14 | val producer = new KProducer(brokers) 15 | 16 | //sent reset before start 17 | { 18 | val p = Place() 19 | val e = Event("1.0", "reset") 20 | 21 | val resetM = Message(TestMessage.uuid, "1.0", Util.localToGMT(), 0, p, null, null, null, event = e, videoPath = null) 22 | 23 | val json = Util.jsonString(resetM) 24 | //producer.send("metromind-start", 0, "reset", json) 25 | 26 | } 27 | 28 | } 29 | 30 | } -------------------------------------------------------------------------------- /stream/src/test/scala/com/nvidia/ds/util/package-info.java: -------------------------------------------------------------------------------- 1 | /** 2 | * 3 | */ 4 | /** 5 | * @author 
sujitb 6 | * 7 | */ 8 | package com.nvidia.ds.util; -------------------------------------------------------------------------------- /tracker/.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__/ 2 | *.py[cod] 3 | *$py.class 4 | .vscode/ 5 | .DS_Store 6 | 7 | *.log 8 | *.csv 9 | *.out 10 | *.txt 11 | 12 | -------------------------------------------------------------------------------- /tracker/code/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/tracker/code/__init__.py -------------------------------------------------------------------------------- /tracker/code/euclidean/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/tracker/code/euclidean/__init__.py -------------------------------------------------------------------------------- /tracker/code/geo/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/tracker/code/geo/__init__.py -------------------------------------------------------------------------------- /tracker/code/geo/core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/tracker/code/geo/core/__init__.py -------------------------------------------------------------------------------- /tracker/code/geo/core/conversions.py: -------------------------------------------------------------------------------- 1 | 
def poly_lines_to_lines(polyline_list):
    """Flatten a collection of polylines into simple two-point lines.

    Each polyline contributes one line per consecutive pair of points.

    Arguments:
        polyline_list {list} -- Set of polylines

    Returns:
        list -- Set of lines, each a [start_point, end_point] pair
    """
    segments = []
    for polyline in polyline_list:
        # Pair each point with its successor; a polyline with fewer than
        # two points yields no segments.
        segments.extend([start, end] for start, end in zip(polyline, polyline[1:]))
    return segments
def get_polylines_from_shape_file(sh_file):
    """Read all polyline shapes from a shapefile.

    Non-polyline shapes are reported and skipped. Each returned feature is a
    dict with a "points" key (the polyline vertices) plus one entry per
    shapefile record field.

    Arguments:
        sh_file {[string]} -- the shapefile name

    Returns:
        [list] -- list of polyline feature dictionaries
    """
    reader = shapefile.Reader(sh_file)

    # First shapefile field is the deletion flag; skip it. Fall back to a
    # positional name when a field definition is empty.
    field_names = []
    if reader.fields:
        for idx, fld in enumerate(reader.fields[1:]):
            field_names.append(fld[0] if fld else "field_{}".format(idx))

    feature_list = []
    for shape_rec in reader.shapeRecords():
        if shape_rec.shape.shapeType != shapefile.POLYLINE:
            print("ERROR: The function reads only polylines. The input indicates"
                  " some other shape (shapeType={}). Skipping this shape."
                  .format(shape_rec.shape.shapeType))
            continue

        # NOTE(review): unlike the polygon reader, all points are kept here
        # (no end-point trimming) — confirm that is intended for polylines.
        feature = {"points": list(shape_rec.shape.points)}
        feature.update({name: shape_rec.record[i]
                        for i, name in enumerate(field_names)})
        feature_list.append(feature)

    return feature_list
CLUSTER_DIFFT_CAMERAS_LARGE_SCALE_FACTOR = 10.0 40 | 41 | CARRY_OVER_LIST_PRUNE_TIME_IN_SEC = 2.5 42 | # HOLD_FOR_PARKED_CAR_PRUNE_TIME_IN_SEC = 30.0 43 | # HOLD_FOR_PULLED_CAR_PRUNE_TIME_IN_SEC = 30.0 44 | HOLD_FOR_PARKED_CAR_PRUNE_TIME_IN_SEC = 0.0 45 | HOLD_FOR_PULLED_CAR_PRUNE_TIME_IN_SEC = 0.0 46 | 47 | # Distance normalization thresholds 48 | DEFAULT_DIST_NORM_XRANGE = 1 49 | DEFAULT_DIST_NORM_YRANGE = 1 50 | DEFAULT_DIST_NORM_MINX = 0 51 | DEFAULT_DIST_NORM_MINY = 0 52 | 53 | # Snapping to map thresholds 54 | MAX_DIST_SNAP_MAP = 20.0 55 | 56 | # Camera and frame rate related variables 57 | # --------------------------------- 58 | # Frame periodicity 59 | RESAMPLE_TIME_IN_SEC = 0.5 60 | 61 | # Other constants 62 | # ----------------------------------- 63 | VEH_KEY_STR_FORMAT = "{}" 64 | UNK_VEH_KEY_STR_FORMAT = VEH_KEY_STR_FORMAT.format('') 65 | SYN_VEHICLE_STRUCT = {"make": "UNKNOWN", 66 | "model": "UNKNOWN", 67 | "color": "UNKNOWN", 68 | "confidence": 0.0, 69 | "license": "UNKNOWN", 70 | "licenseState": "UNKNOWN", 71 | "type": "UNKNOWN"} 72 | -------------------------------------------------------------------------------- /tracker/code/mctrack/ioutils.py: -------------------------------------------------------------------------------- 1 | """Module to handle all file reads and discarding ignored regions 2 | """ 3 | 4 | __version__ = '0.2' 5 | 6 | import json 7 | 8 | import iso8601 9 | from shapely.geometry import Point, Polygon 10 | 11 | from . 
def remove_inferred(gt_json_list):
    """Remove all unnecessary records.

    Unnecessary records are those whose event type is "reset". Records that
    have no "event" key at all are also dropped (only records carrying an
    event survive).

    Arguments:
        gt_json_list {[list]} -- List of Day2 detection dictionaries

    Returns:
        [list] -- List of Day2 detection dictionaries with relevant records
    """
    # The original kept a running `id_so_far` counter that was never read;
    # it has been removed as dead code.
    json_list = []
    for json_ele in gt_json_list:
        event = json_ele.get('event')
        if event is not None and event.get('type') not in ('reset',):
            json_list.append(json_ele)
    return json_list
def create_poly_dict(sensor_polypts_dict):
    """One-time construction of shapely Polygon objects per sensor.

    Building the polygons once means they do not have to be repeatedly
    reconstructed while checking points (especially in a streaming mode).

    Arguments:
        sensor_polypts_dict {dict} -- dictionary containing
            key = some id (e.g., sensor id)
            value = list of point lists, each list making one polygon

    Returns:
        dict -- dictionary containing
            key = same id (e.g., sensor id)
            value = list of shapely Polygon objects
    """
    poly_dict = {}
    for sensor, poly_pts_list in sensor_polypts_dict.items():
        poly_dict[sensor] = [Polygon(pts) for pts in poly_pts_list]
    return poly_dict
This function 114 | removes such detections that fall into "ignored" areas 115 | 116 | Arguments: 117 | json_list {[list]} -- List of Day2 detection dictionaries 118 | ignore_dict {[list]} -- dictionary of regions to 119 | be ignored. It will be in the format: 120 | key = camera name 121 | value = list of polygons (shapely.Polygon objects) to be 122 | ignored 123 | 124 | Returns: 125 | [list] -- List of Day2 detection dictionaries 126 | """ 127 | retval = [] 128 | ignored_list = [] 129 | for ele in json_list: 130 | ignore = False 131 | if trackerutils.is_aisle_rec(ele): 132 | sensor_id = ele.get("sensor", {}).get("id", None) 133 | if sensor_id is not None: 134 | ignore_list = ignore_dict.get(sensor_id, None) 135 | if ignore_list is not None: 136 | # Check if the point falls within the polygon 137 | point = trackerutils.get_xy(ele) 138 | if point is not None: 139 | point = Point(point) 140 | for polygon in ignore_list: 141 | if polygon.contains(point): 142 | ignore = True 143 | break 144 | if not ignore: 145 | retval.append(ele) 146 | else: 147 | ignored_list.append(ele) 148 | return retval, ignored_list 149 | -------------------------------------------------------------------------------- /tracker/code/mctrack/mctrackbatch.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module is to use Multi-cam tracking in streaming mode 3 | """ 4 | 5 | 6 | __version__ = '0.2' 7 | 8 | import json 9 | from timeit import default_timer as timer 10 | import logging 11 | 12 | from . 
import ioutils, trackerutils, mctracker, constants 13 | 14 | 15 | def read_schema_and_infer(schema_json_file, config_file="config.json"): 16 | """ 17 | Function to read an entire json schema file and mctrack from it 18 | 19 | Returns: 20 | [list] -- List of Day2 schema dictionaries with tracked object ids 21 | """ 22 | 23 | config = json.load(open(config_file)) 24 | ignore_dict = config.get( 25 | "IGNORE_DETECTION_DICT_MOVING", {}) 26 | # One time creation of polygons 27 | ignore_poly_dict = ioutils.create_poly_dict(ignore_dict) 28 | 29 | start_end_times = config.get("timeRange", {}) 30 | resample_time_secs = config.get( 31 | "resample_time_sec", constants.RESAMPLE_TIME_IN_SEC) 32 | json_list = ioutils.read_json_list(schema_json_file, start_end_times) 33 | points_list, ignored_list = ioutils.ignore_false_detections( 34 | json_list, ignore_poly_dict) 35 | 36 | mctracker_obj = mctracker.MulticamTracker(config) 37 | _ = mctracker_obj.mclogger.log_input_points(points_list, ignored_list, 0) 38 | 39 | retval = [] 40 | index = 0 41 | time_indexed_json_dict = trackerutils.create_time_windows( 42 | points_list, resample_time_secs) 43 | for timestamp in time_indexed_json_dict.keys(): 44 | 45 | all_json_list = time_indexed_json_dict[timestamp] 46 | 47 | if all_json_list: 48 | start_time = timer() 49 | 50 | mctracker_obj.process_batch(all_json_list) 51 | end_time = timer() 52 | 53 | state_sizes = { 54 | "unidentified_cars": len(mctracker_obj.state.unidentified_cars), 55 | "prev_list": len(mctracker_obj.state.prev_list), 56 | "carry_over_list": len(mctracker_obj.state.carry_over_list), 57 | "retval": len(mctracker_obj.state.retval), 58 | "match_stats": len(mctracker_obj.state.match_stats), 59 | "possible_parked_cars": len(mctracker_obj.state.possible_parked_cars), 60 | } 61 | logging.info("Re-Id Batch: Time taken: %f: State sizes: %s", 62 | float(end_time - start_time), str(state_sizes)) 63 | tmp_ret = mctracker_obj.state.retval 64 | if tmp_ret is not None: 65 | 
def schema_validate(record_str, day2_schema):
    """Parse a raw record string and optionally validate it against a schema.

    Steps:
      1. Try to decode/parse the record as JSON.
      2. If a schema is supplied, validate the parsed record against it.

    Arguments:
        record_str -- raw record text (expected to be a JSON document)
        day2_schema -- jsonschema dict to validate against, or None to skip
                       schema validation

    Returns:
        dict -- the parsed record on success, None on any parse or
                validation failure (failures are logged at debug level)
    """
    try:
        parsed = json.loads(record_str)
        if day2_schema is not None:
            jsonschema.validate(parsed, day2_schema)
        return parsed
    except jsonschema.SchemaError as schema_error:
        # The schema itself is malformed — log the schema, not the record.
        logging.debug("ERROR: Invalid schema: %s : rec=%s",
                      str(schema_error), day2_schema)
    except jsonschema.ValidationError as val_error:
        logging.debug("ERROR: Invalid record: %s : rec=%s",
                      str(val_error), record_str)
    except ValueError as value_error:
        # Covers malformed JSON and UnicodeError (a ValueError subclass).
        logging.debug("ERROR: Invalid record: %s : rec=%s",
                      str(value_error), record_str)
    return None
def ignore_bad_records(json_list):
    """Drop badly formatted records from a detection list.

    Should be called before other processing functions so that downstream
    code does not crash on bad record formats. A record survives only if it
    is non-None and passes every validator in the list (currently just the
    timestamp check).

    Arguments:
        json_list {list} -- list of detections in day2 format

    Returns:
        list -- list of valid records
    """
    validators = [check_timestamp]
    return [record for record in json_list
            if record is not None and all(check(record) for check in validators)]
https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/tracker/docs/mctracker/arch.png -------------------------------------------------------------------------------- /tracker/docs/mctracker/parking.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/tracker/docs/mctracker/parking.png -------------------------------------------------------------------------------- /tracker/docs/mctracker/parking_coverage.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/tracker/docs/mctracker/parking_coverage.png -------------------------------------------------------------------------------- /tracker/usecasecode/360d/stream_track.py: -------------------------------------------------------------------------------- 1 | """ 2 | Main file for streaming Multicam tracker for 360 degree usecase 3 | """ 4 | __version__ = '0.2' 5 | 6 | import argparse 7 | import json 8 | import logging 9 | import signal 10 | import sys 11 | 12 | from mctrack import mctrackstream 13 | 14 | logging.basicConfig(filename='mctracker360.log', level=logging.INFO) 15 | DEFAULT_CONSUMER_KAFKA_BOOTSTRAP_SERVER_URL = "kafka" 16 | DEFAULT_PRODUCER_KAFKA_BOOTSTRAP_SERVER_URL = "kafka" 17 | 18 | DEFAULT_CONSUMER_KAFKA_TOPIC = "metromind-raw" 19 | DEFAULT_PRODUCER_KAFKA_TOPIC = "metromind-start" 20 | 21 | DEFAULT_MCTRACKER_CONFIG_FILE = "config/config_360d.json" 22 | DEFAULT_STREAM_CONFIG_FILE = "config/config_360d_stream.json" 23 | 24 | mctrack_obj = None 25 | 26 | 27 | def signal_handler(signum, _): 28 | """Signal handler. 
def main():
    """Main function. Starts multicam tracker and runs continuously
    until killed.

    Loads the stream config JSON, extracts kafka broker/topic settings
    (falling back to the module-level defaults), installs a SIGINT handler,
    then constructs McTrackerStream and hands it control.
    """
    global mctrack_obj
    parser = argparse.ArgumentParser()
    parser.add_argument("-c", "--config", help="Config file for mctracker",
                        default=DEFAULT_MCTRACKER_CONFIG_FILE)
    parser.add_argument("-s", "--sconfig", help="Config file for streaming setup",
                        default=DEFAULT_STREAM_CONFIG_FILE)
    args = parser.parse_args()

    stream_config = None
    try:
        # "with" guarantees the handle is closed; the original
        # json.load(open(...)) leaked the file descriptor.
        with open(args.sconfig) as sconfig_file:
            stream_config = json.load(sconfig_file)
    except IOError as ioe:
        err_msg = "ERROR: Stream Config I/O Error({}): {}: {}. Quitting".format(
            ioe.errno, args.sconfig, ioe.strerror)
        logging.error(err_msg)
        print(err_msg)
        sys.exit()
    except Exception:
        # Narrowed from a bare "except:" so SystemExit/KeyboardInterrupt
        # are not swallowed here.
        err_msg = "ERROR: Stream Config Error: {}: {}. Quitting".format(
            args.sconfig, sys.exc_info()[0])
        logging.error(err_msg)
        print(err_msg)
        sys.exit()

    print(stream_config)
    broker_config = stream_config.get("msgBrokerConfig", {})
    ckafka = broker_config.get("inputKafkaServerUrl",
                               DEFAULT_CONSUMER_KAFKA_BOOTSTRAP_SERVER_URL)
    pkafka = broker_config.get("outputKafkaServerUrl",
                               DEFAULT_PRODUCER_KAFKA_BOOTSTRAP_SERVER_URL)
    itopic = broker_config.get("inputKafkaTopic", DEFAULT_CONSUMER_KAFKA_TOPIC)
    # BUG FIX: the fallback for the *output* topic previously used
    # DEFAULT_CONSUMER_KAFKA_TOPIC, which would make the tracker republish
    # onto its own input topic whenever "outputKafkaTopic" was absent.
    otopic = broker_config.get("outputKafkaTopic", DEFAULT_PRODUCER_KAFKA_TOPIC)
    time_it_flag = stream_config.get("profileTime", False)

    print("Starting MC-Streaming app with following args:\n"
          "consumer kafka server={}\n"
          "consumer kafka topic={}\n"
          "producer kafka server={}\n"
          "producer kafka topic={}\n"
          "Time profile={}\n"
          "MC Tracker Config File={}\n".format(ckafka, itopic,
                                               pkafka, otopic,
                                               time_it_flag,
                                               args.config))

    # Set the signal handler for ctrl-c. Since the program runs indefinitely,
    # we need to dump some stats when sigint is received
    # (when profiling is enabled)
    signal.signal(signal.SIGINT, signal_handler)

    mctrack_obj = mctrackstream.McTrackerStream(ckafka, itopic,
                                                pkafka, otopic,
                                                args.config, time_it_flag)
    mctrack_obj.start_mctracker()
2 | 3 | # dependencies 4 | /node_modules 5 | 6 | # testing 7 | /coverage 8 | 9 | # production 10 | /build 11 | 12 | # misc 13 | .DS_Store 14 | .env.local 15 | .env.development.local 16 | .env.test.local 17 | .env.production.local 18 | 19 | npm-debug.log* 20 | yarn-debug.log* 21 | yarn-error.log* 22 | -------------------------------------------------------------------------------- /ui/config/env.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const fs = require('fs'); 4 | const path = require('path'); 5 | const paths = require('./paths'); 6 | 7 | // Make sure that including paths.js after env.js will read .env variables. 8 | delete require.cache[require.resolve('./paths')]; 9 | 10 | const NODE_ENV = process.env.NODE_ENV; 11 | if (!NODE_ENV) { 12 | throw new Error( 13 | 'The NODE_ENV environment variable is required but was not specified.' 14 | ); 15 | } 16 | 17 | // https://github.com/bkeepers/dotenv#what-other-env-files-can-i-use 18 | var dotenvFiles = [ 19 | `${paths.dotenv}.${NODE_ENV}.local`, 20 | `${paths.dotenv}.${NODE_ENV}`, 21 | // Don't include `.env.local` for `test` environment 22 | // since normally you expect tests to produce the same 23 | // results for everyone 24 | NODE_ENV !== 'test' && `${paths.dotenv}.local`, 25 | paths.dotenv, 26 | ].filter(Boolean); 27 | 28 | // Load environment variables from .env* files. Suppress warnings using silent 29 | // if this file is missing. dotenv will never modify any environment variables 30 | // that have already been set. Variable expansion is supported in .env files. 31 | // https://github.com/motdotla/dotenv 32 | // https://github.com/motdotla/dotenv-expand 33 | dotenvFiles.forEach(dotenvFile => { 34 | if (fs.existsSync(dotenvFile)) { 35 | require('dotenv-expand')( 36 | require('dotenv').config({ 37 | path: dotenvFile, 38 | }) 39 | ); 40 | } 41 | }); 42 | 43 | // We support resolving modules according to `NODE_PATH`. 
44 | // This lets you use absolute paths in imports inside large monorepos: 45 | // https://github.com/facebookincubator/create-react-app/issues/253. 46 | // It works similar to `NODE_PATH` in Node itself: 47 | // https://nodejs.org/api/modules.html#modules_loading_from_the_global_folders 48 | // Note that unlike in Node, only *relative* paths from `NODE_PATH` are honored. 49 | // Otherwise, we risk importing Node.js core modules into an app instead of Webpack shims. 50 | // https://github.com/facebookincubator/create-react-app/issues/1023#issuecomment-265344421 51 | // We also resolve them to make sure all tools using them work consistently. 52 | const appDirectory = fs.realpathSync(process.cwd()); 53 | process.env.NODE_PATH = (process.env.NODE_PATH || '') 54 | .split(path.delimiter) 55 | .filter(folder => folder && !path.isAbsolute(folder)) 56 | .map(folder => path.resolve(appDirectory, folder)) 57 | .join(path.delimiter); 58 | 59 | // Grab NODE_ENV and REACT_APP_* environment variables and prepare them to be 60 | // injected into the application via DefinePlugin in Webpack configuration. 61 | const REACT_APP = /^REACT_APP_/i; 62 | 63 | function getClientEnvironment(publicUrl) { 64 | const raw = Object.keys(process.env) 65 | .filter(key => REACT_APP.test(key)) 66 | .reduce( 67 | (env, key) => { 68 | env[key] = process.env[key]; 69 | return env; 70 | }, 71 | { 72 | // Useful for determining whether we’re running in production mode. 73 | // Most importantly, it switches React into the correct mode. 74 | NODE_ENV: process.env.NODE_ENV || 'development', 75 | // Useful for resolving the correct path to static assets in `public`. 76 | // For example, . 77 | // This should only be used as an escape hatch. Normally you would put 78 | // images into the `src` and `import` them in code to get their paths. 
/**
 * Normalize the trailing slash of a path string.
 *
 * @param {string} inputPath the path to normalize
 * @param {boolean} needsSlash whether the result must end with '/'
 * @returns {string} inputPath with a trailing slash added or removed as needed
 */
function ensureSlash(inputPath, needsSlash) {
  // Renamed the parameter: the original `path` shadowed the required
  // Node `path` module in this file's scope.
  const hasSlash = inputPath.endsWith('/');
  if (hasSlash && !needsSlash) {
    // BUG FIX: the original called `path.substr(path, path.length - 1)`,
    // passing a string as the start index — it only worked because the
    // deprecated substr coerces NaN to 0. slice(0, -1) states the intent.
    return inputPath.slice(0, -1);
  }
  if (!hasSlash && needsSlash) {
    return `${inputPath}/`;
  }
  return inputPath;
}
45 | 55 | 56 | 57 | -------------------------------------------------------------------------------- /ui/public/react-datetime.css: -------------------------------------------------------------------------------- 1 | /** 2 | * https://github.com/YouCanBookMe/react-datetime 3 | */ 4 | 5 | .rdt { 6 | position: relative; 7 | } 8 | .rdtPicker { 9 | display: none; 10 | position: absolute; 11 | width: 250px; 12 | padding: 4px; 13 | margin-top: 1px; 14 | z-index: 99999 !important; 15 | color: #e0e0e0; 16 | background: #424242; 17 | box-shadow: 0 1px 3px rgba(0,0,0,.1); 18 | border: 1px solid #3f3d3d; 19 | } 20 | .rdtOpen .rdtPicker { 21 | display: block; 22 | } 23 | .rdtStatic .rdtPicker { 24 | box-shadow: none; 25 | position: static; 26 | } 27 | 28 | .rdtPicker .rdtTimeToggle { 29 | text-align: center; 30 | } 31 | 32 | .rdtPicker table { 33 | width: 100%; 34 | margin: 0; 35 | } 36 | .rdtPicker td, 37 | .rdtPicker th { 38 | text-align: center; 39 | height: 28px; 40 | } 41 | .rdtPicker td { 42 | cursor: pointer; 43 | } 44 | .rdtPicker td.rdtDay:hover, 45 | .rdtPicker td.rdtHour:hover, 46 | .rdtPicker td.rdtMinute:hover, 47 | .rdtPicker td.rdtSecond:hover, 48 | .rdtPicker .rdtTimeToggle:hover { 49 | background: #e0e0e0; 50 | color: #050505; 51 | cursor: pointer; 52 | } 53 | .rdtPicker td.rdtOld, 54 | .rdtPicker td.rdtNew { 55 | color: #9e9e9e; 56 | } 57 | .rdtPicker td.rdtToday { 58 | position: relative; 59 | } 60 | .rdtPicker td.rdtToday:before { 61 | content: ''; 62 | display: inline-block; 63 | border-left: 7px solid transparent; 64 | border-bottom: 7px solid #9e9e9e; 65 | border-top-color: rgba(0, 0, 0, 0.2); 66 | position: absolute; 67 | bottom: 4px; 68 | right: 4px; 69 | } 70 | .rdtPicker td.rdtActive, 71 | .rdtPicker td.rdtActive:hover { 72 | background-color: #666768; 73 | color: #fff; 74 | text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.25); 75 | } 76 | .rdtPicker td.rdtActive.rdtToday:before { 77 | border-bottom-color: #fff; 78 | } 79 | .rdtPicker td.rdtDisabled, 80 | 
.rdtPicker td.rdtDisabled:hover { 81 | background: none; 82 | color: #727171; 83 | cursor: not-allowed; 84 | } 85 | 86 | .rdtPicker td span.rdtOld { 87 | color: #999999; 88 | } 89 | .rdtPicker td span.rdtDisabled, 90 | .rdtPicker td span.rdtDisabled:hover { 91 | background: none; 92 | color: #999999; 93 | cursor: not-allowed; 94 | } 95 | .rdtPicker th { 96 | border-bottom: 1px solid #4e4848; 97 | } 98 | .rdtPicker .dow { 99 | width: 14.2857%; 100 | border-bottom: none; 101 | } 102 | .rdtPicker th.rdtSwitch { 103 | width: 100px; 104 | } 105 | .rdtPicker th.rdtNext, 106 | .rdtPicker th.rdtPrev { 107 | font-size: 21px; 108 | vertical-align: top; 109 | } 110 | 111 | .rdtPrev span, 112 | .rdtNext span { 113 | display: block; 114 | -webkit-touch-callout: none; /* iOS Safari */ 115 | -webkit-user-select: none; /* Chrome/Safari/Opera */ 116 | -khtml-user-select: none; /* Konqueror */ 117 | -moz-user-select: none; /* Firefox */ 118 | -ms-user-select: none; /* Internet Explorer/Edge */ 119 | user-select: none; 120 | } 121 | 122 | .rdtPicker th.rdtDisabled, 123 | .rdtPicker th.rdtDisabled:hover { 124 | background: none; 125 | color: #999999; 126 | cursor: not-allowed; 127 | } 128 | .rdtPicker thead tr:first-child th { 129 | cursor: pointer; 130 | } 131 | .rdtPicker thead tr:first-child th:hover { 132 | background: #eeeeee; 133 | color: #424242; 134 | } 135 | 136 | .rdtPicker tfoot { 137 | border-top: 1px solid #4e4848; 138 | } 139 | 140 | .rdtPicker button { 141 | border: none; 142 | background: none; 143 | cursor: pointer; 144 | } 145 | .rdtPicker button:hover { 146 | background-color: #eee; 147 | color: #424242; 148 | } 149 | 150 | .rdtPicker thead button { 151 | width: 100%; 152 | height: 100%; 153 | } 154 | 155 | td.rdtMonth, 156 | td.rdtYear { 157 | height: 50px; 158 | width: 25%; 159 | cursor: pointer; 160 | } 161 | td.rdtMonth:hover, 162 | td.rdtYear:hover { 163 | background: #eee; 164 | color: #424242; 165 | } 166 | 167 | .rdtCounters { 168 | display: inline-block; 
169 | } 170 | 171 | .rdtCounters > div { 172 | float: left; 173 | } 174 | 175 | .rdtCounter { 176 | height: 100px; 177 | } 178 | 179 | .rdtCounter { 180 | width: 40px; 181 | } 182 | 183 | .rdtCounterSeparator { 184 | line-height: 100px; 185 | } 186 | 187 | .rdtCounter .rdtBtn { 188 | height: 40%; 189 | line-height: 40px; 190 | cursor: pointer; 191 | display: block; 192 | 193 | -webkit-touch-callout: none; /* iOS Safari */ 194 | -webkit-user-select: none; /* Chrome/Safari/Opera */ 195 | -khtml-user-select: none; /* Konqueror */ 196 | -moz-user-select: none; /* Firefox */ 197 | -ms-user-select: none; /* Internet Explorer/Edge */ 198 | user-select: none; 199 | } 200 | .rdtCounter .rdtBtn:hover { 201 | background: #eee; 202 | color: #424242; 203 | } 204 | .rdtCounter .rdtCount { 205 | height: 20%; 206 | font-size: 1.2em; 207 | } 208 | 209 | .rdtMilli { 210 | vertical-align: middle; 211 | padding-left: 8px; 212 | width: 48px; 213 | } 214 | 215 | .rdtMilli input { 216 | width: 100%; 217 | font-size: 1.2em; 218 | margin-top: 37px; 219 | } 220 | -------------------------------------------------------------------------------- /ui/readme-images/Component-Hierarchy.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/ui/readme-images/Component-Hierarchy.jpeg -------------------------------------------------------------------------------- /ui/readme-images/Garage-Components.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/ui/readme-images/Garage-Components.jpeg -------------------------------------------------------------------------------- /ui/readme-images/Garage.PNG: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/ui/readme-images/Garage.PNG -------------------------------------------------------------------------------- /ui/readme-images/HomePage.PNG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NVIDIA-AI-IOT/deepstream_360_d_smart_parking_application/aaeb599a28ab0e2619f3d5540682877a54fcce71/ui/readme-images/HomePage.PNG -------------------------------------------------------------------------------- /ui/scripts/build.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | // Do this as the first thing so that any code reading it knows the right env. 4 | process.env.BABEL_ENV = 'production'; 5 | process.env.NODE_ENV = 'production'; 6 | 7 | // Makes the script crash on unhandled rejections instead of silently 8 | // ignoring them. In the future, promise rejections that are not handled will 9 | // terminate the Node.js process with a non-zero exit code. 10 | process.on('unhandledRejection', err => { 11 | throw err; 12 | }); 13 | 14 | // Ensure environment variables are read. 
15 | require('../config/env'); 16 | 17 | const path = require('path'); 18 | const chalk = require('chalk'); 19 | const fs = require('fs-extra'); 20 | const webpack = require('webpack'); 21 | const config = require('../config/webpack.config.prod'); 22 | const paths = require('../config/paths'); 23 | const checkRequiredFiles = require('react-dev-utils/checkRequiredFiles'); 24 | const formatWebpackMessages = require('react-dev-utils/formatWebpackMessages'); 25 | const printHostingInstructions = require('react-dev-utils/printHostingInstructions'); 26 | const FileSizeReporter = require('react-dev-utils/FileSizeReporter'); 27 | const printBuildError = require('react-dev-utils/printBuildError'); 28 | 29 | const measureFileSizesBeforeBuild = 30 | FileSizeReporter.measureFileSizesBeforeBuild; 31 | const printFileSizesAfterBuild = FileSizeReporter.printFileSizesAfterBuild; 32 | const useYarn = fs.existsSync(paths.yarnLockFile); 33 | 34 | // These sizes are pretty large. We'll warn for bundles exceeding them. 35 | const WARN_AFTER_BUNDLE_GZIP_SIZE = 512 * 1024; 36 | const WARN_AFTER_CHUNK_GZIP_SIZE = 1024 * 1024; 37 | 38 | // Warn and crash if required files are missing 39 | if (!checkRequiredFiles([paths.appHtml, paths.appIndexJs])) { 40 | process.exit(1); 41 | } 42 | 43 | // First, read the current file sizes in build directory. 44 | // This lets us display how much they changed later. 
45 | measureFileSizesBeforeBuild(paths.appBuild) 46 | .then(previousFileSizes => { 47 | // Remove all content but keep the directory so that 48 | // if you're in it, you don't end up in Trash 49 | fs.emptyDirSync(paths.appBuild); 50 | // Merge with the public folder 51 | copyPublicFolder(); 52 | // Start the webpack build 53 | return build(previousFileSizes); 54 | }) 55 | .then( 56 | ({ stats, previousFileSizes, warnings }) => { 57 | if (warnings.length) { 58 | console.log(chalk.yellow('Compiled with warnings.\n')); 59 | console.log(warnings.join('\n\n')); 60 | console.log( 61 | '\nSearch for the ' + 62 | chalk.underline(chalk.yellow('keywords')) + 63 | ' to learn more about each warning.' 64 | ); 65 | console.log( 66 | 'To ignore, add ' + 67 | chalk.cyan('// eslint-disable-next-line') + 68 | ' to the line before.\n' 69 | ); 70 | } else { 71 | console.log(chalk.green('Compiled successfully.\n')); 72 | } 73 | 74 | console.log('File sizes after gzip:\n'); 75 | printFileSizesAfterBuild( 76 | stats, 77 | previousFileSizes, 78 | paths.appBuild, 79 | WARN_AFTER_BUNDLE_GZIP_SIZE, 80 | WARN_AFTER_CHUNK_GZIP_SIZE 81 | ); 82 | console.log(); 83 | 84 | const appPackage = require(paths.appPackageJson); 85 | const publicUrl = paths.publicUrl; 86 | const publicPath = config.output.publicPath; 87 | const buildFolder = path.relative(process.cwd(), paths.appBuild); 88 | printHostingInstructions( 89 | appPackage, 90 | publicUrl, 91 | publicPath, 92 | buildFolder, 93 | useYarn 94 | ); 95 | }, 96 | err => { 97 | console.log(chalk.red('Failed to compile.\n')); 98 | printBuildError(err); 99 | process.exit(1); 100 | } 101 | ); 102 | 103 | // Create the production build and print the deployment instructions. 
104 | function build(previousFileSizes) { 105 | console.log('Creating an optimized production build...'); 106 | 107 | let compiler = webpack(config); 108 | return new Promise((resolve, reject) => { 109 | compiler.run((err, stats) => { 110 | if (err) { 111 | return reject(err); 112 | } 113 | const messages = formatWebpackMessages(stats.toJson({}, true)); 114 | if (messages.errors.length) { 115 | // Only keep the first error. Others are often indicative 116 | // of the same problem, but confuse the reader with noise. 117 | if (messages.errors.length > 1) { 118 | messages.errors.length = 1; 119 | } 120 | return reject(new Error(messages.errors.join('\n\n'))); 121 | } 122 | if ( 123 | process.env.CI && 124 | (typeof process.env.CI !== 'string' || 125 | process.env.CI.toLowerCase() !== 'false') && 126 | messages.warnings.length 127 | ) { 128 | console.log( 129 | chalk.yellow( 130 | '\nTreating warnings as errors because process.env.CI = true.\n' + 131 | 'Most CI servers set it automatically.\n' 132 | ) 133 | ); 134 | return reject(new Error(messages.warnings.join('\n\n'))); 135 | } 136 | return resolve({ 137 | stats, 138 | previousFileSizes, 139 | warnings: messages.warnings, 140 | }); 141 | }); 142 | }); 143 | } 144 | 145 | function copyPublicFolder() { 146 | fs.copySync(paths.appPublic, paths.appBuild, { 147 | dereference: true, 148 | filter: file => file !== paths.appHtml, 149 | }); 150 | } 151 | -------------------------------------------------------------------------------- /ui/scripts/start.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | // Do this as the first thing so that any code reading it knows the right env. 4 | process.env.BABEL_ENV = 'development'; 5 | process.env.NODE_ENV = 'development'; 6 | 7 | // Makes the script crash on unhandled rejections instead of silently 8 | // ignoring them. 
In the future, promise rejections that are not handled will 9 | // terminate the Node.js process with a non-zero exit code. 10 | process.on('unhandledRejection', err => { 11 | throw err; 12 | }); 13 | 14 | // Ensure environment variables are read. 15 | require('../config/env'); 16 | 17 | const fs = require('fs'); 18 | const chalk = require('chalk'); 19 | const webpack = require('webpack'); 20 | const WebpackDevServer = require('webpack-dev-server'); 21 | const clearConsole = require('react-dev-utils/clearConsole'); 22 | const checkRequiredFiles = require('react-dev-utils/checkRequiredFiles'); 23 | const { 24 | choosePort, 25 | createCompiler, 26 | prepareProxy, 27 | prepareUrls, 28 | } = require('react-dev-utils/WebpackDevServerUtils'); 29 | const openBrowser = require('react-dev-utils/openBrowser'); 30 | const paths = require('../config/paths'); 31 | const config = require('../config/webpack.config.dev'); 32 | const createDevServerConfig = require('../config/webpackDevServer.config'); 33 | 34 | const useYarn = fs.existsSync(paths.yarnLockFile); 35 | const isInteractive = process.stdout.isTTY; 36 | 37 | // Warn and crash if required files are missing 38 | if (!checkRequiredFiles([paths.appHtml, paths.appIndexJs])) { 39 | process.exit(1); 40 | } 41 | 42 | // Tools like Cloud9 rely on this. 43 | const DEFAULT_PORT = parseInt(process.env.PORT, 10) || 3000; 44 | const HOST = process.env.HOST || '0.0.0.0'; 45 | 46 | if (process.env.HOST) { 47 | console.log( 48 | chalk.cyan( 49 | `Attempting to bind to HOST environment variable: ${chalk.yellow( 50 | chalk.bold(process.env.HOST) 51 | )}` 52 | ) 53 | ); 54 | console.log( 55 | `If this was unintentional, check that you haven't mistakenly set it in your shell.` 56 | ); 57 | console.log(`Learn more here: ${chalk.yellow('http://bit.ly/2mwWSwH')}`); 58 | console.log(); 59 | } 60 | 61 | // We attempt to use the default port but if it is busy, we offer the user to 62 | // run on a different port. 
`choosePort()` Promise resolves to the next free port. 63 | choosePort(HOST, DEFAULT_PORT) 64 | .then(port => { 65 | if (port == null) { 66 | // We have not found a port. 67 | return; 68 | } 69 | const protocol = process.env.HTTPS === 'true' ? 'https' : 'http'; 70 | const appName = require(paths.appPackageJson).name; 71 | const urls = prepareUrls(protocol, HOST, port); 72 | // Create a webpack compiler that is configured with custom messages. 73 | const compiler = createCompiler(webpack, config, appName, urls, useYarn); 74 | // Load proxy config 75 | const proxySetting = require(paths.appPackageJson).proxy; 76 | const proxyConfig = prepareProxy(proxySetting, paths.appPublic); 77 | // Serve webpack assets generated by the compiler over a web sever. 78 | const serverConfig = createDevServerConfig( 79 | proxyConfig, 80 | urls.lanUrlForConfig 81 | ); 82 | const devServer = new WebpackDevServer(compiler, serverConfig); 83 | // Launch WebpackDevServer. 84 | devServer.listen(port, HOST, err => { 85 | if (err) { 86 | return console.log(err); 87 | } 88 | if (isInteractive) { 89 | clearConsole(); 90 | } 91 | console.log(chalk.cyan('Starting the development server...\n')); 92 | openBrowser(urls.localUrlForBrowser); 93 | }); 94 | 95 | ['SIGINT', 'SIGTERM'].forEach(function(sig) { 96 | process.on(sig, function() { 97 | devServer.close(); 98 | process.exit(); 99 | }); 100 | }); 101 | }) 102 | .catch(err => { 103 | if (err && err.message) { 104 | console.log(err.message); 105 | } 106 | process.exit(1); 107 | }); 108 | -------------------------------------------------------------------------------- /ui/scripts/test.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | // Do this as the first thing so that any code reading it knows the right env. 
4 | process.env.BABEL_ENV = 'test'; 5 | process.env.NODE_ENV = 'test'; 6 | process.env.PUBLIC_URL = ''; 7 | 8 | // Makes the script crash on unhandled rejections instead of silently 9 | // ignoring them. In the future, promise rejections that are not handled will 10 | // terminate the Node.js process with a non-zero exit code. 11 | process.on('unhandledRejection', err => { 12 | throw err; 13 | }); 14 | 15 | // Ensure environment variables are read. 16 | require('../config/env'); 17 | 18 | const jest = require('jest'); 19 | const argv = process.argv.slice(2); 20 | 21 | // Watch unless on CI or in coverage mode 22 | if (!process.env.CI && argv.indexOf('--coverage') < 0) { 23 | argv.push('--watch'); 24 | } 25 | 26 | 27 | jest.run(argv); 28 | -------------------------------------------------------------------------------- /ui/src/App.css: -------------------------------------------------------------------------------- 1 | .loading { 2 | width: 200px; 3 | height: 200px; 4 | 5 | position: absolute; 6 | top:0; 7 | bottom: 0; 8 | left: 0; 9 | right: 0; 10 | 11 | margin: auto; 12 | font-size: 25px; 13 | } 14 | 15 | .offline { 16 | position: relative; 17 | top: 54px; 18 | text-align: center; 19 | font-size: 30px; 20 | font-weight: bold; 21 | color: white; 22 | background-color: red; 23 | border: 1px solid red; 24 | } -------------------------------------------------------------------------------- /ui/src/App.js: -------------------------------------------------------------------------------- 1 | import React, { Component } from 'react'; 2 | import { Switch, Route, Redirect } from 'react-router-dom'; 3 | import Axios from 'axios'; 4 | import { Offline } from 'react-detect-offline'; 5 | //import Moment from 'moment'; 6 | 7 | import classes from './App.css'; 8 | 9 | import HomeMap from './PageBuilder/HomeMap'; 10 | import Loader from './Component/UI/Loader/Loader'; 11 | 12 | /** 13 | * 4 Main tasks: 14 | * 1) read the config file and get the environment variables; 15 | * 2) 
pass down the props, i.e. environment variables; 16 | * 3) declare the routes for home/garage page; 17 | * 4) offline detection. 18 | */ 19 | class App extends Component { 20 | state = { 21 | envVar: {} 22 | } 23 | 24 | componentDidMount() { 25 | this.source = Axios.CancelToken.source(); 26 | /* query the REST API to retrieve configuration information. */ 27 | Axios.get('http://' + process.env.REACT_APP_BACKEND_IP_ADDRESS + ':' + process.env.REACT_APP_BACKEND_PORT + '/ui-config') 28 | .then((res) => { 29 | /* copy the configuration into envVar */ 30 | const envVar = res.data; 31 | this.setState({ envVar: envVar }); 32 | }) 33 | .catch(err => { 34 | this.setState({ 35 | err: err.message 36 | }); 37 | }) 38 | } 39 | 40 | componentWillUnmount() { 41 | this.source.cancel('Operation canceled by the user'); 42 | clearInterval(this.interval); 43 | } 44 | 45 | render() { 46 | if (this.state.envVar !== undefined && Object.keys(this.state.envVar).length !== 0) { 47 | return ( 48 |
49 | 50 |
You are offline! Please refresh the page when you are online again.
51 |
52 | 53 | {/* /garage focuses the googlemap on a particular garage */} 54 | 55 | } 61 | /> 62 | {/* /home takes googlemap page with one or more markers */} 63 | } /> 64 | 65 | 66 |
67 | ); 68 | } 69 | else { 70 | return ( 71 |
72 | 73 | {this.state.err} 74 |
75 | ); 76 | } 77 | } 78 | } 79 | 80 | export default App; -------------------------------------------------------------------------------- /ui/src/Component/Capacity/Capacity.css: -------------------------------------------------------------------------------- 1 | .kpi { 2 | overflow: hidden; 3 | width: 100%; 4 | } 5 | 6 | .title { 7 | left: 0; 8 | font-size: 35px; 9 | margin: 0 15px; 10 | } 11 | 12 | .kpirow { 13 | margin: 5px 0; 14 | } 15 | 16 | .num { 17 | font-size: 28px; 18 | font-weight: 100; 19 | text-align: right; 20 | } 21 | 22 | .spot { 23 | font-size: 16px; 24 | font-weight: 100; 25 | text-align: left; 26 | padding: 10px; 27 | } 28 | 29 | @media (max-width: 900px) { 30 | .kpi { 31 | overflow: hidden; 32 | margin-bottom: 30px; 33 | width: 100%; 34 | height: 50px; 35 | width: 100%; 36 | } 37 | 38 | .title { 39 | left: 0; 40 | font-size: 22px; 41 | margin: 0 15px; 42 | } 43 | 44 | .num { 45 | font-size: 18px; 46 | font-weight: 100; 47 | text-align: right; 48 | } 49 | 50 | .spot { 51 | font-size: 12px; 52 | font-weight: 100; 53 | text-align: left; 54 | padding: 0; 55 | } 56 | } -------------------------------------------------------------------------------- /ui/src/Component/Capacity/Capacity.js: -------------------------------------------------------------------------------- 1 | import React, { Component } from 'react'; 2 | import Moment from 'moment'; 3 | import { Row, Col } from 'react-bootstrap'; 4 | import Axios from 'axios'; 5 | 6 | import classes from './Capacity.css'; 7 | 8 | /** 9 | * Get the KPI (or capacity) of the garage by sending the Ajax query to the backend 10 | * every `alertEventRefreshIntervalSeconds` secs. It's default value is 5 secs. 
11 | * 12 | * KPI information includes available/occupied parking spots, and entry/exit flow; 13 | * 14 | * Configurable variable: `alertEventRefreshIntervalSeconds` 15 | */ 16 | class Capacity extends Component { 17 | state = { 18 | id: '', 19 | stats: [], 20 | isLive: this.props.isLive, 21 | alertEventRefreshIntervalSeconds: this.props.config.alertEventRefreshIntervalSeconds, 22 | startTimestamp: this.props.isLive ? Moment.utc().subtract(this.props.config.uiDelaySeconds, 's').format('YYYY-MM-DDTHH:mm:ss.SSS[Z]') : Moment.utc(this.props.startTimestamp).subtract(this.props.config.uiDelaySeconds, 's').format('YYYY-MM-DDTHH:mm:ss.SSS[Z]'), 23 | uiDelaySeconds: this.props.config.uiDelaySeconds, 24 | diffTime: this.props.diffTime 25 | } 26 | 27 | componentDidMount() { 28 | 29 | this.source = Axios.CancelToken.source(); 30 | /* api call needed */ 31 | if (this.props.api !== '') { 32 | 33 | /* create the query */ 34 | const start = this.props.isLive ? Moment.utc().subtract(this.state.uiDelaySeconds, 's').format('YYYY-MM-DDTHH:mm:ss.SSS[Z]') : Moment.utc().subtract(this.state.diffTime + this.state.uiDelaySeconds, 's').format('YYYY-MM-DDTHH:mm:ss.SSS[Z]'); 35 | let prestart = Moment.utc(start).subtract(this.state.alertEventRefreshIntervalSeconds, 's').format('YYYY-MM-DDTHH:mm:ss.SSS[Z]'); 36 | let timestamp = this.state.isLive ? 
'@timestamp:[* TO ' + start + ']' : '@timestamp:[' + prestart + ' TO ' + start + ']'; 37 | let request = timestamp; 38 | 39 | Axios.get(this.props.api, { 40 | params: { 41 | timeQuery: request 42 | }, 43 | cancelToken: this.source.token 44 | }).then((res) => { 45 | let obj = res.data; 46 | let stats = []; 47 | for (let key in obj) { 48 | let value = obj[key]; 49 | stats.push({ key, value }); 50 | } 51 | this.setState({ id: res.data['id'], stats: stats.splice(1) }); 52 | }) 53 | .catch((thrown) => { 54 | if (Axios.isCancel(thrown)) { 55 | console.log('Request canceled', thrown.message); 56 | } 57 | else { 58 | console.log('something wrong with KPI ajax'); 59 | } 60 | }); 61 | /* make ajax call every alertEventRefreshIntervalSeconds seconds */ 62 | this.interval = setInterval(() => { 63 | 64 | let currentTime = this.props.isLive ? Moment.utc().subtract(this.state.uiDelaySeconds, 's').format('YYYY-MM-DDTHH:mm:ss.SSS[Z]') : Moment.utc().subtract(this.state.diffTime + this.state.uiDelaySeconds, 's').format('YYYY-MM-DDTHH:mm:ss.SSS[Z]'); 65 | let timestamp = this.state.isLive ? 
'@timestamp:[* TO ' + currentTime + ']' : '@timestamp:[' + start + ' TO ' + currentTime + ']'; 66 | let request = timestamp; 67 | 68 | Axios.get(this.props.api, { 69 | params: { 70 | timeQuery: request 71 | }, 72 | cancelToken: this.source.token 73 | }).then((res) => { 74 | let obj = res.data; 75 | let stats = []; 76 | for (let key in obj) { 77 | let value = obj[key]; 78 | stats.push({ key, value }); 79 | } 80 | this.setState({ id: res.data['id'], stats: stats.splice(1) }); 81 | }) 82 | .catch((thrown) => { 83 | if (Axios.isCancel(thrown)) { 84 | console.log('Request canceled', thrown.message); 85 | } 86 | else { 87 | console.log('something wrong with KPI interval ajax'); 88 | } 89 | }); 90 | }, this.state.alertEventRefreshIntervalSeconds * 1000); 91 | } 92 | } 93 | 94 | componentWillUnmount() { 95 | clearInterval(this.interval); 96 | this.source.cancel('Operation canceled by the user'); 97 | if (this.socket !== undefined) { 98 | this.socket.disconnect(); 99 | } 100 | } 101 | 102 | render() { 103 | /* loop through stats array */ 104 | let col = this.state.stats.map((data, index) => { 105 | return ( 106 | 107 | {data.value} 108 |   109 | {data.key} 110 | 111 | ); 112 | }); 113 | 114 | return ( 115 | 116 | {this.props.name} 117 | 118 | 119 | {col} 120 | 121 | 122 | 123 | 124 | ); 125 | } 126 | } 127 | 128 | export default Capacity; -------------------------------------------------------------------------------- /ui/src/Component/Flip/Flip.css: -------------------------------------------------------------------------------- 1 | /* entire container, keeps perspective */ 2 | .flipContainer { 3 | perspective: 1000px; 4 | width: inherit; 5 | height: inherit; 6 | } 7 | 8 | .flipper { 9 | width: inherit; 10 | height: inherit; 11 | } 12 | 13 | .flipContainer, .front, .back { 14 | width: inherit; 15 | height: inherit; 16 | z-index: 5; 17 | } 18 | 19 | /* flip speed goes here */ 20 | .flipper { 21 | transition: 0.6s; 22 | transform-style: preserve-3d; 23 | 24 | position: 
relative; 25 | } 26 | 27 | /* hide back of pane during swap */ 28 | .front, .back { 29 | backface-visibility: hidden; 30 | 31 | position: absolute; 32 | top: 0; 33 | left: 0; 34 | } 35 | 36 | /* front pane, placed above back */ 37 | .front { 38 | z-index: 2; 39 | /* for firefox 31 */ 40 | transform: rotateY(0deg); 41 | } 42 | 43 | /* back, initially hidden pane */ 44 | .back { 45 | z-index: 2; 46 | 47 | transform: rotateY(180deg); 48 | } -------------------------------------------------------------------------------- /ui/src/Component/Flip/Flip.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | 3 | import classes from './Flip.css'; 4 | 5 | /** 6 | * Flip the panel window to switch between anomalies/events list. 7 | * 8 | * In this demo version, the flip function is disable. 9 | * Go to SmartGaragePage.js to enable this function. 10 | */ 11 | const flip = (props) => { 12 | return ( 13 |
14 |
15 |
16 | {props.front} 17 |
18 |
19 | {props.back} 20 |
21 |
22 |
23 | ); 24 | }; 25 | 26 | export default flip; -------------------------------------------------------------------------------- /ui/src/Component/Footer/Footer.css: -------------------------------------------------------------------------------- 1 | .footer { 2 | min-height: 30px !important; 3 | background-color: #343131; 4 | color: #fefefe; 5 | border: none; 6 | z-index: 100; 7 | } 8 | 9 | .footercontainer { 10 | margin-bottom: 10px; 11 | padding: 0 15px; 12 | } -------------------------------------------------------------------------------- /ui/src/Component/Footer/Footer.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Navbar } from 'react-bootstrap'; 3 | 4 | import classes from './Footer.css'; 5 | 6 | /** 7 | * Footer.js contains four elements: 8 | * (1) app’s name; 9 | * (2) app’s version; 10 | * (3) user’s information; 11 | * (4) notices. 12 | */ 13 | const footer = (props) => { 14 | /* show information on the footer */ 15 | return ( 16 | 17 |
18 | METROPOLIS | Ver: beta | User: {props.user} | This is a virtual garage for demo only. 19 |
20 |
21 | ); 22 | }; 23 | 24 | export default footer; -------------------------------------------------------------------------------- /ui/src/Component/Header/Header.css: -------------------------------------------------------------------------------- 1 | .Header { 2 | background-color: #343131; 3 | border-bottom: 4px solid #76b900; 4 | } 5 | 6 | .center { 7 | position: absolute; 8 | left: 50%; 9 | margin-left: -130px !important; 10 | display: block; 11 | color: #fefefe !important; 12 | font-weight: bold; 13 | font-size: 40px; 14 | } 15 | 16 | .questioncircle { 17 | font-size: 18px; 18 | } 19 | 20 | .questioncircle > a { 21 | background-color: inherit !important; 22 | color: white !important; 23 | } 24 | 25 | .questioncircle > a:hover { 26 | background-color: #080808 !important; 27 | color: #76b900 !important; 28 | } 29 | 30 | .input input, .collapse input { 31 | color: #fefefe; 32 | border: none; 33 | background-color: #76b900; 34 | } 35 | 36 | .input1 input { 37 | color: #fefefe; 38 | border: none; 39 | background-color: #76b900; 40 | border-bottom-left-radius: 0px !important; 41 | } 42 | 43 | .button { 44 | border: 1px solid #76b900; 45 | background-color: #76b900; 46 | border-top-left-radius: 0px !important; 47 | border-bottom-left-radius: 0px !important; 48 | opacity: 1 !important; 49 | } 50 | 51 | .button1 { 52 | border: 1px solid #76b900; 53 | background-color: #76b900; 54 | border-top-left-radius: 0px !important; 55 | border-bottom-left-radius: 0px !important; 56 | border-bottom-right-radius: 0px !important; 57 | opacity: 1 !important; 58 | } 59 | 60 | .buttonbetween { 61 | border: 1px solid #76b900; 62 | background-color: #76b900; 63 | border-radius: 0px !important; 64 | opacity: 1 !important; 65 | } 66 | 67 | .button:focus, .buttonbetween:focus, .button1:focus { 68 | background-color: #76b900 !important; 69 | } 70 | 71 | .button:hover, .buttonbetween:hover, .button1:hover { 72 | border: 1px solid #76b900 !important; 73 | background-color: #76b900 !important; 
74 | opacity: 1 !important; 75 | } 76 | 77 | .glyphicon { 78 | color: rgba(255,255,255,0.6); 79 | } 80 | 81 | 82 | .live { 83 | position: absolute; 84 | font-size: 30px; 85 | margin: 10px 15px 0 15px; 86 | right: 0; 87 | top: 0; 88 | } 89 | 90 | .live > a { 91 | color: inherit; 92 | } 93 | 94 | .tip > .tooltip-inner { 95 | background-color: #fefefe !important; 96 | color: inherit; 97 | } 98 | 99 | @media (max-width: 500px) { 100 | .center { 101 | font-size: 30px; 102 | left: 60%; 103 | } 104 | } 105 | 106 | @media (max-width: 800px) { 107 | 108 | } -------------------------------------------------------------------------------- /ui/src/Component/Header/Header.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import FontAwesomeIcon from '@fortawesome/react-fontawesome'; 3 | import { faQuestionCircle, faExclamationTriangle } from '@fortawesome/free-solid-svg-icons'; 4 | import { Navbar, Nav, NavItem } from 'react-bootstrap'; 5 | import { Tooltip, OverlayTrigger } from 'react-bootstrap'; 6 | 7 | import classes from './Header.css'; 8 | import logo from '../../assets/NVLogo-H-White-Small.png'; 9 | 10 | /** 11 | * Header.js contains 12 | * 1) nvidia's logo; 13 | * 2) app's name; 14 | * 3) search bar with drop-down calendar; 15 | * 4) alert icon which is an exclamation triangle will be triggered by invalid time bounds in search; 16 | * 5) question mark icon without hyperlink; 17 | * 6) tool icon without hyperlink. 
18 | */ 19 | const header = (props) => { 20 | let alertIcon; 21 | 22 | if ( window.location.hash === "#/home" || props.isTimeValid ) { 23 | alertIcon = ( 24 | 29 | ); 30 | } 31 | else { 32 | alertIcon = ( 33 | Invalid time} > 34 | 39 | 40 | ); 41 | } 42 | 43 | return ( 44 | 45 | 46 | {/* left nvidia brand */} 47 | 48 | 49 | Nvidia 50 | 51 | 52 | {/* center METROPOLIS text */} 53 | METROPOLIS 54 | 55 | 56 | 57 | {/* right end question to other websites */} 58 | 63 | {/* alert icon if time query is invalid */} 64 | {alertIcon} 65 | {/* search input group */} 66 | 67 | {props.children} 68 | 69 | 70 | 71 | ); 72 | }; 73 | 74 | export default header; -------------------------------------------------------------------------------- /ui/src/Component/ListPanel/Item/Item.css: -------------------------------------------------------------------------------- 1 | .message { 2 | font-size: 16px; 3 | font-weight: 700px; 4 | padding: 0 10px; 5 | } 6 | 7 | .message > .license { 8 | color: #FF7E00 !important; 9 | } 10 | 11 | .message > .span { 12 | color: #FF7E00 !important; 13 | } 14 | -------------------------------------------------------------------------------- /ui/src/Component/ListPanel/Item/Item.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import Moment from 'moment'; 3 | 4 | import Aux from '../../../Hoc/Auxiliary/Auxiliary'; 5 | import classes from './Item.css'; 6 | 7 | /** 8 | * Details of anomalies/events thumbnail shown in the video window. 
9 | */ 10 | const item = (props) => { 11 | let item = []; 12 | if(props.list !== undefined) { 13 | if(window.location.hash === '#/garage'){ 14 | if(props.list['@timestamp'] !== null && props.list['@timestamp'] !== undefined) { 15 | item.push(Moment(props.list['@timestamp']).local().format('YY-MM-DD HH:mm:ss.SSS')); 16 | } 17 | if(props.list.license !== null && props.list.license !== undefined) { 18 | if(props.clickable){ 19 | item.push( |  props.clickPlate(props.list.license)}>{props.list.license}); 20 | } 21 | else { 22 | item.push( | {props.list.license}); 23 | } 24 | } 25 | if(props.list.description !== null && props.list.description !== undefined) { 26 | item.push( | 
{props.list.description}
); 27 | } 28 | } 29 | } 30 | 31 | return ( 32 |
33 | {item} 34 |
35 | ); 36 | }; 37 | 38 | export default item; -------------------------------------------------------------------------------- /ui/src/Component/ListPanel/ListPanel.css: -------------------------------------------------------------------------------- 1 | .alerts { 2 | height: calc(100% - 27px); 3 | overflow: hidden; 4 | margin-top: 55px; 5 | } 6 | 7 | .timestamp { 8 | font-size: 20px; 9 | margin: 0 15px; 10 | } 11 | 12 | .title { 13 | position: absolute; 14 | font-size: 40px; 15 | margin: 0 15px; 16 | left: 0; 17 | top: 0; 18 | } 19 | 20 | .live { 21 | position: absolute; 22 | font-size: 30px; 23 | margin: 10px 15px 0 15px; 24 | right: 0; 25 | top: 0; 26 | } 27 | 28 | .live > a { 29 | color: inherit; 30 | } 31 | 32 | .toggle { 33 | cursor: pointer; 34 | } 35 | 36 | .tip > .tooltip-inner { 37 | background-color: #fefefe !important; 38 | color: inherit; 39 | } 40 | 41 | .list { 42 | height: inherit; 43 | overflow: auto; 44 | } 45 | 46 | .nvidiaList { 47 | list-style: none; 48 | padding: 0; 49 | } 50 | 51 | .nvidiaList a { 52 | text-decoration: underline; 53 | color: inherit; 54 | } 55 | 56 | .nvidiaList li:before { 57 | font-family: 'Glyphicons Halflings'; 58 | content: "\2212"; 59 | vertical-align: middle; 60 | margin-right: 5px; 61 | margin-left: -20px; 62 | } 63 | 64 | .nvidiaList li { 65 | font-size: 16px; 66 | font-weight: bold; 67 | padding: 5px 5px 5px 25px; 68 | } 69 | 70 | @media (max-width: 900px) { 71 | .alerts { 72 | height: calc(100% - 17px); 73 | overflow: hidden; 74 | margin-top: 35px; 75 | } 76 | 77 | .title { 78 | font-size: 28px; 79 | } 80 | 81 | .nvidiaList li { 82 | font-size: 12px; 83 | } 84 | } -------------------------------------------------------------------------------- /ui/src/Component/ListPanel/PanelItem/Item.css: -------------------------------------------------------------------------------- 1 | .red::before { 2 | color: #FF7E00; 3 | } 4 | 5 | .red > .license { 6 | text-decoration: none !important; 7 | color: #FF7E00 !important; 8 | } 
9 | 10 | .red > .license:hover { 11 | text-decoration: underline !important; 12 | } 13 | 14 | .green::before { 15 | color: #76b900 16 | } 17 | 18 | .green > .license { 19 | text-decoration: none !important; 20 | color: #76b900 !important; 21 | } 22 | 23 | .green > .license:hover { 24 | text-decoration: underline !important; 25 | } -------------------------------------------------------------------------------- /ui/src/Component/ListPanel/PanelItem/Item.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import Moment from 'moment'; 3 | 4 | import Aux from '../../../Hoc/Auxiliary/Auxiliary'; 5 | import classes from './Item.css'; 6 | 7 | /** 8 | * Details of anomalies/events thumbnail shown in the panel window. 9 | */ 10 | const item = (props) => { 11 | let item = []; 12 | let br = window.innerWidth < 900 ?
: null; 13 | if (props.list['@timestamp'] !== null && props.list['@timestamp'] !== undefined) { 14 | item.push(Moment(props.list['@timestamp']).local().format('YY-MM-DD HH:mm:ss.SSS')); 15 | } 16 | if (props.list.license !== null && props.list.license !== undefined) { 17 | item.push( |  props.clickPlate(props.list.license)}>{props.list.license}); 18 | } 19 | if (props.list.description !== null && props.list.description !== undefined) { 20 | item.push( |{br} {props.list.description}); 21 | } 22 | if (props.list.type !== null && props.list.type !== undefined) { 23 | item.push( |{br} {props.list.type}); 24 | } 25 | if (props.list.entryVideo !== null && props.list.entryVideo !== undefined && props.list.entryVideo !== '') { 26 | item.push( |  props.click(props.list.entryVideo)}>entryVideo); 27 | } 28 | if (props.list.exitVideo !== null && props.list.exitVideo !== undefined && props.list.exitVideo !== '') { 29 | item.push( |  props.click(props.list.exitVideo)}>exitVideo); 30 | } 31 | if (props.list.videoPath !== null && props.list.videoPath !== undefined && props.list.videoPath !== '') { 32 | item.push( |  props.click(props.list.videoPath)}>video); 33 | } 34 | 35 | let li; 36 | if (props.style === 'red') { 37 | li =
  • {item}
  • ; 38 | } 39 | else if (props.style === 'green') { 40 | li =
  • {item}
  • ; 41 | } 42 | 43 | return li; 44 | }; 45 | 46 | export default item; -------------------------------------------------------------------------------- /ui/src/Component/Map/CameraMarker/CameraMarker.js: -------------------------------------------------------------------------------- 1 | import React, { PureComponent } from 'react'; 2 | import { Marker } from 'react-google-maps'; 3 | 4 | import cameraIcon from '../../../assets/Security_Camera-512.png'; 5 | 6 | /** 7 | * The icon represents the link to the video source 8 | * recorded by the entrance cameras. 9 | */ 10 | class CameraMarker extends PureComponent { 11 | render() { 12 | let icon; 13 | 14 | switch (this.props.zoom) { 15 | case 19: 16 | icon = { 17 | url: cameraIcon, 18 | scaledSize: { width: 20, height: 20 } 19 | }; 20 | break; 21 | case 20: 22 | case 21: 23 | icon = { 24 | url: cameraIcon, 25 | scaledSize: { width: 35, height: 35 } 26 | }; 27 | break; 28 | default: 29 | icon = { 30 | url: cameraIcon, 31 | scaledSize: { width: 12, height: 12 } 32 | }; 33 | break; 34 | } 35 | 36 | /* click the camera icon to show the entry/exit video. 
*/ 37 | let camera = ''; 38 | if (this.props.cameras.length !== 0) { 39 | camera = this.props.cameras.map((camera, index) => { 40 | let key = index * Math.random(10).toString(); 41 | return ( 42 | this.props.click(camera.link)} 47 | /> 48 | ); 49 | }); 50 | } 51 | return camera; 52 | } 53 | } 54 | 55 | export default CameraMarker; -------------------------------------------------------------------------------- /ui/src/Component/Map/CarMarkers/CarMarker/CarMarker.css: -------------------------------------------------------------------------------- 1 | .info { 2 | font-size: 16px; 3 | } -------------------------------------------------------------------------------- /ui/src/Component/Map/CarMarkers/CarMarkers.js: -------------------------------------------------------------------------------- 1 | import React, { Component } from 'react'; 2 | 3 | import CarMarker from './CarMarker/CarMarker'; 4 | 5 | const google = window.google; 6 | 7 | /** 8 | * CarMarkers.js manages and updates each car marker, 9 | * including its status and its marker’s style. 10 | * All car markers are located one layer above the ground map. 11 | * 12 | * Use WebSocket to receive and send messages to update the status, 13 | * i.e. parked, empty, and moving of each car marker. 14 | */ 15 | class CarMarkers extends Component { 16 | state = { 17 | cars: {} 18 | } 19 | 20 | componentDidMount() { 21 | /* if there is a websocket url. */ 22 | if (this.props.websocket.url !== '' || this.props.websocket.url !== undefined) { 23 | /* if browser supports web worker. */ 24 | if (window.Worker) { 25 | /* use web worker to finish websocket call and generate data. 
*/ 26 | this.myWorker = new Worker('/MarkersWorker.js'); 27 | 28 | let socketRequest; 29 | socketRequest = JSON.stringify({ 30 | startTimestamp: this.props.websocket.startTimestamp, 31 | garageLevel: this.props.websocket.garageLevel, 32 | garageId: this.props.websocket.garageId 33 | }); 34 | 35 | this.myWorker.postMessage([this.props.websocket.url, socketRequest, window.location.hash]); 36 | this.myWorker.onmessage = (m) => { 37 | this.setState({ cars: m.data }); 38 | } 39 | } 40 | } 41 | } 42 | 43 | shouldComponentUpdate(nextProps, nextState) { 44 | /* only update when zoom/garageLevel/bounds/markers (moving/add/remove) change */ 45 | return this.props.zoom !== nextProps.zoom || this.props.websocket.garageLevel !== nextProps.websocket.garageLevel || !this.props.bounds.equals(nextProps.bounds) || this.state.cars !== nextState.cars; 46 | } 47 | 48 | componentDidUpdate(prevProps, prevState) { 49 | /* if there is a websocket url or zoom changed, and location changed */ 50 | if ((this.props.websocket.url !== '' || this.props.websocket.url !== undefined || this.props.zoom !== prevProps.zoom) && this.props.websocket.garageLevel !== prevProps.websocket.garageLevel) { 51 | this.setState({ 52 | cars: {} 53 | }); 54 | 55 | let socketRequest; 56 | socketRequest = JSON.stringify({ 57 | startTimestamp: this.props.websocket.startTimestamp, 58 | garageLevel: this.props.websocket.garageLevel, 59 | garageId: this.props.websocket.garageId 60 | }); 61 | 62 | this.myWorker.postMessage([this.props.websocket.url, socketRequest, window.location.hash]); 63 | this.myWorker.onmessage = (m) => { 64 | this.setState({ cars: m.data }); 65 | } 66 | } 67 | } 68 | 69 | componentWillUnmount() { 70 | this.myWorker.terminate(); 71 | } 72 | 73 | render() { 74 | 75 | let carmarkers = []; 76 | let isOpen = false; 77 | Object.entries(this.state.cars).forEach(([key, value]) => { 78 | isOpen = value.state === 'moving'; 79 | if (this.props.bounds.contains(new google.maps.LatLng(value.lat, value.lon))) { 80 
| carmarkers.push( 81 | 89 | ); 90 | } 91 | }); 92 | return carmarkers; 93 | } 94 | } 95 | 96 | export default CarMarkers; -------------------------------------------------------------------------------- /ui/src/Component/Map/GroundOverlay/GroundOverlay.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { GroundOverlay } from 'react-google-maps'; 3 | 4 | 5 | const groundOverlay = (props) => { 6 | /* the bounds of the garage on the google map */ 7 | return ( 8 | 20 | ); 21 | }; 22 | 23 | export default groundOverlay; -------------------------------------------------------------------------------- /ui/src/Component/Map/LocationMarker/LocationMarker.js: -------------------------------------------------------------------------------- 1 | import React, { Component } from 'react'; 2 | import { Marker } from 'react-google-maps'; 3 | 4 | import locationIcon from '../../../assets/mm.png'; 5 | 6 | /** 7 | * The icon represents the location of the garage 8 | * on the homepage of the UI. 
9 | */ 10 | class LocationMarker extends Component { 11 | 12 | onClickHandler = (url) => { 13 | window.location.hash = "#/" + url; 14 | } 15 | 16 | render() { 17 | let icon; 18 | 19 | switch (this.props.zoom) { 20 | case 10: 21 | case 11: 22 | case 12: 23 | icon = { 24 | url: locationIcon, 25 | scaledSize: { width: 20, height: 20 } 26 | }; 27 | break; 28 | case 13: 29 | case 14: 30 | icon = { 31 | url: locationIcon, 32 | scaledSize: { width: 40, height: 40 } 33 | }; 34 | break; 35 | case 15: 36 | case 16: 37 | case 17: 38 | case 18: 39 | icon = { 40 | url: locationIcon, 41 | scaledSize: { width: 80, height: 80 } 42 | }; 43 | break; 44 | default: 45 | icon = ''; 46 | break; 47 | } 48 | 49 | let location = this.props.locations.map((location, index) => { 50 | return ( 51 | this.onClickHandler(location[Object.getOwnPropertyNames(location)[0]])} 56 | /> 57 | ); 58 | }); 59 | 60 | return location; 61 | } 62 | } 63 | 64 | export default LocationMarker; -------------------------------------------------------------------------------- /ui/src/Component/Search/Search.css: -------------------------------------------------------------------------------- 1 | .collapse { 2 | margin: 15px 0; 3 | height: 300px; 4 | width: 650px; 5 | } 6 | 7 | .input input, .collapse input { 8 | color: #fefefe; 9 | border: none; 10 | background-color: #76b900; 11 | width: 100% !important; 12 | margin-bottom: 10px; 13 | } 14 | 15 | .collapse input:disabled { 16 | background-color: #76b900; 17 | cursor: auto; 18 | } 19 | 20 | .input1 input { 21 | color: #fefefe; 22 | border: none; 23 | background-color: #76b900; 24 | border-bottom-left-radius: 0px !important; 25 | } 26 | 27 | .timespan { 28 | color: #76b900; 29 | height: 40px; 30 | text-align: center; 31 | padding-top: 5px; 32 | } 33 | 34 | .button { 35 | border: 1px solid #76b900; 36 | background-color: #76b900; 37 | border-top-left-radius: 0px !important; 38 | border-bottom-left-radius: 0px !important; 39 | opacity: 1 !important; 40 | } 41 | 42 | 
.button1 { 43 | border: 1px solid #76b900; 44 | background-color: #76b900; 45 | border-top-left-radius: 0px !important; 46 | border-bottom-left-radius: 0px !important; 47 | border-bottom-right-radius: 0px !important; 48 | opacity: 1 !important; 49 | } 50 | 51 | .buttonbetween { 52 | border: 1px solid #76b900; 53 | background-color: #76b900; 54 | border-radius: 0px !important; 55 | opacity: 1 !important; 56 | } 57 | 58 | .button:focus, .buttonbetween:focus, .button1:focus { 59 | background-color: #76b900 !important; 60 | } 61 | 62 | .button:hover, .buttonbetween:hover, .button1:hover { 63 | border: 1px solid #76b900 !important; 64 | background-color: #76b900 !important; 65 | opacity: 1 !important; 66 | } 67 | 68 | .glyphicon { 69 | color: rgba(255,255,255,0.6); 70 | } 71 | 72 | @media (max-width: 900px) { 73 | .collapse { 74 | width: 100%; 75 | height: auto; 76 | } 77 | } -------------------------------------------------------------------------------- /ui/src/Component/UI/Button/Button.css: -------------------------------------------------------------------------------- 1 | .default { 2 | color: #343131; 3 | border: 1px solid #76b900; 4 | } 5 | 6 | .default:hover { 7 | background-color: #9dd800; 8 | color: #fefefe; 9 | } 10 | 11 | .primary { 12 | background-color: #76b900; 13 | color: #fefefe; 14 | border: 1px solid #76b900; 15 | } 16 | 17 | .primary:hover { 18 | background-color: #9dd800; 19 | color: #343131; 20 | } 21 | 22 | .danger { 23 | background-color: #fb0a02; 24 | color: #fefefe; 25 | } 26 | 27 | .danger:hover { 28 | background-color: #ff3f38; 29 | } 30 | -------------------------------------------------------------------------------- /ui/src/Component/UI/Button/Button.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Button } from 'react-bootstrap'; 3 | 4 | import classes from './Button.css'; 5 | 6 | 7 | const button = ({bsStyle, bsSize, ...props}) => { 8 | let btnStyle; 9 | 
switch(bsStyle) { 10 | case 'primary': 11 | btnStyle = classes.primary; 12 | break; 13 | case 'danger': 14 | btnStyle = classes.danger; 15 | break; 16 | case undefined: 17 | case 'default': 18 | default: 19 | btnStyle = classes.default; 20 | break; 21 | } 22 | return ( 23 | 30 | ); 31 | }; 32 | 33 | export default button; -------------------------------------------------------------------------------- /ui/src/Component/UI/Input/Input.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { FormGroup, ControlLabel, FormControl, Checkbox, Col, HelpBlock } from 'react-bootstrap'; 3 | 4 | 5 | const input = ({id, label, change, type, value, help}) => { 6 | let inputCol; 7 | switch(type) { 8 | case 'checkbox': 9 | inputCol = change(id, e.target.checked, e.target.validity.valid, 'input')} />; 10 | break; 11 | case 'number': 12 | inputCol = change(id, e.target.value, e.target.validity.valid, 'input')} />; 13 | break; 14 | default: 15 | inputCol = change(id, e.target.value, e.target.validity.valid, 'input')} />; 16 | } 17 | 18 | return ( 19 | 20 | {label} 21 | 22 | {inputCol} 23 | {help && {help}} 24 | 25 | 26 | ); 27 | }; 28 | 29 | export default input; -------------------------------------------------------------------------------- /ui/src/Component/UI/Loader/Loader.css: -------------------------------------------------------------------------------- 1 | .loader { 2 | border: 16px solid #f3f3f3; 3 | border-radius: 50%; 4 | border-top: 16px solid #76b900; 5 | width: 120px; 6 | height: 120px; 7 | -webkit-animation: spin 2s linear infinite; /* Safari */ 8 | animation: spin 2s linear infinite; 9 | } 10 | 11 | /* Safari */ 12 | @-webkit-keyframes spin { 13 | 0% { -webkit-transform: rotate(0deg); } 14 | 100% { -webkit-transform: rotate(360deg); } 15 | } 16 | 17 | @keyframes spin { 18 | 0% { transform: rotate(0deg); } 19 | 100% { transform: rotate(360deg); } 20 | } 
-------------------------------------------------------------------------------- /ui/src/Component/UI/Loader/Loader.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | 3 | import classes from './Loader.css'; 4 | 5 | /** 6 | * Go to Loader.css to customize the loader for the browser. 7 | */ 8 | const loader = (props) => { 9 | return ( 10 |
    11 | 12 |
    13 | ); 14 | }; 15 | 16 | export default loader; -------------------------------------------------------------------------------- /ui/src/Component/Video/StreamVideo/HLS.js: -------------------------------------------------------------------------------- 1 | import React, { Component } from 'react'; 2 | import Hls from 'hls.js'; 3 | 4 | 5 | export default class HLSSource extends Component { 6 | constructor(props, context) { 7 | super(props, context); 8 | this.hls = new Hls(); 9 | } 10 | 11 | componentDidMount() { 12 | /* 13 | * `src` is the property get from this component; 14 | * `video` is the property insert from `Video` component; 15 | * `video` is the html5 video element 16 | */ 17 | const { src, video } = this.props; 18 | /* load hls video source base on hls.js. */ 19 | if (Hls.isSupported()) { 20 | this.hls.loadSource(src); 21 | this.hls.attachMedia(video); 22 | this.hls.on(Hls.Events.MANIFEST_PARSED, () => { 23 | video.play(); 24 | }); 25 | } 26 | } 27 | 28 | componentDidUpdate(prevProps) { 29 | const { src, video } = this.props; 30 | if (prevProps.src !== this.props.src) { 31 | if (Hls.isSupported()) { 32 | this.hls.loadSource(src); 33 | this.hls.attachMedia(video); 34 | this.hls.on(Hls.Events.MANIFEST_PARSED, () => { 35 | video.play(); 36 | }); 37 | } 38 | } 39 | } 40 | 41 | render() { 42 | return ( 43 | 47 | ); 48 | } 49 | 50 | } -------------------------------------------------------------------------------- /ui/src/Component/Video/StreamVideo/StreamPlayer.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Player, BigPlayButton } from 'video-react'; 3 | import HLSSource from './HLS'; 4 | 5 | /** 6 | * Add customized HLSSource component into video-react player. 7 | * The component with `isVideoChild` attribute will be added into `Video` component. 
8 | * Please use this url if you test it from local: 9 | * http://www.streambox.fr/playlists/x36xhzz/x36xhzz.m3u8 10 | */ 11 | export default (props) => { 12 | return ( 13 | 14 | 18 | 19 | 20 | ); 21 | }; -------------------------------------------------------------------------------- /ui/src/Component/Video/Video.css: -------------------------------------------------------------------------------- 1 | .close { 2 | font-size: 30px; 3 | } 4 | -------------------------------------------------------------------------------- /ui/src/Component/Video/Video.js: -------------------------------------------------------------------------------- 1 | import React, { Component } from 'react'; 2 | 3 | import classes from './Video.css'; 4 | 5 | import Item from '../ListPanel/Item/Item'; 6 | import StreamVideo from './StreamVideo/StreamPlayer'; 7 | 8 | /** 9 | * Import videos from four different sources, i.e. 10 | * 1) Camera icon on the map; 11 | * 2) Video link in the panel window; 12 | * 3) Pre-recorded video in the playback mode; 13 | * 4) RTSP video stream in the live mode. 14 | */ 15 | class Video extends Component { 16 | state = { 17 | pause: false 18 | } 19 | 20 | onClickHandler = () => { 21 | if (this.state.pause) { 22 | this.videoplayer.pause(); 23 | } 24 | else { 25 | this.videoplayer.play(); 26 | } 27 | this.setState({ pause: !this.state.pause }); 28 | } 29 | 30 | componentDidUpdate() { 31 | if (this.props.seconds !== undefined && this.videoplayer !== undefined) { 32 | this.videoplayer.currentTime = this.props.seconds; 33 | } 34 | } 35 | 36 | render() { 37 | let player, message; 38 | let link; 39 | /* video object comes from alerts/events panel. 
*/ 40 | if (this.props.video !== undefined && typeof (this.props.video) === 'object' && ((this.props.video.entryVideo !== null && this.props.video.entryVideo !== undefined && this.props.video.entryVideo !== '') || (this.props.video.exitVideo !== null && this.props.video.exitVideo !== undefined && this.props.video.exitVideo !== '') || (this.props.video.videoPath !== null && this.props.video.videoPath !== undefined && this.props.video.videoPath !== '') || (this.props.video.clip !== null && this.props.video.clip !== undefined && this.props.video.clip !== ''))) { 41 | link = this.props.video.entryVideo || this.props.video.exitVideo || this.props.video.videoPath || this.props.video.clip; 42 | } 43 | /* video link comes from map(camera) */ 44 | else if (this.props.video !== undefined && typeof (this.props.video) === 'string' && this.props.video !== '' && this.props.video !== null) { 45 | link = this.props.video; 46 | } 47 | else { 48 | link = this.props.defaultLink; 49 | } 50 | 51 | let width = window.innerWidth < 900 ? this.props.width / 2 : this.props.width; 52 | let height = window.innerWidth < 900 ? this.props.height * 0.4 : this.props.height; 53 | 54 | /* if it is pre-recorded video */ 55 | if (link.endsWith('mp4') || link.endsWith('webm') || link.endsWith('ogv')) { 56 | if (link.endsWith('clip5min_smaller.mp4')) { 57 | player = ( 58 |