├── .gitignore ├── LICENSE ├── README.md ├── config.json ├── doc └── images │ ├── social-distance-flow.png │ └── social-distance.png ├── libs ├── __init__.py ├── draw.py ├── geodist.py ├── geometric.py ├── person_trackers.py └── validate.py ├── requirements.txt └── socialdistance.py /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__/ 2 | *.py[cod] 3 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2020, Intel Corporation 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DISCONTINUATION OF PROJECT # 2 | This project will no longer be maintained by Intel. 3 | This project has been identified as having known security escapes. 4 | Intel has ceased development and contributions including, but not limited to, maintenance, bug fixes, new releases, or updates, to this project. 5 | Intel no longer accepts patches to this project. 6 | # Retail Pandemic Reference Implementation - Social Distance 7 | 8 | | Details | | 9 | |-----------------------|---------------| 10 | | Target OS: | Ubuntu\* 18.04 LTS | 11 | | Programming Language: | Python* 3.5 | 12 | | Time to Complete: | 1 hour | 13 | 14 | ![socialdistance](./doc/images/social-distance.png) 15 | 16 | ## What it does 17 | 18 | This reference implementation showcases a retail social distance application that detects people and measures the distance between them. If this distance is less than a value previously provided by the user, then an alert is triggered. 
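In practice the check boils down to comparing each pair of detected people against a minimum-distance threshold. The sketch below shows only that decision rule on plain pixel coordinates; the `pairwise_alerts` helper and the threshold value are hypothetical, while the real application corrects for perspective in `libs/geodist.py`.

```python
# Illustrative only: the project's actual distance logic lives in libs/geodist.py
# and accounts for camera perspective; this shows just the thresholding idea.
from itertools import combinations
from math import hypot

def pairwise_alerts(centroids, min_distance):
    """Return index pairs of people standing closer than min_distance (pixels)."""
    flagged = []
    for (i, p1), (j, p2) in combinations(enumerate(centroids), 2):
        if hypot(p1[0] - p2[0], p1[1] - p2[1]) < min_distance:
            flagged.append((i, j))
    return flagged

print(pairwise_alerts([(100, 400), (140, 410), (600, 420)], min_distance=90))  # [(0, 1)]
```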
19 | 20 | ## Requirements 21 | 22 | ### Hardware 23 | 24 | * 6th generation or newer Intel® Core™ or Intel® Xeon® processor, with 8 GB of RAM 25 | 26 | ### Software 27 | 28 | * [Ubuntu 18.04](http://releases.ubuntu.com/18.04/) 29 | 30 | * [Intel® Distribution of OpenVINO™ toolkit 2020.3 Release](https://software.intel.com/content/www/us/en/develop/tools/openvino-toolkit.html) 31 | 32 | ## How it works 33 | 34 | The application uses the Inference Engine and the Model Downloader included in the Intel® Distribution of OpenVINO™ toolkit and performs the following steps: 35 | 36 | 1. Ingests video from a file, processing it frame by frame. 37 | 2. Detects people in the frame of interest using a DNN model. 38 | 3. Extracts features from the detected people to track them using a second DNN model. 39 | 4. Calculates the distance between detected people based on each person’s location, size and perspective, to determine whether the minimum social distancing threshold is violated. 40 | 41 | The DNN models are optimized for Intel® architecture and are included with the Intel® Distribution of OpenVINO™ toolkit. 42 | 43 | ![architecture image](./doc/images/social-distance-flow.png) 44 | 45 | ## Setup 46 | 47 | ### Get the code 48 | 49 | Clone the reference implementation: 50 | 51 | ```bash 52 | sudo apt-get update && sudo apt-get install git 53 | git clone https://github.com/intel-iot-devkit/social-distance.git 54 | ``` 55 | 56 | ### Install Python requirements 57 | 58 | ```bash 59 | pip3 install -r requirements.txt 60 | ``` 61 | 62 | ### Install Intel® Distribution of OpenVINO™ Toolkit 63 | 64 | Refer to https://software.intel.com/en-us/articles/OpenVINO-Install-Linux for more information about how to install and set up the Intel® Distribution of OpenVINO™ toolkit. 65 | 66 | ### Installing the requirements 67 | 68 | To install the dependencies of the Reference Implementation, run the following commands: 69 | 70 | ```bash 71 | cd social-distance 72 | pip3 install -r requirements.txt 73 | ``` 74 | 75 | ### Which model to use 76 | 77 | This application uses the [person-detection-retail-0013](https://docs.openvinotoolkit.org/2020.3/_models_intel_person_detection_retail_0013_description_person_detection_retail_0013.html) and [person-reidentification-retail-0300](https://docs.openvinotoolkit.org/2020.3/_models_intel_person_reidentification_retail_0300_description_person_reidentification_retail_0300.html) Intel® pre-trained models, which can be downloaded using the **model downloader**. The **model downloader** downloads the __.xml__ and __.bin__ files that are used by the application. 78 | 79 | To download the Intel® pre-trained models, run the following commands: 80 | 81 | ```bash 82 | mkdir models 83 | cd models 84 | python3 /opt/intel/openvino/deployment_tools/open_model_zoo/tools/downloader/downloader.py --name person-detection-retail-0013 --precisions FP32 85 | python3 /opt/intel/openvino/deployment_tools/open_model_zoo/tools/downloader/downloader.py --name person-reidentification-retail-0300 --precisions FP32 86 | ``` 87 | 88 | The models will be downloaded inside the following directories: 89 | 90 | ```bash 91 | - models/intel/person-detection-retail-0013/FP32/ 92 | - models/intel/person-reidentification-retail-0300/FP32/ 93 | ``` 94 | 95 | ### The Config File 96 | 97 | The _config.json_ file contains the paths to the video and models that will be used by the application. 98 | 99 | The _config.json_ file consists of name/value pairs.
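At startup the application loads this file and checks it against a JSON schema (see `libs/validate.py`), so a missing or misnamed key fails fast instead of causing a confusing runtime error. A minimal sketch of that load-and-validate step, mirroring the beginning of `socialdistance.py`:

```python
# Sketch of the config handling done in socialdistance.py.
import json
from libs.validate import validate  # jsonschema-based check of the required keys

with open("config.json") as f:
    cfg = json.load(f)
validate(cfg)  # raises jsonschema.exceptions.ValidationError on a bad config
video_path = cfg.get("video")
coords = cfg.get("coords")  # region of interest, given as percentages of the frame
```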
100 | 101 | Example of the _config.json_ file: 102 | 103 | ```json 104 | { 105 | "video": "path/to/video/myvideo.mp4", 106 | "pedestrian_model_weights": "models/intel/person-detection-retail-0013/FP32/person-detection-retail-0013.bin", 107 | "pedestrian_model_description": "models/intel/person-detection-retail-0013/FP32/person-detection-retail-0013.xml", 108 | "reidentification_model_weights": "models/intel/person-reidentification-retail-0300/FP32/person-reidentification-retail-0300.bin", 109 | "reidentification_model_description": "models/intel/person-reidentification-retail-0300/FP32/person-reidentification-retail-0300.xml", 110 | "coords": [[0, 0], [0, 100], [100, 100], [100, 0]] 111 | } 112 | ``` 113 | 114 | ### Which input video to use 115 | 116 | The application works with any input video format supported by [OpenCV](https://opencv.org/). Find sample videos [here](https://github.com/intel-iot-devkit/sample-videos/). 117 | 118 | Sample videos may also be available on platforms such as https://videos.pexels.com (e.g., https://www.pexels.com/video/black-and-white-video-of-people-853889/), subject to the terms and conditions for use of the applicable platform. Intel does not endorse the use of, or grant any rights to, any particular platform, video, or data. 119 | 120 | To use a video, specify its path in the _config.json_ file. 121 | 122 | ## Setup the environment 123 | 124 | You must configure the environment to use the Intel® Distribution of OpenVINO™ toolkit once per session by running the following command: 125 | 126 | ```bash 127 | source /opt/intel/openvino/bin/setupvars.sh -pyver 3.5 128 | ``` 129 | 130 | __Note__: This command needs to be executed only once in the terminal where the application will be executed. If the terminal is closed, the command needs to be executed again. 131 | 132 | ## Run the application 133 | 134 | Change the current directory to the project location on your system: 135 | 136 | ```bash 137 | cd social-distance 138 | ``` 139 | 140 | Run the Python script.
141 | 142 | ```bash 143 | python3 socialdistance.py 144 | ``` -------------------------------------------------------------------------------- /config.json: -------------------------------------------------------------------------------- 1 | { 2 | "video": "path/to/video/myvideo.mp4", 3 | "pedestrian_model_weights": "models/intel/person-detection-retail-0013/FP32/person-detection-retail-0013.bin", 4 | "pedestrian_model_description": "models/intel/person-detection-retail-0013/FP32/person-detection-retail-0013.xml", 5 | "reidentification_model_weights": "models/intel/person-reidentification-retail-0300/FP32/person-reidentification-retail-0300.bin", 6 | "reidentification_model_description": "models/intel/person-reidentification-retail-0300/FP32/person-reidentification-retail-0300.xml", 7 | "coords": [[0, 0], [0, 100], [100, 100], [100, 0]] 8 | } 9 | -------------------------------------------------------------------------------- /doc/images/social-distance-flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/intel-iot-devkit/social-distance/6dfb36a6538e3a7e66ff37ec6ec8b15ad3781cce/doc/images/social-distance-flow.png -------------------------------------------------------------------------------- /doc/images/social-distance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/intel-iot-devkit/social-distance/6dfb36a6538e3a7e66ff37ec6ec8b15ad3781cce/doc/images/social-distance.png -------------------------------------------------------------------------------- /libs/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/intel-iot-devkit/social-distance/6dfb36a6538e3a7e66ff37ec6ec8b15ad3781cce/libs/__init__.py -------------------------------------------------------------------------------- /libs/draw.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright (C) 2020 Intel Corporation 3 | 4 | SPDX-License-Identifier: BSD-3-Clause 5 | """ 6 | 7 | import cv2 8 | 9 | # BGR RGB 10 | COLOR = {"yellow": (0, 255, 255), 11 | "white": (255, 255, 255), 12 | "black": (0, 0, 0), 13 | "red": (0, 0, 255), 14 | "green": (0, 128, 0), 15 | "blue": (0, 0, 255), 16 | "grey": (127, 127, 127), 17 | "orange": (0, 128, 255), 18 | "pink": (203, 192, 255), 19 | "magenta": (255, 0, 255), 20 | "green2": (154, 250, 0) 21 | } 22 | 23 | 24 | class Draw: 25 | @staticmethod 26 | def line(frame, coords, color="yellow", thickness=4): 27 | xmin, ymin, xmax, ymax = coords 28 | cv2.line(frame, (xmin, ymin), (xmax, ymax), COLOR[color], thickness) 29 | 30 | @staticmethod 31 | def rectangle(frame, coords, color="yellow", thickness=2): 32 | xmin, ymin, xmax, ymax = coords 33 | cv2.rectangle(frame, (xmin, ymin), (xmax, ymax), COLOR[color], thickness) 34 | 35 | @staticmethod 36 | def circle(frame, center, radius, color, thickness=1): 37 | cv2.circle(frame, center, radius, COLOR[color], thickness, lineType=8, shift=0) 38 | 39 | @staticmethod 40 | def point(frame, center, color): 41 | cv2.circle(frame, center, 4, COLOR[color], -1) 42 | 43 | @staticmethod 44 | def data(frame, data): 45 | for (i, (k, v)) in enumerate(data.items()): 46 | text = "{}: {}".format(k, v) 47 | cv2.putText(frame, text, (10, ((i * 20) + 20)), 48 | cv2.FONT_HERSHEY_SIMPLEX, 0.6, COLOR["red"], 2) 49 | -------------------------------------------------------------------------------- /libs/geodist.py: 
-------------------------------------------------------------------------------- 1 | """ 2 | Copyright (C) 2020 Intel Corporation 3 | 4 | SPDX-License-Identifier: BSD-3-Clause 5 | """ 6 | 7 | from shapely.geometry import LineString, Point, Polygon 8 | import math 9 | 10 | def social_distance(frame_shape, a, b, c, d, min_iter=3, min_w=None, max_w=None): 11 | 12 | h, w = frame_shape 13 | A, B, C, D = Point(a), Point(b), Point(c), Point(d) 14 | AB = get_line(A, B) 15 | CD = get_line(C, D) 16 | 17 | COEF = 1 18 | 19 | minx = A.x if A.x < C.x else C.x 20 | maxx = B.x if B.x > D.x else D.x 21 | if min_w * 1.8 <= max_w and minx <= w * .1: 22 | return {"euclidean": True, "alert": False, "distance": 0} 23 | 24 | in_border = True if minx < w * .3 or maxx > w - (w * .3) else False 25 | thr = .1 if in_border else .01 26 | if abs(CD.length - AB.length) <= thr or abs(C.y - A.y) <= h*.01: 27 | p = ((CD.length + AB.length / 2) - min_w) / max_w 28 | if p < .3: 29 | COEF = 1.0 + (1 - p) 30 | 31 | result = euclidean_distance(AB, CD, min_iter, COEF) 32 | return result 33 | 34 | # Slope and y-intercept of the lines BD and AC 35 | bd_a, bd_k = get_line_component(B, D) 36 | ac_a, ac_k = get_line_component(A, C) 37 | 38 | bdinf = -9999999999 if B.x < D.x else 9999999999 39 | BDinf = get_line(D, Point(bdinf, get_y(bdinf, bd_a, bd_k))) 40 | 41 | acinf = -9999999999 if A.x < C.x else 9999999999 42 | ACinf = get_line(C, Point(acinf, get_y(acinf, ac_a, ac_k))) 43 | # Vanishing point 44 | PF = BDinf.intersection(ACinf) 45 | euclidean = False 46 | try: 47 | inter = list(PF.coords) # raises if there is no single intersection point 48 | except Exception as e: 49 | euclidean = True 50 | 51 | if euclidean or not list(PF.coords): 52 | p = ((CD.length + AB.length / 2) - min_w) / max_w 53 | if p < .3: 54 | COEF = 1.0 + (1 - p) 55 | 56 | result = euclidean_distance(AB, CD, min_iter, COEF) 57 | return result 58 | 59 | E = Point(get_x(h, ac_a, ac_k), h) 60 | F = Point(get_x(h, bd_a, bd_k), h) 61 | 62 | init_iter = 1 63 | if E.y - C.y < 1: 64 | if bdinf > 0: 65 | EPF = get_line(E, PF) 66 | new_c = cut(EPF, CD.length)[0] 67 | new_c = list(new_c.coords) 68 | new_c = new_c[1] 69 | if A.y < new_c[1]: 70 | init_iter += 1 71 | C = Point(new_c[0], new_c[1]) 72 | 73 | else: 74 | return {"euclidean": False, "alert": False, "iterations": init_iter} 75 | 76 | else: 77 | FPF = get_line(F, PF) 78 | new_d = cut(FPF, CD.length)[0] 79 | new_d = list(new_d.coords) 80 | new_d = new_d[1] 81 | if B.y < new_d[1]: 82 | init_iter += 1 83 | D = Point(new_d[0], new_d[1]) 84 | 85 | else: 86 | return {"euclidean": False, "alert": False, "iterations": init_iter} 87 | 88 | if bdinf > 0: 89 | try: 90 | Z = get_line(F, PF) 91 | except Exception: 92 | print(list(F.coords)) 93 | print(list(PF.coords)) 94 | raise 95 | frac = Z.length / 2 96 | l1, l2 = cut(Z, frac) 97 | if l2.contains(B): 98 | med = Point(list(l2.coords)[0]) 99 | med_b = get_line(med, B).length 100 | med_pf = get_line(med, PF).length 101 | dist = med_b / med_pf 102 | COEF = math.exp(1 + dist) 103 | 104 | else: 105 | Z = get_line(E, PF) 106 | frac = Z.length / 2 107 | 108 | l1, l2 = cut(Z, frac) 109 | if l2.contains(A): 110 | med = Point(list(l2.coords)[0]) 111 | med_a = get_line(med, A).length 112 | med_pf = get_line(med, PF).length 113 | dist = med_a / med_pf 114 | COEF = math.exp(1 + dist) 115 | 116 | 117 | 118 | cnt = get_distance(PF, E, F, C, D, A, B, bdinf, init_iter, COEF) 119 | alert = False if cnt >= min_iter else True 120 | result = {"euclidean": False, "alert": alert, "iterations": cnt} 121 | return result 122 | 123 |
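For orientation, `socialdistance.py` calls this function once per pair of tracked people, passing the bottom edges of their bounding boxes together with the smallest and largest box widths seen so far. A usage sketch with made-up coordinates (the numbers are illustrative, not taken from the project):

```python
# Usage sketch: a, b are the bottom corners of the farther (higher) box,
# c, d the bottom corners of the closer (lower) box.
from libs.geodist import social_distance

frame_shape = (540, 960)                       # (height, width) of the frame
a, b = (300, 250), (360, 250)                  # person 1: bottom-left, bottom-right
c, d = (400, 400), (480, 400)                  # person 2: bottom-left, bottom-right
result = social_distance(frame_shape, a, b, c, d,
                         min_iter=4,           # the app uses 4 iterations ("~5 feet")
                         min_w=55, max_w=85)   # smallest/largest bottom-edge length seen
# result always contains "euclidean" and "alert", plus either "distance"
# (euclidean fallback) or "iterations" (perspective-based count).
print(result["alert"])  # True for this sample layout
```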
124 | def get_line_component(p1, p2): 125 | run = (p2.x - p1.x) * 1.0 126 | rise = (p2.y - p1.y) * 1.0 127 | try: 128 | a = rise/run 129 | except ZeroDivisionError: 130 | a = rise/0.0000001 131 | 132 | # y-intercept k 133 | # k = y - (a * x) 134 | k = p1.y - (a * p1.x) 135 | return a, k 136 | 137 | 138 | def get_line(A, B): 139 | return LineString([A, B]) 140 | 141 | 142 | def get_x(y, a, k): 143 | return (y - k) / a 144 | 145 | 146 | def get_y(x, a, k): 147 | return a * x + k 148 | 149 | 150 | def cut(line, distance): 151 | # Cuts a line in two at a distance from its starting point 152 | if distance <= 0.0 or distance >= line.length: 153 | return [LineString(line)] 154 | coords = list(line.coords) 155 | for i, p in enumerate(coords): 156 | pd = line.project(Point(p)) 157 | if pd == distance: 158 | return [ 159 | LineString(coords[:i+1]), 160 | LineString(coords[i:])] 161 | if pd > distance: 162 | cp = line.interpolate(distance) 163 | return [ 164 | LineString(coords[:i] + [(cp.x, cp.y)]), 165 | LineString([(cp.x, cp.y)] + coords[i:])] 166 | 167 | 168 | def get_distance(PF, E, F, Za, Zb, Zlimit_a, Zlimit_b, inf1_dir, init_iter=1, coef=1): 169 | _inf = inf1_dir 170 | inf = inf1_dir * -1 171 | Za_over_Zlimit = False 172 | PFE = get_line(PF, E) 173 | PFF = get_line(PF, F) 174 | EF = get_line(E, F) 175 | len_EF = EF.length 176 | cnt = init_iter 177 | 178 | while not Za_over_Zlimit: 179 | if inf > 0: 180 | # inf = 9999 181 | # _inf = -9999 182 | 183 | len_ZbF = get_line(Zb, F).length 184 | F_proj = Point(F.x, F.y + len_ZbF) 185 | E_proj = Point(E.x, F.y + len_ZbF + len_EF) 186 | 187 | proj_a, proj_k = get_line_component(E_proj, F_proj) 188 | AUX = Point(get_x(F.y, proj_a, proj_k), F.y) 189 | 190 | aux_a, aux_k = get_line_component(Zb, AUX) 191 | AUXinf = get_line(Point(_inf, get_y(_inf, aux_a, aux_k)), AUX) 192 | Zaux_a = PFE.intersection(AUXinf) 193 | 194 | if Zaux_a.y < Zlimit_a.y: 195 | Za_over_Zlimit = True 196 | else: 197 | cnt += 1 198 | Za = Zaux_a 199 | Zb_aux = get_line(Za, Point(inf, Za.y)) 200 | Zb = PFF.intersection(Zb_aux) 201 | else: 202 | # inf = - 9999 203 | # _inf = 9999 204 | len_ZaE = get_line(Za, E).length 205 | F_proj = Point(F.x, F.y + len_ZaE + len_EF) 206 | E_proj = Point(E.x, F.y + len_ZaE) 207 | 208 | proj_a, proj_k = get_line_component(E_proj, F_proj) 209 | AUX = Point(get_x(E.y, proj_a, proj_k), E.y) 210 | 211 | aux_a, aux_k = get_line_component(Za, AUX) 212 | AUXinf = get_line(Point(_inf, get_y(_inf, aux_a, aux_k)), AUX) 213 | Zaux_b = PFF.intersection(AUXinf) 214 | 215 | if Zaux_b.y < Zlimit_b.y: 216 | Za_over_Zlimit = True 217 | else: 218 | cnt += 1 219 | Zb = Zaux_b 220 | Za_aux = get_line(Zb, Point(inf, Zb.y)) 221 | Za = PFE.intersection(Za_aux) 222 | cnt = cnt * coef 223 | return cnt 224 | 225 | 226 | def get_crop(a, b): 227 | axmin, aymin, axmax, aymax = a 228 | bxmin, bymin, bxmax, bymax = b 229 | 230 | cxmin = axmin if axmin < bxmin else bxmin 231 | cymin = aymin if aymin < bymin else bymin 232 | cxmax = axmax if axmax > bxmax else bxmax 233 | cymax = aymax if aymax > bymax else bymax 234 | 235 | return cxmin, cymin, cxmax, cymax 236 | 237 | 238 | def euclidean_distance(AB, CD, min_iter, coef): 239 | Z1 = AB.centroid 240 | Z2 = CD.centroid 241 | min_dist = (AB.length * min_iter) * .8 242 | distance = get_line(Z1, Z2).length * coef 243 | if distance < AB.length: 244 | return {"euclidean": True, "alert": False, "distance": distance} 245 | 246 | alert = True if distance < min_dist else False 247 | return {"euclidean": True, "alert": alert, "distance": distance} 248 |
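When no usable vanishing point is found, the module falls back to `euclidean_distance` above: the centroid-to-centroid distance between the two bottom edges (scaled by `coef`) is compared against 80% of `min_iter` box-widths. A small worked example of that rule with illustrative numbers:

```python
# Worked example of the euclidean fallback (numbers are illustrative).
from shapely.geometry import LineString
from libs.geodist import euclidean_distance

AB = LineString([(300, 250), (360, 250)])   # person 1 bottom edge, length 60 px
CD = LineString([(400, 400), (480, 400)])   # person 2 bottom edge, length 80 px
result = euclidean_distance(AB, CD, min_iter=4, coef=1)
# centroid distance = sqrt(110**2 + 150**2) ~= 186 px
# min_dist = 60 * 4 * 0.8 = 192 px, and 186 < 192, so the pair is flagged
print(result)  # {'euclidean': True, 'alert': True, 'distance': ~186}
```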
249 | 250 | -------------------------------------------------------------------------------- /libs/geometric.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright (C) 2020 Intel Corporation 3 | 4 | SPDX-License-Identifier: BSD-3-Clause 5 | """ 6 | 7 | from shapely.geometry import LineString, Point, Polygon 8 | from collections import deque 9 | 10 | 11 | def get_polygon(point_list): 12 | return Polygon(point_list) 13 | 14 | 15 | def get_line(data): 16 | return LineString(data) 17 | 18 | 19 | def get_point(data): 20 | return Point(data) 21 | 22 | 23 | def get_distance(l, p): 24 | return p.distance(l) 25 | 26 | 27 | def get_x(y , a, k): 28 | return (y - k) / a 29 | 30 | 31 | def get_y(x, a, k): 32 | return a * x + k 33 | 34 | 35 | -------------------------------------------------------------------------------- /libs/person_trackers.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright (C) 2020 Intel Corporation 3 | 4 | SPDX-License-Identifier: BSD-3-Clause 5 | """ 6 | 7 | from sklearn.metrics.pairwise import cosine_similarity 8 | from collections import deque, OrderedDict 9 | 10 | 11 | class TrackableObject: 12 | def __init__(self, bbox, reid, centroid): 13 | self.bbox = bbox 14 | self.reid = reid 15 | self.centroids = [] 16 | self.centroids.append(centroid) 17 | self.updated = False 18 | 19 | 20 | class PersonTrackers(object): 21 | def __init__(self, trackers): 22 | self.trackers = trackers 23 | self.dissapeared = OrderedDict() 24 | self.trackId_generator = 0 25 | self.similarity_threshold = 0.7 26 | self.max_disappeared = 10 27 | 28 | def similarity(self, trackers): 29 | sim = deque() 30 | if len(self.trackers) > 0: 31 | trackers_number = len(trackers) 32 | track_copy = self.trackers.items() 33 | if trackers_number == 0: 34 | trackers_copy = self.trackers.copy() 35 | for trackerId, data in trackers_copy.items(): 36 | self.dissapeared[trackerId] += 1 37 | if self.dissapeared[trackerId] > self.max_disappeared: 38 | del self.trackers[trackerId] 39 | del self.dissapeared[trackerId] 40 | 41 | else: 42 | for tracker in trackers: 43 | for trackerId, data in track_copy: 44 | try: 45 | cosine = cosine_similarity(tracker.reid, data.reid) 46 | except ValueError as e: 47 | print(e) 48 | continue 49 | if cosine > self.similarity_threshold: 50 | sim.append([trackerId, cosine[0][0]]) 51 | if sim: 52 | max_similarity = self.get_max_similarity(sim) 53 | if max_similarity is None: 54 | continue 55 | self.trackers[max_similarity].reid = tracker.reid 56 | self.trackers[max_similarity].bbox = tracker.bbox 57 | self.trackers[max_similarity].centroids.append(tracker.centroids[0]) 58 | self.dissapeared[max_similarity] = 0 59 | self.trackers[max_similarity].updated = True 60 | else: 61 | self.trackers.update({self.trackId_generator: tracker}) 62 | self.trackers[self.trackId_generator].updated = True 63 | self.dissapeared.update({self.trackId_generator: 0}) 64 | self.trackId_generator += 1 65 | 66 | if trackers_number <= len(track_copy): 67 | trackers_copy = self.trackers.copy() 68 | for trackerId, data in trackers_copy.items(): 69 | if not data.updated: 70 | self.dissapeared[trackerId] += 1 71 | if self.dissapeared[trackerId] > self.max_disappeared: 72 | del self.trackers[trackerId] 73 | del self.dissapeared[trackerId] 74 | continue 75 | self.trackers[trackerId].updated = False 76 | else: 77 | self.register(trackers) 78 | 79 | def register(self, trackers): 80 | for tracker in trackers: 81 | 
self.trackers.update({self.trackId_generator: tracker}) 82 | self.dissapeared.update({self.trackId_generator: 0}) 83 | self.trackId_generator += 1 84 | 85 | def get_max_similarity(self, simil_list): 86 | def take_second(cosine): 87 | return cosine[1] 88 | simil = sorted(simil_list, key=take_second, reverse=True) 89 | for sim in simil: 90 | if not self.trackers[sim[0]].updated: 91 | return sim[0] 92 | return None 93 | 94 | def clear(self): 95 | self.trackers.clear() 96 | self.trackId_generator = 0 97 | 98 | -------------------------------------------------------------------------------- /libs/validate.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright (C) 2020 Intel Corporation 3 | 4 | SPDX-License-Identifier: BSD-3-Clause 5 | """ 6 | 7 | from jsonschema import validate as json_validate 8 | 9 | schema = {"type": "object", 10 | "required": [ 11 | "coords", 12 | "video", 13 | "pedestrian_model_weights", 14 | "pedestrian_model_description", 15 | "reidentification_model_weights", 16 | "reidentification_model_description" 17 | ], 18 | "additionalProperties": False, 19 | "properties": { 20 | "coords": { 21 | "type": "array", 22 | "items": { 23 | "type": "array", 24 | "items": { 25 | "type": "number" 26 | 27 | }, 28 | "minItems": 2, 29 | "maxItems": 2 30 | }, 31 | "minItems": 4, 32 | "maxItems": 4 33 | }, 34 | "video": {"type": "string", "maxLength": 150}, 35 | "pedestrian_model_weights": {"type": "string", "maxLength": 250}, 36 | "pedestrian_model_description": {"type": "string", "maxLength": 250}, 37 | "reidentification_model_weights": {"type": "string", "maxLength": 250}, 38 | "reidentification_model_description": {"type": "string", "maxLength": 250} 39 | } 40 | } 41 | 42 | 43 | def validate(datadict): 44 | return json_validate(instance=datadict, schema=schema) -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | jsonschema==3.2.0 2 | Shapely==1.7.0 3 | scikit-learn==0.23.1 4 | numpy==1.18.2 5 | -------------------------------------------------------------------------------- /socialdistance.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright (C) 2020 Intel Corporation 3 | 4 | SPDX-License-Identifier: BSD-3-Clause 5 | """ 6 | 7 | import json 8 | from collections import OrderedDict 9 | from itertools import combinations 10 | 11 | import cv2 12 | import os 13 | from libs.draw import Draw 14 | from libs.geodist import social_distance, get_crop 15 | from libs.geometric import get_polygon, get_point, get_line 16 | from libs.person_trackers import PersonTrackers, TrackableObject 17 | from libs.validate import validate 18 | from openvino.inference_engine import IENetwork, IECore 19 | 20 | 21 | class SocialDistance(object): 22 | def __init__(self): 23 | config_file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "config.json") 24 | with open(config_file_path) as f: 25 | cfg = json.load(f) 26 | validate(cfg) 27 | self.running = True 28 | self.videosource = cfg.get("video") 29 | self.model_modelfile = cfg.get("pedestrian_model_weights") 30 | self.model_configfile = cfg.get("pedestrian_model_description") 31 | self.model_modelfile_reid = cfg.get("reidentification_model_weights") 32 | self.model_configfile_reid = cfg.get("reidentification_model_description") 33 | self.coords = cfg.get("coords") 34 | # OPENVINO VARS 35 | self.ov_input_blob = None 36 | 
self.out_blob = None 37 | self.net = None 38 | self.ov_n = None 39 | self.ov_c = None 40 | self.ov_h = None 41 | self.ov_w = None 42 | self.ov_input_blob_reid = None 43 | self.out_blob_reid = None 44 | self.net_reid = None 45 | self.ov_n_reid = None 46 | self.ov_c_reid = None 47 | self.ov_h_reid = None 48 | self.ov_w_reid = None 49 | # PROCESSOR VARS 50 | self.confidence_threshold = .85 51 | self.iterations = 4 # ~ 5 feets 52 | self.trackers = [] 53 | self.max_disappeared = 90 54 | self.polygon = None 55 | self.trackers = PersonTrackers(OrderedDict()) 56 | self.min_w = 99999 57 | self.max_w = 1 58 | 59 | def load_openvino(self): 60 | try: 61 | ie = IECore() 62 | net = ie.read_network(model=self.model_configfile, weights=self.model_modelfile) 63 | self.ov_input_blob = next(iter(net.inputs)) 64 | self.out_blob = next(iter(net.outputs)) 65 | self.net = ie.load_network(network=net, num_requests=2, device_name="CPU") 66 | # Read and pre-process input image 67 | self.ov_n, self.ov_c, self.ov_h, self.ov_w = net.inputs[self.ov_input_blob].shape 68 | del net 69 | except Exception as e: 70 | raise Exception(f"Load Openvino error:{e}") 71 | self.load_openvino_reid() 72 | 73 | def load_openvino_reid(self): 74 | try: 75 | ie = IECore() 76 | net = ie.read_network(model=self.model_configfile_reid, weights=self.model_modelfile_reid) 77 | self.ov_input_blob_reid = next(iter(net.inputs)) 78 | self.out_blob_reid = next(iter(net.outputs)) 79 | self.net_reid = ie.load_network(network=net, num_requests=2, device_name="CPU") 80 | # Read and pre-process input image 81 | self.ov_n_reid, self.ov_c_reid, self.ov_h_reid, self.ov_w_reid = net.inputs[self.ov_input_blob_reid].shape 82 | del net 83 | except Exception as e: 84 | raise Exception(f"Load Openvino reidentification error:{e}") 85 | 86 | def config_env(self, frame): 87 | h, w = frame.shape[:2] 88 | self.trackers.clear() 89 | 90 | polylist = [] 91 | 92 | for pair in self.coords: 93 | polylist.append([int(pair[0] * w / 100), int(pair[1] * h / 100)]) 94 | 95 | self.polygon = get_polygon(polylist) 96 | 97 | def get_frame(self): 98 | h = w = None 99 | try: 100 | cap = cv2.VideoCapture(self.videosource) 101 | except Exception as e: 102 | raise Exception(f"Video source error: {e}") 103 | 104 | while self.running: 105 | has_frame, frame = cap.read() 106 | if has_frame: 107 | if frame.shape[1] > 2000: 108 | frame = cv2.resize(frame, (int(frame.shape[1] * .3), int(frame.shape[0] * .3))) 109 | 110 | elif frame.shape[1] > 1000: 111 | frame = cv2.resize(frame, (int(frame.shape[1] * .8), int(frame.shape[0] * .8))) 112 | 113 | if w is None or h is None: 114 | h, w = frame.shape[:2] 115 | print(frame.shape) 116 | self.config_env(frame) 117 | 118 | yield frame 119 | else: 120 | self.running = False 121 | return None 122 | 123 | def process_frame(self, frame): 124 | _frame = frame.copy() 125 | trackers = [] 126 | 127 | frame = cv2.resize(frame, (self.ov_w, self.ov_h)) 128 | frame = frame.transpose((2, 0, 1)) 129 | frame = frame.reshape((self.ov_n, self.ov_c, self.ov_h, self.ov_w)) 130 | 131 | self.net.start_async(request_id=0, inputs={self.ov_input_blob: frame}) 132 | 133 | if self.net.requests[0].wait(-1) == 0: 134 | res = self.net.requests[0].outputs[self.out_blob] 135 | 136 | frame = _frame 137 | h, w = frame.shape[:2] 138 | out = res[0][0] 139 | for i, detection in enumerate(out): 140 | 141 | confidence = detection[2] 142 | if confidence > self.confidence_threshold and int(detection[1]) == 1: # 1 => CLASS Person 143 | 144 | xmin = int(detection[3] * w) 145 | ymin = 
int(detection[4] * h) 146 | xmax = int(detection[5] * w) 147 | ymax = int(detection[6] * h) 148 | 149 | if get_line([[xmin, ymax], [xmax, ymax]]).length < self.min_w: 150 | self.min_w = get_line([[xmin, ymax], [xmax, ymax]]).length 151 | elif get_line([[xmin, ymax], [xmax, ymax]]).length > self.max_w: 152 | self.max_w = get_line([[xmin, ymax], [xmax, ymax]]).length 153 | 154 | cX = int((xmin + xmax) / 2.0) 155 | cY = int(ymax) 156 | point = get_point([cX, cY]) 157 | if not self.polygon.contains(point): 158 | continue 159 | 160 | trackers.append( 161 | TrackableObject((xmin, ymin, xmax, ymax), None, (cX, cY)) 162 | ) 163 | Draw.rectangle(frame, (xmin, ymin, xmax, ymax), "green", 2) 164 | 165 | for tracker in trackers: 166 | person = frame[tracker.bbox[1]:tracker.bbox[3], tracker.bbox[0]:tracker.bbox[2]] 167 | 168 | try: 169 | person = cv2.resize(person, (self.ov_w_reid, self.ov_h_reid)) 170 | except cv2.error as e: 171 | print(f"CV2 RESIZE ERROR: {e}") 172 | continue 173 | 174 | person = person.transpose((2, 0, 1)) # Change data layout from HWC to CHW 175 | person = person.reshape((self.ov_n_reid, self.ov_c_reid, self.ov_h_reid, self.ov_w_reid)) 176 | 177 | self.net_reid.start_async(request_id=0, inputs={self.ov_input_blob_reid: person}) # feed the re-identification network through its own input blob 178 | 179 | if self.net_reid.requests[0].wait(-1) == 0: 180 | res = self.net_reid.requests[0].outputs[self.out_blob_reid] 181 | tracker.reid = res 182 | 183 | self.trackers.similarity(trackers) 184 | if len(self.trackers.trackers) > 0: 185 | track_tuples = list(combinations(self.trackers.trackers.keys(), 2)) 186 | for trackup in track_tuples: 187 | l1 = self.trackers.trackers[trackup[0]].bbox 188 | l2 = self.trackers.trackers[trackup[1]].bbox 189 | 190 | if l1[3] < l2[3]: 191 | a = (l1[0], l1[3]) 192 | b = (l1[2], l1[3]) 193 | c = (l2[0], l2[3]) 194 | d = (l2[2], l2[3]) 195 | else: 196 | c = (l1[0], l1[3]) 197 | d = (l1[2], l1[3]) 198 | a = (l2[0], l2[3]) 199 | b = (l2[2], l2[3]) 200 | 201 | h, w = frame.shape[:2] 202 | result = social_distance((h, w), a, b, c, d, self.iterations, self.min_w, self.max_w) 203 | if result["alert"]: 204 | xmin, ymin, xmax, ymax = get_crop(l1, l2) 205 | Draw.rectangle(frame, l1, "yellow", 2) 206 | Draw.rectangle(frame, l2, "yellow", 2) 207 | Draw.rectangle(frame, (xmin, ymin, xmax, ymax), "red", 3) 208 | return frame 209 | 210 | def render(self, frame): 211 | cv2.namedWindow("output", cv2.WINDOW_NORMAL) 212 | frame = cv2.resize(frame, (960, 540)) 213 | cv2.imshow("output", frame) 214 | key = cv2.waitKey(1) & 0xFF 215 | if key == ord("q"): 216 | exit() 217 | 218 | def run(self): 219 | self.load_openvino() 220 | for frame in self.get_frame(): 221 | frame = self.process_frame(frame) 222 | self.render(frame) 223 | 224 | 225 | if __name__ == '__main__': 226 | try: 227 | sd = SocialDistance() 228 | sd.run() 229 | except Exception as exception: 230 | print(exception) 231 | --------------------------------------------------------------------------------
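As a closing note on the tracking step: `libs/person_trackers.py` matches each new detection to an existing track purely by cosine similarity between re-identification embeddings, accepting the best match above its `similarity_threshold` of 0.7. A self-contained sketch of that matching rule, with random vectors standing in for the embeddings the application gets from person-reidentification-retail-0300:

```python
# Illustrative matching rule only; random 256-value vectors stand in for
# the re-ID embeddings produced by the second DNN model.
import numpy as np
from sklearn.metrics.pairwise import cosine_similarity

rng = np.random.default_rng(0)
known_tracks = {0: rng.normal(size=(1, 256)), 1: rng.normal(size=(1, 256))}
new_embedding = known_tracks[1] + 0.05 * rng.normal(size=(1, 256))  # close to track 1

SIMILARITY_THRESHOLD = 0.7  # same value as PersonTrackers.similarity_threshold
best_id, best_score = None, SIMILARITY_THRESHOLD
for track_id, embedding in known_tracks.items():
    score = cosine_similarity(new_embedding, embedding)[0][0]
    if score > best_score:
        best_id, best_score = track_id, score

print(best_id)  # 1 -> the detection is attached to the existing track
```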