├── .gitignore
├── .idea
│   ├── .gitignore
│   ├── inspectionProfiles
│   │   └── profiles_settings.xml
│   ├── misc.xml
│   ├── modules.xml
│   ├── pipelineIntegration.iml
│   └── vcs.xml
├── .vscode
│   └── settings.json
├── MatlabProcessor.py
├── MatlabProcessor.pyc
├── README.md
├── SmartTrafficApp.py
├── Tests
│   ├── Image Cleaning
│   │   ├── slow_traffic.PNG
│   │   └── traffic_test.m
│   ├── Object Detection
│   │   ├── Lines.jpg
│   │   ├── bacteria.png
│   │   ├── busy_traffic.PNG
│   │   ├── objectDetection.m
│   │   ├── parkingLot.jpg
│   │   ├── parkingLot1.jpg
│   │   ├── parkingLot2.jpg
│   │   └── rice.jpg
│   └── videoRendering
│       ├── Traffic.m
│       ├── TrafficTest.mp4
│       ├── TrafficTest2.mp4
│       └── traffic-test.mp4
├── __pycache__
│   └── SmartTrafficApp.cpython-38.pyc
├── static
│   ├── jsfiles
│   │   ├── histogram.js
│   │   └── index.js
│   ├── pictures
│   │   ├── arrow-down.png
│   │   ├── car.png
│   │   ├── cars.png
│   │   ├── github.png
│   │   ├── index-background.jpg
│   │   ├── patrik.JPG
│   │   ├── sarvath.jpg
│   │   ├── sathira.jpg
│   │   ├── smart-traffic.jpg
│   │   ├── traffic-light.png
│   │   └── traffic.jpg
│   ├── styles
│   │   ├── aboutus.css
│   │   ├── index.css
│   │   └── layout.css
│   ├── uploads
│   │   └── traffic-test.mp4
│   └── videos
│       └── finalVideo.mp4
├── templates
│   ├── aboutus.html
│   ├── index.html
│   └── layout.html
└── vehicleDetection
    ├── GettingStarted.html
    ├── motionTracking.m
    ├── motionTracking.prj
    ├── motionTracking
    │   ├── __init__.py
    │   └── motionTracking.ctf
    ├── setup.py
    └── v98
        ├── GettingStarted.html
        ├── motionTracking.prj
        ├── motionTracking
        │   ├── __init__.py
        │   └── motionTracking.ctf
        └── setup.py
/.gitignore:
--------------------------------------------------------------------------------
1 | venv/
2 |
--------------------------------------------------------------------------------
/.idea/.gitignore:
--------------------------------------------------------------------------------
1 | # Default ignored files
2 | /shelf/
3 | /workspace.xml
4 | # Datasource local storage ignored files
5 | /../../../../../:\GitHub\Smart-Traffic-Control-System\pipelineIntegration\.idea/dataSources/
6 | /dataSources.local.xml
7 | # Editor-based HTTP Client requests
8 | /httpRequests/
9 |
--------------------------------------------------------------------------------
/.idea/inspectionProfiles/profiles_settings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/.idea/pipelineIntegration.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "python.pythonPath": "/usr/bin/python"
3 | }
--------------------------------------------------------------------------------
/MatlabProcessor.py:
--------------------------------------------------------------------------------
1 | import os
2 | import motionTracking
3 | import matlab
4 | import shutil
5 |
6 |
7 | def run_matlab():
8 | # Get info on current directory and check files
9 | # Then move to the directory where the MATLAB script is
10 | currDir = os.getcwd()
11 | print("Current directory " + currDir)
12 | os.chdir("./vehicleDetection")
13 | # In the linux server
14 | # os.chdir('/root/Smart-Traffic-Control-System/vehicleDetection')
15 | projectDir = os.getcwd()
16 | print("Moved to " + projectDir)
17 | arr = os.listdir(projectDir)
18 | print("Initial files in directory " + str(arr))
19 |
20 | # Start the MATLAB Runtime via the compiled motionTracking package and run the script
21 | # After running the script, show the new files in the directory
22 | # and shut down the MATLAB Runtime when done
23 | eng = motionTracking.initialize()
24 | try:
25 | eng.motionTracking(nargout=0)
26 | except Exception:
27 | os.chdir("./../")
28 | return False
29 | arr2 = os.listdir(projectDir)
30 | print("Final files in directory " + str(arr2))
31 | eng.terminate()
32 | os.chdir("./../")
33 | # if path.exists('./static/videos/finalVideo.avi'):
34 | # os.remove('static/videos/finalVideo.avi')
35 | # shutil.move('vehicleDetection/finalVideo.avi', 'static/videos/')
36 | # else:
37 | # shutil.move('vehicleDetection/finalVideo.avi', 'static/videos')
38 | return True
--------------------------------------------------------------------------------
/MatlabProcessor.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/MatlabProcessor.pyc
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Smart-Traffic-Control-System
2 |
3 | Created By: Patrik Beqo, Sathira Katugaha and Sarvath Sharma
4 |
5 | Link to Web App: https://traffic.xor.dev/
6 |
7 | # Purpose:
8 | We are building a Smart Traffic Control System. Using MATLAB's image processing tools, we take a video of an intersection as input and interpret the density of the traffic flow at certain points in time. This data is then used to determine the duration of traffic signals, and a histogram of the traffic flow is displayed. The histogram will show the number of cars and from which direction the density is the largest (TBD).
9 |
10 | # Process:
11 | 1. Grab videos of traffic intersections
12 | 2. Analyze the density and patterns of the environment using Matlab
13 | 1. Need to clean image and isolate vehicles on the road using foreground detection
14 | 2. Once cleaned, use Blob Analysis/Kalman Filter to detect and track vehicles
15 | 3. Using a Queue ADT, store the values of the traffic density into a CSV file
16 | 4. Using Python, run the MATLAB script and display the graph of the traffic flow on the website (see the sketch below)
17 | 5. Run the entire program on a hosted server instead of locally
18 |
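For reference, steps 3 and 4 boil down to the MATLAB script writing one row of per-interval car counts to `finalData.csv`, which the Flask app then reads and plots. A minimal sketch of that read step (it mirrors `get_data()` in `SmartTrafficApp.py`; the file path and the 10-second interval labels follow what the app already assumes):

```python
# Read the per-interval car counts exported by the MATLAB script and turn
# them into (time, count) pairs for the traffic-flow chart.
import csv

def load_histogram_data(csv_path="vehicleDetection/finalData.csv"):
    with open(csv_path, mode="r") as csv_file:
        counts = [int(value) for value in next(csv.reader(csv_file))]
    # One entry per interval; the web app labels intervals in 10-second steps.
    time_intervals = [(i + 1) * 10 for i in range(len(counts))]
    return time_intervals, counts
```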
19 |
20 | # Video Format:
21 | We will be using the .mp4 file format at 60 fps.
22 |
23 | # Extra Info:
24 | To watch the MATLAB script run locally, make sure you have MATLAB installed and clone the GitHub repo. In the motionTracking.m file, edit the path to the video. The line to change is line 138:
25 |
26 | file = fullfile('..', 'static', 'uploads', 'traffic-test.mp4');
27 |
28 | Change it so that the path is valid (it currently points to '../static/uploads/traffic-test.mp4').
29 |
--------------------------------------------------------------------------------
/SmartTrafficApp.py:
--------------------------------------------------------------------------------
1 | import csv
2 | import os
3 | from MatlabProcessor import run_matlab
4 | import shutil
5 | from os import path
6 | from os.path import join
7 | from flask import Flask, render_template, flash, request, redirect, url_for
8 | from werkzeug.utils import secure_filename
9 | import redis
10 | from rq import Queue
11 |
12 | ALLOWED_EXTENSIONS = {'mp4', 'MP4'}
13 |
14 | app = Flask(__name__)
15 | app.secret_key = 'oursecretkey'
16 |
17 | r = redis.Redis()
18 | q = Queue(connection=r)
19 |
20 | APP_ROOT = path.dirname(path.abspath(__file__))
21 | UPLOAD_FOLDER = join(APP_ROOT, 'static', 'uploads')
22 | app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
23 |
24 | graphData = None
25 | error = False
26 | extensionError = False
27 |
28 | def allowed_file(filename):
29 | extension = '.' in filename and filename.rsplit('.', 1)[1].lower()
30 | return extension if extension in ALLOWED_EXTENSIONS else False
31 |
32 |
33 | def get_data(response):
34 | global graphData
35 | global error
36 | global extensionError
37 | # In Linux server
38 | # if response and path.exists('/root/Smart-Traffic-Control-System/vehicleDetection/finalData.csv'):
39 | if response and path.exists('./vehicleDetection/finalData.csv'):
40 | # In linux server
41 | # with open('/root/Smart-Traffic-Control-System/vehicleDetection/finalData.csv', mode='r') as csv_file:
42 | with open('./vehicleDetection/finalData.csv', mode='r') as csv_file:
43 | # Grab Data
44 | data = list(csv.reader(csv_file))[0]
45 | numPlots = len(data)
46 | timeIntervals = []
47 | numCars = []
48 | for i in range(1, numPlots+1):
49 | timeIntervals.append(i * 10)
50 | for element in data:
51 | numCars.append(int(element))
52 | graphData = [timeIntervals, numCars]
53 | error = False
54 | extensionError = False
55 | else:
56 | graphData = None
57 | error = True
58 | extensionError = False
59 |
60 |
61 | @app.route('/', methods=['GET', 'POST'])
62 | @app.route('/home', methods=['GET', 'POST'])
63 | def home():
64 | global extensionError
65 | global error
66 | global graphData
67 | if request.method == 'POST':
68 | print(request)
69 | print(request.files)
70 | if 'file' not in request.files:
71 | print('no file part')
72 | flash('No file part')
73 | return redirect(url_for('home'))
74 | file = request.files['file']
75 | if file.filename == '':
76 | print('no selected file')
77 | flash('No selected file')
78 | return redirect(url_for('home'))
79 | allowedExtension = allowed_file(file.filename)
80 | print("allowed extension " + allowedExtension if allowedExtension is not False else "")
81 | if file and allowedExtension is not False:
82 | print('creating file')
83 | savePath = join(UPLOAD_FOLDER, "traffic-test." + allowedExtension)
84 | file.save(savePath)
85 | job = q.enqueue(run_matlab)
86 | res = job.result
87 | get_data(res)
88 | else:
89 | extensionError = True
90 | error = False
91 | graphData = None
92 | return redirect(url_for('home'))
93 |
94 | # Opening csv file
95 | if request.method == 'GET':
96 | return render_template('index.html', data=graphData, error=error, extensionError=extensionError)
97 |
98 |
99 | @app.route('/aboutus')
100 | def aboutus():
101 | return render_template('aboutus.html')
102 |
103 |
104 | if __name__ == '__main__':
105 | app.run(debug=True)
106 | # On linux server
107 | # app.run(debug=True, host='0.0.0.0', port=5000, threaded=True)
--------------------------------------------------------------------------------
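Note on the queueing above: `q.enqueue(run_matlab)` hands the job to a separate `rq worker` process, so `job.result` read immediately after enqueueing is normally still empty; RQ only populates it once a worker has finished the job. A minimal sketch of one way to wait for that result (it assumes a worker is running against the same Redis instance; the polling interval and timeout values are illustrative):

# Enqueue run_matlab on RQ and poll until the worker finishes (or times out)
# before reading the value it returned.
import time

import redis
from rq import Queue

from MatlabProcessor import run_matlab

def run_matlab_and_wait(poll_seconds=5, timeout_seconds=900):
    queue = Queue(connection=redis.Redis())
    job = queue.enqueue(run_matlab)
    deadline = time.time() + timeout_seconds
    while time.time() < deadline:
        if job.is_finished:
            return job.result      # True/False returned by run_matlab
        if job.is_failed:
            return False
        time.sleep(poll_seconds)
    return False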
/Tests/Image Cleaning/slow_traffic.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/Tests/Image Cleaning/slow_traffic.PNG
--------------------------------------------------------------------------------
/Tests/Image Cleaning/traffic_test.m:
--------------------------------------------------------------------------------
1 | function [new_image] = traffic_test()
2 |
3 | a = imread('slow_traffic.PNG');
4 | a = rgb2gray(a);
5 | a = imbinarize(a);
6 | b = imread('busy_traffic.PNG');
7 | b = rgb2gray(b);
8 | b = imbinarize(b);
9 |
10 | x = size(a);
11 |
12 | for i=1:x(1)
13 | for j=1:x(2)
14 | if a(i,j) ~= b(i,j)
15 | a(i,j) = b(i,j);
16 | else
17 | a(i,j) = 0;
18 | end
19 | end
20 | end
21 |
22 | sedisk = strel('disk',2);
23 | better_a = imopen(a,sedisk);
24 | imshow(better_a);
25 | end
26 |
--------------------------------------------------------------------------------
/Tests/Object Detection/Lines.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/Tests/Object Detection/Lines.jpg
--------------------------------------------------------------------------------
/Tests/Object Detection/bacteria.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/Tests/Object Detection/bacteria.png
--------------------------------------------------------------------------------
/Tests/Object Detection/busy_traffic.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/Tests/Object Detection/busy_traffic.PNG
--------------------------------------------------------------------------------
/Tests/Object Detection/objectDetection.m:
--------------------------------------------------------------------------------
1 | clc; % Clear command window.
2 | clear; % Delete all variables.
3 | % Get the original image from the directory
4 | original = imread('./Lines.jpg');
5 | imshow(original);
6 |
7 | % Convert it to grayscale (easier to process than colours)
8 | grayScaled = rgb2gray(original);
9 | imshow(grayScaled);
10 |
11 | % Turn it into a binary image
12 | binaryImage = imbinarize(grayScaled, ...
13 | 'adaptive','ForegroundPolarity','dark');
14 | binaryImage = ~binaryImage;
15 | binaryImage = bwareaopen(binaryImage, 100);
16 | sedisk = strel('disk', 2);
17 | binaryImage = imclose(binaryImage, sedisk);
18 | % binaryImage = imfill(binaryImage, 'holes');
19 | % binaryImage = imclearborder(binaryImage);
20 | imshow(binaryImage);
21 |
22 | % Now we can make a boundary encasing the image and add a text to show the
23 | % number of objects detected
24 | % Hold on is used to retain the current plot while new ones are being
25 | % generated
26 | [B,L,N,A] = bwboundaries(binaryImage);
27 | figure;
28 | imshow(original)
29 | text(10,10,strcat('\color{green}Objects Found:',num2str(length(B))))
30 | hold on;
31 | % Loop through object boundaries
32 | for k = 1:length(B)
33 | boundary = B{k};
34 | plot(boundary(:,2), boundary(:,1), 'g', 'LineWidth', 0.2)
35 | end
--------------------------------------------------------------------------------
/Tests/Object Detection/parkingLot.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/Tests/Object Detection/parkingLot.jpg
--------------------------------------------------------------------------------
/Tests/Object Detection/parkingLot1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/Tests/Object Detection/parkingLot1.jpg
--------------------------------------------------------------------------------
/Tests/Object Detection/parkingLot2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/Tests/Object Detection/parkingLot2.jpg
--------------------------------------------------------------------------------
/Tests/Object Detection/rice.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/Tests/Object Detection/rice.jpg
--------------------------------------------------------------------------------
/Tests/videoRendering/Traffic.m:
--------------------------------------------------------------------------------
1 | clc; % Clear command window.
2 | clear; % Delete all variables.
3 |
4 | trafficVid = VideoReader('TrafficTest2.mp4'); % Reading in video
5 | nframes = trafficVid.NumFrames; % Calculating number of frames
6 |
7 | % Train model using the first 150 frames
8 | foregroundDetector = vision.ForegroundDetector('NumGaussians', 3, ...
9 | 'NumTrainingFrames', 150);
10 |
11 | % Call to calibrating function
12 | calibrating(trafficVid, foregroundDetector.NumTrainingFrames,...
13 | foregroundDetector);
14 |
15 | % make temp directory to store video, not sure if this is saved after running,
16 | % if you can't find the folder I can try saving it somewhere solid after the video is made
17 | % vidDir = videoOutput;
18 | % mkdir(vidDir)
19 | % make sub folder for video frames
20 | %mkdir(vidDir,'images')
21 |
22 | %Stack Implementation to count cars
23 | old_frame = 0;
24 | total_cars = 0;
25 |
26 | for k = foregroundDetector.NumTrainingFrames : nframes
27 |
28 | % Read frame and get data
29 | % Using the size of the image screen, display a border line on the
30 | % center of the image
31 | singleFrame = readFrame(trafficVid);
32 | [y, x, z] = size(singleFrame);
33 | grid on
34 | x1 = x/4; y1 = y/4; x2 = (3*x)/4; y2 = (3*y)/4;
35 | singleFrame = insertShape(singleFrame, 'Line', [x1 y1 x2 y2], ...
36 | 'LineWidth', 2, 'Color', 'black');
37 |
38 | % Initial image filtering
39 | foreground = step(foregroundDetector, singleFrame);
40 |
41 | % Convert to grayscale to do morphological processing
42 | newImgs = imageEnhancement(foreground);
43 |
44 | % Detect car using blob analysis and displays new image, returns new
45 | % total number of cars data in an array
46 | new_data = vehicleDetection(newImgs, singleFrame, total_cars, ...
47 | old_frame);
48 |
49 | %Updating data
50 | total_cars = new_data(1);
51 | old_frame = new_data(2);
52 |
53 |
54 | % name images from img001.jpg to imgN.jpg
55 | % filename = [sprintf('03%',k) '.jpg'];
56 | % fullname = fullfile(vidDir.'images',filename);
57 |
58 | % name and write the file properly
59 | % img = detectedVehicles;
60 | % imwrite(img,fullname);
61 |
62 | end
63 |
64 | % get all images written
65 | % imageNames = dir(fullfile(vidDir,'images','*.jpg'));
66 | % imageNames = {imageNames.name}';
67 |
68 | % convert to video
69 | % outputVideo = VideoWriter(fullfile(vidDir, 'traffic_out.mp4'));
70 | % outputVideo.FrameRate = trafficVid.FrameRate;
71 |
72 | function calibrating(video, trnframes, model)
73 |
74 | for i=1 : trnframes
75 |
76 | % Read Frame
77 | singleFrame = readFrame(video);
78 |
79 | % Train model
80 | step(model, singleFrame);
81 |
82 | %Insert Text
83 | position = [10,10];
84 | box_color = 'black';
85 | newIMG = insertText(singleFrame,position,'Calibrating...',...
86 | 'FontSize',18,'BoxColor', box_color,'TextColor','white');
87 |
88 | %Output video with calibrating text in top left corner
89 | imshow(newIMG);
90 |
91 | end
92 | end
93 |
94 | function img = imageEnhancement(input)
95 |
96 | % The code below is commented out since I've added the Vision library for initial
97 | % filtering
98 |
99 | % Generate binary image
100 | %img = rgb2gray(input);
101 | %binaryImage = imbinarize(img, ...
102 | % 'adaptive', 'ForegroundPolarity', 'dark', 'Sensitivity', 0.52);
103 | %binaryImage = ~binaryImage;
104 | %binaryImage = bwareaopen(binaryImage, 175); % Removes small objects
105 |
106 | % After initial filtering remove noise
107 | se1 = strel('disk', 1);
108 | se2 = strel('disk', 2);
109 | imgClosed = imclose(input, se1);
110 | imgOpened = imopen(imgClosed, se2);
111 | imgFill = imfill(imgOpened, 'holes');
112 | clearBorders = imclearborder(imgFill);
113 | se3 = strel('square', 20);
114 | finalImg = imdilate(clearBorders, se3);
115 | img = finalImg;
116 |
117 | end
118 |
119 | function new_data = vehicleDetection(input, frame, oldTotal, oldFrameNumCars)
120 |
121 | % Performs blob analysis in order to create a green box around cars
122 | % Then count the number of boxes which should be the cars
123 | blobAnalysis = vision.BlobAnalysis('BoundingBoxOutputPort', true, ...
124 | 'AreaOutputPort', false, 'CentroidOutputPort', false, ...
125 | 'MinimumBlobArea', 250, 'ExcludeBorderBlobs', true);
126 | bbox = step(blobAnalysis, input);
127 | result = insertShape(frame, 'Rectangle', bbox, 'Color', 'green');
128 | currFrameNumCars = size(bbox, 1);
129 |
130 | %Updating total_number of cars count
131 | if currFrameNumCars >= oldFrameNumCars
132 |
133 | new_data(1) = oldTotal + (currFrameNumCars - oldFrameNumCars);
134 | new_data(2) = currFrameNumCars;
135 |
136 | else
137 |
138 | new_data(1) = oldTotal;
139 | new_data(2) = currFrameNumCars;
140 |
141 | end
142 |
143 | result = insertText(result, [10 10], new_data(1),...
144 | 'BoxOpacity', 1, 'FontSize', 15);
145 | imshow(result);
146 |
147 | end
--------------------------------------------------------------------------------
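A note on the counting rule in `vehicleDetection()` above (the same rule is reused in `vehicleDetection/motionTracking.m`): the running total only grows when the current frame contains more bounding boxes than the previous frame, so vehicles leaving the field of view do not decrement the count. A small Python illustration of that update rule (just the arithmetic, not a translation of the MATLAB code):

# Per-frame counting rule: add only the increase in detected boxes.
def update_count(total_cars, prev_frame_cars, curr_frame_cars):
    if curr_frame_cars >= prev_frame_cars:
        total_cars += curr_frame_cars - prev_frame_cars
    return total_cars, curr_frame_cars

# Example: bounding-box counts per frame -> running total of cars seen
total, prev = 0, 0
for boxes in [2, 3, 3, 1, 4]:
    total, prev = update_count(total, prev, boxes)
print(total)  # 6: increments of +2, +1, +0, +0, +3 (the dip to 1 never subtracts)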
/Tests/videoRendering/TrafficTest.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/Tests/videoRendering/TrafficTest.mp4
--------------------------------------------------------------------------------
/Tests/videoRendering/TrafficTest2.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/Tests/videoRendering/TrafficTest2.mp4
--------------------------------------------------------------------------------
/Tests/videoRendering/traffic-test.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/Tests/videoRendering/traffic-test.mp4
--------------------------------------------------------------------------------
/__pycache__/SmartTrafficApp.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/__pycache__/SmartTrafficApp.cpython-38.pyc
--------------------------------------------------------------------------------
/static/jsfiles/histogram.js:
--------------------------------------------------------------------------------
1 | if( document.getElementById('myChart') ){
2 | var ctx = document.getElementById('myChart').getContext('2d');
3 | var data = JSON.parse(document.getElementById('myChart').getAttribute('data'));
4 | const xAxis = data[0];
5 | const yAxis = data[1];
6 | var chart = new Chart(ctx, {
7 | // The type of chart we want to create
8 | type: 'line',
9 |
10 | // The data for our dataset
11 | data: {
12 | labels: xAxis,
13 | datasets: [{
14 | label: 'Number of Cars vs Time (Seconds)',
15 | backgroundColor: 'rgb(255, 99, 132)',
16 | borderColor: 'rgb(255, 99, 132)',
17 | data: yAxis,
18 | }]
19 | },
20 |
21 | // Configuration options go here
22 | options: {}
23 | });
24 | }
--------------------------------------------------------------------------------
/static/jsfiles/index.js:
--------------------------------------------------------------------------------
1 | var loadingAnimation = document.getElementById('loading-animation');
2 | var graph = document.getElementById('graph');
3 | var helpText = document.getElementById('help-text');
4 | var errorText = document.getElementById('error-text');
5 | var extensionErrorText = document.getElementById('extension-error-text');
6 | var routingLinks = document.getElementById('routing-links');
7 |
8 | document.getElementById('upload-form').addEventListener('submit', (e) => {
9 | routingLinks.style.pointerEvents = 'none';
10 | loadingAnimation.style.display = 'block';
11 | if(graph){
12 | graph.style.display = 'none';
13 | }
14 | if(helpText){
15 | helpText.style.display = 'none';
16 | }
17 | if(errorText){
18 | errorText.style.display = 'none';
19 | }
20 | if(extensionErrorText){
21 | extensionErrorText.style.display = 'none';
22 | }
23 | });
--------------------------------------------------------------------------------
/static/pictures/arrow-down.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/static/pictures/arrow-down.png
--------------------------------------------------------------------------------
/static/pictures/car.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/static/pictures/car.png
--------------------------------------------------------------------------------
/static/pictures/cars.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/static/pictures/cars.png
--------------------------------------------------------------------------------
/static/pictures/github.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/static/pictures/github.png
--------------------------------------------------------------------------------
/static/pictures/index-background.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/static/pictures/index-background.jpg
--------------------------------------------------------------------------------
/static/pictures/patrik.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/static/pictures/patrik.JPG
--------------------------------------------------------------------------------
/static/pictures/sarvath.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/static/pictures/sarvath.jpg
--------------------------------------------------------------------------------
/static/pictures/sathira.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/static/pictures/sathira.jpg
--------------------------------------------------------------------------------
/static/pictures/smart-traffic.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/static/pictures/smart-traffic.jpg
--------------------------------------------------------------------------------
/static/pictures/traffic-light.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/static/pictures/traffic-light.png
--------------------------------------------------------------------------------
/static/pictures/traffic.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/static/pictures/traffic.jpg
--------------------------------------------------------------------------------
/static/styles/aboutus.css:
--------------------------------------------------------------------------------
1 | .intro-card {
2 | color: white;
3 | background-image: linear-gradient(rgba(0, 0, 0, .75), rgba(0, 0, 0, .75)), url(/static/pictures/traffic.jpg);
4 | background-size: cover;
5 | height: calc(100vh - 76px);
6 | display: flex;
7 | padding-top: 300px;
8 | flex-direction: column;
9 | }
10 |
11 | .heading-text {
12 | width: calc(200% / 3);
13 | text-align: center;
14 | align-self: center;
15 | }
16 |
17 | .sub-heading-text{
18 | margin-top: 48px;
19 | }
20 |
21 |
22 | .centered-text {
23 | text-align: center;
24 | padding: 72px 32px;
25 | max-width: 1100px;
26 | margin: 0 auto;
27 | }
28 |
29 | .two-column-left {
30 | display: grid;
31 | grid-template-rows: 1fr 2fr;
32 | grid-template-columns: auto;
33 | grid-template-areas:
34 | " Image "
35 | " Description " ;
36 | padding: 48px 32px;
37 | grid-row-gap: 32px;
38 | }
39 |
40 |
41 | @media only screen and (min-device-width: 768px) {
42 | .two-column-left{
43 | max-width: 1100px;
44 | margin: 0 auto;
45 | grid-template-columns: 1fr 1fr;
46 | grid-template-rows: auto;
47 | grid-template-areas:
48 | " Image Description " ;
49 | padding: 48px 32px;
50 | grid-column-gap: 32px;
51 | }
52 | }
53 |
54 | .two-column-right {
55 | display: grid;
56 | grid-template-rows: 1fr 2fr;
57 | grid-template-columns: auto;
58 | grid-template-areas:
59 | " Image "
60 | " Description " ;
61 | padding: 48px 32px;
62 | grid-row-gap: 32px;
63 | }
64 |
65 | @media only screen and (min-device-width: 768px) {
66 | .two-column-right{
67 | max-width: 1100px;
68 | margin: 0 auto;
69 | grid-template-columns: 1fr 1fr;
70 | grid-template-rows: auto;
71 | grid-template-areas:
72 | " Description Image" ;
73 | padding: 48px 32px;
74 | grid-column-gap: 32px;
75 | }
76 | }
77 |
78 | .about-us-image{
79 | background-size: contain;
80 | grid-area: Image;
81 | height: 250px;
82 | width: 250px;
83 | margin: auto;
84 | }
85 |
86 | .about-us-description{
87 | grid-area: Description;
88 | }
89 |
90 | .arrow-down{
91 | filter: brightness(0) invert(1);
92 | padding: 8px 8px;
93 | }
94 |
95 | .center {
96 | display: block;
97 | margin-top: 48px;
98 | margin-left: auto;
99 | margin-right: auto;
100 | }
--------------------------------------------------------------------------------
/static/styles/index.css:
--------------------------------------------------------------------------------
1 | .homepage-card {
2 | color: white;
3 | background-image: linear-gradient(rgba(0, 0, 0, 0.75), rgba(0, 0, 0, 0.75)),
4 | url(/static/pictures/index-background.jpg);
5 | background-size: cover;
6 | height: calc(100vh - 76px);
7 | width: 100%;
8 | display: block;
9 | padding-top: 128px;
10 | }
11 |
12 | .heading-text-index {
13 | width: calc(200% / 3);
14 | align-self: center;
15 | padding-left: 64px;
16 | margin-top: 48px;
17 | }
18 |
19 | .error{
20 | color: red;
21 | }
22 |
23 | .home-page-image {
24 | grid-area: Image;
25 | height: 200px;
26 | width: 200px;
27 | margin: 0 auto;
28 | }
29 | .upload-description {
30 | margin-top: 64px;
31 | margin-bottom: 16px;
32 | }
33 | .upload-button {
34 | margin-top: 16px;
35 | margin-bottom: 32px;
36 | }
37 |
38 | #graph {
39 | margin: 64px auto;
40 | display: block;
41 | width: 90%;
42 | }
43 |
44 | #loading-animation {
45 | display: none;
46 | }
47 |
48 | .spinner {
49 | width: 50px;
50 | height: 40px;
51 | text-align: center;
52 | font-size: 10px;
53 | }
54 |
55 | .spinner > div {
56 | background-color: white;
57 | height: 100%;
58 | width: 6px;
59 | display: inline-block;
60 |
61 | -webkit-animation: sk-stretchdelay 1.2s infinite ease-in-out;
62 | animation: sk-stretchdelay 1.2s infinite ease-in-out;
63 | }
64 |
65 | .spinner .rect2 {
66 | -webkit-animation-delay: -1.1s;
67 | animation-delay: -1.1s;
68 | }
69 |
70 | .spinner .rect3 {
71 | -webkit-animation-delay: -1s;
72 | animation-delay: -1s;
73 | }
74 |
75 | .spinner .rect4 {
76 | -webkit-animation-delay: -0.9s;
77 | animation-delay: -0.9s;
78 | }
79 |
80 | .spinner .rect5 {
81 | -webkit-animation-delay: -0.8s;
82 | animation-delay: -0.8s;
83 | }
84 |
85 | @-webkit-keyframes sk-stretchdelay {
86 | 0%,
87 | 40%,
88 | 100% {
89 | -webkit-transform: scaleY(0.4);
90 | }
91 | 20% {
92 | -webkit-transform: scaleY(1);
93 | }
94 | }
95 |
96 | @keyframes sk-stretchdelay {
97 | 0%,
98 | 40%,
99 | 100% {
100 | transform: scaleY(0.4);
101 | -webkit-transform: scaleY(0.4);
102 | }
103 | 20% {
104 | transform: scaleY(1);
105 | -webkit-transform: scaleY(1);
106 | }
107 | }
108 |
--------------------------------------------------------------------------------
/static/styles/layout.css:
--------------------------------------------------------------------------------
1 | body {
2 | padding-top: 76px;
3 | color:white;
4 | background-color: #181818;
5 | }
6 |
7 | p{
8 | font-size: 1.25rem;
9 | }
10 |
11 | a:hover,:active,:visited{
12 | text-decoration: none;
13 | }
14 |
15 | .github{
16 | margin-bottom: 3px;
17 | margin-left: 6px;
18 | margin-right: 6px;
19 | }
--------------------------------------------------------------------------------
/static/uploads/traffic-test.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/static/uploads/traffic-test.mp4
--------------------------------------------------------------------------------
/static/videos/finalVideo.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/static/videos/finalVideo.mp4
--------------------------------------------------------------------------------
/templates/aboutus.html:
--------------------------------------------------------------------------------
1 | {% extends "layout.html" %} {% block content %}
2 |
3 |
4 |
5 |
6 | About Us
7 |
8 |
9 | Scroll down to get to know the Smart Traffic App creators and learn a bit more about this app!
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 | Hey there, we are the Smart Traffic Crew. We are three students studying Electrical and Computer Engineering at the
19 | University of Waterloo. We think that traffic control in our modern-day cities is not digitalized as well as it could be.
20 | Using Matlab we set out to make a script that could use computer vision to record data about traffic stops automatically.
21 | The script we ended up producing counts how many automobiles pass through an area of road at any given time, making sure to
22 | account for duplicates and vehicles that have left the field of view. This helps regulate the density of traffic. We also
23 | decided to publish this script on a website so users can interact with it and submit their own videos of traffic! We decided
24 | to make a monolithic web application using Flask as this website would remain fairly simple and focus on the performance of
25 | the Matlab script. In order to integrate the MATLAB code into a Flask application, we converted our code into a Python library using
26 | the Matlab Compiler Runtime. Scroll down to meet the fresh young minds who made this application!
27 |
28 |
29 |
30 |
31 |
32 | My name is Sathira Katugaha. I am a Computer Engineering student at the University of Waterloo. I contributed to the
33 | construction and architecture of this website and format of the functions in the MATLAB script. I have previously
34 | interned as a Front-end Software Developer for Android and the Web twice at SAP and am currently helping found a new
35 | Software Company. Other than making web and mobile apps I enjoy going on runs, martial arts (specifically Tae Kwon Do),
36 | sketching and photography.
37 |
38 |
39 |
40 |
41 |
42 | My name is Patrik Beqo. I am a Computer Engineering student at the University of Waterloo. I aided in making the
43 | MATLAB algorithm to detect/track the vehicles and the design of the website. I spent my first two co-ops in the
44 | medical industry where I was an IT specialist and QA analyst. I have since upgraded to a Software Developer role at
45 | Ritual. In my free time I enjoy going on hikes, playing sports, and watching movies.
46 |
47 |
48 |
49 |
50 |
51 | My name is Sarvath Sharma. I am an Electrical Engineering student at the University of Waterloo. I helped architect the
52 | computer vision and image processing, and created the integration of the MATLAB code into a Python library. My first two
53 | co-ops were at Solink as a QA Specialist and Junior Developer; I then worked at WATONOMOUS (an autonomous car design team)
54 | where I was a Power Systems Specialist. In my spare time I enjoy working out, gaming, anime and spending time with friends
55 | and family.
56 |
--------------------------------------------------------------------------------
/vehicleDetection/GettingStarted.html:
--------------------------------------------------------------------------------
Getting Started with the motionTracking Python Package

The Library Compiler in MATLAB® Compiler SDK™ creates Python® packages that can be integrated with applications written in Python. It also generates sample Python driver code that can be used to integrate and test the generated components. You can use this guide to set up your environment and run your sample driver application.

Note: Sample Python driver code is only generated if sample MATLAB code is included during the packaging phase. Samples can be found in the folder named "samples".

If you have full administrator privileges and install to the default location, you do not need to specify any options. Otherwise, use --user to install to your home folder, or --prefix="installdir" to install to "installdir". In the latter case, add "installdir" to the PYTHONPATH environment variable.
--------------------------------------------------------------------------------
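For context, the compiled package is driven from Python the same way `MatlabProcessor.py` does it: install the generated package with `python setup.py install`, call `initialize()` to start the MATLAB Runtime, invoke the exported function, and `terminate()` when done. A minimal sketch (it assumes the package is installed and that the matching MATLAB Runtime, 9.9 / R2020b, is available on the machine):

# Drive the compiled motionTracking package, mirroring MatlabProcessor.py.
import os

import motionTracking

def run_motion_tracking(project_dir="./vehicleDetection"):
    cwd = os.getcwd()
    os.chdir(project_dir)                  # motionTracking.m uses relative paths
    runtime = motionTracking.initialize()  # starts the MATLAB Runtime
    try:
        # Runs the exported function; writes finalVideo.avi and finalData.csv
        runtime.motionTracking(nargout=0)
        return True
    except Exception:
        return False
    finally:
        runtime.terminate()
        os.chdir(cwd)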
/vehicleDetection/motionTracking.m:
--------------------------------------------------------------------------------
1 | % We are implementing a multiple object motion tracking system that
2 | % MathWorks developed. Our initial/personal implementation can be found in
3 | % Tests/videoRendering
4 |
5 | % Using Kalman Filter and Motion Based tracking to determine and track
6 | % vehicles
7 |
8 | % Function runs video detection with the assistance of mini helper functions
9 | function motionTracking()
10 | % Create new object to analyze
11 | videoObj = setupSystem();
12 |
13 | % Creates an empty array of structs with properties to track
14 | trackArr = initializeTracks(); % Create an empty array of tracks.
15 |
16 | nextId = 1; % ID of the next track
17 |
18 | % Initial data of the number of vehicles
19 | global oldFrame;
20 | global totalCars;
21 | oldFrame = 0;
22 | totalCars = 0;
23 |
24 | % Number of training frames
25 | nTrainingFrames = 150;
26 |
27 | %Stores total frames in video without training frames
28 | nFrames = videoObj.reader.NumFrames - nTrainingFrames;
29 |
30 | if nFrames < 200
31 | nFramesStr = num2str(nFrames);
32 | warningStr = "Not enough frames in video. Contains following number of frames: ";
33 | nFramesStr = append(warningStr, nFramesStr);
34 | ME = MException("MyVideo:notEnoughFrames", nFramesStr, nFrames);
35 | throw(ME)
36 | end
37 |
38 | % Call to calibrating function
39 | calibrating(nTrainingFrames);
40 |
41 | %Keeps count of frames
42 | intervalCounter = 0;
43 |
44 | %This should be changed to 108000 for production ( 30 minutes )
45 | numFramesPerInterval = 100;
46 |
47 | %Keeps track of the index in array ( Starts at 1 for Matlab )
48 | index = 1;
49 |
50 | %Data will be stored here, each index represents a time interval
51 | dataToExport = zeros(1, ceil(nFrames / numFramesPerInterval));
52 |
53 | % Initialize the video writer
54 | open(videoObj.finalVideo);
55 |
56 | % Detection and Vehicle count for every frame in the video
57 | while hasFrame(videoObj.reader)
58 | % Stores a single frame of the video
59 | currFrame = readFrame(videoObj.reader);
60 | % Performs image filtering and blob analysis, then stores the centroids,
61 | % bboxes and the filtered Image
62 | [centroids, bboxes, filteredImage] = detectObjects(currFrame);
63 | % Predicts the new location of detected objects
64 | predictNewLocations();
65 | % This function decides whether or not to use the predicted location
66 | % based on confidence of detection and minimized cost
67 | [assignments, unassignedTracks, unassignedDetections] = ...
68 | detectionToTrackAssignment();
69 | % Updates assigned tracks with their newly detected locations
70 | updateAssignedTracks();
71 | % Marks unassigned tracks as invisible and ages them
72 | updateUnassignedTracks();
73 | % delete tracks for objects that leave frame
74 | deleteLostTracks();
75 | % Creates new tracks for objects that enter frame
76 | createNewTracks();
77 | %Displays results
78 | displayTrackingResults();
79 |
80 | %Adds to interval data
81 | if intervalCounter == numFramesPerInterval
82 | dataToExport(index) = totalCars;
83 | index = index + 1;
84 | % Resets the car count
85 | totalCars = 0;
86 | oldFrame = 0;
87 | intervalCounter = 0;
88 | else
89 | intervalCounter = intervalCounter + 1;
90 | end
91 |
92 | % Write the new frames into the video
93 | writeVideo(videoObj.finalVideo, currFrame);
94 | end
95 |
96 | % End off the video writer
97 | close(videoObj.finalVideo);
98 |
99 | % Adds remaining cars to end of array
100 | dataToExport(index) = totalCars;
101 |
102 |
103 | %%%%%% EXPORT FINAL DATA %%%%%%%%%%%%%%%%%%
104 |
105 | writematrix(dataToExport, 'finalData.csv')
106 |
107 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
108 |
109 | %%%%%%%%%%%%% FUNCTION DEFINITIONS %%%%%%%%%%%%%%%%%%%%%%%%%%%%
110 |
111 | % Calibration function
112 | function calibrating(trnframes)
113 |
114 | for i=1 : trnframes
115 |
116 | singleFrame = readFrame(videoObj.reader);
117 |
118 | % Train model
119 | step(videoObj.detector, singleFrame);
120 |
121 | % Insert Text
122 | % position = [10,10];
123 | % box_color = 'black';
124 | % calImage = insertText(singleFrame,position,'Calibrating...',...
125 | % 'FontSize',18,'BoxColor', box_color,'TextColor','white');
126 |
127 | % Output video with calibrating text in top left corner
128 | % imshow(calImage);
129 |
130 | end
131 | end
132 |
133 | % Initial function to setup environment
134 | function videoObj = setupSystem()
135 | % Constructor function that initializes a new object to analyze
136 |
137 | % Video Reader method
138 | file = fullfile('..', 'static', 'uploads', 'traffic-test.mp4');
139 | videoObj.reader = VideoReader(file);
140 |
141 | % We are using 2 video player methods, one for the displaying and one
142 | % for the foreground detector
143 | % videoObj.filteredPlayer = vision.VideoPlayer('Position', [740, 400, 700, 400]);
144 | % videoObj.videoPlayer = vision.VideoPlayer('Position', [20, 400, 700, 400]);
145 |
146 | % Generate the video writer object
147 | videoObj.finalVideo = VideoWriter('finalVideo.avi', 'Motion JPEG AVI');
148 | videoObj.finalVideo.FrameRate = videoObj.reader.FrameRate;
149 |
150 | % Now we need to just add the methods for the Foreground Detector and
151 | % Blob Analysis of the images
152 | videoObj.detector = vision.ForegroundDetector('NumGaussians', 3, ...
153 | 'NumTrainingFrames', 150, 'MinimumBackgroundRatio', 0.7);
154 |
155 | videoObj.blobAnalyser = vision.BlobAnalysis('BoundingBoxOutputPort', true, ...
156 | 'AreaOutputPort', true, 'CentroidOutputPort', true, ...
157 | 'MinimumBlobArea', 400);
158 | end
159 |
160 | % Function creates an empty array of structs with properties to track
161 | function trackArr = initializeTracks()
162 | % create an empty array of tracks
163 | trackArr = struct(...
164 | 'id', {}, ...
165 | 'bbox', {}, ...
166 | 'kalmanFilter', {}, ...
167 | 'age', {}, ...
168 | 'totalVisibleCount', {}, ...
169 | 'consecutiveInvisibleCount', {});
170 | end
171 |
172 | % Function performs image filtering and blob analysis
173 | function [centroids, bboxes, filteredImage] = detectObjects(currFrame)
174 |
175 | % Detect foreground.
176 | filteredImage = videoObj.detector.step(currFrame);
177 |
178 | % Apply morphological operations to remove noise and fill in holes.
179 | filteredImage = imopen(filteredImage, strel('rectangle', [3,3]));
180 | filteredImage = imclose(filteredImage, strel('rectangle', [15, 15]));
181 | filteredImage = imfill(filteredImage, 'holes');
182 |
183 | % Perform blob analysis to find connected components.
184 | [~, centroids, bboxes] = videoObj.blobAnalyser.step(filteredImage);
185 | end
186 |
187 | % This function is responsible for predicting where the object will be if
188 | % it was covered by an external object (bridge, overpass, etc)
189 | function predictNewLocations()
190 | % By using the Kalman Filter (by MathWorks) we can predict the
191 | % location of each centroid in the given frame. We just need to update
192 | % the bbox around it to show that we have an idea as to where it is
193 | for i = 1:length(trackArr)
194 | bbox = trackArr(i).bbox;
195 |
196 | % Use the Kalman filter to track the object
197 | % We are assuming the velocity is constant so the prediction will
198 | % follow that given speed
199 | predictedCentroid = predict(trackArr(i).kalmanFilter);
200 |
201 | % Update the bounding box so that it follows the centroid
202 | predictedCentroid = int32(predictedCentroid) - bbox(3:4) / 2;
203 | trackArr(i).bbox = [predictedCentroid, bbox(3:4)];
204 | end
205 | end
206 |
207 | % This function decides whether or not to use the predicted location
208 | % based on confidence of detection and minimized cost
209 | function [assignments, unassignedTracks, unassignedDetections] = ...
210 | detectionToTrackAssignment()
211 |
212 | nTracks = length(trackArr);
213 | nDetections = size(centroids, 1);
214 |
215 | % Compute the cost of assigning each detection to each track.
216 | cost = zeros(nTracks, nDetections);
217 | for i = 1:nTracks
218 | cost(i, :) = distance(trackArr(i).kalmanFilter, centroids);
219 | end
220 |
221 | % Solve the assignment problem using built in function.
222 | costOfNonAssignment = 20;
223 | [assignments, unassignedTracks, unassignedDetections] = ...
224 | assignDetectionsToTracks(cost, costOfNonAssignment);
225 | end
226 |
227 | % This function updates and corrects the location estimation we make for
228 | % the tracks we detect
229 | % and updates the age of the tracks accordingly
230 | function updateAssignedTracks()
231 | % finds number of tracks to correct
232 | numAssignedTracks = size(assignments, 1);
233 | for i = 1:numAssignedTracks
234 | % gets id of current track
235 | trackIdx = assignments(i, 1);
236 | % gets id of the detection for the track
237 | detectionIdx = assignments(i, 2);
238 | % gets the centroid from detection
239 | centroid = centroids(detectionIdx, :);
240 | % gets the box drawn for the detection
241 | bbox = bboxes(detectionIdx, :);
242 |
243 | % With the new centroid, corrects and updates the previous track
244 | correct(trackArr(trackIdx).kalmanFilter, centroid);
245 |
246 | % We can now replace the predicted bounding box with the detected one
247 | trackArr(trackIdx).bbox = bbox;
248 |
249 | % The track gains age for each update
250 | trackArr(trackIdx).age = trackArr(trackIdx).age + 1;
251 |
252 | % The visibility of the track was updated so we update the count
253 | trackArr(trackIdx).totalVisibleCount = ...
254 | trackArr(trackIdx).totalVisibleCount + 1;
255 | % The invisible count must be set to 0 now that we have corrected
256 | % the prediction
257 | trackArr(trackIdx).consecutiveInvisibleCount = 0;
258 | end
259 | end
260 |
261 | % This function makes sure unassigned tracks are invisible
262 | function updateUnassignedTracks()
263 | % for each track in the unassigned tracks
264 | for i = 1:length(unassignedTracks)
265 | % get the unassigned track
266 | ind = unassignedTracks(i);
267 | % update the age of the unassigned track
268 | trackArr(ind).age = trackArr(ind).age + 1;
269 | % mark unassigned track as invisible
270 | trackArr(ind).consecutiveInvisibleCount = ...
271 | trackArr(ind).consecutiveInvisibleCount + 1;
272 | end
273 | end
274 |
275 | % Function deletes tracks that have been invisible for too many
276 | % consecutive frames
277 | function deleteLostTracks()
278 | if isempty(trackArr)
279 | return;
280 | end
281 |
282 | invisibleForTooLong = 20;
283 | ageThreshold = 8;
284 |
285 | % Compute the fraction of the track's age for which it was visible.
286 | ages = [trackArr(:).age];
287 | totalVisibleCounts = [trackArr(:).totalVisibleCount];
288 | visibility = totalVisibleCounts ./ ages;
289 |
290 | % Find the indices of 'lost' tracks.
291 | lostInds = (ages < ageThreshold & visibility < 0.6) | ...
292 | [trackArr(:).consecutiveInvisibleCount] >= invisibleForTooLong;
293 |
294 | % Delete lost tracks.
295 | trackArr = trackArr(~lostInds);
296 | end
297 |
298 | % This function creates new tracks from unassigned detections.
299 | % Assume that any unassigned detection is a start of a new track.
300 | function createNewTracks()
301 | centroids = centroids(unassignedDetections, :);
302 | bboxes = bboxes(unassignedDetections, :);
303 |
304 | for i = 1:size(centroids, 1)
305 |
306 | centroid = centroids(i,:);
307 | bbox = bboxes(i, :);
308 |
309 | % Create a Kalman filter object.
310 | kalmanFilter = configureKalmanFilter('ConstantVelocity', ...
311 | centroid, [200, 50], [100, 25], 100);
312 |
313 | % Create a new track.
314 | newTrack = struct(...
315 | 'id', nextId, ...
316 | 'bbox', bbox, ...
317 | 'kalmanFilter', kalmanFilter, ...
318 | 'age', 1, ...
319 | 'totalVisibleCount', 1, ...
320 | 'consecutiveInvisibleCount', 0);
321 |
322 | % Add it to the array of tracks.
323 | trackArr(end + 1) = newTrack;
324 |
325 | % Increment the next id.
326 | nextId = nextId + 1;
327 | end
328 | end
329 |
330 | %This function draws a bounding box and label ID for each track ...
331 | % on the video frame and the foreground mask.
332 | %It then displays the frame and the mask in their respective video players
333 | function displayTrackingResults()
334 | % Convert the frame and the mask to uint8 RGB.
335 | currFrame = im2uint8(currFrame);
336 | filteredImage = uint8(repmat(filteredImage, [1, 1, 3])) .* 255;
337 |
338 | minVisibleCount = 8;
339 | if ~isempty(trackArr)
340 |
341 | % Noisy detections tend to result in short-lived tracks.
342 | % Only display tracks that have been visible for more than
343 | % a minimum number of frames.
344 | reliableTrackInds = ...
345 | [trackArr(:).totalVisibleCount] > minVisibleCount;
346 | reliableTracks = trackArr(reliableTrackInds);
347 |
348 | % Display the objects. If an object has not been detected
349 | % in this frame, display its predicted bounding box.
350 | if ~isempty(reliableTracks)
351 | % Get bounding boxes.
352 | bboxes = cat(1, reliableTracks.bbox);
353 |
354 | % Get ids.
355 | ids = int32([reliableTracks(:).id]);
356 |
357 | % Create labels for objects indicating the ones for
358 | % which we display the predicted rather than the actual
359 | % location.
360 | labels = cellstr(int2str(ids'));
361 | predictedTrackInds = ...
362 | [reliableTracks(:).consecutiveInvisibleCount] > 0;
363 | isPredicted = cell(size(labels));
364 | isPredicted(predictedTrackInds) = {' predicted'};
365 | labels = strcat(labels, isPredicted);
366 |
367 |
368 | % A Stack ADT to update the total number of cars count
369 | currFrameNumCars = size(bboxes, 1);
370 |
371 | if currFrameNumCars >= oldFrame
372 | totalCars = totalCars + (currFrameNumCars - oldFrame);
373 | oldFrame = currFrameNumCars;
374 | else
375 | oldFrame = currFrameNumCars;
376 | end
377 |
378 | % Draw the objects on the frame.
379 | currFrame = insertObjectAnnotation(currFrame, 'rectangle', ...
380 | bboxes, labels);
381 |
382 | % Display total cars counter
383 | currFrame = insertText(currFrame, [10 10], totalCars,...
384 | 'BoxOpacity', 1, 'FontSize', 15);
385 | end
386 | end
387 |
388 | % Display the mask and the frame.
389 | % videoObj.filteredPlayer.step(filteredImage);
390 | % videoObj.videoPlayer.step(currFrame);
391 | end
392 | end
393 |
--------------------------------------------------------------------------------
/vehicleDetection/motionTracking.prj:
--------------------------------------------------------------------------------
1 |
2 |
3 | motionTracking
4 |
5 |
6 | 1.0
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 | \motionTracking\
15 | option.installpath.programfiles
16 |
17 |
18 | You must have Python 2.7, 3.6, 3.7, and/or 3.8 on your machine to install this package. Go to the directory containing your package and the file setup.py and execute:
19 |
20 | python setup.py install [options]
21 |
22 | For details, see GettingStarted.html, distributed with this package, or the official Python documentation at:
23 |
24 | https://docs.python.org/2/install/index.html
25 | ${PROJECT_ROOT}\motionTracking\for_testing
26 | ${PROJECT_ROOT}\motionTracking\for_redistribution_files_only
27 | ${PROJECT_ROOT}\motionTracking\for_redistribution
28 | ${PROJECT_ROOT}\motionTracking
29 | false
30 |
31 | subtarget.python.module
32 |
33 |
34 |
35 | true
36 | false
37 | false
38 | MyAppInstaller_web
39 | MyAppInstaller_mcr
40 | MyAppInstaller_app
41 | true
42 | false
43 |
44 | false
45 | false
46 |
47 | false
48 | false
49 |
50 |
51 |
52 | Class1
53 |
54 | D:\GitHub\Smart-Traffic-Control-System\vehicleDetection
55 |
56 | option.cpp.all
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 | ${PROJECT_ROOT}\motionTracking.m
106 |
107 |
108 |
109 |
110 | ${PROJECT_ROOT}\motionTracking.m
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 | D:\GitHub\Smart-Traffic-Control-System\vehicleDetection\motionTracking\for_testing\motionTracking
121 | D:\GitHub\Smart-Traffic-Control-System\vehicleDetection\motionTracking\for_testing\setup.py
122 |
123 |
124 |
125 | D:\MATLAB\R2020b
126 |
127 |
128 |
129 |
130 |
131 |
132 | true
133 |
134 |
135 |
136 |
137 | true
138 |
139 |
140 |
141 |
142 | false
143 | false
144 | true
145 | false
146 | false
147 | false
148 | false
149 | false
150 | 10.0
151 | false
152 | true
153 | win64
154 | true
155 |
156 |
157 |
--------------------------------------------------------------------------------
/vehicleDetection/motionTracking/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2015-2020 MathWorks, Inc.
2 |
3 | # This template is used to generate an __init__.py file for a particular deployable package.
4 |
5 | """ Package for executing deployed MATLAB functions """
6 |
7 | from __future__ import print_function
8 | import atexit
9 | import glob
10 | import importlib
11 | import os
12 | import os.path
13 | import pdb
14 | import platform
15 | import re
16 | import sys
17 | import weakref
18 |
19 | class _PathInitializer(object):
20 | PLATFORM_DICT = {'Windows': ['PATH','dll',''], 'Linux': ['LD_LIBRARY_PATH','so','libmw'], 'Darwin': ['DYLD_LIBRARY_PATH','dylib','libmw']}
21 | SUPPORTED_PYTHON_VERSIONS = ['2_7', '3_6', '3_7', '3_8']
22 | RUNTIME_VERSION_W_DOTS = '9.9'
23 | RUNTIME_VERSION_W_UNDERSCORES = '9_9'
24 | PACKAGE_NAME = 'motionTracking'
25 |
26 | def set_interpreter_version(self):
27 | """Make sure the interpreter version is supported."""
28 | ver = sys.version_info
29 | version = '{0}_{1}'.format(ver[0], ver[1])
30 |
31 | if version in _PathInitializer.SUPPORTED_PYTHON_VERSIONS:
32 | self.interpreter_version = version
33 | else:
34 | version_with_dot = version.replace("_", ".")
35 | raise EnvironmentError("Python {0} is not supported.".format(version_with_dot))
36 |
37 | def __init__(self):
38 | """Initialize the variables."""
39 | self.arch = ''
40 | self.is_linux = False
41 | self.is_mac = False
42 | self.is_windows = False
43 | self.mr_handle = None
44 | self.ml_handle = None
45 | self.system = ''
46 | self.cppext_handle = None
47 |
48 | # path to the folder that stores Python extensions and shared libraries
49 | self.bin_dir = ''
50 |
51 | # path to the folder that stores pure Python matlab_pysdk.runtime code (_runtime_dir)
52 | self.runtime_dir = ''
53 |
54 | # path to the folder that stores the pure Python matlab mlarray code used for type conversion
55 | self.ml_dir = ''
56 |
57 | self.set_interpreter_version()
58 | self.get_platform_info()
59 |
60 | this_folder = os.path.dirname(os.path.realpath(__file__))
61 | self.path_file_name = os.path.join(this_folder, 'paths.{0}.txt'.format(self.arch))
62 |
63 | self.instances_of_this_package = set([])
64 |
65 |
66 | def read_path_file(self):
67 | """Look for a file that lists items to add to path. If present, read it and add the paths."""
68 | filtered_lines = []
69 | if os.path.isfile(self.path_file_name):
70 | pth_file = open(self.path_file_name, 'r')
71 | lines = pth_file.readlines()
72 | for line in lines:
73 | stripped_line = line.strip()
74 | if stripped_line and stripped_line[0] != '#':
75 | filtered_lines.append(stripped_line)
76 | length = len(filtered_lines)
77 | if length == 3:
78 | (bin_dir, runtime_dir, ml_dir) = filtered_lines
79 | if (not os.path.isdir(bin_dir)) or (not os.path.isdir(runtime_dir)) or (not os.path.isdir(ml_dir)):
80 | return False
81 | else:
82 | (self.bin_dir, self.runtime_dir, self.ml_dir) = (bin_dir, runtime_dir, ml_dir)
83 | return True
84 | else:
85 | return False
86 |
87 | def write_path_file(self):
88 | """Write a file that lists items to add to path. If present, it will be overwritten."""
89 | existing_contents = ''
90 | if os.path.isfile(self.path_file_name):
91 | path_file = open(self.path_file_name, 'r')
92 | existing_contents = path_file.readlines()
93 | path_file.close()
94 |
95 | path_file = open(self.path_file_name, 'w')
96 | if self.system == 'Windows':
97 | print('# bin dir: added to both OS path and system path', file=path_file)
98 | else:
99 | print('# bin dir: added to system path', file=path_file)
100 | print(self.bin_dir, file=path_file)
101 | print('', file=path_file)
102 |
103 | print('# runtime dir: added to system path', file=path_file)
104 | print(self.runtime_dir, file=path_file)
105 | print('', file=path_file)
106 |
107 | print('# matlab (mlarray) dir: added to system path', file=path_file)
108 | print(self.ml_dir, file=path_file)
109 | print('', file=path_file)
110 |
111 | if existing_contents:
112 | print(existing_contents, file=path_file)
113 | path_file.close()
114 |
115 | def get_platform_info(self):
116 | """Ask Python for the platform and architecture."""
117 |
118 | # This will return 'Windows', 'Linux', or 'Darwin' (for Mac).
119 | self.system = platform.system()
120 | if not self.system in _PathInitializer.PLATFORM_DICT:
121 | raise RuntimeError('{0} is not a supported platform.'.format(self.system))
122 | else:
123 | # path_var is the OS-dependent name of the path variable ('PATH', 'LD_LIBRARY_PATH', 'DYLD_LIBRARY_PATH')
124 | (self.path_var, self.ext, self.lib_prefix) = _PathInitializer.PLATFORM_DICT[self.system]
125 |
126 | if self.system == 'Windows':
127 | self.is_windows = True
128 | bit_str = platform.architecture()[0]
129 | if bit_str == '64bit':
130 | self.arch = 'win64'
131 | elif bit_str == '32bit':
132 | self.arch = 'win32'
133 | else:
134 | raise RuntimeError('{0} is not supported.'.format(bit_str))
135 | elif self.system == 'Linux':
136 | self.is_linux = True
137 | self.arch = 'glnxa64'
138 | elif self.system == 'Darwin':
139 | self.is_mac = True
140 | self.arch = 'maci64'
141 | else:
142 | raise RuntimeError('Operating system {0} is not supported.'.format(self.system))
143 |
144 | def get_paths_from_os(self):
145 | """
146 | Look through the system path for a file whose name contains a runtime version
147 | corresponding to the one with which this package was produced.
148 | """
149 |
150 | # Concatenates the pieces into a string. The double parentheses are necessary.
151 | if self.system == 'Windows':
152 | file_to_find = ''.join((self.lib_prefix, 'mclmcrrt',
153 | _PathInitializer.RUNTIME_VERSION_W_UNDERSCORES, '.', self.ext))
154 | elif self.system == 'Linux':
155 | file_to_find = ''.join((self.lib_prefix, 'mclmcrrt', '.', self.ext, '.',
156 | _PathInitializer.RUNTIME_VERSION_W_DOTS))
157 | elif self.system == 'Darwin':
158 | file_to_find = ''.join((self.lib_prefix, 'mclmcrrt', '.',
159 | _PathInitializer.RUNTIME_VERSION_W_DOTS,
160 | '.', self.ext))
161 | else:
162 | raise RuntimeError('Operating system {0} is not supported.'.format(self.system))
163 |
164 | path_elements = []
165 | if self.path_var in os.environ:
166 | path_elements = os.environ[self.path_var].split(os.pathsep)
167 | if not path_elements:
168 | friendly_os_name = self.system
169 | if friendly_os_name == 'Darwin':
170 | friendly_os_name = 'Mac'
171 | raise RuntimeError('On {0}, you must set the environment variable "{1}" to a non-empty string. {2}'.format(
172 | friendly_os_name, self.path_var, 'For more details, see the package documentation.'))
173 |
174 | path_found = ''
175 | for elem in path_elements:
176 | filename = os.path.join(elem, file_to_find)
177 | if (os.path.isfile(filename)):
178 | path_found = elem
179 | break
180 | if not path_found:
181 | raise RuntimeError('Could not find an appropriate directory for MATLAB or the MATLAB runtime in {0}. Details: {1}'.format(
182 | self.path_var, file_to_find))
183 |
184 | path_components = re.split(r'\\|/', path_found)
185 |
186 | if path_components[-1]:
187 | last_path_component = path_components[-1]
188 | else:
189 | # The directory name ended with a slash, so the last item in the list was an empty string. Go back one more.
190 | last_path_component = path_components[-2]
191 |
192 | if last_path_component != self.arch:
193 | output_str = ''.join(('To call deployed MATLAB code on a {0} machine, you must run a {0} version of Python, ',
194 | 'and your {1} variable must contain an element pointing to "<MR>{2}runtime{2}{0}", ',
195 | 'where "<MR>" indicates a MATLAB or MATLAB Runtime root. ',
196 | 'Instead, the value found was as follows: {3}'))
197 | raise RuntimeError(output_str.format(self.arch, self.path_var, os.sep, path_found))
198 |
199 | matlabroot = os.path.dirname(os.path.dirname(os.path.normpath(path_found)))
200 | bin_dir = os.path.join(matlabroot, 'bin', self.arch)
201 | runtime_dir = os.path.join(matlabroot, 'toolbox', 'compiler_sdk', 'pysdk_py')
202 | ml_dir = os.path.join(runtime_dir, 'mlarray_dist')
203 | if not os.path.isdir(bin_dir):
204 | raise RuntimeError('Could not find the directory {0}'.format(bin_dir))
205 | if not os.path.isdir(runtime_dir):
206 | raise RuntimeError('Could not find the directory {0}'.format(runtime_dir))
207 | if not os.path.isdir(ml_dir):
208 | raise RuntimeError('Could not find the directory {0}'.format(ml_dir))
209 | (self.bin_dir, self.runtime_dir, self.ml_dir) = (bin_dir, runtime_dir, ml_dir)
210 |
211 | def update_paths(self):
212 | """Update the OS and Python paths."""
213 |
214 | #For Windows, add the bin_dir to the OS path. This is unnecessary
215 | #for Linux and Mac, where the OS can find this information via rpath.
216 | if self.is_windows:
217 | os.environ[self.path_var] = self.bin_dir + os.pathsep + os.environ[self.path_var]
218 |
219 | #Add all paths to the Python path.
220 | sys.path.insert(0, self.bin_dir)
221 | sys.path.insert(0, self.runtime_dir)
222 | sys.path.insert(0, self.ml_dir)
223 |
224 | def import_matlab_pysdk_runtime(self):
225 | """Import matlab_pysdk.runtime. Must be done after update_paths() and import_cppext() are called."""
226 | try:
227 | self.mr_handle = importlib.import_module('matlab_pysdk.runtime')
228 | except Exception as e:
229 | raise e
230 |
231 | if not hasattr(self.mr_handle, '_runtime_version_w_dots'):
232 | raise RuntimeError('Runtime version of package ({0}) does not match runtime version of previously loaded package'.format(
233 | _PathInitializer.RUNTIME_VERSION_W_DOTS))
234 | elif self.mr_handle._runtime_version_w_dots and (self.mr_handle._runtime_version_w_dots != _PathInitializer.RUNTIME_VERSION_W_DOTS):
235 | raise RuntimeError('Runtime version of package ({0}) does not match runtime version of previously loaded package ({1})'.format(
236 | _PathInitializer.RUNTIME_VERSION_W_DOTS,
237 | self.mr_handle._runtime_version_w_dots))
238 | else:
239 | self.mr_handle._runtime_version_w_dots = _PathInitializer.RUNTIME_VERSION_W_DOTS
240 |
241 | self.mr_handle._cppext_handle = self.cppext_handle
242 |
243 | def import_matlab(self):
244 | """Import the matlab package. Must be done after Python system path contains what it needs to."""
245 | try:
246 | self.ml_handle = importlib.import_module('matlab')
247 | except Exception as e:
248 | raise e
249 |
250 | def initialize_package(self):
251 | package_handle = self.mr_handle.DeployablePackage(self, self.PACKAGE_NAME, __file__)
252 | self.instances_of_this_package.add(weakref.ref(package_handle))
253 | package_handle.initialize()
254 | return package_handle
255 |
256 | def initialize_runtime(self, option_list):
257 | if not self.cppext_handle:
258 | raise RuntimeError('Cannot call initialize_application before import_cppext.')
259 | if self.is_mac:
260 | ignored_option_found = False
261 | for option in option_list:
262 | if option in ('-nodisplay', '-nojvm'):
263 | ignored_option_found = True
264 | break
265 | if ignored_option_found:
266 | print('WARNING: Options "-nodisplay" and "-nojvm" are ignored on Mac.')
267 | print('They must be passed to mwpython in order to take effect.')
268 | self.cppext_handle.initializeApplication(option_list)
269 |
270 | def terminate_runtime(self):
271 | if not self.cppext_handle:
272 | raise RuntimeError('Cannot call terminate_application before import_cppext.')
273 | self.cppext_handle.terminateApplication()
274 |
275 | def import_cppext(self):
276 | self.cppext_handle = importlib.import_module("matlabruntimeforpython" + self.interpreter_version)
277 |
278 | try:
279 | _pir = _PathInitializer()
280 | _pir.get_paths_from_os()
281 |
282 | _pir.update_paths()
283 | _pir.import_cppext()
284 | _pir.import_matlab_pysdk_runtime()
285 | _pir.import_matlab()
286 | except Exception as e:
287 | print("Exception caught during initialization of Python interface. Details: {0}".format(e))
288 | raise
289 | # We let the program exit normally.
290 |
291 | def initialize():
292 | """
293 | Initialize package and return a handle.
294 |
295 | Initialize a package consisting of one or more deployed MATLAB functions. The return
296 | value is used as a handle on which any of the functions can be executed. To wait
297 | for all graphical figures to close before continuing, call wait_for_figures_to_close()
298 | on the handle. To close the package, call terminate(), quit() or exit() (which are
299 | synonymous) on the handle. The terminate() function is executed automatically when the
300 | script or session ends.
301 |
302 | Returns
303 | handle - used to execute deployed MATLAB functions and to call terminate()
304 | """
305 | return _pir.initialize_package()
306 |
307 | def initialize_runtime(option_list):
308 | """
309 | Initialize runtime with a list of startup options.
310 |
311 | Initialize the MATLAB Runtime with a list of startup options that will affect
312 | all packages opened within the script or session. If it is not called
313 | explicitly, it will be executed automatically, with an empty list of options,
314 | by the first call to initialize(). Do not call initialize_runtime() after
315 | calling initialize().
316 |
317 | There is no corresponding terminate_runtime() call. The runtime is terminated
318 | automatically when the script or session ends.
319 |
320 | Parameters
321 | option_list - Python list of options; valid options are:
322 | -nodisplay (suppresses display functionality; Linux only)
323 | -nojvm (disables the Java Virtual Machine)
324 | """
325 | if option_list:
326 | if not isinstance(option_list, list) and not isinstance(option_list, tuple):
327 | raise SyntaxError('initialize_runtime takes a list or tuple of strings.')
328 | _pir.initialize_runtime(option_list)
329 |
330 | # Before terminating the process, call terminate_runtime() once on any package. This will
331 | # ensure graceful MATLAB runtime shutdown. After this call, the user should not use
332 | # any MATLAB-related function.
333 | # When running interactively, the user should call exit() after done using the package.
334 | # When running a script, the runtime will automatically be terminated when the script ends.
335 | def terminate_runtime():
336 | _pir.terminate_runtime()
337 |
338 | @atexit.register
339 | def __exit_packages():
340 | for package in _pir.instances_of_this_package:
341 | if package() is not None:
342 | package().terminate()
343 |
--------------------------------------------------------------------------------
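The module-level docstrings in the file above describe the public entry points of this generated package: initialize_runtime() for optional startup options, initialize() to obtain a handle, and terminate()/terminate_runtime() for shutdown. A minimal usage sketch follows; the deployed function name and its arguments are assumptions, since they depend on what was packaged with the Library Compiler.

# Minimal usage sketch (assumption: the package exposes a deployed function
# named "motionTracking" that takes a video path; adjust to the real signature).
import motionTracking

# Optional: pass MATLAB Runtime startup options before the first initialize().
motionTracking.initialize_runtime(['-nojvm'])

# initialize() returns a handle on which deployed MATLAB functions are called.
pkg = motionTracking.initialize()
try:
    # Hypothetical call; the repo's sample video is used here as the argument.
    pkg.motionTracking('static/uploads/traffic-test.mp4', nargout=0)
finally:
    # terminate() is also registered to run automatically at interpreter exit.
    pkg.terminate()

--------------------------------------------------------------------------------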
/vehicleDetection/motionTracking/motionTracking.ctf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SarvathSharma/Smart-Traffic-Control-System/dcd2b8aaeff71ea0a29e1677e9906efcfd6e384f/vehicleDetection/motionTracking/motionTracking.ctf
--------------------------------------------------------------------------------
/vehicleDetection/setup.py:
--------------------------------------------------------------------------------
1 | # Copyright 2015-2018 The MathWorks, Inc.
2 |
3 | from distutils.core import setup
4 | from distutils.command.clean import clean
5 | from distutils.command.install import install
6 |
7 | class InstallRuntime(install):
8 | # Calls the default run command, then deletes the build area
9 | # (equivalent to "setup clean --all").
10 | def run(self):
11 | install.run(self)
12 | c = clean(self.distribution)
13 | c.all = True
14 | c.finalize_options()
15 | c.run()
16 |
17 | if __name__ == '__main__':
18 |
19 | setup(
20 | name="matlabruntimeforpython",
21 | version="R2020b",
22 | description='A module to call MATLAB from Python',
23 | author='MathWorks',
24 | url='https://www.mathworks.com/',
25 | platforms=['Linux', 'Windows', 'MacOS'],
26 | packages=[
27 | 'motionTracking'
28 | ],
29 | package_data={'motionTracking': ['*.ctf']},
30 | # Executes the custom code above in order to delete the build area.
31 | cmdclass={'install': InstallRuntime}
32 | )
33 |
34 |
35 |
--------------------------------------------------------------------------------
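The setup script above registers a custom install command that runs the standard install and then removes the build area (the equivalent of "setup clean --all"). A hedged sketch of driving it from Python rather than the shell; the directory below is an assumption and should point at the folder containing this setup.py.

# Hedged sketch: run the generated installer with the current interpreter.
# "vehicleDetection" is an assumed relative path to the folder holding setup.py.
import subprocess
import sys

subprocess.check_call(
    [sys.executable, "setup.py", "install", "--user"],  # --user avoids needing admin rights
    cwd="vehicleDetection",
)

--------------------------------------------------------------------------------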
/vehicleDetection/v98/GettingStarted.html:
--------------------------------------------------------------------------------
1 |
2 |
3 | Getting Started with the motionTracking Python Package
4 |
13 |
14 |
15 |
16 | Getting Started with the motionTracking Python Package
17 | The Library Compiler in MATLAB® Compiler SDK™ creates Python® packages that can be integrated with applications written in Python. It also generates sample Python driver code that can be used to integrate and test the generated components. You can use this guide to set up your environment and run your sample driver application.
18 | Note: Sample Python driver code is only generated if sample MATLAB code is included during the packaging phase. Samples can be found in the folder named "samples".
If you have full administrator privileges and install to the default location, you do not need to specify any options. Otherwise, use --user to install to your home folder, or --prefix="installdir" to install to "installdir". In the latter case, add "installdir" to the PYTHONPATH environment variable.
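Per the note above, an install done with --prefix="installdir" is only importable once "installdir" is on the Python path. A small sketch of doing that for the current session only; the directory used is a hypothetical example of the prefix chosen at install time.

# Hedged sketch: make a --prefix-installed package importable for this session.
# The directory below is a hypothetical example of "installdir".
import sys

installdir = r"C:\matlab-packages"
if installdir not in sys.path:
    sys.path.insert(0, installdir)

import motionTracking  # should now resolve to the package installed under installdir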