├── Documentation
├── Poster
│ ├── ObjectTrackingCamera.pdf
│ ├── ObjectTrackingCamera.png
│ └── ObjectTrackingCamera.svg
├── Report
│ ├── Final Report.pdf
│ ├── Final Report.tex
│ ├── cmd.png
│ ├── enablecam.png
│ ├── frame.jpg
│ ├── gpio.jpg
│ ├── logo.png
│ └── servo.jpg
└── progress-presentation
│ ├── Final_Gimbal.jpg
│ ├── Progress-Presentation.aux
│ ├── Progress-Presentation.log
│ ├── Progress-Presentation.nav
│ ├── Progress-Presentation.out
│ ├── Progress-Presentation.pdf
│ ├── Progress-Presentation.snm
│ ├── Progress-Presentation.synctex.gz
│ ├── Progress-Presentation.tex
│ ├── Progress-Presentation.toc
│ ├── image1.jpg
│ ├── image2.jpg
│ ├── meanshift.jpg
│ └── result.png
├── Gimbal-Parts-List
├── 1. Brushless Gimbal Motor.jpg
├── 2. Brushless Gimbal Controller .jpg
├── 3. BGC, Damper balls, Bolts.jpg
├── Gimbal_3-axis (3d printing)
│ ├── LICENSE.txt
│ ├── SOURCES.txt
│ ├── attribution_card.html
│ ├── files
│ │ ├── OBGV5.1_top_motor_3-axis_50x50.stl
│ │ ├── OBGV5_3-axis_v5_50x50mm.123dx
│ │ ├── OBGV5_GoPro3_3-axis.stl
│ │ ├── OBGV5_arm1_3-axis.stl
│ │ ├── OBGV5_arm2_3-axis.stl
│ │ ├── OBGV5_arm3_3-axis_CG.stl
│ │ └── OBGV5_top_mount_3-axis_50x50.stl
│ └── images
│ │ ├── 37574ea8b0a8525690b11f20a5445401_preview_featured.jpg
│ │ ├── 564c6e88150f166901a402e6df957297_preview_featured.jpg
│ │ ├── 5d9f698bce4fd6c0918dfd25dc37a5ea_preview_featured.jpg
│ │ ├── 6cb8ffe381b2f736251d6571fcfda029_preview_featured.jpg
│ │ ├── 821ece2c3e98576eb87f1f1e1fac4e60_preview_featured.jpg
│ │ ├── OBGV5_arm1_3-axis_preview_featured.jpg
│ │ ├── OBGV5_arm3_3-axis_CG_preview_featured.jpg
│ │ ├── a7ecc62c2f14c9de5b260e0aaa503662_preview_featured.jpg
│ │ ├── bb9326229714158449c691285b203ab4_preview_featured.jpg
│ │ ├── cebbf42b03b8b3efce62912d2b5503ad_preview_featured.jpg
│ │ └── fc4df6ccc08186e6cf9512eb68fdb4d9_preview_featured.jpg
├── gimbal making (links).txt
└── gimbal making.docx
├── Material for Future work(Drone and Camera)
├── 07429411.pdf
├── 1-s2.0-S2212017316001171-main_2.pdf
├── 2011_iros_teuliere.pdf
├── Fahd_SPIE_06.pdf
└── Rozantsev_Flying_Objects_Detection_2015_CVPR_paper.pdf
├── Object Tracking Camera Task List 27.6.2016.docx
├── Other Algorithms(Object Tracking)
├── Consensus-based Matching and Tracking of Keypoints for Object Tracking
│ ├── functions.py
│ ├── get_points.py
│ ├── run.py
│ └── util.py
├── Matrioska Tracker
│ └── run.py
├── README.md
├── Tracking Learning Detection
│ ├── executable
│ │ ├── D3DCompiler_43.dll
│ │ ├── Qt5Core.dll
│ │ ├── Qt5Gui.dll
│ │ ├── Qt5Widgets.dll
│ │ ├── TLD.exe
│ │ ├── icudt49.dll
│ │ ├── icuin49.dll
│ │ ├── icuuc49.dll
│ │ ├── libEGL.dll
│ │ ├── libGLESv2.dll
│ │ ├── msvcp100.dll
│ │ ├── msvcr100.dll
│ │ ├── opencv_calib3d245.dll
│ │ ├── opencv_core245.dll
│ │ ├── opencv_features2d245.dll
│ │ ├── opencv_flann245.dll
│ │ ├── opencv_highgui245.dll
│ │ ├── opencv_imgproc245.dll
│ │ ├── opencv_legacy245.dll
│ │ ├── opencv_ml245.dll
│ │ ├── opencv_video245.dll
│ │ └── platforms
│ │ │ └── qwindows.dll
│ └── sourcecode
│ │ ├── CameraDS.cpp
│ │ ├── CameraDS.h
│ │ ├── Image.cpp
│ │ ├── Image.h
│ │ ├── MainWindow.cpp
│ │ ├── MainWindow.h
│ │ ├── MainWindow.ui
│ │ ├── TLD.pro
│ │ ├── TLD.pro.user
│ │ ├── TLD
│ │ ├── FerNNClassifier.cpp
│ │ ├── FerNNClassifier.h
│ │ ├── LKTracker.cpp
│ │ ├── LKTracker.h
│ │ ├── TLD.cpp
│ │ ├── TLD.h
│ │ ├── pyTLD
│ │ │ ├── FerNNClassifier.pyc
│ │ │ ├── TLD.py
│ │ │ ├── TLD.pyc
│ │ │ ├── get_points.py
│ │ │ ├── get_points.pyc
│ │ │ └── run.py
│ │ ├── tld_utils.cpp
│ │ └── tld_utils.h
│ │ ├── main.cpp
│ │ └── qedit.h
└── Tracking by SIFT, SURF feature matching, homography
│ ├── get_points.py
│ └── run.py
├── README.md
├── Research papers referred(Object Tracking)
├── 1432960037.pdf
├── CAM Shift
│ ├── 20.pdf
│ ├── 34.pdf
│ ├── An Improved CamShift Algorithm for Target Tracking in Video Surve.pdf
│ ├── C1657073313.pdf
│ ├── G0124146.pdf
│ ├── Object Tracking Using Improved CAMShift .pdf
│ ├── inpra-v2n2-05_3.pdf
│ └── p3-allen.pdf
├── CalonderLSF10.pdf
├── Maresca_The_Matrioska_Tracking_2014_CVPR_paper.pdf
├── brisk.pdf
├── cvpr_2015.pdf
├── ext camshift.pdf
├── lowe-ijcv04.pdf
└── orb_final.pdf
└── Tutorials
├── 1. Colored object tracking using HSV
├── Code
│ └── Colored_object_tracking_using_HSV.py
├── Colored object tracking using HSV.pdf
├── Images
│ ├── Capture.JPG
│ ├── Capture1.JPG
│ ├── Capture2.JPG
│ ├── Capture3.JPG
│ ├── Capture5.JPG
│ ├── Capture6.JPG
│ ├── image1.jpg
│ └── image2.jpg
└── Latex Code
│ ├── Colored object tracking using HSV.aux
│ ├── Colored object tracking using HSV.log
│ ├── Colored object tracking using HSV.out
│ ├── Colored object tracking using HSV.synctex.gz
│ ├── Colored object tracking using HSV.tex
│ └── Colored object tracking using HSV.toc
├── 2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking
├── Code
│ └── run.py
├── Images
│ ├── BackProjection.jpg
│ ├── Cam_Det.png
│ ├── Cam_ROI.png
│ ├── Histogram.jpg
│ ├── Original.jpg
│ ├── Vid_ROI.png
│ ├── Vid_det.png
│ ├── convergedellipse.png
│ ├── convergedimage.png
│ ├── drawing_code.JPG
│ ├── ellipsecomputation.png
│ ├── frame_code.JPG
│ ├── mc1.png
│ ├── mc2.png
│ ├── mc3.png
│ ├── mc4.png
│ ├── meanshift.jpg
│ ├── meanshift.png
│ ├── meanshiftagain.png
│ ├── ms1.png
│ ├── ms2.png
│ ├── ms2nd.png
│ ├── ms3.png
│ ├── ms3rd.png
│ ├── ms4.png
│ ├── msconverged.png
│ ├── msec.png
│ ├── msi.png
│ ├── newmeanshiftaxis.png
│ ├── result.png
│ ├── roi_code.JPG
│ ├── roiforellipse.png
│ ├── scale_estimation.JPG
│ └── shiftedwindow.png
├── Latex Code
│ ├── Tutorial.aux
│ ├── Tutorial.log
│ ├── Tutorial.out
│ ├── Tutorial.pdf
│ ├── Tutorial.synctex.gz
│ ├── Tutorial.tex
│ └── Tutorial.toc
└── Tutorial.pdf
├── 3. How to make your own gimbal
├── 3 axis gimbal 3D designs for 3D printing
│ ├── 3D design files
│ │ ├── OBGV5_3-axis_v5_50x50mm.123dx
│ │ ├── camera fixing mount.stl
│ │ ├── gimbal motor frame 1.stl
│ │ ├── gimbal motor frame 2.stl
│ │ ├── gimbal motor frame 3.stl
│ │ ├── lower vibration and shock observing mount.stl
│ │ └── upper vibration and shock observing mount.stl
│ ├── LICENSE.txt
│ ├── README.txt
│ ├── SOURCES.txt
│ ├── attribution_card.html
│ └── images
│ │ ├── 37574ea8b0a8525690b11f20a5445401_preview_featured.jpg
│ │ ├── 564c6e88150f166901a402e6df957297_preview_featured.jpg
│ │ ├── 5d9f698bce4fd6c0918dfd25dc37a5ea_preview_featured.jpg
│ │ ├── 6cb8ffe381b2f736251d6571fcfda029_preview_featured.jpg
│ │ ├── 821ece2c3e98576eb87f1f1e1fac4e60_preview_featured.jpg
│ │ ├── OBGV5_arm1_3-axis_preview_featured.jpg
│ │ ├── OBGV5_arm3_3-axis_CG_preview_featured.jpg
│ │ ├── a7ecc62c2f14c9de5b260e0aaa503662_preview_featured.jpg
│ │ ├── bb9326229714158449c691285b203ab4_preview_featured.jpg
│ │ ├── cebbf42b03b8b3efce62912d2b5503ad_preview_featured.jpg
│ │ └── fc4df6ccc08186e6cf9512eb68fdb4d9_preview_featured.jpg
├── Images
│ ├── 3_Axis_Gimbal_Example.jpg
│ ├── Aircraft_Movement.JPG
│ ├── Brushless_Gimbal_Motor.jpg
│ ├── Camera_Fixing_Mount.jpg
│ ├── Controller.jpg
│ ├── Final_Gimbal.jpg
│ ├── Gimbal_Motor_Frame_1.jpg
│ ├── Gimbal_Motor_Frame_2.jpg
│ ├── Gimbal_Motor_Frame_3.jpg
│ ├── Shock_Absorber_Balls.jpg
│ ├── balancing_pitch_1.jpg
│ ├── balancing_pitch_2.jpg
│ ├── balancing_pitch_axis.jpg
│ ├── balancing_roll_1.jpg
│ ├── balancing_roll_2.jpg
│ ├── balancing_roll_axis.jpg
│ ├── balancing_yaw_axis.jpg
│ ├── gimbal_making_phase_1.jpg
│ ├── lower_vibration_observing_mount.jpg
│ └── upper_vibration_observing_mount.jpg
├── Latex code
│ ├── Tutorial.aux
│ ├── Tutorial.log
│ ├── Tutorial.out
│ ├── Tutorial.pdf
│ ├── Tutorial.synctex.gz
│ ├── Tutorial.tex
│ └── Tutorial.toc
└── Tutorial.pdf
├── 4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame
├── Images
│ ├── Image1.JPG
│ ├── Image2.JPG
│ ├── Image3.JPG
│ ├── Image4.JPG
│ ├── back_projection.JPG
│ ├── camshift.JPG
│ ├── code_histogram_comparison.JPG
│ ├── eliminated_hsv_frame.JPG
│ ├── frame_elimination_code.JPG
│ ├── frame_hist_code.JPG
│ ├── frame_histogram.JPG
│ ├── giving_roi.JPG
│ ├── histogram_roi.JPG
│ ├── redetection2.JPG
│ ├── redetection3.JPG
│ ├── redetection_back_projection.JPG
│ ├── roi_histogram_code.JPG
│ ├── special_back_projection.JPG
│ ├── test.JPG
│ └── tracking_object.JPG
├── Latex code
│ ├── Tutorial.aux
│ ├── Tutorial.log
│ ├── Tutorial.out
│ ├── Tutorial.pdf
│ ├── Tutorial.synctex.gz
│ ├── Tutorial.tex
│ └── Tutorial.toc
├── Tutorial.pdf
└── code
│ ├── README.md
│ ├── functions.py
│ └── run.py
├── 5. Interfacing the Camera and Gimbal System with the Raspberry-Pi
├── Images
│ ├── cmd.png
│ ├── cmd2.jpg
│ ├── cmd3.jpg
│ ├── enablecam.png
│ ├── frame.jpg
│ ├── gpio.jpg
│ ├── image.jpg
│ └── servo.jpg
├── Interfacing the Camera and Gimbal system with the Raspberry-Pi.pdf
└── Latex code
│ ├── Interfacing the Camera and Gimbal system with the Raspberry-Pi.aux
│ ├── Interfacing the Camera and Gimbal system with the Raspberry-Pi.log
│ ├── Interfacing the Camera and Gimbal system with the Raspberry-Pi.out
│ ├── Interfacing the Camera and Gimbal system with the Raspberry-Pi.pdf
│ ├── Interfacing the Camera and Gimbal system with the Raspberry-Pi.synctex.gz
│ ├── Interfacing the Camera and Gimbal system with the Raspberry-Pi.tex
│ └── Interfacing the Camera and Gimbal system with the Raspberry-Pi.toc
├── 6. Object Tracking using a Pi-Cam interfaced on a Raspberry-Pi
├── Code
│ ├── README.md
│ ├── code.py
│ ├── code.pyc
│ ├── functions.py
│ └── functions.pyc
├── Images
│ ├── 1.png
│ ├── 2.png
│ ├── 3.png
│ ├── s1.jpg
│ ├── s2.jpg
│ └── s3.jpg
├── Latex code
│ ├── Object Tracking using a Pi-Cam interfaced on a Raspberry-Pi.aux
│ ├── Object Tracking using a Pi-Cam interfaced on a Raspberry-Pi.log
│ ├── Object Tracking using a Pi-Cam interfaced on a Raspberry-Pi.out
│ ├── Object Tracking using a Pi-Cam interfaced on a Raspberry-Pi.pdf
│ ├── Object Tracking using a Pi-Cam interfaced on a Raspberry-Pi.synctex.gz
│ ├── Object Tracking using a Pi-Cam interfaced on a Raspberry-Pi.tex
│ └── Object Tracking using a Pi-Cam interfaced on a Raspberry-Pi.toc
└── Object Tracking using a Pi-Cam interfaced on a Raspberry-Pi.pdf
└── Read me.txt
/Documentation/Poster/ObjectTrackingCamera.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Documentation/Poster/ObjectTrackingCamera.pdf
--------------------------------------------------------------------------------
/Documentation/Poster/ObjectTrackingCamera.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Documentation/Poster/ObjectTrackingCamera.png
--------------------------------------------------------------------------------
/Documentation/Report/Final Report.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Documentation/Report/Final Report.pdf
--------------------------------------------------------------------------------
/Documentation/Report/cmd.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Documentation/Report/cmd.png
--------------------------------------------------------------------------------
/Documentation/Report/enablecam.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Documentation/Report/enablecam.png
--------------------------------------------------------------------------------
/Documentation/Report/frame.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Documentation/Report/frame.jpg
--------------------------------------------------------------------------------
/Documentation/Report/gpio.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Documentation/Report/gpio.jpg
--------------------------------------------------------------------------------
/Documentation/Report/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Documentation/Report/logo.png
--------------------------------------------------------------------------------
/Documentation/Report/servo.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Documentation/Report/servo.jpg
--------------------------------------------------------------------------------
/Documentation/progress-presentation/Final_Gimbal.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Documentation/progress-presentation/Final_Gimbal.jpg
--------------------------------------------------------------------------------
/Documentation/progress-presentation/Progress-Presentation.aux:
--------------------------------------------------------------------------------
1 | \relax
2 | \providecommand\hyper@newdestlabel[2]{}
3 | \providecommand\HyperFirstAtBeginDocument{\AtBeginDocument}
4 | \HyperFirstAtBeginDocument{\ifx\hyper@anchor\@undefined
5 | \global\let\oldcontentsline\contentsline
6 | \gdef\contentsline#1#2#3#4{\oldcontentsline{#1}{#2}{#3}}
7 | \global\let\oldnewlabel\newlabel
8 | \gdef\newlabel#1#2{\newlabelxx{#1}#2}
9 | \gdef\newlabelxx#1#2#3#4#5#6{\oldnewlabel{#1}{{#2}{#3}}}
10 | \AtEndDocument{\ifx\hyper@anchor\@undefined
11 | \let\contentsline\oldcontentsline
12 | \let\newlabel\oldnewlabel
13 | \fi}
14 | \fi}
15 | \global\let\hyper@last\relax
16 | \gdef\HyperFirstAtBeginDocument#1{#1}
17 | \providecommand\HyField@AuxAddToFields[1]{}
18 | \providecommand\HyField@AuxAddToCoFields[2]{}
19 | \@writefile{toc}{\beamer@endinputifotherversion {3.36pt}}
20 | \@writefile{nav}{\beamer@endinputifotherversion {3.36pt}}
21 | \@writefile{nav}{\headcommand {\slideentry {0}{0}{1}{1/1}{}{0}}}
22 | \@writefile{nav}{\headcommand {\beamer@framepages {1}{1}}}
23 | \@writefile{toc}{\beamer@sectionintoc {1}{Overview of Project}{2}{0}{1}}
24 | \@writefile{nav}{\headcommand {\sectionentry {1}{Overview of Project}{2}{Overview of Project}{0}}}
25 | \@writefile{nav}{\headcommand {\beamer@sectionpages {1}{1}}}
26 | \@writefile{nav}{\headcommand {\beamer@subsectionpages {1}{1}}}
27 | \@writefile{nav}{\headcommand {\slideentry {1}{0}{2}{2/2}{}{0}}}
28 | \@writefile{nav}{\headcommand {\beamer@framepages {2}{2}}}
29 | \@writefile{nav}{\headcommand {\slideentry {1}{0}{3}{3/3}{}{0}}}
30 | \@writefile{nav}{\headcommand {\beamer@framepages {3}{3}}}
31 | \@writefile{toc}{\beamer@sectionintoc {2}{Overview of Tasks}{4}{0}{2}}
32 | \@writefile{nav}{\headcommand {\sectionentry {2}{Overview of Tasks}{4}{Overview of Tasks}{0}}}
33 | \@writefile{nav}{\headcommand {\beamer@sectionpages {2}{3}}}
34 | \@writefile{nav}{\headcommand {\beamer@subsectionpages {2}{3}}}
35 | \@writefile{nav}{\headcommand {\slideentry {2}{0}{4}{4/4}{}{0}}}
36 | \@writefile{nav}{\headcommand {\beamer@framepages {4}{4}}}
37 | \@writefile{nav}{\headcommand {\slideentry {2}{0}{5}{5/5}{}{0}}}
38 | \@writefile{nav}{\headcommand {\beamer@framepages {5}{5}}}
39 | \@writefile{toc}{\beamer@sectionintoc {3}{Tasks Accomplised}{6}{0}{3}}
40 | \@writefile{nav}{\headcommand {\sectionentry {3}{Tasks Accomplised}{6}{Tasks Accomplised}{0}}}
41 | \@writefile{nav}{\headcommand {\beamer@sectionpages {4}{5}}}
42 | \@writefile{nav}{\headcommand {\beamer@subsectionpages {4}{5}}}
43 | \@writefile{nav}{\headcommand {\slideentry {3}{0}{6}{6/6}{}{0}}}
44 | \@writefile{nav}{\headcommand {\beamer@framepages {6}{6}}}
45 | \@writefile{nav}{\headcommand {\slideentry {3}{0}{7}{7/7}{}{0}}}
46 | \@writefile{nav}{\headcommand {\beamer@framepages {7}{7}}}
47 | \@writefile{nav}{\headcommand {\slideentry {3}{0}{8}{8/8}{}{0}}}
48 | \@writefile{nav}{\headcommand {\beamer@framepages {8}{8}}}
49 | \@writefile{nav}{\headcommand {\slideentry {3}{0}{9}{9/9}{}{0}}}
50 | \@writefile{nav}{\headcommand {\beamer@framepages {9}{9}}}
51 | \@writefile{nav}{\headcommand {\slideentry {3}{0}{10}{10/10}{}{0}}}
52 | \@writefile{nav}{\headcommand {\beamer@framepages {10}{10}}}
53 | \@writefile{nav}{\headcommand {\slideentry {3}{0}{11}{11/11}{}{0}}}
54 | \@writefile{nav}{\headcommand {\beamer@framepages {11}{11}}}
55 | \@writefile{nav}{\headcommand {\slideentry {3}{0}{12}{12/12}{}{0}}}
56 | \@writefile{nav}{\headcommand {\beamer@framepages {12}{12}}}
57 | \@writefile{toc}{\beamer@sectionintoc {4}{Challenges Faced}{13}{0}{4}}
58 | \@writefile{nav}{\headcommand {\sectionentry {4}{Challenges Faced}{13}{Challenges Faced}{0}}}
59 | \@writefile{nav}{\headcommand {\beamer@sectionpages {6}{12}}}
60 | \@writefile{nav}{\headcommand {\beamer@subsectionpages {6}{12}}}
61 | \@writefile{nav}{\headcommand {\slideentry {4}{0}{13}{13/13}{}{0}}}
62 | \@writefile{nav}{\headcommand {\beamer@framepages {13}{13}}}
63 | \@writefile{toc}{\beamer@sectionintoc {5}{Future Plans}{14}{0}{5}}
64 | \@writefile{nav}{\headcommand {\sectionentry {5}{Future Plans}{14}{Future Plans}{0}}}
65 | \@writefile{nav}{\headcommand {\beamer@sectionpages {13}{13}}}
66 | \@writefile{nav}{\headcommand {\beamer@subsectionpages {13}{13}}}
67 | \@writefile{nav}{\headcommand {\slideentry {5}{0}{14}{14/14}{}{0}}}
68 | \@writefile{nav}{\headcommand {\beamer@framepages {14}{14}}}
69 | \@writefile{toc}{\beamer@sectionintoc {6}{Thank You}{15}{0}{6}}
70 | \@writefile{nav}{\headcommand {\sectionentry {6}{Thank You}{15}{Thank You}{0}}}
71 | \@writefile{nav}{\headcommand {\beamer@sectionpages {14}{14}}}
72 | \@writefile{nav}{\headcommand {\beamer@subsectionpages {14}{14}}}
73 | \@writefile{nav}{\headcommand {\slideentry {6}{0}{15}{15/15}{}{0}}}
74 | \@writefile{nav}{\headcommand {\beamer@framepages {15}{15}}}
75 | \@writefile{nav}{\headcommand {\beamer@partpages {1}{15}}}
76 | \@writefile{nav}{\headcommand {\beamer@subsectionpages {15}{15}}}
77 | \@writefile{nav}{\headcommand {\beamer@sectionpages {15}{15}}}
78 | \@writefile{nav}{\headcommand {\beamer@documentpages {15}}}
79 | \@writefile{nav}{\headcommand {\def \inserttotalframenumber {15}}}
80 |
--------------------------------------------------------------------------------
/Documentation/progress-presentation/Progress-Presentation.nav:
--------------------------------------------------------------------------------
1 | \beamer@endinputifotherversion {3.36pt}
2 | \headcommand {\slideentry {0}{0}{1}{1/1}{}{0}}
3 | \headcommand {\beamer@framepages {1}{1}}
4 | \headcommand {\sectionentry {1}{Overview of Project}{2}{Overview of Project}{0}}
5 | \headcommand {\beamer@sectionpages {1}{1}}
6 | \headcommand {\beamer@subsectionpages {1}{1}}
7 | \headcommand {\slideentry {1}{0}{2}{2/2}{}{0}}
8 | \headcommand {\beamer@framepages {2}{2}}
9 | \headcommand {\slideentry {1}{0}{3}{3/3}{}{0}}
10 | \headcommand {\beamer@framepages {3}{3}}
11 | \headcommand {\sectionentry {2}{Overview of Tasks}{4}{Overview of Tasks}{0}}
12 | \headcommand {\beamer@sectionpages {2}{3}}
13 | \headcommand {\beamer@subsectionpages {2}{3}}
14 | \headcommand {\slideentry {2}{0}{4}{4/4}{}{0}}
15 | \headcommand {\beamer@framepages {4}{4}}
16 | \headcommand {\slideentry {2}{0}{5}{5/5}{}{0}}
17 | \headcommand {\beamer@framepages {5}{5}}
18 | \headcommand {\sectionentry {3}{Tasks Accomplised}{6}{Tasks Accomplised}{0}}
19 | \headcommand {\beamer@sectionpages {4}{5}}
20 | \headcommand {\beamer@subsectionpages {4}{5}}
21 | \headcommand {\slideentry {3}{0}{6}{6/6}{}{0}}
22 | \headcommand {\beamer@framepages {6}{6}}
23 | \headcommand {\slideentry {3}{0}{7}{7/7}{}{0}}
24 | \headcommand {\beamer@framepages {7}{7}}
25 | \headcommand {\slideentry {3}{0}{8}{8/8}{}{0}}
26 | \headcommand {\beamer@framepages {8}{8}}
27 | \headcommand {\slideentry {3}{0}{9}{9/9}{}{0}}
28 | \headcommand {\beamer@framepages {9}{9}}
29 | \headcommand {\slideentry {3}{0}{10}{10/10}{}{0}}
30 | \headcommand {\beamer@framepages {10}{10}}
31 | \headcommand {\slideentry {3}{0}{11}{11/11}{}{0}}
32 | \headcommand {\beamer@framepages {11}{11}}
33 | \headcommand {\slideentry {3}{0}{12}{12/12}{}{0}}
34 | \headcommand {\beamer@framepages {12}{12}}
35 | \headcommand {\sectionentry {4}{Challenges Faced}{13}{Challenges Faced}{0}}
36 | \headcommand {\beamer@sectionpages {6}{12}}
37 | \headcommand {\beamer@subsectionpages {6}{12}}
38 | \headcommand {\slideentry {4}{0}{13}{13/13}{}{0}}
39 | \headcommand {\beamer@framepages {13}{13}}
40 | \headcommand {\sectionentry {5}{Future Plans}{14}{Future Plans}{0}}
41 | \headcommand {\beamer@sectionpages {13}{13}}
42 | \headcommand {\beamer@subsectionpages {13}{13}}
43 | \headcommand {\slideentry {5}{0}{14}{14/14}{}{0}}
44 | \headcommand {\beamer@framepages {14}{14}}
45 | \headcommand {\sectionentry {6}{Thank You}{15}{Thank You}{0}}
46 | \headcommand {\beamer@sectionpages {14}{14}}
47 | \headcommand {\beamer@subsectionpages {14}{14}}
48 | \headcommand {\slideentry {6}{0}{15}{15/15}{}{0}}
49 | \headcommand {\beamer@framepages {15}{15}}
50 | \headcommand {\beamer@partpages {1}{15}}
51 | \headcommand {\beamer@subsectionpages {15}{15}}
52 | \headcommand {\beamer@sectionpages {15}{15}}
53 | \headcommand {\beamer@documentpages {15}}
54 | \headcommand {\def \inserttotalframenumber {15}}
55 |
--------------------------------------------------------------------------------
/Documentation/progress-presentation/Progress-Presentation.out:
--------------------------------------------------------------------------------
1 | \BOOKMARK [2][]{Outline0.1}{Overview of Project}{}% 1
2 | \BOOKMARK [2][]{Outline0.2}{Overview of Tasks}{}% 2
3 | \BOOKMARK [2][]{Outline0.3}{Tasks Accomplised}{}% 3
4 | \BOOKMARK [2][]{Outline0.4}{Challenges Faced}{}% 4
5 | \BOOKMARK [2][]{Outline0.5}{Future Plans}{}% 5
6 | \BOOKMARK [2][]{Outline0.6}{Thank You}{}% 6
7 |
--------------------------------------------------------------------------------
/Documentation/progress-presentation/Progress-Presentation.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Documentation/progress-presentation/Progress-Presentation.pdf
--------------------------------------------------------------------------------
/Documentation/progress-presentation/Progress-Presentation.snm:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Documentation/progress-presentation/Progress-Presentation.snm
--------------------------------------------------------------------------------
/Documentation/progress-presentation/Progress-Presentation.synctex.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Documentation/progress-presentation/Progress-Presentation.synctex.gz
--------------------------------------------------------------------------------
/Documentation/progress-presentation/Progress-Presentation.toc:
--------------------------------------------------------------------------------
1 | \beamer@endinputifotherversion {3.36pt}
2 | \beamer@sectionintoc {1}{Overview of Project}{2}{0}{1}
3 | \beamer@sectionintoc {2}{Overview of Tasks}{4}{0}{2}
4 | \beamer@sectionintoc {3}{Tasks Accomplised}{6}{0}{3}
5 | \beamer@sectionintoc {4}{Challenges Faced}{13}{0}{4}
6 | \beamer@sectionintoc {5}{Future Plans}{14}{0}{5}
7 | \beamer@sectionintoc {6}{Thank You}{15}{0}{6}
8 |
--------------------------------------------------------------------------------
/Documentation/progress-presentation/image1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Documentation/progress-presentation/image1.jpg
--------------------------------------------------------------------------------
/Documentation/progress-presentation/image2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Documentation/progress-presentation/image2.jpg
--------------------------------------------------------------------------------
/Documentation/progress-presentation/meanshift.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Documentation/progress-presentation/meanshift.jpg
--------------------------------------------------------------------------------
/Documentation/progress-presentation/result.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Documentation/progress-presentation/result.png
--------------------------------------------------------------------------------
/Gimbal-Parts-List/1. Brushless Gimbal Motor.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/1. Brushless Gimbal Motor.jpg
--------------------------------------------------------------------------------
/Gimbal-Parts-List/2. Brushless Gimbal Controller .jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/2. Brushless Gimbal Controller .jpg
--------------------------------------------------------------------------------
/Gimbal-Parts-List/3. BGC, Damper balls, Bolts.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/3. BGC, Damper balls, Bolts.jpg
--------------------------------------------------------------------------------
/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/SOURCES.txt:
--------------------------------------------------------------------------------
1 | Sources for Open Brushless Gimbal (3-axis)
2 |
3 | Open Brushless Gimbal (http://www.thingiverse.com/thing:110731)
4 |
--------------------------------------------------------------------------------
/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/attribution_card.html:
--------------------------------------------------------------------------------
Open Brushless Gimbal (3-axis) by roumen
Published on January 5, 2016
www.thingiverse.com/thing:1247236
Creative Commons - Attribution - Non-Commercial
--------------------------------------------------------------------------------
/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/files/OBGV5.1_top_motor_3-axis_50x50.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/files/OBGV5.1_top_motor_3-axis_50x50.stl
--------------------------------------------------------------------------------
/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/files/OBGV5_3-axis_v5_50x50mm.123dx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/files/OBGV5_3-axis_v5_50x50mm.123dx
--------------------------------------------------------------------------------
/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/files/OBGV5_GoPro3_3-axis.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/files/OBGV5_GoPro3_3-axis.stl
--------------------------------------------------------------------------------
/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/files/OBGV5_arm1_3-axis.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/files/OBGV5_arm1_3-axis.stl
--------------------------------------------------------------------------------
/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/files/OBGV5_arm2_3-axis.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/files/OBGV5_arm2_3-axis.stl
--------------------------------------------------------------------------------
/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/files/OBGV5_arm3_3-axis_CG.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/files/OBGV5_arm3_3-axis_CG.stl
--------------------------------------------------------------------------------
/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/files/OBGV5_top_mount_3-axis_50x50.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/files/OBGV5_top_mount_3-axis_50x50.stl
--------------------------------------------------------------------------------
/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/37574ea8b0a8525690b11f20a5445401_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/37574ea8b0a8525690b11f20a5445401_preview_featured.jpg
--------------------------------------------------------------------------------
/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/564c6e88150f166901a402e6df957297_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/564c6e88150f166901a402e6df957297_preview_featured.jpg
--------------------------------------------------------------------------------
/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/5d9f698bce4fd6c0918dfd25dc37a5ea_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/5d9f698bce4fd6c0918dfd25dc37a5ea_preview_featured.jpg
--------------------------------------------------------------------------------
/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/6cb8ffe381b2f736251d6571fcfda029_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/6cb8ffe381b2f736251d6571fcfda029_preview_featured.jpg
--------------------------------------------------------------------------------
/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/821ece2c3e98576eb87f1f1e1fac4e60_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/821ece2c3e98576eb87f1f1e1fac4e60_preview_featured.jpg
--------------------------------------------------------------------------------
/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/OBGV5_arm1_3-axis_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/OBGV5_arm1_3-axis_preview_featured.jpg
--------------------------------------------------------------------------------
/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/OBGV5_arm3_3-axis_CG_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/OBGV5_arm3_3-axis_CG_preview_featured.jpg
--------------------------------------------------------------------------------
/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/a7ecc62c2f14c9de5b260e0aaa503662_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/a7ecc62c2f14c9de5b260e0aaa503662_preview_featured.jpg
--------------------------------------------------------------------------------
/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/bb9326229714158449c691285b203ab4_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/bb9326229714158449c691285b203ab4_preview_featured.jpg
--------------------------------------------------------------------------------
/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/cebbf42b03b8b3efce62912d2b5503ad_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/cebbf42b03b8b3efce62912d2b5503ad_preview_featured.jpg
--------------------------------------------------------------------------------
/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/fc4df6ccc08186e6cf9512eb68fdb4d9_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/Gimbal_3-axis (3d printing)/images/fc4df6ccc08186e6cf9512eb68fdb4d9_preview_featured.jpg
--------------------------------------------------------------------------------
/Gimbal-Parts-List/gimbal making (links).txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/gimbal making (links).txt
--------------------------------------------------------------------------------
/Gimbal-Parts-List/gimbal making.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Gimbal-Parts-List/gimbal making.docx
--------------------------------------------------------------------------------
/Material for Future work(Drone and Camera)/07429411.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Material for Future work(Drone and Camera)/07429411.pdf
--------------------------------------------------------------------------------
/Material for Future work(Drone and Camera)/1-s2.0-S2212017316001171-main_2.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Material for Future work(Drone and Camera)/1-s2.0-S2212017316001171-main_2.pdf
--------------------------------------------------------------------------------
/Material for Future work(Drone and Camera)/2011_iros_teuliere.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Material for Future work(Drone and Camera)/2011_iros_teuliere.pdf
--------------------------------------------------------------------------------
/Material for Future work(Drone and Camera)/Fahd_SPIE_06.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Material for Future work(Drone and Camera)/Fahd_SPIE_06.pdf
--------------------------------------------------------------------------------
/Material for Future work(Drone and Camera)/Rozantsev_Flying_Objects_Detection_2015_CVPR_paper.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Material for Future work(Drone and Camera)/Rozantsev_Flying_Objects_Detection_2015_CVPR_paper.pdf
--------------------------------------------------------------------------------
/Object Tracking Camera Task List 27.6.2016.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Object Tracking Camera Task List 27.6.2016.docx
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Consensus-based Matching and Tracking of Keypoints for Object Tracking/get_points.py:
--------------------------------------------------------------------------------
1 | # Import the required modules
2 | import cv2
3 |
4 | def run(im):
5 | cv2.imshow('frame', im)
6 |
7 | # Lists holding the top-left and bottom-right corners of the selected region.
8 | pts_1 = []
9 | pts_2 = []
10 |
11 | rects = []
12 | run.mouse_down = False
13 |
14 | def callback(event, x, y, flags, param):
15 | if event == cv2.EVENT_LBUTTONDOWN:
16 | if len(pts_2) == 1: #pts_2 contains one tuple.
17 | print "WARN: Cannot select another object."
18 | print "Delete the previously selected object using key `d` to mark a new location."
19 | return
20 | run.mouse_down = True
21 | pts_1.append((x, y))
22 | elif event == cv2.EVENT_LBUTTONUP and run.mouse_down == True:
23 | run.mouse_down = False
24 | pts_2.append((x, y))
25 | print "Object selected at [{}, {}]".format(pts_1[-1], pts_2[-1])
26 | elif event == cv2.EVENT_MOUSEMOVE and run.mouse_down == True:
27 | im_draw = im.copy()
28 | cv2.rectangle(im_draw, pts_1[-1], (x, y), (255,255,255), 3)
29 | cv2.imshow('frame', im_draw)
30 |
31 | print "Click and drag the mouse around the object to be tracked."
32 | cv2.setMouseCallback('frame', callback)
33 |
34 | print "Press key `p` to continue with the selected points."
35 | print "Press key `d` to discard the last object selected."
36 | print "Press key `q` to quit the program."
37 | while True:
38 | # Draw the rectangular boxes on the image
39 | for pt1, pt2 in zip(pts_1, pts_2):
40 | rects.append([pt1[0],pt2[0], pt1[1], pt2[1]])
41 | cv2.rectangle(im, pt1, pt2, (255, 255, 255), 3)
42 | # Display the cropped images
43 | cv2.imshow('frame', im)
44 | key = cv2.waitKey(30)
45 | if key == ord('p'):
46 | # Press key `p` to return the selected points
47 | cv2.destroyAllWindows()
48 | point= [(tl + br) for tl, br in zip(pts_1, pts_2)]
49 | corrected_point=check_point(point)
50 | return corrected_point
51 | elif key == ord('q'):
52 | # Press key `q` to quit the program
53 | print "Quitting without saving."
54 | exit()
55 | elif key == ord('d'):
56 | # Press key `d` to delete the last rectangular region
57 | if run.mouse_down == False and pts_1:
58 | print "Object deleted at [{}, {}]".format(pts_1[-1], pts_2[-1])
59 | pts_1.pop()
60 | pts_2.pop()
61 | im_disp = im.copy()
62 | else:
63 | print "No object to delete."
64 | cv2.destroyAllWindows()
65 | point= [(tl + br) for tl, br in zip(pts_1, pts_2)]
66 | return check_point(point)
67 |
68 | def check_point(points):
69 | for point in points:
70 | #to find min and max x coordinates
71 | if point[0] < point[2]:
89 | C1 = x > tl[0]
90 | C2 = y > tl[1]
91 | C3 = x < br[0]
92 | C4 = y < br[1]
93 |
94 | result = C1 & C2 & C3 & C4
95 |
96 | return result
97 |
98 | def keypoints_cv_to_np(keypoints_cv):
99 | keypoints = np.array([k.pt for k in keypoints_cv])
100 | return keypoints
101 |
102 | def find_nearest_keypoints(keypoints, pos, number=1):
103 | if type(pos) is tuple:
104 | pos = np.array(pos)
105 | if type(keypoints) is list:
106 | keypoints = keypoints_cv_to_np(keypoints)
107 |
108 | pos_to_keypoints = np.sqrt(np.power(keypoints - pos, 2).sum(axis=1))
109 | ind = np.argsort(pos_to_keypoints)
110 | return ind[:number]
111 |
112 | def draw_keypoints(keypoints, im, color=(255, 0, 0)):
113 |
114 | for k in keypoints:
115 | radius = 3 # int(k.size / 2)
116 | center = (int(k[0]), int(k[1]))
117 |
118 | # Draw circle
119 | cv2.circle(im, center, radius, color)
120 |
121 | def track(im_prev, im_gray, keypoints, THR_FB=20):
122 | if type(keypoints) is list:
123 | keypoints = keypoints_cv_to_np(keypoints)
124 |
125 | num_keypoints = keypoints.shape[0]
126 |
127 | # Status of tracked keypoint - True means successfully tracked
128 | status = [False] * num_keypoints
129 |
130 | # If at least one keypoint is active
131 | if num_keypoints > 0:
132 | # Prepare data for opencv:
133 | # Add singleton dimension
134 | # Use only first and second column
135 | # Make sure dtype is float32
136 | pts = keypoints[:, None, :2].astype(np.float32)
137 |
138 | # Calculate forward optical flow for prev_location
139 | nextPts, status, _ = cv2.calcOpticalFlowPyrLK(im_prev, im_gray, pts, None)
140 |
141 | # Calculate backward optical flow for prev_location
142 | pts_back, _, _ = cv2.calcOpticalFlowPyrLK(im_gray, im_prev, nextPts, None)
143 |
144 | # Remove singleton dimension
145 | pts_back = squeeze_pts(pts_back)
146 | pts = squeeze_pts(pts)
147 | nextPts = squeeze_pts(nextPts)
148 | status = status.squeeze()
149 |
150 | # Calculate forward-backward error
151 | fb_err = np.sqrt(np.power(pts_back - pts, 2).sum(axis=1))
152 |
153 | # Set status depending on fb_err and lk error
154 | large_fb = fb_err > THR_FB
155 | status = ~large_fb & status.astype(np.bool)
156 |
157 | nextPts = nextPts[status, :]
158 | keypoints_tracked = keypoints[status, :]
159 | keypoints_tracked[:, :2] = nextPts
160 |
161 | else:
162 | keypoints_tracked = np.array([])
163 | return keypoints_tracked, status
164 |
165 | def rotate(pt, rad):
166 | if(rad == 0):
167 | return pt
168 |
169 | pt_rot = np.empty(pt.shape)
170 |
171 | s, c = [f(rad) for f in (math.sin, math.cos)]
172 |
173 | pt_rot[:, 0] = c * pt[:, 0] - s * pt[:, 1]
174 | pt_rot[:, 1] = s * pt[:, 0] + c * pt[:, 1]
175 |
176 | return pt_rot
177 |
178 | def br(bbs):
179 |
180 | result = hstack((bbs[:, [0]] + bbs[:, [2]] - 1, bbs[:, [1]] + bbs[:, [3]] - 1))
181 |
182 | return result
183 |
184 | def bb2pts(bbs):
185 |
186 | pts = hstack((bbs[:, :2], br(bbs)))
187 |
188 | return pts
189 |
--------------------------------------------------------------------------------
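The track() helper shown above implements forward-backward Lucas-Kanade tracking: keypoints are tracked from the previous frame to the current one and back again, and any point whose forward-backward error exceeds THR_FB is discarded. The sketch below shows one way to wire it to the get_points.run() ROI selector, written for the same Python 2 / OpenCV 2.4 environment these scripts target. It assumes track() and draw_keypoints() are importable from the accompanying util.py; the corner count, thresholds and window name are illustrative choices, not part of the repository.

# Illustrative usage sketch: select an ROI with get_points.run(), seed
# Shi-Tomasi corners inside it, then follow them with the forward-backward
# LK tracker from util.py.
import cv2
import numpy as np
import get_points
import util

cap = cv2.VideoCapture(0)
ret, frame = cap.read()
prev_gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

# Drag a rectangle around the object; run() returns one (x1, y1, x2, y2) box per selection
x1, y1, x2, y2 = get_points.run(frame)[0]

# Seed keypoints with Shi-Tomasi corners restricted to the selected region
mask = np.zeros_like(prev_gray)
mask[min(y1, y2):max(y1, y2), min(x1, x2):max(x1, x2)] = 255
corners = cv2.goodFeaturesToTrack(prev_gray, 200, 0.01, 7, mask=mask)
keypoints = corners.reshape(-1, 2) if corners is not None else np.array([])

while True:
    ret, frame = cap.read()
    if not ret:
        break
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    # Track forward and backward; points whose FB error exceeds THR_FB are dropped
    keypoints, status = util.track(prev_gray, gray, keypoints, THR_FB=20)
    util.draw_keypoints(keypoints, frame)
    cv2.imshow('tracked keypoints', frame)
    if cv2.waitKey(30) & 0xFF == 27:  # Esc quits
        break
    prev_gray = gray

cap.release()
cv2.destroyAllWindows()

If every keypoint fails the forward-backward check (for example after a full occlusion), the tracked set comes back empty and the seeding step has to be repeated; the sketch above does not attempt re-detection.
--------------------------------------------------------------------------------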
/Other Algorithms(Object Tracking)/Matrioska Tracker/run.py:
--------------------------------------------------------------------------------
1 | #importing useful modules
2 | import numpy as np
3 | import cv2
4 | import sys
5 |
6 | #Global variables for track_window
7 | ix,iy,jx,jy = -1,-1,-1,-1
8 |
9 | # mouse callback function
10 | def draw_circle(event,x,y,flags,param):
11 | global ix,iy,jx,jy,drawing,mode
12 |
13 | if event == cv2.EVENT_LBUTTONDOWN:
14 | ix,iy = x,y
15 |
16 | elif event == cv2.EVENT_LBUTTONUP:
17 | jx,jy = x,y
18 | cv2.rectangle(frame,(ix,iy),(x,y),(255,0,0),2)
19 |
20 | #Initializing camera
21 | cap = cv2.VideoCapture(0)
22 |
23 | #Creating object of KNearest class which will be used in feature matching
24 | knn = cv2.KNearest()
25 |
26 | #Creating an object of the ORB (Oriented FAST and Rotated BRIEF) class, used to
27 | #extract keypoints (Features from Accelerated Segment Test) and descriptors
28 | #(Binary Robust Independent Elementary Features) from the object and the frame
29 | orb = cv2.ORB()
30 |
31 | #Creating a window
32 | cv2.namedWindow('frame',cv2.WINDOW_NORMAL)
33 |
34 | #Registering the mouse callback on the 'frame' window
35 | cv2.setMouseCallback('frame',draw_circle)
36 |
37 | #Flag to pause and play the video
38 | pause = False
39 |
40 | #Flag to start tracking
41 | track = False
42 |
43 | #Variables for training
44 | trainResponce = []
45 | train = []
46 |
47 | first_time = True
48 | #Main loop
49 | while(1):
50 | #Reading frame
51 | ret ,frame = cap.read()
52 | #on successful read
53 | if ret == True:
54 | #On first frame
55 | if first_time:
56 | print 'Press spacebar to select the ROI'
57 | print 'After selecting the ROI, press spacebar again'
58 | first_time = False
59 | #If space is pressed
60 | while(pause):
61 | #Show frame
62 | cv2.imshow('frame',frame)
63 | #If space is again pressed
64 | if cv2.waitKey(1) & 0xff == 32: #ascii of spacebar
65 | pause = False
66 |
67 | # setup initial location of window
68 | r,h,c,w = iy , (jy-iy) , ix , (jx-ix)
69 | #c,r,w,h = ix,iy,(jx-ix),(jy-iy)
70 | #track_window = (c,r,w,h)
71 |
72 | #Creating gray scale image
73 | frame_gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
74 | #Extracting keypoints and descriptors from the image
75 | kpFrame, desFrame = orb.detectAndCompute(frame_gray,None)
76 | #Positive means inside ROI
77 | #Negative means outside ROI
78 | kpPositive = []
79 | kpNegative = []
80 | desPositive = []
81 | desNegative = []
82 | #Separating keypoints and descriptors inside and outside the ROI
83 | for i in range (len(kpFrame)):
84 | if kpFrame[i].pt[0] > c and kpFrame[i].pt[0] < c+w and kpFrame[i].pt[1] > r and kpFrame[i].pt[1] < r+h:
85 | kpPositive.append(kpFrame[i])
86 | desPositive.append(desFrame[i])
87 | else:
88 | kpNegative.append(kpFrame[i])
89 | desNegative.append(desFrame[i])
90 | #Adding the descriptors to the training set
91 | train = train + desPositive + desNegative
92 | #Labelling positive descriptors with 1 and negative descriptors with 0
93 | trainResponceTemp = [1 for i in (desPositive)]
94 | trainResponceTemp = trainResponceTemp + [0 for i in (desNegative)]
95 | trainResponce = trainResponce + trainResponceTemp
96 | track = True
97 | break
98 | #Start tracking
99 | if track == True:
100 | #Read next frame
101 | ret ,frame = cap.read()
102 | #Gray scale image
103 | frame_gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
104 | #Getting keypoints and descriptors in this frame as test data
105 | kpTest, test = orb.detectAndCompute(frame_gray,None)
106 |
107 | train = np.asarray(train).astype(np.float32)
108 | trainResponce = np.asarray(trainResponce).astype(np.float32)
109 | #Training KNearest object
110 | knn.train(train,trainResponce)
111 | test = np.asarray(test).astype(np.float32)
112 | #Finding the 5 nearest neighbours of the test data in the training data
113 | ret,result,neighbours,dist = knn.find_nearest(test,k=5)
114 |
115 | kp = []
116 | nearest_dist = np.array(dist[:,0])
117 |
118 | #Outlier Filtering by nearest neighbour distance ratio
119 | for i in range (len(nearest_dist)-1):
120 | if nearest_dist[i] < 0.7*nearest_dist[i+1]:
121 | result[i+1] = 0.
122 |
123 | #Discarding all matches whose first neighbour is a background keypoint
124 | for i in range (len(neighbours)):
125 | if neighbours[i][0] == 0.:
126 | result[i] = 0.
127 |
128 | #Saving good keypoints
129 | for i in range (len(kpTest)):
130 | if result[i] == 1.:
131 | kp.append(kpTest[i])
132 |
133 | #Drawing good keypoints
134 | frame = cv2.drawKeypoints(frame,kp,color=(0,255,0), flags=0)
135 |
136 | #Showing frame
137 | cv2.imshow('frame',frame)
138 | k = cv2.waitKey(60) & 0xff
139 | if k == 32:
140 | pause = True
141 | elif k == 27:
142 | break
143 | else:
144 | break
145 |
146 | cv2.destroyAllWindows()
147 | cap.release()
148 |
--------------------------------------------------------------------------------
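The script above targets the OpenCV 2.4 Python API: cv2.ORB() and cv2.KNearest() were removed in later OpenCV releases. As a reference, here is a minimal sketch of the same idea (ORB descriptors labelled as object or background, then classified frame by frame with a k-nearest-neighbour vote) using the OpenCV 3+/4 names; the helper functions, the k value and the ratio threshold below are illustrative assumptions, not part of the repository.

# Sketch of the ORB + k-NN matching step with the newer OpenCV API (assumes
# OpenCV >= 3): cv2.ORB_create and cv2.ml.KNearest_create replace the 2.4 names.
import cv2
import numpy as np

orb = cv2.ORB_create()            # replaces cv2.ORB()
knn = cv2.ml.KNearest_create()    # replaces cv2.KNearest()

def train_classifier(descriptors, labels):
    # descriptors: ORB descriptors gathered from the first frame
    # labels: 1.0 for descriptors inside the ROI, 0.0 for background
    samples = np.asarray(descriptors, dtype=np.float32)
    responses = np.asarray(labels, dtype=np.float32)
    knn.train(samples, cv2.ml.ROW_SAMPLE, responses)

def match_object_keypoints(frame_gray, ratio=0.7):
    # Detect ORB keypoints in the new frame and keep the ones the classifier
    # assigns to the object, whose nearest training descriptor is also an
    # object descriptor and is distinctly closer than the second match.
    keypoints, descriptors = orb.detectAndCompute(frame_gray, None)
    if descriptors is None:
        return []
    test = np.asarray(descriptors, dtype=np.float32)
    _, result, neighbours, dist = knn.findNearest(test, k=5)
    good = (result.ravel() == 1.0)                 # k-NN vote says "object"
    good &= (neighbours[:, 0] == 1.0)              # nearest neighbour is an object descriptor
    good &= (dist[:, 0] < ratio * dist[:, 1])      # Lowe-style distance-ratio check
    return [kp for kp, keep in zip(keypoints, good) if keep]

The ratio test in this sketch compares each keypoint's first and second nearest training descriptors, which is a slightly different filter from the one in the original script (that one compares the nearest distances of consecutive test keypoints); both aim to suppress ambiguous matches.
--------------------------------------------------------------------------------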
/Other Algorithms(Object Tracking)/README.md:
--------------------------------------------------------------------------------
1 | This directory contains the other algorithms we worked on.
2 |
3 | Details for these algorithms can be found at https://github.com/eYSIP-2016/Object-Tracking-Camera/wiki/Other-Algorithms
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/D3DCompiler_43.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/D3DCompiler_43.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/Qt5Core.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/Qt5Core.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/Qt5Gui.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/Qt5Gui.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/Qt5Widgets.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/Qt5Widgets.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/TLD.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/TLD.exe
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/icudt49.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/icudt49.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/icuin49.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/icuin49.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/icuuc49.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/icuuc49.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/libEGL.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/libEGL.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/libGLESv2.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/libGLESv2.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/msvcp100.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/msvcp100.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/msvcr100.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/msvcr100.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/opencv_calib3d245.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/opencv_calib3d245.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/opencv_core245.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/opencv_core245.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/opencv_features2d245.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/opencv_features2d245.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/opencv_flann245.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/opencv_flann245.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/opencv_highgui245.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/opencv_highgui245.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/opencv_imgproc245.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/opencv_imgproc245.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/opencv_legacy245.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/opencv_legacy245.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/opencv_ml245.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/opencv_ml245.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/opencv_video245.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/opencv_video245.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/platforms/qwindows.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/executable/platforms/qwindows.dll
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/CameraDS.cpp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/CameraDS.cpp
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/CameraDS.h:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/CameraDS.h
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/Image.cpp:
--------------------------------------------------------------------------------
1 | #include "Image.h"
2 | #include "TLD/TLD.h"
3 | #include "CameraDS.h"
4 |
5 | using namespace cv;
6 |
7 | void mouseCallback(int event, int x, int y, int flags, void *param)
8 | {
9 | if( CV_EVENT_LBUTTONDOWN == event )
10 | {
11 | ((int*)param)[0] = x;
12 | ((int*)param)[1] = y;
13 | }
14 | else if( CV_EVENT_MOUSEMOVE == event && CV_EVENT_FLAG_LBUTTON == flags )
15 | {
16 | ((int*)param)[2] = x;
17 | ((int*)param)[3] = y;
18 | ((int*)param)[4] = 1;
19 | }
20 | else if( CV_EVENT_LBUTTONUP == event )
21 | {
22 | if( 1 == ((int*)param)[4] )
23 | {
24 | ((int*)param)[2] = x;
25 | ((int*)param)[3] = y;
26 | ((int*)param)[4] = 2;
27 | }
28 | }
29 | }
30 |
31 | Image::Image(QObject *parent) :
32 | QThread(parent)
33 | {
34 | tld = 0;
35 |
36 | runFlag = 1;
37 |
38 | mousePos[4] = 0;
39 |
40 | namedWindow("Image");
41 | setMouseCallback("Image", mouseCallback, mousePos);
42 |
43 | start();
44 | }
45 |
46 | Image::~Image()
47 | {
48 | wait();
49 |
50 | delete tld;
51 | }
52 |
53 | void Image::run()
54 | {
55 | CameraDS cameraDS;
56 |
57 | if( cameraDS.OpenCamera(0, false, 640, 480) )
58 | {
59 | runFlag = 2;
60 | }
61 | else
62 | {
63 | runFlag = 0;
64 |
65 | return;
66 | }
67 |
68 | Mat frame;
69 | double multiple = 0.5;
70 | Mat lastGray;
71 | Mat currentGray;
72 | BoundingBox boundingBox;
73 | bool status;
74 |
75 | double start;
76 | double end;
77 | char fps[32];
78 |
79 | while( runFlag )
80 | {
81 | frame = cameraDS.QueryFrame();
82 |
83 | if( frame.empty() )
84 | break;
85 |
86 | flip(frame, frame, 1);
87 |
88 | if( 1 == mousePos[4] )
89 | {
90 | boundingBox.x = min(mousePos[0], mousePos[2]) - 2;
91 | boundingBox.y = min(mousePos[1], mousePos[3]) - 2;
92 | boundingBox.width = abs(mousePos[0] - mousePos[2]) + 4;
93 | boundingBox.height = abs(mousePos[1] - mousePos[3]) + 4;
94 |
95 | if( (boundingBox.width - 6) * multiple >= 15 && (boundingBox.height - 6) * multiple >= 15 )
96 | {
97 | rectangle(frame, boundingBox, Scalar(0, 255, 255), 3);
98 | }
99 | else
100 | {
101 | rectangle(frame, boundingBox, Scalar(0, 0, 255), 3);
102 | }
103 | }
104 | else if( 2 == mousePos[4] )
105 | {
106 | mousePos[4] = 0;
107 |
108 | boundingBox.width = (abs(mousePos[0] - mousePos[2]) - 2) * multiple;
109 | boundingBox.height = (abs(mousePos[1] - mousePos[3]) - 2) * multiple;
110 |
111 | if( boundingBox.width < 15 || boundingBox.height < 15 )
112 | continue;
113 |
114 | boundingBox.x = (min(mousePos[0], mousePos[2]) + 1) * multiple;
115 | boundingBox.y = (min(mousePos[1], mousePos[3]) + 1) * multiple;
116 |
117 | cvtColor(frame, lastGray, CV_BGR2GRAY);
118 |
119 | resize(lastGray, lastGray, Size(), multiple, multiple);
120 |
121 | if( tld )
122 | {
123 | delete tld;
124 | }
125 | tld = new TLD;
126 | tld->init(lastGray, boundingBox);
127 |
128 | status = true;
129 |
130 | continue;
131 | }
132 |
133 | if( tld )
134 | {
135 | cvtColor(frame, currentGray, CV_BGR2GRAY);
136 |
137 | resize(currentGray, currentGray, Size(), multiple, multiple);
138 |
139 | tld->processFrame(lastGray, currentGray, boundingBox, status);
140 |
141 | if( status )
142 | {
143 | boundingBox.x = boundingBox.x / multiple - 3;
144 | boundingBox.y = boundingBox.y / multiple - 3;
145 | boundingBox.width = boundingBox.width / multiple + 6;
146 | boundingBox.height = boundingBox.height / multiple + 6;
147 |
148 | rectangle(frame, boundingBox, Scalar(0, 255, 0), 3);
149 | }
150 |
151 | swap(lastGray, currentGray);
152 | }
153 |
154 | start = end;
155 |
156 | end = getTickCount();
157 |
158 | sprintf(fps, "fps : %0.2f", 1.0 / (end - start) * getTickFrequency());
159 |
160 | putText(frame,
161 | fps,
162 | Point(10, frame.size().height - 15),
163 | CV_FONT_HERSHEY_DUPLEX,
164 | 0.5,
165 | Scalar(0, 255, 255));
166 |
167 | if( cvGetWindowHandle("Image") )
168 | {
169 | imshow("Image", frame);
170 | }
171 | else
172 | {
173 | runFlag = 0;
174 |
175 | emit windowClosed();
176 | }
177 | }
178 | }
179 |
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/Image.h:
--------------------------------------------------------------------------------
1 | #ifndef IMAGE_H
2 | #define IMAGE_H
3 |
4 | #include <QThread>
5 |
6 | class TLD;
7 |
8 | class Image : public QThread
9 | {
10 | Q_OBJECT
11 | public:
12 | explicit Image(QObject *parent = 0);
13 | ~Image();
14 |
15 | protected:
16 | TLD *tld;
17 | int runFlag;
18 | int mousePos[5];
19 |
20 | protected:
21 | void run();
22 |
23 | signals:
24 | void windowClosed();
25 | };
26 |
27 | #endif // IMAGE_H
28 |
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/MainWindow.cpp:
--------------------------------------------------------------------------------
1 | #include "MainWindow.h"
2 | #include "ui_MainWindow.h"
3 | #include "Image.h"
4 |
5 | MainWindow::MainWindow(QWidget *parent) :
6 | QMainWindow(parent),
7 | ui(new Ui::MainWindow)
8 | {
9 | ui->setupUi(this);
10 |
11 | image = new Image(this);
12 |
13 | connect(image, SIGNAL(windowClosed()), qApp, SLOT(quit()));
14 | }
15 |
16 | MainWindow::~MainWindow()
17 | {
18 | delete image;
19 |
20 | delete ui;
21 | }
22 |
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/MainWindow.h:
--------------------------------------------------------------------------------
1 | #ifndef MAINWINDOW_H
2 | #define MAINWINDOW_H
3 |
4 | #include <QMainWindow>
5 |
6 | namespace Ui {
7 | class MainWindow;
8 | }
9 |
10 | class Image;
11 |
12 | class MainWindow : public QMainWindow
13 | {
14 | Q_OBJECT
15 |
16 | public:
17 | explicit MainWindow(QWidget *parent = 0);
18 | ~MainWindow();
19 |
20 | private:
21 | Ui::MainWindow *ui;
22 |
23 | protected:
24 | Image *image;
25 | };
26 |
27 | #endif // MAINWINDOW_H
28 |
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/MainWindow.ui:
--------------------------------------------------------------------------------
1 | <ui version="4.0">
2 |  <class>MainWindow</class>
3 |  <widget class="QMainWindow" name="MainWindow">
4 |   <property name="geometry">
5 |    <rect>
6 |     <x>0</x>
7 |     <y>0</y>
8 |     <width>400</width>
9 |     <height>300</height>
10 |    </rect>
11 |   </property>
12 |   <property name="windowTitle">
13 |    <string>MainWindow</string>
14 |   </property>
15 |   <widget class="QWidget" name="centralWidget"/>
16 |   <widget class="QMenuBar" name="menuBar"/>
17 |   <widget class="QToolBar" name="mainToolBar"/>
18 |   <widget class="QStatusBar" name="statusBar"/>
19 |  </widget>
20 |  <layoutdefault spacing="6" margin="11"/>
21 |  <resources/>
22 |  <connections/>
23 | </ui>
24 |
25 |
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/TLD.pro:
--------------------------------------------------------------------------------
1 | #-------------------------------------------------
2 | #
3 | # Project created by QtCreator 2012-10-10T16:04:07
4 | #
5 | #-------------------------------------------------
6 |
7 | QT += core gui
8 |
9 | greaterThan(QT_MAJOR_VERSION, 4) : QT += widgets
10 |
11 | TARGET = TLD
12 | TEMPLATE = app
13 |
14 | INCLUDEPATH += D:/OpenCV/2.4.5/build/include
15 |
16 | CONFIG(debug, debug | release){
17 | LIBS += D:/OpenCV/2.4.5/build/x86/vc10/lib/opencv_core245d.lib \
18 | D:/OpenCV/2.4.5/build/x86/vc10/lib/opencv_highgui245d.lib \
19 | D:/OpenCV/2.4.5/build/x86/vc10/lib/opencv_imgproc245d.lib \
20 | D:/OpenCV/2.4.5/build/x86/vc10/lib/opencv_legacy245d.lib \
21 | D:/OpenCV/2.4.5/build/x86/vc10/lib/opencv_video245d.lib
22 | }
23 |
24 | CONFIG(release, debug | release){
25 | LIBS += D:/OpenCV/2.4.5/build/x86/vc10/lib/opencv_core245.lib \
26 | D:/OpenCV/2.4.5/build/x86/vc10/lib/opencv_highgui245.lib \
27 | D:/OpenCV/2.4.5/build/x86/vc10/lib/opencv_imgproc245.lib \
28 | D:/OpenCV/2.4.5/build/x86/vc10/lib/opencv_legacy245.lib \
29 | D:/OpenCV/2.4.5/build/x86/vc10/lib/opencv_video245.lib
30 | }
31 |
32 | SOURCES += main.cpp \
33 | MainWindow.cpp \
34 | Image.cpp \
35 | TLD/TLD.cpp \
36 | TLD/tld_utils.cpp \
37 | TLD/LKTracker.cpp \
38 | TLD/FerNNClassifier.cpp \
39 | CameraDS.cpp
40 |
41 | HEADERS += MainWindow.h \
42 | Image.h \
43 | TLD/TLD.h \
44 | TLD/tld_utils.h \
45 | TLD/LKTracker.h \
46 | TLD/FerNNClassifier.h \
47 | CameraDS.h \
48 | qedit.h
49 |
50 | FORMS += MainWindow.ui
51 |
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/TLD/FerNNClassifier.h:
--------------------------------------------------------------------------------
1 | #ifndef FERNNCLASSIFIER_H
2 | #define FERNNCLASSIFIER_H
3 |
4 | #include <opencv2/opencv.hpp>
5 | #include <stdio.h>
6 |
7 | class FerNNClassifier
8 | {
9 | private:
10 | float thr_fern;
11 | int structSize;
12 | int nstructs;
13 | float valid;
14 | float ncc_thesame;
15 | float thr_nn;
16 | int acum;
17 |
18 | public:
19 | //Parameters
20 | float thr_nn_valid;
21 |
22 | void init();
23 | void prepare(const std::vector<cv::Size> &scales);
24 | void getFeatures(const cv::Mat &image, const int &scale_idx, std::vector<int> &fern);
25 | void update(const std::vector<int> &fern, const int &C, const int &N);
26 | float measure_forest(const std::vector<int> &fern);
27 | void trainF(const std::vector<std::pair<std::vector<int>,int> > &ferns, const int &resample);
28 | void trainNN(const std::vector<cv::Mat> &nn_examples);
29 | void NNConf(const cv::Mat &example, std::vector<int> &isin, float &rsconf, float &csconf);
30 | void evaluateTh(const std::vector<std::pair<std::vector<int>, int> > &nXT, const std::vector<cv::Mat> &nExT);
31 | //Ferns Members
32 | int getNumStructs(){return nstructs;}
33 | float getFernTh(){return thr_fern;}
34 | float getNNTh(){return thr_nn;}
35 | struct Feature
36 | {
37 | uchar x1, y1, x2, y2;
38 | Feature() : x1(0), y1(0), x2(0), y2(0) {}
39 | Feature(int _x1, int _y1, int _x2, int _y2)
40 | : x1((uchar)_x1), y1((uchar)_y1), x2((uchar)_x2), y2((uchar)_y2)
41 | {}
42 | bool operator ()(const cv::Mat& patch) const
43 | { return patch.at<uchar>(y1,x1) > patch.at<uchar>(y2, x2); }
44 | };
45 | std::vector<std::vector<Feature> > features; //Ferns features (one std::vector for each scale)
46 | std::vector< std::vector<int> > nCounter; //negative counter
47 | std::vector< std::vector<int> > pCounter; //positive counter
48 | std::vector< std::vector<float> > posteriors; //Ferns posteriors
49 | float thrN; //Negative threshold
50 | float thrP; //Positive thershold
51 | //NN Members
52 | std::vector<cv::Mat> pEx; //NN positive examples
53 | std::vector<cv::Mat> nEx; //NN negative examples
54 | };
55 |
56 | #endif
57 |
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/TLD/LKTracker.cpp:
--------------------------------------------------------------------------------
1 | #include "LKTracker.h"
2 |
3 | using namespace cv;
4 |
5 | LKTracker::LKTracker(){
6 | term_criteria = TermCriteria( TermCriteria::COUNT+TermCriteria::EPS, 20, 0.03);
7 | window_size = Size(4,4);
8 | level = 5;
9 | lambda = 0.5;
10 | }
11 |
12 |
13 | bool LKTracker::trackf2f(const Mat &img1,
14 | const Mat &img2,
15 | vector<Point2f> &points1,
16 | vector<Point2f> &points2)
17 | {
18 | //TODO!:implement c function cvCalcOpticalFlowPyrLK() or Faster tracking function
19 | //Forward-Backward tracking
20 | calcOpticalFlowPyrLK( img1,img2, points1, points2, status,similarity, window_size, level, term_criteria, lambda, 0);
21 | calcOpticalFlowPyrLK( img2,img1, points2, pointsFB, FB_status,FB_error, window_size, level, term_criteria, lambda, 0);
22 | //Compute the real FB-error
23 | for( int i= 0; i<points1.size(); i++ ){
24 | FB_error[i] = norm(pointsFB[i]-points1[i]);
25 | }
26 | //Filtering out points with FB_error[i] > median(FB_error) && points with sim_error[i] > median(sim_error)
27 | normCrossCorrelation(img1,img2,points1,points2);
28 | return filterPts(points1,points2);
29 | }
30 |
31 | float LKTracker::getFB()
32 | {
33 | return fbmed;
34 | }
35 |
36 | void LKTracker::normCrossCorrelation(const Mat &img1,
37 | const Mat &img2,
38 | vector<Point2f> &points1,
39 | vector<Point2f> &points2)
40 | {
41 | Mat rec0(10,10,CV_8U);
42 | Mat rec1(10,10,CV_8U);
43 | Mat res(1,1,CV_32F);
44 |
45 | for (int i = 0; i < points1.size(); i++) {
46 | if (status[i] == 1) {
47 | getRectSubPix( img1, Size(10,10), points1[i],rec0 );
48 | getRectSubPix( img2, Size(10,10), points2[i],rec1);
49 | matchTemplate( rec0,rec1, res, CV_TM_CCOEFF_NORMED);
50 | similarity[i] = ((float *)(res.data))[0];
51 |
52 | } else {
53 | similarity[i] = 0.0;
54 | }
55 | }
56 | rec0.release();
57 | rec1.release();
58 | res.release();
59 | }
60 |
61 |
62 | bool LKTracker::filterPts(vector<Point2f> &points1, vector<Point2f> &points2)
63 | {
64 | //Get Error Medians
65 | simmed = median(similarity);
66 | size_t i, k;
67 | for( i=k = 0; i<points2.size(); i++ ){
68 | if( !status[i] )
69 | continue;
70 | if( similarity[i] > simmed){
71 | points1[k] = points1[i];
72 | points2[k] = points2[i];
73 | FB_error[k] = FB_error[i];
74 | k++;
75 | }
76 | }
77 | if (k==0)
78 | return false;
79 | points1.resize(k);
80 | points2.resize(k);
81 | FB_error.resize(k);
82 |
83 | fbmed = median(FB_error);
84 | for( i=k = 0; i<points2.size(); i++ ){
85 | if( !status[i] )
86 | continue;
87 | if( FB_error[i] <= fbmed){
88 | points1[k] = points1[i];
89 | points2[k] = points2[i];
90 | k++;
91 | }
92 | }
93 | points1.resize(k);
94 | points2.resize(k);
95 | if (k>0)
96 | return true;
97 | else
98 | return false;
99 | }
100 |
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/TLD/LKTracker.h:
--------------------------------------------------------------------------------
1 | #ifndef LKTRACKER_H
2 | #define LKTRACKER_H
3 |
4 | #include "tld_utils.h"
5 | #include <opencv2/opencv.hpp>
6 |
7 | class LKTracker
8 | {
9 | private:
10 | std::vector<cv::Point2f> pointsFB;
11 | cv::Size window_size;
12 | int level;
13 | std::vector<uchar> status;
14 | std::vector<uchar> FB_status;
15 | std::vector<float> similarity;
16 | std::vector<float> FB_error;
17 | float simmed;
18 | float fbmed;
19 | cv::TermCriteria term_criteria;
20 | float lambda;
21 | void normCrossCorrelation(const cv::Mat &img1,
22 | const cv::Mat &img2,
23 | std::vector<cv::Point2f> &points1,
24 | std::vector<cv::Point2f> &points2);
25 | bool filterPts(std::vector<cv::Point2f> &points1, std::vector<cv::Point2f> &points2);
26 |
27 | public:
28 | LKTracker();
29 | bool trackf2f(const cv::Mat &img1,
30 | const cv::Mat &img2,
31 | std::vector<cv::Point2f> &points1,
32 | std::vector<cv::Point2f> &points2);
33 | float getFB();
34 | };
35 |
36 | #endif
37 |
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/TLD/TLD.h:
--------------------------------------------------------------------------------
1 | #ifndef TLD_H
2 | #define TLD_H
3 |
4 | #define _WIN32_WINNT 0x0601
5 |
6 | #include <opencv2/opencv.hpp>
7 | #include <opencv2/legacy/legacy.hpp> // cv::PatchGenerator is declared in the legacy module of OpenCV 2.4.x
8 | #include "tld_utils.h"
9 | #include "LKTracker.h"
10 | #include "FerNNClassifier.h"
11 | #include <fstream>
12 |
13 | //Bounding Boxes
14 | struct BoundingBox : public cv::Rect
15 | {
16 | BoundingBox()
17 | {
18 |
19 | }
20 |
21 | BoundingBox(const cv::Rect &r) : cv::Rect(r)
22 | {
23 |
24 | }
25 |
26 | float overlap; //Overlap with current Bounding Box
27 | int sidx; //scale index
28 | };
29 |
30 | //Detection structure
31 | struct DetStruct
32 | {
33 | std::vector<BoundingBox> bb;
34 | std::vector<std::vector<int> > patt;
35 | std::vector<float> conf1;
36 | std::vector<float> conf2;
37 | std::vector<std::vector<int> > isin;
38 | std::vector<cv::Mat> patch;
39 | };
40 |
41 | //Temporal structure
42 | struct TempStruct
43 | {
44 | std::vector<std::vector<int> > patt;
45 | std::vector<float> conf;
46 | };
47 |
48 | struct OComparator
49 | {
50 | OComparator(const std::vector<BoundingBox> &_grid) : grid(_grid)
51 | {
52 |
53 | }
54 |
55 | std::vector<BoundingBox> grid;
56 |
57 | bool operator ()(const int &idx1, const int &idx2)
58 | {
59 | return grid[idx1].overlap > grid[idx2].overlap;
60 | }
61 | };
62 |
63 | struct CComparator
64 | {
65 | CComparator(const std::vector<float> &_conf) : conf(_conf)
66 | {
67 |
68 | }
69 |
70 | std::vector<float> conf;
71 |
72 | bool operator ()(const int &idx1, const int &idx2)
73 | {
74 | return conf[idx1] > conf[idx2];
75 | }
76 | };
77 |
78 | class TLD
79 | {
80 | private:
81 | cv::PatchGenerator generator;
82 | FerNNClassifier classifier;
83 | LKTracker tracker;
84 | ///Parameters
85 | int bbox_step;
86 | int min_win;
87 | int patch_size;
88 | //initial parameters for positive examples
89 | int num_closest_init;
90 | int num_warps_init;
91 | int noise_init;
92 | float angle_init;
93 | float shift_init;
94 | float scale_init;
95 | //update parameters for positive examples
96 | int num_closest_update;
97 | int num_warps_update;
98 | int noise_update;
99 | float angle_update;
100 | float shift_update;
101 | float scale_update;
102 | //parameters for negative examples
103 | float bad_overlap;
104 | float bad_patches;
105 | ///Variables
106 | //Integral Images
107 | cv::Mat iisum;
108 | cv::Mat iisqsum;
109 | float var;
110 | //Training data
111 | std::vector<std::pair<std::vector<int>, int> > pX; //positive ferns
112 | std::vector<std::pair<std::vector<int>, int> > nX; //negative ferns
113 | cv::Mat pEx; //positive NN example
114 | std::vector<cv::Mat> nEx; //negative NN examples
115 | //Test data
116 | std::vector<std::pair<std::vector<int>, int> > nXT; //negative data to Test
117 | std::vector<cv::Mat> nExT; //negative NN examples to Test
118 | //Last frame data
119 | BoundingBox lastbox;
120 | bool lastvalid;
121 | float lastconf;
122 | //Current frame data
123 | //Tracker data
124 | bool tracked;
125 | BoundingBox tbb;
126 | bool tvalid;
127 | float tconf;
128 | //Detector data
129 | TempStruct tmp;
130 | DetStruct dt;
131 | std::vector<BoundingBox> dbb;
132 | std::vector<bool> dvalid;
133 | std::vector<float> dconf;
134 | bool detected;
135 | //Bounding Boxes
136 | std::vector<BoundingBox> grid;
137 | std::vector<cv::Size> scales;
138 | std::vector<int> good_boxes; //indexes of bboxes with overlap > 0.6
139 | std::vector<int> bad_boxes; //indexes of bboxes with overlap < 0.2
140 | BoundingBox bbhull; // hull of good_boxes
141 | BoundingBox best_box; // maximum overlapping bbox
142 |
143 | public:
144 | //Constructors
145 | TLD();
146 | //Methods
147 | void init(const cv::Mat &frame1, const cv::Rect &box);
148 | void generatePositiveData(const cv::Mat &frame, const int &num_warps);
149 | void generateNegativeData(const cv::Mat &frame);
150 | void processFrame(const cv::Mat &img1,
151 | const cv::Mat &img2,
152 | BoundingBox &bbnext,
153 | bool &lastboxfound);
154 | void track(const cv::Mat &img1, const cv::Mat &img2);
155 | void detect(const cv::Mat &frame);
156 | void clusterConf(const std::vector<BoundingBox> &dbb,
157 | const std::vector<float> &dconf,
158 | std::vector<BoundingBox> &cbb,
159 | std::vector<float> &cconf);
160 | void evaluate();
161 | void learn(const cv::Mat &img);
162 | //Tools
163 | void buildGrid(const cv::Mat &img, const cv::Rect &box);
164 | float bbOverlap(const BoundingBox &box1, const BoundingBox &box2);
165 | void getOverlappingBoxes(const cv::Rect &box1, const int &num_closest);
166 | void getBBHull();
167 | void getPattern(const cv::Mat &img, cv::Mat &pattern, cv::Scalar &mean, cv::Scalar &stdev);
168 | void bbPoints(std::vector<cv::Point2f> &points, const BoundingBox &bb);
169 | void bbPredict(const std::vector<cv::Point2f> &points1,
170 | const std::vector<cv::Point2f> &points2,
171 | const BoundingBox &bb1,
172 | BoundingBox &bb2);
173 | double getVar(const BoundingBox &box, const cv::Mat &sum, const cv::Mat &sqsum);
174 | bool bbComp(const BoundingBox &bb1, const BoundingBox &bb2);
175 | int clusterBB(const std::vector<BoundingBox> &dbb, std::vector<int> &indexes);
176 | };
177 |
178 | #endif
179 |
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/TLD/pyTLD/FerNNClassifier.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/TLD/pyTLD/FerNNClassifier.pyc
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/TLD/pyTLD/TLD.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/TLD/pyTLD/TLD.pyc
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/TLD/pyTLD/get_points.py:
--------------------------------------------------------------------------------
1 | # Import the required modules
2 | import cv2
3 |
4 | def run(im):
5 | cv2.imshow('frame', im)
6 |
7 | # List containing top-left and bottom-right to crop the image.
8 | pts_1 = []
9 | pts_2 = []
10 |
11 | rects = []
12 | run.mouse_down = False
13 |
14 | def callback(event, x, y, flags, param):
15 | if event == cv2.EVENT_LBUTTONDOWN:
16 | if len(pts_2) == 1: #pts_2 contains one tuple.
17 | print "WARN: Cannot select another object."
18 | print "Delete the previously selected object using key `d` to mark a new location."
19 | return
20 | run.mouse_down = True
21 | pts_1.append((x, y))
22 | elif event == cv2.EVENT_LBUTTONUP and run.mouse_down == True:
23 | run.mouse_down = False
24 | pts_2.append((x, y))
25 | print "Object selected at [{}, {}]".format(pts_1[-1], pts_2[-1])
26 | elif event == cv2.EVENT_MOUSEMOVE and run.mouse_down == True:
27 | im_draw = im.copy()
28 | cv2.rectangle(im_draw, pts_1[-1], (x, y), (255,255,255), 3)
29 | cv2.imshow('frame', im_draw)
30 |
31 | print "Press and release mouse around the object to be tracked."
32 | cv2.setMouseCallback('frame', callback)
33 |
34 | print "Press key `p` to continue with the selected points."
35 | print "Press key `d` to discard the last object selected."
36 | print "Press key `q` to quit the program."
37 | while True:
38 | # Draw the rectangular boxes on the image
39 | for pt1, pt2 in zip(pts_1, pts_2):
40 | rects.append([pt1[0],pt2[0], pt1[1], pt2[1]])
41 | cv2.rectangle(im, pt1, pt2, (255, 255, 255), 3)
42 | # Display the cropped images
43 | cv2.imshow('frame', im)
44 | key = cv2.waitKey(30)
45 | if key == ord('p'):
46 | # Press key `p` to return the selected points
47 | cv2.destroyAllWindows()
48 | point= [(tl + br) for tl, br in zip(pts_1, pts_2)]
49 | corrected_point=check_point(point)
50 | return corrected_point
51 | elif key == ord('q'):
52 | # Press key `q` to quit the program
53 | print "Quitting without saving."
54 | exit()
55 | elif key == ord('d'):
56 | # Press key `d` to delete the last rectangular region
57 | if run.mouse_down == False and pts_1:
58 | print "Object deleted at [{}, {}]".format(pts_1[-1], pts_2[-1])
59 | pts_1.pop()
60 | pts_2.pop()
61 | im_disp = im.copy()
62 | else:
63 | print "No object to delete."
64 | cv2.destroyAllWindows()
65 | point= [(tl + br) for tl, br in zip(pts_1, pts_2)]
66 | return check_point(point)
67 |
68 | def check_point(points):
69 | for point in points:
70 | #to find min and max x coordinates
71 | if point[0] < point[2]:
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/TLD/tld_utils.cpp:
--------------------------------------------------------------------------------
1 | #include "tld_utils.h"
2 |
3 | using namespace cv;
4 | using namespace std;
5 |
6 | float median(vector<float> v)
7 | {
8 | //kofpk
9 | // int n = floor(v.size() / 2);
10 | int n = floor(v.size() / 2.0);
11 | ///kofpk
12 | nth_element(v.begin(), v.begin()+n, v.end());
13 | return v[n];
14 | }
15 |
16 | vector<int> index_shuffle(int begin,int end){
17 | vector<int> indexes(end-begin);
18 | for (int i=begin;i<end;i++){
19 | indexes[i-begin]=i;
20 | }
21 | random_shuffle(indexes.begin(),indexes.end());
22 | return indexes;
23 | }
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/TLD/tld_utils.h:
--------------------------------------------------------------------------------
1 | #ifndef TLD_UTILS_H
2 | #define TLD_UTILS_H
3 |
4 | #include <opencv2/opencv.hpp>
5 |
6 | float median(std::vector<float> v);
7 |
8 | std::vector<int> index_shuffle(int begin,int end);
9 |
10 | #endif
11 |
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking Learning Detection/sourcecode/main.cpp:
--------------------------------------------------------------------------------
1 | #include <QApplication>
2 | #include "MainWindow.h"
3 |
4 | int main(int argc, char *argv[])
5 | {
6 | QApplication a(argc, argv);
7 | MainWindow w;
8 |
9 | return a.exec();
10 | }
11 |
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking by SIFT, SURF feature matching, homography/get_points.py:
--------------------------------------------------------------------------------
1 | # Import the required modules
2 | import cv2
3 | import argparse
4 |
5 | def run(im):
6 | im_disp = im.copy()
7 | im_draw = im.copy()
8 | window_name = "Select objects to be tracked here."
9 | cv2.namedWindow(window_name, cv2.WINDOW_NORMAL)
10 |
11 | #############
12 | cv2.imshow(window_name, im_draw)
13 | #############
14 |
15 | # List containing top-left and bottom-right to crop the image.
16 | pts_1 = []
17 | pts_2 = []
18 |
19 | rects = []
20 | run.mouse_down = False
21 |
22 | def callback(event, x, y, flags, param):
23 | if event == cv2.EVENT_LBUTTONDOWN:
24 | if len(pts_2) == 1: #pts_2 contains one tuple.
25 | print "WARN: Cannot select another object."
26 | print "Delete the previously selected object using key `d` to mark a new location."
27 | return
28 | run.mouse_down = True
29 | pts_1.append((x, y))
30 | elif event == cv2.EVENT_LBUTTONUP and run.mouse_down == True:
31 | run.mouse_down = False
32 | pts_2.append((x, y))
33 | print "Object selected at [{}, {}]".format(pts_1[-1], pts_2[-1])
34 | elif event == cv2.EVENT_MOUSEMOVE and run.mouse_down == True:
35 | im_draw = im.copy()
36 | cv2.rectangle(im_draw, pts_1[-1], (x, y), (255,255,255), 3)
37 | cv2.imshow(window_name, im_draw)
38 |
39 | print "Press and release mouse around the object to be tracked."
40 | cv2.setMouseCallback(window_name, callback)
41 |
42 | print "Press key `p` to continue with the selected points."
43 | print "Press key `d` to discard the last object selected."
44 | print "Press key `q` to quit the program."
45 |
46 | while True:
47 | # Draw the rectangular boxes on the image
48 | window_name_2 = "Objects to be tracked."
49 | for pt1, pt2 in zip(pts_1, pts_2):
50 | rects.append([pt1[0],pt2[0], pt1[1], pt2[1]])
51 | cv2.rectangle(im_disp, pt1, pt2, (255, 255, 255), 3)
52 | # Display the cropped images
53 | cv2.namedWindow(window_name_2, cv2.WINDOW_NORMAL)
54 | cv2.imshow(window_name_2, im_disp)
55 | key = cv2.waitKey(30)
56 | if key == ord('p'):
57 | # Press key `p` to return the selected points
58 | cv2.destroyAllWindows()
59 | point= [(tl + br) for tl, br in zip(pts_1, pts_2)]
60 | corrected_point=check_point(point)
61 | return corrected_point
62 | elif key == ord('q'):
63 | # Press key `q` to quit the program
64 | print "Quitting without saving."
65 | exit()
66 | elif key == ord('d'):
67 | # Press key `d` to delete the last rectangular region
68 | if run.mouse_down == False and pts_1:
69 | print "Object deleted at [{}, {}]".format(pts_1[-1], pts_2[-1])
70 | pts_1.pop()
71 | pts_2.pop()
72 | im_disp = im.copy()
73 | else:
74 | print "No object to delete."
75 | cv2.destroyAllWindows()
76 | point= [(tl + br) for tl, br in zip(pts_1, pts_2)]
77 | corrected_point=check_point(point)
78 | return corrected_point
79 |
80 | def check_point(points):
81 | out=[]
82 | for point in points:
83 | #to find min and max x coordinates
84 | if point[0] < point[2]:
85 | minx = point[0]
86 | maxx = point[2]
87 | else:
88 | minx = point[2]
89 | maxx = point[0]
90 | #to find min and max y coordinates
91 | if point[1] < point[3]:
92 | miny = point[1]
93 | maxy = point[3]
94 | else:
95 | miny = point[3]
96 | maxy = point[1]
97 | out.append((minx, miny, maxx, maxy))
98 | return out
--------------------------------------------------------------------------------
/Other Algorithms(Object Tracking)/Tracking by SIFT, SURF feature matching, homography/run.py:
--------------------------------------------------------------------------------
78 | for m,n in matches:
79 | if m.distance < 0.7*n.distance:
80 | good.append(m)
81 |
82 | #On getting enough good matches
83 | if len(good)>=MIN_MATCH_COUNT:
84 | #Getting matched keypoint's location in ROI
85 | src_pts = np.float32([ kp_roi[m.queryIdx].pt for m in good ]).reshape(-1,1,2)
86 |
87 | #Getting matched keypoint's location in current frame
88 | dst_pts = np.float32([ kp_frame[m.trainIdx].pt for m in good ]).reshape(-1,1,2)
89 |
90 | M, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC,5.0)
91 |
92 | #Getting perspective transform
93 | dst = cv2.perspectiveTransform(pts,M)
94 |
95 | #Drawing rotating rectangle around object
96 | cv2.polylines(frame,[np.int32(dst)],True,(255,255,255),3, cv2.CV_AA)
97 |
98 | else:
99 | print "Not enough matches are found - %d/%d" % (len(good),MIN_MATCH_COUNT)
100 |
101 | cv2.imshow('tracking',frame)
102 |
103 | if cv2.waitKey(FRAME_TIME) == 27:
104 | break
105 |
106 | cap.release()
107 | cv2.destroyAllWindows()
108 |
--------------------------------------------------------------------------------
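The feature-matching script above begins mid-file: its opening (ROI selection, SIFT/SURF keypoint extraction and the matcher setup) was lost when the listing's markup was stripped. The sketch below is only a minimal illustration of the setup that the surviving fragment assumes, written against the OpenCV 2.4.x Python API used elsewhere in this repository; the fixed ROI coordinates, the cv2.BFMatcher choice and all variable names are assumptions for illustration (the original script may well use a FLANN-based matcher and get_points.run() for the ROI).

# Illustrative setup only -- not the repository's original script.
import numpy as np
import cv2

MIN_MATCH_COUNT = 10            # used later by the homography step in the fragment above

cap = cv2.VideoCapture(0)
ret, frame = cap.read()

# Hypothetical fixed ROI; the repository's get_points.py lets the user drag one instead.
x1, y1, x2, y2 = 200, 150, 340, 290
roi = frame[y1:y2, x1:x2]

sift = cv2.SIFT()               # OpenCV 2.4.x constructor; cv2.SURF() would work the same way
kp_roi, des_roi = sift.detectAndCompute(roi, None)

# ROI corners, later mapped into the frame by cv2.perspectiveTransform in the fragment above
pts = np.float32([[0, 0],
                  [0, roi.shape[0] - 1],
                  [roi.shape[1] - 1, roi.shape[0] - 1],
                  [roi.shape[1] - 1, 0]]).reshape(-1, 1, 2)

matcher = cv2.BFMatcher()       # brute-force matcher kept for simplicity

while True:
    ret, frame = cap.read()
    if not ret:
        break
    kp_frame, des_frame = sift.detectAndCompute(frame, None)
    good = []
    if des_frame is not None:
        matches = matcher.knnMatch(des_roi, des_frame, k=2)
        # Lowe's ratio test, exactly as in the fragment above
        for pair in matches:
            if len(pair) == 2 and pair[0].distance < 0.7 * pair[1].distance:
                good.append(pair[0])
    # ...findHomography / perspectiveTransform / drawing continue as in the listing above
    cv2.imshow('tracking', frame)
    if cv2.waitKey(30) == 27:
        break

cap.release()
cv2.destroyAllWindows()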
/README.md:
--------------------------------------------------------------------------------
1 | 
2 | ***
3 | # eYSIP-2016
4 | # Object Tracking Camera for Autonomous drone
5 | Many commercial drones claim to track objects, but what they actually follow is the GPS location of your mobile phone, so the drone tracks the phone rather than the actual object or person. This is why we built an Object Tracking Camera system that can be mounted on an autonomous drone and tracks an object using real-time image processing.
6 |
7 | ## Components and Software Used
8 | ***
9 | * **Python 2.7.5**
10 | * **OpenCV 2.4.9**
11 | * **Raspberry Pi B+ board**
12 | * **3-D printed camera mount frame**
13 | * **Raspberry Pi Camera (for experimentation)**
14 | * **Tower Pro SG90 mini servos**
15 |
16 |
17 |
18 | ## Description
19 |
20 | The Object Tracking Camera mounted on the drone takes a Region of Interest (the object) from the user, tracks it using different algorithms, and commands the drone to follow the object. Such capabilities are useful in several applications, from photo/video shoots to surveys and search-and-rescue missions.
21 |
22 | ## Features
23 | ***
24 | - **Selection of the object through an ROI**
25 | - **Effective CAMShift tracking algorithm**
26 | - **The camera won't get confused by similarly colored objects**
27 | - **Re-recognition of a lost object**
28 |
29 | ## Key Tasks
30 | ***
31 | * Developing an object tracking algorithm
32 | * Making a gimbal system for mounting the camera
33 | * Building a pan-tilt camera with motors
34 | * Interfacing the Raspberry Pi with the camera
35 | * Developing Python code to achieve autonomous camera behaviour
36 |
37 |
38 | ## Deliverables
39 | ***
40 | * A system that can be mounted on an autonomous drone
41 | * Code and Documentation
42 | * Tutorials explaining individual modules
43 |
44 |
45 | ## Documentation
46 | ***
47 | * Detailed tutorials on every module, with pictures of the actual components, and the full project code have been uploaded to GitHub.
48 |
49 |
50 | ## Contributors
51 | ***
52 | * [Abhishek Rathore](https://github.com/AbhishekRathore311)
53 | * [Gopineedi Harsha Vardhan](https://github.com/HarshaVardhan896)
54 |
55 | ## Mentors
56 | ***
57 | * [Pushkar Raj](https://github.com/pushkarraj)
58 | * [Akshat Jain](https://github.com/akshatbjain)
59 | * [Rama Kumar](https://github.com/ramakumarks)
60 |
61 | ## License
62 | ***
63 | This project is open-sourced under the [MIT License](http://opensource.org/licenses/MIT).
64 |
65 | # Object-Tracking-Camera
66 |
--------------------------------------------------------------------------------
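The pan-tilt behaviour described in the README above (keeping the selected object centred by driving the Tower Pro SG90 servos from the Raspberry Pi) reduces to a small proportional correction per frame. The sketch below is only a rough illustration under assumed values: the header pin, frame width and gain are hypothetical and not taken from the project's code, and RPi.GPIO software PWM is used for brevity.

# Minimal proportional pan-correction sketch (illustrative values, not the project's tuning).
import RPi.GPIO as GPIO

FRAME_WIDTH = 640      # camera frame width in pixels (assumed)
PAN_PIN = 11           # hypothetical header pin driving the pan servo

GPIO.setmode(GPIO.BOARD)
GPIO.setup(PAN_PIN, GPIO.OUT)

pan = GPIO.PWM(PAN_PIN, 50)   # SG90 servos expect a 50 Hz control signal
pan.start(7.5)                # roughly 7.5% duty cycle centres a typical SG90
pan_duty = 7.5

def update_pan(object_cx):
    """Nudge the pan servo so the tracked object's centre drifts toward the frame centre."""
    global pan_duty
    error = object_cx - FRAME_WIDTH / 2        # horizontal offset in pixels
    pan_duty -= 0.002 * error                  # small proportional step (gain is a guess)
    pan_duty = max(2.5, min(12.5, pan_duty))   # clamp to the SG90's usable duty-cycle range
    pan.ChangeDutyCycle(pan_duty)

# The tracking loop would call update_pan() once per frame with the bounding-box centre,
# e.g. update_pan(x + w / 2) for a track window (x, y, w, h).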
/Research papers referred(Object Tracking)/1432960037.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Research papers referred(Object Tracking)/1432960037.pdf
--------------------------------------------------------------------------------
/Research papers referred(Object Tracking)/CAM Shift/20.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Research papers referred(Object Tracking)/CAM Shift/20.pdf
--------------------------------------------------------------------------------
/Research papers referred(Object Tracking)/CAM Shift/34.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Research papers referred(Object Tracking)/CAM Shift/34.pdf
--------------------------------------------------------------------------------
/Research papers referred(Object Tracking)/CAM Shift/An Improved CamShift Algorithm for Target Tracking in Video Surve.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Research papers referred(Object Tracking)/CAM Shift/An Improved CamShift Algorithm for Target Tracking in Video Surve.pdf
--------------------------------------------------------------------------------
/Research papers referred(Object Tracking)/CAM Shift/C1657073313.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Research papers referred(Object Tracking)/CAM Shift/C1657073313.pdf
--------------------------------------------------------------------------------
/Research papers referred(Object Tracking)/CAM Shift/G0124146.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Research papers referred(Object Tracking)/CAM Shift/G0124146.pdf
--------------------------------------------------------------------------------
/Research papers referred(Object Tracking)/CAM Shift/Object Tracking Using Improved CAMShift .pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Research papers referred(Object Tracking)/CAM Shift/Object Tracking Using Improved CAMShift .pdf
--------------------------------------------------------------------------------
/Research papers referred(Object Tracking)/CAM Shift/inpra-v2n2-05_3.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Research papers referred(Object Tracking)/CAM Shift/inpra-v2n2-05_3.pdf
--------------------------------------------------------------------------------
/Research papers referred(Object Tracking)/CAM Shift/p3-allen.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Research papers referred(Object Tracking)/CAM Shift/p3-allen.pdf
--------------------------------------------------------------------------------
/Research papers referred(Object Tracking)/CalonderLSF10.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Research papers referred(Object Tracking)/CalonderLSF10.pdf
--------------------------------------------------------------------------------
/Research papers referred(Object Tracking)/Maresca_The_Matrioska_Tracking_2014_CVPR_paper.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Research papers referred(Object Tracking)/Maresca_The_Matrioska_Tracking_2014_CVPR_paper.pdf
--------------------------------------------------------------------------------
/Research papers referred(Object Tracking)/brisk.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Research papers referred(Object Tracking)/brisk.pdf
--------------------------------------------------------------------------------
/Research papers referred(Object Tracking)/cvpr_2015.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Research papers referred(Object Tracking)/cvpr_2015.pdf
--------------------------------------------------------------------------------
/Research papers referred(Object Tracking)/ext camshift.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Research papers referred(Object Tracking)/ext camshift.pdf
--------------------------------------------------------------------------------
/Research papers referred(Object Tracking)/lowe-ijcv04.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Research papers referred(Object Tracking)/lowe-ijcv04.pdf
--------------------------------------------------------------------------------
/Research papers referred(Object Tracking)/orb_final.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Research papers referred(Object Tracking)/orb_final.pdf
--------------------------------------------------------------------------------
/Tutorials/1. Colored object tracking using HSV/Code/Colored_object_tracking_using_HSV.py:
--------------------------------------------------------------------------------
1 | ############################################
2 | ## Import numpy for numerical calculations
3 | import numpy as np
4 |
5 | ## Import OpenCV for image processing
6 | import cv2
7 |
8 | ############################################
9 | ## Initialize webcam
10 | cap = cv2.VideoCapture(0) ##use 1 in parameter instead of 0 for external
11 | ##camera
12 | ## You can also use a video by giving the location of the video in the
13 | ## parameter. If the video is in the same directory as the python file
14 | ## you can use the video directly as
15 | ##cap = cv2.VideoCapture('sample.mov')
16 | ## If you are using a video then uncomment the previous statement
17 |
18 | ############################################
19 | ## param1 and param2 are the minimum and maximum HSV values of
20 | ## the colored object which we are tracking
21 | ## (the following values were tuned for our test object)
22 | param1 = [70,50,50] ## [H_min,S_min,V_min]
23 | param2 = [100,255,255] ## [H_max,S_max,V_max]
24 | ## You can give the HSV range of any color and track
25 | ## that respective colored object
26 |
27 | ############################################
28 | ## np.array will change param1 and param2 into numpy array which
29 | ## OpenCV can understand
30 | lower = np.array(param1)
31 | upper = np.array(param2)
32 |
33 | ############################################
34 | ## Video Loop
35 |
36 | while(1):
37 |
38 | ## Read one frame of the video at a time
39 | ## ret contains True if frame is successfully read otherwise it
40 | ## contains False
41 | ret, frame = cap.read()
42 |
43 | ## If frame is successfully read
44 | if(ret):
45 |
46 | ##Change color space of frame from BGR to HSV
47 | # and stores the converted frame in hsv
48 | hsv = cv2.cvtColor(frame,cv2.COLOR_BGR2HSV)
49 |
50 | ## Thresholding hsv frame and extracting the pixels of the desired color
51 | mask = cv2.inRange(hsv, lower, upper)
52 |
53 | ## Removing noise from the Masked Frame (mask)
54 | mask = cv2.GaussianBlur(mask,(5,5),0)
55 |
56 | kernel = np.ones((5,5),np.uint8)
57 | mask = cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel)
58 | cv2.imshow('mask',mask)
59 |
60 | ##finding contours in mask
61 | ## and storing all the contours in contours array
62 | contours, hierarchy = cv2.findContours(mask,cv2.RETR_TREE,
63 | cv2.CHAIN_APPROX_SIMPLE)
64 |
65 | ## Initialize the area of the largest contour to zero
66 | max_contour_area=0
67 |
68 | ############################################
69 | ## Colored object tracking
70 |
71 | ## If the specified colored object is in the frame then there
72 | # will be at least one contour
73 |
74 | ## Only if there is at least one contour do we need to find
75 | # the index of the largest contour
76 |
77 |
78 | if(len(contours) >= 1):
79 | ## Finding index of largest contour among all the contours
80 | #for colored object tracking
81 | for i in range(0,len(contours)):
82 | if(cv2.contourArea(contours[i]) > max_contour_area):
83 | max_contour_area = cv2.contourArea(contours[i])
84 | max_contour_area_index = i
85 |
86 | ## This statement gives co-ordinates of North-West corner
87 | #in x and y
88 | ## And Width and Height in w and h of bounding rectangle
89 | #of Colored object
90 | x,y,w,h=cv2.boundingRect(contours[max_contour_area_index])
91 |
92 | ##create rectangle around the object which you want to track
93 | ##for avoiding the small contours formed
94 | ##when the object is not in the frame
95 | if w*h > 100:
96 | cv2.rectangle(frame, (x, y), (x+w, y+h), (0,0,255), 2)
97 | ##cv2.rectangle parameters are as follows:
98 | ##(image,co-ordinates,Color of the rectangle,thickness)
99 |
100 | cv2.imshow('video',frame)
101 |
102 | ## Break using Escape (esc) key
103 | ## and 60 ms for frame waits(useful for video processing)
104 | if cv2.waitKey(60) == 27: ## 27 - ASCII for escape key
105 | break
106 |
107 | ## If frame is not successfully read or there is no frame to read
108 | #(in case of recorded video) this acts as stop video
109 | else:
110 | break
111 |
112 | ## Releasing camera
113 | cap.release()
114 |
115 | ## Destroy all open windows
116 | cv2.destroyAllWindows()
117 |
--------------------------------------------------------------------------------
/Tutorials/1. Colored object tracking using HSV/Colored object tracking using HSV.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/1. Colored object tracking using HSV/Colored object tracking using HSV.pdf
--------------------------------------------------------------------------------
/Tutorials/1. Colored object tracking using HSV/Images/Capture.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/1. Colored object tracking using HSV/Images/Capture.JPG
--------------------------------------------------------------------------------
/Tutorials/1. Colored object tracking using HSV/Images/Capture1.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/1. Colored object tracking using HSV/Images/Capture1.JPG
--------------------------------------------------------------------------------
/Tutorials/1. Colored object tracking using HSV/Images/Capture2.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/1. Colored object tracking using HSV/Images/Capture2.JPG
--------------------------------------------------------------------------------
/Tutorials/1. Colored object tracking using HSV/Images/Capture3.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/1. Colored object tracking using HSV/Images/Capture3.JPG
--------------------------------------------------------------------------------
/Tutorials/1. Colored object tracking using HSV/Images/Capture5.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/1. Colored object tracking using HSV/Images/Capture5.JPG
--------------------------------------------------------------------------------
/Tutorials/1. Colored object tracking using HSV/Images/Capture6.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/1. Colored object tracking using HSV/Images/Capture6.JPG
--------------------------------------------------------------------------------
/Tutorials/1. Colored object tracking using HSV/Images/image1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/1. Colored object tracking using HSV/Images/image1.jpg
--------------------------------------------------------------------------------
/Tutorials/1. Colored object tracking using HSV/Images/image2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/1. Colored object tracking using HSV/Images/image2.jpg
--------------------------------------------------------------------------------
/Tutorials/1. Colored object tracking using HSV/Latex Code/Colored object tracking using HSV.aux:
--------------------------------------------------------------------------------
1 | \relax
2 | \providecommand\hyper@newdestlabel[2]{}
3 | \providecommand\HyperFirstAtBeginDocument{\AtBeginDocument}
4 | \HyperFirstAtBeginDocument{\ifx\hyper@anchor\@undefined
5 | \global\let\oldcontentsline\contentsline
6 | \gdef\contentsline#1#2#3#4{\oldcontentsline{#1}{#2}{#3}}
7 | \global\let\oldnewlabel\newlabel
8 | \gdef\newlabel#1#2{\newlabelxx{#1}#2}
9 | \gdef\newlabelxx#1#2#3#4#5#6{\oldnewlabel{#1}{{#2}{#3}}}
10 | \AtEndDocument{\ifx\hyper@anchor\@undefined
11 | \let\contentsline\oldcontentsline
12 | \let\newlabel\oldnewlabel
13 | \fi}
14 | \fi}
15 | \global\let\hyper@last\relax
16 | \gdef\HyperFirstAtBeginDocument#1{#1}
17 | \providecommand\HyField@AuxAddToFields[1]{}
18 | \providecommand\HyField@AuxAddToCoFields[2]{}
19 | \@writefile{toc}{\contentsline {section}{\numberline {1}Objective}{3}{section.1}}
20 | \@writefile{toc}{\contentsline {section}{\numberline {2}Prerequisites}{3}{section.2}}
21 | \@writefile{toc}{\contentsline {section}{\numberline {3}Hardware Requirement}{3}{section.3}}
22 | \@writefile{toc}{\contentsline {section}{\numberline {4}Software Requirement}{3}{section.4}}
23 | \@writefile{toc}{\contentsline {section}{\numberline {5}Theory and Description}{3}{section.5}}
24 | \@writefile{lof}{\contentsline {figure}{\numberline {1}{\ignorespaces Masked image showing contours in the frame}}{5}{figure.1}}
25 | \@writefile{lof}{\contentsline {figure}{\numberline {2}{\ignorespaces Boundary bounding the largest area contour}}{5}{figure.2}}
26 | \@writefile{lof}{\contentsline {figure}{\numberline {3}{\ignorespaces Boundary bounded on only one side of the bottle that has largest contour}}{6}{figure.3}}
27 | \@writefile{toc}{\contentsline {section}{\numberline {6}Code}{7}{section.6}}
28 | \@writefile{toc}{\contentsline {section}{\numberline {7}Exercise}{7}{section.7}}
29 | \@writefile{toc}{\contentsline {section}{\numberline {8}References}{8}{section.8}}
30 |
--------------------------------------------------------------------------------
/Tutorials/1. Colored object tracking using HSV/Latex Code/Colored object tracking using HSV.out:
--------------------------------------------------------------------------------
1 | \BOOKMARK [1][-]{section.1}{Objective}{}% 1
2 | \BOOKMARK [1][-]{section.2}{Prerequisites}{}% 2
3 | \BOOKMARK [1][-]{section.3}{Hardware Requirement}{}% 3
4 | \BOOKMARK [1][-]{section.4}{Software Requirement}{}% 4
5 | \BOOKMARK [1][-]{section.5}{Theory and Description}{}% 5
6 | \BOOKMARK [1][-]{section.6}{Code}{}% 6
7 | \BOOKMARK [1][-]{section.7}{Exercise}{}% 7
8 | \BOOKMARK [1][-]{section.8}{References}{}% 8
9 |
--------------------------------------------------------------------------------
/Tutorials/1. Colored object tracking using HSV/Latex Code/Colored object tracking using HSV.synctex.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/1. Colored object tracking using HSV/Latex Code/Colored object tracking using HSV.synctex.gz
--------------------------------------------------------------------------------
/Tutorials/1. Colored object tracking using HSV/Latex Code/Colored object tracking using HSV.tex:
--------------------------------------------------------------------------------
1 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
2 | % e-Yantra, IIT-Bombay
3 |
4 | % Document Author: Abhishek Rathore, Gopineedi Harsha Vardhan
5 | % Date: 07-June,2016
6 |
7 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
8 |
9 | \documentclass[11pt,a4paper]{article}
10 |
11 | \usepackage{graphicx}
12 | \usepackage{listings}
13 | \usepackage{url}
14 | \usepackage{hyperref}
15 | \graphicspath{{../Images/}}
16 | \title{Tutorial - Colored object tracking using HSV}
17 | \author{e-Yantra Team}
18 | \date{\today}
19 |
20 | \begin{document}
21 | \maketitle
22 | \newpage
23 | \tableofcontents
24 | \newpage
25 | \section{Objective}
26 | The objective of this tutorial is to track a colored object using a Camera/Webcam.
27 | \section{Prerequisites}
28 | User should have handy knowledge of the following for understanding this tutorial.
29 | \begin{itemize}
30 | \item Basics of Python.
31 | \item Basics of OpenCV.
32 | \item Basics of Image processing.
33 | \item Knowledge about BGR and HSV color spaces and conversions.
34 | \end{itemize}
35 | \section{Hardware Requirement}
36 | \begin{itemize}
37 | \item A Computer with an internal or external webcam.
38 | \end{itemize}
39 | \section{Software Requirement}
40 | \begin{itemize}
41 | \item Python 2.7.5
42 | \item OpenCV 2.4.9
43 | \item numpy 1.7.1
44 | \item \textbf{Note :} These are the versions we were working on while creating this tutorial.
45 | \end{itemize}
46 | \section{Theory and Description}
47 | Object detection and segmentation are fundamental and challenging tasks in computer vision. They are a critical part of many applications such as image search and scene understanding. The easiest way to detect and segment an object from an image is with color based methods. The object and the background should have a significant color difference in order to successfully segment objects using color based methods.
48 | \par Here we use the HSV color space for object detection instead of the RGB color space because, unlike RGB, HSV separates luma (the image intensity) from chroma (the color information). By using HSV/HSL we can also discard intensity and lightness variations, which is not possible in the RGB color space. Moreover, HSV-based thresholds can also pick out skin color, fire color, etc.
49 | \begin{itemize}
50 | \item First we read the frames of the video through a webcam using cv2.VideoCapture(0) and we define the lower and upper HSV bounds of the color we are detecting, which are later used for tracking the object.
51 |
52 | \item Now we convert the BGR frame into an HSV frame and mask the HSV frame using the bounds defined above.
53 | \begin{figure}[h!]
54 | \includegraphics[scale=0.9]{Capture.jpg}
55 | \centering
56 | \end{figure}
57 |
58 | \item Masking is done by setting the pixels whose HSV values fall within the object's range to 1 (white) and all other pixels in the frame to 0 (black).
59 | \begin{figure}[h!]
60 | \includegraphics[scale=0.9]{Capture5.jpg}
61 | \centering
62 | \end{figure}
63 |
64 | \item After getting the binary image, we find the contours bounding the white blobs in the frame.
65 | \begin{figure}[h!]
66 | \includegraphics[scale=0.9]{Capture6.jpg}
67 | \end{figure}
68 |
69 | \item Now we find the areas of the contours in the frame. Among all the bounded contours, the contour with the largest area should be our colored object. Refer to Figure 1 for better understanding.
70 | \begin{figure}[h!]
71 | \includegraphics[scale=0.4]{Capture2.jpg}
72 | \centering
73 | \caption{Masked image showing contours in the frame}
74 | \end{figure}
75 | \item Now we draw a bounding box around that contour. In this way we can identify a colored object in a frame. By repeating this process in every frame we can track the object. Refer to Figure 2.
76 | \begin{figure}[h]
77 | \includegraphics[scale=0.4]{Capture1.jpg}
78 | \centering
79 | \caption{Boundary bounding the largest area contour}
80 | \end{figure}
81 | \item \textbf{Note:} This code works only when your object (the one you want to track) is the largest object of its color in the frame. Refer to Figure 3.
82 | \begin{figure}[h!]
83 | \includegraphics[scale=0.4]{Capture3.jpg}
84 | \centering
85 | \caption{Boundary bounded on only one side of the bottle that has largest contour}
86 | \end{figure}
87 | \end{itemize}
88 | \newpage
89 | .
90 | \newpage
91 | \section{Code}
92 | The Python code for this tutorial is available \href{https://github.com/eYSIP-2016/Object-Tracking-Camera/tree/master/Tutorials/1.%20Colored%20object%20tracking%20using%20HSV/Code}{here}
93 | \section{Exercise}
94 | Real time colored object tracking using webcam is shown below.
95 | \newline
96 | \newline
97 | \includegraphics[scale=0.7]{image1.jpg}
98 | \newline
99 | \includegraphics[scale=0.7]{image2.jpg}
100 |
101 | \section{References}
102 | \begin{enumerate}
103 | \item \url{https://opencv-python-tutroals.readthedocs.io/en/latest/py_tutorials/py_gui/py_drawing_functions/py_drawing_functions.html#drawing-functions}
104 | \item \url{https://opencv-python-tutroals.readthedocs.io/en/latest/py_tutorials/py_imgproc/py_colorspaces/py_colorspaces.html#converting-colorspaces}
105 | \item \url{https://opencv-python-tutroals.readthedocs.io/en/latest/py_tutorials/py_imgproc/py_filtering/py_filtering.html#filtering}
106 | \item \url{https://opencv-python-tutroals.readthedocs.io/en/latest/py_tutorials/py_imgproc/py_contours/py_contours_begin/py_contours_begin.html#contours-getting-started}
107 | \item \url{https://opencv-python-tutroals.readthedocs.io/en/latest/py_tutorials/py_imgproc/py_contours/py_contour_features/py_contour_features.html#contour-features}
108 | \end{enumerate}
109 |
110 | \end{document}
111 |
112 |
113 |
114 |
--------------------------------------------------------------------------------
/Tutorials/1. Colored object tracking using HSV/Latex Code/Colored object tracking using HSV.toc:
--------------------------------------------------------------------------------
1 | \contentsline {section}{\numberline {1}Objective}{3}{section.1}
2 | \contentsline {section}{\numberline {2}Prerequisites}{3}{section.2}
3 | \contentsline {section}{\numberline {3}Hardware Requirement}{3}{section.3}
4 | \contentsline {section}{\numberline {4}Software Requirement}{3}{section.4}
5 | \contentsline {section}{\numberline {5}Theory and Description}{3}{section.5}
6 | \contentsline {section}{\numberline {6}Code}{7}{section.6}
7 | \contentsline {section}{\numberline {7}Exercise}{7}{section.7}
8 | \contentsline {section}{\numberline {8}References}{8}{section.8}
9 |
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Code/run.py:
--------------------------------------------------------------------------------
1 | # import the necessary packages
2 | import numpy as np
3 | import cv2
4 |
5 | # (ix,iy) will be north west corner and
6 | # (jx,jy) will be south east corner of ROI rectangle
7 | ix,iy,jx,jy = -1,-1,-1,-1
8 |
9 | # mouse callback function to select ROI in the frame
10 | def select_ROI(event,x,y,flags,param):
11 |
12 | global ix,iy,jx,jy
13 |
14 | # If mouse left button is down,
15 | # set (ix,iy) = current co-ordinates
16 | if event == cv2.EVENT_LBUTTONDOWN:
17 | ix,iy = x,y
18 |
19 | # Else if mouse left button is up,
20 | # set (jx,jy) = current co-ordinates
21 | elif event == cv2.EVENT_LBUTTONUP:
22 | jx,jy = x,y
23 | # Draw rectangle using (ix,iy) and (jx,jy)
24 | cv2.rectangle(frame,(ix,iy),(jx,jy),(255,0,0),2)
25 |
26 | # Grab the reference to the camera
27 | cap = cv2.VideoCapture(0)
28 |
29 | # setup the mouse callback
30 | cv2.namedWindow('frame',cv2.WINDOW_NORMAL)
31 | # Binding select_ROI with the frame
32 | cv2.setMouseCallback('frame',select_ROI)
33 |
34 | # Decides whether to pause the video to take the ROI
35 | pause = False
36 |
38 | # Decides whether to track the object
38 | track = False
39 |
40 | # keep looping over the frames
41 | while(1):
42 | # grab the current frame
43 | ret ,frame = cap.read()
44 |
45 | # check to see if we have reached the end of the
46 | # video
47 | if ret == False:
48 | break
49 |
50 | # If pause is True, then go into ROI selection mode
51 | while(pause):
52 | # Show the current frame only
53 | # As the frame is bound to the select_ROI mouse callback,
54 | # you can select the ROI on this frame by mouse dragging
55 | cv2.imshow('frame',frame)
56 |
57 | # Press space bar after selecting ROI
58 | # to process the ROI and to track the object
59 | if cv2.waitKey(1) & 0xff == 32: #ascii value for spacebar
60 |
61 | # To prevent this loop from starting again.
62 | pause = False
63 |
64 | # setup initial location of window
65 | r,h,c,w = iy , (jy-iy) , ix , (jx-ix)
66 | track_window = (c,r,w,h)
67 |
68 | # set up the ROI for tracking
69 | roi = frame[r:r+h, c:c+w]
70 |
71 | # convert the ROI to the HSV color space
72 | hsv_roi = cv2.cvtColor(roi, cv2.COLOR_BGR2HSV)
73 |
74 | # Masking hsv_roi for good results.
75 | mask = cv2.inRange(hsv_roi, np.array((0., 60.,32.)), np.array((180.,255.,255.)))
76 |
77 | # compute a HSV histogram for the ROI
78 | roi_hist = cv2.calcHist([hsv_roi],[0,1],mask,[180,256],[0,180,0,256])
79 |
80 | # normalize histogram
81 | cv2.normalize(roi_hist,roi_hist,0,255,cv2.NORM_MINMAX)
82 |
83 | # Setup the termination criteria,
84 | # either 10 iterations or move by at least 1 pt
85 | term_crit = ( cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 1 )
86 |
87 | # Makes track = True to start tracking
88 | track = True
89 |
90 | # To terminate current loop.
91 | break
92 |
93 | # After ROI computation start tracking
94 | if track == True:
95 |
96 | # convert the current frame to the HSV color space
97 | hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
98 |
99 | # Apply back projection to the current frame with
100 | # respect to the ROI histogram.
101 | dst = cv2.calcBackProject([hsv],[0,1],roi_hist,[0,180,0,256],1)
102 |
103 | # apply cam shift to the back projection, convert the
104 | # points to a bounding box, and then draw them
105 | ret, track_window = cv2.CamShift(dst, track_window, term_crit)
106 |
107 | # Draw it on image
108 | pts = cv2.cv.BoxPoints(ret)
109 | pts = np.int0(pts)
110 | cv2.polylines(frame,[pts],True, [255,0,0], 2)
111 |
112 | # show the frame
113 | cv2.imshow('frame',frame)
114 | k = cv2.waitKey(20) & 0xff
115 |
116 | # If spacebar is pressed,
117 | # pause the frame to take the ROI
118 | if k == 32: #ascii value for spacebar
119 | pause = True
120 |
121 | # If Escape key is pressed,
122 | # terminate the video
123 | elif k == 27: #ascii value for escape key
124 | break
125 |
126 | ## Releasing camera
127 | cap.release()
128 |
129 | ## Destroy all open windows
130 | cv2.destroyAllWindows()
131 |
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/BackProjection.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/BackProjection.jpg
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/Cam_Det.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/Cam_Det.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/Cam_ROI.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/Cam_ROI.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/Histogram.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/Histogram.jpg
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/Original.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/Original.jpg
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/Vid_ROI.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/Vid_ROI.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/Vid_det.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/Vid_det.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/convergedellipse.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/convergedellipse.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/convergedimage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/convergedimage.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/drawing_code.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/drawing_code.JPG
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/ellipsecomputation.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/ellipsecomputation.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/frame_code.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/frame_code.JPG
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/mc1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/mc1.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/mc2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/mc2.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/mc3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/mc3.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/mc4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/mc4.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/meanshift.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/meanshift.jpg
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/meanshift.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/meanshift.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/meanshiftagain.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/meanshiftagain.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/ms1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/ms1.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/ms2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/ms2.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/ms2nd.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/ms2nd.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/ms3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/ms3.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/ms3rd.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/ms3rd.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/ms4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/ms4.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/msconverged.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/msconverged.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/msec.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/msec.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/msi.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/msi.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/newmeanshiftaxis.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/newmeanshiftaxis.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/result.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/result.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/roi_code.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/roi_code.JPG
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/roiforellipse.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/roiforellipse.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/scale_estimation.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/scale_estimation.JPG
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/shiftedwindow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Images/shiftedwindow.png
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Latex Code/Tutorial.aux:
--------------------------------------------------------------------------------
1 | \relax
2 | \providecommand\hyper@newdestlabel[2]{}
3 | \providecommand\HyperFirstAtBeginDocument{\AtBeginDocument}
4 | \HyperFirstAtBeginDocument{\ifx\hyper@anchor\@undefined
5 | \global\let\oldcontentsline\contentsline
6 | \gdef\contentsline#1#2#3#4{\oldcontentsline{#1}{#2}{#3}}
7 | \global\let\oldnewlabel\newlabel
8 | \gdef\newlabel#1#2{\newlabelxx{#1}#2}
9 | \gdef\newlabelxx#1#2#3#4#5#6{\oldnewlabel{#1}{{#2}{#3}}}
10 | \AtEndDocument{\ifx\hyper@anchor\@undefined
11 | \let\contentsline\oldcontentsline
12 | \let\newlabel\oldnewlabel
13 | \fi}
14 | \fi}
15 | \global\let\hyper@last\relax
16 | \gdef\HyperFirstAtBeginDocument#1{#1}
17 | \providecommand\HyField@AuxAddToFields[1]{}
18 | \providecommand\HyField@AuxAddToCoFields[2]{}
19 | \@writefile{toc}{\contentsline {section}{\numberline {1}Objective}{3}{section.1}}
20 | \@writefile{toc}{\contentsline {section}{\numberline {2}Prerequisites}{3}{section.2}}
21 | \@writefile{toc}{\contentsline {section}{\numberline {3}Hardware Requirement}{3}{section.3}}
22 | \@writefile{toc}{\contentsline {section}{\numberline {4}Software Requirement}{3}{section.4}}
23 | \@writefile{toc}{\contentsline {section}{\numberline {5}Theory and Description}{3}{section.5}}
24 | \@writefile{toc}{\contentsline {subsection}{\numberline {5.1}Meanshift Algorithm}{4}{subsection.5.1}}
25 | \@writefile{lof}{\contentsline {figure}{\numberline {1}{\ignorespaces Meanshift Algorithm (Courtesy: opencv-python-tutroals.readthedocs.io)\relax }}{4}{figure.caption.2}}
26 | \@writefile{toc}{\contentsline {subsection}{\numberline {5.2}Camshift Algorithm}{5}{subsection.5.2}}
27 | \@writefile{lof}{\contentsline {figure}{\numberline {2}{\ignorespaces Formula (Courtesy: opencv-python-tutroals.readthedocs.io)\relax }}{5}{figure.caption.3}}
28 | \@writefile{toc}{\contentsline {subsubsection}{\numberline {5.2.1}Illustration of Camshift algorithm in a Second video frame}{5}{subsubsection.5.2.1}}
29 | \@writefile{lof}{\contentsline {figure}{\numberline {3}{\ignorespaces Courtesy: opencv-python-tutroals.readthedocs.io\relax }}{6}{figure.caption.4}}
30 | \@writefile{lof}{\contentsline {figure}{\numberline {4}{\ignorespaces ellipse computation and applying Meanshift again (Courtesy: opencv-python-tutroals.readthedocs.io)\relax }}{7}{figure.caption.5}}
31 | \@writefile{lof}{\contentsline {figure}{\numberline {5}{\ignorespaces Converged ellipse calculation (Courtesy: opencv-python-tutroals.readthedocs.io)\relax }}{7}{figure.caption.6}}
32 | \@writefile{lof}{\contentsline {figure}{\numberline {6}{\ignorespaces Resulting Camshift (Courtesy: opencv-python-tutroals.readthedocs.io)\relax }}{7}{figure.caption.7}}
33 | \@writefile{toc}{\contentsline {subsection}{\numberline {5.3}Using Camshift Algorithm using Python OpenCV}{8}{subsection.5.3}}
34 | \@writefile{lof}{\contentsline {figure}{\numberline {7}{\ignorespaces Initial ROI operations\relax }}{8}{figure.caption.8}}
35 | \@writefile{lof}{\contentsline {figure}{\numberline {8}{\ignorespaces Frame operations\relax }}{9}{figure.caption.9}}
36 | \@writefile{lof}{\contentsline {figure}{\numberline {9}{\ignorespaces Drawing operations\relax }}{9}{figure.caption.10}}
37 | \@writefile{toc}{\contentsline {section}{\numberline {6}Code}{9}{section.6}}
38 | \@writefile{toc}{\contentsline {section}{\numberline {7}Exercise}{9}{section.7}}
39 | \@writefile{lof}{\contentsline {figure}{\numberline {10}{\ignorespaces Selecting suspect car in a chase as ROI (Courtesy: youtube.com)\relax }}{10}{figure.caption.11}}
40 | \@writefile{lof}{\contentsline {figure}{\numberline {11}{\ignorespaces Tracking the suspect in the frame (Courtesy: youtube.com)\relax }}{10}{figure.caption.12}}
41 | \@writefile{lof}{\contentsline {figure}{\numberline {12}{\ignorespaces Selecting Deodorant as ROI\relax }}{11}{figure.caption.13}}
42 | \@writefile{lof}{\contentsline {figure}{\numberline {13}{\ignorespaces Tracking the Deodorant in the frame\relax }}{11}{figure.caption.14}}
43 | \@writefile{toc}{\contentsline {section}{\numberline {8}References}{12}{section.8}}
44 |
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Latex Code/Tutorial.log:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Latex Code/Tutorial.log
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Latex Code/Tutorial.out:
--------------------------------------------------------------------------------
1 | \BOOKMARK [1][-]{section.1}{Objective}{}% 1
2 | \BOOKMARK [1][-]{section.2}{Prerequisites}{}% 2
3 | \BOOKMARK [1][-]{section.3}{Hardware Requirement}{}% 3
4 | \BOOKMARK [1][-]{section.4}{Software Requirement}{}% 4
5 | \BOOKMARK [1][-]{section.5}{Theory and Description}{}% 5
6 | \BOOKMARK [2][-]{subsection.5.1}{Meanshift Algorithm}{section.5}% 6
7 | \BOOKMARK [2][-]{subsection.5.2}{Camshift Algorithm}{section.5}% 7
8 | \BOOKMARK [3][-]{subsubsection.5.2.1}{Illustration of Camshift algorithm in a Second video frame}{subsection.5.2}% 8
9 | \BOOKMARK [2][-]{subsection.5.3}{Using Camshift Algorithm using Python OpenCV}{section.5}% 9
10 | \BOOKMARK [1][-]{section.6}{Code}{}% 10
11 | \BOOKMARK [1][-]{section.7}{Exercise}{}% 11
12 | \BOOKMARK [1][-]{section.8}{References}{}% 12
13 |
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Latex Code/Tutorial.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Latex Code/Tutorial.pdf
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Latex Code/Tutorial.synctex.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Latex Code/Tutorial.synctex.gz
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Latex Code/Tutorial.toc:
--------------------------------------------------------------------------------
1 | \contentsline {section}{\numberline {1}Objective}{3}{section.1}
2 | \contentsline {section}{\numberline {2}Prerequisites}{3}{section.2}
3 | \contentsline {section}{\numberline {3}Hardware Requirement}{3}{section.3}
4 | \contentsline {section}{\numberline {4}Software Requirement}{3}{section.4}
5 | \contentsline {section}{\numberline {5}Theory and Description}{3}{section.5}
6 | \contentsline {subsection}{\numberline {5.1}Meanshift Algorithm}{4}{subsection.5.1}
7 | \contentsline {subsection}{\numberline {5.2}Camshift Algorithm}{5}{subsection.5.2}
8 | \contentsline {subsubsection}{\numberline {5.2.1}Illustration of Camshift algorithm in a Second video frame}{5}{subsubsection.5.2.1}
9 | \contentsline {subsection}{\numberline {5.3}Using Camshift Algorithm using Python OpenCV}{8}{subsection.5.3}
10 | \contentsline {section}{\numberline {6}Code}{9}{section.6}
11 | \contentsline {section}{\numberline {7}Exercise}{9}{section.7}
12 | \contentsline {section}{\numberline {8}References}{12}{section.8}
13 |
--------------------------------------------------------------------------------
/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Tutorial.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/2. Tutorial on CAMShift Algorithm and How to use it using OpenCV and Python for Object Tracking/Tutorial.pdf
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/3D design files/OBGV5_3-axis_v5_50x50mm.123dx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/3D design files/OBGV5_3-axis_v5_50x50mm.123dx
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/3D design files/camera fixing mount.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/3D design files/camera fixing mount.stl
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/3D design files/gimbal motor frame 1.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/3D design files/gimbal motor frame 1.stl
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/3D design files/gimbal motor frame 2.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/3D design files/gimbal motor frame 2.stl
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/3D design files/gimbal motor frame 3.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/3D design files/gimbal motor frame 3.stl
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/3D design files/lower vibration and shock observing mount.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/3D design files/lower vibration and shock observing mount.stl
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/3D design files/upper vibration and shock observing mount.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/3D design files/upper vibration and shock observing mount.stl
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/README.txt:
--------------------------------------------------------------------------------
1 | .: :,
2 | ,:::::::: ::` ::: :::
3 | ,:::::::: ::` ::: :::
4 | .,,:::,,, ::`.:, ... .. .:, .:. ..`... ..` .. .:, .. :: .::, .:,`
5 | ,:: ::::::: ::, ::::::: `:::::::.,:: ::: ::: .:::::: ::::: :::::: .::::::
6 | ,:: :::::::: ::, :::::::: ::::::::.,:: ::: ::: :::,:::, ::::: ::::::, ::::::::
7 | ,:: ::: ::: ::, ::: :::`::. :::.,:: ::,`::`::: ::: ::: `::,` ::: :::
8 | ,:: ::. ::: ::, ::` :::.:: ::.,:: :::::: ::::::::: ::` :::::: :::::::::
9 | ,:: ::. ::: ::, ::` :::.:: ::.,:: .::::: ::::::::: ::` :::::::::::::::
10 | ,:: ::. ::: ::, ::` ::: ::: `:::.,:: :::: :::` ,,, ::` .:: :::.::. ,,,
11 | ,:: ::. ::: ::, ::` ::: ::::::::.,:: :::: :::::::` ::` ::::::: :::::::.
12 | ,:: ::. ::: ::, ::` ::: :::::::`,:: ::. :::::` ::` :::::: :::::.
13 | ::, ,:: ``
14 | ::::::::
15 | ::::::
16 | `,,`
17 |
18 |
19 | http://www.thingiverse.com/thing:1247236
20 | Open Brushless Gimbal (3-axis) by roumen is licensed under the Creative Commons - Attribution - Non-Commercial license.
21 | http://creativecommons.org/licenses/by-nc/3.0/
22 |
23 | # Summary
24 |
25 | I love the Open Brushless Gimbal by turbi but wanted a 3-axis version for my DJI inspired drone. I used turbi's files and changed most of the parts to this version.
26 |
27 | I use 2208 motors and a storm32 BGC which works great for my GoPro Hero3.
28 | The balls are from a standard DJI set. (DJI H3-3D Standard Version Part42 Damping Shock Absorber Ball)
29 |
30 | The Bolts I used are stainless steel:
31 | 3x M3 x 20mm HEX button head (for the motor clamps)
32 | 1x M3 x 20mm HEX button head (for the arm1-arm2 connection)
33 | 4x M3 x 8mm HEX button head (for the motor arm3)
34 | 4x M3 x 6mm HEX flat head (for the GoPro case)
35 |
36 | I added some additional snapshots of the assembly. I only use the 16 x 19 mm mounting holes of the motors. The housing of the motor is clamped in the different parts.
37 |
38 | On request I also added the 123dx design file so you can build further on this design.
39 |
40 | Short action video here: https://youtu.be/80s-3GhD2zk
41 |
42 | Enjoy!
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/SOURCES.txt:
--------------------------------------------------------------------------------
1 | Sources for Open Brushless Gimbal (3-axis)
2 |
3 | Open Brushless Gimbal (http://www.thingiverse.com/thing:110731)
4 |
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/attribution_card.html:
--------------------------------------------------------------------------------
Open Brushless Gimbal (3-axis) by roumen
Published on January 5, 2016
www.thingiverse.com/thing:1247236
Creative Commons - Attribution - Non-Commercial
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/37574ea8b0a8525690b11f20a5445401_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/37574ea8b0a8525690b11f20a5445401_preview_featured.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/564c6e88150f166901a402e6df957297_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/564c6e88150f166901a402e6df957297_preview_featured.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/5d9f698bce4fd6c0918dfd25dc37a5ea_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/5d9f698bce4fd6c0918dfd25dc37a5ea_preview_featured.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/6cb8ffe381b2f736251d6571fcfda029_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/6cb8ffe381b2f736251d6571fcfda029_preview_featured.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/821ece2c3e98576eb87f1f1e1fac4e60_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/821ece2c3e98576eb87f1f1e1fac4e60_preview_featured.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/OBGV5_arm1_3-axis_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/OBGV5_arm1_3-axis_preview_featured.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/OBGV5_arm3_3-axis_CG_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/OBGV5_arm3_3-axis_CG_preview_featured.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/a7ecc62c2f14c9de5b260e0aaa503662_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/a7ecc62c2f14c9de5b260e0aaa503662_preview_featured.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/bb9326229714158449c691285b203ab4_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/bb9326229714158449c691285b203ab4_preview_featured.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/cebbf42b03b8b3efce62912d2b5503ad_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/cebbf42b03b8b3efce62912d2b5503ad_preview_featured.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/fc4df6ccc08186e6cf9512eb68fdb4d9_preview_featured.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/3 axis gimbal 3D designs for 3D printing/images/fc4df6ccc08186e6cf9512eb68fdb4d9_preview_featured.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Images/3_Axis_Gimbal_Example.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Images/3_Axis_Gimbal_Example.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Images/Aircraft_Movement.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Images/Aircraft_Movement.JPG
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Images/Brushless_Gimbal_Motor.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Images/Brushless_Gimbal_Motor.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Images/Camera_Fixing_Mount.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Images/Camera_Fixing_Mount.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Images/Controller.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Images/Controller.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Images/Final_Gimbal.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Images/Final_Gimbal.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Images/Gimbal_Motor_Frame_1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Images/Gimbal_Motor_Frame_1.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Images/Gimbal_Motor_Frame_2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Images/Gimbal_Motor_Frame_2.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Images/Gimbal_Motor_Frame_3.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Images/Gimbal_Motor_Frame_3.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Images/Shock_Absorber_Balls.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Images/Shock_Absorber_Balls.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Images/balancing_pitch_1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Images/balancing_pitch_1.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Images/balancing_pitch_2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Images/balancing_pitch_2.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Images/balancing_pitch_axis.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Images/balancing_pitch_axis.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Images/balancing_roll_1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Images/balancing_roll_1.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Images/balancing_roll_2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Images/balancing_roll_2.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Images/balancing_roll_axis.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Images/balancing_roll_axis.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Images/balancing_yaw_axis.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Images/balancing_yaw_axis.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Images/gimbal_making_phase_1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Images/gimbal_making_phase_1.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Images/lower_vibration_observing_mount.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Images/lower_vibration_observing_mount.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Images/upper_vibration_observing_mount.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Images/upper_vibration_observing_mount.jpg
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Latex code/Tutorial.aux:
--------------------------------------------------------------------------------
1 | \relax
2 | \providecommand\hyper@newdestlabel[2]{}
3 | \providecommand\HyperFirstAtBeginDocument{\AtBeginDocument}
4 | \HyperFirstAtBeginDocument{\ifx\hyper@anchor\@undefined
5 | \global\let\oldcontentsline\contentsline
6 | \gdef\contentsline#1#2#3#4{\oldcontentsline{#1}{#2}{#3}}
7 | \global\let\oldnewlabel\newlabel
8 | \gdef\newlabel#1#2{\newlabelxx{#1}#2}
9 | \gdef\newlabelxx#1#2#3#4#5#6{\oldnewlabel{#1}{{#2}{#3}}}
10 | \AtEndDocument{\ifx\hyper@anchor\@undefined
11 | \let\contentsline\oldcontentsline
12 | \let\newlabel\oldnewlabel
13 | \fi}
14 | \fi}
15 | \global\let\hyper@last\relax
16 | \gdef\HyperFirstAtBeginDocument#1{#1}
17 | \providecommand\HyField@AuxAddToFields[1]{}
18 | \providecommand\HyField@AuxAddToCoFields[2]{}
19 | \@writefile{toc}{\contentsline {section}{\numberline {1}How to make your own gimbal?}{3}{section.1}}
20 | \@writefile{toc}{\contentsline {section}{\numberline {2}Prerequisites}{3}{section.2}}
21 | \@writefile{toc}{\contentsline {section}{\numberline {3}Hardware Requirement}{3}{section.3}}
22 | \@writefile{toc}{\contentsline {section}{\numberline {4}Software Requirement}{3}{section.4}}
23 | \@writefile{toc}{\contentsline {section}{\numberline {5}Theory and Description}{3}{section.5}}
24 | \@writefile{toc}{\contentsline {subsection}{\numberline {5.1}What is Gimbal?}{3}{subsection.5.1}}
25 | \@writefile{toc}{\contentsline {subsection}{\numberline {5.2}Roll, Pitch, Yaw Axis}{4}{subsection.5.2}}
26 | \@writefile{toc}{\contentsline {subsection}{\numberline {5.3}Gimbal for quadcopter}{5}{subsection.5.3}}
27 | \@writefile{toc}{\contentsline {subsection}{\numberline {5.4}Balancing the Gimbal}{6}{subsection.5.4}}
28 | \@writefile{toc}{\contentsline {subsubsection}{\numberline {5.4.1}Balance the Pitch axis:}{6}{subsubsection.5.4.1}}
29 | \@writefile{toc}{\contentsline {subsubsection}{\numberline {5.4.2}Figure 3: Balancing the Roll axis:}{7}{subsubsection.5.4.2}}
30 | \@writefile{toc}{\contentsline {section}{\numberline {6}Experiment}{8}{section.6}}
31 | \@writefile{toc}{\contentsline {subsection}{\numberline {6.1}Making a Gimbal}{8}{subsection.6.1}}
32 | \@writefile{toc}{\contentsline {subsubsection}{\numberline {6.1.1}Required 3D printed parts:}{8}{subsubsection.6.1.1}}
33 | \@writefile{toc}{\contentsline {subsubsection}{\numberline {6.1.2}Required purchased parts:}{12}{subsubsection.6.1.2}}
34 | \@writefile{toc}{\contentsline {section}{\numberline {7}Compatible Camera}{16}{section.7}}
35 | \@writefile{toc}{\contentsline {section}{\numberline {8}Exercise}{16}{section.8}}
36 | \@writefile{toc}{\contentsline {section}{\numberline {9}References}{18}{section.9}}
37 |
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Latex code/Tutorial.log:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Latex code/Tutorial.log
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Latex code/Tutorial.out:
--------------------------------------------------------------------------------
1 | \BOOKMARK [1][-]{section.1}{How to make your own gimbal?}{}% 1
2 | \BOOKMARK [1][-]{section.2}{Prerequisites}{}% 2
3 | \BOOKMARK [1][-]{section.3}{Hardware Requirement}{}% 3
4 | \BOOKMARK [1][-]{section.4}{Software Requirement}{}% 4
5 | \BOOKMARK [1][-]{section.5}{Theory and Description}{}% 5
6 | \BOOKMARK [2][-]{subsection.5.1}{What is Gimbal?}{section.5}% 6
7 | \BOOKMARK [2][-]{subsection.5.2}{Roll, Pitch, Yaw Axis}{section.5}% 7
8 | \BOOKMARK [2][-]{subsection.5.3}{Gimbal for quadcopter}{section.5}% 8
9 | \BOOKMARK [2][-]{subsection.5.4}{Balancing the Gimbal}{section.5}% 9
10 | \BOOKMARK [3][-]{subsubsection.5.4.1}{Balance the Pitch axis:}{subsection.5.4}% 10
11 | \BOOKMARK [3][-]{subsubsection.5.4.2}{Balancing the Roll axis:}{subsection.5.4}% 11
12 | \BOOKMARK [1][-]{section.6}{Experiment}{}% 12
13 | \BOOKMARK [2][-]{subsection.6.1}{Making a Gimbal}{section.6}% 13
14 | \BOOKMARK [3][-]{subsubsection.6.1.1}{Required 3D printed parts:}{subsection.6.1}% 14
15 | \BOOKMARK [3][-]{subsubsection.6.1.2}{Required purchased parts:}{subsection.6.1}% 15
16 | \BOOKMARK [1][-]{section.7}{Compatible Camera}{}% 16
17 | \BOOKMARK [1][-]{section.8}{Exercise}{}% 17
18 | \BOOKMARK [1][-]{section.9}{References}{}% 18
19 |
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Latex code/Tutorial.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Latex code/Tutorial.pdf
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Latex code/Tutorial.synctex.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Latex code/Tutorial.synctex.gz
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Latex code/Tutorial.toc:
--------------------------------------------------------------------------------
1 | \contentsline {section}{\numberline {1}How to make your own gimbal?}{3}{section.1}
2 | \contentsline {section}{\numberline {2}Prerequisites}{3}{section.2}
3 | \contentsline {section}{\numberline {3}Hardware Requirement}{3}{section.3}
4 | \contentsline {section}{\numberline {4}Software Requirement}{3}{section.4}
5 | \contentsline {section}{\numberline {5}Theory and Description}{3}{section.5}
6 | \contentsline {subsection}{\numberline {5.1}What is Gimbal?}{3}{subsection.5.1}
7 | \contentsline {subsection}{\numberline {5.2}Roll, Pitch, Yaw Axis}{4}{subsection.5.2}
8 | \contentsline {subsection}{\numberline {5.3}Gimbal for quadcopter}{5}{subsection.5.3}
9 | \contentsline {subsection}{\numberline {5.4}Balancing the Gimbal}{6}{subsection.5.4}
10 | \contentsline {subsubsection}{\numberline {5.4.1}Balance the Pitch axis:}{6}{subsubsection.5.4.1}
11 | \contentsline {subsubsection}{\numberline {5.4.2}Balancing the Roll axis:}{7}{subsubsection.5.4.2}
12 | \contentsline {section}{\numberline {6}Experiment}{8}{section.6}
13 | \contentsline {subsection}{\numberline {6.1}Making a Gimbal}{8}{subsection.6.1}
14 | \contentsline {subsubsection}{\numberline {6.1.1}Required 3D printed parts:}{8}{subsubsection.6.1.1}
15 | \contentsline {subsubsection}{\numberline {6.1.2}Required purchased parts:}{12}{subsubsection.6.1.2}
16 | \contentsline {section}{\numberline {7}Compatible Camera}{16}{section.7}
17 | \contentsline {section}{\numberline {8}Exercise}{16}{section.8}
18 | \contentsline {section}{\numberline {9}References}{18}{section.9}
19 |
--------------------------------------------------------------------------------
/Tutorials/3. How to make your own gimbal/Tutorial.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/3. How to make your own gimbal/Tutorial.pdf
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/Image1.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/Image1.JPG
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/Image2.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/Image2.JPG
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/Image3.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/Image3.JPG
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/Image4.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/Image4.JPG
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/back_projection.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/back_projection.JPG
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/camshift.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/camshift.JPG
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/code_histogram_comparison.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/code_histogram_comparison.JPG
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/eliminated_hsv_frame.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/eliminated_hsv_frame.JPG
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/frame_elimination_code.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/frame_elimination_code.JPG
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/frame_hist_code.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/frame_hist_code.JPG
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/frame_histogram.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/frame_histogram.JPG
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/giving_roi.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/giving_roi.JPG
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/histogram_roi.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/histogram_roi.JPG
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/redetection2.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/redetection2.JPG
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/redetection3.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/redetection3.JPG
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/redetection_back_projection.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/redetection_back_projection.JPG
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/roi_histogram_code.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/roi_histogram_code.JPG
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/special_back_projection.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/special_back_projection.JPG
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/test.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/test.JPG
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/tracking_object.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Images/tracking_object.JPG
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Latex code/Tutorial.aux:
--------------------------------------------------------------------------------
1 | \relax
2 | \providecommand\hyper@newdestlabel[2]{}
3 | \providecommand\HyperFirstAtBeginDocument{\AtBeginDocument}
4 | \HyperFirstAtBeginDocument{\ifx\hyper@anchor\@undefined
5 | \global\let\oldcontentsline\contentsline
6 | \gdef\contentsline#1#2#3#4{\oldcontentsline{#1}{#2}{#3}}
7 | \global\let\oldnewlabel\newlabel
8 | \gdef\newlabel#1#2{\newlabelxx{#1}#2}
9 | \gdef\newlabelxx#1#2#3#4#5#6{\oldnewlabel{#1}{{#2}{#3}}}
10 | \AtEndDocument{\ifx\hyper@anchor\@undefined
11 | \let\contentsline\oldcontentsline
12 | \let\newlabel\oldnewlabel
13 | \fi}
14 | \fi}
15 | \global\let\hyper@last\relax
16 | \gdef\HyperFirstAtBeginDocument#1{#1}
17 | \providecommand\HyField@AuxAddToFields[1]{}
18 | \providecommand\HyField@AuxAddToCoFields[2]{}
19 | \@writefile{toc}{\contentsline {section}{\numberline {1}Objective}{3}{section.1}}
20 | \@writefile{toc}{\contentsline {section}{\numberline {2}Prerequisites}{3}{section.2}}
21 | \@writefile{toc}{\contentsline {section}{\numberline {3}Hardware Requirement}{3}{section.3}}
22 | \@writefile{toc}{\contentsline {section}{\numberline {4}Software Requirement}{3}{section.4}}
23 | \@writefile{toc}{\contentsline {section}{\numberline {5}Theory and Description}{3}{section.5}}
24 | \@writefile{toc}{\contentsline {section}{\numberline {6}Experiment}{13}{section.6}}
25 | \@writefile{toc}{\contentsline {section}{\numberline {7}Exercise}{13}{section.7}}
26 | \@writefile{toc}{\contentsline {section}{\numberline {8}Limitations}{16}{section.8}}
27 | \@writefile{toc}{\contentsline {section}{\numberline {9}References}{17}{section.9}}
28 |
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Latex code/Tutorial.out:
--------------------------------------------------------------------------------
1 | \BOOKMARK [1][-]{section.1}{Objective}{}% 1
2 | \BOOKMARK [1][-]{section.2}{Prerequisites}{}% 2
3 | \BOOKMARK [1][-]{section.3}{Hardware Requirement}{}% 3
4 | \BOOKMARK [1][-]{section.4}{Software Requirement}{}% 4
5 | \BOOKMARK [1][-]{section.5}{Theory and Description}{}% 5
6 | \BOOKMARK [1][-]{section.6}{Experiment}{}% 6
7 | \BOOKMARK [1][-]{section.7}{Exercise}{}% 7
8 | \BOOKMARK [1][-]{section.8}{Limitations}{}% 8
9 | \BOOKMARK [1][-]{section.9}{References}{}% 9
10 |
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Latex code/Tutorial.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Latex code/Tutorial.pdf
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Latex code/Tutorial.synctex.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Latex code/Tutorial.synctex.gz
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Latex code/Tutorial.toc:
--------------------------------------------------------------------------------
1 | \contentsline {section}{\numberline {1}Objective}{3}{section.1}
2 | \contentsline {section}{\numberline {2}Prerequisites}{3}{section.2}
3 | \contentsline {section}{\numberline {3}Hardware Requirement}{3}{section.3}
4 | \contentsline {section}{\numberline {4}Software Requirement}{3}{section.4}
5 | \contentsline {section}{\numberline {5}Theory and Description}{3}{section.5}
6 | \contentsline {section}{\numberline {6}Experiment}{13}{section.6}
7 | \contentsline {section}{\numberline {7}Exercise}{13}{section.7}
8 | \contentsline {section}{\numberline {8}Limitations}{16}{section.8}
9 | \contentsline {section}{\numberline {9}References}{17}{section.9}
10 |
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Tutorial.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/eYSIP-2016/Object-Tracking-Camera/58093f5aa5112f89b9e9a5a4726df26ac35d405e/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/Tutorial.pdf
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/code/README.md:
--------------------------------------------------------------------------------
1 | #### `run.py` is the executable script; `functions.py` contains the helper functions used by `run.py`
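
The driver script run.py itself is not shown in this part of the dump, so the following is only a rough orientation: a minimal sketch, assuming Python 2 and OpenCV 2.4 (matching the code below), of how the helpers in functions.py (get_track_window and distance) could be wired into a histogram back-projection / CamShift loop of the kind this tutorial describes. It is not the repository's run.py; in particular, the webcam source, the assumption that get_track_window returns a list containing one (x1, y1, x2, y2) tuple, and the histogram, threshold and termination parameters are illustrative choices only.

# Minimal sketch (not the repository's run.py): track a user-selected ROI with
# histogram back-projection and CamShift, reusing the helpers from functions.py.
import cv2
import numpy as np
from functions import get_track_window, distance

cap = cv2.VideoCapture(0)                      # assumed: default webcam
ret, frame = cap.read()

# Assumption: get_track_window returns a list with one (x1, y1, x2, y2) tuple.
x1, y1, x2, y2 = get_track_window(frame)[0]
track_window = (x1, y1, x2 - x1, y2 - y1)

# Hue histogram of the selected ROI, ignoring dark / unsaturated pixels.
roi = frame[y1:y2, x1:x2]
hsv_roi = cv2.cvtColor(roi, cv2.COLOR_BGR2HSV)
mask = cv2.inRange(hsv_roi, np.array((0., 60., 32.)), np.array((180., 255., 255.)))
roi_hist = cv2.calcHist([hsv_roi], [0], mask, [180], [0, 180])
cv2.normalize(roi_hist, roi_hist, 0, 255, cv2.NORM_MINMAX)

term_crit = (cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT, 10, 1)
prev_center = ((x1 + x2) / 2, (y1 + y2) / 2)

while True:
    ret, frame = cap.read()
    if not ret:
        break
    hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
    back_proj = cv2.calcBackProject([hsv], [0], roi_hist, [0, 180], 1)

    # CamShift shifts the window towards the densest back-projection region.
    rot_rect, track_window = cv2.CamShift(back_proj, track_window, term_crit)
    x, y, w, h = track_window
    center = (x + w / 2, y + h / 2)

    # A large frame-to-frame jump of the window centre is one possible cue that
    # the target was lost; the 100 px threshold here is an arbitrary example.
    if distance(center, prev_center) > 100:
        cv2.putText(frame, 'possible target loss', (10, 30),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.8, (0, 0, 255), 2)
    prev_center = center

    cv2.rectangle(frame, (x, y), (x + w, y + h), (255, 255, 255), 2)
    cv2.imshow('tracking', frame)
    if cv2.waitKey(30) & 0xFF == ord('q'):
        break

cap.release()
cv2.destroyAllWindows()

Back projection scores every pixel of the new frame by how well its hue matches the ROI histogram, and CamShift then re-centres and re-sizes the search window on the densest region of that score map; only the ROI histogram, not the ROI pixels themselves, needs to be kept between frames.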
--------------------------------------------------------------------------------
/Tutorials/4. Object Tracking (Based on ROI) and Re-recognizing the object if it escapes and comes back into the frame/code/functions.py:
--------------------------------------------------------------------------------
1 | # Import the required modules
2 | import cv2
3 | import math
4 |
5 | def distance(current_center,prev_center):
6 | return math.sqrt((current_center[0]-prev_center[0])**2 + (current_center[1]-prev_center[1])**2)
7 |
8 |
9 | def get_track_window(im):
10 | cv2.imshow('frame', im)
11 |
12 | # List containing top-left and bottom-right to crop the image.
13 | pts_1 = []
14 | pts_2 = []
15 |
16 | rects = []
17 | get_track_window.mouse_down = False
18 |
19 | def callback(event, x, y, flags, param):
20 | if event == cv2.EVENT_LBUTTONDOWN:
21 | if len(pts_2) == 1: #pts_2 contains one tuple.
22 | print "WARN: Cannot select another object."
23 | print "Delete the previously selected object using key `d` to mark a new location."
24 | return
25 | get_track_window.mouse_down = True
26 | pts_1.append((x, y))
27 | elif event == cv2.EVENT_LBUTTONUP and get_track_window.mouse_down == True:
28 | get_track_window.mouse_down = False
29 | pts_2.append((x, y))
30 | print "Object selected at [{}, {}]".format(pts_1[-1], pts_2[-1])
31 | elif event == cv2.EVENT_MOUSEMOVE and get_track_window.mouse_down == True:
32 | im_draw = im.copy()
33 | cv2.rectangle(im_draw, pts_1[-1], (x, y), (255,255,255), 3)
34 | cv2.imshow('frame', im_draw)
35 |
36 | print "Press and release mouse around the object to be tracked."
37 | cv2.setMouseCallback('frame', callback)
38 |
39 | print "Press key `p` to continue with the selected points."
40 | print "Press key `d` to discard the last object selected."
41 | print "Press key `q` to quit the program."
42 | while True:
43 | # Draw the rectangular boxes on the image
44 | for pt1, pt2 in zip(pts_1, pts_2):
45 | rects.append([pt1[0],pt2[0], pt1[1], pt2[1]])
46 | cv2.rectangle(im, pt1, pt2, (255, 255, 255), 3)
47 | cv2.imshow('frame', im)
48 | key = cv2.waitKey(30)
49 | if key == ord('p'):
50 | # Press key `p` to return the selected points
51 | cv2.destroyAllWindows()
52 | point= [(tl + br) for tl, br in zip(pts_1, pts_2)]
53 | corrected_point=check_point(point)
54 | return corrected_point
55 | elif key == ord('q'):
56 | # Press key `q` to quit the program
57 | print "Quitting without saving."
58 | exit()
59 | elif key == ord('d'):
60 | # Press key `d` to delete the last rectangular region
61 | if get_track_window.mouse_down == False and pts_1:
62 | print "Object deleted at [{}, {}]".format(pts_1[-1], pts_2[-1])
63 | pts_1.pop()
64 | pts_2.pop()
65 | im_disp = im.copy()
66 | else:
67 | print "No object to delete."
68 | cv2.destroyAllWindows()
69 | point= [(tl + br) for tl, br in zip(pts_1, pts_2)]
70 | return check_point(point)
71 |
72 | def check_point(points):
73 | for point in points:
74 | #to find min and max x coordinates
75 | if point[0]