├── .gitignore
├── LICENSE
├── README.md
├── Traffic_signal
│   ├── Traffic_Signal_schem.jpg
│   └── Traffic_signal.ino
├── arduino
│   ├── Overtake
│   │   └── overtake.ino
│   ├── README.md
│   ├── rc_keyboard_control
│   │   └── rc_keyboard_control.ino
│   └── sketch_apr21a
│       └── sketch_apr21a.ino
├── computer
│   ├── README.md
│   ├── cascade_xml
│   │   ├── stop_sign.xml
│   │   └── traffic_light.xml
│   ├── collect_training_data.py
│   ├── mlp_training.py
│   ├── mlp_xml
│   │   └── readme.md
│   ├── picam_calibration.py
│   ├── rc_driver.py
│   ├── rc_driver_exp.py
│   ├── rc_driver_overtake.py
│   └── training_images
│       ├── frame00001.jpg
│       ├── frame00002.jpg
│       ├── frame00003.jpg
│       ├── frame00004.jpg
│       ├── frame00005.jpg
│       ├── frame00006.jpg
│       ├── frame00007.jpg
│       ├── frame00008.jpg
│       ├── frame00009.jpg
│       └── frame00010.jpg
├── raspberryPi
│   ├── README.md
│   ├── stream_client.py
│   └── ultrasonic_client.py
├── test
│   ├── README.md
│   ├── rc_control_test.py
│   ├── stream_server_test.py
│   └── ultrasonic_server_test.py
└── testing
    └── face.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 |
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 |
117 | # Rope project settings
118 | .ropeproject
119 |
120 | # mkdocs documentation
121 | /site
122 |
123 | # mypy
124 | .mypy_cache/
125 | .dmypy.json
126 | dmypy.json
127 |
128 | # Pyre type checker
129 | .pyre/
130 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## Optimus Future
2 |
3 | Demo Video!!
4 |
5 | A full tutorial and detailed README files are coming soon.
6 |
7 |
8 |
9 |
10 | Inspired by: https://www.youtube.com/watch?v=BBwEF6WBUQs
11 |
--------------------------------------------------------------------------------
/Traffic_signal/Traffic_Signal_schem.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/RobinRajSB/Self-Driving-Autonomous-Car-using-Open-CV-and-Python-Neural-Network-Overtaking-Raspberry-Pi/72ad3c897915502375b751f095836647aad6f08d/Traffic_signal/Traffic_Signal_schem.jpg
--------------------------------------------------------------------------------
/Traffic_signal/Traffic_signal.ino:
--------------------------------------------------------------------------------
1 | const int r = 9; //connect red led at pin 9
2 | const int y = 10; //connect yellow led at pin 10
3 | const int g = 11; //connect green led at pin 11
4 | const int sec = 1000; // one second, in milliseconds
5 | void setup()
6 | {
7 | pinMode(r,OUTPUT);
8 | pinMode(y,OUTPUT);
9 | pinMode(g,OUTPUT);
10 | delay(sec);
11 | }
12 |
13 | void loop()
14 | {
15 | digitalWrite(r, HIGH);
16 | delay(sec*5);
17 | digitalWrite(r, LOW);
18 | digitalWrite(y, HIGH);
19 | delay(sec*5);
20 | digitalWrite(y, LOW);
21 | digitalWrite(g, HIGH);
22 | delay(sec*5);
23 | digitalWrite(g, LOW);
24 |
25 | }
26 |
--------------------------------------------------------------------------------
/arduino/Overtake/overtake.ino:
--------------------------------------------------------------------------------
1 |
2 |
3 | // assign pin num
4 | int right_pin = 6; //blue
5 | int left_pin = 7; //orange
6 | int forward_pin = 10; //yellow
7 | int reverse_pin = 9; // red
8 |
9 | // duration for output
10 | int time = 50;
11 | // initial command
12 | int command = 0;
13 |
14 | void setup() {
15 | pinMode(right_pin, OUTPUT);
16 | pinMode(left_pin, OUTPUT);
17 | pinMode(forward_pin, OUTPUT);
18 | pinMode(reverse_pin, OUTPUT);
19 | Serial.begin(115200);
20 | }
21 |
22 | void loop() {
23 | //receive command
24 | if (Serial.available() > 0){
25 | command = Serial.read();
26 | }
27 | else{
28 | reset();
29 | }
30 | send_command(command,time);
31 | }
32 |
33 | void right(int time){
34 | digitalWrite(right_pin, LOW);
35 | delay(time);
36 | }
37 |
38 | void left(int time){
39 | digitalWrite(left_pin, LOW);
40 | delay(time);
41 | }
42 |
43 | void forward(int time){
44 | digitalWrite(forward_pin, LOW);
45 | delay(time);
46 | }
47 |
48 | void reverse(int time){
49 | digitalWrite(reverse_pin, LOW);
50 | delay(time);
51 | }
52 |
53 | void forward_right(int time){
54 | digitalWrite(forward_pin, LOW);
55 | digitalWrite(right_pin, LOW);
56 | delay(time);
57 | }
58 |
59 | void reverse_right(int time){
60 | digitalWrite(reverse_pin, LOW);
61 | digitalWrite(right_pin, LOW);
62 | delay(time);
63 | }
64 |
65 | void forward_left(int time){
66 | digitalWrite(forward_pin, LOW);
67 | digitalWrite(left_pin, LOW);
68 | delay(time);
69 | }
70 |
71 | void reverse_left(int time){
72 | digitalWrite(reverse_pin, LOW);
73 | digitalWrite(left_pin, LOW);
74 | delay(time);
75 | }
76 |
77 | void reset(){
78 | // release all pins (drive outputs are active LOW)
79 | digitalWrite(right_pin, HIGH);
80 | digitalWrite(left_pin, HIGH);
81 | digitalWrite(forward_pin, HIGH);
82 | digitalWrite(reverse_pin, HIGH);
83 | }
93 |
94 |
95 | void send_command(int command, int time){
96 | switch (command){
97 |
98 | //reset command
99 | case 0: reset(); break;
100 |
101 | // single command
102 | case 1: forward(time); break;
103 | case 2: reverse(time); break;
104 | case 3: right(time); break;
105 | case 4: left(time); break;
106 |
107 | //combination command
108 | case 6: forward_right(time); break;
109 | case 7: forward_left(time); break;
110 | case 8: reverse_right(time); break;
111 | case 9: reverse_left(time); break;
112 |
113 |
114 | default: Serial.print("Invalid Command\n");
115 | }
116 | }
117 |
--------------------------------------------------------------------------------
/arduino/README.md:
--------------------------------------------------------------------------------
1 | Inspired by this article: [Drive a Lamborghini With Your Keyboard](http://thelivingpearl.com/2013/01/04/drive-a-lamborghini-with-your-keyboard/)
2 |
--------------------------------------------------------------------------------
/arduino/rc_keyboard_control/rc_keyboard_control.ino:
--------------------------------------------------------------------------------
1 |
2 |
3 | // assign pin num
4 | int right_pin = 10; //blue
5 | int left_pin = 6; //orange
6 | int forward_pin = 9; //yellow
7 | int reverse_pin = 7; // red
8 |
9 | // duration for output
10 | int time = 50;
11 | // initial command
12 | int command = 0;
13 |
14 | void setup() {
15 | pinMode(right_pin, OUTPUT);
16 | pinMode(left_pin, OUTPUT);
17 | pinMode(forward_pin, OUTPUT);
18 | pinMode(reverse_pin, OUTPUT);
19 | Serial.begin(115200);
20 | }
21 |
22 | void loop() {
23 | //receive command
24 | if (Serial.available() > 0){
25 | command = Serial.read();
26 | }
27 | else{
28 | reset();
29 | }
30 | send_command(command,time);
31 | }
32 |
33 | void right(int time){
34 | digitalWrite(right_pin, LOW);
35 | delay(time);
36 | }
37 |
38 | void left(int time){
39 | digitalWrite(left_pin, LOW);
40 | delay(time);
41 | }
42 |
43 | void forward(int time){
44 | digitalWrite(forward_pin, LOW);
45 | delay(time);
46 | }
47 |
48 | void reverse(int time){
49 | digitalWrite(reverse_pin, LOW);
50 | delay(time);
51 | }
52 |
53 | void forward_right(int time){
54 | digitalWrite(forward_pin, LOW);
55 | digitalWrite(right_pin, LOW);
56 | delay(time);
57 | }
58 |
59 | void reverse_right(int time){
60 | digitalWrite(reverse_pin, LOW);
61 | digitalWrite(right_pin, LOW);
62 | delay(time);
63 | }
64 |
65 | void forward_left(int time){
66 | digitalWrite(forward_pin, LOW);
67 | digitalWrite(left_pin, LOW);
68 | delay(time);
69 | }
70 |
71 | void reverse_left(int time){
72 | digitalWrite(reverse_pin, LOW);
73 | digitalWrite(left_pin, LOW);
74 | delay(time);
75 | }
76 |
77 | void reset(){
78 | digitalWrite(right_pin, HIGH);
79 | digitalWrite(left_pin, HIGH);
80 | digitalWrite(forward_pin, HIGH);
81 | digitalWrite(reverse_pin, HIGH);
82 | }
83 |
84 | void send_command(int command, int time){
85 | switch (command){
86 |
87 | //reset command
88 | case 0: reset(); break;
89 |
90 | // single command
91 | case 1: forward(time); break;
92 | case 2: reverse(time); break;
93 | case 3: right(time); break;
94 | case 4: left(time); break;
95 |
96 | //combination command
97 | case 6: forward_right(time); break;
98 | case 7: forward_left(time); break;
99 | case 8: reverse_right(time); break;
100 | case 9: reverse_left(time); break;
101 |
102 | default: Serial.print("Invalid Command\n");
103 | }
104 | }
105 |
--------------------------------------------------------------------------------
/arduino/sketch_apr21a/sketch_apr21a.ino:
--------------------------------------------------------------------------------
1 |
2 |
3 | // assign pin num
4 | int right_pin = 6; //blue
5 | int left_pin = 7; //orange
6 | int forward_pin = 10; //yellow
7 | int reverse_pin = 9; // red
8 |
9 | // duration for output
10 | int time = 50;
11 | // initial command
12 | int command = 0;
13 |
14 | void setup() {
15 | pinMode(right_pin, OUTPUT);
16 | pinMode(left_pin, OUTPUT);
17 | pinMode(forward_pin, OUTPUT);
18 | pinMode(reverse_pin, OUTPUT);
19 | Serial.begin(115200);
20 | }
21 |
22 | void loop() {
23 | //receive command
24 | if (Serial.available() > 0){
25 | command = Serial.read();
26 | }
27 | else{
28 | reset();
29 | }
30 | send_command(command,time);
31 | }
32 |
33 | void right(int time){
34 | digitalWrite(right_pin, LOW);
35 | delay(time);
36 | }
37 |
38 | void left(int time){
39 | digitalWrite(left_pin, LOW);
40 | delay(time);
41 | }
42 |
43 | void forward(int time){
44 | digitalWrite(forward_pin, LOW);
45 | delay(time);
46 | }
47 |
48 | void reverse(int time){
49 | digitalWrite(reverse_pin, LOW);
50 | delay(time);
51 | }
52 |
53 | void forward_right(int time){
54 | digitalWrite(forward_pin, LOW);
55 | digitalWrite(right_pin, LOW);
56 | delay(time);
57 | }
58 |
59 | void reverse_right(int time){
60 | digitalWrite(reverse_pin, LOW);
61 | digitalWrite(right_pin, LOW);
62 | delay(time);
63 | }
64 |
65 | void forward_left(int time){
66 | digitalWrite(forward_pin, LOW);
67 | digitalWrite(left_pin, LOW);
68 | delay(time);
69 | }
70 |
71 | void reverse_left(int time){
72 | digitalWrite(reverse_pin, LOW);
73 | digitalWrite(left_pin, LOW);
74 | delay(time);
75 | }
87 |
88 | void reset(){
89 | digitalWrite(right_pin, HIGH);
90 | digitalWrite(forward_pin, HIGH);
91 | digitalWrite(left_pin, HIGH);
92 | digitalWrite(reverse_pin, HIGH);
93 | }
98 |
99 | void send_command(int command, int time){
100 | switch (command){
101 |
102 | //reset command
103 | case 0: reset(); break;
104 |
105 | // single command
106 | case 1: forward(time); break;
107 | case 2: reverse(time); break;
108 | case 3: right(time); break;
109 | case 4: left(time); break;
110 |
111 | //combination command
112 | case 6: forward_right(time); break;
113 | case 7: forward_left(time); break;
114 | case 8: reverse_right(time); break;
115 | case 9: reverse_left(time); break;
116 |
117 | default: Serial.print("Invalid Command\n");
118 | }
119 | }
120 |
--------------------------------------------------------------------------------
/computer/README.md:
--------------------------------------------------------------------------------
1 | The distance to the stop sign and the traffic light is measured with a single Pi camera, so camera calibration is needed.
2 |
3 | For more details on camera calibration, please refer to the [OpenCV-Python Tutorial](http://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_calib3d/py_calibration/py_calibration.html)
4 |
5 | For more details on calculating distance using monocular vision, please refer to [Vehicle Distance Measurement based on Monocular Vision - Xu Guoyan, Wang Chuanrong, Gao feng, & Wang Jiangfeng (September, 2009)](http://www.paper.edu.cn/download/downPaper/200909-748)
6 |
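7 | As a rough illustration of the idea (not this repository's exact code), the pinhole model gives distance ≈ focal_length_px × real_height / pixel_height, where the focal length in pixels comes from the calibration step and the pixel height comes from the detected bounding box. The focal length and sign height below are hypothetical placeholder values:
8 |
9 | ```python
10 | # Minimal sketch of monocular distance estimation (illustrative only;
11 | # the focal length and object height here are hypothetical placeholders).
12 | def estimate_distance(focal_length_px, real_height_m, pixel_height):
13 |     """Pinhole-camera estimate: d = f * H / h."""
14 |     return focal_length_px * real_height_m / pixel_height
15 |
16 | # e.g. a 0.05 m tall sign detected 25 px tall with a 330 px focal length
17 | print(estimate_distance(330.0, 0.05, 25.0))  # ~0.66 m
18 | ```
19 |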
--------------------------------------------------------------------------------
/computer/cascade_xml/traffic_light.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | BOOST
5 | HAAR
6 | 45
7 | 25
8 |
9 | GAB
10 | 9.9900001287460327e-01
11 | 5.0000000000000000e-01
12 | 9.4999999999999996e-01
13 | 1
14 | 100
15 |
16 | 0
17 | 1
18 | ALL
19 | 20
20 |
21 |
22 | <_>
23 | 3
24 | -1.1182584762573242e+00
25 |
26 | <_>
27 |
28 | 0 -1 92 -1.5218246728181839e-02
29 |
30 | 8.0637812614440918e-01 -9.5652174949645996e-01
31 | <_>
32 |
33 | 0 -1 1 -9.0668499469757080e-03
34 |
35 | 7.8182405233383179e-01 -8.1939858198165894e-01
36 | <_>
37 |
38 | 0 -1 91 -8.2602538168430328e-03
39 |
40 | 6.5766179561614990e-01 -8.2300186157226562e-01
41 |
42 | <_>
43 | 4
44 | -3.8429734110832214e-01
45 |
46 | <_>
47 |
48 | 0 -1 7 -3.1275503337383270e-02
49 |
50 | 7.2270745038986206e-01 -9.2253524065017700e-01
51 | <_>
52 |
53 | 0 -1 82 -2.7673570439219475e-03
54 |
55 | 5.2933508157730103e-01 -9.6118038892745972e-01
56 | <_>
57 |
58 | 0 -1 131 3.4157070331275463e-03
59 |
60 | -7.4300682544708252e-01 5.4688668251037598e-01
61 | <_>
62 |
63 | 0 -1 55 -2.1575871855020523e-02
64 |
65 | 7.5190967321395874e-01 -6.0541456937789917e-01
66 |
67 | <_>
68 | 5
69 | -5.6873494386672974e-01
70 |
71 | <_>
72 |
73 | 0 -1 131 3.0508034396916628e-03
74 |
75 | -6.5454542636871338e-01 7.0804595947265625e-01
76 | <_>
77 |
78 | 0 -1 70 2.0139521802775562e-04
79 |
80 | -8.8296043872833252e-01 4.6185281872749329e-01
81 | <_>
82 |
83 | 0 -1 89 -9.0918154455721378e-05
84 |
85 | -8.7925267219543457e-01 3.7363520264625549e-01
86 | <_>
87 |
88 | 0 -1 13 1.6122728120535612e-03
89 |
90 | 2.5894206762313843e-01 -9.5166242122650146e-01
91 | <_>
92 |
93 | 0 -1 47 -5.7838540669763461e-05
94 |
95 | -9.5217996835708618e-01 2.4426831305027008e-01
96 |
97 | <_>
98 | 5
99 | -6.2908536195755005e-01
100 |
101 | <_>
102 |
103 | 0 -1 9 -1.7118256073445082e-03
104 |
105 | 7.2586411237716675e-01 -5.7894736528396606e-01
106 | <_>
107 |
108 | 0 -1 105 -1.1031275789719075e-04
109 |
110 | 4.3248271942138672e-01 -9.0851449966430664e-01
111 | <_>
112 |
113 | 0 -1 125 -5.0491997972130775e-03
114 |
115 | 3.8107362389564514e-01 -7.9866659641265869e-01
116 | <_>
117 |
118 | 0 -1 27 -2.0671228412538767e-04
119 |
120 | -9.6724623441696167e-01 2.3175306618213654e-01
121 | <_>
122 |
123 | 0 -1 27 1.3348244829103351e-04
124 |
125 | 2.4554981291294098e-01 -9.7370201349258423e-01
126 |
127 | <_>
128 | 5
129 | -1.1076694726943970e+00
130 |
131 | <_>
132 |
133 | 0 -1 68 1.3001039042137563e-04
134 |
135 | 5.7551825046539307e-01 -9.7860962152481079e-01
136 | <_>
137 |
138 | 0 -1 30 -3.3500225981697440e-05
139 |
140 | -8.6870771646499634e-01 3.9780837297439575e-01
141 | <_>
142 |
143 | 0 -1 44 -2.0558938384056091e-02
144 |
145 | 6.1390924453735352e-01 -5.1743024587631226e-01
146 | <_>
147 |
148 | 0 -1 4 -3.0245352536439896e-03
149 |
150 | 5.2975594997406006e-01 -6.3342809677124023e-01
151 | <_>
152 |
153 | 0 -1 133 2.1721082157455385e-04
154 |
155 | 3.3637839555740356e-01 -9.9252820014953613e-01
156 |
157 | <_>
158 | 6
159 | -1.1980488300323486e+00
160 |
161 | <_>
162 |
163 | 0 -1 117 -4.2530021164566278e-04
164 |
165 | -8.3193278312683105e-01 6.2162160873413086e-01
166 | <_>
167 |
168 | 0 -1 123 1.3861652405466884e-04
169 |
170 | 3.6078250408172607e-01 -9.2338705062866211e-01
171 | <_>
172 |
173 | 0 -1 109 -1.2715373188257217e-02
174 |
175 | 5.5411911010742188e-01 -5.1542079448699951e-01
176 | <_>
177 |
178 | 0 -1 14 -2.6816513855010271e-03
179 |
180 | 4.4698977470397949e-01 -6.7423629760742188e-01
181 | <_>
182 |
183 | 0 -1 50 -6.0114997904747725e-04
184 |
185 | -9.4970852136611938e-01 2.5682422518730164e-01
186 | <_>
187 |
188 | 0 -1 39 5.3423915232997388e-05
189 |
190 | 2.3531873524188995e-01 -9.1529178619384766e-01
191 |
192 | <_>
193 | 4
194 | 5.2624002099037170e-02
195 |
196 | <_>
197 |
198 | 0 -1 52 1.2321374379098415e-04
199 |
200 | 5.9589743614196777e-01 -8.0444443225860596e-01
201 | <_>
202 |
203 | 0 -1 122 -2.9269425431266427e-04
204 |
205 | -9.2455589771270752e-01 3.2840943336486816e-01
206 | <_>
207 |
208 | 0 -1 51 -2.5443063350394368e-04
209 |
210 | -8.6902695894241333e-01 2.9218551516532898e-01
211 | <_>
212 |
213 | 0 -1 62 -2.8406906494637951e-05
214 |
215 | -9.2562615871429443e-01 2.3647351562976837e-01
216 |
217 | <_>
218 | 6
219 | -5.5691772699356079e-01
220 |
221 | <_>
222 |
223 | 0 -1 96 -1.7651211237534881e-04
224 |
225 | 5.6299215555191040e-01 -9.3478262424468994e-01
226 | <_>
227 |
228 | 0 -1 59 -2.6983745396137238e-02
229 |
230 | 4.8550808429718018e-01 -5.7044625282287598e-01
231 | <_>
232 |
233 | 0 -1 25 -8.2707917317748070e-04
234 |
235 | -7.5436908006668091e-01 3.9481931924819946e-01
236 | <_>
237 |
238 | 0 -1 74 -3.3318206667900085e-02
239 |
240 | 4.4258514046669006e-01 -6.2007123231887817e-01
241 | <_>
242 |
243 | 0 -1 24 -4.0659449994564056e-02
244 |
245 | 6.7747306823730469e-01 -4.1924440860748291e-01
246 | <_>
247 |
248 | 0 -1 116 1.7721638141665608e-04
249 |
250 | 2.7273333072662354e-01 -9.6762365102767944e-01
251 |
252 | <_>
253 | 9
254 | -1.1224768161773682e+00
255 |
256 | <_>
257 |
258 | 0 -1 15 2.2653568885289133e-04
259 |
260 | 5.8097165822982788e-01 -8.2075470685958862e-01
261 | <_>
262 |
263 | 0 -1 135 1.1001355014741421e-03
264 |
265 | -5.2033698558807373e-01 5.0861692428588867e-01
266 | <_>
267 |
268 | 0 -1 81 -6.0721102636307478e-04
269 |
270 | 4.4948601722717285e-01 -6.5971684455871582e-01
271 | <_>
272 |
273 | 0 -1 3 -2.0594703964889050e-03
274 |
275 | 4.6075913310050964e-01 -5.4609370231628418e-01
276 | <_>
277 |
278 | 0 -1 58 -2.5371506810188293e-02
279 |
280 | 4.1999927163124084e-01 -5.8192580938339233e-01
281 | <_>
282 |
283 | 0 -1 60 2.7266819961369038e-04
284 |
285 | -8.1050950288772583e-01 3.2997298240661621e-01
286 | <_>
287 |
288 | 0 -1 113 9.2536982265301049e-05
289 |
290 | 2.2872088849544525e-01 -9.1734880208969116e-01
291 | <_>
292 |
293 | 0 -1 114 -4.3776165693998337e-04
294 |
295 | -9.4291299581527710e-01 2.3099844157695770e-01
296 | <_>
297 |
298 | 0 -1 75 -5.9191461332375184e-06
299 |
300 | 2.7944248914718628e-01 -8.0519253015518188e-01
301 |
302 | <_>
303 | 6
304 | -6.3339114189147949e-01
305 |
306 | <_>
307 |
308 | 0 -1 106 -7.6895457823411562e-06
309 |
310 | 5.5734407901763916e-01 -7.4757283926010132e-01
311 | <_>
312 |
313 | 0 -1 65 2.1155076101422310e-02
314 |
315 | -3.9223381876945496e-01 5.5961626768112183e-01
316 | <_>
317 |
318 | 0 -1 94 7.9423858551308513e-05
319 |
320 | 3.9795845746994019e-01 -7.4542719125747681e-01
321 | <_>
322 |
323 | 0 -1 132 -1.1796997860074043e-03
324 |
325 | 4.2678046226501465e-01 -5.6197750568389893e-01
326 | <_>
327 |
328 | 0 -1 41 -7.5357063906267285e-05
329 |
330 | -8.3983147144317627e-01 2.6857435703277588e-01
331 | <_>
332 |
333 | 0 -1 57 -3.2889500260353088e-02
334 |
335 | 5.5841594934463501e-01 -4.1907948255538940e-01
336 |
337 | <_>
338 | 7
339 | -4.2552766203880310e-01
340 |
341 | <_>
342 |
343 | 0 -1 119 9.2608337581623346e-05
344 |
345 | 5.4645353555679321e-01 -7.3869347572326660e-01
346 | <_>
347 |
348 | 0 -1 110 1.0352894896641374e-03
349 |
350 | -8.2881146669387817e-01 3.2366982102394104e-01
351 | <_>
352 |
353 | 0 -1 103 -1.0691542411223054e-04
354 |
355 | 2.0922286808490753e-01 -9.2345798015594482e-01
356 | <_>
357 |
358 | 0 -1 54 -5.0032680155709386e-04
359 |
360 | -8.8800156116485596e-01 2.2651962935924530e-01
361 | <_>
362 |
363 | 0 -1 115 1.6262420103885233e-04
364 |
365 | 1.4117328822612762e-01 -9.3459022045135498e-01
366 | <_>
367 |
368 | 0 -1 61 -2.5965880922740325e-05
369 |
370 | 2.1897254884243011e-01 -7.9763734340667725e-01
371 | <_>
372 |
373 | 0 -1 77 6.8813023972325027e-06
374 |
375 | -6.1619734764099121e-01 3.2628849148750305e-01
376 |
377 | <_>
378 | 9
379 | -1.1347165107727051e+00
380 |
381 | <_>
382 |
383 | 0 -1 5 -2.3673165123909712e-03
384 |
385 | 5.5375254154205322e-01 -6.8224298954010010e-01
386 | <_>
387 |
388 | 0 -1 29 -2.2772494412492961e-04
389 |
390 | -8.4745872020721436e-01 2.9582437872886658e-01
391 | <_>
392 |
393 | 0 -1 98 -4.9833563389256597e-04
394 |
395 | 2.7509352564811707e-01 -7.9623371362686157e-01
396 | <_>
397 |
398 | 0 -1 0 -2.4950597435235977e-03
399 |
400 | 3.8401266932487488e-01 -6.0704624652862549e-01
401 | <_>
402 |
403 | 0 -1 22 -1.3221341650933027e-03
404 |
405 | 4.1101419925689697e-01 -5.1578378677368164e-01
406 | <_>
407 |
408 | 0 -1 28 3.5101475077681243e-05
409 |
410 | -7.7102130651473999e-01 2.6260149478912354e-01
411 | <_>
412 |
413 | 0 -1 32 -1.3225190341472626e-03
414 |
415 | 5.4373836517333984e-01 -4.5510113239288330e-01
416 | <_>
417 |
418 | 0 -1 84 -1.7676851712167263e-04
419 |
420 | -9.6311193704605103e-01 2.2451019287109375e-01
421 | <_>
422 |
423 | 0 -1 71 -8.4284460172057152e-05
424 |
425 | 1.7425289750099182e-01 -9.4447797536849976e-01
426 |
427 | <_>
428 | 7
429 | -9.9081629514694214e-01
430 |
431 | <_>
432 |
433 | 0 -1 21 -3.0416352674365044e-03
434 |
435 | -8.3892619609832764e-01 4.9952426552772522e-01
436 | <_>
437 |
438 | 0 -1 3 -3.2510135788470507e-03
439 |
440 | 5.0745302438735962e-01 -4.0581086277961731e-01
441 | <_>
442 |
443 | 0 -1 49 -2.7112653478980064e-02
444 |
445 | 4.7979277372360229e-01 -4.1902795433998108e-01
446 | <_>
447 |
448 | 0 -1 99 -3.3669878030195832e-04
449 |
450 | 2.6228913664817810e-01 -8.6885607242584229e-01
451 | <_>
452 |
453 | 0 -1 85 -6.2557175988331437e-04
454 |
455 | 2.9307699203491211e-01 -6.6085666418075562e-01
456 | <_>
457 |
458 | 0 -1 129 5.7841241359710693e-03
459 |
460 | -6.2330943346023560e-01 3.6265468597412109e-01
461 | <_>
462 |
463 | 0 -1 90 -1.4100826228968799e-04
464 |
465 | 2.9955452680587769e-01 -6.2958890199661255e-01
466 |
467 | <_>
468 | 9
469 | -9.4170409440994263e-01
470 |
471 | <_>
472 |
473 | 0 -1 26 -1.0752207890618593e-04
474 |
475 | -9.1240876913070679e-01 4.9388521909713745e-01
476 | <_>
477 |
478 | 0 -1 17 5.4902193369343877e-05
479 |
480 | 2.3044449090957642e-01 -8.0160576105117798e-01
481 | <_>
482 |
483 | 0 -1 11 2.9557880479842424e-03
484 |
485 | 1.8769079446792603e-01 -8.3098721504211426e-01
486 | <_>
487 |
488 | 0 -1 42 7.4737065006047487e-05
489 |
490 | -8.2648044824600220e-01 2.1240779757499695e-01
491 | <_>
492 |
493 | 0 -1 31 9.2128422111272812e-03
494 |
495 | 3.3417859673500061e-01 -6.1713069677352905e-01
496 | <_>
497 |
498 | 0 -1 80 -5.6273087859153748e-02
499 |
500 | 5.8601039648056030e-01 -4.3135163187980652e-01
501 | <_>
502 |
503 | 0 -1 46 -4.5482371933758259e-04
504 |
505 | -6.7318749427795410e-01 3.0672127008438110e-01
506 | <_>
507 |
508 | 0 -1 6 5.2398513071238995e-04
509 |
510 | -3.4565076231956482e-01 6.4438700675964355e-01
511 | <_>
512 |
513 | 0 -1 107 -1.2128071393817663e-03
514 |
515 | 4.5617285370826721e-01 -4.6154832839965820e-01
516 |
517 | <_>
518 | 9
519 | -1.1487864255905151e+00
520 |
521 | <_>
522 |
523 | 0 -1 112 1.6462607891298831e-04
524 |
525 | 5.0566035509109497e-01 -9.7142857313156128e-01
526 | <_>
527 |
528 | 0 -1 35 6.6264437919016927e-05
529 |
530 | -7.6834422349929810e-01 2.9602336883544922e-01
531 | <_>
532 |
533 | 0 -1 121 -2.4305414990521967e-04
534 |
535 | -9.6120482683181763e-01 1.8722063302993774e-01
536 | <_>
537 |
538 | 0 -1 120 3.8327336369547993e-05
539 |
540 | 2.6676881313323975e-01 -7.1165627241134644e-01
541 | <_>
542 |
543 | 0 -1 34 -9.7052147611975670e-04
544 |
545 | -8.1342124938964844e-01 1.7853492498397827e-01
546 | <_>
547 |
548 | 0 -1 127 -6.1740156525047496e-06
549 |
550 | 1.8331494927406311e-01 -8.3237200975418091e-01
551 | <_>
552 |
553 | 0 -1 93 -1.3953330380900297e-05
554 |
555 | 3.1803333759307861e-01 -5.3453892469406128e-01
556 | <_>
557 |
558 | 0 -1 8 -1.5846057794988155e-03
559 |
560 | 2.7946412563323975e-01 -6.6444915533065796e-01
561 | <_>
562 |
563 | 0 -1 88 -1.6342351213097572e-02
564 |
565 | 5.1273310184478760e-01 -3.8046845793724060e-01
566 |
567 | <_>
568 | 8
569 | -1.2016570568084717e+00
570 |
571 | <_>
572 |
573 | 0 -1 12 -1.0847686789929867e-03
574 |
575 | 5.0191569328308105e-01 -7.9487180709838867e-01
576 | <_>
577 |
578 | 0 -1 102 -5.1427626749500632e-04
579 |
580 | 2.4128369987010956e-01 -8.1255680322647095e-01
581 | <_>
582 |
583 | 0 -1 23 -1.8383808434009552e-02
584 |
585 | 6.4674752950668335e-01 -2.9313954710960388e-01
586 | <_>
587 |
588 | 0 -1 63 -2.5105761596933007e-04
589 |
590 | -9.2148679494857788e-01 2.2645594179630280e-01
591 | <_>
592 |
593 | 0 -1 67 -2.1741576492786407e-02
594 |
595 | 5.2215409278869629e-01 -4.1079953312873840e-01
596 | <_>
597 |
598 | 0 -1 124 -3.4985631704330444e-02
599 |
600 | 3.2559829950332642e-01 -5.9795075654983521e-01
601 | <_>
602 |
603 | 0 -1 79 1.4734102878719568e-03
604 |
605 | -3.5487082600593567e-01 6.0526216030120850e-01
606 | <_>
607 |
608 | 0 -1 45 2.0769808441400528e-02
609 |
610 | -5.1455181837081909e-01 6.2268215417861938e-01
611 |
612 | <_>
613 | 10
614 | -7.2308760881423950e-01
615 |
616 | <_>
617 |
618 | 0 -1 128 9.5574898296035826e-05
619 |
620 | 4.8044693470001221e-01 -9.2063492536544800e-01
621 | <_>
622 |
623 | 0 -1 53 2.8381973970681429e-04
624 |
625 | -8.5529482364654541e-01 2.6645165681838989e-01
626 | <_>
627 |
628 | 0 -1 48 7.6030823402106762e-03
629 |
630 | 3.0882462859153748e-01 -5.7422232627868652e-01
631 | <_>
632 |
633 | 0 -1 78 1.6646791249513626e-02
634 |
635 | -3.3542081713676453e-01 5.9531646966934204e-01
636 | <_>
637 |
638 | 0 -1 19 -1.2090168893337250e-03
639 |
640 | -5.4142624139785767e-01 4.1354033350944519e-01
641 | <_>
642 |
643 | 0 -1 95 -1.6771969967521727e-04
644 |
645 | 2.9962882399559021e-01 -7.2322082519531250e-01
646 | <_>
647 |
648 | 0 -1 126 2.8689959435723722e-05
649 |
650 | 2.6773402094841003e-01 -6.5903985500335693e-01
651 | <_>
652 |
653 | 0 -1 20 6.6815223544836044e-03
654 |
655 | 2.3754282295703888e-01 -7.3575747013092041e-01
656 | <_>
657 |
658 | 0 -1 97 -3.4236656501889229e-03
659 |
660 | 3.6159345507621765e-01 -7.5826853513717651e-01
661 | <_>
662 |
663 | 0 -1 33 1.2732727918773890e-03
664 |
665 | -6.0256642103195190e-01 2.6525798439979553e-01
666 |
667 | <_>
668 | 9
669 | -9.5814341306686401e-01
670 |
671 | <_>
672 |
673 | 0 -1 37 -2.1262167138047516e-04
674 |
675 | -6.6829270124435425e-01 5.3969848155975342e-01
676 | <_>
677 |
678 | 0 -1 108 -1.6208652596105821e-05
679 |
680 | 2.2586794197559357e-01 -8.7113767862319946e-01
681 | <_>
682 |
683 | 0 -1 36 1.1260545579716563e-04
684 |
685 | 2.0030571520328522e-01 -8.3372658491134644e-01
686 | <_>
687 |
688 | 0 -1 134 5.7705468498170376e-04
689 |
690 | -7.9086685180664062e-01 2.3868314921855927e-01
691 | <_>
692 |
693 | 0 -1 56 -1.0837236186489463e-03
694 |
695 | 3.2010525465011597e-01 -5.3199630975723267e-01
696 | <_>
697 |
698 | 0 -1 111 -1.8599703616928309e-04
699 |
700 | 3.0578160285949707e-01 -5.5640172958374023e-01
701 | <_>
702 |
703 | 0 -1 64 -1.5118342707864940e-04
704 |
705 | -8.2822650671005249e-01 1.9903132319450378e-01
706 | <_>
707 |
708 | 0 -1 83 -4.1448799893260002e-03
709 |
710 | -4.3385601043701172e-01 4.1304954886436462e-01
711 | <_>
712 |
713 | 0 -1 69 8.5696432506665587e-05
714 |
715 | -8.7357699871063232e-01 1.9455631077289581e-01
716 |
717 | <_>
718 | 7
719 | -4.0030923485755920e-01
720 |
721 | <_>
722 |
723 | 0 -1 38 1.7059991369023919e-03
724 |
725 | -8.5915493965148926e-01 4.9338373541831970e-01
726 | <_>
727 |
728 | 0 -1 10 -1.6846917569637299e-03
729 |
730 | 4.4672420620918274e-01 -5.3707736730575562e-01
731 | <_>
732 |
733 | 0 -1 40 -5.8449420612305403e-04
734 |
735 | -7.7790278196334839e-01 2.7698427438735962e-01
736 | <_>
737 |
738 | 0 -1 73 -2.4571600370109081e-03
739 |
740 | 2.7406308054924011e-01 -6.4117157459259033e-01
741 | <_>
742 |
743 | 0 -1 43 -5.5974610149860382e-02
744 |
745 | 5.5524331331253052e-01 -3.8878279924392700e-01
746 | <_>
747 |
748 | 0 -1 76 -1.9325233995914459e-02
749 |
750 | 6.0979032516479492e-01 -4.1731300950050354e-01
751 | <_>
752 |
753 | 0 -1 130 -1.2540252646431327e-04
754 |
755 | -9.4642448425292969e-01 2.3837846517562866e-01
756 |
757 | <_>
758 | 11
759 | -1.1879638433456421e+00
760 |
761 | <_>
762 |
763 | 0 -1 104 -2.4424141156487167e-04
764 |
765 | 5.1319646835327148e-01 -7.0621466636657715e-01
766 | <_>
767 |
768 | 0 -1 16 1.4977643149904907e-04
769 |
770 | 2.3306415975093842e-01 -8.6206024885177612e-01
771 | <_>
772 |
773 | 0 -1 2 -4.3414589017629623e-03
774 |
775 | 2.2676137089729309e-01 -7.0749777555465698e-01
776 | <_>
777 |
778 | 0 -1 100 -3.9709286647848785e-04
779 |
780 | 5.0981646776199341e-01 -3.8439393043518066e-01
781 | <_>
782 |
783 | 0 -1 66 -3.4664515405893326e-02
784 |
785 | 8.3707231283187866e-01 -2.4584873020648956e-01
786 | <_>
787 |
788 | 0 -1 18 -7.4850942473858595e-04
789 |
790 | -6.3968110084533691e-01 3.3969947695732117e-01
791 | <_>
792 |
793 | 0 -1 101 9.9156214855611324e-04
794 |
795 | -3.3157727122306824e-01 6.2442117929458618e-01
796 | <_>
797 |
798 | 0 -1 72 9.4228598754853010e-06
799 |
800 | -8.9700716733932495e-01 2.3467294871807098e-01
801 | <_>
802 |
803 | 0 -1 87 -1.0972914751619101e-03
804 |
805 | 4.7398251295089722e-01 -3.7037673592567444e-01
806 | <_>
807 |
808 | 0 -1 118 3.9175640267785639e-05
809 |
810 | 2.4726134538650513e-01 -7.2340226173400879e-01
811 | <_>
812 |
813 | 0 -1 86 -8.2670961273834109e-04
814 |
815 | 2.7788189053535461e-01 -6.1641848087310791e-01
816 |
817 | <_>
818 |
819 | <_>
820 | 0 16 19 8 -1.
821 | <_>
822 | 0 18 19 4 2.
823 | 0
824 | <_>
825 |
826 | <_>
827 | 0 17 12 10 -1.
828 | <_>
829 | 3 17 6 10 2.
830 | 0
831 | <_>
832 |
833 | <_>
834 | 0 19 6 9 -1.
835 | <_>
836 | 3 19 3 9 2.
837 | 0
838 | <_>
839 |
840 | <_>
841 | 0 20 6 4 -1.
842 | <_>
843 | 2 20 2 4 3.
844 | 0
845 | <_>
846 |
847 | <_>
848 | 0 20 4 6 -1.
849 | <_>
850 | 2 20 2 6 2.
851 | 0
852 | <_>
853 |
854 | <_>
855 | 0 20 8 7 -1.
856 | <_>
857 | 4 20 4 7 2.
858 | 0
859 | <_>
860 |
861 | <_>
862 | 0 22 2 4 -1.
863 | <_>
864 | 0 22 1 2 2.
865 | <_>
866 | 1 24 1 2 2.
867 | 0
868 | <_>
869 |
870 | <_>
871 | 1 13 12 21 -1.
872 | <_>
873 | 5 20 4 7 9.
874 | 0
875 | <_>
876 |
877 | <_>
878 | 1 18 6 8 -1.
879 | <_>
880 | 3 18 2 8 3.
881 | 0
882 | <_>
883 |
884 | <_>
885 | 1 18 6 9 -1.
886 | <_>
887 | 3 18 2 9 3.
888 | 0
889 | <_>
890 |
891 | <_>
892 | 2 17 15 6 -1.
893 | <_>
894 | 2 19 15 2 3.
895 | 0
896 | <_>
897 |
898 | <_>
899 | 2 23 22 2 -1.
900 | <_>
901 | 13 23 11 2 2.
902 | 0
903 | <_>
904 |
905 | <_>
906 | 3 17 9 6 -1.
907 | <_>
908 | 6 19 3 2 9.
909 | 0
910 | <_>
911 |
912 | <_>
913 | 3 24 20 1 -1.
914 | <_>
915 | 13 24 10 1 2.
916 | 0
917 | <_>
918 |
919 | <_>
920 | 4 15 13 9 -1.
921 | <_>
922 | 4 18 13 3 3.
923 | 0
924 | <_>
925 |
926 | <_>
927 | 4 18 3 9 -1.
928 | <_>
929 | 5 18 1 9 3.
930 | 0
931 | <_>
932 |
933 | <_>
934 | 4 18 2 4 -1.
935 | <_>
936 | 4 20 2 2 2.
937 | 0
938 | <_>
939 |
940 | <_>
941 | 4 20 2 2 -1.
942 | <_>
943 | 4 21 2 1 2.
944 | 0
945 | <_>
946 |
947 | <_>
948 | 4 20 6 1 -1.
949 | <_>
950 | 6 22 2 1 3.
951 | 1
952 | <_>
953 |
954 | <_>
955 | 4 20 8 1 -1.
956 | <_>
957 | 6 22 4 1 2.
958 | 1
959 | <_>
960 |
961 | <_>
962 | 4 23 15 2 -1.
963 | <_>
964 | 9 23 5 2 3.
965 | 0
966 | <_>
967 |
968 | <_>
969 | 4 25 18 1 -1.
970 | <_>
971 | 13 25 9 1 2.
972 | 0
973 | <_>
974 |
975 | <_>
976 | 4 28 13 4 -1.
977 | <_>
978 | 3 29 13 2 2.
979 | 1
980 | <_>
981 |
982 | <_>
983 | 4 36 16 9 -1.
984 | <_>
985 | 4 39 16 3 3.
986 | 0
987 | <_>
988 |
989 | <_>
990 | 5 1 16 10 -1.
991 | <_>
992 | 5 6 16 5 2.
993 | 0
994 | <_>
995 |
996 | <_>
997 | 5 19 3 6 -1.
998 | <_>
999 | 6 21 1 2 9.
1000 | 0
1001 | <_>
1002 |
1003 | <_>
1004 | 5 20 1 4 -1.
1005 | <_>
1006 | 5 21 1 2 2.
1007 | 0
1008 | <_>
1009 |
1010 | <_>
1011 | 5 20 2 4 -1.
1012 | <_>
1013 | 5 21 2 2 2.
1014 | 0
1015 | <_>
1016 |
1017 | <_>
1018 | 5 21 4 4 -1.
1019 | <_>
1020 | 7 21 2 4 2.
1021 | 0
1022 | <_>
1023 |
1024 | <_>
1025 | 5 22 3 1 -1.
1026 | <_>
1027 | 6 23 1 1 3.
1028 | 1
1029 | <_>
1030 |
1031 | <_>
1032 | 5 23 2 1 -1.
1033 | <_>
1034 | 6 23 1 1 2.
1035 | 0
1036 | <_>
1037 |
1038 | <_>
1039 | 5 23 15 2 -1.
1040 | <_>
1041 | 10 23 5 2 3.
1042 | 0
1043 | <_>
1044 |
1045 | <_>
1046 | 5 36 16 1 -1.
1047 | <_>
1048 | 9 36 8 1 2.
1049 | 0
1050 | <_>
1051 |
1052 | <_>
1053 | 6 7 4 16 -1.
1054 | <_>
1055 | 6 7 2 8 2.
1056 | <_>
1057 | 8 15 2 8 2.
1058 | 0
1059 | <_>
1060 |
1061 | <_>
1062 | 6 17 1 9 -1.
1063 | <_>
1064 | 6 20 1 3 3.
1065 | 0
1066 | <_>
1067 |
1068 | <_>
1069 | 6 17 2 12 -1.
1070 | <_>
1071 | 7 17 1 12 2.
1072 | 0
1073 | <_>
1074 |
1075 | <_>
1076 | 6 21 3 1 -1.
1077 | <_>
1078 | 7 22 1 1 3.
1079 | 1
1080 | <_>
1081 |
1082 | <_>
1083 | 6 22 3 1 -1.
1084 | <_>
1085 | 7 23 1 1 3.
1086 | 1
1087 | <_>
1088 |
1089 | <_>
1090 | 6 22 12 3 -1.
1091 | <_>
1092 | 10 22 4 3 3.
1093 | 0
1094 | <_>
1095 |
1096 | <_>
1097 | 6 23 2 2 -1.
1098 | <_>
1099 | 6 23 1 1 2.
1100 | <_>
1101 | 7 24 1 1 2.
1102 | 0
1103 | <_>
1104 |
1105 | <_>
1106 | 6 23 6 1 -1.
1107 | <_>
1108 | 8 23 2 1 3.
1109 | 0
1110 | <_>
1111 |
1112 | <_>
1113 | 6 24 2 3 -1.
1114 | <_>
1115 | 7 24 1 3 2.
1116 | 0
1117 | <_>
1118 |
1119 | <_>
1120 | 6 24 10 1 -1.
1121 | <_>
1122 | 11 24 5 1 2.
1123 | 0
1124 | <_>
1125 |
1126 | <_>
1127 | 7 0 12 30 -1.
1128 | <_>
1129 | 7 10 12 10 3.
1130 | 0
1131 | <_>
1132 |
1133 | <_>
1134 | 7 1 11 16 -1.
1135 | <_>
1136 | 7 5 11 8 2.
1137 | 0
1138 | <_>
1139 |
1140 | <_>
1141 | 7 12 10 18 -1.
1142 | <_>
1143 | 7 21 10 9 2.
1144 | 0
1145 | <_>
1146 |
1147 | <_>
1148 | 7 16 3 3 -1.
1149 | <_>
1150 | 8 17 1 3 3.
1151 | 1
1152 | <_>
1153 |
1154 | <_>
1155 | 7 16 2 11 -1.
1156 | <_>
1157 | 8 16 1 11 2.
1158 | 0
1159 | <_>
1160 |
1161 | <_>
1162 | 7 16 12 15 -1.
1163 | <_>
1164 | 11 21 4 5 9.
1165 | 0
1166 | <_>
1167 |
1168 | <_>
1169 | 7 16 11 15 -1.
1170 | <_>
1171 | 7 21 11 5 3.
1172 | 0
1173 | <_>
1174 |
1175 | <_>
1176 | 7 20 2 3 -1.
1177 | <_>
1178 | 6 21 2 1 3.
1179 | 1
1180 | <_>
1181 |
1182 | <_>
1183 | 7 21 1 3 -1.
1184 | <_>
1185 | 6 22 1 1 3.
1186 | 1
1187 | <_>
1188 |
1189 | <_>
1190 | 7 21 2 3 -1.
1191 | <_>
1192 | 6 22 2 1 3.
1193 | 1
1194 | <_>
1195 |
1196 | <_>
1197 | 7 23 9 2 -1.
1198 | <_>
1199 | 10 23 3 2 3.
1200 | 0
1201 | <_>
1202 |
1203 | <_>
1204 | 7 24 6 1 -1.
1205 | <_>
1206 | 9 24 2 1 3.
1207 | 0
1208 | <_>
1209 |
1210 | <_>
1211 | 7 29 10 16 -1.
1212 | <_>
1213 | 7 33 10 8 2.
1214 | 0
1215 | <_>
1216 |
1217 | <_>
1218 | 7 35 12 9 -1.
1219 | <_>
1220 | 11 35 4 9 3.
1221 | 0
1222 | <_>
1223 |
1224 | <_>
1225 | 8 0 10 10 -1.
1226 | <_>
1227 | 8 5 10 5 2.
1228 | 0
1229 | <_>
1230 |
1231 | <_>
1232 | 8 12 9 21 -1.
1233 | <_>
1234 | 8 19 9 7 3.
1235 | 0
1236 | <_>
1237 |
1238 | <_>
1239 | 8 14 10 18 -1.
1240 | <_>
1241 | 8 20 10 6 3.
1242 | 0
1243 | <_>
1244 |
1245 | <_>
1246 | 8 19 2 8 -1.
1247 | <_>
1248 | 9 19 1 8 2.
1249 | 0
1250 | <_>
1251 |
1252 | <_>
1253 | 8 20 3 3 -1.
1254 | <_>
1255 | 7 21 3 1 3.
1256 | 1
1257 | <_>
1258 |
1259 | <_>
1260 | 8 21 6 1 -1.
1261 | <_>
1262 | 10 21 2 1 3.
1263 | 0
1264 | <_>
1265 |
1266 | <_>
1267 | 8 23 14 2 -1.
1268 | <_>
1269 | 8 23 7 1 2.
1270 | <_>
1271 | 15 24 7 1 2.
1272 | 0
1273 | <_>
1274 |
1275 | <_>
1276 | 8 29 2 2 -1.
1277 | <_>
1278 | 9 29 1 2 2.
1279 | 0
1280 | <_>
1281 |
1282 | <_>
1283 | 8 35 13 10 -1.
1284 | <_>
1285 | 8 40 13 5 2.
1286 | 0
1287 | <_>
1288 |
1289 | <_>
1290 | 9 14 8 20 -1.
1291 | <_>
1292 | 9 14 4 10 2.
1293 | <_>
1294 | 13 24 4 10 2.
1295 | 0
1296 | <_>
1297 |
1298 | <_>
1299 | 9 14 8 16 -1.
1300 | <_>
1301 | 9 18 8 8 2.
1302 | 0
1303 | <_>
1304 |
1305 | <_>
1306 | 9 20 1 4 -1.
1307 | <_>
1308 | 8 21 1 2 2.
1309 | 1
1310 | <_>
1311 |
1312 | <_>
1313 | 9 20 2 4 -1.
1314 | <_>
1315 | 10 20 1 4 2.
1316 | 0
1317 | <_>
1318 |
1319 | <_>
1320 | 9 20 2 7 -1.
1321 | <_>
1322 | 10 20 1 7 2.
1323 | 0
1324 | <_>
1325 |
1326 | <_>
1327 | 9 21 2 4 -1.
1328 | <_>
1329 | 8 22 2 2 2.
1330 | 1
1331 | <_>
1332 |
1333 | <_>
1334 | 9 21 6 4 -1.
1335 | <_>
1336 | 11 21 2 4 3.
1337 | 0
1338 | <_>
1339 |
1340 | <_>
1341 | 9 21 5 12 -1.
1342 | <_>
1343 | 9 25 5 4 3.
1344 | 0
1345 | <_>
1346 |
1347 | <_>
1348 | 9 21 15 21 -1.
1349 | <_>
1350 | 14 28 5 7 9.
1351 | 0
1352 | <_>
1353 |
1354 | <_>
1355 | 9 23 2 2 -1.
1356 | <_>
1357 | 9 24 2 1 2.
1358 | 0
1359 | <_>
1360 |
1361 | <_>
1362 | 10 12 15 9 -1.
1363 | <_>
1364 | 7 15 15 3 3.
1365 | 1
1366 | <_>
1367 |
1368 | <_>
1369 | 10 14 10 4 -1.
1370 | <_>
1371 | 10 15 10 2 2.
1372 | 0
1373 | <_>
1374 |
1375 | <_>
1376 | 10 15 6 24 -1.
1377 | <_>
1378 | 10 21 6 12 2.
1379 | 0
1380 | <_>
1381 |
1382 | <_>
1383 | 10 20 6 8 -1.
1384 | <_>
1385 | 10 20 3 8 2.
1386 | 1
1387 | <_>
1388 |
1389 | <_>
1390 | 10 21 9 24 -1.
1391 | <_>
1392 | 13 29 3 8 9.
1393 | 0
1394 | <_>
1395 |
1396 | <_>
1397 | 10 21 5 4 -1.
1398 | <_>
1399 | 9 22 5 2 2.
1400 | 1
1401 | <_>
1402 |
1403 | <_>
1404 | 11 21 10 4 -1.
1405 | <_>
1406 | 16 21 5 4 2.
1407 | 0
1408 | <_>
1409 |
1410 | <_>
1411 | 11 22 14 3 -1.
1412 | <_>
1413 | 18 22 7 3 2.
1414 | 0
1415 | <_>
1416 |
1417 | <_>
1418 | 11 23 3 2 -1.
1419 | <_>
1420 | 12 23 1 2 3.
1421 | 0
1422 | <_>
1423 |
1424 | <_>
1425 | 11 24 9 6 -1.
1426 | <_>
1427 | 14 26 3 2 9.
1428 | 0
1429 | <_>
1430 |
1431 | <_>
1432 | 11 26 12 7 -1.
1433 | <_>
1434 | 14 26 6 7 2.
1435 | 0
1436 | <_>
1437 |
1438 | <_>
1439 | 12 3 3 10 -1.
1440 | <_>
1441 | 13 4 1 10 3.
1442 | 1
1443 | <_>
1444 |
1445 | <_>
1446 | 12 8 9 15 -1.
1447 | <_>
1448 | 15 13 3 5 9.
1449 | 0
1450 | <_>
1451 |
1452 | <_>
1453 | 12 22 2 2 -1.
1454 | <_>
1455 | 12 22 2 1 2.
1456 | 1
1457 | <_>
1458 |
1459 | <_>
1460 | 13 16 2 6 -1.
1461 | <_>
1462 | 13 18 2 2 3.
1463 | 0
1464 | <_>
1465 |
1466 | <_>
1467 | 13 16 8 15 -1.
1468 | <_>
1469 | 17 16 4 15 2.
1470 | 0
1471 | <_>
1472 |
1473 | <_>
1474 | 13 19 12 12 -1.
1475 | <_>
1476 | 17 23 4 4 9.
1477 | 0
1478 | <_>
1479 |
1480 | <_>
1481 | 13 20 1 3 -1.
1482 | <_>
1483 | 13 21 1 1 3.
1484 | 0
1485 | <_>
1486 |
1487 | <_>
1488 | 13 22 8 1 -1.
1489 | <_>
1490 | 15 22 4 1 2.
1491 | 0
1492 | <_>
1493 |
1494 | <_>
1495 | 13 25 8 4 -1.
1496 | <_>
1497 | 15 25 4 4 2.
1498 | 0
1499 | <_>
1500 |
1501 | <_>
1502 | 14 18 6 4 -1.
1503 | <_>
1504 | 17 18 3 4 2.
1505 | 0
1506 | <_>
1507 |
1508 | <_>
1509 | 14 19 6 8 -1.
1510 | <_>
1511 | 17 19 3 8 2.
1512 | 0
1513 | <_>
1514 |
1515 | <_>
1516 | 14 23 6 6 -1.
1517 | <_>
1518 | 16 25 2 2 9.
1519 | 0
1520 | <_>
1521 |
1522 | <_>
1523 | 14 24 6 1 -1.
1524 | <_>
1525 | 17 24 3 1 2.
1526 | 0
1527 | <_>
1528 |
1529 | <_>
1530 | 15 7 4 6 -1.
1531 | <_>
1532 | 15 7 2 3 2.
1533 | <_>
1534 | 17 10 2 3 2.
1535 | 0
1536 | <_>
1537 |
1538 | <_>
1539 | 15 15 8 1 -1.
1540 | <_>
1541 | 19 15 4 1 2.
1542 | 0
1543 | <_>
1544 |
1545 | <_>
1546 | 15 18 6 6 -1.
1547 | <_>
1548 | 17 20 2 2 9.
1549 | 0
1550 | <_>
1551 |
1552 | <_>
1553 | 15 23 6 3 -1.
1554 | <_>
1555 | 17 24 2 1 9.
1556 | 0
1557 | <_>
1558 |
1559 | <_>
1560 | 15 23 6 3 -1.
1561 | <_>
1562 | 17 23 2 3 3.
1563 | 0
1564 | <_>
1565 |
1566 | <_>
1567 | 15 24 4 1 -1.
1568 | <_>
1569 | 17 24 2 1 2.
1570 | 0
1571 | <_>
1572 |
1573 | <_>
1574 | 16 22 6 1 -1.
1575 | <_>
1576 | 19 22 3 1 2.
1577 | 0
1578 | <_>
1579 |
1580 | <_>
1581 | 17 6 4 12 -1.
1582 | <_>
1583 | 17 6 2 6 2.
1584 | <_>
1585 | 19 12 2 6 2.
1586 | 0
1587 | <_>
1588 |
1589 | <_>
1590 | 17 16 2 10 -1.
1591 | <_>
1592 | 18 16 1 10 2.
1593 | 0
1594 | <_>
1595 |
1596 | <_>
1597 | 17 16 8 13 -1.
1598 | <_>
1599 | 19 16 4 13 2.
1600 | 0
1601 | <_>
1602 |
1603 | <_>
1604 | 17 19 8 8 -1.
1605 | <_>
1606 | 21 19 4 8 2.
1607 | 0
1608 | <_>
1609 |
1610 | <_>
1611 | 17 23 3 2 -1.
1612 | <_>
1613 | 18 24 1 2 3.
1614 | 1
1615 | <_>
1616 |
1617 | <_>
1618 | 17 26 2 3 -1.
1619 | <_>
1620 | 18 26 1 3 2.
1621 | 0
1622 | <_>
1623 |
1624 | <_>
1625 | 18 17 2 6 -1.
1626 | <_>
1627 | 19 17 1 6 2.
1628 | 0
1629 | <_>
1630 |
1631 | <_>
1632 | 18 20 2 6 -1.
1633 | <_>
1634 | 19 20 1 6 2.
1635 | 0
1636 | <_>
1637 |
1638 | <_>
1639 | 18 20 2 8 -1.
1640 | <_>
1641 | 19 20 1 8 2.
1642 | 0
1643 | <_>
1644 |
1645 | <_>
1646 | 18 21 2 4 -1.
1647 | <_>
1648 | 18 21 1 2 2.
1649 | <_>
1650 | 19 23 1 2 2.
1651 | 0
1652 | <_>
1653 |
1654 | <_>
1655 | 18 22 3 3 -1.
1656 | <_>
1657 | 19 23 1 1 9.
1658 | 0
1659 | <_>
1660 |
1661 | <_>
1662 | 19 18 2 9 -1.
1663 | <_>
1664 | 20 18 1 9 2.
1665 | 0
1666 | <_>
1667 |
1668 | <_>
1669 | 19 19 2 5 -1.
1670 | <_>
1671 | 20 19 1 5 2.
1672 | 0
1673 | <_>
1674 |
1675 | <_>
1676 | 19 20 2 6 -1.
1677 | <_>
1678 | 20 20 1 6 2.
1679 | 0
1680 | <_>
1681 |
1682 | <_>
1683 | 19 21 2 4 -1.
1684 | <_>
1685 | 20 21 1 4 2.
1686 | 0
1687 | <_>
1688 |
1689 | <_>
1690 | 19 23 3 3 -1.
1691 | <_>
1692 | 19 24 3 1 3.
1693 | 0
1694 | <_>
1695 |
1696 | <_>
1697 | 19 24 1 3 -1.
1698 | <_>
1699 | 18 25 1 1 3.
1700 | 1
1701 | <_>
1702 |
1703 | <_>
1704 | 20 1 3 39 -1.
1705 | <_>
1706 | 21 14 1 13 9.
1707 | 0
1708 | <_>
1709 |
1710 | <_>
1711 | 20 5 1 36 -1.
1712 | <_>
1713 | 20 17 1 12 3.
1714 | 0
1715 | <_>
1716 |
1717 | <_>
1718 | 20 20 2 1 -1.
1719 | <_>
1720 | 21 20 1 1 2.
1721 | 0
1722 | <_>
1723 |
1724 | <_>
1725 | 20 20 3 4 -1.
1726 | <_>
1727 | 21 21 1 4 3.
1728 | 1
1729 | <_>
1730 |
1731 | <_>
1732 | 20 23 1 4 -1.
1733 | <_>
1734 | 20 24 1 2 2.
1735 | 0
1736 | <_>
1737 |
1738 | <_>
1739 | 21 18 4 9 -1.
1740 | <_>
1741 | 23 18 2 9 2.
1742 | 0
1743 | <_>
1744 |
1745 | <_>
1746 | 21 19 1 2 -1.
1747 | <_>
1748 | 21 19 1 1 2.
1749 | 1
1750 | <_>
1751 |
1752 | <_>
1753 | 21 19 4 8 -1.
1754 | <_>
1755 | 23 19 2 8 2.
1756 | 0
1757 | <_>
1758 |
1759 | <_>
1760 | 21 20 4 5 -1.
1761 | <_>
1762 | 22 20 2 5 2.
1763 | 0
1764 | <_>
1765 |
1766 | <_>
1767 | 21 21 1 4 -1.
1768 | <_>
1769 | 20 22 1 2 2.
1770 | 1
1771 | <_>
1772 |
1773 | <_>
1774 | 22 18 2 10 -1.
1775 | <_>
1776 | 23 18 1 10 2.
1777 | 0
1778 | <_>
1779 |
1780 | <_>
1781 | 23 20 2 6 -1.
1782 | <_>
1783 | 24 20 1 6 2.
1784 | 0
1785 |
1786 |
--------------------------------------------------------------------------------
/computer/collect_training_data.py:
--------------------------------------------------------------------------------
1 | __author__ = 'robin'
2 |
3 | import numpy as np
4 | import cv2
5 | import serial
6 | import pygame
7 | from pygame.locals import *
8 | import socket
9 | import time
10 | import os
11 |
12 |
13 | class CollectTrainingData(object):
14 |
15 | def __init__(self):
16 |
17 | self.server_socket = socket.socket()
18 | self.server_socket.bind(('192.168.137.1', 8002))
19 | self.server_socket.listen(0)
20 |
21 | # accept a single connection
22 | self.connection = self.server_socket.accept()[0].makefile('rb')
23 |
24 | # connect to the serial port
25 | self.ser = serial.Serial('COM8', 115200, timeout=1)
26 | self.send_inst = True
27 |
28 | # create one-hot label rows: k[0]=left, k[1]=right, k[2]=forward, k[3]=reverse
29 | self.k = np.zeros((4, 4), 'float')
30 | for i in range(4):
31 | self.k[i, i] = 1
32 | self.temp_label = np.zeros((1, 4), 'float')
33 |
34 | pygame.init()
35 | screen = pygame.display.set_mode((400, 300))
36 | pygame.display.set_caption("Robin Autonomous Car")
37 | self.collect_image()
38 |
39 | def collect_image(self):
40 |
41 | saved_frame = 0
42 | total_frame = 0
43 |
44 | # collect images for training
45 | print 'Start collecting images...'
46 | e1 = cv2.getTickCount()
47 | image_array = np.zeros((1, 38400))
48 | label_array = np.zeros((1, 4), 'float')
49 |
50 | # stream video frames one by one
51 | try:
52 | stream_bytes = ' '
53 | frame = 1
54 | while self.send_inst:
55 | stream_bytes += self.connection.read(1024)
56 | first = stream_bytes.find('\xff\xd8')
57 | last = stream_bytes.find('\xff\xd9')
58 | if first != -1 and last != -1:
59 | jpg = stream_bytes[first:last + 2]
60 | stream_bytes = stream_bytes[last + 2:]
61 | image = cv2.imdecode(np.fromstring(jpg, dtype=np.uint8), cv2.CV_LOAD_IMAGE_GRAYSCALE)
62 |
63 | # select lower half of the image
64 | roi = image[120:240, :]
65 |
66 | # save streamed images
67 | cv2.imwrite('training_images/frame{:>05}.jpg'.format(frame), image)
68 |
69 | #cv2.imshow('roi_image', roi)
70 | cv2.imshow('image', image)
71 |
72 | # reshape the roi image into one row array
73 | temp_array = roi.reshape(1, 38400).astype(np.float32)
74 |
75 | frame += 1
76 | total_frame += 1
77 |
78 | # get input from human driver
79 | for event in pygame.event.get():
80 | if event.type == KEYDOWN:
81 | key_input = pygame.key.get_pressed()
82 |
83 | # complex orders
84 | if key_input[pygame.K_UP] and key_input[pygame.K_RIGHT]:
85 | print("Forward Right")
86 | image_array = np.vstack((image_array, temp_array))
87 | label_array = np.vstack((label_array, self.k[1]))
88 | saved_frame += 1
89 | self.ser.write(chr(6))
90 |
91 | elif key_input[pygame.K_UP] and key_input[pygame.K_LEFT]:
92 | print("Forward Left")
93 | image_array = np.vstack((image_array, temp_array))
94 | label_array = np.vstack((label_array, self.k[0]))
95 | saved_frame += 1
96 | self.ser.write(chr(7))
97 |
98 | elif key_input[pygame.K_DOWN] and key_input[pygame.K_RIGHT]:
99 | print("Reverse Right")
100 | self.ser.write(chr(8))
101 |
102 | elif key_input[pygame.K_DOWN] and key_input[pygame.K_LEFT]:
103 | print("Reverse Left")
104 | self.ser.write(chr(9))
105 |
106 | # simple orders
107 | elif key_input[pygame.K_UP]:
108 | print("Forward")
109 | saved_frame += 1
110 | image_array = np.vstack((image_array, temp_array))
111 | label_array = np.vstack((label_array, self.k[2]))
112 | self.ser.write(chr(1))
113 |
114 | elif key_input[pygame.K_DOWN]:
115 | print("Reverse")
116 | saved_frame += 1
117 | image_array = np.vstack((image_array, temp_array))
118 | label_array = np.vstack((label_array, self.k[3]))
119 | self.ser.write(chr(2))
120 |
121 | elif key_input[pygame.K_RIGHT]:
122 | print("Right")
123 | image_array = np.vstack((image_array, temp_array))
124 | label_array = np.vstack((label_array, self.k[1]))
125 | saved_frame += 1
126 | self.ser.write(chr(3))
127 |
128 | elif key_input[pygame.K_LEFT]:
129 | print("Left")
130 | image_array = np.vstack((image_array, temp_array))
131 | label_array = np.vstack((label_array, self.k[0]))
132 | saved_frame += 1
133 | self.ser.write(chr(4))
134 |
135 | elif key_input[pygame.K_x] or key_input[pygame.K_q]:
136 | print 'exit'
137 | self.send_inst = False
138 | self.ser.write(chr(0))
139 | break
140 |
141 | elif event.type == pygame.KEYUP:
142 | self.ser.write(chr(0))
143 |
144 | # save training images and labels
145 | train = image_array[1:, :]
146 | train_labels = label_array[1:, :]
147 |
148 | # save training data as a numpy file
149 | file_name = str(int(time.time()))
150 | directory = "training_data"
151 | if not os.path.exists(directory):
152 | os.makedirs(directory)
153 | try:
154 | np.savez(directory + '/' + file_name + '.npz', train=train, train_labels=train_labels)
155 | except IOError as e:
156 | print(e)
157 |
158 | e2 = cv2.getTickCount()
159 | # calculate streaming duration
160 | time0 = (e2 - e1) / cv2.getTickFrequency()
161 | print 'Streaming duration:', time0
162 |
163 | print(train.shape)
164 | print(train_labels.shape)
165 | print 'Total frame:', total_frame
166 | print 'Saved frame:', saved_frame
167 | print 'Dropped frame', total_frame - saved_frame
168 |
169 | finally:
170 | self.connection.close()
171 | self.server_socket.close()
172 |
173 | if __name__ == '__main__':
174 | CollectTrainingData()
175 |
--------------------------------------------------------------------------------
/computer/mlp_training.py:
--------------------------------------------------------------------------------
1 | __author__ = 'robin'
2 |
3 | import cv2
4 | import numpy as np
5 | import glob
6 | import sys
7 | from sklearn.model_selection import train_test_split
8 |
9 | print 'Loading training data...'
10 | e0 = cv2.getTickCount()
11 |
12 | # load training data
13 | image_array = np.zeros((1, 38400))
14 | label_array = np.zeros((1, 4), 'float')
15 | training_data = glob.glob('training_data/*.npz')
16 |
17 | # if no data, exit
18 | if not training_data:
19 | print "No training data in directory, exit"
20 | sys.exit()
21 |
22 | for single_npz in training_data:
23 | with np.load(single_npz) as data:
24 | train_temp = data['train']
25 | train_labels_temp = data['train_labels']
26 | image_array = np.vstack((image_array, train_temp))
27 | label_array = np.vstack((label_array, train_labels_temp))
28 |
29 | X = image_array[1:, :]
30 | y = label_array[1:, :]
31 | print 'Image array shape: ', X.shape
32 | print 'Label array shape: ', y.shape
33 |
34 | e00 = cv2.getTickCount()
35 | time0 = (e00 - e0)/ cv2.getTickFrequency()
36 | print 'Loading image duration:', time0
37 |
38 | # train test split, 7:3
39 | train, test, train_labels, test_labels = train_test_split(X, y, test_size=0.3)
40 |
41 | # set start time
42 | e1 = cv2.getTickCount()
43 |
44 | # create MLP
45 | layer_sizes = np.int32([38400, 32, 4])
46 | model = cv2.ANN_MLP()
47 | model.create(layer_sizes)
48 | criteria = (cv2.TERM_CRITERIA_COUNT | cv2.TERM_CRITERIA_EPS, 500, 0.0001)
49 | #criteria2 = (cv2.TERM_CRITERIA_COUNT, 100, 0.001)
50 | params = dict(term_crit = criteria,
51 | train_method = cv2.ANN_MLP_TRAIN_PARAMS_BACKPROP,
52 | bp_dw_scale = 0.001,
53 | bp_moment_scale = 0.0 )
54 |
55 | print 'Training MLP ...'
56 | num_iter = model.train(train, train_labels, None, params = params)
57 |
58 | # set end time
59 | e2 = cv2.getTickCount()
60 | time = (e2 - e1)/cv2.getTickFrequency()
61 | print 'Training duration:', time
62 | #print 'Ran for %d iterations' % num_iter
63 |
64 | # train data
65 | ret_0, resp_0 = model.predict(train)
66 | prediction_0 = resp_0.argmax(-1)
67 | true_labels_0 = train_labels.argmax(-1)
68 |
69 | train_rate = np.mean(prediction_0 == true_labels_0)
70 | print 'Train accuracy: ', "{0:.2f}%".format(train_rate * 100)
71 |
72 | # test data
73 | ret_1, resp_1 = model.predict(test)
74 | prediction_1 = resp_1.argmax(-1)
75 | true_labels_1 = test_labels.argmax(-1)
76 |
77 | test_rate = np.mean(prediction_1 == true_labels_1)
78 | print 'Test accuracy: ', "{0:.2f}%".format(test_rate * 100)
79 |
80 | # save model
81 | model.save('mlp_xml/mlp.xml')
82 |
--------------------------------------------------------------------------------
/computer/mlp_xml/readme.md:
--------------------------------------------------------------------------------
1 | The trained neural network XML file (`mlp.xml`) produced by `mlp_training.py` is saved here.
2 |
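3 | For reference, here is a minimal sketch of loading `mlp.xml` back for a prediction. It assumes the OpenCV 2.4 `cv2.ANN_MLP` bindings used elsewhere in this repo and that the script is run from the `computer/` directory (as `rc_driver.py` is); the all-zero sample frame is only a placeholder:
4 | 
5 | ```python
6 | import cv2
7 | import numpy as np
8 | 
9 | # rebuild the network with the layer sizes used in mlp_training.py,
10 | # then load the trained weights from this folder
11 | layer_sizes = np.int32([38400, 32, 4])
12 | model = cv2.ANN_MLP()
13 | model.create(layer_sizes)
14 | model.load('mlp_xml/mlp.xml')
15 | 
16 | # predict on one flattened 120x320 grayscale frame (float32 row vector);
17 | # a real frame would come from the video stream, zeros are just a placeholder
18 | sample = np.zeros((1, 38400), np.float32)
19 | ret, resp = model.predict(sample)
20 | print resp.argmax(-1)  # 0: left, 1: right, 2: forward, 3: reverse
21 | ```
22 | 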
--------------------------------------------------------------------------------
/computer/picam_calibration.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Reference:
4 | OpenCV-Python Tutorials - Camera Calibration and 3D Reconstruction
5 | http://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_calib3d/py_calibration/py_calibration.html
6 | """
7 |
8 | import cv2
9 | import numpy as np
10 | import glob
11 |
12 | # termination criteria
13 | criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001)
14 |
15 | # 6x9 chess board, prepare object points, like (0,0,0), (1,0,0), (2,0,0) ...., (8,5,0)
16 | object_point = np.zeros((6*9, 3), np.float32)
17 | object_point[:, :2] = np.mgrid[0:9, 0:6].T.reshape(-1, 2)
18 |
19 | # 3d point in real world space
20 | object_points = []
21 | # 2d points in image plane
22 | image_points = []
23 | h, w = 0, 0
24 |
25 | images = glob.glob('chess_board/*.jpg')
26 |
27 | for file_name in images:
28 | image = cv2.imread(file_name)
29 | gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
30 | h, w = gray.shape[:2]
31 |
32 | # find chess board corners
33 | ret, corners = cv2.findChessboardCorners(gray, (9, 6), None)
34 |
35 | # add object points, image points
36 | if ret:
37 | object_points.append(object_point)
38 | cv2.cornerSubPix(gray, corners, (11, 11), (-1, -1), criteria)
39 | image_points.append(corners)
40 |
41 | # draw and display the corners
42 | cv2.drawChessboardCorners(image, (9, 6), corners, ret)
43 | cv2.imshow('image', image)
44 | cv2.waitKey(500)
45 |
46 | # calibration
47 | retval, cameraMatrix, distCoeffs, rvecs, tvecs = cv2.calibrateCamera(object_points, image_points, (w, h), None, None)
48 |
49 | print "camera matrix:\n", cameraMatrix
50 |
51 | # pi camera intrinsic parameters
52 | ay = cameraMatrix[1, 1]
53 | u0 = cameraMatrix[0, 2]
54 | v0 = cameraMatrix[1, 2]
55 | print "Ay:", ay
56 | print "u0:", u0
57 | print "v0:", v0
58 |
59 | cv2.destroyAllWindows()
--------------------------------------------------------------------------------
/computer/rc_driver.py:
--------------------------------------------------------------------------------
1 | __author__ = 'robin'
2 |
3 | import threading
4 | import SocketServer
5 | import serial
6 | import cv2
7 | import numpy as np
8 | import math
9 |
10 | # distance data measured by ultrasonic sensor
11 | sensor_data = " "
12 |
13 |
14 | class NeuralNetwork(object):
15 |
16 | def __init__(self):
17 | self.model = cv2.ANN_MLP()
18 |
19 | def create(self):
20 | layer_size = np.int32([38400, 32, 4])
21 | self.model.create(layer_size)
22 | self.model.load('mlp_xml/mlp.xml')
23 |
24 | def predict(self, samples):
25 | ret, resp = self.model.predict(samples)
26 | return resp.argmax(-1)
27 |
28 |
29 | class RCControl(object):
30 |
31 | def __init__(self):
32 | self.serial_port = serial.Serial('COM8', 115200, timeout=1)
33 |
34 | def steer(self, prediction):
35 | if prediction == 2:
36 | self.serial_port.write(chr(1))
37 | print("Forward")
38 | elif prediction == 0:
39 | self.serial_port.write(chr(7))
40 | print("Left")
41 | elif prediction == 1:
42 | self.serial_port.write(chr(6))
43 | print("Right")
44 | else:
45 | self.stop()
46 |
47 | def stop(self):
48 | self.serial_port.write(chr(0))
49 |
50 |
51 | class DistanceToCamera(object):
52 |
53 | def __init__(self):
54 | # camera params
55 | self.alpha = 8.0 * math.pi / 180
56 | self.v0 = 119.865631204
57 | self.ay = 332.262498472
58 |
59 | def calculate(self, v, h, x_shift, image):
60 | # compute and return the distance from the target point to the camera
61 | d = h / math.tan(self.alpha + math.atan((v - self.v0) / self.ay))
62 | if d > 0:
63 | cv2.putText(image, "%.1fcm" % d,
64 | (image.shape[1] - x_shift, image.shape[0] - 20), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 255, 255),
65 | 2)
66 | return d
67 |
68 |
69 | class ObjectDetection(object):
70 |
71 | def __init__(self):
72 | self.red_light = False
73 | self.green_light = False
74 | self.yellow_light = False
75 |
76 | def detect(self, cascade_classifier, gray_image, image):
77 |
78 | # y camera coordinate of the target point 'P'
79 | v = 0
80 |
81 | # minimum value to proceed traffic light state validation
82 | threshold = 150
83 |
84 | # detection
85 | cascade_obj = cascade_classifier.detectMultiScale(
86 | gray_image,
87 | scaleFactor=1.1,
88 | minNeighbors=5,
89 | minSize=(30, 30),
90 | flags=cv2.cv.CV_HAAR_SCALE_IMAGE
91 | )
92 |
93 | # draw a rectangle around the objects
94 | for (x_pos, y_pos, width, height) in cascade_obj:
95 | cv2.rectangle(image, (x_pos + 5, y_pos + 5), (x_pos + width - 5, y_pos + height - 5), (255, 255, 255), 2)
96 | v = y_pos + height - 5
97 | # print(x_pos+5, y_pos+5, x_pos+width-5, y_pos+height-5, width, height)
98 |
99 | # stop sign
100 | if width / height == 1:
101 | cv2.putText(image, 'STOP', (x_pos, y_pos - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
102 |
103 | # traffic lights
104 | else:
105 | roi = gray_image[y_pos + 10:y_pos + height - 10, x_pos + 10:x_pos + width - 10]
106 | mask = cv2.GaussianBlur(roi, (25, 25), 0)
107 | (minVal, maxVal, minLoc, maxLoc) = cv2.minMaxLoc(mask)
108 |
109 | # check if light is on
110 | if maxVal - minVal > threshold:
111 | cv2.circle(roi, maxLoc, 5, (255, 0, 0), 2)
112 |
113 | # Red light
114 | if 1.0 / 8 * (height - 30) < maxLoc[1] < 4.0 / 8 * (height - 30):
115 | cv2.putText(image, 'Red', (x_pos + 5, y_pos - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
116 | self.red_light = True
117 |
118 | # Green light
119 | elif 5.5 / 8 * (height - 30) < maxLoc[1] < height - 30:
120 | cv2.putText(image, 'Green', (x_pos + 5, y_pos - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0),
121 | 2)
122 | self.green_light = True
123 |
124 | # yellow light
125 | # elif 4.0/8*(height-30) < maxLoc[1] < 5.5/8*(height-30):
126 | # cv2.putText(image, 'Yellow', (x_pos+5, y_pos - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 255), 2)
127 | # self.yellow_light = True
128 | return v
129 |
130 |
131 | class SensorDataHandler(SocketServer.BaseRequestHandler):
132 | data = " "
133 |
134 | def handle(self):
135 | global sensor_data
136 | try:
137 | while self.data:
138 | self.data = self.request.recv(1024)
139 | sensor_data = round(float(self.data), 1)
140 | # print "{} sent:".format(self.client_address[0])
141 | print sensor_data
142 | finally:
143 | print "Connection closed on thread 2"
144 |
145 |
146 | class VideoStreamHandler(SocketServer.StreamRequestHandler):
147 | # h1: stop sign
148 | h1 = 15.5 - 10 # cm
149 | # h2: traffic light
150 | h2 = 15.5 - 10
151 |
152 | # create neural network
153 | model = NeuralNetwork()
154 | model.create()
155 |
156 | obj_detection = ObjectDetection()
157 | rc_car = RCControl()
158 |
159 | # cascade classifiers
160 | stop_cascade = cv2.CascadeClassifier('cascade_xml/stop_sign.xml')
161 | light_cascade = cv2.CascadeClassifier('cascade_xml/traffic_light.xml')
162 |
163 | d_to_camera = DistanceToCamera()
164 | d_stop_sign = 25
165 | d_light = 25
166 |
167 | stop_start = 0 # start time when stop at the stop sign
168 | stop_finish = 0
169 | stop_time = 0
170 | drive_time_after_stop = 0
171 |
172 | def handle(self):
173 |
174 | global sensor_data
175 | stream_bytes = ' '
176 | stop_flag = False
177 | stop_sign_active = True
178 |
179 | # stream video frames one by one
180 | try:
181 | while True:
182 | stream_bytes += self.rfile.read(1024)
183 | first = stream_bytes.find('\xff\xd8')
184 | last = stream_bytes.find('\xff\xd9')
185 | if first != -1 and last != -1:
186 | jpg = stream_bytes[first:last + 2]
187 | stream_bytes = stream_bytes[last + 2:]
188 | gray = cv2.imdecode(np.fromstring(jpg, dtype=np.uint8), cv2.CV_LOAD_IMAGE_GRAYSCALE)
189 | image = cv2.imdecode(np.fromstring(jpg, dtype=np.uint8), cv2.CV_LOAD_IMAGE_UNCHANGED)
190 |
191 | # lower half of the image
192 | half_gray = gray[120:240, :]
193 |
194 | # object detection
195 | v_param1 = self.obj_detection.detect(self.stop_cascade, gray, image)
196 | v_param2 = self.obj_detection.detect(self.light_cascade, gray, image)
197 |
198 | # distance measurement
199 | if v_param1 > 0 or v_param2 > 0:
200 | d1 = self.d_to_camera.calculate(v_param1, self.h1, 300, image)
201 | d2 = self.d_to_camera.calculate(v_param2, self.h2, 100, image)
202 | self.d_stop_sign = d1
203 | self.d_light = d2
204 |
205 | cv2.imshow('image', image)
206 | cv2.imshow('mlp_image', half_gray)
207 |
208 | # reshape image
209 | image_array = half_gray.reshape(1, 38400).astype(np.float32)
210 |
211 | # neural network makes prediction
212 | prediction = self.model.predict(image_array)
213 |
214 | # stop conditions
215 | if sensor_data is not None and sensor_data < 30:
216 | print("Stop, obstacle in front")
217 | self.rc_car.stop()
218 |
219 | elif 0 < self.d_stop_sign < 25 and stop_sign_active:
220 | print("Stop sign ahead")
221 | self.rc_car.stop()
222 |
223 | # stop for 5 seconds
224 | if stop_flag is False:
225 | self.stop_start = cv2.getTickCount()
226 | stop_flag = True
227 | self.stop_finish = cv2.getTickCount()
228 |
229 | self.stop_time = (self.stop_finish - self.stop_start) / cv2.getTickFrequency()
230 | print "Stop time: %.2fs" % self.stop_time
231 |
232 | # 5 seconds later, continue driving
233 | if self.stop_time > 5:
234 | print("Waited for 5 seconds")
235 | stop_flag = False
236 | stop_sign_active = False
237 |
238 | elif 0 < self.d_light < 30:
239 | # print("Traffic light ahead")
240 | if self.obj_detection.red_light:
241 | print("Red light")
242 | self.rc_car.stop()
243 | elif self.obj_detection.green_light:
244 | print("Green light")
245 | pass
246 | elif self.obj_detection.yellow_light:
247 | print("Yellow light flashing")
248 | pass
249 |
250 | self.d_light = 30
251 | self.obj_detection.red_light = False
252 | self.obj_detection.green_light = False
253 | self.obj_detection.yellow_light = False
254 |
255 | else:
256 | self.rc_car.steer(prediction)
257 | self.stop_start = cv2.getTickCount()
258 | self.d_stop_sign = 25
259 |
260 | if stop_sign_active is False:
261 | self.drive_time_after_stop = (self.stop_start - self.stop_finish) / cv2.getTickFrequency()
262 | if self.drive_time_after_stop > 5:
263 | stop_sign_active = True
264 |
265 | if cv2.waitKey(1) & 0xFF == ord('q'):
266 | self.rc_car.stop()
267 | break
268 |
269 | cv2.destroyAllWindows()
270 |
271 | finally:
272 | print "Connection closed on thread 1"
273 |
274 |
275 | class ThreadServer(object):
276 |
277 | def server_thread(host, port):
278 | server = SocketServer.TCPServer((host, port), VideoStreamHandler)
279 | server.serve_forever()
280 |
281 | def server_thread2(host, port):
282 | server = SocketServer.TCPServer((host, port), SensorDataHandler)
283 | server.serve_forever()
284 |
285 | distance_thread = threading.Thread(target=server_thread2, args=('192.168.137.1', 8004))
286 | distance_thread.start()
287 |     video_thread = threading.Thread(target=server_thread, args=('192.168.137.1', 8002))
288 | video_thread.start()
289 |
290 |
291 | if __name__ == '__main__':
292 | ThreadServer()
293 |
--------------------------------------------------------------------------------
/computer/rc_driver_exp.py:
--------------------------------------------------------------------------------
1 | __author__ = 'zhengwang'
2 |
3 | import threading
4 | import SocketServer
5 | import serial
6 | import cv2
7 | import numpy as np
8 | import math
9 |
10 | # distance data measured by ultrasonic sensor
11 | sensor_data = " "
12 |
13 |
14 | class NeuralNetwork(object):
15 |
16 | def __init__(self):
17 | self.model = cv2.ANN_MLP()
18 |
19 | def create(self):
20 | layer_size = np.int32([38400, 32, 4])
21 | self.model.create(layer_size)
22 | self.model.load('mlp_xml/mlp.xml')
23 |
24 | def predict(self, samples):
25 | ret, resp = self.model.predict(samples)
26 | return resp.argmax(-1)
27 |
28 |
29 | class RCControl(object):
30 |
31 | def __init__(self):
32 | self.serial_port = serial.Serial('COM8', 115200, timeout=1)
33 |
34 | def steer(self, prediction):
35 | if prediction == 2:
36 | self.serial_port.write(chr(1))
37 | print("Forward")
38 | elif prediction == 0:
39 | self.serial_port.write(chr(7))
40 | print("Left")
41 | elif prediction == 1:
42 | self.serial_port.write(chr(6))
43 | print("Right")
44 | else:
45 | self.stop()
46 |
47 | def stop(self):
48 | self.serial_port.write(chr(0))
49 |
50 |
51 | class DistanceToCamera(object):
52 |
53 | def __init__(self):
54 | # camera params
55 | self.alpha = 8.0 * math.pi / 180
56 | self.v0 = 119.865631204
57 | self.ay = 332.262498472
58 |
59 | def calculate(self, v, h, x_shift, image):
60 | # compute and return the distance from the target point to the camera
61 | d = h / math.tan(self.alpha + math.atan((v - self.v0) / self.ay))
62 | if d > 0:
63 | cv2.putText(image, "%.1fcm" % d,
64 | (image.shape[1] - x_shift, image.shape[0] - 20), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 255, 255),
65 | 2)
66 | return d
67 |
68 |
69 | class ObjectDetection(object):
70 |
71 | def __init__(self):
72 | self.red_light = False
73 | self.green_light = False
74 | self.yellow_light = False
75 |
76 | def detect(self, cascade_classifier, gray_image, image):
77 |
78 | # y camera coordinate of the target point 'P'
79 | v = 0
80 |
81 | # minimum value to proceed traffic light state validation
82 | threshold = 150
83 |
84 | # detection
85 | cascade_obj = cascade_classifier.detectMultiScale(
86 | gray_image,
87 | scaleFactor=1.1,
88 | minNeighbors=5,
89 | minSize=(30, 30),
90 | flags=cv2.cv.CV_HAAR_SCALE_IMAGE
91 | )
92 |
93 | # draw a rectangle around the objects
94 | for (x_pos, y_pos, width, height) in cascade_obj:
95 | cv2.rectangle(image, (x_pos + 5, y_pos + 5), (x_pos + width - 5, y_pos + height - 5), (255, 255, 255), 2)
96 | v = y_pos + height - 5
97 | # print(x_pos+5, y_pos+5, x_pos+width-5, y_pos+height-5, width, height)
98 |
99 | # stop sign
100 | if width / height == 1:
101 | cv2.putText(image, 'STOP', (x_pos, y_pos - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
102 |
103 | # traffic lights
104 | else:
105 | roi = gray_image[y_pos + 10:y_pos + height - 10, x_pos + 10:x_pos + width - 10]
106 | mask = cv2.GaussianBlur(roi, (25, 25), 0)
107 | (minVal, maxVal, minLoc, maxLoc) = cv2.minMaxLoc(mask)
108 |
109 | # check if light is on
110 | if maxVal - minVal > threshold:
111 | cv2.circle(roi, maxLoc, 5, (255, 0, 0), 2)
112 |
113 | # Red light
114 | if 1.0 / 8 * (height - 30) < maxLoc[1] < 4.0 / 8 * (height - 30):
115 | cv2.putText(image, 'Red', (x_pos + 5, y_pos - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
116 | self.red_light = True
117 |
118 | # Green light
119 | elif 5.5 / 8 * (height - 30) < maxLoc[1] < height - 30:
120 | cv2.putText(image, 'Green', (x_pos + 5, y_pos - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0),
121 | 2)
122 | self.green_light = True
123 |
124 | # yellow light
125 | # elif 4.0/8*(height-30) < maxLoc[1] < 5.5/8*(height-30):
126 | # cv2.putText(image, 'Yellow', (x_pos+5, y_pos - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 255), 2)
127 | # self.yellow_light = True
128 | return v
129 |
130 |
131 | class SensorDataHandler(SocketServer.BaseRequestHandler):
132 | data = " "
133 |
134 | def handle(self):
135 | global sensor_data
136 | try:
137 | while self.data:
138 | self.data = self.request.recv(1024)
139 | sensor_data = round(float(self.data), 1)
140 | # print "{} sent:".format(self.client_address[0])
141 | print sensor_data
142 | finally:
143 | print "Connection closed on thread 2"
144 |
145 |
146 | class VideoStreamHandler(SocketServer.StreamRequestHandler):
147 | # h1: stop sign
148 | h1 = 15.5 - 10 # cm
149 | # h2: traffic light
150 | h2 = 15.5 - 10
151 |
152 | # create neural network
153 | model = NeuralNetwork()
154 | model.create()
155 |
156 | obj_detection = ObjectDetection()
157 | rc_car = RCControl()
158 |
159 | # cascade classifiers
160 | stop_cascade = cv2.CascadeClassifier('cascade_xml/stop_sign.xml')
161 | light_cascade = cv2.CascadeClassifier('cascade_xml/traffic_light.xml')
162 |
163 | d_to_camera = DistanceToCamera()
164 | d_stop_sign = 25
165 | d_light = 25
166 |
167 | stop_start = 0 # start time when stop at the stop sign
168 | stop_finish = 0
169 | stop_time = 0
170 | drive_time_after_stop = 0
171 |
172 | def handle(self):
173 |
174 | global sensor_data
175 | stream_bytes = ' '
176 | stop_flag = False
177 | stop_sign_active = True
178 |
179 | # stream video frames one by one
180 | try:
181 | while True:
182 | stream_bytes += self.rfile.read(1024)
183 | first = stream_bytes.find('\xff\xd8')
184 | last = stream_bytes.find('\xff\xd9')
185 | if first != -1 and last != -1:
186 | jpg = stream_bytes[first:last + 2]
187 | stream_bytes = stream_bytes[last + 2:]
188 | gray = cv2.imdecode(np.fromstring(jpg, dtype=np.uint8), cv2.CV_LOAD_IMAGE_GRAYSCALE)
189 | image = cv2.imdecode(np.fromstring(jpg, dtype=np.uint8), cv2.CV_LOAD_IMAGE_UNCHANGED)
190 |
191 | # lower half of the image
192 | half_gray = gray[120:240, :]
193 |
194 | # object detection
195 | v_param1 = self.obj_detection.detect(self.stop_cascade, gray, image)
196 | v_param2 = self.obj_detection.detect(self.light_cascade, gray, image)
197 |
198 | # distance measurement
199 | if v_param1 > 0 or v_param2 > 0:
200 | d1 = self.d_to_camera.calculate(v_param1, self.h1, 300, image)
201 | d2 = self.d_to_camera.calculate(v_param2, self.h2, 100, image)
202 | self.d_stop_sign = d1
203 | self.d_light = d2
204 |
205 | cv2.imshow('image', image)
206 | cv2.imshow('mlp_image', half_gray)
207 |
208 | # reshape image
209 | image_array = half_gray.reshape(1, 38400).astype(np.float32)
210 |
211 | # neural network makes prediction
212 | prediction = self.model.predict(image_array)
213 |
214 | # stop conditions
215 | if sensor_data is not None and sensor_data < 20:
216 | print("Stop, obstacle in front")
217 | self.rc_car.stop()
218 |
219 | elif 0 < self.d_stop_sign < 30 and stop_sign_active:
220 | print("Stop sign ahead")
221 | self.rc_car.stop()
222 |
223 | # stop for 5 seconds
224 | if stop_flag is False:
225 | self.stop_start = cv2.getTickCount()
226 | stop_flag = True
227 | self.stop_finish = cv2.getTickCount()
228 |
229 | self.stop_time = (self.stop_finish - self.stop_start) / cv2.getTickFrequency()
230 | print "Stop time: %.2fs" % self.stop_time
231 |
232 | # 5 seconds later, continue driving
233 | if self.stop_time > 5:
234 | print("Waited for 5 seconds")
235 | stop_flag = False
236 | stop_sign_active = False
237 |
238 | elif 0 < self.d_light < 30:
239 | # print("Traffic light ahead")
240 | if self.obj_detection.red_light:
241 | print("Red light")
242 | self.rc_car.stop()
243 | elif self.obj_detection.green_light:
244 | print("Green light")
245 | pass
246 | elif self.obj_detection.yellow_light:
247 | print("Yellow light flashing")
248 | pass
249 |
250 | self.d_light = 30
251 | self.obj_detection.red_light = False
252 | self.obj_detection.green_light = False
253 | self.obj_detection.yellow_light = False
254 |
255 | else:
256 | self.rc_car.steer(prediction)
257 | self.stop_start = cv2.getTickCount()
258 | self.d_stop_sign = 25
259 |
260 | if stop_sign_active is False:
261 | self.drive_time_after_stop = (self.stop_start - self.stop_finish) / cv2.getTickFrequency()
262 | if self.drive_time_after_stop > 5:
263 | stop_sign_active = True
264 |
265 | if cv2.waitKey(1) & 0xFF == ord('q'):
266 | self.rc_car.stop()
267 | break
268 |
269 | cv2.destroyAllWindows()
270 |
271 | finally:
272 | print "Connection closed on thread 1"
273 |
274 |
275 | class ThreadServer(object):
276 |
277 | def server_thread(host, port):
278 | server = SocketServer.TCPServer((host, port), VideoStreamHandler)
279 | server.serve_forever()
280 |
281 | def server_thread2(host, port):
282 | server = SocketServer.TCPServer((host, port), SensorDataHandler)
283 | server.serve_forever()
284 |
285 | distance_thread = threading.Thread(target=server_thread2, args=('192.168.137.1', 8004))
286 | distance_thread.start()
287 |     video_thread = threading.Thread(target=server_thread, args=('192.168.137.1', 8002))
288 | video_thread.start()
289 |
290 |
291 | if __name__ == '__main__':
292 | ThreadServer()
293 |
--------------------------------------------------------------------------------
/computer/rc_driver_overtake.py:
--------------------------------------------------------------------------------
1 | __author__ = 'robin'
2 |
3 | import threading
4 | import SocketServer
5 | import serial
6 | import cv2
7 | import numpy as np
8 | import math
9 |
10 | # distance data measured by ultrasonic sensor
11 | sensor_data = " "
12 |
13 |
14 | class NeuralNetwork(object):
15 |
16 | def __init__(self):
17 | self.model = cv2.ANN_MLP()
18 |
19 | def create(self):
20 | layer_size = np.int32([38400, 32, 4])
21 | self.model.create(layer_size)
22 | self.model.load('mlp_xml/mlp.xml')
23 |
24 | def predict(self, samples):
25 | ret, resp = self.model.predict(samples)
26 | return resp.argmax(-1)
27 |
28 |
29 | class RCControl(object):
30 |
31 | def __init__(self):
32 | self.serial_port = serial.Serial('COM8', 115200, timeout=1)
33 |
34 | def steer(self, prediction):
35 | if prediction == 2:
36 | self.serial_port.write(chr(1))
37 | print("Forward")
38 | elif prediction == 0:
39 | self.serial_port.write(chr(7))
40 | print("Left")
41 | elif prediction == 1:
42 | self.serial_port.write(chr(6))
43 | print("Right")
44 |         else:
45 |             self.serial_port.write(chr(0))
46 |             #self.stop()
47 | 
48 |     def stop(self):
49 |         self.serial_port.write(chr(0))
50 | 
51 |     def over(self):
52 |         self.serial_port.write(chr(6))
53 | 
54 | 
55 | 
56 |
57 |
58 | class DistanceToCamera(object):
59 |
60 | def __init__(self):
61 | # camera params
62 | self.alpha = 8.0 * math.pi / 180
63 | self.v0 = 119.865631204
64 | self.ay = 332.262498472
65 |
66 | def calculate(self, v, h, x_shift, image):
67 | # compute and return the distance from the target point to the camera
68 | d = h / math.tan(self.alpha + math.atan((v - self.v0) / self.ay))
69 | if d > 0:
70 | cv2.putText(image, "%.1fcm" % d,
71 | (image.shape[1] - x_shift, image.shape[0] - 20), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (255, 255, 255),
72 | 2)
73 | return d
74 |
75 |
76 | class ObjectDetection(object):
77 |
78 | def __init__(self):
79 | self.red_light = False
80 | self.green_light = False
81 | self.yellow_light = False
82 |
83 | def detect(self, cascade_classifier, gray_image, image):
84 |
85 | # y camera coordinate of the target point 'P'
86 | v = 0
87 |
88 | # minimum value to proceed traffic light state validation
89 | threshold = 150
90 |
91 | # detection
92 | cascade_obj = cascade_classifier.detectMultiScale(
93 | gray_image,
94 | scaleFactor=1.1,
95 | minNeighbors=5,
96 | minSize=(30, 30),
97 | flags=cv2.cv.CV_HAAR_SCALE_IMAGE
98 | )
99 |
100 | # draw a rectangle around the objects
101 | for (x_pos, y_pos, width, height) in cascade_obj:
102 | cv2.rectangle(image, (x_pos + 5, y_pos + 5), (x_pos + width - 5, y_pos + height - 5), (255, 255, 255), 2)
103 | v = y_pos + height - 5
104 | # print(x_pos+5, y_pos+5, x_pos+width-5, y_pos+height-5, width, height)
105 |
106 | # stop sign
107 | if width / height == 1:
108 | cv2.putText(image, 'STOP', (x_pos, y_pos - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
109 |
110 | # traffic lights
111 | else:
112 | roi = gray_image[y_pos + 10:y_pos + height - 10, x_pos + 10:x_pos + width - 10]
113 | mask = cv2.GaussianBlur(roi, (25, 25), 0)
114 | (minVal, maxVal, minLoc, maxLoc) = cv2.minMaxLoc(mask)
115 |
116 | # check if light is on
117 | if maxVal - minVal > threshold:
118 | cv2.circle(roi, maxLoc, 5, (255, 0, 0), 2)
119 |
120 | # Red light
121 | if 1.0 / 8 * (height - 30) < maxLoc[1] < 4.0 / 8 * (height - 30):
122 | cv2.putText(image, 'Red', (x_pos + 5, y_pos - 5), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
123 | self.red_light = True
124 |
125 | # Green light
126 | elif 5.5 / 8 * (height - 30) < maxLoc[1] < height - 30:
127 | cv2.putText(image, 'Green', (x_pos + 5, y_pos - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0),
128 | 2)
129 | self.green_light = True
130 |
131 | # yellow light
132 | # elif 4.0/8*(height-30) < maxLoc[1] < 5.5/8*(height-30):
133 | # cv2.putText(image, 'Yellow', (x_pos+5, y_pos - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 255), 2)
134 | # self.yellow_light = True
135 | return v
136 |
137 |
138 | class SensorDataHandler(SocketServer.BaseRequestHandler):
139 | data = " "
140 |
141 | def handle(self):
142 | global sensor_data
143 | try:
144 | while self.data:
145 | self.data = self.request.recv(1024)
146 | sensor_data = round(float(self.data), 1)
147 | # print "{} sent:".format(self.client_address[0])
148 | print sensor_data
149 | finally:
150 | print "Connection closed on thread 2"
151 |
152 |
153 | class VideoStreamHandler(SocketServer.StreamRequestHandler):
154 | # h1: stop sign
155 | h1 = 15.5 - 10 # cm
156 | # h2: traffic light
157 | h2 = 15.5 - 10
158 |
159 | # create neural network
160 | model = NeuralNetwork()
161 | model.create()
162 |
163 | obj_detection = ObjectDetection()
164 | rc_car = RCControl()
165 |
166 | # cascade classifiers
167 | stop_cascade = cv2.CascadeClassifier('cascade_xml/stop_sign.xml')
168 | light_cascade = cv2.CascadeClassifier('cascade_xml/traffic_light.xml')
169 |
170 | d_to_camera = DistanceToCamera()
171 | d_stop_sign = 25
172 | d_light = 25
173 |
174 | stop_start = 0 # start time when stop at the stop sign
175 | stop_finish = 0
176 | stop_time = 0
177 | drive_time_after_stop = 0
178 |
179 | def handle(self):
180 |
181 | global sensor_data
182 | stream_bytes = ' '
183 | stop_flag = False
184 | stop_sign_active = True
185 |
186 | # stream video frames one by one
187 | try:
188 | while True:
189 | stream_bytes += self.rfile.read(1024)
190 | first = stream_bytes.find('\xff\xd8')
191 | last = stream_bytes.find('\xff\xd9')
192 | if first != -1 and last != -1:
193 | jpg = stream_bytes[first:last + 2]
194 | stream_bytes = stream_bytes[last + 2:]
195 | gray = cv2.imdecode(np.fromstring(jpg, dtype=np.uint8), cv2.CV_LOAD_IMAGE_GRAYSCALE)
196 | image = cv2.imdecode(np.fromstring(jpg, dtype=np.uint8), cv2.CV_LOAD_IMAGE_UNCHANGED)
197 |
198 | # lower half of the image
199 | half_gray = gray[120:240, :]
200 |
201 | # object detection
202 | v_param1 = self.obj_detection.detect(self.stop_cascade, gray, image)
203 | v_param2 = self.obj_detection.detect(self.light_cascade, gray, image)
204 |
205 | # distance measurement
206 | if v_param1 > 0 or v_param2 > 0:
207 | d1 = self.d_to_camera.calculate(v_param1, self.h1, 300, image)
208 | d2 = self.d_to_camera.calculate(v_param2, self.h2, 100, image)
209 | self.d_stop_sign = d1
210 | self.d_light = d2
211 |
212 | cv2.imshow('image', image)
213 | cv2.imshow('mlp_image', half_gray)
214 |
215 | # reshape image
216 | image_array = half_gray.reshape(1, 38400).astype(np.float32)
217 |
218 | # neural network makes prediction
219 | prediction = self.model.predict(image_array)
220 |
221 | # stop conditions
222 | if sensor_data is not None and sensor_data < 50:
223 | print("Stop, obstacle in front")
224 | self.rc_car.over()
225 |
226 | elif 0 < self.d_stop_sign < 25 and stop_sign_active:
227 | print("Stop sign ahead")
228 | self.rc_car.stop()
229 |
230 | # stop for 5 seconds
231 | if stop_flag is False:
232 | self.stop_start = cv2.getTickCount()
233 | stop_flag = True
234 | self.stop_finish = cv2.getTickCount()
235 |
236 | self.stop_time = (self.stop_finish - self.stop_start) / cv2.getTickFrequency()
237 | print "Stop time: %.2fs" % self.stop_time
238 |
239 | # 5 seconds later, continue driving
240 | if self.stop_time > 5:
241 | print("Waited for 5 seconds")
242 | stop_flag = False
243 | stop_sign_active = False
244 |
245 | elif 0 < self.d_light < 30:
246 | # print("Traffic light ahead")
247 | if self.obj_detection.red_light:
248 | print("Red light")
249 | self.rc_car.stop()
250 | elif self.obj_detection.green_light:
251 | print("Green light")
252 | pass
253 | elif self.obj_detection.yellow_light:
254 | print("Yellow light flashing")
255 | pass
256 |
257 | self.d_light = 30
258 | self.obj_detection.red_light = False
259 | self.obj_detection.green_light = False
260 | self.obj_detection.yellow_light = False
261 |
262 | else:
263 | self.rc_car.steer(prediction)
264 | self.stop_start = cv2.getTickCount()
265 | self.d_stop_sign = 25
266 |
267 | if stop_sign_active is False:
268 | self.drive_time_after_stop = (self.stop_start - self.stop_finish) / cv2.getTickFrequency()
269 | if self.drive_time_after_stop > 5:
270 | stop_sign_active = True
271 |
272 | if cv2.waitKey(1) & 0xFF == ord('q'):
273 | self.rc_car.stop()
274 | break
275 |
276 | cv2.destroyAllWindows()
277 |
278 | finally:
279 | print "Connection closed on thread 1"
280 |
281 |
282 | class ThreadServer(object):
283 |
284 | def server_thread(host, port):
285 | server = SocketServer.TCPServer((host, port), VideoStreamHandler)
286 | server.serve_forever()
287 |
288 | def server_thread2(host, port):
289 | server = SocketServer.TCPServer((host, port), SensorDataHandler)
290 | server.serve_forever()
291 |
292 | distance_thread = threading.Thread(target=server_thread2, args=('192.168.137.1', 8004))
293 | distance_thread.start()
294 |     video_thread = threading.Thread(target=server_thread, args=('192.168.137.1', 8002))
295 | video_thread.start()
296 |
297 |
298 | if __name__ == '__main__':
299 | ThreadServer()
300 |
--------------------------------------------------------------------------------
/computer/training_images/frame00001.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/RobinRajSB/Self-Driving-Autonomous-Car-using-Open-CV-and-Python-Neural-Network-Overtaking-Raspberry-Pi/72ad3c897915502375b751f095836647aad6f08d/computer/training_images/frame00001.jpg
--------------------------------------------------------------------------------
/computer/training_images/frame00002.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/RobinRajSB/Self-Driving-Autonomous-Car-using-Open-CV-and-Python-Neural-Network-Overtaking-Raspberry-Pi/72ad3c897915502375b751f095836647aad6f08d/computer/training_images/frame00002.jpg
--------------------------------------------------------------------------------
/computer/training_images/frame00003.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/RobinRajSB/Self-Driving-Autonomous-Car-using-Open-CV-and-Python-Neural-Network-Overtaking-Raspberry-Pi/72ad3c897915502375b751f095836647aad6f08d/computer/training_images/frame00003.jpg
--------------------------------------------------------------------------------
/computer/training_images/frame00004.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/RobinRajSB/Self-Driving-Autonomous-Car-using-Open-CV-and-Python-Neural-Network-Overtaking-Raspberry-Pi/72ad3c897915502375b751f095836647aad6f08d/computer/training_images/frame00004.jpg
--------------------------------------------------------------------------------
/computer/training_images/frame00005.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/RobinRajSB/Self-Driving-Autonomous-Car-using-Open-CV-and-Python-Neural-Network-Overtaking-Raspberry-Pi/72ad3c897915502375b751f095836647aad6f08d/computer/training_images/frame00005.jpg
--------------------------------------------------------------------------------
/computer/training_images/frame00006.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/RobinRajSB/Self-Driving-Autonomous-Car-using-Open-CV-and-Python-Neural-Network-Overtaking-Raspberry-Pi/72ad3c897915502375b751f095836647aad6f08d/computer/training_images/frame00006.jpg
--------------------------------------------------------------------------------
/computer/training_images/frame00007.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/RobinRajSB/Self-Driving-Autonomous-Car-using-Open-CV-and-Python-Neural-Network-Overtaking-Raspberry-Pi/72ad3c897915502375b751f095836647aad6f08d/computer/training_images/frame00007.jpg
--------------------------------------------------------------------------------
/computer/training_images/frame00008.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/RobinRajSB/Self-Driving-Autonomous-Car-using-Open-CV-and-Python-Neural-Network-Overtaking-Raspberry-Pi/72ad3c897915502375b751f095836647aad6f08d/computer/training_images/frame00008.jpg
--------------------------------------------------------------------------------
/computer/training_images/frame00009.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/RobinRajSB/Self-Driving-Autonomous-Car-using-Open-CV-and-Python-Neural-Network-Overtaking-Raspberry-Pi/72ad3c897915502375b751f095836647aad6f08d/computer/training_images/frame00009.jpg
--------------------------------------------------------------------------------
/computer/training_images/frame00010.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/RobinRajSB/Self-Driving-Autonomous-Car-using-Open-CV-and-Python-Neural-Network-Overtaking-Raspberry-Pi/72ad3c897915502375b751f095836647aad6f08d/computer/training_images/frame00010.jpg
--------------------------------------------------------------------------------
/raspberryPi/README.md:
--------------------------------------------------------------------------------
1 | Client programs on the Raspberry Pi did not stray far from these two great tutorials:
2 |
3 | [PiCamera Documentation - Advanced Recipes](https://picamera.readthedocs.org/en/release-1.10/recipes2.html)
4 |
5 | [Ultrasonic Distance Measurement Using Python – Part 2](http://www.raspberrypi-spy.co.uk/2013/01/ultrasonic-distance-measurement-using-python-part-2/)
6 |
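7 | For orientation, here is a minimal sketch of the trigger/echo timing described in the second tutorial. The BCM pin numbers are placeholders, not necessarily how the sensor is wired on this car, and the loop only prints the reading instead of sending it to the server:
8 | 
9 | ```python
10 | import time
11 | import RPi.GPIO as GPIO
12 | 
13 | # placeholder BCM pin numbers for the ultrasonic sensor's trigger and echo lines
14 | GPIO_TRIGGER = 23
15 | GPIO_ECHO = 24
16 | 
17 | GPIO.setmode(GPIO.BCM)
18 | GPIO.setup(GPIO_TRIGGER, GPIO.OUT)
19 | GPIO.setup(GPIO_ECHO, GPIO.IN)
20 | 
21 | def measure():
22 |     # send a 10 microsecond trigger pulse
23 |     GPIO.output(GPIO_TRIGGER, True)
24 |     time.sleep(0.00001)
25 |     GPIO.output(GPIO_TRIGGER, False)
26 | 
27 |     # time the echo pulse
28 |     start = time.time()
29 |     while GPIO.input(GPIO_ECHO) == 0:
30 |         start = time.time()
31 |     stop = time.time()
32 |     while GPIO.input(GPIO_ECHO) == 1:
33 |         stop = time.time()
34 | 
35 |     # pulse width * speed of sound (34300 cm/s) / 2 = one-way distance in cm
36 |     return (stop - start) * 34300 / 2.0
37 | 
38 | if __name__ == '__main__':
39 |     try:
40 |         while True:
41 |             print "Distance: %.1f cm" % measure()
42 |             time.sleep(0.5)
43 |     finally:
44 |         GPIO.cleanup()
45 | ```
46 | 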
--------------------------------------------------------------------------------
/raspberryPi/stream_client.py:
--------------------------------------------------------------------------------
1 | """
2 | Reference:
3 | PiCamera documentation
4 | https://picamera.readthedocs.org/en/release-1.10/recipes2.html
5 |
6 | """
7 |
8 | import io
9 | import socket
10 | import struct
11 | import time
12 | import picamera
13 |
14 |
15 | # create socket and bind host
16 | client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
17 | client_socket.connect(('192.168.1.100', 8000))
18 | connection = client_socket.makefile('wb')
19 |
20 | try:
21 | with picamera.PiCamera() as camera:
22 | camera.resolution = (320, 240) # pi camera resolution
23 | camera.framerate = 10 # 10 frames/sec
24 |         time.sleep(2)                       # give 2 secs for camera to initialize
25 | start = time.time()
26 | stream = io.BytesIO()
27 |
28 | # send jpeg format video stream
29 | for foo in camera.capture_continuous(stream, 'jpeg', use_video_port = True):
30 |             connection.write(struct.pack('<L', stream.tell()))
31 |             connection.flush()
32 |             stream.seek(0)
33 |             connection.write(stream.read())
34 |             if time.time() - start > 600:
35 | break
36 | stream.seek(0)
37 | stream.truncate()
38 |         connection.write(struct.pack('<L', 0))
39 | finally:
40 |     connection.close()
41 |     client_socket.close()
42 | 
--------------------------------------------------------------------------------
/test/ultrasonic_server_test.py:
--------------------------------------------------------------------------------
28 | break
29 | finally:
30 | self.connection.close()
31 | self.server_socket.close()
32 |
33 | if __name__ == '__main__':
34 | SensorStreamingTest()
35 |
--------------------------------------------------------------------------------
/testing/face.py:
--------------------------------------------------------------------------------
1 | import cv2
2 | 
3 | # cascade classifiers: stock frontal face model plus a custom-trained cascade
4 | faceCascade = cv2.CascadeClassifier(r'C:\Users\Robin Raj SB\Downloads\Webcam-Face-Detect-master\Webcam-Face-Detect-master\haarcascade_frontalface_default.xml')
5 | CustomCascade = cv2.CascadeClassifier(r'C:\Users\Robin Raj SB\Desktop\Custom Detection\classifier\cascade.xml')
6 | video_capture = cv2.VideoCapture(0)
7 | 
8 | while True:
9 |     # Capture frame-by-frame
10 |     ret, frame = video_capture.read()
11 | 
12 |     gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
13 | 
14 |     # detect faces
15 |     faces = faceCascade.detectMultiScale(
16 |         gray,
17 |         scaleFactor=1.1,
18 |         minNeighbors=5,
19 |         minSize=(30, 30),
20 |         flags=cv2.cv.CV_HAAR_SCALE_IMAGE
21 |     )
22 | 
23 |     # Draw a rectangle around the faces
24 |     for (x, y, w, h) in faces:
25 |         cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 255, 0), 2)
26 |         font = cv2.FONT_HERSHEY_SIMPLEX
27 |         cv2.putText(frame, 'Human Detected', (x, y), font, 1, (200, 255, 155))
28 | 
29 |     # detect objects with the custom cascade
30 |     code = CustomCascade.detectMultiScale(
31 |         gray,
32 |         scaleFactor=1.1,
33 |         minNeighbors=5,
34 |         minSize=(30, 30),
35 |         flags=cv2.cv.CV_HAAR_SCALE_IMAGE
36 |     )
37 | 
38 |     # Draw a rectangle around the custom detections
39 |     for (x, y, w, h) in code:
40 |         cv2.rectangle(frame, (x, y), (x+w, y+h), (255, 0, 0), 2)
41 |         font = cv2.FONT_HERSHEY_SIMPLEX
42 |         cv2.putText(frame, 'Custom Code Detected', (x, y), font, 1, (200, 255, 155))
43 | 
44 |     # Display the resulting frame
45 |     cv2.imshow('Video', frame)
46 | 
47 |     if cv2.waitKey(1) & 0xFF == ord('q'):
48 |         break
49 | 
50 | # When everything is done, release the capture
51 | video_capture.release()
52 | cv2.destroyAllWindows()
--------------------------------------------------------------------------------