├── BlinkLEDs
│   ├── README.md
│   ├── Raspberry_Pi_With_50_Ohm_Resistor.png
│   ├── Raspberry_Pi_With_tactile_push_button.png
│   ├── Raspberry_Pi_with_3_50_Ohm_Resistors_And_3_LEDs.png
│   ├── blink_3leds_with_button_v0.1.py
│   ├── blink_v0.1.py
│   ├── blink_v0.2.py
│   ├── blink_v0.3.py
│   ├── blink_v0.4.py
│   └── blink_v0.5.py
├── GitTutorial
│   └── README.md
├── Gopigo_Robot_Browser_Controlled
│   ├── COPYING
│   ├── LICENSE
│   ├── README.md
│   ├── Raspberry_Pi_Camera_controlled-by-mobile-browser.jpg
│   ├── Raspberry_Pi_Camera_streaming-to-computer-browser.jpg
│   ├── Raspberry_Pi_Camera_streaming-to-mobile-browser.jpg
│   ├── browser_stream_setup.sh
│   ├── camera_streamer.py
│   ├── camera_streamer.pyc
│   ├── index.html
│   ├── robot_controller.py
│   ├── robot_controller.pyc
│   ├── robot_web_server.py
│   ├── stop.txt
│   └── www
│       ├── config.html
│       ├── css
│       │   ├── config.css
│       │   ├── modal.css
│       │   └── style.css
│       ├── images
│       │   └── ajax-loader.gif
│       ├── index.html
│       ├── js
│       │   ├── jquery.joystick.js
│       │   ├── jquery.js
│       │   └── sockjs-0.3.min.js
│       └── logs.html
├── Gopigo_obstacle_avoidance_robot
│   ├── README.md
│   ├── basic_obstacle_avoidv0.001.py
│   ├── basic_obstacle_avoidv0.002.py
│   ├── basic_obstacle_avoidv0.003.py
│   ├── basic_obstacle_avoidv0.004.py
│   ├── basic_obstacle_avoidv0.005.py
│   └── basic_obstacle_avoidv0.006.py
├── Gopigo_robot_Pygame_GUI_conrolled
│   ├── Basic_Raspberry_Pi_Robot_Control_GUI.jpg
│   ├── README.md
│   ├── basic_robot_gui_v.010.py
│   ├── basic_robot_gui_v.011.py
│   ├── basic_robot_gui_v.012.py
│   └── fswebcam.sh
├── Gopigo_robot_keyboard_conrolled
│   ├── Basic_Raspberry_Pi_Robot_Control.jpg
│   ├── README.md
│   ├── basic_robot_v0.001.py
│   └── basic_robot_v0.002.py
├── HomeConfig
│   ├── Static_WebCam_View
│   │   └── usbcam.php
│   ├── default.sitemap
│   ├── http_switches.items
│   ├── index.html
│   ├── index_181017.html
│   ├── openhab2
│   │   ├── cmdline.txt
│   │   ├── config.txt
│   │   ├── default.sitemap
│   │   ├── etc_default_openhab2
│   │   ├── etc_openhab2_services_weather.cfg
│   │   ├── extra_switches.items
│   │   ├── http_switches.items
│   │   ├── org.eclipse.smarthome.core.items.Item.json
│   │   └── org.eclipse.smarthome.core.items.Item.json.090518
│   └── stations.csv
├── LICENSE
├── LightSensor
│   ├── Light_Sensor_v0.11.py
│   └── README.md
├── OpenCV
│   ├── CaptureFaces
│   │   ├── FaceDetect1
│   │   │   ├── README.md
│   │   │   ├── abba.png
│   │   │   ├── abba_face_detected.jpg
│   │   │   ├── face_detect.py
│   │   │   ├── haarcascade_frontalface_default.xml
│   │   │   ├── little_mix_right.jpg
│   │   │   ├── little_mix_wrong.jpg
│   │   │   ├── the_saturdays_right.jpg
│   │   │   └── the_saturdays_wrong.jpg
│   │   └── FaceDetect2
│   │       ├── CaptureFace_FromSingleFile_Or_SingleCameraPic.py
│   │       ├── CaptureFace_FromSingleFile_Or_SingleCameraPic_v0.11.py
│   │       ├── CaptureFace_FromSingleFile_Or_SingleCameraPic_v0.12.py
│   │       ├── poi_1.jpg
│   │       ├── poi_2.jpg
│   │       └── snapshot.jpg
│   ├── CaptureSingleImage
│   │   └── CaptureSingleImage_v0.11
│   ├── CaptureVideoStream
│   │   ├── CaptureVideoStream_v0.11
│   │   ├── CaptureVideoStream_v0.11.py
│   │   ├── CaptureVideoStream_v0.12
│   │   ├── CaptureVideoStream_v0.12.py
│   │   ├── CaptureVideoStream_v0.13.py
│   │   ├── CaptureVideoStream_v0.14.py
│   │   ├── CaptureVideoStream_v0.15.py
│   │   ├── CaptureVideoStream_v0.16.py
│   │   ├── CaptureVideoStream_v0.17.py
│   │   ├── CaptureVideoStream_v0.18.py
│   │   ├── CaptureVideoStream_v0.19.py
│   │   ├── CaptureVideoStream_v0.20.py
│   │   ├── CaptureVideoStream_v0.21.py
│   │   ├── CaptureVideoStream_v0.22.py
│   │   ├── CaptureVideoStream_v0.23.py
│   │   ├── CaptureVideoStream_v0.24.py
│   │   └── CaptureVideoStream_v0.25.py
│   ├── CaptureVideoStream_AWSLambda
│   │   └── CaptureVideoStream_v0.23.py
│   ├── CaptureVideoStream_MultipleSourceFaceNoLambda
│   │   └── CaptureVideoStream_v0.23.py
│   └── README.md
├── PIR
│   ├── 70936__guitarguy1985__police.wav
│   ├── PIR_sensor_v0.11.py
│   ├── PIR_sensor_v0.12.py
│   ├── README.md
│   └── Whoop.wav
├── README.md
├── ReadingAnalogSensors
│   ├── README.md
│   ├── read_analog_signal_v0.1.py
│   ├── read_analog_signal_v0.2.py
│   └── read_analog_signal_v0.3.py
├── Sense_Temp_Humidity
│   ├── README.md
│   ├── sense_temp_humidity_v0.1.py
│   ├── sense_temp_humidity_v0.2.py
│   ├── sense_temp_humidity_v0.3.py
│   ├── sense_temp_humidity_v0.4.py
│   ├── sense_temp_humidity_v0.5.py
│   ├── sense_temp_humidity_v0.6.py
│   ├── sense_temp_humidity_v0.7.py
│   ├── sense_temp_humidity_v0.8.py
│   ├── sense_temp_humidity_v0.9.py
│   ├── sense_temp_humidity_v0.91.py
│   ├── sense_temp_humidity_v0.94.py
│   ├── sense_temp_humidity_v0.95.py
│   └── sense_temperature_pressure_with_dht11_raspberry_pi.png
├── Sense_Temp_Humidity_Pull_Data_Arduino
│   ├── README.md
│   ├── sense_temp_humidity_v0.95.py
│   └── sense_temp_humidity_v0.96.py
├── Weather_Reporting
│   ├── README.md
│   ├── get_weather_v0.1.py
│   ├── get_weather_v0.11.py
│   ├── get_weather_v0.12.py
│   ├── get_weather_v0.13.py
│   └── get_weather_v0.14.py
├── Write_To_LCD_Screen
│   ├── LCDWriteToScreenStartScript
│   ├── LCDWriteToScreenStartScript_v0_11
│   ├── LCDWriteToScreenStartScript_v0_12
│   ├── README.md
│   ├── RPi_I2C_driver.py
│   ├── RPi_I2C_driver.pyc
│   ├── WriteToScreenv0_11.py
│   ├── WriteToScreenv0_12.py
│   ├── WriteToScreenv0_13.py
│   ├── WriteToScreenv0_14.py
│   ├── WriteToScreenv0_15.py
│   ├── WriteToScreenv0_16.py
│   └── runme.sh
└── Write_Weather_To_Twitter
    ├── Write_To_Twitter_v0.11.py
    ├── Write_To_Twitter_v0.12.py
    └── Write_To_Twitter_v0.13.py
/BlinkLEDs/README.md:
--------------------------------------------------------------------------------
1 | BlinkLEDs (https://github.com/tangowhisky37/RaspiPythonProjects/tree/master/BlinkLEDs)
2 | - This folder includes programs that use the Raspberry Pi's GPIO pins to interact with multiple LEDs
3 | - This folder also includes programs that use the Raspberry Pi's GPIO pins to interact with a simple push button
4 | - Components required for the programs in this folder include
5 |   - 1 x Raspberry Pi 3 (I have used a 3, Model B. You can use whatever you have at your disposal.)
6 |   - 3 x 50 Ohm resistors
7 |   - 3 x LEDs
8 |   - LEDs connected to the following GPIO ports: led1 on GPIO17, led2 on GPIO27, led3 on GPIO22
9 |   - 1 x Breadboard
10 |   - 1 x Extension cable (suggested) to extend the GPIO ports of the Raspberry Pi and bring them closer to the breadboard
11 |   - 6 x Jumper cables - Female (connect to the Raspberry Pi GPIO) to Male (connect to the breadboard)
12 |   - 1 x Tactile push button
13 |   - 1 x Raspberry Pi Cobbler board (recommended, to break out the GPIOs and make wiring easier)
14 | - For circuit and connectivity details please refer to the images in the respective directories
15 | - For additional reading please visit
16 |   - https://www.raspberrypi.org/learning/physical-computing-with-python/worksheet/
17 |
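18 | - As a quick wiring check (not one of the original examples), the short sketch below flashes each LED once and then waits for the push button, assuming the GPIO17/GPIO27/GPIO22 wiring above and the button on GPIO2 used by blink_v0.2.py onwards
19 |
20 | ```python
21 | from gpiozero import LED, Button
22 | from time import sleep
23 |
24 | leds = [LED(17), LED(27), LED(22)]   # led1, led2, led3 as listed above
25 | button = Button(2)                   # same pin as the blink examples
26 |
27 | for led in leds:                     # flash each LED once to confirm the wiring
28 |     led.on()
29 |     sleep(0.5)
30 |     led.off()
31 |
32 | print('Now press the push button...')
33 | button.wait_for_press()              # blocks until the button is pressed
34 | print('Button wiring looks good')
35 | ```
36 |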
--------------------------------------------------------------------------------
/BlinkLEDs/Raspberry_Pi_With_50_Ohm_Resistor.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/BlinkLEDs/Raspberry_Pi_With_50_Ohm_Resistor.png
--------------------------------------------------------------------------------
/BlinkLEDs/Raspberry_Pi_With_tactile_push_button.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/BlinkLEDs/Raspberry_Pi_With_tactile_push_button.png
--------------------------------------------------------------------------------
/BlinkLEDs/Raspberry_Pi_with_3_50_Ohm_Resistors_And_3_LEDs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/BlinkLEDs/Raspberry_Pi_with_3_50_Ohm_Resistors_And_3_LEDs.png
--------------------------------------------------------------------------------
/BlinkLEDs/blink_3leds_with_button_v0.1.py:
--------------------------------------------------------------------------------
1 | from gpiozero import LED
2 | from gpiozero import Button
3 | from time import sleep
4 | import sys
5 | from signal import pause
6 | from subprocess import *
7 | import os
8 |
9 | led1 = LED(17)
10 | led2 = LED(27)
11 | led3 = LED(22)
12 | button = Button(2)
13 |
14 | #while True:
15 | # button.wait_for_press()
16 | # print('You asked for it smarty')
17 | # led.toggle()
18 |
19 | while True:
20 | if button.is_pressed:
21 | print('Here goes the Red LED')
22 | os.system("espeak red")
23 | led1.on()
24 | sleep(0.5)
25 | led1.off()
26 | sleep(0.5)
27 | print('Here goes the Amber LED')
28 | os.system("espeak amber")
29 | led2.on()
30 | sleep(0.5)
31 | led2.off()
32 | sleep(0.5)
33 | print('Here goes the Green LED')
34 | os.system("espeak green")
35 | led3.on()
36 | sleep(0.5)
37 | led3.off()
38 | sleep(0.5)
39 |
40 |
41 | #button.when_pressed = led.blink
42 | #button.when_released = led.off
43 |
44 | pause()
45 |
--------------------------------------------------------------------------------
/BlinkLEDs/blink_v0.1.py:
--------------------------------------------------------------------------------
1 | from gpiozero import LED
2 | from time import sleep
3 | import sys
4 |
5 | led = LED(17)
6 |
7 | while True:
8 | led.on()
9 | sleep(0.5)
10 | led.off()
11 | sleep(0.5)
12 |
--------------------------------------------------------------------------------
/BlinkLEDs/blink_v0.2.py:
--------------------------------------------------------------------------------
1 | from gpiozero import LED
2 | from gpiozero import Button
3 | from time import sleep
4 | import sys
5 |
6 | led = LED(17)
7 | button = Button(2)
8 |
9 | button.wait_for_press()
10 | print('You asked for it smarty')
11 | #led.on()
12 | #sleep(5)
13 | #led.off()
14 |
15 | while True:
16 | led.on()
17 | sleep(0.5)
18 | led.off()
19 | sleep(0.5)
20 |
--------------------------------------------------------------------------------
/BlinkLEDs/blink_v0.3.py:
--------------------------------------------------------------------------------
1 | from gpiozero import LED
2 | from gpiozero import Button
3 | from time import sleep
4 | import sys
5 |
6 | led = LED(17)
7 | button = Button(2)
8 |
9 | while True:
10 | button.wait_for_press()
11 | print('You asked for it smarty')
12 | led.toggle()
13 |
14 | #while True:
15 | # led.on()
16 | # sleep(0.5)
17 | # led.off()
18 | # sleep(0.5)
19 |
--------------------------------------------------------------------------------
/BlinkLEDs/blink_v0.4.py:
--------------------------------------------------------------------------------
1 | from gpiozero import LED
2 | from gpiozero import Button
3 | from time import sleep
4 | import sys
5 | from signal import pause
6 |
7 | led = LED(17)
8 | button = Button(2)
9 |
10 | #while True:
11 | # button.wait_for_press()
12 | # print('You asked for it smarty')
13 | # led.toggle()
14 |
15 | #while True:
16 | # led.on()
17 | # sleep(0.5)
18 | # led.off()
19 | # sleep(0.5)
20 |
21 |
22 | button.when_pressed = led.on
23 | button.when_released = led.off
24 |
25 | pause()
26 |
--------------------------------------------------------------------------------
/BlinkLEDs/blink_v0.5.py:
--------------------------------------------------------------------------------
1 | from gpiozero import LED
2 | from gpiozero import Button
3 | from time import sleep
4 | import sys
5 | from signal import pause
6 |
7 | led1 = LED(17)
8 | led2 = LED(27)
9 | button = Button(2)
10 |
11 | #while True:
12 | # button.wait_for_press()
13 | # print('You asked for it smarty')
14 | # led.toggle()
15 |
16 | #while True:
17 | # if button.is_pressed:
18 | # print('You've just pressed the button')
19 | # led1.on()
20 | # sleep(0.5)
21 | # led1.off()
22 | # sleep(0.5)
23 | # led2.on()
24 | # sleep(0.5)
25 | # led2.off()
26 | # sleep(0.5)
27 |
28 |
29 | button.when_pressed = led1.blink
30 | button.when_released = led1.off
31 |
32 | pause()
33 |
--------------------------------------------------------------------------------
/Gopigo_Robot_Browser_Controlled/COPYING:
--------------------------------------------------------------------------------
1 | The code is Licensed under Creative Commons Attribution-ShareAlike 3.0 license.
2 |
3 | The code from the python example by Dawn Robotics Ltd is licensed under the BSD license.
4 |
5 | Of the included JavaScript libraries, jQuery and SockJS are licensed under the MIT license. Our joystick class jquery.joystick.js is a modified version of https://github.com/mifi/jquery-joystick. This project doesn't have a license but obviously all derived projects should link back and provide acknowledgement.
6 |
--------------------------------------------------------------------------------
/Gopigo_Robot_Browser_Controlled/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2014, Dawn Robotics Ltd
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without
5 | modification, are permitted provided that the following conditions are met:
6 |
7 | 1. Redistributions of source code must retain the above copyright notice, this
8 | list of conditions and the following disclaimer.
9 |
10 | 2. Redistributions in binary form must reproduce the above copyright notice,
11 | this list of conditions and the following disclaimer in the documentation
12 | and/or other materials provided with the distribution.
13 |
14 | 3. Neither the name of the Dawn Robotics Ltd nor the names of its contributors
15 | may be used to endorse or promote products derived from this software without
16 | specific prior written permission.
17 |
18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
19 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
20 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
21 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
22 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
23 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
24 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
25 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
26 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------
/Gopigo_Robot_Browser_Controlled/README.md:
--------------------------------------------------------------------------------
1 | ## Stream Video & Control Your Robot via Your Web Browser
2 | ### This project is aimed at building a Robot that allows for streaming of video and control from a web browser
3 |
4 | * Here are some pictures of what you will end up with. This project has been built upon the Dexter Industries GoPiGo robot.
5 | * Dexter Industries offers a modular robot with excellent video and printed tutorials that get you up and running in a few hours.
6 | * The initial Dexter Industries robots were based on the Raspberry Pi 2 (which is what my robot is based upon), but the robot package has since been upgraded to support the Raspberry Pi 3.
7 | * You can read more about this robot and pick yours up from [Dexter Industries](http://www.dexterindustries.com)
8 | * A lot of the code and functions included here are leveraged from Dexter Industries examples which are licensed under GPL.
9 |
10 | 
11 |
12 | 
13 |
14 | 
15 |
16 | **Usage:**
17 |
18 | * To start, we need to resolve a bunch of dependencies and install some software. So let's get going.
19 | * Make the browser_stream_setup.sh script executable
20 |
21 | > bash# chmod +x ./browser_stream_setup.sh
22 |
23 | * Execute the script to kick off the installation
24 |
25 | > bash# sudo ./browser_stream_setup.sh
26 |
27 | * During the installation you will see errors about mjpg-streamer. Don't stress, we will install it manually next.
28 | * Once the installer has done its job, open up a console and download the Raspberry Pi fork of mjpg-streamer from GitHub.
29 |
30 | > bash# git clone https://github.com/jacksonliam/mjpg-streamer
31 |
32 | * The above project is a fork of http://sourceforge.net/projects/mjpg-streamer/ with added support for the Raspberry Pi camera via the input_raspicam plugin.
33 | * mjpg-streamer is a command line application that copies JPEG frames from one or more input plugins to multiple output plugins.
34 | * It can be used to stream JPEG files over an IP-based network from a webcam to various types of viewers such as Chrome, Firefox, Cambozola, VLC, mplayer, and other software capable of receiving MJPG streams.
35 | * It was originally written for embedded devices with very limited resources in terms of RAM and CPU.
36 | * Its predecessor "uvc_streamer" was created because Linux-UVC compatible cameras directly produce JPEG data, allowing fast and performant M-JPEG streams even from an embedded device running OpenWRT.
37 | * The input module "input_uvc.so" captures such JPG frames from a connected webcam. mjpg-streamer now supports a variety of different input devices.
38 | * You must have cmake installed, and you will probably also want a development version of libjpeg; we used libjpeg8-dev based on guidance from the developers.
39 | * So let's proceed: install the dependencies for mjpg-streamer, then compile, build and install mjpg-streamer from source.
40 |
41 | > bash# sudo apt-get install cmake libjpeg8-dev
42 |
43 | * The following commands will build and install all plugins that can be compiled.
44 |
45 | > bash# cd mjpg-streamer/mjpg-streamer-experimental
46 | > bash# make
47 | > bash# sudo make install
48 |
49 | * With all the installation now out of the way, you can proceed to launch the streaming and browser-control program.
50 | * Make robot_web_server.py executable
51 |
52 | > bash# chmod +x robot_web_server.py
53 |
54 | * Run robot_web_server.py which launches the Python Tornado web server
55 | * Open a web browser on any computer or mobile device and enter the following in the address bar:
56 |
57 | > http://IP_Address_Of_Your_Raspberry_Pi:98
58 |
59 | * The page that hosts the streaming video and browser control application runs on the local IP address of the Pi on port 98
60 | * The video stream will load and you can use the on-screen joystick to control the GoPiGo
61 |
62 | 
63 |
64 | 
65 |
66 | Enjoy and keep hacking !!!
67 |
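68 | * For reference, here is a minimal, self-contained sketch (not the repo's robot_web_server.py) of how tornado and sockjs-tornado, both installed by browser_stream_setup.sh, can be wired together to serve the www folder on port 98 and receive joystick messages. The '/robot_control' URL prefix is an illustrative assumption.
69 |
70 | ```python
71 | import tornado.ioloop
72 | import tornado.web
73 | from sockjs.tornado import SockJSConnection, SockJSRouter
74 |
75 | class ControlConnection(SockJSConnection):
76 |     def on_message(self, msg):
77 |         # A real server would parse the joystick payload here and call
78 |         # the GoPiGo motor functions (fwd, left, right, stop, ...)
79 |         print(msg)
80 |
81 | router = SockJSRouter(ControlConnection, '/robot_control')  # assumed URL prefix
82 | app = tornado.web.Application(router.urls + [
83 |     (r'/(.*)', tornado.web.StaticFileHandler,
84 |      {'path': 'www', 'default_filename': 'index.html'}),
85 | ])
86 | app.listen(98)  # same port as the address used above
87 | tornado.ioloop.IOLoop.instance().start()
88 | ```
89 |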
--------------------------------------------------------------------------------
/Gopigo_Robot_Browser_Controlled/Raspberry_Pi_Camera_controlled-by-mobile-browser.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/Gopigo_Robot_Browser_Controlled/Raspberry_Pi_Camera_controlled-by-mobile-browser.jpg
--------------------------------------------------------------------------------
/Gopigo_Robot_Browser_Controlled/Raspberry_Pi_Camera_streaming-to-computer-browser.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/Gopigo_Robot_Browser_Controlled/Raspberry_Pi_Camera_streaming-to-computer-browser.jpg
--------------------------------------------------------------------------------
/Gopigo_Robot_Browser_Controlled/Raspberry_Pi_Camera_streaming-to-mobile-browser.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/Gopigo_Robot_Browser_Controlled/Raspberry_Pi_Camera_streaming-to-mobile-browser.jpg
--------------------------------------------------------------------------------
/Gopigo_Robot_Browser_Controlled/browser_stream_setup.sh:
--------------------------------------------------------------------------------
1 | echo " _____ _ ";
2 | echo " | __ \ | | ";
3 | echo " | | | | _____ _| |_ ___ _ __ ";
4 | echo " | | | |/ _ \ \/ / __/ _ \ '__| ";
5 | echo " | |__| | __/> <| || __/ | ";
6 | echo " |_____/ \___/_/\_\\__\___|_| _ _ ";
7 | echo " |_ _| | | | | (_) ";
8 | echo " | | _ __ __| |_ _ ___| |_ _ __ _ ___ ___ ";
9 | echo " | | | '_ \ / _\` | | | / __| __| '__| |/ _ \/ __|";
10 | echo " _| |_| | | | (_| | |_| \__ \ |_| | | | __/\__ \ ";
11 | echo " |_____|_| |_|\__,_|\__,_|___/\__|_| |_|\___||___/ ";
12 | echo " ";
13 | echo " ";
14 | echo " "
15 | printf "Welcome to GoPiGo Browser Streaming Bot Installer.\nPlease ensure internet connectivity before running this script.\n
16 | NOTE: Raspberry Pi will reboot after completion."
17 | echo "Must be running as Root user"
18 | echo " "
19 | echo "Press ENTER to begin..."
20 | read
21 |
22 | echo " "
23 | echo "Check for internet connectivity..."
24 | echo "=================================="
25 | wget -q --tries=2 --timeout=20 http://google.com
26 | if [ $? -eq 0 ];then
27 | echo "Connected"
28 | else
29 | echo "Unable to Connect, try again !!!"
30 | exit 0
31 | fi
32 |
33 | #Installing Mjpeg streamer http://blog.miguelgrinberg.com/post/how-to-build-and-run-mjpg-streamer-on-the-raspberry-pi
34 | sudo apt-get update
35 | sudo apt-get install libjpeg8-dev imagemagick libv4l-dev
36 | sudo ln -s /usr/include/linux/videodev2.h /usr/include/linux/videodev.h
37 | wget http://sourceforge.net/code-snapshots/svn/m/mj/mjpg-streamer/code/mjpg-streamer-code-182.zip
38 | unzip mjpg-streamer-code-182.zip
39 | cd mjpg-streamer-code-182/mjpg-streamer
40 | make mjpg_streamer input_file.so output_http.so
41 | sudo cp mjpg_streamer /usr/local/bin
42 | sudo cp output_http.so input_file.so /usr/local/lib/
43 | sudo cp -R www /usr/local/www
44 | mkdir /tmp/stream
45 | cd ../../
46 |
47 | rm -rf mjpg-streamer-182
48 | rm -rf mjpg-streamer-code-182
49 | rm index.html
50 | rm mjpg-streamer-code-182.zip
51 |
52 | git clone https://github.com/DexterInd/userland.git
53 |
54 | cd userland
55 | mkdir build
56 | cd build
57 | sudo apt-get install gcc build-essential cmake vlc rpi-update
58 | cmake ../
59 | make
60 | sudo make install
61 | cd ../../
62 | rm -R userland
63 |
64 | git clone https://bitbucket.org/DexterIndustries/raspberry_pi_camera_streamer.git
65 | cd raspberry_pi_camera_streamer
66 | mkdir build
67 | cd build
68 | cmake ../
69 | make
70 | sudo make install
71 | cd ../../
72 |
73 | rm -R raspberry_pi_camera_streamer
74 |
75 | sudo pip install tornado
76 | git clone https://github.com/DexterInd/sockjs-tornado
77 | cd sockjs-tornado
78 | sudo python setup.py install
79 | cd ..
80 | rm -R sockjs-tornado
81 |
82 | echo " "
83 | echo "Restarting"
84 | echo "3"
85 | sleep 1
86 | echo "2"
87 | sleep 1
88 | echo "1"
89 | sleep 1
90 | shutdown -r now
91 |
--------------------------------------------------------------------------------
/Gopigo_Robot_Browser_Controlled/camera_streamer.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/Gopigo_Robot_Browser_Controlled/camera_streamer.pyc
--------------------------------------------------------------------------------
/Gopigo_Robot_Browser_Controlled/index.html:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/Gopigo_Robot_Browser_Controlled/index.html
--------------------------------------------------------------------------------
/Gopigo_Robot_Browser_Controlled/robot_controller.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/Gopigo_Robot_Browser_Controlled/robot_controller.pyc
--------------------------------------------------------------------------------
/Gopigo_Robot_Browser_Controlled/stop.txt:
--------------------------------------------------------------------------------
1 | +++ readlink -f /home/pi/Downloads/installers/RPi_Cam_Web_Interface/stop.sh
2 | ++ dirname /home/pi/Downloads/installers/RPi_Cam_Web_Interface/stop.sh
3 | + cd /home/pi/Downloads/installers/RPi_Cam_Web_Interface
4 | + source ./config.txt
5 | ++ rpicamdir=raspicam
6 | ++ webserver=apache
7 | ++ webport=80
8 | ++ user=twarren
9 | ++ webpasswd=entsysperf234
10 | ++ autostart=yes
11 | + fn_stop
12 | + sudo killall raspimjpeg
13 | + sudo killall php
14 | + sudo killall motion
15 |
--------------------------------------------------------------------------------
/Gopigo_Robot_Browser_Controlled/www/css/config.css:
--------------------------------------------------------------------------------
1 | article.tabs
2 | {
3 | position: relative;
4 | display: block;
5 | width: 40em;
6 | height: 15em;
7 | margin: 2em auto auto 12em;
8 | }
9 |
10 | article.tabs section
11 | {
12 | position: absolute;
13 | display: block;
14 | top: 0;
15 | left: 2.4em;
16 | height: 12em;
17 | padding: 10px 20px;
18 | background-color: #fff;
19 | z-index: 0;
20 | }
21 |
22 | article.tabs section:first-child
23 | {
24 | z-index: 1;
25 | }
26 |
27 | article.tabs section h2
28 | {
29 | position: absolute;
30 | font-size: 1em;
31 | font-weight: normal;
32 | width: 12em;
33 | height: 4em;
34 | top: 0;
35 | left: -12em;
36 | padding: 0;
37 | margin: 0;
38 | color: #999;
39 | background-color: #ddd;
40 | border-radius: 5px 0 0 5px;
41 | }
42 |
43 | article.tabs section:nth-child(2) h2
44 | {
45 | top: 4em;
46 | }
47 |
48 | article.tabs section:nth-child(3) h2
49 | {
50 | top: 8em;
51 | }
52 |
53 | article.tabs section h2 a
54 | {
55 | position: relative;
56 | margin: auto;
57 | top: 1em;
58 | display: block;
59 | width: 100%;
60 | line-height: 1.8em;
61 | text-align: center;
62 | text-decoration: none;
63 | color: inherit;
64 | outline: 0 none;
65 | }
66 |
67 | article.tabs section .tab_content
68 | {
69 | display: none;
70 | }
71 |
72 | article.tabs section:target .tab_content
73 | {
74 | display: block;
75 | z-index: 2;
76 | border-radius: 5px;
77 | }
78 |
79 | article.tabs section:target,
80 | article.tabs section:target h2
81 | {
82 | color: #333;
83 | background-color: #fff;
84 | z-index: 2;
85 | display: block;
86 | }
87 |
88 | article.tabs section:target
89 | {
90 | box-shadow: 3px 3px 3px rgba(0,0,0,0.1);
91 | }
92 |
93 | article.tabs section:target h2
94 | {
95 | box-shadow: -1px 3px 3px rgba(0,0,0,0.1);
96 | }
97 |
98 | /*article.tabs section,
99 | article.tabs section h2
100 | {
101 | -webkit-transition: all 500ms ease;
102 | -moz-transition: all 500ms ease;
103 | -ms-transition: all 500ms ease;
104 | -o-transition: all 500ms ease;
105 | transition: all 500ms ease;
106 | }*/
--------------------------------------------------------------------------------
/Gopigo_Robot_Browser_Controlled/www/css/modal.css:
--------------------------------------------------------------------------------
1 | /*
2 | * Modified from demo at http://cameronbaney.com/2012/07/26/pure-html5css3-responsive-modal-window/
3 | */
4 |
5 | /* Normal styles for the modal shutdown dialog */
6 | #modalShutdownDialog {
7 | left:10%;
8 | /*margin:-250px 0 0 -40%;*/
9 | opacity: 0;
10 | position:absolute;
11 | top:-90%;
12 | visibility: hidden;
13 | width:80%;
14 | height:80%;
15 | box-shadow:0 3px 7px rgba(0,0,0,.25);
16 | box-sizing:border-box;
17 | transition: all 0.4s ease-in-out;
18 | -moz-transition: all 0.4s ease-in-out;
19 | -webkit-transition: all 0.4s ease-in-out;
20 | }
21 |
22 | /* Make the modal appear when targeted */
23 | /*#modalShutdownDialog:target {
24 | opacity: 1;
25 | top:50%;
26 | visibility: visible;
27 | }*/
28 |
29 | #modalShutdownDialog .header,#modalShutdownDialog .footer {
30 | border-bottom: 1px solid #e7e7e7;
31 | border-radius: 5px 5px 0 0;
32 | }
33 | #modalShutdownDialog .footer {
34 | border:none;
35 | border-top: 1px solid #e7e7e7;
36 | border-radius: 0 0 5px 5px;
37 | }
38 | #modalShutdownDialog h2 {
39 | margin:0;
40 | }
41 | #modalShutdownDialog .btn {
42 | float:right;
43 | }
44 | #modalShutdownDialog .copy,#modalShutdownDialog .header, #modalShutdownDialog .footer {
45 | padding:15px;
46 | }
47 | .modal-content {
48 | background: #ffffff;
49 | position: relative;
50 | z-index: 20;
51 | border-radius:5px;
52 | height: 100%;
53 | }
54 | #modalShutdownDialog .copy {
55 | background: #fff;
56 | position: absolute;
57 | height: 200px;
58 | width: 40%;
59 | top: 50%;
60 | left: 50%;
61 | margin-top: -100px;
62 | margin-left: -25%;
63 | }
64 |
65 | #modalShutdownDialog .overlay {
66 | background-color: #000;
67 | background: rgba(0,0,0,.5);
68 | height: 100%;
69 | left: 0;
70 | position: fixed;
71 | top: 0;
72 | width: 100%;
73 | z-index: 10;
74 | }
75 |
76 | #shutdownText {
77 | text-align: center;
78 | margin-bottom: 40px;
79 | font-size: large
80 | }
81 |
82 | #shutdownProgress {
83 | display: block;
84 | margin-left: auto;
85 | margin-right: auto;
86 | }
--------------------------------------------------------------------------------
/Gopigo_Robot_Browser_Controlled/www/images/ajax-loader.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/Gopigo_Robot_Browser_Controlled/www/images/ajax-loader.gif
--------------------------------------------------------------------------------
/Gopigo_Robot_Browser_Controlled/www/logs.html:
--------------------------------------------------------------------------------
5 | Logs
49 | Main Log
--------------------------------------------------------------------------------
/Gopigo_obstacle_avoidance_robot/README.md:
--------------------------------------------------------------------------------
1 | ## GoPiGo Robot - Obstacle Avoider
2 | ### This project uses the HC-SR04 Ultrasonic sensor to build an obstacle avoidance robot.
3 |
4 | * Combined with the Browser Streaming Robot project in this repository, you can monitor the robot's movements as it drives around.
5 |
6 | 
7 |
8 | **Usage:**
9 |
10 | * Open LXTerminal in VNC or Pi Desktop or just SSH into the Pi
11 | * Go to the folder where you've downloaded the code:
12 |
13 | `bash# cd Desktop/GoPiGo/Software/Python/Examples/Basic_Robot_Control/GUI`
14 |
15 | * Run the example:
16 |
17 | `bash# python ./basic_obstacle_avoidv0.006.py`
18 |
19 | 
20 |
21 | 
22 |
23 |
24 |
25 |
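26 | * Stripped down to the core idea, the avoidance loop looks roughly like the sketch below (a minimal illustration using the same gopigo library calls as the scripts in this folder, not one of the numbered versions, which also add speech via espeak, LED signalling and servo-based scanning):
27 |
28 | ```python
29 | from gopigo import us_dist, fwd, stop   # DexterInd GoPiGo library
30 | import time
31 |
32 | distance_to_stop = 20        # cm; the versioned scripts use values between 10 and 70
33 |
34 | fwd()                        # start driving forward
35 | while True:
36 |     dist = us_dist(15)       # read the ultrasonic sensor on port A1
37 |     if dist < distance_to_stop:
38 |         stop()               # obstacle too close - stop the GoPiGo
39 |         break
40 |     time.sleep(0.1)
41 | ```
42 |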
--------------------------------------------------------------------------------
/Gopigo_obstacle_avoidance_robot/basic_obstacle_avoidv0.001.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | ########################################################################
3 | # This example demonstrates using the Ultrasonic sensor with the GoPiGo
4 | #
5 | # In this example, the GoPiGo keeps reading from the ultrasonic sensor, and when it gets close to an obstacle, it stops.
6 | #
7 | # http://www.dexterindustries.com/GoPiGo/
8 | # History
9 | # ------------------------------------------------
10 | # Author Date Comments
11 | # Karan 21 Aug 14 Initial Authoring
12 | #
13 | '''
14 | ## License
15 | GoPiGo for the Raspberry Pi: an open source robotics platform for the Raspberry Pi.
16 | Copyright (C) 2015 Dexter Industries
17 |
18 | This program is free software: you can redistribute it and/or modify
19 | it under the terms of the GNU General Public License as published by
20 | the Free Software Foundation, either version 3 of the License, or
21 | (at your option) any later version.
22 |
23 | This program is distributed in the hope that it will be useful,
24 | but WITHOUT ANY WARRANTY; without even the implied warranty of
25 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
26 | GNU General Public License for more details.
27 |
28 | You should have received a copy of the GNU General Public License
29 | along with this program. If not, see <http://www.gnu.org/licenses/>.
30 | '''
31 | #
32 | ########################################################################
33 | #
34 | # ! Attach Ultrasonic sensor to A1 Port.
35 | #
36 | ########################################################################
37 | from gopigo import *
38 | import time
39 |
40 | distance_to_stop=20 #Distance from obstacle where the GoPiGo should stop
41 | print "Press ENTER to start"
42 | raw_input() #Wait for input to start
43 | fwd() #Start moving
44 |
45 | while True:
46 | dist=us_dist(15) #Find the distance of the object in front
47 | print "Dist:",dist,'cm'
48 | if dist<distance_to_stop:  #If the object is closer than "distance_to_stop", stop the GoPiGo
--------------------------------------------------------------------------------
/Gopigo_obstacle_avoidance_robot/basic_obstacle_avoidv0.002.py:
--------------------------------------------------------------------------------
19 | if dist>distance_to_stop:
20 | print "Distance to object is ",dist, "cm"
21 | else:
22 | subprocess.call("espeak -v english-us 'stopping'", shell=True)
23 | led_off(0) #Switching off LED 0
24 | led_off(1) #Switching off LED 1
25 | stop()
26 | break
27 | time.sleep(.1)
28 |
29 | move_forward()
30 |
--------------------------------------------------------------------------------
/Gopigo_obstacle_avoidance_robot/basic_obstacle_avoidv0.003.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from gopigo import *
4 | import time
5 | import sys
6 |
7 | led_on(0) #Switching on LED 0
8 | led_on(1) #Switching on LED 1
9 |
10 | distance_to_stop=10
11 | print "!!!! Press ENTER to get going !!!!"
12 | raw_input()
13 |
14 | def lights_on():
15 | led_on(0) #Switching on LED 0
16 | led_on(1) #Switching on LED 1
17 |
18 | def lights_off():
19 | led_off(0) #Switching off LED 0
20 | led_off(1) #Switching off LED 1
21 |
22 | def reset_servo():
23 | enable_servo()
24 | servo(0)
25 | time.sleep(2)
26 | servo(180)
27 | time.sleep(2)
28 | servo(80)
29 | time.sleep(2)
30 | disable_servo()
31 |
32 | def move_forward():
33 | reset_servo()
34 | subprocess.call("espeak -v english-us 'moving forward'", shell=True)
35 | fwd()
36 | while True:
37 | dist=us_dist(15)
38 | if dist>distance_to_stop:
39 | print "Distance to object is ",dist, "cm"
40 | else:
41 | stop()
42 | subprocess.call("espeak -v english-us 'stopping'", shell=True)
43 | time.sleep(2)
44 | subprocess.call("espeak -v english-us 'reversing'", shell=True)
45 | bwd()
46 | time.sleep(1)
47 | #lights_off()
48 | stop()
49 | time.sleep(2)
50 | distance_gauge()
51 | #break
52 | #time.sleep(.1)
53 |
54 | def distance_gauge():
55 | subprocess.call("espeak -v english-us 'checking optimal route'", shell=True)
56 | enable_servo()
57 | lights_off()
58 | lights_on()
59 | servo(0)
60 | distance1=us_dist(15)
61 | lights_off()
62 | time.sleep(1)
63 | lights_on()
64 | servo(45)
65 | distance2=us_dist(15)
66 | lights_off()
67 | time.sleep(1)
68 | lights_on()
69 | servo(90)
70 | distance3=us_dist(15)
71 | lights_off()
72 | time.sleep(1)
73 | lights_on()
74 | servo(135)
75 | distance4=us_dist(15)
76 | lights_off()
77 | time.sleep(1)
78 | lights_on()
79 | servo(160)
80 | distance5=us_dist(15)
81 | lights_off()
82 | time.sleep(1)
83 | lights_on()
84 | reset_servo()
85 | #disable_servo()
86 | if distance1>distance5:
87 | subprocess.call("espeak -v english-us 'turning right'", shell=True)
88 | right_rot()
89 | time.sleep(1)
90 | subprocess.call("espeak -v english-us 'stopping'", shell=True)
91 | stop()
92 | time.sleep(1)
93 | subprocess.call("espeak -v english-us 'moving forward'", shell=True)
94 | move_forward()
95 | elif distance5>distance1:
96 | subprocess.call("espeak -v english-us 'turning left'", shell=True)
97 | left_rot()
98 | time.sleep(1)
99 | subprocess.call("espeak -v english-us 'stopping'", shell=True)
100 | stop()
101 | time.sleep(1)
102 | subprocess.call("espeak -v english-us 'moving forward'", shell=True)
103 | move_forward()
104 | else:
105 | subprocess.call("espeak -v english-us 'Am blocked on all sides darling. Get me out of here.'", shell=True)
106 | stop()
107 | lights_off()
108 |
109 | move_forward()
110 |
111 |
112 |
--------------------------------------------------------------------------------
/Gopigo_obstacle_avoidance_robot/basic_obstacle_avoidv0.004.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from gopigo import *
4 | import time
5 | import sys
6 |
7 | led_on(0) #Switching on LED 0
8 | led_on(1) #Switching on LED 1
9 |
10 | distance_to_stop=40
11 | print "!!!! Press ENTER to get going !!!!"
12 | raw_input()
13 |
14 | def lights_on():
15 | led_on(0) #Switching on LED 0
16 | led_on(1) #Switching on LED 1
17 |
18 | def lights_off():
19 | led_off(0) #Switching off LED 0
20 | led_off(1) #Switching off LED 1
21 |
22 | def reset_servo():
23 | enable_servo()
24 | servo(0)
25 | time.sleep(2)
26 | #servo(180)
27 | #time.sleep(2)
28 | servo(80)
29 | time.sleep(2)
30 | disable_servo()
31 |
32 | def move_forward():
33 | reset_servo()
34 | subprocess.call("espeak -v english-us 'moving forward'", shell=True)
35 | fwd()
36 | while True:
37 | dist=us_dist(15)
38 | if dist>distance_to_stop:
39 | print "Distance to object is ",dist, "cm"
40 | else:
41 | stop()
42 | subprocess.call("espeak -v english-us 'stopping'", shell=True)
43 | time.sleep(2)
44 | subprocess.call("espeak -v english-us 'reversing'", shell=True)
45 | bwd()
46 | time.sleep(1)
47 | #lights_off()
48 | stop()
49 | time.sleep(2)
50 | distance_gauge()
51 | #break
52 | #time.sleep(.1)
53 |
54 | def distance_gauge():
55 | subprocess.call("espeak -v english-us 'checking optimal route'", shell=True)
56 | enable_servo()
57 | lights_off()
58 | lights_on()
59 | servo(0)
60 | distance1=us_dist(15)
61 | lights_off()
62 | time.sleep(1)
63 | lights_on()
64 | servo(45)
65 | distance2=us_dist(15)
66 | lights_off()
67 | time.sleep(1)
68 | lights_on()
69 | servo(90)
70 | distance3=us_dist(15)
71 | lights_off()
72 | time.sleep(1)
73 | lights_on()
74 | servo(135)
75 | distance4=us_dist(15)
76 | lights_off()
77 | time.sleep(1)
78 | lights_on()
79 | servo(160)
80 | distance5=us_dist(15)
81 | lights_off()
82 | time.sleep(1)
83 | lights_on()
84 | reset_servo()
85 | #disable_servo()
86 | if distance1>distance5:
87 | subprocess.call("espeak -v english-us 'turning right'", shell=True)
88 | right_rot()
89 | time.sleep(.05)
90 | subprocess.call("espeak -v english-us 'stopping'", shell=True)
91 | stop()
92 | time.sleep(1)
93 | subprocess.call("espeak -v english-us 'moving forward'", shell=True)
94 | move_forward()
95 | elif distance5>distance1:
96 | subprocess.call("espeak -v english-us 'turning left'", shell=True)
97 | left_rot()
98 | time.sleep(.05)
99 | subprocess.call("espeak -v english-us 'stopping'", shell=True)
100 | stop()
101 | time.sleep(1)
102 | subprocess.call("espeak -v english-us 'moving forward'", shell=True)
103 | move_forward()
104 | else:
105 | subprocess.call("espeak -v english-us 'Am blocked on all sides darling. Get me out of here.'", shell=True)
106 | stop()
107 | lights_off()
108 |
109 | move_forward()
110 |
111 |
112 |
--------------------------------------------------------------------------------
/Gopigo_obstacle_avoidance_robot/basic_obstacle_avoidv0.005.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from gopigo import *
4 | import time
5 | import sys
6 |
7 |
8 | def lights_on():
9 | led_on(0) #Switching on LED 0
10 | led_on(1) #Switching on LED 1
11 |
12 | def lights_off():
13 | led_off(0) #Switching off LED 0
14 | led_off(1) #Switching off LED 1
15 |
16 | def reset_servo():
17 | enable_servo()
18 | servo(0)
19 | time.sleep(2)
20 | #servo(180)
21 | #time.sleep(2)
22 | servo(80)
23 | time.sleep(2)
24 | disable_servo()
25 |
26 | def set_gpg_speed():
27 | set_left_speed(150)
28 | set_right_speed(150)
29 |
30 | def move_forward():
31 | reset_servo()
32 | subprocess.call("espeak -v english-us 'moving forward'", shell=True)
33 | set_gpg_speed()
34 | fwd()
35 | while True:
36 | dist=us_dist(15)
37 | if dist>distance_to_stop:
38 | print "Distance to object is ",dist, "cm"
39 | else:
40 | stop()
41 | subprocess.call("espeak -v english-us 'stopping'", shell=True)
42 | time.sleep(2)
43 | subprocess.call("espeak -v english-us 'reversing'", shell=True)
44 | set_gpg_speed()
45 | bwd()
46 | time.sleep(1)
47 | #lights_off()
48 | stop()
49 | time.sleep(2)
50 | distance_gauge()
51 | #break
52 | #time.sleep(.1)
53 |
54 | def distance_gauge():
55 | subprocess.call("espeak -v english-us 'checking optimal route'", shell=True)
56 | enable_servo()
57 | lights_off()
58 | lights_on()
59 | servo(0)
60 | distance1=us_dist(15)
61 | lights_off()
62 | time.sleep(1)
63 | lights_on()
64 | servo(45)
65 | distance2=us_dist(15)
66 | lights_off()
67 | time.sleep(1)
68 | lights_on()
69 | servo(90)
70 | distance3=us_dist(15)
71 | lights_off()
72 | time.sleep(1)
73 | lights_on()
74 | servo(135)
75 | distance4=us_dist(15)
76 | lights_off()
77 | time.sleep(1)
78 | lights_on()
79 | servo(160)
80 | distance5=us_dist(15)
81 | lights_off()
82 | time.sleep(1)
83 | lights_on()
84 | reset_servo()
85 | #disable_servo()
86 | if distance1>distance5:
87 | subprocess.call("espeak -v english-us 'turning right mate'", shell=True)
88 | #right_rot()
89 | set_gpg_speed()
90 | right()
91 | time.sleep(.05)
92 | subprocess.call("espeak -v english-us 'stopping'", shell=True)
93 | stop()
94 | time.sleep(1)
95 | subprocess.call("espeak -v english-us 'moving forward mate'", shell=True)
96 | move_forward()
97 | elif distance5>distance1:
98 | subprocess.call("espeak -v english-us 'turning left mate'", shell=True)
99 | #left_rot()
100 | left()
101 | set_gpg_speed()
102 | time.sleep(.05)
103 | subprocess.call("espeak -v english-us 'stopping'", shell=True)
104 | stop()
105 | time.sleep(1)
106 | subprocess.call("espeak -v english-us 'moving forward mate'", shell=True)
107 | move_forward()
108 | else:
109 | subprocess.call("espeak -v english-us 'Am blocked on all sides darling. Get me out of here.'", shell=True)
110 | stop()
111 | lights_off()
112 |
113 |
114 | distance_to_stop=70
115 | print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
116 | print "!!!! Press ENTER to get going !!!!"
117 | print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
118 | raw_input()
119 | lights_on()
120 | move_forward()
121 |
122 |
123 |
--------------------------------------------------------------------------------
/Gopigo_obstacle_avoidance_robot/basic_obstacle_avoidv0.006.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from gopigo import *
4 | import time
5 | import sys
6 |
7 |
8 |
9 | def lights_on():
10 | led_on(0) #Switching on LED 0
11 | led_on(1) #Switching on LED 1
12 |
13 | def lights_off():
14 | led_off(0) #Switching off LED 0
15 | led_off(1) #Switching off LED 1
16 |
17 | def reset_servo():
18 | enable_servo()
19 | servo(0)
20 | time.sleep(2)
21 | #servo(180)
22 | #time.sleep(2)
23 | servo(80)
24 | time.sleep(2)
25 | disable_servo()
26 |
27 | def set_gpg_speed():
28 | set_left_speed(150)
29 | set_right_speed(150)
30 |
31 | def move_forward():
32 | reset_servo()
33 | subprocess.call("espeak -v english-us 'moving forward'", shell=True)
34 | set_gpg_speed()
35 | fwd()
36 | while True:
37 | dist=us_dist(15)
38 | if dist>distance_to_stop:
39 | print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
40 | print "Distance to object is ",dist, "cm"
41 | print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
42 | else:
43 | stop()
44 | subprocess.call("espeak -v english-us 'stopping'", shell=True)
45 | time.sleep(2)
46 | subprocess.call("espeak -v english-us 'reversing'", shell=True)
47 | set_gpg_speed()
48 | bwd()
49 | time.sleep(1)
50 | #lights_off()
51 | stop()
52 | time.sleep(2)
53 | distance_gauge()
54 | #break
55 | #time.sleep(.1)
56 |
57 | def distance_gauge():
58 | subprocess.call("espeak -v english-us 'checking optimal route'", shell=True)
59 | enable_servo()
60 | lights_off()
61 | lights_on()
62 | servo(0)
63 | distance1=us_dist(15)
64 | lights_off()
65 | time.sleep(1)
66 | lights_on()
67 | servo(45)
68 | distance2=us_dist(15)
69 | lights_off()
70 | time.sleep(1)
71 | lights_on()
72 | servo(90)
73 | distance3=us_dist(15)
74 | lights_off()
75 | time.sleep(1)
76 | lights_on()
77 | servo(135)
78 | distance4=us_dist(15)
79 | lights_off()
80 | time.sleep(1)
81 | lights_on()
82 | servo(160)
83 | distance5=us_dist(15)
84 | lights_off()
85 | time.sleep(1)
86 | lights_on()
87 | reset_servo()
88 | #disable_servo()
89 | if distance1>distance5:
90 | subprocess.call("espeak -v english-us 'turning right'", shell=True)
91 | #right_rot()
92 | set_gpg_speed()
93 | right()
94 | time.sleep(.05)
95 | subprocess.call("espeak -v english-us 'stopping'", shell=True)
96 | stop()
97 | time.sleep(1)
98 | subprocess.call("espeak -v english-us 'moving forward'", shell=True)
99 | move_forward()
100 | elif distance5>distance1:
101 | subprocess.call("espeak -v english-us 'turning left'", shell=True)
102 | #left_rot()
103 | left()
104 | set_gpg_speed()
105 | time.sleep(.05)
106 | subprocess.call("espeak -v english-us 'stopping'", shell=True)
107 | stop()
108 | time.sleep(1)
109 | subprocess.call("espeak -v english-us 'moving forward mate'", shell=True)
110 | move_forward()
111 | else:
112 | subprocess.call("espeak -v english-us 'Am blocked on all sides darling. Get me out of here.'", shell=True)
113 | stop()
114 | lights_off()
115 |
116 |
117 | distance_to_stop=40
118 | print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
119 | print "!!!! Press ENTER to get going !!!!"
120 | print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
121 | raw_input()
122 | lights_on()
123 | move_forward()
124 |
125 |
126 |
--------------------------------------------------------------------------------
/Gopigo_robot_Pygame_GUI_conrolled/Basic_Raspberry_Pi_Robot_Control_GUI.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/Gopigo_robot_Pygame_GUI_conrolled/Basic_Raspberry_Pi_Robot_Control_GUI.jpg
--------------------------------------------------------------------------------
/Gopigo_robot_Pygame_GUI_conrolled/README.md:
--------------------------------------------------------------------------------
1 | ## Basic Robot Control GUI
2 | ### Basic example for controlling the GoPiGo using the Keyboard and a GUI on VNC
3 |
4 | 
5 |
6 | Contributed by casten on GitHub https://github.com/DexterInd/GoPiGo/pull/112
7 |
8 | **Control:**
9 |
10 | - w: Move forward
11 | - a: Turn left
12 | - d: Turn right
13 | - s: Move back
14 | - x: Stop
15 | - t: Increase speed
16 | - g: Decrease speed
17 | - z: Exit
18 |
19 | **Usage:**
20 | Open LXTerminal in VNC or Pi Desktop
21 |
22 | Go to the example folder:
23 |
24 | `cd Desktop/GoPiGo/Software/Python/Examples/Basic_Robot_Control/GUI`
25 |
26 | Run the example:
27 |
28 | `bash# python ./basic_robot_gui.py`
29 |
30 | 
31 |
32 | 
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/Gopigo_robot_Pygame_GUI_conrolled/basic_robot_gui_v.010.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | #############################################################################################################
3 | # Basic example for controlling the GoPiGo using the Keyboard
4 | # Contributed by casten on GitHub https://github.com/DexterInd/GoPiGo/pull/112
5 | #
6 | # This code lets you control the GoPiGo from the VNC or Pi Desktop. Also, these are non-blocking calls so it is much easier to use too.
7 | #
8 | # Controls:
9 | # w: Move forward
10 | # a: Turn left
11 | # d: Turn right
12 | # s: Move back
13 | # x: Stop
14 | # t: Increase speed
15 | # g: Decrease speed
16 | # z: Exit
17 | # http://www.dexterindustries.com/GoPiGo/
18 | # History
19 | # ------------------------------------------------
20 | # Author Date Comments
21 | # Karan 27 June 14 Code cleanup
22 | # Casten 31 Dec 15 Added async io, action until keyup
23 | # Karan 04 Jan 16 Cleaned up the GUI
24 |
25 | '''
26 | ## License
27 | GoPiGo for the Raspberry Pi: an open source robotics platform for the Raspberry Pi.
28 | Copyright (C) 2017 Dexter Industries
29 |
30 | This program is free software: you can redistribute it and/or modify
31 | it under the terms of the GNU General Public License as published by
32 | the Free Software Foundation, either version 3 of the License, or
33 | (at your option) any later version.
34 |
35 | This program is distributed in the hope that it will be useful,
36 | but WITHOUT ANY WARRANTY; without even the implied warranty of
37 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
38 | GNU General Public License for more details.
39 |
40 | You should have received a copy of the GNU General Public License
41 | along with this program. If not, see <http://www.gnu.org/licenses/>.
42 | '''
43 | ##############################################################################################################
44 |
45 | from gopigo import * #Has the basic functions for controlling the GoPiGo Robot
46 | import sys #Used for closing the running program
47 | import pygame #Gives access to KEYUP/KEYDOWN events
48 |
49 | #Initialization for pygame
50 | pygame.init()
51 | screen = pygame.display.set_mode((700, 400))
52 | pygame.display.set_caption('Remote Control Window')
53 |
54 | # Fill background
55 | background = pygame.Surface(screen.get_size())
56 | background = background.convert()
57 | background.fill((250, 250, 250))
58 |
59 | # Display some text
60 | instructions = '''
61 | BASIC GOPIGO CONTROL GUI
62 |
63 | This is a basic example for the GoPiGo Robot control
64 |
65 | (Be sure to put focus on this window to control the gopigo!)
66 |
67 | Press:
68 | ->w: Move GoPiGo Robot forward
69 | ->a: Turn GoPiGo Robot left
70 | ->d: Turn GoPiGo Robot right
71 | ->s: Move GoPiGo Robot backward
72 | ->t: Increase speed
73 | ->g: Decrease speed
74 | ->z: Exit
75 | ''';
76 | size_inc=22
77 | index=0
78 | for i in instructions.split('\n'):
79 | font = pygame.font.Font(None, 36)
80 | text = font.render(i, 1, (10, 10, 10))
81 | background.blit(text, (10,10+size_inc*index))
82 | index+=1
83 |
84 | # Blit everything to the screen
85 | screen.blit(background, (0, 0))
86 | pygame.display.flip()
87 |
88 | while True:
89 | event = pygame.event.wait();
90 | if (event.type == pygame.KEYUP):
91 | stop();
92 | continue;
93 | if (event.type != pygame.KEYDOWN):
94 | continue;
95 | char = event.unicode;
96 | if char=='w':
97 | fwd() ;# Move forward
98 | elif char=='a':
99 | left(); # Turn left
100 | elif char=='d':
101 | right();# Turn Right
102 | elif char=='s':
103 | bwd();# Move back
104 | elif char=='t':
105 | increase_speed(); # Increase speed
106 | elif char=='g':
107 | decrease_speed(); # Decrease speed
108 | elif char=='z':
109 | print "\nExiting"; # Exit
110 | sys.exit();
111 |
--------------------------------------------------------------------------------
/Gopigo_robot_Pygame_GUI_conrolled/basic_robot_gui_v.011.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #############################################################################################################
3 | # Basic example for controlling the GoPiGo using the Keyboard
4 | # Contributed by casten on GitHub https://github.com/DexterInd/GoPiGo/pull/112
5 | #
6 | # This code lets you control the GoPiGo from the VNC or Pi Desktop. Also, these are non-blocking calls so it is much easier to use too.
7 | #
8 | # Controls:
9 | # w: Move forward
10 | # a: Turn left
11 | # d: Turn right
12 | # s: Move back
13 | # x: Stop
14 | # t: Increase speed
15 | # g: Decrease speed
16 | # z: Exit
17 | # http://www.dexterindustries.com/GoPiGo/
18 | # History
19 | # ------------------------------------------------
20 | # Author Date Comments
21 | # Karan 27 June 14 Code cleanup
22 | # Casten 31 Dec 15 Added async io, action until keyup
23 | # Karan 04 Jan 16 Cleaned up the GUI
24 |
25 | '''
26 | ## License
27 | GoPiGo for the Raspberry Pi: an open source robotics platform for the Raspberry Pi.
28 | Copyright (C) 2017 Dexter Industries
29 |
30 | This program is free software: you can redistribute it and/or modify
31 | it under the terms of the GNU General Public License as published by
32 | the Free Software Foundation, either version 3 of the License, or
33 | (at your option) any later version.
34 |
35 | This program is distributed in the hope that it will be useful,
36 | but WITHOUT ANY WARRANTY; without even the implied warranty of
37 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
38 | GNU General Public License for more details.
39 |
40 | You should have received a copy of the GNU General Public License
41 | along with this program. If not, see <http://www.gnu.org/licenses/>.
42 | '''
43 | ##############################################################################################################
44 |
45 | from gopigo import * #Has the basic functions for controlling the GoPiGo Robot
46 | import sys #Used for closing the running program
47 | import pygame #Gives access to KEYUP/KEYDOWN events
48 |
49 | #Initialization for pygame
50 | pygame.init()
51 | screen = pygame.display.set_mode((700, 400))
52 | pygame.display.set_caption('Remote Control Window')
53 |
54 | # Fill background
55 | background = pygame.Surface(screen.get_size())
56 | background = background.convert()
57 | background.fill((250, 250, 250))
58 |
59 | # Display some text
60 | instructions = '''
61 | BASIC GOPIGO CONTROL GUI
62 |
63 | This is a basic example for the GoPiGo Robot control
64 |
65 | (Be sure to put focus on this window to control the gopigo!)
66 |
67 | Press:
68 | ->w: Move GoPiGo Robot forward
69 | ->a: Turn GoPiGo Robot left
70 | ->d: Turn GoPiGo Robot right
71 | ->s: Move GoPiGo Robot backward
72 | ->t: Increase speed
73 | ->g: Decrease speed
74 | ->z: Exit
75 | ''';
76 | size_inc=22
77 | index=0
78 | for i in instructions.split('\n'):
79 | font = pygame.font.Font(None, 36)
80 | text = font.render(i, 1, (10, 10, 10))
81 | background.blit(text, (10,10+size_inc*index))
82 | index+=1
83 |
84 | # Blit everything to the screen
85 | screen.blit(background, (0, 0))
86 | pygame.display.flip()
87 |
88 | while True:
89 | event = pygame.event.wait();
90 | if (event.type == pygame.KEYUP):
91 | stop();
92 | continue;
93 | if (event.type != pygame.KEYDOWN):
94 | continue;
95 | char = event.unicode;
96 | if char=='w':
97 | fwd() ;# Move forward
98 | elif char=='a':
99 | left(); # Turn left
100 | elif char=='d':
101 | right();# Turn Right
102 | elif char=='s':
103 | bwd();# Move back
104 | elif char=='t':
105 | increase_speed(); # Increase speed
106 | elif char=='g':
107 | decrease_speed(); # Decrease speed
108 | elif char=='z':
109 | print "\nExiting"; # Exit
110 | sys.exit();
111 |
--------------------------------------------------------------------------------
/Gopigo_robot_Pygame_GUI_conrolled/basic_robot_gui_v.012.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #############################################################################################################
3 | # Basic example for controlling the GoPiGo using the Keyboard
4 | # Contributed by casten on GitHub https://github.com/DexterInd/GoPiGo/pull/112
5 | #
6 | # This code lets you control the GoPiGo from the VNC or Pi Desktop. Also, these are non-blocking calls so it is much easier to use too.
7 | #
8 | # Controls:
9 | # w: Move forward
10 | # a: Turn left
11 | # d: Turn right
12 | # s: Move back
13 | # x: Stop
14 | # t: Increase speed
15 | # g: Decrease speed
16 | # z: Exit
17 | # http://www.dexterindustries.com/GoPiGo/
18 | # History
19 | # ------------------------------------------------
20 | # Author Date Comments
21 | # Karan 27 June 14 Code cleanup
22 | # Casten 31 Dec 15 Added async io, action until keyup
23 | # Karan 04 Jan 16 Cleaned up the GUI
24 |
25 | '''
26 | ## License
27 | GoPiGo for the Raspberry Pi: an open source robotics platform for the Raspberry Pi.
28 | Copyright (C) 2017 Dexter Industries
29 |
30 | This program is free software: you can redistribute it and/or modify
31 | it under the terms of the GNU General Public License as published by
32 | the Free Software Foundation, either version 3 of the License, or
33 | (at your option) any later version.
34 |
35 | This program is distributed in the hope that it will be useful,
36 | but WITHOUT ANY WARRANTY; without even the implied warranty of
37 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
38 | GNU General Public License for more details.
39 |
40 | You should have received a copy of the GNU General Public License
41 | along with this program. If not, see <http://www.gnu.org/licenses/>.
42 | '''
43 | ##############################################################################################################
44 |
45 | from gopigo import * #Has the basic functions for controlling the GoPiGo Robot
46 | import sys #Used for closing the running program
47 | import pygame #Gives access to KEYUP/KEYDOWN events
48 |
49 | #Initialization for pygame
50 | pygame.init()
51 | screen = pygame.display.set_mode((700, 400))
52 | pygame.display.set_caption('Remote Control Window')
53 |
54 | # Fill background
55 | background = pygame.Surface(screen.get_size())
56 | background = background.convert()
57 | background.fill((250, 250, 250))
58 |
59 | # Display some text
60 | instructions = '''
61 | BASIC GOPIGO CONTROL GUI
62 |
63 | This is a basic example for the GoPiGo Robot control
64 |
65 | (Be sure to put focus on this window to control the GoPiGo!)
66 |
67 | Press:
68 | ->p: Move GoPiGo Robot forward
69 | ->q: Turn GoPiGo Robot left
70 | ->a: Turn GoPiGo Robot right
71 | ->l: Move GoPiGo Robot backward
72 | ->t: Increase speed
73 | ->g: Decrease speed
74 | ->z: Exit
75 | ''';
76 | size_inc=22
77 | index=0
78 | for i in instructions.split('\n'):
79 | font = pygame.font.Font(None, 36)
80 | text = font.render(i, 1, (10, 10, 10))
81 | background.blit(text, (10,10+size_inc*index))
82 | index+=1
83 |
84 | # Blit everything to the screen
85 | screen.blit(background, (0, 0))
86 | pygame.display.flip()
87 |
88 | while True:
89 | event = pygame.event.wait();
90 | if (event.type == pygame.KEYUP):
91 | stop();
92 | continue;
93 | if (event.type != pygame.KEYDOWN):
94 | continue;
95 | char = event.unicode;
96 | if char=='p':
97 | fwd() ;# Move forward
98 | elif char=='q':
99 | left(); # Turn left
100 | elif char=='a':
101 | right();# Turn Right
102 | elif char=='l':
103 | bwd();# Move back
104 | elif char=='t':
105 | increase_speed(); # Increase speed
106 | elif char=='g':
107 | decrease_speed(); # Decrease speed
108 | elif char=='z':
109 | print "\nExiting"; # Exit
110 | sys.exit();
111 |
--------------------------------------------------------------------------------
/Gopigo_robot_Pygame_GUI_conrolled/fswebcam.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | i=0
4 | while [ $i -eq 0 ]
5 | do
6 | sudo /usr/bin/fswebcam /var/www/html/usbcam/capturedimage.jpg
7 | sleep 2s
8 | done
9 |
--------------------------------------------------------------------------------
/Gopigo_robot_keyboard_conrolled/Basic_Raspberry_Pi_Robot_Control.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/Gopigo_robot_keyboard_conrolled/Basic_Raspberry_Pi_Robot_Control.jpg
--------------------------------------------------------------------------------
/Gopigo_robot_keyboard_conrolled/README.md:
--------------------------------------------------------------------------------
1 | ## Basic Robot Control
2 | ### Basic example for controlling the GoPiGo using the Keyboard
3 |
4 | 
5 |
6 | **Controls:** (a compact key-to-action sketch follows below)
7 |
8 | - w: Move forward
9 | - a: Turn left
10 | - d: Turn right
11 | - s: Move back
12 | - x: Stop
13 | - t: Increase speed
14 | - g: Decrease speed
15 | - z: Exit
16 |
17 |
18 | 
19 |
20 | 
21 |
22 |
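The two scripts that follow implement these bindings with an if/elif chain over each character read from the terminal. The same mapping can be expressed as a small dispatch table; a minimal sketch, assuming the standard gopigo helper functions (fwd, left, right, bwd, stop, increase_speed, decrease_speed) that the scripts below already use:

```python
from gopigo import fwd, left, right, bwd, stop, increase_speed, decrease_speed

# Key -> action table mirroring the controls listed above
ACTIONS = {
    'w': fwd,             # move forward
    'a': left,            # turn left
    'd': right,           # turn right
    's': bwd,             # move back
    'x': stop,            # stop
    't': increase_speed,  # increase speed
    'g': decrease_speed,  # decrease speed
}

def handle_key(key):
    """Run the action bound to key; return False if the key is unbound."""
    action = ACTIONS.get(key)
    if action is None:
        return False
    action()
    return True
```

Keeping the bindings in one table makes it easier to change keys (as basic_robot_v0.002.py does with p/q/a/l) without touching the control flow.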
--------------------------------------------------------------------------------
/Gopigo_robot_keyboard_conrolled/basic_robot_v0.001.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | #############################################################################################################
3 | # Basic example for controlling the GoPiGo using the Keyboard
4 | # Controls:
5 | # w: Move forward
6 | # a: Turn left
7 | # d: Turn right
8 | # s: Move back
9 | # x: Stop
10 | # t: Increase speed
11 | # g: Decrease speed
12 | # z: Exit
13 | # http://www.dexterindustries.com/GoPiGo/
14 | # History
15 | # ------------------------------------------------
16 | # Author Date Comments
17 | # Karan 27 June 14 Code cleanup
18 | '''
19 | ## License
20 | GoPiGo for the Raspberry Pi: an open source robotics platform for the Raspberry Pi.
21 | Copyright (C) 2015 Dexter Industries
22 |
23 | This program is free software: you can redistribute it and/or modify
24 | it under the terms of the GNU General Public License as published by
25 | the Free Software Foundation, either version 3 of the License, or
26 | (at your option) any later version.
27 |
28 | This program is distributed in the hope that it will be useful,
29 | but WITHOUT ANY WARRANTY; without even the implied warranty of
30 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
31 | GNU General Public License for more details.
32 |
33 | You should have received a copy of the GNU General Public License
34 | along with this program. If not, see <http://www.gnu.org/licenses/>.
35 | '''
36 | #
37 | ##############################################################################################################
38 |
39 | from gopigo import * #Has the basic functions for controlling the GoPiGo Robot
40 | import sys #Used for closing the running program
41 | print "This is a basic example for the GoPiGo Robot control"
42 | print "Press:\n\tw: Move GoPiGo Robot forward\n\ta: Turn GoPiGo Robot left\n\td: Turn GoPiGo Robot right\n\ts: Move GoPiGo Robot backward\n\tt: Increase speed\n\tg: Decrease speed\n\tx: Stop GoPiGo Robot\n\tz: Exit\n"
43 | while True:
44 | print "Enter the Command:",
45 | a=raw_input() # Fetch the input from the terminal
46 | if a=='w':
47 | fwd() # Move forward
48 | elif a=='a':
49 | left() # Turn left
50 | elif a=='d':
51 | right() # Turn Right
52 | elif a=='s':
53 | bwd() # Move back
54 | elif a=='x':
55 | stop() # Stop
56 | elif a=='t':
57 | increase_speed() # Increase speed
58 | elif a=='g':
59 | decrease_speed() # Decrease speed
60 | elif a=='z':
61 | print "Exiting" # Exit
62 | sys.exit()
63 | else:
64 | print "Wrong Command, Please Enter Again"
65 | time.sleep(.1)
--------------------------------------------------------------------------------
/Gopigo_robot_keyboard_conrolled/basic_robot_v0.002.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from gopigo import * #Has the basic functions for controlling the GoPiGo Robot
4 | import subprocess #Needed because the calls below use subprocess.call()
5 | import sys #Used for closing the running program
6 |
7 | print "This is a basic example for the GoPiGo Robot control"
8 | print "Press:\n\tp: Move GoPiGo Robot forward\n\tq: Turn GoPiGo Robot left\n\ta: Turn GoPiGo Robot right\n\tl: Move GoPiGo Robot backward\n\tt: Increase speed\n\tg: Decrease speed\n\tx: Stop GoPiGo Robot\n\tz: Exit\n"
9 | while True:
10 | print "Enter the Command:",
11 | a=raw_input() # Fetch the input from the terminal
12 | if a=='p':
13 | subprocess.call("espeak -v english-us 'moving forward'", shell=True)
14 | led_on(0)
15 | led_on(1)
16 | fwd() # Move forward
17 | elif a=='q':
18 | subprocess.call("espeak -v english-us 'turning left'", shell=True)
19 | led_on(0)
20 | led_on(1)
21 | left() # Turn left
22 | elif a=='a':
23 | subprocess.call("espeak -v english-us 'turning right'", shell=True)
24 | led_on(0)
25 | led_on(1)
26 | right() # Turn Right
27 | elif a=='l':
28 | subprocess.call("espeak -v english-us 'moving back'", shell=True)
29 | led_on(0)
30 | led_on(1)
31 | bwd() # Move back
32 | elif a=='x':
33 | stop() # Stop
34 | led_off(0)
35 | led_off(1)
36 | subprocess.call("espeak -v english-us 'stopping'", shell=True)
37 | elif a=='t':
38 | increase_speed() # Increase speed
39 | elif a=='g':
40 | decrease_speed() # Decrease speed
41 | elif a=='z':
42 | print "Exiting" # Exit
43 | sys.exit()
44 | else:
45 | print "Wrong Command, Please Enter Again"
46 | time.sleep(.1)
47 | # print us_dist(15),'cm'
48 |
--------------------------------------------------------------------------------
/HomeConfig/Static_WebCam_View/usbcam.php:
--------------------------------------------------------------------------------
1 | <?php
2 | // 1> Add the following lines to /etc/sudoers
7 | // www-data ALL=NOPASSWD: /usr/bin/fswebcam
8 | // www-data ALL=NOPASSWD: /bin/webcam.sh
9 | // www-data ALL=NOPASSWD: /bin/chown
10 |
11 | // 2> Create a script called /bin/webcam.sh with the following content
12 | // sudo /usr/bin/fswebcam -r 640x480 /var/www/html/usbcam/capturedimage.jpg
13 |
14 |
15 | //Capturing the image
16 | //exec('sudo /usr/bin/fswebcam -r 640x480 /var/www/html/usbcam/capturedimage.jpg');
17 | exec('sudo /bin/webcam.sh');
18 |
19 | //Sleep for 5s
20 | sleep(5);
21 |
22 | exec('sudo /bin/chown -R www-data:www-data /var/www/html/usbcam/capturedimage.jpg');
23 |
24 | //header("Content-Type: application/html");
25 | header("Expires: 0");
26 | //header("Last-Modified: " . gmdate("D, d M Y H:i:s") . " GMT");
27 | header("Cache-Control: no-store, no-cache, must-revalidate max-ago=0");
28 | header("Cache-Control: post-check=0, pre-check=0", false);
29 | header("Pragma: no-cache");
30 |
31 | print "<html>";
32 | print "<head>";
33 | print "<title>Webcam View - tangowhisky37 Study</title>";
36 | print "</head>";
37 | print "<body>";
40 | print "<h1>Webcam View - tangowhisky37 Study</h1>";
44 | print "<img src='capturedimage.jpg'>";
48 | print "<p>This page is manually refreshed. Please wait for the entire page to load before you hit refresh.</p>";
50 | print "</body>";
51 | print "</html>";
52 |
53 | //Sleep for 10s
54 | //sleep(10);
55 |
56 |
57 | ?>
58 |
59 |
--------------------------------------------------------------------------------
/HomeConfig/default.sitemap:
--------------------------------------------------------------------------------
1 | sitemap demo label="Home Automation @ 37 Ronald Av" {
2 | Frame label="Home Automation @ 37 Ronald Av" {
3 | Text label="Home Automation Dashboard"
4 | Text item=TimeInMelbourne_Date label="The Time & Date is [ %1$tA, %1$td/%1$tm/%1$tY %1$tH:%1$tM ]"
5 | }
6 | Frame label="Weather" {
7 | Text item=WeatherInformation_ForecastToday_MaximumTemperature label="Temp Max (Today) [%.1f °C]"
8 | Text item=WeatherInformation_ForecastToday_MinimumTemperature label="Temp Min (Today) [%.1f °C]"
9 | Text item=WeatherInformation_ForecastToday_RainFallDay label="Rain Forecasted (Today) [%.1f]"
10 | Text item=WeatherInformation_ForecastTomorrow_MaximumTemperature label="Temp Max (Tomorrow) [%.1f °C]"
11 | Text item=WeatherInformation_ForecastTomorrow_MinimumTemperature label="Temp Min (Tomorrow) [%.1f °C]"
12 | Text item=WeatherInformation_ForecastTomorrow_RainFallDay label="Rain Forecasted (Tomorrow) [%.1f]"
13 | }
14 | Frame label="Ground Floor Lights" {
15 | Switch item=MainKitchenSwitch icon="Light" label="Kitchen"
16 | Switch item=KitchenAdjacentOpenAreaSwitch icon="Light" label="Kitchen Open Area"
17 | Switch item=LivingRoom icon="Light" label="Living Room"
18 | Switch item=PassageWaySwitch icon="Light" label="Downstairs Passage Way Light"
19 | Switch item=MainDoorOutside icon="Light" label="Main Door / Entrance (Outside)"
20 | Switch item=DiningRoom icon="Light" label="Dining Room"
21 | Switch item=LivingRoomFloorLamps icon="Light" label="Living Room Floor Lamps"
22 | Switch item=DesktopLamp icon="Light" label="Study Desk Lamp"
23 | Switch item=BookShelfLamp icon="Light" label="Study Bookshelf Lamp"
24 | Switch item=LivingRoomNightLamp icon="Light" label="Living Room Night Lamp"
25 | Switch item=StudyDeskGlobe icon="Light" label="Study Desk Globe"
26 | Switch item=StudyFan icon="Light" label="Study Fan"
27 | }
28 | Frame label="First Floor Lights" {
29 | Switch item=StairwayLight_Brightness icon="Light" label="Stairway Light"
30 | Switch item=UpperPassageWayLight_Brightness icon="Light" label="Upstairs Passage Way Light"
31 | Switch item=MasterBedroomLight icon="Light" label="Master Bedroom Light"
32 | Switch item=MasterBedroomRHLamp icon="Light" label="Master Bedroom RH Table Lamp"
33 | Switch item=MasterBedroomLHLamp icon="Light" label="Master Bedroom LH Table Lamp"
34 | Switch item=GuestBedroomLamp icon="Light" label="Guest Bedroom Table Lamp"
35 | Switch item=JadynsRoomLamp icon="Light" label="Jadyns Room Table Lamp"
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/HomeConfig/http_switches.items:
--------------------------------------------------------------------------------
1 | Switch LivingRoomFloorLamps { http=">[ON:POST:http://192.168.1.24/socket1On:on] >[OFF:POST:http://192.168.1.24/socket1Off:off]" }
2 | Switch DesktopLamp { http=">[ON:POST:http://192.168.1.24/socket3On:on] >[OFF:POST:http://192.168.1.24/socket3Off:off]" }
3 | Switch BookShelfLamp { http=">[ON:POST:http://192.168.1.24/socket2On:on] >[OFF:POST:http://192.168.1.24/socket2Off:off]" }
4 | Switch LivingRoomNightLamp { http=">[ON:POST:http://192.168.1.24/socket4On:on] >[OFF:POST:http://192.168.1.24/socket4Off:off]" }
5 | Switch GuestBedroomLamp { http=">[ON:POST:https://maker.ifttt.com/trigger/TurnOnGuestBedroomLamp/with/key/Dzj4Tcd9vO05FUsKZfYtN:on] >[OFF:POST:https://maker.ifttt.com/trigger/TurnOffGuestBedroomLamp/with/key/Dzj4Tcd9vO05FUsKZfYtN:off]" }
6 | Switch JadynsRoomLamp { http=">[ON:POST:https://maker.ifttt.com/trigger/TurnOnJadynsRoomLamp/with/key/Dzj4Tcd9vO05FUsKZfYtN:on] >[OFF:POST:https://maker.ifttt.com/trigger/TurnOffJadynsRoomLamp/with/key/Dzj4Tcd9vO05FUsKZfYtN:off]" }
7 | Switch StudyDeskGlobe { http=">[ON:POST:https://maker.ifttt.com/trigger/StudyDeskGlobeOn/with/key/Dzj4Tcd9vO05FUsKZfYtN:on] >[OFF:POST:https://maker.ifttt.com/trigger/StudyDeskGlobeOff/with/key/Dzj4Tcd9vO05FUsKZfYtN:off]" }
8 | Switch MasterBedroomRHLamp { http=">[ON:POST:https://maker.ifttt.com/trigger/TurnOnMasterBedroomRHLamp/with/key/Dzj4Tcd9vO05FUsKZfYtN:on] >[OFF:POST:https://maker.ifttt.com/trigger/TurnOffMasterBedroomRHLamp/with/key/Dzj4Tcd9vO05FUsKZfYtN:off]" }
9 | Switch MasterBedroomLHLamp { http=">[ON:POST:https://maker.ifttt.com/trigger/TurnOnMasterBedroomLHLamp/with/key/Dzj4Tcd9vO05FUsKZfYtN:on] >[OFF:POST:https://maker.ifttt.com/trigger/TurnOffMasterBedroomLHLamp/with/key/Dzj4Tcd9vO05FUsKZfYtN:off]" }
10 | Switch StudyFan { http=">[ON:POST:https://maker.ifttt.com/trigger/StudyFanOn/with/key/Dzj4Tcd9vO05FUsKZfYtN:on] >[OFF:POST:https://maker.ifttt.com/trigger/StudyFanOff/with/key/Dzj4Tcd9vO05FUsKZfYtN:off]" }
11 |
--------------------------------------------------------------------------------
/HomeConfig/openhab2/cmdline.txt:
--------------------------------------------------------------------------------
1 | dwc_otg.lpm_enable=0 console=tty1 root=/dev/mmcblk0p2 rootfstype=ext4 elevator=deadline fsck.repair=yes rootwait quiet splash plymouth.ignore-serial-consoles
2 |
--------------------------------------------------------------------------------
/HomeConfig/openhab2/config.txt:
--------------------------------------------------------------------------------
1 | # For more options and information see
2 | # http://rpf.io/configtxtreadme
3 | # Some settings may impact device functionality. See link above for details
4 |
5 | # uncomment if you get no picture on HDMI for a default "safe" mode
6 | #hdmi_safe=1
7 |
8 | # uncomment this if your display has a black border of unused pixels visible
9 | # and your display can output without overscan
10 | #disable_overscan=1
11 |
12 | # uncomment the following to adjust overscan. Use positive numbers if console
13 | # goes off screen, and negative if there is too much border
14 | #overscan_left=16
15 | #overscan_right=16
16 | #overscan_top=16
17 | #overscan_bottom=16
18 |
19 | # uncomment to force a console size. By default it will be display's size minus
20 | # overscan.
21 | #framebuffer_width=1280
22 | #framebuffer_height=720
23 |
24 | # uncomment if hdmi display is not detected and composite is being output
25 | #hdmi_force_hotplug=1
26 |
27 | # uncomment to force a specific HDMI mode (this will force VGA)
28 | #hdmi_group=1
29 | #hdmi_mode=1
30 |
31 | # uncomment to force a HDMI mode rather than DVI. This can make audio work in
32 | # DMT (computer monitor) modes
33 | #hdmi_drive=2
34 |
35 | # uncomment to increase signal to HDMI, if you have interference, blanking, or
36 | # no display
37 | #config_hdmi_boost=4
38 |
39 | # uncomment for composite PAL
40 | #sdtv_mode=2
41 |
42 | #uncomment to overclock the arm. 700 MHz is the default.
43 | #arm_freq=800
44 |
45 | # Uncomment some or all of these to enable the optional hardware interfaces
46 | #dtparam=i2s=on
47 |
48 | # Uncomment this to enable the lirc-rpi module
49 | #dtoverlay=lirc-rpi
50 |
51 | # Additional overlays and parameters are documented /boot/overlays/README
52 |
53 | # Enable audio (loads snd_bcm2835)
54 | dtparam=audio=on
55 |
56 |
57 |
58 |
59 |
60 | start_x=1
61 | gpu_mem=128
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 | dtparam=spi=on
72 |
73 | init_uart_clock=32000000
74 |
75 |
76 | dtparam=i2c1=on
77 | dtparam=i2s=on
78 | dtparam=i2c_arm=on
79 | device_tree_param=i2c1=on
80 | #device_tree=
81 | dtparam=i2c1_baudrate=200000
82 | dtparam=i2c_arm_baudrate=200000
83 | #dtoverlay=pi3-miniuart-bt
84 | dtoverlay=w1-gpio
85 | enable_uart=1
86 |
87 |
88 | #Added by TRW to get openhab to use /dev/ttyACM0
89 | #Apparently bluetooth on the pi uses this device
90 | #This change was made on 050518
91 | dtoverlay=pi3-disable-bt
92 |
--------------------------------------------------------------------------------
/HomeConfig/openhab2/default.sitemap:
--------------------------------------------------------------------------------
1 | sitemap demo label="Home Automation @ 37 Ronald Av" {
2 | Frame label="Home Automation @ 37 Ronald Av" {
3 | Text label="Home Automation Dashboard"
4 | Text item=TimeInMelbourne_Date label="The Time & Date is [ %1$tA, %1$td/%1$tm/%1$tY %1$tH:%1$tM ]"
5 | }
6 | Frame label="Weather" {
7 | Text item=WeatherInformation_ForecastToday_MaximumTemperature label="Temp Max (Today) [%.1f °C]"
8 | Text item=WeatherInformation_ForecastToday_MinimumTemperature label="Temp Min (Today) [%.1f °C]"
9 | Text item=WeatherInformation_ForecastToday_RainFallDay label="Rain Forecasted (Today) [%.1f]"
10 | Text item=WeatherInformation_ForecastTomorrow_MaximumTemperature label="Temp Max (Tomorrow) [%.1f °C]"
11 | Text item=WeatherInformation_ForecastTomorrow_MinimumTemperature label="Temp Min (Tomorrow) [%.1f °C]"
12 | Text item=WeatherInformation_ForecastTomorrow_RainFallDay label="Rain Forecasted (Tomorrow) [%.1f]"
13 | }
14 | Frame label="Ground Floor Lights" {
15 | Switch item=MainKitchenSwitch icon="Light" label="Kitchen"
16 | Switch item=KitchenAdjacentOpenAreaSwitch icon="Light" label="Kitchen Open Area"
17 | Switch item=LivingRoom icon="Light" label="Living Room"
18 | Switch item=PassageWaySwitch icon="Light" label="Downstairs Passage Way Light"
19 | Switch item=MainDoorOutside icon="Light" label="Main Door / Entrance (Outside)"
20 | Switch item=DiningRoom icon="Light" label="Dining Room"
21 | Switch item=LivingRoomFloorLamps icon="Light" label="Living Room Floor Lamps"
22 | Switch item=DesktopLamp icon="Light" label="Study Desk Lamp"
23 | Switch item=BookShelfLamp icon="Light" label="Study Bookshelf Lamp"
24 | Switch item=LivingRoomNightLamp icon="Light" label="Living Room Night Lamp"
25 | Switch item=StudyDeskGlobe icon="Light" label="Study Desk Globe"
26 | Switch item=StudyFan icon="Light" label="Study Fan"
27 | }
28 | Frame label="First Floor Lights" {
29 | Switch item=StairwayLight_Brightness icon="Light" label="Stairway Light"
30 | Switch item=UpperPassageWayLight_Brightness icon="Light" label="Upstairs Passage Way Light"
31 | Switch item=MasterBedroomLight icon="Light" label="Master Bedroom Light"
32 | Switch item=MasterBedroomRHLamp icon="Light" label="Master Bedroom RH Table Lamp"
33 | Switch item=MasterBedroomLHLamp icon="Light" label="Master Bedroom LH Table Lamp"
34 | Switch item=GuestBedroomLamp icon="Light" label="Guest Bedroom Table Lamp"
35 | Switch item=JadynsRoomLamp icon="Light" label="Jadyns Room Table Lamp"
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/HomeConfig/openhab2/etc_default_openhab2:
--------------------------------------------------------------------------------
1 | # openHAB 2 service options
2 |
3 | #########################
4 | ## PORTS
5 | ## The ports openHAB will bind its HTTP/HTTPS web server to.
6 |
7 | #OPENHAB_HTTP_PORT=8080
8 | #OPENHAB_HTTPS_PORT=8443
9 |
10 | #########################
11 | ## BACKUP DIRECTORY
12 | ## Set the following variable to specify the backup location.
13 | ## runtime/bin/backup and runtime/bin/restore will use this path for the zip files.
14 |
15 | #OPENHAB_BACKUPS=/var/lib/openhab2/backups
16 |
17 | #########################
18 | ## JAVA OPTIONS
19 | ## Additional options for the JAVA_OPTS environment variable.
20 | ## These will be appended to the execution of the openHAB Java runtime in front of all other options.
21 | ##
22 | ## A couple of independent examples:
23 | ## EXTRA_JAVA_OPTS="-Dgnu.io.rxtx.SerialPorts=/dev/ttyAMA0"
24 | ## EXTRA_JAVA_OPTS="-Dgnu.io.rxtx.SerialPorts=/dev/ttyUSB0:/dev/ttyS0:/dev/ttyS2:/dev/ttyACM0:/dev/ttyAMA0"
25 | ## EXTRA_JAVA_OPTS="-Djna.library.path=/lib/arm-linux-gnueabihf/ -Duser.timezone=Europe/Berlin -Dgnu.io.rxtx.SerialPorts=/dev/ttyS0"
26 |
27 | EXTRA_JAVA_OPTS="-Dgnu.io.rxtx.SerialPorts=/dev/ttyACM0:/dev/ttyUSB0:/dev/ttyUSB-ZStick-5G:/dev/ttyACM1"
28 |
29 | #########################
30 | ## OPENHAB DEFAULTS PATHS
31 | ## The following settings override the default apt/rpm locations and should be used with caution.
32 | ## openHAB will fail to update itself if you're using different paths.
33 | ## Only set these if you are testing and are confident in debugging.
34 |
35 | #OPENHAB_HOME=/usr/share/openhab2
36 | #OPENHAB_CONF=/etc/openhab2
37 | #OPENHAB_RUNTIME=/usr/share/openhab2/runtime
38 | #OPENHAB_USERDATA=/var/lib/openhab2
39 | #OPENHAB_LOGDIR=/var/log/openhab2
40 |
41 | #########################
42 | ## OPENHAB USER AND GROUP
43 | ## The user and group that takes ownership of openHAB. Only available for init.d systems.
44 | ## To edit user and group for systemd, see the service file at /usr/lib/systemd/system/openhab2.service.
45 |
46 | #OPENHAB_USER=openhab
47 | #OPENHAB_GROUP=openhab
48 |
49 | #OPENHAB_USER=root
50 | #OPENHAB_GROUP=root
51 |
52 | #########################
53 | ## SYSTEMD START MODE
54 | ## The Karaf startmode for the openHAB runtime. Only available for systemctl/systemd systems.
55 | ## Defaults to daemon when unset here. Multiple options can be used without quotes.
56 | ## debug increases log output. daemon launches the Karaf/openHAB processes.
57 |
58 | #OPENHAB_STARTMODE=debug
59 |
--------------------------------------------------------------------------------
/HomeConfig/openhab2/etc_openhab2_services_weather.cfg:
--------------------------------------------------------------------------------
1 | # The apikey for the different weather providers, at least one must be specified
2 | # Note: Hamweather requires two apikeys: client_id=apikey, client_secret=apikey2
3 |
4 | #apikey.ForecastIo=
5 | apikey.OpenWeatherMap=2866c7dec86f0ad873d0f626dafcd20e
6 | #apikey.WorldWeatherOnline=
7 | #apikey.Wunderground=
8 | #apikey.Hamweather=
9 | #apikey2.Hamweather=
10 | #apikey.Meteoblue=
11 |
12 | # location configuration, you can specify multiple locations
13 | location..name=Australia/Melbourne
14 | location..latitude=-37.843472
15 | location..longitude=144.849552
16 | location..woeid=1103816
17 | location..provider=OpenWeatherMap
18 | #location..language=
19 | #location..updateInterval=
20 | #location..units=
21 |
22 | #location..name=
23 | #location..latitude= (not required for Yahoo provider)
24 | #location..longitude= (not required for Yahoo provider)
25 | #location..woeid= (required for Yahoo provider)
26 | #location..provider=
27 | #location..language=
28 | #location..updateInterval=
29 | #location..units=
30 |
31 |
--------------------------------------------------------------------------------
/HomeConfig/openhab2/extra_switches.items:
--------------------------------------------------------------------------------
1 | Switch KitchenDownlights "Kitchen Lights" [ "Lighting" ] {channel="zwave:device:6d8c4d92:node5:switch_binary1"}
2 | Switch ArenaDownlights "Arena Lights" [ "Lighting" ] {channel="zwave:device:6d8c4d92:node5:switch_binary2"}
3 | Switch DiningRoomDownlights "Dining Room Lights" [ "Lighting" ] {channel="zwave:device:6d8c4d92:node2:switch_binary1"}
4 | Switch LivingRoomDownlights "Living Room Lights" [ "Lighting" ] {channel="zwave:device:6d8c4d92:node3:switch_binary1"}
5 | Switch PassageWayDownlights "Passage Lights" [ "Lighting" ] {channel="zwave:device:6d8c4d92:node4:switch_binary1"}
6 |
--------------------------------------------------------------------------------
/HomeConfig/openhab2/http_switches.items:
--------------------------------------------------------------------------------
1 | Switch LivingRoomFloorLamps "Living Room Floor Lamps" [ "Lighting" ] { http=">[ON:POST:http://192.168.1.24/socket1On:on] >[OFF:POST:http://192.168.1.24/socket1Off:off]" }
2 | Switch DesktopLamp "Study Desk Lamp" [ "Lighting" ] { http=">[ON:POST:http://192.168.1.24/socket3On:on] >[OFF:POST:http://192.168.1.24/socket3Off:off]" }
3 | Switch BookShelfLamp "Study Book Shelf Lamp" [ "Lighting" ] { http=">[ON:POST:http://192.168.1.24/socket2On:on] >[OFF:POST:http://192.168.1.24/socket2Off:off]" }
4 | Switch LivingRoomNightLamp "Night Lamp" [ "Lighting" ] { http=">[ON:POST:http://192.168.1.24/socket4On:on] >[OFF:POST:http://192.168.1.24/socket4Off:off]" }
5 | Switch GuestBedroomLamp "Guest Bedroom Lamp" [ "Lighting" ] { http=">[ON:POST:https://maker.ifttt.com/trigger/TurnOnGuestBedroomLamp/with/key/Dzj4Tcd9vO05FUsKZfYtN:on] >[OFF:POST:https://maker.ifttt.com/trigger/TurnOffGuestBedroomLamp/with/key/Dzj4Tcd9vO05FUsKZfYtN:off]" }
6 | Switch JadynsRoomLamp "Jadyns Room Lamp" [ "Lighting" ] { http=">[ON:POST:https://maker.ifttt.com/trigger/TurnOnJadynsRoomLamp/with/key/Dzj4Tcd9vO05FUsKZfYtN:on] >[OFF:POST:https://maker.ifttt.com/trigger/TurnOffJadynsRoomLamp/with/key/Dzj4Tcd9vO05FUsKZfYtN:off]" }
7 | Switch StudyDeskGlobe "Study Desk Globe" [ "Lighting" ] { http=">[ON:POST:https://maker.ifttt.com/trigger/StudyDeskGlobeOn/with/key/Dzj4Tcd9vO05FUsKZfYtN:on] >[OFF:POST:https://maker.ifttt.com/trigger/StudyDeskGlobeOff/with/key/Dzj4Tcd9vO05FUsKZfYtN:off]" }
8 | Switch MasterBedroomRHLamp "Master Bedroom Right Hand Lamp" [ "Lighting" ] { http=">[ON:POST:https://maker.ifttt.com/trigger/TurnOnMasterBedroomRHLamp/with/key/Dzj4Tcd9vO05FUsKZfYtN:on] >[OFF:POST:https://maker.ifttt.com/trigger/TurnOffMasterBedroomRHLamp/with/key/Dzj4Tcd9vO05FUsKZfYtN:off]" }
9 | Switch MasterBedroomLHLamp "Master Bedroom Left Hand Lamp" [ "Lighting" ] { http=">[ON:POST:https://maker.ifttt.com/trigger/TurnOnMasterBedroomLHLamp/with/key/Dzj4Tcd9vO05FUsKZfYtN:on] >[OFF:POST:https://maker.ifttt.com/trigger/TurnOffMasterBedroomLHLamp/with/key/Dzj4Tcd9vO05FUsKZfYtN:off]" }
10 | Switch StudyFan "Study Fan" [ "Lighting" ] { http=">[ON:POST:https://maker.ifttt.com/trigger/StudyFanOn/with/key/Dzj4Tcd9vO05FUsKZfYtN:on] >[OFF:POST:https://maker.ifttt.com/trigger/StudyFanOff/with/key/Dzj4Tcd9vO05FUsKZfYtN:off]" }
11 |
--------------------------------------------------------------------------------
/HomeConfig/stations.csv:
--------------------------------------------------------------------------------
1 | # Find lots more stations at http://www.iheart.com,
2 | Alternative (BAGeL Radio - SomaFM),http://somafm.com/bagel.pls
3 | Alternative (The Alternative Project),http://c9.prod.playlists.ihrhls.com/4447/playlist.m3u8
4 | American Roots (Boot Liquor - SomaFM),http://somafm.com/bootliquor.pls
5 | Celtic (ThistleRadio - SomaFM),http://somafm.com/thistle.pls
6 | Chillout (Groove Salad - SomaFM),http://somafm.com/startstream=groovesalad.pls
7 | Commodore 64 Remixes (Slay Radio),http://www.slayradio.org/tune_in.php/128kbps/listen.m3u
8 | Covers (SomaFM),http://somafm.com/covers.pls
9 | Downtempo (Secret Agent - SomaFM),http://somafm.com/secretagent.pls
10 | Dub Step (Dub Step Beyond - SomaFM),http://somafm.com/dubstep.pls
11 | Electronic/Dance (Electronic Culture),http://www.shouted.fm/tunein/electro-dsl.m3u
12 | Folk (Folk Forward - SomaFM),http://somafm.com/folkfwd.pls
13 | Hip Hop (Hot 97 NYC),http://playerservices.streamtheworld.com/pls/WQHTAAC.pls
14 | Hip Hop (Power 1051 NYC),http://c11.prod.playlists.ihrhls.com/1481/playlist.m3u8
15 | House (Beat Blender - SomaFM),http://somafm.com/startstream=beatblender.pls
16 | Indie Pop (Indie Pop Rocks! - SomaFM),http://somafm.com/indiepop130.pls
17 | Intelligent dance music (Cliq Hop - SomaFM),http://somafm.com/startstream=cliqhop.pls
18 | Jazz (Sonic Universe - SomaFM),http://somafm.com/startstream=sonicuniverse.pls
19 | Lounge (Illinois Street Lounge - SomaFM),http://somafm.com/illstreet.pls
20 | Pop (PopTron! - SomaFM),http://somafm.com/poptron.pls
21 | Pop/Rock/Urban (Frequence 3 - Paris) ,http://streams.frequence3.net/hd-mp3.m3u
22 | Progressive (Tags Trance Trip - SomaFM),http://somafm.com/tagstrance.pls
23 | Public Radio (WNYC - Public Radio from New York to the World),http://wnyc-iheart.streamguys.com/wnycfm-iheart.aac
24 | Reggae Dancehall (Ragga Kings),http://www.raggakings.net/listen.m3u
25 | Rock (Digitalis - SomaFM),http://somafm.com/digitalis.pls
26 | Venice Classic Radio Italia, http://174.36.206.197:8000/listen.pls
27 | Classical WETA 90.9 FM (Washington DC), http://38.100.128.106:8000/listen.pls
28 | WKSU 3 Classical, http://66.225.205.8:8030/listen.pls
29 | KLUX 89.5 HD, http://206.217.202.1:7610/listen.pls
30 | JazzGroove.org Laidback Jazz, http://199.180.72.2:8015/listen.pls
31 | SmoothJazz.com Global Jazz, http://sj64.hnux.com/listen.pls
32 | Best Smooth Jazz UK, http://64.95.243.43:8002/listen.pls
33 | Smooth Jazz Tampa Florida, http://us3.internet-radio.com:8007/listen.pls
34 | Traditional Classic Country HPR1, http://50.7.70.58:8708/listen.pls
35 | Nashville FM, http://46.231.87.20:8300/listen.pls
36 | Classic Country 1630, http://198.105.216.204:8194/listen.pls
37 | New Country, http://173.192.70.138:9170/listen.pls
38 | Nicomorr Radio, http://uk6.internet-radio.com:8294/listen.pls
39 | Desi Bollywood Evergreen Hits, http://192.240.102.133:11454/stream
40 | Old Hits Of Bollywood, http://50.7.77.115:8174/listen.pls
41 | Old Non Stop Bollywood hits, http://198.178.123.14:8216/listen.pls
42 | Desi Music Mix, http://66.23.234.242:8012/listen.pls
43 | Essence Piano Radio, http://62.210.209.179:8011/listen.pls
44 | Piano Letter, http://222.122.178.186:1882/listen.pls
45 | ABC Classical Radio, http://www.abc.net.au/res/streaming/audio/mp3/classic_fm.pls
46 | ABC Jazz Radio, http://www.abc.net.au/res/streaming/audio/mp3/abc_jazz.pls
47 | ABC Country Radio, http://www.abc.net.au/res/streaming/audio/mp3/abc_country.pls
48 |
--------------------------------------------------------------------------------
/LightSensor/Light_Sensor_v0.11.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | import gpiozero
4 | import signal
5 | import subprocess
6 | import sys
7 | import os
8 | import time
9 | from time import sleep
10 | import csv
11 | import httplib, urllib
12 | import RPi.GPIO as GPIO
13 |
14 | GPIO.setmode(GPIO.BCM)
15 | GPIO.setup(20,GPIO.IN)
16 |
17 | while True :
18 | sensorvalue = GPIO.input(20)
19 | if sensorvalue == 0:
20 | print("Hey, I can sense that the room is illuminated.")
21 | elif sensorvalue == 1:
22 | print("Hey, I sense darkness.")
23 | else:
24 | print("Something's wrong. Let's wait for the next reading.")
25 | time.sleep(10)
26 |
--------------------------------------------------------------------------------
/LightSensor/README.md:
--------------------------------------------------------------------------------
1 | Simple light sensor (https://github.com/tangowhisky37/RaspiPythonProjects/tree/master/LightSensor)
2 | - The Simple Light Sensor is a small program that detects the presence or absence of light (a gpiozero-based sketch of the same read follows this list)
3 | - Components required include:
4 | - 1 x Raspberry Pi 3 (I used a Pi 3 Model B; any model you have at your disposal should work)
5 | - 1 x Photoresistor Light Sensor module
6 | - http://www.buildcircuit.com.au/Photoresistor-Sensor-Module-Light-Detection-for-arduino
7 | - http://www.dx.com/p/6495-photoresistor-light-sensor-module-for-smart-car-black-blue-152774
8 | - https://tkkrlab.nl/wiki/Arduino_KY-018_Photo_resistor_module
9 | - The first two links very closely resemble the one I've used
10 | - 1 x Breadboard
11 | - 1 x Extension cable (suggested) to extend the Raspberry Pi's GPIO pins and bring them closer to the breadboard
12 | - 10 x Jumper cables - Female (connect to the Raspberry Pi GPIO) to Male (connect to the breadboard)
13 | - 1 x Tactile Push button
14 | - 1 x Raspberry Pi Cobbler board (recommended, to break out the GPIOs and make wiring easier)
15 |
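Light_Sensor_v0.11.py above polls the module's digital output with RPi.GPIO, although it also imports gpiozero without using it. For reference, a minimal sketch of the same read written with gpiozero, assuming the module's DO pin is wired to BCM 20 as in the script:

```python
from time import sleep
from gpiozero import DigitalInputDevice

# Photoresistor module's digital output on BCM 20;
# as in the script above, the pin reads 0 when light is detected and 1 in darkness.
sensor = DigitalInputDevice(20)

while True:
    if sensor.value == 0:
        print("Hey, I can sense that the room is illuminated.")
    else:
        print("Hey, I sense darkness.")
    sleep(10)
```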
--------------------------------------------------------------------------------
/OpenCV/CaptureFaces/FaceDetect1/README.md:
--------------------------------------------------------------------------------
1 | Run the code like this:
2 |
3 | *python face_detect.py abba.png haarcascade_frontalface_default.xml*
4 |
5 | If you want to understand how the code works, the details are here:
6 |
7 | https://realpython.com/blog/python/face-recognition-with-python/
8 |
9 | EDIT: now works with OpenCV3.
10 |
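The other test images bundled in this folder can be run through the same command, for example:

*python face_detect.py little_mix_right.jpg haarcascade_frontalface_default.xml*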
--------------------------------------------------------------------------------
/OpenCV/CaptureFaces/FaceDetect1/abba.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/OpenCV/CaptureFaces/FaceDetect1/abba.png
--------------------------------------------------------------------------------
/OpenCV/CaptureFaces/FaceDetect1/abba_face_detected.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/OpenCV/CaptureFaces/FaceDetect1/abba_face_detected.jpg
--------------------------------------------------------------------------------
/OpenCV/CaptureFaces/FaceDetect1/face_detect.py:
--------------------------------------------------------------------------------
1 | import cv2
2 | import sys
3 |
4 | # Get user supplied values
5 | imagePath = sys.argv[1]
6 | cascPath = sys.argv[2]
7 |
8 | # Create the haar cascade
9 | faceCascade = cv2.CascadeClassifier(cascPath)
10 |
11 | # Read the image
12 | image = cv2.imread(imagePath)
13 | gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
14 |
15 | # Detect faces in the image
16 | faces = faceCascade.detectMultiScale(
17 | gray,
18 | scaleFactor=1.1,
19 | minNeighbors=5,
20 | minSize=(30, 30)
21 | #flags = cv2.CV_HAAR_SCALE_IMAGE
22 | )
23 |
24 | print "Found {0} faces!".format(len(faces))
25 |
26 | # Draw a rectangle around the faces
27 | for (x, y, w, h) in faces:
28 | cv2.rectangle(image, (x, y), (x+w, y+h), (0, 255, 0), 2)
29 |
30 | cv2.imshow("Faces found", image)
31 | cv2.waitKey(0)
32 |
--------------------------------------------------------------------------------
/OpenCV/CaptureFaces/FaceDetect1/little_mix_right.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/OpenCV/CaptureFaces/FaceDetect1/little_mix_right.jpg
--------------------------------------------------------------------------------
/OpenCV/CaptureFaces/FaceDetect1/little_mix_wrong.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/OpenCV/CaptureFaces/FaceDetect1/little_mix_wrong.jpg
--------------------------------------------------------------------------------
/OpenCV/CaptureFaces/FaceDetect1/the_saturdays_right.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/OpenCV/CaptureFaces/FaceDetect1/the_saturdays_right.jpg
--------------------------------------------------------------------------------
/OpenCV/CaptureFaces/FaceDetect1/the_saturdays_wrong.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/OpenCV/CaptureFaces/FaceDetect1/the_saturdays_wrong.jpg
--------------------------------------------------------------------------------
/OpenCV/CaptureFaces/FaceDetect2/CaptureFace_FromSingleFile_Or_SingleCameraPic.py:
--------------------------------------------------------------------------------
1 | #!/home/pi/.virtualenvs/cv2/bin/python
2 |
3 |
4 | import sys
5 | from time import sleep
6 | import cv2
7 | import picamera
8 |
9 | def extract_features(image):
10 | face_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml')
11 | eye_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_eye.xml')
12 | #nose_cascade = cv2.CascadeClassifier('/home/pi/opencv-3.0.0/data/haarcascades/Nariz.xml')
13 |
14 | gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
15 | faces = face_cascade.detectMultiScale(
16 | gray,
17 | scaleFactor=1.1,
18 | minNeighbors=5,
19 | minSize=(30, 30),
20 | flags = cv2.CASCADE_SCALE_IMAGE
21 | )
22 |
23 | # iterate over all identified faces and try to find eyes
24 | for (x, y, w, h) in faces:
25 | cv2.rectangle(image, (x, y), (x+w, y+h), (0, 255, 0), 2)
26 |
27 | roi_gray = gray[y:y+h, x:x+w]
28 | roi_color = image[y:y+h, x:x+w]
29 |
30 | eyes = eye_cascade.detectMultiScale(roi_gray, minSize=(30, 30))
31 | for (ex,ey,ew,eh) in eyes:
32 | cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(255,0,0),2)
33 |
34 | #noses = nose_cascade.detectMultiScale(roi_gray, minSize=(100, 30))
35 | #for (ex,ey,ew,eh) in noses:
36 | # cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(0,0,255),2)
37 |
38 | print "Found {0} faces in the picture!!!".format(len(faces))
39 | cv2.imshow('Mapping Faces within the Image', image)
40 | cv2.waitKey(0)
41 | cv2.destroyAllWindows()
42 |
43 |
44 | if __name__ == "__main__":
45 |
46 | if len(sys.argv) >= 2:
47 | image_file = sys.argv[1]
48 | else:
49 | sleep(2)
50 | image_file = 'snapshot.jpg'
51 | picamera.PiCamera().capture(image_file)
52 |
53 | image = cv2.imread(image_file)
54 | extract_features(image)
55 |
--------------------------------------------------------------------------------
/OpenCV/CaptureFaces/FaceDetect2/CaptureFace_FromSingleFile_Or_SingleCameraPic_v0.11.py:
--------------------------------------------------------------------------------
1 | #!/home/pi/.virtualenvs/cv2/bin/python
2 |
3 |
4 | import sys
5 | from time import sleep
6 | import cv2
7 | import picamera
8 |
9 | def extract_features(image):
10 | face_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml')
11 | eye_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_eye.xml')
12 | #nose_cascade = cv2.CascadeClassifier('/home/pi/opencv-3.0.0/data/haarcascades/Nariz.xml')
13 |
14 | gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
15 | faces = face_cascade.detectMultiScale(
16 | gray,
17 | scaleFactor=1.1,
18 | minNeighbors=5,
19 | minSize=(30, 30),
20 | flags = cv2.CASCADE_SCALE_IMAGE
21 | )
22 |
23 | # iterate over all identified faces and try to find eyes
24 | for (x, y, w, h) in faces:
25 | cv2.rectangle(image, (x, y), (x+w, y+h), (0, 255, 0), 2)
26 |
27 | roi_gray = gray[y:y+h, x:x+w]
28 | roi_color = image[y:y+h, x:x+w]
29 |
30 | eyes = eye_cascade.detectMultiScale(roi_gray, minSize=(30, 30))
31 | for (ex,ey,ew,eh) in eyes:
32 | cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(255,0,0),2)
33 |
34 | #noses = nose_cascade.detectMultiScale(roi_gray, minSize=(100, 30))
35 | #for (ex,ey,ew,eh) in noses:
36 | # cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(0,0,255),2)
37 |
38 | print "Found {0} faces in the picture!!!".format(len(faces))
39 | cv2.imshow('Mapping Faces within the Image', image)
40 | cv2.waitKey(0)
41 | cv2.destroyAllWindows()
42 |
43 |
44 | if __name__ == "__main__":
45 |
46 | if len(sys.argv) >= 2:
47 | image_file = sys.argv[1]
48 | else:
49 | sleep(2)
50 | image_file = 'snapshot.jpg'
51 | picamera.PiCamera().capture(image_file)
52 |
53 | image = cv2.imread(image_file)
54 | extract_features(image)
55 |
--------------------------------------------------------------------------------
/OpenCV/CaptureFaces/FaceDetect2/CaptureFace_FromSingleFile_Or_SingleCameraPic_v0.12.py:
--------------------------------------------------------------------------------
1 | #!/home/pi/.virtualenvs/cv2/bin/python
2 |
3 |
4 | import sys
5 | from time import sleep
6 | import cv2
7 | import picamera
8 |
9 | def extract_features(image):
10 | face_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml')
11 | eye_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_eye.xml')
12 | #nose_cascade = cv2.CascadeClassifier('/home/pi/opencv-3.0.0/data/haarcascades/Nariz.xml')
13 |
14 | gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
15 | faces = face_cascade.detectMultiScale(
16 | gray,
17 | scaleFactor=1.1,
18 | minNeighbors=5,
19 | minSize=(30, 30),
20 | flags = cv2.CASCADE_SCALE_IMAGE
21 | )
22 |
23 | # iterate over all identified faces and try to find eyes
24 | for (x, y, w, h) in faces:
25 | cv2.rectangle(image, (x, y), (x+w, y+h), (0, 255, 0), 2)
26 |
27 | roi_gray = gray[y:y+h, x:x+w]
28 | roi_color = image[y:y+h, x:x+w]
29 |
30 | eyes = eye_cascade.detectMultiScale(roi_gray, minSize=(30, 30))
31 | for (ex,ey,ew,eh) in eyes:
32 | cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(255,0,0),2)
33 |
34 | #noses = nose_cascade.detectMultiScale(roi_gray, minSize=(100, 30))
35 | #for (ex,ey,ew,eh) in noses:
36 | # cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(0,0,255),2)
37 |
38 | print "Found {0} faces in the picture!!!".format(len(faces))
39 | cv2.imshow('Mapping Faces within the Image', image)
40 | cv2.waitKey(0)
41 | cv2.destroyAllWindows()
42 |
43 |
44 | if __name__ == "__main__":
45 |
46 | #The code in this section allows passing of images via the CLI
47 | #else the code directly reads off the camera
48 |
49 | if len(sys.argv) >= 2:
50 | image_file = sys.argv[1]
51 | else:
52 | sleep(2)
53 | image_file = 'snapshot.jpg'
54 | picamera.PiCamera().capture(image_file)
55 |
56 | image = cv2.imread(image_file)
57 | extract_features(image)
58 |
59 |
--------------------------------------------------------------------------------
/OpenCV/CaptureFaces/FaceDetect2/poi_1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/OpenCV/CaptureFaces/FaceDetect2/poi_1.jpg
--------------------------------------------------------------------------------
/OpenCV/CaptureFaces/FaceDetect2/poi_2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/OpenCV/CaptureFaces/FaceDetect2/poi_2.jpg
--------------------------------------------------------------------------------
/OpenCV/CaptureFaces/FaceDetect2/snapshot.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/OpenCV/CaptureFaces/FaceDetect2/snapshot.jpg
--------------------------------------------------------------------------------
/OpenCV/CaptureSingleImage/CaptureSingleImage_v0.11:
--------------------------------------------------------------------------------
1 | #!/home/pi/.virtualenvs/cv2/bin/python
2 |
3 | from picamera.array import PiRGBArray
4 | from picamera import PiCamera
5 | import time
6 | import cv2
7 |
8 | camera = PiCamera()
9 | rawCapture = PiRGBArray(camera)
10 | time.sleep(0.1)
11 |
12 | camera.capture(rawCapture, format="bgr")
13 | image = rawCapture.array
14 |
15 | cv2.imshow("Single Capture Image", image)
16 | cv2.waitKey(0)
17 |
--------------------------------------------------------------------------------
/OpenCV/CaptureVideoStream/CaptureVideoStream_v0.11:
--------------------------------------------------------------------------------
1 | #!/home/pi/.virtualenvs/cv2/bin/python
2 |
3 | from picamera.array import PiRGBArray
4 | from picamera import PiCamera
5 | import picamera
6 | import time
7 | import cv2
8 | import numpy as np
9 | import sys
10 | from time import sleep
11 |
12 |
13 | face_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml')
14 | eye_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_eye.xml')
15 | #nose_cascade = cv2.CascadeClassifier('/home/pi/opencv-3.0.0/data/haarcascades/Nariz.xml')
16 |
17 | capture = cv2.VideoCapture(0)
18 |
19 | while True:
20 | ret, img = capture.read()
21 | gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
22 | faces = face_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30), flags = cv2.CASCADE_SCALE_IMAGE)
23 |
24 | # iterate over all identified faces and try to find eyes
25 | for (x, y, w, h) in faces:
26 | cv2.rectangle(img, (x, y), (x+w, y+h), (0, 255, 0), 2)
27 | roi_gray = gray[y:y+h, x:x+w]
28 | roi_color = img[y:y+h, x:x+w]
29 |
30 | eyes = eye_cascade.detectMultiScale(roi_gray, minSize=(30, 30))
31 | for (ex,ey,ew,eh) in eyes:
32 | cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(255,0,0),2)
33 |
34 | #noses = nose_cascade.detectMultiScale(roi_gray, minSize=(100, 30))
35 | #for (ex,ey,ew,eh) in noses:
36 | # cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(0,0,255),2)
37 |
38 | print "Found {0} faces in the picture!!!".format(len(faces))
39 | cv2.imshow('Mapping Faces within the Image', img)
40 | #cv2.waitKey(0)
41 | #cv2.destroyAllWindows()
42 | key = cv2.waitKey(1) & 0xFF
43 | if key == ord("q"):
44 | break
45 | capture.release()
46 | cv2.destroyAllWindows()
47 |
48 |
49 |
--------------------------------------------------------------------------------
/OpenCV/CaptureVideoStream/CaptureVideoStream_v0.11.py:
--------------------------------------------------------------------------------
1 | #!/home/pi/.virtualenvs/cv2/bin/python
2 |
3 | from picamera.array import PiRGBArray
4 | from picamera import PiCamera
5 | import picamera
6 | import time
7 | import cv2
8 | import numpy as np
9 | import sys
10 | from time import sleep
11 |
12 |
13 | face_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml')
14 | eye_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_eye.xml')
15 | #nose_cascade = cv2.CascadeClassifier('/home/pi/opencv-3.0.0/data/haarcascades/Nariz.xml')
16 |
17 | capture = cv2.VideoCapture(0)
18 |
19 | while True:
20 | ret, img = capture.read()
21 | gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
22 | faces = face_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30), flags = cv2.CASCADE_SCALE_IMAGE)
23 |
24 | # iterate over all identified faces and try to find eyes
25 | for (x, y, w, h) in faces:
26 | cv2.rectangle(img, (x, y), (x+w, y+h), (0, 255, 0), 2)
27 | roi_gray = gray[y:y+h, x:x+w]
28 | roi_color = img[y:y+h, x:x+w]
29 |
30 | eyes = eye_cascade.detectMultiScale(roi_gray, minSize=(30, 30))
31 | for (ex,ey,ew,eh) in eyes:
32 | cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(255,0,0),2)
33 |
34 | #noses = nose_cascade.detectMultiScale(roi_gray, minSize=(100, 30))
35 | #for (ex,ey,ew,eh) in noses:
36 | # cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(0,0,255),2)
37 |
38 | print "Found {0} faces in the picture!!!".format(len(faces))
39 | cv2.imshow('Mapping Faces within the Image', img)
40 | #cv2.waitKey(0)
41 | #cv2.destroyAllWindows()
42 | key = cv2.waitKey(1) & 0xFF
43 | if key == ord("q"):
44 | break
45 | capture.release()
46 | cv2.destroyAllWindows()
47 |
48 |
49 |
--------------------------------------------------------------------------------
/OpenCV/CaptureVideoStream/CaptureVideoStream_v0.12:
--------------------------------------------------------------------------------
1 | #!/home/pi/.virtualenvs/cv2/bin/python
2 |
3 | from picamera.array import PiRGBArray
4 | from picamera import PiCamera
5 | import picamera
6 | from time import sleep
7 | import time
8 | import cv2
9 | import numpy as np
10 | import sys
11 |
12 |
13 | face_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml')
14 | eye_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_eye.xml')
15 | #nose_cascade = cv2.CascadeClassifier('/home/pi/opencv-3.0.0/data/haarcascades/Nariz.xml')
16 |
17 | camera = PiCamera()
18 | camera.resolution = (640,480)
19 | camera.framerate = 32
20 | rawCapture = PiRGBArray(camera, size=(640,480))
21 | time.sleep(2)
22 |
23 | while True:
24 | camera.capture(rawCapture, format="bgr")
25 | img = rawCapture.array
26 | gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
27 | faces = face_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30), flags = cv2.CASCADE_SCALE_IMAGE)
28 |
29 | # iterate over all identified faces and try to find eyes
30 | for (x, y, w, h) in faces:
31 | cv2.rectangle(img, (x, y), (x+w, y+h), (0, 255, 0), 2)
32 | roi_gray = gray[y:y+h, x:x+w]
33 | roi_color = img[y:y+h, x:x+w]
34 |
35 | eyes = eye_cascade.detectMultiScale(roi_gray, minSize=(30, 30))
36 | for (ex,ey,ew,eh) in eyes:
37 | cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(255,0,0),2)
38 |
39 | #noses = nose_cascade.detectMultiScale(roi_gray, minSize=(100, 30))
40 | #for (ex,ey,ew,eh) in noses:
41 | # cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(0,0,255),2)
42 |
43 | print "Found {0} faces in the picture!!!".format(len(faces))
44 | cv2.imshow('Mapping Faces within the Image', img)
45 | #cv2.waitKey(0)
46 | #cv2.destroyAllWindows()
47 | key = cv2.waitKey(1) & 0xFF
48 | if key == ord("q"):
49 | break
50 | #Clearing the buffer before loading the next image
51 | rawCapture.truncate(0)
52 | camera.close() #Close the camera and release its resources
53 | cv2.destroyAllWindows()
54 |
55 |
56 |
--------------------------------------------------------------------------------
/OpenCV/CaptureVideoStream/CaptureVideoStream_v0.12.py:
--------------------------------------------------------------------------------
1 | #!/home/pi/.virtualenvs/cv2/bin/python
2 |
3 | from picamera.array import PiRGBArray
4 | from picamera import PiCamera
5 | import picamera
6 | from time import sleep
7 | import time
8 | import cv2
9 | import numpy as np
10 | import sys
11 |
12 |
13 | face_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml')
14 | eye_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_eye.xml')
15 | #nose_cascade = cv2.CascadeClassifier('/home/pi/opencv-3.0.0/data/haarcascades/Nariz.xml')
16 |
17 | camera = PiCamera()
18 | camera.resolution = (640,480)
19 | camera.framerate = 32
20 | rawCapture = PiRGBArray(camera, size=(640,480))
21 | time.sleep(2)
22 |
23 | while True:
24 | camera.capture(rawCapture, format="bgr")
25 | img = rawCapture.array
26 | gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
27 | faces = face_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30), flags = cv2.CASCADE_SCALE_IMAGE)
28 |
29 | # iterate over all identified faces and try to find eyes
30 | for (x, y, w, h) in faces:
31 | cv2.rectangle(img, (x, y), (x+w, y+h), (0, 255, 0), 2)
32 | roi_gray = gray[y:y+h, x:x+w]
33 | roi_color = img[y:y+h, x:x+w]
34 |
35 | eyes = eye_cascade.detectMultiScale(roi_gray, minSize=(30, 30))
36 | for (ex,ey,ew,eh) in eyes:
37 | cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(255,0,0),2)
38 |
39 | #noses = nose_cascade.detectMultiScale(roi_gray, minSize=(100, 30))
40 | #for (ex,ey,ew,eh) in noses:
41 | # cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(0,0,255),2)
42 |
43 | print "Found {0} faces in the picture!!!".format(len(faces))
44 | cv2.imshow('Mapping Faces within the Image', img)
45 | #cv2.waitKey(0)
46 | #cv2.destroyAllWindows()
47 | key = cv2.waitKey(1) & 0xFF
48 | if key == ord("q"):
49 | break
50 | #Clearing the buffer before loading the next image
51 | rawCapture.truncate(0)
52 | camera.close() #Close the camera and release its resources
53 | cv2.destroyAllWindows()
54 |
55 |
--------------------------------------------------------------------------------
/OpenCV/CaptureVideoStream/CaptureVideoStream_v0.13.py:
--------------------------------------------------------------------------------
1 | #!/home/pi/.virtualenvs/cv2/bin/python
2 |
3 | from picamera.array import PiRGBArray
4 | from picamera import PiCamera
5 | import picamera
6 | from time import sleep
7 | import time
8 | import cv2
9 | import numpy as np
10 | import sys
11 | import datetime
12 |
13 |
14 | face_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml')
15 | eye_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_eye.xml')
16 | #nose_cascade = cv2.CascadeClassifier('/home/pi/opencv-3.0.0/data/haarcascades/Nariz.xml')
17 |
18 | camera = PiCamera()
19 | camera.resolution = (640,480)
20 | camera.framerate = 32
21 | rawCapture = PiRGBArray(camera, size=(640,480))
22 | time.sleep(2)
23 |
24 | while True:
25 | #time.sleep(1)
26 | camera.capture(rawCapture, format="bgr")
27 | img = rawCapture.array
28 | gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
29 | gray = cv2.GaussianBlur(gray, (21, 21), 0)
30 | faces = face_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30), flags = cv2.CASCADE_SCALE_IMAGE)
31 |
32 | # iterate over all identified faces and try to find eyes
33 | for (x, y, w, h) in faces:
34 | cv2.rectangle(img, (x, y), (x+w, y+h), (0, 255, 0), 2)
35 | roi_gray = gray[y:y+h, x:x+w]
36 | roi_color = img[y:y+h, x:x+w]
37 |
38 | eyes = eye_cascade.detectMultiScale(roi_gray, minSize=(30, 30))
39 | for (ex,ey,ew,eh) in eyes:
40 | cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(255,0,0),2)
41 |
42 | #noses = nose_cascade.detectMultiScale(roi_gray, minSize=(100, 30))
43 | #for (ex,ey,ew,eh) in noses:
44 | # cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(0,0,255),2)
45 |
46 | #printing messages to the screen
47 | print "At time "+time.strftime("%d/%m/%y-%H:%M:%S")+", found {0} faces in the picture!!!".format(len(faces))
48 |
49 | #writing the image to the screen
50 | cv2.imshow('Mapping Faces within the Image', img)
51 |
52 | #writing the image to a file
53 | #cv2.imwrite("temp"+str(time.strftime("%d/%m/%y-%H%M%S"))+".jpg",img)
54 | #cv2.imwrite("temp"+str(datetime.datetime.now())+".jpg",img)
55 | #cv2.imwrite("temp"+str(datetime.datetime.now().strftime("%d/%m/%y-%H/%M/%S"))+".jpg",img)
56 | cv2.imwrite("temp"+str(datetime.datetime.now())+".jpg",img)
57 |
58 | #looking for escape sequence
59 | key = cv2.waitKey(1) & 0xFF
60 | if key == ord("q"):
61 | print "Quitting....hold on"
62 | break
63 |
64 | #Clearing the buffer before loading the next image
65 | rawCapture.truncate(0)
66 |
67 | #Closing the capture, releasing all resources
68 | #rawCapture.release()
69 | cv2.destroyAllWindows()
70 |
71 |
--------------------------------------------------------------------------------
/OpenCV/CaptureVideoStream/CaptureVideoStream_v0.14.py:
--------------------------------------------------------------------------------
1 | #!/home/pi/.virtualenvs/cv2/bin/python
2 |
3 | from picamera.array import PiRGBArray
4 | from picamera import PiCamera
5 | import picamera
6 | from time import sleep
7 | import time
8 | import cv2
9 | import numpy as np
10 | import sys
11 | import datetime
12 |
13 |
14 | face_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml')
15 | eye_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_eye.xml')
16 | #nose_cascade = cv2.CascadeClassifier('/home/pi/opencv-3.0.0/data/haarcascades/Nariz.xml')
17 |
18 | camera = PiCamera()
19 | camera.resolution = (640,480)
20 | camera.framerate = 32
21 | rawCapture = PiRGBArray(camera, size=(640,480))
22 | time.sleep(2)
23 |
24 | while True:
25 | #time.sleep(1)
26 | camera.capture(rawCapture, format="bgr")
27 | img = rawCapture.array
28 | gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
29 | gray = cv2.GaussianBlur(gray, (21, 21), 0)
30 | faces = face_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30), flags = cv2.CASCADE_SCALE_IMAGE)
31 |
32 | # iterate over all identified faces and try to find eyes
33 | for (x, y, w, h) in faces:
34 | cv2.rectangle(img, (x, y), (x+w, y+h), (0, 255, 0), 2)
35 | roi_gray = gray[y:y+h, x:x+w]
36 | roi_color = img[y:y+h, x:x+w]
37 |
38 | #The code on the next three lines works and has been tested out
39 | #Disabling it because it's not required for purposes of identification of faces
40 | #eyes = eye_cascade.detectMultiScale(roi_gray, minSize=(30, 30))
41 | #for (ex,ey,ew,eh) in eyes:
42 | #cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(255,0,0),2)
43 |
44 | #The nose detection code has not been validated or tested
45 | #noses = nose_cascade.detectMultiScale(roi_gray, minSize=(100, 30))
46 | #for (ex,ey,ew,eh) in noses:
47 | # cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(0,0,255),2)
48 |
49 | #printing messages to the screen
50 | print "At time "+time.strftime("%d/%m/%y-%H:%M:%S")+", found {0} faces in the picture!!!".format(len(faces))
51 |
52 | #writing the image to the screen
53 | font = cv2.FONT_HERSHEY_SIMPLEX
54 | #cv2.putText(img, str(datetime.datetime.now().strftime("%d/%m/%y-%H/%M/%S")), (100,500), font, 4,(255,255,255),2)
55 | cv2.putText(img, "DateTime - "+str(datetime.datetime.now().strftime("%d/%m/%y %H:%M:%S")), (5,25), font, 0.5,(255,255,255))
56 | cv2.imshow('Mapping Faces within the Image', img)
57 |
58 | #writing the image to a file
59 | if len(faces) > 0:
60 | #cv2.imwrite("temp"+str(time.strftime("%d/%m/%y-%H%M%S"))+".jpg",img)
61 | #cv2.imwrite("temp"+str(datetime.datetime.now())+".jpg",img)
62 | #cv2.imwrite("temp"+str(datetime.datetime.now().strftime("%d/%m/%y-%H/%M/%S"))+".jpg",img)
63 | cv2.imwrite("FaceCaptureWarrenPi-"+str(datetime.datetime.now())+".jpg",img)
64 | print "Captured image to file !!!"
65 |
66 | #looking for escape sequence
67 | key = cv2.waitKey(1) & 0xFF
68 | if key == ord("q"):
69 | print "Quitting....hold on"
70 | break
71 |
72 | #Clearing the buffer before loading the next image
73 | rawCapture.truncate(0)
74 |
75 | #Closing the capture, releasing all resources
76 | #rawCapture.release()
77 | cv2.destroyAllWindows()
78 |
79 |
--------------------------------------------------------------------------------
/OpenCV/CaptureVideoStream/CaptureVideoStream_v0.15.py:
--------------------------------------------------------------------------------
1 | #!/home/pi/.virtualenvs/cv2/bin/python
2 |
3 | from picamera.array import PiRGBArray
4 | from picamera import PiCamera
5 | import picamera
6 | from time import sleep
7 | import time
8 | import cv2
9 | import numpy as np
10 | import sys
11 | import datetime
12 | import boto3
13 |
14 |
15 | face_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml')
16 | eye_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_eye.xml')
17 | #nose_cascade = cv2.CascadeClassifier('/home/pi/opencv-3.0.0/data/haarcascades/Nariz.xml')
18 |
19 | camera = PiCamera()
20 | camera.resolution = (640,480)
21 | camera.framerate = 32
22 | rawCapture = PiRGBArray(camera, size=(640,480))
23 | s3 = boto3.client('s3')
24 | time.sleep(2)
25 |
26 | while True:
27 | #time.sleep(1)
28 | camera.capture(rawCapture, format="bgr")
29 | img = rawCapture.array
30 | gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
31 | gray = cv2.GaussianBlur(gray, (21, 21), 0)
32 | faces = face_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30), flags = cv2.CASCADE_SCALE_IMAGE)
33 |
34 | # iterate over all identified faces and try to find eyes
35 | for (x, y, w, h) in faces:
36 | cv2.rectangle(img, (x, y), (x+w, y+h), (0, 255, 0), 2)
37 | roi_gray = gray[y:y+h, x:x+w]
38 | roi_color = img[y:y+h, x:x+w]
39 |
40 | #The code on the next three lines works and has been tested out
41 | #Disabling it because it's not required for purposes of identification of faces
42 | #eyes = eye_cascade.detectMultiScale(roi_gray, minSize=(30, 30))
43 | #for (ex,ey,ew,eh) in eyes:
44 | #cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(255,0,0),2)
45 |
46 | #The nose detection code has not been validated or tested
47 | #noses = nose_cascade.detectMultiScale(roi_gray, minSize=(100, 30))
48 | #for (ex,ey,ew,eh) in noses:
49 | # cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(0,0,255),2)
50 |
51 | #printing messages to the screen
52 | print "At time "+time.strftime("%d/%m/%y-%H:%M:%S")+", found {0} faces in the picture!!!".format(len(faces))
53 |
54 | #writing the image to the screen
55 | font = cv2.FONT_HERSHEY_SIMPLEX
56 | #cv2.putText(img, str(datetime.datetime.now().strftime("%d/%m/%y-%H/%M/%S")), (100,500), font, 4,(255,255,255),2)
57 | cv2.putText(img, "DateTime - "+str(datetime.datetime.now().strftime("%d/%m/%y %H:%M:%S")), (5,25), font, 0.5,(255,255,255))
58 | cv2.imshow('Mapping Faces within the Image', img)
59 |
60 | #writing the image to a file
61 | if len(faces) > 0:
62 | #Older versions of cv2.imwrite
63 | #cv2.imwrite("temp"+str(time.strftime("%d/%m/%y-%H%M%S"))+".jpg",img)
64 | #cv2.imwrite("temp"+str(datetime.datetime.now())+".jpg",img)
65 | #cv2.imwrite("temp"+str(datetime.datetime.now().strftime("%d/%m/%y-%H/%M/%S"))+".jpg",img)
66 | #cv2.imwrite("FaceCaptureWarrenPi-"+str(datetime.datetime.now())+".jpg",img)
67 |
68 | #current version of cv2.imwrite
69 | imagename = "FaceCaptureWarrenPi-" + format(str(datetime.datetime.now())) + ".jpg"
70 | writepath = "/home/pi/Downloads/TW_Experiments/Python_Projects/RaspiPythonProjects/OpenCV/CaptureVideoStream/imagecapture/" + imagename
71 | cv2.imwrite(writepath, img)
72 | print "Captured image to file !!!"
73 |
74 | #Uploading files to AWS
75 | with open(writepath, 'rb') as data:
76 | s3.upload_fileobj(data, "tw37-opencv", imagename)
77 |
78 | #looking for escape sequence
79 | key = cv2.waitKey(1) & 0xFF
80 | if key == ord("q"):
81 | print "Quitting....hold on"
82 | break
83 |
84 | #Clearing the buffer before loading the next image
85 | rawCapture.truncate(0)
86 |
87 | #Closing the capture, releasing all resources
88 | #rawCapture.release()
89 | cv2.destroyAllWindows()
90 |
91 |
--------------------------------------------------------------------------------
/OpenCV/CaptureVideoStream/CaptureVideoStream_v0.16.py:
--------------------------------------------------------------------------------
1 | #!/home/pi/.virtualenvs/cv2/bin/python
2 |
3 | from picamera.array import PiRGBArray
4 | from picamera import PiCamera
5 | import picamera
6 | from time import sleep
7 | import time
8 | import cv2
9 | import numpy as np
10 | import sys
11 | import datetime
12 | import boto3
13 | import subprocess
14 |
15 | bucket_source_var = "tw37-opencv"
16 | #key_source_var = "new_image_name.jpg"
17 | key_target_var = "orignal_trevor_1706.jpg"
18 | bucket_target_var = "tw37-original"
19 |
20 | def compare_faces(bucket, key, bucket_target, key_target, threshold=80, region="us-west-2"):
21 | rekognition = boto3.client("rekognition", region)
22 | response = rekognition.compare_faces(
23 | SourceImage={
24 | "S3Object": {
25 | "Bucket": bucket,
26 | "Name": key,
27 | }
28 | },
29 | TargetImage={
30 | "S3Object": {
31 | "Bucket": bucket_target,
32 | "Name": key_target,
33 | }
34 | },
35 | SimilarityThreshold=threshold,
36 | )
37 | return response['SourceImageFace'], response['FaceMatches']
38 |
39 | face_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml')
40 | eye_cascade = cv2.CascadeClassifier('/usr/local/share/OpenCV/haarcascades/haarcascade_eye.xml')
41 | #nose_cascade = cv2.CascadeClassifier('/home/pi/opencv-3.0.0/data/haarcascades/Nariz.xml')
42 |
43 | camera = PiCamera()
44 | camera.resolution = (640,480)
45 | camera.framerate = 32
46 | rawCapture = PiRGBArray(camera, size=(640,480))
47 | s3 = boto3.client('s3')
48 | time.sleep(2)
49 |
50 | while True:
51 | #time.sleep(1)
52 | camera.capture(rawCapture, format="bgr")
53 | img = rawCapture.array
54 | gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
55 | gray = cv2.GaussianBlur(gray, (21, 21), 0)
56 | faces = face_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30), flags = cv2.CASCADE_SCALE_IMAGE)
57 |
58 | # iterate over all identified faces and try to find eyes
59 | for (x, y, w, h) in faces:
60 | cv2.rectangle(img, (x, y), (x+w, y+h), (0, 255, 0), 2)
61 | roi_gray = gray[y:y+h, x:x+w]
62 | roi_color = img[y:y+h, x:x+w]
63 |
64 | #The code on the next three lines works and has been tested out
65 | #Disabling it because it's not required for purposes of identification of faces
66 | #eyes = eye_cascade.detectMultiScale(roi_gray, minSize=(30, 30))
67 | #for (ex,ey,ew,eh) in eyes:
68 | #cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(255,0,0),2)
69 |
70 | #The nose detection code has not been validated or tested
71 | #noses = nose_cascade.detectMultiScale(roi_gray, minSize=(100, 30))
72 | #for (ex,ey,ew,eh) in noses:
73 | # cv2.rectangle(roi_color,(ex,ey),(ex+ew,ey+eh),(0,0,255),2)
74 |
75 | #printing messages to the screen
76 | print "At time "+time.strftime("%d/%m/%y-%H:%M:%S")+", found {0} faces in the picture!!!".format(len(faces))
77 |
78 | #writing the image to the screen
79 | font = cv2.FONT_HERSHEY_SIMPLEX
80 | #cv2.putText(img, str(datetime.datetime.now().strftime("%d/%m/%y-%H/%M/%S")), (100,500), font, 4,(255,255,255),2)
81 | cv2.putText(img, "DateTime - "+str(datetime.datetime.now().strftime("%d/%m/%y %H:%M:%S")), (5,25), font, 0.5,(255,255,255))
82 | cv2.imshow('Mapping Faces within the Image', img)
83 |
84 | #writing the image to a file
85 | if len(faces) > 0:
86 | #Older versions of cv2.imwrite
87 | #cv2.imwrite("temp"+str(time.strftime("%d/%m/%y-%H%M%S"))+".jpg",img)
88 | #cv2.imwrite("temp"+str(datetime.datetime.now())+".jpg",img)
89 | #cv2.imwrite("temp"+str(datetime.datetime.now().strftime("%d/%m/%y-%H/%M/%S"))+".jpg",img)
90 | #cv2.imwrite("FaceCaptureWarrenPi-"+str(datetime.datetime.now())+".jpg",img)
91 |
92 | #current version of cv2.imwrite
93 | #imagename = "FaceCaptureWarrenPi-" + format(str(datetime.datetime.now())) + ".jpg" #This also works
94 | imagename = "FaceCaptureWarrenPi-" + format(str(time.strftime("%d%m%y-%H%M%S"))) + ".jpg"
95 | writepath = "/home/pi/Downloads/TW_Experiments/Python_Projects/RaspiPythonProjects/OpenCV/CaptureVideoStream/imagecapture/" + imagename
96 | cv2.imwrite(writepath, img)
97 | print "Captured image to file !!!"
98 |
99 | #Uploading files to AWS S3
100 | with open(writepath, 'rb') as data:
101 | s3.upload_fileobj(data, "tw37-opencv", imagename)
102 |
103 | #Comparing images using AWS Rekognition
104 | bucket_source_var = "tw37-opencv"
105 | #key_source_var = "new_image_name.jpg"
106 | key_target_var = "orignal_trevor_1706.jpg"
107 | bucket_target_var = "tw37-original"
108 |
109 | source_face, matches = compare_faces(bucket_source_var, imagename, bucket_target_var, key_target_var)
110 | print "Source Face ({Confidence}%)".format(**source_face)
111 | #one match for each target face
112 | for match in matches:
113 | print "Target Face ({Confidence}%)".format(**match['Face'])
114 | print " Similarity : {}%".format(match['Similarity'])
115 | if (match['Similarity'] > 80):
116 | print "Hi Trevor, Welcome back."
117 | subprocess.call("espeak \" Hi Trevor Welcome back \" ", shell=True)
118 |
119 | #looking for escape sequence
120 | key = cv2.waitKey(1) & 0xFF
121 | if key == ord("q"):
122 | print "Quitting....hold on"
123 | break
124 |
125 | #Clearing the buffer before loading the next image
126 | rawCapture.truncate(0)
127 |
128 | #Closing the capture, releasing all resources
129 | #rawCapture.release()
130 | cv2.destroyAllWindows()
131 |
132 |
--------------------------------------------------------------------------------
/OpenCV/README.md:
--------------------------------------------------------------------------------
1 | OpenCV - This repository hosts code for various Python OpenCV projects that have been developed for the Raspberry Pi.
2 |
3 | - Roadmap includes -
4 | - Comparing two images (One face in source image) and looking for a similar face in the target (https://github.com/tangowhisky37/RaspiPythonProjects/tree/master/OpenCV/CaptureVideoStream) -
5 | - Step 1 - Simple capture of images. (Done)
6 | - Step 2 - Capture video and look for faces using OpenCV. (Done)
7 | - Step 3 - Capture video, look for faces using OpenCV and once faces are found log image to disk. (Done)
8 | - Step 4 - Capture video, look for faces using OpenCV and once faces are found log image to AWS S3. (Done)
9 | - Step 5 - Capture video, look for faces using OpenCV, once faces are found log image to AWS S3, compare uploaded image to original image at S3 using AWS Rekognition, display results of face comparison. (Done)
10 | - Step 6 - Capture video, look for faces using OpenCV, once faces are found log image to AWS S3, compare uploaded image to original image at S3 using AWS Rekognition, display results of face comparison, verbalise results using TTS (Text To Speech). (Done)
11 | - Step 7 - Capture video, look for faces using OpenCV, once faces are found log image to AWS S3, compare uploaded image to original image at S3 using AWS Rekognition, display results of face comparison, verbalise results using TTS (Text To Speech), call out local time, temperature and forecast for the day. (Done)
12 | - Step 8 - Capture video, look for faces using OpenCV, once faces are found log image to AWS S3, compare uploaded image to original image at S3 using AWS Rekognition, display results of face comparison, verbalise results using TTS (Text To Speech), call out local time, temperature and forecast for the day, check time of last correct match and if < 60s ago do not perform TTS functions. (Done)
13 |
14 | - Comparing two images (Many faces in source image) and looking for one of the known faces in the target -
15 | - Capture video, look for faces using OpenCV, once faces are found log image to AWS S3, compare uploaded image to original image (which now contains multiple faces in it) at S3 using AWS Rekognition, display results of face comparison identifying which of the people in the original image have been identified, verbalise results using TTS (Text To Speech), call out local time, temperature and forecast for the day, check time of last correct match and if < 60s ago do not perform TTS functions. (In Progress)
16 |
17 | - Comparing two images (One face in source image) and looking for a similar face in the target. Implement AWS Lambda -
18 | - Capture video, look for faces using OpenCV and once faces are found log image to AWS S3, all performed on one thread. Use AWS Lambda to launch a step function which compares the uploaded image to the original image at S3 using AWS Rekognition and then uses SNS to send a notification. Pick up the notification on a separate thread, verbalise results using TTS (Text To Speech), call out local time, temperature and forecast for the day, check time of last correct match and if < 60s ago do not perform TTS functions. (Planned - a minimal Lambda sketch follows this README)
19 |
20 | - Projects include -
21 | - OpenCV Simple CaptureImage -
22 | - This was the first program I got started with. It's a very basic OpenCV program which warms up the camera and captures an image.
23 | - Review the code at - https://github.com/tangowhisky37/RaspiPythonProjects/tree/master/OpenCV/CaptureSingleImage
24 | - OpenCV Scan & CaptureFaces -
25 | - FaceDetect1 -
26 | - This piece of code was adapted from https://realpython.com/blog/python/face-recognition-with-python/.
27 | - This code provides basic functionality to track faces. You can access the code at https://github.com/tangowhisky37/RaspiPythonProjects/tree/master/OpenCV/CaptureFaces/FaceDetect1
28 | - FaceDetect2 -
29 | - This piece of code provides functionality to scan an image provided at the command line or as acquired from the Pi Camera with the objective of identifying faces.
30 | - This project uses the face cascade Haar filters and draws rectangles around the faces on the captured image.
31 | - Review the code at https://github.com/tangowhisky37/RaspiPythonProjects/tree/master/OpenCV/CaptureFaces/FaceDetect2
32 | - OpenCV Capture Images from VideoStream, Store on AWS S3, perform comparison of faces, call out to the identified person using TTS (Text To Speech)
33 | - This project is designed to capture video from the Raspberry Pi camera and continuously scan the content for faces
34 | - This project uses the face cascade Haar filters and draws rectangles around the faces.
35 | - Once faces have been identified in an image, the image is logged to AWS S3
36 | - The code then calls the AWS Rekognition API to compare faces. If a positive match occurs it performs TTS (Text To Speech) functions, e.g. says hi, calls out the current weather, etc.
37 | - Review code for the project at https://github.com/tangowhisky37/RaspiPythonProjects/tree/master/OpenCV/CaptureVideoStream
38 | - The roadmap for this is always evolving, so check out the roadmap above to see what's currently being worked on.
39 |
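Note: the Lambda-based pipeline in the last roadmap item is still marked as planned, so no code for it exists in this repository yet. The following is a minimal, hypothetical sketch of what the Lambda side could look like, assuming the function is subscribed to S3 ObjectCreated events on the capture bucket and that an SNS topic (SNS_TOPIC_ARN) already exists; the target bucket and key defaults are taken from CaptureVideoStream_v0.16.py above.

```python
# Hypothetical AWS Lambda handler (Python 3) for the planned Rekognition + SNS step.
# Assumptions: the function is triggered by S3 ObjectCreated events on the capture
# bucket, and SNS_TOPIC_ARN points to an existing topic the Raspberry Pi listens to.
import os
import boto3

rekognition = boto3.client("rekognition")
sns = boto3.client("sns")

TARGET_BUCKET = os.environ.get("TARGET_BUCKET", "tw37-original")
TARGET_KEY = os.environ.get("TARGET_KEY", "orignal_trevor_1706.jpg")
SNS_TOPIC_ARN = os.environ["SNS_TOPIC_ARN"]


def lambda_handler(event, context):
    # One record per uploaded capture image
    for record in event["Records"]:
        bucket = record["s3"]["bucket"]["name"]
        key = record["s3"]["object"]["key"]

        response = rekognition.compare_faces(
            SourceImage={"S3Object": {"Bucket": bucket, "Name": key}},
            TargetImage={"S3Object": {"Bucket": TARGET_BUCKET, "Name": TARGET_KEY}},
            SimilarityThreshold=80,
        )

        # Keep only the best similarity score; 0 means no match above the threshold
        similarity = 0.0
        for match in response["FaceMatches"]:
            similarity = max(similarity, match["Similarity"])

        # Publish the result so a separate thread on the Pi can pick it up and speak
        sns.publish(
            TopicArn=SNS_TOPIC_ARN,
            Subject="Face comparison result",
            Message="{0} matched with similarity {1:.1f}%".format(key, similarity),
        )

    return {"processed": len(event["Records"])}
```

The step-function wiring and the Pi-side SNS subscriber described in the roadmap are not shown here.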
--------------------------------------------------------------------------------
/PIR/70936__guitarguy1985__police.wav:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/PIR/70936__guitarguy1985__police.wav
--------------------------------------------------------------------------------
/PIR/PIR_sensor_v0.11.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | from gpiozero import MotionSensor, Buzzer
4 | import signal
5 | import subprocess
6 | import sys
7 | import os
8 | import time
9 | from time import sleep
10 |
11 | pir = MotionSensor(20)
12 | buzzer = Buzzer(19)
13 |
14 | while True:
15 | if pir.motion_detected:
16 | print("You moved")
17 | buzzer.on()
18 | time.sleep(1)
19 | buzzer.off()
20 | time.sleep(5)
21 | else:
22 | print("No movement detected")
23 |
24 |
--------------------------------------------------------------------------------
/PIR/PIR_sensor_v0.12.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | from gpiozero import MotionSensor, Buzzer
4 | import signal
5 | import subprocess
6 | import sys
7 | import os
8 | import time
9 | from time import sleep
10 |
11 | pir = MotionSensor(20)
12 | buzzer = Buzzer(19)
13 |
14 | while True:
15 | if pir.motion_detected:
16 | print(" ")
17 | print("!!!!! Intruder Alert !!!!")
18 | print(" ")
19 | buzzer.on()
20 | #cmd = "aplay /usr/share/scratch/Media/Sounds/Electronic/Whoop.wav"
21 | cmd = "aplay /home/pi/Downloads/TW_Experiments/Python_Projects/RaspiPythonProjects/PIR/70936__guitarguy1985__police.wav"
22 | os.system(cmd)
23 | time.sleep(30)
24 | buzzer.off()
25 | time.sleep(5)
26 | else:
27 | print(" ")
28 | print("All good, no movement detected.")
29 | print(" ")
30 |
31 |
--------------------------------------------------------------------------------
/PIR/README.md:
--------------------------------------------------------------------------------
1 | PIR Sensor - Detect Motion & Raise an Alarm (https://github.com/tangowhisky37/RaspiPythonProjects/tree/master/PIR)
2 | - This project detects the presence of intruders in the room using a PIR sensor and sets off an alarm.
3 | - The programs use the gpiozero library to read the PIR motion sensor on GPIO20 and drive a buzzer on GPIO19; PIR_sensor_v0.12.py also plays a police-siren sound through a speaker using aplay (an event-driven variant is sketched after this README).
4 | - Components required include
5 | - 1 x Raspberry Pi 3 (I have used a 3, Model B. You can use whatever you have at your disposal.)
6 | - 1 x PIR Motion Sensor
7 | - 1 x Buzzer (connected to GPIO19)
8 | - 1 x Breadboard
9 | - 1 x Extension cable (Suggested) to extend the GPIO ports of the Raspberry Pi and bring them closer to the breadboard
10 | - 10 x Jumper cables - Female (connect to Raspberry Pi GPIO) to Male (Connect to the Breadboard)
11 | - 1 x Raspberry Pi Cobbler board (Recommended, to break out the GPIOs and make working easier)
12 | - To setup the project you will need to -
13 | - Connect the PIR sensor output to GPIO20 and the buzzer to GPIO19 (the pins used in the code)
14 | - Make sure the gpiozero library is installed (it ships with recent Raspbian images)
15 | - Place the alarm .wav file at the path referenced in PIR_sensor_v0.12.py, or update the path in the code
16 |
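The two scripts above poll pir.motion_detected in a busy loop. As an alternative (not part of the original code), here is a minimal event-driven sketch using gpiozero's callback API, assuming the same wiring (PIR output on GPIO20, buzzer on GPIO19):

```python
# Event-driven variant of the PIR alarm, assuming PIR on GPIO20 and buzzer on GPIO19.
from gpiozero import MotionSensor, Buzzer
from signal import pause

pir = MotionSensor(20)
buzzer = Buzzer(19)

def intruder_alert():
    print("!!!!! Intruder Alert !!!!")
    # Beep five times (1s on / 1s off) in the background instead of blocking the loop
    buzzer.beep(on_time=1, off_time=1, n=5)

def all_clear():
    print("All good, no movement detected.")
    buzzer.off()

pir.when_motion = intruder_alert    # fired when the PIR output goes high
pir.when_no_motion = all_clear      # fired when the PIR output drops again
pause()                             # keep the script alive waiting for callbacks
```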
--------------------------------------------------------------------------------
/PIR/Whoop.wav:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/PIR/Whoop.wav
--------------------------------------------------------------------------------
/ReadingAnalogSensors/README.md:
--------------------------------------------------------------------------------
1 | IoT - Read Analog Sensors using MCP 3008 & Upload data (https://github.com/tangowhisky37/RaspiPythonProjects/tree/master/ReadingAnalogSensors)
2 | - This program reads analog sensor values using an MCP 3008. An MCP 3008 is required since the Raspberry Pi does not have an ADC or Analog to Digital converter and is not in a position to read analog signals by itself.
3 | - This project has evolved to log data to CSV, including upload of data to the ThingSpeak IoT platform (a minimal upload sketch follows this README)
4 | - Components required include
5 | - 1 x Raspberry Pi 3 (I have used a 3, Model B. You can use whatever you have at your disposal.)
6 | - 1 x Analog sensor (the programs below read an LDR / light dependent resistor)
7 | - https://littlebirdelectronics.com.au/products/mcp3008-8-channel-10-bit-adc-with-spi-interface
8 | - 1 x Breadboard
9 | - 1 x Extension cable (Suggested) to extend the GPIO ports of the Raspberry Pi and bring them closer to the breadboard
10 | - 10 x Jumper cables - Female (connect to Raspberry Pi GPIO) to Male (Connect to the Breadboard)
11 | - 1 x Raspberry Pi Cobbler board (Recommended, to break out the GPIOs and make working easier)
12 | - To setup the project you will need to -
13 | - Read through the tutorial at Adafruit -
14 | - https://learn.adafruit.com/reading-a-analog-in-and-controlling-audio-volume-with-the-raspberry-pi
15 | - Download and install the Adafruit libraries to interact with the MCP3008
16 |
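The ThingSpeak upload in the scripts below uses Python 2's httplib. As a point of comparison (not part of the original code), here is a minimal sketch of the same single-field update using the requests library, assuming a valid Write API key and the same field8 used in read_analog_signal_v0.2.py/v0.3.py:

```python
# Minimal ThingSpeak update sketch, assuming the requests library and a valid Write API key.
import requests

def write_data_thingspeak(ldr_value, api_key):
    # ThingSpeak's update endpoint takes the write key and field values as form data;
    # the response body is the new entry id, or "0" if the update was rejected.
    response = requests.post(
        "https://api.thingspeak.com/update",
        data={"api_key": api_key, "field8": ldr_value},
        timeout=10,
    )
    print("ThingSpeak response: {0} {1}".format(response.status_code, response.text))

if __name__ == "__main__":
    write_data_thingspeak(512, "XXXXXX")  # replace with a real ADC reading and your key
```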
--------------------------------------------------------------------------------
/ReadingAnalogSensors/read_analog_signal_v0.1.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | # Import SPI library (for hardware SPI) and MCP3008 library.
4 | import Adafruit_GPIO.SPI as SPI
5 | import Adafruit_MCP3008
6 | import sys
7 | import os
8 | import time
9 |
10 | # Software SPI configuration:
11 | CLK = 18
12 | MISO = 23
13 | MOSI = 24
14 | CS = 25
15 | mcp = Adafruit_MCP3008.MCP3008(clk=CLK, cs=CS, miso=MISO, mosi=MOSI)
16 |
17 | # Hardware SPI configuration:
18 | #SPI_PORT = 0
19 | #SPI_DEVICE = 0
20 | #mcp = Adafruit_MCP3008.MCP3008(spi=SPI.SpiDev(SPI_PORT, SPI_DEVICE))
21 |
22 | print('Reading MCP3008 values, press Ctrl-C to quit...')
23 | # Print nice channel column headers.
24 | #print('| {0:>4} | {1:>4} | {2:>4} | {3:>4} | {4:>4} | {5:>4} | {6:>4} | {7:>4} |'.format(*range(8)))
25 | #print('-' * 57)
26 |
27 | # Main program loop.
28 | while True:
29 | # Read all the ADC channel values in a list.
30 | #values = [0]*8
31 |
32 | #for i in range(8):
33 | # The read_adc function will get the value of the specified channel (0-7).
34 | #values[i] = mcp.read_adc(i)
35 | # Print the ADC values.
36 | #print('| {0:>4} | {1:>4} | {2:>4} | {3:>4} | {4:>4} | {5:>4} | {6:>4} | {7:>4} |'.format(*values))
37 |
38 | # Pause for half a second.
39 | print('The value read by the LDR sensor in my study is %s' %mcp.read_adc(0))
40 | time.sleep(1)
41 |
42 |
--------------------------------------------------------------------------------
/ReadingAnalogSensors/read_analog_signal_v0.2.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | # Import SPI library (for hardware SPI) and MCP3008 library.
4 | import Adafruit_GPIO.SPI as SPI
5 | import Adafruit_MCP3008
6 | from gpiozero import LED
7 | from gpiozero import Button
8 | from signal import pause
9 | import subprocess
10 | import sys
11 | import os
12 | import time
13 | import Adafruit_DHT
14 | from time import sleep
15 | import csv
16 | import httplib, urllib
17 | import pyowm
18 | import commands
19 |
20 |
21 | # Software SPI configuration:
22 | CLK = 18
23 | MISO = 23
24 | MOSI = 24
25 | CS = 25
26 | mcp = Adafruit_MCP3008.MCP3008(clk=CLK, cs=CS, miso=MISO, mosi=MOSI)
27 |
28 | # Hardware SPI configuration:
29 | #SPI_PORT = 0
30 | #SPI_DEVICE = 0
31 | #mcp = Adafruit_MCP3008.MCP3008(spi=SPI.SpiDev(SPI_PORT, SPI_DEVICE))
32 |
33 |
34 | def WriteDataCSV(LDRSensorValue):
35 | if LDRSensorValue is not None:
36 | now = time.strftime("%d-%m-%Y %H:%M:%S")
37 | unit1 = "Value"
38 | outputFile = open('/opt/data/AnalogSensorData.csv', 'a')
39 | outputWriter = csv.writer(outputFile, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL)
40 | tempArray = [now,LDRSensorValue,unit1]
41 | outputWriter.writerow(tempArray)
42 | outputFile.close()
43 | time.sleep(5)
44 | else:
45 | print('Failed to get a reading. Will try again!')
46 | time.sleep(5)
47 | return
48 |
49 |
50 | def WriteDataThingSpeak(LDRSensorValue):
51 | now = time.strftime("%d/%m/%Y %H:%M:%S")
52 |
53 | #Un-comment the below lines to see what's being logged & debug any issues with the code
54 | print ("*******************************************************************")
55 | print ("Data written to ThingSpeak : LDR Sensor Value is %s" %(LDRSensorValue)
56 | print ("*******************************************************************")
57 |
58 | params = urllib.urlencode({'field8': LDRSensorValue, 'key':'XXXXXX'}) # You MUST provide a valid API key
59 | headers = {"Content-type": "application/x-www-form-urlencoded","Accept": "text/plain"}
60 | conn = httplib.HTTPConnection("api.thingspeak.com:80")
61 |
62 | try:
63 | conn.request("POST", "/update", params, headers)
64 | response = conn.getresponse()
65 | print response.status, response.reason
66 | data = response.read()
67 | conn.close()
68 | except:
69 | print "Connection to ThingSpeak failed. Will try again next time."
70 | time.sleep(5)
71 |
72 | return
73 |
74 |
75 | def AnalogDataProcessing():
76 |
77 | #Obtaining Analog measurement for MCP 3008 Pin 0
78 | LDRSensorValue = mcp.read_adc(0)
79 | print (" "
80 | print ("The Protosensitive LDR Sensor reading is %s " %LDRSensorValue)
81 | print (" "
82 |
83 | WriteDataCSV(LDRSensorValue)
84 | WriteDataThingSpeak(LDRSensorValue)
85 | print (" ")
86 | time.sleep(5)
87 | return
88 |
89 | #Calling Main function once
90 | AnalogDataProcessing()
91 |
92 |
93 |
--------------------------------------------------------------------------------
/ReadingAnalogSensors/read_analog_signal_v0.3.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | # Import SPI library (for hardware SPI) and MCP3008 library.
4 | import Adafruit_GPIO.SPI as SPI
5 | import Adafruit_MCP3008
6 | from gpiozero import LED
7 | from gpiozero import Button
8 | from signal import pause
9 | import subprocess
10 | import sys
11 | import os
12 | import time
13 | import Adafruit_DHT
14 | from time import sleep
15 | import csv
16 | import httplib, urllib
17 | import pyowm
18 | import commands
19 |
20 |
21 | # Software SPI configuration:
22 | CLK = 18
23 | MISO = 23
24 | MOSI = 24
25 | CS = 25
26 | mcp = Adafruit_MCP3008.MCP3008(clk=CLK, cs=CS, miso=MISO, mosi=MOSI)
27 |
28 | # Hardware SPI configuration:
29 | #SPI_PORT = 0
30 | #SPI_DEVICE = 0
31 | #mcp = Adafruit_MCP3008.MCP3008(spi=SPI.SpiDev(SPI_PORT, SPI_DEVICE))
32 |
33 |
34 | def WriteDataCSV(LDRSensorValue):
35 | if LDRSensorValue is not None:
36 | now = time.strftime("%d-%m-%Y %H:%M:%S")
37 | unit1 = "Value"
38 | outputFile = open('/opt/data/AnalogSensorData.csv', 'a')
39 | outputWriter = csv.writer(outputFile, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL)
40 | tempArray = [now,LDRSensorValue,unit1]
41 | outputWriter.writerow(tempArray)
42 | outputFile.close()
43 | time.sleep(5)
44 | else:
45 | print('Failed to get a reading. Will try again!')
46 | time.sleep(5)
47 | return
48 |
49 |
50 | def WriteDataThingSpeak(LDRSensorValue):
51 | now = time.strftime("%d/%m/%Y %H:%M:%S")
52 |
53 | #Un-comment the below lines to see what's being logged & debug any issues with the code
54 | print "*******************************************************************"
55 | print ("Data written to ThingSpeak : LDR Sensor Value is %s" %(LDRSensorValue))
56 | print "*******************************************************************"
57 |
58 | params = urllib.urlencode({'field8': LDRSensorValue, 'key':'XXXX'}) # You MUST provide a valid API key
59 | headers = {"Content-type": "application/x-www-form-urlencoded","Accept": "text/plain"}
60 | conn = httplib.HTTPConnection("api.thingspeak.com:80")
61 |
62 | try:
63 | conn.request("POST", "/update", params, headers)
64 | response = conn.getresponse()
65 | print response.status, response.reason
66 | data = response.read()
67 | conn.close()
68 | except:
69 | print "Connection to ThingSpeak failed. Will try again next time."
70 | time.sleep(5)
71 |
72 | return
73 |
74 |
75 | def AnalogDataProcessing():
76 |
77 | #Obtaining Analog measurement for MCP 3008 Pin 0
78 | LDRSensorValue = mcp.read_adc(0)
79 | print " "
80 | print ("The Protosensitive LDR Sensor reading is %s " %LDRSensorValue)
81 | print " "
82 |
83 | WriteDataCSV(LDRSensorValue)
84 | WriteDataThingSpeak(LDRSensorValue)
85 | print (" ")
86 | time.sleep(5)
87 | return
88 |
89 | #Calling Main function once
90 | AnalogDataProcessing()
91 |
92 |
93 |
--------------------------------------------------------------------------------
/Sense_Temp_Humidity/README.md:
--------------------------------------------------------------------------------
1 | IoT - Sense_Temp_Humidity (https://github.com/tangowhisky37/RaspiPythonProjects/tree/master/Sense_Temp_Humidity)
2 | - This project has evolved to Log data to CSV on the RaspberryPi and also upload data to the ThingSpeak/IoT Platform
3 | - Components required for the programs in this folder include
4 | - 1 x Raspberry Pi 3 (I have used a 3, Model B. You can use whatever you have at your disposal.)
5 | - 3 x 50 Ohm Resistors
6 | - 1 x 10 KOhm Resistor
7 | - 3 x LEDs
8 | - LEDs connected to the following GPIO ports: Red LED on GPIO17, Amber LED on GPIO27, Green LED on GPIO22
9 | - 1 x Breadboard
10 | - 1 x Extension cable (Suggested) to extend the GPIO ports of the Raspberry Pi and bring them closer to the breadboard
11 | - 6 x Jumper cables - Female (connect to Raspberry Pi GPIO) to Male (Connect to the Breadboard)
12 | - 4 x Jumper cables - Female (connect to Raspberry Pi GPIO) to Male (Connect to the Breadboard)
13 | - 1 x Tactile Push button
14 | - 1 x Raspberry Pi Cobbler board (Recommended, to break out the GPIOs and make working easier)
15 | - 1 x Arduino compatible Temperature and Humidity sensor
16 | - http://www.jaycar.com.au/arduino-compatible-temperature-and-humidity-sensor-module/p/XC4520
17 | - https://littlebirdelectronics.com.au/products/arduino-compatible-temperature-and-humidity-sensor-module
18 | - https://tkkrlab.nl/wiki/Arduino_KY-001_Temperature_sensor_module
19 | - https://learn.adafruit.com/adafruits-raspberry-pi-lesson-11-ds18b20-temperature-sensing
20 | - https://www.cl.cam.ac.uk/projects/raspberrypi/tutorials/temperature/
21 | - Sensor should be connected to GPIO04 (Pin 7)
22 | - 1 x Speaker connected to the Raspberry Pi if you want it to speak
23 | - 1 x 3.5mm Audio cable to connect the speaker to the Raspberry Pi
24 | - Python Adafruit DHT Library - https://github.com/adafruit/Adafruit_Python_DHT
25 | - Data directory to log captured data in CSV form in /opt/data.
26 | - The folder /opt/data will need to be created.
27 | - Permissions on the folder to be granted to user executing the program.
28 | - To log data at the ThingSpeak IoT platform, create an account at ThingSpeak (https://thingspeak.com).
29 | - This program uses the DHT11 and logs both Temperature and Humidity
30 | - Create a new channel at Thingspeak. Create two new fields at ThingSpeak i.e. field1, field2
31 | - Copy your API key to the program and update the WriteDataThingSpeak() function (a minimal sketch of this function follows this README).
32 | - This program has now been updated to pull data from a local Arduino Mega 2560 over http. The Arduino has a DHT11 connected to it.
33 | - The Arduino also includes an Ethernet module based on the enc28j60 chip allowing it to connect over the network.
34 | - For details on connecting up the Arduino and configuring the ethernet module please refer to my contribution at ArduinoProjects here on Github.
35 |
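The README above mentions updating a WriteDataThingSpeak() function, but the sense_temp_humidity scripts below only contain an empty WriteData() stub. This is a minimal sketch of what that function might look like, assuming the requests library and a ThingSpeak channel with field1 (temperature) and field2 (humidity):

```python
# Hypothetical WriteDataThingSpeak() sketch: posts temperature to field1 and humidity
# to field2 of a ThingSpeak channel. The requests library and a valid Write API key
# are assumptions, not part of the original scripts.
import requests

def WriteDataThingSpeak(temperature, humidity, api_key):
    payload = {"api_key": api_key, "field1": temperature, "field2": humidity}
    try:
        response = requests.post("https://api.thingspeak.com/update", data=payload, timeout=10)
        print("Data written to ThingSpeak, entry id: {0}".format(response.text))
    except requests.RequestException:
        print("Connection to ThingSpeak failed. Will try again next time.")

# Example usage with a DHT11 reading (sensor and pin as parsed in the scripts below):
# humidity, temperature = Adafruit_DHT.read_retry(sensor, pin)
# WriteDataThingSpeak(temperature, humidity, "XXXXXX")
```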
--------------------------------------------------------------------------------
/Sense_Temp_Humidity/sense_temp_humidity_v0.1.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # Copyright (c) 2014 Adafruit Industries
3 | # Author: Tony DiCola
4 |
5 | # Permission is hereby granted, free of charge, to any person obtaining a copy
6 | # of this software and associated documentation files (the "Software"), to deal
7 | # in the Software without restriction, including without limitation the rights
8 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | # copies of the Software, and to permit persons to whom the Software is
10 | # furnished to do so, subject to the following conditions:
11 |
12 | # The above copyright notice and this permission notice shall be included in all
13 | # copies or substantial portions of the Software.
14 |
15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | # SOFTWARE.
22 | import sys
23 | import time
24 | import Adafruit_DHT
25 |
26 |
27 | # Parse command line parameters.
28 | sensor_args = { '11': Adafruit_DHT.DHT11,
29 | '22': Adafruit_DHT.DHT22,
30 | '2302': Adafruit_DHT.AM2302 }
31 | if len(sys.argv) == 3 and sys.argv[1] in sensor_args:
32 | sensor = sensor_args[sys.argv[1]]
33 | pin = sys.argv[2]
34 | else:
35 | print('usage: sudo ./Adafruit_DHT.py [11|22|2302] GPIOpin#')
36 | print('example: sudo ./Adafruit_DHT.py 2302 4 - Read from an AM2302 connected to GPIO #4')
37 | sys.exit(1)
38 |
39 |
40 | while True:
41 | # Try to grab a sensor reading. Use the read_retry method which will retry up
42 | # to 15 times to get a sensor reading (waiting 2 seconds between each retry).
43 | humidity, temperature = Adafruit_DHT.read_retry(sensor, pin)
44 |
45 | # Un-comment the line below to convert the temperature to Fahrenheit.
46 | # temperature = temperature * 9/5.0 + 32
47 |
48 | # Note that sometimes you won't get a reading and
49 | # the results will be null (because Linux can't
50 | # guarantee the timing of calls to read the sensor).
51 | # If this happens try again!
52 | #while True:
53 | if humidity is not None and temperature is not None:
54 | # print('Temp={0:0.1f}* Humidity={1:0.1f}%'.format(temperature, humidity))
55 | print('The current Temperature is {0:0.1f} Degrees Celsius while the current Humidity is {1:0.1f}%'.format(temperature, humidity))
56 | time.sleep(30)
57 | else:
58 | print('Failed to get reading. Try again!')
59 | time.sleep(30)
60 |
61 | sys.exit(1)
62 |
--------------------------------------------------------------------------------
/Sense_Temp_Humidity/sense_temp_humidity_v0.2.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | from gpiozero import LED
4 | from gpiozero import Button
5 | from signal import pause
6 | from subprocess import *
7 | import sys
8 | import os
9 | import time
10 | import Adafruit_DHT
11 | from time import sleep
12 |
13 | redled = LED(17)
14 | amberled = LED(27)
15 | greenled = LED(22)
16 | button = Button(2)
17 |
18 | def WriteData():
19 | return
20 |
21 | def ReadValues():
22 | # Try to grab a sensor reading. Use the read_retry method which will retry up
23 | # to 15 times to get a sensor reading (waiting 2 seconds between each retry).
24 | humidity, temperature = Adafruit_DHT.read_retry(sensor, pin)
25 |
26 | # Un-comment the line below to convert the temperature to Fahrenheit.
27 | # temperature = temperature * 9/5.0 + 32
28 |
29 | # Note that sometimes you won't get a reading and
30 | # the results will be null (because Linux can't
31 | # guarantee the timing of calls to read the sensor).
32 | # If this happens try again!
33 | if humidity is not None and temperature is not None:
34 | print('The current Temperature is {0:0.1f} Degrees Celsius while the current Humidity is {1:0.1f}%'.format(temperature, humidity))
35 | time.sleep(5)
36 | else:
37 | print('Failed to get a reading. Will try again!')
38 | time.sleep(5)
39 | return
40 |
41 | def SeeAndListen():
42 | humidity, temperature = Adafruit_DHT.read_retry(sensor, pin)
43 | if humidity is not None and temperature is not None:
44 | #print('The current Temperature is {0:0.1f} Degrees Celcius while the current Humidity is {1:0.1f}%'.format(temperature, humidity))
45 | if temperature >= 0 and temperature <= 10:
46 | greenled.on()
47 | print("green")
48 | return
49 | elif temperature > 10 and temperature <= 20:
50 | amberled.on()
51 | sleep(1)
52 | amberled.off()
53 | print("amber")
54 | return
55 | elif temperature > 20:
56 | redled.blink()
57 | print("red")
58 | return
59 | else:
60 | print('Failed to get a reading. I will not blink this time around. Will try again soon!!!')
61 | time.sleep(5)
62 | return
63 |
64 |
65 | # Parse command line parameters.
66 | sensor_args = { '11': Adafruit_DHT.DHT11,
67 | '22': Adafruit_DHT.DHT22,
68 | '2302': Adafruit_DHT.AM2302 }
69 | if len(sys.argv) == 3 and sys.argv[1] in sensor_args:
70 | sensor = sensor_args[sys.argv[1]]
71 | pin = sys.argv[2]
72 | else:
73 | print('usage: sudo ./Adafruit_DHT.py [11|22|2302] GPIOpin#')
74 | print('example: sudo ./Adafruit_DHT.py 2302 4 - Read from an AM2302 connected to GPIO #4')
75 | sys.exit(1)
76 |
77 | while True:
78 | ReadValues()
79 | SeeAndListen()
80 | WriteData()
81 |
82 | pause()
83 |
84 |
--------------------------------------------------------------------------------
/Sense_Temp_Humidity/sense_temp_humidity_v0.3.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | from gpiozero import LED
4 | from gpiozero import Button
5 | from signal import pause
6 | from subprocess import *
7 | import sys
8 | import os
9 | import time
10 | import Adafruit_DHT
11 | from time import sleep
12 |
13 | redled = LED(17)
14 | amberled = LED(27)
15 | greenled = LED(22)
16 | button = Button(2)
17 |
18 | def WriteData():
19 | return
20 |
21 | def ReadValues():
22 | # Try to grab a sensor reading. Use the read_retry method which will retry up
23 | # to 15 times to get a sensor reading (waiting 2 seconds between each retry).
24 | humidity, temperature = Adafruit_DHT.read_retry(sensor, pin)
25 |
26 | # Un-comment the line below to convert the temperature to Fahrenheit.
27 | # temperature = temperature * 9/5.0 + 32
28 |
29 | # Note that sometimes you won't get a reading and
30 | # the results will be null (because Linux can't
31 | # guarantee the timing of calls to read the sensor).
32 | # If this happens try again!
33 | if humidity is not None and temperature is not None:
34 | print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
35 | print('The current Temperature is {0:0.1f} Degrees Celsius while the current Humidity is {1:0.1f}%'.format(temperature, humidity))
36 | print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
37 | time.sleep(60)
38 | else:
39 | print('Failed to get a reading. Will try again!')
40 | time.sleep(60)
41 | return
42 |
43 | def SeeAndListen():
44 | humidity, temperature = Adafruit_DHT.read_retry(sensor, pin)
45 | if humidity is not None and temperature is not None:
46 | #print('The current Temperature is {0:0.1f} Degrees Celcius while the current Humidity is {1:0.1f}%'.format(temperature, humidity))
47 | if temperature >= 0 and temperature <= 10:
48 | greenled.on()
49 | sleep(0.5)
50 | greenled.off()
51 | sleep(0.5)
52 | greenled.on()
53 | sleep(0.5)
54 | greenled.off()
55 | cmd="espeak 'The current temperature is " + str(temperature) + "and the humidity is" + str(humidity) + "Percent'"
56 | os.system(cmd)
57 | return
58 | elif temperature > 10 and temperature <= 20:
59 | amberled.on()
60 | sleep(0.5)
61 | amberled.off()
62 | sleep(0.5)
63 | amberled.on()
64 | sleep(0.5)
65 | amberled.off()
66 | cmd="espeak 'The current temperature is " + str(temperature) + "and the humidity is" + str(humidity) + "Percent'"
67 | os.system(cmd)
68 | return
69 | elif temperature > 20:
70 | redled.on()
71 | sleep(0.5)
72 | redled.off()
73 | sleep(0.5)
74 | redled.on()
75 | sleep(0.5)
76 | redled.off()
77 | cmd="espeak 'The current temperature is " + str(temperature) + "and the humidity is" + str(humidity) + "Percent'"
78 | os.system(cmd)
79 | return
80 | else:
81 | print('Failed to get a reading. I will not blink this time around. Will try again soon!!!')
82 | time.sleep(5)
83 | return
84 |
85 |
86 | # Parse command line parameters.
87 | sensor_args = { '11': Adafruit_DHT.DHT11,
88 | '22': Adafruit_DHT.DHT22,
89 | '2302': Adafruit_DHT.AM2302 }
90 | if len(sys.argv) == 3 and sys.argv[1] in sensor_args:
91 | sensor = sensor_args[sys.argv[1]]
92 | pin = sys.argv[2]
93 | else:
94 | print('usage: sudo ./Adafruit_DHT.py [11|22|2302] GPIOpin#')
95 | print('example: sudo ./Adafruit_DHT.py 2302 4 - Read from an AM2302 connected to GPIO #4')
96 | sys.exit(1)
97 |
98 | while True:
99 | ReadValues()
100 | SeeAndListen()
101 | WriteData()
102 |
103 | pause()
104 |
105 |
--------------------------------------------------------------------------------
/Sense_Temp_Humidity/sense_temp_humidity_v0.4.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | # Trevor W - The code to measure Humidity/Temp has been obtained from the Adafruit examples
4 | #
5 | # Copyright (c) 2014 Adafruit Industries
6 | # Author: Tony DiCola
7 |
8 | # Permission is hereby granted, free of charge, to any person obtaining a copy
9 | # of this software and associated documentation files (the "Software"), to deal
10 | # in the Software without restriction, including without limitation the rights
11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | # copies of the Software, and to permit persons to whom the Software is
13 | # furnished to do so, subject to the following conditions:
14 |
15 | # The above copyright notice and this permission notice shall be included in all
16 | # copies or substantial portions of the Software.
17 |
18 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
21 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
24 | # SOFTWARE.
25 |
26 | from gpiozero import LED
27 | from gpiozero import Button
28 | from signal import pause
29 | from subprocess import *
30 | import sys
31 | import os
32 | import time
33 | import Adafruit_DHT
34 | from time import sleep
35 |
36 | redled = LED(17)
37 | amberled = LED(27)
38 | greenled = LED(22)
39 | button = Button(2)
40 |
41 | def WriteData():
42 | return
43 |
44 | def ReadValues():
45 | # Try to grab a sensor reading. Use the read_retry method which will retry up
46 | # to 15 times to get a sensor reading (waiting 2 seconds between each retry).
47 | humidity, temperature = Adafruit_DHT.read_retry(sensor, pin)
48 |
49 | # Un-comment the line below to convert the temperature to Fahrenheit.
50 | # temperature = temperature * 9/5.0 + 32
51 |
52 | # Note that sometimes you won't get a reading and
53 | # the results will be null (because Linux can't
54 | # guarantee the timing of calls to read the sensor).
55 | # If this happens try again!
56 | if humidity is not None and temperature is not None:
57 | print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
58 | print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
59 | print('The current Temperature is {0:0.1f} Degrees Celsius while the current Humidity is {1:0.1f}%'.format(temperature, humidity))
60 | print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
61 | print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
62 | time.sleep(1)
63 | else:
64 | print('Failed to get a reading. Will try again!')
65 | time.sleep(1)
66 | return
67 |
68 | def SeeAndListen():
69 | humidity, temperature = Adafruit_DHT.read_retry(sensor, pin)
70 | if humidity is not None and temperature is not None:
71 | #print('The current Temperature is {0:0.1f} Degrees Celcius while the current Humidity is {1:0.1f}%'.format(temperature, humidity))
72 | if temperature >= 0 and temperature <= 10:
73 | greenled.on()
74 | sleep(0.5)
75 | greenled.off()
76 | sleep(0.5)
77 | greenled.on()
78 | sleep(0.5)
79 | greenled.off()
80 | cmd="espeak 'The current temperature is " + str(temperature) + "Degrees Celcius and the humidity is" + str(humidity) + "Percent'"
81 | os.system(cmd)
82 | time.sleep(60)
83 | return
84 | elif temperature > 10 and temperature <= 20:
85 | amberled.on()
86 | sleep(0.5)
87 | amberled.off()
88 | sleep(0.5)
89 | amberled.on()
90 | sleep(0.5)
91 | amberled.off()
92 | cmd="espeak 'The current temperature is " + str(temperature) + "Degrees Celcius and the humidity is" + str(humidity) + "Percent'"
93 | os.system(cmd)
94 | time.sleep(60)
95 | return
96 | elif temperature > 20:
97 | redled.on()
98 | sleep(0.5)
99 | redled.off()
100 | sleep(0.5)
101 | redled.on()
102 | sleep(0.5)
103 | redled.off()
104 | cmd="espeak 'The current temperature is " + str(temperature) + "Degrees Celcius and the humidity is" + str(humidity) + "Percent'"
105 | os.system(cmd)
106 | time.sleep(60)
107 | return
108 | else:
109 | print('Failed to get a reading. I will not blink this time around. Will try again soon!!!')
110 | time.sleep(5)
111 | return
112 |
113 | # Parse command line parameters.
114 | sensor_args = { '11': Adafruit_DHT.DHT11,
115 | '22': Adafruit_DHT.DHT22,
116 | '2302': Adafruit_DHT.AM2302 }
117 | if len(sys.argv) == 3 and sys.argv[1] in sensor_args:
118 | sensor = sensor_args[sys.argv[1]]
119 | pin = sys.argv[2]
120 | else:
121 | print('usage: sudo ./Adafruit_DHT.py [11|22|2302] GPIOpin#')
122 | print('example: sudo ./Adafruit_DHT.py 2302 4 - Read from an AM2302 connected to GPIO #4')
123 | sys.exit(1)
124 |
125 | while True:
126 | ReadValues()
127 | SeeAndListen()
128 | WriteData()
129 |
130 | pause()
131 |
132 |
--------------------------------------------------------------------------------
/Sense_Temp_Humidity/sense_temp_humidity_v0.5.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | # Trevor W - The code to measure Humidity/Temp has been obtained from the Adafruit examples
4 | #
5 | # Copyright (c) 2014 Adafruit Industries
6 | # Author: Tony DiCola
7 |
8 | # Permission is hereby granted, free of charge, to any person obtaining a copy
9 | # of this software and associated documentation files (the "Software"), to deal
10 | # in the Software without restriction, including without limitation the rights
11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | # copies of the Software, and to permit persons to whom the Software is
13 | # furnished to do so, subject to the following conditions:
14 |
15 | # The above copyright notice and this permission notice shall be included in all
16 | # copies or substantial portions of the Software.
17 |
18 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
21 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
24 | # SOFTWARE.
25 |
26 | from gpiozero import LED
27 | from gpiozero import Button
28 | from signal import pause
29 | from subprocess import *
30 | import sys
31 | import os
32 | import time
33 | import Adafruit_DHT
34 | from time import sleep
35 |
36 | redled = LED(17)
37 | amberled = LED(27)
38 | greenled = LED(22)
39 | button = Button(2)
40 |
41 | def WriteData():
42 | return
43 |
44 | def ReadValues():
45 | # Try to grab a sensor reading. Use the read_retry method which will retry up
46 | # to 15 times to get a sensor reading (waiting 2 seconds between each retry).
47 | humidity, temperature = Adafruit_DHT.read_retry(sensor, pin)
48 |
49 | # Un-comment the line below to convert the temperature to Fahrenheit.
50 | # temperature = temperature * 9/5.0 + 32
51 |
52 | # Note that sometimes you won't get a reading and
53 | # the results will be null (because Linux can't
54 | # guarantee the timing of calls to read the sensor).
55 | # If this happens try again!
56 | if humidity is not None and temperature is not None:
57 | print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
58 | print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
59 | print('!!!! Temperature = {0:0.1f} DegC, Humidity = {1:0.1f}% !!!!'.format(temperature, humidity))
60 | print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
61 | print('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
62 | time.sleep(1)
63 | else:
64 | print('Failed to get a reading. Will try again!')
65 | time.sleep(1)
66 | return
67 |
68 | def SeeAndListen():
69 | humidity, temperature = Adafruit_DHT.read_retry(sensor, pin)
70 | if humidity is not None and temperature is not None:
71 | if temperature >= 0 and temperature <= 10:
72 | greenled.on()
73 | sleep(0.5)
74 | greenled.off()
75 | sleep(0.5)
76 | greenled.on()
77 | sleep(0.5)
78 | greenled.off()
79 | sleep(0.5)
80 | greenled.on()
81 | sleep(0.5)
82 | greenled.off()
83 | cmd="espeak 'The current temperature is " + str(temperature) + "Degrees Celcius and the humidity is" + str(humidity) + "Percent'"
84 | #Uncomment the below line if you would like to hear your Raspberry Pi speak
85 | #os.system(cmd)
86 | time.sleep(60)
87 | return
88 | elif temperature > 10 and temperature <= 20:
89 | amberled.on()
90 | sleep(0.5)
91 | amberled.off()
92 | sleep(0.5)
93 | amberled.on()
94 | sleep(0.5)
95 | amberled.off()
96 | sleep(0.5)
97 | amberled.on()
98 | sleep(0.5)
99 | amberled.off()
100 | cmd="espeak 'The current temperature is " + str(temperature) + "Degrees Celcius and the humidity is" + str(humidity) + "Percent'"
101 | #Uncomment the below line if you would like to hear your Raspberry Pi speak
102 | #os.system(cmd)
103 | time.sleep(60)
104 | return
105 | elif temperature > 20:
106 | redled.on()
107 | sleep(0.5)
108 | redled.off()
109 | sleep(0.5)
110 | redled.on()
111 | sleep(0.5)
112 | redled.off()
113 | sleep(0.5)
114 | redled.on()
115 | sleep(0.5)
116 | redled.off()
117 | cmd="espeak 'The current temperature is " + str(temperature) + "Degrees Celcius and the humidity is" + str(humidity) + "Percent'"
118 | #Uncomment the below line if you would like to hear your Raspberry Pi speak
119 | #os.system(cmd)
120 | time.sleep(60)
121 | return
122 | else:
123 | print('Failed to get a reading. I will not blink this time around. Will try again soon!!!')
124 | time.sleep(5)
125 | return
126 |
127 | # Parse command line parameters.
128 | sensor_args = { '11': Adafruit_DHT.DHT11,
129 | '22': Adafruit_DHT.DHT22,
130 | '2302': Adafruit_DHT.AM2302 }
131 | if len(sys.argv) == 3 and sys.argv[1] in sensor_args:
132 | sensor = sensor_args[sys.argv[1]]
133 | pin = sys.argv[2]
134 | else:
135 | print('usage: sudo ./Adafruit_DHT.py [11|22|2302] GPIOpin#')
136 | print('example: sudo ./Adafruit_DHT.py 2302 4 - Read from an AM2302 connected to GPIO #4')
137 | sys.exit(1)
138 |
139 | while True:
140 | ReadValues()
141 | SeeAndListen()
142 | WriteData()
143 |
144 | pause()
145 |
146 |
--------------------------------------------------------------------------------
/Sense_Temp_Humidity/sense_temperature_pressure_with_dht11_raspberry_pi.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/Sense_Temp_Humidity/sense_temperature_pressure_with_dht11_raspberry_pi.png
--------------------------------------------------------------------------------
/Sense_Temp_Humidity_Pull_Data_Arduino/README.md:
--------------------------------------------------------------------------------
1 | Weather Reporting using OWM (https://github.com/tangowhisky37/RaspiPythonProjects/tree/master/Sense_Temp_Humidity_Pull_Data_Arduino)
2 | - While the Sense_Temp_Humidity project (above) was designed as a simple IoT (Internet Of Things) project to pull data from my DHT11 on the Raspberry Pi and upload it to ThingSpeak, Weather Reporting pulls data from OpenWeatherMap and uploads that data to ThingSpeak.
3 | - The objective of this project was to compare the temperature and humidity data being collected by my sensors at home with the data for the city I live in provided by Open Weather Map.
4 | - The project uses the Open Weather Map python library (pyowm) which you will need to download and install on the Raspberry Pi (a minimal usage sketch follows this README)
5 | - This project has evolved to log data to CSV including upload of data to the ThingSpeak/IoT Platform
6 | - Components required include -
7 | - Python module for OWM from https://github.com/csparpa/pyowm
8 | - 1 x Raspberry Pi 3 (I have used a 3, Model B. You can use whatever you have at your disposal.)
9 |
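No OpenWeatherMap code appears in the scripts in this folder, which pull readings from the Arduino instead. As a rough illustration of the OWM half described above, here is a minimal sketch assuming the pyowm 2.x API (the version contemporary with this Python 2 code base), a valid OWM API key, and a placeholder city name:

```python
# Hypothetical pyowm 2.x sketch: fetch current temperature and humidity for a city.
# The API key and city name below are placeholders, not values from this repository.
import pyowm

owm = pyowm.OWM("your-owm-api-key")
observation = owm.weather_at_place("Melbourne,AU")
weather = observation.get_weather()

temperature = weather.get_temperature("celsius")["temp"]
humidity = weather.get_humidity()
print("OWM reports {0} DegC and {1} percent humidity".format(temperature, humidity))
```

These values could then be posted to ThingSpeak alongside the Arduino readings for comparison, as the README describes.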
--------------------------------------------------------------------------------
/Sense_Temp_Humidity_Pull_Data_Arduino/sense_temp_humidity_v0.95.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | # Trevor W - The code to measure Humidity/Temp has been obtained from the Adafruit examples
4 | # To execute the script - bash# sudo ./sense_temp_humidity_v0.7.py 11 4
5 | # First input variable is the DHT11 or DHT22. Second input variable is the GPIO pin. Please use GPIO4.
6 |
7 | # Including original copyright since part of the code (for the temp/humidity module) was obtained from Adafruit
8 | # Copyright (c) 2014 Adafruit Industries
9 | # Author: Tony DiCola
10 |
11 | # Permission is hereby granted, free of charge, to any person obtaining a copy
12 | # of this software and associated documentation files (the "Software"), to deal
13 | # in the Software without restriction, including without limitation the rights
14 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
15 | # copies of the Software, and to permit persons to whom the Software is
16 | # furnished to do so, subject to the following conditions:
17 |
18 | # The above copyright notice and this permission notice shall be included in all
19 | # copies or substantial portions of the Software.
20 |
21 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
22 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
23 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
24 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
25 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
26 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
27 | # SOFTWARE.
28 |
29 | from gpiozero import LED
30 | from gpiozero import Button
31 | from signal import pause
32 | from subprocess import *
33 | import sys
34 | import os
35 | import time
36 | import Adafruit_DHT
37 | from time import sleep
38 | import csv
39 | import httplib, urllib
40 | from lxml import html
41 | import requests
42 |
43 |
44 | redled = LED(17)
45 | amberled = LED(22)
46 | greenled = LED(27)
47 | button = Button(2)
48 |
49 | def WriteDataCSV():
50 | #Acquire data from Arduino, first for temp and next for humidity
51 | page = requests.get('http://10.100.10.10')
52 | tree = html.fromstring(page.content)
53 | bulletpoints = tree.xpath('//li/text()')
54 | #print bulletpoints[0]
55 | #print bulletpoints[1]
56 | #print bulletpoints[2]
57 | #print bulletpoints[3]
58 |
59 | #Obtaining temp
60 | string1 = bulletpoints[2]
61 | tempstringarray = string1.split(' ')
62 | temperature = tempstringarray[4]
63 |
64 | #Obtaining humidity
65 | string2 = bulletpoints[3]
66 | tempstringarray = string2.split(' ')
67 | humidity = tempstringarray[4]
68 |
69 | now = time.strftime("%d-%m-%Y %H:%M:%S")
70 | unit1 = "DegC"
71 | unit2 = "Percent"
72 | #outputFile = open('/opt/data/temphumidity.csv', 'wb')
73 | outputFile = open('/opt/data/temphumidity.csv', 'a')
74 | outputWriter = csv.writer(outputFile, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL)
75 | tempArray = [now,temperature,unit1,humidity,unit2]
76 | outputWriter.writerow(tempArray)
77 | outputFile.close()
78 | time.sleep(2)
79 | return
80 |
81 |
82 | def WriteDataThingSpeak():
83 | #Acquire data from Arduino, first for temp and next for humidity
84 | page = requests.get('http://10.100.10.10')
85 | tree = html.fromstring(page.content)
86 | bulletpoints = tree.xpath('//li/text()')
87 | #print bulletpoints[0]
88 | #print bulletpoints[1]
89 | #print bulletpoints[2]
90 | #print bulletpoints[3]
91 |
92 | #Obtaining temp
93 | string1 = bulletpoints[2]
94 | tempstringarray = string1.split(' ')
95 | temperature = tempstringarray[4]
96 |
97 | #Obtaining humidity
98 | string2 = bulletpoints[3]
99 | tempstringarray = string2.split(' ')
100 | humidity = tempstringarray[4]
101 |
102 | #now = time.strftime("%d/%m/%Y %H:%M:%S")
103 | now = time.strftime("%d-%m-%Y %H:%M:%S")
104 | unit1 = "DegC"
105 | unit2 = "Percent"
106 | params = urllib.urlencode({'field1': temperature, 'field2': humidity, 'key':'xxxxxx'}) #Enter Key Here
107 | headers = {"Content-type": "application/x-www-form-urlencoded","Accept": "text/plain"}
108 | conn = httplib.HTTPConnection("api.thingspeak.com:80")
109 | try:
110 | conn.request("POST", "/update", params, headers)
111 | response = conn.getresponse()
112 | #Un-comment the below lines to see what's being logged & debug any issues with the code
113 | #print ("Data written to ThingSpeak : Temperature = {0:0.1f} DegC, Humidity = {1:0.1f}%".format(temperature,humidity))
114 | #print response.status, response.reason
115 | data = response.read()
116 | conn.close()
117 | except:
118 | print "Connection to ThingSpeak failed. Will try again next time."
119 | time.sleep(2)
120 | return
121 |
122 | #Executing all modules only once since we are now executing from Cron
123 | WriteDataCSV()
124 | WriteDataThingSpeak()
125 |
126 |
--------------------------------------------------------------------------------
/Sense_Temp_Humidity_Pull_Data_Arduino/sense_temp_humidity_v0.96.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | from gpiozero import LED
4 | from gpiozero import Button
5 | from signal import pause
6 | from subprocess import *
7 | import sys
8 | import os
9 | import time
10 | import Adafruit_DHT
11 | from time import sleep
12 | import csv
13 | import httplib, urllib
14 | from lxml import html
15 | import requests
16 |
17 |
18 | redled = LED(17)
19 | amberled = LED(22)
20 | greenled = LED(27)
21 | button = Button(2)
22 |
23 | def WriteDataCSV():
24 | #Acquire data from Arduino, first for temp and next for humidity
25 | page = requests.get('http://192.168.1.24')
26 | tree = html.fromstring(page.content)
27 | bulletpoints = tree.xpath('//ul/text()')
28 | #print bulletpoints[0]
29 | #print bulletpoints[1]
30 | #print bulletpoints[2]
31 | #print bulletpoints[3]
32 |
33 | #Obtaining temp
34 | string1 = bulletpoints[0]
35 | tempstringarray = string1.split(' ')
36 | temperature = tempstringarray[4].strip()
37 | print "temperature - " + temperature
38 |
39 | #Obtaining humidity
40 | string2 = bulletpoints[2]
41 | tempstringarray = string2.split(' ')
42 | humidity = tempstringarray[2].strip()
43 | print "humidity - " + humidity
44 |
45 | now = time.strftime("%d-%m-%Y %H:%M:%S")
46 | unit1 = "DegC"
47 | unit2 = "Percent"
48 | #outputFile = open('/opt/data/temphumidity.csv', 'wb')
49 | outputFile = open('/opt/data/temphumidity.csv', 'a')
50 | outputWriter = csv.writer(outputFile, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL)
51 | tempArray = [now,temperature,unit1,humidity,unit2]
52 | outputWriter.writerow(tempArray)
53 | outputFile.close()
54 | time.sleep(2)
55 | return
56 |
57 |
58 | def WriteDataThingSpeak():
59 | #Acquire data from Arduino, first for temp and next for humidity
60 | page = requests.get('http://192.168.1.24')
61 | tree = html.fromstring(page.content)
62 | bulletpoints = tree.xpath('//ul/text()')
63 | #print bulletpoints[0]
64 | #print bulletpoints[1]
65 | #print bulletpoints[2]
66 | #print bulletpoints[3]
67 |
68 | #Obtaining temp
69 | string1 = bulletpoints[0]
70 | tempstringarray = string1.split(' ')
71 | temperature = tempstringarray[4].strip()
72 | print "temperature - " + temperature
73 |
74 | #Obtaining humidity
75 | string2 = bulletpoints[2]
76 | tempstringarray = string2.split(' ')
77 | humidity = tempstringarray[2].strip()
78 | print "humidity - " + humidity
79 |
80 | #now = time.strftime("%d/%m/%Y %H:%M:%S")
81 | now = time.strftime("%d-%m-%Y %H:%M:%S")
82 | unit1 = "DegC"
83 | unit2 = "Percent"
84 | params = urllib.urlencode({'field1': temperature, 'field2': humidity, 'key':'xxxxxxx'}) #Enter Key Here
85 | headers = {"Content-type": "application/x-www-form-urlencoded","Accept": "text/plain"}
86 | conn = httplib.HTTPConnection("api.thingspeak.com:80")
87 | try:
88 | conn.request("POST", "/update", params, headers)
89 | response = conn.getresponse()
90 | #Un-comment the below lines to see what's being logged & debug any issues with the code
91 | #print ("Data written to ThingSpeak : Temperature = {0:0.1f} DegC, Humidity = {1:0.1f}%".format(temperature,humidity))
92 | #print response.status, response.reason
93 | data = response.read()
94 | conn.close()
95 | except:
96 | print "Connection to ThingSpeak failed. Will try again next time."
97 | time.sleep(2)
98 | return
99 |
100 | #Executing all modules only once since we are now executing from Cron
101 | WriteDataCSV()
102 | WriteDataThingSpeak()
103 |
104 |
--------------------------------------------------------------------------------
/Weather_Reporting/README.md:
--------------------------------------------------------------------------------
1 | Weather Data Acquisition & Reporting (https://github.com/tangowhisky37/RaspiPythonProjects/tree/master/Weather_Reporting)
2 |
3 | - The Sense_Temp_Humidity project (above) was designed as a simple IoT (Internet of Things) project to pull data from my DHT11 on the Raspberry Pi and upload it to ThingSpeak.
4 | - Weather Reporting, on the other hand, pulls data from OpenWeatherMap and uploads the data to ThingSpeak.
5 | - The Weather Reporting project was eventually updated to log information to the local file system for further analysis.
6 | - The objective of this (Weather Reporting) project was to obtain the latest temperature and humidity values from OWM (Open Weather Map) for the local city, log the data to disk and also upload it to an IoT platform.
7 | - The weather data (temperature, pressure, humidity, etc.), after being logged, is uploaded to the ThingSpeak IoT platform (a condensed sketch of this flow appears after this list).
8 |
9 |
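For reference, a condensed sketch of the flow described above, written against the pyowm 2.x API that the scripts below use (newer pyowm releases changed the interface) and Python 3 with the requests library. The ThingSpeak field numbering follows get_weather_v0.13.py; the 'press' dictionary key and the CSV column order are assumptions, the latter chosen to line up with the columns the Write_To_LCD_Screen scripts read (temperature in column 2, humidity in column 4, wind in column 8, cloud cover in column 10).

    import csv
    import time

    import pyowm
    import requests

    OWM_KEY = 'XXXX'          # your OpenWeatherMap API key
    THINGSPEAK_KEY = 'XXXX'   # your ThingSpeak write API key

    owm = pyowm.OWM(OWM_KEY)
    w = owm.weather_at_place('Melbourne,au').get_weather()

    wind = w.get_wind()['speed']
    humidity = w.get_humidity()
    temperature = w.get_temperature('celsius')['temp']
    pressure = w.get_pressure()['press']   # pyowm 2.x returns a dict; 'press' assumed to hold hPa
    clouds = w.get_clouds()

    # Log locally (column order assumed; see the note above)
    now = time.strftime('%d-%m-%Y %H:%M:%S')
    with open('/opt/data/temphumidityOWM.csv', 'a', newline='') as f:
        csv.writer(f, quoting=csv.QUOTE_ALL).writerow(
            [now, temperature, 'DegC', humidity, 'Percent',
             pressure, 'hPa', wind, 'Knots', clouds, 'x00Feet'])

    # Upload to ThingSpeak (field numbering as in get_weather_v0.13.py)
    requests.post('https://api.thingspeak.com/update',
                  data={'key': THINGSPEAK_KEY, 'field3': wind, 'field4': temperature,
                        'field5': pressure, 'field6': humidity, 'field7': clouds},
                  timeout=10)

The same script can be scheduled from cron alongside the Sense_Temp_Humidity scripts so both logs stay in step.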
--------------------------------------------------------------------------------
/Weather_Reporting/get_weather_v0.1.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | import pyowm
4 |
5 | owm = pyowm.OWM('XXXXX') # You MUST provide a valid API key
6 | #owm.set_API_key('XXXX')
7 |
8 | # You have a pro subscription? Use:
9 | # owm = pyowm.OWM(API_key='your-API-key', subscription_type='pro')
10 |
11 | # Will it be sunny tomorrow at this time in Milan (Italy) ?
12 | #forecast = owm.daily_forecast("Milan,it")
13 | #tomorrow = pyowm.timeutils.tomorrow()
14 | #forecast.will_be_sunny_at(tomorrow) # Always True in Italy, right? ;-)
15 |
16 | # Search for current weather in London (UK)
17 | #observation = owm.weather_at_place('London,uk')
18 | observation = owm.weather_at_place('Melbourne,au')
19 | w = observation.get_weather()
20 | #print(w)
22 |
23 | # Weather details
24 | print(w.get_wind()) # {'speed': 4.6, 'deg': 330}
25 | print(w.get_humidity()) # 87
26 | print(w.get_temperature('celsius')) # {'temp_max': 10.5, 'temp': 9.7, 'temp_min': 9.0}
27 | #print(w)
47 | #Obtain current Wind Speed
48 | cmd = "echo " + str(Wind) + "| cut -f 1 -d , | cut -f 2 -d : > /tmp/wind.txt"
49 | os.system(cmd)
50 | OnlyWind = open('/tmp/wind.txt', 'r').read()
51 | OnlyWind = ''.join(OnlyWind.split())
52 | print ('Current wind speed is %s knots' %OnlyWind)
53 |
54 | #Obtain current Temperature
55 | cmd = "echo " + str(Temperature) + "| cut -d : -f 4 | cut -d , -f 1 > /tmp/temp.txt"
56 | os.system(cmd)
57 | OnlyTemp = open('/tmp/temp.txt', 'r').read()
58 | #OnlyTemp.strip()
59 | OnlyTemp = ''.join(OnlyTemp.split())
60 | print ('Current Temperature is %s DegC' %OnlyTemp)
61 |
62 | #Obtain current Min Temp
63 | MinTemp = w.get_temperature('celsius')['temp_min']
64 | print ('Current minimum temperature is %s DegC ' %MinTemp)
65 |
66 | #Obtain current Max Temp
67 | MaxTemp = w.get_temperature('celsius')['temp_max']
68 | print ('Current maximum temperature is %s DegC ' %MaxTemp)
69 |
70 | #Obtain current Humidity
71 | print ("Current humidity is = %s Percent" %Humidity)
72 |
73 | #Obtain current Cloud Ceiling
74 | print ("Current cloud ceiling is %s (x00) feet" %Clouds)
75 |
76 | #Obtain current Rainfall
77 |
78 | #Obtain current Barometric Pressure
79 | cmd = "echo " + str(Pressure) + "| cut -d : -f 2 | cut -d , -f 1 > /tmp/pressure.txt"
80 | os.system(cmd)
81 | OnlyPressure = open('/tmp/pressure.txt', 'r').read()
82 | #OnlyPressure.strip()
83 | OnlyPressure = ''.join(OnlyPressure.split())
84 | print ('The Barometric Pressure is %s hPa' %OnlyPressure)
85 |
86 |
87 |
--------------------------------------------------------------------------------
/Weather_Reporting/get_weather_v0.12.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | from gpiozero import LED
4 | from gpiozero import Button
5 | from signal import pause
6 | import subprocess
7 | import sys
8 | import os
9 | import time
10 | import Adafruit_DHT
11 | from time import sleep
12 | import csv
13 | import httplib, urllib
14 | import pyowm
15 | import commands
16 |
17 | owm = pyowm.OWM('XXXX') # You MUST provide a valid API key
18 |
19 | # Will it be sunny tomorrow at this time in Milan (Italy) ?
20 | #forecast = owm.daily_forecast("Milan,it")
21 | #tomorrow = pyowm.timeutils.tomorrow()
22 | #forecast.will_be_sunny_at(tomorrow) # Always True in Italy, right? ;-)
23 |
24 | # Search for current weather in London (UK)
25 | observation = owm.weather_at_place('Melbourne,au')
26 | w = observation.get_weather()
27 |
28 | # Weather details
29 | Wind = w.get_wind() # {'speed': 4.6, 'deg': 330}
30 | #SWind = w.get_wind()['speed'] # 4
31 | Humidity = w.get_humidity() # 87
32 | Temperature = w.get_temperature('celsius') # {'temp_max': 10.5, 'temp': 9.7, 'temp_min': 9.0}
33 | Clouds = w.get_clouds()
34 | Rainfall = w.get_rain()
35 | Pressure = w.get_pressure()
36 |
37 | #Output for debugging purpose
38 | #print ("****************************************************************************************************")
39 | #print ("Current wind Speed and Direction right now in Melbourne is = %s " %Wind)
40 | #print ("Current Temperature in Melbourne is = %s" %Temperature)
41 | #print ("Current Humidity in Melbourne is = %s Percent" %Humidity)
42 | #print ("Cloud ceiling across Melbourne is %s thousand feet" %Clouds)
43 | #print ("Current Rainfall across Melbourne is %s " %Rainfall)
44 | #print ("Barometric Pressure across Melbourne is %s " %Pressure)
45 | #print ("****************************************************************************************************")
46 |
47 | #Obtain current Wind Speed
48 | cmd = "echo " + str(Wind) + "| cut -f 1 -d , | cut -f 2 -d : > /tmp/wind.txt"
49 | os.system(cmd)
50 | OnlyWind = open('/tmp/wind.txt', 'r').read()
51 | OnlyWind = ''.join(OnlyWind.split())
52 | print ('Current wind speed is %s knots' %OnlyWind)
53 |
54 | #Obtain current Temperature
55 | cmd = "echo " + str(Temperature) + "| cut -d : -f 4 | cut -d , -f 1 > /tmp/temp.txt"
56 | os.system(cmd)
57 | OnlyTemp = open('/tmp/temp.txt', 'r').read()
58 | #OnlyTemp.strip()
59 | OnlyTemp = ''.join(OnlyTemp.split())
60 | print ('Current Temperature is %s DegC' %OnlyTemp)
61 |
62 | #Obtain current Min Temp
63 | MinTemp = w.get_temperature('celsius')['temp_min']
64 | print ('Current minimum temperature is %s DegC ' %MinTemp)
65 |
66 | #Obtain current Max Temp
67 | MaxTemp = w.get_temperature('celsius')['temp_max']
68 | print ('Current maximum temperature is %s DegC ' %MaxTemp)
69 |
70 | #Obtain current Humidity
71 | print ("Current humidity is = %s Percent" %Humidity)
72 |
73 | #Obtain current Cloud Ceiling
74 | print ("Current cloud ceiling is %s (x00) feet" %Clouds)
75 |
76 | #Obtain current Rainfall
77 |
78 | #Obtain current Barometric Pressure
79 | cmd = "echo " + str(Pressure) + "| cut -d : -f 2 | cut -d , -f 1 > /tmp/pressure.txt"
80 | os.system(cmd)
81 | OnlyPressure = open('/tmp/pressure.txt', 'r').read()
82 | #OnlyPressure.strip()
83 | OnlyPressure = ''.join(OnlyPressure.split())
84 | print ('The Barometric Pressure is %s hPa' %OnlyPressure)
85 |
86 |
87 |
--------------------------------------------------------------------------------
/Weather_Reporting/get_weather_v0.13.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | from gpiozero import LED
4 | from gpiozero import Button
5 | from signal import pause
6 | import subprocess
7 | import sys
8 | import os
9 | import time
10 | import Adafruit_DHT
11 | from time import sleep
12 | import csv
13 | import httplib, urllib
14 | import pyowm
15 | import commands
16 |
17 |
18 | def WriteDataThingSpeak(OnlyWind,OnlyTemp,OnlyPressure,Humidity,Clouds):
19 | now = time.strftime("%d/%m/%Y %H:%M:%S")
20 |
21 | #Un-comment the below lines to see what's being logged & debug any issues with the code
22 | print ("****************************************************************************************************")
23 | print ("Data written to ThingSpeak : Wind Speed = %s Knots, Temperature = %s DegC, Pressure = %s hPa, Humidity = %s Percent, Cloud Ceiling = %s (x00)feet" %(OnlyWind, OnlyTemp, OnlyPressure, Humidity, Clouds))
24 | print ("****************************************************************************************************")
25 |
26 | params = urllib.urlencode({'field3': OnlyWind, 'field4': OnlyTemp, 'field5': OnlyPressure, 'field6': Humidity, 'field7': Clouds, 'key':'XXXX'}) # You MUST provide a valid API key
27 | headers = {"Content-type": "application/x-www-form-urlencoded","Accept": "text/plain"}
28 | conn = httplib.HTTPConnection("api.thingspeak.com:80")
29 |
30 | try:
31 | conn.request("POST", "/update", params, headers)
32 | response = conn.getresponse()
33 | print response.status, response.reason
34 | data = response.read()
35 | conn.close()
36 | except:
37 | print "Connection to ThingSpeak failed. Will try again next time."
38 | time.sleep(5)
39 |
40 | return
41 |
42 |
43 | def WeatherProcessing():
44 | owm = pyowm.OWM('XXXX') # You MUST provide a valid API key
45 |
46 | # Search for current weather in Melbourne (Australia)
47 | observation = owm.weather_at_place('Melbourne,au')
48 | w = observation.get_weather()
49 |
50 | #Get Weather details
51 | Wind = w.get_wind() # {'speed': 4.6, 'deg': 330}
52 | #SWind = w.get_wind()['speed'] # 4
53 | Humidity = w.get_humidity() # 87
54 | Temperature = w.get_temperature('celsius') # {'temp_max': 10.5, 'temp': 9.7, 'temp_min': 9.0}
55 | Clouds = w.get_clouds()
56 | Rainfall = w.get_rain()
57 | Pressure = w.get_pressure()
58 |
59 | #Output for debugging purpose
60 | #print ("****************************************************************************************************")
61 | #print ("Current wind Speed and Direction right now in Melbourne is = %s " %Wind)
62 | #print ("Current Temperature in Melbourne is = %s" %Temperature)
63 | #print ("Current Humidity in Melbourne is = %s Percent" %Humidity)
64 | #print ("Cloud ceiling across Melbourne is %s thousand feet" %Clouds)
65 | #print ("Current Rainfall across Melbourne is %s " %Rainfall)
66 | #print ("Barometric Pressure across Melbourne is %s " %Pressure)
67 | #print ("****************************************************************************************************")
68 |
69 | #Obtain current Wind Speed
70 | #cmd = "echo " + str(Wind) + "| cut -f 1 -d , | cut -f 2 -d : > /tmp/wind.txt"
71 | #os.system(cmd)
72 | #OnlyWind = open('/tmp/wind.txt', 'r').read()
73 | #OnlyWind = ''.join(OnlyWind.split()) # Strips away leading and trailing white space including newline, etc.
74 | OnlyWind = w.get_wind()['speed']
75 | print ("****************************************************************************************************")
76 | print ('Current wind speed is %s knots' %OnlyWind)
77 |
78 | #Obtain current Temperature
79 | #cmd = "echo " + str(Temperature) + "| cut -d : -f 4 | cut -d , -f 1 > /tmp/temp.txt"
80 | #os.system(cmd)
81 | #OnlyTemp = open('/tmp/temp.txt', 'r').read()
82 | #OnlyTemp.strip() # Strips away leading and trailing white space
83 | #OnlyTemp = ''.join(OnlyTemp.split()) # Strips away leading and trailing white space including newline, etc.
84 | OnlyTemp = w.get_temperature('celsius')['temp']
85 | print ('Current Temperature is %s DegC' %OnlyTemp)
86 |
87 | #Obtain current Min Temp
88 | MinTemp = w.get_temperature('celsius')['temp_min']
89 | print ('Current minimum temperature is %s DegC ' %MinTemp)
90 |
91 | #Obtain current Max Temp
92 | MaxTemp = w.get_temperature('celsius')['temp_max']
93 | print ('Current maximum temperature is %s DegC ' %MaxTemp)
94 |
95 | #Obtain current Humidity
96 | print ("Current humidity is = %s Percent" %Humidity)
97 |
98 | #Obtain current Cloud Ceiling
99 | print ("Current cloud ceiling is %s (x00) feet" %Clouds)
100 |
101 | #Obtain current Barometric Pressure
102 | cmd = "echo " + str(Pressure) + "| cut -d : -f 2 | cut -d , -f 1 > /tmp/pressure.txt"
103 | os.system(cmd)
104 | OnlyPressure = open('/tmp/pressure.txt', 'r').read()
105 | #OnlyPressure.strip()
106 | OnlyPressure = ''.join(OnlyPressure.split())
107 | print ('The Barometric Pressure is %s hPa' %OnlyPressure)
108 |
109 | WriteDataThingSpeak(OnlyWind,OnlyTemp,OnlyPressure,Humidity,Clouds)
110 | print ("****************************************************************************************************")
111 | print (" ")
112 | print (" ")
113 | time.sleep(30)
114 | return
115 |
116 | while True:
117 | WeatherProcessing()
118 | time.sleep(30)
119 |
120 | pause()
121 |
--------------------------------------------------------------------------------
/Write_To_LCD_Screen/LCDWriteToScreenStartScript:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # daemon - http://libslack.org/daemon/
4 | #
5 | # Copyright (C) 1999-2010 raf
6 | #
7 | # This program is free software; you can redistribute it and/or modify
8 | # it under the terms of the GNU General Public License as published by
9 | # the Free Software Foundation; either version 2 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # This program is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 | # GNU General Public License for more details.
16 | #
17 | # You should have received a copy of the GNU General Public License
18 | # along with this program; if not, write to the Free Software
19 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 | # or visit http://www.gnu.org/copyleft/gpl.html
21 | #
22 | # 20100612 raf
23 |
24 | # This is an example /etc/init.d script that shows how to use daemon(1)
25 | # in that context. Note that this would need to be modified quite a bit
26 | # to meet the usual conventions of any specific system but if you aren't
27 | # concerned about that it should be usable. At least it's a starting point.
28 |
29 | # The daemon's name (to ensure uniqueness and for stop, restart and status)
30 | name="RaspverryPiLCDWriter"
31 | # The path of the client executable
32 | command="/home/pi/Downloads/TW_Experiments/Python_Projects/RaspiPythonProjects/Write_To_LCD_Screen/WriteToScreenv0_14.py"
33 | # Any command line arguments for the client executable
34 | command_args=""
35 | # The path of the daemon executable
36 | daemon="/usr/bin/daemon"
37 |
38 | [ -x "$daemon" ] || exit 0
39 | [ -x "$command" ] || exit 0
40 |
41 | # Note: The following daemon option arguments could be in /etc/daemon.conf
42 | # instead. That would probably be better because if the command itself were
43 | # there as well then we could just use the name here to start the daemon.
44 | # Here's some code to do it here in case you prefer that.
45 |
46 | # Any command line arguments for the daemon executable (when starting)
47 | daemon_start_args="" # e.g. --inherit --env="ENV=VAR" --unsafe
48 | # The pidfile directory (need to force this so status works for normal users)
49 | pidfiles="/var/run"
50 | # The user[:group] to run as (if not to be run as root)
51 | user=""
52 | # The path to chroot to (otherwise /)
53 | chroot=""
54 | # The path to chdir to (otherwise /)
55 | chdir=""
56 | # The umask to adopt, if any
57 | umask=""
58 | # The syslog facility or filename for the client's stdout (otherwise discarded)
59 | stdout="daemon.info"
60 | # The syslog facility or filename for the client's stderr (otherwise discarded)
61 | stderr="daemon.err"
62 |
63 | case "$1" in
64 | start)
65 | # This if statement isn't strictly necessary but it's user friendly
66 | if "$daemon" --running --name "$name" --pidfiles "$pidfiles"
67 | then
68 | echo "$name is already running."
69 | else
70 | echo -n "Starting $name..."
71 | "$daemon" --respawn $daemon_start_args \
72 | --name "$name" --pidfiles "$pidfiles" \
73 | ${user:+--user $user} ${chroot:+--chroot $chroot} \
74 | ${chdir:+--chdir $chdir} ${umask:+--umask $umask} \
75 | ${stdout:+--stdout $stdout} ${stderr:+--stderr $stderr} \
76 | -- \
77 | "$command" $command_args
78 | echo done.
79 | fi
80 | ;;
81 |
82 | stop)
83 | # This if statement isn't strictly necessary but it's user friendly
84 | if "$daemon" --running --name "$name" --pidfiles "$pidfiles"
85 | then
86 | echo -n "Stopping $name..."
87 | "$daemon" --stop --name "$name" --pidfiles "$pidfiles"
88 | echo done.
89 | else
90 | echo "$name is not running."
91 | fi
92 | ;;
93 |
94 | restart|reload)
95 | if "$daemon" --running --name "$name" --pidfiles "$pidfiles"
96 | then
97 | echo -n "Restarting $name..."
98 | "$daemon" --restart --name "$name" --pidfiles "$pidfiles"
99 | echo done.
100 | else
101 | echo "$name is not running."
102 | exit 1
103 | fi
104 | ;;
105 |
106 | status)
107 | "$daemon" --running --name "$name" --pidfiles "$pidfiles" --verbose
108 | ;;
109 |
110 | *)
111 | echo "usage: $0 " >&2
112 | exit 1
113 | esac
114 |
115 | exit 0
116 |
117 | # vi:set ts=4 sw=4:
118 |
--------------------------------------------------------------------------------
/Write_To_LCD_Screen/LCDWriteToScreenStartScript_v0_11:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # daemon - http://libslack.org/daemon/
4 | #
5 | # Copyright (C) 1999-2010 raf
6 | #
7 | # This program is free software; you can redistribute it and/or modify
8 | # it under the terms of the GNU General Public License as published by
9 | # the Free Software Foundation; either version 2 of the License, or
10 | # (at your option) any later version.
11 | #
12 | # This program is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 | # GNU General Public License for more details.
16 | #
17 | # You should have received a copy of the GNU General Public License
18 | # along with this program; if not, write to the Free Software
19 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 | # or visit http://www.gnu.org/copyleft/gpl.html
21 | #
22 | # 20100612 raf
23 |
24 | # This is an example /etc/init.d script that shows how to use daemon(1)
25 | # in that context. Note that this would need to be modified quite a bit
26 | # to meet the usual conventions of any specific system but if you aren't
27 | # concerned about that it should be usable. At least it's a starting point.
28 |
29 | # The daemon's name (to ensure uniqueness and for stop, restart and status)
30 | name="RaspverryPiLCDWriter"
31 | # The path of the client executable
32 | command="/home/pi/Downloads/TW_Experiments/Python_Projects/RaspiPythonProjects/Write_To_LCD_Screen/WriteToScreenv0_14.py"
33 | # Any command line arguments for the client executable
34 | command_args=""
35 | # The path of the daemon executable
36 | daemon="/usr/bin/daemon"
37 |
38 | [ -x "$daemon" ] || exit 0
39 | [ -x "$command" ] || exit 0
40 |
41 | # Note: The following daemon option arguments could be in /etc/daemon.conf
42 | # instead. That would probably be better because if the command itself were
43 | # there as well then we could just use the name here to start the daemon.
44 | # Here's some code to do it here in case you prefer that.
45 |
46 | # Any command line arguments for the daemon executable (when starting)
47 | daemon_start_args="" # e.g. --inherit --env="ENV=VAR" --unsafe
48 | # The pidfile directory (need to force this so status works for normal users)
49 | pidfiles="/var/run"
50 | # The user[:group] to run as (if not to be run as root)
51 | user=""
52 | # The path to chroot to (otherwise /)
53 | chroot=""
54 | # The path to chdir to (otherwise /)
55 | chdir=""
56 | # The umask to adopt, if any
57 | umask=""
58 | # The syslog facility or filename for the client's stdout (otherwise discarded)
59 | stdout="daemon.info"
60 | # The syslog facility or filename for the client's stderr (otherwise discarded)
61 | stderr="daemon.err"
62 |
63 | case "$1" in
64 | start)
65 | # This if statement isn't strictly necessary but it's user friendly
66 | if "$daemon" --running --name "$name" --pidfiles "$pidfiles"
67 | then
68 | echo "$name is already running."
69 | else
70 | echo -n "Starting $name..."
71 | "$daemon" --respawn $daemon_start_args \
72 | --name "$name" --pidfiles "$pidfiles" \
73 | ${user:+--user $user} ${chroot:+--chroot $chroot} \
74 | ${chdir:+--chdir $chdir} ${umask:+--umask $umask} \
75 | ${stdout:+--stdout $stdout} ${stderr:+--stderr $stderr} \
76 | -- \
77 | "$command" $command_args
78 | echo done.
79 | fi
80 | ;;
81 |
82 | stop)
83 | # This if statement isn't strictly necessary but it's user friendly
84 | if "$daemon" --running --name "$name" --pidfiles "$pidfiles"
85 | then
86 | echo -n "Stopping $name..."
87 | "$daemon" --stop --name "$name" --pidfiles "$pidfiles"
88 | echo done.
89 | else
90 | echo "$name is not running."
91 | fi
92 | ;;
93 |
94 | restart|reload)
95 | if "$daemon" --running --name "$name" --pidfiles "$pidfiles"
96 | then
97 | echo -n "Restarting $name..."
98 | "$daemon" --restart --name "$name" --pidfiles "$pidfiles"
99 | echo done.
100 | else
101 | echo "$name is not running."
102 | exit 1
103 | fi
104 | ;;
105 |
106 | status)
107 | "$daemon" --running --name "$name" --pidfiles "$pidfiles" --verbose
108 | ;;
109 |
110 | *)
111 | echo "usage: $0 " >&2
112 | exit 1
113 | esac
114 |
115 | exit 0
116 |
117 | # vi:set ts=4 sw=4:
118 |
--------------------------------------------------------------------------------
/Write_To_LCD_Screen/LCDWriteToScreenStartScript_v0_12:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | ### BEGIN INIT INFO
4 | # Provides: RaspberryPiLCDWriter
5 | # Required-Start: $all
6 | # Required-Stop: $all
7 | # Default-Start: 2 3 4 5
8 | # Default-Stop: 0 1 6
9 | # Short-Description: LCD Writer Service
10 | # Description: Start / Restart / Stop LCD Writer Service
11 | ### END INIT INFO
12 |
13 | #exec > /var/log/LCDWriter.log 2>&1
14 |
15 | # daemon - http://libslack.org/daemon/
16 | #
17 | # Copyright (C) 1999-2010 raf
18 | #
19 | # This program is free software; you can redistribute it and/or modify
20 | # it under the terms of the GNU General Public License as published by
21 | # the Free Software Foundation; either version 2 of the License, or
22 | # (at your option) any later version.
23 | #
24 | # This program is distributed in the hope that it will be useful,
25 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
26 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
27 | # GNU General Public License for more details.
28 | #
29 | # You should have received a copy of the GNU General Public License
30 | # along with this program; if not, write to the Free Software
31 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
32 | # or visit http://www.gnu.org/copyleft/gpl.html
33 | #
34 | # 20100612 raf
35 |
36 | # This is an example /etc/init.d script that shows how to use daemon(1)
37 | # in that context. Note that this would need to be modified quite a bit
38 | # to meet the usual conventions of any specific system but if you aren't
39 | # concerned about that it should be usable. At least it's a starting point.
40 |
41 | # The daemon's name (to ensure uniqueness and for stop, restart and status)
42 | name="RaspberryPiLCDWriter"
43 | # The path of the client executable
44 | command="/home/pi/Downloads/TW_Experiments/Python_Projects/RaspiPythonProjects/Write_To_LCD_Screen/WriteToScreenv0_14.py"
45 | # Any command line arguments for the client executable
46 | command_args=""
47 | # The path of the daemon executable
48 | daemon="/usr/bin/daemon"
49 |
50 | [ -x "$daemon" ] || exit 0
51 | [ -x "$command" ] || exit 0
52 |
53 | # Note: The following daemon option arguments could be in /etc/daemon.conf
54 | # instead. That would probably be better because if the command itself were
55 | # there as well then we could just use the name here to start the daemon.
56 | # Here's some code to do it here in case you prefer that.
57 |
58 | # Any command line arguments for the daemon executable (when starting)
59 | daemon_start_args="" # e.g. --inherit --env="ENV=VAR" --unsafe
60 | # The pidfile directory (need to force this so status works for normal users)
61 | pidfiles="/var/run"
62 | # The user[:group] to run as (if not to be run as root)
63 | user=""
64 | # The path to chroot to (otherwise /)
65 | chroot=""
66 | # The path to chdir to (otherwise /)
67 | chdir=""
68 | # The umask to adopt, if any
69 | umask=""
70 | # The syslog facility or filename for the client's stdout (otherwise discarded)
71 | stdout="daemon.info"
72 | # The syslog facility or filename for the client's stderr (otherwise discarded)
73 | stderr="daemon.err"
74 |
75 | case "$1" in
76 | start)
77 | # This if statement isn't strictly necessary but it's user friendly
78 | if "$daemon" --running --name "$name" --pidfiles "$pidfiles"
79 | then
80 | echo "$name is already running."
81 | else
82 | echo -n "Starting $name..."
83 | "$daemon" --respawn $daemon_start_args \
84 | --name "$name" --pidfiles "$pidfiles" \
85 | ${user:+--user $user} ${chroot:+--chroot $chroot} \
86 | ${chdir:+--chdir $chdir} ${umask:+--umask $umask} \
87 | ${stdout:+--stdout $stdout} ${stderr:+--stderr $stderr} \
88 | -- \
89 | "$command" $command_args
90 | echo done.
91 | fi
92 | ;;
93 |
94 | stop)
95 | # This if statement isn't strictly necessary but it's user friendly
96 | if "$daemon" --running --name "$name" --pidfiles "$pidfiles"
97 | then
98 | echo -n "Stopping $name..."
99 | "$daemon" --stop --name "$name" --pidfiles "$pidfiles"
100 | echo done.
101 | else
102 | echo "$name is not running."
103 | fi
104 | ;;
105 |
106 | restart|reload)
107 | if "$daemon" --running --name "$name" --pidfiles "$pidfiles"
108 | then
109 | echo -n "Restarting $name..."
110 | "$daemon" --restart --name "$name" --pidfiles "$pidfiles"
111 | echo done.
112 | else
113 | echo "$name is not running."
114 | exit 1
115 | fi
116 | ;;
117 |
118 | status)
119 | "$daemon" --running --name "$name" --pidfiles "$pidfiles" --verbose
120 | ;;
121 |
122 | *)
123 | echo "usage: $0 " >&2
124 | exit 1
125 | esac
126 |
127 | exit 0
128 |
129 | # vi:set ts=4 sw=4:
130 |
--------------------------------------------------------------------------------
/Write_To_LCD_Screen/README.md:
--------------------------------------------------------------------------------
1 | Weather Reporting and Writing to LCD Screen (https://github.com/tangowhisky37/RaspiPythonProjects/tree/master/Write_To_LCD_Screen)
2 | - While the Sense_Temp_Humidity project (above) was designed as a simple IoT (Internet of Things) project to pull data from my DHT11 on the Raspberry Pi and upload it to ThingSpeak, Weather Reporting pulls data from OpenWeatherMap and uploads the data to ThingSpeak. The Weather Reporting project (above) was also updated to log information to the local file system for further analysis.
3 | - The objective of this project was to obtain the last value of the temperature, humidity (inside the house, in the city) and display it to the screen.
4 | - The program builds upon another LCD library that was downloaded from the internet and is made available in the same folder
5 | - The code for obtaining the temperature, humidity and other information from the OWM service using python is described above. Please reference the previous project.
6 | - This project simply pulls the data from the log file and displays it on the screen, refreshing it regularly (a condensed sketch of this step appears after this list).
7 | - Updates (030617) -
8 | - I ran a dist-upgrade to bring Raspbian up to the latest version. For some reason I found an additional entry in the "/boot/config.txt" file called "dtparam=i2c=on1=on"
9 | - This entry was causing me a lot of grief and as a result the LCD wasn't being seen on the i2c bus by the Raspberry Pi 3.
10 | - I would recommend looking for this entry and disabling it if you have issues finding your LCD on the i2c bus.
11 | - Please note the other entry "dtparam=i2c_arm=on" is required.
12 |
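For reference, a condensed sketch of the read-and-display step in Python 3, assuming the RPi_I2C_driver module from this folder, a 4-line I2C LCD, and the CSV layouts written by the Sense_Temp_Humidity and Weather_Reporting scripts (temperature in column 2, humidity in column 4). It reads the last row in pure Python instead of shelling out to tail/cut the way the versioned scripts below do.

    import csv
    from time import sleep

    import RPi_I2C_driver

    def last_row(path):
        # Return the final row of the CSV log as a list of strings.
        with open(path, newline='') as f:
            return list(csv.reader(f))[-1]

    room = last_row('/opt/data/temphumidity.csv')
    outside = last_row('/opt/data/temphumidityOWM.csv')

    lcd = RPi_I2C_driver.lcd()
    lcd.lcd_display_string('RoomTemp %s DegC' % room[1], 1)
    lcd.lcd_display_string('OutTemp %s DegC' % outside[1], 2)
    lcd.lcd_display_string('RmHumidity %s Pcnt' % room[3], 3)
    lcd.lcd_display_string('OutHumidity %s Pcnt' % outside[3], 4)

    sleep(5)
    lcd.lcd_clear()
    lcd.backlight(0)

Reading the whole file with csv.reader is fine for a log of this size; for a very large log you would read only the tail of the file instead.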
--------------------------------------------------------------------------------
/Write_To_LCD_Screen/RPi_I2C_driver.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tangowhisky37/RaspiPythonProjects/f4f221ffb368da17d2e4a169f4eea7f76274f35a/Write_To_LCD_Screen/RPi_I2C_driver.pyc
--------------------------------------------------------------------------------
/Write_To_LCD_Screen/WriteToScreenv0_11.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | # requires RPi_I2C_driver.py
4 | import RPi_I2C_driver
5 | from time import *
6 |
7 | mylcd = RPi_I2C_driver.lcd()
8 | # test 2
9 | mylcd.lcd_display_string("Testing the screen", 1)
10 | mylcd.lcd_display_string(" Hi there mate!!!", 2)
11 | mylcd.lcd_display_string(" Here's another one.", 3)
12 | mylcd.lcd_display_string(" Please watch out!!!.", 4)
13 |
14 | sleep(2) # 2 sec delay
15 |
16 | mylcd.lcd_clear()
17 |
18 | # let's define a custom icon, consisting of 6 individual characters
19 | # 3 chars in the first row and 3 chars in the second row
20 | fontdata1 = [
21 | # Char 0 - Upper-left
22 | [ 0x00, 0x00, 0x03, 0x04, 0x08, 0x19, 0x11, 0x10 ],
23 | # Char 1 - Upper-middle
24 | [ 0x00, 0x1F, 0x00, 0x00, 0x00, 0x11, 0x11, 0x00 ],
25 | # Char 2 - Upper-right
26 | [ 0x00, 0x00, 0x18, 0x04, 0x02, 0x13, 0x11, 0x01 ],
27 | # Char 3 - Lower-left
28 | [ 0x12, 0x13, 0x1b, 0x09, 0x04, 0x03, 0x00, 0x00 ],
29 | # Char 4 - Lower-middle
30 | [ 0x00, 0x11, 0x1f, 0x1f, 0x0e, 0x00, 0x1F, 0x00 ],
31 | # Char 5 - Lower-right
32 | [ 0x09, 0x19, 0x1b, 0x12, 0x04, 0x18, 0x00, 0x00 ],
33 | # Char 6 - my test
34 | [ 0x1f,0x0,0x4,0xe,0x0,0x1f,0x1f,0x1f],
35 | ]
36 |
37 | # Load logo chars (fontdata1)
38 | mylcd.lcd_load_custom_chars(fontdata1)
39 |
40 |
41 | # Write first three chars to row 1 directly
42 | mylcd.lcd_write(0x80)
43 | mylcd.lcd_write_char(0)
44 | mylcd.lcd_write_char(1)
45 | mylcd.lcd_write_char(2)
46 | # Write next three chars to row 2 directly
47 | mylcd.lcd_write(0xC0)
48 | mylcd.lcd_write_char(3)
49 | mylcd.lcd_write_char(4)
50 | mylcd.lcd_write_char(5)
51 | sleep(2)
52 |
53 | mylcd.lcd_clear()
54 |
55 | mylcd.lcd_display_string_pos("Testing",1,1) # row 1, column 1
56 | sleep(1)
57 | mylcd.lcd_display_string_pos("Testing",2,3) # row 2, column 3
58 | sleep(1)
59 | mylcd.lcd_clear()
60 |
61 | # Now let's define some more custom characters
62 | fontdata2 = [
63 | # Char 0 - left arrow
64 | [ 0x1,0x3,0x7,0xf,0xf,0x7,0x3,0x1 ],
65 | # Char 1 - left one bar
66 | [ 0x10,0x10,0x10,0x10,0x10,0x10,0x10,0x10 ],
67 | # Char 2 - left two bars
68 | [ 0x18,0x18,0x18,0x18,0x18,0x18,0x18,0x18 ],
69 | # Char 3 - left 3 bars
70 | [ 0x1c,0x1c,0x1c,0x1c,0x1c,0x1c,0x1c,0x1c ],
71 | # Char 4 - left 4 bars
72 | [ 0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e,0x1e ],
73 | # Char 5 - left start
74 | [ 0x0,0x1,0x3,0x7,0xf,0x1f,0x1f,0x1f ],
75 | # Char 6 -
76 | # [ ],
77 | ]
78 |
79 | # Load logo chars from the second set
80 | mylcd.lcd_load_custom_chars(fontdata2)
81 |
82 | block = chr(255) # block character, built-in
83 |
84 | # display two blocks in columns 5 and 6 (i.e. AFTER pos. 4) in row 1
85 | # first draw two blocks on 5th column (cols 5 and 6), starts from 0
86 | mylcd.lcd_display_string_pos(block * 2,1,4)
87 |
88 | #
89 | pauza = 0.2 # define duration of sleep(x)
90 | #
91 | # now draw cust. chars starting from col. 7 (pos. 6)
92 |
93 | pos = 6
94 | mylcd.lcd_display_string_pos(unichr(1),1,6)
95 | sleep(pauza)
96 |
97 | mylcd.lcd_display_string_pos(unichr(2),1,pos)
98 | sleep(pauza)
99 |
100 | mylcd.lcd_display_string_pos(unichr(3),1,pos)
101 | sleep(pauza)
102 |
103 | mylcd.lcd_display_string_pos(unichr(4),1,pos)
104 | sleep(pauza)
105 |
106 | mylcd.lcd_display_string_pos(block,1,pos)
107 | sleep(pauza)
108 |
109 | # and another one, same as above, 1 char-space to the right
110 | pos = pos +1 # increase column by one
111 |
112 | mylcd.lcd_display_string_pos(unichr(1),1,pos)
113 | sleep(pauza)
114 | mylcd.lcd_display_string_pos(unichr(2),1,pos)
115 | sleep(pauza)
116 | mylcd.lcd_display_string_pos(unichr(3),1,pos)
117 | sleep(pauza)
118 | mylcd.lcd_display_string_pos(unichr(4),1,pos)
119 | sleep(pauza)
120 | mylcd.lcd_display_string_pos(block,1,pos)
121 | sleep(pauza)
122 |
123 |
124 | #
125 | # now again load first set of custom chars - smiley
126 | mylcd.lcd_load_custom_chars(fontdata1)
127 |
128 | mylcd.lcd_display_string_pos(unichr(0),1,9)
129 | mylcd.lcd_display_string_pos(unichr(1),1,10)
130 | mylcd.lcd_display_string_pos(unichr(2),1,11)
131 | mylcd.lcd_display_string_pos(unichr(3),2,9)
132 | mylcd.lcd_display_string_pos(unichr(4),2,10)
133 | mylcd.lcd_display_string_pos(unichr(5),2,11)
134 |
135 | sleep(2)
136 | mylcd.lcd_clear()
137 | sleep(1)
138 | mylcd.backlight(0)
139 |
--------------------------------------------------------------------------------
/Write_To_LCD_Screen/WriteToScreenv0_12.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | # requires RPi_I2C_driver.py
4 | import RPi_I2C_driver
5 | from time import *
6 | import os
7 | import subprocess
8 |
9 |
10 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidity.csv | cut -d , -f 2 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
11 | (out, err) = proc.communicate()
12 | #print "program output:", out
13 | roomtemp = out.strip()
14 |
15 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidity.csv | cut -d , -f 4 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
16 | (out, err) = proc.communicate()
17 | #print "program output:", out
18 | roomhumidity = out.strip()
19 |
20 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 2 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
21 | (out, err) = proc.communicate()
22 | #print "program output:", out
23 | outsidetemp = out.strip()
24 |
25 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 4 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
26 | (out, err) = proc.communicate()
27 | #print "program output:", out
28 | outsidehumidity = out.strip()
29 |
30 |
31 | mylcd = RPi_I2C_driver.lcd()
32 | mylcd.lcd_display_string("Room Temp - %s DegC" %roomtemp, 1)
33 | mylcd.lcd_display_string("Room Hum - %s Pcnt" %roomhumidity, 2)
34 | mylcd.lcd_display_string("Out Temp - %s DegC" %outsidetemp, 3)
35 | mylcd.lcd_display_string("Out Hum - %s Pcnt" %outsidehumidity, 4)
36 |
37 | sleep(5) # 5 sec delay
38 |
39 | mylcd.lcd_clear()
40 | mylcd.backlight(0)
41 |
42 |
--------------------------------------------------------------------------------
/Write_To_LCD_Screen/WriteToScreenv0_13.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | # requires RPi_I2C_driver.py
4 | import RPi_I2C_driver
5 | from time import *
6 | import os
7 | import subprocess
8 |
9 |
10 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidity.csv | cut -d , -f 2 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
11 | (out, err) = proc.communicate()
12 | #print "program output:", out
13 | roomtemp = out.strip()
14 |
15 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidity.csv | cut -d , -f 4 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
16 | (out, err) = proc.communicate()
17 | #print "program output:", out
18 | roomhumidity = out.strip()
19 |
20 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 2 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
21 | (out, err) = proc.communicate()
22 | #print "program output:", out
23 | outsidetemp = out.strip()
24 |
25 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 4 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
26 | (out, err) = proc.communicate()
27 | #print "program output:", out
28 | outsidehumidity = out.strip()
29 |
30 | cmd = "sudo rmmod i2c_bcm2708; sudo modprobe i2c_bcm2708;"
31 | os.system(cmd)
32 | sleep (2)
33 | mylcd = RPi_I2C_driver.lcd()
34 | mylcd.lcd_display_string("RmTemp %s DegC" %roomtemp, 1)
35 | mylcd.lcd_display_string("RmHumidity %s Pcnt" %roomhumidity, 2)
36 | mylcd.lcd_display_string("OutTemp %s DegC" %outsidetemp, 3)
37 | mylcd.lcd_display_string("OutHumidity %s Pcnt" %outsidehumidity, 4)
38 |
39 | sleep(5) # 5 sec delay
40 |
41 | mylcd.lcd_clear()
42 | mylcd.backlight(0)
43 |
44 |
--------------------------------------------------------------------------------
/Write_To_LCD_Screen/WriteToScreenv0_14.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | # requires RPi_I2C_driver.py
4 | import RPi_I2C_driver
5 | from time import *
6 | import os
7 | import subprocess
8 |
9 | #Raspberry Pi 3 doesn't detect the connected I2C devices
10 | #The code below (removal and insertion of I2C modules) is required to
11 | #force detection of attached devices
12 | #cmd = "sudo rmmod i2c_bcm2708; sudo modprobe i2c_bcm2708;"
13 | #os.system(cmd)
14 |
15 | while True:
16 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidity.csv | cut -d , -f 2 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
17 | (out, err) = proc.communicate()
18 | #print "program output:", out
19 | roomtemp = out.strip()
20 |
21 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidity.csv | cut -d , -f 4 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
22 | (out, err) = proc.communicate()
23 | #print "program output:", out
24 | roomhumidity = out.strip()
25 |
26 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 2 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
27 | (out, err) = proc.communicate()
28 | #print "program output:", out
29 | outsidetemp = out.strip()
30 |
31 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 4 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
32 | (out, err) = proc.communicate()
33 | #print "program output:", out
34 | outsidehumidity = out.strip()
35 |
36 | #Raspberry Pi 3 doesn't detect the connected I2C devices
37 | #The code below (removal and insertion of I2C modules) is required to
38 | #force detection of attached devices
39 | cmd = "sudo rmmod i2c_bcm2708; sudo modprobe i2c_bcm2708;"
40 | os.system(cmd)
41 | mylcd = RPi_I2C_driver.lcd()
42 | mylcd.lcd_display_string("RmTemp %s DegC" %roomtemp, 1)
43 | mylcd.lcd_display_string("RmHumidity %s Pcnt" %roomhumidity, 2)
44 | mylcd.lcd_display_string("OutTemp %s DegC" %outsidetemp, 3)
45 | mylcd.lcd_display_string("OutHumidity %s Pcnt" %outsidehumidity, 4)
46 |
47 | sleep(5) # 5 sec delay
48 | mylcd.lcd_clear()
49 | mylcd.backlight(0)
50 | sleep(120) # 120 sec delay
51 |
52 | #mylcd.lcd_display_string(" !!! Downloading Updates !!!", 2)
53 | #sleep(5) # 5 sec delay
54 | #mylcd.lcd_clear()
55 | #mylcd.backlight(0)
56 |
57 |
--------------------------------------------------------------------------------
/Write_To_LCD_Screen/WriteToScreenv0_15.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | # requires RPi_I2C_driver.py
4 | import RPi_I2C_driver
5 | from time import *
6 | import os
7 | import subprocess
8 |
9 | #Raspberry Pi 3 doesn't detect the connected I2C devices
10 | #The code below (removal and insertion of I2C modules) is required to
11 | #force detection of attached devices
12 | #cmd = "sudo rmmod i2c_bcm2708; sudo modprobe i2c_bcm2708;"
13 | #os.system(cmd)
14 |
15 | while True:
16 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidity.csv | cut -d , -f 2 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
17 | (out, err) = proc.communicate()
18 | #print "program output:", out
19 | roomtemp = out.strip()
20 |
21 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidity.csv | cut -d , -f 4 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
22 | (out, err) = proc.communicate()
23 | #print "program output:", out
24 | roomhumidity = out.strip()
25 |
26 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 2 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
27 | (out, err) = proc.communicate()
28 | #print "program output:", out
29 | outsidetemp = out.strip()
30 |
31 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 4 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
32 | (out, err) = proc.communicate()
33 | #print "program output:", out
34 | outsidehumidity = out.strip()
35 |
36 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 8 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
37 | (out, err) = proc.communicate()
38 | #print "program output:", out
39 | outairspeed = out.strip()
40 |
41 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 10 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
42 | (out, err) = proc.communicate()
43 | #print "program output:", out
44 | outcloudcover = out.strip()
45 |
46 | #Raspberry Pi 3 doesn't detect the connected I2C devices
47 | #The code below (removal and insertion of I2C modules) is required to
48 | #force detection of attached devices
49 | cmd = "sudo rmmod i2c_bcm2708; sudo modprobe i2c_bcm2708;"
50 | os.system(cmd)
51 | mylcd = RPi_I2C_driver.lcd()
52 | mylcd.lcd_display_string("RoomTemp %s DegC" %roomtemp, 1)
53 | mylcd.lcd_display_string("OutTemp %s DegC" %outsidetemp, 2)
54 | #mylcd.lcd_display_string_pos("OtTp %s C" %outsidetemp,1,11) //Tried displaying both in one line but doesn't quite work
55 | mylcd.lcd_display_string("RmHumidity %s Pcnt" %roomhumidity, 3)
56 | mylcd.lcd_display_string("OutHumidity %s Pcnt" %outsidehumidity, 4)
57 | sleep(5) # 5 sec delay
58 | mylcd.lcd_clear()
59 | mylcd.backlight(0)
60 | sleep(120) # delay
61 |
62 | cmd = "sudo rmmod i2c_bcm2708; sudo modprobe i2c_bcm2708;"
63 | os.system(cmd)
64 | mylcd = RPi_I2C_driver.lcd()
65 | mylcd.lcd_clear()
66 | mylcd.lcd_display_string("RoomTemp %s C" %roomtemp, 1)
67 | mylcd.lcd_display_string("OutTemp %s C" %outsidetemp, 2)
68 | mylcd.lcd_display_string("Airspeed %s Knots" %outairspeed, 3)
69 | mylcd.lcd_display_string("Clouds %s00 Feet" %outcloudcover, 4)
70 | sleep(5) # 5 sec delay
71 | mylcd.lcd_clear()
72 | mylcd.backlight(0)
73 | sleep(120) # delay
74 |
75 | #mylcd.lcd_display_string(" !!! Downloading Updates !!!", 2)
76 | #sleep(5) # 5 sec delay
77 | #mylcd.lcd_clear()
78 | #mylcd.backlight(0)
79 |
80 |
--------------------------------------------------------------------------------
/Write_To_LCD_Screen/WriteToScreenv0_16.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | # requires RPi_I2C_driver.py
4 | import RPi_I2C_driver
5 | from time import *
6 | import os
7 | import subprocess
8 |
9 | #Raspberry Pi 3 doesn't detect the connected I2C devices
10 | #The code below (removal and insertion of I2C modules) is required to
11 | #force detection of attached devices
12 | #cmd = "sudo rmmod i2c_bcm2708; sudo modprobe i2c_bcm2708;"
13 | #os.system(cmd)
14 |
15 | while True:
16 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidity.csv | cut -d , -f 2 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
17 | (out, err) = proc.communicate()
18 | #print "program output:", out
19 | roomtemp = out.strip()
20 |
21 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidity.csv | cut -d , -f 4 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
22 | (out, err) = proc.communicate()
23 | #print "program output:", out
24 | roomhumidity = out.strip()
25 |
26 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 2 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
27 | (out, err) = proc.communicate()
28 | #print "program output:", out
29 | outsidetemp = out.strip()
30 |
31 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 4 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
32 | (out, err) = proc.communicate()
33 | #print "program output:", out
34 | outsidehumidity = out.strip()
35 |
36 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 8 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
37 | (out, err) = proc.communicate()
38 | #print "program output:", out
39 | outairspeed = out.strip()
40 |
41 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 10 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
42 | (out, err) = proc.communicate()
43 | #print "program output:", out
44 | outcloudcover = out.strip()
45 |
46 | #Raspberry Pi 3 doesn't detect the connected I2C devices
47 | #The code below (removal and insertion of I2C modules) is required to
48 | #force detection of attached devices
49 | cmd = "sudo rmmod i2c_bcm2708; sudo modprobe i2c_bcm2708;"
50 | os.system(cmd)
51 | mylcd = RPi_I2C_driver.lcd()
52 | mylcd.lcd_display_string("RoomTemp %s DegC" %roomtemp, 1)
53 | mylcd.lcd_display_string("OutTemp %s DegC" %outsidetemp, 2)
54 | #mylcd.lcd_display_string_pos("OtTp %s C" %outsidetemp,1,11) //Tried displaying both in one line but doesn't quite work
55 | mylcd.lcd_display_string("RmHumidity %s Pcnt" %roomhumidity, 3)
56 | mylcd.lcd_display_string("OutHumidity %s Pcnt" %outsidehumidity, 4)
57 | #disabling sleep, lcd_clear() and lcd_backlight() to let the information remain on the screen
58 | #sleep(5) # 5 sec delay
59 | #mylcd.lcd_clear()
60 | #mylcd.backlight(0)
61 | sleep(300) # delay
62 |
63 | cmd = "sudo rmmod i2c_bcm2708; sudo modprobe i2c_bcm2708;"
64 | os.system(cmd)
65 | mylcd = RPi_I2C_driver.lcd()
66 | mylcd.lcd_clear()
67 | mylcd.lcd_display_string("RoomTemp %s C" %roomtemp, 1)
68 | mylcd.lcd_display_string("OutTemp %s C" %outsidetemp, 2)
69 | mylcd.lcd_display_string("Airspeed %s Knots" %outairspeed, 3)
70 | mylcd.lcd_display_string("Clouds %s00 Feet" %outcloudcover, 4)
71 | #disabling sleep, lcd_clear() and lcd_backlight() to let the information remain on the screen
72 | #sleep(5) # 5 sec delay
73 | #mylcd.lcd_clear()
74 | #mylcd.backlight(0)
75 | sleep(300) # delay
76 |
77 | #mylcd.lcd_display_string(" !!! Downloading Updates !!!", 2)
78 | #sleep(5) # 5 sec delay
79 | #mylcd.lcd_clear()
80 | #mylcd.backlight(0)
81 |
82 |
--------------------------------------------------------------------------------
/Write_To_LCD_Screen/runme.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | sudo /sbin/rmmod i2c_bcm2708
4 | sudo /sbin/modprobe i2c_bcm2708
5 | cd /home/pi/Downloads/TW_Experiments/Python_Projects/RaspiPythonProjects/LCD_Write/
6 | ./WriteToScreenv0_13.py
7 |
--------------------------------------------------------------------------------
/Write_Weather_To_Twitter/Write_To_Twitter_v0.11.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | from time import *
4 | import os
5 | import subprocess
6 |
7 | #cmd = "sudo rmmod i2c_bcm2708; sudo modprobe i2c_bcm2708;"
8 | #os.system(cmd)
9 |
10 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidity.csv | cut -d , -f 2 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
11 | (out, err) = proc.communicate()
12 | #print "program output:", out
13 | roomtemp = out.strip()
14 |
15 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidity.csv | cut -d , -f 4 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
16 | (out, err) = proc.communicate()
17 | #print "program output:", out
18 | roomhumidity = out.strip()
19 |
20 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 2 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
21 | (out, err) = proc.communicate()
22 | #print "program output:", out
23 | outsidetemp = out.strip()
24 |
25 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 4 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
26 | (out, err) = proc.communicate()
27 | #print "program output:", out
28 | outsidehumidity = out.strip()
29 |
30 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 8 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
31 | (out, err) = proc.communicate()
32 | #print "program output:", out
33 | outairspeed = out.strip()
34 |
35 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 10 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
36 | (out, err) = proc.communicate()
37 | #print "program output:", out
38 | outcloudcover = out.strip()
39 |
40 | #cmd = "sudo rmmod i2c_bcm2708; sudo modprobe i2c_bcm2708;"
41 | #os.system(cmd)
42 | print("The temp inside is %s DegC, the temp outside is %s DegC, the humidity inside is %s Percent, the humidity outside is %s Percent, the airspeed is %s Knots and cloud cover is at %s x1000 feet." %(roomtemp, outsidetemp, roomhumidity, outsidehumidity, outairspeed, outcloudcover))
43 |
44 |
--------------------------------------------------------------------------------
/Write_Weather_To_Twitter/Write_To_Twitter_v0.12.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | from time import *
4 | import os
5 | import sys
6 | import subprocess
7 | import twitter
8 |
9 | api = twitter.Api(consumer_key='jZdfgsdfgsdhdfghdfghdfjgD45V', consumer_secret='WwsNfzxfgdfgde56tdsfgzdfgsdfgm9ZKPdapLn6Mo4', access_token_key='77896786877857861297-JDSsdfdfgsdfgdbghghjfghjVsls', access_token_secret='6svPCxsI7Xkxvbterg44534dfgsdagsdfghhTwGVOGzg9pTz') #These are dummy keys, please replace with your original keys
10 |
11 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidity.csv | cut -d , -f 2 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
12 | (out, err) = proc.communicate()
13 | #print "program output:", out
14 | roomtemp = out.strip()
15 |
16 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidity.csv | cut -d , -f 4 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
17 | (out, err) = proc.communicate()
18 | #print "program output:", out
19 | roomhumidity = out.strip()
20 |
21 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 2 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
22 | (out, err) = proc.communicate()
23 | #print "program output:", out
24 | outsidetemp = out.strip()
25 |
26 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 4 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
27 | (out, err) = proc.communicate()
28 | #print "program output:", out
29 | outsidehumidity = out.strip()
30 |
31 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 8 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
32 | (out, err) = proc.communicate()
33 | #print "program output:", out
34 | outairspeed = out.strip()
35 |
36 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 10 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
37 | (out, err) = proc.communicate()
38 | #print "program output:", out
39 | outcloudcover = out.strip()
40 |
41 | #print("The temp inside is %s DegC, the temp outside is %s DegC, the humidity inside is %s Percent, the humidity outside is %s Percent, the airspeed is %s Knots and cloud cover is at %s x1000 feet." %(roomtemp, outsidetemp, roomhumidity, outsidehumidity, outairspeed, outcloudcover))
42 |
43 | message = "The temp inside is " + roomtemp + " DegC, the temp outside is " + outsidetemp + " DegC, the humidity inside is " + roomhumidity + " Percent, the humidity outside is " + outsidehumidity + " Percent, the airspeed is " + outairspeed + " Knots and cloud cover is at " + outcloudcover + " x1000 feet."
44 | print(message)
45 |
46 | status = api.PostUpdate(message)
47 |
48 |
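The keys in this script are placeholders, but the pattern of hard-coding credentials is easy to copy by accident. A minimal sketch, assuming the four values are exported as environment variables (the TW_* names are made up for illustration), using the same twitter.Api and PostUpdate calls as above:

#!/usr/bin/python
# Sketch only: read Twitter credentials from the environment instead of
# hard-coding them in the script. The TW_* variable names are an assumption.
import os
import twitter

api = twitter.Api(
    consumer_key=os.environ["TW_CONSUMER_KEY"],
    consumer_secret=os.environ["TW_CONSUMER_SECRET"],
    access_token_key=os.environ["TW_ACCESS_TOKEN_KEY"],
    access_token_secret=os.environ["TW_ACCESS_TOKEN_SECRET"],
)

status = api.PostUpdate("Test tweet from the weather station")
print(status.text)

Exporting the variables in the shell (or scheduler) that runs the script keeps them out of version control.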
--------------------------------------------------------------------------------
/Write_Weather_To_Twitter/Write_To_Twitter_v0.13.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | from time import *
4 | import os
5 | import sys
6 | import subprocess
7 | import twitter
8 |
9 | api = twitter.Api(consumer_key='jZdfgsdfgsdhdfghdfghdfjgD45V', consumer_secret='WwsNfzxfgdfgde56tdsfgzdfgsdfgm9ZKPdapLn6Mo4', access_token_key='77896786877857861297-JDSsdfdfgsdfgdbghghjfghjVsls', access_token_secret='6svPCxsI7Xkxvbterg44534dfgsdagsdfghhTwGVOGzg9pTz') #These are dummy keys, please replace with your original keys
10 |
11 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidity.csv | cut -d , -f 2 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
12 | (out, err) = proc.communicate()
13 | #print "program output:", out
14 | roomtemp = out.strip()
15 |
16 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidity.csv | cut -d , -f 4 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
17 | (out, err) = proc.communicate()
18 | #print "program output:", out
19 | roomhumidity = out.strip()
20 |
21 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 2 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
22 | (out, err) = proc.communicate()
23 | #print "program output:", out
24 | outsidetemp = out.strip()
25 |
26 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 4 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
27 | (out, err) = proc.communicate()
28 | #print "program output:", out
29 | outsidehumidity = out.strip()
30 |
31 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 8 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
32 | (out, err) = proc.communicate()
33 | #print "program output:", out
34 | outairspeed = out.strip()
35 |
36 | proc = subprocess.Popen(["tail -n 1 /opt/data/temphumidityOWM.csv | cut -d , -f 10 | cut -d \\\" " + "-f 2"], stdout=subprocess.PIPE, shell=True)
37 | (out, err) = proc.communicate()
38 | #print "program output:", out
39 | outcloudcover = out.strip()
40 |
41 | #print("The temp inside is %s DegC, the temp outside is %s DegC, the humidity inside is %s Percent, the humidity outside is %s Percent, the airspeed is %s Knots and cloud cover is at %s x1000 feet." %(roomtemp, outsidetemp, roomhumidity, outsidehumidity, outairspeed, outcloudcover))
42 |
43 | #message = "Intemp - " + roomtemp + " DegC, Outtemp - " + outsidetemp + " DegC, InHumidity - " + roomhumidity + " %, OutHumidity - " + outsidehumidity + " %, airspeed - " + outairspeed + " Kts, Cloud cover - " + outcloudcover + " x000 ft."
44 |
45 | message = "Intemp " + roomtemp + " C, Outtemp " + outsidetemp + " C, InHumidity " + roomhumidity + " %, OutHumidity " + outsidehumidity + " %, airspeed " + outairspeed + " Kts, Cloud " + outcloudcover + " x000 ft."
46 | print(message)
47 |
48 | status = api.PostUpdate(message)
49 |
50 |
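The only change from v0.12 is the shorter wording of message, presumably to keep the tweet inside Twitter's length limit. A small guard makes that intent explicit; the trim_status helper below is hypothetical, and the character limits are assumptions about which version of the API is being targeted.

#!/usr/bin/python
# Sketch only: trim a status message before posting so it cannot exceed the
# service's length limit (historically 140 characters, later 280).

def trim_status(message, max_len=140):
    """Return the message unchanged if it fits, otherwise truncate with an ellipsis."""
    if len(message) <= max_len:
        return message
    return message[:max_len - 3] + "..."

if __name__ == "__main__":
    sample = ("Intemp 21.4 C, Outtemp 8.2 C, InHumidity 45 %, "
              "OutHumidity 81 %, airspeed 12 Kts, Cloud 3 x000 ft.")
    print(trim_status(sample))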
--------------------------------------------------------------------------------