├── docs
├── imgs
│ ├── iq.JPG
│ ├── add_vpn.png
│ ├── paypal.jpeg
│ ├── qg_comms.png
│ ├── vpn_gui1.png
│ ├── ovpn_client.JPG
│ ├── sim_vision.png
│ ├── vpn_con_ops.png
│ ├── node_settings1.JPG
│ ├── IQ-GitHub-Banner.png
│ └── aircraft_vision.png
├── proximity
│ ├── imgs
│ │ ├── OA_params.png
│ │ ├── avoid_qgc.png
│ │ ├── prox_main.png
│ │ ├── 4_sonar_quad.png
│ │ ├── avoid_params.png
│ │ ├── AUTO_Avoidance.png
│ │ └── obstacles_on_takeoff.png
│ └── proximity_overview.md
├── Installing_Ardupilot_20_04.md
├── training_yolo.md
├── web_video_server.md
├── basic_gps_denied_navigation.md
├── installing_mission_on_Linux.md
├── rc_controller.md
├── Installing_Ardupilot.md
├── installing_qgc.md
├── intro_to_yolo.md
├── installing_ros.md
├── installing_ros_20_04.md
├── installing_gazebo_arduplugin.md
├── setting_up_a_vpn_server.md
├── GNC_functions_documentation.md
├── gzweb_install.md
├── helpful_commands.md
├── basic_ros_sub.md
├── ardu_params_and_commands.md
├── gazebo_world_modeling_intro.md
├── swarming_ardupilot.md
├── design_princepals.md
├── multi_mavros_drones.md
├── opencv_intro.md
├── avoidance.md
├── ros_intro.md
├── search_and_rescue.md
├── gnc_tutorial.md
└── adding_a_sensor.md
├── .github
└── FUNDING.yml
├── LICENSE
└── README.md
/docs/imgs/iq.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Intelligent-Quads/iq_tutorials/HEAD/docs/imgs/iq.JPG
--------------------------------------------------------------------------------
/docs/imgs/add_vpn.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Intelligent-Quads/iq_tutorials/HEAD/docs/imgs/add_vpn.png
--------------------------------------------------------------------------------
/docs/imgs/paypal.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Intelligent-Quads/iq_tutorials/HEAD/docs/imgs/paypal.jpeg
--------------------------------------------------------------------------------
/docs/imgs/qg_comms.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Intelligent-Quads/iq_tutorials/HEAD/docs/imgs/qg_comms.png
--------------------------------------------------------------------------------
/docs/imgs/vpn_gui1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Intelligent-Quads/iq_tutorials/HEAD/docs/imgs/vpn_gui1.png
--------------------------------------------------------------------------------
/docs/imgs/ovpn_client.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Intelligent-Quads/iq_tutorials/HEAD/docs/imgs/ovpn_client.JPG
--------------------------------------------------------------------------------
/docs/imgs/sim_vision.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Intelligent-Quads/iq_tutorials/HEAD/docs/imgs/sim_vision.png
--------------------------------------------------------------------------------
/docs/imgs/vpn_con_ops.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Intelligent-Quads/iq_tutorials/HEAD/docs/imgs/vpn_con_ops.png
--------------------------------------------------------------------------------
/docs/imgs/node_settings1.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Intelligent-Quads/iq_tutorials/HEAD/docs/imgs/node_settings1.JPG
--------------------------------------------------------------------------------
/docs/imgs/IQ-GitHub-Banner.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Intelligent-Quads/iq_tutorials/HEAD/docs/imgs/IQ-GitHub-Banner.png
--------------------------------------------------------------------------------
/docs/imgs/aircraft_vision.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Intelligent-Quads/iq_tutorials/HEAD/docs/imgs/aircraft_vision.png
--------------------------------------------------------------------------------
/docs/proximity/imgs/OA_params.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Intelligent-Quads/iq_tutorials/HEAD/docs/proximity/imgs/OA_params.png
--------------------------------------------------------------------------------
/docs/proximity/imgs/avoid_qgc.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Intelligent-Quads/iq_tutorials/HEAD/docs/proximity/imgs/avoid_qgc.png
--------------------------------------------------------------------------------
/docs/proximity/imgs/prox_main.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Intelligent-Quads/iq_tutorials/HEAD/docs/proximity/imgs/prox_main.png
--------------------------------------------------------------------------------
/docs/proximity/imgs/4_sonar_quad.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Intelligent-Quads/iq_tutorials/HEAD/docs/proximity/imgs/4_sonar_quad.png
--------------------------------------------------------------------------------
/docs/proximity/imgs/avoid_params.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Intelligent-Quads/iq_tutorials/HEAD/docs/proximity/imgs/avoid_params.png
--------------------------------------------------------------------------------
/docs/proximity/imgs/AUTO_Avoidance.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Intelligent-Quads/iq_tutorials/HEAD/docs/proximity/imgs/AUTO_Avoidance.png
--------------------------------------------------------------------------------
/docs/proximity/imgs/obstacles_on_takeoff.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Intelligent-Quads/iq_tutorials/HEAD/docs/proximity/imgs/obstacles_on_takeoff.png
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: ericjohnson97
4 | patreon: # Replace with a single Patreon username
5 | open_collective: # Replace with a single Open Collective username
6 | ko_fi: # Replace with a single Ko-fi username
7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
9 | liberapay: # Replace with a single Liberapay username
10 | issuehunt: # Replace with a single IssueHunt username
11 | otechie: # Replace with a single Otechie username
12 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
13 |
--------------------------------------------------------------------------------
/docs/Installing_Ardupilot_20_04.md:
--------------------------------------------------------------------------------
1 | # Installing Ardupilot and MAVProxy Ubuntu 20.04
2 |
3 | ## Video Tutorial at https://youtu.be/1FpJvUVPxL0
4 |
5 | ## Clone ArduPilot
6 |
7 | In home directory:
8 | ```
9 | cd ~
10 | sudo apt install git
11 | git clone https://github.com/ArduPilot/ardupilot.git
12 | cd ardupilot
13 | ```
14 |
15 | ## Install dependencies:
16 | ```
17 | cd ardupilot
18 | Tools/environment_install/install-prereqs-ubuntu.sh -y
19 | ```
20 |
21 | reload profile
22 | ```
23 | . ~/.profile
24 | ```
25 | ## If the next step "git submodule update" fails
26 | ```
27 | git config --global url.https://.insteadOf git://
28 | ```
29 |
30 | ## Checkout Latest Copter Build
31 | ```
32 | git checkout Copter-4.0.4
33 | git submodule update --init --recursive
34 | ```
35 |
36 | Run SITL (Software In The Loop) once to set params:
37 | ```
38 | cd ~/ardupilot/ArduCopter
39 | sim_vehicle.py -w
40 | ```
41 |
42 |
--------------------------------------------------------------------------------
/docs/training_yolo.md:
--------------------------------------------------------------------------------
1 | # Training YOLO for your Own Custom Object
2 |
3 | ## Generate a Dataset
4 |
5 | In this tutorial I will be using the gazebo sim we have used in the previous tutorials and saving the footage from the drone's camera
6 |
7 | launch sim
8 | ```
9 | roslaunch iq_sim runway.launch
10 | ```
11 |
12 | ```
13 | ./startsitl.sh
14 | ```
15 |
16 | fly the drone around the sim
17 |
18 | and run image saver to generate your dataset
19 | ```
20 | rosrun image_view image_saver image:=/webcam/image_raw
21 | ```
22 | ## Annotate dataset
23 |
24 | use labelImg https://github.com/tzutalin/labelImg
25 |
26 | - follow instructions on readme
27 |
28 | ## Train YOLO
29 |
30 | use AlexeyAB's darknet repo https://github.com/AlexeyAB/darknet
31 |
32 | - follow instructions on readme
33 |
34 | ## Setup darknet ROS
35 |
36 | follow my tutorial
37 |
38 | https://github.com/Intelligent-Quads/iq_tutorials/blob/master/intro_to_yolo.md
39 |
40 |
41 |
--------------------------------------------------------------------------------
/docs/web_video_server.md:
--------------------------------------------------------------------------------
1 | # Video Web Server
2 |
3 | ## Video Tutorial at https://youtu.be/bh9mKgiIJAk
4 |
5 | This tutorial is highlighting a great open-source project that is super convenient for viewing cameras on a robot. The package essentially sets up a web server on your robot that allows you to select and view image streams from your web browser. Because of the web-based nature, anyone on the same network will have the ability to monitor cameras. This could really aid in the ease of monitoring and debugging programs in a lab setting. This combined with a VPN could allow team members to view real time video data from anywhere in the world!
6 |
7 | ## Installation
8 |
9 | ```
10 | cd ~/catkin/src
11 | git clone https://github.com/sfalexrog/async_web_server_cpp.git
12 | cd async_web_server_cpp
13 | git checkout noetic-devel
14 | ```
15 |
16 | ```
17 | cd ~/catkin/src
18 | git clone https://github.com/RobotWebTools/web_video_server.git
19 | ```
20 |
21 | ```
22 | catkin build
23 | ```
24 |
25 | ## Run
26 | ```
27 | rosrun web_video_server web_video_server
28 | ```
29 |
30 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Intelligent-Quads
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/docs/basic_gps_denied_navigation.md:
--------------------------------------------------------------------------------
1 | # Basic GPS Denied Navigation
2 |
3 | This tutorial will focus on how to use an optical flow sensor as a means of GPS denied navigation
4 |
5 | **Note: Optical Flow Sensors are not robust, and should only be used to augment another method of position estimation such as SLAM**
6 |
7 | ## Simulation Setup
8 |
9 |
10 | ArduPilot has a built-in simulated optical flow sensor. To use this we will launch the ArduPilot SITL and set `SIM_FLOW` to 1 by running
11 |
12 | ```
13 | param set SIM_FLOW 1
14 | ```
15 |
16 | Disable the simulated GPS by typing
17 | ```
18 | param set SIM_GPS_ENABLE 0
19 | ```
20 |
21 | ## Vehicle Setup
22 |
23 | first we must enable the drone's EKF to use the optical flow estimates. to do this type
24 | ```
25 | param set EK2_GPS_TYPE 3
26 | ```
27 |
28 | Position estimation from optical flow is a method that integrates velocity at each time step to form a position estimate. In order to do this we must tell the drone where to start integrating from. This will be done by setting the EKF origin. This can be done 2 different ways
29 |
30 | -1.) Via a ground station
31 | -2.) Programmatically
32 |
33 |
34 |
35 |
--------------------------------------------------------------------------------
/docs/installing_mission_on_Linux.md:
--------------------------------------------------------------------------------
1 | # Installing Mission Planner on Linux
2 |
3 | While I prefer to use QGroundControl as my ground station, there are still reasons now and again that only Mission Planner can do the job. Mission Planner does not have native support for linux, but it is still possible to use via mono.
4 |
5 | ## Video Tutorial at https://youtu.be/XIS-nPs8Oq4
6 |
7 | ### On Linux
8 |
9 | #### Requirements
10 |
11 | Those instructions were tested on Ubuntu 18.04.
12 | Please install Mono, either :
13 | - ` sudo apt install mono-runtime libmono-system-windows-forms4.0-cil libmono-system-core4.0-cil libmono-winforms2.0-cil libmono-corlib2.0-cil libmono-system-management4.0-cil libmono-system-xml-linq4.0-cil`
14 |
15 | or full Mono :
16 | - `sudo apt install mono-complete`
17 |
18 | #### Launching
19 |
20 | - Get the latest zipped version of Mission Planner here : https://firmware.ardupilot.org/Tools/MissionPlanner/MissionPlanner-latest.zip
21 | - Unzip in the directory you want
22 | - Go into the directory
23 | - run with `mono MissionPlanner.exe`
24 |
25 | You can debug Mission Planner on Mono with `MONO_LOG_LEVEL=debug mono MissionPlanner.exe`
26 |
27 |
28 |
--------------------------------------------------------------------------------
/docs/rc_controller.md:
--------------------------------------------------------------------------------
1 | # RC Controller with MAVproxy
2 |
3 | It is helpful to be able to use an RC controller for manual input when debugging in your simulation. We can do this by using the joystick module in MAVProxy. You can connect your RC controller to your PC via a USB dongle.
4 |
5 | ## Controller Dongles
6 |
7 | A List of native dongles are listed below
8 |
9 | https://ardupilot.github.io/MAVProxy/html/modules/joystick.html
10 |
11 | I am using a Spektrum DX7 controller and elected to buy the Spektrum wireless receiver listed below
12 |
13 | https://www.amazon.com/Spektrum-Wireless-Simulator-Dongle-WS1000/dp/B06XCP7Z5Y/ref=asc_df_B06XCP7Z5Y/?tag=hyprod-20&linkCode=df0&hvadid=309833041189&hvpos=&hvnetw=g&hvrand=1456726319045727682&hvpone=&hvptwo=&hvqmt=&hvdev=c&hvdvcmdl=&hvlocint=&hvlocphy=9027263&hvtargid=pla-567767137867&th=1
14 |
15 | ## Dependencies
16 |
17 | make sure all of the below dependencies are installed
18 |
19 | ```
20 | sudo apt-get install python3-dev python3-opencv python3-wxgtk3.0 python3-pip python3-matplotlib python3-pygame python3-lxml python3-yaml
21 | ```
22 |
23 | ## Run Your Simulation
24 |
25 | ```
26 | cd ~/ardupilot/ArduCopter/ && sim_vehicle.py -v ArduCopter -f gazebo-iris --console
27 | ```
28 | ```
29 | roslaunch iq_sim runway.launch
30 | ```
31 |
32 | Once you are running your simulation you can enable your controller by typing the following in your mavproxy terminal
33 | ```
34 | module load joystick
35 | ```
36 |
37 | ## Custom Joystick Mapping
38 |
39 |
--------------------------------------------------------------------------------
/docs/Installing_Ardupilot.md:
--------------------------------------------------------------------------------
1 | # Installing Ardupilot and MAVProxy
2 |
3 | ### Video tutorial at https://youtu.be/wlkoq65mM2A
4 |
5 | ## Clone ArduPilot
6 |
7 | In home directory:
8 | ```
9 | cd ~
10 | sudo apt install git
11 | git clone https://github.com/ArduPilot/ardupilot.git
12 | cd ardupilot
13 | git checkout Copter-3.6
14 | git submodule update --init --recursive
15 | ```
16 |
17 | ## Install dependencies:
18 | ```
19 | sudo apt install python-matplotlib python-serial python-wxgtk3.0 python-wxtools python-lxml python-scipy python-opencv ccache gawk python-pip python-pexpect
20 | ```
21 |
22 | ## Use pip (Python package installer) to install mavproxy:
23 | ```
24 | sudo pip install future pymavlink MAVProxy
25 | ```
26 |
27 | MAVProxy is a fully-functioning GCS for UAV’s. The intent is for a minimalist, portable and extendable GCS for any UAV supporting the MAVLink protocol (such as one using ArduPilot). For more information check out http://ardupilot.github.io/MAVProxy/html/index.html
28 |
29 | Open `~/.bashrc` for editing:
30 | ```
31 | gedit ~/.bashrc
32 | ```
33 |
34 | Add these lines to end of `~/.bashrc` (the file open in the text editor):
35 | ```
36 | export PATH=$PATH:$HOME/ardupilot/Tools/autotest
37 | export PATH=/usr/lib/ccache:$PATH
38 | ```
39 |
40 | Save and close the text editor.
41 |
42 | Reload `~/.bashrc`:
43 | ```
44 | . ~/.bashrc
45 | ```
46 |
47 | Run SITL (Software In The Loop) once to set params:
48 | ```
49 | cd ~/ardupilot/ArduCopter
50 | sim_vehicle.py -w
51 | ```
52 |
53 |
--------------------------------------------------------------------------------
/docs/installing_qgc.md:
--------------------------------------------------------------------------------
1 | # Installing QGroundControl
2 |
3 | ### Video tutorial (18.04) at https://youtu.be/qLQQbhKDQ6M
4 |
5 | ### Video Tutorial (20.04) at https://youtu.be/1FpJvUVPxL0
6 |
7 | ## Overview
8 |
9 | QGroundControl provides full flight control and vehicle setup for PX4 or ArduPilot powered vehicles. It provides easy and straightforward usage for beginners, while still delivering high end feature support for experienced users.
10 |
11 | ### Key Features:
12 |
13 | - Full setup/configuration of ArduPilot and PX4 Pro powered vehicles.
14 | - Flight support for vehicles running PX4 and ArduPilot (or any other autopilot that communicates using the MAVLink protocol).
15 | - Mission planning for autonomous flight.
16 | - Flight map display showing vehicle position, flight track, waypoints and vehicle instruments.
17 | - Video streaming with instrument display overlays.
18 | - Support for managing multiple vehicles.
19 | - QGC runs on Windows, OS X, Linux platforms, iOS and Android devices.
20 |
21 | for more detailed information please visit http://qgroundcontrol.com/
22 |
23 | ## Install QGroundControl for Ubuntu Linux 16.04 LTS or later:
24 |
25 | Add current user account to dialout group and remove modemmanager
26 | ```
27 | sudo usermod -a -G dialout $USER
28 | sudo apt-get remove modemmanager
29 | ```
30 |
31 | Download QGroundControl.AppImage
32 | ```
33 | wget https://s3-us-west-2.amazonaws.com/qgroundcontrol/latest/QGroundControl.AppImage
34 | ```
35 | Change permissions and run
36 | ```
37 | chmod +x ./QGroundControl.AppImage
38 | ./QGroundControl.AppImage (or double click)
39 | ```
40 |
41 | ## Run SITL and connect with Q Ground
42 |
43 | ```
44 | cd ~/ardupilot/ArduCopter/
45 | sim_vehicle.py
46 | ```
47 |
48 |
--------------------------------------------------------------------------------
/docs/intro_to_yolo.md:
--------------------------------------------------------------------------------
1 | # Introduction to YOLO/Darknet Image Recognition
2 |
3 | ## Video Tutorial at https://youtu.be/SiVexS6Zrr8
4 |
5 | ## Install CUDA
6 | CUDA is a library that allows programs to take advantage of your GPU as a computing resource. YOLO will run without CUDA, but the algorithm runs up to 500x faster with CUDA. To install CUDA, run
7 |
8 | ```
9 | sudo apt install nvidia-cuda-toolkit
10 | ```
11 |
12 | ## **Ubuntu 18.04**
13 | ### Clone Darknet/YOLO
14 |
15 | Clone the darknet repo into our catkin_ws
16 |
17 | ```
18 | cd ~/catkin_ws/src
19 | git clone --recursive https://github.com/leggedrobotics/darknet_ros.git
20 | ```
21 |
22 | ### Build Darknet
23 | ```
24 | catkin build -DCMAKE_BUILD_TYPE=Release
25 | ```
26 | on Ubuntu 18.04 you may need to run
27 | ```
28 | catkin build -DCMAKE_BUILD_TYPE=Release -DCMAKE_C_COMPILER=/usr/bin/gcc-6
29 | ```
30 |
31 | ## **Ubuntu 20.04**
32 | ```
33 | cd ~/catkin_ws/src
34 | git clone https://github.com/kunaltyagi/darknet_ros.git
35 | cd darknet_ros/
36 | git checkout opencv4
37 | git submodule update --init --recursive
38 | ```
39 | ### Build Darknet
40 | ```
41 | catkin build -DCMAKE_BUILD_TYPE=Release
42 | ```
43 | if you run into errors try running the following
44 | ```
45 | catkin build -DCMAKE_BUILD_TYPE=Release -DCMAKE_C_COMPILER=/usr/bin/gcc-8
46 | ```
47 |
48 |
49 | ## Configure YOLO/Darknet
50 |
51 | The file `ros.yaml` specifies ROS parameters. You can find this file under `darknet_ros/darknet_ros/config`. You will need to change the image topic from `/camera/rgb/image_raw` to
52 |
53 | ```
54 | /webcam/image_raw
55 | ```
56 |
57 | The file `darknet_ros.launch` will launch the darknet/yolo ros node. You can find this file under `darknet_ros/darknet_ros/launch`
58 |
59 | in this file you can choose which version of yolo you would like to run by changing
60 | ```
61 |
62 | ```
63 | the options are as follows
64 |
65 | - yolov1: Not recommended. this model is old
66 | - yolov2: more accurate, and faster.
67 | - yolov3: about as fast as v2, but more accurate. Yolo v3 has a high GPU ram requirement to train and run. If your graphics card does not have enough ram, use yolo v2
68 | - tiny-yolo: Very fast yolo model. Would recommend for application where speed is most important. Works very well on Nvidia Jetson
69 |
70 | ---
71 | ### References
72 |
73 | https://pjreddie.com/darknet/yolo/
74 |
75 |
--------------------------------------------------------------------------------
/docs/installing_ros.md:
--------------------------------------------------------------------------------
1 | # Install ROS and Setup Catkin
2 |
3 | ### Video Tutorial at https://youtu.be/EmIjedzHwzI
4 |
5 | In this tutorial we are using **Ubuntu 18.04** and **ROS Melodic**
6 |
7 | Code blocks are meant to be typed in Terminal windows. "Control+Alt+T" opens a new Terminal window.
8 |
9 | ## 1. Install ROS
10 |
11 | - Do _Desktop-full Install_
12 | - Follow until _Step 1.7_ at the end of the page
13 |
14 | First, install **ROS Melodic** using the following instructions: http://wiki.ros.org/melodic/Installation/Ubuntu
15 |
16 |
17 | ## 2. Set Up Catkin workspace
18 |
19 | We use `catkin build` instead of `catkin_make`. Please install the following:
20 | ```
21 | sudo apt-get install python-wstool python-rosinstall-generator python-catkin-tools
22 | ```
23 |
24 | Then, initialize the catkin workspace:
25 | ```
26 | mkdir -p ~/catkin_ws/src
27 | cd ~/catkin_ws
28 | catkin init
29 | ```
30 |
31 | ## 3. Dependencies installation
32 |
33 | Install `mavros` and `mavlink` from source:
34 | ```
35 | cd ~/catkin_ws
36 | wstool init ~/catkin_ws/src
37 |
38 | rosinstall_generator --upstream mavros | tee /tmp/mavros.rosinstall
39 | rosinstall_generator mavlink | tee -a /tmp/mavros.rosinstall
40 | wstool merge -t src /tmp/mavros.rosinstall
41 | wstool update -t src
42 | rosdep install --from-paths src --ignore-src --rosdistro `echo $ROS_DISTRO` -y
43 |
44 | catkin build
45 | ```
46 | Add a line to end of `~/.bashrc` by running the following command:
47 | ```
48 | echo "source ~/catkin_ws/devel/setup.bash" >> ~/.bashrc
49 | ```
50 |
51 | update global variables
52 | ```
53 | source ~/.bashrc
54 | ```
55 |
56 | install the GeographicLib dependency
57 | ```
58 | sudo ~/catkin_ws/src/mavros/mavros/scripts/install_geographiclib_datasets.sh
59 | ```
60 |
61 |
62 | ## 4. Clone IQ Simulation ROS package
63 |
64 | ```
65 | cd ~/catkin_ws/src
66 | git clone https://github.com/Intelligent-Quads/iq_sim.git
67 | ```
68 | Our repository should now be copied to `~/catkin_ws/src/iq_sim/` (don't run this line. This is just saying that if you browse in the file manager, you will see those folders).
69 |
70 | run the following to tell gazebo where to look for the iq models
71 | ```
72 | echo "GAZEBO_MODEL_PATH=${GAZEBO_MODEL_PATH}:$HOME/catkin_ws/src/iq_sim/models" >> ~/.bashrc
73 | ```
74 |
75 | ## 5. Build instructions
76 | Inside `catkin_ws`, run `catkin build`:
77 |
78 | ```
79 | cd ~/catkin_ws
80 | catkin build
81 | ```
82 | update global variables
83 | ```
84 | source ~/.bashrc
85 | ```
86 |
87 |
88 |
89 |
--------------------------------------------------------------------------------
/docs/installing_ros_20_04.md:
--------------------------------------------------------------------------------
1 | # Install ROS and Setup Catkin
2 |
3 | ## Video Tutorial at https://youtu.be/1FpJvUVPxL0
4 |
5 | In this tutorial we are using **Ubuntu 20.04** and **ROS Noetic**
6 |
7 | Code blocks are meant to be typed in Terminal windows. "Control+Alt+T" opens a new Terminal window.
8 |
9 | ## 1. Install ROS
10 |
11 | - Do _Desktop-full Install_
12 | - Follow until _Step 1.7_ at the end of the page
13 |
14 | First, install **ROS Noetic** using the following instructions: http://wiki.ros.org/noetic/Installation/Ubuntu
15 |
16 |
17 | ## 2. Set Up Catkin workspace
18 |
19 | We use `catkin build` instead of `catkin_make`. Please install the following:
20 | ```
21 | sudo apt-get install python3-wstool python3-rosinstall-generator python3-catkin-lint python3-pip python3-catkin-tools
22 | pip3 install osrf-pycommon
23 | ```
24 |
25 | Then, initialize the catkin workspace:
26 | ```
27 | mkdir -p ~/catkin_ws/src
28 | cd ~/catkin_ws
29 | catkin init
30 | ```
31 |
32 | ## 3. Dependencies installation
33 |
34 | Install `mavros` and `mavlink` from source:
35 | ```
36 | cd ~/catkin_ws
37 | wstool init ~/catkin_ws/src
38 |
39 | rosinstall_generator --upstream mavros | tee /tmp/mavros.rosinstall
40 | rosinstall_generator mavlink | tee -a /tmp/mavros.rosinstall
41 | wstool merge -t src /tmp/mavros.rosinstall
42 | wstool update -t src
43 | rosdep install --from-paths src --ignore-src --rosdistro `echo $ROS_DISTRO` -y
44 |
45 | catkin build
46 | ```
47 | Add a line to end of `~/.bashrc` by running the following command:
48 | ```
49 | echo "source ~/catkin_ws/devel/setup.bash" >> ~/.bashrc
50 | ```
51 |
52 | update global variables
53 | ```
54 | source ~/.bashrc
55 | ```
56 |
57 | install the GeographicLib dependency
58 | ```
59 | sudo ~/catkin_ws/src/mavros/mavros/scripts/install_geographiclib_datasets.sh
60 | ```
61 |
62 |
63 | ## 4. Clone IQ Simulation ROS package
64 |
65 | ```
66 | cd ~/catkin_ws/src
67 | git clone https://github.com/Intelligent-Quads/iq_sim.git
68 | ```
69 | Our repository should now be copied to `~/catkin_ws/src/iq_sim/` (don't run this line. This is just saying that if you browse in the file manager, you will see those folders).
70 |
71 | run the following to tell gazebo where to look for the iq models
72 | ```
73 | echo "GAZEBO_MODEL_PATH=${GAZEBO_MODEL_PATH}:$HOME/catkin_ws/src/iq_sim/models" >> ~/.bashrc
74 | ```
75 |
76 | ## 5. Build instructions
77 | Inside `catkin_ws`, run `catkin build`:
78 |
79 | ```
80 | cd ~/catkin_ws
81 | catkin build
82 | ```
83 | update global variables
84 | ```
85 | source ~/.bashrc
86 | ```
87 |
88 |
89 |
90 |
--------------------------------------------------------------------------------
/docs/installing_gazebo_arduplugin.md:
--------------------------------------------------------------------------------
1 | # Installing Gazebo and ArduPilot Plugin
2 |
3 | ### Video Tutorial at https://youtu.be/m7hPyJJmWmU
4 |
5 | ## Overview
6 |
7 | Robot simulation is an essential tool in every roboticist's toolbox. A well-designed simulator makes it possible to rapidly test algorithms, design robots, perform regression testing, and train AI system using realistic scenarios. Gazebo offers the ability to accurately and efficiently simulate populations of robots in complex indoor and outdoor environments. At your fingertips is a robust physics engine, high-quality graphics, and convenient programmatic and graphical interfaces. Best of all, Gazebo is free with a vibrant community.
8 |
9 | for more information on Gazebo check out http://gazebosim.org/
10 |
11 | ## Install Gazebo [***18.04-20.04***]
12 |
13 | Setup your computer to accept software from http://packages.osrfoundation.org:
14 | ```
15 | sudo sh -c 'echo "deb http://packages.osrfoundation.org/gazebo/ubuntu-stable `lsb_release -cs` main" > /etc/apt/sources.list.d/gazebo-stable.list'
16 | ```
17 |
18 | Setup keys:
19 | ```
20 | wget http://packages.osrfoundation.org/gazebo.key -O - | sudo apt-key add -
21 | ```
22 |
23 | Reload software list:
24 | ```
25 | sudo apt update
26 | ```
27 |
28 | Install Gazebo:
29 | ### Ubuntu [***18.04***]
30 | ```
31 | sudo apt install gazebo9 libgazebo9-dev
32 | ```
33 | ### Ubuntu [***20.04***]
34 | ```
35 | sudo apt-get install gazebo11 libgazebo11-dev
36 | ```
37 |
38 | for more detailed instructions for installing gazebo checkout http://gazebosim.org/tutorials?tut=install_ubuntu
39 |
40 |
41 | ## Install Gazebo plugin for APM (ArduPilot Master) :
42 | ```
43 | cd ~
44 | git clone https://github.com/khancyr/ardupilot_gazebo.git
45 | cd ardupilot_gazebo
46 | ```
47 | ***Ubuntu 18.04 only*** checkout dev
48 | ```
49 | git checkout dev
50 | ```
51 | build and install plugin
52 | ```
53 | mkdir build
54 | cd build
55 | cmake ..
56 | make -j4
57 | sudo make install
58 | ```
59 | ```
60 | echo 'source /usr/share/gazebo/setup.sh' >> ~/.bashrc
61 | ```
62 | Set paths for models:
63 | ```
64 | echo 'export GAZEBO_MODEL_PATH=~/ardupilot_gazebo/models' >> ~/.bashrc
65 | . ~/.bashrc
66 | ```
67 |
68 | ## Run Simulator
69 |
70 | **NOTE the iris_arducopter_runway is not currently working in gazebo11. The iq_sim worlds DO work**
71 |
72 | In one Terminal (Terminal 1), run Gazebo:
73 | ```
74 | gazebo --verbose ~/ardupilot_gazebo/worlds/iris_arducopter_runway.world
75 | ```
76 |
77 | In another Terminal (Terminal 2), run SITL:
78 | ```
79 | cd ~/ardupilot/ArduCopter/
80 | sim_vehicle.py -v ArduCopter -f gazebo-iris --console
81 | ```
82 |
83 |
84 |
--------------------------------------------------------------------------------
/docs/setting_up_a_vpn_server.md:
--------------------------------------------------------------------------------
1 | # Setting Up an Openvpn Server to Create a Virtual Robotics Lab Network
2 |
3 | ### Video tutorial at https://youtu.be/WoRyW4tyxo0
4 |
5 | Since the beginning of the pandemic, roboticists have been struggling to use their lab resources and do networking in a secure way. By using a VPN we can set up a network which mimics having all your devices on the same network in your lab. A VPN will encrypt all traffic allowing you to securely control simulation computers or robots remotely as long as you have an internet connection.
6 |
7 | 
8 |
9 | ## VPN Server
10 |
11 | The first thing we need to set up is a VPN server. This is a computer that will manage who can connect and will route all VPN traffic. In this tutorial I will use digital ocean to set up the vpn server. For $5 a month we can create a linux node that we can use as our VPN server.
12 |
13 | ### Digital Ocean Discounts
14 |
15 | If you are currently a student you can redeem $50 worth of free credits by using the github student developer pack. This is enough to host a VPN server for 10 months! [Github Student Developer Pack](https://education.github.com/pack)
16 |
17 | If you are not a student, you can use my affiliate link which will grant you $100 of credits for 60 days. This isn't as good as the student discount, but at least we can host a VPN for 2 months free. [Digital Ocean $100 for 60 days credit](https://m.do.co/c/6752af521fd4)
18 |
19 | ### Creating a Digital Ocean Node
20 |
21 | In the top left corner click `new project`. Fill out relevant information. Then click `Get Started with a Droplet`.
22 |
23 | A basic ubuntu 20.04 node will be perfect for this application.
24 | 
25 |
26 | - select the data center closest to your location
27 | - add your ssh key by following the instructions under `New SSH Key`
28 |
29 | We should be ready to create our droplet!
30 |
31 | ### Installing Openvpn
32 |
33 | We will be using the easy install script.
34 |
35 | follow the tutorial below
36 | https://nextsouls.com/how-to-install-openvpn-using-script-installer/
37 |
38 | ## Client Setup
39 |
40 | On the VPN Server we can re-run the install script to generate client connect files.
41 |
42 | ## Installing Openvpn Clients
43 |
44 | ### Ubuntu Client Install
45 |
46 | ```
47 | sudo apt install openvpn
48 | ```
49 |
50 | ### Connect from the Command Line
51 | ```
52 | sudo openvpn --config ${client.ovpn}
53 | ```
54 | connect via gui
55 |
56 | ### Connect via GUI
57 |
58 | You can also connect to your vpn via the ubuntu settings. Go to Network and click the plus next to VPN.
59 |
60 | 
61 |
62 | Then click `import from file`
63 |
64 | 
65 |
66 | add your `.ovpn` file and click `add`.
67 |
68 | ### Windows Mac
69 |
70 | Download client: https://openvpn.net/download-open-vpn/
71 |
72 | import openvpn profile
73 |
74 | 
--------------------------------------------------------------------------------
/docs/GNC_functions_documentation.md:
--------------------------------------------------------------------------------
1 | # GNC Functions Documentation
2 |
3 | ## void set_heading(float heading)
4 | ```
5 | Returns n/a
6 | ```
7 | This function is used to specify the drone’s heading in the local reference frame. Psi is a counter clockwise rotation following the drone’s reference frame defined by the x axis through the right side of the drone with the y axis through the front of the drone.
8 |
9 | ## void set_destination(float x, float y, float z, float psi)
10 | ```
11 | Returns n/a
12 | ```
13 | This function is used to command the drone to fly to a waypoint. These waypoints should be specified in the local reference frame. This is typically defined from the location the drone is launched. Psi is counter clockwise rotation following the drone’s reference frame defined by the x axis through the right side of the drone with the y axis through the front of the drone.
14 |
15 | ## int set_speed(float speed__mps)
16 | ```
17 | returns 0 for success
18 | ```
19 | This function is used to change the speed of the vehicle in guided mode. it takes the speed in meters per second as a float as the input
20 |
21 | ## void spinOffSetPub()
22 | ```
23 | Returns n/a
24 | ```
25 | This function publishes the angle between the ENU frame and the local reference frame specified by initialize_local_frame(). This is useful if you have other nodes that need this information.
26 |
27 | ## int wait4connect()
28 | ```
29 | Returns 0 - connected to fcu
30 | ```
31 | Wait for connect is a function that will hold the program until communication with the FCU is established.
32 |
33 | ## int wait4start()
34 | ```
35 | Returns 0 - mission started
36 | ```
37 | Wait for start will hold the program until the user signals the FCU to enter guided mode. This is typically done from a switch on the safety pilot’s remote or from the ground control station.
38 |
39 | ## int initialize_local_frame()
40 | ```
41 | Returns 0 - frame initialized
42 | ```
43 | This function will create a local reference frame based on the starting location of the drone. This is typically done right before takeoff. This reference frame is what all of the set destination commands will be in reference to.
44 |
45 | ## int takeoff(float takeoff_alt)
46 | ```
47 | Returns 0 - nominal takeoff
48 | ```
49 | The takeoff function will arm the drone and put the drone in a hover above the initial position.
50 |
51 | ## int check_waypoint_reached(float tolerance)
52 | ```
53 | Returns 1 - waypoint reached
54 | ```
55 | This function returns an int of 1 or 0. This function can be used to check when to request the next waypoint in the mission. The tolerance parameter is used to enforce how close the drone must be to reach a waypoint successfully. A small tolerance may take a long time to reach as the drone makes small slow corrections the closer it is to the waypoint.
56 |
57 | ## int land()
58 | ```
59 | Returns 1 - mode change successful
60 | ```
61 | this function changes the mode of the drone to land
62 |
63 | ## int init_publisher_subscriber(ros::NodeHandle controlnode)
64 | ```
65 | Returns n/a
66 | ```
67 | This function is called at the beginning of a program and will start the communication links to the FCU. The function requires the program's ros nodehandle as an input
68 |
--------------------------------------------------------------------------------
/docs/gzweb_install.md:
--------------------------------------------------------------------------------
1 | # Installing and Using GZweb
2 |
3 | ## Video Tutorial at https://youtu.be/zukIsbYSCUs
4 |
5 | This tutorial will go over how to install a Gazebo web server and how to use gazebo from your web browser. I am currently looking at ways to adapt to the new normal. This is leading me to explore web based tools to create a virtual lab environment allowing engineers to work without having to ever meet in person. This, in my opinion, is a very exciting prospect as it has the potential to liberate robotics engineers from having to work in one location. Hopefully digital nomadism will be an option for us roboticists in the future.
6 |
7 | This tutorial was made and tested with ***Ubuntu 20.04***.
8 |
9 | ## Install Dependencies
10 |
11 | ```
12 | sudo apt install gazebo11 libgazebo11-dev
13 | ```
14 |
15 | ```
16 | sudo apt install libjansson-dev nodejs npm libboost-dev imagemagick libtinyxml-dev mercurial cmake build-essential
17 | ```
18 |
19 | ## Clone GZweb
20 | ```
21 | cd ~; git clone https://github.com/osrf/gzweb
22 | ```
23 | there is a problem with gzweb building on 20.04 with gazebo11. this is fixed on the below dev branch. I will do my best to update the tutorial when the branch is finally merged.
24 | ```
25 | cd ~/gzweb
26 | git checkout fix_build_gz11
27 | ```
28 | The first time you build, you'll need to gather all the Gazebo models which you want to simulate in the right directory ('http/client/assets') and prepare them for the web.
29 |
30 | Before running the deploy script, it's important to source the Gazebo setup.sh file:
31 | ```
32 | source /usr/share/gazebo/setup.sh
33 | ```
34 | ## Installing n
35 | gzweb is an older package that is not currently building on the latest version of node. To install an older version of node I use `n` which can be installed as follows
36 | ```
37 | sudo npm install -g n
38 | ```
39 |
40 | for more information take a look at the n github https://github.com/tj/n
41 |
42 | ## Installing node 8.14.0
43 | ```
44 | sudo n 8.14.0
45 | ```
46 |
47 | ## Set Python to run as Python 2 Temporarily
48 |
49 | Python 3 is now the default python version on Ubuntu 20.04. For this reason I am using `update-alternatives` to switch between python versions.
50 |
51 | to switch python version to 2 run
52 | ```
53 | sudo update-alternatives --config python
54 | ```
55 | select `1` to change to python 2
56 |
57 | ## Build GZweb
58 | Run the deploy script, this downloads models from the web and may take a couple of minutes, see more options below.
59 | ```
60 | npm run deploy --- -m
61 | ```
62 | Note: the -m flag tells the deploy script to grab all the models from the model database and any other models in your GAZEBO_MODEL_PATH. For all subsequent builds, the -m flag will not be needed
63 |
64 | # gzweb
65 | first launch a gazebo world for example
66 | ```
67 | roslaunch iq_sim runway.launch
68 | ```
69 |
70 | then launch gzweb
71 | ```
72 | cd ~/gzweb
73 | npm start
74 | ```
75 | # Launching Gazebo without the gui
76 |
77 | In order to launch a gazebo world remotely we don't want to launch a gzclient instance on the server. To avoid this we will need to add the `gui` argument to our gazebo roslaunch files. For example we will need to add the following to the runway.launch file in iq_sim
78 | ```
79 |
80 | ```
81 | and
82 | ```
83 |
84 | ```
85 | our `runway.launch` would now look like
86 | ```
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 | ```
98 |
99 | # Optimization
100 | There are a variety of commands that can be used to optimize gazebo web for your application. be sure to take a look at http://gazebosim.org/tutorials?tut=gzweb_install&cat=gzweb to get the most out of your install
101 |
102 |
--------------------------------------------------------------------------------
/docs/helpful_commands.md:
--------------------------------------------------------------------------------
1 | # Common Linux, ROS and MAVproxy Commands
2 |
3 | ## Common Linux Commands
4 | ```
5 | cat : [filename] Display file’s contents to the standard output device (usually your monitor).
6 | cd : /directorypath Change to directory.
7 | chmod : [options] mode filename Change a file’s permissions.
8 | chown : [options] filename Change who owns a file.
9 | clear : Clear a command line screen/window for a fresh start.
10 | cp : [options] source destination Copy files and directories.
11 | date : [options] Display or set the system date and time.
12 | df : [options] Display used and available disk space.
13 | du : [options] Show how much space each file takes up.
14 | file : [options] filename Determine what type of data is within a file.
15 | find : [pathname] [expression] Search for files matching a provided pattern.
16 | grep : [options] pattern [filesname] Search files or output for a particular pattern.
17 | kill : [options] pid Stop a process. If the process refuses to stop, use kill :9 pid.
18 | less : [options] [filename] View the contents of a file one page at a time.
19 | ln : [options] source [destination] Create a shortcut. locate filename Search a copy of your filesystem for the specified filename.
20 | lpr : [options] Send a print job.
21 | ls : [options] List directory contents.
22 | man : [command] Display the help information for the specified command.
23 | mkdir : [options] directory Create a new directory.
24 | mv : [options] source destination Rename or move file(s) or directories.
25 | passwd : [name [password]] Change the password or allow (for the system administrator) to change any password.
26 | ps : [options] Display a snapshot of the currently running processes.
27 | pwd : Display the pathname for the current directory.
28 | rm : [options] directory Remove (delete) file(s) and/or directories.
29 | rmdir : [options] directory Delete empty directories.
30 | ssh : [options] user@machine Remotely log in to another Linux machine, over the network.
31 | Leave : an ssh session by typing exit.
32 | su : [options] [user [arguments]] Switch to another user account.
33 | tail : [options] [filename] Display the last n lines of a file (the default is 10).
34 | tar : [options] filename Store and extract files from a tarfile (.tar) or tarball (.tar.gz or .tgz).
35 | top : Displays the resources being used on your system. Press q to exit.
36 | touch : filename Create an empty file with the specified name.
37 | who : [options] Display who is logged on.
38 | ```
39 |
40 | ## Common ROS Commands
41 | ```
42 | rostopic list : show active ROS topics
43 | rostopic list -v : show active rostopics and the type of ros message
44 | rostopic echo : show data currently being published
45 | rosmsg show : show the structure of a rosmsg
46 | ```
47 |
48 | ## Common MAVproxy Commands
49 |
50 | ```
51 | alias : command aliases: usage: alias
52 | arm : arm motors: usage: arm
53 | auxopt : select option for aux switches on CH7 and CH8 (ArduCopter only): Usage: auxopt set|show|reset|list
54 | disarm : disarm motors
55 | land : auto land
56 | log : log file handling: usage: log
57 | mode : mode change
58 | module : module commands: usage: module
59 | param : parameter handling: Usage: param
60 | position : position: Usage: position x y z (meters)
61 | rc : RC input control: Usage: rc
62 | reboot : reboot autopilot
63 | repeat : repeat a command at regular intervals: Usage: repeat
64 | script : run a script of MAVProxy commands
65 | setspeed : do_change_speed: Usage: setspeed SPEED_VALUE
66 | setyaw : condition_yaw: Usage: yaw ANGLE ANGULAR_SPEED MODE:[0 absolute / 1 relative]
67 | takeoff : takeoff: Usage: takeoff ALTITUDE_IN_METERS
68 | ```
69 | Run `help` to see a full list of commands
70 |
71 |
72 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Intelligent Quads Tutorials
2 | 
3 | Intelligent Quads is a community dedicated to helping people learn how to become developers of intelligent drone applications.
4 |
5 | Each of the below tutorials has an associated video tutorial. Be sure to checkout the [Intelligent Quads Youtube Channel!](https://www.youtube.com/channel/UCuZy0c-uvSJglnZfQC0-uaQ)
6 |
7 | ## Community Discord Server
8 |
9 | Come be a part of the growing community of drone application developers! Join the conversation in our [discord](https://discord.gg/xZjXaAf).
10 |
11 | ## Support My Work
12 |
13 | Donations are not required or expected for use. All work is licensed on the MIT license. That being said, if you find value in my work consider donating to help me create more content.
14 |
15 | ### Support Via Github Sponsors
16 |
17 | https://github.com/sponsors/ericjohnson97
18 |
19 | ### Support Via Paypal
20 |
21 | [](https://www.paypal.me/intelligentquads)
22 |
23 | [paypal.me/intelligentquads](https://www.paypal.com/paypalme/intelligentquads)
24 |
25 |
26 |
27 | ## Software Development Tutorials
28 |
29 | [Installing Ardupilot and MAVProxy \[***18.04***\]](docs/Installing_Ardupilot.md)
30 |
31 | [Installing Ardupilot and MAVProxy \[***20.04***\]](docs/Installing_Ardupilot_20_04.md)
32 |
33 | [Installing QGroundControl](docs/installing_qgc.md)
34 |
35 | [Installing Gazebo and ArduPilot Plugin \[***18.04-20.04***\]](docs/installing_gazebo_arduplugin.md)
36 |
37 | [Installing ROS and MAVROS \[***18.04***\]](docs/installing_ros.md)
38 |
39 | [Installing ROS and MAVROS \[***20.04***\]](docs/installing_ros_20_04.md)
40 |
41 | [Introduction to ROS and ROS Tools for Drone Development](docs/ros_intro.md)
42 |
43 | [C++ Waypoint Navigation Using the GNC API](docs/gnc_tutorial.md)
44 |
45 | [ArduCopter Params and MAVproxy Commands](docs/ardu_params_and_commands.md)
46 |
47 | [Basic Gazebo Modeling](docs/gazebo_world_modeling_intro.md)
48 |
49 | [Introduction to YOLO Image Recognition](docs/intro_to_yolo.md)
50 |
51 | [Writing our First ROS Subscriber for YOLO](docs/basic_ros_sub.md)
52 |
53 | [Simple Search and Rescue Program](docs/search_and_rescue.md)
54 |
55 | [Adding a Sensor to a Gazebo Robot](docs/adding_a_sensor.md)
56 |
57 | [Obstacle Avoidance](docs/avoidance.md)
58 |
59 | [Installing Mission Planner On Linux](docs/installing_mission_on_Linux.md)
60 |
61 | [Training YOLO](docs/training_yolo.md)
62 |
63 | [Basic GPS Denied Navigation](docs/basic_gps_denied_navigation.md)
64 |
65 | [Using an RC Controller with MAVproxy](docs/rc_controller.md)
66 |
67 | [Drone Swarms with Ardupilot](docs/swarming_ardupilot.md)
68 |
69 | [Drone Swarms with Ardupilot+MAVROS](docs/multi_mavros_drones.md)
70 |
71 | [Default Ardupilot Avoidance](docs/proximity/proximity_overview.md)
72 |
73 | ## OpenCV In Drone Applications
74 |
75 | [Basic OpenCV with ROS](docs/opencv_intro.md)
76 |
77 | ## Remote Software Development/ Work from Home
78 |
79 | [Using/Installing Gazebo Web](docs/gzweb_install.md)
80 |
81 | [Setting up a VPN Server](docs/setting_up_a_vpn_server.md)
82 |
83 | [View a ROS Image Stream](docs/web_video_server.md)
84 |
85 | ## Multi-rotor Design Tutorials
86 |
87 | [Design Principles - Work In Progress](docs/design_princepals.md)
88 |
89 | ## Associated Repos
90 |
91 | [iq_sim](https://github.com/Intelligent-Quads/iq_sim)
92 | - Repo containing helpful gazebo worlds designed for ardupilot sitl
93 |
94 | [iq_gnc](https://github.com/Intelligent-Quads/iq_gnc)
95 | - Repo containing iq gnc function which are helpful for designing guidance programs for drones. The repo also contains a variety of example mission programs.
96 |
97 |
98 | ## Resources
99 |
100 | [Common Linux, ROS and MAVproxy Commands](docs/helpful_commands.md)
101 |
102 | [GNC API Documentation](docs/GNC_functions_documentation.md)
103 |
104 | ## Discounts on Dev Resources for Students
105 |
106 | [Github Student Developer Pack](https://education.github.com/pack)
107 |
108 | ## Affiliate Links
109 |
110 | [Digital Ocean $100 for 60 days credit](https://m.do.co/c/6752af521fd4)
111 |
112 | ---
113 |
114 | ## References
115 | http://ardupilot.org/copter/index.html
116 |
117 | http://ardupilot.org/copter/docs/parameters.html#wpnav-parameters
118 |
119 | http://qgroundcontrol.com/
120 |
121 | https://discuss.ardupilot.org/
122 |
123 | http://ardupilot.org/dev/
124 |
125 | https://www.ros.org/
126 |
127 |
128 |
--------------------------------------------------------------------------------
/docs/basic_ros_sub.md:
--------------------------------------------------------------------------------
1 | # First ROS Subscriber
2 |
3 | ## Video Tutorial at https://youtu.be/iueRUQCvJXw
4 |
5 | In this tutorial I will be showing you how to write a ROS subscriber. A ROS subscriber allows you to use data being published on a ROS topic in your own C++ ROS node.
6 |
7 | ## Create a New C++ File
8 |
9 | in `iq_gnc/src` create a new file and call it `sub.cpp`
10 |
11 | ## Add C++ File to CMakeLists
12 |
13 | Next open up the `CMakeLists.txt` in `iq_gnc` and add the following lines to the bottom of the file
14 | ```
15 | add_executable(sub src/sub.cpp)
16 | target_link_libraries(sub ${catkin_LIBRARIES})
17 | ```
18 | these will allow catkin to build our program when we next run `catkin build`
19 |
20 | ## Add Includes and Main Function
21 |
22 | in every ros program we make, we will need to add the ros include as follows
23 | ```
24 | #include
25 | ```
26 |
27 | then below, we will add our main function.
28 | ```
29 | int main(int argc, char **argv)
30 | {
31 |
32 | //rest of code will go here
33 |
34 | return 0;
35 | }
36 |
37 | ```
38 |
39 | ## Initialize ROS
40 |
41 | ```
42 | ros::init(argc, argv, "detection_sub");
43 | ros::NodeHandle n;
44 | ```
45 |
46 | ## Declare our Subscriber
47 |
48 | subscribers are declared in the form
49 | ```
50 | ros::Subscriber sub = .subscribe("", <# of msg buffered>, );
51 | ```
52 |
53 | We want to subscribe to the detection data coming from darknet, so lets see which topics might have the data we need. run
54 | ```
55 | roslaunch darknet_ros darknet_ros.launch
56 | ```
57 | then
58 | ```
59 | rostopic list
60 | ```
61 | we see the following topics
62 | ```
63 | /darknet_ros/bounding_boxes
64 | /darknet_ros/check_for_objects/cancel
65 | /darknet_ros/check_for_objects/feedback
66 | /darknet_ros/check_for_objects/goal
67 | /darknet_ros/check_for_objects/result
68 | /darknet_ros/check_for_objects/status
69 | /darknet_ros/detection_image
70 | /darknet_ros/found_object
71 | ```
72 | Lets subscribe to `/darknet_ros/bounding_boxes`, but first we need to see what type of ROS message this is.
73 | run
74 | ```
75 | rostopic list -v /darknet_ros/bounding_boxes
76 | ```
77 | we see
78 | ```
79 | Published topics:
80 | * /darknet_ros/bounding_boxes [darknet_ros_msgs/BoundingBoxes] 1 publisher
81 |
82 | Subscribed topics:
83 | ```
84 | this means that the topic is publishing data in the form of `darknet_ros_msgs/BoundingBoxes`
85 |
86 | Now we should have enough info to declare our subscriber. write
87 | ```
88 | ros::Subscriber sub = n.subscribe("/darknet_ros/bounding_boxes", 1, detection_cb);
89 | ```
90 | then add `ros::spin();` this way the subscriber continues to get the latest data as it becomes available.
91 | ```
92 | ros::spin();
93 | ```
94 | we will also need to include the ros message to our include section. At the top of the program add
95 | ```
96 | #include
97 | ```
98 |
99 | ## Write our Callback Function
100 |
101 | we are going to call our call back function `detection_cb`. go ahead and add the following code
102 | ```
103 | void detection_cb(const darknet_ros_msgs::BoundingBoxes::ConstPtr& msg)
104 | {
105 | //rest of callback function code
106 | }
107 | ```
108 |
109 | now we want to get the name of the object we are seeing out of the message, so lets learn more about the structure of this message. run the following
110 | ```
111 | rosmsg show darknet_ros_msgs/BoundingBoxes
112 | ```
113 | you will see
114 | ```
115 | std_msgs/Header header
116 | uint32 seq
117 | time stamp
118 | string frame_id
119 | std_msgs/Header image_header
120 | uint32 seq
121 | time stamp
122 | string frame_id
123 | darknet_ros_msgs/BoundingBox[] bounding_boxes
124 | string Class <---- the information we want
125 | float64 probability
126 | int64 xmin
127 | int64 ymin
128 | int64 xmax
129 | int64 ymax
130 | ```
131 | we can access the above information by dereferencing the ROS message as follows `msg->bounding_boxes[i].Class`. add the following code to the callback function to print out all the objects detected.
132 |
133 | ```
134 | for( int i=0; i < msg->bounding_boxes.size(); i++)
135 | {
136 | ROS_INFO("%s detected", msg->bounding_boxes[i].Class.c_str());
137 | }
138 | ```
139 |
140 | ## Build and Run Program
141 |
142 | build
143 | ```
144 | catkin build
145 | ```
146 |
147 | then run our simulation and takeoff the drone
148 |
149 | we can run our new program by running
150 | ```
151 | rosrun iq_gnc sub
152 | ```
153 | we should see the objects seen by the drone being printed in the console
154 |
155 | ---
156 | ### References
157 |
158 | http://wiki.ros.org/ROS/Tutorials/WritingPublisherSubscriber%28c%2B%2B%29
159 |
--------------------------------------------------------------------------------
/docs/ardu_params_and_commands.md:
--------------------------------------------------------------------------------
1 | # ArduCopter Parameters and Helpful MAVproxy Commands
2 |
3 | ## Video Tutorial at https://youtu.be/A-JaRgtljLg
4 |
5 | ## Common MAVproxy Commands
6 |
7 | ```
8 | alias : command aliases: usage: alias
9 | arm : arm motors: usage: arm
10 | auxopt : select option for aux switches on CH7 and CH8 (ArduCopter only): Usage: auxopt set|show|reset|list
11 | disarm : disarm motors
12 | land : auto land
13 | log : log file handling: usage: log
14 | mode : mode change
15 | module : module commands: usage: module
16 | param : parameter handling: Usage: param
17 | position : position: Usage: position x y z (meters)
18 | rc : RC input control: Usage: rc
19 | reboot : reboot autopilot
20 | repeat : repeat a command at regular intervals: Usage: repeat
21 | script : run a script of MAVProxy commands
22 | setspeed : do_change_speed: Usage: setspeed SPEED_VALUE
23 | setyaw : condition_yaw: Usage: yaw ANGLE ANGULAR_SPEED MODE:[0 absolute / 1 relative]
24 | takeoff : takeoff: Usage: takeoff ALTITUDE_IN_METERS
25 | ```
26 |
27 | Helpful Commands Cheat Sheet [Here](helpful_commands.md)
28 |
29 | # ArduCopter Important Parameters
30 |
31 | ### Waypoint Nav Parameters
32 |
33 | #### WPNAV_SPEED: Waypoint Horizontal Speed Target
34 |
35 | Defines the speed in cm/s which the aircraft will attempt to maintain horizontally during a WP mission
36 | ```
37 | Range Increment Units
38 | 20 - 2000 50 centimeters per second
39 | ```
40 | #### WPNAV_RADIUS: Waypoint Radius
41 |
42 | Defines the distance from a waypoint, that when crossed indicates the wp has been hit.
43 | ```
44 | Range Increment Units
45 | 5 - 1000 1 centimeters
46 | ```
47 | #### WPNAV_SPEED_UP: Waypoint Climb Speed Target
48 |
49 | Defines the speed in cm/s which the aircraft will attempt to maintain while climbing during a WP mission
50 | ```
51 | Range Increment Units
52 | 10 - 1000 50 centimeters per second
53 | ```
54 |
55 | #### WPNAV_SPEED_DN: Waypoint Descent Speed Target
56 |
57 | Defines the speed in cm/s which the aircraft will attempt to maintain while descending during a WP mission
58 | ```
59 | Range Increment Units
60 | 10 - 500 10 centimeters per second
61 | ```
62 | #### WPNAV_ACCEL: Waypoint Acceleration
63 | Defines the horizontal acceleration in cm/s/s used during missions
64 | ```
65 | Range Increment Units
66 | 50 - 500 10 centimeters per square second
67 | ```
68 | #### WPNAV_ACCEL_Z: Waypoint Vertical Acceleration
69 |
70 | Defines the vertical acceleration in cm/s/s used during missions
71 | ```
72 | Range Increment Units
73 | 50 - 500 10 centimeters per square second
74 | ```
75 | #### WPNAV_RFND_USE: Waypoint missions use rangefinder for terrain following
76 | Note: This parameter is for advanced users
77 |
78 | This controls if waypoint missions use rangefinder for terrain following
79 | Values
80 | ```
81 | Value Meaning
82 | 0 Disable
83 | 1 Enable
84 | ```
85 | #### RTL_ALT: RTL Altitude
86 |
87 | The minimum alt above home the vehicle will climb to before returning. If the vehicle is flying higher than this value it will return at its current altitude.
88 | ```
89 | Range Increment Units
90 | 200 - 8000 1 centimeters
91 | ```
92 |
93 | #### RTL_CONE_SLOPE: RTL cone slope
94 |
95 | Defines a cone above home which determines maximum climb
96 | ```
97 | Range
98 | 0.5 - 10.0
99 |
100 | Values
101 | Value Meaning
102 | 0 Disabled
103 | 1 Shallow
104 | 3 Steep
105 |
106 | Increment
107 | .1
108 | ```
109 | #### RTL_SPEED: RTL speed
110 |
111 | Defines the speed in cm/s which the aircraft will attempt to maintain horizontally while flying home. If this is set to zero, WPNAV_SPEED will be used instead.
112 |
113 | ```
114 | Range Increment Units
115 | 0 - 2000 50 centimeters per second
116 | ```
117 | #### RTL_ALT_FINAL: RTL Final Altitude
118 |
119 | This is the altitude the vehicle will move to as the final stage of Returning to Launch or after completing a mission. Set to zero to land.
120 | ```
121 | Range Increment Units
122 | -1 - 1000 1 centimeters
123 | ```
124 | #### RTL_CLIMB_MIN: RTL minimum climb
125 |
126 | The vehicle will climb this many cm during the initial climb portion of the RTL
127 | ```
128 | Range Increment Units
129 | 0 - 3000 10 centimeters
130 | ```
131 | #### RTL_LOIT_TIME: RTL loiter time
132 |
133 | Time (in milliseconds) to loiter above home before beginning final descent
134 | ```
135 | Range Increment Units
136 | 0 - 60000 1000 milliseconds
137 | ```
138 |
139 |
140 |
141 | ---
142 | **References**
143 |
144 | [ArduCopter Full Parameter List](http://ardupilot.org/copter/docs/parameters.html)
145 |
--------------------------------------------------------------------------------
/docs/proximity/proximity_overview.md:
--------------------------------------------------------------------------------
1 | # Ardupilot Proximity Sensing
2 |
3 | Ardupilot now comes with obstacle avoidance features for both manual and automated flight modes. This tutorial shows how these features work and what they are capable of. The next tutorial will go into the theory of how the sensors and software work as well as how to simulate the sensors in gazebo.
4 |
5 | 
6 |
7 | ## Launching the Pre-Made Simulation
8 |
9 | This tutorial assumes you have installed and updated iq_sim and are running ardupilot `Copter-4.1` or higher.
10 |
11 | ### Updating Ardupilot
12 |
13 | ```
14 | cd ~/ardupilot
15 | git fetch
16 | git checkout Copter-4.1
17 | git pull
18 | git submodule update --init --recursive
19 | ```
20 | make sure dependencies are up to date. run
21 | ```
22 | Tools/environment_install/install-prereqs-ubuntu.sh -y
23 | ```
24 |
25 | clean your build environment
26 | ```
27 | cd ~/ardupilot
28 | ./waf clean
29 | ```
30 |
31 | In new versions of ardupilot, the sitl assumes you are running from a folder that is unique. This is because the sitl now stores params and log files in the directory from which you run the sitl. Below is an example of how I now run my sitl.
32 | ```
33 | mkdir ~/drone1
34 | cd ~/drone1
35 | sim_vehicle.py -v ArduCopter -f gazebo-iris --console
36 | ```
37 |
38 | ### IQ Range Finder Plugin
39 |
40 | The iq range finder plugin is a slight variation on the gazebo ros range finder plugin. My range finder plugin is essentially the same, except that when there is no object in the sensor's field of view the plugin publishes nan instead of the value of the range_max. Providing range_max when there are no detections was problematic because ardupilot's comparison is inclusive of the sensor range max. To install my plugin run the following.
41 |
42 | ```
43 | cd ~/catkin_ws/src
44 | git clone https://github.com/Intelligent-Quads/gazebo_ros_pkgs.git
45 | ```
46 | build your catkin workspace
47 | ```
48 | catkin build
49 | ```
50 |
51 | ### Update IQ Sim
52 |
53 | ```
54 | cd ~/catkin_ws/src/iq_sim
55 | git pull
56 | ```
57 |
58 |
59 | ### Run Sim
60 |
61 | First Terminal
62 | ```
63 | roslaunch iq_sim lake_travis.launch
64 | ```
65 |
66 | Second Terminal
67 | ```
68 | cd ~/catkin_ws/src/iq_sim/scripts/drone_travis
69 | ./drone1.sh
70 | ```
71 |
72 | Third Terminal
73 | ```
74 | roslaunch iq_sim apm.launch
75 | ```
76 |
77 | ## Obstacle Avoidance in Manual Mode
78 |
79 | The next tutorial will show how to configure the sensors and simulation. This tutorial will show how to configure the avoidance modes. Below are the list avoidance parameters:
80 |
81 | 
82 |
83 |
84 | The parameter I will focus on is `AVOID_BEHAVE`. The `AVOID_BEHAVE` param controls the behavior of the avoidance in non-auto flight modes. The options for this param are "slide" and "stop".
85 |
86 | From ardupilot wiki sliding does the following:
87 |
88 | "In LOITER, either stopping in front of the object or a “sliding” algorithm is used to avoid it. “Sliding” involves slowing and slightly yawing as the vehicle approaches the object. For example, if Copter approaches a fence line at an angle, it will “slide along” the fence as the operator pushes the Copter toward the fence. Head-on approaches would stop, even if the pilot continues to “pushes” it forward."
89 |
90 | **Note:** for a drone with 4 range sensors as shown below the drone will be unable to slide since a forward detection will not be able to tell which side of the "beam" the object is.
91 |
92 | 
93 |
94 |
95 | ## Obstacle Avoidance in Auto Flight Modes
96 |
97 | Ardupilot now has a couple of avoidance algorithms available in its auto flight modes.
98 |
99 | 
100 |
101 | The OA_Type allows you to select an avoidance algorithm you would like to use in automated flight. For this tutorial we will focus on the "bendy ruler" algorithm.
102 |
103 |
104 | The next parameter that is important to consider when using Ardupilot's built in obstacle avoidance algorithms is the `WPNAV_SPEED` parameter. This parameter controls the max speed the drone can fly at. If your avoidance sensor has a shorter range it is important to limit the speed your drone can fly at, since the shorter the range at which a drone can perceive an obstacle, the less time it has to react. For this tutorial I will be setting the `WPNAV_SPEED` parameter to 300 (300 cm/s).
105 |
106 |
107 | ## Warning
108 |
109 | While making this tutorial I noticed that the current avoidance algorithm has what I would consider undesired behavior. If the aircraft takes off from a location that has obstacles within the `AVOID_MARGIN`, then the aircraft will immediately take avoidance measures during takeoff. This can be especially problematic if you have sonar sensors that get returns from the ground prior to takeoff. Below is a picture that shows the situation I commonly experience in simulation.
110 |
111 | 
112 |
113 | ## Demo
114 |
115 | I will demonstrate this capability in Loiter, Auto and Guided Flight modes.
116 |
117 | 
118 |
119 |
120 | ---
121 | #### References
122 |
123 | - https://ardupilot.org/copter/docs/common-object-avoidance-landing-page.html
124 | - https://ardupilot.org/copter/docs/common-simple-object-avoidance.html
125 | - https://ardupilot.org/copter/docs/common-oa-bendyruler.html
--------------------------------------------------------------------------------
/docs/gazebo_world_modeling_intro.md:
--------------------------------------------------------------------------------
1 | # Intro to Gazebo World Modeling
2 |
3 | ## Video Tutorial at https://youtu.be/gpk8mQhhI4s
4 |
5 |
6 | ## Add the Open Gazebo Models Database
7 |
8 | Use git to get a bunch of open source gazebo models from the Open Source Robotics Foundation (OSRF)
9 |
10 | ```
11 | git clone https://github.com/osrf/gazebo_models.git
12 | ```
13 | Add Models path to the bashrc
14 | ```
15 | echo 'export GAZEBO_MODEL_PATH=~/gazebo_ws/gazebo_models:${GAZEBO_MODEL_PATH}' >> ~/.bashrc
16 | source ~/.bashrc
17 | ```
18 |
19 | ## A New Drone Simulation World
20 |
21 | make a new file in `~/catkin_ws/src/iq_sim/worlds/` called `hills.world`
22 |
23 | copy the following lines into the world
24 |
25 | ```
26 |
27 |
28 |
29 |
30 |
31 |
32 | quick
33 | 100
34 | 1.0
35 |
36 |
37 | 0.0
38 | 0.9
39 | 0.1
40 | 0.0
41 |
42 |
43 | -1
44 |
45 |
46 |
47 |
48 | model://sun
49 |
50 |
51 |
52 | model://ground_plane
53 |
54 |
55 |
56 |
57 | model://iris_with_standoffs_demo
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 | 0 -0.01 0.070 .8 0 1.57
68 |
69 | 0 0 0 0 0 0
70 | 0.1
71 |
72 | 0.001
73 | 0
74 | 0
75 | 0.001
76 | 0
77 | 0.001
78 |
79 |
80 |
81 | 0 0 0 0 0 0
82 |
83 |
84 | 0.025
85 | 0.025
86 |
87 |
88 |
89 |
93 |
94 |
95 |
96 |
97 | 0 0 0 -1.57 -1.57 0
98 |
99 | 1.0472
100 |
101 | 640
102 | 480
103 |
104 |
105 | 0.05
106 | 1000
107 |
108 |
109 | 1
110 | 10
111 | true
112 |
113 |
116 |
117 | true
118 | 0.0
119 | webcam
120 | image_raw
121 | camera_info
122 | camera_link
123 | 0.07
124 | 0.0
125 | 0.0
126 | 0.0
127 | 0.0
128 | 0.0
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 | 0 0 0.0 0 0 0
138 | iris::iris_demo::gimbal_small_2d::tilt_link
139 | camera
140 |
141 |
142 | 0
143 | 0
144 |
145 | 0 0 1
146 | true
147 |
148 |
149 |
150 |
151 |
152 |
153 | ```
154 | The above code is `sdf` code. It is a markup language specific to Gazebo. The above lines form the basis of a good Gazebo drone simulation. The physics tag above is tailored to best simulate the drone's flight. The next part of the simulation is the `iris`, which is our drone. The code inside the iris model tag is the code needed for our camera sensor. We will go over sdf coding in future tutorials.
155 |
156 | ## Add a Launch File to Facilitate the ROS Plugins
157 |
158 | make a file in `~/catkin_ws/src/iq_sim/launch` called `hills.launch`
159 |
160 | then add the following lines
161 |
162 | ```
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 | ```
171 | save the files ( `ctrl + s` )
172 |
173 | ## Launch the New World
174 |
175 | ```
176 | roslaunch iq_sim hills.launch
177 | ```
178 |
179 | ## Add Other Models
180 |
181 | click on the `Insert` tab in the left column
182 |
183 | add the model `Winding Valley Heightmap`
184 |
185 | delete the model `Ground Plane`
186 |
187 | add the model `standing person`
188 |
189 | reposition the models as seen fit
190 |
191 | save file and override our file hills.world
192 |
193 | ---
194 |
195 | ### References
196 |
197 | http://gazebosim.org/tutorials
198 |
--------------------------------------------------------------------------------
/docs/swarming_ardupilot.md:
--------------------------------------------------------------------------------
1 | # Swarming Using Ardupilot
2 |
3 | ## Video Tutorial at https://youtu.be/r15Tc6e2K7Y
4 |
5 | This tutorial shows you how to model and control a swarm using Ardupilot and Gazebo.
6 |
7 | ## Prerequisites
8 |
9 | add the models folder in the iq_sim repo to the gazebo models path
10 | ```
11 | echo "export GAZEBO_MODEL_PATH=${GAZEBO_MODEL_PATH}:$HOME/catkin_ws/src/iq_sim/models" >> ~/.bashrc
12 | ```
13 |
14 |
15 | ## Connecting Multiple Vehicles SITL to Gazebo
16 |
17 | You should think about ardupilot as purely a control system. It takes sensor inputs and outputs commands to actuators. Gazebo is playing the role of a Flight Dynamics Model (FDM). The FDM encompasses all of the following: sensor models, actuator models, and the dynamics of the vehicle. In real life ardupilot would communicate with your sensors via serial connections, but when you run SITL with gazebo, ardupilot talks to the sensors and actuators via UDP connections (an IP protocol). Because of this we will need to specify different UDP ports in our robot models so that these streams do not conflict.
18 |
19 | Lets start by copying the drone1 folder in `iq_sim/models` and paste it as a copy. rename the folder to drone2.
20 |
21 | then navigate to the drone2 folder open model.config and change the `` tag to
22 | ```
23 | drone2
24 | ```
25 |
26 | open the model.sdf and change the model name to `drone2` as well
27 |
28 | scroll down to the ardupilot plugin and change
29 | ```
30 | 9002
31 | 9003
32 | ```
33 | to
34 | ```
35 | 9012
36 | 9013
37 | ```
38 |
39 | Each successive drone fdm port should be incremented by 10 ie.
40 |
41 | drone3
42 | ```
43 | 9022
44 | 9023
45 | ```
46 | drone4
47 | ```
48 | 9032
49 | 9033
50 | ```
51 | etc...
52 |
53 |
54 | add the drone to `runway.world`
55 | ```
56 |
57 | 10 0 0 0 0 0
58 |
59 | model://drone2
60 |
61 |
62 | ```
63 | launch the world
64 | ```
65 | roslaunch iq_sim runway.launch
66 | ```
67 |
68 | ## launch ardupilot terminals
69 |
70 | To tell each ardupilot instance to increment its UDP connection, use
71 | ```
72 | -I0 for drone1
73 | -I1 for drone2
74 | -I2 for drone3
75 | etc ...
76 | ```
77 |
78 | launch with
79 | ```
80 | sim_vehicle.py -v ArduCopter -f gazebo-iris --console -I0
81 | ```
82 | ```
83 | sim_vehicle.py -v ArduCopter -f gazebo-iris --console -I1
84 | ```
85 |
86 | ## Connecting Multiple Drones to a Ground Station
87 |
88 | Each Drone in your swarm will be producing mavlink messages. In order to discern what message is from which drone, we will need to assign each drone a unique system ID. This is controlled by the parameter `SYSID_THISMAV`.
89 |
90 | ## Launching Ardupilot SITL Instances with Unique Parameters
91 |
92 | Usually when we launch the ardupilot sitl simulation we launch the instance using the below command
93 | ```
94 | sim_vehicle.py -v ArduCopter -f gazebo-iris --console
95 | ```
96 | `-f` is used to specify the frame which Ardupilot will launch the simulation for. The frame type also specifies the location of the parameter files associated with the frame type. In order to simulate drones with different parameters we will need to create our own custom frames and parameter files.
97 |
98 | First, we will want to edit the file `ardupilot/Tools/autotest/pysim/vehicleinfo.py` add the following lines in the SIM section.
99 | ```
100 | "gazebo-drone1": {
101 | "waf_target": "bin/arducopter",
102 | "default_params_filename": ["default_params/copter.parm",
103 | "default_params/gazebo-drone1.parm"],
104 | },
105 | "gazebo-drone2": {
106 | "waf_target": "bin/arducopter",
107 | "default_params_filename": ["default_params/copter.parm",
108 | "default_params/gazebo-drone2.parm"],
109 | },
110 | "gazebo-drone3": {
111 | "waf_target": "bin/arducopter",
112 | "default_params_filename": ["default_params/copter.parm",
113 | "default_params/gazebo-drone3.parm"],
114 | },
115 | ```
116 | We will then need to create the following files
117 |
118 | - `default_params/gazebo-drone1.parm`
119 | - `default_params/gazebo-drone2.parm`
120 | - `default_params/gazebo-drone3.parm`
121 |
122 | each with their corresponding `SYSID_THISMAV` parameter value ie
123 | - `default_params/gazebo-drone1.parm` should contain `SYSID_THISMAV 1`
124 | - `default_params/gazebo-drone2.parm` should contain `SYSID_THISMAV 2`
125 | - `default_params/gazebo-drone3.parm` should contain `SYSID_THISMAV 3`
126 |
127 | ### Example gazebo-drone1.parm File
128 | ```
129 | # Iris is X frame
130 | FRAME_CLASS 1
131 | FRAME_TYPE 1
132 | # IRLOCK FEATURE
133 | RC8_OPTION 39
134 | PLND_ENABLED 1
135 | PLND_TYPE 3
136 | # SONAR FOR IRLOCK
137 | SIM_SONAR_SCALE 10
138 | RNGFND1_TYPE 1
139 | RNGFND1_SCALING 10
140 | RNGFND1_PIN 0
141 | RNGFND1_MAX_CM 5000
142 | SYSID_THISMAV 1
143 | ```
144 |
145 | ## Connecting Multiple Drones to qgroundcontrol
146 |
147 | In order to connect multiple unique vehicles to a ground station, you will need to make sure that the TCP or UDP connection ports do not conflict. For this example we will be using a TCP connection. The first thing we need to do is relaunch our SITL ardupilot instances with a unique in/out TCP port for our GCS.
148 |
149 | ```
150 | sim_vehicle.py -v ArduCopter -f gazebo-drone1 --console -I0 --out=tcpin:0.0.0.0:8100
151 | ```
152 | ```
153 | sim_vehicle.py -v ArduCopter -f gazebo-drone2 --console -I1 --out=tcpin:0.0.0.0:8200
154 | ```
155 |
156 | - note 0.0.0.0 allows any device on our local network to connect to the ardupilot instance
157 |
158 | ### setup qgroundcontrol to accept multiple vehicles
159 |
160 | navigate to the settings tab and click on `Comm Links`. Then find the add button to create a new comm link.
161 |
162 | fill in each as below for the vehicle's unique TCP ports
163 |
164 | 
165 |
166 | - note you can connect from a different device on the same network, by entering the ip address of the host computer in the host address box
167 |
168 |
--------------------------------------------------------------------------------
/docs/design_princepals.md:
--------------------------------------------------------------------------------
1 | # Designing a Multi-rotor Drone
2 |
3 | **WORK IN PROGRESS**
4 |
5 | This document goes through the IQ design process for designing drones for intelligent missions. All of the autonomous drones I have designed have been for research purposes. From doing this, I have noticed there is not a lot of material about designing and building your custom drone for this purpose. I have tried to aggregate all of the information I have learned over the years into one document. I have also developed a fairly robust design process to help guide new and old drone engineers through their design choices.
6 |
7 | ## Overview of the Process
8 | 1. Figure out what you want your drone to do
9 | 2. Figure out your sensor package
10 | 3. Design a frame to carry your payload
11 | 4. Estimate the weight of the frame and payload
12 | 5. Do power thrust analysis and select motors, escs and batteries
13 |
14 | ## What will my drone do?
15 |
16 | This is where you examine the mission and decide what you will design to accomplish the goal. Start thinking of how your drone will fly, navigate, and take data.
17 |
18 | ## Sensor Compute and Comms Selection
19 |
20 | You should figure out all of the mission specific hardware computers and sensors before you design the vehicle to move your payload.
21 |
22 | ### Payload Components Most Intelligent Drones Need
23 | - FCC
24 | - Companion Computer
25 | - Camera(s)
26 | - RC receiver
27 | - Telemetry Radio
28 | - Navigation equipment (GPS, OF, altimeter)
29 | - Avoidance sensors (lidar, sonar, more cameras)
30 |
31 | ## Designing a Frame
32 |
33 | How do we best arrange our sensors, compute, and comms to accomplish the mission? Make a CAD model to best lock down the spatial arrangement of the components. Use the CAD to get a better weight estimate of your frame.
34 |
35 | ## Picking out motors ESCs and Props
36 |
38 | This part of the process is the most tricky. All of these bits and pieces are coupled, so changing one affects the other. This is not a well-defined process, and in its current state, most hobbyists rely on trial and error, as well as word of mouth. It is more of an art than a science. I will attempt, based on my experience, to make this more scientific and use some aerospace principles I learned in college.
38 |
39 | It is helpful to think of what our ultimate goal is when designing a drone. We want to carry a payload, and usually we want to be able to carry it for as long as possible. While we may think we have constraints on the flight time and payload, ultimately, people want to add more sensors and make the vehicle do more things. As the designer of the aircraft, we should try to give the mission designers as much margin as possible. To get the most out of our system, we should analyze the efficiency of our system and try to optimize each component. The two main design principles I will try to optimize are
40 |
41 | - Aerodynamic/Mechanical Efficiency
42 | - Electrical Efficiency
43 |
44 | ## Aerodynamic Efficiency of a Multi-rotor
45 |
46 | The Aerodynamic efficiency of your multi-rotor is affected by a couple different parameters.
47 | - The propeller disk
48 | - The pitch of the propeller
49 | - Blade count
50 | - The mass of the vehicle
51 |
52 | ## Propeller Disk
53 |
54 | A multi-rotor works by taking mass(air) and throwing it down so the vehicle can hover or ascend. This is Newton’s third law in a nutshell: for every action, there is an equal and opposite reaction. To generate more thrust, we want to throw as much air as possible down, one way to do this is to increase the propeller disk diameter. When we look at the equation for a circle, we notice that the area increases by the square of the radius, so a small increase in the radius will net us a whole heap more of air!
55 |
56 | Principal:
57 | - A bigger disk area equals a lot more thrust
58 |
59 | ## Propeller Pitch
60 |
61 | By the logic of wanting to throw as much air down as possible, we can increase thrust by throwing the air down faster. This can be accomplished by increasing the pitch of your blade. So great, let’s have a large rotor and high pitch! Not so fast. Increasing the pitch of the propeller increases the drag of the rotor, and the added pitch doesn’t transfer the energy from the rotor to the air very well.
62 |
63 | Principal:
64 | - Higher pitch equals more thrust
65 | - Higher pitch equals low efficiency
66 |
67 | ## Blade Count
68 |
69 | Similar to blade pitch, more blades will give higher thrust, but will also increases the drag of the blade as well as the mass.
70 |
71 | Principal:
72 | - More blades equal more thrust
73 | - More blades equal lower efficiency
74 |
75 | ## Mass of Vehicle
76 |
77 | Basically there is a decent amount of math that goes into this, but the big takeaway is this equation.
78 |
79 |
80 |
81 | We notice that the power to hover increases to the power of 1.5, which means doubling the mass requires almost 3 times the power! This is a huge mistake people make: they say “I’m only going to add a little bit of weight”, but then their flight time decreases quite a bit.
82 |
83 | Principal:
84 | - Required power increases with the 1.5 power of mass
85 |
86 | ## Aerodynamic Efficiency Summary
87 |
88 | - A bigger disk area equals more thrust by the square of the radius
89 | - Higher pitch equals more thrust
90 | - Higher pitch equals low efficiency
91 | - More blades equal more thrust
92 | - More blades equal low efficiency
93 | - Required power increases with the 1.5 power of mass
94 |
95 |
96 | ## Electrical Efficiency of the Drone
97 | The basic idea behind analyzing the electrical efficiency is to minimize the resistance loss in the circuit. Since P=I^2R, this means that the power consumed by the resistance in the wires increases with the square of the current. This means having a higher voltage system, which in turn reduces the current, is a more efficient electrical system.
98 |
99 | ## Selecting Motors ECSs and Battery
100 | Now that we understand how to best use the energy stored within our drone, we can look at selecting motors and escs.
101 |
102 | The first thing to do is take the weight estimate you calculated from our CAD and parts spreadsheet and use this as the minimum thrust our motors will need at 50% throttle. The best thing to do is take the estimated weight of the drone and add a healthy margin; the more you add, the better off you will be. This is especially important if this is a developmental drone. As you continue to develop your application, you will want to add more sensors and actuators to the aircraft, which will kill your flight time quickly. As we noted above, the power required to hover is to the 1.5 power of your mass!
103 |
104 |
105 |
106 |
107 |
108 |
--------------------------------------------------------------------------------
/docs/multi_mavros_drones.md:
--------------------------------------------------------------------------------
1 | # Controlling Multiple Ardupilot Drones Via MAVROS
2 |
3 |
4 |
5 | In this tutorial you will learn how to use the iq_gnc functions to control multiple drones on the same ros network.
6 |
7 | ## Launching a MAVROS Instance for Each Drone
8 |
9 | Each ardupilot SITL instance creates a couple of unique MAVlink streams. This is shown in your mavproxy terminal when you launch your ardupilot instance. For example when launching ardupilot with the below launch options
10 |
11 | ```
12 | sim_vehicle.py -v ArduCopter -f gazebo-drone2 -I1
13 | ```
14 | the output mavlink streams are shown as follows
15 | ```
16 | SIM_VEHICLE: "mavproxy.py" "--master" "tcp:127.0.0.1:5770" "--sitl" "127.0.0.1:5511" "--out" "127.0.0.1:14560" "--out" "127.0.0.1:14561"
17 | ```
18 |
19 | We will need to tell MAVROS which udp interface to use for the drone we want MAVROS to connect to. For this vehicle the interface we will be using is 14561. To tell MAVROS to use this port we will launch with the following
20 | ```
21 | roslaunch iq_sim apm.launch fcu_url:=udp://127.0.0.1:14561@14565 mavros_ns:=/drone2 tgt_system:=2
22 | ```
23 |
24 | You will notice in the command above the argument `mavros_ns:=/drone2`. This is to create a unique mavros topic for each vehicle. For example
25 | ```
26 | /mavros/global_position/local
27 | ```
28 | will become
29 | ```
30 | /drone1/mavros/global_position/local
31 | ```
32 |
33 | you will also notice the launch argument `tgt_system:=2` is passed. This corresponds to the SYSID_THISMAV parameter we set in the `gazebo-droneX.parm` files in the previous tutorial.
34 |
35 |
36 | ## Executing a Multi-Drone Swarm Mission
37 |
38 | **note** if you have not updated your iq_sim and iq_gnc repos lately, go ahead and run a `git pull` in both `iq_sim` and `iq_gnc`
39 |
40 | In this example we will use our previously created `square.cpp` program in conjunction with the `multi_drone.world` gazebo sim.
41 |
42 | first make a copy of square.cpp and call it multi_square.cpp.
43 |
44 | We will be creating a drone swarm where each drone flies a synchronized square pattern. Instead of having to change each drone to GUIDED manually, we will be having the program initiate on start. To do this we will be replacing `wait4start();` with `set_mode("GUIDED");`. This is the only code change that will be needed.
45 |
46 | Next we are going to setup a new launch file to launch each drone's gnc code. Create a new file in launch called `multi_square.launch`
47 | ```
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 | ```
71 |
72 | you will notice the below line
73 | ```
74 |
75 | ```
76 | this parameter tells the gnc_functions what namespace this program's mavros topics fall under.
77 |
78 | The next thing we will need to do is setup a mavros launch file to create the needed links between the drone and the companion computer ros environment.
79 |
80 | create the file `multi-am.launch` within `iq_sim/launch`
81 |
82 | add the below
83 | ```
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 | ```
121 |
122 | - you will notice that the `fcu_url` increments by 10 for each corresponding udp port for the drones.
123 | - the `target_system_id` increments by 1 for each instance.
124 | - you will notice that each node has a unique ns (namespace). this will put a unique topic prefix for each mavros topic
125 |
126 | You will notice that launching each ardupilot instance might be tedious, so it might be helpful to create the below shell script as `multi-ardupilot.sh`
127 |
128 | ```
129 | #!/bin/bash
130 |
131 | gnome-terminal \
132 | --tab -e "sim_vehicle.py -v ArduCopter -f gazebo-drone1 -I0" \
133 | --tab -e "sim_vehicle.py -v ArduCopter -f gazebo-drone2 -I1" \
134 | --tab -e "sim_vehicle.py -v ArduCopter -f gazebo-drone3 -I2" \
135 | ```
136 |
137 | ## Running the Program
138 |
139 | Run the gazebo sim
140 | ```
141 | roslaunch iq_sim multi_drone.launch
142 | ```
143 | Run the ardupilot instances
144 | ```
145 | ./multi-ardupilot.sh
146 | ```
147 | Run the mavros instances
148 | ```
149 | roslaunch iq_sim multi-apm.launch
150 | ```
151 | Run the guidance program
152 | ```
153 | roslaunch iq_gnc multi_square.launch
154 | ```
155 |
156 |
157 |
158 |
--------------------------------------------------------------------------------
/docs/opencv_intro.md:
--------------------------------------------------------------------------------
1 | # Introduction to OpenCV for Drone Applications
2 |
3 | This tutorial assumes you have installed the ardupilot-sitl with the ardupilot gazebo plugin. This tutorial will teach you how to create a simple computer vision algorithm to be run on a ROS image stream.
4 |
5 | This tutorial is based on the cv_bridge tutorial http://wiki.ros.org/cv_bridge/Tutorials/UsingCvBridgeToConvertBetweenROSImagesAndOpenCVImages
6 |
7 |
8 | ## Concept of Operation
9 |
10 | It is important to have an understanding of how this might be used on a real drone and the differences that exist between our real aircraft and the simulation environment.
11 |
12 | Real Aircraft:
13 | 
14 |
15 | On a real ardupilot drone, vision processing is often done on a companion computer. This allows the Flight Control Unit (FCU) to be dedicated to controlling the aircraft while less flight critical tasks are off-loaded to a secondary computer, usually more optimized for high level autonomy tasks.
16 |
17 | Simulated Aircraft:
18 | 
19 | In our simulated environment, we will be using the gazebo ros camera plugin which will publish an image stream of what our simulated camera is seeing. On a real drone, we might use the [video_stream_opencv](http://wiki.ros.org/video_stream_opencv) package to create the video stream from a real camera. In the simulated environment, we will be skipping this step and having gazebo do this job for us.
20 | ## Pre-Req
21 |
22 | Clone the iq_vision ros package
23 | ```
24 | git clone https://github.com/Intelligent-Quads/iq_vision.git
25 | ```
26 |
27 | ## Setup
28 |
29 | Create the file `canny_edge.cpp` in `iq_vision/src`
30 |
31 | Add the following line to the end of the `CMakeLists.txt`
32 | ```
33 | add_executable(canny_edge src/canny_edge.cpp)
34 | target_link_libraries(canny_edge ${catkin_LIBRARIES} ${OpenCV_INCLUDE_DIRS})
35 | ```
36 |
37 | ## Setup our ROS node
38 |
39 | The following code includes the needed ros libraries as well as the opencv libraries we will need for the tutorial.
40 |
41 | ```
42 | #include
43 | #include
44 | #include
45 | #include
46 | #ifdef ROS_NOETIC
47 | #include
48 | #include
49 | #else
50 | #include
51 | #include
52 | #endif
53 | // Add vision object here
54 |
55 | int main(int argc, char** argv)
56 | {
57 | ros::init(argc, argv, "image_converter");
58 | ros::spin();
59 | return 0;
60 | }
61 | ```
62 |
63 |
64 | ## Creating an Object to do Vision Processing
65 |
66 | ```
67 | class ImageConverter
68 | {
69 | ros::NodeHandle nh_;
70 | image_transport::ImageTransport it_;
71 | image_transport::Subscriber image_sub_;
72 | image_transport::Publisher image_pub_;
73 |
74 | public:
75 | ImageConverter()
76 | : it_(nh_)
77 | {
78 | // Subscrive to input video feed and publish output video feed
79 | image_sub_ = it_.subscribe("/webcam/image_raw", 1,
80 | &ImageConverter::imageCb, this);
81 | image_pub_ = it_.advertise("/image_converter/output_video", 1);
82 |
83 | cv::namedWindow("source");
84 | cv::namedWindow("canny");
85 | }
86 |
87 | ~ImageConverter()
88 | {
89 | cv::destroyWindow("source");
90 | cv::destroyWindow("canny");
91 | }
92 |
93 | void imageCb(const sensor_msgs::ImageConstPtr& msg)
94 | {
95 | cv_bridge::CvImagePtr cv_ptr;
96 | try
97 | {
98 | cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);
99 | }
100 | catch (cv_bridge::Exception& e)
101 | {
102 | ROS_ERROR("cv_bridge exception: %s", e.what());
103 | return;
104 | }
105 |
106 | // Run Canny edge detector on image
107 | cv::Mat src = cv_ptr->image;
108 | cv::Mat dst;
109 | cv::Canny( src, dst, 0, 0, 3 );
110 |
111 | // Update GUI Window
112 | cv::imshow("source", src);
113 | cv::imshow("canny", dst);
114 | cv::waitKey(3);
115 |
116 | sensor_msgs::ImagePtr msg_out = cv_bridge::CvImage(std_msgs::Header(), "mono8", dst).toImageMsg();
117 | // Output modified video stream
118 | image_pub_.publish(msg_out);
119 | }
120 | };
121 |
122 | ```
123 |
124 | The above code shows how to subscribe to a ros image stream and run an opencv image processing algorithm on the images. The method `imageCb` will be called when receiving a new image in the image stream `/webcam/image_raw`
125 |
126 | Finally, we need to declare an instance of our `ImageConverter` object. Add the following in our main function right before `ros::spin();`
127 |
128 | ```
129 | ImageConverter ic;
130 | ```
131 |
132 | The finished code is shown below
133 | ```
134 | #include
135 | #include
136 | #include
137 | #include
138 | #ifdef ROS_NOETIC
139 | #include
140 | #include
141 | #else
142 | #include
143 | #include
144 | #endif
145 | // Add vision object here
146 |
147 | class ImageConverter
148 | {
149 | ros::NodeHandle nh_;
150 | image_transport::ImageTransport it_;
151 | image_transport::Subscriber image_sub_;
152 | image_transport::Publisher image_pub_;
153 |
154 | public:
155 | ImageConverter()
156 | : it_(nh_)
157 | {
158 | // Subscrive to input video feed and publish output video feed
159 | image_sub_ = it_.subscribe("/webcam/image_raw", 1,
160 | &ImageConverter::imageCb, this);
161 | image_pub_ = it_.advertise("/image_converter/output_video", 1);
162 |
163 | cv::namedWindow("source");
164 | cv::namedWindow("canny");
165 | }
166 |
167 | ~ImageConverter()
168 | {
169 | cv::destroyWindow("source");
170 | cv::destroyWindow("canny");
171 | }
172 |
173 | void imageCb(const sensor_msgs::ImageConstPtr& msg)
174 | {
175 | cv_bridge::CvImagePtr cv_ptr;
176 | try
177 | {
178 | cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);
179 | }
180 | catch (cv_bridge::Exception& e)
181 | {
182 | ROS_ERROR("cv_bridge exception: %s", e.what());
183 | return;
184 | }
185 |
186 | // Run Canny edge detector on image
187 | cv::Mat src = cv_ptr->image;
188 | cv::Mat dst;
189 | cv::Canny( src, dst, 0, 0, 3 );
190 |
191 | // Update GUI Window
192 | cv::imshow("source", src);
193 | cv::imshow("canny", dst);
194 | cv::waitKey(3);
195 |
196 | sensor_msgs::ImagePtr msg_out = cv_bridge::CvImage(std_msgs::Header(), "mono8", dst).toImageMsg();
197 | // Output modified video stream
198 | image_pub_.publish(msg_out);
199 | }
200 | };
201 |
202 |
203 | int main(int argc, char** argv)
204 | {
205 | ros::init(argc, argv, "image_converter");
206 | ImageConverter ic;
207 | ros::spin();
208 | return 0;
209 | }
210 |
211 | ```
--------------------------------------------------------------------------------
/docs/avoidance.md:
--------------------------------------------------------------------------------
1 | # Drone Obstacle Avoidance
2 |
3 | ## Video Tutorial at https://youtu.be/-jgRhJKBXhs
4 |
5 | This tutorial will teach how to use a 2d lidar to detect and avoid obstacles using the potential field method.
6 |
7 | ## Create Obstacle Avoidance Program and add to CMakeLists
8 |
9 | create a file called `avoidance.cpp` in `iq_gnc/src`
10 |
11 | then add the following to the iq_gnc CMakeLists
12 |
13 | ```
14 | add_executable(avoidance src/avoidance.cpp)
15 | target_link_libraries(avoidance ${catkin_LIBRARIES})
16 | ```
17 |
18 | ## Set up a Generic C++ ROS Node
19 |
20 | ```
21 | #include
22 | #include
23 |
24 | int main(int argc, char **argv)
25 | {
26 | //initialize ros
27 | ros::init(argc, argv, "avoidance_node");
28 | ros::NodeHandle n;
29 |
30 | //add rest of code here
31 |
32 | return 0;
33 | }
34 | ```
35 | here we add the include files we need and define our ros node similar to previous tutorials
36 |
37 | ## Add subscriber for lidar
38 |
39 | first add the include file for the message of the lidar we will be using
40 | ```
41 | #include
42 | ```
43 |
44 | then in our main function define the ros subscriber as so
45 | ```
46 | ros::Subscriber collision_sub = n.subscribe("/spur/laser/scan", 1, scan_cb);
47 | ```
49 | We will be using a callback function to access the lidar data. Let's add that between the includes and the main function.
49 | ```
50 | void scan_cb(const sensor_msgs::LaserScan::ConstPtr& msg)
51 | {
52 |
53 | }
54 | ```
55 | We will fill in the avoidance logic later.
56 |
57 | ## Takeoff and add control loop
58 | ```
59 | //initialize control publisher/subscribers
60 | init_publisher_subscriber(n);
61 |
62 | // wait for FCU connection
63 | wait4connect();
64 |
65 | //wait for user to switch to mode GUIDED
66 | wait4start();
67 |
68 | //create local reference frame
69 | initialize_local_frame();
70 |
71 | //request takeoff
72 | takeoff(2);
73 |
74 | set_destination(0,0,2,0);
75 |
76 | ros::Rate rate(2.0);
77 | int counter = 0;
78 | while(ros::ok())
79 | {
80 |
81 | ros::spinOnce();
82 | rate.sleep();
83 |
84 | }
85 |
86 |
87 | ```
88 | This will make our drone take off and hold position.
89 |
90 | ## Parse the Lidar Data
91 |
92 | we are going to go through the returns of the lidar and create a direction and magnitude in which the drone will maneuver. We will use a version of the potential field method seen in this [paper](http://users.isr.ist.utl.pt/~mir/pub/ObstacleAvoidance.pdf)
93 |
94 | ```
95 |
96 | sensor_msgs::LaserScan current_2D_scan;
97 | current_2D_scan = *msg;
98 | float avoidance_vector_x = 0;
99 | float avoidance_vector_y = 0;
100 | bool avoid = false;
101 |
102 | for(int i=1; i .35)
108 | {
109 | avoid = true;
110 | float x = cos(current_2D_scan.angle_increment*i);
111 | float y = sin(current_2D_scan.angle_increment*i);
112 | float U = -.5*k*pow(((1/current_2D_scan.ranges[i]) - (1/d0)), 2);
113 |
114 | avoidance_vector_x = avoidance_vector_x + x*U;
115 | avoidance_vector_y = avoidance_vector_y + y*U;
116 |
117 | }
118 | }
119 | ```
120 |
121 | The following code generates the avoidance waypoint in the correct reference frame and scales it
122 |
123 | ```
124 | float current_heading = get_current_heading();
125 | float deg2rad = (M_PI/180);
126 | avoidance_vector_x = avoidance_vector_x*cos((current_heading)*deg2rad) - avoidance_vector_y*sin((current_heading)*deg2rad);
127 | avoidance_vector_y = avoidance_vector_x*sin((current_heading)*deg2rad) + avoidance_vector_y*cos((current_heading)*deg2rad);
128 |
129 | if(avoid)
130 | {
131 | if( sqrt(pow(avoidance_vector_x,2) + pow(avoidance_vector_y,2)) > 3)
132 | {
133 | avoidance_vector_x = 3 * (avoidance_vector_x/sqrt(pow(avoidance_vector_x,2) + pow(avoidance_vector_y,2)));
134 | avoidance_vector_y = 3 * (avoidance_vector_y/sqrt(pow(avoidance_vector_x,2) + pow(avoidance_vector_y,2)));
135 | }
136 | geometry_msgs::Point current_pos;
137 | current_pos = get_current_location();
138 | set_destination(avoidance_vector_x + current_pos.x, avoidance_vector_y + current_pos.y, 2, 0);
139 | }
140 |
141 |
142 | ```
143 |
144 |
145 |
146 |
147 | ## Finished Program
148 |
149 |
150 | ```
151 | #include
152 | #include
153 | #include
154 |
155 |
156 |
157 |
158 | void scan_cb(const sensor_msgs::LaserScan::ConstPtr& msg)
159 | {
160 |
161 | sensor_msgs::LaserScan current_2D_scan;
162 | current_2D_scan = *msg;
163 | float avoidance_vector_x = 0;
164 | float avoidance_vector_y = 0;
165 | bool avoid = false;
166 |
167 | for(int i=1; i .35)
173 | {
174 | avoid = true;
175 | float x = cos(current_2D_scan.angle_increment*i);
176 | float y = sin(current_2D_scan.angle_increment*i);
177 | float U = -.5*k*pow(((1/current_2D_scan.ranges[i]) - (1/d0)), 2);
178 |
179 | avoidance_vector_x = avoidance_vector_x + x*U;
180 | avoidance_vector_y = avoidance_vector_y + y*U;
181 |
182 | }
183 | }
184 | float current_heading = get_current_heading();
185 | float deg2rad = (M_PI/180);
186 | avoidance_vector_x = avoidance_vector_x*cos((current_heading)*deg2rad) - avoidance_vector_y*sin((current_heading)*deg2rad);
187 | avoidance_vector_y = avoidance_vector_x*sin((current_heading)*deg2rad) + avoidance_vector_y*cos((current_heading)*deg2rad);
188 |
189 | if(avoid)
190 | {
191 | if( sqrt(pow(avoidance_vector_x,2) + pow(avoidance_vector_y,2)) > 3)
192 | {
193 | avoidance_vector_x = 3 * (avoidance_vector_x/sqrt(pow(avoidance_vector_x,2) + pow(avoidance_vector_y,2)));
194 | avoidance_vector_y = 3 * (avoidance_vector_y/sqrt(pow(avoidance_vector_x,2) + pow(avoidance_vector_y,2)));
195 | }
196 | geometry_msgs::Point current_pos;
197 | current_pos = get_current_location();
198 | set_destination(avoidance_vector_x + current_pos.x, avoidance_vector_y + current_pos.y, 2, 0);
199 | }
200 |
201 |
202 | }
203 |
204 | int main(int argc, char **argv)
205 | {
206 | //initialize ros
207 | ros::init(argc, argv, "gnc_node");
208 | ros::NodeHandle n;
209 | ros::Subscriber collision_sub = n.subscribe("/spur/laser/scan", 1, scan_cb);
210 | //initialize control publisher/subscribers
211 | init_publisher_subscriber(n);
212 |
213 | // wait for FCU connection
214 | wait4connect();
215 |
216 | //wait for used to switch to mode GUIDED
217 | wait4start();
218 |
219 | //create local reference frame
220 | initialize_local_frame();
221 |
222 | //request takeoff
223 | takeoff(2);
224 |
225 |
226 | set_destination(0,0,2,0);
227 | //specify control loop rate. We recommend a low frequency to not over load the FCU with messages. Too many messages will cause the drone to be sluggish
228 | ros::Rate rate(2.0);
229 | int counter = 0;
230 | while(ros::ok())
231 | {
232 |
233 | ros::spinOnce();
234 | rate.sleep();
235 |
236 |
237 |
238 | }
239 |
240 | return 0;
241 | }
242 |
243 |
244 | ```
245 |
246 | ---
247 | ### References
248 |
249 | http://users.isr.ist.utl.pt/~mir/pub/ObstacleAvoidance.pdf
250 |
251 |
252 |
--------------------------------------------------------------------------------
/docs/ros_intro.md:
--------------------------------------------------------------------------------
1 | # Introduction to Ros for Autonomous Drones
2 |
3 | ### Video Tutorial at https://youtu.be/N4XvVldWlXk
4 |
5 | Although ROS (Robot Operating System) is not an operating system, it provides services designed for a heterogeneous computer cluster such as hardware abstraction, low-level device control, implementation of commonly used functionality, message-passing between processes, and package management.
6 |
7 | We will mostly be using the message passing functionality. To demonstrate this we will launch our simulator again and run a few commands.
8 |
9 | ## Make Sure to Install the ROS Plugins for Gazebo:
10 | ```
11 | sudo apt install ros-melodic-gazebo-ros ros-melodic-gazebo-plugins
12 | ```
13 |
14 | ## Launch Gazebo World
15 | This time we will launch our Gazebo world with ROS as follows
16 | ```
17 | roslaunch iq_sim runway.launch
18 | ```
19 | We will also launch the ArduCopter simulator as well. I have made a nice script so that you don't have to remember the big command `cd ~/ardupilot/ArduCopter/ && sim_vehicle.py -v ArduCopter -f gazebo-iris --console` from the previous tutorials. I recommend you move the script to your home folder for ease of access. Do this by running the command below.
20 |
21 | ```
22 | cp ~/catkin_ws/src/iq_sim/scripts/startsitl.sh ~
23 | ```
24 | Now we can launch the ArduPilot SITL by running
25 | ```
26 | ~/startsitl.sh
27 | ```
28 |
29 | ## Introduction to ROS Command-Line Tools
30 |
31 | In a new terminal (ctrl+shift+T)
32 |
33 | ```
34 | rostopic list
35 | ```
36 | you should see the following
37 | ```
38 | /clock
39 | /gazebo/link_states
40 | /gazebo/model_states
41 | /gazebo/parameter_descriptions
42 | /gazebo/parameter_updates
43 | /gazebo/set_link_state
44 | /gazebo/set_model_state
45 | /rosout
46 | /rosout_agg
47 | /webcam/camera_info
48 | /webcam/image_raw
49 | /webcam/image_raw/compressed
50 | /webcam/image_raw/compressed/parameter_descriptions
51 | /webcam/image_raw/compressed/parameter_updates
52 | /webcam/image_raw/compressedDepth
53 | /webcam/image_raw/compressedDepth/parameter_descriptions
54 | /webcam/image_raw/compressedDepth/parameter_updates
55 | /webcam/image_raw/theora
56 | /webcam/image_raw/theora/parameter_descriptions
57 | /webcam/image_raw/theora/parameter_updates
58 | /webcam/parameter_descriptions
59 | /webcam/parameter_updates
60 |
61 | ```
62 | This shows the different topics currently being published. These topics contain data from different sources, such as images from a camera.
63 |
64 | We can see what data is being published by running the following
65 | ```
66 | rostopic echo /gazebo/model_states
67 | ```
68 |
69 | Now if we fly the drone, we will be able to see changes in position as we fly around.
70 |
71 | fly the drone by running the following in the mavproxy terminal as we did in the previous tutorial.
72 |
73 | ```
74 | mode guided
75 | arm throttle
76 | takeoff 15
77 | ```
78 |
79 | ## Using MAVROS to get telemetry data from the FCU
80 |
81 | Now, the topic `/gazebo/model_states` is the true model position in the simulator. This isn't something we can use in real life. In real life, we have to use the estimate of the drone's position, which is formulated from a combination of its sensors. This position is transmitted using a communication protocol called MAVLink. These messages are stripped down and are optimized for radio transmission. MAVROS is a middleman which translates the MAVLink messages into ROS messages, which are easy to use and common between different robot systems. To start MAVROS, run
82 |
83 | ```
84 | roslaunch iq_sim apm.launch
85 | ```
86 | when you run `rostopic list` you should see a bunch of mavros topics
87 | ```
88 | /clock
89 | /diagnostics
90 | /gazebo/link_states
91 | /gazebo/model_states
92 | /gazebo/parameter_descriptions
93 | /gazebo/parameter_updates
94 | /gazebo/set_link_state
95 | /gazebo/set_model_state
96 | /mavlink/from
97 | /mavlink/to
98 | /mavros/adsb/send
99 | /mavros/adsb/vehicle
100 | /mavros/battery
101 | /mavros/cam_imu_sync/cam_imu_stamp
102 | /mavros/companion_process/status
103 | /mavros/distance_sensor/rangefinder_pub
104 | /mavros/distance_sensor/rangefinder_sub
105 | /mavros/extended_state
106 | /mavros/fake_gps/mocap/tf
107 | /mavros/global_position/compass_hdg
108 | /mavros/global_position/global
109 | /mavros/global_position/gp_lp_offset
110 | /mavros/global_position/gp_origin
111 | /mavros/global_position/home
112 | /mavros/global_position/local
113 | /mavros/global_position/raw/fix
114 | /mavros/global_position/raw/gps_vel
115 | /mavros/global_position/rel_alt
116 | /mavros/global_position/set_gp_origin
117 | /mavros/gps_rtk/send_rtcm
118 | /mavros/home_position/home
119 | /mavros/home_position/set
120 | /mavros/imu/data
121 | /mavros/imu/data_raw
122 | /mavros/imu/diff_pressure
123 | /mavros/imu/mag
124 | /mavros/imu/static_pressure
125 | /mavros/imu/temperature_baro
126 | /mavros/imu/temperature_imu
127 | /mavros/landing_target/lt_marker
128 | /mavros/landing_target/pose
129 | /mavros/landing_target/pose_in
130 | /mavros/local_position/accel
131 | /mavros/local_position/odom
132 | /mavros/local_position/pose
133 | /mavros/local_position/pose_cov
134 | /mavros/local_position/velocity_body
135 | /mavros/local_position/velocity_body_cov
136 | /mavros/local_position/velocity_local
137 | /mavros/log_transfer/raw/log_data
138 | /mavros/log_transfer/raw/log_entry
139 | /mavros/manual_control/control
140 | /mavros/manual_control/send
141 | /mavros/mission/reached
142 | /mavros/mission/waypoints
143 | /mavros/mocap/pose
144 | /mavros/obstacle/send
145 | /mavros/odometry/in
146 | /mavros/odometry/out
147 | /mavros/param/param_value
148 | /mavros/radio_status
149 | /mavros/rangefinder/rangefinder
150 | /mavros/rc/in
151 | /mavros/rc/out
152 | /mavros/rc/override
153 | /mavros/setpoint_accel/accel
154 | /mavros/setpoint_attitude/cmd_vel
155 | /mavros/setpoint_attitude/thrust
156 | /mavros/setpoint_position/global
157 | /mavros/setpoint_position/local
158 | /mavros/setpoint_raw/attitude
159 | /mavros/setpoint_raw/global
160 | /mavros/setpoint_raw/local
161 | /mavros/setpoint_raw/target_attitude
162 | /mavros/setpoint_raw/target_global
163 | /mavros/setpoint_raw/target_local
164 | /mavros/setpoint_velocity/cmd_vel
165 | /mavros/setpoint_velocity/cmd_vel_unstamped
166 | /mavros/state
167 | /mavros/statustext/recv
168 | /mavros/statustext/send
169 | /mavros/time_reference
170 | /mavros/timesync_status
171 | /mavros/trajectory/desired
172 | /mavros/trajectory/generated
173 | /mavros/trajectory/path
174 | /mavros/vfr_hud
175 | /mavros/vision_pose/pose
176 | /mavros/vision_pose/pose_cov
177 | /mavros/wind_estimation
178 | /rosout
179 | /rosout_agg
180 | /tf
181 | /tf_static
182 | /webcam/camera_info
183 | /webcam/image_raw
184 | /webcam/image_raw/compressed
185 | /webcam/image_raw/compressed/parameter_descriptions
186 | /webcam/image_raw/compressed/parameter_updates
187 | /webcam/image_raw/compressedDepth
188 | /webcam/image_raw/compressedDepth/parameter_descriptions
189 | /webcam/image_raw/compressedDepth/parameter_updates
190 | /webcam/image_raw/theora
191 | /webcam/image_raw/theora/parameter_descriptions
192 | /webcam/image_raw/theora/parameter_updates
193 | /webcam/parameter_descriptions
194 | /webcam/parameter_updates
195 | ```
196 | Now we can see the drone's position in its local frame by running
197 |
198 | ```
199 | rostopic echo /mavros/global_position/local
200 | ```
201 |
202 | In the following tutorials we will be accessing the data on these topics in our C++ programs. To see the type of message being published run
203 | ```
204 | rostopic list -v /mavros/global_position/local
205 | ```
206 | We see that the topic is publishing the message in the form of `nav_msgs/Odometry`
207 |
208 | to see the structure of the message you can run the following
209 | ```
210 | rosmsg show nav_msgs/Odometry
211 | ```
212 | This will be useful when writing publishers and subscribers in the future
213 |
214 |
215 |
216 |
217 |
218 |
--------------------------------------------------------------------------------
/docs/search_and_rescue.md:
--------------------------------------------------------------------------------
1 | # Simple Search and Rescue Program
2 |
3 | ## Video Tutorial at https://youtu.be/LYtpJqYYMB0
4 |
5 | In this tutorial, we will be combining all of our skills to make a drone that will autonomously look for a missing hiker in the hills. We will use YOLO to identify the person, and we will use a modified version of our waypoint program to control where the drone goes.
6 |
7 | ## Create a New File in iq_gnc
8 |
9 | Create a file in `iq_gnc/src` called `sr.cpp`
10 |
11 | ## Add the cpp File to CMakeLists
12 |
13 | add the following lines to the `CMakeLists.txt`, so catkin can build our program
14 |
15 | ```
16 | add_executable(sr src/sr.cpp)
17 | target_link_libraries(sr ${catkin_LIBRARIES})
18 | ```
19 |
20 | ## Combine sub.cpp and square.cpp
21 |
22 | copy in sub.cpp
23 | ```
24 | #include
25 | #include
26 |
27 | void detection_cb(const darknet_ros_msgs::BoundingBoxes::ConstPtr& msg)
28 | {
29 | for( int i=0; i < msg->bounding_boxes.size(); i++)
30 | {
31 | ROS_INFO("%s detected", msg->bounding_boxes[i].Class.c_str());
32 | }
33 |
34 | }
35 |
36 |
37 | int main(int argc, char **argv)
38 | {
39 | ros::init(argc, argv, "detection_sub");
40 | ros::NodeHandle n;
41 | ros::Subscriber sub = n.subscribe("/darknet_ros/bounding_boxes", 1, detection_cb);
42 |
43 | ros::spin();
44 |
45 | return 0;
46 | }
47 | ```
48 |
49 | delete the line
50 | ```
51 | ros::spin();
52 | ```
53 |
54 | then add square.cpp headers
55 | ```
56 | #include
57 | ```
58 |
59 | Add the following lines from square.cpp
60 | ```
61 | //initialize control publisher/subscribers
62 | init_publisher_subscriber(gnc_node);
63 |
64 | // wait for FCU connection
65 | wait4connect();
66 |
67 | //wait for used to switch to mode GUIDED
68 | wait4start();
69 |
70 | //create local reference frame
71 | initialize_local_frame();
72 |
73 | //request takeoff
74 | takeoff(3);
75 | ```
76 |
77 | be sure to change `init_publisher_subscriber(gnc_node);` to `init_publisher_subscriber(n);`
78 |
79 | now we will generate a search waypoint pattern. add
80 | ```
81 | //specify some waypoints
82 | std::vector waypointList;
83 | gnc_api_waypoint nextWayPoint;
84 | float range = 50;
85 | float spacing = 10;
86 | int rows = 5;
87 | int row;
88 | for(int i=0; i<5; i++)
89 | {
90 | row = i*2;
91 | nextWayPoint.x = row*spacing;
92 | nextWayPoint.y = 0;
93 | nextWayPoint.z = 10;
94 | nextWayPoint.psi = 0;
95 | waypointList.push_back(nextWayPoint);
96 |
97 | nextWayPoint.x = row*spacing;
98 | nextWayPoint.y = range;
99 | nextWayPoint.z = 10;
100 | nextWayPoint.psi = 0;
101 | waypointList.push_back(nextWayPoint);
102 |
103 | nextWayPoint.x = (row+1)*spacing;
104 | nextWayPoint.y = range;
105 | nextWayPoint.z = 10;
106 | nextWayPoint.psi = 0;
107 | waypointList.push_back(nextWayPoint);
108 |
109 | nextWayPoint.x = (row+1)*spacing;
110 | nextWayPoint.y = 0;
111 | nextWayPoint.z = 10;
112 | nextWayPoint.psi = 0;
113 | waypointList.push_back(nextWayPoint);
114 | }
115 | ```
116 |
117 | then add square.cpp's control loop
118 | ```
119 | ros::Rate rate(2.0);
120 | int counter = 0;
121 | while(ros::ok())
122 | {
123 | ros::spinOnce();
124 | rate.sleep();
125 | if(check_waypoint_reached(.3) == 1)
126 | {
127 | if(counter < waypointList.size())
128 | {
129 | set_destination(waypointList[counter].x, waypointList[counter].y, waypointList[counter].z, waypointList[counter].psi);
130 | counter++;
131 | }else{
132 | land();
133 | }
134 | }
135 | }
136 | ```
137 |
138 | ## Introduction to Flight Modes
139 |
140 | It is common in robotics for the platform to perform different tasks based on the mode it is currently in. For our program we will have two modes. `mode - 0` will be a search mode where the drone searches for the hiker, and `mode - 1` will be rescue mode where the drone delivers the rescue supplies (e.g. first aid kit, food, water).
141 |
142 | we will add a global variable above the detection callback called `mode_g`. I use `_g` to denote my global variables.
143 | ```
144 | int mode_g = 0;
145 | ```
146 |
147 | ## Flight mode Check
148 |
149 | Add a couple if statements to check which flight mode the drone is in, so that the while loop looks like so.
150 |
151 | ```
152 | while(ros::ok())
153 | {
154 | if(mode_g == 0)
155 | {
156 | ros::spinOnce();
157 | rate.sleep();
158 | if(check_waypoint_reached(.3) == 1)
159 | {
160 | if (counter < waypointList.size())
161 | {
162 | set_destination(waypointList[counter].x,waypointList[counter].y,waypointList[counter].z, waypointList[counter].psi);
163 | counter++;
164 | }else{
165 | //land after all waypoints are reached
166 | land();
167 | }
168 | }
169 | }
170 | if(mode_g == 1)
171 | {
172 | //rescue operation
173 | }
174 |
175 | }
176 | ```
177 |
178 | ## Rescue Flight Mode
179 |
180 | We want the drone to land and deliver the rescue supplies when we find the missing person, so let's add that functionality real quick.
181 |
182 | ```
183 | land();
184 | ROS_INFO("Landing Started");
185 | break;
186 | ```
187 |
188 | ## Completed Program
189 |
190 | ```
191 | #include
192 | #include
193 | #include
194 |
195 | // mode_g denotes the flight opperations
196 | // 0 - search
197 | // 1 - rescue
198 | int mode_g = 0;
199 |
200 | void detection_cb(const darknet_ros_msgs::BoundingBoxes::ConstPtr& msg)
201 | {
202 | for( int i=0; i < msg->bounding_boxes.size(); i++)
203 | {
204 | ROS_INFO("%s detected", msg->bounding_boxes[i].Class.c_str());
205 | if(msg->bounding_boxes[i].Class == "person")
206 | {
207 | mode_g = 1;
208 | ROS_INFO("Person found. Starting Rescue Operation");
209 | }
210 | }
211 |
212 | }
213 |
214 |
215 | int main(int argc, char **argv)
216 | {
217 | //initialize ros
218 | ros::init(argc, argv, "gnc_node");
219 | ros::NodeHandle n;
220 | ros::Subscriber sub = n.subscribe("/darknet_ros/bounding_boxes", 1, detection_cb);
221 | //initialize control publisher/subscribers
222 | init_publisher_subscriber(n);
223 |
224 | // wait for FCU connection
225 | wait4connect();
226 |
227 | //wait for used to switch to mode GUIDED
228 | wait4start();
229 |
230 | //create local reference frame
231 | initialize_local_frame();
232 |
233 | //request takeoff
234 | takeoff(10);
235 |
236 |
237 | //specify some waypoints
238 | std::vector waypointList;
239 | gnc_api_waypoint nextWayPoint;
240 | float range = 50;
241 | float spacing = 10;
242 | int rows = 5;
243 | int row;
244 | for(int i=0; i<5; i++)
245 | {
246 | row = i*2;
247 | nextWayPoint.x = row*spacing;
248 | nextWayPoint.y = 0;
249 | nextWayPoint.z = 10;
250 | nextWayPoint.psi = 0;
251 | waypointList.push_back(nextWayPoint);
252 |
253 | nextWayPoint.x = row*spacing;
254 | nextWayPoint.y = range;
255 | nextWayPoint.z = 10;
256 | nextWayPoint.psi = 0;
257 | waypointList.push_back(nextWayPoint);
258 |
259 | nextWayPoint.x = (row+1)*spacing;
260 | nextWayPoint.y = range;
261 | nextWayPoint.z = 10;
262 | nextWayPoint.psi = 0;
263 | waypointList.push_back(nextWayPoint);
264 |
265 | nextWayPoint.x = (row+1)*spacing;
266 | nextWayPoint.y = 0;
267 | nextWayPoint.z = 10;
268 | nextWayPoint.psi = 0;
269 | waypointList.push_back(nextWayPoint);
270 | }
271 |
272 | //specify control loop rate. We recommend a low frequency to not over load the FCU with messages. Too many messages will cause the drone to be sluggish
273 | ros::Rate rate(2.0);
274 | int counter = 0;
275 | while(ros::ok())
276 | {
277 | if(mode_g == 0)
278 | {
279 | ros::spinOnce();
280 | rate.sleep();
281 | if(check_waypoint_reached(.3) == 1)
282 | {
283 | if (counter < waypointList.size())
284 | {
285 | set_destination(waypointList[counter].x,waypointList[counter].y,waypointList[counter].z, waypointList[counter].psi);
286 | counter++;
287 | }else{
288 | //land after all waypoints are reached
289 | land();
290 | }
291 | }
292 | }
293 | if(mode_g == 1)
294 | {
295 | land();
296 | ROS_INFO("Landing Started");
297 | break;
298 | }
299 |
300 | }
301 |
302 | return 0;
303 | }
304 | ```
305 |
306 |
--------------------------------------------------------------------------------
/docs/gnc_tutorial.md:
--------------------------------------------------------------------------------
1 | # Guidance Navigation and Control
2 |
3 | The following tutorial will show you how to make a simple program that allows you to send your drone to waypoints. This tutorial uses my API, which has a bunch of high level functions that handle the various flight operations including, takeoff, land, waypoint nav and all the reference frames associated with the navigation. The documentation for these GNC functions are available **[here](GNC_functions_documentation.md)**
4 |
5 | ### Video Tutorial Part 1 at https://youtu.be/eRAfeC8OFfs
6 |
7 | ## Make sure you have a text editor
8 | As this is the first tutorial where we will be coding, please make sure you have a text editor. My preferred text editor is Sublime. You can download it by running the commands below
9 | ```
10 | wget -qO - https://download.sublimetext.com/sublimehq-pub.gpg | sudo apt-key add -
11 | echo "deb https://download.sublimetext.com/ apt/stable/" | sudo tee /etc/apt/sources.list.d/sublime-text.list
12 | sudo apt-get update
13 | sudo apt-get install sublime-text
14 | ```
15 |
16 | ## Clone the IQ GNC ROS package
17 |
18 | First, we will clone the IQ GNC ROS package. This ROS package comes with my GNC API that will make scripting the drone easy. It will also come with a worked out solution to this tutorial
19 | ```
20 | git clone https://github.com/Intelligent-Quads/iq_gnc.git
21 | ```
22 |
23 | this package contains the file `gnc_functions.hpp`. This file contains a bunch of useful functions for creating intelligent drone applications.
24 |
25 | ## Write a small program to navigate your drone
26 |
27 | Once you have cloned the `iq_gnc` package, create a new file called `square.cpp` in `iq_gnc/src`. Then open the file `CMakeLists.txt` and add the following to the bottom.
28 | ```
29 | add_executable(square src/square.cpp)
30 | target_link_libraries(square ${catkin_LIBRARIES})
31 | ```
32 |
33 | First we will include our control functions
34 | ```
35 | #include
36 | ```
37 | This will allow us to use all of our control functions and structures
38 |
39 | Next add the main function and initialize ros
40 | ```
41 | int main(int argc, char** argv)
42 | {
43 | //initialize ros
44 | ros::init(argc, argv, "gnc_node");
45 | ros::NodeHandle gnc_node;
46 |
47 |
48 | //Rest of code here
49 |
50 |
51 | }
52 | ```
53 |
54 | We will then add the function `init_publisher_subscriber()`. This function takes our ROS node handle as an input and initializes subscribers that will collect the necessary information from our autopilot. Add the following
55 |
56 | ```
57 | //initialize control publisher/subscribers
58 | init_publisher_subscriber(gnc_node);
59 | ```
60 |
61 | we will then add the following functions to handle preflight operations
62 | ```
63 | // wait for FCU connection
64 | wait4connect();
65 |
66 | //wait for used to switch to mode GUIDED
67 | wait4start();
68 |
69 | //create local reference frame
70 | initialize_local_frame();
71 | ```
72 | The function `wait4connect()` will loop until the node can communicate with the flight control unit (FCU). Once the connection with the FCU is established, we will use the function `wait4start()` to hold the program until the pilot executes the program by switching the FCU flight mode to GUIDED. This can be done from a ground control station (GCS) such as Mission Planner or QGroundControl, or from a switch on a radio controller. Finally, once the command to execute the mission is sent, you will use the function `initialize_local_frame()` to create your navigation frame. This function creates the local reference frame based on the starting location of the drone.
73 |
74 | Next we will request takeoff using the function `takeoff(float takeOffHeight)`. Add
75 | ```
76 | //request takeoff
77 | takeoff(3);
78 | ```
79 |
80 | The GNC API contains the structure `gnc_api_waypoint` this structure contains the variables `x y z psi` which you can use to set locations and orientations of your drone.
81 |
82 | To make your drone fly in a square specify the following waypoints
83 | ```
84 | //specify some waypoints
85 | std::vector waypointList;
86 | gnc_api_waypoint nextWayPoint;
87 | nextWayPoint.x = 0;
88 | nextWayPoint.y = 0;
89 | nextWayPoint.z = 3;
90 | nextWayPoint.psi = 0;
91 | waypointList.push_back(nextWayPoint);
92 | nextWayPoint.x = 5;
93 | nextWayPoint.y = 0;
94 | nextWayPoint.z = 3;
95 | nextWayPoint.psi = -90;
96 | waypointList.push_back(nextWayPoint);
97 | nextWayPoint.x = 5;
98 | nextWayPoint.y = 5;
99 | nextWayPoint.z = 3;
100 | nextWayPoint.psi = 0;
101 | waypointList.push_back(nextWayPoint);
102 | nextWayPoint.x = 0;
103 | nextWayPoint.y = 5;
104 | nextWayPoint.z = 3;
105 | nextWayPoint.psi = 90;
106 | waypointList.push_back(nextWayPoint);
107 | nextWayPoint.x = 0;
108 | nextWayPoint.y = 0;
109 | nextWayPoint.z = 3;
110 | nextWayPoint.psi = 180;
111 | waypointList.push_back(nextWayPoint);
112 | ```
113 |
114 | Finally we will add our control loop
115 | ```
116 | //specify control loop rate. We recommend a low frequency to not over load the FCU with messages. Too many messages will cause the drone to be sluggish
117 | ros::Rate rate(2.0);
118 | int counter = 0;
119 | while(ros::ok())
120 | {
121 | ros::spinOnce();
122 | rate.sleep();
123 | if(check_waypoint_reached() == 1)
124 | {
125 | if (counter < waypointList.size())
126 | {
127 | set_destination(waypointList[counter].x,waypointList[counter].y,waypointList[counter].z, waypointList[counter].psi);
128 | counter++;
129 | }else{
130 | //land after all waypoints are reached
131 | land();
132 | }
133 | }
134 |
135 | }
136 | return 0;
137 | }
138 | ```
139 | This loop will continually send the requested destination to the FCU via the `set_destination(x, y, z, psi)` function. The loop uses the function `check_waypoint_reached()` to determine when the drone has arrived at the requested destination. The function returns 1 or 0, where 1 denotes the drone has arrived. Each time a waypoint is reached, the vector of waypoints is iterated to request the next waypoint via the variable `counter`. Finally, the drone will land via the land function `land()`
140 |
141 |
142 | Your program should look like the following
143 | src/controlAPIExample.cpp
144 |
145 | ```
146 | #include
147 | //include API
148 |
149 | int main(int argc, char** argv)
150 | {
151 | //initialize ros
152 | ros::init(argc, argv, "gnc_node");
153 | ros::NodeHandle gnc_node;
154 |
155 | //initialize control publisher/subscribers
156 | init_publisher_subscriber(gnc_node);
157 |
158 | // wait for FCU connection
159 | wait4connect();
160 |
161 | //wait for used to switch to mode GUIDED
162 | wait4start();
163 |
164 | //create local reference frame
165 | initialize_local_frame();
166 |
167 | //request takeoff
168 | takeoff(3);
169 |
170 | //specify some waypoints
171 | std::vector waypointList;
172 | gnc_api_waypoint nextWayPoint;
173 | nextWayPoint.x = 0;
174 | nextWayPoint.y = 0;
175 | nextWayPoint.z = 3;
176 | nextWayPoint.psi = 0;
177 | waypointList.push_back(nextWayPoint);
178 | nextWayPoint.x = 5;
179 | nextWayPoint.y = 0;
180 | nextWayPoint.z = 3;
181 | nextWayPoint.psi = -90;
182 | waypointList.push_back(nextWayPoint);
183 | nextWayPoint.x = 5;
184 | nextWayPoint.y = 5;
185 | nextWayPoint.z = 3;
186 | nextWayPoint.psi = 0;
187 | waypointList.push_back(nextWayPoint);
188 | nextWayPoint.x = 0;
189 | nextWayPoint.y = 5;
190 | nextWayPoint.z = 3;
191 | nextWayPoint.psi = 90;
192 | waypointList.push_back(nextWayPoint);
193 | nextWayPoint.x = 0;
194 | nextWayPoint.y = 0;
195 | nextWayPoint.z = 3;
196 | nextWayPoint.psi = 180;
197 | waypointList.push_back(nextWayPoint);
198 | nextWayPoint.x = 0;
199 | nextWayPoint.y = 0;
200 | nextWayPoint.z = 3;
201 | nextWayPoint.psi = 0;
202 | waypointList.push_back(nextWayPoint);
203 |
204 |
205 | //specify control loop rate. We recommend a low frequency to not over load the FCU with messages. Too many messages will cause the drone to be sluggish
206 | ros::Rate rate(2.0);
207 | int counter = 0;
208 | while(ros::ok())
209 | {
210 | ros::spinOnce();
211 | rate.sleep();
212 | if(check_waypoint_reached(.3) == 1)
213 | {
214 | if (counter < waypointList.size())
215 | {
216 | set_destination(waypointList[counter].x,waypointList[counter].y,waypointList[counter].z, waypointList[counter].psi);
217 | counter++;
218 | }else{
219 | //land after all waypoints are reached
220 | land();
221 | }
222 | }
223 |
224 | }
225 | return 0;
226 | }
227 |
228 |
229 | ```
230 | ## Build code
231 | ```
232 | cd ~/catkin_ws
233 | catkin build
234 | source ~/.bashrc
235 | ```
236 |
237 | ## Run example code
238 |
239 | ```
240 | roslaunch iq_sim runway.launch
241 | # New Terminal
242 | ./startsitl.sh
243 | # New Terminal
244 | roslaunch iq_sim apm.launch
245 | # New Terminal
246 | rosrun iq_gnc square
247 | ```
248 | **NOTE**: you can tile gnome terminals by pressing `ctrl + shift + t`
249 |
250 | Finally run the mission by changing the flight mode to guided in the MAVproxy terminal by running
251 | ```
252 | mode guided
253 | ```
254 |
255 | You should now have a basic understanding of the functions available for controlling your drone. You should be able to use these functions to help you make more complex navigation code.
256 |
257 |
258 |
259 |
260 |
--------------------------------------------------------------------------------
/docs/adding_a_sensor.md:
--------------------------------------------------------------------------------
1 | # Adding a Sensor to a Gazebo Robot
2 |
3 | ## Video Tutorial at https://youtu.be/0zQ8iFgw6uk
4 |
5 | This tutorial will go over how to add an existing Gazebo sensor to an existing Gazebo robot.
6 |
7 | ## Available Plugins List
8 |
9 | To see a list of available gazebo sensor plugins take a look at this [link](http://gazebosim.org/tutorials?tut=ros_gzplugins#Pluginsavailableingazebo_plugins)
10 |
11 | ## Add a 2d LiDAR to our Drone Model
12 |
13 | There are 2 ways to add a sensor to a model. The first is to modify a model's sdf file. The second is to add the sensor with the `` tags inside the world file. The second method is what we will focus on today.
14 |
15 | Open the file `runway.world` in the folder `iq_sim/worlds`. Then scroll to the line
16 | ```
17 |
18 | ```
19 | This line is where our drone is specified. You will notice a camera already attached to the drone. We are going to use a similar method to add our 2D lidar.
20 |
21 | We will then add the following lines below the `` tag for the camera.
22 |
23 | ```
24 |
25 |
26 | 0 0 0 0 0 0
27 |
28 | 0 0 0.3 0 0 0
29 |
30 |
31 | 0.1 0.1 0.1
32 |
33 |
34 |
35 |
36 | 0 0 0.27 0 0 0
37 |
38 |
39 | model://hokuyo/meshes/hokuyo.dae
40 |
41 |
42 |
43 |
44 | 0.016
45 |
46 | 0.0001
47 | 0
48 | 0
49 | 0.0001
50 | 0
51 | 0.0001
52 |
53 |
54 |
55 |
56 |
57 | 0 0 0.3 0 0 1.57
58 | true
59 | 10
60 |
61 |
62 |
63 | 1024
64 | 1
65 | -3.141593
66 | 3.141593
67 |
68 |
69 |
70 | 0.1
71 | 30
72 | 0.1
73 |
74 |
79 |
80 |
81 |
82 | /spur/laser/scan
83 | /hokuyo_sensor_link
84 |
85 |
86 |
87 |
88 |
89 | 0 0 0 0 0 0
90 | iris::iris_demo::iris::base_link
91 | hokuyo_link
92 |
93 |
94 | ```
95 |
96 | ## Key Tags
97 |
98 | - `` specify a position of an object with respect to the last object in the hierarchy
99 |
100 | - `` specifies an object to be attached to the model
101 |
102 | - `` specifies how the link is attached to the model
103 |
104 |
105 | ## Completed World File
106 |
107 | `runway.world` should look like this
108 |
109 | ```
110 |
111 |
112 |
113 |
114 |
115 |
116 | quick
117 | 100
118 | 1.0
119 |
120 |
121 | 0.0
122 | 0.9
123 | 0.1
124 | 0.0
125 |
126 |
127 | -1
128 |
129 |
130 |
131 |
132 | true
133 |
134 |
135 |
136 |
137 | 0 0 1
138 | 5000 5000
139 |
140 |
141 |
142 |
143 |
144 | 1
145 | 1
146 |
147 |
148 |
149 |
150 |
151 | 000 0 0.005 0 0 0
152 | false
153 |
154 |
155 | 0 0 1
156 | 1829 45
157 |
158 |
159 |
160 |
164 |
165 |
166 |
167 |
168 | 0 0 -0.1 0 0 0
169 | false
170 |
171 |
172 | 0 0 1
173 | 5000 5000
174 |
175 |
176 |
177 |
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 | model://sun
189 |
190 |
191 |
192 | model://iris_with_standoffs_demo
193 |
194 | 0 0 0 0 0 0
195 |
196 |
197 |
198 |
199 | 0 -0.01 0.070 1.57 0 1.57
200 |
201 | 0 0 0 0 0 0
202 | 0.1
203 |
204 | 0.001
205 | 0
206 | 0
207 | 0.001
208 | 0
209 | 0.001
210 |
211 |
212 |
213 | 0 0 0 0 0 0
214 |
215 |
216 | 0.025
217 | 0.025
218 |
219 |
220 |
221 |
225 |
226 |
227 |
228 |
229 | 0 0 0 -1.57 -1.57 0
230 |
231 | 1.0472
232 |
233 | 640
234 | 480
235 |
236 |
237 | 0.05
238 | 1000
239 |
240 |
241 | 1
242 | 10
243 | true
244 |
245 |
248 |
249 | true
250 | 0.0
251 | webcam
252 | image_raw
253 | camera_info
254 | camera_link
255 | 0.07
256 | 0.0
257 | 0.0
258 | 0.0
259 | 0.0
260 | 0.0
261 |
262 |
263 |
264 |
265 |
266 |
267 |
268 |
269 | 0 0 0.0 0 0 0
270 | iris::iris_demo::gimbal_small_2d::tilt_link
271 | camera
272 |
273 |
274 | 0
275 | 0
276 |
277 | 0 0 1
278 | true
279 |
280 |
281 |
282 |
283 |
284 |
285 |
286 | 0 0 0 0 0 0
287 |
288 | 0 0 0.3 0 0 0
289 |
290 |
291 | 0.1 0.1 0.1
292 |
293 |
294 |
295 |
296 | 0 0 0.27 0 0 0
297 |
298 |
299 | model://hokuyo/meshes/hokuyo.dae
300 |
301 |
302 |
303 |
304 | 0.016
305 |
306 | 0.0001
307 | 0
308 | 0
309 | 0.0001
310 | 0
311 | 0.0001
312 |
313 |
314 |
315 |
316 |
317 | 0 0 0.3 0 0 1.57
318 | true
319 | 10
320 |
321 |
322 |
323 | 1024
324 | 1
325 | -3.141593
326 | 3.141593
327 |
328 |
329 |
330 | 0.1
331 | 30
332 | 0.1
333 |
334 |
339 |
340 |
341 |
342 | /spur/laser/scan
343 | /hokuyo_sensor_link
344 |
345 |
346 |
347 |
348 |
349 | 0 0 0 0 0 0
350 | iris::iris_demo::iris::base_link
351 | hokuyo_link
352 |
353 |
354 |
355 |
356 |
357 |
358 |
359 | ```
360 |
361 | ---
362 |
363 | ### Resources
364 |
365 | - http://gazebosim.org/tutorials?tut=ros_gzplugins#Pluginsavailableingazebo_plugins
366 |
367 |
368 |
369 |
370 |
371 |
--------------------------------------------------------------------------------