├── mocha_core
│   ├── msg
│   │   ├── Header_pub.msg
│   │   ├── SM_state.msg
│   │   └── Client_stats.msg
│   ├── srv
│   │   ├── SelectDB.srv
│   │   ├── AddUpdateDB.srv
│   │   └── GetDataHeaderDB.srv
│   ├── config
│   │   ├── testConfigs
│   │   │   ├── topic_configs.yaml
│   │   │   └── robot_configs.yaml
│   │   ├── robot_configs.yaml
│   │   ├── topic_configs.yaml
│   │   └── radio_configs.yaml
│   ├── launch
│   │   └── database_translators_publishers.launch
│   ├── scripts
│   │   ├── core
│   │   │   ├── test
│   │   │   │   ├── run_tests.sh
│   │   │   │   ├── test_multiple_robots_sync.py
│   │   │   │   ├── sample_db.py
│   │   │   │   ├── test_zmq_comm_node.py
│   │   │   │   ├── test_delay_2robots.py
│   │   │   │   ├── test_database_utils.py
│   │   │   │   ├── test_database.py
│   │   │   │   ├── test_synchronize_channel.py
│   │   │   │   └── test_database_server.py
│   │   │   ├── database_server.py
│   │   │   ├── hash_comm.py
│   │   │   ├── integrate_database.py
│   │   │   ├── database_utils.py
│   │   │   ├── zmq_comm_node.py
│   │   │   ├── database.py
│   │   │   └── synchronize_channel.py
│   │   ├── translators
│   │   │   └── translator.py
│   │   └── publishers
│   │       └── topic_publisher.py
│   ├── package.xml
│   └── CMakeLists.txt
├── mocha.gif
├── interface_rajant
│   ├── scripts
│   │   ├── thirdParty
│   │   │   └── .gitignore
│   │   ├── rajant_parser.py
│   │   └── rajant_query.py
│   ├── README.md
│   ├── launch
│   │   └── rajant_nodes.launch
│   ├── package.xml
│   └── CMakeLists.txt
├── .gitignore
├── mocha_launch
│   ├── launch
│   │   ├── titan.launch
│   │   ├── jackal.launch
│   │   └── basestation.launch
│   ├── package.xml
│   └── CMakeLists.txt
├── LICENSE
├── README.md
└── .github
    └── workflows
        └── build.yaml
/mocha_core/msg/Header_pub.msg:
--------------------------------------------------------------------------------
1 | uint8[] header
2 |
--------------------------------------------------------------------------------
/mocha.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KumarRobotics/MOCHA/HEAD/mocha.gif
--------------------------------------------------------------------------------
/mocha_core/msg/SM_state.msg:
--------------------------------------------------------------------------------
1 | std_msgs/Header header
2 | string state
3 |
--------------------------------------------------------------------------------
/interface_rajant/scripts/thirdParty/.gitignore:
--------------------------------------------------------------------------------
1 | lib
2 | target
3 | *.xml
4 | *.jar
5 | *.java
6 |
--------------------------------------------------------------------------------
/mocha_core/srv/SelectDB.srv:
--------------------------------------------------------------------------------
1 | uint8 robot_id
2 | uint8 topic_id
3 | time ts_limit
4 | ---
5 | uint8[] headers
6 |
--------------------------------------------------------------------------------
/mocha_core/msg/Client_stats.msg:
--------------------------------------------------------------------------------
1 | std_msgs/Header header
2 | string msg
3 | float64 rtt
4 | uint64 answ_len
5 | float64 bw
6 |
--------------------------------------------------------------------------------
/mocha_core/srv/AddUpdateDB.srv:
--------------------------------------------------------------------------------
1 | uint8 topic_id
2 | time timestamp
3 | uint8[] msg_content
4 | ---
5 | uint8[] new_header
6 |
--------------------------------------------------------------------------------
/mocha_core/srv/GetDataHeaderDB.srv:
--------------------------------------------------------------------------------
1 | uint8[] msg_header
2 | ---
3 | uint8 robot_id
4 | uint8 topic_id
5 | time timestamp
6 | uint8[] msg_content
7 |
--------------------------------------------------------------------------------
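The three service definitions above (`AddUpdateDB.srv`, `SelectDB.srv`, `GetDataHeaderDB.srv`) form the local API to the distributed database. A minimal client sketch, not part of the repository, is shown below; the `integrate_database/...` service names are taken from how `scripts/translators/translator.py` resolves `AddUpdateDB`, while the topic id and payload are placeholders.

```python
#!/usr/bin/env python3
# Hypothetical client for the mocha_core DB services. Service names assume the
# server node is called "integrate_database", as in translator.py; topic_id 0
# and the payload bytes are placeholders.
import rospy
import mocha_core.srv

rospy.init_node("db_client_example")

add_update = rospy.ServiceProxy("integrate_database/AddUpdateDB",
                                mocha_core.srv.AddUpdateDB)
get_data = rospy.ServiceProxy("integrate_database/GetDataHeaderDB",
                              mocha_core.srv.GetDataHeaderDB)
select = rospy.ServiceProxy("integrate_database/SelectDB",
                            mocha_core.srv.SelectDB)

rospy.wait_for_service("integrate_database/AddUpdateDB")

# Insert a (pre-serialized) message for topic 0; the response is its 6-byte header
answ = add_update(0, rospy.get_rostime(), b"serialized message bytes")
header = answ.new_header

# Retrieve the same message back through its header
data = get_data(header)
rospy.loginfo("robot %d, topic %d, %d bytes",
              data.robot_id, data.topic_id, len(data.msg_content))

# List stored headers for robot 0. The response is a serialized header list;
# database_utils.deserialize_headers() unpacks it. Topic/time filtering is
# still a TODO in database_server.py.
sel = select(0, 0, rospy.Time(0))
```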
/interface_rajant/README.md:
--------------------------------------------------------------------------------
1 | ## Interface Rajant
2 |
3 | This module interfaces with the Rajant Breadcrumb radios to obtain low-level
4 | information such as SNR and RSSI. Due to IP requirements, we are not able to
5 | provide the code for the API.
6 |
--------------------------------------------------------------------------------
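For reference, `rajant_parser.py` later in this export republishes the per-neighbor RSSI as `std_msgs/Int32` on `ddb/rajant/rssi/<robot>`. A minimal listener sketch (not part of the repository); the robot names are taken from `mocha_core/config/testConfigs/robot_configs.yaml` and should be adapted to your fleet:

```python
#!/usr/bin/env python3
# Minimal sketch: subscribe to the per-robot RSSI topics published by
# rajant_parser.py. Robot names below come from the test configs.
import rospy
from std_msgs.msg import Int32


def rssi_cb(msg, robot):
    # rajant_parser.py publishes -1 when a radio has not been heard from recently
    rospy.loginfo("RSSI towards %s: %d", robot, msg.data)


rospy.init_node("rssi_listener_example")
for robot in ["charon", "styx", "quad1"]:
    rospy.Subscriber("ddb/rajant/rssi/" + robot, Int32, rssi_cb,
                     callback_args=robot)
rospy.spin()
```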
/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__/
2 | *.pickle
3 | tags
4 | *.pyc
5 | *.out
6 | *.swp
7 | *.swo
8 |
9 | # Ignore legacy folder where we can store legacy stuff from the 70's
10 | legacy/
11 |
12 | # Ignore all log files generated by Rajant
13 | *.log
14 |
--------------------------------------------------------------------------------
/mocha_launch/launch/titan.launch:
--------------------------------------------------------------------------------
[launch file XML not preserved in this export]
--------------------------------------------------------------------------------
/mocha_launch/launch/jackal.launch:
--------------------------------------------------------------------------------
[launch file XML not preserved in this export]
--------------------------------------------------------------------------------
/mocha_launch/launch/basestation.launch:
--------------------------------------------------------------------------------
[launch file XML not preserved in this export]
--------------------------------------------------------------------------------
/mocha_core/config/testConfigs/topic_configs.yaml:
--------------------------------------------------------------------------------
1 | ground_robot:
2 | - msg_topic: "/odometry"
3 | msg_type: "nav_msgs/Odometry"
4 | msg_priority: "NO_PRIORITY"
5 | msg_history: "LAST_MESSAGE"
6 |
7 | - msg_topic: "/pose"
8 | msg_type: "geometry_msgs/PointStamped"
9 | msg_priority: "NO_PRIORITY"
10 | msg_history: "WHOLE_HISTORY"
11 |
12 | aerial_robot:
13 | - msg_topic: "/image"
14 | msg_type: "sensor_msgs/Image"
15 | msg_priority: "HIGH_PRIORITY"
16 | msg_history: "LAST_MESSAGE"
17 |
18 | - msg_topic: "/pose"
19 | msg_type: "geometry_msgs/PointStamped"
20 | msg_priority: "NO_PRIORITY"
21 | msg_history: "LAST_MESSAGE"
22 |
23 | base_station:
24 | - msg_topic: "/target_goals"
25 | msg_type: "geometry_msgs/PoseArray"
26 | msg_priority: "HIGH_PRIORITY"
27 | msg_history: "LAST_MESSAGE"
28 |
--------------------------------------------------------------------------------
/mocha_core/config/testConfigs/robot_configs.yaml:
--------------------------------------------------------------------------------
1 | basestation:
2 | node-type: "base_station"
3 | IP-address: "127.0.0.1"
4 | using-radio: "brick-1"
5 | base-port: "1234"
6 | clients:
7 | - "charon"
8 | - "styx"
9 | - "quad1"
10 |
11 | charon:
12 | node-type: "ground_robot"
13 | IP-address: "127.0.0.1"
14 | using-radio: "black-1"
15 | base-port: "2234"
16 | clients:
17 | - "basestation"
18 | - "styx"
19 | - "quad1"
20 |
21 | styx:
22 | node-type: "ground_robot"
23 | IP-address: "127.0.0.1"
24 | using-radio: "black-2"
25 | base-port: "3234"
26 | clients:
27 | - "basestation"
28 | - "charon"
29 |
30 |
31 | quad1:
32 | node-type: "aerial_robot"
33 | IP-address: "127.0.0.1"
34 | using-radio: "blue-1"
35 | base-port: "4234"
36 | clients:
37 | - "basestation"
38 | - "charon"
39 | - "quad1"
40 |
--------------------------------------------------------------------------------
/interface_rajant/launch/rajant_nodes.launch:
--------------------------------------------------------------------------------
[launch file XML not preserved in this export]
--------------------------------------------------------------------------------
/mocha_core/config/robot_configs.yaml:
--------------------------------------------------------------------------------
1 | basestation:
2 | node-type: "base_station"
3 | IP-address: "10.10.10.100"
4 | using-radio: "brick-1"
5 | base-port: "1234"
6 | clients:
7 | - "callisto"
8 | - "europa"
9 | - "io"
10 | - "titan"
11 |
12 | europa:
13 | node-type: "ground_robot"
14 | IP-address: "10.10.10.101"
15 | using-radio: "blue-2"
16 | base-port: "2234"
17 | clients:
18 | - "basestation"
19 | - "callisto"
20 | - "io"
21 | - "titan"
22 |
23 | callisto:
24 | node-type: "ground_robot"
25 | IP-address: "10.10.10.102"
26 | using-radio: "blue-6"
27 | base-port: "3234"
28 | clients:
29 | - "basestation"
30 | - "europa"
31 | - "io"
32 | - "titan"
33 |
34 | io:
35 | node-type: "ground_robot"
36 | IP-address: "10.10.10.103"
37 | using-radio: "blue-5"
38 | base-port: "4234"
39 | clients:
40 | - "basestation"
41 | - "europa"
42 | - "callisto"
43 | - "titan"
44 |
45 | titan:
46 | node-type: "aerial_robot"
47 | IP-address: "10.10.10.107"
48 | using-radio: "blue-1"
49 | base-port: "6234"
50 | clients:
51 | - "basestation"
52 | - "callisto"
53 | - "europa"
54 | - "io"
55 |
--------------------------------------------------------------------------------
/mocha_core/config/topic_configs.yaml:
--------------------------------------------------------------------------------
1 | ground_robot:
2 | - msg_topic: "/top_down_render/pose_est"
3 | msg_type: "geometry_msgs/PoseWithCovarianceStamped"
4 | msg_priority: "HIGH_PRIORITY"
5 | msg_history: "LAST_MESSAGE"
6 |
7 | - msg_topic: "/object_mapper/map"
8 | msg_type: "sensor_msgs/PointCloud2"
9 | msg_priority: "NO_PRIORITY"
10 | msg_history: "LAST_MESSAGE"
11 |
12 | - msg_topic: "/spomp_global/reachability_history"
13 | msg_type: "spomp/LocalReachabilityArray"
14 | msg_priority: "LOW_PRIORITY"
15 | msg_history: "LAST_MESSAGE"
16 |
17 | - msg_topic: "/spomp_global/path_viz"
18 | msg_type: "nav_msgs/Path"
19 | msg_priority: "MEDIUM_PRIORITY"
20 | msg_history: "LAST_MESSAGE"
21 |
22 | - msg_topic: "/goal_manager/claimed_goals"
23 | msg_type: "spomp/ClaimedGoalArray"
24 | msg_priority: "HIGH_PRIORITY"
25 | msg_history: "LAST_MESSAGE"
26 |
27 | aerial_robot:
28 | - msg_topic: "/asoom/map/compressed"
29 | msg_type: "grid_map_msgs/GridMapCompressed"
30 | msg_priority: "HIGH_PRIORITY"
31 | msg_history: "LAST_MESSAGE"
32 |
33 | - msg_topic: "/asoom/recent_key_pose"
34 | msg_type: "geometry_msgs/PoseStamped"
35 | msg_priority: "NO_PRIORITY"
36 | msg_history: "LAST_MESSAGE"
37 |
38 | base_station:
39 | - msg_topic: "/target_goals"
40 | msg_type: "geometry_msgs/PoseArray"
41 | msg_priority: "HIGH_PRIORITY"
42 | msg_history: "LAST_MESSAGE"
43 |
--------------------------------------------------------------------------------
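The `node-type` field of each entry in `robot_configs.yaml` is the key into `topic_configs.yaml`; this is the lookup that `translator.py` and `database_server.py` later in this export use to decide which topics a robot shares. A small sketch of that lookup (the relative paths assume it is run from the `mocha_core/` package directory):

```python
#!/usr/bin/env python3
# Sketch of the robot_configs -> topic_configs lookup used across mocha_core.
# Paths are relative to the mocha_core/ package directory.
import yaml

with open("config/robot_configs.yaml") as f:
    robot_configs = yaml.safe_load(f)
with open("config/topic_configs.yaml") as f:
    topic_configs = yaml.safe_load(f)

robot = "europa"  # any key of robot_configs
for topic_id, topic in enumerate(topic_configs[robot_configs[robot]["node-type"]]):
    print(topic_id, topic["msg_topic"], topic["msg_priority"], topic["msg_history"])
```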
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2023, KumarRobotics at University of Pennsylvania
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | * Redistributions of source code must retain the above copyright notice,
10 | this list of conditions and the following disclaimer.
11 | * Redistributions in binary form must reproduce the above copyright
12 | notice, this list of conditions and the following disclaimer in the
13 | documentation and/or other materials provided with the distribution.
14 | * Neither the name of University of Pennsylvania nor the names of its
15 | contributors may be used to endorse or promote products derived from
16 | this software without specific prior written permission.
17 |
18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
21 | ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
22 | LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
23 | CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
25 | INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
26 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
27 | ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
28 | POSSIBILITY OF SUCH DAMAGE.
29 |
--------------------------------------------------------------------------------
/mocha_core/launch/database_translators_publishers.launch:
--------------------------------------------------------------------------------
[launch file XML not preserved in this export]
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ☕ MOCHA: Multi-robot Opportunistic Communication for Heterogeneous Collaboration
2 | ---------------------------------------------------------------------------------
 3 | <img src="mocha.gif" alt="MOCHA demo"/>
4 |
5 | This repository contains the distributed and opportunistic communication stack used for multi-robot experiments at KumarRobotics.
6 |
7 | ## Directories
8 |
 9 | - `mocha_launch/`: launch files for the different robots in MOCHA. Each launch file
10 | sets up the `robot_name` argument.
11 | - `mocha_core/`: MOCHA's main components (source code, config files).
12 | - `interface_rajant/`: interface for the Rajant Breadcrumb radios.
13 |
14 | ## Dependencies:
15 |
16 | MOCHA requires `rospkg`, `defusedxml`, and `python3-zmq`. You may install these
17 | packages with:
18 |
19 | ```
20 | sudo apt update
21 | pip3 install rospkg
22 | pip3 install defusedxml
23 | sudo apt install python3-zmq
24 | ```
25 |
26 | ## Contribution - Questions
27 |
28 | Please [file an issue](https://github.com/KumarRobotics/MOCHA/issues) if you have any questions.
29 | Do not hesitate to [send your pull request](https://github.com/KumarRobotics/MOCHA/pulls).
30 |
31 | ## Citation
32 |
33 | If you find MOCHA useful, please cite:
34 |
35 | ```
36 | @INPROCEEDINGS{cladera2024enabling,
37 | author={Cladera, Fernando and Ravichandran, Zachary and Miller, Ian D. and Ani Hsieh, M. and Taylor, C. J. and Kumar, Vijay},
38 | booktitle={2024 IEEE International Conference on Robotics and Automation (ICRA)},
39 | title={{Enabling Large-scale Heterogeneous Collaboration with Opportunistic Communications}},
40 | year={2024},
41 | pages={2610-2616},
42 | doi={10.1109/ICRA57147.2024.10611469}
43 | }
44 | ```
45 |
--------------------------------------------------------------------------------
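The README stops short of a usage example, and the launch file bodies were not preserved in this export, so the exact launch arguments are unknown. Based on the file names under `mocha_launch/launch/` and the `robot_name` argument mentioned above, a plausible invocation is `roslaunch mocha_launch basestation.launch robot_name:=basestation` on the base station and `roslaunch mocha_launch titan.launch robot_name:=titan` on the aerial robot; check the launch files themselves for the actual interface.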
/.github/workflows/build.yaml:
--------------------------------------------------------------------------------
1 | # ROS build pipeline based on kr_mav_control build
2 | # https://github.com/KumarRobotics/kr_mav_control/blob/master/.github/workflows/build.yml
3 | name: build
4 |
5 | on:
6 | push:
7 | branches: [main]
8 | workflow_dispatch:
9 | schedule:
10 | - cron: '0 0 * * 0'
11 |
12 | jobs:
13 | build:
14 | strategy:
15 | matrix:
16 | ros_distro: [noetic]
17 |
18 | runs-on: ubuntu-latest
19 | container: osrf/ros:${{ matrix.ros_distro }}-desktop
20 | steps:
21 | - uses: actions/checkout@v3
22 |
23 | - name: Apt dependencies
24 | run: |
25 | apt-get update
26 | apt-get install -qy g++ libeigen3-dev git python3-catkin-tools
27 | apt-get install -qy python3-colorama python3-zmq python3-lz4
28 | rosdep update
29 | rosdep install --from-paths . --ignore-src -y -r --as-root apt:false
30 |
31 | - name: Setup catkin workspace
32 | run: |
33 | . /opt/ros/${{ matrix.ros_distro }}/setup.sh
34 | mkdir -p ${RUNNER_WORKSPACE}/catkin_ws/src
35 | cd ${RUNNER_WORKSPACE}/catkin_ws
36 | catkin init
37 | catkin build -j2 --no-status -DCMAKE_BUILD_TYPE=Release
38 |
39 | - name: Build workspace
40 | run: |
41 | . /opt/ros/${{ matrix.ros_distro }}/setup.sh
42 | cd ${RUNNER_WORKSPACE}/catkin_ws/src
43 | ln -s ${GITHUB_WORKSPACE}
44 | catkin build -j2 --no-status -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_STANDARD=17
45 |
46 | - name: Run Tests
47 | run: |
48 | cd ${RUNNER_WORKSPACE}/catkin_ws/src/MOCHA/mocha_core/scripts/core/test
49 | ./run_tests.sh ${RUNNER_WORKSPACE}/catkin_ws
50 |
--------------------------------------------------------------------------------
/mocha_core/scripts/core/test/run_tests.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -eo pipefail
3 |
4 | # Check that the first argument is not null
5 | if [ -z "$1" ]; then
 6 | echo "No catkin workspace provided. Usage: ./run_tests.sh <path_to_catkin_ws>"
7 | exit 1
8 | fi
9 |
10 | # Echo some variables for debug
11 | echo "$PYTHONPATH"
12 |
13 | # Colors for better distinguishing start and end of tests
14 | RED='\033[0;31m'
15 | GREEN='\033[0;32m'
16 | YELLOW='\033[1;33m'
17 | BOLD='\033[1m'
18 | NC='\033[0m' # No Color
19 |
20 | # TESTS_TO_RUN is an array of tests that should be executed in the CI
21 | TESTS_TO_RUN=(
22 | "test_zmq_comm_node.py"
23 | "test_database_utils.py"
24 | "test_database.py"
25 | "test_synchronize_channel.py"
26 | "test_database_server.py"
27 | )
28 |
29 | # The first argument is the path to the catkin workspace + catkin_ws
30 | CATKIN_WS="$1"
31 | # Display path
32 | echo "Using catkin workspace: $CATKIN_WS"
33 |
34 | # Source the ROS environment
35 | source "$CATKIN_WS/devel/setup.bash"
36 |
37 | # Run roscore in the background and save its PID to kill it later
38 | roscore > /dev/null 2>&1 &
39 | PID=$(ps -ef | grep roscore | grep -v grep | awk '{print $2}')
40 | echo "Waiting 2 seconds for roscore to load"
41 | sleep 2
42 |
43 | # TODO(fclad) Change this when all the tests are passing
44 | # Run all the files that start with test_ in this folder
45 | # for i in $(ls | grep "^test_"); do
46 |
47 | for test in "${TESTS_TO_RUN[@]}"; do
48 | # Print yellow separator
49 | echo -e "${YELLOW}===================================== $test - START${NC}"
50 |
51 | # || captures the exit code without tripping `set -e` when a test fails
52 | rosrun mocha_core "$test" && retVal=0 || retVal=$?
53 | sleep 5 # To wait for some spawned processes
54 | if [ $retVal -ne 0 ]; then
55 | echo -e "${RED}${BOLD}===================================== $test - FAILED${NC}"
56 | kill -9 "$PID"
57 | exit $retVal
58 | fi
59 | done
60 | echo -e "${GREEN}${BOLD}================================ALL TESTS PASSED${NC}"
61 | exit 0
62 |
--------------------------------------------------------------------------------
/mocha_core/scripts/core/test/test_multiple_robots_sync.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import unittest
3 | import sys
4 | import uuid
5 | import time
6 | from pprint import pprint
7 | import multiprocessing
8 | sys.path.append('..')
9 | import get_sample_db as sdb
10 | import synchronize_channel as sync
11 | import synchronize_utils as su
12 | import database_server_utils as du
13 | from colorama import Fore, Back, Style
14 |
15 | CONFIG_FILE = "testConfigs/robotConfigs.yml"
16 |
17 | class test(unittest.TestCase):
18 | def setUp(self):
19 | test_name = self._testMethodName
20 | print("\n", Fore.RED, 20*"=", test_name, 20*"=", Style.RESET_ALL)
21 |
22 | def test_multi_robot_sync(self):
23 | self.maxDiff=None
24 |
25 | db_gs = sdb.get_sample_dbl()
26 | db_r1 = sdb.get_sample_dbl()
27 |
28 | # Set up all channels
29 | node_gs_r1 = sync.Channel(db_gs, 'groundstation',
30 | 'charon', CONFIG_FILE)
31 | node_r1_gs = sync.Channel(db_r1, 'charon',
32 | 'groundstation', CONFIG_FILE)
33 |
34 | pprint(db_gs.db)
35 | pprint(db_r1.db)
36 |
37 | node_r1_gs.run()
38 | node_gs_r1.run()
39 |
40 | nodes_1 = 3
41 | robot_prefix = 'R1_'
42 | feature_prefix = 'Node_'
43 | for i in range(nodes_1):
44 | feature = robot_prefix + feature_prefix + str(i)
45 | dbm = su.DBMessage(1, feature, 0, 1, time.time(),
46 | bytes('r1_data', 'utf-8'), False)
47 | du.add_modify_data_dbl(db_r1, dbm)
48 |
49 | node_gs_r1.trigger_sync()
50 | time.sleep(2)
51 | node_r1_gs.trigger_sync()
52 | time.sleep(2)
53 |
54 | dbm = su.DBMessage(0, 'Node_0', 1, 0, time.time(),
55 | bytes('r1_data', 'utf-8'), False)
56 | du.add_modify_data_dbl(db_gs, dbm)
57 |
58 | node_r1_gs.trigger_sync()
59 | time.sleep(2)
60 | node_gs_r1.trigger_sync()
61 | time.sleep(2)
62 |
63 | node_r1_gs.stop()
64 | node_gs_r1.stop()
65 |
66 | pprint(db_gs.db)
67 | pprint(db_r1.db)
68 |
69 | self.assertDictEqual(db_gs.db,db_r1.db)
70 | time.sleep(2)
71 |
72 | if __name__ == '__main__':
73 | unittest.main()
74 |
--------------------------------------------------------------------------------
/mocha_core/scripts/core/test/sample_db.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import copy
3 | from pprint import pprint
4 | import sys
5 | sys.path.append('..')
6 | import hash_comm
7 | import database as db
8 | import database_utils as du
9 | import pdb
10 | import rospy
11 |
12 | DB_TEMPLATE = {
13 | 0: {
14 | 0: {
15 | 'msg1': {
16 | 'dtype': 1,
17 | 'priority': 3,
18 | 'ts': 115.2223,
19 | 'data': bytes('Binary chunk', 'utf-8'),
20 | },
21 | 'msg2': {
22 | 'dtype': 2,
23 | 'priority': 0,
24 | 'ts': 117.2223,
25 | 'data': bytes('Binary chunk', 'utf-8'),
26 | },
27 | },
28 | 1: {
29 | 'msg3': {
30 | 'dtype': 3,
31 | 'priority': 1,
32 | 'ts': 118.2223,
33 | 'data': bytes('Binary chunk', 'utf-8'),
34 | },
35 | 'msg4': {
36 | 'dtype': 3,
37 | 'priority': 1,
38 | 'ts': 119.2223,
39 | 'data': bytes('Binary chunk', 'utf-8'),
40 | },
41 | },
42 | },
43 | 1: {
44 | 0: {
45 | 'msg1': {
46 | 'dtype': 1,
47 | 'priority': 4,
48 | 'ts': 335.22,
49 | 'data': bytes('Binary chunk', 'utf-8'),
50 | },
51 | 'msg2': {
52 | 'dtype': 2,
53 | 'priority': 2,
54 | 'ts': 335.2,
55 | 'data': bytes('Binary chunk', 'utf-8'),
56 | },
57 | }
58 | }
59 | }
60 |
61 |
62 | def get_sample_dbl():
63 | DB = {}
64 | for robot_id in DB_TEMPLATE:
65 | if robot_id not in DB:
66 | DB[robot_id] = {}
67 | for topic_id in DB_TEMPLATE[robot_id]:
68 | if topic_id not in DB[robot_id]:
69 | DB[robot_id][topic_id] = {}
70 | for msg in DB_TEMPLATE[robot_id][topic_id]:
71 | ts = DB_TEMPLATE[robot_id][topic_id][msg]['ts']
72 | ts = rospy.Time.from_sec(ts)
73 | data = DB_TEMPLATE[robot_id][topic_id][msg]['data']
74 | dtype = DB_TEMPLATE[robot_id][topic_id][msg]['dtype']
75 | prio = DB_TEMPLATE[robot_id][topic_id][msg]['priority']
76 | c = db.DBMessage(robot_id, topic_id, dtype, prio, ts, data)
77 | DB[robot_id][topic_id][c.header] = c
78 | dbl = db.DBwLock(DB)
79 | return dbl
80 |
81 |
82 | if __name__ == '__main__':
83 | dbl = get_sample_dbl()
84 | print(dbl)
85 |
--------------------------------------------------------------------------------
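The test files later in this export (`test_database.py`, `test_database_utils.py`) query this sample database through `DBwLock`. A quick sketch of that usage, assuming `scripts/core` and this test folder are on the Python path:

```python
# Quick look at the sample database (not part of the repository).
import sample_db

dbl = sample_db.get_sample_dbl()
headers = dbl.get_header_list()      # all headers, highest priority first
dbm = dbl.find_header(headers[0])    # fetch the corresponding DBMessage
print(len(headers), dbm.robot_id, dbm.topic_id)
```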
/interface_rajant/package.xml:
--------------------------------------------------------------------------------
 1 | <?xml version="1.0"?>
 2 | <package format="2">
 3 |   <name>interface_rajant</name>
 4 |   <version>0.0.0</version>
 5 |   <description>The interface_rajant package</description>
 6 |   <maintainer>Fernando Cladera</maintainer>
 7 |   <license>TODO</license>
 8 |   <buildtool_depend>catkin</buildtool_depend>
 9 |   <build_depend>rospy</build_depend>
10 |   <build_depend>std_msgs</build_depend>
11 |   <build_export_depend>rospy</build_export_depend>
12 |   <build_export_depend>std_msgs</build_export_depend>
13 |   <exec_depend>rospy</exec_depend>
14 |   <exec_depend>std_msgs</exec_depend>
15 |   <export>
16 |   </export>
17 | </package>
--------------------------------------------------------------------------------
/mocha_launch/package.xml:
--------------------------------------------------------------------------------
 1 | <?xml version="1.0"?>
 2 | <package format="2">
 3 |   <name>mocha_launch</name>
 4 |   <version>0.0.0</version>
 5 |   <description>The mocha_launch package</description>
 6 |   <maintainer>Fernando Cladera</maintainer>
 7 |   <license>TODO</license>
 8 |   <buildtool_depend>catkin</buildtool_depend>
 9 |   <build_depend>roscpp</build_depend>
10 |   <build_depend>rospy</build_depend>
11 |   <build_depend>std_msgs</build_depend>
12 |   <build_export_depend>roscpp</build_export_depend>
13 |   <build_export_depend>rospy</build_export_depend>
14 |   <build_export_depend>std_msgs</build_export_depend>
15 |   <exec_depend>roscpp</exec_depend>
16 |   <exec_depend>rospy</exec_depend>
17 |   <exec_depend>std_msgs</exec_depend>
18 |   <export>
19 |   </export>
20 | </package>
--------------------------------------------------------------------------------
/mocha_core/scripts/translators/translator.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import os
4 | import sys
5 | import rospy
6 | import rospkg
7 | import mocha_core.srv
8 | import yaml
9 | import pdb
10 |
11 | class Translator:
12 | def __init__(self, this_robot, this_robot_id,
13 | topic_name, topic_id, msg_type):
14 | self.__du = du
15 |
16 | self.__counter = 0
17 | self.__topic_name = topic_name
18 | self.__topic_id = topic_id
19 | self.__this_robot = this_robot
20 | self.__this_robot_id = this_robot_id
21 | self.__service_name = "integrate_database/AddUpdateDB"
22 | self.__add_update_db = rospy.ServiceProxy(
23 | self.__service_name, mocha_core.srv.AddUpdateDB
24 | )
25 |
26 | rospy.Subscriber(
27 | self.__topic_name, msg_type, self.translator_cb
28 | )
29 |
30 | def translator_cb(self, data):
31 | msg = data
32 |
33 | rospy.wait_for_service(self.__service_name)
34 | serialized_msg = self.__du.serialize_ros_msg(msg)
35 | try:
36 | ts = rospy.get_rostime()
37 | answ = self.__add_update_db(self.__topic_id,
38 | ts,
39 | serialized_msg)
40 | answ_header = answ.new_header
41 | rospy.logdebug(f"{self.__this_robot} - Header insert " +
42 | f"- {self.__topic_name} - {answ_header}")
43 | self.__counter += 1
44 | except rospy.ServiceException as exc:
45 | rospy.logerr(f"Service did not process request: {exc}")
46 |
47 |
48 | if __name__ == "__main__":
49 | rospy.init_node("topic_translator", anonymous=False)
50 |
51 | # Get the mocha_core path
52 | rospack = rospkg.RosPack()
53 | ddb_path = rospack.get_path('mocha_core')
54 | scripts_path = os.path.join(ddb_path, "scripts/core")
55 | sys.path.append(scripts_path)
56 | import database_utils as du
57 |
58 | # Get robot from the parameters
59 | this_robot = rospy.get_param("~robot_name")
60 |
61 | # Get the robot_config path and generate the robot_configs object
62 | robot_configs_default = os.path.join(ddb_path,
63 | "config/testConfigs/robot_configs.yaml")
64 | robot_configs_path = rospy.get_param("~robot_configs", robot_configs_default)
65 | with open(robot_configs_path, 'r') as f:
66 | robot_configs = yaml.load(f, Loader=yaml.FullLoader)
67 | if this_robot not in robot_configs.keys():
68 | rospy.logerr(f"Robot {this_robot} not in robot_configs")
69 | rospy.signal_shutdown("Robot not in robot_configs")
70 | rospy.spin()
71 |
72 | # Get the topic_configs path and generate the topic_configs object
73 | topic_configs_default = os.path.join(ddb_path,
74 | "config/testConfigs/topic_configs.yaml")
75 | topic_configs_path = rospy.get_param("~topic_configs",
76 | topic_configs_default)
77 | with open(topic_configs_path, 'r') as f:
78 | topic_configs = yaml.load(f, Loader=yaml.FullLoader)
79 | this_robot_topics = topic_configs[robot_configs[this_robot]["node-type"]]
80 |
81 | # Get msg_types dict used to publish the topics
82 | msg_types = du.msg_types(robot_configs, topic_configs)
83 |
84 | for topic_id, topic in enumerate(this_robot_topics):
85 | # Get robot id
86 | robot_id = du.get_robot_id_from_name(robot_configs, this_robot)
87 | obj = msg_types[robot_id][topic_id]["obj"]
88 | Translator(this_robot,
89 | robot_id,
90 | topic["msg_topic"],
91 | topic_id,
92 | obj)
93 | rospy.spin()
94 |
--------------------------------------------------------------------------------
/mocha_core/package.xml:
--------------------------------------------------------------------------------
 1 | <?xml version="1.0"?>
 2 | <package format="2">
 3 |   <name>mocha_core</name>
 4 |   <version>0.0.1</version>
 5 |   <description>DCIST Distributed Database package</description>
 6 |   <maintainer>developer</maintainer>
 7 |   <license>GPLv3</license>
 8 |   <buildtool_depend>catkin</buildtool_depend>
 9 |   <build_depend>message_generation</build_depend>
10 |   <build_depend>geometry_msgs</build_depend>
11 |   <build_depend>roscpp</build_depend>
12 |   <build_depend>rospy</build_depend>
13 |   <build_depend>std_msgs</build_depend>
14 |   <build_depend>sensor_msgs</build_depend>
15 |   <build_depend>vision_msgs</build_depend>
16 |   <build_export_depend>geometry_msgs</build_export_depend>
17 |   <build_export_depend>roscpp</build_export_depend>
18 |   <build_export_depend>rospy</build_export_depend>
19 |   <build_export_depend>std_msgs</build_export_depend>
20 |   <build_export_depend>sensor_msgs</build_export_depend>
21 |   <build_export_depend>vision_msgs</build_export_depend>
22 |   <exec_depend>message_runtime</exec_depend>
23 |   <exec_depend>geometry_msgs</exec_depend>
24 |   <exec_depend>roscpp</exec_depend>
25 |   <exec_depend>rospy</exec_depend>
26 |   <exec_depend>std_msgs</exec_depend>
27 |   <exec_depend>sensor_msgs</exec_depend>
28 |   <exec_depend>vision_msgs</exec_depend>
29 |   <exec_depend>depth_atlas</exec_depend>
30 |   <exec_depend>detection_filter_processor</exec_depend>
31 |   <exec_depend>cell_detect</exec_depend>
32 |   <export>
33 |   </export>
34 | </package>
--------------------------------------------------------------------------------
/mocha_core/scripts/core/test/test_zmq_comm_node.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """ test_zmq_comm_node.py
4 | This file tests the basic functionality between two communication nodes.
 5 | - Two nodes are created: basestation and charon. The configuration for these is
6 | defined in the configuration file. The client of each node is set to the other one.
7 | - We generate a random message that groundstation sends to charon. Upon reception,
8 | cb_charon is called. The return value of this function is transmitted to the groundstation.
9 | - The groundstation receives the message
10 | """
11 |
12 | import os
13 | import random
14 | import string
15 | import sys
16 | import unittest
17 | import yaml
18 | import rospkg
19 | import pdb
20 | import rospy
21 | from colorama import Fore, Style
22 |
23 |
24 | class Test(unittest.TestCase):
25 | def setUp(self):
26 | test_name = self._testMethodName
27 | print("\n", Fore.RED, 20*"=", test_name, 20*"=", Style.RESET_ALL)
28 | super().setUp()
29 |
30 | def tearDown(self):
31 | rospy.sleep(1)
32 | super().tearDown()
33 |
34 | def test_simple_connection(self):
35 | self.answer = None
36 |
37 | def cb_groundstation(value):
38 | rospy.logdebug("cb_groundstation")
39 | self.answer = value
40 |
41 | def cb_charon(value):
42 | # This function is called upon reception of a message by charon.
43 | # The return value is transmitted as answer to the original
44 | # message.
45 | rospy.logdebug(f"cb_charon: {value}")
46 | return value
47 |
48 | # Create the two robots
49 | node_groundstation = zmq_comm_node.Comm_node(
50 | "basestation", "charon", robot_configs,
51 | cb_groundstation, cb_groundstation, 2
52 | )
53 | node_charon = zmq_comm_node.Comm_node(
54 | "charon", "basestation", robot_configs,
55 | cb_charon, cb_charon, 2
56 | )
57 |
58 | # Generate random message
59 | letters = string.ascii_letters
60 | random_str = "".join(random.choice(letters) for i in range(10))
61 | random_msg = random_str + str(random.randint(0, 1024))
62 | random_msg = random_msg.encode()
63 |
64 | # Send message from node_groundstation to robot 2
65 | node_groundstation.connect_send_message(random_msg)
66 |
67 | # node_charon.connect_send_message(random_msg)
68 |
69 | # Terminate robots and test assertion
70 | node_groundstation.terminate()
71 | node_charon.terminate()
72 | self.assertEqual(random_msg, self.answer, "Sent %s" % random_msg)
73 |
74 |
75 | if __name__ == "__main__":
76 | # Get the directory path and import all the required modules to test
77 | rospack = rospkg.RosPack()
78 | ddb_path = rospack.get_path("mocha_core")
79 | scripts_path = os.path.join(ddb_path, "scripts/core")
80 | sys.path.append(scripts_path)
81 | import zmq_comm_node
82 |
83 | # Create a ROS node to use during the test
84 | rospy.init_node("test_zmq_comm_node",
85 | log_level=rospy.DEBUG, anonymous=False)
86 |
87 | # Get the default path from the ddb_path
88 | robot_configs_default = os.path.join(ddb_path,
89 | "config/testConfigs/robot_configs.yaml")
90 | # Get the path to the robot config file from the ros parameter robot_configs
91 | robot_configs = rospy.get_param("robot_configs",
92 | robot_configs_default)
93 |
94 | # Get the yaml dictionary objects
95 | with open(robot_configs, "r") as f:
96 | robot_configs = yaml.load(f, Loader=yaml.FullLoader)
97 |
98 | # Run test cases!
99 | unittest.main()
100 |
--------------------------------------------------------------------------------
/mocha_core/scripts/core/test/test_delay_2robots.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import unittest
4 | import sys
5 | import uuid
6 | import time
7 | from pprint import pprint
8 | import multiprocessing
9 | sys.path.append('..')
10 | import get_sample_db as sdb
11 | import synchronize_channel as sync
12 | import synchronize_utils as su
13 | import database_server_utils as du
14 | from colorama import Fore, Style
15 |
16 | CONFIG_FILE = "testConfigs/robotConfigs.yml"
17 |
18 | class test(unittest.TestCase):
19 | def setUp(self):
20 | test_name = self._testMethodName
21 | print("\n", Fore.RED, 20*"=", test_name, 20*"=", Style.RESET_ALL)
22 |
23 | def test_delay_run(self):
24 | self.maxDiff = None
25 | db_gs = sdb.get_sample_dbl()
26 | db_r1 = sdb.get_sample_dbl()
27 | db_r2 = sdb.get_sample_dbl()
28 |
29 | node_gs_r1 = sync.Channel(db_gs, 'groundstation', 'charon', CONFIG_FILE)
30 | node_gs_r2 = sync.Channel(db_gs, 'groundstation', 'styx', CONFIG_FILE)
31 |
32 | node_r1_gs = sync.Channel(db_r1, 'charon', 'groundstation', CONFIG_FILE)
33 | node_r2_gs = sync.Channel(db_r2, 'styx', 'groundstation', CONFIG_FILE)
34 |
35 | node_r1_r2 = sync.Channel(db_r1, 'charon', 'styx', CONFIG_FILE)
36 | node_r2_r1 = sync.Channel(db_r2, 'styx', 'charon', CONFIG_FILE)
37 |
38 | pprint(db_gs.db)
39 | pprint(db_r1.db)
40 | pprint(db_r2.db)
41 |
42 | node_gs_r1.run()
43 | node_gs_r2.run()
44 |
45 | dbm = su.DBMessage(1, 'Node_1', 1, 2, time.time(), bytes('r1_data', 'utf-8'), False)
46 | du.add_modify_data_dbl(db_r1, dbm)
47 |
48 | node_gs_r1.trigger_sync()
49 | time.sleep(4)
50 |
51 | node_r1_gs.run()
52 | node_r2_gs.run()
53 |
54 | dbm = su.DBMessage(2, 'Node_2', 2, 1, time.time(), bytes('r2_data', 'utf-8'), False)
55 | du.add_modify_data_dbl(db_r2, dbm)
56 |
57 | node_r1_r2.run()
58 |
59 | node_r1_r2.trigger_sync()
60 | time.sleep(4)
61 |
62 | node_gs_r2.trigger_sync()
63 | time.sleep(2)
64 |
65 | node_r2_r1.run()
66 |
67 | node_gs_r1.stop()
68 | node_r1_gs.stop()
69 |
70 | node_r1_r2.stop()
71 | node_r2_r1.stop()
72 |
73 | node_r2_gs.stop()
74 | node_gs_r2.stop()
75 |
76 | pprint(db_gs.db)
77 | pprint(db_r1.db)
78 | pprint(db_r2.db)
79 | time.sleep(2)
80 |
81 | def test_multiple_trigger_sync(self):
82 |
83 | self.maxDiff = None
84 | db_gs = sdb.get_sample_dbl()
85 | db_r1 = sdb.get_sample_dbl()
86 | db_r2 = sdb.get_sample_dbl()
87 |
88 | node_gs_r1 = sync.Channel(db_gs, 'groundstation', 'charon', CONFIG_FILE)
89 | node_gs_r2 = sync.Channel(db_gs, 'groundstation', 'styx', CONFIG_FILE)
90 |
91 | node_r1_gs = sync.Channel(db_r1, 'charon', 'groundstation', CONFIG_FILE)
92 | node_r2_gs = sync.Channel(db_r2, 'styx', 'groundstation', CONFIG_FILE)
93 |
94 | node_r1_r2 = sync.Channel(db_r1, 'charon', 'styx', CONFIG_FILE)
95 | node_r2_r1 = sync.Channel(db_r2, 'styx', 'charon', CONFIG_FILE)
96 |
97 | # pprint(db_gs)
98 | # pprint(db_r1)
99 | # pprint(db_r2)
100 |
101 | # node_r1_r2.run()
102 | node_r2_r1.run()
103 |
104 | dbm = su.DBMessage(1, 'Node_1', 1, 2, time.time(), bytes('R1_data', 'utf-8'), False)
105 | du.add_modify_data_dbl(db_r1, dbm)
106 |
107 | node_r2_r1.trigger_sync()
108 | time.sleep(5)
109 |
110 | # node_r2_r1.trigger_sync()
111 | # time.sleep(5)
112 |
113 | # node_r1_r2.stop()
114 | node_r2_r1.stop()
115 | time.sleep(2)
116 |
117 | if __name__ == '__main__':
118 | unittest.main()
119 |
--------------------------------------------------------------------------------
/mocha_core/config/radio_configs.yaml:
--------------------------------------------------------------------------------
1 | brick-1:
2 | SN: "ME4-2450R-58845"
3 | MAC-address: ["04:f0:21:38:cf:5b", "04:f0:21:38:86:bd"]
4 | computed-IP-address: "10.229.221.1"
5 |
6 | brick-2:
7 | SN: "ME4-2450R-58846"
8 | MAC-address: ["04:f0:21:38:d3:be", "04:f0:21:38:86:c9"]
9 | computed-IP-address: "10.229.222.1"
10 |
11 | blue-1:
12 | SN: "OMAHA-50-63915"
13 | MAC-address: ["cc:2d:e0:9b:3e:aa"]
14 | computed-IP-address: "10.249.171.1"
15 |
16 | blue-2:
17 | SN: "OMAHA-50-63935"
18 | MAC-address: ["cc:2d:e0:9b:3e:46"]
19 | computed-IP-address: "10.249.191.1"
20 |
21 | blue-3:
22 | SN: "OMAHA-50-67211"
23 | MAC-address: ["b8:69:f4:70:ba:90"]
24 | computed-IP-address: "10.6.139.1"
25 |
26 | blue-4:
27 | SN: "OMAHA-50-67212"
28 | MAC-address: ["b8:69:f4:70:ba:bc"]
29 | computed-IP-address: "10.6.140.1"
30 |
31 | blue-5:
32 | SN: "OMAHA-50-67213"
33 | MAC-address: ["b8:69:f4:70:ba:0f"]
34 | computed-IP-address: "10.6.141.1"
35 |
36 | blue-6:
37 | SN: "DX2-50-67783"
38 | MAC-address: ["74:4d:28:41:7a:e6"]
39 | computed-IP-address: "10.8.199.1"
40 |
41 | black-1:
42 | SN: "ME4-2450R-41075"
43 | MAC-address: ["04:f0:21:2f:6d:ab", "04:f0:21:17:e4:51"]
44 | computed-IP-address: "10.160.115.1"
45 |
46 | black-2:
47 | SN: "ME4-2450R-46136"
48 | MAC-address: ["04:f0:21:13:13:e7", "04:f0:21:02:e1:39"]
49 | computed-IP-address: "10.180.56.1"
50 |
51 | black-3:
52 | SN: "ME4-2450R-58161"
53 | MAC-address: ["04:f0:21:1c:1b:21", "04:f0:21:19:b1:bb"]
54 | computed-IP-address: "10.227.49.1"
55 |
56 | black-4:
57 | SN: "ME4-2450R-46130"
58 | MAC-address: ["04:f0:21:13:13:e4", "04:f0:21:02:e1:4a"]
59 | computed-IP-address: "10.180.50.1"
60 |
61 | cardinal-1:
62 | SN: "AG1-5250M-117082"
63 | MAC-address: ["84:8d:84:ef:13:7e","84:8d:84:ef:13:7f"]
64 | computed-IP-address: "10.201.90.1"
65 |
66 | cardinal-2:
67 | SN: "AG1-5250M-117113"
68 | MAC-address: ["84:8d:84:ef:13:8d","84:8d:84:ef:13:8e"]
69 | computed-IP-address: "10.201.121.1"
70 |
71 | cardinal-3:
72 | SN: "AG1-5250M-117098"
73 | MAC-address: ["84:8d:84:ef:0d:7b","84:8d:84:ef:0d:7c"]
74 | computed-IP-address: "10.201.106.1"
75 |
76 | deep-blue-1:
77 | SN: "DX2-50-81745"
78 | MAC-address: ["c4:ad:34:79:e4:da"]
79 | computed-IP-address: "10.63.81.1"
80 |
81 | deep-blue-2:
82 | SN: "DX2-50-100071"
83 | MAC-address: ["dc:2c:6e:bc:e2:8e"]
84 | computed-IP-address: "10.134.231.1"
85 |
86 | deep-blue-3:
87 | SN: "DX2-24-119294"
88 | MAC-address: ["48:a9:8a:a0:75:69"]
89 | computed-IP-address: 10.209.254.1
90 |
91 | deep-blue-4:
92 | SN: "DX2-24-125991"
93 | MAC-address: ["48:8f:5a:48:8f:38"]
94 | computed-IP-address: 10.236.39.1
95 |
96 | deep-blue-5:
97 | SN: "DX2-24-125992"
98 | MAC-address: ["2c:c8:1b:e1:9b:d6"]
99 | computed-IP-address: 10.236.40.1
100 |
101 | dx4-1:
102 | SN: "DX4-5252E-106727"
103 | MAC-address: ["00:0e:8e:bd:d9:24", "00:0e:8e:bd:d9:23"]
104 | computed-IP-address: "10.160.231.1"
105 |
106 | dx4-2:
107 | SN: "DX4-5252E-106742"
108 | MAC-address: ["00:0e:8e:bd:d9:31", "00:0e:8e:bd:29:2a"]
109 | computed-IP-address: "10.160.246.1"
110 |
111 | dx4-3:
112 | SN: "DX4-5252E-106741"
113 | MAC-address: ["00:0e:8e:bd:d8:e9", "00:0e:8e:bd:d0:03"]
114 | computed-IP-address: "10.160.245.1"
115 |
116 | dx4-4:
117 | SN: "DX4-5252E-106737"
118 | MAC-address: ["00:0e:8e:bd:d9:35", "00:0e:8e:bd:d9:30"]
119 | computed-IP-address: "10.160.241.1"
120 |
121 | dx4-5:
122 | SN: "DX4-5252E-106739"
123 | MAC-address: ["00:0e:8e:bd:d9:37", "00:0e:8e:bd:d9:29"]
124 | computed-IP-address: "10.160.243.1"
125 |
126 | dx4-6:
127 | SN: "DX4-5252E-106740"
128 | MAC-address: ["00:0e:8e:bd:d6:e5", "00:0e:8e:bd:d9:32"]
129 | computed-IP-address: "10.160.244.1"
130 |
131 | dx4-7:
132 | SN: "DX4-5252E-106744"
133 | MAC-address: ["00:0e:8e:a0:ae:5e", "00:0e:8e:a0:ae:57"]
134 | computed-IP-address: "10.160.248.1"
135 |
136 | super-brick:
137 | SN: "PE1-2255B-103860"
138 | MAC-address: ["84:8d:84:41:4f:fb", "84:8d:84:41:4f:f7", "84:8d:84:41:4f:f8", "84:8d:84:41:4f:f9"]
139 | computed-IP-address: "10.149.180.1"
140 |
141 |
--------------------------------------------------------------------------------
/mocha_core/scripts/core/database_server.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import os
4 | import threading
5 | import rospy
6 | import mocha_core.srv
7 | import database
8 | import pdb
9 | import database_utils as du
10 |
11 |
12 | class DatabaseServer:
13 | """ Starts a clean database and offers an API-like service
14 | to interact with the database itself.
15 |
16 | Please see the list of services in the srv folder
17 | """
18 | def __init__(self, robot_configs, topic_configs):
19 | # Check input topics
20 | assert isinstance(robot_configs, dict)
21 | assert isinstance(topic_configs, dict)
22 |
23 | self.robot_configs = robot_configs
24 | self.topic_configs = topic_configs
25 |
26 | # Get current robot name from params
27 | self.robot_name = rospy.get_param('~robot_name')
28 |
29 | if self.robot_name not in self.robot_configs.keys():
30 | rospy.logerr(f"{self.robot_name} does not exist in robot_configs")
31 | rospy.signal_shutdown("robot_name does not exist in robot_configs")
32 | rospy.spin()
33 |
34 | self.topic_list = self.topic_configs[self.robot_configs[self.robot_name]["node-type"]]
35 |
36 | # Create the empty database with lock
37 | self.dbl = database.DBwLock()
38 |
39 | # Get current robot number
40 | self.robot_number = du.get_robot_id_from_name(self.robot_configs,
41 | robot_name=self.robot_name)
42 |
43 | # create services for all the possible calls to the DB
44 | self.service_list = []
45 | s = rospy.Service('~AddUpdateDB',
46 | mocha_core.srv.AddUpdateDB,
47 | self.add_update_db_service_cb)
48 | self.service_list.append(s)
49 | s = rospy.Service('~GetDataHeaderDB',
50 | mocha_core.srv.GetDataHeaderDB,
51 | self.get_data_hash_db_service_cb)
52 | self.service_list.append(s)
53 | s = rospy.Service('~SelectDB',
54 | mocha_core.srv.SelectDB,
55 | self.select_db_service_cb)
56 | self.service_list.append(s)
57 |
58 | self.msg_types = du.msg_types(self.robot_configs, self.topic_configs)
59 |
60 | def add_update_db_service_cb(self, req):
61 | if not isinstance(req.topic_id, int) or req.topic_id is None:
62 | rospy.logerr("Error: topic_id empty")
63 | return
64 | if len(req.msg_content) == 0:
65 | rospy.logerr("Error: msg_content empty")
66 | return
67 | if req.topic_id >= len(self.topic_list):
68 | rospy.logerr("Error: topic_id not in topic_list")
69 | return
70 | topic = self.topic_list[req.topic_id]
71 | priority = du.get_priority_number(topic["msg_priority"])
72 | ts = req.timestamp
73 | ts = rospy.Time(ts.secs, ts.nsecs)
74 | dbm = database.DBMessage(self.robot_number,
75 | req.topic_id,
76 | dtype=self.msg_types[self.robot_number][req.topic_id]["dtype"],
77 | priority=priority,
78 | ts=ts,
79 | data=req.msg_content)
80 |
81 | header = self.dbl.add_modify_data(dbm)
82 | return mocha_core.srv.AddUpdateDBResponse(header)
83 |
84 | def get_data_hash_db_service_cb(self, req):
85 | if req.msg_header is None or len(req.msg_header) == 0:
86 | rospy.logerr("Error: msg_header empty")
87 | return
88 | dbm = self.dbl.find_header(req.msg_header)
89 | answ = mocha_core.srv.GetDataHeaderDBResponse(dbm.robot_id,
90 | dbm.topic_id,
91 | dbm.ts,
92 | dbm.data)
93 | return answ
94 |
95 | def select_db_service_cb(self, req):
96 | # TODO Implement filtering
97 | if req.robot_id is None:
98 | rospy.logerr("Error: robot_id none")
99 | return
100 | if req.topic_id is None:
101 | rospy.logerr("Error: topic_id none")
102 | return
103 | list_headers = self.dbl.get_header_list(req.robot_id)
104 | answ = mocha_core.srv.SelectDBResponse(du.serialize_headers(list_headers))
105 | return answ
106 |
107 | def shutdown(self):
108 | for s in self.service_list:
109 | s.shutdown()
110 |
--------------------------------------------------------------------------------
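`integrate_database.py`, the node that actually instantiates `DatabaseServer`, is not included in this export. A hypothetical wiring sketch using the test configs and the constructor shown above; `DatabaseServer` reads the private `~robot_name` parameter itself, so that parameter must be set when the node is started:

```python
#!/usr/bin/env python3
# Hypothetical wiring sketch for DatabaseServer (not the repo's own
# integrate_database.py). Requires the ~robot_name private parameter.
import os
import sys
import yaml
import rospy
import rospkg

rospy.init_node("integrate_database")

rospack = rospkg.RosPack()
ddb_path = rospack.get_path("mocha_core")
sys.path.append(os.path.join(ddb_path, "scripts/core"))
import database_server  # imported after the path append, as the repo's scripts do

with open(os.path.join(ddb_path, "config/testConfigs/robot_configs.yaml")) as f:
    robot_configs = yaml.safe_load(f)
with open(os.path.join(ddb_path, "config/testConfigs/topic_configs.yaml")) as f:
    topic_configs = yaml.safe_load(f)

ds = database_server.DatabaseServer(robot_configs, topic_configs)
rospy.on_shutdown(ds.shutdown)
rospy.spin()
```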
/mocha_core/scripts/core/test/test_database_utils.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import unittest
3 | import sys
4 | import os
5 | from pprint import pprint
6 | import uuid
7 | import geometry_msgs.msg
8 | import rospkg
9 | import pdb
10 | import rospy
11 | from colorama import Fore, Back, Style
12 | import yaml
13 | import random
14 |
15 | ROBOT0_TOPIC2_PRIO0 = b'\x00\x00\x00u\x00\xde'
16 | ROBOT1_TOPIC1_PRIO4 = b'\x01\x00\x01O\x00\xdc'
17 | ROBOT1_TOPIC2_PRIO2 = b'\x01\x00\x01O\x00\xc7'
18 |
19 |
20 | class test(unittest.TestCase):
21 | def setUp(self):
22 | test_name = self._testMethodName
23 | print("\n", Fore.RED, 20*"=", test_name, 20*"=", Style.RESET_ALL)
24 | super().setUp()
25 |
26 | def tearDown(self):
27 | rospy.sleep(1)
28 | super().tearDown()
29 |
30 | def test_serialize_deserialize(self):
31 | for _ in range(10):
32 | header_list = []
33 | for _ in range(20):
34 | header_list.append(du.generate_random_header())
35 | serialized = du.serialize_headers(header_list)
36 | deserialized = du.deserialize_headers(serialized)
37 | self.assertEqual(len(header_list), len(deserialized))
38 | self.assertEqual(deserialized, header_list)
39 |
40 | def test_deserialize_wrong(self):
41 | header_list = []
42 | for _ in range(5):
43 | header_list.append(du.generate_random_header())
44 | serialized = du.serialize_headers(header_list)
45 | with self.assertRaises(Exception):
46 | du.deserialize_headers(serialized[1:])
47 |
48 | def test_pack_unpack_data_topic(self):
49 | dbl = sample_db.get_sample_dbl()
50 | dbm = dbl.find_header(ROBOT1_TOPIC1_PRIO4)
51 | packed = du.pack_data(dbm)
52 | u_dbm = du.unpack_data(ROBOT1_TOPIC1_PRIO4, packed)
53 | self.assertEqual(dbm, u_dbm)
54 | dbm = dbl.find_header(ROBOT1_TOPIC2_PRIO2)
55 | packed = du.pack_data(dbm)
56 | u_dbm = du.unpack_data(ROBOT1_TOPIC2_PRIO2, packed)
57 | self.assertEqual(dbm, u_dbm)
58 |
59 | def test_topic_id(self):
60 | for i in range(50):
61 | # Pick a random robot
62 | robot = random.choice(list(robot_configs.keys()))
63 | # Pick a random topic
64 | topic_list = topic_configs[robot_configs[robot]["node-type"]]
65 | topic = random.choice(topic_list)
66 | id = du.get_topic_id_from_name(robot_configs, topic_configs,
67 | robot, topic["msg_topic"])
68 | topic_find = du.get_topic_name_from_id(robot_configs,
69 | topic_configs, robot, id)
70 | self.assertEqual(topic["msg_topic"], topic_find)
71 |
72 | def test_robot_id(self):
73 | for robot in robot_configs:
74 | number = du.get_robot_id_from_name(robot_configs, robot)
75 | robot_name = du.get_robot_name_from_id(robot_configs, number)
76 | self.assertEqual(robot, robot_name)
77 |
78 | if __name__ == '__main__':
79 | # Get the directory path and import all the required modules to test
80 | rospack = rospkg.RosPack()
81 | ddb_path = rospack.get_path('mocha_core')
82 | scripts_path = os.path.join(ddb_path, "scripts/core")
83 | sys.path.append(scripts_path)
84 | import database_utils as du
85 | import sample_db
86 | import hash_comm as hc
87 |
88 | # Set the node name
89 | rospy.init_node('test_synchronize_utils', anonymous=False)
90 |
91 | # Get the default path from the ddb_path
92 | robot_configs_default = os.path.join(ddb_path,
93 | "config/testConfigs/robot_configs.yaml")
94 | # Get the path to the robot config file from the ros parameter robot_configs
95 | robot_configs = rospy.get_param("robot_configs",
96 | robot_configs_default)
97 |
98 | # Get the yaml dictionary objects
99 | with open(robot_configs, "r") as f:
100 | robot_configs = yaml.load(f, Loader=yaml.FullLoader)
101 |
102 | # Get the default path from the ddb_path
103 | topic_configs_default = os.path.join(ddb_path,
104 | "config/testConfigs/topic_configs.yaml")
105 | # Get the path to the topic config file from the ros
106 | # parameter topic_configs
107 | topic_configs = rospy.get_param("topic_configs",
108 | topic_configs_default)
109 |
110 | # Get the yaml dictionary objects
111 | with open(topic_configs, "r") as f:
112 | topic_configs = yaml.load(f, Loader=yaml.FullLoader)
113 |
114 | msg_types = du.msg_types(robot_configs, topic_configs)
115 |
116 | # Run test cases!
117 | unittest.main()
118 |
--------------------------------------------------------------------------------
/mocha_core/scripts/core/hash_comm.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import hashlib
3 | import rospy
4 | import struct
5 |
6 | LENGTH = 6
7 |
8 |
9 | class Hash():
10 | HASH_LENGTH = LENGTH
11 | # Length of the hash in bits
12 | # We can expect a collision after approx
13 | # math.sqrt(2**HASH_LENGTH_BITS)
14 | # https://preshing.com/20110504/hash-collision-probabilities/
15 | HASH_LENGTH_BITS = HASH_LENGTH*8
16 |
17 | def __init__(self, data):
18 | if type(data) != bytes:
19 | raise TypeError
20 | self.data = data
21 |
22 | def digest(self):
23 | h = hashlib.sha256(self.data)
24 | hexdigest = h.digest()
25 | trimmed_digest = hexdigest[:self.HASH_LENGTH]
26 | return trimmed_digest
27 |
28 | def bindigest(self):
29 | return self.digest()
30 |
31 |
32 | class TsHeader():
33 | # Get length in bytes
34 | HEADER_LENGTH = LENGTH
35 |
36 | def __init__(self, *, robot_id=None, topic_id=None, secs=None, msecs=None):
37 | # You should not use this constructor to create the class. Use
38 | # from_data or from_header instead
39 | self.robot_id = robot_id
40 | self.topic_id = topic_id
41 | self.secs = secs
42 | self.msecs = msecs
43 |
44 | @classmethod
45 | def from_data(cls, robot_id, topic_id, time):
46 | # Robot ID is a 1-byte unsigned integer. Check type and value
47 | assert type(robot_id) == int and robot_id >= 0 and robot_id < 256
48 | # Same for topic ID
49 | assert type(topic_id) == int and topic_id >= 0 and topic_id < 256
50 | # time is a rospy time object
51 | assert type(time) == rospy.Time
52 |
53 | # The header will be comprised of 1 byte for the robot number, 1 byte
54 | # for the topic id, and 4 bytes for the time. The first 2 bytes of the
55 | # time are the secs, and the 2 last bytes are the ms of the message.
56 | # This means that we have ms resolution. We only handle positive times
57 | assert type(time.secs) == int and time.secs >= 0
58 | assert type(time.nsecs) == int and time.nsecs >= 0
59 | secs = time.secs % 65536
60 | msecs = time.nsecs // 1000000
61 | robot_id = robot_id
62 | topic_id = topic_id
63 | return cls(robot_id=robot_id, topic_id=topic_id,
64 | secs=secs, msecs=msecs)
65 |
66 | @classmethod
67 | def from_header(cls, header):
68 | robot_id = struct.unpack("!B", header[0:1])[0]
69 | topic_id = struct.unpack("!B", header[1:2])[0]
70 | secs = struct.unpack("!H", header[2:4])[0]
71 | msecs = struct.unpack("!H", header[4:6])[0]
72 | return cls(robot_id=robot_id, topic_id=topic_id,
73 | secs=secs, msecs=msecs)
74 |
75 | def bindigest(self):
76 | # Assemble the header by concatenating data, using struct
77 | b = struct.pack("!B", self.robot_id)
78 | b += struct.pack("!B", self.topic_id)
79 | b += struct.pack("!H", self.secs)
80 | b += struct.pack("!H", self.msecs)
81 | return b
82 |
83 | def get_id_and_time(self):
84 | assert self.robot_id is not None
85 | assert self.topic_id is not None
86 | assert self.secs is not None
87 | assert self.msecs is not None
88 | time = rospy.Time(self.secs, self.msecs*1000000)
89 | return self.robot_id, self.topic_id, time
90 |
91 | if __name__ == "__main__":
92 | import random
93 | import numpy as np
94 | import pdb
95 | import sys
96 |
97 | import argparse
98 | ap = argparse.ArgumentParser()
99 | # Add an argument to decide which test to run
100 | ap.add_argument("-t", "--test", required=True, help="Test to run")
101 | args = vars(ap.parse_args())
102 |
103 | if args["test"] == "hash":
104 | expected_collision = np.sqrt(2**Hash.HASH_LENGTH_BITS)
105 | MAX_DATA = 10000000000
106 | collision_i = np.array([])
107 | for loop in range(10):
108 | hashes = set()
109 | for i in range(MAX_DATA):
110 | randstr = str(random.random())
111 | hi = Hash(randstr.encode()).digest()
112 | if hi in hashes:
113 | print(loop, "- Collision on hash %d -" % i, hi)
114 | collision_i = np.append(collision_i, i)
115 | break
116 | hashes.add(hi)
117 | if i % 1000000 == 0:
118 | # print with only 3 decimals
119 | print(f"{loop} {i}" +
120 | f"Expected rate {(i/expected_collision):.3f}")
121 | print("Avg collision: %f" % np.average(collision_i))
122 | elif args["test"] == "time":
123 | for i in range(100):
124 | random_robot = random.randint(0, 255)
125 | random_topic = random.randint(0, 255)
126 | random_time = rospy.Time(random.randint(0, 65536),
127 | random.randint(0, 1000000000))
128 | ts = TsHeader.from_data(random_robot, random_topic, random_time)
129 | bindigest = ts.bindigest()
130 | ts2 = TsHeader.from_header(bindigest)
131 | robot_id, topic_id, time = ts2.get_id_and_time()
132 | assert robot_id == random_robot
133 | assert topic_id == random_topic
134 | assert time.secs == random_time.secs
135 | assert np.abs(time.nsecs-random_time.nsecs) < 1000000
136 | else:
137 | sys.exit("Invalid test")
138 |
139 |
--------------------------------------------------------------------------------
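A worked example of the 6-byte header layout described in `from_data` above (1 byte robot id, 1 byte topic id, 2 bytes of seconds modulo 65536, 2 bytes of milliseconds). It assumes `scripts/core` is on the Python path so `hash_comm` can be imported:

```python
# Worked example for the TsHeader byte layout (not part of the repository).
import rospy
import hash_comm

# robot 3, topic 1, t = 100000 s + 250 ms
h = hash_comm.TsHeader.from_data(3, 1, rospy.Time(100000, 250 * 1000000))
header = h.bindigest()
print(header.hex())  # 030186a000fa: 0x03, 0x01, 100000 % 65536 = 0x86a0, 250 ms = 0x00fa

# Round trip back to ids and time; seconds are truncated modulo 65536 and the
# time keeps millisecond resolution only.
robot_id, topic_id, t = hash_comm.TsHeader.from_header(header).get_id_and_time()
assert (robot_id, topic_id, t.secs, t.nsecs) == (3, 1, 34464, 250 * 1000000)
```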
/interface_rajant/scripts/rajant_parser.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import ast
4 | import yaml
5 | import os
6 | import re
7 | import threading
8 | import pprint
9 | import sys
10 | import pdb
11 |
12 | import rospy
13 | from std_msgs.msg import String
14 | from std_msgs.msg import Int32
15 | import rospkg
16 |
17 | class RajantParser():
18 | def __init__(self, this_robot, robot_configs, radio_configs):
19 |
20 | # Check input args
21 | assert isinstance(this_robot, str)
22 | assert isinstance(robot_configs, dict)
23 | assert isinstance(radio_configs, dict)
24 | self.MAC_DICT = {}
25 |
26 | self.this_robot = this_robot
27 | self.robot_cfg = robot_configs
28 | self.radio_cfg = radio_configs
29 | self.rate = rospy.Rate(.5)
30 |
31 | rospy.loginfo(f"{self.this_robot} - Rajant API Parser - Starting")
32 |
33 | # Generate a standard configuration with a default RSSI of -20
34 | for radio in self.radio_cfg.keys():
35 | for address in self.radio_cfg[radio]['MAC-address']:
36 | self.MAC_DICT[address] = {}
37 | self.MAC_DICT[address]['rssi'] = -20
38 | self.MAC_DICT[address]['timestamp'] = rospy.Time.now()
39 | self.MAC_DICT[address]['radio'] = radio
40 | self.MAC_DICT[address]['publisher'] = None
41 |
42 | # Generate publishers for each item in the dict
43 | for mac in self.MAC_DICT.keys():
44 | for robot in self.robot_cfg.keys():
45 | if self.MAC_DICT[mac]['radio'] == self.robot_cfg[robot]['using-radio'] and robot != self.this_robot:
46 | self.MAC_DICT[mac]['publisher'] = rospy.Publisher('ddb/rajant/rssi/' + robot, Int32, queue_size = 10)
47 |
48 | rospy.Subscriber('ddb/rajant/log', String, self.update_dict)
49 | rospy.spin()
50 |
51 |
52 | def update_dict(self, data):
53 | # If we did not receive an update after dt, drop the RSSI to -1
54 | no_rssi = -1
55 | dt = rospy.Duration(20.)
56 |
57 | # Evaluate the input data as a dictionary
58 | alldata = data.data
59 | data_dict = ast.literal_eval(data.data)
60 |
61 | state = data_dict['watchResponse']['state']
62 |
63 | # Still need to figure out the rospy time issue
64 | # Update the RSSI
65 | for wireless_channel in state.keys():
66 | for wireless_keys in state[wireless_channel].keys():
67 | if wireless_keys[0:4] == 'peer':
68 | peer = wireless_keys
69 | if 'rssi' in state[wireless_channel][peer].keys():
70 | mac = state[wireless_channel][peer]['mac']
71 | if mac not in self.MAC_DICT.keys():
72 | rospy.logerr(f"MAC: {mac} is not in the list of known MACs. Is your radio_configs.yaml file correct?")
73 | continue
74 | rssi = state[wireless_channel][peer]['rssi']
75 | self.MAC_DICT[mac]['rssi'] = rssi
76 | self.MAC_DICT[mac]['timestamp'] = rospy.Time.now()
77 | # Only publish if the publisher is not None
78 | # This avoids an error for a radio that is connected but that is not
79 | # actively used by any robot
80 | if self.MAC_DICT[mac]['publisher'] is not None:
81 | self.MAC_DICT[mac]['publisher'].publish(rssi)
82 | else:
83 | rospy.logwarn(f"{self.this_robot} - Rajant API Parser - " +
84 | f"active radio {self.MAC_DICT[mac]['radio']} not assigned to any robot")
85 | elif 'mac' in state[wireless_channel][peer].keys() and 'rssi' not in state[wireless_channel][peer].keys():
86 | mac = state[wireless_channel][peer]['mac']
87 | if mac not in self.MAC_DICT.keys():
88 | rospy.logerr(f"MAC: {mac} is not in the list of known MACs. Is your radio_configs.yaml file correct?")
89 | continue
90 | if rospy.Time.now()-self.MAC_DICT[mac]['timestamp'] > dt:
91 | self.MAC_DICT[mac]['rssi'] = no_rssi
92 | # Only publish if the publisher is not None
93 | # See comment above
94 | if self.MAC_DICT[mac]['publisher'] is not None:
95 | self.MAC_DICT[mac]['publisher'].publish(no_rssi)
96 | else:
97 | rospy.logwarn(f"{self.this_robot} - Rajant API Parser - " +
98 | f"active radio {self.MAC_DICT[mac]['radio']} not assigned to any robot")
99 |
100 |
101 | if __name__ == '__main__':
102 |
103 | rospy.init_node('rajant_listener', anonymous=False)
104 | # Get robot name from the ~robot_name param
105 | robot_name = rospy.get_param('~robot_name', 'charon')
106 |
107 | # Get robot configs
108 | robot_configs_file = rospy.get_param("~robot_configs")
109 | with open(robot_configs_file, "r") as f:
110 | robot_configs = yaml.load(f, Loader=yaml.FullLoader)
111 | if robot_name not in robot_configs.keys():
112 | rospy.signal_shutdown("Robot not in config file")
113 | rospy.spin()
114 |
115 | # Get radio configs
116 | radio_configs_file = rospy.get_param("~radio_configs")
117 | with open(radio_configs_file, "r") as f:
118 | radio_configs = yaml.load(f, Loader=yaml.FullLoader)
119 | radio = robot_configs[robot_name]["using-radio"]
120 | if radio not in radio_configs.keys():
121 | rospy.signal_shutdown("Radio not in config file")
122 | rospy.spin()
123 |
124 | RajantParser(robot_name, robot_configs, radio_configs)
125 |
--------------------------------------------------------------------------------
/mocha_core/scripts/core/test/test_database.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import unittest
3 | import sys
4 | import os
5 | import uuid
6 | import geometry_msgs.msg
7 | import rospkg
8 | import pdb
9 | import rospy
10 | from colorama import Fore, Back, Style
11 | import yaml
12 | import pprint
13 |
14 | ROBOT0_TOPIC0_PRIO0 = b'\x00\x00\x00u\x00\xde'
15 | ROBOT0_TOPIC1_PRIO1_OLD = b'\x00\x01\x00v\x00\xde'
16 | ROBOT0_TOPIC1_PRIO1_NEW = b'\x00\x01\x00w\x00\xde'
17 | ROBOT1_TOPIC0_PRIO4_NEW = b'\x01\x00\x01O\x00\xdc'
18 | ROBOT1_TOPIC0_PRIO4_OLD = b'\x01\x00\x01O\x00\xc7'
19 |
20 |
21 | class test(unittest.TestCase):
22 | def setUp(self):
23 | test_name = self._testMethodName
24 | print("\n", Fore.RED, 20*"=", test_name, 20*"=", Style.RESET_ALL)
25 | super().setUp()
26 |
27 | def tearDown(self):
28 | rospy.sleep(1)
29 | super().tearDown()
30 |
31 | def test_get_header_list(self):
32 | dbl = sample_db.get_sample_dbl()
33 | for i in range(5):
34 |             # Loop several times to check that the database mutex is not left locked
35 | # Test get all headers
36 | headers = dbl.get_header_list()
37 | self.assertTrue(ROBOT0_TOPIC0_PRIO0 in headers)
38 | self.assertTrue(ROBOT1_TOPIC0_PRIO4_OLD in headers)
39 | # The order of headers should also be respected by the
40 | # priority. ROBOT0_TOPIC0_PRIO0 has the lowest priority
41 | # among the headers so it should be the last of the list.
42 | # Conversely, ROBOT1_TOPIC0_PRIO4_NEW has the highest priority
43 | # and it should be first
44 | self.assertEqual(headers[-1], ROBOT0_TOPIC0_PRIO0)
45 | self.assertEqual(headers[0], ROBOT1_TOPIC0_PRIO4_NEW)
46 | # Test get headers filtering by robot
47 | headers_rfilt = dbl.get_header_list(filter_robot_id=0)
48 | self.assertTrue(ROBOT0_TOPIC0_PRIO0 in headers_rfilt)
49 | self.assertFalse(ROBOT1_TOPIC0_PRIO4_NEW in headers_rfilt)
50 |             # ROBOT0_TOPIC0_PRIO0 should also be the last header after filtering
51 |             self.assertEqual(headers_rfilt[-1], ROBOT0_TOPIC0_PRIO0)
52 |             # The two PRIO1 headers should be the second and third headers.
53 |             # They share priority 1, so they should be ordered by timestamp,
54 |             # latest first
55 |             self.assertEqual(headers_rfilt[1], ROBOT0_TOPIC1_PRIO1_NEW)
56 |             self.assertEqual(headers_rfilt[2], ROBOT0_TOPIC1_PRIO1_OLD)
57 |             # Test latest-only filtering. Only the latest header of each
58 |             # topic should remain, so ROBOT0_TOPIC1_PRIO1_OLD is dropped.
59 | headers_tfilt = dbl.get_header_list(filter_robot_id=0,
60 | filter_latest=True)
61 | self.assertTrue(ROBOT0_TOPIC0_PRIO0 in headers_tfilt)
62 | self.assertTrue(ROBOT0_TOPIC1_PRIO1_NEW in headers_tfilt)
63 | self.assertFalse(ROBOT0_TOPIC1_PRIO1_OLD in headers_tfilt)
64 | self.assertFalse(ROBOT1_TOPIC0_PRIO4_OLD in headers_tfilt)
65 | self.assertFalse(ROBOT1_TOPIC0_PRIO4_NEW in headers_tfilt)
66 | self.assertTrue(len(headers_tfilt) == 2)
67 | headers_tfilt = dbl.get_header_list(filter_latest=True)
68 | self.assertTrue(ROBOT0_TOPIC0_PRIO0 in headers_tfilt)
69 | self.assertTrue(ROBOT0_TOPIC1_PRIO1_NEW in headers_tfilt)
70 | self.assertTrue(ROBOT1_TOPIC0_PRIO4_NEW in headers_tfilt)
71 | self.assertFalse(ROBOT0_TOPIC1_PRIO1_OLD in headers_tfilt)
72 | self.assertFalse(ROBOT1_TOPIC0_PRIO4_OLD in headers_tfilt)
73 | self.assertTrue(len(headers_tfilt) == 3)
74 |
75 | def test_headers_not_in_local(self):
76 | dbl = sample_db.get_sample_dbl()
77 | header_list = dbl.get_header_list()
78 | extra_header_1 = du.generate_random_header()
79 | extra_header_2 = du.generate_random_header()
80 | header_list.append(extra_header_2)
81 | header_list.append(extra_header_1)
82 | new_headers = [extra_header_1, extra_header_2]
83 | new_headers.sort()
84 | discover_extra_header = dbl.headers_not_in_local(header_list)
85 | discover_extra_header.sort()
86 | self.assertListEqual(discover_extra_header,
87 | new_headers)
88 |
89 | def test_find_header(self):
90 | dbl = sample_db.get_sample_dbl()
91 | header = b'\x01\x00\x01O\x00\xc7'
92 | robot_id = 1
93 | topic_id = 0
94 | dtype = dbl.db[robot_id][topic_id][header].dtype
95 | prio = dbl.db[robot_id][topic_id][header].priority
96 | ts = dbl.db[robot_id][topic_id][header].ts
97 | data = dbl.db[robot_id][topic_id][header].data
98 | dbm = dbl.find_header(header)
99 | self.assertEqual(dbm.robot_id, robot_id)
100 | self.assertEqual(dbm.topic_id, topic_id)
101 | self.assertEqual(dbm.dtype, dtype)
102 | self.assertEqual(dbm.priority, prio)
103 | self.assertAlmostEqual(dbm.ts, ts)
104 | self.assertEqual(dbm.data, data)
105 |
106 |
107 | if __name__ == '__main__':
108 | # Get the directory path and import all the required modules to test
109 | rospack = rospkg.RosPack()
110 | ddb_path = rospack.get_path('mocha_core')
111 | scripts_path = os.path.join(ddb_path, "scripts/core")
112 | sys.path.append(scripts_path)
113 | import database
114 | import sample_db
115 | import hash_comm as hc
116 | import database_utils as du
117 |
118 | dbl = sample_db.get_sample_dbl()
119 |
120 | # Set the node name
121 | rospy.init_node('test_synchronize_utils', anonymous=False)
122 |
123 | # Get the default path from the ddb_path
124 | robot_configs_default = os.path.join(ddb_path,
125 | "config/testConfigs/robot_configs.yaml")
126 | # Get the path to the robot config file from the ros parameter robot_configs
127 | robot_configs = rospy.get_param("robot_configs",
128 | robot_configs_default)
129 |
130 | # Get the yaml dictionary objects
131 | with open(robot_configs, "r") as f:
132 | robot_configs = yaml.load(f, Loader=yaml.FullLoader)
133 |
134 | # Run test cases!
135 | unittest.main()
136 |
--------------------------------------------------------------------------------
/mocha_core/scripts/core/integrate_database.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import os
3 | import random
4 | import signal
5 | import sys
6 | import time
7 | import traceback
8 | from functools import partial
9 |
10 | import roslaunch
11 | import rospkg
12 | import rospy
13 | import yaml
14 | import std_msgs.msg
15 | import subprocess
16 |
17 | import database_server as ds
18 | import database_utils as du
19 | import synchronize_channel as sync
20 |
21 |
22 | def ping(host):
23 | command = ["ping", "-c", "1", host]
24 | try:
25 | result = subprocess.run(command, stdout=subprocess.DEVNULL,
26 | stderr=subprocess.DEVNULL)
27 | return result.returncode == 0
28 | except Exception as e:
29 | print(f"Error pinging {host}: {e}")
30 | return False
31 |
32 |
33 | class IntegrateDatabase:
34 | def __init__(self):
35 | rospy.init_node("integrate_database", anonymous=False)
36 |
37 | self.this_robot = rospy.get_param("~robot_name")
38 | self.rssi_threshold = rospy.get_param("~rssi_threshold", 20)
39 | self.all_channels = []
40 | rospy.loginfo(f"{self.this_robot} - Integrate - " +
41 | f"RSSI threshold: {self.rssi_threshold}")
42 | self.client_timeout = rospy.get_param("~client_timeout", 6.)
43 | rospy.loginfo(f"{self.this_robot} - Integrate - " +
44 | f"Client timeout: {self.client_timeout}")
45 |
46 | # Load and check robot configs
47 | self.robot_configs_file = rospy.get_param("~robot_configs")
48 | with open(self.robot_configs_file, "r") as f:
49 | self.robot_configs = yaml.load(f, Loader=yaml.FullLoader)
50 | if self.this_robot not in self.robot_configs.keys():
51 | self.shutdown("Robot not in config file")
52 |
53 | # Load and check radio configs
54 | self.radio_configs_file = rospy.get_param("~radio_configs")
55 | with open(self.radio_configs_file, "r") as f:
56 | self.radio_configs = yaml.load(f, Loader=yaml.FullLoader)
57 | self.radio = self.robot_configs[self.this_robot]["using-radio"]
58 | if self.radio not in self.radio_configs.keys():
59 | self.shutdown("Radio not in config file")
60 |
61 | # Load and check topic configs
62 | self.topic_configs_file = rospy.get_param("~topic_configs")
63 | with open(self.topic_configs_file, "r") as f:
64 | self.topic_configs = yaml.load(f, Loader=yaml.FullLoader)
65 | self.node_type = self.robot_configs[self.this_robot]["node-type"]
66 | if self.node_type not in self.topic_configs.keys():
67 | self.shutdown("Node type not in config file")
68 |
69 | # Check that we can ping the radios
70 | ip = self.robot_configs[self.this_robot]["IP-address"]
71 | if not ping(ip):
72 | rospy.logerr(f"{self.this_robot} - Integrate - " +
73 | f"Cannot ping self {ip}. Is the radio on?")
74 | rospy.signal_shutdown("Cannot ping self")
75 | rospy.spin()
76 |
77 | # Create a database server object
78 | self.DBServer = ds.DatabaseServer(self.robot_configs,
79 | self.topic_configs)
80 |
81 | self.num_robot_in_comm = 0
82 |
83 | # Handle possible interruptions
84 | self.interrupted = False
85 |
86 | def signal_handler(sig, frame):
87 | rospy.logwarn(f"{self.this_robot} - Integrate - " +
88 | f"Got signal. Killing comm nodes.")
89 | self.interrupted = True
90 | rospy.signal_shutdown("Killed by user")
91 | signal.signal(signal.SIGINT, signal_handler)
92 |
93 |         rospy.loginfo(f"{self.this_robot} - Integrate - " +
94 |                       "Creating communication channels with other robots")
95 |
96 | # Start comm channels with other robots
97 | self.other_robots = [i for i in list(self.robot_configs.keys()) if i !=
98 | self.this_robot]
99 | for other_robot in self.other_robots:
100 | if other_robot not in self.robot_configs[self.this_robot]["clients"]:
101 | rospy.logwarn(
102 | f"{self.this_robot} - Integrate - "+
103 | f"Skipping channel {self.this_robot}->{other_robot} " +
104 | "as it is not in the graph of this robot"
105 | )
106 | continue
107 | # Start communication channel
108 | channel = sync.Channel(self.DBServer.dbl,
109 | self.this_robot,
110 | other_robot, self.robot_configs,
111 | self.client_timeout)
112 | self.all_channels.append(channel)
113 | channel.run()
114 |
115 | # Attach a radio trigger to each channel. This will be triggered
116 | # when the RSSI is high enough. You can use another approach here
117 | # such as using a timer to periodically trigger the sync
118 | rospy.Subscriber('ddb/rajant/rssi/' + other_robot,
119 | std_msgs.msg.Int32,
120 | self.rssi_cb,
121 | channel)
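            # A sketch of the timer-based alternative mentioned in the
            # comment above (kept commented out; the 5-second period is an
            # assumption for illustration, not a value taken from this repo):
            # rospy.Timer(rospy.Duration(5),
            #             lambda _evt, ch=channel: ch.trigger_sync())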
122 |
123 | # Wait for all the robots to start
124 | for _ in range(100):
125 | if self.interrupted or rospy.is_shutdown():
126 | self.shutdown("Killed while waiting for other robots")
127 | return
128 | rospy.sleep(.1)
129 |
130 | # Spin!
131 | rospy.spin()
132 |
133 | def shutdown(self, reason):
134 | assert isinstance(reason, str)
135 | rospy.logerr(f"{self.this_robot} - Integrate - " + reason)
136 |         for channel in self.all_channels:
137 |             channel.comm_node.terminate()
138 |         self.all_channels.clear()
139 | rospy.logwarn(f"{self.this_robot} - Integrate - " + "Killed Channels")
140 | self.DBServer.shutdown()
141 | rospy.logwarn(f"{self.this_robot} - Integrate - " + "Killed DB")
142 | rospy.signal_shutdown(reason)
143 | rospy.logwarn(f"{self.this_robot} - Integrate - " + "Shutting down")
144 | rospy.spin()
145 |
146 | def rssi_cb(self, data, comm_node):
147 | rssi = data.data
148 | if rssi > self.rssi_threshold:
149 | self.num_robot_in_comm += 1
150 | try:
151 | rospy.loginfo(f"{self.this_robot} <- {comm_node.target_robot}: Triggering comms")
152 | comm_node.trigger_sync()
153 |             except Exception:
154 | traceback.print_exception(*sys.exc_info())
155 |
156 |
157 | if __name__ == "__main__":
158 | # Start the node
159 | IntegrateDatabase()
160 |
--------------------------------------------------------------------------------
/interface_rajant/scripts/rajant_query.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import sys
3 | import subprocess
4 | from threading import Thread
5 | from queue import Queue, Empty
6 | from pprint import pprint
7 | import sys
8 | import os
9 | import time
10 | import yaml
11 | import re
12 | import pdb
13 | import string
14 | import hashlib
15 | import random
16 | import rospy
17 | import std_msgs.msg
18 | import rospkg
19 |
20 | def randomNumber():
21 |     """Generate a random four-digit number and return it as a string"""
22 | number = random.randint(1000, 9999)
23 | return str(number)
24 |
25 |
26 | def enqueue_output(out, queue):
27 | """ Saves the output of the process in a queue to be parsed
28 | afterwards """
29 | for line in iter(out.readline, b''):
30 | queue.put(line)
31 | out.close()
32 |
33 |
34 | def ping_ip(ip_address):
35 | try:
36 | # Run the ping command with a single ping packet (-c 1) and a timeout of 1 second (-W 1)
37 | result = subprocess.run(["ping", "-c", "1", "-W", "1", ip_address],
38 | stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, check=True)
39 | return result.returncode == 0
40 | except subprocess.CalledProcessError:
41 | # An error occurred (ping failed)
42 | return False
43 |
44 |
45 | def line_parser(line_bytes):
46 | """ Returns parsed str version of bytes input line
47 |     This is quite magic: the Rajant output is not YAML, but it is very
48 |     YAML-ish. If we replace '{' with ':', remove '}', and make a few
49 |     minor modifications, everything works out of the box!"""
50 | line_str = line_bytes.decode('unicode-escape')
51 | line_str = line_str.replace("{", ":")
52 | line_str = line_str.replace("}", "")
53 | # random numbers are added to avoid overwriting the key on the yaml
54 | line_str = re.sub("wireless",
55 | "wireless-" + randomNumber(), line_str)
56 | line_str = re.sub("peer",
57 | "peer-" + randomNumber(), line_str)
58 | # MACs are a little bit more tricky
59 | if line_str.replace(" ", "")[:4] == "mac:":
60 | separator = line_str.find(":") + 2
61 | mac_str = line_str[separator:]
62 | # FIXME The .replace that happens before can modify the MAC of the client.
63 | # This is an ugly fix. The right way to solve this problem is to use the proto message
64 | # (or implement a more resilient parser)
65 | mac_str = mac_str.replace(":", "{")
66 | mac_bytes = bytes(mac_str, 'raw_unicode_escape')
67 | mac_decoded = ":".join(["%02x" % c for c in mac_bytes[1:-2]])
68 | line_str = line_str[:separator] + mac_decoded + "\n"
69 | return line_str
70 |
71 |
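# Illustrative sketch of what line_parser() does to a couple of hypothetical
# Rajant output lines (the field names and random suffixes below are made up
# for this example):
#   line_parser(b'  wireless {\n')  ->  '  wireless-4821 :\n'
#   line_parser(b'    peer {\n')    ->  '    peer-1337 :\n'
# i.e. braces become YAML-style mappings and repeated keys get a random
# suffix so yaml.load() does not collapse them into a single entry.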
72 | ON_POSIX = 'posix' in sys.builtin_module_names
73 |
74 | if __name__ == "__main__":
75 | rospy.init_node("rajant_query", anonymous=False)
76 |
77 | # Get robot name from the ~robot_name param
78 | robot_name = rospy.get_param('~robot_name', 'charon')
79 |
80 | # Get robot configs
81 | robot_configs_file = rospy.get_param("~robot_configs")
82 | with open(robot_configs_file, "r") as f:
83 | robot_configs = yaml.load(f, Loader=yaml.FullLoader)
84 | if robot_name not in robot_configs.keys():
85 | rospy.signal_shutdown("Robot not in config file")
86 | rospy.spin()
87 |
88 | # Get radio configs
89 | radio_configs_file = rospy.get_param("~radio_configs")
90 | with open(radio_configs_file, "r") as f:
91 | radio_configs = yaml.load(f, Loader=yaml.FullLoader)
92 | radio = robot_configs[robot_name]["using-radio"]
93 | if radio not in radio_configs.keys():
94 |         rospy.signal_shutdown("Radio not in config file")
95 | rospy.spin()
96 |
97 | # Get path of the package
98 | rajant_name = robot_configs[robot_name]['using-radio']
99 | if rajant_name in radio_configs.keys():
100 | target_ip = radio_configs[rajant_name]['computed-IP-address']
101 | else:
102 | rospy.logerr(f"Radio {rajant_name} for robot {robot_name} not found in configs")
103 | rospy.signal_shutdown("Radio not in configs")
104 | rospy.spin()
105 |
106 |
107 | # Create ros publisher
108 | pub = rospy.Publisher('ddb/rajant/log', std_msgs.msg.String, queue_size=10)
109 |
110 | rospack = rospkg.RosPack()
111 | ros_path = rospack.get_path('interface_rajant')
112 |
113 | # Create subprocess for the java app
114 | java_bin = os.path.join(ros_path, 'scripts',
115 | 'thirdParty/watchstate/bcapi-watchstate-11.19.0-SNAPSHOT-jar-with-dependencies.jar')
116 |
117 | p = subprocess.Popen(['java',
118 | '-jar',
119 | java_bin,
120 | target_ip], stdout=subprocess.PIPE, close_fds=ON_POSIX)
121 | q = Queue()
122 | t = Thread(target=enqueue_output, args=(p.stdout, q))
123 | t.daemon = True # thread dies with the program
124 | t.start()
125 |
126 | # Go
127 | rospy.loginfo(f"{robot_name} - Rajant API Query - Starting on {rajant_name}")
128 |
129 | # ping the assigned radio
130 | if ping_ip(target_ip):
131 | rospy.loginfo(f"{robot_name} - Rajant API Query - ping success")
132 | else:
133 | rospy.logerr(f"{robot_name} - Rajant API Query - Rajant ping failed")
134 | rospy.signal_shutdown("Rajant IP")
135 | rospy.spin()
136 |
137 | while not rospy.is_shutdown():
138 | if not t.is_alive():
139 | rospy.logerr(f'{robot_name}: Rajant Java process died! Restarting...')
140 | p = subprocess.Popen(['java',
141 | '-jar',
142 | java_bin,
143 | target_ip], stdout=subprocess.PIPE, close_fds=ON_POSIX)
144 | q = Queue()
145 | t = Thread(target=enqueue_output, args=(p.stdout, q))
146 | t.daemon = True # thread dies with the program
147 | t.start()
148 |
149 |         # This sleep avoids problems with the Java process. DO NOT REMOVE IT
150 | rospy.sleep(1)
151 |
152 | try:
153 | line = q.get_nowait()
154 | except Empty:
155 | # No output yet
156 | pass
157 | else: # got line
158 | answ_array = line_parser(line)
159 | while not rospy.is_shutdown():
160 | try:
161 | newline = q.get_nowait()
162 | except Empty:
163 | break
164 | else:
165 | answ_array += line_parser(newline)
166 | try:
167 | yaml_res = yaml.load(answ_array, Loader=yaml.Loader)
168 | if type(yaml_res) == type({}):
169 | # rospy.logdebug(str(yaml_res) + "\n")
170 | pub.publish(str(yaml_res))
171 | else:
172 | rospy.logerr(f"{robot_name}: YAML from Rajant did not look like an object!")
173 | except yaml.scanner.ScannerError:
174 | rospy.logerr(f"{robot_name}: Could not parse YAML from Rajant!")
175 |
--------------------------------------------------------------------------------
/mocha_core/scripts/core/test/test_synchronize_channel.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import os
3 | import unittest
4 | import sys
5 | import uuid
6 | import pdb
7 | import time
8 | import yaml
9 | import rospkg
10 | import rospy
11 | import multiprocessing
12 | from pprint import pprint
13 | from colorama import Fore, Style
14 | import pprint
15 |
16 |
17 | class Test(unittest.TestCase):
18 | def setUp(self):
19 | test_name = self._testMethodName
20 | print("\n", Fore.RED, 20*"=", test_name, 20*"=", Style.RESET_ALL)
21 | super().setUp()
22 |
23 | def tearDown(self):
24 | rospy.sleep(1)
25 | super().tearDown()
26 |
27 | def test_onehop_oneway_sync(self):
28 | dbl1 = sample_db.get_sample_dbl()
29 | dbl2 = sample_db.get_sample_dbl()
30 | dbm = db.DBMessage(1, 1, 2, 1,
31 | rospy.Time(123.456), bytes('New data', 'utf-8'))
32 | dbl2.add_modify_data(dbm)
33 | node_1 = sync.Channel(dbl1, 'basestation',
34 | 'charon', robot_configs, 2)
35 | node_2 = sync.Channel(dbl2, 'charon',
36 | 'basestation', robot_configs, 2)
37 | node_1.run()
38 | node_2.run()
39 | node_1.trigger_sync()
40 | # Wait for comms propagation
41 | time.sleep(2)
42 | node_1.stop()
43 | node_2.stop()
44 | self.assertDictEqual(dbl1.db, dbl2.db)
45 |         # This sleep avoids getting an error because the socket is
46 | # already in use
47 | time.sleep(4)
48 |
49 | def test_convoluted_onehop_oneway_sync(self):
50 | self.maxDiff=None
51 | dbl1 = sample_db.get_sample_dbl()
52 | dbl2 = sample_db.get_sample_dbl()
53 | # print(id(db1), id(db2))
54 | # Modify one of the features in the db
55 | dbm = db.DBMessage(1, 2, 2, 1,
56 | rospy.Time(123.456), bytes('New data', 'utf-8'))
57 | dbl2.add_modify_data(dbm)
58 | node_1 = sync.Channel(dbl1, 'basestation',
59 | 'charon', robot_configs, 2)
60 | node_1.run()
61 | node_1.trigger_sync()
62 | time.sleep(8)
63 |
64 | node_2 = sync.Channel(dbl2, 'charon', 'basestation',
65 | robot_configs, 2)
66 | # Start the comm channel
67 | node_2.run()
68 |
69 | # Trigger a synchronization
70 | for i in range(10):
71 | node_2.trigger_sync()
72 | node_1.trigger_sync()
73 |
74 | # Wait for comms propagation
75 | time.sleep(2)
76 | self.assertDictEqual(dbl1.db, dbl2.db)
77 |
78 |         # Stop the servers and wait until everything is killed
79 | node_1.stop()
80 | node_2.stop()
81 |         # This sleep avoids getting an error because the socket is
82 | # already in use
83 | time.sleep(4)
84 |
85 | def test_convoluted_onehop_twoway_sync(self):
86 | self.maxDiff=None
87 | dbl1 = sample_db.get_sample_dbl()
88 | dbl2 = sample_db.get_sample_dbl()
89 | # Modify one of the features in the db
90 | dbm = db.DBMessage(1, 2, 2, 1,
91 | rospy.Time(123.456), bytes('New data', 'utf-8'))
92 | dbl1.add_modify_data(dbm)
93 | dbm = db.DBMessage(1, 3, 2, 1,
94 | rospy.Time(123.456), bytes('New data', 'utf-8'))
95 | dbl2.add_modify_data(dbm)
96 | node_1 = sync.Channel(dbl1, 'basestation',
97 | 'charon', robot_configs, 2)
98 | node_1.run()
99 | node_1.trigger_sync()
100 | time.sleep(8)
101 |
102 | node_2 = sync.Channel(dbl2, 'charon', 'basestation',
103 | robot_configs, 2)
104 | # Start the comm channel
105 | node_2.run()
106 |
107 | # Trigger a synchronization
108 | for i in range(10):
109 | node_2.trigger_sync()
110 | node_1.trigger_sync()
111 |
112 | # Wait for comms propagation
113 | time.sleep(2)
114 | self.assertDictEqual(dbl1.db, dbl2.db)
115 |
116 |         # Stop the servers and wait until everything is killed
117 | node_1.stop()
118 | node_2.stop()
119 |         # This sleep avoids getting an error because the socket is
120 | # already in use
121 | time.sleep(4)
122 |
123 | def test_twohop_oneway_sync(self):
124 | dbl_robot1 = sample_db.get_sample_dbl()
125 | dbl_groundstation = sample_db.get_sample_dbl()
126 | dbl_robot2 = sample_db.get_sample_dbl()
127 | # Modify one of the features in the db
128 | dbm = db.DBMessage(1, 2, 2, 1,
129 | rospy.Time(123.456), bytes('New data', 'utf-8'))
130 | dbl_robot1.add_modify_data(dbm)
131 | node_1 = sync.Channel(dbl_robot1, 'charon',
132 | 'basestation', robot_configs, 2)
133 | node_2 = sync.Channel(dbl_groundstation,
134 | 'basestation',
135 | 'charon', robot_configs, 2)
136 | node_3 = sync.Channel(dbl_groundstation,
137 | 'basestation',
138 | 'styx', robot_configs, 2)
139 | node_4 = sync.Channel(dbl_robot2,
140 | 'styx', 'basestation',
141 | robot_configs, 2)
142 |
143 | node_1.run()
144 | node_2.run()
145 | node_3.run()
146 | node_4.run()
147 |
148 | # Trigger first sync
149 | node_2.trigger_sync()
150 |
151 | # Wait for comms propagation
152 | time.sleep(2)
153 | self.assertDictEqual(dbl_groundstation.db, dbl_robot1.db)
154 |
155 | # Trigger second sync
156 | node_4.trigger_sync()
157 | time.sleep(2)
158 | self.assertDictEqual(dbl_robot2.db, dbl_groundstation.db)
159 | self.assertDictEqual(dbl_robot2.db, dbl_robot1.db)
160 |
161 | # Wait until all the servers are killed
162 | node_1.stop()
163 | node_2.stop()
164 | node_3.stop()
165 | node_4.stop()
166 |         # This sleep avoids getting an error because the socket is
167 | # already in use
168 | time.sleep(4)
169 |
170 | if __name__ == '__main__':
171 | rospy.init_node('test_synchronize_channel', anonymous=False,
172 | log_level=rospy.DEBUG)
173 |
174 | # Get the directory path and import all the required modules to test
175 | rospack = rospkg.RosPack()
176 | ddb_path = rospack.get_path('mocha_core')
177 | scripts_path = os.path.join(ddb_path, "scripts/core")
178 | sys.path.append(scripts_path)
179 | import sample_db
180 | import synchronize_channel as sync
181 | import database_utils as du
182 | import database as db
183 |
184 | # Get the default path from the ddb_path
185 | robot_configs_default = os.path.join(ddb_path,
186 | "config/testConfigs/robot_configs.yaml")
187 | # Get the path to the robot config file from the ros parameter robot_configs
188 | robot_configs = rospy.get_param("robot_configs",
189 | robot_configs_default)
190 |
191 | # Get the yaml dictionary objects
192 | with open(robot_configs, "r") as f:
193 | robot_configs = yaml.load(f, Loader=yaml.FullLoader)
194 |
195 | # Run test cases
196 | unittest.main()
197 |
--------------------------------------------------------------------------------
/mocha_core/scripts/publishers/topic_publisher.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import os
3 | import pdb
4 | import sys
5 |
6 | import rospkg
7 | import rospy
8 | import yaml
9 | import re
10 | import mocha_core.msg
11 |
12 | import mocha_core.srv
13 |
14 |
15 | class TopicPublisher():
16 | def __init__(self, this_robot, target, msg_history="WHOLE_HISTORY"):
17 |
18 | self.this_robot = this_robot
19 |
20 |
21 | # Service configuration
22 | self.__select_service = "integrate_database/SelectDB"
23 | self.__get_header_service = "integrate_database/GetDataHeaderDB"
24 |
25 | try:
26 | rospy.wait_for_service(self.__select_service)
27 | rospy.wait_for_service(self.__get_header_service)
28 | except rospy.ROSInterruptException as exc:
29 | rospy.logdebug("Service did not process request: " +
30 | str(exc))
31 | rospy.signal_shutdown("Service did not process request")
32 | rospy.spin()
33 | self.__select_db = rospy.ServiceProxy(
34 | self.__select_service, mocha_core.srv.SelectDB
35 | )
36 |
37 | self.__get_header_db = rospy.ServiceProxy(
38 | self.__get_header_service, mocha_core.srv.GetDataHeaderDB
39 | )
40 |
41 | # List of robots to pull
42 | self.__robot_list = []
43 |
44 | # Publisher creation
45 | self.publishers = {}
46 | self.header_pub = rospy.Publisher("ddb/topic_publisher/headers",
47 | mocha_core.msg.Header_pub,
48 | queue_size=10)
49 | for t in target:
50 | robot, robot_id, topic, topic_id, obj = t
51 |             if robot_id not in self.__robot_list:
52 | self.__robot_list.append(robot_id)
53 | self.publishers[(robot_id, topic_id)] = {"pub": rospy.Publisher(f"/{robot}{topic}",
54 | obj,
55 | queue_size=10),
56 | "ts": rospy.Time(1, 0)}
57 |
58 |
59 | def run(self):
60 | rospy.loginfo(f"{self.this_robot} - Topic Publisher - Started")
61 | rate = rospy.Rate(10)
62 | headers = set()
63 | while not rospy.is_shutdown():
64 | for robot_id in self.__robot_list:
65 | headers_to_get = []
66 |
67 | try:
68 | answ = self.__select_db(robot_id, None, None)
69 | except rospy.ServiceException as exc:
70 | rospy.logdebug(f"Service did not process request {exc}")
71 | continue
72 | returned_headers = du.deserialize_headers(answ.headers)
73 | if len(returned_headers) == 0:
74 | rate.sleep()
75 | continue
76 |
77 | for header_ in returned_headers:
78 | if header_ not in headers:
79 | headers_to_get.append(header_)
80 |
81 | for get_header in headers_to_get:
82 | rospy.logdebug(f"Getting headers {get_header}")
83 | try:
84 | answ = self.__get_header_db(get_header)
85 | except rospy.ServiceException as exc:
86 | rospy.logerr("Service did not process request: " +
87 | str(exc))
88 | continue
89 | headers.add(get_header)
90 |
91 | ans_robot_id, ans_topic_id, ans_ts, ans_data = du.parse_answer(answ,
92 | msg_types)
93 | for t in self.publishers.keys():
94 | if t == (ans_robot_id, ans_topic_id):
95 | assert isinstance(ans_data,
96 | self.publishers[t]['pub'].data_class)
97 | # FIXME: remove this line once we have proper time
98 | # filtering implemented
99 | if ans_ts > self.publishers[t]["ts"]:
100 | self.publishers[t]["ts"] = ans_ts
101 | self.publishers[t]["pub"].publish(ans_data)
102 | self.header_pub.publish(get_header)
103 | rospy.logdebug(f"Publishing robot_id: {ans_robot_id}" +
104 | f" - topic: {ans_topic_id}")
105 |                         else:
106 |                             rospy.logdebug(f"Skipping robot_id: {ans_robot_id}" +
107 |                                            f" - topic: {ans_topic_id} as there is an old ts")
108 | rate.sleep()
109 |
110 |
111 | if __name__ == "__main__":
112 | rospy.init_node("mocha_core_publisher", anonymous=False)
113 |
114 | # Get the mocha_core path
115 | rospack = rospkg.RosPack()
116 | ddb_path = rospack.get_path('mocha_core')
117 | scripts_path = os.path.join(ddb_path, "scripts/core")
118 | sys.path.append(scripts_path)
119 | import database_utils as du
120 |
121 | # Get robot from the parameters
122 | this_robot = rospy.get_param("~robot_name")
123 |
124 | # Get the robot_config path and generate the robot_configs object
125 | robot_configs_default = os.path.join(ddb_path,
126 | "config/testConfigs/robot_configs.yaml")
127 | robot_configs_path = rospy.get_param("~robot_configs", robot_configs_default)
128 | with open(robot_configs_path, 'r') as f:
129 | robot_configs = yaml.load(f, Loader=yaml.FullLoader)
130 | if this_robot not in robot_configs.keys():
131 | rospy.logerr(f"Robot {this_robot} not in robot_configs")
132 | rospy.signal_shutdown("Robot not in robot_configs")
133 | rospy.spin()
134 |
135 |
136 | # Get the topic_configs path and generate the topic_configs object
137 | topic_configs_default = os.path.join(ddb_path,
138 | "config/testConfigs/topic_configs.yaml")
139 | topic_configs_path = rospy.get_param("~topic_configs", topic_configs_default)
140 | with open(topic_configs_path, 'r') as f:
141 | topic_configs = yaml.load(f, Loader=yaml.FullLoader)
142 |
143 | # Get msg_types dict used to publish the topics
144 | msg_types = du.msg_types(robot_configs, topic_configs)
145 |
146 | # Flip the dict so that we have name: obj
147 | msg_objects = {}
148 | # for msg in msg_types:
149 | # msg_objects[msg_types[msg]["name"]] = msg_types[msg]["obj"]
150 |
151 | list_of_topics = set()
152 |
153 | # Iterate for each robot in robot_configs, and generate the topics
154 | for robot in robot_configs.keys():
155 | robot_id = du.get_robot_id_from_name(robot_configs, robot)
156 | robot_type = robot_configs[robot]["node-type"]
157 | topic_list = topic_configs[robot_type]
158 | for topic_id, topic in enumerate(topic_list):
159 | msg_topic = topic["msg_topic"]
160 | obj = msg_types[robot_id][topic_id]["obj"]
161 | robot_tuple = (robot, robot_id, topic["msg_topic"], topic_id, obj)
162 | list_of_topics.add(robot_tuple)
163 |
164 | pub = TopicPublisher(this_robot, list_of_topics)
165 | pub.run()
166 |
--------------------------------------------------------------------------------
/mocha_launch/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(mocha_launch)
3 |
4 | ## Compile as C++11, supported in ROS Kinetic and newer
5 | # add_compile_options(-std=c++11)
6 |
7 | ## Find catkin macros and libraries
8 | ## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
9 | ## is used, also find other catkin packages
10 | find_package(catkin REQUIRED COMPONENTS
11 | roscpp
12 | rospy
13 | std_msgs
14 | )
15 |
16 | ## System dependencies are found with CMake's conventions
17 | # find_package(Boost REQUIRED COMPONENTS system)
18 |
19 |
20 | ## Uncomment this if the package has a setup.py. This macro ensures
21 | ## modules and global scripts declared therein get installed
22 | ## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
23 | # catkin_python_setup()
24 |
25 | ################################################
26 | ## Declare ROS messages, services and actions ##
27 | ################################################
28 |
29 | ## To declare and build messages, services or actions from within this
30 | ## package, follow these steps:
31 | ## * Let MSG_DEP_SET be the set of packages whose message types you use in
32 | ## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
33 | ## * In the file package.xml:
34 | ## * add a build_depend tag for "message_generation"
35 | ## * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET
36 | ## * If MSG_DEP_SET isn't empty the following dependency has been pulled in
37 | ## but can be declared for certainty nonetheless:
38 | ## * add a exec_depend tag for "message_runtime"
39 | ## * In this file (CMakeLists.txt):
40 | ## * add "message_generation" and every package in MSG_DEP_SET to
41 | ## find_package(catkin REQUIRED COMPONENTS ...)
42 | ## * add "message_runtime" and every package in MSG_DEP_SET to
43 | ## catkin_package(CATKIN_DEPENDS ...)
44 | ## * uncomment the add_*_files sections below as needed
45 | ## and list every .msg/.srv/.action file to be processed
46 | ## * uncomment the generate_messages entry below
47 | ## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
48 |
49 | ## Generate messages in the 'msg' folder
50 | # add_message_files(
51 | # FILES
52 | # Message1.msg
53 | # Message2.msg
54 | # )
55 |
56 | ## Generate services in the 'srv' folder
57 | # add_service_files(
58 | # FILES
59 | # Service1.srv
60 | # Service2.srv
61 | # )
62 |
63 | ## Generate actions in the 'action' folder
64 | # add_action_files(
65 | # FILES
66 | # Action1.action
67 | # Action2.action
68 | # )
69 |
70 | ## Generate added messages and services with any dependencies listed here
71 | # generate_messages(
72 | # DEPENDENCIES
73 | # std_msgs
74 | # )
75 |
76 | ################################################
77 | ## Declare ROS dynamic reconfigure parameters ##
78 | ################################################
79 |
80 | ## To declare and build dynamic reconfigure parameters within this
81 | ## package, follow these steps:
82 | ## * In the file package.xml:
83 | ## * add a build_depend and a exec_depend tag for "dynamic_reconfigure"
84 | ## * In this file (CMakeLists.txt):
85 | ## * add "dynamic_reconfigure" to
86 | ## find_package(catkin REQUIRED COMPONENTS ...)
87 | ## * uncomment the "generate_dynamic_reconfigure_options" section below
88 | ## and list every .cfg file to be processed
89 |
90 | ## Generate dynamic reconfigure parameters in the 'cfg' folder
91 | # generate_dynamic_reconfigure_options(
92 | # cfg/DynReconf1.cfg
93 | # cfg/DynReconf2.cfg
94 | # )
95 |
96 | ###################################
97 | ## catkin specific configuration ##
98 | ###################################
99 | ## The catkin_package macro generates cmake config files for your package
100 | ## Declare things to be passed to dependent projects
101 | ## INCLUDE_DIRS: uncomment this if your package contains header files
102 | ## LIBRARIES: libraries you create in this project that dependent projects also need
103 | ## CATKIN_DEPENDS: catkin_packages dependent projects also need
104 | ## DEPENDS: system dependencies of this project that dependent projects also need
105 | catkin_package(
106 | # INCLUDE_DIRS include
107 | # LIBRARIES mocha_launch
108 | # CATKIN_DEPENDS roscpp rospy std_msgs
109 | # DEPENDS system_lib
110 | )
111 |
112 | ###########
113 | ## Build ##
114 | ###########
115 |
116 | ## Specify additional locations of header files
117 | ## Your package locations should be listed before other locations
118 | include_directories(
119 | # include
120 | ${catkin_INCLUDE_DIRS}
121 | )
122 |
123 | ## Declare a C++ library
124 | # add_library(${PROJECT_NAME}
125 | # src/${PROJECT_NAME}/mocha_launch.cpp
126 | # )
127 |
128 | ## Add cmake target dependencies of the library
129 | ## as an example, code may need to be generated before libraries
130 | ## either from message generation or dynamic reconfigure
131 | # add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
132 |
133 | ## Declare a C++ executable
134 | ## With catkin_make all packages are built within a single CMake context
135 | ## The recommended prefix ensures that target names across packages don't collide
136 | # add_executable(${PROJECT_NAME}_node src/mocha_launch_node.cpp)
137 |
138 | ## Rename C++ executable without prefix
139 | ## The above recommended prefix causes long target names, the following renames the
140 | ## target back to the shorter version for ease of user use
141 | ## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node"
142 | # set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "")
143 |
144 | ## Add cmake target dependencies of the executable
145 | ## same as for the library above
146 | # add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
147 |
148 | ## Specify libraries to link a library or executable target against
149 | # target_link_libraries(${PROJECT_NAME}_node
150 | # ${catkin_LIBRARIES}
151 | # )
152 |
153 | #############
154 | ## Install ##
155 | #############
156 |
157 | # all install targets should use catkin DESTINATION variables
158 | # See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
159 |
160 | ## Mark executable scripts (Python etc.) for installation
161 | ## in contrast to setup.py, you can choose the destination
162 | # install(PROGRAMS
163 | # scripts/my_python_script
164 | # DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
165 | # )
166 |
167 | ## Mark executables and/or libraries for installation
168 | # install(TARGETS ${PROJECT_NAME} ${PROJECT_NAME}_node
169 | # ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
170 | # LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
171 | # RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
172 | # )
173 |
174 | ## Mark cpp header files for installation
175 | # install(DIRECTORY include/${PROJECT_NAME}/
176 | # DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
177 | # FILES_MATCHING PATTERN "*.h"
178 | # PATTERN ".svn" EXCLUDE
179 | # )
180 |
181 | ## Mark other files for installation (e.g. launch and bag files, etc.)
182 | install(DIRECTORY
183 | launch
184 | DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
185 | )
186 |
187 | #############
188 | ## Testing ##
189 | #############
190 |
191 | ## Add gtest based cpp test target and link libraries
192 | # catkin_add_gtest(${PROJECT_NAME}-test test/test_mocha_launch.cpp)
193 | # if(TARGET ${PROJECT_NAME}-test)
194 | # target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
195 | # endif()
196 |
197 | ## Add folders to be run by python nosetests
198 | # catkin_add_nosetests(test)
199 |
--------------------------------------------------------------------------------
/interface_rajant/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(interface_rajant)
3 |
4 | ## Compile as C++11, supported in ROS Kinetic and newer
5 | # add_compile_options(-std=c++11)
6 |
7 | ## Find catkin macros and libraries
8 | ## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
9 | ## is used, also find other catkin packages
10 | find_package(catkin REQUIRED COMPONENTS
11 | rospy
12 | std_msgs
13 | )
14 |
15 | ## System dependencies are found with CMake's conventions
16 | # find_package(Boost REQUIRED COMPONENTS system)
17 |
18 |
19 | ## Uncomment this if the package has a setup.py. This macro ensures
20 | ## modules and global scripts declared therein get installed
21 | ## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
22 | # catkin_python_setup()
23 |
24 | ################################################
25 | ## Declare ROS messages, services and actions ##
26 | ################################################
27 |
28 | ## To declare and build messages, services or actions from within this
29 | ## package, follow these steps:
30 | ## * Let MSG_DEP_SET be the set of packages whose message types you use in
31 | ## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
32 | ## * In the file package.xml:
33 | ## * add a build_depend tag for "message_generation"
34 | ## * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET
35 | ## * If MSG_DEP_SET isn't empty the following dependency has been pulled in
36 | ## but can be declared for certainty nonetheless:
37 | ## * add a exec_depend tag for "message_runtime"
38 | ## * In this file (CMakeLists.txt):
39 | ## * add "message_generation" and every package in MSG_DEP_SET to
40 | ## find_package(catkin REQUIRED COMPONENTS ...)
41 | ## * add "message_runtime" and every package in MSG_DEP_SET to
42 | ## catkin_package(CATKIN_DEPENDS ...)
43 | ## * uncomment the add_*_files sections below as needed
44 | ## and list every .msg/.srv/.action file to be processed
45 | ## * uncomment the generate_messages entry below
46 | ## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
47 |
48 | ## Generate messages in the 'msg' folder
49 | # add_message_files(
50 | # FILES
51 | # Message1.msg
52 | # Message2.msg
53 | # )
54 |
55 | ## Generate services in the 'srv' folder
56 | # add_service_files(
57 | # FILES
58 | # Service1.srv
59 | # Service2.srv
60 | # )
61 |
62 | ## Generate actions in the 'action' folder
63 | # add_action_files(
64 | # FILES
65 | # Action1.action
66 | # Action2.action
67 | # )
68 |
69 | ## Generate added messages and services with any dependencies listed here
70 | # generate_messages(
71 | # DEPENDENCIES
72 | # std_msgs
73 | # )
74 |
75 | ################################################
76 | ## Declare ROS dynamic reconfigure parameters ##
77 | ################################################
78 |
79 | ## To declare and build dynamic reconfigure parameters within this
80 | ## package, follow these steps:
81 | ## * In the file package.xml:
82 | ## * add a build_depend and a exec_depend tag for "dynamic_reconfigure"
83 | ## * In this file (CMakeLists.txt):
84 | ## * add "dynamic_reconfigure" to
85 | ## find_package(catkin REQUIRED COMPONENTS ...)
86 | ## * uncomment the "generate_dynamic_reconfigure_options" section below
87 | ## and list every .cfg file to be processed
88 |
89 | ## Generate dynamic reconfigure parameters in the 'cfg' folder
90 | # generate_dynamic_reconfigure_options(
91 | # cfg/DynReconf1.cfg
92 | # cfg/DynReconf2.cfg
93 | # )
94 |
95 | ###################################
96 | ## catkin specific configuration ##
97 | ###################################
98 | ## The catkin_package macro generates cmake config files for your package
99 | ## Declare things to be passed to dependent projects
100 | ## INCLUDE_DIRS: uncomment this if your package contains header files
101 | ## LIBRARIES: libraries you create in this project that dependent projects also need
102 | ## CATKIN_DEPENDS: catkin_packages dependent projects also need
103 | ## DEPENDS: system dependencies of this project that dependent projects also need
104 | catkin_package(
105 | # INCLUDE_DIRS include
106 | # LIBRARIES interface_rajant
107 | # CATKIN_DEPENDS rospy std_msgs
108 | # DEPENDS system_lib
109 | )
110 |
111 | ###########
112 | ## Build ##
113 | ###########
114 |
115 | ## Specify additional locations of header files
116 | ## Your package locations should be listed before other locations
117 | include_directories(
118 | # include
119 | ${catkin_INCLUDE_DIRS}
120 | )
121 |
122 | ## Declare a C++ library
123 | # add_library(${PROJECT_NAME}
124 | # src/${PROJECT_NAME}/interface_rajant.cpp
125 | # )
126 |
127 | ## Add cmake target dependencies of the library
128 | ## as an example, code may need to be generated before libraries
129 | ## either from message generation or dynamic reconfigure
130 | # add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
131 |
132 | ## Declare a C++ executable
133 | ## With catkin_make all packages are built within a single CMake context
134 | ## The recommended prefix ensures that target names across packages don't collide
135 | # add_executable(${PROJECT_NAME}_node src/interface_rajant_node.cpp)
136 |
137 | ## Rename C++ executable without prefix
138 | ## The above recommended prefix causes long target names, the following renames the
139 | ## target back to the shorter version for ease of user use
140 | ## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node"
141 | # set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "")
142 |
143 | ## Add cmake target dependencies of the executable
144 | ## same as for the library above
145 | # add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
146 |
147 | ## Specify libraries to link a library or executable target against
148 | # target_link_libraries(${PROJECT_NAME}_node
149 | # ${catkin_LIBRARIES}
150 | # )
151 |
152 | #############
153 | ## Install ##
154 | #############
155 |
156 | # all install targets should use catkin DESTINATION variables
157 | # See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
158 |
159 | ## Mark executable scripts (Python etc.) for installation
160 | ## in contrast to setup.py, you can choose the destination
161 | install(PROGRAMS
162 | scripts/rajant_listener.py
163 | scripts/rajant_listener_new.py
164 | scripts/rajant_log.py
165 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
166 | )
167 |
168 | ## Mark executables and/or libraries for installation
169 | # install(TARGETS ${PROJECT_NAME} ${PROJECT_NAME}_node
170 | # ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
171 | # LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
172 | # RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
173 | # )
174 |
175 | ## Mark cpp header files for installation
176 | # install(DIRECTORY include/${PROJECT_NAME}/
177 | # DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
178 | # FILES_MATCHING PATTERN "*.h"
179 | # PATTERN ".svn" EXCLUDE
180 | # )
181 |
182 | ## Mark other files for installation (e.g. launch and bag files, etc.)
183 | install(DIRECTORY
184 | launch
185 | DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
186 | )
187 | install(DIRECTORY
188 | scripts/thirdParty
189 | DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/scripts
190 | )
191 |
192 | #############
193 | ## Testing ##
194 | #############
195 |
196 | ## Add gtest based cpp test target and link libraries
197 | # catkin_add_gtest(${PROJECT_NAME}-test test/test_interface_rajant.cpp)
198 | # if(TARGET ${PROJECT_NAME}-test)
199 | # target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
200 | # endif()
201 |
202 | ## Add folders to be run by python nosetests
203 | # catkin_add_nosetests(test)
204 |
--------------------------------------------------------------------------------
/mocha_core/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(mocha_core)
3 |
4 | ## Compile as C++11, supported in ROS Kinetic and newer
5 | # add_compile_options(-std=c++11)
6 |
7 | ## Find catkin macros and libraries
8 | ## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
9 | ## is used, also find other catkin packages
10 | find_package(catkin REQUIRED COMPONENTS
11 | geometry_msgs
12 | roscpp
13 | rospy
14 | std_msgs
15 | geometry_msgs
16 | sensor_msgs
17 | vision_msgs
18 | message_generation
19 | )
20 |
21 | ## System dependencies are found with CMake's conventions
22 | # find_package(Boost REQUIRED COMPONENTS system)
23 |
24 |
25 | ## Uncomment this if the package has a setup.py. This macro ensures
26 | ## modules and global scripts declared therein get installed
27 | ## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
28 | # catkin_python_setup()
29 |
30 | ################################################
31 | ## Declare ROS messages, services and actions ##
32 | ################################################
33 |
34 | ## To declare and build messages, services or actions from within this
35 | ## package, follow these steps:
36 | ## * Let MSG_DEP_SET be the set of packages whose message types you use in
37 | ## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...).
38 | ## * In the file package.xml:
39 | ## * add a build_depend tag for "message_generation"
40 | ## * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET
41 | ## * If MSG_DEP_SET isn't empty the following dependency has been pulled in
42 | ## but can be declared for certainty nonetheless:
43 | ## * add a exec_depend tag for "message_runtime"
44 | ## * In this file (CMakeLists.txt):
45 | ## * add "message_generation" and every package in MSG_DEP_SET to
46 | ## find_package(catkin REQUIRED COMPONENTS ...)
47 | ## * add "message_runtime" and every package in MSG_DEP_SET to
48 | ## catkin_package(CATKIN_DEPENDS ...)
49 | ## * uncomment the add_*_files sections below as needed
50 | ## and list every .msg/.srv/.action file to be processed
51 | ## * uncomment the generate_messages entry below
52 | ## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...)
53 |
54 | ## Generate messages in the 'msg' folder
55 | add_message_files(
56 | FILES
57 | # Legacy, may be removed in the future
58 | Header_pub.msg
59 | SM_state.msg
60 | Client_stats.msg
61 | )
62 |
63 | ## Generate services in the 'srv' folder
64 | add_service_files(
65 | FILES
66 | AddUpdateDB.srv
67 | GetDataHeaderDB.srv
68 | SelectDB.srv
69 | )
70 |
71 | ## Generate actions in the 'action' folder
72 | # add_action_files(
73 | # FILES
74 | # Action1.action
75 | # Action2.action
76 | # )
77 |
78 | ## Generate added messages and services with any dependencies listed here
79 | generate_messages(
80 | DEPENDENCIES
81 | std_msgs
82 | geometry_msgs
83 | vision_msgs
84 | )
85 |
86 | ################################################
87 | ## Declare ROS dynamic reconfigure parameters ##
88 | ################################################
89 |
90 | ## To declare and build dynamic reconfigure parameters within this
91 | ## package, follow these steps:
92 | ## * In the file package.xml:
93 | ## * add a build_depend and a exec_depend tag for "dynamic_reconfigure"
94 | ## * In this file (CMakeLists.txt):
95 | ## * add "dynamic_reconfigure" to
96 | ## find_package(catkin REQUIRED COMPONENTS ...)
97 | ## * uncomment the "generate_dynamic_reconfigure_options" section below
98 | ## and list every .cfg file to be processed
99 |
100 | ## Generate dynamic reconfigure parameters in the 'cfg' folder
101 | # generate_dynamic_reconfigure_options(
102 | # cfg/DynReconf1.cfg
103 | # cfg/DynReconf2.cfg
104 | # )
105 |
106 | ###################################
107 | ## catkin specific configuration ##
108 | ###################################
109 | ## The catkin_package macro generates cmake config files for your package
110 | ## Declare things to be passed to dependent projects
111 | ## INCLUDE_DIRS: uncomment this if your package contains header files
112 | ## LIBRARIES: libraries you create in this project that dependent projects also need
113 | ## CATKIN_DEPENDS: catkin_packages dependent projects also need
114 | ## DEPENDS: system dependencies of this project that dependent projects also need
115 | catkin_package(
116 | # INCLUDE_DIRS include
117 | CATKIN_DEPENDS message_runtime std_msgs geometry_msgs vision_msgs
118 | # DEPENDS system_lib
119 | )
120 |
121 | ###########
122 | ## Build ##
123 | ###########
124 |
125 | ## Specify additional locations of header files
126 | ## Your package locations should be listed before other locations
127 | include_directories(
128 | # include
129 | ${catkin_INCLUDE_DIRS}
130 | )
131 |
132 | ## Declare a C++ library
133 | # add_library(${PROJECT_NAME}
134 | # src/${PROJECT_NAME}/radio_communication.cpp
135 | # )
136 |
137 | ## Add cmake target dependencies of the library
138 | ## as an example, code may need to be generated before libraries
139 | ## either from message generation or dynamic reconfigure
140 | # add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
141 |
142 | ## Declare a C++ executable
143 | ## With catkin_make all packages are built within a single CMake context
144 | ## The recommended prefix ensures that target names across packages don't collide
145 | # add_executable(${PROJECT_NAME}_node src/radio_communication_node.cpp)
146 |
147 | ## Rename C++ executable without prefix
148 | ## The above recommended prefix causes long target names, the following renames the
149 | ## target back to the shorter version for ease of user use
150 | ## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node"
151 | # set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "")
152 |
153 | ## Add cmake target dependencies of the executable
154 | ## same as for the library above
155 | # add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
156 |
157 | ## Specify libraries to link a library or executable target against
158 | # target_link_libraries(${PROJECT_NAME}_node
159 | # ${catkin_LIBRARIES}
160 | # )
161 |
162 | #############
163 | ## Install ##
164 | #############
165 |
166 | # all install targets should use catkin DESTINATION variables
167 | # See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
168 |
169 | ## Mark executable scripts (Python etc.) for installation
170 | ## in contrast to setup.py, you can choose the destination
171 | install(PROGRAMS
172 | scripts/integrate_database.py
173 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
174 | )
175 |
176 | ## Mark executables and/or libraries for installation
177 | # install(TARGETS ${PROJECT_NAME} ${PROJECT_NAME}_node
178 | # ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
179 | # LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
180 | # RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
181 | # )
182 |
183 | ## Mark cpp header files for installation
184 | # install(DIRECTORY include/${PROJECT_NAME}/
185 | # DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
186 | # FILES_MATCHING PATTERN "*.h"
187 | # PATTERN ".svn" EXCLUDE
188 | # )
189 |
190 | ## Mark other files for installation (e.g. launch and bag files, etc.)
191 | install(DIRECTORY
192 | launch
193 | DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
194 | )
195 |
196 | install(DIRECTORY
197 | scripts
198 | DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
199 | )
200 |
201 | #############
202 | ## Testing ##
203 | #############
204 |
205 | ## Add gtest based cpp test target and link libraries
206 | # catkin_add_gtest(${PROJECT_NAME}-test test/test_radio_communication.cpp)
207 | # if(TARGET ${PROJECT_NAME}-test)
208 | # target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
209 | # endif()
210 |
211 | ## Add folders to be run by python nosetests
212 | # catkin_add_nosetests(test)
213 |
--------------------------------------------------------------------------------
/mocha_core/scripts/core/database_utils.py:
--------------------------------------------------------------------------------
1 | import struct
2 | import rospy
3 | import database as db
4 | import hash_comm
5 | import io
6 | import pdb
7 | import importlib
8 | import random
9 | import lz4.frame
10 |
11 | HEADER_LENGTH = hash_comm.TsHeader.HEADER_LENGTH
12 |
13 |
14 | def get_robot_id_from_name(robot_configs, robot_name):
15 |     """ Returns the robot id, based on the name of the robot. The id
16 |     is the index of the robot name in the alphabetically sorted list
17 |     of robots in robot_configs. """
18 |
19 | # Check input arguments
20 | assert isinstance(robot_configs, dict)
21 | assert robot_name is not None and isinstance(robot_name, str)
22 |
23 | # The robot id is the index of the dictionary
24 | robot_list = list(robot_configs.keys())
25 | robot_list.sort()
26 | robot_id = robot_list.index(robot_name)
27 | return robot_id
28 |
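# Example of the id convention above (illustrative only; it assumes a config
# that defines exactly the robots 'basestation', 'charon', and 'styx', the
# names used in the test configs):
#   sorted(robot_configs.keys())                          -> ['basestation', 'charon', 'styx']
#   get_robot_id_from_name(robot_configs, 'basestation')  -> 0
#   get_robot_id_from_name(robot_configs, 'charon')       -> 1
#   get_robot_id_from_name(robot_configs, 'styx')         -> 2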
29 |
30 | def get_robot_name_from_id(robot_configs, robot_id):
31 | assert isinstance(robot_configs, dict)
32 | assert robot_id is not None and isinstance(robot_id, int)
33 | robots = {get_robot_id_from_name(robot_configs, robot): robot for robot
34 | in robot_configs}
35 | return robots[robot_id]
36 |
37 |
38 | def get_topic_id_from_name(robot_configs, topic_configs,
39 | robot_name, topic_name):
40 | """ Returns the topic id for a particular robot,
41 | by searching the topic name"""
42 | assert isinstance(robot_configs, dict)
43 | assert isinstance(topic_configs, dict)
44 | assert robot_name is not None and isinstance(robot_name, str)
45 | assert topic_name is not None and isinstance(topic_name, str)
46 |
47 | list_topics = topic_configs[robot_configs[robot_name]["node-type"]]
48 | id = None
49 | for i, topic in enumerate(list_topics):
50 | if topic_name == topic["msg_topic"]:
51 | id = i
52 | break
53 | if id is None:
54 | rospy.logerr(f"{topic_name} does not exist in topic_configs")
55 | rospy.signal_shutdown("topic_name does not exist in topic_configs")
56 | rospy.spin()
57 | return id
58 |
59 |
60 | def get_topic_name_from_id(robot_configs, topic_configs, robot_name, topic_id):
61 | """ Returns the topic name for a particular robot, given its id"""
62 | assert isinstance(robot_configs, dict)
63 | assert isinstance(topic_configs, dict)
64 | assert robot_name is not None and isinstance(robot_name, str)
65 | assert topic_id is not None and isinstance(topic_id, int)
66 | list_topics = topic_configs[robot_configs[robot_name]["node-type"]]
67 | if topic_id >= len(list_topics):
68 | rospy.logerr(f"{topic_id} does not exist in topic_configs")
69 | rospy.signal_shutdown("topic_id does not exist in topic_configs")
70 | rospy.spin()
71 | return list_topics[topic_id]["msg_topic"]
72 |
73 |
74 | def serialize_headers(header_list):
75 | """ Converts a list of headers (bytes) into a single stream """
76 | return b''.join(header_list)
77 |
78 |
79 | def deserialize_headers(serial_headers):
80 |     """ Converts a stream of headers into a list of headers """
81 | if len(serial_headers) % HEADER_LENGTH != 0:
82 |         raise Exception('deserialize_headers: wrong length of stream')
83 | splitted_headers = [serial_headers[i:i+HEADER_LENGTH]
84 | for i in
85 | range(0, len(serial_headers), HEADER_LENGTH)]
86 | return splitted_headers
87 |
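# Illustrative sketch (not part of the original API): headers are fixed-size
# byte strings of HEADER_LENGTH bytes, so a concatenated stream can be split
# back without any delimiter. generate_random_header() is defined later in
# this module.
def _demo_header_roundtrip():
    headers = [generate_random_header() for _ in range(3)]
    stream = serialize_headers(headers)
    assert deserialize_headers(stream) == headers
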
88 | def pack_data(msg):
89 |     """ Packs data from the db in the standard transmission format
90 |     (sizes in parentheses, in bytes):
91 | [
92 | data_dtype (1 unsigned char),
93 | priority (1 unsigned char),
94 | data -- all the data
95 | ]
96 |
97 |     Note that the fields already encoded in the header are not packed
98 |     again in the data
99 | """
100 | assert isinstance(msg, db.DBMessage)
101 | serial_data = struct.pack('!B', msg.dtype)
102 | serial_data += struct.pack('!B', msg.priority)
103 | serial_data += msg.data
104 | return serial_data
105 |
106 |
107 | def unpack_data(header, packed_data):
108 |     """ Unpacks a data package and returns a DBMessage ready to insert
109 |     into the database. We _could_ do the unpack in one operation, but this is
110 | easier to debug. """
111 |
112 | h = hash_comm.TsHeader.from_header(header)
113 | p_robot_id, p_topic_id, p_ts = h.get_id_and_time()
114 |
115 | pointer = 0
116 | # unpack topic type
117 | p_topic_dtype = struct.unpack('!B',
118 | packed_data[pointer:pointer + 1])[0]
119 | pointer += 1
120 | # unpack priority
121 | p_priority = struct.unpack('!B',
122 | packed_data[pointer:pointer + 1])[0]
123 | pointer += 1
124 | # unpack data
125 | p_data = packed_data[pointer:]
126 | assert len(p_data) != 0, "Empty data in unpack_data"
127 |
128 | dbm = db.DBMessage(p_robot_id, p_topic_id, p_topic_dtype,
129 | p_priority, p_ts, p_data)
130 | return dbm
131 |
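# Illustrative sketch (not part of the original API): a DBMessage is split
# into a fixed-size header (robot id, topic id, timestamp) and a packed
# payload (dtype, priority, raw data). Packing and then unpacking with the
# corresponding header should reproduce an equivalent message.
def _demo_pack_unpack_roundtrip():
    msg = db.DBMessage(robot_id=1, topic_id=2, dtype=0, priority=3,
                       ts=rospy.Time(1234, 0), data=b"payload")
    packed = pack_data(msg)
    restored = unpack_data(msg.header, packed)
    assert restored == msg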
132 |
133 | def serialize_ros_msg(msg):
134 | # TODO check that we are not entering garbage
135 | sio_h = io.BytesIO()
136 | msg.serialize(sio_h)
137 | serialized = sio_h.getvalue()
138 | compressed = lz4.frame.compress(serialized)
139 | return compressed
140 |
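# Illustrative sketch (not part of the original API): messages are serialized
# with the standard genpy serialize() call and lz4-compressed before being
# stored or transmitted. geometry_msgs is assumed to be available (the test
# suite already depends on it).
def _demo_serialize_roundtrip():
    from geometry_msgs.msg import PointStamped
    msg = PointStamped()
    msg.header.frame_id = "world"
    msg.point.x = 1.0
    compressed = serialize_ros_msg(msg)
    restored = PointStamped()
    restored.deserialize(lz4.frame.decompress(compressed))
    assert restored.header.frame_id == "world" and restored.point.x == 1.0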
141 |
142 | def parse_answer(api_answer, msg_types):
143 | constructor = msg_types[api_answer.robot_id][api_answer.topic_id]['obj']
144 | msg = constructor()
145 | # api_answer.msg_content has the compressed message
146 | decompressed = lz4.frame.decompress(api_answer.msg_content)
147 | msg.deserialize(decompressed)
148 | robot_id = api_answer.robot_id
149 | topic_id = api_answer.topic_id
150 | ts = api_answer.timestamp
151 | return robot_id, topic_id, ts, msg
152 |
153 |
154 | def msg_types(robot_configs, topic_configs):
155 | """
156 | Extracts message types from the topic_configs dictionary and validates
157 |     them. Returns a dictionary keyed by robot id and then by topic id.
158 |     Each value is a dictionary containing:
159 |     - 'dtype' (an integer ID for the message type),
160 |     - 'obj' (the imported message class),
161 |     - 'name' (the "package/Message" type name)
162 | """
163 | assert isinstance(topic_configs, dict)
164 |
165 | msg_list = []
166 | for robot in topic_configs:
167 | for topic in topic_configs[robot]:
168 | msg = topic['msg_type']
169 | # Check if the message is a valid one: it has only two parts
170 | # and all the characters are alphanumeric or _
171 | parts = msg.split('/')
172 | if not (len(parts) == 2 and
173 | all(part.replace("_", "").isalnum()
174 | for part in parts)):
175 | rospy.logerr(f"Error: msg_type {msg} not valid")
176 |                 rospy.signal_shutdown(f"Error: msg_type {msg} not valid")
177 | rospy.spin()
178 | msg_list.append(topic['msg_type'])
179 | # Important: sort the msg_list so we have a deterministic order
180 | msg_list.sort()
181 |
182 | msg_types = {}
183 | for robot in robot_configs:
184 | robot_id = get_robot_id_from_name(robot_configs, robot)
185 | if robot_id not in msg_types.keys():
186 | msg_types[robot_id] = {}
187 | for i, topic in enumerate(topic_configs[robot_configs[robot]["node-type"]]):
188 | msg = topic['msg_type']
189 | msg_id = msg_list.index(msg)
190 | package_name, msg_name = msg.split('/')
191 | package = importlib.import_module(package_name + '.msg')
192 | message_type = getattr(package, msg_name)
193 | msg_types[robot_id][i] = {"dtype": msg_id,
194 | "obj": message_type,
195 | "name": msg}
196 | return msg_types
197 |
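# Illustrative sketch (not part of the original API): the dictionary returned
# by msg_types() is keyed by robot id and then by topic id; each leaf holds
# the numeric dtype, the imported message class and the "package/Message"
# name. The configs below are hypothetical and only show the expected layout.
def _demo_msg_types_layout():
    example_robot_configs = {"charon": {"node-type": "robot"}}
    example_topic_configs = {"robot": [{"msg_topic": "/pose",
                                        "msg_type": "geometry_msgs/PointStamped"}]}
    types = msg_types(example_robot_configs, example_topic_configs)
    # types[0][0]["obj"] is geometry_msgs.msg.PointStamped
    assert types[0][0]["dtype"] == 0
    assert types[0][0]["name"] == "geometry_msgs/PointStamped"
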
198 | def get_priority_number(priority_string):
199 | PRIORITY_LUT = {"NO_PRIORITY": 0,
200 | "LOW_PRIORITY": 1,
201 | "MEDIUM_PRIORITY": 2,
202 | "HIGH_PRIORITY": 3}
203 | assert isinstance(priority_string, str)
204 | assert priority_string in PRIORITY_LUT
205 | return PRIORITY_LUT[priority_string]
206 |
207 |
208 | def generate_random_header():
209 | # generate random robot_id and topic_id, between 0 and 255
210 | robot_id = random.randint(0, 255)
211 | topic_id = random.randint(0, 255)
212 | # Generate a random rospy timestamp
213 | time = rospy.Time.from_sec(random.random())
214 | h = hash_comm.TsHeader.from_data(robot_id, topic_id, time)
215 | return h.bindigest()
216 |
--------------------------------------------------------------------------------
/mocha_core/scripts/core/zmq_comm_node.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import enum
3 | import pdb
4 | import threading
5 | import uuid
6 | import rospy
7 | import zmq
8 | import hash_comm
9 | import mocha_core.msg
10 |
11 | HASH_LENGTH = hash_comm.Hash.HASH_LENGTH
12 |
13 | SHOW_BANDWIDTH = False
14 |
15 |
16 | class SyncStatus(enum.Enum):
17 | IDLE = 0
18 | SYNCHRONIZING = 1
19 | FAILURE = 2
20 |
21 |
22 | class Comm_node:
23 | def __init__(self, this_node, client_node, robot_configs,
24 | client_callback, server_callback, client_timeout):
25 | # Check input arguments
26 | assert isinstance(this_node, str)
27 | assert isinstance(client_node, str)
28 | assert isinstance(robot_configs, dict)
29 | assert callable(client_callback)
30 | assert callable(server_callback)
31 | assert isinstance(client_timeout, (int, float))
32 |
33 | # Check that this_node and client_node exist in the config file
34 | if this_node not in robot_configs:
35 | rospy.logerr(f"{this_node} - Node: this_node not in config file")
36 | rospy.signal_shutdown("this_node not in config file")
37 | rospy.spin()
38 | self.this_node = this_node
39 |
40 | if client_node not in robot_configs[self.this_node]["clients"]:
41 | rospy.logerr(f"{this_node} - Node: client_node not in config file")
42 | rospy.signal_shutdown("client_node not in config file")
43 | rospy.spin()
44 |
45 | self.client_node = client_node
46 |
47 | self.robot_configs = robot_configs
48 |
49 | # The client id is the index of the list of clients
50 | self.client_id = robot_configs[self.this_node]["clients"].index(client_node)
51 |
52 | # Set network properties
53 | self.this_port = str(
54 | int(robot_configs[self.this_node]["base-port"]) + self.client_id)
55 | self.this_addr = robot_configs[self.this_node]["IP-address"]
56 |
57 | # Configure callbacks
58 | # We will call:
59 | # client_callback(data) on successful communication
60 | # client_callback(None) on erroneous communication (timeout)
61 | self.client_callback = client_callback
62 | self.server_callback = server_callback
63 | self.client_timeout = client_timeout
64 |
65 | # Create a publisher for the client bandwidth
66 | self.pub_client_stats = rospy.Publisher(f"ddb/client_stats/{self.client_node}",
67 | mocha_core.msg.Client_stats,
68 | queue_size=10)
69 | self.pub_client_count = 0
70 |
71 | self.syncStatus = SyncStatus.IDLE
72 | self.syncStatus_lock = threading.Lock()
73 |
74 | self.context = zmq.Context(1)
75 |
76 | # Start server thread
77 | server = threading.Thread(target=self.server_thread, args=())
78 | self.server_running = True
79 | server.start()
80 |
81 | def connect_send_message(self, msg):
82 | # TODO keep connection open instead of opening in each call
83 | # Msg check
84 | if not isinstance(msg, bytes):
85 | rospy.logdebug(f"{self.this_node} - Node - SENDMSG: " +
86 | "msg has to be bytes")
87 | return
88 |
89 | # Check that we are in the right state
90 |         with self.syncStatus_lock:
91 |             if self.syncStatus != SyncStatus.IDLE:
92 |                 rospy.logdebug(f"{self.this_node} - Node - SENDMSG: " +
93 |                                "Sync is running, abort")
94 |                 # Returning here releases the lock via the context manager
95 |                 return
96 |             self.syncStatus = SyncStatus.SYNCHRONIZING
97 |
98 | # We're all set, send message
99 | target_robot = self.robot_configs[self.client_node]
100 | port_offset = target_robot["clients"].index(self.this_node)
101 | server_endpoint = (
102 | "tcp://"
103 | + target_robot["IP-address"]
104 | + ":"
105 | + str(int(target_robot["base-port"]) + port_offset)
106 | )
107 |
108 | rospy.logdebug(f"{self.this_node} - Node - SENDMSG: " +
109 | f"Connecting to server {server_endpoint}")
110 | client = self.context.socket(zmq.REQ)
111 | client.connect(server_endpoint)
112 |
113 | poll = zmq.Poller()
114 | poll.register(client, zmq.POLLIN)
115 |
116 | # Get an uuid for the message to send
117 | rnd_uuid = str(uuid.uuid4().hex).encode()
118 | msg_id = hash_comm.Hash(rnd_uuid).digest()
119 | full_msg = msg_id + msg
120 | rospy.logdebug(f"{self.this_node} - Node - SENDMSG: " +
121 | f"Sending ({full_msg})")
122 | client.send(full_msg)
123 | start_ts = rospy.Time.now()
124 |         # Wait at most client_timeout seconds (poll() takes milliseconds)
125 | socks = dict(poll.poll(self.client_timeout*1000))
126 | if socks.get(client) == zmq.POLLIN:
127 | reply = client.recv()
128 | if not reply:
129 | rospy.logdebug(
130 | f"{self.this_node} - Node - SENDMSG: " +
131 | "No response from the server"
132 | )
133 | self.client_callback(None)
134 | else:
135 | header = reply[0:HASH_LENGTH]
136 | data = reply[HASH_LENGTH:]
137 | if header == msg_id:
138 | rospy.logdebug(
139 | f"{self.this_node} - Node - SENDMSG: Server replied " +
140 | f"({len(reply)} bytes)"
141 | )
142 | stop_ts = rospy.Time.now()
143 | time_d = stop_ts - start_ts
144 | time_s = float(time_d.to_sec())
145 | bw = len(reply)/time_s/1024/1024
146 | stats = mocha_core.msg.Client_stats()
147 | stats.header.stamp = rospy.Time.now()
148 | stats.header.frame_id = self.this_node
149 | stats.header.seq = self.pub_client_count
150 | self.pub_client_count += 1
151 | stats.msg = msg[:5].decode("utf-8")
152 | stats.rtt = time_s
153 | stats.bw = bw
154 | stats.answ_len = len(reply)
155 | self.pub_client_stats.publish(stats)
156 | if len(reply) > 10*1024 and SHOW_BANDWIDTH:
157 | rospy.loginfo(f"{self.this_node} - Node - " +
158 | f"SENDMSG: Data RTT: {time_s}")
159 |                     rospy.loginfo(f"{self.this_node} - Node - SENDMSG: " +
160 |                                   f"BW: {bw} " +
161 |                                   "MBytes/s")
162 | self.client_callback(data)
163 | else:
164 | rospy.logerr(f"{self.this_node} - Node - SENDMSG: " +
165 | f"Malformed reply from server: {reply}")
166 | self.client_callback(None)
167 | else:
168 | rospy.logdebug(f"{self.this_node} - Node - SENDMSG: " +
169 | "No response from server")
170 | self.client_callback(None)
171 | client.setsockopt(zmq.LINGER, 0)
172 | client.close()
173 | poll.unregister(client)
174 | self.syncStatus_lock.acquire()
175 | self.syncStatus = SyncStatus.IDLE
176 | self.syncStatus_lock.release()
177 |
178 | def server_thread(self):
179 |         # This timeout does not have a big impact, as it only bounds how long
180 |         # recv() blocks. Most calls from the client are very lightweight.
181 | RECV_TIMEOUT = 1000
182 | self.server = self.context.socket(zmq.REP)
183 | self.server.RCVTIMEO = RECV_TIMEOUT
184 |
185 | port = self.this_port
186 |
187 | self.server.bind("tcp://*:" + str(port))
188 |
189 | while self.server_running:
190 | try:
191 | request = self.server.recv()
192 | except zmq.ZMQError as e:
193 | if e.errno == zmq.EAGAIN:
194 | # rospy.logdebug(
195 | # f"{self.this_node} - Node - SERVER_RUNNING: " +
196 | # f"{self.server_running}"
197 | # )
198 | continue
199 | else:
200 | rospy.logerr(f"{self.this_node} - Node - SERVER: " +
201 | f"ZMQ error: {e}")
202 | rospy.signal_shutdown("ZMQ error")
203 | rospy.spin()
204 | rospy.logdebug(f"{self.this_node} - Node - SERVER: Got {request}")
205 | header = request[0:HASH_LENGTH]
206 | data = request[HASH_LENGTH:]
207 | reply = self.server_callback(data)
208 | if reply is None:
209 | rospy.logerr(f"{self.this_node} - Node - SERVER: " +
210 |                              "reply cannot be None")
211 | rospy.signal_shutdown("Reply none")
212 | rospy.spin()
213 | if not isinstance(reply, bytes):
214 | rospy.logerr(f"{self.this_node} - Node - SERVER: " +
215 | "reply has to be bytes")
216 | rospy.signal_shutdown("Reply not bytes")
217 | rospy.spin()
218 | ans = header + reply
219 | self.server.send(ans)
220 | rospy.logdebug(f"{self.this_node} - Node - SERVER: " +
221 |                            f"Replied {len(ans)} bytes")
222 | rospy.logdebug(f"{self.this_node} - SERVER: {ans}")
223 | self.server.close()
224 | self.context.term()
225 |
226 | def terminate(self):
227 | rospy.logdebug(f"{self.this_node} - Node - Terminating server")
228 | self.server_running = False
229 |
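# Illustrative usage sketch (not part of the original module): each Comm_node
# binds a ZMQ REP server on base-port + client index and sends REQ messages to
# the matching port of its peer. The config below is hypothetical, and a ROS
# node must already be initialized because the constructor creates a
# Client_stats publisher.
def _demo_loopback():
    example_configs = {
        "alpha": {"IP-address": "127.0.0.1", "base-port": 5100,
                  "clients": ["beta"]},
        "beta": {"IP-address": "127.0.0.1", "base-port": 5200,
                 "clients": ["alpha"]},
    }
    answers = []
    node_a = Comm_node("alpha", "beta", example_configs,
                       answers.append, lambda data: data, 2)
    node_b = Comm_node("beta", "alpha", example_configs,
                       lambda data: None, lambda data: b"echo:" + data, 2)
    node_a.connect_send_message(b"hello")
    # If the exchange succeeds, answers == [b"echo:hello"]
    node_a.terminate()
    node_b.terminate()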
--------------------------------------------------------------------------------
/mocha_core/scripts/core/test/test_database_server.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import os
4 | import unittest
5 | import sys
6 | import multiprocessing
7 | import roslaunch
8 | import rospy
9 | import rospkg
10 | import pdb
11 | from pprint import pprint
12 | import mocha_core.srv
13 | from geometry_msgs.msg import PointStamped
14 | import yaml
15 | from colorama import Fore, Style
16 | import random
17 | import warnings
18 |
19 |
20 | class Test(unittest.TestCase):
21 | def setUp(self):
22 | test_name = self._testMethodName
23 | print("\n", Fore.RED, 20*"=", test_name, 20*"=", Style.RESET_ALL)
24 |
25 | # Ignore pesky warnings about sockets
26 | warnings.filterwarnings(action="ignore", message="unclosed", category=ResourceWarning)
27 |
28 | # Create a database server object
29 | self.dbs = ds.DatabaseServer(robot_configs, topic_configs)
30 |
31 | rospy.wait_for_service('~AddUpdateDB')
32 | rospy.wait_for_service('~GetDataHeaderDB')
33 | rospy.wait_for_service('~SelectDB')
34 |
35 | # Generate the service calls for all the services
36 | service_name = '~AddUpdateDB'
37 | self.add_update_db = rospy.ServiceProxy(service_name,
38 | mocha_core.srv.AddUpdateDB,
39 | persistent=True)
40 |
41 | service_name = '~GetDataHeaderDB'
42 | self.get_data_header_db = rospy.ServiceProxy(service_name,
43 | mocha_core.srv.GetDataHeaderDB,
44 | persistent=True)
45 |
46 | service_name = '~SelectDB'
47 | self.select_db = rospy.ServiceProxy(service_name,
48 | mocha_core.srv.SelectDB,
49 | persistent=True)
50 |
51 | super().setUp()
52 |
53 | def tearDown(self):
54 | self.add_update_db.close()
55 | self.get_data_header_db.close()
56 | self.select_db.close()
57 | self.dbs.shutdown()
58 | rospy.sleep(1)
59 | super().tearDown()
60 |
61 | def test_add_retrieve_single_msg(self):
62 | # Simulate sending a "/pose" message from Charon
63 | sample_msg = PointStamped()
64 | sample_msg.header.frame_id = "world"
65 | sample_msg.point.x = random.random()
66 | sample_msg.point.y = random.random()
67 | sample_msg.point.z = random.random()
68 | sample_msg.header.stamp = rospy.Time.now()
69 |
70 | tid = du.get_topic_id_from_name(robot_configs, topic_configs,
71 | robot_name, "/pose")
72 | # Serialize and send through service
73 | serialized_msg = du.serialize_ros_msg(sample_msg)
74 |
75 | try:
76 | answ = self.add_update_db(tid,
77 | rospy.get_rostime(),
78 | serialized_msg)
79 | answ_header = answ.new_header
80 | except rospy.ServiceException as exc:
81 | print("Service did not process request: " + str(exc))
82 | self.assertTrue(False)
83 | # print("Got hash:", answ_hash)
84 | # The data is now stored in the database!
85 |
86 | # Request the same hash through service
87 | try:
88 | answ = self.get_data_header_db(answ_header)
89 | except rospy.ServiceException as exc:
90 | print("Service did not process request: " + str(exc))
91 | self.assertTrue(False)
92 |
93 | # Parse answer and compare results
94 | _, ans_topic_id, _, ans_data = du.parse_answer(answ,
95 | msg_types)
96 |
97 | self.assertEqual(tid, ans_topic_id)
98 | self.assertEqual(ans_data, sample_msg)
99 | # print("Received topic:", ans_topic_name)
100 | # print("ROS msg:", ans_data)
101 | # print("Timestamp:", ans_ts)
102 |
103 | def test_add_select_robot(self):
104 | stored_headers = []
105 | for i in range(3):
106 | # Sleep is very important to have distinct messages
107 | rospy.sleep(0.1)
108 |             # Create a dummy random PointStamped message
109 | sample_msg = PointStamped()
110 | sample_msg.header.frame_id = "world"
111 | sample_msg.point.x = random.random()
112 | sample_msg.point.y = random.random()
113 | sample_msg.point.z = random.random()
114 | sample_msg.header.stamp = rospy.Time.now()
115 | tid = du.get_topic_id_from_name(robot_configs, topic_configs,
116 | robot_name, "/pose")
117 |
118 | # Serialize and send through service
119 | serialized_msg = du.serialize_ros_msg(sample_msg)
120 | try:
121 | answ = self.add_update_db(tid,
122 | rospy.get_rostime(),
123 | serialized_msg)
124 | answ_header = answ.new_header
125 | except rospy.ServiceException as exc:
126 | print("Service did not process request: " + str(exc))
127 | self.assertTrue(False)
128 | stored_headers.append(answ_header)
129 |
130 | # Request the list of headers through the service
131 | try:
132 | robot_id = du.get_robot_id_from_name(robot_configs, "charon")
133 | answ = self.select_db(robot_id, None, None)
134 | except rospy.ServiceException as exc:
135 | print("Service did not process request: " + str(exc))
136 | self.assertTrue(False)
137 | returned_headers = du.deserialize_headers(answ.headers)
138 |
139 | # Sort list before comparing
140 | stored_headers.sort()
141 | returned_headers.sort()
142 | self.assertEqual(stored_headers, returned_headers)
143 |
144 | def test_insert_storm(self):
145 |         # Flood the database with insert requests from different robots,
146 | # check the number of headers afterwards
147 | NUM_PROCESSES = 100
148 | LOOPS_PER_PROCESS = 10
149 |
150 | # Spin a number of processes to write into the database
151 | def recorder_thread():
152 | # Get a random ros time
153 | tid = du.get_topic_id_from_name(robot_configs, topic_configs,
154 | robot_name, "/pose")
155 | sample_msg = PointStamped()
156 | sample_msg.header.frame_id = "world"
157 | sample_msg.point.x = random.random()
158 | sample_msg.point.y = random.random()
159 | sample_msg.point.z = random.random()
160 | sample_msg.header.stamp = rospy.Time.now()
161 | # Serialize and send through service
162 | serialized_msg = du.serialize_ros_msg(sample_msg)
163 |
164 | for i in range(LOOPS_PER_PROCESS):
165 | timestamp = rospy.Time(random.randint(1, 10000),
166 | random.randint(0, 1000)*1000000)
167 | try:
168 | _ = self.add_update_db(tid,
169 | timestamp,
170 | serialized_msg)
171 | except rospy.ServiceException as exc:
172 | print("Service did not process request: " + str(exc))
173 | self.assertTrue(False)
174 |
175 | process_list = []
176 | for i in range(NUM_PROCESSES):
177 | x = multiprocessing.Process(target=recorder_thread)
178 | process_list.append(x)
179 |
180 | for p in process_list:
181 | p.start()
182 |
183 | for p in process_list:
184 | p.join()
185 |
186 | # Get the list of hashes from the DB and count them
187 | try:
188 | robot_id = du.get_robot_id_from_name(robot_configs, "charon")
189 | answ = self.select_db(robot_id, None, None)
190 | except rospy.ServiceException as exc:
191 | print("Service did not process request: " + str(exc))
192 | self.assertTrue(False)
193 | returned_headers = du.deserialize_headers(answ.headers)
194 |
195 | self.assertEqual(len(returned_headers),
196 | NUM_PROCESSES*LOOPS_PER_PROCESS)
197 |
198 |
199 | if __name__ == '__main__':
200 | rospy.init_node('test_database_server', anonymous=False)
201 |
202 | # Get the directory path and import all the required modules to test
203 | rospack = rospkg.RosPack()
204 | ddb_path = rospack.get_path('mocha_core')
205 | scripts_path = os.path.join(ddb_path, "scripts/core")
206 | sys.path.append(scripts_path)
207 | import database_utils as du
208 | import database_server as ds
209 |
210 | # Get the default path from the ddb_path
211 | robot_configs_default = os.path.join(ddb_path,
212 | "config/testConfigs/robot_configs.yaml")
213 | # Get the path to the robot config file from the ros parameter robot_configs
214 | robot_configs = rospy.get_param("robot_configs",
215 | robot_configs_default)
216 |
217 | # Get the yaml dictionary objects
218 | with open(robot_configs, "r") as f:
219 | robot_configs = yaml.load(f, Loader=yaml.FullLoader)
220 |
221 | # Get the default path from the ddb_path
222 | topic_configs_default = os.path.join(ddb_path,
223 | "config/testConfigs/topic_configs.yaml")
224 |     # Get the path to the topic config file from the ros parameter topic_configs
225 | topic_configs = rospy.get_param("topic_configs",
226 | topic_configs_default)
227 |
228 | # Get the yaml dictionary objects
229 | with open(topic_configs, "r") as f:
230 | topic_configs = yaml.load(f, Loader=yaml.FullLoader)
231 |
232 | msg_types = du.msg_types(robot_configs, topic_configs)
233 |
234 | # Set the ~robot_name param to charon
235 | robot_name = "charon"
236 | rospy.set_param("~robot_name", robot_name)
237 |
238 | # Run test cases
239 | unittest.main()
240 |
--------------------------------------------------------------------------------
/mocha_core/scripts/core/database.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import threading
3 | import hash_comm
4 | import rospy
5 | import pdb
6 | import database_utils as du
7 | import numpy as np
9 | import copy
10 |
11 |
12 | class DBMessage():
13 | """Database message object.
14 |
15 | This class is used to represent messages in the context of a database.
16 | The messages are typically used for inserting or extracting data from
17 | the database.
18 | """
19 | def __init__(self, robot_id, topic_id,
20 | dtype, priority, ts, data):
21 | """
22 | Initialize the DBMessage object.
23 |
24 | Args:
25 | robot_id (int): ID of the robot.
26 | topic_id (int): Topic id of the robot
27 | dtype (int): Data type identifier.
28 | priority (int): Priority level of the message.
29 | ts (rostime): Timestamp of the message
30 | data (bytes): Binary data payload.
31 | """
32 | assert isinstance(robot_id, int)
33 | assert isinstance(topic_id, int)
34 | assert isinstance(dtype, int)
35 | assert isinstance(priority, int)
36 | assert isinstance(ts, rospy.Time)
37 | assert isinstance(data, bytes)
38 | # The first three items are encoded in the header
39 | self.robot_id = robot_id
40 | self.topic_id = topic_id
41 | self.ts = ts
42 | # The next items are encoded in the data
43 | self.dtype = dtype
44 | self.priority = priority
45 | self.data = data
46 | # Calculate the header of the message
47 | header = hash_comm.TsHeader.from_data(self.robot_id,
48 | self.topic_id, self.ts)
49 | self.header = header.bindigest()
50 |
51 | def __eq__(self, other):
52 | if not isinstance(other, DBMessage):
53 | return False
54 | if other.robot_id != self.robot_id:
55 | return False
56 | if other.topic_id != self.topic_id:
57 | return False
58 | if other.dtype != self.dtype:
59 | return False
60 | if other.priority != self.priority:
61 | return False
62 | if other.ts.secs != self.ts.secs:
63 | return False
64 | if np.abs(other.ts.nsecs - self.ts.nsecs) > 1000000:
65 | print("nsecs diff: %d" % np.abs(other.ts.nsecs - self.ts.nsecs))
66 | return False
67 | if other.data != self.data:
68 | return False
69 | return True
70 |
71 | def __str__(self):
72 | return "%d, %d, %d, %d, %f" % (self.robot_id, self.topic_id,
73 | self.dtype, self.priority,
74 |                                        self.ts.to_sec())
75 |
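# Illustrative sketch (not part of the original module): the message header is
# derived from (robot_id, topic_id, ts) only, so two messages with the same
# identifiers and timestamp share a header even if their payloads differ.
def _demo_dbmessage_header():
    a = DBMessage(1, 2, 0, 3, rospy.Time(10, 0), b"payload_a")
    b = DBMessage(1, 2, 0, 3, rospy.Time(10, 0), b"payload_b")
    assert a.header == b.header and a != b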
76 |
77 | class DBwLock():
78 | """ Database with lock object
79 |
80 | The database is a dictionary.
81 | The lock should be manually acquired before executing any operation
82 | with the database to ensure data integrity.
83 |
84 | sample_db may be a valid dictionary that can be preloaded into
85 | the object (useful for debugging, see sample_db.py) """
86 | def __init__(self, sample_db=None):
87 | if sample_db is not None:
88 | assert isinstance(sample_db, dict)
89 | self.db = copy.deepcopy(sample_db)
90 | else:
91 | self.db = {}
92 | self.lock = threading.Lock()
93 |
94 | def add_modify_data(self, dbm):
95 | """ Insert new stuff into the db. Use the header as message index """
96 | # Do a quick check
97 | assert isinstance(dbm, DBMessage)
98 |
99 | # Acquire lock and commit into db
100 | self.lock.acquire()
101 | # Create and store things in db
102 | if dbm.robot_id not in self.db:
103 | self.db[dbm.robot_id] = {}
104 | if dbm.topic_id not in self.db[dbm.robot_id]:
105 | self.db[dbm.robot_id][dbm.topic_id] = {}
106 | self.db[dbm.robot_id][dbm.topic_id][dbm.header] = dbm
107 | self.lock.release()
108 | return dbm.header
109 |
110 | def get_header_list(self, filter_robot_id=None, filter_latest=None):
111 |         """ Returns a list with all the headers of the db, sorted by
112 |         priority and timestamp (newest first). The result can be filtered
113 |         for a specific robot with filter_robot_id, and filter_latest
114 |         keeps only the newest message of each topic. Timestamps are
115 |         recorded in the robot frame (i.e. each robot has its own clock),
116 |         so it does not make sense to compare them across robots. """
117 | if filter_robot_id is not None:
118 | assert isinstance(filter_robot_id, int)
119 | if filter_latest is not None:
120 | assert isinstance(filter_latest, bool)
121 |
122 | # Header list is a dict with the headers as keys and the priorities as
123 | # values
124 | header_list = {}
125 |
126 | # To avoid inconsistencies, the db is locked while searching
127 | self.lock.acquire()
128 | for robot_id in self.db:
129 | if filter_robot_id is not None and robot_id != filter_robot_id:
130 | continue
131 | for topic in self.db[robot_id]:
132 | if filter_latest:
133 | latest_msg_ts = rospy.Time(1, 0)
134 | latest_msg = None
135 | for header in self.db[robot_id][topic]:
136 | msg_content = self.db[robot_id][topic][header]
137 | if filter_latest and msg_content.ts > latest_msg_ts:
138 | latest_msg_ts = msg_content.ts
139 | latest_msg = msg_content
140 | if not filter_latest:
141 | header_list[header] = {'prio': msg_content.priority,
142 | 'ts': msg_content.ts}
143 | if filter_latest:
144 | header_list[latest_msg.header] = {'prio': latest_msg.priority,
145 | 'ts': latest_msg.ts}
146 | self.lock.release()
147 |
148 | # Sort the dictionary by value, and get the keys
149 | sorted_tuples = sorted(header_list.items(),
150 | key=lambda kv: (kv[1]['prio'], kv[1]['ts'], kv[0]),
151 | reverse=True)
152 | sorted_header_list = [i[0] for i in sorted_tuples]
153 | return sorted_header_list
154 |
155 | def get_ts_dict(self):
156 | """ Returns a dictionary with the newest timestamps of all the topics """
157 | ts_dict = {}
158 | self.lock.acquire()
159 | for robot_id in self.db:
160 | if robot_id not in ts_dict:
161 | ts_dict[robot_id] = {}
162 | for topic in self.db[robot_id]:
163 | if topic not in ts_dict[robot_id]:
164 | ts_dict[robot_id][topic] = -np.inf
165 | for header in self.db[robot_id][topic]:
166 | msg = self.db[robot_id][topic][header]
167 | ts_dict[robot_id][topic] = max(ts_dict[robot_id][topic],
168 | msg.ts.to_sec())
169 | self.lock.release()
170 | return ts_dict
171 |
172 | def headers_not_in_local(self, remote_header_list, newer=False):
173 | # Compares a remote header list with the local database.
174 | # If newer is True, only headers newer than the local ones are
175 | # returned. Otherwise, all the headers are returned.
176 | assert isinstance(remote_header_list, list)
177 | assert isinstance(newer, bool)
178 |
179 | missing_headers = []
180 | if not newer:
181 | local_headers = self.get_header_list()
182 | for h in remote_header_list:
183 | if h not in local_headers:
184 | missing_headers.append(h)
185 | return missing_headers
186 | else:
187 | ts_dict = self.get_ts_dict()
188 | for h in remote_header_list:
189 | h = hash_comm.TsHeader.from_header(h)
190 | r_id, t_id, time = h.get_id_and_time()
191 | if r_id not in ts_dict:
192 | missing_headers.append(h.bindigest())
193 |                 elif t_id not in ts_dict[r_id]:
194 |                     missing_headers.append(h.bindigest())
195 |                 elif time.to_sec() > ts_dict[r_id][t_id]:
196 | missing_headers.append(h.bindigest())
197 | return missing_headers
198 |
199 | def find_header(self, requested_header):
200 | assert isinstance(requested_header, bytes)
201 | # Find data in db
202 | data_found = False
203 | self.lock.acquire()
204 | for robot in self.db:
205 | for topic_id in self.db[robot]:
206 | if data_found:
207 | break
208 | for header in self.db[robot][topic_id]:
209 | if data_found:
210 | break
211 | if header == requested_header:
212 | data_found = True
213 | req_robot_id = robot
214 | req_topic_id = topic_id
215 | break
216 | if not data_found:
217 | self.lock.release()
218 |             raise Exception('find_header: header not found')
219 | req_dtype = self.db[req_robot_id][req_topic_id][header].dtype
220 | req_priority = self.db[req_robot_id][req_topic_id][header].priority
221 | req_ts = self.db[req_robot_id][req_topic_id][header].ts
222 | req_data = self.db[req_robot_id][req_topic_id][header].data
223 | self.lock.release()
224 | dbm = DBMessage(req_robot_id, req_topic_id, req_dtype,
225 | req_priority, req_ts, req_data)
226 | return dbm
227 |
228 | def __str__(self):
229 | # Print the database in a nice way
230 | self.lock.acquire()
231 | out = ""
232 | for robot in self.db:
233 | out += f"Robot ID: {robot}\n"
234 | for topic_id in self.db[robot]:
235 | out += f" Topic ID: {topic_id}\n"
236 | for header in self.db[robot][topic_id]:
237 | msg = self.db[robot][topic_id][header]
238 | out += f" TS: {str(msg.ts)}\n"
239 | out += f" Dtype: {msg.dtype}\n"
240 | out += f" Prio: {msg.priority}\n"
241 | out += f" Data: {msg.data}\n"
242 | out += f" Header: {header}\n"
243 | self.lock.release()
244 | return out
245 |
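# Illustrative usage sketch (not part of the original module): the database is
# a plain nested dictionary db[robot_id][topic_id][header] -> DBMessage, and
# every public method acquires the lock internally.
def _demo_dbwlock_usage():
    dbl = DBwLock()
    msg = DBMessage(1, 0, 0, 2, rospy.Time(5, 0), b"payload")
    header = dbl.add_modify_data(msg)
    assert dbl.get_header_list(filter_robot_id=1) == [header]
    assert dbl.find_header(header) == msg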
--------------------------------------------------------------------------------
/mocha_core/scripts/core/synchronize_channel.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import enum
4 | import threading
5 | import smach
6 | import database as db
7 | import database_utils as du
8 | import hash_comm as hc
9 | import zmq_comm_node
10 | import logging
11 | import rospy
12 | import pdb
13 | from std_msgs.msg import Time, String
14 | from mocha_core.msg import SM_state
15 |
16 | # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
17 | # General configuration variables
18 |
19 | # Get the header length from hash_comm
20 | HEADER_LENGTH = hc.TsHeader.HEADER_LENGTH
21 |
22 | # When actively polling for an answer or for a change in a variable,
23 | # use these times
24 | CHECK_POLL_TIME = 0.2
25 | CHECK_TRIGGER_TIME = 0.05
26 |
27 | # Msg codes that are used during the operation of the communication
28 | # channel. Important: all codes should be CODE_LENGTH characters
29 | CODE_LENGTH = 5
30 |
31 |
32 | class Comm_msgs(enum.Enum):
33 | GHEAD = 1
34 | GDATA = 2
35 | DENDT = 3
36 | SERRM = 4
37 |
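# Illustrative sketch (not part of the original module): every request sent
# through the comm node is the 5-character message code followed by the
# payload, and callback_server() splits it back at CODE_LENGTH. The payload
# below is a placeholder of the right header size.
def _demo_request_framing():
    payload = b"\x00" * HEADER_LENGTH
    request = Comm_msgs.GDATA.name.encode() + payload
    code, data = request[:CODE_LENGTH].decode(), request[CODE_LENGTH:]
    assert code == Comm_msgs.GDATA.name and data == payload
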
38 | # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
39 | """ SMACH states for the synchronization state machine"""
40 |
41 |
42 | class Idle(smach.State):
43 | def __init__(self, outer):
44 | self.outer = outer
45 | smach.State.__init__(self, outcomes=['to_req_hash',
46 | 'to_stopped'])
47 |
48 | def execute(self, userdata):
49 | self.outer.publishState("Idle Start")
50 | while (not self.outer.sm_shutdown.is_set() and
51 | not rospy.is_shutdown()):
52 | if self.outer.sync.get_state():
53 | # trigger sync and reset bistable
54 | self.outer.publishState("Idle to RequestHash")
55 | return 'to_req_hash'
56 | rospy.sleep(CHECK_TRIGGER_TIME)
57 | self.outer.publishState("Idle to Stopped")
58 | return 'to_stopped'
59 |
60 |
61 | class RequestHash(smach.State):
62 | def __init__(self, outer):
63 | self.outer = outer
64 | smach.State.__init__(self,
65 | output_keys=['out_answer'],
66 | outcomes=['to_idle',
67 | 'to_req_hash_reply',
68 | 'to_stopped'])
69 |
70 | def execute(self, userdata):
71 | self.outer.publishState("RequestHash Start")
72 | # Request current comm node
73 | comm = self.outer.get_comm_node()
74 | # Ask server for hash
75 | msg = Comm_msgs.GHEAD.name.encode()
76 | comm.connect_send_message(msg)
77 | # Wait for an answer in a polling fashion
78 | i = 0
79 | # Important: the <= is not a typo. We want one iteration more of the
80 | # loop to wait for the timeout
81 | while (i <= int(self.outer.client_timeout/CHECK_POLL_TIME)
82 | and not self.outer.sm_shutdown.is_set()):
83 | answer = self.outer.client_answer
84 | if answer is not None:
85 |                 rospy.logdebug(f"{comm.this_node} - Channel " +
86 |                                f"- REQUESTHASH: {answer}")
87 | userdata.out_answer = answer
88 | self.outer.publishState("RequestHash to Reply")
89 | return 'to_req_hash_reply'
90 | rospy.sleep(CHECK_POLL_TIME)
91 | i += 1
92 | if self.outer.sm_shutdown.is_set():
93 | self.outer.publishState("RequestHash to Stopped")
94 | return 'to_stopped'
95 | self.outer.sync.reset()
96 | self.outer.publishState("RequestHash to Idle")
97 | return 'to_idle'
98 |
99 |
100 | class RequestHashReply(smach.State):
101 | def __init__(self, outer):
102 | self.outer = outer
103 | smach.State.__init__(self, outcomes=['to_transmission_end',
104 | 'to_get_data'],
105 | input_keys=['in_answer'],
106 | output_keys=['out_hash_list'])
107 |
108 | def execute(self, userdata):
109 | self.outer.publishState("GetHashReply Start")
110 | deserialized = du.deserialize_headers(userdata.in_answer)
111 | # print("REQUESTHASH: All ->", deserialized)
112 |         # FIXME(fernando): Configure this depending on the message type
114 | hash_list = self.outer.dbl.headers_not_in_local(deserialized,
115 | newer=True)
116 | rospy.logdebug(f"======== - REQUESTHASH: {hash_list}")
117 | if len(hash_list):
118 | # We have hashes. Go get them
119 | # rospy.logdebug(f"{self.this_robot} - REQUESTHASH: Unique -> {hash_list}")
120 | userdata.out_hash_list = hash_list
121 | self.outer.publishState("GetHashReply to GetData")
122 | return 'to_get_data'
123 | # We have no hashes. Sync is complete
124 | self.outer.publishState("GetHashReply to TransmissionEnd")
125 | return 'to_transmission_end'
126 |
127 |
128 | class GetData(smach.State):
129 | def __init__(self, outer):
130 | self.outer = outer
131 | smach.State.__init__(self, outcomes=['to_idle',
132 | 'to_get_data_reply',
133 | 'to_stopped'],
134 | input_keys=['in_hash_list'],
135 | output_keys=['out_hash_list',
136 | 'out_req_hash',
137 | 'out_answer'])
138 |
139 | def execute(self, userdata):
140 | self.outer.publishState("GetData Start")
141 | # Request current comm node
142 | comm = self.outer.get_comm_node()
143 | hash_list = userdata.in_hash_list.copy()
144 | # Get the first hash of the list, the one with the higher priority
145 | req_hash = hash_list.pop(0)
146 | rospy.logdebug(f"{comm.this_node} - Channel - GETDATA: {req_hash}")
147 | # Ask for hash
148 | msg = Comm_msgs.GDATA.name.encode() + req_hash
149 | comm.connect_send_message(msg)
150 | # Wait for an answer in a polling fashion
151 | i = 0
152 | # Important: the <= is not a typo. We want one iteration more of the
153 | # loop to wait for the timeout
154 | while (i <= int(self.outer.client_timeout/CHECK_POLL_TIME)
155 | and not self.outer.sm_shutdown.is_set()):
156 | answer = self.outer.client_answer
157 | if answer is not None:
158 | userdata.out_hash_list = userdata.in_hash_list
159 | userdata.out_answer = answer
160 | userdata.out_req_hash = req_hash
161 | self.outer.publishState("GetData to GetDataReply")
162 | return 'to_get_data_reply'
163 | rospy.sleep(CHECK_POLL_TIME)
164 | i += 1
165 | if self.outer.sm_shutdown.is_set():
166 | self.outer.publishState("GetData to Stopped")
167 | return 'to_stopped'
168 | self.outer.sync.reset()
169 | self.outer.publishState("GetData to Idle")
170 | return 'to_idle'
171 |
172 |
173 | class GetDataReply(smach.State):
174 | def __init__(self, outer):
175 | self.outer = outer
176 | smach.State.__init__(self, outcomes=['to_transmission_end',
177 | 'to_get_more_data'],
178 | input_keys=['in_hash_list',
179 | 'in_answer',
180 | 'in_req_hash'],
181 | output_keys=['out_hash_list'])
182 |
183 | def execute(self, userdata):
184 | self.outer.publishState("GetDataReply Start")
185 | # store result in db
186 | dbm = du.unpack_data(userdata.in_req_hash, userdata.in_answer)
187 | hash_list = userdata.in_hash_list.copy()
188 | self.outer.dbl.add_modify_data(dbm)
189 | hash_list.remove(userdata.in_req_hash)
190 | # rospy.logdebug(f"HASH_LIST {hash_list} REQ_HASH {userdata.in_req_hash}")
191 | # Transition back
192 | if hash_list:
193 | userdata.out_hash_list = hash_list
194 | self.outer.publishState("GetDataReply to GetMoreData")
195 | return 'to_get_more_data'
196 | else:
197 | self.outer.publishState("GetDataReply to TransmissionEnd")
198 | return 'to_transmission_end'
199 |
200 |
201 | class TransmissionEnd(smach.State):
202 | def __init__(self, outer):
203 | self.outer = outer
204 | smach.State.__init__(self, outcomes=['to_idle',
205 | 'to_stopped'])
206 |
207 | def execute(self, userdata):
208 | self.outer.publishState("TransmissionEnd Start")
209 | # Request current comm node
210 | comm = self.outer.get_comm_node()
211 | rospy.logdebug(f"{comm.this_node} - Channel - DENDT")
212 | # Ask for hash
213 | msg = Comm_msgs.DENDT.name.encode() + self.outer.this_robot.encode()
214 | comm.connect_send_message(msg)
215 | # Wait for an answer in a polling fashion
216 | i = 0
217 | # Important: the <= is not a typo. We want one iteration more of the
218 | # loop to wait for the timeout
219 | while (i <= int(self.outer.client_timeout/CHECK_POLL_TIME)
220 | and not self.outer.sm_shutdown.is_set()):
221 | answer = self.outer.client_answer
222 | if answer is not None:
223 | # We received an ACK
224 | self.outer.client_sync_complete_pub.publish(Time(rospy.get_rostime()))
225 | break
226 | rospy.sleep(CHECK_POLL_TIME)
227 | i += 1
228 | if self.outer.sm_shutdown.is_set():
229 | self.outer.publishState("TransmissionEnd to Stopped")
230 | return 'to_stopped'
231 | self.outer.sync.reset()
232 | self.outer.publishState("TransmissionEnd to Idle")
233 | return 'to_idle'
234 |
235 |
236 | # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
237 | """ Bistable class. Allows communication between an external method of
238 | Channel and its state machine. The bistable is set by a method in
239 | Channel to trigger the synchronization, and it is reset inside the
240 | state machine """
241 |
242 |
243 | class Bistable():
244 | def __init__(self):
245 | self.state = False
246 | self.lock = threading.Lock()
247 |
248 | def set(self):
249 | self.lock.acquire()
250 | self.state = True
251 | self.lock.release()
252 |
253 | def reset(self):
254 | self.lock.acquire()
255 | self.state = False
256 | self.lock.release()
257 |
258 | def get_state(self):
259 | return self.state
260 |
261 |
262 | # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
263 | # Channel class
264 |
265 | class Channel():
266 | def __init__(self, dbl, this_robot,
267 | target_robot, robot_configs,
268 | client_timeout):
269 | # Check input arguments
270 | assert type(dbl) is db.DBwLock
271 | assert type(this_robot) is str
272 | assert type(target_robot) is str
273 | assert type(robot_configs) is dict
274 | assert type(client_timeout) is float or type(client_timeout) is int
275 |
276 | # Override smach logger to use rospy loggers
277 | # Use rospy.logdebug for smach info as it is too verbose
278 | # def set_loggers(info,warn,debug,error):
279 | smach.set_loggers(rospy.logdebug, rospy.logwarn,
280 | rospy.logdebug, rospy.logerr)
281 |
282 | # Basic parameters of the communication channel
283 | self.this_robot = this_robot
284 | self.target_robot = target_robot
285 | self.dbl = dbl
286 | # Config file used to fetch configurations
287 | self.robot_configs = robot_configs
288 |
289 | # Client timeout defines the time before an answer is considered lost
290 | self.client_timeout = client_timeout
291 |
292 | # Bistable used to start the synchronization. It will be enabled
293 | # by self.start_sync(), and it will be disabled inside the state
294 | # machine once the synchronization starts
295 | self.sync = Bistable()
296 |
297 |         # This event is cleared in run() before starting the state machine.
298 |         # While it is set, the SM will stop when it reaches the Idle state
299 | self.sm_shutdown = threading.Event()
300 | self.sm_shutdown.set()
301 | # The answer of the client will be written here
302 | self.client_answer = None
303 | # The pointer of the comm node will be stored here
304 | self.comm_node = None
305 | # Create state machine and add states
306 | self.sm = smach.StateMachine(outcomes=['failure', 'stopped'])
307 |
308 | # Create topic to notify that the transmission ended
309 | self.client_sync_complete_pub = rospy.Publisher(f"ddb/client_sync_complete/{self.target_robot}",
310 | Time,
311 | queue_size=20)
312 | self.server_sync_complete_pub = rospy.Publisher(f"ddb/server_sync_complete/{self.target_robot}",
313 | Time,
314 | queue_size=20)
315 |
316 | # Create a topic that prints the current state of the state machine
317 | self.sm_state_pub = rospy.Publisher(f"ddb/client_sm_state/{self.target_robot}",
318 | SM_state,
319 | queue_size=20)
320 | self.sm_state_count = 0
321 |
322 | with self.sm:
323 | smach.StateMachine.add('IDLE',
324 | Idle(self),
325 | transitions={'to_req_hash': 'REQ_HASH',
326 | 'to_stopped': 'stopped'})
327 | smach.StateMachine.add('REQ_HASH',
328 | RequestHash(self),
329 | transitions={'to_idle': 'IDLE',
330 | 'to_req_hash_reply': 'REQ_HASH_REPLY',
331 | 'to_stopped': 'stopped'},
332 | remapping={'out_answer': 'sm_answer'})
333 |
334 | smach.StateMachine.add('REQ_HASH_REPLY',
335 | RequestHashReply(self),
336 | transitions={'to_transmission_end': 'TRANSMISSION_END',
337 | 'to_get_data': 'GET_DATA'},
338 | remapping={'in_answer': 'sm_answer',
339 | 'out_hash_list': 'sm_hash_list'})
340 |
341 | smach.StateMachine.add('GET_DATA',
342 | GetData(self),
343 | transitions={'to_idle': 'IDLE',
344 | 'to_get_data_reply': 'GET_DATA_REPLY',
345 | 'to_stopped': 'stopped'},
346 | remapping={'in_hash_list': 'sm_hash_list',
347 | 'out_hash_list': 'sm_hash_list_2',
348 | 'out_req_hash': 'sm_req_hash',
349 | 'out_answer': 'sm_answer_2' })
350 |
351 | smach.StateMachine.add('GET_DATA_REPLY',
352 | GetDataReply(self),
353 | transitions={'to_transmission_end': 'TRANSMISSION_END',
354 | 'to_get_more_data': 'GET_DATA'},
355 | remapping={'in_hash_list': 'sm_hash_list_2',
356 | 'in_req_hash': 'sm_req_hash',
357 | 'in_answer': 'sm_answer_2',
358 | 'out_hash_list': 'sm_hash_list'})
359 |
360 | smach.StateMachine.add('TRANSMISSION_END',
361 | TransmissionEnd(self),
362 | transitions={'to_idle': 'IDLE',
363 | 'to_stopped': 'stopped'})
364 |
365 | def publishState(self, msg):
366 |         """ Publish the string msg as the current state of the state
367 |         machine, stamped with the current time """
368 | assert type(msg) is str
369 | state_msg = SM_state()
370 | state_msg.header.stamp = rospy.Time.now()
371 | state_msg.header.frame_id = self.this_robot
372 | state_msg.header.seq = self.sm_state_count
373 | self.sm_state_count += 1
374 | state_msg.state = msg
375 | self.sm_state_pub.publish(state_msg)
376 |
377 | def run(self):
378 | """ Configures the zmq_comm_node and also starts the state
379 | machine thread"""
380 | # The comm node needs to be created first, as it may be required
381 | # by the SM
382 | self.comm_node = zmq_comm_node.Comm_node(self.this_robot,
383 | self.target_robot,
384 | self.robot_configs,
385 | self.callback_client,
386 | self.callback_server,
387 | self.client_timeout)
388 | # Unset this flag before starting the SM thread
389 | self.sm_shutdown.clear()
390 | self.th = threading.Thread(target=self.sm_thread, args=())
391 |         self.th.daemon = True
392 | self.th.start()
393 |
394 | def stop(self):
395 | # Set the flag and wait until the state machine finishes
396 | self.sm_shutdown.set()
397 | self.th.join()
398 |
399 |
400 | def sm_thread(self):
401 | # Start the state machine and wait until it ends
402 | rospy.logwarn(f"Channel {self.this_robot} -> {self.target_robot} started")
403 | outcome = self.sm.execute()
404 | exit_msg = f"Channel {self.this_robot} -> {self.target_robot}" + \
405 | f" finished with outcome: {outcome}"
406 | if outcome == 'failure':
407 | rospy.logerr(exit_msg)
408 | elif outcome == 'stopped':
409 | rospy.logwarn(exit_msg)
410 | # Terminate the comm node once the state machine ends
411 | self.comm_node.terminate()
412 |
413 | def get_comm_node(self):
414 | if not self.comm_node:
415 |             rospy.logerr("Requested comm node before it was created")
416 |             rospy.signal_shutdown("Requested comm node before it was created")
417 | rospy.spin()
418 | return self.comm_node
419 |
420 | def trigger_sync(self):
421 | if self.sync.get_state():
422 | rospy.logwarn(f"{self.this_robot} <- {self.target_robot}: Channel Busy")
423 | else:
424 | self.sync.set()
425 |
426 | def callback_client(self, msg):
427 | if msg is not None:
428 | rospy.logdebug(f"{self.this_robot} - Channel - CALLBACK_CLIENT: len: {len(msg)}")
429 | else:
430 | rospy.logdebug(f"{self.this_robot} - Channel - CALLBACK_CLIENT - None")
431 | self.client_answer = msg
432 |
433 | def callback_server(self, msg):
434 | rospy.logdebug(f"{self.this_robot} - Channel - CALLBACK_SERVER: {msg}")
435 | header = msg[:CODE_LENGTH].decode()
436 | data = msg[CODE_LENGTH:]
437 |
438 | if header == Comm_msgs.GHEAD.name:
439 | # Returns all the headers that this node has
440 | # FIXME(Fernando): Configure this depending on the message type
441 | headers = self.dbl.get_header_list(filter_latest=True)
442 | rospy.logdebug(f"{self.this_robot} - Channel - Sending {len(headers)} headers")
443 | rospy.logdebug(f"{self.this_robot} - Channel - {headers}")
444 | serialized = du.serialize_headers(headers)
445 | return serialized
446 | if header == Comm_msgs.GDATA.name:
447 | r_header = data
448 |             # Returns packed data for the required header
449 | # One header at a time
450 | if len(data) != HEADER_LENGTH:
451 | rospy.logerr(f"{self.this_robot} - Wrong header length: {len(data)}")
452 | return Comm_msgs.SERRM.name.encode()
453 | try:
454 | dbm = self.dbl.find_header(r_header)
455 | packed = du.pack_data(dbm)
456 | return packed
457 | except Exception:
458 | rospy.logerr(f"{self.this_robot} - Header not found: {r_header}")
459 | return Comm_msgs.SERRM.name.encode()
460 | if header == Comm_msgs.DENDT.name:
461 | target = data
462 | if target.decode() != self.target_robot:
463 |                 rospy.logwarn(f"{self.this_robot} - Channel - Wrong DENDT -" +
464 |                               f" Target: {target.decode()} - " +
465 |                               f"My target: {self.target_robot}")
466 | self.server_sync_complete_pub.publish(Time(rospy.get_rostime()))
467 | return "Ack".encode()
468 | return Comm_msgs.SERRM.name.encode()
469 |
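# Illustrative usage sketch (not part of the original module): a Channel is a
# one-directional synchronization client from this_robot towards target_robot;
# both robots run one Channel per peer. trigger_sync() arms the bistable and
# the state machine performs one GHEAD/GDATA/DENDT exchange. robot_configs is
# assumed to be a loaded robot_configs.yaml where "basestation" is a client of
# "charon", and a ROS node must already be initialized.
def _demo_channel_usage(robot_configs):
    dbl = db.DBwLock()
    channel = Channel(dbl, "charon", "basestation", robot_configs, 2.0)
    channel.run()            # starts the zmq comm node and the SM thread
    channel.trigger_sync()   # one sync pass (completes only if the peer replies)
    rospy.sleep(5.0)
    channel.stop()           # stops the state machine and joins the thread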
--------------------------------------------------------------------------------