├── .gitattributes ├── .gitignore ├── Docs ├── Architecture.md ├── General UML.jpg ├── TextMaps.png ├── afterinj.png ├── beforeinj.png ├── motivtraj.png └── vision_opencv Issues.md ├── LICENSE ├── README.md ├── ncore ├── CMakeLists.txt ├── data │ ├── 8 │ │ ├── VOSet.txt │ │ ├── YouBotMap.pgm │ │ ├── YouBotMap.yaml │ │ └── optitrack.yaml │ ├── 8_high_res │ │ ├── YouBotMap.pgm │ │ ├── YouBotMap.yaml │ │ └── optitrack.yaml │ ├── config │ │ ├── cam0.config │ │ ├── front_laser.config │ │ ├── front_laser.yaml │ │ ├── rear_laser.config │ │ ├── rear_laser.yaml │ │ └── textspotting.config │ ├── floor │ │ ├── 2021_10_27 │ │ │ ├── YouBotMap.pgm │ │ │ ├── YouBotMap.yaml │ │ │ ├── YouBotMapRoomSeg.png │ │ │ ├── editor.xml │ │ │ ├── floor.config │ │ │ ├── nmcl.config │ │ │ └── textspotting.json │ │ ├── Faro │ │ │ ├── FMap.pgm │ │ │ ├── FMap.png │ │ │ ├── FMap.yaml │ │ │ ├── FMapRoomSeg.png │ │ │ ├── TextMaps │ │ │ │ ├── Room 1.png │ │ │ │ ├── Room 10.png │ │ │ │ ├── Room 2.png │ │ │ │ ├── Room 3.png │ │ │ │ ├── Room 4.png │ │ │ │ ├── Room 5.png │ │ │ │ ├── Room 6.png │ │ │ │ ├── Room 7.png │ │ │ │ ├── Room 8.png │ │ │ │ └── Room 9.png │ │ │ ├── editor.xml │ │ │ ├── floor.config │ │ │ ├── nmcl.config │ │ │ └── textspotting.json │ │ ├── GMap │ │ │ ├── GMap.png │ │ │ ├── GMap.yaml │ │ │ ├── GMapRoomSeg.png │ │ │ ├── TextMaps │ │ │ │ ├── Room 1.png │ │ │ │ ├── Room 10.png │ │ │ │ ├── Room 2.png │ │ │ │ ├── Room 3.png │ │ │ │ ├── Room 4.png │ │ │ │ ├── Room 5.png │ │ │ │ ├── Room 6.png │ │ │ │ ├── Room 7.png │ │ │ │ ├── Room 8.png │ │ │ │ └── Room 9.png │ │ │ ├── editor.xml │ │ │ ├── floor.config │ │ │ └── nmcl.config │ │ └── test │ │ │ ├── YouBotMap.pgm │ │ │ ├── YouBotMap.yaml │ │ │ ├── YouBotMapRoomSeg.png │ │ │ ├── editor.xml │ │ │ ├── floor.config │ │ │ ├── floor_seeds.xml │ │ │ └── nmcl.config │ ├── json │ │ └── nmcl.config │ └── text │ │ ├── lebron.jpg │ │ └── test.jpg ├── ndl │ ├── CMakeLists.txt │ ├── include │ │ ├── ImageUtils.h │ │ └── TextSpotting.h │ ├── src │ │ ├── CMakeLists.txt │ │ ├── ImageUtils.cpp │ │ ├── TextRecoFromFolder.cpp │ │ ├── TextRecoFromFolder2.cpp │ │ └── TextSpotting.cpp │ └── tst │ │ ├── CMakeLists.txt │ │ └── NDLUnitTests.cpp ├── nengine │ ├── CMakeLists.txt │ ├── include │ │ ├── DataFrameLoader.h │ │ └── NMCLEngine.h │ ├── src │ │ ├── CMakeLists.txt │ │ ├── DataFrameLoader.cpp │ │ ├── NMCLEngine.cpp │ │ └── df_loader.py │ └── tst │ │ ├── CMakeLists.txt │ │ ├── EngineBenchmark.cpp │ │ └── main.cpp ├── nmap │ ├── CMakeLists.txt │ ├── data │ │ ├── YouBotMap.pgm │ │ ├── YouBotMap.yaml │ │ └── YouBotMapRoomSeg.png │ ├── include │ │ ├── FloorMap.h │ │ ├── GMap.h │ │ ├── IMap2D.h │ │ ├── Lift.h │ │ ├── Object.h │ │ └── Room.h │ ├── src │ │ ├── CMakeLists.txt │ │ ├── FloorMap.cpp │ │ ├── GMap.cpp │ │ ├── Lift.cpp │ │ ├── Object.cpp │ │ ├── Room.cpp │ │ └── RoomSegmentation.cpp │ └── tst │ │ ├── CMakeLists.txt │ │ └── NMapUnitTests.cpp ├── nmcl │ ├── CMakeLists.txt │ ├── include │ │ ├── AggressiveLowVarianceResampling.h │ │ ├── BeamEnd.h │ │ ├── FSR.h │ │ ├── GaussianPredictStrategy.h │ │ ├── LidarData.h │ │ ├── LowVarianceResampling.h │ │ ├── MixedFSR.h │ │ ├── MotionModel.h │ │ ├── NMCL.h │ │ ├── NMCLFactory.h │ │ ├── Particle.h │ │ ├── PlaceRecognition.h │ │ ├── PredictStrategy.h │ │ ├── ReNMCL.h │ │ ├── Resampling.h │ │ ├── SensorData.h │ │ ├── SensorModel.h │ │ ├── SetStatistics.h │ │ └── UniformPredictStrategy.h │ ├── src │ │ ├── AggressiveLowVarianceResampling.cpp │ │ ├── BeamEnd.cpp │ │ ├── CMakeLists.txt │ │ ├── FSR.cpp │ │ ├── GaussianPredictStrategy.cpp │ │ ├── 
LowVarianceResampling.cpp │ │ ├── MixedFSR.cpp │ │ ├── NMCL.cpp │ │ ├── NMCLFactory.cpp │ │ ├── Particle.cpp │ │ ├── PlaceRecognition.cpp │ │ ├── ReNMCL.cpp │ │ ├── SetStatistics.cpp │ │ └── UniformPredictStrategy.cpp │ └── tst │ │ ├── CMakeLists.txt │ │ ├── NMCLUnitTests.cpp │ │ └── verifyInit.py └── nsensors │ ├── CMakeLists.txt │ ├── include │ ├── Camera.h │ ├── Lidar2D.h │ ├── OptiTrack.h │ └── Utils.h │ ├── src │ ├── CMakeLists.txt │ ├── Camera.cpp │ ├── Lidar2D.cpp │ ├── OptiTrack.cpp │ └── Utils.cpp │ └── tst │ ├── CMakeLists.txt │ └── NSensorsUnitTests.cpp └── ros1_ws └── src ├── CMakeLists.txt ├── data_processing ├── CMakeLists.txt ├── launch │ ├── allsync.launch │ ├── amcleval.launch │ ├── gtviz.launch │ ├── syncgtrs.launch │ └── synclidar.launch ├── package.xml ├── rviz │ ├── amcl.rviz │ └── gtviz.rviz ├── src │ ├── AMCLEval.py │ ├── AddPredictions.py │ ├── CameraDumpNode.py │ ├── ExtractTextPoses (copy).py │ ├── ExtractTextPoses.py │ ├── GMAP.py │ ├── GTpickle.py │ ├── OdomDumpNode.py │ ├── PickleBroadcastNode.py │ ├── PlotBBox.py │ ├── SyncGPandRSNode.py │ ├── SyncLidarNode.py │ ├── VizBBoxNode.py │ ├── checkPickle.py │ ├── computeMetrics.py │ ├── computeMetricsStatic.py │ ├── computeMetricsdynamic.py │ ├── evalutils.py │ ├── mergepickles.py │ ├── plotResults.py │ ├── ploterrorvsparticlenum.py │ └── ploterrorvstime.py └── srv │ └── NoArguments.srv ├── evaluation ├── CMakeLists.txt ├── package.xml └── src │ ├── GMAP.py │ ├── converge.py │ ├── convplot.py │ ├── evaluation.py │ ├── evalutils.py │ ├── ploterrorAVG.py │ ├── ploterrorpartnumAVG.py │ ├── ploterrorvsparticlenumConv.py │ ├── plotgttrajectoryandprediction.py │ └── plottextmaps.py ├── nmcl_msgs ├── CMakeLists.txt ├── msg │ ├── LidarScanMask.msg │ ├── MergedLaserScan.msg │ └── TextArray.msg └── package.xml └── nmcl_ros ├── CMakeLists.txt ├── config ├── 480x640 │ ├── cam0.config │ ├── cam1.config │ ├── cam2.config │ └── cam3.config ├── cam0.config ├── cam1.config ├── cam2.config ├── cam3.config ├── front_laser.config ├── front_laser.yaml ├── rear_laser.config └── rear_laser.yaml ├── include └── RosUtils.h ├── launch ├── camerasync.launch ├── confignmcl.launch └── textrec.launch ├── package.xml ├── rviz ├── calib.rviz ├── confignmcl.rviz ├── humanmask.rviz ├── nmcl.rviz ├── panopticnmcl.rviz └── vonmcl.rviz └── src ├── CMakeLists.txt ├── ConfigNMCLNode.cpp ├── RosUtils.cpp ├── ScanMergeNode.cpp └── TextRecoNode.cpp /.gitattributes: -------------------------------------------------------------------------------- 1 | data/*.yaml filter=lfs diff=lfs merge=lfs -text 2 | data/*.bin filter=lfs diff=lfs merge=lfs -text 3 | data/*.pgm filter=lfs diff=lfs merge=lfs -text 4 | *.bin filter=lfs diff=lfs merge=lfs -text 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.cmake 2 | CMakeCache.txt 3 | ros1_ws/src/legdetector_pyros/* 4 | ros1_ws/src/odom_eval/* 5 | nmcl/src/CMakeFiles/ 6 | nmcl/tst/CMakeFiles/ 7 | nmcl/CMakeFiles/ 8 | ros1_ws/build/ 9 | ncore/build/ 10 | *__pycache__/ 11 | *Makefile 12 | -------------------------------------------------------------------------------- /Docs/Architecture.md: -------------------------------------------------------------------------------- 1 | # Software Architecture 2 | 3 | There are a few main concepts that drove the development of the system. 4 | 5 | ### Core-ROS Separation 6 | The core C++ library that runs the NMCL algorithm does not include ROS code. 
That was done because it is good practice, and because we planned to develop for ROS1 first and then transition to ROS2 once the code is more mature. 7 | 8 | ### Modularity 9 | The [NMCL](nmcl/include/NMCL.h) class can be seen as an engine driving the MCL-type algorithm. The [motion model](nmcl/include/MotionModel.h) and [sensor model](nmcl/include/SensorModel.h) are abstracted through an interface, allowing the developer to change these models easily without adjusting the localization engine. This enables quick evaluation of different models, isolating their impact. 10 | 11 | ### Data Source Abstraction 12 | A common interface was defined for the localization engine, and preprocessing is done depending on the source of the data. At the highest level, the functions and the order in which they are called remain the same whether the data is read from binary/text files or received through ROS topics. At a lower level, the localization engine only knows that it is receiving a scan in the form of heading angles and ranges. The developer can therefore easily adjust the algorithm to receive scans from one or more range-finding devices.
13 | Example of a standard localization process (a minimal code sketch of this flow is shown below): 14 | * Initialize the NMCL class with the motion and sensor models 15 | * Call the Predict method with an input that is appropriate for your motion model 16 | * Call the Correct method with an input that is appropriate for your sensor model. This step also includes the resampling strategy - I might later make the resampling a class you can pass to the engine 17 | * Call the Stats method to get the pose prediction with its covariance 18 | 19 | ## Design Intro 20 | In our ROS wrappers, we implemented two motion models for odometry sources: 21 | * [FSR](nmcl/include/FSR.h) - one odometry source 22 | * [MixedFSR](nmcl/include/MixedFSR.h) - two odometry sources that we consider with equal probability 23 | Both target omnidirectional platforms.
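Below is a minimal sketch of the Predict/Correct/Stats flow described above, driving the core library directly from C++ without ROS. It is illustrative only: the factory call, the method signatures, and the `SetStatistics` accessors are assumptions inferred from the headers in this repository (`nmcl/include/NMCLFactory.h`, `nmcl/include/ReNMCL.h`, `nmcl/include/SetStatistics.h`, `nengine/include/NMCLEngine.h`), not the verified API, and the config path is only an example.

```cpp
// Hypothetical stand-alone driver for the core library (no ROS).
// All calls marked "assumed" are inferred from the headers, not verified.
#include <iostream>
#include <memory>
#include <vector>
#include <Eigen/Dense>

#include "NMCLFactory.h"     // assumed: builds a ReNMCL engine from an nmcl.config file
#include "ReNMCL.h"
#include "SetStatistics.h"

int main()
{
    // 1. Initialize: the config selects the motion model, sensor model,
    //    resampling strategy and particle count (see ncore/data/*/nmcl.config).
    std::shared_ptr<ReNMCL> engine =
        NMCLFactory::Create("ncore/data/floor/test/nmcl.config");              // assumed signature

    // Dummy inputs standing in for a real odometry step and a real scan.
    std::vector<Eigen::Vector3f> odom = {Eigen::Vector3f(0.05f, 0.0f, 0.01f)}; // (dx, dy, dtheta)
    std::vector<float> odomWeights = {1.0f};
    Eigen::Vector3f odomNoise(0.02f, 0.02f, 0.02f);
    std::vector<Eigen::Vector3f> scan(1041, Eigen::Vector3f(1.0f, 0.0f, 0.0f)); // 3D points in base_link

    // 2. Predict with an input appropriate for the motion model (FSR/MixedFSR deltas).
    engine->Predict(odom, odomWeights, odomNoise);                              // assumed signature

    // 3. Correct with an input appropriate for the sensor model
    //    (the scan is given as 3D points in the base link frame).
    engine->Correct(scan);                                                      // assumed signature

    // 4. Query the particle set statistics for the pose estimate and covariance.
    SetStatistics stats = engine->Stats();
    std::cout << "estimated pose: " << stats.Mean().transpose() << std::endl;   // assumed accessor
    return 0;
}
```

In this repository, the ROS node sources under ros1_ws/src/nmcl_ros/src (e.g. ConfigNMCLNode.cpp) and the nengine benchmarks appear to play this driver role, feeding the engine from ROS topics or pickled data frames instead of the dummy inputs above.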
24 | 25 | Our sensor model, [BeamEnd](nmcl/include/BeamEnd.h), requires that the range scans are converted to 3D points in the coordinate frame of the base link. In our ROS wrapper, we combine scans from the front and rear lidars into a "virtual scan", centering both scans around the base link, which is set to be the origin (0, 0, 0). This preprocessing step can be adjusted to suit your needs, e.g. processing only one lidar scan (or more than two). The sensor (and motion) model can also be replaced in its entirety, provided that it complies with the interface. 26 | 27 | ## UML 28 | 29 | ![General UML](General UML.jpg "General UML") 30 | 31 | 32 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /Docs/General UML.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/Docs/General UML.jpg -------------------------------------------------------------------------------- /Docs/TextMaps.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/Docs/TextMaps.png -------------------------------------------------------------------------------- /Docs/afterinj.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/Docs/afterinj.png -------------------------------------------------------------------------------- /Docs/beforeinj.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/Docs/beforeinj.png -------------------------------------------------------------------------------- /Docs/motivtraj.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/Docs/motivtraj.png -------------------------------------------------------------------------------- /Docs/vision_opencv Issues.md: -------------------------------------------------------------------------------- 1 | If the project does not compile, the reason might be that vision_opencv is still built against OpenCV 4.2. 2 | To force compilation with OpenCV 4.5, you can edit two CMakeLists.txt files.
3 | For [cv_bridge](https://github.com/ros-perception/vision_opencv/blob/noetic/cv_bridge/CMakeLists.txt), edit lines 20-25 as follows 4 | 5 | ```bash 6 | set(_opencv_version 4.5) 7 | find_package(OpenCV 4.5 QUIET) 8 | if(NOT OpenCV_FOUND) 9 | message(STATUS "Did not find OpenCV 4.5") 10 | endif() 11 | ``` 12 | 13 | For [image_geometry](https://github.com/ros-perception/vision_opencv/blob/noetic/image_geometry/CMakeLists.txt), edit line 5 14 | 15 | ```bash 16 | find_package(OpenCV 4.5 REQUIRED) 17 | ``` 18 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Nicky Zimmerman and Cyrill Stachniss, Photogrammetry & Robotics Bonn, University of Bonn 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /ncore/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.0.2) 2 | project(ncore) 3 | 4 | set(CMAKE_CXX_FLAGS "-std=c++11 -O3 -Wall ${CMAKE_CXX_FLAGS}") 5 | 6 | 7 | set(CMAKE_CXX_STANDARD 14) 8 | 9 | 10 | 11 | find_package(Eigen3 REQUIRED) 12 | find_package(OpenCV 4.5 REQUIRED) 13 | if(OpenCV_FOUND) 14 | message(STATUS "Found OpenCV version ${OpenCV_VERSION}") 15 | message(STATUS "OpenCV directories: ${OpenCV_INCLUDE_DIRS}") 16 | include_directories(${OpenCV_INCLUDE_DIRS}) 17 | else() 18 | message(FATAL_ERROR "OpenCV not found, please read the README.md") 19 | endif(OpenCV_FOUND) 20 | 21 | 22 | find_package(Boost COMPONENTS serialization system filesystem) 23 | if(Boost_FOUND) 24 | message(STATUS "Found Boost version ${Boost_VERSION}") 25 | message(STATUS "Boost directories: ${Boost_INCLUDE_DIRS}") 26 | include_directories(${Boost_INCLUDE_DIRS}) 27 | else() 28 | message(FATAL_ERROR "Boost not found, please read the README.md") 29 | endif(Boost_FOUND) 30 | 31 | find_package(nlohmann_json 3.2.0 REQUIRED) 32 | if(nlohmann_json_FOUND) 33 | message(STATUS "Found nlohmann_json") 34 | else() 35 | message(FATAL_ERROR "nlohmann_json not found, please read the README.md") 36 | endif(nlohmann_json_FOUND) 37 | 38 | if(BUILD_TESTING) 39 | enable_testing() 40 | find_package(GTest REQUIRED) 41 | if(GTEST_FOUND) 42 | message(STATUS "GTest directories: ${GTEST_INCLUDE_DIRS}") 43 | include_directories(${GTEST_INCLUDE_DIRS}) 44 | else() 45 | message(FATAL_ERROR "GTEST_FOUND not found, please read the README.md") 46 | endif(GTEST_FOUND) 47 | endif() 48 | 49 | #add_subdirectory(nlohmann_json) 50 | add_subdirectory(nsensors) 51 | add_subdirectory(nmap) 52 | add_subdirectory(nmcl) 53 | add_subdirectory(ndl) 54 | add_subdirectory(nengine) 55 | -------------------------------------------------------------------------------- /ncore/data/8/YouBotMap.pgm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/8/YouBotMap.pgm -------------------------------------------------------------------------------- /ncore/data/8/YouBotMap.yaml: -------------------------------------------------------------------------------- 1 | image: YouBotMap.pgm 2 | resolution: 0.050000 3 | origin: [-12.200000, -17.000000, 0.000000] 4 | negate: 0 5 | occupied_thresh: 0.65 6 | free_thresh: 0.196 7 | 8 | -------------------------------------------------------------------------------- /ncore/data/8/optitrack.yaml: -------------------------------------------------------------------------------- 1 | origin: [1.3097140789031982, 0.5226072072982788, -3.138] 2 | -------------------------------------------------------------------------------- /ncore/data/8_high_res/YouBotMap.pgm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/8_high_res/YouBotMap.pgm -------------------------------------------------------------------------------- /ncore/data/8_high_res/YouBotMap.yaml: -------------------------------------------------------------------------------- 1 | image: YouBotMap.pgm 2 | resolution: 0.010000 3 | origin: [-11.240000, -15.720000, 0.000000] 4 | negate: 0 5 | occupied_thresh: 0.65 6 | free_thresh: 0.196 7 | 8 | 
-------------------------------------------------------------------------------- /ncore/data/8_high_res/optitrack.yaml: -------------------------------------------------------------------------------- 1 | origin: [1.3097140789031982, 0.5226072072982788, -3.138] 2 | -------------------------------------------------------------------------------- /ncore/data/config/cam0.config: -------------------------------------------------------------------------------- 1 | { 2 | "id": 0, 3 | "yaw" : 0, 4 | "k" : [614.6570434570312, 0.0, 326.6780090332031, 0.0, 614.308349609375, 242.25645446777344, 0.0, 0.0, 1.0], 5 | "t" : [0, 0, 1, -1, 0, 0, 0, -1, 0] 6 | } -------------------------------------------------------------------------------- /ncore/data/config/front_laser.config: -------------------------------------------------------------------------------- 1 | { 2 | "name": "front_laser", 3 | "angle_max" : 2.268928050994873, 4 | "angle_min" : -2.268928050994873, 5 | "num_beams" : 1041, 6 | "origin" : [0.25, 0.155, 0.785] 7 | } 8 | -------------------------------------------------------------------------------- /ncore/data/config/front_laser.yaml: -------------------------------------------------------------------------------- 1 | name: front_laser 2 | angle_max: 2.268928050994873 3 | angle_min: -2.268928050994873 4 | num_beams: 1041 5 | origin: [0.25, 0.155, 0.785] 6 | -------------------------------------------------------------------------------- /ncore/data/config/rear_laser.config: -------------------------------------------------------------------------------- 1 | { 2 | "name": "rear_laser", 3 | "angle_max" : 2.268928050994873, 4 | "angle_min" : -2.268928050994873, 5 | "num_beams" : 1041, 6 | "origin" : [-0.25, -0.155, -2.356] 7 | } 8 | -------------------------------------------------------------------------------- /ncore/data/config/rear_laser.yaml: -------------------------------------------------------------------------------- 1 | name: rear_laser 2 | angle_max: 2.268928050994873 3 | angle_min: -2.268928050994873 4 | num_beams: 1041 5 | origin: [-0.25, -0.155, -2.356] 6 | -------------------------------------------------------------------------------- /ncore/data/config/textspotting.config: -------------------------------------------------------------------------------- 1 | { 2 | "detModelPath": "/home/nickybones/Code/OmniNMCL/ncore/data/text/DB_IC15_resnet18.onnx", 3 | "recModelPath": "/home/nickybones/Code/OmniNMCL/ncore/data/text/crnn_cs.onnx", 4 | "vocPath": "/home/nickybones/Code/OmniNMCL/ncore/data/text/alphabet_94.txt", 5 | "height": 480, 6 | "width": 640, 7 | "binThresh": 0.3, 8 | "polyThresh" : 0.5, 9 | "maxCandidates" : 50, 10 | "unclipRatio" : 2.0 11 | } 12 | -------------------------------------------------------------------------------- /ncore/data/floor/2021_10_27/YouBotMap.pgm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/2021_10_27/YouBotMap.pgm -------------------------------------------------------------------------------- /ncore/data/floor/2021_10_27/YouBotMap.yaml: -------------------------------------------------------------------------------- 1 | image: YouBotMap.pgm 2 | resolution: 0.050000 3 | origin: [-17.000000, -12.200000, 0.000000] 4 | negate: 0 5 | occupied_thresh: 0.65 6 | free_thresh: 0.196 7 | 8 | -------------------------------------------------------------------------------- /ncore/data/floor/2021_10_27/YouBotMapRoomSeg.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/2021_10_27/YouBotMapRoomSeg.png -------------------------------------------------------------------------------- /ncore/data/floor/2021_10_27/editor.xml: -------------------------------------------------------------------------------- 1 | 22 serialization::archive 17 0 0 0 0 13 0 0 0 9 Not$Valid 7 general 0 0 0 0 0 6 Room 1 7 general 1 0 0 6 Room 2 7 general 2 0 0 6 Room 3 7 general 3 0 0 6 Room 4 7 general 4 0 0 6 Room 5 7 general 5 0 0 6 Room 6 7 general 6 0 0 6 Room 7 7 general 7 0 0 6 Room 8 7 general 8 0 0 6 Room 9 7 general 9 0 0 7 Room 10 7 general 10 0 0 7 Room 11 7 general 11 0 0 7 Room 12 7 general 12 0 0 0 0 13 0 0 0 0.000000000e+00 0.000000000e+00 0.000000000e+00 -4.800000191e+00 1.950000000e+01 0.000000000e+00 -4.599999428e+00 1.604999924e+01 0.000000000e+00 -6.649999619e+00 6.650000572e+00 0.000000000e+00 -4.050000191e+00 6.250000954e+00 0.000000000e+00 -6.800000191e+00 4.550000191e+00 0.000000000e+00 -2.449999809e+00 4.000005722e-01 0.000000000e+00 -2.849999428e+00 5.100001335e+00 0.000000000e+00 -3.050000191e+00 8.900000572e+00 0.000000000e+00 -3.199999809e+00 1.265000057e+01 0.000000000e+00 -3.899999619e+00 2.225000000e+01 0.000000000e+00 -4.099999428e+00 1.355000019e+01 0.000000000e+00 -3.349999428e+00 8.500003815e-01 0.000000000e+00 -------------------------------------------------------------------------------- /ncore/data/floor/2021_10_27/floor.config: -------------------------------------------------------------------------------- 1 | { 2 | "name" : "0", 3 | "editor" : "editor.xml", 4 | "roomSeg" : "YouBotMapRoomSeg.png", 5 | "map": { 6 | "type" : "GMap", 7 | "image": "YouBotMap.pgm", 8 | "resolution" : 0.050000, 9 | "origin" : [-17.000000, -12.200000, 0.000000], 10 | "negate" : 0, 11 | "occupied_thresh" : 0.65, 12 | "free_thresh" : 0.196 13 | } 14 | } 15 | 16 | -------------------------------------------------------------------------------- /ncore/data/floor/2021_10_27/nmcl.config: -------------------------------------------------------------------------------- 1 | { 2 | "injRatio": 1.0, 3 | "floorMapPath": "/home/nickybones/Code/YouBotMCL/ncore/data/floor/2021_10_27/floor.config", 4 | "motionModel": "MixedFSR", 5 | "numParticles": 10000, 6 | "predictStrategy": "Uniform", 7 | "resampling": "LowVarianceResampling", 8 | "sensorModel": { 9 | "likelihoodSigma": 2, 10 | "maxRange": 15, 11 | "type": "BeamEnd", 12 | "weightingScheme": 3 13 | }, 14 | "tracking": false 15 | } 16 | -------------------------------------------------------------------------------- /ncore/data/floor/2021_10_27/textspotting.json: -------------------------------------------------------------------------------- 1 | { 2 | "detModelPath": "/home/nickybones/Code/YouBotMCL/ncore/data/text/DB_IC15_resnet18.onnx", 3 | "recModelPath": "/home/nickybones/Code/YouBotMCL/ncore/data/text/crnn_cs.onnx", 4 | "vocPath": "/home/nickybones/Code/YouBotMCL/ncore/data/text/alphabet_94.txt", 5 | "height": 480, 6 | "width": 640, 7 | "binThresh": 0.3, 8 | "polyThresh" : 0.5, 9 | "maxCandidates" : 50, 10 | "unclipRatio" : 2.0 11 | } 12 | -------------------------------------------------------------------------------- /ncore/data/floor/Faro/FMap.pgm: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/Faro/FMap.pgm -------------------------------------------------------------------------------- /ncore/data/floor/Faro/FMap.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/Faro/FMap.png -------------------------------------------------------------------------------- /ncore/data/floor/Faro/FMap.yaml: -------------------------------------------------------------------------------- 1 | image: FMap.pgm 2 | resolution: 0.05 3 | origin: 4 | - -13.9155 5 | - -10.99537 6 | - 0.0 7 | negate: 0 8 | occupied_thresh: 0.65 9 | free_thresh: 0.196 10 | -------------------------------------------------------------------------------- /ncore/data/floor/Faro/FMapRoomSeg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/Faro/FMapRoomSeg.png -------------------------------------------------------------------------------- /ncore/data/floor/Faro/TextMaps/Room 1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/Faro/TextMaps/Room 1.png -------------------------------------------------------------------------------- /ncore/data/floor/Faro/TextMaps/Room 10.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/Faro/TextMaps/Room 10.png -------------------------------------------------------------------------------- /ncore/data/floor/Faro/TextMaps/Room 2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/Faro/TextMaps/Room 2.png -------------------------------------------------------------------------------- /ncore/data/floor/Faro/TextMaps/Room 3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/Faro/TextMaps/Room 3.png -------------------------------------------------------------------------------- /ncore/data/floor/Faro/TextMaps/Room 4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/Faro/TextMaps/Room 4.png -------------------------------------------------------------------------------- /ncore/data/floor/Faro/TextMaps/Room 5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/Faro/TextMaps/Room 5.png -------------------------------------------------------------------------------- /ncore/data/floor/Faro/TextMaps/Room 6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/Faro/TextMaps/Room 6.png -------------------------------------------------------------------------------- 
/ncore/data/floor/Faro/TextMaps/Room 7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/Faro/TextMaps/Room 7.png -------------------------------------------------------------------------------- /ncore/data/floor/Faro/TextMaps/Room 8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/Faro/TextMaps/Room 8.png -------------------------------------------------------------------------------- /ncore/data/floor/Faro/TextMaps/Room 9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/Faro/TextMaps/Room 9.png -------------------------------------------------------------------------------- /ncore/data/floor/Faro/editor.xml: -------------------------------------------------------------------------------- 1 | 22 serialization::archive 17 0 0 0 0 13 0 0 0 8 NotValid 7 general 0 0 0 0 0 6 Room 1 7 general 1 0 0 6 Room 2 7 general 2 0 0 6 Room 3 7 general 3 0 0 6 Room 4 7 general 4 0 0 6 Room 5 7 general 5 0 0 6 Room 6 7 general 6 0 0 6 Room 7 7 general 7 0 0 6 Room 8 7 general 8 0 0 6 Room 9 7 general 9 0 0 7 Room 10 7 general 10 0 0 7 Room 11 7 general 11 0 0 7 Room 12 7 general 12 0 0 0 0 13 0 0 0 0.000000000e+00 0.000000000e+00 0.000000000e+00 7.934500694e+00 -2.795370102e+00 0.000000000e+00 4.334500313e+00 -2.995369911e+00 0.000000000e+00 -6.015499592e+00 -8.453693390e-01 0.000000000e+00 -5.715499878e+00 -3.095369816e+00 0.000000000e+00 -7.815499783e+00 -2.453699112e-01 0.000000000e+00 -1.136549950e+01 -4.195369720e+00 0.000000000e+00 -7.265499592e+00 -4.295369625e+00 0.000000000e+00 -3.665499687e+00 -4.345369816e+00 0.000000000e+00 9.845008850e-01 -4.045369625e+00 0.000000000e+00 9.134501457e+00 -3.945369720e+00 0.000000000e+00 8.345003128e-01 -3.545369625e+00 0.000000000e+00 -1.096549988e+01 -3.995369911e+00 0.000000000e+00 -------------------------------------------------------------------------------- /ncore/data/floor/Faro/floor.config: -------------------------------------------------------------------------------- 1 | { 2 | "name" : "0", 3 | "editor" : "editor.xml", 4 | "roomSeg" : "FMapRoomSeg.png", 5 | "map": { 6 | "type" : "GMap", 7 | "image": "FMap.pgm", 8 | "resolution" : 0.050000, 9 | "origin" : [-13.9155, -10.99537, 0.0], 10 | "negate" : 0, 11 | "occupied_thresh" : 0.65, 12 | "free_thresh" : 0.196 13 | } 14 | } 15 | 16 | -------------------------------------------------------------------------------- /ncore/data/floor/Faro/nmcl.config: -------------------------------------------------------------------------------- 1 | { 2 | "injRatio": 0.5, 3 | "floorMapPath": "/home/nickybones/Code/OmniNMCL/ncore/data/floor/Faro/floor.config", 4 | "motionModel": "MixedFSR", 5 | "numParticles": 10000, 6 | "predictStrategy": "Uniform", 7 | "resampling": "LowVarianceResampling", 8 | "sensorModel": { 9 | "likelihoodSigma": 2, 10 | "maxRange": 15, 11 | "type": "BeamEnd", 12 | "weightingScheme": 3 13 | }, 14 | "tracking": false 15 | } 16 | -------------------------------------------------------------------------------- /ncore/data/floor/Faro/textspotting.json: -------------------------------------------------------------------------------- 1 | { 2 | "detModelPath": 
"/home/nickybones/Code/YouBotMCL/ncore/data/text/DB_IC15_resnet18.onnx", 3 | "recModelPath": "/home/nickybones/Code/YouBotMCL/ncore/data/text/crnn_cs.onnx", 4 | "vocPath": "/home/nickybones/Code/YouBotMCL/ncore/data/text/alphabet_94.txt", 5 | "height": 480, 6 | "width": 640, 7 | "binThresh": 0.3, 8 | "polyThresh" : 0.5, 9 | "maxCandidates" : 50, 10 | "unclipRatio" : 2.0 11 | } 12 | -------------------------------------------------------------------------------- /ncore/data/floor/GMap/GMap.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/GMap/GMap.png -------------------------------------------------------------------------------- /ncore/data/floor/GMap/GMap.yaml: -------------------------------------------------------------------------------- 1 | image: GMap.png 2 | resolution: 0.05 3 | origin: 4 | - -13.90309 5 | - -10.82778 6 | - 0.0 7 | negate: 0 8 | occupied_thresh: 0.65 9 | free_thresh: 0.196 10 | -------------------------------------------------------------------------------- /ncore/data/floor/GMap/GMapRoomSeg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/GMap/GMapRoomSeg.png -------------------------------------------------------------------------------- /ncore/data/floor/GMap/TextMaps/Room 1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/GMap/TextMaps/Room 1.png -------------------------------------------------------------------------------- /ncore/data/floor/GMap/TextMaps/Room 10.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/GMap/TextMaps/Room 10.png -------------------------------------------------------------------------------- /ncore/data/floor/GMap/TextMaps/Room 2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/GMap/TextMaps/Room 2.png -------------------------------------------------------------------------------- /ncore/data/floor/GMap/TextMaps/Room 3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/GMap/TextMaps/Room 3.png -------------------------------------------------------------------------------- /ncore/data/floor/GMap/TextMaps/Room 4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/GMap/TextMaps/Room 4.png -------------------------------------------------------------------------------- /ncore/data/floor/GMap/TextMaps/Room 5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/GMap/TextMaps/Room 5.png -------------------------------------------------------------------------------- /ncore/data/floor/GMap/TextMaps/Room 6.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/GMap/TextMaps/Room 6.png -------------------------------------------------------------------------------- /ncore/data/floor/GMap/TextMaps/Room 7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/GMap/TextMaps/Room 7.png -------------------------------------------------------------------------------- /ncore/data/floor/GMap/TextMaps/Room 8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/GMap/TextMaps/Room 8.png -------------------------------------------------------------------------------- /ncore/data/floor/GMap/TextMaps/Room 9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/GMap/TextMaps/Room 9.png -------------------------------------------------------------------------------- /ncore/data/floor/GMap/editor.xml: -------------------------------------------------------------------------------- 1 | 22 serialization::archive 17 0 0 0 0 13 0 0 0 8 NotValid 7 general 0 0 0 0 0 6 Room 1 7 general 1 0 0 6 Room 2 7 general 2 0 0 6 Room 3 7 general 3 0 0 6 Room 4 7 general 4 0 0 6 Room 5 7 general 5 0 0 6 Room 6 7 general 6 0 0 6 Room 7 7 general 7 0 0 6 Room 8 7 general 8 0 0 6 Room 9 7 general 9 0 0 7 Room 10 7 general 10 0 0 7 Room 11 7 general 11 0 0 7 Room 12 7 general 12 0 0 0 0 13 0 0 0 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 0.000000000e+00 -------------------------------------------------------------------------------- /ncore/data/floor/GMap/floor.config: -------------------------------------------------------------------------------- 1 | { 2 | "name" : "0", 3 | "editor" : "editor.xml", 4 | "roomSeg" : "GMapRoomSeg.png", 5 | "map": { 6 | "type" : "GMap", 7 | "image": "GMap.png", 8 | "resolution" : 0.050000, 9 | "origin" : [-13.90309, -10.82778, 0.0], 10 | "negate" : 0, 11 | "occupied_thresh" : 0.65, 12 | "free_thresh" : 0.196 13 | } 14 | } 15 | 16 | -------------------------------------------------------------------------------- /ncore/data/floor/GMap/nmcl.config: -------------------------------------------------------------------------------- 1 | { 2 | "injRatio": 0.5, 3 | "floorMapPath": "/home/nickybones/Code/YouBotMCL/ncore/data/floor/GMap/floor.config", 4 | "motionModel": "MixedFSR", 5 | "numParticles": 10000, 6 | "predictStrategy": "Uniform", 7 | "resampling": "LowVarianceResampling", 8 | "sensorModel": { 9 | "likelihoodSigma": 2, 10 | "maxRange": 15, 11 | "type": "BeamEnd", 12 | "weightingScheme": 3 13 | }, 14 | "tracking": false 
15 | } 16 | -------------------------------------------------------------------------------- /ncore/data/floor/test/YouBotMap.pgm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/test/YouBotMap.pgm -------------------------------------------------------------------------------- /ncore/data/floor/test/YouBotMap.yaml: -------------------------------------------------------------------------------- 1 | image: YouBotMap.pgm 2 | resolution: 0.050000 3 | origin: [-17.000000, -12.200000, 0.000000] 4 | negate: 0 5 | occupied_thresh: 0.65 6 | free_thresh: 0.196 7 | 8 | -------------------------------------------------------------------------------- /ncore/data/floor/test/YouBotMapRoomSeg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/floor/test/YouBotMapRoomSeg.png -------------------------------------------------------------------------------- /ncore/data/floor/test/editor.xml: -------------------------------------------------------------------------------- 1 | 22 serialization::archive 17 0 0 0 0 13 0 0 0 9 Not$Valid 7 general 0 0 0 0 0 6 Room 1 7 general 1 0 0 6 Room 2 7 general 2 0 0 6 Room 3 7 general 3 0 0 6 Room 4 7 general 4 0 0 6 Room 5 7 general 5 0 0 6 Room 6 7 general 6 0 0 6 Room 7 7 general 7 0 0 6 Room 8 7 general 8 0 0 6 Room 9 7 general 9 0 0 7 Room 10 7 general 10 0 0 7 Room 11 7 general 11 0 0 7 Room 12 7 general 12 0 0 0 0 13 0 0 0 0.000000000e+00 0.000000000e+00 0.000000000e+00 -4.800000191e+00 1.950000000e+01 0.000000000e+00 -4.599999428e+00 1.604999924e+01 0.000000000e+00 -6.649999619e+00 6.650000572e+00 0.000000000e+00 -4.050000191e+00 6.250000954e+00 0.000000000e+00 -6.800000191e+00 4.550000191e+00 0.000000000e+00 -2.449999809e+00 4.000005722e-01 0.000000000e+00 -2.849999428e+00 5.100001335e+00 0.000000000e+00 -3.050000191e+00 8.900000572e+00 0.000000000e+00 -3.199999809e+00 1.265000057e+01 0.000000000e+00 -3.899999619e+00 2.225000000e+01 0.000000000e+00 -4.099999428e+00 1.355000019e+01 0.000000000e+00 -3.349999428e+00 8.500003815e-01 0.000000000e+00 -------------------------------------------------------------------------------- /ncore/data/floor/test/floor.config: -------------------------------------------------------------------------------- 1 | { 2 | "name" : "0", 3 | "editor" : "editor.xml", 4 | "roomSeg" : "YouBotMapRoomSeg.png", 5 | "map": { 6 | "type" : "GMap", 7 | "image": "YouBotMap.pgm", 8 | "resolution" : 0.050000, 9 | "origin" : [-17.000000, -12.200000, 0.000000], 10 | "negate" : 0, 11 | "occupied_thresh" : 0.65, 12 | "free_thresh" : 0.196 13 | } 14 | } 15 | 16 | -------------------------------------------------------------------------------- /ncore/data/floor/test/floor_seeds.xml: -------------------------------------------------------------------------------- 1 | 22 serialization::archive 17 0 0 0 0 13 0 0 0 1 0 7 general 0 0 0 0 0 1 1 7 general 1 0 0 1 2 7 general 2 0 0 1 3 7 general 3 0 0 1 4 7 general 4 0 0 1 5 7 general 5 0 0 1 6 7 general 6 0 0 1 7 7 general 7 0 0 1 8 7 general 8 0 0 1 9 7 general 9 0 0 2 10 7 general 10 0 0 2 11 7 general 11 0 0 2 12 7 general 12 0 0 0 0 13 0 0 0 -1.700000000e+01 3.579999924e+01 0.000000000e+00 0.000000000e+00 1.580000019e+01 0.000000000e+00 1.000000000e+00 1.230000019e+01 0.000000000e+00 2.000000000e+00 3.300000191e+00 0.000000000e+00 
5.000000000e+00 3.300000191e+00 0.000000000e+00 3.000000000e+00 8.000001907e-01 0.000000000e+00 8.000000000e+00 -1.699999809e+00 0.000000000e+00 6.000000000e+00 2.800000191e+00 0.000000000e+00 4.500000000e+00 5.800000191e+00 0.000000000e+00 3.000000000e+00 9.800000191e+00 0.000000000e+00 -5.000000000e-01 1.879999924e+01 0.000000000e+00 1.830000000e+02 1.080000019e+01 0.000000000e+00 7.000000000e+00 -1.699999809e+00 0.000000000e+00 -------------------------------------------------------------------------------- /ncore/data/floor/test/nmcl.config: -------------------------------------------------------------------------------- 1 | { 2 | "injRatio": 1.0, 3 | "floorMapPath": "../data/floor/2021_10_27/floor.config", 4 | "motionModel": "MixedFSR", 5 | "numParticles": 10000, 6 | "predictStrategy": "Uniform", 7 | "resampling": "LowVarianceResampling", 8 | "sensorModel": { 9 | "likelihoodSigma": 2, 10 | "maxRange": 15, 11 | "type": "BeamEnd", 12 | "weightingScheme": 3 13 | }, 14 | "tracking": false 15 | } 16 | -------------------------------------------------------------------------------- /ncore/data/json/nmcl.config: -------------------------------------------------------------------------------- 1 | { 2 | "injRatio": 0.5, 3 | "floorMapPath": "/home/nickybones/Code/YouBotMCL/ncore/data/floor/2021_10_27/floor.config", 4 | "motionModel": "MixedFSR", 5 | "numParticles": 10000, 6 | "predictStrategy": "Uniform", 7 | "resampling": "LowVarianceResampling", 8 | "sensorModel": { 9 | "likelihoodSigma": 8, 10 | "maxRange": 15, 11 | "type": "BeamEnd", 12 | "weightingScheme": 2 13 | }, 14 | "tracking": false 15 | } 16 | -------------------------------------------------------------------------------- /ncore/data/text/lebron.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/text/lebron.jpg -------------------------------------------------------------------------------- /ncore/data/text/test.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/data/text/test.jpg -------------------------------------------------------------------------------- /ncore/ndl/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.0.2) 2 | project(ndl) 3 | 4 | ## Specify additional locations of header files 5 | ## Your package locations should be listed before other locations 6 | 7 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/src/) 8 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/src/lib/) 9 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/include/) 10 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../nlohmann_json/include/) 11 | 12 | set(RESULTS_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/../build) 13 | file(MAKE_DIRECTORY ${RESULTS_OUTPUT_DIRECTORY}) 14 | set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${RESULTS_OUTPUT_DIRECTORY}/lib) 15 | set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${RESULTS_OUTPUT_DIRECTORY}/lib) 16 | set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${RESULTS_OUTPUT_DIRECTORY}/bin) 17 | 18 | 19 | add_subdirectory("src") 20 | 21 | 22 | # After all are setup is done, we can go to our src/ directory to build our 23 | # files 24 | if(BUILD_TESTING) 25 | add_subdirectory("tst") 26 | endif() 27 | -------------------------------------------------------------------------------- /ncore/ndl/include/ImageUtils.h: 
-------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: ImageUtils.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | #ifndef IMAGEUTILS 13 | #define IMAGEUTILS 14 | 15 | #include "opencv2/opencv.hpp" 16 | #include <vector> 17 | 18 | template <class T> 19 | std::vector<T> Unique(cv::Mat img) 20 | { 21 | assert(img.channels() == 1 && "This implementation is only for single-channel images"); 22 | cv::Mat in = img.clone(); 23 | auto begin = in.begin<T>(), end = in.end<T>(); 24 | auto last = std::unique(begin, end); // remove adjacent duplicates to reduce size 25 | std::sort(begin, last); // sort remaining elements 26 | last = std::unique(begin, last); // remove duplicates 27 | return std::vector<T>(begin, last); 28 | } 29 | 30 | cv::Mat ThresholdSingleValue(const cv::Mat& img, int val); 31 | 32 | 33 | //std::vector<cv::Rect> BoundingBox(const cv::Mat& mask); 34 | 35 | cv::Rect BoundingBox(const cv::Mat& mask); 36 | 37 | cv::Mat DrawRects(const cv::Mat& mask, std::vector<cv::Rect> bboxes); 38 | 39 | 40 | 41 | #endif -------------------------------------------------------------------------------- /ncore/ndl/include/TextSpotting.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: TextSpotting.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | #ifndef TEXTSPOTTING 13 | #define TEXTSPOTTING 14 | 15 | #include <string> 16 | #include <memory> 17 | #include "opencv2/opencv.hpp" 18 | #include <vector> 19 | 20 | 21 | 22 | 23 | class TextSpotting 24 | { 25 | public: 26 | 27 | TextSpotting(std::string detModelPath, std::string recModelPath, std::string vocPath, 28 | int height = 480, int width = 640, float binThresh = 0.3, float polyThresh = 0.5, 29 | uint maxCandidates = 50, double unclipRatio = 2.0); 30 | 31 | TextSpotting(std::string jsonPath); 32 | 33 | std::vector<std::string> Infer(const cv::Mat& img); 34 | 35 | 36 | private: 37 | 38 | void fourPointsTransform(const cv::Mat& frame, const cv::Point2f vertices[], cv::Mat& result); 39 | bool sortPts(const cv::Point& p1, const cv::Point& p2); 40 | 41 | 42 | std::shared_ptr<cv::dnn::TextDetectionModel_DB> o_detector; // template argument assumed from the OpenCV 4.5 text spotting API 43 | std::shared_ptr<cv::dnn::TextRecognitionModel> o_recognizer; // template argument assumed from the OpenCV 4.5 text spotting API 44 | 45 | 46 | }; 47 | 48 | #endif 49 | 50 | 51 | -------------------------------------------------------------------------------- /ncore/ndl/src/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | 2 | add_executable(TextRecoFromFolder TextRecoFromFolder.cpp TextSpotting.cpp) 3 | target_link_libraries(TextRecoFromFolder ${OpenCV_LIBS} ${Boost_LIBRARIES} ) 4 | 5 | add_executable(TextRecoFromFolder2 TextRecoFromFolder2.cpp TextSpotting.cpp) 6 | target_link_libraries(TextRecoFromFolder2 ${OpenCV_LIBS} ${Boost_LIBRARIES} ) 7 | 8 | 9 | add_library(NDL TextSpotting.cpp ImageUtils.cpp) 10 | 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /ncore/ndl/src/ImageUtils.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | #
############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: ImageUtils.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | #include "ImageUtils.h" 13 | 14 | 15 | cv::Mat ThresholdSingleValue(const cv::Mat& img, int val) 16 | { 17 | cv::Mat mask; 18 | cv::threshold( img, mask, val, 255, 4 ); // 4 = cv::THRESH_TOZERO_INV: zero out pixels greater than val 19 | cv::threshold( mask, mask, val - 1, 255, 0 ); // 0 = cv::THRESH_BINARY: pixels equal to val become 255 20 | 21 | return mask; 22 | } 23 | 24 | /* 25 | std::vector<cv::Rect> BoundingBox(const cv::Mat& mask) 26 | { 27 | std::vector<cv::Rect> bboxes; 28 | std::vector<std::vector<cv::Point> > contours; 29 | std::vector<cv::Vec4i> hierarchy; 30 | cv::findContours(mask, contours, hierarchy, cv::RETR_TREE, cv::CHAIN_APPROX_SIMPLE, cv::Point(0, 0)); 31 | 32 | for( size_t i = 0; i < contours.size(); i++ ) 33 | { 34 | cv::Rect bb = cv::boundingRect(contours[i]); 35 | bboxes.push_back(bb); 36 | } 37 | 38 | return bboxes; 39 | }*/ 40 | 41 | 42 | cv::Rect BoundingBox(const cv::Mat& mask) 43 | { 44 | std::vector<cv::Point> locations; 45 | cv::findNonZero(mask, locations); 46 | cv::Rect bb = cv::boundingRect(locations); 47 | 48 | return bb; 49 | } 50 | 51 | 52 | 53 | cv::Mat DrawRects(const cv::Mat& mask, std::vector<cv::Rect> bboxes) 54 | { 55 | cv::RNG rng(12345); 56 | cv::Mat drawing = mask.clone(); 57 | cv::cvtColor(drawing,drawing,cv::COLOR_GRAY2BGR); 58 | 59 | for( size_t i = 0; i < bboxes.size(); i++ ) 60 | { 61 | cv::Scalar color = cv::Scalar( rng.uniform(0, 256), rng.uniform(0,256), rng.uniform(0,256) ); 62 | rectangle(drawing, bboxes[i], color); 63 | } 64 | 65 | return drawing; 66 | } 67 | -------------------------------------------------------------------------------- /ncore/ndl/src/TextRecoFromFolder.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved.
# 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: TextRecoFromFolder.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | 14 | #include "TextSpotting.h" 15 | #include 16 | #include 17 | #include 18 | 19 | int main(int argc, char** argv) 20 | { 21 | std::string jsonPath = "/home/nickybones/Code/YouBotMCL/ncore/data/text/textspotting.config"; 22 | 23 | for(int c = 0; c < 4; ++c) 24 | { 25 | std::string ImgDirPath = "/home/nickybones/data/MCL/Dump/camera" + std::to_string(c) + "/"; 26 | std::string outputPath = "/home/nickybones/data/MCL/Dump/predictions" + std::to_string(c) + ".txt"; 27 | TextSpotting ts = TextSpotting(jsonPath); 28 | std::vector filenames; 29 | 30 | std::ofstream output; 31 | output.open(outputPath); 32 | 33 | for (const auto & entry : boost::filesystem::directory_iterator(ImgDirPath)) 34 | { 35 | std::string imgPath = entry.path().string(); 36 | std::string extension = boost::filesystem::extension(imgPath); 37 | if(extension == ".png") filenames.push_back(imgPath); 38 | else std::cout << extension << std::endl; 39 | } 40 | 41 | std::sort(filenames.begin(), filenames.end()); 42 | 43 | for (int f = 0; f < filenames.size(); ++f) 44 | { 45 | std::string imgPath = filenames[f]; 46 | std::cout << imgPath << std::endl; 47 | cv::Mat img = cv::imread(imgPath); 48 | try 49 | { 50 | std::vector recRes = ts.Infer(img); 51 | for (int i = 0; i < recRes.size(); ++i) 52 | { 53 | output << recRes[i] + ", "; 54 | } 55 | } 56 | catch(...) 57 | { 58 | std::cerr << "failed to infer" << std::endl; 59 | 60 | } 61 | output << "\n"; 62 | 63 | } 64 | 65 | output.close(); 66 | } 67 | 68 | return 0; 69 | } 70 | 71 | -------------------------------------------------------------------------------- /ncore/ndl/src/TextRecoFromFolder2.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: TextRecoFromFolder.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | 14 | #include "TextSpotting.h" 15 | #include 16 | #include 17 | #include 18 | 19 | int main(int argc, char** argv) 20 | { 21 | std::string jsonPath = "/home/nickybones/Code/YouBotMCL/ncore/data/text/textspotting.config"; 22 | 23 | 24 | std::string ImgDirPath = "/home/nickybones/data/MCL/Dump/camera/"; 25 | std::string outputPath = "/home/nickybones/data/MCL/Dump/predictions.txt"; 26 | TextSpotting ts = TextSpotting(jsonPath); 27 | std::vector filenames; 28 | 29 | std::ofstream output; 30 | output.open(outputPath); 31 | 32 | for (const auto & entry : boost::filesystem::directory_iterator(ImgDirPath)) 33 | { 34 | std::string imgPath = entry.path().string(); 35 | std::string extension = boost::filesystem::extension(imgPath); 36 | if(extension == ".png") filenames.push_back(imgPath); 37 | else std::cout << extension << std::endl; 38 | } 39 | 40 | std::sort(filenames.begin(), filenames.end()); 41 | 42 | for (int f = 0; f < filenames.size(); ++f) 43 | { 44 | std::string imgPath = filenames[f]; 45 | std::cout << imgPath << std::endl; 46 | cv::Mat img = cv::imread(imgPath); 47 | try 48 | { 49 | std::vector recRes = ts.Infer(img); 50 | for (int i = 0; i < recRes.size(); ++i) 51 | { 52 | output << recRes[i] + ", "; 53 | } 54 | } 55 | catch(...) 
56 | { 57 | std::cerr << "failed to infer" << std::endl; 58 | } 59 | output << "\n"; 60 | 61 | } 62 | 63 | output.close(); 64 | 65 | 66 | return 0; 67 | } 68 | 69 | -------------------------------------------------------------------------------- /ncore/ndl/tst/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | add_executable(NDLUnitTests NDLUnitTests.cpp) 5 | target_link_libraries(NDLUnitTests ${OpenCV_LIBS} NDL GTest::GTest gtest_main ) 6 | target_compile_definitions(NDLUnitTests PRIVATE PROJECT_TEST_DATA_DIR="${PROJECT_SOURCE_DIR}/../data") 7 | add_test(AllTestsInTests NDLUnitTests) 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /ncore/ndl/tst/NDLUnitTests.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: NDLUnitTests.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | #include "gtest/gtest.h" 13 | #include "TextSpotting.h" 14 | #include "ImageUtils.h" 15 | #include 16 | 17 | std::string textDataPath = PROJECT_TEST_DATA_DIR + std::string("/text/"); 18 | 19 | 20 | 21 | TEST(TextSpotting, test1) { 22 | 23 | std::string imgPath = textDataPath + "test.jpg"; 24 | cv::Mat frame = cv::imread(imgPath); 25 | TextSpotting ts = TextSpotting(textDataPath + "textspotting.config"); 26 | std::vector texts = ts.Infer(frame); 27 | /* for(int i = 0; i < texts.size(); ++i) 28 | { 29 | std::cout << texts[i] << std::endl; 30 | }*/ 31 | ASSERT_EQ("JAMES", texts[1]); 32 | } 33 | 34 | 35 | int main(int argc, char **argv) { 36 | ::testing::InitGoogleTest(&argc, argv); 37 | return RUN_ALL_TESTS(); 38 | } 39 | 40 | 41 | -------------------------------------------------------------------------------- /ncore/nengine/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.0.2) 2 | project(nengine) 3 | 4 | ## Specify additional locations of header files 5 | ## Your package locations should be listed before other locations 6 | 7 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/src/) 8 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/src/lib/) 9 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/include/) 10 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../nsensors/include/) 11 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../nmap/include/) 12 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../nlohmann_json/include/) 13 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../nmcl/include/) 14 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../ndl/include/) 15 | 16 | 17 | set(PYTHON_INCLUDE_DIR "/usr/include/python3.8") 18 | 19 | find_package (Python COMPONENTS Interpreter Development) 20 | if(Python_FOUND) 21 | message(STATUS "Found Python version ${Python_VERSION}") 22 | include_directories(${PYTHON_INCLUDE_DIR}) 23 | endif(Python_FOUND) 24 | 25 | set(RESULTS_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/../build) 26 | file(MAKE_DIRECTORY ${RESULTS_OUTPUT_DIRECTORY}) 27 | set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${RESULTS_OUTPUT_DIRECTORY}/lib) 28 | set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${RESULTS_OUTPUT_DIRECTORY}/lib) 29 | set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${RESULTS_OUTPUT_DIRECTORY}/bin) 
30 | 31 | 32 | 33 | 34 | add_subdirectory("src") 35 | #add_library(NSENSORS STATIC IMPORTED) 36 | set_target_properties(NSENSORS PROPERTIES IMPORTED_LOCATION ${CMAKE_CURRENT_SOURCE_DIR}/../build/lib/libNSENSORS.a) 37 | set_target_properties(NMAP PROPERTIES IMPORTED_LOCATION ${CMAKE_CURRENT_SOURCE_DIR}/../build/lib/libNMAP.a) 38 | set_target_properties(NMAP PROPERTIES IMPORTED_LOCATION ${CMAKE_CURRENT_SOURCE_DIR}/../build/lib/libNMCL.a) 39 | set_target_properties(NMAP PROPERTIES IMPORTED_LOCATION ${CMAKE_CURRENT_SOURCE_DIR}/../build/lib/libNDL.a) 40 | 41 | 42 | # After all are setup is done, we can go to our src/ directory to build our 43 | # files 44 | if(BUILD_TESTING) 45 | add_subdirectory("tst") 46 | endif() 47 | -------------------------------------------------------------------------------- /ncore/nengine/include/DataFrameLoader.h: -------------------------------------------------------------------------------- 1 | #include 2 | #include "Python.h" 3 | #include 4 | #include 5 | #include 6 | 7 | #include 8 | 9 | 10 | 11 | enum FrameTypes 12 | { 13 | CAMERA0 = 0, 14 | CAMERA1 = 1, 15 | CAMERA2 = 2, 16 | CAMERA3 = 3, 17 | SEM0 = 4, 18 | SEM1 = 5, 19 | SEM2 = 6, 20 | SEM3 = 7, 21 | TEXT0 = 8, 22 | TEXT1 = 9, 23 | TEXT2 = 10, 24 | TEXT3 = 11, 25 | LIDAR = 12, 26 | ODOM = 13, 27 | GT = 14 28 | }; 29 | 30 | 31 | 32 | struct FrameData 33 | { 34 | FrameTypes type; 35 | unsigned long stamp; 36 | Eigen::Vector3f gt; 37 | std::string path; 38 | Eigen::Vector3f odom; 39 | std::vector scan; 40 | std::vector places; 41 | std::vector boxes; 42 | }; 43 | 44 | 45 | 46 | class DataFrameLoader 47 | { 48 | public: 49 | DataFrameLoader(const std::string& moduleFolder, const std::string& moduleName); 50 | 51 | void Load(const std::string& picklePath); 52 | 53 | FrameData GetData(unsigned long index); 54 | 55 | int GetNumFrames(); 56 | 57 | 58 | ~DataFrameLoader(); 59 | 60 | 61 | private: 62 | 63 | const char * get_type(unsigned long ind); 64 | unsigned long get_stamp(unsigned long ind); 65 | PyObject * get_data(unsigned long ind); 66 | static std::vector listTupleToVector_Float(PyObject* incoming); 67 | 68 | 69 | PyObject *pDict; 70 | PyObject *dfObject; 71 | 72 | }; 73 | 74 | -------------------------------------------------------------------------------- /ncore/nengine/include/NMCLEngine.h: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | #include "ReNMCL.h" 5 | #include "TextSpotting.h" 6 | #include "PlaceRecognition.h" 7 | #include "Camera.h" 8 | #include 9 | 10 | 11 | 12 | class NMCLEngine 13 | { 14 | public: 15 | 16 | NMCLEngine(const std::string& nmclConfigPath, const std::string& sensorConfigFolder, const std::string& textMapDir); 17 | 18 | void Predict(Eigen::Vector3f odom); 19 | 20 | int Correct(const std::vector& scan); 21 | 22 | void TextMask(const cv::Mat& img, int camID); 23 | 24 | void TextMask(std::vector& places, int camID); 25 | 26 | 27 | SetStatistics PoseEstimation() const 28 | { 29 | return o_renmcl->Stats(); 30 | } 31 | 32 | std::vector Particles() const 33 | { 34 | return o_renmcl->Particles(); 35 | } 36 | 37 | std::vector ScanMask() const 38 | { 39 | return o_scanMask; 40 | } 41 | 42 | 43 | private: 44 | 45 | 46 | 47 | std::shared_ptr o_textSpotter; 48 | std::shared_ptr o_renmcl; 49 | std::shared_ptr o_placeRec; 50 | Eigen::Vector3f o_wheelPrevPose = Eigen::Vector3f(0, 0, 0); 51 | std::vector o_scanMask; 52 | bool o_first = true; 53 | int o_dsFactor = 1; 54 | Eigen::Vector3f o_odomNoise = Eigen::Vector3f(0.02, 0.02, 0.02); 55 | 
std::vector o_odomWeights = {1.0}; 56 | float o_triggerDist = 0.05; 57 | float o_triggerAngle = 0.05; 58 | bool o_step = false; 59 | bool o_initPose = true; 60 | 61 | 62 | std::vector> o_cameras; 63 | //std::vector textMaps; 64 | std::vector o_textBBs; 65 | std::vector o_textOrientations; 66 | int o_lastDetectedCamera = -1; 67 | int o_lastDetectedMatch = -1; 68 | }; 69 | -------------------------------------------------------------------------------- /ncore/nengine/src/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | add_library(NEGNINE NMCLEngine.cpp DataFrameLoader.cpp) -------------------------------------------------------------------------------- /ncore/nengine/src/df_loader.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | 4 | def load_pickle(picklePth): 5 | df = pd.read_pickle(picklePth) 6 | print(df.head(10)) 7 | return df 8 | 9 | 10 | def get_type(df, ind): 11 | row = df.iloc[ind] 12 | strg = row['type'] 13 | return strg 14 | 15 | def get_stamp(df, ind): 16 | row = df.iloc[ind] 17 | t = row['t'] 18 | return t 19 | 20 | def get_data(df, ind): 21 | row = df.iloc[ind] 22 | d = row['data'] 23 | if type(d) is tuple: 24 | return d 25 | elif type(d) is str: 26 | return d 27 | elif type(d) is list: 28 | return d 29 | else: 30 | d = d.tolist() 31 | return d 32 | 33 | def num_frames(df): 34 | num = len(df.index) 35 | return num -------------------------------------------------------------------------------- /ncore/nengine/tst/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | add_executable(NEngineTests main.cpp) 2 | target_link_libraries(NEngineTests NEGNINE ${Python_LIBRARIES} ${OpenCV_LIBS} NSENSORS NMAP NMCL NDL nlohmann_json::nlohmann_json ${Boost_LIBRARIES}) 3 | 4 | add_executable(EngineBenchmark EngineBenchmark.cpp) 5 | target_link_libraries(EngineBenchmark NEGNINE ${Python_LIBRARIES} ${OpenCV_LIBS} NSENSORS NMAP NMCL NDL nlohmann_json::nlohmann_json ${Boost_LIBRARIES}) 6 | -------------------------------------------------------------------------------- /ncore/nengine/tst/EngineBenchmark.cpp: -------------------------------------------------------------------------------- 1 | 2 | 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | #include "NMCLEngine.h" 9 | #include "Utils.h" 10 | #include 11 | #include 12 | 13 | #include "DataFrameLoader.h" 14 | 15 | std::vector GenerateRandomScan() 16 | { 17 | std::vector scan; 18 | int max_dist = 10; 19 | for(int i = 0; i < 2082; ++i) 20 | { 21 | float x = drand48() * (2 * max_dist) - max_dist; 22 | float y = drand48() * (2 * max_dist) - max_dist; 23 | float theta = drand48() * 2 * M_PI - M_PI; 24 | scan.push_back(Eigen::Vector3f(x, y, theta)); 25 | } 26 | return scan; 27 | } 28 | 29 | 30 | 31 | 32 | 33 | int main(int argc, char** argv) 34 | { 35 | std::string moduleFolder = "/home/nuc20/Code/YouBotMCL/ncore/nengine/src"; 36 | std::string moduleName = "df_loader"; 37 | std::string sensorConfigFolder = "/home/nuc20/Code/YouBotMCL/ros1_ws/src/nmcl_ros/config/480x640/"; 38 | std::string nmclConfigFile = "/home/nuc20/Code/YouBotMCL/ncore/data/floor/Faro/nmcl.config"; 39 | std::string textMapDir = "/home/nuc20/Code/YouBotMCL/ncore/data/floor/Faro/TextMaps/"; 40 | std::string dataFolder = "/home/nuc20/data/MCL/2022_01_14/Run2/"; 41 | std::string pickleName = "textgtmerged.pickle"; 42 | 43 | DataFrameLoader df(moduleFolder, moduleName); 44 | df.Load(dataFolder + pickleName); 45 | 
int numFrames = df.GetNumFrames(); 46 | 47 | int count = 0; 48 | srand48(1); 49 | NMCLEngine engine(nmclConfigFile, sensorConfigFolder, textMapDir); 50 | auto millis = 0; 51 | for(int i = 0; i < numFrames; ++i) 52 | { 53 | FrameData fd = df.GetData(i); 54 | // std::vector scan = GenerateRandomScan(); 55 | 56 | if (fd.type == FrameTypes::LIDAR) 57 | { 58 | std::vector scan = fd.scan; 59 | auto start = std::chrono::system_clock::now(); 60 | int ret = engine.Correct(scan); 61 | auto end = std::chrono::system_clock::now(); 62 | std::chrono::duration duration = end - start; 63 | if(ret) 64 | { 65 | if (count > 499) break; 66 | ++count; 67 | millis += std::chrono::duration_cast(duration).count(); 68 | std::cout << std::chrono::duration_cast(duration).count() << std::endl; 69 | } 70 | } 71 | 72 | } 73 | 74 | std::cout << "finished computation at " << millis/500 << std::endl; 75 | 76 | 77 | 78 | return 0; 79 | 80 | } 81 | -------------------------------------------------------------------------------- /ncore/nmap/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.0.2) 2 | project(nmap) 3 | 4 | 5 | 6 | 7 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/src/) 8 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/src/lib/) 9 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/include/) 10 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../nsensors/include/) 11 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../nlohmann_json/include/) 12 | 13 | set(RESULTS_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/../build) 14 | file(MAKE_DIRECTORY ${RESULTS_OUTPUT_DIRECTORY}) 15 | set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${RESULTS_OUTPUT_DIRECTORY}/lib) 16 | set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${RESULTS_OUTPUT_DIRECTORY}/lib) 17 | set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${RESULTS_OUTPUT_DIRECTORY}/bin) 18 | 19 | 20 | 21 | add_subdirectory("src") 22 | #add_library(NSENSORS STATIC IMPORTED) 23 | #set_target_properties(NSENSORS PROPERTIES IMPORTED_LOCATION ${CMAKE_CURRENT_SOURCE_DIR}/../build/lib/libNSENSORS.a) 24 | 25 | 26 | # After all are setup is done, we can go to our src/ directory to build our 27 | # files 28 | if(BUILD_TESTING) 29 | add_subdirectory("tst") 30 | endif() 31 | -------------------------------------------------------------------------------- /ncore/nmap/data/YouBotMap.pgm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/nmap/data/YouBotMap.pgm -------------------------------------------------------------------------------- /ncore/nmap/data/YouBotMap.yaml: -------------------------------------------------------------------------------- 1 | image: YouBotMap.pgm 2 | resolution: 0.050000 3 | origin: [-15.400000, -33.000000, 0.000000] 4 | negate: 0 5 | occupied_thresh: 0.65 6 | free_thresh: 0.196 7 | 8 | -------------------------------------------------------------------------------- /ncore/nmap/data/YouBotMapRoomSeg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PRBonn/tmcl/cce63cdd278a37a77599b8c3b2c2aa4bf112b150/ncore/nmap/data/YouBotMapRoomSeg.png -------------------------------------------------------------------------------- /ncore/nmap/include/FloorMap.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # 
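EngineBenchmark.cpp above only times the correction step. A compact sketch of the full predict/correct cycle on the NMCLEngine interface is shown below; the config paths are placeholders, the Eigen::Vector3f scan element type is inferred from GenerateRandomScan(), and Correct() is treated as returning non-zero when a correction actually ran, as the benchmark does.

// Hypothetical driver (not part of the repo): one predict/correct cycle with NMCLEngine.
// Paths are placeholders; the scan element type (Eigen::Vector3f) is an assumption
// based on EngineBenchmark's GenerateRandomScan().
#include <iostream>
#include <vector>
#include <Eigen/Dense>
#include "NMCLEngine.h"

int main()
{
    NMCLEngine engine("nmcl.config", "config/", "TextMaps/");   // placeholder paths

    Eigen::Vector3f odom(0.1f, 0.0f, 0.01f);                    // wheel odometry reading
    engine.Predict(odom);                                       // motion update

    std::vector<Eigen::Vector3f> scan(2082, Eigen::Vector3f(1.0f, 0.0f, 0.0f));
    if (engine.Correct(scan))                                   // observation update
    {
        Eigen::Vector3d mean = engine.PoseEstimation().Mean();  // weighted particle mean
        std::cout << "pose: " << mean(0) << " " << mean(1) << " " << mean(2) << std::endl;
    }
    return 0;
}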
Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: FloorMap.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | #ifndef FLOORMAP 13 | #define FLOORMAP 14 | #pragma once 15 | 16 | #include 17 | #include 18 | 19 | #include "GMap.h" 20 | #include "Room.h" 21 | #include "Lift.h" 22 | 23 | #include 24 | #include 25 | #include 26 | 27 | class FloorMap 28 | { 29 | public: 30 | 31 | FloorMap(std::shared_ptr gmap, cv::Mat& roomSeg, std::string name = "0"); 32 | 33 | FloorMap(std::string jsonPath); 34 | 35 | 36 | int GetRoomID(float x, float y); 37 | 38 | std::string Name() const 39 | { 40 | return o_name; 41 | } 42 | 43 | Room& GetRoom(int id) 44 | { 45 | return o_rooms[id]; 46 | } 47 | 48 | std::vector& GetRooms() 49 | { 50 | return o_rooms; 51 | } 52 | 53 | std::vector GetRoomNames(); 54 | 55 | const std::vector>& Neighbors() 56 | { 57 | return o_neighbors; 58 | } 59 | 60 | // returned seeds in world coordinates 61 | const std::vector& Seeds() const 62 | { 63 | return o_seeds; 64 | } 65 | 66 | // returned seed in world coordinates 67 | const Eigen::Vector3f& Seed(int id) const 68 | { 69 | return o_seeds[id]; 70 | } 71 | 72 | // input seed in world coordinates 73 | void Seed(int id, const Eigen::Vector3f& seed) 74 | { 75 | o_seeds[id] = seed; 76 | } 77 | 78 | // input seed in map ccordinates 79 | void Seed(int id, const Eigen::Vector2f& seed); 80 | 81 | // input seeds in world coordinates 82 | void Seeds(std::vector seeds) 83 | { 84 | o_seeds = seeds; 85 | } 86 | 87 | const std::shared_ptr& Map() const 88 | { 89 | return o_map; 90 | } 91 | 92 | 93 | 94 | cv::Mat ColorizeRoomSeg(); 95 | void findNeighbours(); 96 | 97 | 98 | private: 99 | 100 | friend class boost::serialization::access; 101 | template 102 | void serialize(Archive & ar, const unsigned int version) 103 | { 104 | ar & o_rooms; 105 | ar & o_seeds; 106 | } 107 | 108 | void extractRoomSegmentation(); 109 | std::vector extractRoomIDs(); 110 | //void fingNeighbours(); 111 | 112 | 113 | std::string o_name = "0"; 114 | std::shared_ptr o_map; 115 | cv::Mat o_roomSeg; 116 | std::vector o_rooms; 117 | //std::vector o_lifts; 118 | std::vector> o_neighbors; 119 | std::vector o_seeds; 120 | 121 | 122 | }; 123 | 124 | #endif // !FLOORMAP 125 | 126 | -------------------------------------------------------------------------------- /ncore/nmap/include/GMap.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: GMap.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #ifndef GMAP 14 | #define GMAP 15 | 16 | #include "opencv2/opencv.hpp" 17 | #include 18 | #include "IMap2D.h" 19 | 20 | class GMap : public IMap2D 21 | { 22 | public: 23 | 24 | //! A constructor for handling the output of Gmapping, which include a metadata yaml and a .pgm map 25 | /*! 26 | \param origin is the 2D pose of the bottom right corner of the map (found in the yaml) 27 | \param resolution is the map resolution - distance in meters corresponding to 1 pixel (found in the yaml) 28 | \param gridMap is occupancy map built according to the scans 29 | */ 30 | GMap(cv::Mat& gridMap, Eigen::Vector3f origin, float resolution); 31 | 32 | 33 | //! 
A constructor for handling the output of Gmapping, which include a metadata yaml and a .pgm map 34 | /*! 35 | \param yamlPath is the path to the metadata yaml file produced by gmapping 36 | */ 37 | GMap(std::string yamlPath); 38 | 39 | 40 | //! A getter top left corner of the actual occupied area, as the map usually has wide empty margins 41 | /*! 42 | \return Eigen::Vector2f = (u, v) pixel coordinates for the gridmap 43 | */ 44 | Eigen::Vector2f TopLeft() 45 | { 46 | return o_topLeft; 47 | } 48 | 49 | 50 | //! A getter bottom right corner of the actual occupied area, as the map usually has wide empty margins 51 | /*! 52 | \return Eigen::Vector2f = (u, v) pixel coordinates for the gridmap 53 | */ 54 | Eigen::Vector2f BottomRight() 55 | { 56 | return o_bottomRight; 57 | } 58 | 59 | 60 | //! Converts (x, y) from the map frame to the pixels coordinates 61 | /*! 62 | \param (x, y) position in map frame 63 | \return (u, v) pixel coordinates for the gridmap 64 | */ 65 | Eigen::Vector2f World2Map(Eigen::Vector2f xy); 66 | 67 | 68 | std::vector World2Map(std::vector xyPoints); 69 | 70 | 71 | //! Converts (u, v) pixel coordinates to map frame (x, y) 72 | /*! 73 | \param (u, v) pixel coordinates for the gridmap 74 | \return (x, y) position in map frame 75 | */ 76 | Eigen::Vector2f Map2World(Eigen::Vector2f uv); 77 | 78 | 79 | const cv::Mat& Map() const 80 | { 81 | return o_gridmap; 82 | } 83 | 84 | 85 | private: 86 | 87 | //std::shared_ptr o_gridmap; 88 | cv::Mat o_gridmap; 89 | float o_resolution = 0; 90 | int o_maxy = 0; 91 | Eigen::Vector3f o_origin; 92 | Eigen::Vector2f o_bottomRight; 93 | Eigen::Vector2f o_topLeft; 94 | 95 | void getBorders(); 96 | 97 | }; 98 | 99 | #endif //GMap 100 | 101 | -------------------------------------------------------------------------------- /ncore/nmap/include/IMap2D.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: IMap2D.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | #ifndef IMAP2D_H 13 | #define IMAP2D_H 14 | 15 | #include 16 | #include 17 | #include 18 | #include 19 | 20 | class IMap2D 21 | { 22 | public: 23 | 24 | 25 | //! Converts (x, y) from the map frame to the pixels coordinates 26 | /*! 27 | \param (x, y) position in map frame 28 | \return (u, v) pixel coordinates for the gridmap 29 | */ 30 | virtual Eigen::Vector2f World2Map(Eigen::Vector2f xy) = 0; 31 | 32 | 33 | //! Converts a vector of (x, y) from the map frame to the pixels coordinates 34 | /*! 35 | \param vector of (x, y) positions in map frame 36 | \return vector of (u, v) pixel coordinates for the gridmap 37 | */ 38 | virtual std::vector World2Map(std::vector xyPoints) = 0; 39 | 40 | 41 | //! Converts (u, v) pixel coordinates to map frame (x, y) 42 | /*! 
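GMap and FloorMap above expose the coordinate and room queries used throughout the localization code. A small sketch of querying them follows; it assumes the stripped parameter types are Eigen::Vector2f points and std::shared_ptr<GMap> (as RoomSegmentation.cpp below suggests) and uses the test floor data from the tree.

// Hypothetical query sketch (not from the repo): load a GMap from its gmapping yaml,
// wrap it in a FloorMap with the room segmentation image, convert a world point to
// pixel coordinates and back, and look up the room it falls into.
#include <iostream>
#include <memory>
#include <string>
#include <Eigen/Dense>
#include <opencv2/opencv.hpp>
#include "GMap.h"
#include "FloorMap.h"

int main()
{
    const std::string dir = "ncore/data/floor/test/";            // assumed data directory
    std::shared_ptr<GMap> gmap = std::make_shared<GMap>(dir + "YouBotMap.yaml");
    cv::Mat roomSeg = cv::imread(dir + "YouBotMapRoomSeg.png");
    FloorMap floorMap(gmap, roomSeg);

    Eigen::Vector2f world(1.5f, -2.0f);                          // (x, y) in the map frame
    Eigen::Vector2f pixel = gmap->World2Map(world);              // (u, v) in the gridmap
    Eigen::Vector2f back  = gmap->Map2World(pixel);              // should recover ~world

    int roomID = floorMap.GetRoomID(world(0), world(1));
    std::cout << "pixel: " << pixel.transpose() << ", back: " << back.transpose()
              << ", room: " << roomID << std::endl;
    return 0;
}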
43 | \param (u, v) pixel coordinates for the gridmap 44 | \return (x, y) position in map frame 45 | */ 46 | virtual Eigen::Vector2f Map2World(Eigen::Vector2f uv) = 0; 47 | 48 | 49 | virtual ~IMap2D() {}; 50 | 51 | 52 | virtual const cv::Mat& Map() const = 0; 53 | 54 | 55 | }; 56 | 57 | #endif // IMap2D -------------------------------------------------------------------------------- /ncore/nmap/include/Lift.h: -------------------------------------------------------------------------------- 1 | 2 | /** 3 | # ############################################################################## 4 | # Copyright (c) 2021- University of Bonn # 5 | # All rights reserved. # 6 | # # 7 | # Author: Nicky Zimmerman # 8 | # # 9 | # File: Lift.h # 10 | # ############################################################################## 11 | **/ 12 | 13 | 14 | #ifndef LIFT 15 | #define LIFT 16 | 17 | 18 | #pragma once 19 | class Lift 20 | { 21 | }; 22 | 23 | #endif // !LIFT 24 | 25 | -------------------------------------------------------------------------------- /ncore/nmap/include/Object.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: Object.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #ifndef OBJECT 14 | #define OBJECT 15 | 16 | 17 | #include 18 | #include 19 | #include 20 | #include 21 | 22 | #pragma once 23 | class Object 24 | { 25 | 26 | public: 27 | 28 | 29 | Object(int semLabel, Eigen::Vector3f pose = Eigen::Vector3f(), std::string modelPath = ""); 30 | 31 | Object(); 32 | 33 | 34 | int ID() const 35 | { 36 | return o_id; 37 | } 38 | 39 | int SemLabel() const 40 | { 41 | return o_semLabel; 42 | } 43 | 44 | void SemLabel(int semLabel) 45 | { 46 | o_semLabel = semLabel; 47 | } 48 | 49 | Eigen::Vector3f Pose() const 50 | { 51 | return o_pose; 52 | } 53 | 54 | void Pose(Eigen::Vector3f pose) 55 | { 56 | o_pose = pose; 57 | } 58 | 59 | std::string ModelPath() const 60 | { 61 | return o_modelPath; 62 | } 63 | 64 | void ModelPath(std::string modelPath) 65 | { 66 | o_modelPath = modelPath; 67 | //loadModel(); 68 | } 69 | 70 | private: 71 | 72 | friend class boost::serialization::access; 73 | template 74 | void serialize(Archive & ar, const unsigned int version) 75 | { 76 | ar & o_id; 77 | ar & o_semLabel; 78 | ar & o_pose; 79 | ar & o_modelPath; 80 | } 81 | 82 | static int generateID(); 83 | void loadModel(); 84 | 85 | int o_id; 86 | int o_semLabel; 87 | Eigen::Vector3f o_pose; 88 | std::string o_modelPath; 89 | }; 90 | 91 | 92 | namespace boost { 93 | namespace serialization { 94 | 95 | template 96 | void serialize(Archive & ar, Eigen::Vector3f & pose, const unsigned int version) 97 | { 98 | ar & pose(0); 99 | ar & pose(1); 100 | ar & pose(2); 101 | } 102 | 103 | } // namespace serialization 104 | } // namespace boost 105 | 106 | 107 | #endif // !OBJECT 108 | 109 | 110 | -------------------------------------------------------------------------------- /ncore/nmap/include/Room.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. 
# 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: Room.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | #ifndef ROOM 13 | #define ROOM 14 | 15 | #pragma once 16 | 17 | #include 18 | #include 19 | #include "Object.h" 20 | 21 | #include 22 | #include 23 | #include 24 | 25 | 26 | class Room 27 | { 28 | public: 29 | 30 | Room(std::string name, int id, std::string purpose = "general"); 31 | 32 | Room() {}; 33 | 34 | std::string Name() const 35 | { 36 | return o_name; 37 | } 38 | 39 | 40 | void Name(std::string name) 41 | { 42 | o_name = name; 43 | } 44 | 45 | std::string Purpose() const 46 | { 47 | return o_purpose; 48 | } 49 | 50 | int ID() const 51 | { 52 | return o_id; 53 | } 54 | 55 | const std::vector Objects() const 56 | { 57 | return o_objects; 58 | } 59 | 60 | Object& GetObject(int id); 61 | 62 | 63 | int AddObject(Object& obj); 64 | 65 | //NOT IMPLEMENTED YET!!!! 66 | int RemoveObject(int id); 67 | 68 | private: 69 | 70 | 71 | friend class boost::serialization::access; 72 | template 73 | void serialize(Archive & ar, const unsigned int version) 74 | { 75 | ar & o_name; 76 | ar & o_purpose; 77 | ar & o_id; 78 | ar & o_objects; 79 | } 80 | 81 | 82 | std::string o_name = "room"; 83 | std::string o_purpose = "general"; 84 | int o_id = -1; 85 | std::vector o_objects; 86 | 87 | }; 88 | 89 | #endif // ROOM 90 | -------------------------------------------------------------------------------- /ncore/nmap/src/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | add_executable(RoomSegmentation RoomSegmentation.cpp GMap.cpp FloorMap.cpp Room.cpp Lift.cpp Object.cpp) 2 | target_link_libraries(RoomSegmentation ${OpenCV_LIBS} NSENSORS ${Boost_LIBRARIES}) 3 | add_library(NMAP GMap.cpp FloorMap.cpp Room.cpp Lift.cpp Object.cpp) 4 | 5 | 6 | 7 | -------------------------------------------------------------------------------- /ncore/nmap/src/Lift.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: Lift.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | #include "Lift.h" 13 | -------------------------------------------------------------------------------- /ncore/nmap/src/Object.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. 
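Room and Object above form the semantic layer of the floor map. A minimal usage sketch, based only on the accessors declared in the headers (Object IDs are generated randomly in the constructor):

// Hypothetical sketch (not from the repo): attach an Object to a Room and read it back.
#include <iostream>
#include <Eigen/Dense>
#include "Room.h"
#include "Object.h"

int main()
{
    Room kitchen("Kitchen", 3, "cooking");                   // name, room id, purpose
    Object chair(7, Eigen::Vector3f(1.0f, 2.0f, 0.0f));      // semantic label + pose (x, y, theta)
    kitchen.AddObject(chair);

    const Object& stored = kitchen.GetObject(chair.ID());
    std::cout << kitchen.Name() << " (" << kitchen.Purpose() << ") contains an object with label "
              << stored.SemLabel() << std::endl;
    return 0;
}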
# 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: Object.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | #include "Object.h" 13 | #include 14 | 15 | Object::Object(int semLabel, Eigen::Vector3f pose, std::string modelPath) 16 | { 17 | o_id = Object::generateID(); 18 | o_semLabel = semLabel; 19 | o_pose = pose; 20 | o_modelPath = modelPath; 21 | } 22 | 23 | Object::Object() 24 | { 25 | o_id = Object::generateID(); 26 | o_pose = Eigen::Vector3f(0, 0, 0); 27 | o_semLabel = 0; 28 | o_modelPath = ""; 29 | } 30 | 31 | int Object::generateID() 32 | { 33 | //srand(); 34 | int id = (rand()%10000)+1; 35 | 36 | return id; 37 | } 38 | 39 | void Object::loadModel() 40 | { 41 | 42 | } 43 | 44 | 45 | -------------------------------------------------------------------------------- /ncore/nmap/src/Room.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: Room.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | #include "Room.h" 13 | 14 | 15 | Room::Room(std::string name, int id, std::string purpose) 16 | { 17 | o_name = name; 18 | o_purpose = purpose; 19 | o_id = id; 20 | } 21 | 22 | Object& Room::GetObject(int id) 23 | { 24 | auto it = find_if(o_objects.begin(), o_objects.end(), [&id](const Object& obj) {return obj.ID() == id;}); 25 | auto index = std::distance(o_objects.begin(), it); 26 | return o_objects[index]; 27 | } 28 | 29 | 30 | int Room::AddObject(Object& obj) 31 | { 32 | o_objects.push_back(obj); 33 | return 0; 34 | } 35 | 36 | int Room::RemoveObject(int id) 37 | { 38 | auto it = find_if(o_objects.begin(), o_objects.end(), [&id](const Object& obj) {return obj.ID() == id;}); 39 | if (it != o_objects.end()) 40 | { 41 | auto index = std::distance(o_objects.begin(), it); 42 | o_objects.erase (o_objects.begin() + index); 43 | 44 | return 0; 45 | } 46 | 47 | return -1; 48 | } 49 | -------------------------------------------------------------------------------- /ncore/nmap/src/RoomSegmentation.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: RoomSegmentation.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | // RoomSegmentation.cpp : This file contains the 'main' function. Program execution begins and ends there. 
13 | // 14 | 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include "opencv2/opencv.hpp" 21 | 22 | #include "GMap.h" 23 | #include "FloorMap.h" 24 | 25 | 26 | int main() 27 | { 28 | cv::Mat grid = cv::imread("/home/nickybones/Code/YouBotMCL/ncore/data/floor/2021_10_27/YouBotMap.pgm"); 29 | cv::Mat roomSeg = cv::imread("/home/nickybones/Code/YouBotMCL/ncore/data/floor/2021_10_27/YouBotMapRoomSeg.png"); 30 | 31 | //int id = o_roomSeg.at(440, 300); 32 | //std::cout << id << std::endl; 33 | 34 | // cv::Mat split[3]; 35 | // cv::split(roomSeg3, split); 36 | // cv::Mat roomSeg = split[2]; 37 | 38 | GMap map = GMap(grid, Eigen::Vector3f(-17.000000, -12.200000, 0.000000), 0.05); 39 | FloorMap fm = FloorMap(std::make_shared(map), roomSeg); 40 | cv::Mat roomSegRGB = fm.ColorizeRoomSeg(); 41 | 42 | cv::imshow("room segmentation", roomSegRGB); 43 | cv::imwrite("colorized_room_seg.png", roomSegRGB); 44 | cv::waitKey(); 45 | 46 | } 47 | 48 | -------------------------------------------------------------------------------- /ncore/nmap/tst/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | 2 | 3 | add_executable(NMapUnitTests NMapUnitTests.cpp ) 4 | target_link_libraries(NMapUnitTests ${OpenCV_LIBS} ${catkin_LIBRARIES} NMAP NSENSORS GTest::GTest gtest_main ${Boost_LIBRARIES}) 5 | target_compile_definitions(NMapUnitTests PRIVATE PROJECT_TEST_DATA_DIR="${PROJECT_SOURCE_DIR}/../data/") 6 | add_test(AllTestsInTests NMapUnitTests) 7 | -------------------------------------------------------------------------------- /ncore/nmcl/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.0.2) 2 | project(nmcl) 3 | 4 | ## Specify additional locations of header files 5 | ## Your package locations should be listed before other locations 6 | 7 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/src/) 8 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/src/lib/) 9 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/include/) 10 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../nsensors/include/) 11 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../nmap/include/) 12 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../nlohmann_json/include/) 13 | 14 | set(RESULTS_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/../build) 15 | file(MAKE_DIRECTORY ${RESULTS_OUTPUT_DIRECTORY}) 16 | set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${RESULTS_OUTPUT_DIRECTORY}/lib) 17 | set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${RESULTS_OUTPUT_DIRECTORY}/lib) 18 | set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${RESULTS_OUTPUT_DIRECTORY}/bin) 19 | 20 | 21 | 22 | add_subdirectory("src") 23 | #add_library(NSENSORS STATIC IMPORTED) 24 | set_target_properties(NSENSORS PROPERTIES IMPORTED_LOCATION ${CMAKE_CURRENT_SOURCE_DIR}/../build/lib/libNSENSORS.a) 25 | set_target_properties(NMAP PROPERTIES IMPORTED_LOCATION ${CMAKE_CURRENT_SOURCE_DIR}/../build/lib/libNMAP.a) 26 | 27 | 28 | 29 | # After all are setup is done, we can go to our src/ directory to build our 30 | # files 31 | if(BUILD_TESTING) 32 | add_subdirectory("tst") 33 | endif() 34 | -------------------------------------------------------------------------------- /ncore/nmcl/include/AggressiveLowVarianceResampling.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. 
# 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: AggressiveLowVarianceResampling.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #ifndef AGGRESSIVELOWVARIANCERESAMPLING_H 14 | #define AGGRESSIVELOWVARIANCERESAMPLING_H 15 | 16 | #include "Resampling.h" 17 | #include 18 | #include 19 | 20 | class AggressiveLowVarianceResampling: public Resampling 21 | { 22 | public: 23 | void Resample(std::vector& particles); 24 | 25 | }; 26 | 27 | #endif -------------------------------------------------------------------------------- /ncore/nmcl/include/FSR.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: FSR.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | #ifndef FSR_H 13 | #define FSR_H 14 | 15 | 16 | #include "MotionModel.h" 17 | 18 | 19 | 20 | class FSR: public MotionModel 21 | { 22 | public: 23 | virtual Eigen::Vector3f Backward(Eigen::Vector3f p1, Eigen::Vector3f p2); 24 | 25 | virtual Eigen::Vector3f Forward(Eigen::Vector3f p1, Eigen::Vector3f u); 26 | 27 | virtual Eigen::Vector3f SampleMotion(Eigen::Vector3f p1, std::vector u, std::vector weights, Eigen::Vector3f noise); 28 | 29 | }; 30 | 31 | #endif -------------------------------------------------------------------------------- /ncore/nmcl/include/GaussianPredictStrategy.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: GaussianPredictStrategy.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #ifndef GAUSSIANPREDICTSTRATEGY 14 | #define GAUSSIANPREDICTSTRATEGY 15 | 16 | #include "PredictStrategy.h" 17 | 18 | 19 | 20 | class GaussianPredictStrategy : public PredictStrategy 21 | { 22 | public: 23 | 24 | void Predict(ReNMCL* renmcl, std::vector u, std::vector odomWeights, Eigen::Vector3f noise); 25 | 26 | }; 27 | 28 | 29 | 30 | #endif -------------------------------------------------------------------------------- /ncore/nmcl/include/LidarData.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. 
# 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: LidarData.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #ifndef LIDARDATA_H 14 | #define LIDARDATA_H 15 | 16 | #include 17 | #include 18 | 19 | class LidarData : public SensorData 20 | { 21 | 22 | public: 23 | 24 | LidarData(const std::vector& scan, const std::vector& mask) 25 | { 26 | o_scan = scan; 27 | o_mask = mask; 28 | } 29 | 30 | //virtual ~LidarData(){}; 31 | 32 | std::vector& Scan() 33 | { 34 | return o_scan; 35 | } 36 | 37 | std::vector& Mask() 38 | { 39 | return o_mask; 40 | } 41 | 42 | private: 43 | 44 | std::vector o_scan; 45 | std::vector o_mask; 46 | 47 | }; 48 | 49 | #endif -------------------------------------------------------------------------------- /ncore/nmcl/include/LowVarianceResampling.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: LowVarianceResampling.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #ifndef LOWVARIANCERESAMPLING_H 14 | #define LOWVARIANCERESAMPLING_H 15 | 16 | #include "Resampling.h" 17 | #include 18 | #include 19 | 20 | class LowVarianceResampling: public Resampling 21 | { 22 | public: 23 | void Resample(std::vector& particles); 24 | 25 | }; 26 | 27 | #endif -------------------------------------------------------------------------------- /ncore/nmcl/include/MixedFSR.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: MixedFSR.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #ifndef MIXEDFSR_H 14 | #define MIXEDFSR_H 15 | 16 | #include "FSR.h" 17 | 18 | class MixedFSR : public FSR 19 | { 20 | public: 21 | 22 | Eigen::Vector3f SampleMotion(Eigen::Vector3f p1, std::vector u, std::vector weights, Eigen::Vector3f noise); 23 | 24 | 25 | }; 26 | 27 | #endif -------------------------------------------------------------------------------- /ncore/nmcl/include/MotionModel.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. 
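LidarData above is the SensorData specialization consumed by the correction step. The sketch below shows how a scan and its mask could be packaged; the element types (Eigen::Vector3f beam end-points, double mask weights) are assumptions, since the template arguments are stripped in this listing.

// Hypothetical sketch (not from the repo): wrap a scan and mask as SensorData.
// Element types are assumptions; the listing strips the real template arguments.
#include <iostream>
#include <memory>
#include <vector>
#include <Eigen/Dense>
#include "LidarData.h"

int main()
{
    std::vector<Eigen::Vector3f> scan(2082, Eigen::Vector3f(1.0f, 0.0f, 0.0f));
    std::vector<double> mask(scan.size(), 1.0);   // 1.0 = beam participates in weighting

    // LidarData derives from SensorData, so it can be handed to the correction step
    // (e.g. NMCL::Correct) through a std::shared_ptr<SensorData>.
    std::shared_ptr<SensorData> data = std::make_shared<LidarData>(scan, mask);
    std::cout << "packed " << std::static_pointer_cast<LidarData>(data)->Scan().size()
              << " beams" << std::endl;
    return 0;
}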
# 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: MotionModel.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #ifndef MOTIONMODEL_H 14 | #define MOTIONMODEL_H 15 | 16 | #include 17 | #include 18 | 19 | class MotionModel 20 | { 21 | 22 | public: 23 | 24 | virtual Eigen::Vector3f Backward(Eigen::Vector3f p1, Eigen::Vector3f p2) = 0; 25 | 26 | virtual Eigen::Vector3f Forward(Eigen::Vector3f p1, Eigen::Vector3f u) = 0; 27 | 28 | virtual Eigen::Vector3f SampleMotion(Eigen::Vector3f p1, std::vector u, std::vector weights, Eigen::Vector3f noise) = 0; 29 | 30 | 31 | virtual ~MotionModel(){}; 32 | 33 | }; 34 | 35 | #endif -------------------------------------------------------------------------------- /ncore/nmcl/include/NMCLFactory.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: NMCLFactory.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | #ifndef NMCLFACTORY 13 | #define NMCLFACTORY 14 | 15 | #include "ReNMCL.h" 16 | #include 17 | 18 | 19 | class NMCLFactory 20 | { 21 | public: 22 | 23 | static std::shared_ptr Create(const std::string& configPath); 24 | 25 | static void Dump(const std::string& configFolder, const std::string& configFile); 26 | 27 | 28 | private: 29 | 30 | }; 31 | 32 | 33 | #endif 34 | -------------------------------------------------------------------------------- /ncore/nmcl/include/Particle.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: Particle.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | #ifndef PARTICLE_H 13 | #define PARTICLE_H 14 | 15 | 16 | #include 17 | #include 18 | 19 | 20 | class Particle 21 | { 22 | public: 23 | 24 | Particle(Eigen::Vector3f p = Eigen::Vector3f(0, 0, 0), double w = 0); 25 | 26 | Eigen::Vector3f pose; 27 | double weight; 28 | 29 | }; 30 | 31 | 32 | #endif -------------------------------------------------------------------------------- /ncore/nmcl/include/PlaceRecognition.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. 
# 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: PlaceRecognition.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #ifndef PLACERECOGNITION_H 14 | #define PLACERECOGNITION_H 15 | 16 | #include 17 | #include 18 | #include 19 | 20 | class PlaceRecognition 21 | { 22 | public: 23 | PlaceRecognition(const std::vector& dict); 24 | 25 | std::vector Match(const std::vector& places); 26 | 27 | private: 28 | 29 | std::vector divideWord(const std::string& word, const char delim = ' '); 30 | 31 | std::vector o_dict; 32 | std::vector> o_extDict; 33 | 34 | }; 35 | 36 | 37 | 38 | 39 | #endif //PLACERECOGNITION -------------------------------------------------------------------------------- /ncore/nmcl/include/PredictStrategy.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: PredictStrategy.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #ifndef PREDICTSTRATEGY 14 | #define PREDICTSTRATEGY 15 | 16 | #include 17 | #include 18 | 19 | class ReNMCL; 20 | 21 | class PredictStrategy 22 | { 23 | public: 24 | 25 | virtual void Predict(ReNMCL* renmcl, std::vector u, std::vector odomWeights, Eigen::Vector3f noise) = 0; 26 | 27 | virtual ~PredictStrategy() {}; 28 | }; 29 | 30 | 31 | 32 | #endif -------------------------------------------------------------------------------- /ncore/nmcl/include/Resampling.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: Resampling.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | #ifndef RESAMPLING_H 13 | #define RESAMPLING_H 14 | 15 | #include 16 | #include 17 | #include "Particle.h" 18 | 19 | class Resampling 20 | { 21 | public: 22 | virtual void Resample(std::vector& particles) = 0; 23 | 24 | }; 25 | 26 | #endif -------------------------------------------------------------------------------- /ncore/nmcl/include/SensorData.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: SensorData.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #ifndef SENSORDATA_H 14 | #define SENSORDATA_H 15 | 16 | #include 17 | #include 18 | 19 | class SensorData 20 | { 21 | 22 | public: 23 | 24 | virtual ~SensorData(){}; 25 | 26 | }; 27 | 28 | #endif -------------------------------------------------------------------------------- /ncore/nmcl/include/SensorModel.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. 
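PlaceRecognition above matches spotted text against a dictionary of room names; the implementation in PlaceRecognition.cpp (further down) accepts both exact substrings and token-wise matches, so an OCR result like "Room1" still maps to "Room 1". A short sketch, assuming the stripped element types are std::string for the dictionary and int indices for the matches:

// Hypothetical sketch (not from the repo): matching OCR output against room names.
// The strings are made up; Match() returns indices into the dictionary.
#include <iostream>
#include <string>
#include <vector>
#include "PlaceRecognition.h"

int main()
{
    std::vector<std::string> dict = {"Room 1", "Room 2", "Kitchen"};
    PlaceRecognition pr(dict);

    std::vector<std::string> places = {"Room1", "K1tchen", "Room 2 ->"};  // raw spotted text
    std::vector<int> matches = pr.Match(places);

    for (int idx : matches)
        std::cout << "matched: " << dict[idx] << std::endl;   // expected: Room 1, Room 2
    return 0;
}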
# 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: SensorModel.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #ifndef SENSORMODEL_H 14 | #define SENSORMODEL_H 15 | 16 | #include 17 | #include 18 | #include "Particle.h" 19 | #include 20 | #include 21 | 22 | 23 | class SensorModel 24 | { 25 | 26 | public: 27 | 28 | virtual void ComputeWeights(std::vector& particles, std::shared_ptr data) = 0; 29 | virtual bool IsOccupied(Particle particle) = 0; 30 | virtual bool IsValid(Particle particle) = 0; 31 | virtual std::vector InitUniform(int n_particles) = 0; 32 | virtual std::vector InitBoundingBox(int n_particles, std::vector tl, std::vector br, std::vector yaw) = 0; 33 | virtual std::vector InitGaussian(int n_particles, std::vector initGuess, std::vector covariances) = 0; 34 | virtual std::vector InitOriented(int n_particles, std::vector initGuess, std::vector covariances) = 0; 35 | 36 | virtual ~SensorModel(){}; 37 | 38 | }; 39 | 40 | #endif -------------------------------------------------------------------------------- /ncore/nmcl/include/SetStatistics.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: SetStatistics.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #ifndef SETSTATISTICS_H 14 | #define SETSTATISTICS_H 15 | 16 | #include 17 | #include 18 | #include "Particle.h" 19 | 20 | class SetStatistics 21 | { 22 | public: 23 | 24 | SetStatistics(Eigen::Vector3d m = Eigen::Vector3d::Zero(), Eigen::Matrix3d c = Eigen::Matrix3d::Zero()) 25 | { 26 | mean = m; 27 | cov = c; 28 | } 29 | 30 | Eigen::Vector3d Mean() 31 | { 32 | return mean; 33 | } 34 | 35 | Eigen::Matrix3d Cov() 36 | { 37 | return cov; 38 | } 39 | 40 | static SetStatistics ComputeParticleSetStatistics(const std::vector& particles); 41 | 42 | private: 43 | 44 | Eigen::Vector3d mean; 45 | Eigen::Matrix3d cov; 46 | 47 | }; 48 | 49 | 50 | #endif -------------------------------------------------------------------------------- /ncore/nmcl/include/UniformPredictStrategy.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: UniformPredictStrategy.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #ifndef UNIFORMPREDICTSTRATEGY 14 | #define UNIFORMPREDICTSTRATEGY 15 | 16 | #include "PredictStrategy.h" 17 | 18 | 19 | 20 | class UniformPredictStrategy : public PredictStrategy 21 | { 22 | public: 23 | 24 | void Predict(ReNMCL* renmcl, std::vector u, std::vector odomWeights, Eigen::Vector3f noise); 25 | 26 | 27 | }; 28 | 29 | 30 | 31 | #endif -------------------------------------------------------------------------------- /ncore/nmcl/src/AggressiveLowVarianceResampling.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. 
# 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: AggressiveLowVarianceResampling.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #include "AggressiveLowVarianceResampling.h" 14 | 15 | #include 16 | #include 17 | 18 | void AggressiveLowVarianceResampling::Resample(std::vector& particles) 19 | { 20 | std::vector new_particles; 21 | int n_particles = particles.size(); 22 | 23 | double unitW = 1.0 / n_particles; 24 | double r = drand48() * 1.0 / n_particles; 25 | double acc = particles[0].weight; 26 | int i = 0; 27 | 28 | for(int j = 0; j < n_particles; ++j) 29 | { 30 | double U = r + j * 1.0 / n_particles; 31 | while((U > acc) && (i < n_particles)) 32 | { 33 | ++i; 34 | acc += particles[i].weight; 35 | } 36 | particles[i].weight = unitW; 37 | new_particles.push_back(particles[i]); 38 | } 39 | particles = new_particles; 40 | } -------------------------------------------------------------------------------- /ncore/nmcl/src/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | add_library(NMCL FSR.cpp NMCL.cpp BeamEnd.cpp MixedFSR.cpp Particle.cpp SetStatistics.cpp LowVarianceResampling.cpp PlaceRecognition.cpp ReNMCL.cpp AggressiveLowVarianceResampling.cpp UniformPredictStrategy.cpp GaussianPredictStrategy.cpp NMCLFactory.cpp) 6 | 7 | 8 | 9 | 10 | -------------------------------------------------------------------------------- /ncore/nmcl/src/FSR.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: FSR.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #include "FSR.h" 14 | #include 15 | #include 16 | #include "Utils.h" 17 | #include 18 | 19 | 20 | Eigen::Vector3f FSR::Backward(Eigen::Vector3f p1, Eigen::Vector3f p2) 21 | { 22 | float dx = p2(0) - p1(0); 23 | float dy = p2(1) - p1(1); 24 | float dtheta = p2(2) - p1(2); 25 | 26 | float a = cos(p1(2)); 27 | float b = sin(p1(2)); 28 | 29 | float f = (dx * a + dy * b) / (pow(a, 2.0) + pow(b, 2.0)); 30 | float s = (dy - f * b) / a; 31 | float r = dtheta; 32 | 33 | return Eigen::Vector3f(f, s, r); 34 | 35 | } 36 | 37 | Eigen::Vector3f FSR::Forward(Eigen::Vector3f p1, Eigen::Vector3f u) 38 | { 39 | float f = u(0); 40 | float s = u(1); 41 | float r = u(2); 42 | 43 | float x = p1(0) + f * cos(p1(2)) - s * sin(p1(2)); 44 | float y = p1(1) + f * sin(p1(2)) + s * cos(p1(2)); 45 | float theta = Wrap2Pi(r + p1(2)); 46 | // write code to make theta between 0 and 2pi 47 | 48 | return Eigen::Vector3f(x, y, theta); 49 | 50 | } 51 | 52 | Eigen::Vector3f FSR::SampleMotion(Eigen::Vector3f p1, std::vector command, std::vector weights, Eigen::Vector3f noise) 53 | { 54 | Eigen::Vector3f u = command[0]; 55 | float f = u(0); 56 | float s = u(1); 57 | float r = u(2); 58 | 59 | float f_h = f - SampleGuassian(noise(0) * fabs(f)); 60 | float s_h = s - SampleGuassian(noise(1) * fabs(s)); 61 | float r_h = r - SampleGuassian(noise(2) * fabs(r)); 62 | 63 | //std::cout << "FSR" << std::endl; 64 | 65 | 66 | Eigen::Vector3f new_p = Forward(p1, Eigen::Vector3f(f_h, s_h, r_h)); 67 | 68 | return new_p; 69 | 70 | } 71 | 72 | -------------------------------------------------------------------------------- 
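FSR.cpp above implements the forward/sideways/rotation odometry model: Forward composes a relative motion (f, s, r) onto a pose, and Backward recovers it from two poses; since cos²θ + sin²θ = 1, the code's expressions reduce to f = Δx·cosθ + Δy·sinθ and s = −Δx·sinθ + Δy·cosθ. A small round-trip check, assuming Eigen::Vector3f poses as in the header:

// Hypothetical round-trip check (not from the repo): Backward(p1, Forward(p1, u))
// should recover the commanded motion u = (forward, sideways, rotation).
#include <iostream>
#include <Eigen/Dense>
#include "FSR.h"

int main()
{
    FSR model;
    Eigen::Vector3f p1(1.0f, 2.0f, 0.5f);         // pose (x, y, theta)
    Eigen::Vector3f u(0.3f, -0.1f, 0.2f);         // relative motion (f, s, r)

    Eigen::Vector3f p2 = model.Forward(p1, u);    // apply the motion in the robot frame
    Eigen::Vector3f back = model.Backward(p1, p2);

    std::cout << "recovered motion: " << back.transpose() << std::endl;   // ~ (0.3, -0.1, 0.2)
    return 0;
}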
/ncore/nmcl/src/GaussianPredictStrategy.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: GaussianPredictStrategy.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #include "GaussianPredictStrategy.h" 14 | #include "Particle.h" 15 | #include "ReNMCL.h" 16 | 17 | 18 | 19 | void GaussianPredictStrategy::Predict(ReNMCL* renmcl, std::vector u, std::vector odomWeights, Eigen::Vector3f noise) 20 | { 21 | Eigen::Matrix3d cov; 22 | cov << 1.0, 0, 0, 0, 1.0, 0, 0, 0, 1.0; 23 | std::vector covariances{cov}; 24 | Eigen::Vector3d mean = renmcl->o_stats.Mean(); 25 | std::vector initGuesses{Eigen::Vector3f(mean(0), mean(1), mean(2))}; 26 | 27 | for(int i = 0; i < renmcl->o_numParticles; ++i) 28 | { 29 | Eigen::Vector3f pose = renmcl->o_motionModel->SampleMotion(renmcl->o_particles[i].pose, u, odomWeights, noise); 30 | renmcl->o_particles[i].pose = pose; 31 | //particle pruning - if particle is outside the map, we replace it 32 | while (!renmcl->o_sensorModel->IsValid(renmcl->o_particles[i])) 33 | { 34 | Particle p = (renmcl->o_sensorModel->InitGaussian(1, initGuesses, covariances))[0]; 35 | p.weight = 1.0 / renmcl->o_numParticles; 36 | renmcl->o_particles[i].pose = p.pose; 37 | } 38 | } 39 | } -------------------------------------------------------------------------------- /ncore/nmcl/src/LowVarianceResampling.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: LowVarianceResampling.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #include "LowVarianceResampling.h" 14 | 15 | #include 16 | #include 17 | 18 | void LowVarianceResampling::Resample(std::vector& particles) 19 | { 20 | std::vector new_particles; 21 | int n_particles = particles.size(); 22 | std::vector transWeights(n_particles); 23 | 24 | std::transform(particles.begin(), particles.end(), transWeights.begin(), [](Particle &p){ return p.weight * p.weight; }); 25 | double sumWeights = std::accumulate(transWeights.begin(), transWeights.end(), 0.0); 26 | 27 | double effN = 1.0 / sumWeights; 28 | 29 | if (effN < 0.5 * n_particles) 30 | { 31 | double unitW = 1.0 / n_particles; 32 | //std::cout << "resample" << std::endl; 33 | double r = drand48() * 1.0 / n_particles; 34 | double acc = particles[0].weight; 35 | int i = 0; 36 | 37 | for(int j = 0; j < n_particles; ++j) 38 | { 39 | double U = r + j * 1.0 / n_particles; 40 | while((U > acc) && (i < n_particles)) 41 | { 42 | ++i; 43 | acc += particles[i].weight; 44 | } 45 | particles[i].weight = unitW; 46 | new_particles.push_back(particles[i]); 47 | } 48 | particles = new_particles; 49 | } 50 | } -------------------------------------------------------------------------------- /ncore/nmcl/src/MixedFSR.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. 
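LowVarianceResampling above only fires when the effective sample size N_eff = 1/Σ wᵢ² (with normalized weights) drops below half the particle count, and then performs systematic resampling with positions U_j = r + j/N over the cumulative weights. A toy example where the weights are concentrated enough to trigger it:

// Hypothetical toy run (not from the repo): N_eff = 1/(0.85^2 + 3*0.05^2) ≈ 1.37 < N/2 = 2,
// so the resampler fires, mostly duplicates the heavy particle, and resets weights to 1/N.
#include <iostream>
#include <vector>
#include <Eigen/Dense>
#include "LowVarianceResampling.h"
#include "Particle.h"

int main()
{
    std::vector<Particle> particles = {
        Particle(Eigen::Vector3f(0.0f, 0.0f, 0.0f), 0.85),
        Particle(Eigen::Vector3f(1.0f, 0.0f, 0.0f), 0.05),
        Particle(Eigen::Vector3f(2.0f, 0.0f, 0.0f), 0.05),
        Particle(Eigen::Vector3f(3.0f, 0.0f, 0.0f), 0.05)
    };

    LowVarianceResampling resampler;
    resampler.Resample(particles);

    for (const Particle& p : particles)
        std::cout << "x=" << p.pose(0) << " w=" << p.weight << std::endl;
    return 0;
}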
# 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: MixedFSR.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | #include "MixedFSR.h" 13 | #include 14 | #include 15 | #include "Utils.h" 16 | #include 17 | 18 | 19 | 20 | Eigen::Vector3f MixedFSR::SampleMotion(Eigen::Vector3f p1, std::vector command, std::vector weights, Eigen::Vector3f noise) 21 | { 22 | Eigen::Vector3f u(0, 0, 0); 23 | float choose = drand48(); 24 | 25 | float w = 0.0; 26 | 27 | for(long unsigned int i = 0; i < command.size(); ++i) 28 | { 29 | w += weights[i]; 30 | if(choose <= w) 31 | { 32 | u = command[i]; 33 | break; 34 | } 35 | } 36 | 37 | float f = u(0); 38 | float s = u(1); 39 | float r = u(2); 40 | 41 | float f_h = f - SampleGuassian(noise(0) * fabs(f)); 42 | float s_h = s - SampleGuassian(noise(1) * fabs(s)); 43 | float r_h = r - SampleGuassian(noise(2) * fabs(r)); 44 | 45 | 46 | Eigen::Vector3f new_p = Forward(p1, Eigen::Vector3f(f_h, s_h, r_h)); 47 | 48 | return new_p; 49 | 50 | } 51 | 52 | 53 | 54 | 55 | -------------------------------------------------------------------------------- /ncore/nmcl/src/NMCL.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: NMCL.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #include "NMCL.h" 14 | #include 15 | #include 16 | #include 17 | 18 | NMCL::NMCL(std::shared_ptr mm, std::shared_ptr sm, 19 | std::shared_ptr rs, int n) 20 | { 21 | motionModel = mm; 22 | sensorModel = sm; 23 | resampler = rs; 24 | n_particles = n; 25 | particles = sensorModel->InitUniform(n_particles); 26 | stats = SetStatistics::ComputeParticleSetStatistics(particles); 27 | 28 | } 29 | 30 | NMCL::NMCL(std::shared_ptr mm, std::shared_ptr sm, 31 | std::shared_ptr rs, int n, 32 | std::vector initGuess, std::vector covariances) 33 | { 34 | motionModel = mm; 35 | sensorModel = sm; 36 | resampler = rs; 37 | n_particles = n; 38 | particles = sensorModel->InitGaussian(n_particles, initGuess, covariances ); 39 | stats = SetStatistics::ComputeParticleSetStatistics(particles); 40 | 41 | } 42 | 43 | 44 | void NMCL::Predict(std::vector u, std::vector odomWeights, Eigen::Vector3f noise) 45 | { 46 | /*Eigen::Matrix3d cov; 47 | cov << 0.1, 0, 0, 0, 0.1, 0, 0, 0, 0.1; 48 | std::vector covariances{cov}; 49 | Eigen::Vector3d mean = stats.Mean(); 50 | std::vector initGuesses{Eigen::Vector3f(mean(0), mean(1), mean(2))};*/ 51 | 52 | for(int i = 0; i < n_particles; ++i) 53 | { 54 | Eigen::Vector3f pose = motionModel->SampleMotion(particles[i].pose, u, odomWeights, noise); 55 | particles[i].pose = pose; 56 | //particle pruning - if particle is on an occupied grid cell, we replace it 57 | //if (sensorModel->IsOccupied(particles[i])) 58 | //particle pruning - if particle is outside the map, we replace it 59 | while (!sensorModel->IsValid(particles[i])) 60 | { 61 | //Particle p = (sensorModel->InitGaussian(1, initGuesses, covariances))[0]; 62 | Particle p = (sensorModel->InitUniform(1))[0]; 63 | p.weight = 1.0 / n_particles; 64 | particles[i].pose = p.pose; 65 | } 66 | } 67 | } 68 | 69 | 70 | void NMCL::Correct(std::shared_ptr data) 71 | { 72 | sensorModel->ComputeWeights(particles, data); 73 | 74 | normalizeWeights(); 75 | 
resampler->Resample(particles); 76 | //normalizeWeights(); 77 | computeStatistics(); 78 | } 79 | 80 | void NMCL::normalizeWeights() 81 | { 82 | auto lambda = [&](double total, Particle p){return total + p.weight; }; 83 | double sumWeights = std::accumulate(particles.begin(), particles.end(), 0.0, lambda); 84 | std::for_each(particles.begin(), particles.end(), [sumWeights](Particle &p){ p.weight /= sumWeights; }); 85 | } 86 | 87 | 88 | 89 | void NMCL::computeStatistics() 90 | { 91 | stats = SetStatistics::ComputeParticleSetStatistics(particles); 92 | } 93 | 94 | void NMCL::Recover() 95 | { 96 | particles = sensorModel->InitUniform(n_particles); 97 | } 98 | -------------------------------------------------------------------------------- /ncore/nmcl/src/Particle.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: Particle.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #include "Particle.h" 14 | 15 | Particle::Particle(Eigen::Vector3f p, double w) 16 | { 17 | pose = p; 18 | weight = w; 19 | } -------------------------------------------------------------------------------- /ncore/nmcl/src/PlaceRecognition.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: PlaceRecognition.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | #include "PlaceRecognition.h" 13 | #include 14 | #include 15 | #include 16 | #include 17 | 18 | PlaceRecognition::PlaceRecognition(const std::vector& dict) 19 | { 20 | o_dict = dict; 21 | 22 | int numWords = dict.size(); 23 | o_extDict = std::vector>(numWords); 24 | const char delim = ' '; 25 | for (int w = 0; w < numWords; ++w) 26 | { 27 | //o_extDict[w] = std::vector(); 28 | o_extDict[w] = divideWord(o_dict[w], delim); 29 | } 30 | } 31 | 32 | std::vector PlaceRecognition::Match(const std::vector& places) 33 | { 34 | std::vector detections; 35 | const char delim = ' '; 36 | 37 | 38 | for (int p = 0; p < places.size(); ++p) 39 | { 40 | for(int d = 0; d < o_dict.size(); ++d) 41 | { 42 | std::size_t found = places[p].find(o_dict[d]); 43 | /* 44 | will find match if detection is "Room 1$" and room name is "Room 1" since it is a substring of the detection 45 | */ 46 | if (found != std::string::npos) 47 | { 48 | detections.push_back(d); 49 | } 50 | /* 51 | will find match if detection is "Room1" and room name is "Room 1" by dividing the room name to "Room", "1" 52 | */ 53 | else 54 | { 55 | bool add = true; 56 | for (int w = 0; w < o_extDict[d].size(); ++w) 57 | { 58 | std::size_t found = places[p].find(o_extDict[d][w]); 59 | 60 | add *= (found != std::string::npos); 61 | } 62 | if (add) detections.push_back(d); 63 | } 64 | } 65 | } 66 | 67 | 68 | std::vector detectedPlaces(detections.size()); 69 | for(int i = 0; i < detections.size(); ++i) 70 | { 71 | detectedPlaces[i] = o_dict[detections[i]]; 72 | } 73 | 74 | for(int i = 0; i < detectedPlaces.size(); ++i) 75 | { 76 | for(int j = i + 1; j < detectedPlaces.size(); ++j) 77 | { 78 | 
std::size_t found = detectedPlaces[i].find(detectedPlaces[j]); 79 | if (found != std::string::npos) 80 | { 81 | detectedPlaces.erase(detectedPlaces.begin() + j); 82 | detections.erase(detections.begin() + j); 83 | } 84 | else 85 | { 86 | found = detectedPlaces[j].find(detectedPlaces[i]); 87 | if (found != std::string::npos) 88 | { 89 | detectedPlaces.erase(detectedPlaces.begin() + i); 90 | detections.erase(detections.begin() + i); 91 | } 92 | } 93 | } 94 | } 95 | 96 | 97 | 98 | return detections; 99 | } 100 | 101 | 102 | std::vector PlaceRecognition::divideWord(const std::string& word, const char delim) 103 | { 104 | std::vector tokens; 105 | std::stringstream ss(word); 106 | std::string temp; 107 | 108 | while(std::getline(ss, temp, delim)) 109 | { 110 | tokens.push_back( temp ); 111 | } 112 | 113 | std::sort(tokens.begin(), tokens.end()); 114 | 115 | return tokens; 116 | } 117 | -------------------------------------------------------------------------------- /ncore/nmcl/src/SetStatistics.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: SetStatistics.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | #include "SetStatistics.h" 13 | #include 14 | 15 | SetStatistics SetStatistics::ComputeParticleSetStatistics(const std::vector& particles) 16 | { 17 | 18 | Eigen::Vector4d m = Eigen::Vector4d(0, 0, 0, 0); 19 | Eigen::Matrix3d cov = Eigen::Matrix3d::Zero(3, 3); 20 | int n = particles.size(); 21 | double tot_w = 0.0; 22 | 23 | for(int i = 0; i < n; ++i) 24 | { 25 | Eigen::Vector3f p = particles[i].pose; 26 | double w = particles[i].weight; 27 | 28 | m(0) += p(0) * w; // mean x 29 | m(1) += p(1) * w; // mean y 30 | m(2) += cos(p(2)) * w; // theta 31 | m(3) += sin(p(2)) * w; // theta 32 | 33 | tot_w += w; 34 | 35 | // linear components cov 36 | for(int j = 0; j < 2; ++j) 37 | { 38 | for(int k = 0; k < 2; ++k) 39 | { 40 | cov(j,k) += w * p(j) * p(k); 41 | } 42 | } 43 | } 44 | 45 | Eigen::Vector3d mean; 46 | mean(0) = m(0) / tot_w; 47 | mean(1) = m(1) / tot_w; 48 | mean(2) = atan2(m(3), m(2)); 49 | 50 | 51 | // normalize linear components cov 52 | for(int j = 0; j < 2; ++j) 53 | { 54 | for(int k = 0; k < 2; ++k) 55 | { 56 | cov(j, k) = cov(j, k) /tot_w - mean(j) * mean(k); 57 | } 58 | } 59 | 60 | // angular covariance 61 | double R = sqrt(m(2) * m(2) + m(3) * m(3)); 62 | 63 | // https://github.com/ros-planning/navigation/blob/2b807bd312fac1b476851800c84cb962559cbc53/amcl/src/amcl/pf/pf.c#L690 64 | //cov(2, 2) = -2 * log(R); 65 | 66 | // https://www.ebi.ac.uk/thornton-srv/software/PROCHECK/nmr_manual/man_cv.html 67 | cov(2, 2) = 1 - R / tot_w; 68 | 69 | 70 | SetStatistics stats = SetStatistics(mean, cov); 71 | 72 | return stats; 73 | 74 | } -------------------------------------------------------------------------------- /ncore/nmcl/src/UniformPredictStrategy.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. 
# 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: UniformPredictStrategy.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | #include "UniformPredictStrategy.h" 13 | #include "Particle.h" 14 | #include "ReNMCL.h" 15 | 16 | void UniformPredictStrategy::Predict(ReNMCL* renmcl, std::vector u, std::vector odomWeights, Eigen::Vector3f noise) 17 | { 18 | for(int i = 0; i < renmcl->o_numParticles; ++i) 19 | { 20 | Eigen::Vector3f pose = renmcl->o_motionModel->SampleMotion(renmcl->o_particles[i].pose, u, odomWeights, noise); 21 | renmcl->o_particles[i].pose = pose; 22 | //particle pruning - if particle is outside the map, we replace it 23 | while (!renmcl->o_sensorModel->IsValid(renmcl->o_particles[i])) 24 | { 25 | Particle p = (renmcl->o_sensorModel->InitUniform(1))[0]; 26 | p.weight = 1.0 / renmcl->o_numParticles; 27 | renmcl->o_particles[i].pose = p.pose; 28 | } 29 | } 30 | } -------------------------------------------------------------------------------- /ncore/nmcl/tst/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | add_executable(NMCLUnitTests NMCLUnitTests.cpp ) 5 | target_link_libraries(NMCLUnitTests ${OpenCV_LIBS} ${catkin_LIBRARIES} NMCL NSENSORS NMAP nlohmann_json::nlohmann_json GTest::GTest gtest_main ${Boost_LIBRARIES}) 6 | target_compile_definitions(NMCLUnitTests PRIVATE PROJECT_TEST_DATA_DIR="${PROJECT_SOURCE_DIR}/../data") 7 | add_test(AllTestsInTests NMCLUnitTests) 8 | 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /ncore/nmcl/tst/verifyInit.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | import pandas as pd 4 | 5 | 6 | for a in range(4): 7 | 8 | df = pd.read_csv("/home/nickybones/Code/YouBotMCL/ncore/build/" + str(90 * a) + "particles.csv") 9 | x = df.iloc[:, 0] 10 | y = df.iloc[:, 1] 11 | yaw = df.iloc[:, 2] 12 | 13 | plt.gca().invert_yaxis() 14 | 15 | plt.scatter(x, y, c='k') 16 | plt.scatter([0], [0], c='r') 17 | 18 | for i in range(len(yaw)): 19 | ang = yaw[i] 20 | dx = np.cos(ang) 21 | dy = np.sin(ang) 22 | plt.arrow(x[i], y[i], 0.005*dx, 0.005*dy) 23 | 24 | plt.savefig("/home/nickybones/Code/YouBotMCL/ncore/build/" + str(90 * a) + "particles.png") 25 | plt.close() 26 | #plt.show() 27 | 28 | 29 | 30 | -------------------------------------------------------------------------------- /ncore/nsensors/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.0.2) 2 | project(nsensors) 3 | 4 | ## Specify additional locations of header files 5 | ## Your package locations should be listed before other locations 6 | 7 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/src/) 8 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/src/lib/) 9 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/include/) 10 | include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../nlohmann_json/include/) 11 | 12 | set(RESULTS_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/../build) 13 | file(MAKE_DIRECTORY ${RESULTS_OUTPUT_DIRECTORY}) 14 | set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${RESULTS_OUTPUT_DIRECTORY}/lib) 15 | set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${RESULTS_OUTPUT_DIRECTORY}/lib) 16 | set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${RESULTS_OUTPUT_DIRECTORY}/bin) 17 | 18 | 19 | # After all are setup is done, we can go to our src/ directory to build our 20 | # files 21 | 
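# src/ builds the NSENSORS library; the unit tests in tst/ are only added when
# the BUILD_TESTING option is enabled, e.g. by configuring with -DBUILD_TESTING=ON.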
add_subdirectory("src") 22 | if(BUILD_TESTING) 23 | add_subdirectory("tst") 24 | endif() 25 | -------------------------------------------------------------------------------- /ncore/nsensors/include/Camera.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: Camera.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | #ifndef CAMERA_H 13 | #define CAMERA_H 14 | 15 | #include 16 | #include 17 | #include 18 | #include 19 | 20 | 21 | class Camera 22 | { 23 | public: 24 | //! A constructor for Camera object, that maps from image coordinates to 3D in camera frame 25 | /*! 26 | \param k 27 | \param tc 28 | */ 29 | Camera(Eigen::Matrix3d k, Eigen::Matrix3d t); 30 | 31 | Camera(std::string jsonPath); 32 | 33 | std::pair UV2CameraFrame(Eigen::Vector2d q1, Eigen::Vector2d q2); 34 | 35 | std::pair ComputeOccAngles(Eigen::Vector2d q1, Eigen::Vector2d q2); 36 | 37 | int ID() const 38 | { 39 | return o_id; 40 | } 41 | 42 | 43 | 44 | 45 | private: 46 | 47 | Eigen::Matrix3d o_K; 48 | Eigen::Matrix3d o_invK; 49 | Eigen::Matrix3d o_T; 50 | int o_id = 0; 51 | float o_yaw = 0.0; 52 | }; 53 | 54 | 55 | #endif -------------------------------------------------------------------------------- /ncore/nsensors/include/Lidar2D.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: Lidar2D.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | #ifndef LIDAR2D_H 13 | #define LIDAR2D_H 14 | 15 | #include 16 | #include 17 | #include 18 | 19 | 20 | class Lidar2D 21 | { 22 | public: 23 | 24 | Lidar2D(std::string name_, Eigen::Vector3f origin, int numBeams, float maxAngle, float minAngle); 25 | 26 | Lidar2D(std::string name_, std::string yamlFolder); 27 | 28 | Lidar2D(std::string jsonPath); 29 | 30 | //const std::vector& Heading() const 31 | std::vector Heading() 32 | { 33 | return o_heading; 34 | } 35 | 36 | std::string Name() 37 | { 38 | return o_name; 39 | } 40 | 41 | std::vector Center(std::vector& homoPoints); 42 | 43 | 44 | 45 | private: 46 | 47 | std::vector o_heading; 48 | std::string o_name; 49 | Eigen::Matrix3f o_trans; 50 | 51 | }; 52 | 53 | 54 | 55 | std::vector MergeScans(const std::vector& f_ranges, Lidar2D laser_front, 56 | const std::vector& r_ranges, Lidar2D laser_rear, int dsFactor = 10, float maxRange = 15); 57 | 58 | std::vector MergeScansSimple(const std::vector& f_ranges, Lidar2D laser_front, 59 | const std::vector& r_ranges, Lidar2D laser_rear); 60 | 61 | #endif -------------------------------------------------------------------------------- /ncore/nsensors/include/OptiTrack.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. 
# 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: OptiTrack.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | #ifndef OPTITRACK_H 13 | #define OPTITRACK_H 14 | 15 | #include 16 | #include 17 | #include 18 | 19 | class OptiTrack 20 | { 21 | public: 22 | 23 | //! A constructor 24 | /*! 25 | \param origin is the (x, y, theta) location of the robot in the OptiTrack frame when the robot begins mapping 26 | So it is (0, 0, 0) in the map frame. 27 | */ 28 | OptiTrack(Eigen::Vector3f origin); 29 | 30 | //! A constructor 31 | /*! 32 | \param yamlPath is a string of the path of the folder of a yaml file containing the origin location 33 | */ 34 | OptiTrack(std::string yamlFolder); 35 | 36 | //! Converts 2D pose (x, y, theta) in the OptiTrack frame to 2D pose in the map frame. 37 | /*! 38 | \param p is 2D pose from the OptiTrack motion capture 39 | \return 2D pose in the map frame 40 | */ 41 | Eigen::Vector3f OptiTrack2World(Eigen::Vector3f p); 42 | 43 | 44 | private: 45 | Eigen::Matrix3f o_invTrans; 46 | Eigen::Vector3f o_origin; 47 | }; 48 | 49 | #endif -------------------------------------------------------------------------------- /ncore/nsensors/include/Utils.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: Utils.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | #ifndef UTILS_H 13 | #define UTILS_H 14 | 15 | 16 | #include 17 | #include 18 | #include 19 | 20 | 21 | 22 | Eigen::Matrix3f Vec2Trans(Eigen::Vector3f v); 23 | 24 | float Wrap2Pi(float angle); 25 | 26 | float GetYaw(float qz, float qw); 27 | 28 | std::vector Ranges2Points(const std::vector& ranges, const std::vector& angles); 29 | 30 | std::vector Downsample(const std::vector& ranges, int N); 31 | 32 | std::vector Downsample(const std::vector& ranges, int N); 33 | 34 | std::vector StringToVec(std::string seq); 35 | 36 | std::vector File2Lines(std::string filePath); 37 | 38 | float SampleGuassian(float sigma); 39 | 40 | #endif -------------------------------------------------------------------------------- /ncore/nsensors/src/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | #target_link_libraries(Driver ${OpenCV_LIBS}) 2 | add_library(NSENSORS Utils.cpp Camera.cpp OptiTrack.cpp Lidar2D.cpp) 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /ncore/nsensors/src/Camera.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. 
# 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: Camera.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #include "Camera.h" 14 | #include "Utils.h" 15 | #include 16 | #include 17 | #include 18 | 19 | Camera::Camera(Eigen::Matrix3d k, Eigen::Matrix3d t) 20 | { 21 | o_K = k; 22 | o_invK = o_K.inverse(); 23 | o_T = t; 24 | } 25 | 26 | Camera::Camera(std::string jsonPath) 27 | { 28 | using json = nlohmann::json; 29 | 30 | std::ifstream file(jsonPath); 31 | json config; 32 | file >> config; 33 | o_id = config["id"]; 34 | o_yaw = config["yaw"]; 35 | std::vector k = config["k"]; 36 | std::vector t = config["t"]; 37 | 38 | o_K = Eigen::Matrix(k.data()); 39 | o_invK = o_K.inverse(); 40 | o_T = Eigen::Matrix(t.data()); 41 | } 42 | 43 | 44 | 45 | std::pair Camera::UV2CameraFrame(Eigen::Vector2d q1, Eigen::Vector2d q2) 46 | { 47 | Eigen::Vector3d p1(q1(0), q1(1), 1); 48 | Eigen::Vector3d p2(q2(0), q2(1), 1); 49 | 50 | 51 | // multiply by inverse calibration matrix 52 | Eigen::Vector3d p1k_ = o_invK * p1; 53 | Eigen::Vector3d p2k_ = o_invK * p2; 54 | 55 | // divide by z component to homogenize it 56 | Eigen::Vector3d p1k = p1k_ / p1k_(2); 57 | Eigen::Vector3d p2k = p2k_ / p2k_(2); 58 | 59 | // go from image frame to camera frame 60 | Eigen::Vector3d p1c = o_T * p1k; 61 | Eigen::Vector3d p2c = o_T * p2k; 62 | 63 | std::pair pc{p1c, p2c}; 64 | 65 | return pc; 66 | } 67 | 68 | std::pair Camera::ComputeOccAngles(Eigen::Vector2d q1, Eigen::Vector2d q2) 69 | { 70 | std::pair pc = UV2CameraFrame(q1, q2); 71 | Eigen::Vector3d p1c = pc.first; 72 | Eigen::Vector3d p2c = pc.second; 73 | float t1 = Wrap2Pi(atan2(p1c(1), p1c(0)) + o_yaw); 74 | float t2 = Wrap2Pi(atan2(p2c(1), p2c(0)) + o_yaw); 75 | 76 | return std::pair (t1, t2); 77 | } -------------------------------------------------------------------------------- /ncore/nsensors/src/OptiTrack.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. 
# 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: OptiTrack.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #include "OptiTrack.h" 14 | #include "Utils.h" 15 | 16 | 17 | OptiTrack::OptiTrack(Eigen::Vector3f origin_) 18 | { 19 | Eigen::Matrix3f trans = Vec2Trans(origin_); 20 | o_invTrans = trans.inverse(); 21 | o_origin = origin_; 22 | } 23 | 24 | OptiTrack::OptiTrack(std::string yamlFolder) 25 | { 26 | std::vector fields = File2Lines(yamlFolder + "optitrack.yaml"); 27 | // "origin:" - 8 28 | fields[0].erase(0,8); 29 | std::vector vec = StringToVec(fields[0]); 30 | 31 | o_origin = Eigen::Vector3f(vec[0], vec[1], vec[2]); 32 | Eigen::Matrix3f trans = Vec2Trans(o_origin); 33 | o_invTrans = trans.inverse(); 34 | } 35 | 36 | 37 | 38 | Eigen::Vector3f OptiTrack::OptiTrack2World(Eigen::Vector3f p) 39 | { 40 | Eigen::Vector3f xy1 = Eigen::Vector3f(p(0), p(1), 1); 41 | Eigen::Vector3f p_trans = o_invTrans * xy1; 42 | p_trans(2) = Wrap2Pi(p(2) - o_origin(2)); 43 | 44 | return p_trans; 45 | } -------------------------------------------------------------------------------- /ncore/nsensors/tst/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | 2 | 3 | add_executable(NSensorsUnitTests NSensorsUnitTests.cpp ) 4 | target_link_libraries(NSensorsUnitTests ${OpenCV_LIBS} ${catkin_LIBRARIES} NSENSORS GTest::GTest gtest_main) 5 | target_compile_definitions(NSensorsUnitTests PRIVATE PROJECT_TEST_DATA_DIR="${PROJECT_SOURCE_DIR}/../data/") 6 | add_test(AllTestsInTests NSensorsUnitTests) 7 | -------------------------------------------------------------------------------- /ros1_ws/src/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | /opt/ros/noetic/share/catkin/cmake/toplevel.cmake -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/launch/allsync.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/launch/amcleval.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/launch/gtviz.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/launch/syncgtrs.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/launch/synclidar.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 
-------------------------------------------------------------------------------- /ros1_ws/src/data_processing/package.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | data_processing 4 | 0.0.0 5 | The data_processing package 6 | 7 | 8 | 9 | 10 | nickybones 11 | 12 | 13 | 14 | 15 | 16 | TODO 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | catkin 52 | 53 | std_msgs 54 | message_generation 55 | message_runtime 56 | std_msgs 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/src/AMCLEval.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | 4 | import numpy as np 5 | import pandas as pd 6 | import rospy 7 | from geometry_msgs.msg import PoseWithCovarianceStamped 8 | import csv 9 | from GTpickle import interpolate 10 | 11 | 12 | def wrapToPi(theta): 13 | while theta < -np.pi: 14 | theta = theta + 2 * np.pi 15 | while theta > np.pi: 16 | theta = theta - 2 * np.pi 17 | return theta 18 | 19 | 20 | 21 | class AMCLEval(): 22 | 23 | def __init__(self)->None: 24 | 25 | poseTopic = rospy.get_param('~poseTopic') 26 | self.pose_sub = rospy.Subscriber(poseTopic, PoseWithCovarianceStamped, self.callback, queue_size=100) 27 | 28 | 29 | picklePath = rospy.get_param('~picklePath') 30 | df = pd.read_pickle(picklePath) 31 | df_sub = df.loc[df['type'] == 'gt'] 32 | gt_t = df_sub['t'].to_numpy() 33 | print(gt_t.flatten().shape) 34 | gt_poses = df_sub['data'].to_numpy() 35 | intr = interpolate(gt_t, gt_poses) 36 | self.intr = intr 37 | 38 | csvPath = rospy.get_param('~csvPath') 39 | f = open(csvPath, 'w') 40 | self.csv = csv.writer(f) 41 | header = ["t","pose_x","pose_y","pose_yaw","cov_x","cov_y","cov_yaw","gt_x", "gt_y","gt_yaw"] 42 | self.csv.writerow(header) 43 | 44 | def callback(self, pose_msg): 45 | 46 | t = int(pose_msg.header.stamp.to_nsec()) 47 | 48 | pose = pose_msg.pose.pose 49 | pose_x = pose.position.x 50 | pose_y = pose.position.y 51 | qz = pose.orientation.z 52 | qw = pose.orientation.w 53 | pose_yaw = wrapToPi(2 * np.arctan(qz/qw)) 54 | 55 | cov_x = pose_msg.pose.covariance[0] 56 | cov_y = pose_msg.pose.covariance[7] 57 | cov_yaw = pose_msg.pose.covariance[35] 58 | 59 | gt = self.intr(t) 60 | gt_x = gt[0] 61 | gt_y = gt[1] 62 | gt_yaw = gt[2] 63 | 64 | row = [t,pose_x,pose_y,pose_yaw,cov_x,cov_y,cov_yaw,gt_x, gt_y,gt_yaw] 65 | self.csv.writerow(row) 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | if __name__ == "__main__": 75 | 76 | rospy.init_node('amcl_eval') 77 | posn = AMCLEval() 78 | rospy.spin() 79 | -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/src/AddPredictions.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import math 4 | 5 | 6 | 7 | 8 | def addText(dumpFolder): 9 | 10 | matches = [] 11 | text_t = [] 12 | types = [] 13 | 14 | for c in range(4): 15 | df_cam = pd.read_pickle(dumpFolder + "camera" + str(c) + ".pickle") 16 | textFile = dumpFolder + "predictions" + str(c) + ".txt" 17 | f = open(textFile, "r") 18 | lines = f.readlines() 19 | 20 | for i in range(len(lines)): 21 | matches.append(lines[i]) 22 | row = df_cam.iloc[i] 23 | text_t.append(row['t']) 24 | types.append('text' + str(c)) 25 | 26 | 27 | 
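    # Each prediction line is paired with the timestamp of the camera frame it was
    # made on and tagged with its source camera ('text0'..'text3'), then packed
    # into a dataframe following the same (t, type, data) layout as the other pickles.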
data = {'t': text_t, 'type': types, 'data': matches} 28 | df = pd.DataFrame(data) 29 | 30 | return df 31 | 32 | 33 | def addSemantics(dumpFolder): 34 | 35 | df = pd.read_pickle(dumpFolder + "person.pickle") 36 | 37 | 38 | # t = df['t'] 39 | # types = df['type'] 40 | # seman = [] 41 | 42 | # for index, row in df.iterrows(): 43 | # sem = row['boxes'] 44 | # sem = np.array(sem).flatten().astype(float) 45 | # seman.append(sem) 46 | 47 | # data = {'t': t, 'type': types, 'data': seman} 48 | # new_df = pd.DataFrame(data) 49 | 50 | # new_df.to_pickle(dumpFolder + "person2.pickle") 51 | 52 | # return new_df 53 | return df 54 | 55 | 56 | if __name__ == "__main__": 57 | 58 | dumpFolder = "/home/nickybones/data/MCL/2021_12_08/Run3/" 59 | pickleFolder = "/home/nickybones/data/MCL/2021_12_08/Run3/" 60 | pickleName = pickleFolder + "gtmerged.pickle" 61 | 62 | df_merged = pd.read_pickle(pickleName) 63 | 64 | df_text = addText(dumpFolder) 65 | df_sem = addSemantics(dumpFolder) 66 | 67 | 68 | 69 | dfcomb = pd.concat([df_text, df_sem, df_merged], axis=0) 70 | dfcomb['t'] = dfcomb['t'].astype(int) 71 | dfcomb = dfcomb.sort_values(by='t') 72 | dfcomb.to_pickle(dumpFolder + "semtextgtmerged.pickle") 73 | 74 | dfcomb = pd.concat([df_text, df_merged], axis=0) 75 | dfcomb['t'] = dfcomb['t'].astype(int) 76 | dfcomb = dfcomb.sort_values(by='t') 77 | dfcomb.to_pickle(dumpFolder + "textgtmerged.pickle") 78 | 79 | dfcomb = pd.concat([df_sem, df_merged], axis=0) 80 | dfcomb['t'] = dfcomb['t'].astype(int) 81 | dfcomb = dfcomb.sort_values(by='t') 82 | dfcomb.to_pickle(dumpFolder + "semgtmerged.pickle") 83 | 84 | 85 | 86 | #df_sem = addSemantics(dumpFolder) 87 | #df_sem.to_pickle(dumpFolder + "person2.pickle") 88 | 89 | 90 | -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/src/CameraDumpNode.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | 4 | import glob 5 | import numpy as np 6 | import cv2 7 | import rospy 8 | from data_processing.srv import NoArguments 9 | import pandas as pd 10 | from sensor_msgs.msg import Image 11 | import os 12 | from cv_bridge import CvBridge 13 | 14 | class CameraDumpNode(): 15 | 16 | def __init__(self)->None: 17 | 18 | cameraTopic = rospy.get_param('~cameraTopic') 19 | self.dumpFolder = rospy.get_param('dumpFolder') 20 | self.camName = rospy.get_param('~camName') 21 | print(self.camName) 22 | self.path = [] 23 | self.stamp = [] 24 | self.img = [] 25 | self.bridge = CvBridge() 26 | 27 | if not os.path.exists(self.dumpFolder + self.camName): 28 | os.makedirs(self.dumpFolder + self.camName) 29 | 30 | self.srv = rospy.Service(self.camName + '/dump_images', NoArguments, self.dump_images_service) 31 | 32 | cam_sub = rospy.Subscriber(cameraTopic, Image, self.callback, queue_size=1000) 33 | self.cnt = 0 34 | 35 | 36 | 37 | def callback(self,img_msg): 38 | 39 | img = self.bridge.imgmsg_to_cv2(img_msg, desired_encoding='passthrough') 40 | #self.img.append(img) 41 | 42 | img_name = '{0:05d}'.format(self.cnt) + ".png" 43 | self.path.append(img_name) 44 | self.stamp.append(int(img_msg.header.stamp.to_nsec())) 45 | img_name = self.dumpFolder + self.camName + "/" + img_name 46 | cv2.imwrite(img_name, img) 47 | 48 | print(self.cnt) 49 | self.cnt += 1 50 | 51 | 52 | 53 | def dump_images_service(self, request): 54 | 55 | 56 | # for i in range(len(self.path)): 57 | # img_name = self.dumpFolder + self.camName + "/" + self.path[i] 58 | # cv2.imwrite(img_name, self.img[i]) 59 | 60 | data 
= {'t' : self.stamp, 61 | 'type' : self.camName, 62 | 'data' : self.path} 63 | df = pd.DataFrame(data) 64 | df.to_pickle(self.dumpFolder + "/" + self.camName + ".pickle") 65 | 66 | 67 | 68 | print("dumped") 69 | 70 | return "dumped" 71 | 72 | 73 | if __name__ == "__main__": 74 | 75 | 76 | rospy.init_node('image') 77 | posn = CameraDumpNode() 78 | rospy.spin() 79 | 80 | # df = pd.read_pickle("/home/nickybones/data/MCL/Dump/scans.pickle") 81 | # print(df.head()) -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/src/GMAP.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import cv2 3 | 4 | class GMAP(): 5 | 6 | def __init__(self, yaml, gridmap): 7 | 8 | self.map = gridmap 9 | self.resolution = yaml['resolution'] 10 | self.origin = yaml['origin'] 11 | self.max_y = gridmap.shape[0] 12 | self.ComputeBorders() 13 | 14 | 15 | def __init__(self, gridmap, resolution, origin): 16 | 17 | self.map = gridmap 18 | self.resolution = resolution 19 | self.origin = origin 20 | self.max_y = gridmap.shape[0] 21 | self.ComputeBorders() 22 | 23 | # the (0,0) of the gmapping map is where the robot started from. To relate this to the gridmap image 24 | # We need to know the real world coordinates of some image point. 25 | # the map origin in the bottom left corner of the image, and its real world coordinates are 26 | # specified in the metadata yaml. 27 | 28 | def world2map(self, p): 29 | u = np.round((p[0] - self.origin[0]) / self.resolution) 30 | v = self.max_y - np.round((p[1] - self.origin[1]) / self.resolution) 31 | return np.array([u, v]).astype(int) 32 | 33 | def map2world(self, uv): 34 | x = uv[0] * self.resolution + self.origin[0] 35 | y = (self.max_y - uv[1]) * self.resolution + self.origin[1] 36 | 37 | return np.array([x, y]) 38 | 39 | def ComputeBorders(self): 40 | self.map = 255 - cv2.cvtColor(self.map, cv2.COLOR_BGR2GRAY) 41 | white_pixels = np.array(np.where(self.map == 255)) 42 | min_x = min(white_pixels[1]) 43 | max_x = max(white_pixels[1]) 44 | min_y = min(white_pixels[0]) 45 | max_y = max(white_pixels[0]) 46 | self.map_border = {"min_x": min_x, "min_y": min_y, "max_x": max_x, "max_y": max_y} 47 | 48 | def TopLeft(self): 49 | return np.array([self.map_border["min_x"], self.map_border["min_y"]]) 50 | 51 | def BottomRight(self): 52 | return np.array([self.map_border["max_x"], self.map_border["max_y"]]) 53 | -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/src/GTpickle.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import math 4 | from scipy.interpolate import interp1d 5 | from bisect import insort, bisect_left 6 | from collections import deque 7 | from itertools import islice 8 | from scipy.spatial.transform import Rotation as R 9 | 10 | 11 | 12 | def str2Pose(line): 13 | 14 | comps = line.split() 15 | pid = int(comps[0]) 16 | strmtx = np.asarray(comps[4:]) 17 | mtx = strmtx.reshape((4,4)).astype(float) 18 | Rot = mtx[:3, :3] 19 | #yaw, pitch, roll = rotationMatrixToEulerAngles(R) 20 | r = R.from_matrix(Rot) 21 | _, _, roll = r.as_euler('xyz', degrees=False) 22 | x = mtx[0][3] 23 | y = mtx[1][3] 24 | 25 | return x, y, roll, pid 26 | 27 | 28 | def running_median_insort(seq, window_size): 29 | """Contributed by Peter Otten""" 30 | seq = iter(seq) 31 | d = deque() 32 | s = [] 33 | result = [] 34 | for item in islice(seq, window_size): 
35 | d.append(item) 36 | insort(s, item) 37 | result.append(s[len(d)//2]) 38 | m = window_size // 2 39 | for item in seq: 40 | old = d.popleft() 41 | d.append(item) 42 | del s[bisect_left(s, old)] 43 | insort(s, item) 44 | result.append(s[m]) 45 | return result 46 | 47 | 48 | def interpolate(t, poses, kind='nearest', window=12, tol=0.5): 49 | """Contributed by Jerome Guzzi and adapted to our data""" 50 | 51 | n = len(poses) 52 | data = np.zeros((n, 4)) 53 | 54 | for i in range(n): 55 | data[i, 0] = t[i] 56 | data[i, 1:] = poses[i] 57 | 58 | 59 | # Filter outliers in yaw 60 | data = data[np.abs(data[:, 3] - running_median_insort(data[:, 3], window)) < tol] 61 | data[0, -1] = np.fmod(data[0, -1], 2 * np.pi) 62 | 63 | return interp1d(data[:, 0], data[:, 1:], axis=0, fill_value='extrapolate', assume_sorted=True, kind=kind) 64 | 65 | 66 | 67 | 68 | def localizationPickle(dumpFolder, pickleFolder): 69 | 70 | pickleName = pickleFolder + "gopro.pickle" 71 | posesFile = dumpFolder + "poses.txt" 72 | 73 | df = pd.read_pickle(pickleName) 74 | 75 | f = open(posesFile, "r") 76 | lines = f.readlines() 77 | 78 | gt_poses = [] 79 | gt_t = [] 80 | 81 | for i in range(len(lines)): 82 | x, y, yaw, pid = str2Pose(lines[i]) 83 | gt_poses.append(np.array([x, y, yaw])) 84 | row = df.iloc[pid] 85 | gt_t.append(row['t']) 86 | 87 | 88 | mergedPickle = dumpFolder + "merged.pickle" 89 | 90 | intr = interpolate(gt_t, gt_poses) 91 | 92 | merged_df = pd.read_pickle(mergedPickle) 93 | merged_t = merged_df['t'] 94 | 95 | intr_poses = intr(merged_t).tolist() 96 | print(intr_poses) 97 | 98 | data = {'t': merged_t, 'type': 'gt', 'data': intr_poses} 99 | df = pd.DataFrame(data) 100 | 101 | dfcomb = pd.concat([df,merged_df], axis=0) 102 | dfcomb['t'] = dfcomb['t'].astype(int) 103 | dfcomb = dfcomb.sort_values(by='t') 104 | dfcomb.to_pickle(dumpFolder + "gtmerged.pickle") 105 | 106 | 107 | 108 | if __name__ == "__main__": 109 | 110 | 111 | dumpFolder = "/home/nickybones/data/MCL/2021_12_07/Run2/" 112 | pickleFolder = "/home/nickybones/data/MCL/2021_12_07/Run2/" 113 | 114 | 115 | localizationPickle(dumpFolder, pickleFolder) 116 | 117 | 118 | 119 | -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/src/OdomDumpNode.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | 4 | import glob 5 | import numpy as np 6 | import cv2 7 | import rospy 8 | from data_processing.srv import NoArguments 9 | from nav_msgs.msg import Odometry 10 | import pandas as pd 11 | 12 | 13 | class OdomDumpNode(): 14 | 15 | def __init__(self)->None: 16 | 17 | odomTopic = rospy.get_param('odomTopic') 18 | self.dumpFolder = rospy.get_param('dumpFolder') 19 | self.odom = [] 20 | self.stamp = [] 21 | 22 | self.srv = rospy.Service('dump_odom', NoArguments, self.dump_odom_service) 23 | 24 | scan_sub = rospy.Subscriber(odomTopic, Odometry, self.callback) 25 | self.cnt = 0 26 | 27 | 28 | 29 | def callback(self, odom_msg): 30 | 31 | p = odom_msg.pose.pose.position 32 | q = odom_msg.pose.pose.orientation 33 | odom = np.array([p.x, p.y, p.z, q.x, q.y, q.z, q.w]) 34 | #print((odom)) 35 | self.odom.append(odom) 36 | self.stamp.append(int(odom_msg.header.stamp.to_nsec())) 37 | #print(self.cnt) 38 | self.cnt += 1 39 | 40 | 41 | 42 | 43 | def dump_odom_service(self, request): 44 | 45 | data = {'t' : self.stamp, 46 | 'type' : 'odom', 47 | 'data' : self.odom} 48 | df = pd.DataFrame(data) 49 | df.to_pickle(self.dumpFolder + "odom.pickle") 50 | 51 | 52 | 53 
| print("dumped") 54 | 55 | return "dumped" 56 | 57 | 58 | if __name__ == "__main__": 59 | 60 | 61 | rospy.init_node('OdomDumpNode', anonymous=True) 62 | posn = OdomDumpNode() 63 | rospy.spin() 64 | 65 | # df = pd.read_pickle("/home/nickybones/data/MCL/Dump/odom.pickle") 66 | # print(df.head()) -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/src/SyncGPandRSNode.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | 4 | import glob 5 | import numpy as np 6 | import cv2 7 | import rospy 8 | from sensor_msgs.msg import Image 9 | from cv_bridge import CvBridge 10 | from message_filters import ApproximateTimeSynchronizer, Subscriber 11 | from data_processing.srv import NoArguments 12 | 13 | class SyncGPandRSNode(): 14 | 15 | def __init__(self)->None: 16 | 17 | goproTopic = rospy.get_param('goproTopic') 18 | cameraTopic = rospy.get_param('cameraTopic') 19 | self.dumpFolder = rospy.get_param('dumpFolder') 20 | 21 | self.bridge = CvBridge() 22 | self.gopro = [] 23 | self.camera = [] 24 | self.counter = 0 25 | 26 | self.srv = rospy.Service('dump_images', NoArguments, self.dump_images_service) 27 | 28 | gopro_sub = Subscriber(goproTopic, Image) 29 | camera_sub = Subscriber(cameraTopic, Image) 30 | 31 | self.ats = ApproximateTimeSynchronizer([gopro_sub, camera_sub], queue_size=5, slop=0.1) 32 | self.ats.registerCallback(self.callback) 33 | 34 | 35 | def callback(self, gopro_msg, camera_msg): 36 | 37 | gp = self.bridge.imgmsg_to_cv2(gopro_msg, desired_encoding='passthrough') 38 | cam = self.bridge.imgmsg_to_cv2(camera_msg, desired_encoding='passthrough') 39 | self.gopro.append(gp) 40 | self.camera.append(cam) 41 | self.counter += 1 42 | print(self.counter) 43 | 44 | 45 | 46 | def dump_images_service(self, request): 47 | 48 | for i in range(len(self.gopro)): 49 | img_name = '{0:04d}'.format(i) + ".png" 50 | cv2.imwrite(self.dumpFolder + "gopro/" + img_name, self.gopro[i]) 51 | cv2.imwrite(self.dumpFolder + "camera/" + img_name, self.camera[i]) 52 | 53 | print("dumped") 54 | 55 | return "dumped" 56 | 57 | 58 | if __name__ == "__main__": 59 | 60 | 61 | rospy.init_node('SyncGPandRSNode', anonymous=True) 62 | #rate = rospy.Rate(10) 63 | posn = SyncGPandRSNode() 64 | rospy.spin() -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/src/SyncLidarNode.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | 4 | import glob 5 | import numpy as np 6 | import cv2 7 | import rospy 8 | from data_processing.srv import NoArguments 9 | import pandas as pd 10 | from nmcl_msgs.msg import MergedLaserScan 11 | 12 | 13 | class SyncLidarNode(): 14 | 15 | def __init__(self)->None: 16 | 17 | mergedScanTopic = rospy.get_param('mergedScanTopic') 18 | self.dumpFolder = rospy.get_param('dumpFolder') 19 | self.xy = [] 20 | self.stamp = [] 21 | 22 | self.srv = rospy.Service('dump_scans', NoArguments, self.dump_scans_service) 23 | 24 | scan_sub = rospy.Subscriber(mergedScanTopic, MergedLaserScan, self.callback) 25 | self.cnt = 0 26 | 27 | 28 | 29 | def callback(self, merged_msg): 30 | 31 | self.xy.append(merged_msg.xy) 32 | self.stamp.append(int(merged_msg.header.stamp.to_nsec())) 33 | #print(self.cnt) 34 | self.cnt += 1 35 | 36 | 37 | 38 | 39 | def dump_scans_service(self, request): 40 | 41 | data = {'t' : self.stamp, 42 | 'type' : 'lidar', 43 | 'data' : self.xy} 44 | df = 
pd.DataFrame(data) 45 | df.to_pickle(self.dumpFolder + "scans.pickle") 46 | 47 | 48 | 49 | print("dumped") 50 | 51 | return "dumped" 52 | 53 | 54 | if __name__ == "__main__": 55 | 56 | 57 | rospy.init_node('SyncLidarNode', anonymous=True) 58 | posn = SyncLidarNode() 59 | rospy.spin() 60 | 61 | # df = pd.read_pickle("/home/nickybones/data/MCL/Dump/scans.pickle") 62 | # print(df.head()) -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/src/checkPickle.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | 4 | 5 | df = pd.read_pickle("/home/nickybones/data/MCL/2021_12_07/Run4/textgtmerged.pickle") 6 | 7 | for index, row in df.iterrows(): 8 | 9 | typ = row['type'] 10 | if "sem3" in typ: 11 | t = row['t'] 12 | boxes = row['data'] 13 | print(typ, t) 14 | print(boxes) 15 | 16 | for i in range(len(boxes)): 17 | if boxes[i] < 0 or boxes[i] > 639: 18 | print(typ, t) 19 | print(boxes) -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/src/computeMetrics.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import cv2 4 | from scipy.spatial import distance 5 | from evalutils import EvalAllRuns 6 | 7 | 8 | def svd_icp(ti, tj): 9 | mu_i = np.mean(ti, axis=0) 10 | mu_j = np.mean(tj, axis=0) 11 | sigma_ij = 1 / (ti.shape[0]) * (ti - mu_i).T @ (tj - mu_j) 12 | U, _, VT = np.linalg.svd(sigma_ij) 13 | R = U @ VT 14 | t = mu_i.T - R @ mu_j.T 15 | return R, t 16 | 17 | 18 | 19 | if __name__ == "__main__": 20 | 21 | types = ["particles_300/notext", "particles_300/seeds", "particles_300/naivebb", "particles_300/onlywhennotbb", "particles_300/singletimebb"] 22 | #types = ["particles_1000/notext", "particles_1000/seeds", "particles_1000/naivebb", "particles_1000/onlywhennotbb", "particles_1000/singletimebb"] 23 | #types = ["particles_10000/notext", "particles_10000/seeds", "particles_10000/naivebb", "particles_10000/onlywhennotbb", "particles_10000/singletimebb"] 24 | #types = ["particles_500/notext", "particles_500/seeds", "particles_500/naivebb", "particles_500/onlywhennotbb", "particles_500/singletimebb"] 25 | run_nums = 12 26 | folderPath = "/home/nickybones/data/iros2022/2021_12_08_Run3/" 27 | #folderPath = "/home/nickybones/data/MCL/2021_12_08/Run3/" 28 | #EvalAllRuns(run_nums, "particles_300/singletimebb") 29 | #EvalAllRuns(run_nums, "particles_300/singletimebb", folderPath) 30 | 31 | 32 | for t in types: 33 | EvalAllRuns(run_nums, t, folderPath) 34 | 35 | #csv_file_path = "/home/nickybones/data/MCL/2021_12_08/Run3/amcl_results.csv" 36 | # csv_file_path = "/home/nickybones/data/MCL/2021_12_08/Run3/particles_1000/singletimebb/Run0/poseestimation.csv" 37 | # err_xy_dist_avg, err_ang_dist_avg = EvalSingleRun(csv_file_path) 38 | # print(err_xy_dist_avg, err_ang_dist_avg) -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/src/computeMetricsStatic.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import cv2 4 | from scipy.spatial import distance 5 | from evalutils import EvalAllRuns 6 | 7 | 8 | 9 | if __name__ == "__main__": 10 | 11 | types = ["notext", "singletimebb"] 12 | run_nums = 10 13 | folderPath = "/home/nickybones/data/iros2022/2022_01_14_Run1/" 14 | particles = [300, 500, 1000, 10000] 
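    # For every method / particle-count combination, average the per-run angular and
    # xy errors over the first five runs (runs 5-9 are averaged too but not printed)
    # and emit the result as one LaTeX table row per method.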
15 | for t in types: 16 | res_str = "{}".format(t) 17 | for p in particles: 18 | ang_avg, xy_avg = EvalAllRuns(run_nums, t, folderPath + "particles_" + str(p) + "/") 19 | s1_ang, s1_xy, s2_ang, s2_xy = np.mean(ang_avg[0:5]), np.mean(xy_avg[0:5]), np.mean(ang_avg[5:10]), np.mean(xy_avg[5:10]) 20 | res_str += " & {:.3f}/{:.3f}".format(s1_ang, s1_xy) 21 | res_str += " \\\\" 22 | print(res_str) 23 | -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/src/computeMetricsdynamic.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import cv2 4 | from scipy.spatial import distance 5 | from evalutils import EvalAllRuns 6 | 7 | 8 | if __name__ == "__main__": 9 | 10 | types = ["notext", "seeds", "naivebb", "singletimebb", "onlywhennotbb"] 11 | run_nums = 12 12 | folderPath = "/home/nickybones/data/iros2022/2021_12_08_Run3/" 13 | particles = [300, 500, 1000, 10000] 14 | for p in particles: 15 | print("particles: {}".format(p)) 16 | for t in types: 17 | ang_avg, xy_avg = EvalAllRuns(run_nums, t, folderPath + "particles_" + str(p) + "/") 18 | s1_ang, s1_xy, s2_ang, s2_xy, s3_ang, s3_xy, s4_ang, s4_xy = np.mean(ang_avg[0:3]), np.mean(xy_avg[0:3]), np.mean(ang_avg[3:6]), np.mean(xy_avg[3:6]), np.mean(ang_avg[6:9]), np.mean(xy_avg[6:9]), np.mean(ang_avg[9:12]), np.mean(xy_avg[9:12]) 19 | res_str = "{} & {:.3f}/{:.3f} & {:.3f}/{:.3f} & {:.3f}/{:.3f} & {:.3f}/{:.3f} \\\\ ".format(t, s1_ang, s1_xy, s2_ang, s2_xy, s3_ang, s3_xy, s4_ang, s4_xy) 20 | print(res_str) 21 | -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/src/mergepickles.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | 4 | 5 | dumpFolder = "/home/nickybones/data/MCL/Dump/" 6 | pickleName = "merged.pickle" 7 | 8 | df_s = pd.read_pickle(dumpFolder + "scans.pickle") 9 | df_o = pd.read_pickle(dumpFolder + "odom.pickle") 10 | df_c0 = pd.read_pickle(dumpFolder + "camera0.pickle") 11 | df_c1 = pd.read_pickle(dumpFolder + "camera1.pickle") 12 | df_c2 = pd.read_pickle(dumpFolder + "camera2.pickle") 13 | df_c3 = pd.read_pickle(dumpFolder + "camera3.pickle") 14 | df_gt = pd.read_pickle(dumpFolder + "gopro.pickle") 15 | 16 | dfcomb = pd.concat([df_s,df_o, df_c0, df_c1, df_c2, df_c3, df_gt], axis=0) 17 | 18 | dfcomb['t'] = dfcomb['t'].astype(int) 19 | 20 | dfcomb = dfcomb.sort_values(by='t') 21 | #print(dfcomb.head(20).to_string(index=False)) 22 | print((dfcomb['type']== "camera0").sum()) 23 | print((dfcomb['type']== "camera1").sum()) 24 | print((dfcomb['type']== "camera2").sum()) 25 | print((dfcomb['type']== "camera3").sum()) 26 | print((dfcomb['type']== "gopro").sum()) 27 | 28 | dfcomb.to_pickle(dumpFolder + pickleName) -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/src/plotResults.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import cv2 4 | from GMAP import GMAP 5 | 6 | 7 | if __name__ == "__main__": 8 | csv_file_path = "/media/nickybones/My Passport/youbot_localization/2021_12_08/Run3/results/nosemnotext/Run3/poseestimation.csv" 9 | csv_file_path2 = "/media/nickybones/My Passport/youbot_localization/2021_12_08/Run3/results/nosemyessophitext/Run3/poseestimation.csv" 10 | img_file_path = 
"/home/nickybones/Code/YouBotMCL/ncore/data/floor/Faro/FMap.pgm" 11 | df = pd.read_csv(csv_file_path) 12 | df2 = pd.read_csv(csv_file_path2) 13 | gridmap = cv2.imread(img_file_path) 14 | print(df.head()) 15 | 16 | p_x = df['pose_x'] 17 | p_y = df['pose_y'] 18 | p_yaw = df['pose_yaw'] 19 | 20 | p_x2 = df2['pose_x'] 21 | p_y2 = df2['pose_y'] 22 | p_yaw2 = df2['pose_yaw'] 23 | 24 | gt_x = df['gt_x'] 25 | gt_y = df['gt_y'] 26 | gt_yaw = df['gt_yaw'] 27 | 28 | gmap = GMAP(gridmap, 0.05, [-13.9155, -10.99537, 0.0]) 29 | 30 | 31 | for i in range(len(p_x)): 32 | p = [p_x[i], p_y[i]] 33 | gt = [gt_x[i], gt_y[i]] 34 | p2 = [p_x2[i], p_y2[i]] 35 | img = gridmap.copy() 36 | u, v = gmap.world2map(p) 37 | u2, v2 = gmap.world2map(gt) 38 | u3, v3 = gmap.world2map(p2) 39 | 40 | img = cv2.circle(img, (u, v), radius=1, color=(255, 0, 255), thickness=-1) 41 | img = cv2.circle(img, (u2, v2), radius=1, color=(255, 0, 0), thickness=-1) 42 | img = cv2.circle(img, (u3, v3), radius=1, color=(0, 255, 0), thickness=-1) 43 | cv2.imshow("", img) 44 | cv2.waitKey() 45 | 46 | 47 | 48 | 49 | -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/src/ploterrorvsparticlenum.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import cv2 4 | from GMAP import GMAP 5 | from evalutils import cam2BaselinkTF, angDist, EvalAllRuns, point2inch 6 | import matplotlib.pyplot as plt 7 | from matplotlib import rcParams 8 | from matplotlib.ticker import FormatStrFormatter 9 | 10 | rcParams['hatch.linewidth'] = 5.0 11 | rcParams.update({'font.size': 7}) 12 | rcParams.update({'pdf.fonttype': 42}) 13 | # rcParams['text.usetex'] = True 14 | linewidth_latex = 245.71811 15 | 16 | 17 | if __name__ == "__main__": 18 | 19 | folderPath = "/home/nickybones/data/iros2022/2021_12_08_Run3/particles_" 20 | partciles = [300, 500, 1000, 10000] 21 | run_nums = 12 22 | type_run = "singletimebb" 23 | 24 | 25 | #types = ["notext", "seeds", "naivebb", "onlywhennotbb", "singletimebb"] 26 | colors = ['r', 'y', 'b', 'k', 'g'] 27 | markers = ['*', 'o', '^', 'D', 's'] 28 | fig = plt.figure(figsize=(point2inch(linewidth_latex), point2inch(linewidth_latex))) 29 | 30 | for i, p in enumerate(partciles): 31 | s1_xy, s1_ang, s2_xy, s2_ang, s3_xy, s3_ang, s4_xy, s4_ang = EvalAllRuns(run_nums, type_run, folderPath + str(p) + "/") 32 | s_xy = [s1_xy, s2_xy, s3_xy, s4_xy] 33 | s_ang = [s1_ang, s2_ang, s3_ang, s4_ang] 34 | plt.plot([1,2,3,4], s_xy, marker=markers[i], label=p, markersize=1) 35 | 36 | plt.title('Average Trajectory Error (xy) vs. Particle #') 37 | plt.legend() 38 | plt.ylabel(' m ') 39 | plt.xlabel('#') 40 | plt.tight_layout(pad=0.0, rect=[0.01, 0.01, 0.99, 0.99]) 41 | fig.savefig(folderPath + 'Average Trajectory Error (xy) vs. 
Particle #.eps', format='eps') 42 | plt.show() -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/src/ploterrorvstime.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import cv2 4 | from GMAP import GMAP 5 | from evalutils import cam2BaselinkTF, angDist, EvalAllRuns, point2inch, computeErr4Sequence, compueAvgErr4Sequence 6 | import matplotlib.pyplot as plt 7 | from matplotlib import rcParams 8 | from matplotlib.ticker import FormatStrFormatter 9 | 10 | 11 | rcParams['hatch.linewidth'] = 5.0 12 | rcParams.update({'font.size': 7}) 13 | rcParams.update({'pdf.fonttype': 42}) 14 | # rcParams['text.usetex'] = True 15 | linewidth_latex = 245.71811 16 | 17 | 18 | # def computeErr4Sequence(df): 19 | 20 | # samples = len(df['pose_x'].to_numpy()) - 25 #1554 21 | 22 | # p_x = df['pose_x'].to_numpy() 23 | # p_y = df['pose_y'].to_numpy() 24 | # p_yaw = df['pose_yaw'].to_numpy() 25 | # t = df['t'] 26 | 27 | # p_traj = np.zeros((len(p_x), 2)) 28 | # for i in range(samples): 29 | # p_traj[i] = [p_x[i], p_y[i]] 30 | 31 | # gt_x = df['gt_x'].to_numpy() 32 | # gt_y = df['gt_y'].to_numpy() 33 | # gt_yaw = df['gt_yaw'].to_numpy() #+ 0.5 * np.pi 34 | # gt_traj = np.zeros((len(gt_x), 3)) 35 | 36 | # for i in range(samples): 37 | # #gt_traj[i] = [gt_x[i], gt_y[i]] 38 | # gt = np.array([gt_x[i], gt_y[i], gt_yaw[i]]) 39 | # gt_traj[i] = cam2BaselinkTF(gt) 40 | 41 | # gt_fix = gt_traj 42 | 43 | # err_xy_dist = np.sqrt((p_traj[:, 0] - gt_fix[:, 0]) ** 2 + (p_traj[:, 1] - gt_fix[:, 1]) ** 2) 44 | 45 | # err_ang_dist = np.zeros(len(p_yaw)) 46 | 47 | # for i in range(samples): 48 | # theta = p_yaw[i] 49 | # theta2 = gt_fix[i][2] 50 | 51 | # u = np.array([np.cos(theta), np.sin(theta)]) 52 | # v = np.array([np.cos(theta2), np.sin(theta2)]) 53 | # dist = angDist(u, v) 54 | # err_ang_dist[i] = dist 55 | 56 | 57 | # return err_xy_dist, err_ang_dist, t.to_numpy() 58 | 59 | # def compueAvgErr4Sequence(err_xy_dist, err_ang_dist): 60 | 61 | # samples = len(err_xy_dist) 62 | # err_xy_avg = np.zeros(samples) 63 | # err_ang_avg = np.zeros(samples) 64 | 65 | # for i in range(samples): 66 | # xy = np.mean(err_xy_dist[:i]) 67 | # ang = np.mean(err_ang_dist[:i]) 68 | # err_xy_avg[i] = xy 69 | # err_ang_avg[i] = ang 70 | 71 | # return err_xy_avg, err_ang_avg 72 | 73 | 74 | if __name__ == "__main__": 75 | 76 | folderPath = "/home/nickybones/data/iros2022/2021_12_08_Run3/particles_10000/" 77 | r = 0 78 | types = ["notext", "seeds", "naivebb", "onlywhennotbb", "singletimebb"] 79 | colors = ['r', 'y', 'b', 'k', 'g'] 80 | markers = ['*', 'o', '^', 'D', 's'] 81 | fig = plt.figure(figsize=(3.41275152778, 3.41275152778)) 82 | 83 | 84 | for i, type_run in enumerate(types): 85 | #type_run = "singletimebb" 86 | csv_file_path = folderPath + type_run + "/Run" + str(r)+ "/poseestimation.csv" 87 | df = pd.read_csv(csv_file_path) 88 | err_xy_dist, err_ang_dist, t = computeErr4Sequence(df) 89 | err_xy_avg, err_ang_avg = compueAvgErr4Sequence(err_xy_dist, err_ang_dist) 90 | t = t/1000000000 91 | t -= t[0] 92 | plt.plot(t, err_xy_avg, marker=markers[i], label=type_run, markersize=1, alpha=0.7) 93 | 94 | plt.title('Average Trajectory Error (xy) vs. Time') 95 | plt.legend() 96 | plt.ylabel('m') 97 | plt.xlabel('s') 98 | plt.tight_layout(pad=0.0, rect=[0.01, 0.01, 0.99, 0.99]) 99 | fig.savefig(folderPath + 'Average Trajectory Error (xy) vs. 
Time.eps', format='eps') 100 | plt.show() -------------------------------------------------------------------------------- /ros1_ws/src/data_processing/srv/NoArguments.srv: -------------------------------------------------------------------------------- 1 | --- 2 | string return_value -------------------------------------------------------------------------------- /ros1_ws/src/evaluation/package.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | evaluation 4 | 0.0.0 5 | The evaluation package 6 | 7 | 8 | 9 | 10 | nuc20 11 | 12 | 13 | 14 | 15 | 16 | TODO 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | catkin 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | -------------------------------------------------------------------------------- /ros1_ws/src/evaluation/src/GMAP.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import cv2 3 | 4 | class GMAP(): 5 | 6 | def __init__(self, yaml, gridmap): 7 | 8 | self.map = gridmap 9 | self.resolution = yaml['resolution'] 10 | self.origin = yaml['origin'] 11 | self.max_y = gridmap.shape[0] 12 | self.ComputeBorders() 13 | 14 | 15 | def __init__(self, gridmap, resolution, origin): 16 | 17 | self.map = gridmap 18 | self.resolution = resolution 19 | self.origin = origin 20 | self.max_y = gridmap.shape[0] 21 | self.ComputeBorders() 22 | 23 | # the (0,0) of the gmapping map is where the robot started from. To relate this to the gridmap image 24 | # We need to know the real world coordinates of some image point. 25 | # the map origin in the bottom left corner of the image, and its real world coordinates are 26 | # specified in the metadata yaml. 
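    # A worked example (numbers chosen only for illustration): with resolution
    # 0.05 m/px, origin (-13.9, -11.0, 0.0) and a 400-row image, the world point
    # (0, 0) maps to
    #   u = round((0 - (-13.9)) / 0.05) = 278
    #   v = 400 - round((0 - (-11.0)) / 0.05) = 180
    # and map2world(world2map(p)) recovers p up to the resolution of one grid cell.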
27 | 28 | def world2map(self, p): 29 | u = np.round((p[0] - self.origin[0]) / self.resolution) 30 | v = self.max_y - np.round((p[1] - self.origin[1]) / self.resolution) 31 | return np.array([u, v]).astype(int) 32 | 33 | def map2world(self, uv): 34 | x = uv[0] * self.resolution + self.origin[0] 35 | y = (self.max_y - uv[1]) * self.resolution + self.origin[1] 36 | 37 | return np.array([x, y]) 38 | 39 | def ComputeBorders(self): 40 | self.map = 255 - cv2.cvtColor(self.map, cv2.COLOR_BGR2GRAY) 41 | white_pixels = np.array(np.where(self.map == 255)) 42 | min_x = min(white_pixels[1]) 43 | max_x = max(white_pixels[1]) 44 | min_y = min(white_pixels[0]) 45 | max_y = max(white_pixels[0]) 46 | self.map_border = {"min_x": min_x, "min_y": min_y, "max_x": max_x, "max_y": max_y} 47 | 48 | def TopLeft(self): 49 | return np.array([self.map_border["min_x"], self.map_border["min_y"]]) 50 | 51 | def BottomRight(self): 52 | return np.array([self.map_border["max_x"], self.map_border["max_y"]]) 53 | -------------------------------------------------------------------------------- /ros1_ws/src/evaluation/src/convplot.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import cv2 4 | from converge import time_to_convergence, computeErr4Sequence 5 | from evalutils import point2inch 6 | import matplotlib.pyplot as plt 7 | from matplotlib import rcParams 8 | from matplotlib.ticker import FormatStrFormatter 9 | from matplotlib import cm 10 | 11 | rcParams['hatch.linewidth'] = 5.0 12 | rcParams.update({'font.size': 7}) 13 | rcParams.update({'pdf.fonttype': 42}) 14 | # rcParams['text.usetex'] = True 15 | linewidth_latex = 245.71811 16 | 17 | 18 | if __name__ == "__main__": 19 | 20 | folderPath = "/home/nickybones/data/iros2022/FMap/2021_12_08_Run3/particles_300/" 21 | seq = ["S1", "S2", "S3", "S4", "S5", "S6", "S7", "S8", "S9", "S10"] 22 | rn = 10 23 | th = 0.5 24 | type_run = "nicky" 25 | resultsFolder = "/home/nickybones/Code/zimmerman2022iros/pics/" 26 | 27 | types = ["seeds", "repeat", "conservative", "nicky"] 28 | names = ["seeds", "repeat", "conservative", "MCL+text"] 29 | colors = ['r', 'y', 'b', 'k', 'g'] 30 | markers = ['*', 'o', '^', 'D', 's'] 31 | fig = plt.figure(figsize=(point2inch(linewidth_latex), 0.5 * point2inch(linewidth_latex))) 32 | 33 | conv = np.zeros((len(types), len(seq))) 34 | 35 | for i, tp in enumerate(types): 36 | for r in range(rn): 37 | csv_file_path = folderPath + tp + "/Run" + str(r) + "/poseestimation.csv" 38 | #print(csv_file_path) 39 | df_pred = pd.read_csv(csv_file_path) 40 | df_gt = pd.read_csv(csv_file_path) 41 | err_xy_dist, err_ang_dist, t = computeErr4Sequence(df_pred, df_gt) 42 | ind = time_to_convergence(err_xy_dist, err_ang_dist, th) 43 | t_conv = (t[ind] - t[0]) / 1000000000 44 | conv[i, r] = t_conv 45 | #print(t_conv) 46 | 47 | 48 | 49 | clr = cm.rainbow(np.linspace(0, 1, 2*len(types))) 50 | clrs = [clr[1], clr[3], clr[6], clr[7]] 51 | x_ticks = np.arange(0,50,5) + 1.5 52 | 53 | for i, tp in enumerate(types): 54 | x = np.arange(0,50,5) 55 | x += i 56 | print(conv[i]) 57 | 58 | plt.bar(x, conv[i, :], width=1, label=names[i], color=clrs[i]) 59 | 60 | plt.legend(loc='best') 61 | #plt.ylim(0, 3) 62 | plt.ylabel(' Convergence time (s)') 63 | plt.xlabel('Injection method') 64 | plt.xticks(ticks=x_ticks, labels=seq) 65 | plt.tight_layout(pad=0.0, rect=[0.01, 0.01, 0.99, 0.99]) 66 | fig.savefig(resultsFolder + 'AblationConv.pdf') 67 | plt.show() 
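# Reading the chart: conv[i, r] is the time in seconds from the start of run r
# until time_to_convergence reports convergence (in evaluation.py this is the
# first sample whose xy error drops below th); each x-axis group is one of the
# sequences S1-S10 and the bars within a group compare the injection variants.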
-------------------------------------------------------------------------------- /ros1_ws/src/evaluation/src/evaluation.py: -------------------------------------------------------------------------------- 1 | 2 | import numpy as np 3 | import pandas as pd 4 | import cv2 5 | from scipy.spatial import distance 6 | import matplotlib.pyplot as plt 7 | from matplotlib import rcParams 8 | from matplotlib.ticker import FormatStrFormatter 9 | 10 | rcParams['hatch.linewidth'] = 5.0 11 | rcParams.update({'font.size': 7}) 12 | rcParams.update({'pdf.fonttype': 42}) 13 | # rcParams['text.usetex'] = True 14 | linewidth_latex = 245.71811 15 | 16 | 17 | def point2inch(x): 18 | return x / 72. 19 | 20 | 21 | def angDist(a, b): 22 | 23 | dist = 1 - (a @ b.T)/(np.linalg.norm(a) * np.linalg.norm(b)) 24 | 25 | return dist 26 | 27 | 28 | def cam2BaselinkTF(gt): 29 | 30 | camTF = np.array([0.08, 0.02, 1]); 31 | #theta = gt[2] + np.pi * 88 / 180#np.pi / 2 32 | theta = gt[2] + np.pi / 2 33 | v = np.array([0, 0, theta]) 34 | 35 | trans = np.array([[np.cos(theta), np.sin(theta), 0], [-np.sin(theta), np.cos(theta), 0], [0, 0, 1]]) 36 | 37 | gt_fix = trans @ camTF; 38 | gt_fix[0] += gt[0]; 39 | gt_fix[1] += gt[1]; 40 | gt_fix[2] = theta 41 | 42 | return gt_fix 43 | 44 | 45 | def computeErr4Sequence(df_pred, df_gt): 46 | 47 | samples = len(df_pred['pose_x'].to_numpy()) - 25 #1554 48 | 49 | p_x = df_pred['pose_x'].to_numpy() 50 | p_y = df_pred['pose_y'].to_numpy() 51 | p_yaw = df_pred['pose_yaw'].to_numpy() 52 | t = df_pred['t'] 53 | 54 | p_traj = np.zeros((len(p_x), 2)) 55 | for i in range(samples): 56 | p_traj[i] = [p_x[i], p_y[i]] 57 | 58 | gt_x = df_gt['gt_x'].to_numpy() 59 | gt_y = df_gt['gt_y'].to_numpy() 60 | gt_yaw = df_gt['gt_yaw'].to_numpy() #+ 0.5 * np.pi 61 | gt_traj = np.zeros((len(gt_x), 3)) 62 | 63 | for i in range(samples): 64 | #gt_traj[i] = [gt_x[i], gt_y[i]] 65 | gt = np.array([gt_x[i], gt_y[i], gt_yaw[i]]) 66 | gt_traj[i] = cam2BaselinkTF(gt) 67 | 68 | gt_fix = gt_traj 69 | 70 | err_xy_dist = np.sqrt((p_traj[:, 0] - gt_fix[:, 0]) ** 2 + (p_traj[:, 1] - gt_fix[:, 1]) ** 2) 71 | 72 | err_ang_dist = np.zeros(len(p_yaw)) 73 | 74 | for i in range(samples): 75 | theta = p_yaw[i] 76 | theta2 = gt_fix[i][2] 77 | 78 | u = np.array([np.cos(theta), np.sin(theta)]) 79 | v = np.array([np.cos(theta2), np.sin(theta2)]) 80 | dist = angDist(u, v) 81 | err_ang_dist[i] = dist 82 | 83 | 84 | return err_xy_dist, err_ang_dist, t.to_numpy() 85 | 86 | 87 | 88 | def time_to_convergence(err_xy_dist, err_ang_dist, th): 89 | 90 | for i in range(len(err_xy_dist)): 91 | 92 | if err_xy_dist[i] < th: 93 | return i 94 | -------------------------------------------------------------------------------- /ros1_ws/src/evaluation/src/ploterrorAVG.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import cv2 4 | from converge import time_to_convergence, computeErr4Sequence 5 | from evalutils import point2inch 6 | import matplotlib.pyplot as plt 7 | from matplotlib import rcParams 8 | from matplotlib.ticker import FormatStrFormatter 9 | from matplotlib import cm 10 | 11 | rcParams['hatch.linewidth'] = 5.0 12 | rcParams.update({'font.size': 7}) 13 | rcParams.update({'pdf.fonttype': 42}) 14 | # rcParams['text.usetex'] = True 15 | linewidth_latex = 245.71811 16 | 17 | 18 | if __name__ == "__main__": 19 | 20 | folderPath = "/home/nickybones/data/iros2022/FMap/2021_12_08_Run3/particles_300/" 21 | seq = ["S1", "S2", "S3", "S4", "S5", "S6", "S7", "S8", "S9", "S10"] 
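    # For each injection variant, the xy ATE is averaged over the post-convergence
    # part of every run, those per-run averages are averaged again over all rn runs,
    # and the result is drawn as a single bar per method.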
22 | rn = 10 23 | th = 0.5 24 | type_run = "nicky" 25 | resultsFolder = "/home/nickybones/Code/zimmerman2022iros/pics/" 26 | 27 | 28 | types = ["seeds", "repeat", "conservative", "nicky"] 29 | names = ["seeds", "repeat", "conservative", "MCL+text"] 30 | colors = ['r', 'y', 'b', 'k', 'g'] 31 | markers = ['*', 'o', '^', 'D', 's'] 32 | fig = plt.figure(figsize=(point2inch(linewidth_latex), 0.5 * point2inch(linewidth_latex))) 33 | 34 | xy_avg = [] 35 | 36 | x_ticks = [0.5 ,1.1, 1.7, 2.3] 37 | 38 | for i, tp in enumerate(types): 39 | xy_vec = np.zeros((1, rn)) 40 | for r in range(rn): 41 | csv_file_path = folderPath + tp + "/Run" + str(r) + "/poseestimation.csv" 42 | df_pred = pd.read_csv(csv_file_path) 43 | df_gt = pd.read_csv(csv_file_path) 44 | err_xy_dist, err_ang_dist, t = computeErr4Sequence(df_pred, df_gt) 45 | ind = time_to_convergence(err_xy_dist, err_ang_dist, th) 46 | t_conv = (t[ind] - t[0]) / 1000000000 47 | t_tot = (t[len(err_xy_dist)-25] - t[0]) / 1000000000 48 | avg_xy = np.mean(err_xy_dist[ind:]) 49 | avg_ang = np.mean(err_ang_dist[ind:]) 50 | 51 | xy_vec[:, r] = avg_xy 52 | 53 | xy_avg.append(np.mean(xy_vec)) 54 | 55 | print(xy_avg) 56 | #print(xy_avg.shape) 57 | 58 | clr = cm.rainbow(np.linspace(0, 1, 2*len(types))) 59 | clrs = [clr[1], clr[3], clr[6], clr[7]] 60 | for i, tp in enumerate(types): 61 | 62 | plt.bar([i*0.6 + 0.5], xy_avg[i], width=0.3, label=names[i], color=clrs[i]) 63 | 64 | 65 | plt.legend(ncol=1, loc='best') 66 | plt.ylim(0, 0.8) 67 | plt.ylabel(' ATE (m)') 68 | plt.xlabel('Injection method') 69 | plt.xticks(ticks=x_ticks, labels=names) 70 | plt.tight_layout(pad=0.0, rect=[0.01, 0.01, 0.99, 0.99]) 71 | #fig.savefig(resultsFolder + 'AblationAVG.eps', format='eps') 72 | fig.savefig(resultsFolder + 'AblationAVG.pdf') 73 | plt.show() -------------------------------------------------------------------------------- /ros1_ws/src/evaluation/src/ploterrorpartnumAVG.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import cv2 4 | from converge import time_to_convergence, computeErr4Sequence 5 | from evalutils import point2inch 6 | import matplotlib.pyplot as plt 7 | from matplotlib import rcParams 8 | from matplotlib.ticker import FormatStrFormatter 9 | from matplotlib import cm 10 | 11 | rcParams['hatch.linewidth'] = 5.0 12 | rcParams.update({'font.size': 7}) 13 | rcParams.update({'pdf.fonttype': 42}) 14 | # rcParams['text.usetex'] = True 15 | linewidth_latex = 245.71811 16 | 17 | 18 | if __name__ == "__main__": 19 | 20 | folderPath = "/home/nickybones/data/iros2022/FMap/2021_12_08_Run3/particles_" 21 | seq = ["S1", "S2", "S3", "S4", "S5", "S6", "S7", "S8", "S9", "S10"] 22 | rn = 10 23 | th = 0.5 24 | resultsFolder = "/home/nickybones/Code/zimmerman2022iros/pics/" 25 | particles = [300, 500, 1000, 10000] 26 | types = ["amcl", "notext", "hardbox", "softbox", "nicky"] 27 | names = ["AMCL", "MCL", "SM1", "SM2", "MCL+text"] 28 | type_num = len(types) 29 | 30 | colors = ['r', 'y', 'b', 'k', 'g'] 31 | markers = ['*', 'o', '^', 'D', 's'] 32 | fig = plt.figure(figsize=(point2inch(linewidth_latex), 0.5 * point2inch(linewidth_latex))) 33 | 34 | xy_avg = np.zeros((type_num, 4)) 35 | 36 | x_ticks = [2 , 8, 14, 20] 37 | 38 | for j, p in enumerate(particles): 39 | for i, tp in enumerate(types): 40 | #xy_vec = np.zeros((1, rn)) 41 | xy_vec = [] 42 | for r in range(rn): 43 | csv_file_path = folderPath + str(p) + "/" + tp + "/Run" + str(r) + "/poseestimation.csv" 44 | df_pred = 
pd.read_csv(csv_file_path) 45 | df_gt = pd.read_csv(csv_file_path) 46 | err_xy_dist, err_ang_dist, t = computeErr4Sequence(df_pred, df_gt) 47 | ind = time_to_convergence(err_xy_dist, err_ang_dist, th) 48 | if ind > -1: 49 | t_conv = (t[ind] - t[0]) / 1000000000 50 | t_tot = (t[len(err_xy_dist)-25] - t[0]) / 1000000000 51 | if t_conv / t_tot < 0.95 : 52 | avg_xy = np.mean(err_xy_dist[ind:]) 53 | avg_ang = np.mean(err_ang_dist[ind:]) 54 | xy_vec.append(avg_xy) 55 | else: 56 | xy_vec.append(np.nanmean(err_xy_dist)) 57 | else: 58 | xy_vec.append(np.nanmean(err_xy_dist)) 59 | 60 | #xy_vec[:, r] = avg_xy 61 | xy_vec = np.asarray(xy_vec) 62 | xy_avg[i, j]= np.nanmean(xy_vec) 63 | 64 | print(xy_avg) 65 | print(xy_avg.shape) 66 | print(xy_avg[0, :]) 67 | 68 | clr = cm.rainbow(np.linspace(0, 1, 2*len(types))) 69 | clrs = [clr[0], clr[1], clr[5], clr[7], clr[9]] 70 | 71 | 72 | #plt.bar([0, 1, 2, 3], xy_avg[0, :], width=0.3, label=names[0], color=clr[0]) 73 | 74 | for i, tp in enumerate(types): 75 | plt.bar([i , i + type_num+1 , i + type_num*2+2 , i + type_num*3+3], xy_avg[i, :], width=1, label=names[i], color=clrs[i]) 76 | 77 | 78 | plt.legend(ncol=1, loc='best') 79 | #plt.ylim(0, 0.8) 80 | plt.ylabel(' ATE (m)') 81 | plt.xlabel('Number of particles') 82 | plt.xticks(ticks=x_ticks, labels=particles) 83 | plt.tight_layout(pad=0.0, rect=[0.01, 0.01, 0.99, 0.99]) 84 | #fig.savefig(resultsFolder + 'AblationAVG.eps', format='eps') 85 | fig.savefig(resultsFolder + 'ParticleNumAVG.pdf') 86 | plt.show() -------------------------------------------------------------------------------- /ros1_ws/src/evaluation/src/ploterrorvsparticlenumConv.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import cv2 4 | from converge import time_to_convergence, computeErr4Sequence 5 | from evalutils import point2inch 6 | import matplotlib.pyplot as plt 7 | from matplotlib import rcParams 8 | from matplotlib.ticker import FormatStrFormatter 9 | from matplotlib import cm 10 | 11 | rcParams['hatch.linewidth'] = 5.0 12 | rcParams.update({'font.size': 7}) 13 | rcParams.update({'pdf.fonttype': 42}) 14 | # rcParams['text.usetex'] = True 15 | linewidth_latex = 245.71811 16 | 17 | 18 | if __name__ == "__main__": 19 | 20 | folderPath = "/home/nickybones/data/iros2022/FMap/2021_12_08_Run3/particles_" 21 | particles = [300, 500, 1000, 10000] 22 | seq = ["S1", "S2", "S3", "S4", "S5", "S6", "S7", "S8", "S9", "S10"] 23 | rn = 10 24 | th = 0.5 25 | type_run = "nicky" 26 | resultsFolder = "/home/nickybones/Code/zimmerman2022iros/pics/" 27 | 28 | #types = ["notext", "seeds", "naivebb", "onlywhennotbb", "singletimebb"] 29 | colors = ['r', 'y', 'b', 'k', 'g'] 30 | markers = ['*', 'o', '^', 'D', 's'] 31 | fig = plt.figure(figsize=(point2inch(linewidth_latex), 0.5 * point2inch(linewidth_latex))) 32 | 33 | xy_matrix = np.zeros((4, rn)) 34 | ang_matrix = np.zeros((4, rn)) 35 | 36 | x_ticks = [5 ,16, 27, 38] 37 | 38 | for i, p in enumerate(particles): 39 | print(p) 40 | for r in range(rn): 41 | csv_file_path = folderPath + str(p) + "/" + type_run + "/Run" + str(r) + "/poseestimation.csv" 42 | df_pred = pd.read_csv(csv_file_path) 43 | df_gt = pd.read_csv(csv_file_path) 44 | err_xy_dist, err_ang_dist, t = computeErr4Sequence(df_pred, df_gt) 45 | ind = time_to_convergence(err_xy_dist, err_ang_dist, th) 46 | t_conv = (t[ind] - t[0]) / 1000000000 47 | t_tot = (t[len(err_xy_dist)-25] - t[0]) / 1000000000 48 | avg_xy = np.mean(err_xy_dist[ind:]) 49 | avg_ang = 
np.mean(err_ang_dist[ind:]) 50 | 51 | xy_matrix[i, r] = avg_xy 52 | 53 | clr = cm.nipy_spectral(np.linspace(0, 1, rn)) 54 | 55 | for r in range(rn): 56 | plt.bar([r ,rn+r+1, rn*2+r+2, rn*3+r+3], xy_matrix[:, r], width=1, label=seq[r], color=clr[r]) 57 | 58 | plt.legend(ncol=5, loc='best') 59 | plt.ylim(0, 3) 60 | plt.ylabel(' ATE (m)') 61 | plt.xlabel('Number of particles N') 62 | plt.xticks(ticks=x_ticks, labels=particles) 63 | plt.tight_layout(pad=0.0, rect=[0.01, 0.01, 0.99, 0.99]) 64 | #fig.savefig(resultsFolder + 'AverageTrajectoryErrorVSParticleNum.eps', format='eps') 65 | plt.show() -------------------------------------------------------------------------------- /ros1_ws/src/evaluation/src/plotgttrajectoryandprediction.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import cv2 4 | from evalutils import cam2BaselinkTF, angDist, EvalAllRuns, point2inch 5 | import matplotlib.pyplot as plt 6 | from matplotlib import cm 7 | 8 | from matplotlib import rcParams 9 | from matplotlib.ticker import FormatStrFormatter 10 | from GMAP import GMAP 11 | 12 | rcParams['hatch.linewidth'] = 5.0 13 | rcParams.update({'font.size': 7}) 14 | rcParams.update({'pdf.fonttype': 42}) 15 | # rcParams['text.usetex'] = True 16 | linewidth_latex = 245.71811 17 | 18 | 19 | 20 | if __name__ == "__main__": 21 | 22 | #run_name = "2021_12_08/Run3" 23 | run_name = "2021_12_08_Run3" 24 | #folderPath = "/home/nickybones/data/MCL/Dump/" 25 | #folderPath = "/home/nickybones/data/MCL/" + run_name + "/particles_" 26 | folderPath = "/home/nickybones/data/iros2022/FMap/" + run_name + "/particles_" 27 | partciles = [300, 500, 1000, 10000] 28 | types = ["gt", "notext", "singletimebb"] 29 | names = ["ground truth", "without text", "with text"] 30 | csv_file_path = folderPath + str(300) + "/notext/Run8/poseestimation.csv" 31 | csv_file_path2 = folderPath + str(300) + "/nicky/Run8/poseestimation.csv" 32 | #csv_file_path = folderPath + "poseestimation.csv" 33 | resultsFolder = "/home/nickybones/Code/zimmerman2022iros/pics/" 34 | 35 | gridmap = cv2.imread("/home/nickybones/Code/YouBotMCL/ncore/data/floor/FMap/FMap.pgm") 36 | mapdoors = cv2.cvtColor(cv2.imread("/home/nickybones/Code/zimmerman2022iros/pics/FMapDoors.png"), cv2.COLOR_BGR2RGB) 37 | gmap = GMAP(gridmap, 0.05, [-13.9155, -10.99537, 0.0]) 38 | 39 | df = pd.read_csv(csv_file_path) 40 | df2 = pd.read_csv(csv_file_path2) 41 | 42 | 43 | samples = len(df['gt_x'].to_numpy()) - 25 #1554 44 | samples = 100 45 | 46 | gt_x = df['gt_x'].to_numpy() 47 | gt_y = df['gt_y'].to_numpy() 48 | gt_yaw = df['gt_yaw'].to_numpy() 49 | 50 | p_x = df['pose_x'].to_numpy() 51 | p_y = df['pose_y'].to_numpy() 52 | p_yaw = df['pose_yaw'].to_numpy() 53 | 54 | pt_x = df2['pose_x'].to_numpy() 55 | pt_y = df2['pose_y'].to_numpy() 56 | pt_yaw = df2['pose_yaw'].to_numpy() 57 | 58 | fig = plt.figure(figsize=(point2inch(linewidth_latex), 0.5 * point2inch(linewidth_latex))) 59 | clr = cm.rainbow(np.linspace(0, 1, samples)) 60 | im = plt.imshow(mapdoors) 61 | #cbar = fig.colorbar(im, ticks=[0, 85, 170, 255]) 62 | #cbar.set_ticklabels(np.array([0, int(samples / 3), int(2 * samples / 3), samples])) 63 | 64 | offset = 450 65 | #for i in range(samples): 66 | for i in range(offset, offset+samples): 67 | gt = [gt_x[i], gt_y[i]] 68 | p = [p_x[i], p_y[i]] 69 | pt = [pt_x[i], pt_y[i]] 70 | u, v = gmap.world2map(gt) 71 | up, vp = gmap.world2map(p) 72 | ut, vt = gmap.world2map(pt) 73 | plt.plot(u, v, 'o', markersize=1, color='k', 
label=names[0]) 74 | plt.plot(up, vp, 'o', markersize=1, color='b', label=names[1]) 75 | plt.plot(ut, vt, 'o', markersize=1, color='g', label=names[2]) 76 | 77 | if i%40 == 0 : 78 | plt.plot(u, v, '>', markersize=5, color='k') 79 | plt.plot(up, vp, '<', markersize=5, color='b') 80 | plt.plot(ut, vt, '>', markersize=5, color='g') 81 | 82 | 83 | lgnd = plt.legend(names, markerscale=5) 84 | 85 | plt.axis('off') 86 | plt.tight_layout(pad=0.0, rect=[0.01, 0.01, 0.99, 0.99]) 87 | fig.savefig(resultsFolder + 'MotivationTrajectory.eps', format='eps') 88 | fig.savefig(resultsFolder + 'MotivationTrajectory.pdf') 89 | plt.show() -------------------------------------------------------------------------------- /ros1_ws/src/evaluation/src/plottextmaps.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import cv2 4 | from evalutils import cam2BaselinkTF, angDist, EvalAllRuns, point2inch 5 | import matplotlib.pyplot as plt 6 | from matplotlib import cm 7 | import matplotlib.patches as patches 8 | 9 | from matplotlib import rcParams 10 | from matplotlib.ticker import FormatStrFormatter 11 | from GMAP import GMAP 12 | 13 | rcParams['hatch.linewidth'] = 5.0 14 | rcParams.update({'font.size': 7}) 15 | rcParams.update({'pdf.fonttype': 42}) 16 | # rcParams['text.usetex'] = True 17 | linewidth_latex = 245.71811 18 | 19 | 20 | 21 | if __name__ == "__main__": 22 | 23 | folderPath = "/home/nickybones/Code/YouBotMCL/ncore/data/floor/Faro/TextMaps/" 24 | resultsFolder = "/home/nickybones/Code/zimmerman2022iros/pics/" 25 | gridmap = cv2.imread("/home/nickybones/Code/YouBotMCL/ncore/data/floor/Faro/FMap.pgm") 26 | gmap = GMAP(gridmap, 0.05, [-13.9155, -10.99537, 0.0]) 27 | roomnames = ["Room 1", "Room 2", "Room 3", "Room 4", "Room 5", "Room 6", "Room 7", "Room 8", "Room 9", "Room 10"] 28 | roomcoords = [[410, 50], [330, 50], [240, 50], [140, 50], [50, 50], [40, 220], [120, 220], [200, 220],[310, 220], [440, 220]] 29 | 30 | fig, ax = plt.subplots(figsize=(point2inch(linewidth_latex), 0.5 * point2inch(linewidth_latex))) 31 | clr = cm.rainbow(np.linspace(0, 1, 10)) 32 | im = ax.imshow(gridmap) 33 | 34 | for i in range(1, 11): 35 | textmap = cv2.imread(folderPath + "Room " + str(i) + ".png") 36 | r = textmap[:, :, 0] 37 | locations = cv2.findNonZero(r) 38 | x,y,w,h = cv2.boundingRect(locations) 39 | for l in locations: 40 | # print(l) 41 | # print(l.shape) 42 | ax.plot(l[0, 0], l[0,1], 'o', alpha=0.8, markersize=1, color=clr[i-1]) 43 | 44 | text_kwargs = dict(ha='center', va='center', fontsize=5, color=clr[i-1]) 45 | ax.text(roomcoords[i - 1][0], roomcoords[i - 1][1], roomnames[i - 1], **text_kwargs) 46 | 47 | #rect = patches.Rectangle((x, y), w, h, linewidth=1, edgecolor='k', facecolor='none', zorder=10000) 48 | #ax.add_patch(rect) 49 | ax.axis('off') 50 | # text_kwargs = dict(ha='center', va='center', fontsize=5, color='k') 51 | # ax.text(410, 50, 'Room 1', **text_kwargs) 52 | # ax.text(330, 50, 'Room 2', **text_kwargs) 53 | # ax.text(240, 50, 'Room 3', **text_kwargs) 54 | # ax.text(140, 50, 'Room 4', **text_kwargs) 55 | # ax.text(50, 50, 'Room 5', **text_kwargs) 56 | 57 | # ax.text(440, 220, 'Room 10', **text_kwargs) 58 | # ax.text(310, 220, 'Room 9', **text_kwargs) 59 | # ax.text(200, 220, 'Room 8', **text_kwargs) 60 | # ax.text(120, 220, 'Room 7', **text_kwargs) 61 | # ax.text(40, 220, 'Room 6', **text_kwargs) 62 | 63 | plt.tight_layout(pad=0.0, rect=[0.01, 0.01, 0.99, 0.99]) 64 | fig.savefig(resultsFolder + 'TextMaps.eps', 
format='eps') 65 | plt.show() 66 | 67 | 68 | -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_msgs/msg/LidarScanMask.msg: -------------------------------------------------------------------------------- 1 | Header header 2 | int32 id 3 | string type 4 | float32[] t1 5 | float32[] t2 -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_msgs/msg/MergedLaserScan.msg: -------------------------------------------------------------------------------- 1 | Header header 2 | float32[] xy 3 | -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_msgs/msg/TextArray.msg: -------------------------------------------------------------------------------- 1 | 2 | Header header 3 | string[] text 4 | int32 id -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_msgs/package.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | nmcl_msgs 4 | 0.0.0 5 | The nmcl_msgs package 6 | 7 | 8 | 9 | 10 | nickybones 11 | 12 | 13 | 14 | 15 | 16 | TODO 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | catkin 52 | roscpp 53 | rospy 54 | std_msgs 55 | sensor_msgs 56 | roscpp 57 | rospy 58 | std_msgs 59 | roscpp 60 | rospy 61 | std_msgs 62 | 63 | 64 | 65 | message_generation 66 | message_runtime 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_ros/config/480x640/cam0.config: -------------------------------------------------------------------------------- 1 | { 2 | "id": 0, 3 | "yaw" : 0, 4 | "k" : [380.07025146484375, 0.0, 324.4729919433594, 0.0, 379.66119384765625, 237.78517150878906, 0.0, 0.0, 1.0], 5 | "t" : [0, 0, 1, -1, 0, 0, 0, -1, 0] 6 | } -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_ros/config/480x640/cam1.config: -------------------------------------------------------------------------------- 1 | { 2 | "id": 1, 3 | "yaw" : -1.57079632679, 4 | "k" : [380.07025146484375, 0.0, 324.4729919433594, 0.0, 379.66119384765625, 237.78517150878906, 0.0, 0.0, 1.0], 5 | "t" : [0, 0, 1, -1, 0, 0, 0, -1, 0] 6 | } -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_ros/config/480x640/cam2.config: -------------------------------------------------------------------------------- 1 | { 2 | "id": 2, 3 | "yaw" : 3.14159265359, 4 | "k" : [380.07025146484375, 0.0, 324.4729919433594, 0.0, 379.66119384765625, 237.78517150878906, 0.0, 0.0, 1.0], 5 | "t" : [0, 0, 1, -1, 0, 0, 0, -1, 0] 6 | } -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_ros/config/480x640/cam3.config: -------------------------------------------------------------------------------- 1 | { 2 | "id": 3, 3 | "yaw" : 1.57079632679, 4 | "k" : [381.7715148925781, 0.0, 318.88604736328125, 0.0, 381.3864440917969, 241.0621795654297, 0.0, 0.0, 1.0], 5 | "t" : [0, 0, 1, -1, 0, 0, 0, -1, 0] 6 | } -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_ros/config/cam0.config: -------------------------------------------------------------------------------- 1 | { 2 | "id": 0, 3 | "yaw" : 0 4 | } 
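The cam*.config files above are plain JSON; only the 480x640 variants carry the row-major "k" intrinsics and the "t" transform. A minimal sketch of reading one from Python is shown below; the load_cam_config helper is an illustrative assumption and not part of the repository:

    import json
    import numpy as np

    def load_cam_config(path):
        # Hypothetical helper: read a cam*.config file and return its fields.
        with open(path) as f:
            cfg = json.load(f)
        cam_id = cfg["id"]
        yaw = cfg["yaw"]  # mounting yaw in radians
        K = np.array(cfg["k"]).reshape(3, 3) if "k" in cfg else None  # row-major intrinsics
        T = np.array(cfg["t"]).reshape(3, 3) if "t" in cfg else None  # 3x3 transform stored in "t"
        return cam_id, yaw, K, T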
-------------------------------------------------------------------------------- /ros1_ws/src/nmcl_ros/config/cam1.config: -------------------------------------------------------------------------------- 1 | { 2 | "id": 1, 3 | "yaw" : -1.57079632679 4 | } -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_ros/config/cam2.config: -------------------------------------------------------------------------------- 1 | { 2 | "id": 2, 3 | "yaw" : 3.14159265359 4 | } -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_ros/config/cam3.config: -------------------------------------------------------------------------------- 1 | { 2 | "id": 3, 3 | "yaw" : 1.57079632679 4 | } -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_ros/config/front_laser.config: -------------------------------------------------------------------------------- 1 | { 2 | "name": "front_laser", 3 | "angle_max" : 2.268928050994873, 4 | "angle_min" : -2.268928050994873, 5 | "num_beams" : 1041, 6 | "origin" : [0.25, 0.155, 0.785] 7 | } 8 | -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_ros/config/front_laser.yaml: -------------------------------------------------------------------------------- 1 | name: front_laser 2 | angle_max: 2.268928050994873 3 | angle_min: -2.268928050994873 4 | num_beams: 1041 5 | origin: [0.25, 0.155, 0.785] 6 | -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_ros/config/rear_laser.config: -------------------------------------------------------------------------------- 1 | { 2 | "name": "rear_laser", 3 | "angle_max" : 2.268928050994873, 4 | "angle_min" : -2.268928050994873, 5 | "num_beams" : 1041, 6 | "origin" : [-0.25, -0.155, -2.356] 7 | } 8 | -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_ros/config/rear_laser.yaml: -------------------------------------------------------------------------------- 1 | name: rear_laser 2 | angle_max: 2.268928050994873 3 | angle_min: -2.268928050994873 4 | num_beams: 1041 5 | origin: [-0.25, -0.155, -2.356] 6 | -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_ros/include/RosUtils.h: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. 
# 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: RosUtils.h # 9 | # ############################################################################## 10 | **/ 11 | 12 | #ifndef ROSUTILS_H 13 | #define ROSUTILS_H 14 | 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include "opencv2/opencv.hpp" 24 | #include 25 | 26 | 27 | 28 | Eigen::Vector3f OdomMsg2Pose2D(const nav_msgs::OdometryConstPtr& odom); 29 | 30 | Eigen::Vector3f PoseMsg2Pose2D(const geometry_msgs::PoseStampedConstPtr& poseMsg); 31 | 32 | std::vector OdomMsg2Pose3D(const nav_msgs::OdometryConstPtr& odom); 33 | 34 | std::vector PoseMsg2Pose3D(const geometry_msgs::PoseStampedConstPtr& odom); 35 | 36 | geometry_msgs::PoseStamped Pose2D2PoseMsg(Eigen::Vector3f pose2d); 37 | 38 | 39 | pcl::PointCloud Vec2PointCloud(const std::vector& points3d, Eigen::Vector3f rgb); 40 | 41 | pcl::PointCloud Vec2PointCloud(const std::vector& points3d); 42 | 43 | 44 | pcl::PointCloud ManyVec2PointCloud(const std::vector>& points3d, std::vector rgb); 45 | 46 | 47 | geometry_msgs::PoseWithCovarianceStamped Pred2PoseWithCov(Eigen::Vector3d pred, Eigen::Matrix3d cov); 48 | 49 | sensor_msgs::Image CVMat2ImgMsg(const cv::Mat& img, std_msgs::Header header, const std::string& type); 50 | 51 | 52 | 53 | 54 | 55 | #endif 56 | -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_ros/launch/camerasync.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_ros/launch/confignmcl.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | [0.02, 0.02, 0.02] 17 | [0.5, 0.5] 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_ros/launch/textrec.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_ros/package.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | nmcl_ros 4 | 0.0.0 5 | The nmcl_ros package 6 | 7 | 8 | 9 | 10 | nickybones 11 | 12 | 13 | 14 | 15 | 16 | TODO 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | catkin 53 | roscpp 54 | rospy 55 | std_msgs 56 | sensor_msgs 57 | nmcl_msgs 58 | 59 | 60 | roscpp 61 | rospy 62 | std_msgs 63 | roscpp 64 | rospy 65 | std_msgs 66 | sensor_msgs 67 | nmcl_msgs 68 | 69 | catkin 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_ros/src/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | add_executable(ScanMergeNode ScanMergeNode.cpp RosUtils.cpp) 7 | target_link_libraries(ScanMergeNode ${OpenCV_LIBS} ${catkin_LIBRARIES} NSENSORS) 8 | 9 | 10 | 
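# Each ROS node in this package is a separate executable built on the ncore libraries:
# ScanMergeNode (above) merges the laser scans and links NSENSORS, TextRecoNode wraps
# the ndl text spotting and links NDL, and ConfigNMCLNode runs the NMCL localization
# itself and links NMCL, NSENSORS, NMAP, Boost and nlohmann_json.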
add_executable(TextRecoNode TextRecoNode.cpp) 11 | target_link_libraries(TextRecoNode ${OpenCV_LIBS} ${catkin_LIBRARIES} NDL) 12 | 13 | 14 | add_executable(ConfigNMCLNode ConfigNMCLNode.cpp RosUtils.cpp) 15 | target_link_libraries(ConfigNMCLNode ${OpenCV_LIBS} ${catkin_LIBRARIES} NMCL NSENSORS NMAP ${Boost_LIBRARIES} nlohmann_json::nlohmann_json) 16 | 17 | -------------------------------------------------------------------------------- /ros1_ws/src/nmcl_ros/src/TextRecoNode.cpp: -------------------------------------------------------------------------------- 1 | /** 2 | # ############################################################################## 3 | # Copyright (c) 2021- University of Bonn # 4 | # All rights reserved. # 5 | # # 6 | # Author: Nicky Zimmerman # 7 | # # 8 | # File: TextRecoNode.cpp # 9 | # ############################################################################## 10 | **/ 11 | 12 | 13 | #include <ros/ros.h> 14 | #include <sensor_msgs/Image.h> 15 | #include <cv_bridge/cv_bridge.h> 16 | #include <nmcl_msgs/TextArray.h> 17 | #include "opencv2/opencv.hpp" 18 | #include <memory> 19 | 20 | #include "TextSpotting.h" 21 | 22 | class TextRecoNode 23 | { 24 | public: 25 | 26 | TextRecoNode(std::string name) 27 | { 28 | ros::NodeHandle nh(name); 29 | 30 | std::string jsonPath; 31 | std::string cameraImgTopic; 32 | std::string cameraInfoTopic; 33 | std::string textTopic; 34 | 35 | nh.getParam("textSpottingConfig", jsonPath); 36 | nh.getParam("cameraImgTopic", cameraImgTopic); 37 | nh.getParam("camID", o_camID); 38 | nh.getParam("textTopic", textTopic); 39 | 40 | TextSpotting ts = TextSpotting(jsonPath); 41 | 42 | o_textSpotter = std::make_shared<TextSpotting>(ts); 43 | o_textPub = nh.advertise<nmcl_msgs::TextArray>(textTopic, 10); 44 | o_camSub = nh.subscribe(cameraImgTopic, 1, &TextRecoNode::callback, this); 45 | } 46 | 47 | void callback(const sensor_msgs::ImageConstPtr& imgMsg) 48 | { 49 | cv_bridge::CvImagePtr cvPtr = cv_bridge::toCvCopy(imgMsg, sensor_msgs::image_encodings::TYPE_8UC3); 50 | cv::Mat frame = cvPtr->image; 51 | 52 | try 53 | { 54 | std::vector<std::string> recRes = o_textSpotter->Infer(frame); 55 | nmcl_msgs::TextArray msg; 56 | msg.header = imgMsg->header; 57 | msg.text = recRes; 58 | msg.id = o_camID; 59 | o_textPub.publish(msg); 60 | } 61 | catch (...) 62 | { 63 | ROS_INFO_STREAM(std::string("Failed to infer text from camera ") + std::to_string(o_camID)); 64 | } 65 | 66 | } 67 | 68 | 69 | private: 70 | 71 | std::shared_ptr<TextSpotting> o_textSpotter; 72 | ros::Publisher o_textPub; 73 | ros::Subscriber o_camSub; 74 | int o_camID; 75 | 76 | }; 77 | 78 | 79 | int main(int argc, char** argv) 80 | { 81 | std::string name = argv[1]; 82 | ros::init(argc, argv, name); 83 | TextRecoNode tr = TextRecoNode(name); 84 | ros::spin(); 85 | 86 | 87 | return 0; 88 | } 89 | --------------------------------------------------------------------------------
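For completeness, a minimal sketch of consuming TextRecoNode's output from Python; this assumes the nmcl_msgs package has been built in the same catkin workspace, and the topic name "/text0" is only a placeholder that must match the textTopic parameter passed to the node:

    import rospy
    from nmcl_msgs.msg import TextArray

    def on_text(msg):
        # msg.text holds the recognized strings, msg.id the camera they came from
        rospy.loginfo("camera %d spotted: %s", msg.id, ", ".join(msg.text))

    if __name__ == "__main__":
        rospy.init_node("text_listener")
        rospy.Subscriber("/text0", TextArray, on_text, queue_size=10)
        rospy.spin()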