├── Data_Preprocessing ├── Data_Augmentation │ ├── CMakeLists.txt │ ├── build │ │ ├── Makefile │ │ └── data_aug │ ├── data_aug.c │ ├── data_aug.h │ ├── data_aug.py │ ├── data_aug.pyx │ ├── data_augmentation.cpp │ └── test_data_aug.cpp ├── KITTI2PCD │ ├── CMakeLists.txt │ ├── build │ │ ├── Makefile │ │ └── kitti2pcd │ └── kitti2pcd.cpp └── Rename │ └── rename.py ├── Feature_Engineering ├── CMakeLists.txt ├── README.md ├── build │ └── bin │ │ ├── car1.pcd │ │ ├── car2.pcd │ │ ├── car742.pcd │ │ └── pedestrian0.pcd ├── dataset_example │ ├── car1.pcd │ ├── car2.pcd │ ├── car742.pcd │ └── pedestrian0.pcd ├── lib │ ├── CMakeLists.txt │ ├── accessFile.cpp │ ├── accessFile.h │ ├── commonHeadFiles.h │ ├── extractFeature.cpp │ ├── extractFeature.h │ ├── getFeatureVector.cpp │ ├── getFeatureVector.h │ ├── get_file_name.c │ ├── get_file_name.h │ ├── get_file_name.pyx │ ├── sampleCloud.cpp │ ├── sampleCloud.h │ ├── searchKdtree.cpp │ └── searchKdtree.h └── src │ ├── CMakeLists.txt │ ├── analyseFeature.cpp │ ├── estimate_mutual_info.py │ └── testModule.cpp ├── README.md ├── Test ├── basic_test_randomforest.py ├── basic_test_svm.py ├── basic_test_xgb.py ├── noise_test_rf.py ├── noise_test_svm.py ├── noise_test_xgb.py ├── occlusion_test_rf.py ├── occlusion_test_svm.py ├── occlusion_test_xgb.py ├── robust_test_data_features │ ├── CMakeLists.txt │ ├── build │ │ └── Makefile │ ├── lib │ │ ├── CMakeLists.txt │ │ ├── accessFile.cpp │ │ ├── accessFile.h │ │ ├── commonHeadFiles.h │ │ ├── extractFeature.cpp │ │ ├── extractFeature.h │ │ ├── gen_robust_test_data.cpp │ │ ├── gen_robust_test_data.h │ │ ├── getFeatureVector.cpp │ │ ├── getFeatureVector.h │ │ ├── get_file_name.c │ │ ├── get_file_name.h │ │ ├── get_file_name.pyx │ │ ├── sampleCloud.cpp │ │ ├── sampleCloud.h │ │ ├── searchKdtree.cpp │ │ └── searchKdtree.h │ └── src │ │ ├── CMakeLists.txt │ │ ├── noise.cpp │ │ ├── occlusion.cpp │ │ └── sparsity.cpp ├── sparsity_test_rf.py ├── sparsity_test_svm.py └── sparsity_test_xgb.py └── 
Training ├── rf.pkl ├── svm.pkl ├── train_randomforest.py ├── train_svm.py ├── train_xgb.py └── xgb.pkl /Data_Preprocessing/Data_Augmentation/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8 FATAL_ERROR) 2 | 3 | project(data_aug) 4 | 5 | find_package(PCL 1.8 REQUIRED) 6 | 7 | #add_compile_options(-std=c++11) 8 | 9 | include_directories(${PCL_INCLUDE_DIRS}) 10 | link_directories(${PCL_LIBRARY_DIRS}) 11 | add_definitions(${PCL_DEFINITIONS}) 12 | 13 | add_executable (data_aug data_augmentation.cpp data_aug.c) 14 | target_link_libraries (data_aug ${PCL_LIBRARIES}) 15 | -------------------------------------------------------------------------------- /Data_Preprocessing/Data_Augmentation/build/Makefile: -------------------------------------------------------------------------------- 1 | # CMAKE generated file: DO NOT EDIT! 2 | # Generated by "Unix Makefiles" Generator, CMake Version 3.5 3 | 4 | # Default target executed when no arguments are given to make. 5 | default_target: all 6 | 7 | .PHONY : default_target 8 | 9 | # Allow only one "make -f Makefile2" at a time, but pass parallelism. 10 | .NOTPARALLEL: 11 | 12 | 13 | #============================================================================= 14 | # Special targets provided by cmake. 15 | 16 | # Disable implicit rules so canonical targets will work. 17 | .SUFFIXES: 18 | 19 | 20 | # Remove some rules from gmake that .SUFFIXES does not remove. 21 | SUFFIXES = 22 | 23 | .SUFFIXES: .hpux_make_needs_suffix_list 24 | 25 | 26 | # Suppress display of executed commands. 27 | $(VERBOSE).SILENT: 28 | 29 | 30 | # A target that is always out of date. 31 | cmake_force: 32 | 33 | .PHONY : cmake_force 34 | 35 | #============================================================================= 36 | # Set environment variables for the build. 37 | 38 | # The shell in which to execute make rules. 39 | SHELL = /bin/sh 40 | 41 | # The CMake executable. 
42 | CMAKE_COMMAND = /usr/bin/cmake 43 | 44 | # The command to remove a file. 45 | RM = /usr/bin/cmake -E remove -f 46 | 47 | # Escaping for special characters. 48 | EQUALS = = 49 | 50 | # The top-level source directory on which CMake was run. 51 | CMAKE_SOURCE_DIR = /home/shao/文档/VSCodeWS/Masterarbeit_Code/KITTI_processing/Data_Augmentation 52 | 53 | # The top-level build directory on which CMake was run. 54 | CMAKE_BINARY_DIR = /home/shao/文档/VSCodeWS/Masterarbeit_Code/KITTI_processing/Data_Augmentation/build 55 | 56 | #============================================================================= 57 | # Targets provided globally by CMake. 58 | 59 | # Special rule for the target edit_cache 60 | edit_cache: 61 | @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "No interactive CMake dialog available..." 62 | /usr/bin/cmake -E echo No\ interactive\ CMake\ dialog\ available. 63 | .PHONY : edit_cache 64 | 65 | # Special rule for the target edit_cache 66 | edit_cache/fast: edit_cache 67 | 68 | .PHONY : edit_cache/fast 69 | 70 | # Special rule for the target rebuild_cache 71 | rebuild_cache: 72 | @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Running CMake to regenerate build system..." 
73 | /usr/bin/cmake -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) 74 | .PHONY : rebuild_cache 75 | 76 | # Special rule for the target rebuild_cache 77 | rebuild_cache/fast: rebuild_cache 78 | 79 | .PHONY : rebuild_cache/fast 80 | 81 | # The main all target 82 | all: cmake_check_build_system 83 | $(CMAKE_COMMAND) -E cmake_progress_start /home/shao/文档/VSCodeWS/Masterarbeit_Code/KITTI_processing/Data_Augmentation/build/CMakeFiles /home/shao/文档/VSCodeWS/Masterarbeit_Code/KITTI_processing/Data_Augmentation/build/CMakeFiles/progress.marks 84 | $(MAKE) -f CMakeFiles/Makefile2 all 85 | $(CMAKE_COMMAND) -E cmake_progress_start /home/shao/文档/VSCodeWS/Masterarbeit_Code/KITTI_processing/Data_Augmentation/build/CMakeFiles 0 86 | .PHONY : all 87 | 88 | # The main clean target 89 | clean: 90 | $(MAKE) -f CMakeFiles/Makefile2 clean 91 | .PHONY : clean 92 | 93 | # The main clean target 94 | clean/fast: clean 95 | 96 | .PHONY : clean/fast 97 | 98 | # Prepare targets for installation. 99 | preinstall: all 100 | $(MAKE) -f CMakeFiles/Makefile2 preinstall 101 | .PHONY : preinstall 102 | 103 | # Prepare targets for installation. 104 | preinstall/fast: 105 | $(MAKE) -f CMakeFiles/Makefile2 preinstall 106 | .PHONY : preinstall/fast 107 | 108 | # clear depends 109 | depend: 110 | $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 1 111 | .PHONY : depend 112 | 113 | #============================================================================= 114 | # Target rules for targets named data_aug 115 | 116 | # Build rule for target. 117 | data_aug: cmake_check_build_system 118 | $(MAKE) -f CMakeFiles/Makefile2 data_aug 119 | .PHONY : data_aug 120 | 121 | # fast build rule for target. 
122 | data_aug/fast: 123 | $(MAKE) -f CMakeFiles/data_aug.dir/build.make CMakeFiles/data_aug.dir/build 124 | .PHONY : data_aug/fast 125 | 126 | data_aug.o: data_aug.c.o 127 | 128 | .PHONY : data_aug.o 129 | 130 | # target to build an object file 131 | data_aug.c.o: 132 | $(MAKE) -f CMakeFiles/data_aug.dir/build.make CMakeFiles/data_aug.dir/data_aug.c.o 133 | .PHONY : data_aug.c.o 134 | 135 | data_aug.i: data_aug.c.i 136 | 137 | .PHONY : data_aug.i 138 | 139 | # target to preprocess a source file 140 | data_aug.c.i: 141 | $(MAKE) -f CMakeFiles/data_aug.dir/build.make CMakeFiles/data_aug.dir/data_aug.c.i 142 | .PHONY : data_aug.c.i 143 | 144 | data_aug.s: data_aug.c.s 145 | 146 | .PHONY : data_aug.s 147 | 148 | # target to generate assembly for a file 149 | data_aug.c.s: 150 | $(MAKE) -f CMakeFiles/data_aug.dir/build.make CMakeFiles/data_aug.dir/data_aug.c.s 151 | .PHONY : data_aug.c.s 152 | 153 | data_augmentation.o: data_augmentation.cpp.o 154 | 155 | .PHONY : data_augmentation.o 156 | 157 | # target to build an object file 158 | data_augmentation.cpp.o: 159 | $(MAKE) -f CMakeFiles/data_aug.dir/build.make CMakeFiles/data_aug.dir/data_augmentation.cpp.o 160 | .PHONY : data_augmentation.cpp.o 161 | 162 | data_augmentation.i: data_augmentation.cpp.i 163 | 164 | .PHONY : data_augmentation.i 165 | 166 | # target to preprocess a source file 167 | data_augmentation.cpp.i: 168 | $(MAKE) -f CMakeFiles/data_aug.dir/build.make CMakeFiles/data_aug.dir/data_augmentation.cpp.i 169 | .PHONY : data_augmentation.cpp.i 170 | 171 | data_augmentation.s: data_augmentation.cpp.s 172 | 173 | .PHONY : data_augmentation.s 174 | 175 | # target to generate assembly for a file 176 | data_augmentation.cpp.s: 177 | $(MAKE) -f CMakeFiles/data_aug.dir/build.make CMakeFiles/data_aug.dir/data_augmentation.cpp.s 178 | .PHONY : data_augmentation.cpp.s 179 | 180 | # Help Target 181 | help: 182 | @echo "The following are some of the valid targets for this Makefile:" 183 | @echo "... 
all (the default if no target is provided)" 184 | @echo "... clean" 185 | @echo "... depend" 186 | @echo "... edit_cache" 187 | @echo "... rebuild_cache" 188 | @echo "... data_aug" 189 | @echo "... data_aug.o" 190 | @echo "... data_aug.i" 191 | @echo "... data_aug.s" 192 | @echo "... data_augmentation.o" 193 | @echo "... data_augmentation.i" 194 | @echo "... data_augmentation.s" 195 | .PHONY : help 196 | 197 | 198 | 199 | #============================================================================= 200 | # Special targets to cleanup operation of make. 201 | 202 | # Special rule to run CMake to check the build system integrity. 203 | # No rule that depends on this can have commands that come from listfiles 204 | # because they might be regenerated. 205 | cmake_check_build_system: 206 | $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 0 207 | .PHONY : cmake_check_build_system 208 | -------------------------------------------------------------------------------- /Data_Preprocessing/Data_Augmentation/build/data_aug: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shaozhenghan/PointCloud_Classification_using_ML/94abb10744a6090f941d32d972de01d53521b324/Data_Preprocessing/Data_Augmentation/build/data_aug -------------------------------------------------------------------------------- /Data_Preprocessing/Data_Augmentation/data_aug.h: -------------------------------------------------------------------------------- 1 | /* Generated by Cython 0.28.3 */ 2 | 3 | #ifndef __PYX_HAVE__data_aug 4 | #define __PYX_HAVE__data_aug 5 | 6 | 7 | #ifndef __PYX_HAVE_API__data_aug 8 | 9 | #ifndef __PYX_EXTERN_C 10 | #ifdef __cplusplus 11 | #define __PYX_EXTERN_C extern "C" 12 | #else 13 | #define __PYX_EXTERN_C extern 14 | #endif 15 | #endif 16 | 17 | #ifndef DL_IMPORT 18 | #define DL_IMPORT(_T) _T 19 | #endif 20 | 21 | __PYX_EXTERN_C PyObject *augment_data(PyObject 
*, PyObject *, PyObject *, PyObject *); 22 | 23 | #endif /* !__PYX_HAVE_API__data_aug */ 24 | 25 | /* WARNING: the interface of the module init function changed in CPython 3.5. */ 26 | /* It now returns a PyModuleDef instance instead of a PyModule instance. */ 27 | 28 | #if PY_MAJOR_VERSION < 3 29 | PyMODINIT_FUNC initdata_aug(void); 30 | #else 31 | PyMODINIT_FUNC PyInit_data_aug(void); 32 | #endif 33 | 34 | #endif /* !__PYX_HAVE__data_aug */ 35 | -------------------------------------------------------------------------------- /Data_Preprocessing/Data_Augmentation/data_aug.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ####################################### 3 | ########## Data Augmentation ########## 4 | ####################################### 5 | 6 | import numpy as np 7 | 8 | ########### 9 | # 绕Z轴旋转 # 10 | ########### 11 | # point: vector(1*3:x,y,z) 12 | # rotation_angle: scaler 0~2*pi 13 | def rotate_point (point, rotation_angle): 14 | point = np.array(point) 15 | cos_theta = np.cos(rotation_angle) 16 | sin_theta = np.sin(rotation_angle) 17 | rotation_matrix = np.array([[cos_theta, sin_theta, 0], 18 | [-sin_theta, cos_theta, 0], 19 | [0, 0, 1]]) 20 | rotated_point = np.dot(point.reshape(-1, 3), rotation_matrix) 21 | return rotated_point 22 | 23 | # point = np.array([1,2,3]) 24 | # rotated_point = rotate_point(point, 0.1*np.pi) 25 | # print rotated_point 26 | 27 | 28 | ########### 29 | # 在XYZ上加高斯噪声 # 30 | ########### 31 | def jitter_point(point, sigma=0.01, clip=0.05): 32 | assert(clip > 0) 33 | point = np.array(point) 34 | point = point.reshape(-1,3) 35 | Row, Col = point.shape 36 | jittered_point = np.clip(sigma * np.random.randn(Row, Col), -1*clip, clip) 37 | jittered_point += point 38 | return jittered_point 39 | 40 | 41 | # jittered_point = jitter_point(point) 42 | # print jittered_point 43 | 44 | 45 | ########### 46 | # Data Augmentation # 47 | ########### 48 | def augment_data(point, 
rotation_angle, sigma, clip): 49 | return jitter_point(rotate_point(point, rotation_angle), sigma, clip) 50 | 51 | 52 | point = [0,1,3] 53 | print augment_data(point, 3.14, 0.01, 0.05) 54 | 55 | -------------------------------------------------------------------------------- /Data_Preprocessing/Data_Augmentation/data_aug.pyx: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ####################################### 3 | ########## Data Augmentation ########## 4 | ####################################### 5 | 6 | import numpy as np 7 | 8 | 9 | ########### 10 | # 绕Z轴旋转 # 11 | ########### 12 | # point: vector(1*3) 13 | # rotation_angle: scaler 0~2*pi 14 | def rotate_point (point, rotation_angle): 15 | point = np.array(point) 16 | cos_theta = np.cos(rotation_angle) 17 | sin_theta = np.sin(rotation_angle) 18 | rotation_matrix = np.array([[cos_theta, sin_theta, 0], 19 | [-sin_theta, cos_theta, 0], 20 | [0, 0, 1]]) 21 | rotated_point = np.dot(point.reshape(-1, 3), rotation_matrix) 22 | return rotated_point 23 | 24 | 25 | ################### 26 | # 在XYZ上加高斯噪声 # 27 | ################## 28 | def jitter_point(point, sigma=0.01, clip=0.05): 29 | assert(clip > 0) 30 | point = np.array(point) 31 | point = point.reshape(-1,3) 32 | Row, Col = point.shape 33 | jittered_point = np.clip(sigma * np.random.randn(Row, Col), -1*clip, clip) 34 | jittered_point += point 35 | return jittered_point 36 | 37 | 38 | ##################### 39 | # Data Augmentation # 40 | ##################### 41 | cdef public augment_data(point, rotation_angle, sigma, clip): 42 | return jitter_point(rotate_point(point, rotation_angle), sigma, clip).tolist() 43 | -------------------------------------------------------------------------------- /Data_Preprocessing/Data_Augmentation/data_augmentation.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | ***************************************************** 3 | 
************ data augmentation for pointcloud ******* 4 | ***************************************************** 5 | */ 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include "data_aug.h" 12 | 13 | typedef pcl::PointXYZI PointT; 14 | 15 | int 16 | main(int argc, char** argv) 17 | { 18 | pcl::PCDReader reader; 19 | pcl::PCDWriter writer; 20 | 21 | // argv[1]: one of {"car", "cyclist", "pedestrian", "truck", "van"} 22 | // number of the pcd-files in every folder("car", "cyclist", "pedestrian", "truck", "van") 23 | unsigned int num_files = 0; 24 | unsigned int augment_multiple = 1; 25 | float sigma_ = 0.0; 26 | float clip_ = 0.0; 27 | if(strcmp(argv[1], "car") == 0) 28 | { 29 | // num_files = 128; 30 | num_files = 56; 31 | augment_multiple = 6; 32 | sigma_ = 0.04; 33 | clip_ = 0.06; 34 | } 35 | 36 | else if (strcmp(argv[1], "cyclist") == 0) 37 | { 38 | // num_files = 28; 39 | num_files = 17; 40 | augment_multiple = 12; 41 | sigma_ = 0.02; 42 | clip_ = 0.05; 43 | } 44 | else if (strcmp(argv[1], "pedestrian") == 0) 45 | { 46 | // num_files = 81; 47 | num_files = 29; 48 | augment_multiple = 6; 49 | sigma_ = 0.01; 50 | clip_ = 0.05; 51 | } 52 | else if (strcmp(argv[1], "truck") == 0) 53 | { 54 | // num_files = 19; 55 | num_files = 9; 56 | augment_multiple = 20; 57 | sigma_ = 0.05; 58 | clip_ = 0.1; 59 | } 60 | else if (strcmp(argv[1], "van") == 0) 61 | { 62 | // num_files = 46; 63 | num_files = 23; 64 | augment_multiple = 12; 65 | sigma_ = 0.04; 66 | clip_ = 0.06; 67 | } 68 | else 69 | { 70 | // Exception 71 | // code here 72 | std::cerr << "Invalid input for argv[1]!" 
<< std::endl; 73 | return(1); 74 | } 75 | 76 | std::string read_pcd_file_base_path = "/media/shao/TOSHIBA EXT/data_object_velodyne/Daten/test/test_data_original/"; 77 | // pointer for the cloud in original pcd-file 78 | pcl::PointCloud::Ptr cloud (new pcl::PointCloud); 79 | // initialize Python module 80 | Py_Initialize(); 81 | initdata_aug(); 82 | // for-loop to augment every original pcd-file in the folder 83 | for(unsigned int file_index = 0; file_index < num_files; ++file_index) 84 | { 85 | // path of the file to be read 86 | std::stringstream read_pcd_file_path; 87 | read_pcd_file_path << read_pcd_file_base_path 88 | << argv[1] 89 | << "/" 90 | << argv[1] 91 | << file_index 92 | << ".pcd"; 93 | // read the pcd-file 94 | reader.read (read_pcd_file_path.str(), *cloud); 95 | // print the size of cloud 96 | unsigned int point_size = cloud->points.size(); 97 | std::cout << "PointCloud in " 98 | << argv[1] 99 | << file_index 100 | << ".pcd " 101 | <<"has: " 102 | << point_size 103 | << " data points." 104 | << std::endl; 105 | 106 | // for-loop for the Data Augmentation 107 | // every original pcd-file is augmented in [augment_multiple] new files. 
108 | static unsigned int augment_file_index = num_files; 109 | for(unsigned int i = 1; i < augment_multiple; ++i, ++augment_file_index) 110 | { 111 | // randomly rotation with angle 112 | PyObject *angle = Py_BuildValue("f", 6.28*i/augment_multiple); // theta = 2*pi*i/10, i = 1,2,...,9 113 | // jitter the data with sigma, clip 114 | PyObject *sigma = Py_BuildValue("f", sigma_); 115 | PyObject *clip = Py_BuildValue("f", clip_); 116 | // augmented new cloud 117 | pcl::PointCloud::Ptr augmented_cloud (new pcl::PointCloud); 118 | // augmentation for every point in original pcd-file 119 | for(unsigned int point_index = 0; point_index < point_size; ++point_index) 120 | { 121 | //std::cout << point_index << std::endl; // debug 122 | float x = cloud->points[point_index].x; 123 | float y = cloud->points[point_index].y; 124 | float z = cloud->points[point_index].z; 125 | PyObject *point = Py_BuildValue("[f,f,f]", x, y, z); 126 | 127 | PyObject *augmented_point = augment_data(point, angle, sigma, clip); 128 | // assert(PyList_Check(augmented_point)); 129 | 130 | PyObject *pValue = PyList_GetItem(augmented_point, 0); 131 | PyObject *pValue_0 = PyList_GET_ITEM(pValue, 0); 132 | PyObject *pValue_1 = PyList_GET_ITEM(pValue, 1); 133 | PyObject *pValue_2 = PyList_GET_ITEM(pValue, 2); 134 | 135 | // augmented new point to be push back to the augmented_cloud. 136 | PointT point_new; 137 | // float x_a = PyFloat_AsDouble(pValue_0); 138 | // float y_a = PyFloat_AsDouble(pValue_1); 139 | // float z_a = PyFloat_AsDouble(pValue_2); 140 | point_new.x = PyFloat_AsDouble(pValue_0); 141 | point_new.y = PyFloat_AsDouble(pValue_1); 142 | point_new.z = PyFloat_AsDouble(pValue_2); 143 | point_new.intensity = cloud->points[point_index].intensity; 144 | augmented_cloud->points.push_back(point_new); 145 | } 146 | // set the height and width for the augmented cloud. Important!! 
147 | augmented_cloud->height = 1; 148 | augmented_cloud->width = augmented_cloud->points.size(); 149 | // path of the file to be written 150 | std::string write_pcd_file_base_path = "/media/shao/TOSHIBA EXT/data_object_velodyne/Daten/test/test_data_augmented/"; 151 | std::stringstream write_pcd_file_path; 152 | write_pcd_file_path << write_pcd_file_base_path 153 | << argv[1] 154 | << "/" 155 | << argv[1] 156 | << augment_file_index 157 | << ".pcd"; 158 | // write the file 159 | writer.write (write_pcd_file_path.str(), *augmented_cloud, false); 160 | //std::cout << "bis hier" << i << std::endl; // debug 161 | } 162 | } 163 | // finalize the python module 164 | Py_Finalize(); 165 | return(0); 166 | } -------------------------------------------------------------------------------- /Data_Preprocessing/Data_Augmentation/test_data_aug.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include "data_aug.h" 3 | #include 4 | 5 | int main(int argc, char const *argv[]) 6 | { 7 | PyObject *point; 8 | PyObject *angle; 9 | PyObject *sigma; 10 | PyObject *clip; 11 | PyObject *augmented_point; 12 | 13 | Py_Initialize(); 14 | initdata_aug(); 15 | // 浮点形数据必须写为1.0, 2.0 这样的,否则Py_BuildValue()精度损失导致严重错误 16 | point = Py_BuildValue("[f,f,f]", 1.0, 2.0, 3.0); 17 | angle = Py_BuildValue("f", 3.14); 18 | sigma = Py_BuildValue("f", 0.01); 19 | clip = Py_BuildValue("f", 0.05); 20 | augmented_point = augment_data(point, angle, sigma, clip); 21 | 22 | float x=0.0, y=0.0, z=0.0; 23 | PyObject *pValue = PyList_GetItem(augmented_point, 0); 24 | PyObject *pValue_0 = PyList_GET_ITEM(pValue, 0); 25 | PyObject *pValue_1 = PyList_GET_ITEM(pValue, 1); 26 | PyObject *pValue_2 = PyList_GET_ITEM(pValue, 2); 27 | 28 | x = PyFloat_AsDouble(pValue_0); 29 | y = PyFloat_AsDouble(pValue_1); 30 | z = PyFloat_AsDouble(pValue_2); 31 | 32 | std::cout << PyList_Size(pValue) << std::endl; 33 | std::cout << x << std::endl << y << std::endl << z << std::endl; 34 | 35 
| Py_Finalize(); 36 | return 0; 37 | } 38 | -------------------------------------------------------------------------------- /Data_Preprocessing/KITTI2PCD/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8 FATAL_ERROR) 2 | 3 | project(kitti2pcd) 4 | 5 | find_package(PCL 1.8 REQUIRED) 6 | 7 | add_compile_options(-std=c++11) 8 | 9 | include_directories(${PCL_INCLUDE_DIRS}) 10 | link_directories(${PCL_LIBRARY_DIRS}) 11 | add_definitions(${PCL_DEFINITIONS}) 12 | 13 | add_executable (kitti2pcd kitti2pcd.cpp) 14 | target_link_libraries (kitti2pcd ${PCL_LIBRARIES}) 15 | -------------------------------------------------------------------------------- /Data_Preprocessing/KITTI2PCD/build/Makefile: -------------------------------------------------------------------------------- 1 | # CMAKE generated file: DO NOT EDIT! 2 | # Generated by "Unix Makefiles" Generator, CMake Version 3.5 3 | 4 | # Default target executed when no arguments are given to make. 5 | default_target: all 6 | 7 | .PHONY : default_target 8 | 9 | # Allow only one "make -f Makefile2" at a time, but pass parallelism. 10 | .NOTPARALLEL: 11 | 12 | 13 | #============================================================================= 14 | # Special targets provided by cmake. 15 | 16 | # Disable implicit rules so canonical targets will work. 17 | .SUFFIXES: 18 | 19 | 20 | # Remove some rules from gmake that .SUFFIXES does not remove. 21 | SUFFIXES = 22 | 23 | .SUFFIXES: .hpux_make_needs_suffix_list 24 | 25 | 26 | # Suppress display of executed commands. 27 | $(VERBOSE).SILENT: 28 | 29 | 30 | # A target that is always out of date. 31 | cmake_force: 32 | 33 | .PHONY : cmake_force 34 | 35 | #============================================================================= 36 | # Set environment variables for the build. 37 | 38 | # The shell in which to execute make rules. 39 | SHELL = /bin/sh 40 | 41 | # The CMake executable. 
42 | CMAKE_COMMAND = /usr/bin/cmake 43 | 44 | # The command to remove a file. 45 | RM = /usr/bin/cmake -E remove -f 46 | 47 | # Escaping for special characters. 48 | EQUALS = = 49 | 50 | # The top-level source directory on which CMake was run. 51 | CMAKE_SOURCE_DIR = /home/shao/文档/VSCodeWS/Masterarbeit_Code/KITTI_processing/KITTI2PCD 52 | 53 | # The top-level build directory on which CMake was run. 54 | CMAKE_BINARY_DIR = /home/shao/文档/VSCodeWS/Masterarbeit_Code/KITTI_processing/KITTI2PCD/build 55 | 56 | #============================================================================= 57 | # Targets provided globally by CMake. 58 | 59 | # Special rule for the target edit_cache 60 | edit_cache: 61 | @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "No interactive CMake dialog available..." 62 | /usr/bin/cmake -E echo No\ interactive\ CMake\ dialog\ available. 63 | .PHONY : edit_cache 64 | 65 | # Special rule for the target edit_cache 66 | edit_cache/fast: edit_cache 67 | 68 | .PHONY : edit_cache/fast 69 | 70 | # Special rule for the target rebuild_cache 71 | rebuild_cache: 72 | @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Running CMake to regenerate build system..." 
73 | /usr/bin/cmake -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) 74 | .PHONY : rebuild_cache 75 | 76 | # Special rule for the target rebuild_cache 77 | rebuild_cache/fast: rebuild_cache 78 | 79 | .PHONY : rebuild_cache/fast 80 | 81 | # The main all target 82 | all: cmake_check_build_system 83 | $(CMAKE_COMMAND) -E cmake_progress_start /home/shao/文档/VSCodeWS/Masterarbeit_Code/KITTI_processing/KITTI2PCD/build/CMakeFiles /home/shao/文档/VSCodeWS/Masterarbeit_Code/KITTI_processing/KITTI2PCD/build/CMakeFiles/progress.marks 84 | $(MAKE) -f CMakeFiles/Makefile2 all 85 | $(CMAKE_COMMAND) -E cmake_progress_start /home/shao/文档/VSCodeWS/Masterarbeit_Code/KITTI_processing/KITTI2PCD/build/CMakeFiles 0 86 | .PHONY : all 87 | 88 | # The main clean target 89 | clean: 90 | $(MAKE) -f CMakeFiles/Makefile2 clean 91 | .PHONY : clean 92 | 93 | # The main clean target 94 | clean/fast: clean 95 | 96 | .PHONY : clean/fast 97 | 98 | # Prepare targets for installation. 99 | preinstall: all 100 | $(MAKE) -f CMakeFiles/Makefile2 preinstall 101 | .PHONY : preinstall 102 | 103 | # Prepare targets for installation. 104 | preinstall/fast: 105 | $(MAKE) -f CMakeFiles/Makefile2 preinstall 106 | .PHONY : preinstall/fast 107 | 108 | # clear depends 109 | depend: 110 | $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 1 111 | .PHONY : depend 112 | 113 | #============================================================================= 114 | # Target rules for targets named kitti2pcd 115 | 116 | # Build rule for target. 117 | kitti2pcd: cmake_check_build_system 118 | $(MAKE) -f CMakeFiles/Makefile2 kitti2pcd 119 | .PHONY : kitti2pcd 120 | 121 | # fast build rule for target. 
122 | kitti2pcd/fast: 123 | $(MAKE) -f CMakeFiles/kitti2pcd.dir/build.make CMakeFiles/kitti2pcd.dir/build 124 | .PHONY : kitti2pcd/fast 125 | 126 | kitti2pcd.o: kitti2pcd.cpp.o 127 | 128 | .PHONY : kitti2pcd.o 129 | 130 | # target to build an object file 131 | kitti2pcd.cpp.o: 132 | $(MAKE) -f CMakeFiles/kitti2pcd.dir/build.make CMakeFiles/kitti2pcd.dir/kitti2pcd.cpp.o 133 | .PHONY : kitti2pcd.cpp.o 134 | 135 | kitti2pcd.i: kitti2pcd.cpp.i 136 | 137 | .PHONY : kitti2pcd.i 138 | 139 | # target to preprocess a source file 140 | kitti2pcd.cpp.i: 141 | $(MAKE) -f CMakeFiles/kitti2pcd.dir/build.make CMakeFiles/kitti2pcd.dir/kitti2pcd.cpp.i 142 | .PHONY : kitti2pcd.cpp.i 143 | 144 | kitti2pcd.s: kitti2pcd.cpp.s 145 | 146 | .PHONY : kitti2pcd.s 147 | 148 | # target to generate assembly for a file 149 | kitti2pcd.cpp.s: 150 | $(MAKE) -f CMakeFiles/kitti2pcd.dir/build.make CMakeFiles/kitti2pcd.dir/kitti2pcd.cpp.s 151 | .PHONY : kitti2pcd.cpp.s 152 | 153 | # Help Target 154 | help: 155 | @echo "The following are some of the valid targets for this Makefile:" 156 | @echo "... all (the default if no target is provided)" 157 | @echo "... clean" 158 | @echo "... depend" 159 | @echo "... edit_cache" 160 | @echo "... rebuild_cache" 161 | @echo "... kitti2pcd" 162 | @echo "... kitti2pcd.o" 163 | @echo "... kitti2pcd.i" 164 | @echo "... kitti2pcd.s" 165 | .PHONY : help 166 | 167 | 168 | 169 | #============================================================================= 170 | # Special targets to cleanup operation of make. 171 | 172 | # Special rule to run CMake to check the build system integrity. 173 | # No rule that depends on this can have commands that come from listfiles 174 | # because they might be regenerated. 
175 | cmake_check_build_system: 176 | $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 0 177 | .PHONY : cmake_check_build_system 178 | -------------------------------------------------------------------------------- /Data_Preprocessing/KITTI2PCD/build/kitti2pcd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shaozhenghan/PointCloud_Classification_using_ML/94abb10744a6090f941d32d972de01d53521b324/Data_Preprocessing/KITTI2PCD/build/kitti2pcd -------------------------------------------------------------------------------- /Data_Preprocessing/KITTI2PCD/kitti2pcd.cpp: -------------------------------------------------------------------------------- 1 | // 2 | // Created by zzy on 3/14/18. 3 | // 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | // #include "ros/ros.h" 10 | // #include "fcn_data_gen/ground_remove.h" 11 | 12 | // static ros::Publisher g_cloud_pub; 13 | static std::vector file_lists; 14 | 15 | void read_filelists(const std::string& dir_path,std::vector& out_filelsits,std::string type) 16 | { 17 | struct dirent *ptr; 18 | DIR *dir; 19 | dir = opendir(dir_path.c_str()); 20 | out_filelsits.clear(); 21 | while ((ptr = readdir(dir)) != NULL){ 22 | std::string tmp_file = ptr->d_name; 23 | if (tmp_file[0] == '.')continue; 24 | if (type.size() <= 0){ 25 | out_filelsits.push_back(ptr->d_name); 26 | }else{ 27 | if (tmp_file.size() < type.size())continue; 28 | std::string tmp_cut_type = tmp_file.substr(tmp_file.size() - type.size(),type.size()); 29 | if (tmp_cut_type == type){ 30 | out_filelsits.push_back(ptr->d_name); 31 | } 32 | } 33 | } 34 | } 35 | 36 | bool computePairNum(std::string pair1,std::string pair2) 37 | { 38 | return pair1 < pair2; 39 | } 40 | 41 | void sort_filelists(std::vector& filists,std::string type) 42 | { 43 | if (filists.empty())return; 44 | 45 | std::sort(filists.begin(),filists.end(),computePairNum); 
46 | } 47 | 48 | void readKittiPclBinData(std::string &in_file, std::string& out_file) 49 | { 50 | // load point cloud 51 | std::fstream input(in_file.c_str(), std::ios::in | std::ios::binary); 52 | if(!input.good()){ 53 | std::cerr << "Could not read file: " << in_file << std::endl; 54 | exit(EXIT_FAILURE); 55 | } 56 | input.seekg(0, std::ios::beg); 57 | 58 | pcl::PointCloud::Ptr points (new pcl::PointCloud); 59 | 60 | int i; 61 | for (i=0; input.good() && !input.eof(); i++) { 62 | pcl::PointXYZI point; 63 | input.read((char *) &point.x, 3*sizeof(float)); 64 | input.read((char *) &point.intensity, sizeof(float)); 65 | points->push_back(point); 66 | } 67 | input.close(); 68 | // g_cloud_pub.publish( points ); 69 | 70 | std::cout << "Read KTTI point cloud with " << i << " points, writing to " << out_file << std::endl; 71 | pcl::PCDWriter writer; 72 | 73 | // Save DoN features 74 | writer.write< pcl::PointXYZI > (out_file, *points, false); 75 | } 76 | 77 | 78 | int main(int argc, char **argv) 79 | { 80 | // ros::init(argc, argv, "ground_remove_test"); 81 | // ros::NodeHandle n; 82 | // g_cloud_pub = n.advertise< pcl::PointCloud< pcl::PointXYZI > > ("point_chatter", 1); 83 | 84 | // std::string bin_path = "../velodyne/binary/"; 85 | std::string bin_path = "/media/shao/TOSHIBA EXT/KITTI_kit/2011_09_26_drive_0005_sync/2011_09_26/2011_09_26_drive_0005_sync/velodyne_points/data/"; 86 | // std::string pcd_path = "../velodyne/pcd/"; 87 | std::string pcd_path = "/media/shao/TOSHIBA EXT/KITTI_kit/2011_09_26_drive_0005_sync/2011_09_26/2011_09_26_drive_0005_sync/velodyne_points/data_pcd/"; 88 | read_filelists( bin_path, file_lists, "bin" ); 89 | sort_filelists( file_lists, "bin" ); 90 | for (int i = 0; i < file_lists.size(); ++i) 91 | { 92 | std::string bin_file = bin_path + file_lists[i]; 93 | std::string tmp_str = file_lists[i].substr(0, file_lists[i].length() - 4) + ".pcd"; 94 | std::string pcd_file = pcd_path + tmp_str; 95 | readKittiPclBinData( bin_file, pcd_file ); 96 | } 
97 | // std::string bin_file = "/media/shao/TOSHIBA EXT/data_object_velodyne/training/velodyne/000011.bin"; 98 | // std::string pcd_file = "/media/shao/TOSHIBA EXT/data_object_velodyne/training/velodyne_pcd/velodyne000011.pcd"; 99 | // readKittiPclBinData(bin_file, pcd_file); 100 | return 0; 101 | } -------------------------------------------------------------------------------- /Data_Preprocessing/Rename/rename.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import os 3 | 4 | def rename(): 5 | i=0 6 | # 下面两行根据需要改 7 | object_class = "van" 8 | path="/media/shao/TOSHIBA EXT/data_object_velodyne/testing/test_data_original_1/van" 9 | 10 | # 该文件夹下所有的文件(包括文件夹) 11 | filelist = os.listdir(path) 12 | # print(filelist) 13 | # 遍历所有文件 14 | for files in filelist: 15 | if object_class in filelist[0]: 16 | break 17 | # 原来的文件路径 18 | Olddir=os.path.join(path,files) 19 | # 如果是文件夹则跳过 20 | if os.path.isdir(Olddir): 21 | continue 22 | # 旧文件名 23 | filename=os.path.splitext(files)[0] 24 | # 文件扩展名,需要保留 25 | filetype=os.path.splitext(files)[1] 26 | # 新的文件路径; object_class+str(i)+filetype 为新文件名 27 | Newdir=os.path.join(path, object_class+str(i)+filetype) 28 | # 重命名 29 | os.rename(Olddir,Newdir) 30 | i=i+1 31 | 32 | rename() 33 | -------------------------------------------------------------------------------- /Feature_Engineering/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8 FATAL_ERROR) 2 | 3 | project(feature) 4 | 5 | find_package(PCL 1.8 REQUIRED) 6 | 7 | add_compile_options(-std=c++11) 8 | 9 | #set(CMAKE_VERBOSE_MAKEFILE on) 10 | 11 | #message(${PROJECT_SOURCE_DIR}) 12 | 13 | add_subdirectory(src) 14 | 15 | add_subdirectory(lib) 16 | -------------------------------------------------------------------------------- /Feature_Engineering/README.md: -------------------------------------------------------------------------------- 1 | # 
PointCloud_FeatureEngineering 2 | Feature Engineering of 3D Lidar Point Cloud for Machine Learning 3 | -------------------------------------------------------------------------------- /Feature_Engineering/build/bin/car2.pcd: -------------------------------------------------------------------------------- 1 | # .PCD v0.7 - Point Cloud Data file format 2 | VERSION 0.7 3 | FIELDS x y z intensity 4 | SIZE 4 4 4 4 5 | TYPE F F F F 6 | COUNT 1 1 1 1 7 | WIDTH 349 8 | HEIGHT 1 9 | VIEWPOINT 0 0 0 1 0 0 0 10 | POINTS 349 11 | DATA ascii 12 | 14.27 -1.332 -0.169 0.25999999 13 | 14.24 -1.307 -0.168 0.25 14 | 14.216 -1.26 -0.167 0.25 15 | 14.214 -1.215 -0.167 0.23999999 16 | 14.198 -1.169 -0.167 0.23999999 17 | 14.185 -1.123 -0.16599999 0.1 18 | 14.185 -1.0779999 -0.16599999 0.16 19 | 14.18 -1.033 -0.16599999 0.1 20 | 14.196 -1.011 -0.16599999 0.16 21 | 14.201 -0.96700001 -0.16599999 0.16 22 | 14.214 -0.92299998 -0.167 0.16 23 | 14.231 -0.87900001 -0.167 0.16 24 | 14.265 -0.83600003 -0.168 0.16 25 | 14.3 -0.79299998 -0.169 0.16 26 | 14.331 -0.77200001 -0.17 0.17 27 | 14.351 -0.72799999 -0.17 0.16 28 | 13.486 -1.507 -0.23 0 29 | 13.494 -1.465 -0.23 0 30 | 13.473 -1.442 -0.229 0 31 | 13.461 -1.398 -0.229 0 32 | 13.454 -1.354 -0.228 0 33 | 13.432 -1.309 -0.228 0 34 | 13.43 -1.267 -0.227 0 35 | 13.446 -1.225 -0.228 0 36 | 13.44 -1.204 -0.228 0 37 | 13.43 -1.16 -0.227 0 38 | 13.427 -1.117 -0.227 0 39 | 13.435 -1.076 -0.227 0 40 | 13.438 -1.033 -0.227 0 41 | 13.419 -0.98900002 -0.226 0 42 | 13.434 -0.94800001 -0.227 0 43 | 13.436 -0.92699999 -0.227 0 44 | 13.443 -0.88499999 -0.227 0 45 | 13.445 -0.84299999 -0.227 0 46 | 13.458 -0.801 -0.227 0 47 | 13.456 -0.759 -0.227 0 48 | 13.485 -0.71799999 -0.228 0 49 | 13.473 -0.67500001 -0.227 0 50 | 13.495 -0.63300002 -0.228 0 51 | 13.484 -0.61199999 -0.228 0 52 | 13.512 -0.56999999 -0.228 0 53 | 13.531 -0.52899998 -0.229 0 54 | 13.551 -0.48699999 -0.23 0 55 | 13.58 -0.44499999 -0.23100001 0 56 | 13.59 -0.403 -0.23100001 0 57 | 13.625 -0.361 
-0.23199999 0 58 | 13.585 -0.33899999 -0.23100001 0.07 59 | 13.808 -0.301 -0.23800001 0 60 | 13.285 -1.575 -0.31200001 0 61 | 13.345 -1.539 -0.31400001 0.07 62 | 13.36 -1.499 -0.31400001 0.22 63 | 13.345 -1.454 -0.31400001 0.25 64 | 13.337 -1.432 -0.31299999 0.25999999 65 | 13.322 -1.388 -0.31200001 0.11 66 | 13.322 -1.346 -0.31200001 0.16 67 | 13.324 -1.304 -0.31200001 0.12 68 | 13.326 -1.262 -0.31200001 0.16 69 | 13.322 -1.22 -0.31200001 0.27000001 70 | 13.304 -1.176 -0.31099999 0.11 71 | 13.311 -1.1339999 -0.31099999 0 72 | 13.273 -1.11 -0.31 0 73 | 13.277 -1.068 -0.31 0 74 | 13.29 -1.027 -0.31 0 75 | 13.279 -0.98500001 -0.30899999 0 76 | 13.292 -0.94400001 -0.31 0 77 | 13.277 -0.90100002 -0.30899999 0 78 | 13.294 -0.86000001 -0.31 0 79 | 13.287 -0.83899999 -0.30899999 0 80 | 13.282 -0.796 -0.30899999 0 81 | 13.298 -0.755 -0.31 0 82 | 13.315 -0.71399999 -0.31 0.14 83 | 13.327 -0.67299998 -0.31 0.11 84 | 13.333 -0.63099998 -0.31099999 0 85 | 13.337 -0.58899999 -0.31099999 0 86 | 13.348 -0.56900001 -0.31099999 0 87 | 13.357 -0.52700001 -0.31099999 0 88 | 13.337 -0.48500001 -0.31099999 0 89 | 13.372 -0.44400001 -0.31200001 0 90 | 13.366 -0.40099999 -0.31200001 0.090000004 91 | 13.385 -0.36000001 -0.31200001 0.18000001 92 | 13.406 -0.31799999 -0.31299999 0.25999999 93 | 13.287 -0.29499999 -0.308 0 94 | 13.529 -0.257 -0.31799999 0 95 | 13.14 -1.601 -0.38499999 0 96 | 13.158 -1.5829999 -0.38600001 0 97 | 13.201 -1.546 -0.38800001 0 98 | 13.825 -1.576 -0.41499999 0 99 | 13.317 -1.053 -0.391 0 100 | 13.301 -1.0089999 -0.38999999 0 101 | 13.304 -0.96700001 -0.38999999 0 102 | 13.301 -0.92500001 -0.389 0 103 | 13.318 -0.884 -0.38999999 0 104 | 13.354 -0.84500003 -0.39199999 0 105 | 13.377 -0.80400002 -0.39199999 0 106 | 13.268 -0.296 -0.38699999 0 107 | 13.229 -0.25400001 -0.38499999 0 108 | 13.652 -0.241 -0.40400001 0 109 | 12.977 -1.595 -0.45300001 0 110 | 13.018 -1.559 -0.45500001 0 111 | 13.298 -1.571 -0.46900001 0 112 | 13.868 -1.5930001 -0.498 0 113 | 13.906 
-1.3099999 -0.498 0 114 | 14.018 -0.69999999 -0.50099999 0 115 | 14.034 -0.61299998 -0.50199997 0 116 | 14.04 -0.56900001 -0.50199997 0 117 | 13.231 -0.30899999 -0.461 0 118 | 13.1 -0.26499999 -0.45500001 0 119 | 13.051 -0.223 -0.45199999 0 120 | 13.534 -0.20900001 -0.47600001 0 121 | 12.898 -1.6339999 -0.53200001 0 122 | 12.909 -1.594 -0.53299999 0 123 | 12.939 -1.557 -0.53399998 0 124 | 13.862 -1.626 -0.58600003 0 125 | 13.903 -1.586 -0.588 0 126 | 12.965 -0.31 -0.52999997 0 127 | 13.028 -0.271 -0.53399998 0 128 | 12.965 -0.229 -0.52999997 0 129 | 12.981 -0.18799999 -0.53100002 0 130 | 13.755 -0.15800001 -0.57499999 0 131 | 12.81 -1.636 -0.59600002 0 132 | 12.82 -1.617 -0.597 0 133 | 12.877 -1.5829999 -0.60000002 0 134 | 12.947 -0.26199999 -0.59799999 0 135 | 12.91 -0.221 -0.59600002 0 136 | 12.889 -0.18000001 -0.59500003 0 137 | 13.098 0.016000001 -0.699 0 138 | 13.112 0.035999998 -0.69999999 0 139 | 13.149 0.078000002 -0.70300001 0 140 | 13.063 -1.906 -0.70599997 0.050000001 141 | 13.034 -1.86 -0.704 0.17 142 | 13.025 -1.8380001 -0.70300001 0.27000001 143 | 12.995 -1.7920001 -0.69999999 0.16 144 | 12.85 -1.73 -0.69 0 145 | 12.693 -1.668 -0.67900002 0 146 | 12.677 -1.626 -0.67699999 0 147 | 12.696 -1.5880001 -0.67799997 0 148 | 12.727 -0.28400001 -0.67400002 0 149 | 12.736 -0.244 -0.67400002 0 150 | 12.783 -0.22499999 -0.67699999 0 151 | 12.751 -0.184 -0.67500001 0 152 | 12.784 -0.145 -0.67699999 0 153 | 13.048 -0.066 -0.69599998 0 154 | 13.066 -0.025 -0.69700003 0 155 | 13.167 0.0020000001 -0.76899999 0 156 | 12.578 -1.669 -0.73299998 0 157 | 12.556 -1.626 -0.73100001 0 158 | 12.549 -1.585 -0.73000002 0 159 | 12.53 -1.543 -0.72899997 0 160 | 12.521 -1.521 -0.72799999 0 161 | 12.514 -1.4809999 -0.727 0 162 | 12.502 -1.4400001 -0.72600001 0 163 | 12.493 -1.399 -0.72500002 0 164 | 12.503 -1.36 -0.72500002 0 165 | 12.489 -1.319 -0.72399998 0 166 | 12.498 -1.28 -0.72399998 0 167 | 12.484 -1.239 -0.72299999 0 168 | 12.468 -1.2180001 -0.722 0 169 | 12.519 -1.183 
-0.72500002 0.13 170 | 12.527 -1.1440001 -0.72500002 0.25999999 171 | 12.538 -1.105 -0.72600001 0.27000001 172 | 12.554 -1.067 -0.727 0.25999999 173 | 12.551 -1.027 -0.72600001 0.31999999 174 | 12.538 -0.98699999 -0.72500002 0.23 175 | 12.562 -0.96799999 -0.727 0.36000001 176 | 12.569 -0.92900002 -0.727 0.22 177 | 12.587 -0.89099997 -0.72799999 0.34 178 | 12.572 -0.85000002 -0.727 0.18000001 179 | 12.607 -0.81300002 -0.72899997 0.38 180 | 12.577 -0.77100003 -0.727 0.19 181 | 12.601 -0.73299998 -0.72899997 0.37 182 | 12.597 -0.713 -0.72799999 0.23 183 | 12.603 -0.67299998 -0.72799999 0.18000001 184 | 12.607 -0.634 -0.72899997 0.16 185 | 12.619 -0.59500003 -0.72899997 0 186 | 12.634 -0.47600001 -0.73000002 0 187 | 12.66 -0.45699999 -0.73199999 0 188 | 12.708 -0.259 -0.73500001 0 189 | 12.713 -0.219 -0.73500001 0 190 | 12.729 -0.199 -0.73699999 0 191 | 12.724 -0.15899999 -0.736 0 192 | 13.015 -0.121 -0.75800002 0 193 | 13.111 -0.081 -0.76499999 0 194 | 13.133 -0.039999999 -0.76599997 0 195 | 12.408 -1.674 -0.80299997 0 196 | 12.303 -1.601 -0.79400003 0 197 | 12.29 -1.5599999 -0.79299998 0 198 | 12.263 -1.518 -0.79000002 0 199 | 12.26 -1.478 -0.79000002 0 200 | 12.255 -1.438 -0.78899997 0 201 | 12.247 -1.398 -0.78799999 0 202 | 12.236 -1.378 -0.787 0 203 | 12.248 -1.34 -0.787 0 204 | 12.224 -1.299 -0.78500003 0 205 | 12.224 -1.26 -0.78500003 0 206 | 12.218 -1.22 -0.78399998 0 207 | 12.22 -1.182 -0.78399998 0 208 | 12.214 -1.142 -0.78299999 0 209 | 12.212 -1.123 -0.78299999 0 210 | 12.231 -1.086 -0.78399998 0 211 | 12.207 -1.045 -0.78200001 0 212 | 12.214 -1.007 -0.78200001 0 213 | 12.213 -0.96799999 -0.78200001 0 214 | 12.226 -0.93099999 -0.78200001 0 215 | 12.223 -0.89200002 -0.78200001 0 216 | 12.21 -0.87199998 -0.78100002 0 217 | 12.217 -0.83399999 -0.78100002 0 218 | 12.229 -0.796 -0.78200001 0 219 | 12.236 -0.75800002 -0.78200001 0 220 | 12.238 -0.71899998 -0.78200001 0 221 | 12.24 -0.68099999 -0.78200001 0 222 | 12.256 -0.64300001 -0.78299999 0 223 | 12.263 
-0.62400001 -0.78399998 0 224 | 12.269 -0.58600003 -0.78399998 0 225 | 12.279 -0.54799998 -0.78500003 0 226 | 12.298 -0.50999999 -0.78600001 0 227 | 12.296 -0.47099999 -0.78600001 0 228 | 12.313 -0.433 -0.787 0 229 | 12.319 -0.39399999 -0.78799999 0 230 | 12.337 -0.37599999 -0.78899997 0 231 | 12.348 -0.33700001 -0.79000002 0 232 | 12.365 -0.29899999 -0.79100001 0 233 | 12.358 -0.25999999 -0.79000002 0 234 | 12.405 -0.222 -0.79400003 0 235 | 12.425 -0.183 -0.796 0 236 | 12.444 -0.14399999 -0.79699999 0 237 | 12.554 -0.126 -0.80599999 0 238 | 13.034 -0.090999998 -0.84500003 0 239 | 12.156 -1.658 -0.85399997 0 240 | 12.112 -1.614 -0.84899998 0 241 | 12.085 -1.591 -0.847 0 242 | 12.09 -1.553 -0.847 0 243 | 12.061 -1.511 -0.84399998 0 244 | 12.061 -1.472 -0.84399998 0 245 | 12.058 -1.434 -0.84299999 0 246 | 12.031 -1.392 -0.83999997 0 247 | 12.025 -1.353 -0.83899999 0 248 | 12.012 -1.3329999 -0.838 0 249 | 12.01 -1.294 -0.83700001 0 250 | 12.002 -1.255 -0.83600003 0 251 | 12.014 -1.2180001 -0.83700001 0 252 | 11.998 -1.179 -0.83499998 0 253 | 12.009 -1.142 -0.83600003 0 254 | 12.009 -1.104 -0.83600003 0 255 | 12.001 -1.084 -0.83499998 0 256 | 11.992 -1.045 -0.83399999 0 257 | 11.991 -1.007 -0.833 0 258 | 12.006 -0.97100002 -0.83399999 0 259 | 11.987 -0.93099999 -0.833 0 260 | 12.008 -0.89499998 -0.83399999 0 261 | 12.009 -0.85699999 -0.83399999 0 262 | 12.004 -0.838 -0.833 0 263 | 12.019 -0.801 -0.83399999 0 264 | 12.019 -0.76300001 -0.83399999 0 265 | 12.022 -0.72500002 -0.83399999 0 266 | 12.044 -0.68800002 -0.83600003 0 267 | 12.036 -0.64999998 -0.83499998 0 268 | 12.03 -0.61199999 -0.83399999 0 269 | 12.055 -0.59399998 -0.83700001 0 270 | 12.053 -0.55599999 -0.83600003 0 271 | 12.072 -0.51899999 -0.838 0 272 | 12.082 -0.48100001 -0.838 0 273 | 12.091 -0.44400001 -0.83899999 0 274 | 12.106 -0.40599999 -0.83999997 0 275 | 12.118 -0.368 -0.84100002 0 276 | 12.138 -0.34999999 -0.84299999 0 277 | 12.151 -0.31200001 -0.84399998 0 278 | 12.172 -0.27399999 -0.84600002 0 
279 | 12.189 -0.236 -0.847 0 280 | 12.203 -0.198 -0.84799999 0 281 | 12.22 -0.16 -0.85000002 0 282 | 12.268 -0.122 -0.85399997 0 283 | 12.457 -0.104 -0.87 0 284 | 12.944 -0.066 -0.912 0 285 | 11.923 -1.678 -0.98199999 0.17 286 | 11.913 -1.638 -0.98100001 0.19 287 | 11.897 -1.579 -0.97799999 0.69999999 288 | 11.93 -1.507 -0.98100001 0.60000002 289 | 11.824 -1.456 -0.97000003 0.34 290 | 11.811 -1.4170001 -0.96799999 0 291 | 11.77 -1.374 -0.96399999 0 292 | 11.733 -1.3329999 -0.95999998 0 293 | 11.713 -1.312 -0.95700002 0 294 | 11.709 -1.274 -0.95700002 0 295 | 11.725 -1.239 -0.958 0 296 | 11.697 -1.199 -0.95499998 0 297 | 11.711 -1.163 -0.95599997 0 298 | 11.708 -1.126 -0.95499998 0 299 | 11.704 -1.0880001 -0.954 0 300 | 11.709 -1.0700001 -0.95499998 0 301 | 11.703 -1.0319999 -0.954 0 302 | 11.714 -0.99599999 -0.954 0 303 | 11.705 -0.95899999 -0.95300001 0 304 | 11.736 -0.92400002 -0.95599997 0 305 | 11.737 -0.88700002 -0.95599997 0 306 | 11.714 -0.84799999 -0.95300001 0 307 | 11.703 -0.829 -0.95200002 0 308 | 11.718 -0.79299998 -0.95300001 0 309 | 11.714 -0.75599998 -0.95300001 0 310 | 11.722 -0.71899998 -0.95300001 0 311 | 11.734 -0.68300003 -0.954 0 312 | 11.739 -0.64600003 -0.954 0 313 | 11.738 -0.62800002 -0.954 0 314 | 11.749 -0.59100002 -0.95499998 0 315 | 11.751 -0.55400002 -0.95499998 0 316 | 11.765 -0.51800001 -0.95599997 0 317 | 11.764 -0.48100001 -0.95599997 0 318 | 11.764 -0.44400001 -0.95599997 0 319 | 11.807 -0.40799999 -0.95999998 0 320 | 11.82 -0.38999999 -0.96100003 0 321 | 11.849 -0.354 -0.96399999 0 322 | 11.838 -0.31600001 -0.963 0 323 | 11.857 -0.28 -0.96499997 0.07 324 | 12.278 -0.211 -1.006 0 325 | 11.934 -0.169 -0.972 0 326 | 11.973 -0.131 -0.97600001 0 327 | 12.08 -0.113 -0.986 0 328 | 12.286 -0.075999998 -1.006 0 329 | 11.668 -1.29 -1.025 0 330 | 11.648 -1.251 -1.023 0 331 | 11.682 -1.2359999 -1.026 0 332 | 11.733 -1.13 -1.03 0 333 | 11.709 -1.09 -1.027 0 334 | 11.71 -1.053 -1.027 0.12 335 | 11.68 -1.0319999 -1.024 0.090000004 336 | 11.679 
-0.995 -1.023 0 337 | 11.668 -0.95700002 -1.022 0 338 | 11.661 -0.92000002 -1.021 0.11 339 | 11.67 -0.884 -1.021 0 340 | 11.655 -0.84600002 -1.0190001 0.079999998 341 | 11.673 -0.81 -1.021 0.11 342 | 11.708 -0.79400003 -1.025 0.38999999 343 | 11.691 -0.75599998 -1.023 0.11 344 | 11.693 -0.71899998 -1.023 0.079999998 345 | 11.733 -0.685 -1.027 0 346 | 11.712 -0.64700001 -1.024 0 347 | 11.829 -0.616 -1.036 0 348 | 11.727 -0.574 -1.025 0 349 | 11.695 -0.53500003 -1.022 0 350 | 11.708 -0.51700002 -1.023 0.15000001 351 | 11.72 -0.48100001 -1.024 0.27000001 352 | 11.943 -0.41499999 -1.0470001 0.2 353 | 11.94 -0.37799999 -1.0470001 0.69 354 | 12.013 -0.34200001 -1.054 0.66000003 355 | 11.865 -0.301 -1.039 0.17 356 | 11.917 -0.28299999 -1.044 0.30000001 357 | 11.991 -0.248 -1.052 0.76999998 358 | 11.95 -0.20900001 -1.0470001 0.99000001 359 | 11.911 -0.171 -1.043 0.74000001 360 | 11.948 -0.096000001 -1.0470001 0 361 | -------------------------------------------------------------------------------- /Feature_Engineering/dataset_example/car2.pcd: -------------------------------------------------------------------------------- 1 | # .PCD v0.7 - Point Cloud Data file format 2 | VERSION 0.7 3 | FIELDS x y z intensity 4 | SIZE 4 4 4 4 5 | TYPE F F F F 6 | COUNT 1 1 1 1 7 | WIDTH 349 8 | HEIGHT 1 9 | VIEWPOINT 0 0 0 1 0 0 0 10 | POINTS 349 11 | DATA ascii 12 | 14.27 -1.332 -0.169 0.25999999 13 | 14.24 -1.307 -0.168 0.25 14 | 14.216 -1.26 -0.167 0.25 15 | 14.214 -1.215 -0.167 0.23999999 16 | 14.198 -1.169 -0.167 0.23999999 17 | 14.185 -1.123 -0.16599999 0.1 18 | 14.185 -1.0779999 -0.16599999 0.16 19 | 14.18 -1.033 -0.16599999 0.1 20 | 14.196 -1.011 -0.16599999 0.16 21 | 14.201 -0.96700001 -0.16599999 0.16 22 | 14.214 -0.92299998 -0.167 0.16 23 | 14.231 -0.87900001 -0.167 0.16 24 | 14.265 -0.83600003 -0.168 0.16 25 | 14.3 -0.79299998 -0.169 0.16 26 | 14.331 -0.77200001 -0.17 0.17 27 | 14.351 -0.72799999 -0.17 0.16 28 | 13.486 -1.507 -0.23 0 29 | 13.494 -1.465 -0.23 0 30 | 13.473 
-1.442 -0.229 0 31 | 13.461 -1.398 -0.229 0 32 | 13.454 -1.354 -0.228 0 33 | 13.432 -1.309 -0.228 0 34 | 13.43 -1.267 -0.227 0 35 | 13.446 -1.225 -0.228 0 36 | 13.44 -1.204 -0.228 0 37 | 13.43 -1.16 -0.227 0 38 | 13.427 -1.117 -0.227 0 39 | 13.435 -1.076 -0.227 0 40 | 13.438 -1.033 -0.227 0 41 | 13.419 -0.98900002 -0.226 0 42 | 13.434 -0.94800001 -0.227 0 43 | 13.436 -0.92699999 -0.227 0 44 | 13.443 -0.88499999 -0.227 0 45 | 13.445 -0.84299999 -0.227 0 46 | 13.458 -0.801 -0.227 0 47 | 13.456 -0.759 -0.227 0 48 | 13.485 -0.71799999 -0.228 0 49 | 13.473 -0.67500001 -0.227 0 50 | 13.495 -0.63300002 -0.228 0 51 | 13.484 -0.61199999 -0.228 0 52 | 13.512 -0.56999999 -0.228 0 53 | 13.531 -0.52899998 -0.229 0 54 | 13.551 -0.48699999 -0.23 0 55 | 13.58 -0.44499999 -0.23100001 0 56 | 13.59 -0.403 -0.23100001 0 57 | 13.625 -0.361 -0.23199999 0 58 | 13.585 -0.33899999 -0.23100001 0.07 59 | 13.808 -0.301 -0.23800001 0 60 | 13.285 -1.575 -0.31200001 0 61 | 13.345 -1.539 -0.31400001 0.07 62 | 13.36 -1.499 -0.31400001 0.22 63 | 13.345 -1.454 -0.31400001 0.25 64 | 13.337 -1.432 -0.31299999 0.25999999 65 | 13.322 -1.388 -0.31200001 0.11 66 | 13.322 -1.346 -0.31200001 0.16 67 | 13.324 -1.304 -0.31200001 0.12 68 | 13.326 -1.262 -0.31200001 0.16 69 | 13.322 -1.22 -0.31200001 0.27000001 70 | 13.304 -1.176 -0.31099999 0.11 71 | 13.311 -1.1339999 -0.31099999 0 72 | 13.273 -1.11 -0.31 0 73 | 13.277 -1.068 -0.31 0 74 | 13.29 -1.027 -0.31 0 75 | 13.279 -0.98500001 -0.30899999 0 76 | 13.292 -0.94400001 -0.31 0 77 | 13.277 -0.90100002 -0.30899999 0 78 | 13.294 -0.86000001 -0.31 0 79 | 13.287 -0.83899999 -0.30899999 0 80 | 13.282 -0.796 -0.30899999 0 81 | 13.298 -0.755 -0.31 0 82 | 13.315 -0.71399999 -0.31 0.14 83 | 13.327 -0.67299998 -0.31 0.11 84 | 13.333 -0.63099998 -0.31099999 0 85 | 13.337 -0.58899999 -0.31099999 0 86 | 13.348 -0.56900001 -0.31099999 0 87 | 13.357 -0.52700001 -0.31099999 0 88 | 13.337 -0.48500001 -0.31099999 0 89 | 13.372 -0.44400001 -0.31200001 0 90 | 13.366 -0.40099999 
-0.31200001 0.090000004 91 | 13.385 -0.36000001 -0.31200001 0.18000001 92 | 13.406 -0.31799999 -0.31299999 0.25999999 93 | 13.287 -0.29499999 -0.308 0 94 | 13.529 -0.257 -0.31799999 0 95 | 13.14 -1.601 -0.38499999 0 96 | 13.158 -1.5829999 -0.38600001 0 97 | 13.201 -1.546 -0.38800001 0 98 | 13.825 -1.576 -0.41499999 0 99 | 13.317 -1.053 -0.391 0 100 | 13.301 -1.0089999 -0.38999999 0 101 | 13.304 -0.96700001 -0.38999999 0 102 | 13.301 -0.92500001 -0.389 0 103 | 13.318 -0.884 -0.38999999 0 104 | 13.354 -0.84500003 -0.39199999 0 105 | 13.377 -0.80400002 -0.39199999 0 106 | 13.268 -0.296 -0.38699999 0 107 | 13.229 -0.25400001 -0.38499999 0 108 | 13.652 -0.241 -0.40400001 0 109 | 12.977 -1.595 -0.45300001 0 110 | 13.018 -1.559 -0.45500001 0 111 | 13.298 -1.571 -0.46900001 0 112 | 13.868 -1.5930001 -0.498 0 113 | 13.906 -1.3099999 -0.498 0 114 | 14.018 -0.69999999 -0.50099999 0 115 | 14.034 -0.61299998 -0.50199997 0 116 | 14.04 -0.56900001 -0.50199997 0 117 | 13.231 -0.30899999 -0.461 0 118 | 13.1 -0.26499999 -0.45500001 0 119 | 13.051 -0.223 -0.45199999 0 120 | 13.534 -0.20900001 -0.47600001 0 121 | 12.898 -1.6339999 -0.53200001 0 122 | 12.909 -1.594 -0.53299999 0 123 | 12.939 -1.557 -0.53399998 0 124 | 13.862 -1.626 -0.58600003 0 125 | 13.903 -1.586 -0.588 0 126 | 12.965 -0.31 -0.52999997 0 127 | 13.028 -0.271 -0.53399998 0 128 | 12.965 -0.229 -0.52999997 0 129 | 12.981 -0.18799999 -0.53100002 0 130 | 13.755 -0.15800001 -0.57499999 0 131 | 12.81 -1.636 -0.59600002 0 132 | 12.82 -1.617 -0.597 0 133 | 12.877 -1.5829999 -0.60000002 0 134 | 12.947 -0.26199999 -0.59799999 0 135 | 12.91 -0.221 -0.59600002 0 136 | 12.889 -0.18000001 -0.59500003 0 137 | 13.098 0.016000001 -0.699 0 138 | 13.112 0.035999998 -0.69999999 0 139 | 13.149 0.078000002 -0.70300001 0 140 | 13.063 -1.906 -0.70599997 0.050000001 141 | 13.034 -1.86 -0.704 0.17 142 | 13.025 -1.8380001 -0.70300001 0.27000001 143 | 12.995 -1.7920001 -0.69999999 0.16 144 | 12.85 -1.73 -0.69 0 145 | 12.693 -1.668 -0.67900002 0 
146 | 12.677 -1.626 -0.67699999 0 147 | 12.696 -1.5880001 -0.67799997 0 148 | 12.727 -0.28400001 -0.67400002 0 149 | 12.736 -0.244 -0.67400002 0 150 | 12.783 -0.22499999 -0.67699999 0 151 | 12.751 -0.184 -0.67500001 0 152 | 12.784 -0.145 -0.67699999 0 153 | 13.048 -0.066 -0.69599998 0 154 | 13.066 -0.025 -0.69700003 0 155 | 13.167 0.0020000001 -0.76899999 0 156 | 12.578 -1.669 -0.73299998 0 157 | 12.556 -1.626 -0.73100001 0 158 | 12.549 -1.585 -0.73000002 0 159 | 12.53 -1.543 -0.72899997 0 160 | 12.521 -1.521 -0.72799999 0 161 | 12.514 -1.4809999 -0.727 0 162 | 12.502 -1.4400001 -0.72600001 0 163 | 12.493 -1.399 -0.72500002 0 164 | 12.503 -1.36 -0.72500002 0 165 | 12.489 -1.319 -0.72399998 0 166 | 12.498 -1.28 -0.72399998 0 167 | 12.484 -1.239 -0.72299999 0 168 | 12.468 -1.2180001 -0.722 0 169 | 12.519 -1.183 -0.72500002 0.13 170 | 12.527 -1.1440001 -0.72500002 0.25999999 171 | 12.538 -1.105 -0.72600001 0.27000001 172 | 12.554 -1.067 -0.727 0.25999999 173 | 12.551 -1.027 -0.72600001 0.31999999 174 | 12.538 -0.98699999 -0.72500002 0.23 175 | 12.562 -0.96799999 -0.727 0.36000001 176 | 12.569 -0.92900002 -0.727 0.22 177 | 12.587 -0.89099997 -0.72799999 0.34 178 | 12.572 -0.85000002 -0.727 0.18000001 179 | 12.607 -0.81300002 -0.72899997 0.38 180 | 12.577 -0.77100003 -0.727 0.19 181 | 12.601 -0.73299998 -0.72899997 0.37 182 | 12.597 -0.713 -0.72799999 0.23 183 | 12.603 -0.67299998 -0.72799999 0.18000001 184 | 12.607 -0.634 -0.72899997 0.16 185 | 12.619 -0.59500003 -0.72899997 0 186 | 12.634 -0.47600001 -0.73000002 0 187 | 12.66 -0.45699999 -0.73199999 0 188 | 12.708 -0.259 -0.73500001 0 189 | 12.713 -0.219 -0.73500001 0 190 | 12.729 -0.199 -0.73699999 0 191 | 12.724 -0.15899999 -0.736 0 192 | 13.015 -0.121 -0.75800002 0 193 | 13.111 -0.081 -0.76499999 0 194 | 13.133 -0.039999999 -0.76599997 0 195 | 12.408 -1.674 -0.80299997 0 196 | 12.303 -1.601 -0.79400003 0 197 | 12.29 -1.5599999 -0.79299998 0 198 | 12.263 -1.518 -0.79000002 0 199 | 12.26 -1.478 -0.79000002 0 200 | 
12.255 -1.438 -0.78899997 0 201 | 12.247 -1.398 -0.78799999 0 202 | 12.236 -1.378 -0.787 0 203 | 12.248 -1.34 -0.787 0 204 | 12.224 -1.299 -0.78500003 0 205 | 12.224 -1.26 -0.78500003 0 206 | 12.218 -1.22 -0.78399998 0 207 | 12.22 -1.182 -0.78399998 0 208 | 12.214 -1.142 -0.78299999 0 209 | 12.212 -1.123 -0.78299999 0 210 | 12.231 -1.086 -0.78399998 0 211 | 12.207 -1.045 -0.78200001 0 212 | 12.214 -1.007 -0.78200001 0 213 | 12.213 -0.96799999 -0.78200001 0 214 | 12.226 -0.93099999 -0.78200001 0 215 | 12.223 -0.89200002 -0.78200001 0 216 | 12.21 -0.87199998 -0.78100002 0 217 | 12.217 -0.83399999 -0.78100002 0 218 | 12.229 -0.796 -0.78200001 0 219 | 12.236 -0.75800002 -0.78200001 0 220 | 12.238 -0.71899998 -0.78200001 0 221 | 12.24 -0.68099999 -0.78200001 0 222 | 12.256 -0.64300001 -0.78299999 0 223 | 12.263 -0.62400001 -0.78399998 0 224 | 12.269 -0.58600003 -0.78399998 0 225 | 12.279 -0.54799998 -0.78500003 0 226 | 12.298 -0.50999999 -0.78600001 0 227 | 12.296 -0.47099999 -0.78600001 0 228 | 12.313 -0.433 -0.787 0 229 | 12.319 -0.39399999 -0.78799999 0 230 | 12.337 -0.37599999 -0.78899997 0 231 | 12.348 -0.33700001 -0.79000002 0 232 | 12.365 -0.29899999 -0.79100001 0 233 | 12.358 -0.25999999 -0.79000002 0 234 | 12.405 -0.222 -0.79400003 0 235 | 12.425 -0.183 -0.796 0 236 | 12.444 -0.14399999 -0.79699999 0 237 | 12.554 -0.126 -0.80599999 0 238 | 13.034 -0.090999998 -0.84500003 0 239 | 12.156 -1.658 -0.85399997 0 240 | 12.112 -1.614 -0.84899998 0 241 | 12.085 -1.591 -0.847 0 242 | 12.09 -1.553 -0.847 0 243 | 12.061 -1.511 -0.84399998 0 244 | 12.061 -1.472 -0.84399998 0 245 | 12.058 -1.434 -0.84299999 0 246 | 12.031 -1.392 -0.83999997 0 247 | 12.025 -1.353 -0.83899999 0 248 | 12.012 -1.3329999 -0.838 0 249 | 12.01 -1.294 -0.83700001 0 250 | 12.002 -1.255 -0.83600003 0 251 | 12.014 -1.2180001 -0.83700001 0 252 | 11.998 -1.179 -0.83499998 0 253 | 12.009 -1.142 -0.83600003 0 254 | 12.009 -1.104 -0.83600003 0 255 | 12.001 -1.084 -0.83499998 0 256 | 11.992 -1.045 
-0.83399999 0 257 | 11.991 -1.007 -0.833 0 258 | 12.006 -0.97100002 -0.83399999 0 259 | 11.987 -0.93099999 -0.833 0 260 | 12.008 -0.89499998 -0.83399999 0 261 | 12.009 -0.85699999 -0.83399999 0 262 | 12.004 -0.838 -0.833 0 263 | 12.019 -0.801 -0.83399999 0 264 | 12.019 -0.76300001 -0.83399999 0 265 | 12.022 -0.72500002 -0.83399999 0 266 | 12.044 -0.68800002 -0.83600003 0 267 | 12.036 -0.64999998 -0.83499998 0 268 | 12.03 -0.61199999 -0.83399999 0 269 | 12.055 -0.59399998 -0.83700001 0 270 | 12.053 -0.55599999 -0.83600003 0 271 | 12.072 -0.51899999 -0.838 0 272 | 12.082 -0.48100001 -0.838 0 273 | 12.091 -0.44400001 -0.83899999 0 274 | 12.106 -0.40599999 -0.83999997 0 275 | 12.118 -0.368 -0.84100002 0 276 | 12.138 -0.34999999 -0.84299999 0 277 | 12.151 -0.31200001 -0.84399998 0 278 | 12.172 -0.27399999 -0.84600002 0 279 | 12.189 -0.236 -0.847 0 280 | 12.203 -0.198 -0.84799999 0 281 | 12.22 -0.16 -0.85000002 0 282 | 12.268 -0.122 -0.85399997 0 283 | 12.457 -0.104 -0.87 0 284 | 12.944 -0.066 -0.912 0 285 | 11.923 -1.678 -0.98199999 0.17 286 | 11.913 -1.638 -0.98100001 0.19 287 | 11.897 -1.579 -0.97799999 0.69999999 288 | 11.93 -1.507 -0.98100001 0.60000002 289 | 11.824 -1.456 -0.97000003 0.34 290 | 11.811 -1.4170001 -0.96799999 0 291 | 11.77 -1.374 -0.96399999 0 292 | 11.733 -1.3329999 -0.95999998 0 293 | 11.713 -1.312 -0.95700002 0 294 | 11.709 -1.274 -0.95700002 0 295 | 11.725 -1.239 -0.958 0 296 | 11.697 -1.199 -0.95499998 0 297 | 11.711 -1.163 -0.95599997 0 298 | 11.708 -1.126 -0.95499998 0 299 | 11.704 -1.0880001 -0.954 0 300 | 11.709 -1.0700001 -0.95499998 0 301 | 11.703 -1.0319999 -0.954 0 302 | 11.714 -0.99599999 -0.954 0 303 | 11.705 -0.95899999 -0.95300001 0 304 | 11.736 -0.92400002 -0.95599997 0 305 | 11.737 -0.88700002 -0.95599997 0 306 | 11.714 -0.84799999 -0.95300001 0 307 | 11.703 -0.829 -0.95200002 0 308 | 11.718 -0.79299998 -0.95300001 0 309 | 11.714 -0.75599998 -0.95300001 0 310 | 11.722 -0.71899998 -0.95300001 0 311 | 11.734 -0.68300003 -0.954 0 312 
| 11.739 -0.64600003 -0.954 0 313 | 11.738 -0.62800002 -0.954 0 314 | 11.749 -0.59100002 -0.95499998 0 315 | 11.751 -0.55400002 -0.95499998 0 316 | 11.765 -0.51800001 -0.95599997 0 317 | 11.764 -0.48100001 -0.95599997 0 318 | 11.764 -0.44400001 -0.95599997 0 319 | 11.807 -0.40799999 -0.95999998 0 320 | 11.82 -0.38999999 -0.96100003 0 321 | 11.849 -0.354 -0.96399999 0 322 | 11.838 -0.31600001 -0.963 0 323 | 11.857 -0.28 -0.96499997 0.07 324 | 12.278 -0.211 -1.006 0 325 | 11.934 -0.169 -0.972 0 326 | 11.973 -0.131 -0.97600001 0 327 | 12.08 -0.113 -0.986 0 328 | 12.286 -0.075999998 -1.006 0 329 | 11.668 -1.29 -1.025 0 330 | 11.648 -1.251 -1.023 0 331 | 11.682 -1.2359999 -1.026 0 332 | 11.733 -1.13 -1.03 0 333 | 11.709 -1.09 -1.027 0 334 | 11.71 -1.053 -1.027 0.12 335 | 11.68 -1.0319999 -1.024 0.090000004 336 | 11.679 -0.995 -1.023 0 337 | 11.668 -0.95700002 -1.022 0 338 | 11.661 -0.92000002 -1.021 0.11 339 | 11.67 -0.884 -1.021 0 340 | 11.655 -0.84600002 -1.0190001 0.079999998 341 | 11.673 -0.81 -1.021 0.11 342 | 11.708 -0.79400003 -1.025 0.38999999 343 | 11.691 -0.75599998 -1.023 0.11 344 | 11.693 -0.71899998 -1.023 0.079999998 345 | 11.733 -0.685 -1.027 0 346 | 11.712 -0.64700001 -1.024 0 347 | 11.829 -0.616 -1.036 0 348 | 11.727 -0.574 -1.025 0 349 | 11.695 -0.53500003 -1.022 0 350 | 11.708 -0.51700002 -1.023 0.15000001 351 | 11.72 -0.48100001 -1.024 0.27000001 352 | 11.943 -0.41499999 -1.0470001 0.2 353 | 11.94 -0.37799999 -1.0470001 0.69 354 | 12.013 -0.34200001 -1.054 0.66000003 355 | 11.865 -0.301 -1.039 0.17 356 | 11.917 -0.28299999 -1.044 0.30000001 357 | 11.991 -0.248 -1.052 0.76999998 358 | 11.95 -0.20900001 -1.0470001 0.99000001 359 | 11.911 -0.171 -1.043 0.74000001 360 | 11.948 -0.096000001 -1.0470001 0 361 | -------------------------------------------------------------------------------- /Feature_Engineering/lib/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8 
FATAL_ERROR) 2 | 3 | add_compile_options(-std=c++11) 4 | 5 | find_package(PCL 1.8 REQUIRED) 6 | 7 | #set(CMAKE_VERBOSE_MAKEFILE on) 8 | 9 | #message(${PROJECT_SOURCE_DIR}) 10 | 11 | set(LIB_SRC accessFile.cpp extractFeature.cpp sampleCloud.cpp searchKdtree.cpp getFeatureVector.cpp get_file_name.c) 12 | 13 | include_directories(${PCL_INCLUDE_DIRS}) 14 | link_directories(${PCL_LIBRARY_DIRS}) 15 | add_definitions(${PCL_DEFINITIONS}) 16 | 17 | #add_library(libfeature SHARED ${LIB_SRC}) #dll 18 | add_library(libfeature ${LIB_SRC}) #lib 19 | 20 | set(LIBRARY_OUTPUT_PATH ${PROJECT_BINARY_DIR}/lib) 21 | 22 | set_target_properties(libfeature PROPERTIES OUTPUT_NAME "feature") 23 | -------------------------------------------------------------------------------- /Feature_Engineering/lib/accessFile.cpp: -------------------------------------------------------------------------------- 1 | #include "accessFile.h" 2 | 3 | /* 4 | **************************************** 5 | ********** Read pcd file *************** 6 | **************************************** 7 | */ 8 | void 9 | readPCD( 10 | const std::string & filename, 11 | pcl::PointCloud::Ptr & cloud 12 | ) 13 | { 14 | pcl::PCDReader reader; 15 | reader.read (filename, *cloud); 16 | std::cout << "readPCD(): " 17 | << cloud->points.size() 18 | << " points in " 19 | << filename 20 | << std::endl; 21 | } 22 | 23 | 24 | /* 25 | **************************************** 26 | ********** Write pcd file ************** 27 | **************************************** 28 | */ 29 | 30 | // pcl::PointCloud 31 | void 32 | writePCD( 33 | const std::string & filename, 34 | const pcl::PointCloud::Ptr & cloud 35 | ) 36 | { 37 | if (cloud->points.empty()) 38 | { 39 | std::cout << "writePCD(): There is no points in this cloud!" 
<< std::endl; 40 | return; 41 | } 42 | pcl::PCDWriter writer; 43 | writer.write (filename, *cloud, false); 44 | } 45 | 46 | // pcl::PointCloud 47 | void 48 | writePCD( 49 | const std::string & filename, 50 | const pcl::PointCloud::Ptr & normals 51 | ) 52 | { 53 | if (normals->points.empty()) 54 | { 55 | std::cout << "writePCD(): There is no points in this cloud!" << std::endl; 56 | return; 57 | } 58 | pcl::PCDWriter writer; 59 | writer.write (filename, *normals, false); 60 | } 61 | 62 | 63 | /* 64 | ******************************************************* 65 | ********** Write txt file with "Append Mode" ********** 66 | ******************************************************* 67 | */ 68 | void 69 | writeTXT( 70 | const std::vector & globalFeatureVector, 71 | const unsigned int & label, 72 | const std::string & txt_path 73 | ) 74 | { 75 | std::ofstream writeTXT; 76 | writeTXT.open(txt_path.c_str(), std::ios::app); 77 | for (auto & e : globalFeatureVector) 78 | { 79 | writeTXT << e << ' '; 80 | } 81 | writeTXT << label << "\n"; 82 | } 83 | 84 | 85 | /* 86 | **************************************** 87 | ********** Get file name *************** 88 | **************************************** 89 | */ 90 | void 91 | getFileName( 92 | const std::string & path, 93 | std::vector & file_name_vec 94 | ) 95 | { 96 | // use Cython so that C++ can call Python function 97 | 98 | // initialize Python module 99 | Py_Initialize(); 100 | initget_file_name(); 101 | 102 | // get the parameter and returned value in python format 103 | PyObject * path_ = Py_BuildValue("s", path.c_str()); 104 | PyObject * file_name_list = get_file_name(path_); 105 | assert(PyList_Check(file_name_list)); 106 | 107 | // build the file name vector 108 | Py_ssize_t size = PyList_Size(file_name_list); 109 | for (unsigned int file_index = 0; file_index != size; ++file_index) 110 | { 111 | PyObject * file_name_ = PyList_GetItem(file_name_list, file_index); 112 | std::string file_name = PyString_AsString(file_name_); 
113 | file_name_vec.push_back(file_name); 114 | } 115 | 116 | // finalize the python module 117 | Py_Finalize(); 118 | } 119 | -------------------------------------------------------------------------------- /Feature_Engineering/lib/accessFile.h: -------------------------------------------------------------------------------- 1 | #ifndef ACCESSFILE_H 2 | #define ACCESSFILE_H 3 | 4 | #include "commonHeadFiles.h" 5 | 6 | #include 7 | #include 8 | #include 9 | #include "get_file_name.h" 10 | 11 | 12 | 13 | /* 14 | **************************************** 15 | ********** Read pcd file *************** 16 | **************************************** 17 | */ 18 | void 19 | readPCD( 20 | const std::string & filename, 21 | pcl::PointCloud::Ptr & cloud 22 | ); 23 | 24 | 25 | /* 26 | **************************************** 27 | ********** Write pcd file ************** 28 | **************************************** 29 | */ 30 | 31 | // pcl::PointCloud 32 | void 33 | writePCD( 34 | const std::string & filename, 35 | const pcl::PointCloud::Ptr & cloud 36 | ); 37 | 38 | // pcl::PointCloud 39 | void 40 | writePCD( 41 | const std::string & filename, 42 | const pcl::PointCloud::Ptr & normals 43 | ); 44 | 45 | 46 | /* 47 | ******************************************************* 48 | ********** Write txt file with "Append Mode" ********** 49 | ******************************************************* 50 | */ 51 | void 52 | writeTXT( 53 | const std::vector & globalFeatureVector, 54 | const unsigned int & label, 55 | const std::string & txt_path 56 | ); 57 | 58 | 59 | 60 | /* 61 | **************************************** 62 | ********** Get file name *************** 63 | **************************************** 64 | */ 65 | void 66 | getFileName( 67 | const std::string & path, 68 | std::vector & file_name_vec 69 | ); 70 | 71 | #endif 72 | -------------------------------------------------------------------------------- /Feature_Engineering/lib/commonHeadFiles.h: 
-------------------------------------------------------------------------------- 1 | #ifndef COMMONHEADFILES_H 2 | #define COMMONHEADFILES_H 3 | 4 | #include 5 | #include 6 | // #include // interface for ROS 7 | #include 8 | #include 9 | #include 10 | #include 11 | 12 | #endif 13 | -------------------------------------------------------------------------------- /Feature_Engineering/lib/extractFeature.h: -------------------------------------------------------------------------------- 1 | #ifndef FEATUREEXTRACTION_H 2 | #define FEATUREEXTRACTION_H 3 | 4 | #include "commonHeadFiles.h" 5 | 6 | #include "searchKdtree.h" 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | 15 | /* 16 | **************************************** 17 | ********** Normals estimation ********** 18 | **************************************** 19 | */ 20 | 21 | void 22 | estimateNormals( 23 | const pcl::PointCloud::Ptr & inputCloud, // input cloud 24 | const pcl::PointCloud::Ptr & searchSurface, // search surface 25 | const float & searchRadius, // radius for searching [m] 26 | pcl::PointCloud::Ptr & normals, // output normals for every point 27 | bool debug = false // debug mode, default false 28 | // bool removeNaN = true, 29 | // std::vector & mapping, 30 | // bool visualize = false 31 | ); 32 | 33 | 34 | 35 | /* 36 | **************************************** 37 | ********** Remove NaN Normals ********** 38 | **************************************** 39 | */ 40 | 41 | void 42 | removeNanNormals( 43 | const pcl::PointCloud::Ptr & inputNormals, // input normals inklusive NaN 44 | pcl::PointCloud::Ptr & outputNormals, // output normals without NaN 45 | std::vector & mapping // output index in inputNormals with 46 | // inputNormals[index[i]] != NaN 47 | ); 48 | 49 | 50 | 51 | /* 52 | **************************************************** 53 | ********** Remove Points with NaN Normals ********** 54 | **************************************************** 55 | 
*/ 56 | 57 | void 58 | removePointWithNanNormal( 59 | const pcl::PointCloud::Ptr & inputCloud, // input cloud 60 | pcl::PointCloud::Ptr & outputCloud, // output cloud, which has no NaN normal 61 | const std::vector & mapping, // index in inputCloud, inputCloud[index[i]] has no NaN normal 62 | bool debug = false // debug mode, default false 63 | ); 64 | 65 | 66 | 67 | /* 68 | *************************************** 69 | ********** Visualize normals ********** 70 | *************************************** 71 | */ 72 | 73 | void 74 | visualizeNormals( 75 | const pcl::PointCloud::Ptr & cloud, // cloud with XYZI 76 | const pcl::PointCloud::Ptr & normals // normals 77 | ); 78 | 79 | 80 | 81 | 82 | /* 83 | *************************** 84 | ****** Estimate FPFH ****** 85 | *************************** 86 | */ 87 | 88 | void 89 | estimateFPFH( 90 | const pcl::PointCloud::Ptr & inputCloud, // input cloud 91 | const pcl::PointCloud::Ptr & inputNormals, // input normals 92 | const float & searchRadius, // param search radius 93 | std::vector & FPFHHisto, // output FPFH histogram 94 | bool debug = false // debug mode, defualt false 95 | ); 96 | 97 | 98 | 99 | 100 | /* 101 | ***************************************************** 102 | ******* Transform point cloud to Eigen Matrix ******* 103 | ***************************************************** 104 | */ 105 | 106 | void 107 | cloud2Matrix( 108 | const pcl::PointCloud::Ptr & cloud, // input cloud 109 | Eigen::MatrixXf & PointMat, // output point matrix 110 | const std::string & mode // transform mode: "xy" or "xyz" 111 | ); 112 | 113 | 114 | /* 115 | ************************************************* 116 | ********** Calculate covariance matrix ********** 117 | ************************************************* 118 | */ 119 | 120 | void 121 | covMatrix( 122 | const Eigen::MatrixXf & M, 123 | Eigen::MatrixXf & CovMat 124 | ); 125 | 126 | 127 | 128 | /* 129 | ***************************************** 130 | ****** Calculate 3D Size 
using PCA ****** 131 | ***************************************** 132 | */ 133 | 134 | void 135 | geometrySize( 136 | const pcl::PointCloud::Ptr & cloud, // input cloud 137 | float & length, // output length 138 | float & width, // output width 139 | float & height // output height 140 | ); 141 | 142 | 143 | 144 | /* 145 | **************************************************************************** 146 | ****** Calculate eigen values of covaraince matrix in geometric domain****** 147 | **************************************************************************** 148 | */ 149 | 150 | void 151 | geoCovEigen( 152 | const pcl::PointCloud::Ptr & cloud, // input cloud 153 | std::vector & eigvals, // output eigen values 154 | bool sort = true // sort mode, if true (default), sort eigvals from large to small 155 | // 1 >= eigvals[0] >= eigvals[1] >= eigvals[2] >= 0 156 | ); 157 | 158 | 159 | 160 | /* 161 | **************************************** 162 | ****** Calculate Lalonde features ****** 163 | **************************************** 164 | */ 165 | 166 | void 167 | lalondeFeat( 168 | const pcl::PointCloud::Ptr & inputCloud, // input cloud 169 | const pcl::PointCloud::Ptr & searchSurface, // input search surface 170 | const float & searchRadius, // param search radius 171 | std::vector & lalondeHisto // output lalonde histogram 172 | ); 173 | 174 | 175 | 176 | /* 177 | ************************************************ 178 | ****** Calculate Features about intensity ****** 179 | ************************************************ 180 | */ 181 | 182 | void 183 | intensity( 184 | const pcl::PointCloud::Ptr & cloud, // input cloud 185 | float & Imax, // output max intensity 186 | float & Imean, // output mean intensity 187 | float & Ivar // output intensity variance 188 | ); 189 | 190 | 191 | 192 | /* 193 | **************************************** 194 | ****** Estimate moment invariants ****** 195 | **************************************** 196 | */ 197 | 198 | void 199 | 
momentInvariants( 200 | const pcl::PointCloud::Ptr & cloud, // input cloud 201 | std::vector & jvec // output moment invariants vector jvec[0] jvec[1] jvec[2] 202 | ); 203 | 204 | 205 | 206 | /* 207 | *********************************************** 208 | ****** Calculate mean intensity gradient ****** 209 | *********************************************** 210 | */ 211 | 212 | void 213 | meanIntensityGradient( 214 | const pcl::PointCloud::Ptr & inputCloud, // input cloud 215 | const pcl::PointCloud::Ptr & inputNormals, // input normals 216 | const float & searchRadius, // param search radius 217 | float & meanIG, // output meanIG 218 | bool debug = false // debug mode, default false 219 | ); 220 | 221 | #endif 222 | -------------------------------------------------------------------------------- /Feature_Engineering/lib/getFeatureVector.cpp: -------------------------------------------------------------------------------- 1 | #include "getFeatureVector.h" 2 | 3 | /* 4 | *********************************************** 5 | ********** build feature vector *************** 6 | *********************************************** 7 | */ 8 | 9 | void 10 | getFeatureVector ( 11 | const float & searchRadius, 12 | bool is_training 13 | ) 14 | { 15 | // table_drive 16 | const std::string label_name[5] = {"car", "van", "pedestrian", "truck", "cyclist"}; 17 | const unsigned int size_low_limit[5] = {1000, 1000, 150, 1000, 500}; 18 | const unsigned int size_high_limit[5] = {2000, 2000, 1000, 3000, 1500}; 19 | const unsigned int label[5] = {0, 1, 2, 3, 4}; 20 | // path 21 | std::string read_file_base_path = "/media/shao/TOSHIBA EXT/data_object_velodyne/Daten"; 22 | if (is_training) 23 | read_file_base_path += "/train/data_augmented"; 24 | else 25 | read_file_base_path += "/test/data_augmented"; 26 | 27 | for (int i = 0; i != 5; ++i) 28 | { 29 | // path 30 | std::string read_file_path = read_file_base_path + "/" + label_name[i]; 31 | std::vector file_name_vec; 32 | // get all pcd-files' 
name under the folder 33 | getFileName(read_file_path, file_name_vec); 34 | // traverse every pcd-file to build feature vector 35 | for (auto & f : file_name_vec) 36 | { 37 | // feature vector 38 | std::vector featVec; 39 | // pcd file path 40 | std::string pcd_path = read_file_path + "/" + f; 41 | // read pcd data file 42 | pcl::PointCloud::Ptr cloud (new pcl::PointCloud); 43 | readPCD(pcd_path, cloud); 44 | if (cloud->points.empty()) 45 | continue; 46 | 47 | /*** global features have nothing to do with normals and search radius ***/ 48 | 49 | // geometry size: length, width, height 50 | std::vector geoSize {0.0, 0.0, 0.0}; 51 | geometrySize(cloud, geoSize[0], geoSize[1], geoSize[2]); 52 | for (auto & gs : geoSize) 53 | featVec.push_back(gs); 54 | // intensity: Imax, Imean, Ivar 55 | std::vector intenFeat {0.0, 0.0, 0.0}; 56 | intensity(cloud, intenFeat[0], intenFeat[1], intenFeat[2]); 57 | for (auto & i : intenFeat) 58 | featVec.push_back(i); 59 | // moment invariants (results should be divided by point number in cloud) 60 | std::vector jvec; 61 | momentInvariants(cloud, jvec); 62 | featVec.push_back(jvec[0]/cloud->points.size()); 63 | // global eigen values of covariance matrix 64 | std::vector eigvals; 65 | geoCovEigen(cloud, eigvals, true); 66 | for (auto & e : eigvals) 67 | featVec.push_back(e); 68 | 69 | 70 | /****** local features, which depend on search radius ******/ 71 | 72 | // upsample 73 | while (cloud->points.size() < size_low_limit[i]) 74 | { 75 | pcl::PointCloud::Ptr 76 | cloud_dense (new pcl::PointCloud); 77 | upSample(cloud, cloud_dense); 78 | cloud->points = cloud_dense->points; 79 | } 80 | cloud->width = cloud->points.size(); 81 | cloud->height = 1; 82 | cloud->is_dense = true; 83 | 84 | // downsample 85 | pcl::PointCloud::Ptr 86 | cloud_sparse (new pcl::PointCloud); 87 | downSample_rand(cloud, cloud_sparse, size_high_limit[i], false); 88 | 89 | // calculate lalonde feature histogram 90 | std::vector lalondeHisto; 91 | lalondeFeat(cloud_sparse, 
cloud, searchRadius, lalondeHisto); 92 | if (lalondeHisto.empty()) 93 | continue; 94 | for (auto & lh : lalondeHisto) 95 | featVec.push_back(lh); 96 | 97 | // estimate normals 98 | pcl::PointCloud::Ptr 99 | normals (new pcl::PointCloud); 100 | estimateNormals(cloud_sparse, cloud, searchRadius, normals); 101 | 102 | // remove NaN normals 103 | pcl::PointCloud::Ptr 104 | normals_valid (new pcl::PointCloud); 105 | std::vector mapping; 106 | removeNanNormals(normals, normals_valid, mapping); 107 | if (mapping.empty()) 108 | continue; 109 | 110 | // remove point with NaN normal 111 | pcl::PointCloud::Ptr 112 | cloud_valid (new pcl::PointCloud); 113 | removePointWithNanNormal(cloud_sparse, cloud_valid, mapping); 114 | 115 | // estimate mean intensity gradient 116 | float meanIG = 0.0; 117 | meanIntensityGradient(cloud_valid, normals_valid, searchRadius, meanIG); 118 | featVec.push_back(meanIG); 119 | 120 | // estimate FPFH histogram 121 | std::vector FPFHHisto; 122 | estimateFPFH(cloud_valid, normals_valid, searchRadius, FPFHHisto); 123 | if (FPFHHisto.empty()) 124 | continue; 125 | for (auto & fh : FPFHHisto) 126 | featVec.push_back(fh); 127 | 128 | // check the size of the feature vector featVec 129 | assert(featVec.size() == 47); 130 | 131 | /****** save the calculated feature vector into disk ******/ 132 | std::string write_file_base_path = 133 | "/media/shao/TOSHIBA EXT/data_object_velodyne/feature_matrix_with_label"; 134 | if (is_training) 135 | write_file_base_path += "/train"; 136 | else 137 | write_file_base_path += "/test"; 138 | std::stringstream rs; 139 | rs << searchRadius; 140 | std::string write_file_path = write_file_base_path + "/" + "r_" + rs.str() + ".txt"; 141 | writeTXT(featVec, label[i], write_file_path); 142 | } 143 | } 144 | } -------------------------------------------------------------------------------- /Feature_Engineering/lib/getFeatureVector.h: -------------------------------------------------------------------------------- 1 | #ifndef 
GETFEATUREVECTOR_H 2 | #define GETFEATUREVECTOR_H 3 | 4 | #include "accessFile.h" 5 | #include "sampleCloud.h" 6 | #include "extractFeature.h" 7 | 8 | /* 9 | *********************************************** 10 | ********** build feature vector *************** 11 | *********************************************** 12 | */ 13 | 14 | void 15 | getFeatureVector ( 16 | const float & searchRadius, 17 | bool is_training 18 | ); 19 | 20 | 21 | #endif 22 | -------------------------------------------------------------------------------- /Feature_Engineering/lib/get_file_name.h: -------------------------------------------------------------------------------- 1 | /* Generated by Cython 0.28.3 */ 2 | 3 | #ifndef __PYX_HAVE__get_file_name 4 | #define __PYX_HAVE__get_file_name 5 | 6 | 7 | #ifndef __PYX_HAVE_API__get_file_name 8 | 9 | #ifndef __PYX_EXTERN_C 10 | #ifdef __cplusplus 11 | #define __PYX_EXTERN_C extern "C" 12 | #else 13 | #define __PYX_EXTERN_C extern 14 | #endif 15 | #endif 16 | 17 | #ifndef DL_IMPORT 18 | #define DL_IMPORT(_T) _T 19 | #endif 20 | 21 | __PYX_EXTERN_C PyObject *get_file_name(PyObject *); 22 | 23 | #endif /* !__PYX_HAVE_API__get_file_name */ 24 | 25 | /* WARNING: the interface of the module init function changed in CPython 3.5. */ 26 | /* It now returns a PyModuleDef instance instead of a PyModule instance. 
*/ 27 | 28 | #if PY_MAJOR_VERSION < 3 29 | PyMODINIT_FUNC initget_file_name(void); 30 | #else 31 | PyMODINIT_FUNC PyInit_get_file_name(void); 32 | #endif 33 | 34 | #endif /* !__PYX_HAVE__get_file_name */ 35 | -------------------------------------------------------------------------------- /Feature_Engineering/lib/get_file_name.pyx: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ################################### 3 | # Get all files' name in a folder # 4 | ################################### 5 | 6 | import os 7 | 8 | # use Cython so that C++ can call Python function 9 | 10 | cdef public get_file_name(path): 11 | filelist = os.listdir(path) 12 | return filelist -------------------------------------------------------------------------------- /Feature_Engineering/lib/sampleCloud.cpp: -------------------------------------------------------------------------------- 1 | #include "sampleCloud.h" 2 | 3 | /* 4 | ************************************************* 5 | ********** Down sample with Voxel Grid ********** 6 | ************************************************* 7 | */ 8 | 9 | void 10 | downSample_vg( 11 | const pcl::PointCloud::Ptr & inputCloud, // input inputCloud before downsampling 12 | pcl::PointCloud::Ptr & outputCloud, // output inputCloud after downsampling 13 | const float & cube_leaf_side_length, // param side length of cube leaf of voxel grid [m] 14 | bool debug // debug mode, default true 15 | ) 16 | { 17 | pcl::VoxelGrid vg; 18 | vg.setInputCloud (inputCloud); 19 | vg.setLeafSize (cube_leaf_side_length, cube_leaf_side_length, cube_leaf_side_length); 20 | // clear the possible invalid residual values in outputCloud 21 | if (!outputCloud->points.empty()) 22 | outputCloud->points.clear(); 23 | // compute downsampled cloud 24 | vg.filter (*outputCloud); 25 | 26 | // debug mode 27 | if (debug) 28 | { 29 | std::cout << "downSample_vg(): points size before downsampling: " 30 | << 
inputCloud->points.size() 31 | << std::endl; 32 | std::cout << "downSample_vg(): points size after downsampling: " 33 | << outputCloud->points.size() 34 | << std::endl; 35 | } 36 | } 37 | 38 | 39 | 40 | /* 41 | ****************************************************** 42 | ********** Down sample with random sampling ********** 43 | ****************************************************** 44 | */ 45 | 46 | void 47 | downSample_rand( 48 | const pcl::PointCloud::Ptr & inputCloud, // input inputCloud before downsampling 49 | pcl::PointCloud::Ptr & outputCloud, // output inputCloud after downsampling 50 | const unsigned int & highThreshold, // number of points in outputCloud after downsampling 51 | bool debug // debug mode, default true 52 | ) 53 | { 54 | size_t size = inputCloud->points.size(); 55 | // clear the possible invalid residual values in outputCloud 56 | if (!outputCloud->points.empty()) 57 | outputCloud->points.clear(); 58 | // exception: size of the inputCloud <= highThreshold, no need to downsample 59 | if (size <= highThreshold) 60 | outputCloud = inputCloud; // that's why & outputCloud must have "&" 61 | else 62 | { 63 | // generate sequential index 64 | std::vector idxSeq; 65 | for (unsigned int i = 0; i != size; ++i) 66 | idxSeq.push_back(i); 67 | // randomly shuffle index 68 | srand(time(NULL)); 69 | std::random_shuffle(idxSeq.begin(), idxSeq.end()); 70 | // get randomly downsampled outputCloud 71 | for (auto idx = idxSeq.cbegin(); idx != idxSeq.cbegin()+highThreshold; ++idx) 72 | outputCloud->points.push_back(inputCloud->points[*idx]); 73 | outputCloud->width = outputCloud->size(); 74 | outputCloud->height = 1; 75 | outputCloud->is_dense = true; 76 | } 77 | // debug mode 78 | if (debug) 79 | { 80 | std::cout << "downSample_rand(): points size before downsampling: " 81 | << inputCloud->points.size() 82 | << std::endl; 83 | std::cout << "downSample_rand(): points size after downsampling: " 84 | << outputCloud->points.size() 85 | << std::endl; 86 | } 87 | } 
88 | 89 | 90 | /* 91 | **************************************************************** 92 | ********** Check the existence in vector of a element ********** 93 | **************************************************************** 94 | */ 95 | 96 | // true: element exists in vector; false: not in vector 97 | bool inVector( 98 | const NeighborPointIdxPair & PIdxPair, 99 | const std::vector & PIdxPairVec 100 | ) 101 | { 102 | // if (PIdxPairVec.empty()) 103 | // { 104 | // std::cout << "inVector(): The input vector is empty!" << std::endl; 105 | // return (false); 106 | // } 107 | auto it = std::find(PIdxPairVec.cbegin(), PIdxPairVec.cend(), PIdxPair); 108 | return (!(it == PIdxPairVec.cend())); 109 | } 110 | 111 | 112 | 113 | /* 114 | ******************************************************* 115 | ********** Upsampling for sparse point inputCloud ********** 116 | ******************************************************* 117 | */ 118 | 119 | void 120 | upSample( 121 | const pcl::PointCloud::Ptr & inputCloud, // original cloud as input 122 | pcl::PointCloud::Ptr & outputCloud // upsampled cloud as output 123 | ) 124 | { 125 | if (inputCloud->points.empty()) 126 | { 127 | std::cout << "upSample(): The inputCloud is empty!" << std::endl; 128 | return; 129 | } 130 | pcl::PointCloud::Ptr addedCloud (new pcl::PointCloud); 131 | std::vector PIdxPairVec; 132 | NeighborPointIdxPair PIdxPair; 133 | std::vector searchedPointIdx; 134 | pcl::PointXYZI newPoint; 135 | for (std::size_t i = 0; i < inputCloud->points.size(); ++i) 136 | { 137 | // search mode: k search. 
number of points to search: 2 138 | kdtreeSearch(inputCloud, inputCloud->points[i], searchedPointIdx, "k", 2); 139 | // searchedPointIdx[0] = i, searchedPointIdx[1] = index of the searched point 140 | PIdxPair.index1 = i; 141 | PIdxPair.index2 = searchedPointIdx[1]; 142 | // // debug 143 | // std::cout << "PIdxPair.index1: " << PIdxPair.index1 << std::endl; 144 | // std::cout << "PIdxPair.index2: " << PIdxPair.index2 << std::endl; 145 | 146 | if (inVector(PIdxPair, PIdxPairVec)) 147 | continue; 148 | PIdxPairVec.push_back(PIdxPair); 149 | newPoint.x = (inputCloud->points[i].x + inputCloud->points[searchedPointIdx[1]].x) / 2.0; 150 | newPoint.y = (inputCloud->points[i].y + inputCloud->points[searchedPointIdx[1]].y) / 2.0; 151 | newPoint.z = (inputCloud->points[i].z + inputCloud->points[searchedPointIdx[1]].z) / 2.0; 152 | newPoint.intensity = (inputCloud->points[i].intensity + 153 | inputCloud->points[searchedPointIdx[1]].intensity) / 2.0; 154 | addedCloud->points.push_back(newPoint); 155 | } 156 | addedCloud->width = addedCloud->points.size(); 157 | addedCloud->height = 1; 158 | addedCloud->is_dense = true; 159 | // clear the possible invalid residual values in outputCloud 160 | if (!outputCloud->points.empty()) 161 | outputCloud->points.clear(); 162 | // concatenate the inputCloud and the addedCloud 163 | (*outputCloud) = (*inputCloud) + (*addedCloud); 164 | } 165 | -------------------------------------------------------------------------------- /Feature_Engineering/lib/sampleCloud.h: -------------------------------------------------------------------------------- 1 | #ifndef PREPROCESSING_H 2 | #define PREPROCESSING_H 3 | 4 | #include "commonHeadFiles.h" 5 | 6 | #include 7 | #include "searchKdtree.h" 8 | #include 9 | 10 | 11 | /* 12 | ************************************************* 13 | ********** Down sample with Voxel Grid ********** 14 | ************************************************* 15 | */ 16 | 17 | void 18 | downSample_vg( 19 | const 
pcl::PointCloud::Ptr & inputCloud, // input inputCloud before downsampling 20 | pcl::PointCloud::Ptr & outputCloud, // output inputCloud after downsampling 21 | const float & cube_leaf_side_length, // param side length of cube leaf of voxel grid [m] 22 | bool debug = true // debug mode, default true 23 | ); 24 | 25 | 26 | 27 | /* 28 | ****************************************************** 29 | ********** Down sample with random sampling ********** 30 | ****************************************************** 31 | */ 32 | 33 | void 34 | downSample_rand( 35 | const pcl::PointCloud::Ptr & inputCloud, // input inputCloud before downsampling 36 | pcl::PointCloud::Ptr & outputCloud, // output inputCloud after downsampling 37 | const unsigned int & highThreshold, // number of points in outputCloud after downsampling 38 | bool debug = true // debug mode, default true 39 | ); 40 | 41 | 42 | 43 | 44 | // struct // 45 | struct NeighborPointIdxPair 46 | { 47 | int index1; 48 | int index2; 49 | bool operator==(const NeighborPointIdxPair & pair) const 50 | { 51 | return (pair.index1 == this->index1 && 52 | pair.index2 == this->index2)|| 53 | (pair.index1 == this->index2 && 54 | pair.index2 == this->index1); 55 | } 56 | bool operator!=(const NeighborPointIdxPair & pair) const 57 | { 58 | // return (pair.index1 != this->index1 || 59 | // pair.index2 != this->index2)&& 60 | // (pair.index1 != this->index2 || 61 | // pair.index2 != this->index1); 62 | return !(this->operator==(pair)); 63 | } 64 | }; 65 | 66 | 67 | /* 68 | **************************************************************** 69 | ********** Check the existence in vector of a element ********** 70 | **************************************************************** 71 | */ 72 | 73 | // true: element exists in vector; false: not in vector 74 | bool inVector( 75 | const NeighborPointIdxPair & PIdxPair, 76 | const std::vector & PIdxPairVec 77 | ); 78 | 79 | 80 | 81 | /* 82 | 
************************************************************ 83 | ********** Upsampling for sparse point inputCloud ********** 84 | ************************************************************ 85 | */ 86 | 87 | void 88 | upSample( 89 | const pcl::PointCloud::Ptr & inputCloud, // original cloud as input 90 | pcl::PointCloud::Ptr & outputCloud // upsampled cloud as output 91 | ); 92 | 93 | #endif 94 | -------------------------------------------------------------------------------- /Feature_Engineering/lib/searchKdtree.cpp: -------------------------------------------------------------------------------- 1 | #include "searchKdtree.h" 2 | 3 | /* 4 | ************************************************************************ 5 | ********** kdtree search for searching points in neighborhood ********** 6 | ************************************************************************ 7 | */ 8 | 9 | bool 10 | kdtreeSearch ( 11 | const pcl::PointCloud::Ptr & cloud, // input cloud for searching 12 | const pcl::PointXYZI & searchPoint, // search point 13 | std::vector & searchedPointIdx, // output: point index of searched neighbot points 14 | const std::string & mode, // serch mode: k: nearestKSearch; r: radiusSearch 15 | const unsigned int & k, // k nearest points for mode=k, defualt 1 16 | const float & r // search radius for mode=r, default 0.02m 17 | ) 18 | { 19 | pcl::KdTreeFLANN kdtree; 20 | kdtree.setInputCloud (cloud); 21 | std::vectorpointSqrDistance; 22 | // clear the possible invalid residual values in vector 23 | if (!searchedPointIdx.empty()) 24 | searchedPointIdx.clear(); 25 | 26 | if (mode == "r") 27 | { 28 | if (kdtree.radiusSearch(searchPoint,r, searchedPointIdx, pointSqrDistance) > 0) 29 | return (true); 30 | else 31 | { 32 | std::cerr << "kdtreeSearch(): no points are successfully searched!" 
<< std::endl;
            return (false);
        }
    }
    else if (mode == "k")
    {
        if (kdtree.nearestKSearch(searchPoint, k, searchedPointIdx, pointSqrDistance) > 0)
            return (true);
        else
        {
            std::cerr << "kdtreeSearch(): no points are successfully searched!" << std::endl;
            return (false);
        }
    }
    else
    {
        std::cerr << "kdtreeSearch(): invalid mode!" << std::endl;
        std::cerr << "valid modes are:" << "\n"
                  << "k: nearestKSearch" << "\n"
                  << "r: radiusSearch" << std::endl;
        return (false);
    }
}

// -------------------------------------------------------------------------
// File: /Feature_Engineering/lib/searchKdtree.h
// -------------------------------------------------------------------------
#ifndef KDTREESEARCH_H
#define KDTREESEARCH_H

#include "commonHeadFiles.h"
// NOTE(review): include name stripped by extraction; reconstructed.
#include <pcl/kdtree/kdtree_flann.h>

/*
************************************************************************
********** kdtree search for searching points in neighborhood **********
************************************************************************
*/

// if points are successfully searched, return true; if not, return false
bool
kdtreeSearch (
    const pcl::PointCloud<pcl::PointXYZI>::Ptr & cloud,  // input cloud for searching
    const pcl::PointXYZI & searchPoint,                  // search point
    std::vector<int> & searchedPointIdx,                 // output: indices of neighbors
    const std::string & mode,                            // "k" or "r"
    const unsigned int & k = 1,                          // k nearest points for mode=k, default 1
    const float & r = 0.02                               // search radius for mode=r, default 0.02m
);


#endif

# -------------------------------------------------------------------------
# File: /Feature_Engineering/src/CMakeLists.txt
# -------------------------------------------------------------------------
# FIX (modernization): was cmake_minimum_required(VERSION 2.8) with
# directory-scoped add_compile_options/include_directories and keyword-less
# target_link_libraries. Rewritten target-scoped; PCL 1.8's config exports
# variables rather than imported targets, so link_directories/add_definitions
# are kept for compatibility.
cmake_minimum_required(VERSION 3.5 FATAL_ERROR)

find_package(PCL 1.8 REQUIRED)

# Legacy PCLConfig compatibility (variables, not imported targets).
link_directories(${PCL_LIBRARY_DIRS})
add_definitions(${PCL_DEFINITIONS})

add_executable(testModule testModule.cpp)
add_executable(analyseFeature analyseFeature.cpp)

set(EXECUTABLE_OUTPUT_PATH ${PROJECT_BINARY_DIR}/bin)

foreach(app IN ITEMS testModule analyseFeature)
  target_compile_features(${app} PRIVATE cxx_std_11)
  target_include_directories(${app} PRIVATE
    ${PROJECT_SOURCE_DIR}/lib
    ${PCL_INCLUDE_DIRS})
  target_link_libraries(${app} PRIVATE libfeature ${PCL_LIBRARIES})
endforeach()

// -------------------------------------------------------------------------
// File: /Feature_Engineering/src/analyseFeature.cpp
// -------------------------------------------------------------------------
/* ******** test every module ********** */

#include "accessFile.h"
#include "sampleCloud.h"
#include "extractFeature.h"
#include "getFeatureVector.h"

#define FILE_PATH "/home/shao/文档/VSCodeWS/Masterarbeit_Code/PointCloud_FeatureEngineering/dataset_example/car742.pcd"

/*
* influence of random downsample on eigen values of covariance matrix *
*/
// Repeatedly halves the target size; averages eigenvalues over many runs.
void
influence_downsample_rand_on_covmat_eig (
    const unsigned int & init_size_after_downsampling,
    const unsigned int & num_downsample_size,
    const unsigned int & runningtime_for_average
)
{
    pcl::PointCloud<pcl::PointXYZI>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZI>);
    pcl::PointCloud<pcl::PointXYZI>::Ptr cloud_downsampled (new pcl::PointCloud<pcl::PointXYZI>);
    // before downsampling
    readPCD(FILE_PATH, cloud);
    std::vector<float> eigvals;
    geoCovEigen(cloud, eigvals, true);
    std::cout << "eigen values of global covariance matrix before downsampling: " << std::endl;
    std::cout << eigvals[0] << ">=" << eigvals[1] << ">=" << eigvals[2] << std::endl;
    // downSample_rand
downSample_rand 29 | unsigned int size_after_downsampling = init_size_after_downsampling; 30 | for (unsigned int n = 0; n != num_downsample_size; ++n) 31 | { 32 | if (n) 33 | size_after_downsampling /= 2; 34 | std::vector mean_eigvals {0.0, 0.0, 0.0}; 35 | for (unsigned int i = 0; i != runningtime_for_average; ++i) 36 | { 37 | downSample_rand(cloud, cloud_downsampled, size_after_downsampling, false); 38 | geoCovEigen(cloud_downsampled, eigvals, true); 39 | for (size_t j = 0; j != eigvals.size(); ++j) 40 | mean_eigvals[j] += eigvals[j]; 41 | } 42 | for (auto & e : mean_eigvals) 43 | e /= runningtime_for_average; 44 | 45 | std::cout << "mean eigen values of global covariance matrix after downsampling with cloud size: " 46 | << cloud_downsampled->points.size() 47 | << " over " 48 | << runningtime_for_average 49 | << " running: " 50 | << std::endl; 51 | std::cout << mean_eigvals[0] << ">=" << mean_eigvals[1] << ">=" << mean_eigvals[2] << std::endl; 52 | } 53 | } 54 | 55 | 56 | /* 57 | * influence of voxel grid downsample on eigen values of covariance matrix * 58 | */ 59 | void 60 | influence_downsample_vg_on_covmat_eig ( 61 | const float & init_cube_leaf_side_length, 62 | const unsigned int & runningtime 63 | ) 64 | { 65 | pcl::PointCloud::Ptr cloud (new pcl::PointCloud); 66 | pcl::PointCloud::Ptr cloud_downsampled (new pcl::PointCloud); 67 | float leaf_size = init_cube_leaf_side_length; 68 | readPCD(FILE_PATH, cloud); 69 | std::vector eigvals; 70 | geoCovEigen(cloud, eigvals, true); 71 | std::cout << "eigen values of global covariance matrix before downsampling: " << std::endl; 72 | std::cout << eigvals[0] << ">=" << eigvals[1] << ">=" << eigvals[2] << std::endl; 73 | for (unsigned int i = 1; i != runningtime+1; ++i) 74 | { 75 | if (i > 1) 76 | leaf_size *= 2; 77 | downSample_vg(cloud, cloud_downsampled, leaf_size, true); 78 | geoCovEigen(cloud_downsampled, eigvals, true); 79 | std::cout << "eigen values after downsampling with leaf size " 80 | << leaf_size << "m " 81 
| << "are: " 82 | << "\n" 83 | << eigvals[0] << ">=" << eigvals[1] << ">=" << eigvals[2] 84 | << std::endl; 85 | } 86 | } 87 | 88 | 89 | /* 90 | * influence of upsample on eigen values of covariance matrix * 91 | */ 92 | void 93 | influence_upsample_on_covmat_eig ( 94 | const unsigned int & runningtime 95 | ) 96 | { 97 | pcl::PointCloud::Ptr cloud (new pcl::PointCloud); 98 | pcl::PointCloud::Ptr cloud_upsampled (new pcl::PointCloud); 99 | readPCD(FILE_PATH, cloud); 100 | std::vector eigvals; 101 | geoCovEigen(cloud, eigvals, true); 102 | std::cout << "eigen values of global covariance matrix before upsampling: " << std::endl; 103 | for (auto & e : eigvals) 104 | std::cout << e << " "; 105 | std::cout << std::endl; 106 | // upsample 107 | for (unsigned int i = 0; i != runningtime; ++i) 108 | { 109 | upSample(cloud, cloud_upsampled); 110 | geoCovEigen(cloud_upsampled, eigvals, true); 111 | std::cout << "eigen values of global covariance matrix after upsampling with cloud size: " 112 | << cloud_upsampled->points.size() 113 | << std::endl; 114 | for (auto & e : eigvals) 115 | std::cout << e << " "; 116 | std::cout << std::endl; 117 | // cloud->points.clear(); 118 | cloud->points = cloud_upsampled->points; 119 | // cloud_upsampled->points.clear(); 120 | } 121 | } 122 | 123 | 124 | /* 125 | * influence of random downsample on moment invariants * 126 | */ 127 | void 128 | influence_downsample_rand_on_moment_invariants ( 129 | const unsigned int & init_size_after_downsampling, 130 | const unsigned int & num_downsample_size, 131 | const unsigned int & runningtime_for_average 132 | ) 133 | { 134 | pcl::PointCloud::Ptr cloud (new pcl::PointCloud); 135 | pcl::PointCloud::Ptr cloud_downsampled (new pcl::PointCloud); 136 | // before downsampling 137 | readPCD(FILE_PATH, cloud); 138 | std::vector jvec; 139 | momentInvariants(cloud, jvec); 140 | std::cout << "moment invariants before downsampling: " << std::endl; 141 | for (auto & mi : jvec) 142 | std::cout << 
mi/cloud->points.size() << " "; // divided by cloud->points.size() 143 | std::cout << std::endl; 144 | // downSample_rand 145 | unsigned int size_after_downsampling = init_size_after_downsampling; 146 | for (unsigned int n = 0; n != num_downsample_size; ++n) 147 | { 148 | if (n) 149 | size_after_downsampling /= 2; 150 | std::vector mean_jvec {0.0, 0.0, 0.0}; 151 | for (unsigned int i = 0; i != runningtime_for_average; ++i) 152 | { 153 | downSample_rand(cloud, cloud_downsampled, size_after_downsampling, false); 154 | momentInvariants(cloud_downsampled, jvec); 155 | for (size_t j = 0; j != jvec.size(); ++j) 156 | mean_jvec[j] += jvec[j]; 157 | } 158 | std::cout << "mean moment invariants after downsampling with cloud size: " 159 | << cloud_downsampled->points.size() 160 | << " over " 161 | << runningtime_for_average 162 | << " running: " 163 | << std::endl; 164 | for (auto & e : mean_jvec) 165 | { 166 | e /= runningtime_for_average; 167 | e /= cloud_downsampled->points.size(); // divided by cloud->points.size() 168 | std::cout << e << " "; 169 | } 170 | std::cout << std::endl; 171 | } 172 | } 173 | 174 | 175 | /* 176 | * influence of voxel grid downsample on moment invariants * 177 | */ 178 | void 179 | influence_downsample_vg_on_moment_invariants ( 180 | const float & init_cube_leaf_side_length, 181 | const unsigned int & runningtime 182 | ) 183 | { 184 | pcl::PointCloud::Ptr cloud (new pcl::PointCloud); 185 | pcl::PointCloud::Ptr cloud_downsampled (new pcl::PointCloud); 186 | float leaf_size = init_cube_leaf_side_length; 187 | readPCD(FILE_PATH, cloud); 188 | std::vector jvec; 189 | momentInvariants(cloud, jvec); 190 | std::cout << "moment invariants before downsampling: " << std::endl; 191 | for (auto & mi : jvec) 192 | std::cout << mi/cloud->points.size() << " "; // divided by cloud->points.size() 193 | std::cout << std::endl; 194 | for (unsigned int i = 0; i != runningtime; ++i) 195 | { 196 | if (i) 197 | leaf_size *= 2; 198 | downSample_vg(cloud, 
cloud_downsampled, leaf_size, true); 199 | momentInvariants(cloud_downsampled, jvec); 200 | std::cout << "moment invariants after downsampling with leaf size " 201 | << leaf_size 202 | << "m are: " 203 | << std::endl; 204 | for (auto & e : jvec) 205 | { 206 | e /= cloud_downsampled->points.size(); // divided by cloud->points.size() 207 | std::cout << e << " "; 208 | } 209 | std::cout << std::endl; 210 | } 211 | } 212 | 213 | 214 | /* 215 | * influence of upsample on moment invariants * 216 | */ 217 | void 218 | influence_upsample_on_moment_invariants ( 219 | const unsigned int & runningtime 220 | ) 221 | { 222 | pcl::PointCloud::Ptr cloud (new pcl::PointCloud); 223 | pcl::PointCloud::Ptr cloud_upsampled (new pcl::PointCloud); 224 | readPCD(FILE_PATH, cloud); 225 | std::vector jvec; 226 | momentInvariants(cloud, jvec); 227 | std::cout << "moment invariants before upsampling: " << std::endl; 228 | for (auto & e : jvec) 229 | std::cout << e/cloud->points.size() << " "; // divided by cloud->points.size() 230 | std::cout << std::endl; 231 | // upsample 232 | for (unsigned int i = 0; i != runningtime; ++i) 233 | { 234 | upSample(cloud, cloud_upsampled); 235 | momentInvariants(cloud_upsampled, jvec); 236 | std::cout << "moment invariants after upsampling with cloud size: " 237 | << cloud_upsampled->points.size() 238 | << std::endl; 239 | for (auto & e : jvec) 240 | { 241 | e /= cloud_upsampled->points.size(); // divided by cloud->points.size() 242 | std::cout << e << " "; 243 | } 244 | std::cout << std::endl; 245 | cloud->points = cloud_upsampled->points; 246 | } 247 | } 248 | 249 | 250 | 251 | int 252 | main(void) 253 | { 254 | // /**** test the influence of random downsample on eigen values of covaraince matrix ****/ 255 | // influence_downsample_rand_on_covmat_eig(4000, 4, 1000); 256 | 257 | 258 | // /**** test the influence of voxel grid downsample on eigen values of covaraince matrix ****/ 259 | // influence_downsample_vg_on_covmat_eig(0.01, 5); 260 | 261 | 262 | // 
/**** test the influence of upsample on eigen values of covaraince matrix ****/ 263 | // influence_upsample_on_covmat_eig(4); 264 | 265 | 266 | // /**** test the influence of random downsample on moment invariants ****/ 267 | // // influence_downsample_rand_on_moment_invariants(4000, 4, 1000); 268 | 269 | 270 | // /**** test the influence of voxel grid downsample on moment invariants ****/ 271 | // influence_downsample_vg_on_moment_invariants(0.01, 5); 272 | 273 | 274 | // /**** test the influence of upsample on moment invariants ****/ 275 | // influence_upsample_on_moment_invariants(4); 276 | 277 | 278 | // /**** build training feature vector with different search radius ****/ 279 | // const std::vector searchRadius { 0.08, 0.10, 0.12, 0.14, 0.16, 0.18, 0.20, 0.22}; 280 | // for (auto & r : searchRadius) 281 | // { 282 | // getFeatureVector(r, true); 283 | // } 284 | 285 | 286 | /**** build test feature vector with search radius = 0.16m ****/ 287 | getFeatureVector(0.16, false); 288 | 289 | return 0; 290 | } 291 | -------------------------------------------------------------------------------- /Feature_Engineering/src/estimate_mutual_info.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | ######################################################## 4 | ### estimate mutual information (dependency) between ### 5 | ### feature vectors with different search radius for ### 6 | ### local feature estimation and target ### 7 | ######################################################## 8 | 9 | import numpy as np 10 | from sklearn import preprocessing 11 | from sklearn.feature_selection import mutual_info_classif 12 | import os 13 | import matplotlib.pyplot as plt 14 | 15 | base_path = "/media/shao/TOSHIBA EXT/data_object_velodyne/feature_matrix_with_label/train" 16 | 17 | # calculate and save statistic info about every label # 18 | def get_samples_num (): 19 | read_path = os.path.join(base_path, "data") 20 | 
# -*- coding: utf-8 -*-

########################################################
### estimate mutual information (dependency) between ###
### feature vectors with different search radius for ###
### local feature estimation and target              ###
########################################################

import numpy as np
from sklearn import preprocessing
from sklearn.feature_selection import mutual_info_classif
import os
import matplotlib.pyplot as plt

# root of the train feature-matrix directory tree (data/, samples_number/,
# mutual_info_local/, mutual_info_global/ live below it)
base_path = "/media/shao/TOSHIBA EXT/data_object_velodyne/feature_matrix_with_label/train"


# calculate and save statistic info about every label #
def get_samples_num():
    """Count the samples per class label (0-4) in every feature-matrix file
    under <base_path>/data and save the counts as *_samples_num.txt under
    <base_path>/samples_number. The label is the last column of each file."""
    read_path = os.path.join(base_path, "data")
    filelist = os.listdir(read_path)
    for file in filelist:
        read_file = os.path.join(read_path, file)
        if os.path.isdir(read_file):
            continue
        dataset = np.loadtxt(read_file)
        target = dataset[:, -1]
        samples_num = np.zeros([1, 5], dtype=float)
        for i in range(0, 5):
            # FIX(review): count matches directly instead of incrementing in a
            # Python loop over np.argwhere (same result, O(n) once per label)
            samples_num[0, i] = np.count_nonzero(target == i)
        write_path = os.path.join(base_path, "samples_number")
        write_file = os.path.join(write_path, file)
        np.savetxt(os.path.splitext(write_file)[0] + '_samples_num.txt', samples_num)


# estimate mutual info and save #
def estimate_mutual_info():
    """Estimate mutual information between features and the class target for
    every feature-matrix file; local features (columns 10..n-2) and global
    features (columns 0..9) are saved to separate directories."""
    read_path = os.path.join(base_path, "data")
    filelist = os.listdir(read_path)
    for file in filelist:
        read_file = os.path.join(read_path, file)
        if os.path.isdir(read_file):
            continue
        dataset = np.loadtxt(read_file)
        target = dataset[:, -1]
        # feature scaling and normalization, in place on the feature columns
        # (dataset[:, :-1] is a basic-slicing view, so copy=False mutates dataset)
        scaler = preprocessing.StandardScaler(copy=False)
        scaler.fit_transform(dataset[:, :-1])
        normalizer = preprocessing.Normalizer(norm='l2', copy=False)
        normalizer.fit_transform(dataset[:, :-1])
        # mutual_info of local features
        data_local = dataset[:, 10:-1]
        # FIX(review): pass keyword arguments; the original passed 'auto'
        # positionally (discrete_features) and copy='true' -- a string where a
        # bool is expected (only worked via truthiness).
        mi = mutual_info_classif(data_local, target, discrete_features='auto',
                                 copy=True, n_neighbors=3)
        write_path = os.path.join(base_path, "mutual_info_local")
        write_file = os.path.join(write_path, file)
        np.savetxt(os.path.splitext(write_file)[0] + '_m.txt', mi)
        # mutual_info of global features
        data_global = dataset[:, 0:10]
        mi = mutual_info_classif(data_global, target, discrete_features='auto',
                                 copy=True, n_neighbors=3)
        write_path = os.path.join(base_path, "mutual_info_global")
        write_file = os.path.join(write_path, file)
        np.savetxt(os.path.splitext(write_file)[0] + '_m.txt', mi)


# visualize relationship between mutual_info of local features and search radius using bar graph #
def visualize_mutual_info_local():
    """Grouped bar chart: mutual information of the four local features
    (lalonde1-3, meanIG) for each search-radius file in mutual_info_local."""
    read_path = os.path.join(base_path, "mutual_info_local")
    filelist = os.listdir(read_path)
    plt.figure(1)
    feature_name = ['lalonde1', 'lalonde2', 'lalonde3', 'meanIG']
    color = ['#0072BC', '#ED1C24', '#0C1234', '#7200AD', '#0702CD', '#DE08FF', '#0088AC', '#AF2C24']
    x = list(range(len(feature_name)))
    total_width = 0.8
    num_bar = len(filelist)
    width = total_width / num_bar
    # FIX(review): iterate over the actual number of files instead of the
    # hard-coded range(0, 8) (IndexError with fewer files, missing bars with more)
    for j in range(num_bar):
        read_file = os.path.join(read_path, filelist[j])
        if os.path.isdir(read_file):
            continue
        mutual_info = np.loadtxt(read_file)
        # FIX(review): np.squeeze returns a new array; the original discarded it
        mutual_info = np.squeeze(mutual_info)
        data = mutual_info[0:4]
        plt.bar(x, data, width=width, color=color[j % len(color)],
                label=os.path.splitext(filelist[j])[0],
                tick_label=feature_name, edgecolor='white')
        for i in range(len(x)):
            x[i] = x[i] + width

    plt.rcParams['font.size'] = 18
    plt.rcParams['figure.figsize'] = (1, 1)
    plt.xlabel('Merkmale')
    plt.ylabel('Gegenseitige Information')
    plt.legend(loc='upper right')
    plt.title('Beziehung zwischen gegenseitiger Info und Suchradius')
    # plt.savefig('mutual_info_and_search_radius') # savefig must be called before plt.show()
    plt.show()


# visualize mutual_info of global features using bar graph #
def visualize_mutual_info_global():
    """Bar chart of the mutual information of the ten global features, read
    from one file of the mutual_info_global directory."""
    read_path = os.path.join(base_path, "mutual_info_global")
    filelist = os.listdir(read_path)
    plt.figure(2)
    feature_name = ['Laenge', 'Breite', 'Hoehe', 'Imax', 'Imean', 'Ivar', 'MI', 'e1', 'e2', 'e3']
    # NOTE(review): os.listdir order is arbitrary, so filelist[3] selects an
    # unspecified radius file -- TODO select by explicit file name.
    read_file = os.path.join(read_path, filelist[3])
    if os.path.isdir(read_file):
        return
    mutual_info = np.loadtxt(read_file)
    # FIX(review): np.squeeze returns a new array; the original discarded it
    mutual_info = np.squeeze(mutual_info)
    plt.bar(range(len(mutual_info)), mutual_info, color='blue',
            width=0.4, tick_label=feature_name, edgecolor='white')

    plt.rcParams['font.size'] = 18
    plt.rcParams['figure.figsize'] = (1, 1)
    plt.xlabel('Merkmale')
    plt.ylabel('Gegenseitige Information')
    plt.legend(loc='upper right')
    plt.title('Gegenseitige Info globaler Merkmale')
    # plt.savefig('mutual_info_and_search_radius') # savefig must be called before plt.show()
    plt.show()
= (1, 1) 121 | plt.xlabel('Merkmale') 122 | plt.ylabel('Gegenseitige Information') 123 | plt.legend(loc='upper right') 124 | plt.title('Gegenseitige Info globaler Merkmale') 125 | # plt.savefig('mutual_info_and_search_radius') # savefig must be called before plt.show() 126 | plt.show() 127 | 128 | 129 | 130 | # visualize relationship between mutual_info of FPFH and search radius # 131 | def visualize_mutual_info_FPFH (): 132 | read_path = os.path.join(base_path, "mutual_info_local") 133 | filelist = os.listdir(read_path) 134 | plt.figure(3) 135 | for file in filelist: 136 | read_file = os.path.join(read_path, file) 137 | if os.path.isdir(read_file): 138 | continue 139 | mutual_info = np.loadtxt(read_file) 140 | np.squeeze(mutual_info) 141 | data = mutual_info[4:] 142 | plt.plot(range(0, 33), data, label=os.path.splitext(file)[0]) 143 | 144 | plt.rcParams['font.size'] = 18 145 | plt.rcParams['figure.figsize'] = (1, 1) 146 | plt.xlabel('FPFH') 147 | plt.ylabel('Gegenseitige Information') 148 | plt.legend(loc='upper right') 149 | plt.title('Beziehung zwischen gegenseitiger Info und Suchradius') 150 | # plt.savefig('mutual_info_and_search_radius') # savefig must be called before plt.show() 151 | plt.show() 152 | 153 | 154 | 155 | estimate_mutual_info() 156 | get_samples_num() 157 | visualize_mutual_info_local() 158 | visualize_mutual_info_global() 159 | visualize_mutual_info_FPFH() 160 | -------------------------------------------------------------------------------- /Feature_Engineering/src/testModule.cpp: -------------------------------------------------------------------------------- 1 | /* ******** test every module ********** */ 2 | 3 | #include "accessFile.h" 4 | #include 5 | #include "sampleCloud.h" 6 | #include "extractFeature.h" 7 | #include "searchKdtree.h" 8 | 9 | int 10 | main(void) 11 | { 12 | // /* 13 | // * Test functions: readPCD and writePCD * 14 | // */ 15 | // std::stringstream r_filename, w_filename; 16 | // r_filename << "car1.pcd"; 17 | // 
// NOTE(review): this region of testModule.cpp's main() consisted entirely of
// commented-out ad-hoc module tests (readPCD/writePCD, writeTXT, getFileName,
// downSample_vg, kdtreeSearch, estimateNormals/removeNanNormals, upSample,
// cloud2Matrix, covMatrix, geometrySize, geoCovEigen, lalondeFeat, intensity,
// momentInvariants, estimateFPFH, meanIntensityGradient).
// Commented-out code belongs in version-control history, not in the source
// tree; restore individual snippets from history when a module needs manual
// re-testing.
inputCloud); 458 | // searchSurface = inputCloud; 459 | // estimateNormals(inputCloud, searchSurface, searchRadius, normals, true); 460 | // std::vector mapping; 461 | // pcl::PointCloud::Ptr outputNormals (new pcl::PointCloud); 462 | // removeNanNormals(normals, outputNormals, mapping); 463 | // pcl::PointCloud::Ptr outputCloud (new pcl::PointCloud); 464 | // removePointWithNanNormal(inputCloud, outputCloud, mapping, true); 465 | // // compute mean intensity gradient (squared) 466 | // float meanIG = 0.0; 467 | // meanIntensityGradient(outputCloud, outputNormals, searchRadius, meanIG, true); 468 | // std::cout << meanIG << std::endl; 469 | } 470 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # PointCloud_Classification 2 | Point Cloud Object Classification using Machine Learning 3 | -------------------------------------------------------------------------------- /Test/basic_test_randomforest.py: -------------------------------------------------------------------------------- 1 | # encoding=utf-8 2 | 3 | ############################## 4 | # basic_test_randomforest.py # 5 | ############################## 6 | 7 | from sklearn.externals import joblib 8 | import numpy as np 9 | from sklearn import metrics 10 | import os 11 | import sys 12 | 13 | # get model file path 14 | BASE_DIR = os.path.dirname(os.path.abspath(__file__)) 15 | ROOT_DIR = os.path.dirname(BASE_DIR) 16 | MODEL_DIR = ROOT_DIR + '/Training/rf.pkl' 17 | 18 | # load data for testing 19 | feature_matrix = np.loadtxt('/media/shao/TOSHIBA EXT/data_object_velodyne/feature_matrix_with_label/test/r_0.16.txt') 20 | data = feature_matrix[:, :-1] 21 | target = feature_matrix[:, -1] 22 | 23 | # load the trained model 24 | rfc = joblib.load(MODEL_DIR) 25 | 26 | # prediction / test 27 | y_pred = rfc.predict(data) 28 | score = metrics.accuracy_score(target, y_pred) 29 | print('accuracy 
score = ', score) 30 | conf_matrix = metrics.confusion_matrix(target, y_pred, [0,1,2,3,4]) 31 | print('confusion matrix = ') 32 | print(conf_matrix) 33 | recall = metrics.recall_score(target, y_pred, average='weighted') 34 | print('recall score = ', recall) 35 | precision = metrics.precision_score(target, y_pred, average='weighted') 36 | print('precision score = ', precision) 37 | f1 = metrics.f1_score(target, y_pred, average='weighted') 38 | print('f1 score = ', f1) 39 | prob = rfc.predict_proba(data) 40 | log_loss = metrics.log_loss(target, prob, labels=np.array([0,1,2,3,4])) 41 | print('log loss = ', log_loss) -------------------------------------------------------------------------------- /Test/basic_test_svm.py: -------------------------------------------------------------------------------- 1 | # encoding=utf-8 2 | 3 | ##################### 4 | # basic_test_svm.py # 5 | ##################### 6 | 7 | from sklearn import preprocessing 8 | from sklearn.externals import joblib 9 | import numpy as np 10 | from sklearn import metrics 11 | import os 12 | import sys 13 | 14 | # get model file path 15 | BASE_DIR = os.path.dirname(os.path.abspath(__file__)) 16 | ROOT_DIR = os.path.dirname(BASE_DIR) 17 | MODEL_DIR = ROOT_DIR + '/Training/svm.pkl' 18 | 19 | # load data for testing 20 | feature_matrix = np.loadtxt('/media/shao/TOSHIBA EXT/data_object_velodyne/feature_matrix_with_label/test/r_0.16.txt') 21 | print('the shape of the loaded feature matrix is ', feature_matrix.shape) 22 | 23 | # feature scaling and normalizing 24 | np.random.shuffle(feature_matrix) 25 | data = feature_matrix[:, :-1] 26 | target = feature_matrix[:, -1] 27 | # scaler = preprocessing.MaxAbsScaler(copy=False) 28 | scaler = preprocessing.StandardScaler(copy=False) 29 | scaler.fit_transform(data) 30 | normalizer = preprocessing.Normalizer(norm='l2', copy=False) 31 | normalizer.fit_transform(data) 32 | 33 | # load the trained model 34 | clf = joblib.load(MODEL_DIR) 35 | 36 | # prediction / test 37 | 
y_pred = clf.predict(data) 38 | score = metrics.accuracy_score(target, y_pred) 39 | print('accuracy score = ', score) 40 | conf_matrix = metrics.confusion_matrix(target, y_pred, [0,1,2,3,4]) 41 | print('confusion matrix = ') 42 | print(conf_matrix) 43 | recall = metrics.recall_score(target, y_pred, average='weighted') 44 | print('recall score = ', recall) 45 | precision = metrics.precision_score(target, y_pred, average='weighted') 46 | print('precision score = ', precision) 47 | f1 = metrics.f1_score(target, y_pred, average='weighted') 48 | print('f1 score = ', f1) 49 | # y_pred = clf.decision_function(data) 50 | # hinge_loss = metrics.hinge_loss(target, y_pred, labels=np.array([0,1,2,3,4])) 51 | # print('hinge loss = ', hinge_loss) 52 | prob = clf.predict_proba(data) 53 | log_loss = metrics.log_loss(target, prob, labels=np.array([0,1,2,3,4])) 54 | print('log loss = ', log_loss) -------------------------------------------------------------------------------- /Test/basic_test_xgb.py: -------------------------------------------------------------------------------- 1 | # encoding=utf-8 2 | 3 | ######################### 4 | # basic_test_xgboost.py # 5 | ######################### 6 | 7 | from sklearn.externals import joblib 8 | import numpy as np 9 | from sklearn import metrics 10 | import os 11 | import sys 12 | 13 | # get model file path 14 | BASE_DIR = os.path.dirname(os.path.abspath(__file__)) 15 | ROOT_DIR = os.path.dirname(BASE_DIR) 16 | MODEL_DIR = ROOT_DIR + '/Training/xgb.pkl' 17 | 18 | # load data for testing 19 | feature_matrix = np.loadtxt('/media/shao/TOSHIBA EXT/data_object_velodyne/feature_matrix_with_label/test/r_0.16.txt') 20 | data = feature_matrix[:, :-1] 21 | target = feature_matrix[:, -1] 22 | 23 | # load the trained model 24 | xgbc = joblib.load(MODEL_DIR) 25 | 26 | # prediction / test 27 | y_pred = xgbc.predict(data) 28 | score = metrics.accuracy_score(target, y_pred) 29 | print('accuracy score = ', score) 30 | conf_matrix = 
metrics.confusion_matrix(target, y_pred, [0,1,2,3,4]) 31 | print('confusion matrix = ') 32 | print(conf_matrix) 33 | recall = metrics.recall_score(target, y_pred, average='weighted') 34 | print('recall score = ', recall) 35 | precision = metrics.precision_score(target, y_pred, average='weighted') 36 | print('precision score = ', precision) 37 | f1 = metrics.f1_score(target, y_pred, average='weighted') 38 | print('f1 score = ', f1) 39 | prob = xgbc.predict_proba(data) 40 | log_loss = metrics.log_loss(target, prob, labels=np.array([0,1,2,3,4])) 41 | print('log loss = ', log_loss) -------------------------------------------------------------------------------- /Test/noise_test_rf.py: -------------------------------------------------------------------------------- 1 | # encoding=utf-8 2 | 3 | ############################## 4 | # basic_test_randomforest.py # 5 | ############################## 6 | 7 | from sklearn.externals import joblib 8 | import numpy as np 9 | from sklearn import metrics 10 | import os 11 | import sys 12 | 13 | # get model file path 14 | BASE_DIR = os.path.dirname(os.path.abspath(__file__)) 15 | ROOT_DIR = os.path.dirname(BASE_DIR) 16 | MODEL_DIR = ROOT_DIR + '/Training/rf.pkl' 17 | 18 | # load data for testing 19 | feature_matrix = np.loadtxt('/media/shao/TOSHIBA EXT/data_object_velodyne/Daten/test/robust_test/noise/noi_0.09.txt') 20 | data = feature_matrix[:, :-1] 21 | target = feature_matrix[:, -1] 22 | 23 | # load the trained model 24 | rfc = joblib.load(MODEL_DIR) 25 | 26 | # prediction / test 27 | y_pred = rfc.predict(data) 28 | score = metrics.accuracy_score(target, y_pred) 29 | print('accuracy score = ', score) 30 | conf_matrix = metrics.confusion_matrix(target, y_pred, [0,1,2,3,4]) 31 | print('confusion matrix = ') 32 | print(conf_matrix) 33 | recall = metrics.recall_score(target, y_pred, average='weighted') 34 | print('recall score = ', recall) 35 | precision = metrics.precision_score(target, y_pred, average='weighted') 36 | 
print('precision score = ', precision) 37 | f1 = metrics.f1_score(target, y_pred, average='weighted') 38 | print('f1 score = ', f1) 39 | prob = rfc.predict_proba(data) 40 | log_loss = metrics.log_loss(target, prob, labels=np.array([0,1,2,3,4])) 41 | print('log loss = ', log_loss) -------------------------------------------------------------------------------- /Test/noise_test_svm.py: -------------------------------------------------------------------------------- 1 | # encoding=utf-8 2 | 3 | ##################### 4 | # basic_test_svm.py # 5 | ##################### 6 | 7 | from sklearn import preprocessing 8 | from sklearn.externals import joblib 9 | import numpy as np 10 | from sklearn import metrics 11 | import os 12 | import sys 13 | 14 | # get model file path 15 | BASE_DIR = os.path.dirname(os.path.abspath(__file__)) 16 | ROOT_DIR = os.path.dirname(BASE_DIR) 17 | MODEL_DIR = ROOT_DIR + '/Training/svm.pkl' 18 | 19 | # load data for testing 20 | feature_matrix = np.loadtxt('/media/shao/TOSHIBA EXT/data_object_velodyne/Daten/test/robust_test/noise/noi_0.09.txt') 21 | print('the shape of the loaded feature matrix is ', feature_matrix.shape) 22 | 23 | # feature scaling and normalizing 24 | np.random.shuffle(feature_matrix) 25 | data = feature_matrix[:, :-1] 26 | target = feature_matrix[:, -1] 27 | # scaler = preprocessing.MaxAbsScaler(copy=False) 28 | scaler = preprocessing.StandardScaler(copy=False) 29 | scaler.fit_transform(data) 30 | normalizer = preprocessing.Normalizer(norm='l2', copy=False) 31 | normalizer.fit_transform(data) 32 | 33 | # load the trained model 34 | clf = joblib.load(MODEL_DIR) 35 | 36 | # prediction / test 37 | y_pred = clf.predict(data) 38 | score = metrics.accuracy_score(target, y_pred) 39 | print('accuracy score = ', score) 40 | conf_matrix = metrics.confusion_matrix(target, y_pred, [0,1,2,3,4]) 41 | print('confusion matrix = ') 42 | print(conf_matrix) 43 | recall = metrics.recall_score(target, y_pred, average='weighted') 44 | print('recall 
score = ', recall) 45 | precision = metrics.precision_score(target, y_pred, average='weighted') 46 | print('precision score = ', precision) 47 | f1 = metrics.f1_score(target, y_pred, average='weighted') 48 | print('f1 score = ', f1) 49 | # y_pred = clf.decision_function(data) 50 | # hinge_loss = metrics.hinge_loss(target, y_pred, labels=np.array([0,1,2,3,4])) 51 | # print('hinge loss = ', hinge_loss) 52 | prob = clf.predict_proba(data) 53 | log_loss = metrics.log_loss(target, prob, labels=np.array([0,1,2,3,4])) 54 | print('log loss = ', log_loss) -------------------------------------------------------------------------------- /Test/noise_test_xgb.py: -------------------------------------------------------------------------------- 1 | # encoding=utf-8 2 | 3 | ######################### 4 | # noise_test_xgboost.py # 5 | ######################### 6 | 7 | from sklearn.externals import joblib 8 | import numpy as np 9 | from sklearn import metrics 10 | import os 11 | import sys 12 | 13 | # get model file path 14 | BASE_DIR = os.path.dirname(os.path.abspath(__file__)) 15 | ROOT_DIR = os.path.dirname(BASE_DIR) 16 | MODEL_DIR = ROOT_DIR + '/Training/xgb.pkl' 17 | 18 | # load data for testing 19 | feature_matrix = np.loadtxt('/media/shao/TOSHIBA EXT/data_object_velodyne/Daten/test/robust_test/noise/noi_0.09.txt') 20 | data = feature_matrix[:, :-1] 21 | target = feature_matrix[:, -1] 22 | 23 | # load the trained model 24 | xgbc = joblib.load(MODEL_DIR) 25 | 26 | # prediction / test 27 | y_pred = xgbc.predict(data) 28 | score = metrics.accuracy_score(target, y_pred) 29 | print('accuracy score = ', score) 30 | conf_matrix = metrics.confusion_matrix(target, y_pred, [0,1,2,3,4]) 31 | print('confusion matrix = ') 32 | print(conf_matrix) 33 | recall = metrics.recall_score(target, y_pred, average='weighted') 34 | print('recall score = ', recall) 35 | precision = metrics.precision_score(target, y_pred, average='weighted') 36 | print('precision score = ', precision) 37 | f1 = 
metrics.f1_score(target, y_pred, average='weighted') 38 | print('f1 score = ', f1) 39 | prob = xgbc.predict_proba(data) 40 | log_loss = metrics.log_loss(target, prob, labels=np.array([0,1,2,3,4])) 41 | print('log loss = ', log_loss) -------------------------------------------------------------------------------- /Test/occlusion_test_rf.py: -------------------------------------------------------------------------------- 1 | # encoding=utf-8 2 | 3 | ############################## 4 | # basic_test_randomforest.py # 5 | ############################## 6 | 7 | from sklearn.externals import joblib 8 | import numpy as np 9 | from sklearn import metrics 10 | import os 11 | import sys 12 | 13 | # get model file path 14 | BASE_DIR = os.path.dirname(os.path.abspath(__file__)) 15 | ROOT_DIR = os.path.dirname(BASE_DIR) 16 | MODEL_DIR = ROOT_DIR + '/Training/rf.pkl' 17 | 18 | # load data for testing 19 | feature_matrix = np.loadtxt('/media/shao/TOSHIBA EXT/data_object_velodyne/Daten/test/robust_test/occlusion/op_90.txt') 20 | data = feature_matrix[:, :-1] 21 | target = feature_matrix[:, -1] 22 | 23 | # load the trained model 24 | rfc = joblib.load(MODEL_DIR) 25 | 26 | # prediction / test 27 | y_pred = rfc.predict(data) 28 | score = metrics.accuracy_score(target, y_pred) 29 | print('accuracy score = ', score) 30 | conf_matrix = metrics.confusion_matrix(target, y_pred, [0,1,2,3,4]) 31 | print('confusion matrix = ') 32 | print(conf_matrix) 33 | recall = metrics.recall_score(target, y_pred, average='weighted') 34 | print('recall score = ', recall) 35 | precision = metrics.precision_score(target, y_pred, average='weighted') 36 | print('precision score = ', precision) 37 | f1 = metrics.f1_score(target, y_pred, average='weighted') 38 | print('f1 score = ', f1) 39 | prob = rfc.predict_proba(data) 40 | log_loss = metrics.log_loss(target, prob, labels=np.array([0,1,2,3,4])) 41 | print('log loss = ', log_loss) -------------------------------------------------------------------------------- 
/Test/occlusion_test_svm.py: -------------------------------------------------------------------------------- 1 | # encoding=utf-8 2 | 3 | ##################### 4 | # basic_test_svm.py # 5 | ##################### 6 | 7 | from sklearn import preprocessing 8 | from sklearn.externals import joblib 9 | import numpy as np 10 | from sklearn import metrics 11 | import os 12 | import sys 13 | 14 | # get model file path 15 | BASE_DIR = os.path.dirname(os.path.abspath(__file__)) 16 | ROOT_DIR = os.path.dirname(BASE_DIR) 17 | MODEL_DIR = ROOT_DIR + '/Training/svm.pkl' 18 | 19 | # load data for testing 20 | feature_matrix = np.loadtxt('/media/shao/TOSHIBA EXT/data_object_velodyne/Daten/test/robust_test/occlusion/op_90.txt') 21 | print('the shape of the loaded feature matrix is ', feature_matrix.shape) 22 | 23 | # feature scaling and normalizing 24 | np.random.shuffle(feature_matrix) 25 | data = feature_matrix[:, :-1] 26 | target = feature_matrix[:, -1] 27 | # scaler = preprocessing.MaxAbsScaler(copy=False) 28 | scaler = preprocessing.StandardScaler(copy=False) 29 | scaler.fit_transform(data) 30 | normalizer = preprocessing.Normalizer(norm='l2', copy=False) 31 | normalizer.fit_transform(data) 32 | 33 | # load the trained model 34 | clf = joblib.load(MODEL_DIR) 35 | 36 | # prediction / test 37 | y_pred = clf.predict(data) 38 | score = metrics.accuracy_score(target, y_pred) 39 | print('accuracy score = ', score) 40 | conf_matrix = metrics.confusion_matrix(target, y_pred, [0,1,2,3,4]) 41 | print('confusion matrix = ') 42 | print(conf_matrix) 43 | recall = metrics.recall_score(target, y_pred, average='weighted') 44 | print('recall score = ', recall) 45 | precision = metrics.precision_score(target, y_pred, average='weighted') 46 | print('precision score = ', precision) 47 | f1 = metrics.f1_score(target, y_pred, average='weighted') 48 | print('f1 score = ', f1) 49 | # y_pred = clf.decision_function(data) 50 | # hinge_loss = metrics.hinge_loss(target, y_pred, 
labels=np.array([0,1,2,3,4])) 51 | # print('hinge loss = ', hinge_loss) 52 | prob = clf.predict_proba(data) 53 | log_loss = metrics.log_loss(target, prob, labels=np.array([0,1,2,3,4])) 54 | print('log loss = ', log_loss) -------------------------------------------------------------------------------- /Test/occlusion_test_xgb.py: -------------------------------------------------------------------------------- 1 | # encoding=utf-8 2 | 3 | ############################# 4 | # occlusion_test_xgboost.py # 5 | ############################# 6 | 7 | from sklearn.externals import joblib 8 | import numpy as np 9 | from sklearn import metrics 10 | import os 11 | import sys 12 | 13 | # get model file path 14 | BASE_DIR = os.path.dirname(os.path.abspath(__file__)) 15 | ROOT_DIR = os.path.dirname(BASE_DIR) 16 | MODEL_DIR = ROOT_DIR + '/Training/xgb.pkl' 17 | 18 | # load data for testing 19 | feature_matrix = np.loadtxt('/media/shao/TOSHIBA EXT/data_object_velodyne/Daten/test/robust_test/occlusion/op_90.txt') 20 | data = feature_matrix[:, :-1] 21 | target = feature_matrix[:, -1] 22 | 23 | # load the trained model 24 | xgbc = joblib.load(MODEL_DIR) 25 | 26 | # prediction / test 27 | y_pred = xgbc.predict(data) 28 | score = metrics.accuracy_score(target, y_pred) 29 | print('accuracy score = ', score) 30 | conf_matrix = metrics.confusion_matrix(target, y_pred, [0,1,2,3,4]) 31 | print('confusion matrix = ') 32 | print(conf_matrix) 33 | recall = metrics.recall_score(target, y_pred, average='weighted') 34 | print('recall score = ', recall) 35 | precision = metrics.precision_score(target, y_pred, average='weighted') 36 | print('precision score = ', precision) 37 | f1 = metrics.f1_score(target, y_pred, average='weighted') 38 | print('f1 score = ', f1) 39 | prob = xgbc.predict_proba(data) 40 | log_loss = metrics.log_loss(target, prob, labels=np.array([0,1,2,3,4])) 41 | print('log loss = ', log_loss) -------------------------------------------------------------------------------- 
/Test/robust_test_data_features/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8 FATAL_ERROR) 2 | 3 | project(robust_test_data_feature) 4 | 5 | find_package(PCL 1.8 REQUIRED) 6 | 7 | add_compile_options(-std=c++11) 8 | 9 | #set(CMAKE_VERBOSE_MAKEFILE on) 10 | 11 | #message(${PROJECT_SOURCE_DIR}) 12 | 13 | add_subdirectory(src) 14 | 15 | add_subdirectory(lib) 16 | -------------------------------------------------------------------------------- /Test/robust_test_data_features/build/Makefile: -------------------------------------------------------------------------------- 1 | # CMAKE generated file: DO NOT EDIT! 2 | # Generated by "Unix Makefiles" Generator, CMake Version 3.5 3 | 4 | # Default target executed when no arguments are given to make. 5 | default_target: all 6 | 7 | .PHONY : default_target 8 | 9 | # Allow only one "make -f Makefile2" at a time, but pass parallelism. 10 | .NOTPARALLEL: 11 | 12 | 13 | #============================================================================= 14 | # Special targets provided by cmake. 15 | 16 | # Disable implicit rules so canonical targets will work. 17 | .SUFFIXES: 18 | 19 | 20 | # Remove some rules from gmake that .SUFFIXES does not remove. 21 | SUFFIXES = 22 | 23 | .SUFFIXES: .hpux_make_needs_suffix_list 24 | 25 | 26 | # Suppress display of executed commands. 27 | $(VERBOSE).SILENT: 28 | 29 | 30 | # A target that is always out of date. 31 | cmake_force: 32 | 33 | .PHONY : cmake_force 34 | 35 | #============================================================================= 36 | # Set environment variables for the build. 37 | 38 | # The shell in which to execute make rules. 39 | SHELL = /bin/sh 40 | 41 | # The CMake executable. 42 | CMAKE_COMMAND = /usr/bin/cmake 43 | 44 | # The command to remove a file. 45 | RM = /usr/bin/cmake -E remove -f 46 | 47 | # Escaping for special characters. 
48 | EQUALS = = 49 | 50 | # The top-level source directory on which CMake was run. 51 | CMAKE_SOURCE_DIR = /home/shao/文档/VSCodeWS/Masterarbeit_Code/PointCloud_Classification/Test/robust_test_data_features 52 | 53 | # The top-level build directory on which CMake was run. 54 | CMAKE_BINARY_DIR = /home/shao/文档/VSCodeWS/Masterarbeit_Code/PointCloud_Classification/Test/robust_test_data_features/build 55 | 56 | #============================================================================= 57 | # Targets provided globally by CMake. 58 | 59 | # Special rule for the target edit_cache 60 | edit_cache: 61 | @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "No interactive CMake dialog available..." 62 | /usr/bin/cmake -E echo No\ interactive\ CMake\ dialog\ available. 63 | .PHONY : edit_cache 64 | 65 | # Special rule for the target edit_cache 66 | edit_cache/fast: edit_cache 67 | 68 | .PHONY : edit_cache/fast 69 | 70 | # Special rule for the target rebuild_cache 71 | rebuild_cache: 72 | @$(CMAKE_COMMAND) -E cmake_echo_color --switch=$(COLOR) --cyan "Running CMake to regenerate build system..." 
73 | /usr/bin/cmake -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) 74 | .PHONY : rebuild_cache 75 | 76 | # Special rule for the target rebuild_cache 77 | rebuild_cache/fast: rebuild_cache 78 | 79 | .PHONY : rebuild_cache/fast 80 | 81 | # The main all target 82 | all: cmake_check_build_system 83 | $(CMAKE_COMMAND) -E cmake_progress_start /home/shao/文档/VSCodeWS/Masterarbeit_Code/PointCloud_Classification/Test/robust_test_data_features/build/CMakeFiles /home/shao/文档/VSCodeWS/Masterarbeit_Code/PointCloud_Classification/Test/robust_test_data_features/build/CMakeFiles/progress.marks 84 | $(MAKE) -f CMakeFiles/Makefile2 all 85 | $(CMAKE_COMMAND) -E cmake_progress_start /home/shao/文档/VSCodeWS/Masterarbeit_Code/PointCloud_Classification/Test/robust_test_data_features/build/CMakeFiles 0 86 | .PHONY : all 87 | 88 | # The main clean target 89 | clean: 90 | $(MAKE) -f CMakeFiles/Makefile2 clean 91 | .PHONY : clean 92 | 93 | # The main clean target 94 | clean/fast: clean 95 | 96 | .PHONY : clean/fast 97 | 98 | # Prepare targets for installation. 99 | preinstall: all 100 | $(MAKE) -f CMakeFiles/Makefile2 preinstall 101 | .PHONY : preinstall 102 | 103 | # Prepare targets for installation. 104 | preinstall/fast: 105 | $(MAKE) -f CMakeFiles/Makefile2 preinstall 106 | .PHONY : preinstall/fast 107 | 108 | # clear depends 109 | depend: 110 | $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 1 111 | .PHONY : depend 112 | 113 | #============================================================================= 114 | # Target rules for targets named occlusion 115 | 116 | # Build rule for target. 117 | occlusion: cmake_check_build_system 118 | $(MAKE) -f CMakeFiles/Makefile2 occlusion 119 | .PHONY : occlusion 120 | 121 | # fast build rule for target. 
122 | occlusion/fast: 123 | $(MAKE) -f src/CMakeFiles/occlusion.dir/build.make src/CMakeFiles/occlusion.dir/build 124 | .PHONY : occlusion/fast 125 | 126 | #============================================================================= 127 | # Target rules for targets named noise 128 | 129 | # Build rule for target. 130 | noise: cmake_check_build_system 131 | $(MAKE) -f CMakeFiles/Makefile2 noise 132 | .PHONY : noise 133 | 134 | # fast build rule for target. 135 | noise/fast: 136 | $(MAKE) -f src/CMakeFiles/noise.dir/build.make src/CMakeFiles/noise.dir/build 137 | .PHONY : noise/fast 138 | 139 | #============================================================================= 140 | # Target rules for targets named sparsity 141 | 142 | # Build rule for target. 143 | sparsity: cmake_check_build_system 144 | $(MAKE) -f CMakeFiles/Makefile2 sparsity 145 | .PHONY : sparsity 146 | 147 | # fast build rule for target. 148 | sparsity/fast: 149 | $(MAKE) -f src/CMakeFiles/sparsity.dir/build.make src/CMakeFiles/sparsity.dir/build 150 | .PHONY : sparsity/fast 151 | 152 | #============================================================================= 153 | # Target rules for targets named libfeature 154 | 155 | # Build rule for target. 156 | libfeature: cmake_check_build_system 157 | $(MAKE) -f CMakeFiles/Makefile2 libfeature 158 | .PHONY : libfeature 159 | 160 | # fast build rule for target. 161 | libfeature/fast: 162 | $(MAKE) -f lib/CMakeFiles/libfeature.dir/build.make lib/CMakeFiles/libfeature.dir/build 163 | .PHONY : libfeature/fast 164 | 165 | # Help Target 166 | help: 167 | @echo "The following are some of the valid targets for this Makefile:" 168 | @echo "... all (the default if no target is provided)" 169 | @echo "... clean" 170 | @echo "... depend" 171 | @echo "... edit_cache" 172 | @echo "... rebuild_cache" 173 | @echo "... occlusion" 174 | @echo "... noise" 175 | @echo "... sparsity" 176 | @echo "... 
libfeature" 177 | .PHONY : help 178 | 179 | 180 | 181 | #============================================================================= 182 | # Special targets to cleanup operation of make. 183 | 184 | # Special rule to run CMake to check the build system integrity. 185 | # No rule that depends on this can have commands that come from listfiles 186 | # because they might be regenerated. 187 | cmake_check_build_system: 188 | $(CMAKE_COMMAND) -H$(CMAKE_SOURCE_DIR) -B$(CMAKE_BINARY_DIR) --check-build-system CMakeFiles/Makefile.cmake 0 189 | .PHONY : cmake_check_build_system 190 | -------------------------------------------------------------------------------- /Test/robust_test_data_features/lib/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8 FATAL_ERROR) 2 | 3 | add_compile_options(-std=c++11) 4 | 5 | find_package(PCL 1.8 REQUIRED) 6 | 7 | #set(CMAKE_VERBOSE_MAKEFILE on) 8 | 9 | #message(${PROJECT_SOURCE_DIR}) 10 | 11 | set(LIB_SRC accessFile.cpp extractFeature.cpp sampleCloud.cpp searchKdtree.cpp getFeatureVector.cpp gen_robust_test_data.cpp get_file_name.c) 12 | 13 | include_directories(${PCL_INCLUDE_DIRS}) 14 | link_directories(${PCL_LIBRARY_DIRS}) 15 | add_definitions(${PCL_DEFINITIONS}) 16 | 17 | #add_library(libfeature SHARED ${LIB_SRC}) #dll 18 | add_library(libfeature ${LIB_SRC}) #lib 19 | 20 | set(LIBRARY_OUTPUT_PATH ${PROJECT_BINARY_DIR}/lib) 21 | 22 | set_target_properties(libfeature PROPERTIES OUTPUT_NAME "feature") 23 | -------------------------------------------------------------------------------- /Test/robust_test_data_features/lib/accessFile.cpp: -------------------------------------------------------------------------------- 1 | #include "accessFile.h" 2 | 3 | /* 4 | **************************************** 5 | ********** Read pcd file *************** 6 | **************************************** 7 | */ 8 | void 9 | readPCD( 10 | const std::string & filename, 11 | 
pcl::PointCloud::Ptr & cloud 12 | ) 13 | { 14 | pcl::PCDReader reader; 15 | reader.read (filename, *cloud); 16 | std::cout << "readPCD(): " 17 | << cloud->points.size() 18 | << " points in " 19 | << filename 20 | << std::endl; 21 | } 22 | 23 | 24 | /* 25 | **************************************** 26 | ********** Write pcd file ************** 27 | **************************************** 28 | */ 29 | 30 | // pcl::PointCloud 31 | void 32 | writePCD( 33 | const std::string & filename, 34 | const pcl::PointCloud::Ptr & cloud 35 | ) 36 | { 37 | if (cloud->points.empty()) 38 | { 39 | std::cout << "writePCD(): There is no points in this cloud!" << std::endl; 40 | return; 41 | } 42 | pcl::PCDWriter writer; 43 | writer.write (filename, *cloud, false); 44 | } 45 | 46 | // pcl::PointCloud 47 | void 48 | writePCD( 49 | const std::string & filename, 50 | const pcl::PointCloud::Ptr & normals 51 | ) 52 | { 53 | if (normals->points.empty()) 54 | { 55 | std::cout << "writePCD(): There is no points in this cloud!" 
<< std::endl; 56 | return; 57 | } 58 | pcl::PCDWriter writer; 59 | writer.write (filename, *normals, false); 60 | } 61 | 62 | 63 | /* 64 | ******************************************************* 65 | ********** Write txt file with "Append Mode" ********** 66 | ******************************************************* 67 | */ 68 | void 69 | writeTXT( 70 | const std::vector & globalFeatureVector, 71 | const unsigned int & label, 72 | const std::string & txt_path 73 | ) 74 | { 75 | std::ofstream writeTXT; 76 | writeTXT.open(txt_path.c_str(), std::ios::app); 77 | for (auto & e : globalFeatureVector) 78 | { 79 | writeTXT << e << ' '; 80 | } 81 | writeTXT << label << "\n"; 82 | } 83 | 84 | 85 | /* 86 | **************************************** 87 | ********** Get file name *************** 88 | **************************************** 89 | */ 90 | void 91 | getFileName( 92 | const std::string & path, 93 | std::vector & file_name_vec 94 | ) 95 | { 96 | // use Cython so that C++ can call Python function 97 | 98 | // initialize Python module 99 | Py_Initialize(); 100 | initget_file_name(); 101 | 102 | // get the parameter and returned value in python format 103 | PyObject * path_ = Py_BuildValue("s", path.c_str()); 104 | PyObject * file_name_list = get_file_name(path_); 105 | assert(PyList_Check(file_name_list)); 106 | 107 | // build the file name vector 108 | Py_ssize_t size = PyList_Size(file_name_list); 109 | for (unsigned int file_index = 0; file_index != size; ++file_index) 110 | { 111 | PyObject * file_name_ = PyList_GetItem(file_name_list, file_index); 112 | std::string file_name = PyString_AsString(file_name_); 113 | file_name_vec.push_back(file_name); 114 | } 115 | 116 | // finalize the python module 117 | Py_Finalize(); 118 | } 119 | -------------------------------------------------------------------------------- /Test/robust_test_data_features/lib/accessFile.h: -------------------------------------------------------------------------------- 1 | #ifndef ACCESSFILE_H 2 
| #define ACCESSFILE_H 3 | 4 | #include "commonHeadFiles.h" 5 | 6 | #include 7 | #include 8 | #include 9 | #include "get_file_name.h" 10 | 11 | 12 | 13 | /* 14 | **************************************** 15 | ********** Read pcd file *************** 16 | **************************************** 17 | */ 18 | void 19 | readPCD( 20 | const std::string & filename, 21 | pcl::PointCloud::Ptr & cloud 22 | ); 23 | 24 | 25 | /* 26 | **************************************** 27 | ********** Write pcd file ************** 28 | **************************************** 29 | */ 30 | 31 | // pcl::PointCloud 32 | void 33 | writePCD( 34 | const std::string & filename, 35 | const pcl::PointCloud::Ptr & cloud 36 | ); 37 | 38 | // pcl::PointCloud 39 | void 40 | writePCD( 41 | const std::string & filename, 42 | const pcl::PointCloud::Ptr & normals 43 | ); 44 | 45 | 46 | /* 47 | ******************************************************* 48 | ********** Write txt file with "Append Mode" ********** 49 | ******************************************************* 50 | */ 51 | void 52 | writeTXT( 53 | const std::vector & globalFeatureVector, 54 | const unsigned int & label, 55 | const std::string & txt_path 56 | ); 57 | 58 | 59 | 60 | /* 61 | **************************************** 62 | ********** Get file name *************** 63 | **************************************** 64 | */ 65 | void 66 | getFileName( 67 | const std::string & path, 68 | std::vector & file_name_vec 69 | ); 70 | 71 | #endif 72 | -------------------------------------------------------------------------------- /Test/robust_test_data_features/lib/commonHeadFiles.h: -------------------------------------------------------------------------------- 1 | #ifndef COMMONHEADFILES_H 2 | #define COMMONHEADFILES_H 3 | 4 | #include 5 | #include 6 | // #include // interface for ROS 7 | #include 8 | #include 9 | #include 10 | #include 11 | 12 | #endif 13 | -------------------------------------------------------------------------------- 
/Test/robust_test_data_features/lib/extractFeature.h: -------------------------------------------------------------------------------- 1 | #ifndef FEATUREEXTRACTION_H 2 | #define FEATUREEXTRACTION_H 3 | 4 | #include "commonHeadFiles.h" 5 | 6 | #include "searchKdtree.h" 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | 15 | /* 16 | **************************************** 17 | ********** Normals estimation ********** 18 | **************************************** 19 | */ 20 | 21 | void 22 | estimateNormals( 23 | const pcl::PointCloud::Ptr & inputCloud, // input cloud 24 | const pcl::PointCloud::Ptr & searchSurface, // search surface 25 | const float & searchRadius, // radius for searching [m] 26 | pcl::PointCloud::Ptr & normals, // output normals for every point 27 | bool debug = false // debug mode, default false 28 | // bool removeNaN = true, 29 | // std::vector & mapping, 30 | // bool visualize = false 31 | ); 32 | 33 | 34 | 35 | /* 36 | **************************************** 37 | ********** Remove NaN Normals ********** 38 | **************************************** 39 | */ 40 | 41 | void 42 | removeNanNormals( 43 | const pcl::PointCloud::Ptr & inputNormals, // input normals inklusive NaN 44 | pcl::PointCloud::Ptr & outputNormals, // output normals without NaN 45 | std::vector & mapping // output index in inputNormals with 46 | // inputNormals[index[i]] != NaN 47 | ); 48 | 49 | 50 | 51 | /* 52 | **************************************************** 53 | ********** Remove Points with NaN Normals ********** 54 | **************************************************** 55 | */ 56 | 57 | void 58 | removePointWithNanNormal( 59 | const pcl::PointCloud::Ptr & inputCloud, // input cloud 60 | pcl::PointCloud::Ptr & outputCloud, // output cloud, which has no NaN normal 61 | const std::vector & mapping, // index in inputCloud, inputCloud[index[i]] has no NaN normal 62 | bool debug = false // debug mode, default false 63 | ); 64 
| 65 | 66 | 67 | /* 68 | *************************************** 69 | ********** Visualize normals ********** 70 | *************************************** 71 | */ 72 | 73 | void 74 | visualizeNormals( 75 | const pcl::PointCloud::Ptr & cloud, // cloud with XYZI 76 | const pcl::PointCloud::Ptr & normals // normals 77 | ); 78 | 79 | 80 | 81 | 82 | /* 83 | *************************** 84 | ****** Estimate FPFH ****** 85 | *************************** 86 | */ 87 | 88 | void 89 | estimateFPFH( 90 | const pcl::PointCloud::Ptr & inputCloud, // input cloud 91 | const pcl::PointCloud::Ptr & inputNormals, // input normals 92 | const float & searchRadius, // param search radius 93 | std::vector & FPFHHisto, // output FPFH histogram 94 | bool debug = false // debug mode, defualt false 95 | ); 96 | 97 | 98 | 99 | 100 | /* 101 | ***************************************************** 102 | ******* Transform point cloud to Eigen Matrix ******* 103 | ***************************************************** 104 | */ 105 | 106 | void 107 | cloud2Matrix( 108 | const pcl::PointCloud::Ptr & cloud, // input cloud 109 | Eigen::MatrixXf & PointMat, // output point matrix 110 | const std::string & mode // transform mode: "xy" or "xyz" 111 | ); 112 | 113 | 114 | /* 115 | ************************************************* 116 | ********** Calculate covariance matrix ********** 117 | ************************************************* 118 | */ 119 | 120 | void 121 | covMatrix( 122 | const Eigen::MatrixXf & M, 123 | Eigen::MatrixXf & CovMat 124 | ); 125 | 126 | 127 | 128 | /* 129 | ***************************************** 130 | ****** Calculate 3D Size using PCA ****** 131 | ***************************************** 132 | */ 133 | 134 | void 135 | geometrySize( 136 | const pcl::PointCloud::Ptr & cloud, // input cloud 137 | float & length, // output length 138 | float & width, // output width 139 | float & height // output height 140 | ); 141 | 142 | 143 | 144 | /* 145 | 
**************************************************************************** 146 | ****** Calculate eigen values of covaraince matrix in geometric domain****** 147 | **************************************************************************** 148 | */ 149 | 150 | void 151 | geoCovEigen( 152 | const pcl::PointCloud::Ptr & cloud, // input cloud 153 | std::vector & eigvals, // output eigen values 154 | bool sort = true // sort mode, if true (default), sort eigvals from large to small 155 | // 1 >= eigvals[0] >= eigvals[1] >= eigvals[2] >= 0 156 | ); 157 | 158 | 159 | 160 | /* 161 | **************************************** 162 | ****** Calculate Lalonde features ****** 163 | **************************************** 164 | */ 165 | 166 | void 167 | lalondeFeat( 168 | const pcl::PointCloud::Ptr & inputCloud, // input cloud 169 | const pcl::PointCloud::Ptr & searchSurface, // input search surface 170 | const float & searchRadius, // param search radius 171 | std::vector & lalondeHisto // output lalonde histogram 172 | ); 173 | 174 | 175 | 176 | /* 177 | ************************************************ 178 | ****** Calculate Features about intensity ****** 179 | ************************************************ 180 | */ 181 | 182 | void 183 | intensity( 184 | const pcl::PointCloud::Ptr & cloud, // input cloud 185 | float & Imax, // output max intensity 186 | float & Imean, // output mean intensity 187 | float & Ivar // output intensity variance 188 | ); 189 | 190 | 191 | 192 | /* 193 | **************************************** 194 | ****** Estimate moment invariants ****** 195 | **************************************** 196 | */ 197 | 198 | void 199 | momentInvariants( 200 | const pcl::PointCloud::Ptr & cloud, // input cloud 201 | std::vector & jvec // output moment invariants vector jvec[0] jvec[1] jvec[2] 202 | ); 203 | 204 | 205 | 206 | /* 207 | *********************************************** 208 | ****** Calculate mean intensity gradient ****** 209 | 
*********************************************** 210 | */ 211 | 212 | void 213 | meanIntensityGradient( 214 | const pcl::PointCloud::Ptr & inputCloud, // input cloud 215 | const pcl::PointCloud::Ptr & inputNormals, // input normals 216 | const float & searchRadius, // param search radius 217 | float & meanIG, // output meanIG 218 | bool debug = false // debug mode, default false 219 | ); 220 | 221 | #endif 222 | -------------------------------------------------------------------------------- /Test/robust_test_data_features/lib/gen_robust_test_data.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | ************************************************** 3 | ****** generate data (cloud) for robust test ***** 4 | ************************************************** 5 | */ 6 | 7 | 8 | #include "gen_robust_test_data.h" 9 | 10 | #define PI 3.1415926 11 | #define HIGH_NOISE_LIMIT 0.12 12 | #define LOW_NOISE_LIMIT -HIGH_NOISE_LIMIT 13 | 14 | // occlusion with different percentages 15 | void 16 | getOccludedCloud ( 17 | const pcl::PointCloud::Ptr & cloud, 18 | const std::string & label_name, 19 | const float & occlusion_percentage, 20 | pcl::PointCloud::Ptr & occluded_cloud 21 | ) 22 | { 23 | assert(occlusion_percentage >=0.0 && occlusion_percentage <= 100.0); 24 | 25 | float mp = 0.0, 26 | length = 0.0, 27 | half_occluded_length = 0.0; 28 | 29 | if (label_name == "car" || label_name == "van" || label_name == "truck") 30 | { 31 | float minpx = cloud->points[0].x; 32 | float maxpx = cloud->points[0].x; 33 | float minpy = cloud->points[0].y; 34 | float maxpy = cloud->points[0].y; 35 | for (auto & p : cloud->points) 36 | { 37 | if (p.x > maxpx) 38 | maxpx = p.x; 39 | else if (p.x < minpx) 40 | minpx = p.x; 41 | if (p.y > maxpy) 42 | maxpy = p.y; 43 | else if (p.y < minpy) 44 | minpy = p.y; 45 | } 46 | if (maxpx-minpx > maxpy-minpy) 47 | { 48 | length = maxpx - minpx; 49 | mp = (maxpx + minpx) / 2.0; 50 | half_occluded_length = length * 
occlusion_percentage / 200.0; 51 | for (auto & p : cloud->points) 52 | { 53 | if (p.x <= mp+half_occluded_length && p.x >= mp-half_occluded_length) 54 | continue; 55 | else 56 | occluded_cloud->points.push_back(p); 57 | } 58 | } 59 | else 60 | { 61 | length = maxpy-minpy; 62 | mp = (maxpy + minpy) / 2.0; 63 | half_occluded_length = length * occlusion_percentage / 200.0; 64 | for (auto & p : cloud->points) 65 | { 66 | if (p.y <= mp+half_occluded_length && p.y >= mp-half_occluded_length) 67 | continue; 68 | else 69 | occluded_cloud->points.push_back(p); 70 | } 71 | } 72 | } 73 | else if (label_name == "pedestrian" || label_name == "cyclist") 74 | { 75 | float maxpz = cloud->points[0].z; 76 | float minpz = cloud->points[0].z; 77 | for (auto & p : cloud->points) 78 | { 79 | if (p.z > maxpz) 80 | maxpz = p.z; 81 | else if (p.z < minpz) 82 | minpz = p.z; 83 | } 84 | length = maxpz - minpz; 85 | mp = (maxpz + minpz) / 2.0; 86 | half_occluded_length = length * occlusion_percentage / 200.0; 87 | for (auto & p : cloud->points) 88 | { 89 | if (p.z <= mp+half_occluded_length && p.z >= mp-half_occluded_length) 90 | continue; 91 | else 92 | occluded_cloud->points.push_back(p); 93 | } 94 | } 95 | else 96 | { 97 | std::cerr << "getOccludedCloud(): invalid label name!" 
<< std::endl; 98 | return; 99 | } 100 | occluded_cloud->width = occluded_cloud->points.size(); 101 | occluded_cloud->height = 1; 102 | occluded_cloud->is_dense = true; 103 | } 104 | 105 | 106 | // random downsample with different percentage 107 | void 108 | getSparseCloud ( 109 | const pcl::PointCloud::Ptr & cloud, // input cloud 110 | const float & sparse_percentage, // percentage of points after downsampling 111 | pcl::PointCloud::Ptr & sparse_cloud // output sparse cloud 112 | ) 113 | { 114 | assert(sparse_percentage >= 0 && sparse_percentage <= 100); 115 | 116 | unsigned int num_points_after_downsampled = sparse_percentage * cloud->points.size() / 100; 117 | downSample_rand(cloud, sparse_cloud, num_points_after_downsampled, false); 118 | } 119 | 120 | 121 | // prepare random number 122 | void 123 | UNIFORM (float *p) 124 | { 125 | int i = 0, a = 0, x = 0; 126 | double f; 127 | for (i = 0; i != 2; ++i, x += 689) 128 | { 129 | a = rand() + x; // 加上689是因为系统产生随机数的更换频率远远不及程序调用函数的时间 130 | a = a % 1000; 131 | f = (float)a; 132 | f = f / 1000.0; 133 | *p = f; 134 | p++; 135 | } 136 | } 137 | 138 | // add gaussian noise to cloud using Box-Muller algorithm 139 | void 140 | addNoise ( 141 | const pcl::PointCloud::Ptr & cloud, // input cloud 142 | const float & sigma, // param standard variance 143 | pcl::PointCloud::Ptr & noise_cloud // output cloud with noise 144 | ) 145 | { 146 | float A = 0.0, B = 0.0, C = 0.0, r = 0.0; 147 | float uni[2]; 148 | pcl::PointXYZI noise_point; 149 | srand((unsigned)time(NULL)); 150 | if (!noise_cloud->points.empty()) 151 | noise_cloud->points.clear(); 152 | 153 | for (auto & p : cloud->points) 154 | { 155 | std::vector rnv; 156 | for (unsigned int j = 0; j != 3; ++j) 157 | { 158 | // Box-Muller algorithm 159 | UNIFORM(uni); 160 | A = sqrt((-2)*log(uni[0])); 161 | B = 2 * PI*uni[1]; 162 | C = A*cos(B); 163 | r = 0.0 + C * sigma; // meanvalue = 0.0, stdvar = sigma 164 | // clip 165 | if (r > HIGH_NOISE_LIMIT) 166 | r = HIGH_NOISE_LIMIT; 167 
| else if (r < LOW_NOISE_LIMIT) 168 | r = LOW_NOISE_LIMIT; 169 | rnv.push_back(r); 170 | } 171 | 172 | assert(rnv.size() == 3); 173 | 174 | noise_point.x = p.x + rnv[0]; 175 | noise_point.y = p.y + rnv[1]; 176 | noise_point.z = p.z + rnv[2]; 177 | noise_point.intensity = p.intensity; 178 | noise_cloud->points.push_back(noise_point); 179 | } 180 | noise_cloud->width = noise_cloud->points.size(); 181 | noise_cloud->height = 1; 182 | noise_cloud->is_dense = true; 183 | } -------------------------------------------------------------------------------- /Test/robust_test_data_features/lib/gen_robust_test_data.h: -------------------------------------------------------------------------------- 1 | #ifndef GEN_ROBUST_TEST_DATA_H 2 | #define GEN_ROBUST_TEST_DATA_H 3 | 4 | #include "commonHeadFiles.h" 5 | #include "sampleCloud.h" 6 | 7 | 8 | // occlusion with different percentages 9 | void 10 | getOccludedCloud ( 11 | const pcl::PointCloud::Ptr & cloud, 12 | const std::string & label_name, 13 | const float & occlusion_percentage, 14 | pcl::PointCloud::Ptr & occluded_cloud 15 | ); 16 | 17 | 18 | 19 | // random downsample with different percentage 20 | void 21 | getSparseCloud ( 22 | const pcl::PointCloud::Ptr & cloud, // input cloud 23 | const float & sparse_percentage, // percentage of points after downsampling 24 | pcl::PointCloud::Ptr & sparse_cloud // output sparse cloud 25 | ); 26 | 27 | 28 | 29 | // add gaussian noise to cloud using Box-Muller algorithm 30 | void 31 | addNoise ( 32 | const pcl::PointCloud::Ptr & cloud, // input cloud 33 | const float & sigma, // param standard variance 34 | pcl::PointCloud::Ptr & noise_cloud // output cloud with noise 35 | ); 36 | 37 | #endif -------------------------------------------------------------------------------- /Test/robust_test_data_features/lib/getFeatureVector.cpp: -------------------------------------------------------------------------------- 1 | #include "getFeatureVector.h" 2 | 3 | /* 4 | 
*********************************************** 5 | ********** build feature vector *************** 6 | *********************************************** 7 | */ 8 | 9 | void 10 | getFeatureVector ( 11 | const float & searchRadius, // param search radius 12 | pcl::PointCloud::Ptr & cloud, // input cloud 13 | const int & i, // param label name index 14 | std::vector & featVec // output feature vector 15 | ) 16 | { 17 | if (!featVec.empty()) 18 | featVec.clear(); 19 | /*** global features have nothing to do with normals and search radius ***/ 20 | 21 | // geometry size: length, width, height 22 | std::vector geoSize {0.0, 0.0, 0.0}; 23 | geometrySize(cloud, geoSize[0], geoSize[1], geoSize[2]); 24 | for (auto & gs : geoSize) 25 | featVec.push_back(gs); 26 | // intensity: Imax, Imean, Ivar 27 | std::vector intenFeat {0.0, 0.0, 0.0}; 28 | intensity(cloud, intenFeat[0], intenFeat[1], intenFeat[2]); 29 | for (auto & i : intenFeat) 30 | featVec.push_back(i); 31 | // moment invariants (results should be divided by point number in cloud) 32 | std::vector jvec; 33 | momentInvariants(cloud, jvec); 34 | featVec.push_back(jvec[0]/cloud->points.size()); 35 | // global eigen values of covariance matrix 36 | std::vector eigvals; 37 | geoCovEigen(cloud, eigvals, true); 38 | for (auto & e : eigvals) 39 | featVec.push_back(e); 40 | 41 | 42 | /****** local features, which depend on search radius ******/ 43 | 44 | // upsample 45 | while (cloud->points.size() < size_low_limit[i]) 46 | { 47 | pcl::PointCloud::Ptr 48 | cloud_dense (new pcl::PointCloud); 49 | upSample(cloud, cloud_dense); 50 | cloud->points = cloud_dense->points; 51 | } 52 | cloud->width = cloud->points.size(); 53 | cloud->height = 1; 54 | cloud->is_dense = true; 55 | 56 | // downsample 57 | pcl::PointCloud::Ptr cloud_sparse (new pcl::PointCloud); 58 | downSample_rand(cloud, cloud_sparse, size_high_limit[i], false); 59 | 60 | // calculate lalonde feature histogram 61 | std::vector lalondeHisto; 62 | lalondeFeat(cloud_sparse, 
cloud, searchRadius, lalondeHisto); 63 | if (!lalondeHisto.empty()) 64 | for (auto & lh : lalondeHisto) 65 | featVec.push_back(lh); 66 | 67 | // estimate normals 68 | pcl::PointCloud::Ptr normals (new pcl::PointCloud); 69 | estimateNormals(cloud_sparse, cloud, searchRadius, normals); 70 | 71 | // remove NaN normals 72 | pcl::PointCloud::Ptr normals_valid (new pcl::PointCloud); 73 | std::vector mapping; 74 | removeNanNormals(normals, normals_valid, mapping); 75 | if (mapping.empty()) 76 | return; 77 | 78 | // remove point with NaN normal 79 | pcl::PointCloud::Ptr cloud_valid (new pcl::PointCloud); 80 | removePointWithNanNormal(cloud_sparse, cloud_valid, mapping); 81 | 82 | // estimate mean intensity gradient 83 | float meanIG = 0.0; 84 | meanIntensityGradient(cloud_valid, normals_valid, searchRadius, meanIG); 85 | featVec.push_back(meanIG); 86 | 87 | // estimate FPFH histogram 88 | std::vector FPFHHisto; 89 | estimateFPFH(cloud_valid, normals_valid, searchRadius, FPFHHisto); 90 | if (!FPFHHisto.empty()) 91 | for (auto & fh : FPFHHisto) 92 | featVec.push_back(fh); 93 | } -------------------------------------------------------------------------------- /Test/robust_test_data_features/lib/getFeatureVector.h: -------------------------------------------------------------------------------- 1 | #ifndef GETFEATUREVECTOR_H 2 | #define GETFEATUREVECTOR_H 3 | 4 | #include "accessFile.h" 5 | #include "sampleCloud.h" 6 | #include "extractFeature.h" 7 | 8 | 9 | 10 | // global variables for table drive 11 | const std::string label_name[5] = {"car", "van", "pedestrian", "truck", "cyclist"}; 12 | const unsigned int size_low_limit[5] = {1000, 1000, 150, 1000, 500}; 13 | const unsigned int size_high_limit[5] = {2000, 2000, 1000, 3000, 1500}; 14 | const unsigned int label[5] = {0, 1, 2, 3, 4}; 15 | 16 | /* 17 | *********************************************** 18 | ********** build feature vector *************** 19 | *********************************************** 20 | */ 21 | 22 | void 
23 | getFeatureVector ( 24 | const float & searchRadius, // param search radius 25 | pcl::PointCloud::Ptr & cloud, // input cloud 26 | const int & i, // param label name index 27 | std::vector & featVec // output feature vector 28 | ); 29 | 30 | 31 | #endif 32 | -------------------------------------------------------------------------------- /Test/robust_test_data_features/lib/get_file_name.h: -------------------------------------------------------------------------------- 1 | /* Generated by Cython 0.28.3 */ 2 | 3 | #ifndef __PYX_HAVE__get_file_name 4 | #define __PYX_HAVE__get_file_name 5 | 6 | 7 | #ifndef __PYX_HAVE_API__get_file_name 8 | 9 | #ifndef __PYX_EXTERN_C 10 | #ifdef __cplusplus 11 | #define __PYX_EXTERN_C extern "C" 12 | #else 13 | #define __PYX_EXTERN_C extern 14 | #endif 15 | #endif 16 | 17 | #ifndef DL_IMPORT 18 | #define DL_IMPORT(_T) _T 19 | #endif 20 | 21 | __PYX_EXTERN_C PyObject *get_file_name(PyObject *); 22 | 23 | #endif /* !__PYX_HAVE_API__get_file_name */ 24 | 25 | /* WARNING: the interface of the module init function changed in CPython 3.5. */ 26 | /* It now returns a PyModuleDef instance instead of a PyModule instance. 
*/ 27 | 28 | #if PY_MAJOR_VERSION < 3 29 | PyMODINIT_FUNC initget_file_name(void); 30 | #else 31 | PyMODINIT_FUNC PyInit_get_file_name(void); 32 | #endif 33 | 34 | #endif /* !__PYX_HAVE__get_file_name */ 35 | -------------------------------------------------------------------------------- /Test/robust_test_data_features/lib/get_file_name.pyx: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ################################### 3 | # Get all files' name in a folder # 4 | ################################### 5 | 6 | import os 7 | 8 | # use Cython so that C++ can call Python function 9 | 10 | cdef public get_file_name(path): 11 | filelist = os.listdir(path) 12 | return filelist -------------------------------------------------------------------------------- /Test/robust_test_data_features/lib/sampleCloud.cpp: -------------------------------------------------------------------------------- 1 | #include "sampleCloud.h" 2 | 3 | /* 4 | ************************************************* 5 | ********** Down sample with Voxel Grid ********** 6 | ************************************************* 7 | */ 8 | 9 | void 10 | downSample_vg( 11 | const pcl::PointCloud::Ptr & inputCloud, // input inputCloud before downsampling 12 | pcl::PointCloud::Ptr & outputCloud, // output inputCloud after downsampling 13 | const float & cube_leaf_side_length, // param side length of cube leaf of voxel grid [m] 14 | bool debug // debug mode, default true 15 | ) 16 | { 17 | pcl::VoxelGrid vg; 18 | vg.setInputCloud (inputCloud); 19 | vg.setLeafSize (cube_leaf_side_length, cube_leaf_side_length, cube_leaf_side_length); 20 | // clear the possible invalid residual values in outputCloud 21 | if (!outputCloud->points.empty()) 22 | outputCloud->points.clear(); 23 | // compute downsampled cloud 24 | vg.filter (*outputCloud); 25 | 26 | // debug mode 27 | if (debug) 28 | { 29 | std::cout << "downSample_vg(): points size before downsampling: " 30 | 
<< inputCloud->points.size() 31 | << std::endl; 32 | std::cout << "downSample_vg(): points size after downsampling: " 33 | << outputCloud->points.size() 34 | << std::endl; 35 | } 36 | } 37 | 38 | 39 | 40 | /* 41 | ****************************************************** 42 | ********** Down sample with random sampling ********** 43 | ****************************************************** 44 | */ 45 | 46 | void 47 | downSample_rand( 48 | const pcl::PointCloud::Ptr & inputCloud, // input inputCloud before downsampling 49 | pcl::PointCloud::Ptr & outputCloud, // output inputCloud after downsampling 50 | const unsigned int & highThreshold, // number of points in outputCloud after downsampling 51 | bool debug // debug mode, default true 52 | ) 53 | { 54 | size_t size = inputCloud->points.size(); 55 | // clear the possible invalid residual values in outputCloud 56 | if (!outputCloud->points.empty()) 57 | outputCloud->points.clear(); 58 | // exception: size of the inputCloud <= highThreshold, no need to downsample 59 | if (size <= highThreshold) 60 | outputCloud = inputCloud; // that's why & outputCloud must have "&" 61 | else 62 | { 63 | // generate sequential index 64 | std::vector idxSeq; 65 | for (unsigned int i = 0; i != size; ++i) 66 | idxSeq.push_back(i); 67 | // randomly shuffle index 68 | srand(time(NULL)); 69 | std::random_shuffle(idxSeq.begin(), idxSeq.end()); 70 | // get randomly downsampled outputCloud 71 | for (auto idx = idxSeq.cbegin(); idx != idxSeq.cbegin()+highThreshold; ++idx) 72 | outputCloud->points.push_back(inputCloud->points[*idx]); 73 | outputCloud->width = outputCloud->size(); 74 | outputCloud->height = 1; 75 | outputCloud->is_dense = true; 76 | } 77 | // debug mode 78 | if (debug) 79 | { 80 | std::cout << "downSample_rand(): points size before downsampling: " 81 | << inputCloud->points.size() 82 | << std::endl; 83 | std::cout << "downSample_rand(): points size after downsampling: " 84 | << outputCloud->points.size() 85 | << std::endl; 86 | } 87 | 
} 88 | 89 | 90 | /* 91 | **************************************************************** 92 | ********** Check the existence in vector of a element ********** 93 | **************************************************************** 94 | */ 95 | 96 | // true: element exists in vector; false: not in vector 97 | bool inVector( 98 | const NeighborPointIdxPair & PIdxPair, 99 | const std::vector & PIdxPairVec 100 | ) 101 | { 102 | // if (PIdxPairVec.empty()) 103 | // { 104 | // std::cout << "inVector(): The input vector is empty!" << std::endl; 105 | // return (false); 106 | // } 107 | auto it = std::find(PIdxPairVec.cbegin(), PIdxPairVec.cend(), PIdxPair); 108 | return (!(it == PIdxPairVec.cend())); 109 | } 110 | 111 | 112 | 113 | /* 114 | ******************************************************* 115 | ********** Upsampling for sparse point inputCloud ********** 116 | ******************************************************* 117 | */ 118 | 119 | void 120 | upSample( 121 | const pcl::PointCloud::Ptr & inputCloud, // original cloud as input 122 | pcl::PointCloud::Ptr & outputCloud // upsampled cloud as output 123 | ) 124 | { 125 | if (inputCloud->points.empty()) 126 | { 127 | std::cout << "upSample(): The inputCloud is empty!" << std::endl; 128 | return; 129 | } 130 | pcl::PointCloud::Ptr addedCloud (new pcl::PointCloud); 131 | std::vector PIdxPairVec; 132 | NeighborPointIdxPair PIdxPair; 133 | std::vector searchedPointIdx; 134 | pcl::PointXYZI newPoint; 135 | for (std::size_t i = 0; i < inputCloud->points.size(); ++i) 136 | { 137 | // search mode: k search. 
number of points to search: 2 138 | kdtreeSearch(inputCloud, inputCloud->points[i], searchedPointIdx, "k", 2); 139 | // searchedPointIdx[0] = i, searchedPointIdx[1] = index of the searched point 140 | PIdxPair.index1 = i; 141 | PIdxPair.index2 = searchedPointIdx[1]; 142 | // // debug 143 | // std::cout << "PIdxPair.index1: " << PIdxPair.index1 << std::endl; 144 | // std::cout << "PIdxPair.index2: " << PIdxPair.index2 << std::endl; 145 | 146 | if (inVector(PIdxPair, PIdxPairVec)) 147 | continue; 148 | PIdxPairVec.push_back(PIdxPair); 149 | newPoint.x = (inputCloud->points[i].x + inputCloud->points[searchedPointIdx[1]].x) / 2.0; 150 | newPoint.y = (inputCloud->points[i].y + inputCloud->points[searchedPointIdx[1]].y) / 2.0; 151 | newPoint.z = (inputCloud->points[i].z + inputCloud->points[searchedPointIdx[1]].z) / 2.0; 152 | newPoint.intensity = (inputCloud->points[i].intensity + 153 | inputCloud->points[searchedPointIdx[1]].intensity) / 2.0; 154 | addedCloud->points.push_back(newPoint); 155 | } 156 | addedCloud->width = addedCloud->points.size(); 157 | addedCloud->height = 1; 158 | addedCloud->is_dense = true; 159 | // clear the possible invalid residual values in outputCloud 160 | if (!outputCloud->points.empty()) 161 | outputCloud->points.clear(); 162 | // concatenate the inputCloud and the addedCloud 163 | (*outputCloud) = (*inputCloud) + (*addedCloud); 164 | } 165 | -------------------------------------------------------------------------------- /Test/robust_test_data_features/lib/sampleCloud.h: -------------------------------------------------------------------------------- 1 | #ifndef PREPROCESSING_H 2 | #define PREPROCESSING_H 3 | 4 | #include "commonHeadFiles.h" 5 | 6 | #include 7 | #include "searchKdtree.h" 8 | #include 9 | 10 | 11 | /* 12 | ************************************************* 13 | ********** Down sample with Voxel Grid ********** 14 | ************************************************* 15 | */ 16 | 17 | void 18 | downSample_vg( 19 | const 
pcl::PointCloud::Ptr & inputCloud, // input inputCloud before downsampling 20 | pcl::PointCloud::Ptr & outputCloud, // output inputCloud after downsampling 21 | const float & cube_leaf_side_length, // param side length of cube leaf of voxel grid [m] 22 | bool debug = true // debug mode, default true 23 | ); 24 | 25 | 26 | 27 | /* 28 | ****************************************************** 29 | ********** Down sample with random sampling ********** 30 | ****************************************************** 31 | */ 32 | 33 | void 34 | downSample_rand( 35 | const pcl::PointCloud::Ptr & inputCloud, // input inputCloud before downsampling 36 | pcl::PointCloud::Ptr & outputCloud, // output inputCloud after downsampling 37 | const unsigned int & highThreshold, // number of points in outputCloud after downsampling 38 | bool debug = true // debug mode, default true 39 | ); 40 | 41 | 42 | 43 | 44 | // struct // 45 | struct NeighborPointIdxPair 46 | { 47 | int index1; 48 | int index2; 49 | bool operator==(const NeighborPointIdxPair & pair) const 50 | { 51 | return (pair.index1 == this->index1 && 52 | pair.index2 == this->index2)|| 53 | (pair.index1 == this->index2 && 54 | pair.index2 == this->index1); 55 | } 56 | bool operator!=(const NeighborPointIdxPair & pair) const 57 | { 58 | // return (pair.index1 != this->index1 || 59 | // pair.index2 != this->index2)&& 60 | // (pair.index1 != this->index2 || 61 | // pair.index2 != this->index1); 62 | return !(this->operator==(pair)); 63 | } 64 | }; 65 | 66 | 67 | /* 68 | **************************************************************** 69 | ********** Check the existence in vector of a element ********** 70 | **************************************************************** 71 | */ 72 | 73 | // true: element exists in vector; false: not in vector 74 | bool inVector( 75 | const NeighborPointIdxPair & PIdxPair, 76 | const std::vector & PIdxPairVec 77 | ); 78 | 79 | 80 | 81 | /* 82 | 
************************************************************ 83 | ********** Upsampling for sparse point inputCloud ********** 84 | ************************************************************ 85 | */ 86 | 87 | void 88 | upSample( 89 | const pcl::PointCloud::Ptr & inputCloud, // original cloud as input 90 | pcl::PointCloud::Ptr & outputCloud // upsampled cloud as output 91 | ); 92 | 93 | #endif 94 | -------------------------------------------------------------------------------- /Test/robust_test_data_features/lib/searchKdtree.cpp: -------------------------------------------------------------------------------- 1 | #include "searchKdtree.h" 2 | 3 | /* 4 | ************************************************************************ 5 | ********** kdtree search for searching points in neighborhood ********** 6 | ************************************************************************ 7 | */ 8 | 9 | bool 10 | kdtreeSearch ( 11 | const pcl::PointCloud::Ptr & cloud, // input cloud for searching 12 | const pcl::PointXYZI & searchPoint, // search point 13 | std::vector & searchedPointIdx, // output: point index of searched neighbot points 14 | const std::string & mode, // serch mode: k: nearestKSearch; r: radiusSearch 15 | const unsigned int & k, // k nearest points for mode=k, defualt 1 16 | const float & r // search radius for mode=r, default 0.02m 17 | ) 18 | { 19 | pcl::KdTreeFLANN kdtree; 20 | kdtree.setInputCloud (cloud); 21 | std::vectorpointSqrDistance; 22 | // clear the possible invalid residual values in vector 23 | if (!searchedPointIdx.empty()) 24 | searchedPointIdx.clear(); 25 | 26 | if (mode == "r") 27 | { 28 | if (kdtree.radiusSearch(searchPoint,r, searchedPointIdx, pointSqrDistance) > 0) 29 | return (true); 30 | else 31 | { 32 | std::cerr << "kdtreeSearch(): no points are successfully searched!" 
<< std::endl; 33 | return (false); 34 | } 35 | } 36 | 37 | else if (mode == "k") 38 | { 39 | if (kdtree.nearestKSearch(searchPoint, k, searchedPointIdx, pointSqrDistance) > 0) 40 | return (true); 41 | else 42 | { 43 | std::cerr << "kdtreeSearch(): no points are successfully searched!" << std::endl; 44 | return (false); 45 | } 46 | } 47 | 48 | else 49 | { 50 | std::cerr << "kdtreeSearch(): invalid mode!" << std::endl; 51 | std::cerr << "valid modes are:" << "\n" 52 | << "k: nearestKSearch" << "\n" 53 | << "r: radiusSearch" << std::endl; 54 | return (false); 55 | } 56 | 57 | } 58 | -------------------------------------------------------------------------------- /Test/robust_test_data_features/lib/searchKdtree.h: -------------------------------------------------------------------------------- 1 | #ifndef KDTREESEARCH_H 2 | #define KDTREESEARCH_H 3 | 4 | #include "commonHeadFiles.h" 5 | #include 6 | 7 | /* 8 | ************************************************************************ 9 | ********** kdtree search for searching points in neighborhood ********** 10 | ************************************************************************ 11 | */ 12 | 13 | // if points are succussfully searched, return ture; if not, return false 14 | bool 15 | kdtreeSearch ( 16 | const pcl::PointCloud::Ptr & cloud, // input cloud for searching 17 | const pcl::PointXYZI & searchPoint, // search point 18 | std::vector & searchedPointIdx, // output: point index of searched neighbot points 19 | const std::string & mode, // serch mode: k: nearestKSearch; r: radiusSearch 20 | const unsigned int & k = 1, // k nearest points for mode=k, default 1 21 | const float & r = 0.02 // search radius for mode=r, default 0.02m 22 | ); 23 | 24 | 25 | #endif 26 | -------------------------------------------------------------------------------- /Test/robust_test_data_features/src/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8 
FATAL_ERROR) 2 | 3 | add_compile_options(-std=c++11) 4 | 5 | find_package(PCL 1.8 REQUIRED) 6 | 7 | #set(CMAKE_VERBOSE_MAKEFILE on) 8 | 9 | #message(${PROJECT_SOURCE_DIR}) 10 | 11 | include_directories(${PROJECT_SOURCE_DIR}/lib) 12 | 13 | set(APP_SRC1 occlusion.cpp) 14 | set(APP_SRC2 sparsity.cpp) 15 | set(APP_SRC3 noise.cpp) 16 | 17 | include_directories(${PCL_INCLUDE_DIRS}) 18 | link_directories(${PCL_LIBRARY_DIRS}) 19 | add_definitions(${PCL_DEFINITIONS}) 20 | 21 | add_executable(occlusion ${APP_SRC1}) 22 | add_executable(sparsity ${APP_SRC2}) 23 | add_executable(noise ${APP_SRC3}) 24 | 25 | set(EXECUTABLE_OUTPUT_PATH ${PROJECT_BINARY_DIR}/bin) 26 | 27 | target_link_libraries(occlusion libfeature ${PCL_LIBRARIES}) 28 | target_link_libraries(sparsity libfeature ${PCL_LIBRARIES}) 29 | target_link_libraries(noise libfeature ${PCL_LIBRARIES}) 30 | -------------------------------------------------------------------------------- /Test/robust_test_data_features/src/noise.cpp: -------------------------------------------------------------------------------- 1 | #include "getFeatureVector.h" 2 | #include "gen_robust_test_data.h" 3 | 4 | int main (void) 5 | { 6 | // standard variance of gaussian noise with mean value = 0 7 | const std::vector sigma_vec {0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09}; 8 | // path 9 | std::string read_file_base_path = "/media/shao/TOSHIBA EXT/data_object_velodyne/Daten/test/data_augmented"; 10 | 11 | for (auto & sigma : sigma_vec) 12 | { 13 | // file path to write 14 | std::string write_file_base_path = 15 | "/media/shao/TOSHIBA EXT/data_object_velodyne/Daten/test/robust_test/noise"; 16 | std::stringstream ss; 17 | ss << sigma; 18 | std::string write_file_path = write_file_base_path + 19 | "/" + 20 | "noi_" + 21 | ss.str() + 22 | ".txt"; 23 | for (int i = 0; i != 5; ++i) 24 | { 25 | // path 26 | std::string read_file_path = read_file_base_path + "/" + label_name[i]; 27 | std::vector file_name_vec; 28 | // get all pcd-files' name under 
the folder 29 | getFileName(read_file_path, file_name_vec); 30 | // traverse every pcd-file to build feature vector 31 | for (auto & f : file_name_vec) 32 | { 33 | // pcd file path 34 | std::string pcd_path = read_file_path + "/" + f; 35 | // read pcd data file 36 | pcl::PointCloud::Ptr cloud_original (new pcl::PointCloud); 37 | readPCD(pcd_path, cloud_original); 38 | if (cloud_original->points.empty()) 39 | continue; 40 | // add gaussian noise to point cloud 41 | pcl::PointCloud::Ptr cloud (new pcl::PointCloud); 42 | addNoise(cloud_original, sigma, cloud); 43 | if (cloud->points.empty()) 44 | continue; 45 | // calculate or estimate feature vector 46 | std::vector featVec; 47 | getFeatureVector(0.16, cloud, i, featVec); 48 | 49 | // save the calculated feature vector into disk, when featVec is valid 50 | if (featVec.size() == 47) 51 | writeTXT(featVec, label[i], write_file_path); 52 | } 53 | } 54 | } 55 | } -------------------------------------------------------------------------------- /Test/robust_test_data_features/src/occlusion.cpp: -------------------------------------------------------------------------------- 1 | #include "getFeatureVector.h" 2 | #include "gen_robust_test_data.h" 3 | 4 | int main (void) 5 | { 6 | // occlusion percentage vector 7 | const std::vector occ_percen_vec {10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0}; // % 8 | // path 9 | std::string read_file_base_path = "/media/shao/TOSHIBA EXT/data_object_velodyne/Daten/test/data_augmented"; 10 | 11 | for (auto & op : occ_percen_vec) 12 | { 13 | // file path to write 14 | std::string write_file_base_path = 15 | "/media/shao/TOSHIBA EXT/data_object_velodyne/Daten/test/robust_test/occlusion"; 16 | std::stringstream occper; 17 | occper << op; 18 | std::string write_file_path = write_file_base_path + 19 | "/" + 20 | "op_" + 21 | occper.str() + 22 | ".txt"; 23 | for (int i = 0; i != 5; ++i) 24 | { 25 | // path 26 | std::string read_file_path = read_file_base_path + "/" + label_name[i]; 27 | 
std::vector file_name_vec; 28 | // get all pcd-files' name under the folder 29 | getFileName(read_file_path, file_name_vec); 30 | // traverse every pcd-file to build feature vector 31 | for (auto & f : file_name_vec) 32 | { 33 | // pcd file path 34 | std::string pcd_path = read_file_path + "/" + f; 35 | // read pcd data file 36 | pcl::PointCloud::Ptr cloud_original (new pcl::PointCloud); 37 | readPCD(pcd_path, cloud_original); 38 | if (cloud_original->points.empty()) 39 | continue; 40 | // generate occluded point cloud 41 | pcl::PointCloud::Ptr cloud (new pcl::PointCloud); 42 | getOccludedCloud(cloud_original, label_name[i], op, cloud); 43 | if (cloud->points.empty()) 44 | continue; 45 | // calculate or estimate feature vector 46 | std::vector featVec; 47 | getFeatureVector(0.16, cloud, i, featVec); 48 | 49 | // save the calculated feature vector into disk, when featVec is valid 50 | if (featVec.size() == 47) 51 | writeTXT(featVec, label[i], write_file_path); 52 | } 53 | } 54 | } 55 | } -------------------------------------------------------------------------------- /Test/robust_test_data_features/src/sparsity.cpp: -------------------------------------------------------------------------------- 1 | #include "getFeatureVector.h" 2 | #include "gen_robust_test_data.h" 3 | 4 | int main (void) 5 | { 6 | // percentage of points after downsampling 7 | const std::vector spa_percen_vec {10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0}; // % 8 | // path 9 | std::string read_file_base_path = "/media/shao/TOSHIBA EXT/data_object_velodyne/Daten/test/data_augmented"; 10 | 11 | for (auto & sp : spa_percen_vec) 12 | { 13 | // file path to write 14 | std::string write_file_base_path = 15 | "/media/shao/TOSHIBA EXT/data_object_velodyne/Daten/test/robust_test/sparsity"; 16 | std::stringstream spaper; 17 | spaper << sp; 18 | std::string write_file_path = write_file_base_path + 19 | "/" + 20 | "sp_" + 21 | spaper.str() + 22 | ".txt"; 23 | for (int i = 0; i != 5; ++i) 24 | { 25 | 
// path 26 | std::string read_file_path = read_file_base_path + "/" + label_name[i]; 27 | std::vector file_name_vec; 28 | // get all pcd-files' name under the folder 29 | getFileName(read_file_path, file_name_vec); 30 | // traverse every pcd-file to build feature vector 31 | for (auto & f : file_name_vec) 32 | { 33 | // pcd file path 34 | std::string pcd_path = read_file_path + "/" + f; 35 | // read pcd data file 36 | pcl::PointCloud::Ptr cloud_original (new pcl::PointCloud); 37 | readPCD(pcd_path, cloud_original); 38 | if (cloud_original->points.empty()) 39 | continue; 40 | // generate sparse point cloud 41 | pcl::PointCloud::Ptr cloud (new pcl::PointCloud); 42 | getSparseCloud(cloud_original, sp, cloud); 43 | if (cloud->points.empty()) 44 | continue; 45 | // calculate or estimate feature vector 46 | std::vector featVec; 47 | getFeatureVector(0.16, cloud, i, featVec); 48 | 49 | // save the calculated feature vector into disk, when featVec is valid 50 | if (featVec.size() == 47) 51 | writeTXT(featVec, label[i], write_file_path); 52 | } 53 | } 54 | } 55 | } -------------------------------------------------------------------------------- /Test/sparsity_test_rf.py: -------------------------------------------------------------------------------- 1 | # encoding=utf-8 2 | 3 | ############################## 4 | # basic_test_randomforest.py # 5 | ############################## 6 | 7 | from sklearn.externals import joblib 8 | import numpy as np 9 | from sklearn import metrics 10 | import os 11 | import sys 12 | 13 | # get model file path 14 | BASE_DIR = os.path.dirname(os.path.abspath(__file__)) 15 | ROOT_DIR = os.path.dirname(BASE_DIR) 16 | MODEL_DIR = ROOT_DIR + '/Training/rf.pkl' 17 | 18 | # load data for testing 19 | feature_matrix = np.loadtxt('/media/shao/TOSHIBA EXT/data_object_velodyne/Daten/test/robust_test/sparsity/sp_90.txt') 20 | data = feature_matrix[:, :-1] 21 | target = feature_matrix[:, -1] 22 | 23 | # load the trained model 24 | rfc = joblib.load(MODEL_DIR) 
25 | 26 | # prediction / test 27 | y_pred = rfc.predict(data) 28 | score = metrics.accuracy_score(target, y_pred) 29 | print('accuracy score = ', score) 30 | conf_matrix = metrics.confusion_matrix(target, y_pred, [0,1,2,3,4]) 31 | print('confusion matrix = ') 32 | print(conf_matrix) 33 | recall = metrics.recall_score(target, y_pred, average='weighted') 34 | print('recall score = ', recall) 35 | precision = metrics.precision_score(target, y_pred, average='weighted') 36 | print('precision score = ', precision) 37 | f1 = metrics.f1_score(target, y_pred, average='weighted') 38 | print('f1 score = ', f1) 39 | prob = rfc.predict_proba(data) 40 | log_loss = metrics.log_loss(target, prob, labels=np.array([0,1,2,3,4])) 41 | print('log loss = ', log_loss) -------------------------------------------------------------------------------- /Test/sparsity_test_svm.py: -------------------------------------------------------------------------------- 1 | # encoding=utf-8 2 | 3 | ##################### 4 | # basic_test_svm.py # 5 | ##################### 6 | 7 | from sklearn import preprocessing 8 | from sklearn.externals import joblib 9 | import numpy as np 10 | from sklearn import metrics 11 | import os 12 | import sys 13 | 14 | # get model file path 15 | BASE_DIR = os.path.dirname(os.path.abspath(__file__)) 16 | ROOT_DIR = os.path.dirname(BASE_DIR) 17 | MODEL_DIR = ROOT_DIR + '/Training/svm.pkl' 18 | 19 | # load data for testing 20 | feature_matrix = np.loadtxt('/media/shao/TOSHIBA EXT/data_object_velodyne/Daten/test/robust_test/sparsity/sp_90.txt') 21 | print('the shape of the loaded feature matrix is ', feature_matrix.shape) 22 | 23 | # feature scaling and normalizing 24 | np.random.shuffle(feature_matrix) 25 | data = feature_matrix[:, :-1] 26 | target = feature_matrix[:, -1] 27 | # scaler = preprocessing.MaxAbsScaler(copy=False) 28 | scaler = preprocessing.StandardScaler(copy=False) 29 | scaler.fit_transform(data) 30 | normalizer = preprocessing.Normalizer(norm='l2', copy=False) 
31 | normalizer.fit_transform(data) 32 | 33 | # load the trained model 34 | clf = joblib.load(MODEL_DIR) 35 | 36 | # prediction / test 37 | y_pred = clf.predict(data) 38 | score = metrics.accuracy_score(target, y_pred) 39 | print('accuracy score = ', score) 40 | conf_matrix = metrics.confusion_matrix(target, y_pred, [0,1,2,3,4]) 41 | print('confusion matrix = ') 42 | print(conf_matrix) 43 | recall = metrics.recall_score(target, y_pred, average='weighted') 44 | print('recall score = ', recall) 45 | precision = metrics.precision_score(target, y_pred, average='weighted') 46 | print('precision score = ', precision) 47 | f1 = metrics.f1_score(target, y_pred, average='weighted') 48 | print('f1 score = ', f1) 49 | # y_pred = clf.decision_function(data) 50 | # hinge_loss = metrics.hinge_loss(target, y_pred, labels=np.array([0,1,2,3,4])) 51 | # print('hinge loss = ', hinge_loss) 52 | prob = clf.predict_proba(data) 53 | log_loss = metrics.log_loss(target, prob, labels=np.array([0,1,2,3,4])) 54 | print('log loss = ', log_loss) -------------------------------------------------------------------------------- /Test/sparsity_test_xgb.py: -------------------------------------------------------------------------------- 1 | # encoding=utf-8 2 | 3 | ############################ 4 | # sparsity_test_xgboost.py # 5 | ############################ 6 | 7 | from sklearn.externals import joblib 8 | import numpy as np 9 | from sklearn import metrics 10 | import os 11 | import sys 12 | # import datetime 13 | 14 | # get model file path 15 | BASE_DIR = os.path.dirname(os.path.abspath(__file__)) 16 | ROOT_DIR = os.path.dirname(BASE_DIR) 17 | MODEL_DIR = ROOT_DIR + '/Training/xgb.pkl' 18 | 19 | # load data for testing 20 | feature_matrix = np.loadtxt('/media/shao/TOSHIBA EXT/data_object_velodyne/Daten/test/robust_test/sparsity/sp_90.txt') 21 | data = feature_matrix[:, :-1] 22 | target = feature_matrix[:, -1] 23 | 24 | # load the trained model 25 | xgbc = joblib.load(MODEL_DIR) 26 | 27 | # 
prediction / test 28 | y_pred = xgbc.predict(data) 29 | score = metrics.accuracy_score(target, y_pred) 30 | print('accuracy score = ', score) 31 | conf_matrix = metrics.confusion_matrix(target, y_pred, [0,1,2,3,4]) 32 | print('confusion matrix = ') 33 | print(conf_matrix) 34 | recall = metrics.recall_score(target, y_pred, average='weighted') 35 | print('recall score = ', recall) 36 | precision = metrics.precision_score(target, y_pred, average='weighted') 37 | print('precision score = ', precision) 38 | f1 = metrics.f1_score(target, y_pred, average='weighted') 39 | print('f1 score = ', f1) 40 | prob = xgbc.predict_proba(data) 41 | log_loss = metrics.log_loss(target, prob, labels=np.array([0,1,2,3,4])) 42 | print('log loss = ', log_loss) 43 | 44 | # now_time = datetime.datetime.now() 45 | # with open("log_test_result.txt","a") as f: 46 | # f.write("\n") 47 | # f.write('----- {} -----'.format(str(now_time))) 48 | # f.write("\n") 49 | # f.write('accuracy {}'.format(str(score))) 50 | # f.write("\n") 51 | # f.write('confusion_matrix {}'.format(str(conf_matrix))) 52 | # f.write("\n") 53 | # f.write('log loss {}'.format(str(log_loss))) 54 | # f.write("\n") -------------------------------------------------------------------------------- /Training/rf.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shaozhenghan/PointCloud_Classification_using_ML/94abb10744a6090f941d32d972de01d53521b324/Training/rf.pkl -------------------------------------------------------------------------------- /Training/svm.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shaozhenghan/PointCloud_Classification_using_ML/94abb10744a6090f941d32d972de01d53521b324/Training/svm.pkl -------------------------------------------------------------------------------- /Training/train_randomforest.py: 
-------------------------------------------------------------------------------- 1 | # encoding=utf-8 2 | 3 | ####################### 4 | # train random forest # 5 | ####################### 6 | 7 | from sklearn.ensemble import RandomForestClassifier 8 | import numpy as np 9 | from sklearn.model_selection import GridSearchCV 10 | from sklearn.metrics import fbeta_score, make_scorer 11 | 12 | # load data for training 13 | feature_matrix = np.loadtxt('/media/shao/TOSHIBA EXT/data_object_velodyne/feature_matrix_with_label/train/data/r_0.16.txt') 14 | print('the shape of the loaded feature matrix is ', feature_matrix.shape) 15 | data = feature_matrix[:, :-1] 16 | target = feature_matrix[:, -1] 17 | 18 | # grid search for tuning hyperparameters 19 | 20 | # coarse tune 21 | # params = { 22 | # 'max_depth':[6, 8, 10, 12, 15, 18, 20], 23 | # 'n_estimators':[10, 20, 30, 40, 50, 60, 70], 24 | # 'max_features':[15, 20, 25, 30, 35, 40] 25 | # } 26 | 27 | # fine tune 28 | params = { 29 | 'max_depth':[10, 11, 12, 13, 14, 15], # optimal 12 30 | 'n_estimators':[54, 55, 56, 57, 58], # optimal 56 31 | 'max_features':[18, 19, 20, 21, 22] # optimal 20 32 | } 33 | rfc = RandomForestClassifier( 34 | # max_depth=10, 35 | random_state=0, 36 | # n_estimators=10, 37 | # max_features=30, 38 | oob_score=True, 39 | bootstrap=True, 40 | class_weight='balanced' 41 | ) 42 | fone_scorer = make_scorer(fbeta_score, beta=1, average='weighted') 43 | clf = GridSearchCV ( 44 | rfc, 45 | params, 46 | scoring=fone_scorer, 47 | n_jobs=4, 48 | cv=5, 49 | iid=True, 50 | refit=True 51 | ) 52 | clf.fit(data, target) 53 | 54 | # print important info 55 | # print(rfc.feature_importances_) 56 | # print(rfc.oob_score_) 57 | print('clf.cv_results_', clf.cv_results_) 58 | print('clf.best_params_', clf.best_params_) 59 | print('clf.best_estimator_', clf.best_estimator_) 60 | print('clf.grid_scores_', clf.grid_scores_) 61 | print('best score', clf.grid_scores_[clf.best_index_]) 62 | 63 | # save the trained model 64 | 
from sklearn.externals import joblib 65 | joblib.dump(clf, 'rf.pkl') -------------------------------------------------------------------------------- /Training/train_svm.py: -------------------------------------------------------------------------------- 1 | # encoding=utf-8 2 | 3 | ############# 4 | # train svm # 5 | ############# 6 | 7 | import numpy as np 8 | from sklearn import svm 9 | from sklearn import preprocessing 10 | from sklearn.model_selection import GridSearchCV 11 | from sklearn.metrics import fbeta_score, make_scorer 12 | 13 | # load data for training 14 | feature_matrix = np.loadtxt('/media/shao/TOSHIBA EXT/data_object_velodyne/feature_matrix_with_label/train/data/r_0.16.txt') 15 | print('the shape of the loaded feature matrix is ', feature_matrix.shape) 16 | 17 | # feature scaling and normalizing 18 | np.random.shuffle(feature_matrix) 19 | data = feature_matrix[:, :-1] 20 | target = feature_matrix[:, -1] 21 | # scaler = preprocessing.MaxAbsScaler(copy=False) 22 | scaler = preprocessing.StandardScaler(copy=False) 23 | scaler.fit_transform(data) 24 | normalizer = preprocessing.Normalizer(norm='l2', copy=False) 25 | normalizer.fit_transform(data) 26 | 27 | # train svm using grid search 28 | fone_scorer = make_scorer(fbeta_score, beta=1, average='weighted') 29 | params = { 'kernel':['rbf', 'linear'], 30 | 'C':[1.0, 2.0, 2.1, 2.2, 2.3, 2.4, 2.5, 2.6], 31 | 'gamma':[0.5, 1.0, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, 2.0] 32 | } 33 | # params = {'kernel':['rbf'], 'C':[2.3], 'gamma':[1.4]} 34 | svc = svm.SVC(probability=True, class_weight='balanced') 35 | clf = GridSearchCV ( 36 | svc, 37 | params, 38 | scoring=fone_scorer, 39 | n_jobs=5, 40 | cv=5, 41 | return_train_score=False, 42 | iid=True, 43 | refit=True 44 | ) 45 | clf.fit(data, target) 46 | 47 | # print important info 48 | print('clf.cv_results_', clf.cv_results_) 49 | print('clf.best_params_', clf.best_params_) 50 | print('clf.best_estimator_', clf.best_estimator_) 51 | print('clf.grid_scores_', 
clf.grid_scores_) 52 | print('best score', clf.grid_scores_[clf.best_index_]) 53 | 54 | # save the trained model 55 | from sklearn.externals import joblib 56 | joblib.dump(clf, 'svm.pkl') 57 | -------------------------------------------------------------------------------- /Training/train_xgb.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | ################# 4 | # train xgboost # 5 | ################# 6 | 7 | from sklearn import metrics 8 | from xgboost.sklearn import XGBClassifier 9 | import numpy as np 10 | from sklearn.model_selection import GridSearchCV 11 | from sklearn.metrics import fbeta_score, make_scorer 12 | 13 | # load data for training 14 | feature_matrix = np.loadtxt('/media/shao/TOSHIBA EXT/data_object_velodyne/feature_matrix_with_label/train/data/r_0.16.txt') 15 | print('the shape of the loaded feature matrix is ', feature_matrix.shape) 16 | data = feature_matrix[:, :-1] 17 | target = feature_matrix[:, -1] 18 | 19 | # xgbc = XGBClassifier( 20 | # silent=0 ,#设置成1则没有运行信息输出,最好是设置为0.是否在运行升级时打印消息。 21 | # #nthread=4,# cpu 线程数 默认最大 22 | # learning_rate= 0.3, # 如同学习率 23 | # min_child_weight=1, 24 | # # 这个参数默认是 1,是每个叶子里面 h 的和至少是多少,对正负样本不均衡时的 0-1 分类而言 25 | # #,假设 h 在 0.01 附近,min_child_weight 为 1 意味着叶子节点中最少需要包含 100 个样本。 26 | # #这个参数非常影响结果,控制叶子节点中二阶导的和的最小值,该参数值越小,越容易 overfitting。 27 | # max_depth=6, # 构建树的深度,越大越容易过拟合 28 | # gamma=0.1, # 树的叶子节点上作进一步分区所需的最小损失减少,越大越保守,一般0.1、0.2这样子。 29 | # subsample=1, # 随机采样训练样本 训练实例的子采样比 30 | # max_delta_step=0,#最大增量步长,我们允许每个树的权重估计。 31 | # colsample_bytree=1, # 生成树时进行的列采样 32 | # reg_lambda=1, # 控制模型复杂度的权重值的L2正则化项参数,参数越大,模型越不容易过拟合。 33 | # #reg_alpha=0, # L1 正则项参数 34 | # scale_pos_weight=1, #如果取值大于0的话,在类别样本不平衡的情况下有助于快速收敛。平衡正负权重 35 | # objective= 'multi:softmax', #多分类的问题 指定学习任务和相应的学习目标 36 | # num_class=5, # 类别数,多分类与 multisoftmax 并用 37 | # n_estimators=100, #树的个数 38 | # seed=1000 #随机种子 39 | # #eval_metric= 'auc' 40 | # ) 41 | 42 | # xgbc.fit(data, target, 
eval_metric='auc') 43 | # y_true, y_pred = target, xgbc.predict(data) 44 | # print"Accuracy : %.4g" % metrics.accuracy_score(y_true, y_pred) 45 | 46 | 47 | xgbc = XGBClassifier( num_class=5, 48 | objective='multi:softmax', 49 | scale_pos_weight=1, 50 | seed=1000, 51 | colsample_bytree=1, 52 | silent=0, 53 | subsample=1 54 | ) 55 | # # coarse tune 56 | # params = { 'learning_rate':[0.1, 0.3, 0.5, 0.7, 0.9], 57 | # 'min_child_weight':[1, 2, 3], 58 | # 'max_depth':[4, 6, 8, 10], 59 | # 'gamma':[0.1, 0.2, 0.3, 0.4], 60 | # 'max_delta_step':[0, 1], 61 | # 'reg_lambda':[1, 1.5, 2, 2.5, 3], 62 | # 'n_estimators':[10, 20, 40, 60, 100, 120] 63 | # } 64 | 65 | # fine tune 66 | params = { 'learning_rate':[0.9, 1.0, 1.1, 1.2, 1.4], # optimal 1.0 67 | 'min_child_weight':[2], # optimal 2 68 | 'max_depth':[8, 9, 10], # optimal 9 69 | 'gamma':[0.07, 0.08, 0.09, 0.1], # optimal 0.09 70 | 'max_delta_step':[0], # optimal 0 71 | 'reg_lambda':[3.3, 3.4, 3.5, 3.7, 3.9], # optimal 3.4 72 | 'n_estimators':[10, 11, 12, 13, 14] # optimal 13 73 | } 74 | fone_scorer = make_scorer(fbeta_score, beta=1, average='weighted') 75 | clf = GridSearchCV ( 76 | xgbc, 77 | params, 78 | scoring=fone_scorer, 79 | n_jobs=4, 80 | cv=5, 81 | return_train_score=False, 82 | iid=True 83 | ) 84 | clf.fit(data, target) 85 | 86 | # print important info 87 | print('clf.cv_results_', clf.cv_results_) 88 | print('clf.best_params_', clf.best_params_) 89 | print('clf.best_estimator_', clf.best_estimator_) 90 | print('clf.grid_scores_', clf.grid_scores_) 91 | print('best score', clf.grid_scores_[clf.best_index_]) 92 | 93 | # save the trained model 94 | from sklearn.externals import joblib 95 | joblib.dump(clf, 'xgb.pkl') -------------------------------------------------------------------------------- /Training/xgb.pkl: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/shaozhenghan/PointCloud_Classification_using_ML/94abb10744a6090f941d32d972de01d53521b324/Training/xgb.pkl --------------------------------------------------------------------------------