├── .gitignore ├── LICENSE ├── README.md ├── docs └── pics │ ├── campus.png │ ├── file_structure.png │ └── urban.png └── file.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2022, METASLAM 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 
19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ALITA 2 | [**ALITA: A Large-scale Incremental Dataset for Long-term Autonomy**](https://metaslam.github.io/datasets/alita/) 3 | 4 | ## Competition 5 | :trophy:[GPR Competition](https://github.com/MetaSLAM/GPR_Competition/tree/main) which aims to push visual and LiDAR state-of-the-art techniques for localization in large-scale environments.
6 | :trophy:[General Place Recognition (GPR) for Autonomous Map Assembling](https://metaslam.github.io/competitions/iros2023/) which aims to evaluate the data association ability between trajectories that exhibit overlapping regions, without any GPS assistance. Participants who are interested could pay a visit to our [official competition website](https://www.aicrowd.com/challenges/icra2022-general-place-recognition-visual-terrain-relative-navigation/) for more details.
7 | **Sign up for GPR Competition:**
8 | :point_right: [[ICRA2022] General Place Recognition: Visual Terrain Relative Navigation](https://www.aicrowd.com/challenges/icra2022-general-place-recognition-visual-terrain-relative-navigation/)
9 | :point_right: [[ICRA2022 & IROS2023] General Place Recognition: City-scale UGV Localization](https://www.aicrowd.com/challenges/icra2022-general-place-recognition-city-scale-ugv-localization/) 10 | 11 | ## Dataset Release 12 | 13 | * Raw Data https://www.dropbox.com/sh/svxb160qcrq0j3e/AABPvyeOxNPMKuTMERfEcwaPa?dl=0 14 | * Processed Data (human-parseable data) https://www.dropbox.com/scl/fo/9o3uhejbyidxxwlnx912m/h?rlkey=bpryzdghexxez9p1m9100viqd&dl=0 15 | 16 | ## Dataset Description 17 | 18 | The ALITA dataset is composed of two datasets: 19 | 20 | - **Urban Dataset**: This dataset concentrates on LiDAR place recognition over a large-scale area within an urban environment. We collected 50 vehicle trajectories covering part of Pittsburgh and thus including diverse environments. Each trajectory overlaps with the others at at least one junction, and some trajectories even have multiple junctions. This feature enables the dataset to be used in tasks such as LiDAR place recognition and multi-map fusion. 21 | ![urban](docs/pics/urban.png) 22 | 23 | - **Campus Dataset**: This dataset focuses on visual localization for UGVs using omnidirectional cameras within outdoor campus-type environments. We collected 80 real-world UGV sequences using a rover robot equipped with a 360 camera, a Velodyne VLP-16 LiDAR, a RealSense VIO and an Xsens MTI IMU. These sequences cover 10 different trajectories. For each trajectory, we traversed 8 times, including forward (start point to endpoint)/backward (endpoint to start point) directions and day-light (2pm to 4:30pm)/dawn-light (6am to 7am or 5pm to 6pm) conditions. The 8 traversals comprise two forward and two backward sequences during day-light, and two forward and two backward sequences during dawn-light. 
24 | ![campus](docs/pics/campus.png) 25 | 26 | ## Data Format 27 | ![file_structure](docs/pics/file_structure.png) 28 | 29 | ### Urban Dataset 30 | - CloudGlobal.pcd: Global maps are processed to contain 31 | the 3D structure of each trajectory, which is provided 32 | in Point Cloud Data (PCD) file format. 33 | - Intra-sequence Odometry (poses.csv): We save the key poses 34 | generated by SLAM as odometry information and 35 | provide them in (CSV) file format. The key poses are 36 | within the local coordinate of each trajectory, and the 37 | distance between adjacent poses is around 1m. 38 | - Intra-trajectory Correspondences (correspondence.csv): The correspondences between the poses in two trajectories which 39 | share overlapping areas are saved in (CSV) file format. 40 | - GPS (gps.txt): The GPS data has limited precision and should not be considered as ground truth, but rather 41 | for visualization purposes only. 42 | - Clouds: Based on the global map and poses, submaps 43 | are generated by querying points within 50 meters 44 | centered at each pose from the global map. **This part 45 | of the data is not directly provided in the Dataset Release section.** 46 | It can be generated with the [file.py](file.py) provided. 47 | - Raw Data: Raw data is also provided as (rosbag) 48 | ROS packages. Each rosbag includes two ROS 49 | topics, namely /imu/data and /velodyne_packets, representing 50 | the Inertial Measurement Unit (IMU) and LiDAR respectively. 51 | 52 | ### Campus Dataset 53 | - **CloudGlobal.pcd, Intra-sequence Odometry (poses_intra.csv) and Clouds** 54 | are processed in the same format as Urban. 55 | - Inter-sequence Odometry (poses_inter.csv): The poses of the 8 56 | sequences within the same trajectory are unified into 57 | the same global coordinate and saved in (CSV) file 58 | format. For each trajectory, day_forward_1's coordinate 59 | is selected as the global coordinate. 
60 | - Panoramas: For each key pose, a corresponding 61 | omnidirectional picture with a resolution of 1024 × 62 | 512 is provided in (PNG) file format. 63 | - Raw Data: Raw data is also provided in (rosbag) 64 | ROS package. Each rosbag includes three key ROS 65 | topics, namely /imu/data, /velodyne_points and 66 | /camera/image, representing Inertial Measurement 67 | Unit(IMU), LiDAR and camera, respectively 68 | 69 | ## Citation 70 | 71 | If you use this dataset in your research, please cite as: 72 | 73 | ``` 74 | @misc{yin2022alita, 75 | title={ALITA: A Large-scale Incremental Dataset for Long-term Autonomy}, 76 | author={Peng Yin and Shiqi Zhao and Ruohai Ge and Ivan Cisneros and Ruijie Fu and Ji Zhang and Howie Choset and Sebastian Scherer}, 77 | year={2022}, 78 | eprint={2205.10737}, 79 | archivePrefix={arXiv}, 80 | primaryClass={cs.RO} 81 | } 82 | ``` 83 | 84 | ## Related Work 85 | **[AutoMerge: A Framework for Map Assembling and Smoothing in City-scale Environments](https://ieeexplore.ieee.org/document/10203034)**
86 | 87 | Github: https://github.com/MetaSLAM/AutoMerge_Server 88 | -------------------------------------------------------------------------------- /docs/pics/campus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MetaSLAM/ALITA/92f0e914e6666b3cd67f76cc16dbc0edf5698681/docs/pics/campus.png -------------------------------------------------------------------------------- /docs/pics/file_structure.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MetaSLAM/ALITA/92f0e914e6666b3cd67f76cc16dbc0edf5698681/docs/pics/file_structure.png -------------------------------------------------------------------------------- /docs/pics/urban.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MetaSLAM/ALITA/92f0e914e6666b3cd67f76cc16dbc0edf5698681/docs/pics/urban.png -------------------------------------------------------------------------------- /file.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import shutil 3 | import os 4 | from copy import copy, deepcopy 5 | 6 | import numpy as np 7 | import pandas as pd 8 | import open3d as o3d 9 | from scipy.spatial.transform import Rotation as R 10 | 11 | def create_or_clear_folder(folder_path): 12 | if os.path.exists(folder_path): 13 | shutil.rmtree(folder_path) 14 | 15 | os.makedirs(folder_path) 16 | 17 | def generate_submap(dataset, input_folder, down_sampled): 18 | create_or_clear_folder(f"{input_folder}/Clouds") 19 | map_pcd = o3d.io.read_point_cloud(f'{input_folder}/CloudGlobal.pcd') 20 | pcd_tree = o3d.geometry.KDTreeFlann(map_pcd) 21 | bbox = o3d.geometry.AxisAlignedBoundingBox( 22 | np.array([-20, -20, -1.8]), 23 | np.array([20, 20, 100.0])) 24 | 25 | if (dataset == "Campus"): 26 | poses_df = pd.read_csv(f'{input_folder}/poses_intra.csv', dtype=str) 27 | else: 28 
| poses_df = pd.read_csv(f'{input_folder}/poses.csv', dtype=str) 29 | timestamp_list = list(poses_df["timestamp"]) 30 | poses = poses_df.drop('timestamp', axis=1).to_numpy().astype(np.float64) 31 | for index, item in enumerate(poses): 32 | timestamp = timestamp_list[index].replace('.', '_') 33 | trans = item[0:3] 34 | rot_matrix = R.from_quat(item[3:]) 35 | 36 | # save submap 37 | [k, p_idx, _] = pcd_tree.search_radius_vector_3d(trans, 50) 38 | pcd_data = np.asarray(map_pcd.points)[p_idx, :] 39 | pcd_data -= trans 40 | if pcd_data.shape[0] == 0: 41 | continue 42 | pcd = o3d.geometry.PointCloud() 43 | pcd.points = o3d.utility.Vector3dVector(pcd_data) 44 | trans_matrix = np.eye(4) 45 | trans_matrix[0:3, 0:3] = rot_matrix.inv().as_matrix() 46 | pcd.transform(trans_matrix) 47 | 48 | if (down_sampled): 49 | # save downsampled submap 50 | pnv_pcd = pcd.crop(bbox) 51 | pnv_downsampled_pcd = pcd_downsample(pnv_pcd, 4096, 5, 0.1) 52 | o3d.io.write_point_cloud(f"{input_folder}/Clouds/{timestamp}.pcd", pnv_downsampled_pcd) 53 | else: 54 | o3d.io.write_point_cloud(f"{input_folder}/Clouds/{timestamp}.pcd", pcd) 55 | 56 | 57 | def pcd_downsample(initPcd, desiredNumOfPoint, leftVoxelSize, rightVoxelSize): 58 | """ 59 | Downsample pointcloud to 4096 points 60 | Modify based on the version from https://blog.csdn.net/SJTUzhou/article/details/122927787 61 | """ 62 | assert leftVoxelSize > rightVoxelSize, "leftVoxelSize should be larger than rightVoxelSize" 63 | assert len(initPcd.points) > desiredNumOfPoint, "desiredNumOfPoint should be less than or equal to the num of points in the given point cloud." 64 | if len(initPcd.points) == desiredNumOfPoint: 65 | return initPcd 66 | 67 | pcd = deepcopy(initPcd) 68 | pcd = pcd.voxel_down_sample(leftVoxelSize) 69 | assert len(pcd.points) <= desiredNumOfPoint, "Please specify a larger leftVoxelSize." 
70 | pcd = deepcopy(initPcd) 71 | pcd = pcd.voxel_down_sample(rightVoxelSize) 72 | assert len(pcd.points) >= desiredNumOfPoint, "Please specify a smaller rightVoxelSize." 73 | 74 | pcd = deepcopy(initPcd) 75 | midVoxelSize = (leftVoxelSize + rightVoxelSize) / 2. 76 | pcd = pcd.voxel_down_sample(midVoxelSize) 77 | while len(pcd.points) != desiredNumOfPoint: 78 | if len(pcd.points) < desiredNumOfPoint: 79 | leftVoxelSize = copy(midVoxelSize) 80 | else: 81 | rightVoxelSize = copy(midVoxelSize) 82 | midVoxelSize = (leftVoxelSize + rightVoxelSize) / 2. 83 | pcd = deepcopy(initPcd) 84 | pcd = pcd.voxel_down_sample(midVoxelSize) 85 | 86 | return pcd 87 | 88 | def main(dataset, input_folder, down_sampled): 89 | if dataset not in ["Campus", "Urban"]: 90 | raise ValueError("Dataset must be either 'Campus' or 'Urban'") 91 | 92 | if not isinstance(down_sampled, bool): 93 | raise ValueError("down_sampled must be a boolean value") 94 | 95 | # Placeholder for actual data processing logic 96 | print(f"Processing dataset: {dataset}") 97 | print(f"Reading data from: {input_folder}") 98 | print(f"Using down-sampling: {down_sampled}") 99 | 100 | # Here you would add the code to process the data 101 | generate_submap(dataset, input_folder, down_sampled) 102 | 103 | if __name__ == "__main__": 104 | parser = argparse.ArgumentParser(description="Process some data.") 105 | parser.add_argument("dataset", type=str, help="Dataset to process ('Campus' or 'Urban')") 106 | parser.add_argument("input_folder", type=str, help="Folder to read input data from") 107 | parser.add_argument("--down_sampled", type=bool, default=False, help="Use down samples pcd (default: False)") 108 | 109 | args = parser.parse_args() 110 | 111 | main(args.dataset, args.input_folder, args.down_sampled) 112 | 113 | ''' 114 | Examples 115 | python file.py Campus "/Dataset/Campus/Traj_01/day_forward_2" 116 | python file.py Campus "/Dataset/Campus/Traj_01/day_forward_1" --down_sampled=True 117 | python file.py Urban 
"/Dataset/Urban/Traj_01-10/Traj_01" 118 | ''' 119 | --------------------------------------------------------------------------------