├── .idea
│   ├── depth-hints.iml
│   ├── misc.xml
│   ├── modules.xml
│   ├── vcs.xml
│   └── workspace.xml
├── LICENSE
├── README.md
├── __pycache__
│   ├── kitti_utils.cpython-36.pyc
│   ├── layers.cpython-36.pyc
│   ├── options.cpython-36.pyc
│   └── utils.cpython-36.pyc
├── assets
│   ├── copyright_notice.txt
│   ├── kitti.gif
│   ├── sceneflow.gif
│   └── test_image.jpg
├── datasets
│   ├── __init__.py
│   ├── __pycache__
│   │   ├── __init__.cpython-36.pyc
│   │   ├── kitti_dataset.cpython-36.pyc
│   │   └── mono_dataset.cpython-36.pyc
│   ├── kitti_dataset.py
│   └── mono_dataset.py
├── depth_prediction_example.ipynb
├── evaluate_depth.py
├── evaluate_pose.py
├── export_gt_depth.py
├── kitti_utils.py
├── layers.py
├── networks
│   ├── __init__.py
│   ├── __pycache__
│   │   ├── __init__.cpython-36.pyc
│   │   ├── depth_decoder.cpython-36.pyc
│   │   ├── pose_cnn.cpython-36.pyc
│   │   ├── pose_decoder.cpython-36.pyc
│   │   └── resnet_encoder.cpython-36.pyc
│   ├── depth_decoder.py
│   ├── pose_cnn.py
│   ├── pose_decoder.py
│   └── resnet_encoder.py
├── options.py
├── precompute_depth_hints.py
├── splits
│   ├── benchmark
│   │   ├── eigen_to_benchmark_ids.npy
│   │   ├── test_files.txt
│   │   ├── train_files.txt
│   │   └── val_files.txt
│   ├── eigen
│   │   ├── gt_depths.npz
│   │   └── test_files.txt
│   ├── eigen_benchmark
│   │   └── test_files.txt
│   ├── eigen_full
│   │   ├── all_files.txt
│   │   ├── train_files.txt
│   │   └── val_files.txt
│   ├── eigen_zhou
│   │   ├── train_files.txt
│   │   └── val_files.txt
│   ├── kitti_archives_to_download.txt
│   └── odom
│       ├── test_files_09.txt
│       ├── test_files_10.txt
│       ├── train_files.txt
│       └── val_files.txt
├── test_simple.py
├── train.py
├── trainer.py
└── utils.py
/.idea/depth-hints.iml:
--------------------------------------------------------------------------------
[XML content not preserved in this export]
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
[XML content not preserved in this export]
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
[XML content not preserved in this export]
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
[XML content not preserved in this export]
--------------------------------------------------------------------------------
/.idea/workspace.xml:
--------------------------------------------------------------------------------
[XML content not preserved in this export]
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright © Niantic, Inc. 2020. Patent Pending.
2 |
3 | All rights reserved.
4 |
5 |
6 |
7 | ================================================================================
8 |
9 |
10 |
11 | This Software is licensed under the terms of the following DepthHints license
12 | which allows for non-commercial use only. For any other use of the software not
13 | covered by the terms of this license, please contact partnerships@nianticlabs.com
14 |
15 |
16 |
17 | ================================================================================
18 |
19 |
20 |
21 | DepthHints License
22 |
23 |
24 | This Agreement is made by and between the Licensor and the Licensee as
25 | defined and identified below.
26 |
27 |
28 | 1. Definitions.
29 |
30 | In this Agreement (“the Agreement”) the following words shall have the
31 | following meanings:
32 |
33 | "Authors" shall mean J. Watson, M. Firman, G. Brostow, D. Turmukhambetov
34 | "Licensee" Shall mean the person or organization agreeing to use the
35 | Software in accordance with these terms and conditions.
36 | "Licensor" shall mean Niantic Inc., a company organized and existing under
37 | the laws of Delaware, whose principal place of business is at 1 Ferry Building,
38 | Suite 200, San Francisco, 94111.
39 | "Software" shall mean the DepthHints Software uploaded by Licensor to the
40 | GitHub repository at https://github.com/nianticlabs/depth-hints on
41 | 24th Feb 2020 in source code or object code form and any
42 | accompanying documentation as well as any modifications or additions uploaded
43 | to the same GitHub repository by Licensor.
44 |
45 |
46 | 2. License.
47 |
48 | 2.1 The Licensor has all necessary rights to grant a license under: (i)
49 | copyright and rights in the nature of copyright subsisting in the Software; and
50 | (ii) certain patent rights resulting from a patent application filed by the
51 | Licensor in the United States in connection with the Software. The Licensor
52 | grants the Licensee for the duration of this Agreement, a free of charge,
53 | non-sublicenseable, non-exclusive, non-transferable copyright and patent
54 | license (in consequence of said patent application) to use the Software for
55 | non-commercial purpose only, including teaching and research at educational
56 | institutions and research at not-for-profit research institutions in accordance
57 | with the provisions of this Agreement. Non-commercial use expressly excludes
58 | any profit-making or commercial activities, including without limitation sale,
59 | license, manufacture or development of commercial products, use in
60 | commercially-sponsored research, use at a laboratory or other facility owned or
61 | controlled (whether in whole or in part) by a commercial entity, provision of
62 | consulting service, use for or on behalf of any commercial entity, and use in
63 | research where a commercial party obtains rights to research results or any
64 | other benefit. Any use of the Software for any purpose other than
65 | non-commercial research shall automatically terminate this License.
66 |
67 |
68 | 2.2 The Licensee is permitted to make modifications to the Software
69 | provided that any distribution of such modifications is in accordance with
70 | Clause 3.
71 |
72 | 2.3 Except as expressly permitted by this Agreement and save to the
73 | extent and in the circumstances expressly required to be permitted by law, the
74 | Licensee is not permitted to rent, lease, sell, offer to sell, or loan the
75 | Software or its associated documentation.
76 |
77 |
78 | 3. Redistribution and modifications
79 |
80 | 3.1 The Licensee may reproduce and distribute copies of the Software, with
81 | or without modifications, in source format only and only to this same GitHub
82 | repository, and provided that any and every distribution is accompanied by an
83 | unmodified copy of this License and that the following copyright notice is
84 | always displayed in an obvious manner: Copyright © Niantic, Inc. 2020. All
85 | rights reserved.
86 |
87 |
88 | 3.2 In the case where the Software has been modified, any distribution must
89 | include prominent notices indicating which files have been changed.
90 |
91 | 3.3 The Licensee shall cause any work that it distributes or publishes,
92 | that in whole or in part contains or is derived from the Software or any part
93 | thereof (“Work based on the Software”), to be licensed as a whole at no charge
94 | to all third parties entitled to a license to the Software under the terms of
95 | this License and on the same terms provided in this License.
96 |
97 |
98 | 4. Duration.
99 |
100 | This Agreement is effective until the Licensee terminates it by destroying
101 | the Software, any Work based on the Software, and its documentation together
102 | with all copies. It will also terminate automatically if the Licensee fails to
103 | abide by its terms. Upon automatic termination the Licensee agrees to destroy
104 | all copies of the Software, Work based on the Software, and its documentation.
105 |
106 |
107 | 5. Disclaimer of Warranties.
108 |
109 | The Software is provided as is. To the maximum extent permitted by law,
110 | Licensor provides no warranties or conditions of any kind, either express or
111 | implied, including without limitation, any warranties or condition of title,
112 | non-infringement or fitness for a particular purpose.
113 |
114 |
115 | 6. LIMITATION OF LIABILITY.
116 |
117 | IN NO EVENT SHALL THE LICENSOR AND/OR AUTHORS BE LIABLE FOR ANY DIRECT,
118 | INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY OR CONSEQUENTIAL DAMAGES (INCLUDING
119 | BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
120 | DATA OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
121 | LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
122 | OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
123 | ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
124 |
125 |
126 | 7. Indemnity.
127 |
128 | The Licensee shall indemnify the Licensor and/or Authors against all third
129 | party claims that may be asserted against or suffered by the Licensor and/or
130 | Authors and which relate to use of the Software by the Licensee.
131 |
132 |
133 | 8. Intellectual Property.
134 |
135 | 8.1 As between the Licensee and Licensor, copyright and all other
136 | intellectual property rights subsisting in or in connection with the Software
137 | and supporting information shall remain at all times the property of the
138 | Licensor. The Licensee shall acquire no rights in any such material except as
139 | expressly provided in this Agreement.
140 |
141 | 8.2 No permission is granted to use the trademarks or product names of the
142 | Licensor except as required for reasonable and customary use in describing the
143 | origin of the Software and for the purposes of abiding by the terms of Clause
144 | 3.1.
145 |
146 | 8.3 The Licensee shall promptly notify the Licensor of any improvement or
147 | new use of the Software (“Improvements”) in sufficient detail for Licensor to
148 | evaluate the Improvements. The Licensee hereby grants the Licensor and its
149 | affiliates a non-exclusive, fully paid-up, royalty-free, irrevocable and
150 | perpetual license to all Improvements for non-commercial academic research and
151 | teaching purposes upon creation of such improvements.
152 |
153 | 8.4 The Licensee grants an exclusive first option to the Licensor to be
154 | exercised by the Licensor within three (3) years of the date of notification of
155 | an Improvement under Clause 8.3 to use any such Improvement for commercial
156 | purposes on terms to be negotiated and agreed by Licensee and Licensor in good
157 | faith within a period of six (6) months from the date of exercise of the said
158 | option (including without limitation any royalty share in net income from such
159 | commercialization payable to the Licensee, as the case may be).
160 |
161 |
162 | 9. Acknowledgements.
163 |
164 | The Licensee shall acknowledge the Authors and use of the Software in the
165 | publication of any work that uses, or results that are achieved through, the
166 | use of the Software. The following citation shall be included in the
167 | acknowledgement: “Self-Supervised Monocular Depth Hints,
168 | by J. Watson, M. Firman, G. Brostow, D. Turmukhambetov, arXiv:1909.09051”.
169 |
170 |
171 | 10. Governing Law.
172 |
173 | This Agreement shall be governed by, construed and interpreted in
174 | accordance with English law and the parties submit to the exclusive
175 | jurisdiction of the English courts.
176 |
177 |
178 | 11. Termination.
179 |
180 | Upon termination of this Agreement, the licenses granted hereunder will
181 | terminate and Sections 5, 6, 7, 8, 9, 10 and 11 shall survive any termination
182 | of this Agreement.
183 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Self-Supervised Monocular Depth Hints
2 |
3 |
4 | **[Jamie Watson](https://scholar.google.com/citations?view_op=list_works&hl=en&user=5pC7fw8AAAAJ), [Michael Firman](http://www.michaelfirman.co.uk), [Gabriel J. Brostow](http://www0.cs.ucl.ac.uk/staff/g.brostow/) and [Daniyar Turmukhambetov](http://dantkz.github.io/) – ICCV 2019**
5 |
6 | [[Link to paper]](https://arxiv.org/abs/1909.09051)
7 |
8 |
9 |
10 | [teaser figure: qualitative KITTI results (assets/kitti.gif)]
11 |
12 | **We introduce *Depth Hints*, which improve monocular depth estimation algorithms trained from stereo pairs.**
13 |
14 | We find that photometric reprojection losses used with self-supervised learning typically have multiple local minima.
15 | This can restrict what a regression network learns, for example causing artifacts around thin structures.
16 |
17 | **Depth Hints are complementary depth suggestions obtained from simple off-the-shelf stereo algorithms**, e.g. Semi-Global Matching.
18 | These hints are used during training to guide the network to learn better weights.
19 | They require no additional data, and are assumed to be right only sometimes.
20 |
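
As a rough illustration of where such a hint comes from (the repository's `precompute_depth_hints.py`, described under the Code section below, builds the "fused" hints actually used for training and is not shown here), a single Semi-Global Matching hint for a rectified stereo pair could be computed with OpenCV roughly as follows; the file names, focal length and baseline are placeholders:

```
import cv2
import numpy as np

# Placeholder inputs: a rectified stereo pair plus approximate KITTI calibration values.
left = cv2.imread("left.png", cv2.IMREAD_GRAYSCALE)
right = cv2.imread("right.png", cv2.IMREAD_GRAYSCALE)
focal_px, baseline_m = 721.5, 0.54

# Off-the-shelf Semi-Global (Block) Matching; OpenCV returns fixed-point disparities (x16).
sgbm = cv2.StereoSGBM_create(minDisparity=0, numDisparities=192, blockSize=5)
disp = sgbm.compute(left, right).astype(np.float32) / 16.0

# Convert disparity (pixels) to a depth hint (metres); pixels with no match keep depth 0.
depth_hint = np.zeros_like(disp)
valid = disp > 0
depth_hint[valid] = focal_px * baseline_m / disp[valid]
```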
21 | Combined with other good practices, Depth Hints gives **state-of-the-art depth predictions on the KITTI benchmark** (see images above and results table below).
22 | We show additional monocular depth estimation results on the [sceneflow dataset](https://lmb.informatik.uni-freiburg.de/resources/datasets/SceneFlowDatasets.en.html):
23 |
24 |
25 |
26 | [figure: monocular depth results on SceneFlow (assets/sceneflow.gif)]
27 |
28 | ## ✏️ 📄 Citation
29 |
30 |
31 | If you find our work useful or interesting, please consider citing [our paper](https://arxiv.org/abs/1909.09051):
32 |
33 | ```
34 | @inproceedings{watson-2019-depth-hints,
35 | title = {Self-Supervised Monocular Depth Hints},
36 | author = {Jamie Watson and
37 | Michael Firman and
38 | Gabriel J. Brostow and
39 | Daniyar Turmukhambetov},
40 | booktitle = {The International Conference on Computer Vision (ICCV)},
41 | month = {October},
42 | year = {2019}
43 | }
44 | ```
45 |
46 |
47 | ## 📈 KITTI Results
48 |
49 | | Model name | Training modality | ImageNet pretrained | Resolution | Abs rel | Sq rel | 𝛿 < 1.25 |
50 | | ---------- | ---------- | ---------- | ---------- | ---------- | ---------- | ----- |
51 | | `Ours Resnet50` | Stereo | Yes | 640 x 192 | 0.102 | 0.762 | 0.880 |
52 | | `Ours Resnet50 no pt` | Stereo | No | 640 x 192 | 0.118 | 0.941 | 0.850 |
53 | | `Ours HR Resnet50` | Stereo | Yes | 1024 x 320 | 0.096 | 0.710 | 0.890 |
54 | | `Ours HR Resnet50 no pt` | Stereo | No | 1024 x 320 | 0.112 | 0.857 | 0.861 |
55 | | `Ours HR` | Mono + Stereo | Yes | 1024 x 320 | 0.098 | 0.702 | 0.887 |
56 |
57 | Please see the paper for full results. To download the weights and predictions for each model please follow the links below:
58 |
59 | | Model name | Training modality | ImageNet pretrained | Resolution | Weights | Eigen Predictions |
60 | | ---------- | ---------- | ---------- | ---------- | ---------- | ---------- |
61 | | `Ours Resnet50` | Stereo | Yes | 640 x 192 | [Download](https://storage.googleapis.com/niantic-lon-static/research/depth-hints/DH-Resnet50/models.zip) | [Download](https://storage.googleapis.com/niantic-lon-static/research/depth-hints/DH-Resnet50/disps_eigen_split.npy) |
62 | | `Ours Resnet50 no pt` | Stereo | No | 640 x 192 | [Download](https://storage.googleapis.com/niantic-lon-static/research/depth-hints/DH-Resnet50-nopt/models.zip)| [Download](https://storage.googleapis.com/niantic-lon-static/research/depth-hints/DH-Resnet50-nopt/disps_eigen_split.npy) |
63 | | `Ours HR Resnet50` | Stereo | Yes | 1024 x 320 | [Download](https://storage.googleapis.com/niantic-lon-static/research/depth-hints/DH-HR-Resnet50/models.zip) | [Download](https://storage.googleapis.com/niantic-lon-static/research/depth-hints/DH-HR-Resnet50/disps_eigen_split.npy) |
64 | | `Ours HR Resnet50 no pt` | Stereo | No | 1024 x 320 | [Download](https://storage.googleapis.com/niantic-lon-static/research/depth-hints/DH-HR-Resnet50-nopt/models.zip) | [Download](https://storage.googleapis.com/niantic-lon-static/research/depth-hints/DH-HR-Resnet50-nopt/disps_eigen_split.npy) |
65 | | `Ours HR` | Mono + Stereo | Yes | 1024 x 320 | [Download](https://storage.googleapis.com/niantic-lon-static/research/depth-hints/DH-HR-Mono%2BStereo/models.zip) | [Download](https://storage.googleapis.com/niantic-lon-static/research/depth-hints/DH-HR-Mono%2BStereo/disps_eigen_split.npy) |
66 |
67 |
68 |
69 | ## ⚙️ Code
70 |
71 | The code for ***Depth Hints*** builds upon [monodepth2](https://github.com/nianticlabs/monodepth2). If you have questions about running the code, please see the issues in that repository first.
72 |
73 | To train using depth hints:
74 | - Clone this repository
75 | - Run `python precompute_depth_hints.py --data_path <your_KITTI_path>`, optionally setting `--save_path` (will default to `<data_path>/depth_hints`) and `--filenames` (will default to training and validation images for the eigen split). This will create the "fused" depth hints referenced in the paper. This process takes approximately 4 hours on a GPU.
76 | - Add the flag `--use_depth_hints` to your usual monodepth2 training command, optionally also setting `--depth_hint_path` (will default to `<data_path>/depth_hints`). See below for a full command.
77 |
78 | 🎉 And that's it! 🎉
79 |
80 | ## 👀 Reproducing Paper Results
81 |
82 | To recreate the results from our paper, run:
83 |
84 | ```
85 | python train.py
86 | --data_path <your_KITTI_path>
87 | --log_dir <your_save_path>
88 | --model_name stereo_depth_hints
89 | --use_depth_hints
90 | --depth_hint_path <your_depth_hint_path>
91 | --frame_ids 0 --use_stereo
92 | --scheduler_step_size 5
93 | --split eigen_full
94 | --disparity_smoothness 0
95 | ```
96 |
97 | Additionally:
98 | - For Resnet50 models, add `--num_layers 50`
99 | - Add `--height 320 --width 1024` for High Resolution models (you may also have to set `--batch_size 6` depending on the size of your GPU)
100 | - For Mono + Stereo add `--frame_ids 0 -1 1 ` and remove `--split eigen_full`
101 |
102 | The results above and in the main paper arise from evaluating on the KITTI sparse LiDAR point cloud, using the Eigen Test split.
103 |
104 | To test on KITTI, run:
105 |
106 | ```
107 | python evaluate_depth.py
108 | --data_path <your_KITTI_path>
109 | --load_weights_folder <your_model_path>
110 | --eval_stereo
111 | ```
112 |
113 | Make sure you have run `export_gt_depth.py` to extract ground truth files.
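
For example, to export the Eigen-split ground truth (use `--split eigen_benchmark` for the improved ground truth), with the path replaced appropriately:

```
python export_gt_depth.py --data_path <your_KITTI_path> --split eigen
```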
114 |
115 | Additionally, if you see `ValueError: Object arrays cannot be loaded when allow_pickle=False`, then either downgrade numpy, or change line 166 in `evaluate_depth.py` to
116 |
117 | ```
118 | gt_depths = np.load(gt_path, fix_imports=True, encoding='latin1', allow_pickle=True)["data"]
119 | ```
120 |
121 | ## 🖼 Running on your own images
122 |
123 | To run on your own images, run:
124 |
125 | ```
126 | python test_simple.py
127 | --image_path <path_to_image_or_folder>
128 | --model_path <path_to_model_folder>
129 | --num_layers <18 or 50>
130 | ```
131 |
132 | This will save a numpy array of depths, and a colormapped depth image.
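
If you want to work with the saved depths afterwards, something along these lines is enough; the exact output filename is decided by `test_simple.py` (not reproduced in this section), so the path below is a placeholder:

```
import numpy as np
import matplotlib.pyplot as plt

depth = np.load("<path_to_saved_depth>.npy")  # placeholder; use the file test_simple.py wrote
print("depth range: {:.2f}m to {:.2f}m".format(float(depth.min()), float(depth.max())))

plt.imshow(np.squeeze(depth), cmap="magma")
plt.axis("off")
plt.savefig("depth_visualisation.png", bbox_inches="tight")
```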
133 |
134 | ## 👩⚖️ License
135 | Copyright © Niantic, Inc. 2020. Patent Pending.
136 | All rights reserved.
137 | Please see the [license file](LICENSE) for terms.
138 |
--------------------------------------------------------------------------------
/__pycache__/kitti_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nianticlabs/depth-hints/aa2ecf7bc88ef2edbd434fbf064ac024cec8e85d/__pycache__/kitti_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/__pycache__/layers.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nianticlabs/depth-hints/aa2ecf7bc88ef2edbd434fbf064ac024cec8e85d/__pycache__/layers.cpython-36.pyc
--------------------------------------------------------------------------------
/__pycache__/options.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nianticlabs/depth-hints/aa2ecf7bc88ef2edbd434fbf064ac024cec8e85d/__pycache__/options.cpython-36.pyc
--------------------------------------------------------------------------------
/__pycache__/utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nianticlabs/depth-hints/aa2ecf7bc88ef2edbd434fbf064ac024cec8e85d/__pycache__/utils.cpython-36.pyc
--------------------------------------------------------------------------------
/assets/copyright_notice.txt:
--------------------------------------------------------------------------------
1 | The image test_image.jpg is a modified (cropped) version of the original image which can be found at:
2 | https://www.flickr.com/photos/31176607@N05/44108158051/in/faves-41364765@N06/
3 |
4 | This image was licensed under the creative commons 2.0 CC BY 2.0 licence.
5 | https://creativecommons.org/licenses/by/2.0/
6 |
--------------------------------------------------------------------------------
/assets/kitti.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nianticlabs/depth-hints/aa2ecf7bc88ef2edbd434fbf064ac024cec8e85d/assets/kitti.gif
--------------------------------------------------------------------------------
/assets/sceneflow.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nianticlabs/depth-hints/aa2ecf7bc88ef2edbd434fbf064ac024cec8e85d/assets/sceneflow.gif
--------------------------------------------------------------------------------
/assets/test_image.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nianticlabs/depth-hints/aa2ecf7bc88ef2edbd434fbf064ac024cec8e85d/assets/test_image.jpg
--------------------------------------------------------------------------------
/datasets/__init__.py:
--------------------------------------------------------------------------------
1 | from .kitti_dataset import KITTIRAWDataset, KITTIOdomDataset, KITTIDepthDataset
2 |
--------------------------------------------------------------------------------
/datasets/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nianticlabs/depth-hints/aa2ecf7bc88ef2edbd434fbf064ac024cec8e85d/datasets/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/datasets/__pycache__/kitti_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nianticlabs/depth-hints/aa2ecf7bc88ef2edbd434fbf064ac024cec8e85d/datasets/__pycache__/kitti_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/datasets/__pycache__/mono_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nianticlabs/depth-hints/aa2ecf7bc88ef2edbd434fbf064ac024cec8e85d/datasets/__pycache__/mono_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/datasets/kitti_dataset.py:
--------------------------------------------------------------------------------
1 | # Copyright Niantic 2020. Patent Pending. All rights reserved.
2 | #
3 | # This software is licensed under the terms of the DepthHints licence
4 | # which allows for non-commercial use only, the full terms of which are made
5 | # available in the LICENSE file.
6 |
7 | from __future__ import absolute_import, division, print_function
8 |
9 | import os
10 | import skimage.transform
11 | import numpy as np
12 | import PIL.Image as pil
13 |
14 | from kitti_utils import generate_depth_map
15 | from .mono_dataset import MonoDataset
16 |
17 |
18 | class KITTIDataset(MonoDataset):
19 | """Superclass for different types of KITTI dataset loaders
20 | """
21 | def __init__(self, *args, **kwargs):
22 | super(KITTIDataset, self).__init__(*args, **kwargs)
23 |
24 | self.K = np.array([[0.58, 0, 0.5, 0],
25 | [0, 1.92, 0.5, 0],
26 | [0, 0, 1, 0],
27 | [0, 0, 0, 1]], dtype=np.float32)
28 |
29 | self.full_res_shape = (1242, 375)
30 | self.side_map = {"2": 2, "3": 3, "l": 2, "r": 3}
31 |
32 | def check_depth(self):
33 | line = self.filenames[0].split()
34 | scene_name = line[0]
35 | frame_index = int(line[1])
36 |
37 | velo_filename = os.path.join(
38 | self.data_path,
39 | scene_name,
40 | "velodyne_points/data/{:010d}.bin".format(int(frame_index)))
41 |
42 | return os.path.isfile(velo_filename)
43 |
44 | def get_color(self, folder, frame_index, side, do_flip):
45 | color = self.loader(self.get_image_path(folder, frame_index, side))
46 |
47 | if do_flip:
48 | color = color.transpose(pil.FLIP_LEFT_RIGHT)
49 |
50 | return color
51 |
52 |
53 | class KITTIRAWDataset(KITTIDataset):
54 | """KITTI dataset which loads the original velodyne depth maps for ground truth
55 | """
56 | def __init__(self, *args, **kwargs):
57 | super(KITTIRAWDataset, self).__init__(*args, **kwargs)
58 |
59 | def get_image_path(self, folder, frame_index, side):
60 | f_str = "{:010d}{}".format(frame_index, self.img_ext)
61 | image_path = os.path.join(
62 | self.data_path, folder, "image_0{}/data".format(self.side_map[side]), f_str)
63 | return image_path
64 |
65 | def get_depth(self, folder, frame_index, side, do_flip):
66 | calib_path = os.path.join(self.data_path, folder.split("/")[0])
67 |
68 | velo_filename = os.path.join(
69 | self.data_path,
70 | folder,
71 | "velodyne_points/data/{:010d}.bin".format(int(frame_index)))
72 |
73 | depth_gt = generate_depth_map(calib_path, velo_filename, self.side_map[side])
74 | depth_gt = skimage.transform.resize(
75 | depth_gt, self.full_res_shape[::-1], order=0, preserve_range=True, mode='constant')
76 |
77 | if do_flip:
78 | depth_gt = np.fliplr(depth_gt)
79 |
80 | return depth_gt
81 |
82 |
83 | class KITTIOdomDataset(KITTIDataset):
84 | """KITTI dataset for odometry training and testing
85 | """
86 | def __init__(self, *args, **kwargs):
87 | super(KITTIOdomDataset, self).__init__(*args, **kwargs)
88 |
89 | def get_image_path(self, folder, frame_index, side):
90 | f_str = "{:06d}{}".format(frame_index, self.img_ext)
91 | image_path = os.path.join(
92 | self.data_path,
93 | "sequences/{:02d}".format(int(folder)),
94 | "image_{}".format(self.side_map[side]),
95 | f_str)
96 | return image_path
97 |
98 |
99 | class KITTIDepthDataset(KITTIDataset):
100 | """KITTI dataset which uses the updated ground truth depth maps
101 | """
102 | def __init__(self, *args, **kwargs):
103 | super(KITTIDepthDataset, self).__init__(*args, **kwargs)
104 |
105 | def get_image_path(self, folder, frame_index, side):
106 | f_str = "{:010d}{}".format(frame_index, self.img_ext)
107 | image_path = os.path.join(
108 | self.data_path,
109 | folder,
110 | "image_0{}/data".format(self.side_map[side]),
111 | f_str)
112 | return image_path
113 |
114 | def get_depth(self, folder, frame_index, side, do_flip):
115 | f_str = "{:010d}.png".format(frame_index)
116 | depth_path = os.path.join(
117 | self.data_path,
118 | folder,
119 | "proj_depth/groundtruth/image_0{}".format(self.side_map[side]),
120 | f_str)
121 |
122 | depth_gt = pil.open(depth_path)
123 | depth_gt = depth_gt.resize(self.full_res_shape, pil.NEAREST)
124 | depth_gt = np.array(depth_gt).astype(np.float32) / 256
125 |
126 | if do_flip:
127 | depth_gt = np.fliplr(depth_gt)
128 |
129 | return depth_gt
130 |
--------------------------------------------------------------------------------
/datasets/mono_dataset.py:
--------------------------------------------------------------------------------
1 | # Copyright Niantic 2020. Patent Pending. All rights reserved.
2 | #
3 | # This software is licensed under the terms of the DepthHints licence
4 | # which allows for non-commercial use only, the full terms of which are made
5 | # available in the LICENSE file.
6 |
7 | from __future__ import absolute_import, division, print_function
8 |
9 | import os
10 | import random
11 | import numpy as np
12 | import copy
13 | from PIL import Image # using pillow-simd for increased speed
14 | import cv2
15 |
16 | import torch
17 | import torch.utils.data as data
18 | from torchvision import transforms
19 |
20 | cv2.setNumThreads(0)
21 |
22 |
23 | def pil_loader(path):
24 | # open path as file to avoid ResourceWarning
25 | # (https://github.com/python-pillow/Pillow/issues/835)
26 | with open(path, 'rb') as f:
27 | with Image.open(f) as img:
28 | return img.convert('RGB')
29 |
30 |
31 | class MonoDataset(data.Dataset):
32 | """Superclass for monocular dataloaders
33 |
34 | Args:
35 | data_path
36 | filenames
37 | height
38 | width
39 | frame_idxs
40 | num_scales
41 | use_depth_hints
42 | depth_hint_path
43 | is_train
44 | img_ext
45 | """
46 | def __init__(self,
47 | data_path,
48 | filenames,
49 | height,
50 | width,
51 | frame_idxs,
52 | num_scales,
53 | use_depth_hints,
54 | depth_hint_path=None,
55 | is_train=False,
56 | img_ext='.jpg'):
57 | super(MonoDataset, self).__init__()
58 |
59 | self.data_path = data_path
60 | self.filenames = filenames
61 | self.height = height
62 | self.width = width
63 | self.num_scales = num_scales
64 | self.use_depth_hints = use_depth_hints
65 |
66 | # assume depth hints npys are stored in data_path/depth_hints unless specified
67 | if depth_hint_path is None:
68 | self.depth_hint_path = os.path.join(self.data_path, 'depth_hints')
69 | else:
70 | self.depth_hint_path = depth_hint_path
71 |
72 | self.interp = Image.ANTIALIAS
73 |
74 | self.frame_idxs = frame_idxs
75 |
76 | self.is_train = is_train
77 | self.img_ext = img_ext
78 |
79 | self.loader = pil_loader
80 | self.to_tensor = transforms.ToTensor()
81 |
82 | # We need to specify augmentations differently in newer versions of torchvision.
83 | # We first try the newer tuple version; if this fails we fall back to scalars
84 | try:
85 | self.brightness = (0.8, 1.2)
86 | self.contrast = (0.8, 1.2)
87 | self.saturation = (0.8, 1.2)
88 | self.hue = (-0.1, 0.1)
89 | transforms.ColorJitter.get_params(
90 | self.brightness, self.contrast, self.saturation, self.hue)
91 | except TypeError:
92 | self.brightness = 0.2
93 | self.contrast = 0.2
94 | self.saturation = 0.2
95 | self.hue = 0.1
96 |
97 | self.resize = {}
98 | for i in range(self.num_scales):
99 | s = 2 ** i
100 | self.resize[i] = transforms.Resize((self.height // s, self.width // s),
101 | interpolation=self.interp)
102 |
103 | self.load_depth = self.check_depth()
104 |
105 | def preprocess(self, inputs, color_aug):
106 | """Resize colour images to the required scales and augment if required
107 |
108 | We create the color_aug object in advance and apply the same augmentation to all
109 | images in this item. This ensures that all images input to the pose network receive the
110 | same augmentation.
111 | """
112 | for k in list(inputs):
113 | frame = inputs[k]
114 | if "color" in k:
115 | n, im, i = k
116 | for i in range(self.num_scales):
117 | inputs[(n, im, i)] = self.resize[i](inputs[(n, im, i - 1)])
118 |
119 | for k in list(inputs):
120 | f = inputs[k]
121 | if "color" in k:
122 | n, im, i = k
123 | inputs[(n, im, i)] = self.to_tensor(f)
124 | inputs[(n + "_aug", im, i)] = self.to_tensor(color_aug(f))
125 |
126 | def __len__(self):
127 | return len(self.filenames)
128 |
129 | def __getitem__(self, index):
130 | """Returns a single training item from the dataset as a dictionary.
131 |
132 | Values correspond to torch tensors.
133 | Keys in the dictionary are either strings or tuples:
134 |
135 | ("color", , ) for raw colour images,
136 | ("color_aug", , ) for augmented colour images,
137 | ("K", scale) or ("inv_K", scale) for camera intrinsics,
138 | "stereo_T" for camera extrinsics, and
139 | "depth_gt" for ground truth depth maps
140 | "depth_hint" for depth hint
141 | "depth_hint_mask" for mask of valid depth hints
142 |
143 | <frame_id> is either:
144 | an integer (e.g. 0, -1, or 1) representing the temporal step relative to 'index',
145 | or
146 | "s" for the opposite image in the stereo pair.
147 |
148 | <scale> is an integer representing the scale of the image relative to the fullsize image:
149 | -1 images at native resolution as loaded from disk
150 | 0 images resized to (self.width, self.height )
151 | 1 images resized to (self.width // 2, self.height // 2)
152 | 2 images resized to (self.width // 4, self.height // 4)
153 | 3 images resized to (self.width // 8, self.height // 8)
154 | """
155 | inputs = {}
156 |
157 | do_color_aug = self.is_train and random.random() > 0.5
158 | do_flip = self.is_train and random.random() > 0.5
159 |
160 | line = self.filenames[index].split()
161 | folder = line[0]
162 |
163 | if len(line) == 3:
164 | frame_index = int(line[1])
165 | else:
166 | frame_index = 0
167 |
168 | if len(line) == 3:
169 | side = line[2]
170 | else:
171 | side = None
172 |
173 | for i in self.frame_idxs:
174 | if i == "s":
175 | other_side = {"r": "l", "l": "r"}[side]
176 | inputs[("color", i, -1)] = self.get_color(folder, frame_index, other_side, do_flip)
177 | else:
178 | inputs[("color", i, -1)] = self.get_color(folder, frame_index + i, side, do_flip)
179 |
180 | # adjusting intrinsics to match each scale in the pyramid
181 | for scale in range(self.num_scales):
182 | K = self.K.copy()
183 |
184 | K[0, :] *= self.width // (2 ** scale)
185 | K[1, :] *= self.height // (2 ** scale)
186 |
187 | inv_K = np.linalg.pinv(K)
188 |
189 | inputs[("K", scale)] = torch.from_numpy(K)
190 | inputs[("inv_K", scale)] = torch.from_numpy(inv_K)
191 |
192 | if do_color_aug:
193 | color_aug = transforms.ColorJitter.get_params(
194 | self.brightness, self.contrast, self.saturation, self.hue)
195 | else:
196 | color_aug = (lambda x: x)
197 |
198 | self.preprocess(inputs, color_aug)
199 |
200 | for i in self.frame_idxs:
201 | del inputs[("color", i, -1)]
202 | del inputs[("color_aug", i, -1)]
203 |
204 | if self.load_depth:
205 | depth_gt = self.get_depth(folder, frame_index, side, do_flip)
206 | inputs["depth_gt"] = np.expand_dims(depth_gt, 0)
207 | inputs["depth_gt"] = torch.from_numpy(inputs["depth_gt"].astype(np.float32))
208 |
209 | if "s" in self.frame_idxs:
210 | stereo_T = np.eye(4, dtype=np.float32)
211 | baseline_sign = -1 if do_flip else 1
212 | side_sign = -1 if side == "l" else 1
213 | stereo_T[0, 3] = side_sign * baseline_sign * 0.1
214 |
215 | inputs["stereo_T"] = torch.from_numpy(stereo_T)
216 |
217 | # load depth hint
218 | if self.use_depth_hints:
219 | side_folder = 'image_02' if side == 'l' else 'image_03'
220 | depth_folder = os.path.join(self.depth_hint_path, folder, side_folder,
221 | str(frame_index).zfill(10) + '.npy')
222 |
223 | try:
224 | depth = np.load(depth_folder)[0]
225 | except FileNotFoundError:
226 | raise FileNotFoundError("Warning - cannot find depth hint for {} {} {}! "
227 | "Either specify the correct path in option "
228 | "--depth_hint_path, or run precompute_depth_hints.py to"
229 | "train with depth hints".format(folder, side_folder,
230 | frame_index))
231 |
232 | if do_flip:
233 | depth = np.fliplr(depth)
234 |
235 | depth = cv2.resize(depth, dsize=(self.width, self.height),
236 | interpolation=cv2.INTER_NEAREST)
237 | inputs['depth_hint'] = torch.from_numpy(depth).float().unsqueeze(0)
238 | inputs['depth_hint_mask'] = (inputs['depth_hint'] > 0).float()
239 |
240 | return inputs
241 |
242 | def get_color(self, folder, frame_index, side, do_flip):
243 | raise NotImplementedError
244 |
245 | def check_depth(self):
246 | raise NotImplementedError
247 |
248 | def get_depth(self, folder, frame_index, side, do_flip):
249 | raise NotImplementedError
250 |
--------------------------------------------------------------------------------
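A note on how the `depth_hint` and `depth_hint_mask` tensors produced above are consumed: the training code (`trainer.py`) is not reproduced in this section, but the idea in the paper is to trust a hint only at pixels where it explains the other stereo view better than the current prediction does. A minimal sketch of such a loss term, with illustrative names and an assumed log-L1 regression:

```python
import torch

def depth_hint_loss(pred_depth, hint_depth, hint_mask,
                    pred_reproj_err, hint_reproj_err, weight=1.0):
    """Sketch only: supervise with the depth hint where it is valid and its
    photometric reprojection error beats the network prediction's."""
    hint_is_better = (hint_reproj_err < pred_reproj_err).float() * hint_mask

    # Log-L1 regression towards the hint, averaged over the selected pixels only.
    log_l1 = torch.abs(torch.log(pred_depth.clamp(min=1e-7)) -
                       torch.log(hint_depth.clamp(min=1e-7)))
    return weight * (log_l1 * hint_is_better).sum() / (hint_is_better.sum() + 1e-7)
```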
/evaluate_depth.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import, division, print_function
2 |
3 | import os
4 | import cv2
5 | import numpy as np
6 |
7 | import torch
8 | from torch.utils.data import DataLoader
9 |
10 | from layers import disp_to_depth
11 | from utils import readlines
12 | from options import MonodepthOptions
13 | import datasets
14 | import networks
15 |
16 | cv2.setNumThreads(0) # This speeds up evaluation 5x on our unix systems (OpenCV 3.3.1)
17 |
18 |
19 | splits_dir = os.path.join(os.path.dirname(__file__), "splits")
20 |
21 | # Models which were trained with stereo supervision were trained with a nominal
22 | # baseline of 0.1 units. The KITTI rig has a baseline of 54cm. Therefore,
23 | # to convert our stereo predictions to real-world scale we multiply our depths by 5.4.
24 | STEREO_SCALE_FACTOR = 5.4
25 |
26 |
27 | def compute_errors(gt, pred):
28 | """Computation of error metrics between predicted and ground truth depths
29 | """
30 | thresh = np.maximum((gt / pred), (pred / gt))
31 | a1 = (thresh < 1.25 ).mean()
32 | a2 = (thresh < 1.25 ** 2).mean()
33 | a3 = (thresh < 1.25 ** 3).mean()
34 |
35 | rmse = (gt - pred) ** 2
36 | rmse = np.sqrt(rmse.mean())
37 |
38 | rmse_log = (np.log(gt) - np.log(pred)) ** 2
39 | rmse_log = np.sqrt(rmse_log.mean())
40 |
41 | abs_rel = np.mean(np.abs(gt - pred) / gt)
42 |
43 | sq_rel = np.mean(((gt - pred) ** 2) / gt)
44 |
45 | return abs_rel, sq_rel, rmse, rmse_log, a1, a2, a3
46 |
47 |
48 | def batch_post_process_disparity(l_disp, r_disp):
49 | """Apply the disparity post-processing method as introduced in Monodepthv1
50 | """
51 | _, h, w = l_disp.shape
52 | m_disp = 0.5 * (l_disp + r_disp)
53 | l, _ = np.meshgrid(np.linspace(0, 1, w), np.linspace(0, 1, h))
54 | l_mask = (1.0 - np.clip(20 * (l - 0.05), 0, 1))[None, ...]
55 | r_mask = l_mask[:, :, ::-1]
56 | return r_mask * l_disp + l_mask * r_disp + (1.0 - l_mask - r_mask) * m_disp
57 |
58 |
59 | def evaluate(opt):
60 | """Evaluates a pretrained model using a specified test set
61 | """
62 | MIN_DEPTH = 1e-3
63 | MAX_DEPTH = 80
64 |
65 | assert sum((opt.eval_mono, opt.eval_stereo)) == 1, \
66 | "Please choose mono or stereo evaluation by setting either --eval_mono or --eval_stereo"
67 |
68 | if opt.ext_disp_to_eval is None:
69 |
70 | opt.load_weights_folder = os.path.expanduser(opt.load_weights_folder)
71 |
72 | assert os.path.isdir(opt.load_weights_folder), \
73 | "Cannot find a folder at {}".format(opt.load_weights_folder)
74 |
75 | print("-> Loading weights from {}".format(opt.load_weights_folder))
76 |
77 | filenames = readlines(os.path.join(splits_dir, opt.eval_split, "test_files.txt"))
78 | encoder_path = os.path.join(opt.load_weights_folder, "encoder.pth")
79 | decoder_path = os.path.join(opt.load_weights_folder, "depth.pth")
80 |
81 | encoder_dict = torch.load(encoder_path)
82 |
83 | dataset = datasets.KITTIRAWDataset(opt.data_path, filenames,
84 | encoder_dict['height'], encoder_dict['width'],
85 | [0], 4, is_train=False, use_depth_hints=False)
86 | dataloader = DataLoader(dataset, 16, shuffle=False, num_workers=opt.num_workers,
87 | pin_memory=True, drop_last=False)
88 |
89 | encoder = networks.ResnetEncoder(opt.num_layers, False)
90 | depth_decoder = networks.DepthDecoder(encoder.num_ch_enc)
91 |
92 | model_dict = encoder.state_dict()
93 | encoder.load_state_dict({k: v for k, v in encoder_dict.items() if k in model_dict})
94 | depth_decoder.load_state_dict(torch.load(decoder_path))
95 |
96 | encoder.cuda()
97 | encoder.eval()
98 | depth_decoder.cuda()
99 | depth_decoder.eval()
100 |
101 | pred_disps = []
102 |
103 | print("-> Computing predictions with size {}x{}".format(
104 | encoder_dict['width'], encoder_dict['height']))
105 |
106 | with torch.no_grad():
107 | for data in dataloader:
108 | input_color = data[("color", 0, 0)].cuda()
109 |
110 | if opt.post_process:
111 | # Post-processed results require each image to have two forward passes
112 | input_color = torch.cat((input_color, torch.flip(input_color, [3])), 0)
113 |
114 | output = depth_decoder(encoder(input_color))
115 |
116 | pred_disp, _ = disp_to_depth(output[("disp", 0)], opt.min_depth, opt.max_depth)
117 | pred_disp = pred_disp.cpu()[:, 0].numpy()
118 |
119 | if opt.post_process:
120 | N = pred_disp.shape[0] // 2
121 | pred_disp = batch_post_process_disparity(pred_disp[:N], pred_disp[N:, :, ::-1])
122 |
123 | pred_disps.append(pred_disp)
124 |
125 | pred_disps = np.concatenate(pred_disps)
126 |
127 | else:
128 | # Load predictions from file
129 | print("-> Loading predictions from {}".format(opt.ext_disp_to_eval))
130 | pred_disps = np.load(opt.ext_disp_to_eval)
131 |
132 | if opt.eval_eigen_to_benchmark:
133 | eigen_to_benchmark_ids = np.load(
134 | os.path.join(splits_dir, "benchmark", "eigen_to_benchmark_ids.npy"))
135 |
136 | pred_disps = pred_disps[eigen_to_benchmark_ids]
137 |
138 | if opt.save_pred_disps:
139 | output_path = os.path.join(
140 | opt.load_weights_folder, "disps_{}_split.npy".format(opt.eval_split))
141 | print("-> Saving predicted disparities to ", output_path)
142 | np.save(output_path, pred_disps)
143 |
144 | if opt.no_eval:
145 | print("-> Evaluation disabled. Done.")
146 | quit()
147 |
148 | elif opt.eval_split == 'benchmark':
149 | save_dir = os.path.join(opt.load_weights_folder, "benchmark_predictions")
150 | print("-> Saving out benchmark predictions to {}".format(save_dir))
151 | if not os.path.exists(save_dir):
152 | os.makedirs(save_dir)
153 |
154 | for idx in range(len(pred_disps)):
155 | disp_resized = cv2.resize(pred_disps[idx], (1216, 352))
156 | depth = STEREO_SCALE_FACTOR / disp_resized
157 | depth = np.clip(depth, 0, 80)
158 | depth = np.uint16(depth * 256)
159 | save_path = os.path.join(save_dir, "{:010d}.png".format(idx))
160 | cv2.imwrite(save_path, depth)
161 |
162 | print("-> No ground truth is available for the KITTI benchmark, so not evaluating. Done.")
163 | quit()
164 |
165 | gt_path = os.path.join(splits_dir, opt.eval_split, "gt_depths.npz")
166 | gt_depths = np.load(gt_path, fix_imports=True, encoding='latin1', allow_pickle=True)["data"]
167 |
168 | print("-> Evaluating")
169 |
170 | if opt.eval_stereo:
171 | print(" Stereo evaluation - "
172 | "disabling median scaling, scaling by {}".format(STEREO_SCALE_FACTOR))
173 | opt.disable_median_scaling = True
174 | opt.pred_depth_scale_factor = STEREO_SCALE_FACTOR
175 | else:
176 | print(" Mono evaluation - using median scaling")
177 |
178 | errors = []
179 | ratios = []
180 |
181 | for i in range(pred_disps.shape[0]):
182 |
183 | gt_depth = gt_depths[i]
184 | gt_height, gt_width = gt_depth.shape[:2]
185 |
186 | pred_disp = pred_disps[i]
187 | pred_disp = cv2.resize(pred_disp, (gt_width, gt_height))
188 | pred_depth = 1 / pred_disp
189 |
190 | if opt.eval_split == "eigen":
191 | mask = np.logical_and(gt_depth > MIN_DEPTH, gt_depth < MAX_DEPTH)
192 |
193 | crop = np.array([0.40810811 * gt_height, 0.99189189 * gt_height,
194 | 0.03594771 * gt_width, 0.96405229 * gt_width]).astype(np.int32)
195 | crop_mask = np.zeros(mask.shape)
196 | crop_mask[crop[0]:crop[1], crop[2]:crop[3]] = 1
197 | mask = np.logical_and(mask, crop_mask)
198 |
199 | else:
200 | mask = gt_depth > 0
201 |
202 | pred_depth = pred_depth[mask]
203 | gt_depth = gt_depth[mask]
204 |
205 | pred_depth *= opt.pred_depth_scale_factor
206 | if not opt.disable_median_scaling:
207 | ratio = np.median(gt_depth) / np.median(pred_depth)
208 | ratios.append(ratio)
209 | pred_depth *= ratio
210 |
211 | pred_depth[pred_depth < MIN_DEPTH] = MIN_DEPTH
212 | pred_depth[pred_depth > MAX_DEPTH] = MAX_DEPTH
213 |
214 | errors.append(compute_errors(gt_depth, pred_depth))
215 |
216 | if not opt.disable_median_scaling:
217 | ratios = np.array(ratios)
218 | med = np.median(ratios)
219 | print(" Scaling ratios | med: {:0.3f} | std: {:0.3f}".format(med, np.std(ratios / med)))
220 |
221 | mean_errors = np.array(errors).mean(0)
222 |
223 | print("\n " + ("{:>8} | " * 7).format("abs_rel", "sq_rel", "rmse", "rmse_log", "a1", "a2", "a3"))
224 | print(("&{: 8.3f} " * 7).format(*mean_errors.tolist()) + "\\\\")
225 | print("\n-> Done!")
226 |
227 |
228 | if __name__ == "__main__":
229 | options = MonodepthOptions()
230 | evaluate(options.parse())
231 |
--------------------------------------------------------------------------------
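Besides evaluating a weights folder, `evaluate_depth.py` can also score disparities saved to disk (such as the `disps_eigen_split.npy` files linked from the README) via `--ext_disp_to_eval`. An example invocation, with placeholder paths:

```
python evaluate_depth.py \
  --data_path <your_KITTI_path> \
  --ext_disp_to_eval <downloaded_predictions>/disps_eigen_split.npy \
  --eval_stereo
```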
/evaluate_pose.py:
--------------------------------------------------------------------------------
1 | # Copyright Niantic 2020. Patent Pending. All rights reserved.
2 | #
3 | # This software is licensed under the terms of the DepthHints licence
4 | # which allows for non-commercial use only, the full terms of which are made
5 | # available in the LICENSE file.
6 |
7 | from __future__ import absolute_import, division, print_function
8 |
9 | import os
10 | import numpy as np
11 |
12 | import torch
13 | from torch.utils.data import DataLoader
14 |
15 | from layers import transformation_from_parameters
16 | from utils import readlines
17 | from options import MonodepthOptions
18 | from datasets import KITTIOdomDataset
19 | import networks
20 |
21 |
22 | # from https://github.com/tinghuiz/SfMLearner
23 | def dump_xyz(source_to_target_transformations):
24 | xyzs = []
25 | cam_to_world = np.eye(4)
26 | xyzs.append(cam_to_world[:3, 3])
27 | for source_to_target_transformation in source_to_target_transformations:
28 | cam_to_world = np.dot(cam_to_world, source_to_target_transformation)
29 | xyzs.append(cam_to_world[:3, 3])
30 | return xyzs
31 |
32 |
33 | # from https://github.com/tinghuiz/SfMLearner
34 | def compute_ate(gtruth_xyz, pred_xyz_o):
35 |
36 | # Make sure that the first matched frames align (no need for rotational alignment as
37 | # all the predicted/ground-truth snippets have been converted to use the same coordinate
38 | # system with the first frame of the snippet being the origin).
39 | offset = gtruth_xyz[0] - pred_xyz_o[0]
40 | pred_xyz = pred_xyz_o + offset[None, :]
41 |
42 | # Optimize the scaling factor
43 | scale = np.sum(gtruth_xyz * pred_xyz) / np.sum(pred_xyz ** 2)
44 | alignment_error = pred_xyz * scale - gtruth_xyz
45 | rmse = np.sqrt(np.sum(alignment_error ** 2)) / gtruth_xyz.shape[0]
46 | return rmse
47 |
48 |
49 | def evaluate(opt):
50 | """Evaluate odometry on the KITTI dataset
51 | """
52 | assert os.path.isdir(opt.load_weights_folder), \
53 | "Cannot find a folder at {}".format(opt.load_weights_folder)
54 |
55 | assert opt.eval_split == "odom_9" or opt.eval_split == "odom_10", \
56 | "eval_split should be either odom_9 or odom_10"
57 |
58 | sequence_id = int(opt.eval_split.split("_")[1])
59 |
60 | filenames = readlines(
61 | os.path.join(os.path.dirname(__file__), "splits", "odom",
62 | "test_files_{:02d}.txt".format(sequence_id)))
63 |
64 | dataset = KITTIOdomDataset(opt.data_path, filenames, opt.height, opt.width,
65 | [0, 1], 4, use_depth_hints=False, is_train=False)
66 | dataloader = DataLoader(dataset, opt.batch_size, shuffle=False,
67 | num_workers=opt.num_workers, pin_memory=True, drop_last=False)
68 |
69 | pose_encoder_path = os.path.join(opt.load_weights_folder, "pose_encoder.pth")
70 | pose_decoder_path = os.path.join(opt.load_weights_folder, "pose.pth")
71 |
72 | pose_encoder = networks.ResnetEncoder(opt.num_layers, False, 2)
73 | pose_encoder.load_state_dict(torch.load(pose_encoder_path))
74 |
75 | pose_decoder = networks.PoseDecoder(pose_encoder.num_ch_enc, 1, 2)
76 | pose_decoder.load_state_dict(torch.load(pose_decoder_path))
77 |
78 | pose_encoder.cuda()
79 | pose_encoder.eval()
80 | pose_decoder.cuda()
81 | pose_decoder.eval()
82 |
83 | pred_poses = []
84 |
85 | print("-> Computing pose predictions")
86 |
87 | opt.frame_ids = [0, 1] # pose network only takes two frames as input
88 |
89 | with torch.no_grad():
90 | for inputs in dataloader:
91 | for key, ipt in inputs.items():
92 | inputs[key] = ipt.cuda()
93 |
94 | all_color_aug = torch.cat([inputs[("color_aug", i, 0)] for i in opt.frame_ids], 1)
95 |
96 | features = [pose_encoder(all_color_aug)]
97 | axisangle, translation = pose_decoder(features)
98 |
99 | pred_poses.append(
100 | transformation_from_parameters(axisangle[:, 0], translation[:, 0]).cpu().numpy())
101 |
102 | pred_poses = np.concatenate(pred_poses)
103 |
104 | gt_poses_path = os.path.join(opt.data_path, "poses", "{:02d}.txt".format(sequence_id))
105 | gt_global_poses = np.loadtxt(gt_poses_path).reshape(-1, 3, 4)
106 | gt_global_poses = np.concatenate(
107 | (gt_global_poses, np.zeros((gt_global_poses.shape[0], 1, 4))), 1)
108 | gt_global_poses[:, 3, 3] = 1
109 | gt_xyzs = gt_global_poses[:, :3, 3]
110 |
111 | gt_local_poses = []
112 | for i in range(1, len(gt_global_poses)):
113 | gt_local_poses.append(
114 | np.linalg.inv(np.dot(np.linalg.inv(gt_global_poses[i - 1]), gt_global_poses[i])))
115 |
116 | ates = []
117 | num_frames = gt_xyzs.shape[0]
118 | track_length = 5
119 | for i in range(0, num_frames - 1):
120 | local_xyzs = np.array(dump_xyz(pred_poses[i:i + track_length - 1]))
121 | gt_local_xyzs = np.array(dump_xyz(gt_local_poses[i:i + track_length - 1]))
122 |
123 | ates.append(compute_ate(gt_local_xyzs, local_xyzs))
124 |
125 | print("\n Trajectory error: {:0.3f}, std: {:0.3f}\n".format(np.mean(ates), np.std(ates)))
126 |
127 | save_path = os.path.join(opt.load_weights_folder, "poses.npy")
128 | np.save(save_path, pred_poses)
129 | print("-> Predictions saved to", save_path)
130 |
131 |
132 | if __name__ == "__main__":
133 | options = MonodepthOptions()
134 | evaluate(options.parse())
135 |
--------------------------------------------------------------------------------
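`evaluate_pose.py` assumes the KITTI odometry layout (`sequences/` for images, `poses/` for ground truth) under `--data_path`, and one of the two supported splits. An example invocation, with placeholder paths:

```
python evaluate_pose.py \
  --eval_split odom_9 \
  --data_path <your_KITTI_odometry_path> \
  --load_weights_folder <your_model_path>
```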
/export_gt_depth.py:
--------------------------------------------------------------------------------
1 | # Copyright Niantic 2020. Patent Pending. All rights reserved.
2 | #
3 | # This software is licensed under the terms of the DepthHints licence
4 | # which allows for non-commercial use only, the full terms of which are made
5 | # available in the LICENSE file.
6 |
7 | from __future__ import absolute_import, division, print_function
8 |
9 | import os
10 |
11 | import argparse
12 | import numpy as np
13 | import PIL.Image as pil
14 |
15 | from utils import readlines
16 | from kitti_utils import generate_depth_map
17 |
18 |
19 | def export_gt_depths_kitti():
20 |
21 | parser = argparse.ArgumentParser(description='export_gt_depth')
22 |
23 | parser.add_argument('--data_path',
24 | type=str,
25 | help='path to the root of the KITTI data',
26 | required=True)
27 | parser.add_argument('--split',
28 | type=str,
29 | help='which split to export gt from',
30 | required=True,
31 | choices=["eigen", "eigen_benchmark"])
32 | opt = parser.parse_args()
33 |
34 | split_folder = os.path.join(os.path.dirname(__file__), "splits", opt.split)
35 | lines = readlines(os.path.join(split_folder, "test_files.txt"))
36 |
37 | print("Exporting ground truth depths for {}".format(opt.split))
38 |
39 | gt_depths = []
40 | for line in lines:
41 |
42 | folder, frame_id, _ = line.split()
43 | frame_id = int(frame_id)
44 |
45 | if opt.split == "eigen":
46 | calib_dir = os.path.join(opt.data_path, folder.split("/")[0])
47 | velo_filename = os.path.join(opt.data_path, folder,
48 | "velodyne_points/data", "{:010d}.bin".format(frame_id))
49 | gt_depth = generate_depth_map(calib_dir, velo_filename, 2, True)
50 | elif opt.split == "eigen_benchmark":
51 | gt_depth_path = os.path.join(opt.data_path, folder, "proj_depth",
52 | "groundtruth", "image_02", "{:010d}.png".format(frame_id))
53 | gt_depth = np.array(pil.open(gt_depth_path)).astype(np.float32) / 256
54 |
55 | gt_depths.append(gt_depth.astype(np.float32))
56 |
57 | output_path = os.path.join(split_folder, "gt_depths.npz")
58 |
59 | print("Saving to {}".format(opt.split))
60 |
61 | np.savez_compressed(output_path, data=np.array(gt_depths))
62 |
63 |
64 | if __name__ == "__main__":
65 | export_gt_depths_kitti()
66 |
--------------------------------------------------------------------------------
/kitti_utils.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import, division, print_function
2 |
3 | import os
4 | import numpy as np
5 | from collections import Counter
6 |
7 |
8 | def load_velodyne_points(filename):
9 | """Load 3D point cloud from KITTI file format
10 | (adapted from https://github.com/hunse/kitti)
11 | """
12 | points = np.fromfile(filename, dtype=np.float32).reshape(-1, 4)
13 | points[:, 3] = 1.0 # homogeneous
14 | return points
15 |
16 |
17 | def read_calib_file(path):
18 | """Read KITTI calibration file
19 | (from https://github.com/hunse/kitti)
20 | """
21 | float_chars = set("0123456789.e+- ")
22 | data = {}
23 | with open(path, 'r') as f:
24 | for line in f.readlines():
25 | key, value = line.split(':', 1)
26 | value = value.strip()
27 | data[key] = value
28 | if float_chars.issuperset(value):
29 | # try to cast to float array
30 | try:
31 | data[key] = np.array(list(map(float, value.split(' '))))
32 | except ValueError:
33 | # casting error: data[key] already eq. value, so pass
34 | pass
35 |
36 | return data
37 |
38 |
39 | def sub2ind(matrixSize, rowSub, colSub):
40 | """Convert row, col matrix subscripts to linear indices
41 | """
42 | m, n = matrixSize
43 | return rowSub * (n-1) + colSub - 1
44 |
45 |
46 | def generate_depth_map(calib_dir, velo_filename, cam=2, vel_depth=False):
47 | """Generate a depth map from velodyne data
48 | """
49 | # load calibration files
50 | cam2cam = read_calib_file(os.path.join(calib_dir, 'calib_cam_to_cam.txt'))
51 | velo2cam = read_calib_file(os.path.join(calib_dir, 'calib_velo_to_cam.txt'))
52 | velo2cam = np.hstack((velo2cam['R'].reshape(3, 3), velo2cam['T'][..., np.newaxis]))
53 | velo2cam = np.vstack((velo2cam, np.array([0, 0, 0, 1.0])))
54 |
55 | # get image shape
56 | im_shape = cam2cam["S_rect_02"][::-1].astype(np.int32)
57 |
58 | # compute projection matrix velodyne->image plane
59 | R_cam2rect = np.eye(4)
60 | R_cam2rect[:3, :3] = cam2cam['R_rect_00'].reshape(3, 3)
61 | P_rect = cam2cam['P_rect_0'+str(cam)].reshape(3, 4)
62 | P_velo2im = np.dot(np.dot(P_rect, R_cam2rect), velo2cam)
63 |
64 | # load velodyne points and remove all behind image plane (approximation)
65 | # each row of the velodyne data is forward, left, up, reflectance
66 | velo = load_velodyne_points(velo_filename)
67 | velo = velo[velo[:, 0] >= 0, :]
68 |
69 | # project the points to the camera
70 | velo_pts_im = np.dot(P_velo2im, velo.T).T
71 | velo_pts_im[:, :2] = velo_pts_im[:, :2] / velo_pts_im[:, 2][..., np.newaxis]
72 |
73 | if vel_depth:
74 | velo_pts_im[:, 2] = velo[:, 0]
75 |
76 | # check if in bounds
77 | # use minus 1 to get the exact same value as KITTI matlab code
78 | velo_pts_im[:, 0] = np.round(velo_pts_im[:, 0]) - 1
79 | velo_pts_im[:, 1] = np.round(velo_pts_im[:, 1]) - 1
80 | val_inds = (velo_pts_im[:, 0] >= 0) & (velo_pts_im[:, 1] >= 0)
81 | val_inds = val_inds & (velo_pts_im[:, 0] < im_shape[1]) & (velo_pts_im[:, 1] < im_shape[0])
82 | velo_pts_im = velo_pts_im[val_inds, :]
83 |
84 | # project to image
85 | depth = np.zeros((im_shape[:2]))
86 | depth[velo_pts_im[:, 1].astype(np.int), velo_pts_im[:, 0].astype(np.int)] = velo_pts_im[:, 2]
87 |
88 | # find the duplicate points and choose the closest depth
89 | inds = sub2ind(depth.shape, velo_pts_im[:, 1], velo_pts_im[:, 0])
90 | dupe_inds = [item for item, count in Counter(inds).items() if count > 1]
91 | for dd in dupe_inds:
92 | pts = np.where(inds == dd)[0]
93 | x_loc = int(velo_pts_im[pts[0], 0])
94 | y_loc = int(velo_pts_im[pts[0], 1])
95 | depth[y_loc, x_loc] = velo_pts_im[pts, 2].min()
96 | depth[depth < 0] = 0
97 |
98 | return depth
99 |
--------------------------------------------------------------------------------
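`generate_depth_map` is the routine that `export_gt_depth.py` and `KITTIRAWDataset.get_depth` use to turn a LiDAR scan into a sparse depth image. A standalone usage sketch, with placeholder paths following the KITTI raw-data layout:

```python
import numpy as np
from kitti_utils import generate_depth_map

# Placeholders: a KITTI date folder (holding calib_cam_to_cam.txt / calib_velo_to_cam.txt)
# and one velodyne scan from a drive recorded on that date.
calib_dir = "<your_KITTI_path>/2011_09_26"
velo_file = ("<your_KITTI_path>/2011_09_26/2011_09_26_drive_0002_sync/"
             "velodyne_points/data/0000000069.bin")

# Project the scan into camera 2; pixels with no LiDAR return stay at depth 0.
depth = generate_depth_map(calib_dir, velo_file, cam=2, vel_depth=False)
print(depth.shape, "valid pixels:", int(np.count_nonzero(depth)))
```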
/layers.py:
--------------------------------------------------------------------------------
1 | # Copyright Niantic 2020. Patent Pending. All rights reserved.
2 | #
3 | # This software is licensed under the terms of the DepthHints licence
4 | # which allows for non-commercial use only, the full terms of which are made
5 | # available in the LICENSE file.
6 |
7 | from __future__ import absolute_import, division, print_function
8 |
9 | import numpy as np
10 |
11 | import torch
12 | import torch.nn as nn
13 | import torch.nn.functional as F
14 |
15 |
16 | def disp_to_depth(disp, min_depth, max_depth):
17 | """Convert network's sigmoid output into depth prediction
18 | The formula for this conversion is given in the 'additional considerations'
19 | section of the paper.
20 | """
21 | min_disp = 1 / max_depth
22 | max_disp = 1 / min_depth
23 | scaled_disp = min_disp + (max_disp - min_disp) * disp
24 | depth = 1 / scaled_disp
25 | return scaled_disp, depth
26 |
27 |
28 | def transformation_from_parameters(axisangle, translation, invert=False):
29 | """Convert the network's (axisangle, translation) output into a 4x4 matrix
30 | """
31 | R = rot_from_axisangle(axisangle)
32 | t = translation.clone()
33 |
34 | if invert:
35 | R = R.transpose(1, 2)
36 | t *= -1
37 |
38 | T = get_translation_matrix(t)
39 |
40 | if invert:
41 | M = torch.matmul(R, T)
42 | else:
43 | M = torch.matmul(T, R)
44 |
45 | return M
46 |
47 |
48 | def get_translation_matrix(translation_vector):
49 | """Convert a translation vector into a 4x4 transformation matrix
50 | """
51 | T = torch.zeros(translation_vector.shape[0], 4, 4).to(device=translation_vector.device)
52 |
53 | t = translation_vector.contiguous().view(-1, 3, 1)
54 |
55 | T[:, 0, 0] = 1
56 | T[:, 1, 1] = 1
57 | T[:, 2, 2] = 1
58 | T[:, 3, 3] = 1
59 | T[:, :3, 3, None] = t
60 |
61 | return T
62 |
63 |
64 | def rot_from_axisangle(vec):
65 | """Convert an axisangle rotation into a 4x4 transformation matrix
66 | (adapted from https://github.com/Wallacoloo/printipi)
67 | Input 'vec' has to be Bx1x3
68 | """
69 | angle = torch.norm(vec, 2, 2, True)
70 | axis = vec / (angle + 1e-7)
71 |
72 | ca = torch.cos(angle)
73 | sa = torch.sin(angle)
74 | C = 1 - ca
75 |
76 | x = axis[..., 0].unsqueeze(1)
77 | y = axis[..., 1].unsqueeze(1)
78 | z = axis[..., 2].unsqueeze(1)
79 |
80 | xs = x * sa
81 | ys = y * sa
82 | zs = z * sa
83 | xC = x * C
84 | yC = y * C
85 | zC = z * C
86 | xyC = x * yC
87 | yzC = y * zC
88 | zxC = z * xC
89 |
90 | rot = torch.zeros((vec.shape[0], 4, 4)).to(device=vec.device)
91 |
92 | rot[:, 0, 0] = torch.squeeze(x * xC + ca)
93 | rot[:, 0, 1] = torch.squeeze(xyC - zs)
94 | rot[:, 0, 2] = torch.squeeze(zxC + ys)
95 | rot[:, 1, 0] = torch.squeeze(xyC + zs)
96 | rot[:, 1, 1] = torch.squeeze(y * yC + ca)
97 | rot[:, 1, 2] = torch.squeeze(yzC - xs)
98 | rot[:, 2, 0] = torch.squeeze(zxC - ys)
99 | rot[:, 2, 1] = torch.squeeze(yzC + xs)
100 | rot[:, 2, 2] = torch.squeeze(z * zC + ca)
101 | rot[:, 3, 3] = 1
102 |
103 | return rot
104 |
105 |
106 | class ConvBlock(nn.Module):
107 | """Layer to perform a convolution followed by ELU
108 | """
109 | def __init__(self, in_channels, out_channels):
110 | super(ConvBlock, self).__init__()
111 |
112 | self.conv = Conv3x3(in_channels, out_channels)
113 | self.nonlin = nn.ELU(inplace=True)
114 |
115 | def forward(self, x):
116 | out = self.conv(x)
117 | out = self.nonlin(out)
118 | return out
119 |
120 |
121 | class Conv3x3(nn.Module):
122 | """Layer to pad and convolve input
123 | """
124 | def __init__(self, in_channels, out_channels, use_refl=True):
125 | super(Conv3x3, self).__init__()
126 |
127 | if use_refl:
128 | self.pad = nn.ReflectionPad2d(1)
129 | else:
130 | self.pad = nn.ZeroPad2d(1)
131 | self.conv = nn.Conv2d(int(in_channels), int(out_channels), 3)
132 |
133 | def forward(self, x):
134 | out = self.pad(x)
135 | out = self.conv(out)
136 | return out
137 |
138 |
139 | class BackprojectDepth(nn.Module):
140 | """Layer to transform a depth image into a point cloud
141 | """
142 | def __init__(self, batch_size, height, width):
143 | super(BackprojectDepth, self).__init__()
144 |
145 | self.batch_size = batch_size
146 | self.height = height
147 | self.width = width
148 |
149 | meshgrid = np.meshgrid(range(self.width), range(self.height), indexing='xy')
150 | self.id_coords = np.stack(meshgrid, axis=0).astype(np.float32)
151 | self.id_coords = nn.Parameter(torch.from_numpy(self.id_coords),
152 | requires_grad=False)
153 |
154 | self.ones = nn.Parameter(torch.ones(self.batch_size, 1, self.height * self.width),
155 | requires_grad=False)
156 |
157 | self.pix_coords = torch.unsqueeze(torch.stack(
158 | [self.id_coords[0].view(-1), self.id_coords[1].view(-1)], 0), 0)
159 | self.pix_coords = self.pix_coords.repeat(batch_size, 1, 1)
160 | self.pix_coords = nn.Parameter(torch.cat([self.pix_coords, self.ones], 1),
161 | requires_grad=False)
162 |
163 | def forward(self, depth, inv_K):
164 | cam_points = torch.matmul(inv_K[:, :3, :3], self.pix_coords)
165 | cam_points = depth.view(self.batch_size, 1, -1) * cam_points
166 | cam_points = torch.cat([cam_points, self.ones], 1)
167 |
168 | return cam_points
169 |
170 |
171 | class Project3D(nn.Module):
172 | """Layer which projects 3D points into a camera with intrinsics K and at position T
173 | """
174 | def __init__(self, batch_size, height, width, eps=1e-7):
175 | super(Project3D, self).__init__()
176 |
177 | self.batch_size = batch_size
178 | self.height = height
179 | self.width = width
180 | self.eps = eps
181 |
182 | def forward(self, points, K, T):
183 | P = torch.matmul(K, T)[:, :3, :]
184 |
185 | cam_points = torch.matmul(P, points)
186 |
187 | pix_coords = cam_points[:, :2, :] / (cam_points[:, 2, :].unsqueeze(1) + self.eps)
188 | pix_coords = pix_coords.view(self.batch_size, 2, self.height, self.width)
189 | pix_coords = pix_coords.permute(0, 2, 3, 1)
190 | pix_coords[..., 0] /= self.width - 1
191 | pix_coords[..., 1] /= self.height - 1
192 | pix_coords = (pix_coords - 0.5) * 2
193 | return pix_coords
194 |
195 |
196 | def upsample(x):
197 | """Upsample input tensor by a factor of 2
198 | """
199 | return F.interpolate(x, scale_factor=2, mode="nearest")
200 |
201 |
202 | def get_smooth_loss(disp, img):
203 | """Computes the smoothness loss for a disparity image
204 | The color image is used for edge-aware smoothness
205 | """
206 | grad_disp_x = torch.abs(disp[:, :, :, :-1] - disp[:, :, :, 1:])
207 | grad_disp_y = torch.abs(disp[:, :, :-1, :] - disp[:, :, 1:, :])
208 |
209 | grad_img_x = torch.mean(torch.abs(img[:, :, :, :-1] - img[:, :, :, 1:]), 1, keepdim=True)
210 | grad_img_y = torch.mean(torch.abs(img[:, :, :-1, :] - img[:, :, 1:, :]), 1, keepdim=True)
211 |
212 | grad_disp_x *= torch.exp(-grad_img_x)
213 | grad_disp_y *= torch.exp(-grad_img_y)
214 |
215 | return grad_disp_x.mean() + grad_disp_y.mean()
216 |
217 |
218 | class SSIM(nn.Module):
219 | """Layer to compute the SSIM loss between a pair of images
220 | """
221 | def __init__(self):
222 | super(SSIM, self).__init__()
223 | self.mu_x_pool = nn.AvgPool2d(3, 1)
224 | self.mu_y_pool = nn.AvgPool2d(3, 1)
225 | self.sig_x_pool = nn.AvgPool2d(3, 1)
226 | self.sig_y_pool = nn.AvgPool2d(3, 1)
227 | self.sig_xy_pool = nn.AvgPool2d(3, 1)
228 |
229 | self.refl = nn.ReflectionPad2d(1)
230 |
231 | self.C1 = 0.01 ** 2
232 | self.C2 = 0.03 ** 2
233 |
234 | def forward(self, x, y):
235 | x = self.refl(x)
236 | y = self.refl(y)
237 |
238 | mu_x = self.mu_x_pool(x)
239 | mu_y = self.mu_y_pool(y)
240 |
241 | sigma_x = self.sig_x_pool(x ** 2) - mu_x ** 2
242 | sigma_y = self.sig_y_pool(y ** 2) - mu_y ** 2
243 | sigma_xy = self.sig_xy_pool(x * y) - mu_x * mu_y
244 |
245 | SSIM_n = (2 * mu_x * mu_y + self.C1) * (2 * sigma_xy + self.C2)
246 | SSIM_d = (mu_x ** 2 + mu_y ** 2 + self.C1) * (sigma_x + sigma_y + self.C2)
247 |
248 | return torch.clamp((1 - SSIM_n / SSIM_d) / 2, 0, 1)
249 |
250 |
251 | def compute_depth_errors(gt, pred):
252 | """Computation of error metrics between predicted and ground truth depths
253 | """
254 | thresh = torch.max((gt / pred), (pred / gt))
255 | a1 = (thresh < 1.25 ).float().mean()
256 | a2 = (thresh < 1.25 ** 2).float().mean()
257 | a3 = (thresh < 1.25 ** 3).float().mean()
258 |
259 | rmse = (gt - pred) ** 2
260 | rmse = torch.sqrt(rmse.mean())
261 |
262 | rmse_log = (torch.log(gt) - torch.log(pred)) ** 2
263 | rmse_log = torch.sqrt(rmse_log.mean())
264 |
265 | abs_rel = torch.mean(torch.abs(gt - pred) / gt)
266 |
267 | sq_rel = torch.mean((gt - pred) ** 2 / gt)
268 |
269 | return abs_rel, sq_rel, rmse, rmse_log, a1, a2, a3
270 |
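271 | 
272 | if __name__ == "__main__":
273 |     # Minimal self-check sketch: the 192x640 resolution is the training default from
274 |     # options.py and the normalised intrinsics match precompute_depth_hints.py.
275 |     batch, height, width = 1, 192, 640
276 |     K = torch.tensor([[0.58 * width, 0, 0.5 * width, 0],
277 |                       [0, 1.92 * height, 0.5 * height, 0],
278 |                       [0, 0, 1, 0],
279 |                       [0, 0, 0, 1]]).unsqueeze(0)
280 |     inv_K = torch.inverse(K)
281 | 
282 |     # a constant sigmoid disparity of 0.5 maps to a depth inside [0.1, 100] metres
283 |     _, depth = disp_to_depth(torch.full((batch, 1, height, width), 0.5), 0.1, 100)
284 | 
285 |     # backproject then reproject with an identity pose: the resulting sampling grid
286 |     # simply reproduces the pixel grid, normalised to [-1, 1]
287 |     cam_points = BackprojectDepth(batch, height, width)(depth, inv_K)
288 |     pix_coords = Project3D(batch, height, width)(cam_points, K, torch.eye(4).unsqueeze(0))
289 |     print(depth[0, 0, 0, 0].item(), pix_coords.shape)  # ~0.1998, (1, 192, 640, 2)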
--------------------------------------------------------------------------------
/networks/__init__.py:
--------------------------------------------------------------------------------
1 | from .resnet_encoder import ResnetEncoder
2 | from .depth_decoder import DepthDecoder
3 | from .pose_decoder import PoseDecoder
4 | from .pose_cnn import PoseCNN
5 |
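6 | 
7 | # Typical composition of these modules for depth prediction (a sketch; `images`
8 | # is assumed to be a (B, 3, H, W) tensor with values in [0, 1]):
9 | #
10 | #     encoder = ResnetEncoder(18, pretrained=False)
11 | #     decoder = DepthDecoder(num_ch_enc=encoder.num_ch_enc, scales=range(4))
12 | #     disp = decoder(encoder(images))[("disp", 0)]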
--------------------------------------------------------------------------------
/networks/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nianticlabs/depth-hints/aa2ecf7bc88ef2edbd434fbf064ac024cec8e85d/networks/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/networks/__pycache__/depth_decoder.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nianticlabs/depth-hints/aa2ecf7bc88ef2edbd434fbf064ac024cec8e85d/networks/__pycache__/depth_decoder.cpython-36.pyc
--------------------------------------------------------------------------------
/networks/__pycache__/pose_cnn.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nianticlabs/depth-hints/aa2ecf7bc88ef2edbd434fbf064ac024cec8e85d/networks/__pycache__/pose_cnn.cpython-36.pyc
--------------------------------------------------------------------------------
/networks/__pycache__/pose_decoder.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nianticlabs/depth-hints/aa2ecf7bc88ef2edbd434fbf064ac024cec8e85d/networks/__pycache__/pose_decoder.cpython-36.pyc
--------------------------------------------------------------------------------
/networks/__pycache__/resnet_encoder.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nianticlabs/depth-hints/aa2ecf7bc88ef2edbd434fbf064ac024cec8e85d/networks/__pycache__/resnet_encoder.cpython-36.pyc
--------------------------------------------------------------------------------
/networks/depth_decoder.py:
--------------------------------------------------------------------------------
1 | # Copyright Niantic 2020. Patent Pending. All rights reserved.
2 | #
3 | # This software is licensed under the terms of the DepthHints licence
4 | # which allows for non-commercial use only, the full terms of which are made
5 | # available in the LICENSE file.
6 |
7 | from __future__ import absolute_import, division, print_function
8 |
9 | import numpy as np
10 | import torch
11 | import torch.nn as nn
12 |
13 | from collections import OrderedDict
14 | from layers import *
15 |
16 |
17 | class DepthDecoder(nn.Module):
18 | def __init__(self, num_ch_enc, scales=range(4), num_output_channels=1, use_skips=True):
19 | super(DepthDecoder, self).__init__()
20 |
21 | self.num_output_channels = num_output_channels
22 | self.use_skips = use_skips
23 | self.upsample_mode = 'nearest'
24 | self.scales = scales
25 |
26 | self.num_ch_enc = num_ch_enc
27 | self.num_ch_dec = np.array([16, 32, 64, 128, 256])
28 |
29 | # decoder
30 | self.convs = OrderedDict()
31 | for i in range(4, -1, -1):
32 | # upconv_0
33 | num_ch_in = self.num_ch_enc[-1] if i == 4 else self.num_ch_dec[i + 1]
34 | num_ch_out = self.num_ch_dec[i]
35 | self.convs[("upconv", i, 0)] = ConvBlock(num_ch_in, num_ch_out)
36 |
37 | # upconv_1
38 | num_ch_in = self.num_ch_dec[i]
39 | if self.use_skips and i > 0:
40 | num_ch_in += self.num_ch_enc[i - 1]
41 | num_ch_out = self.num_ch_dec[i]
42 | self.convs[("upconv", i, 1)] = ConvBlock(num_ch_in, num_ch_out)
43 |
44 | for s in self.scales:
45 | self.convs[("dispconv", s)] = Conv3x3(self.num_ch_dec[s], self.num_output_channels)
46 |
47 | self.decoder = nn.ModuleList(list(self.convs.values()))
48 | self.sigmoid = nn.Sigmoid()
49 |
50 | def forward(self, input_features):
51 | self.outputs = {}
52 |
53 | # decoder
54 | x = input_features[-1]
55 | for i in range(4, -1, -1):
56 | x = self.convs[("upconv", i, 0)](x)
57 | x = [upsample(x)]
58 | if self.use_skips and i > 0:
59 | x += [input_features[i - 1]]
60 | x = torch.cat(x, 1)
61 | x = self.convs[("upconv", i, 1)](x)
62 | if i in self.scales:
63 | self.outputs[("disp", i)] = self.sigmoid(self.convs[("dispconv", i)](x))
64 |
65 | return self.outputs
66 |
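67 | 
68 | if __name__ == "__main__":
69 |     # Illustrative shape check (a sketch): the channel counts below assume a ResNet18
70 |     # encoder and the default 192x640 input resolution.
71 |     num_ch_enc = [64, 64, 128, 256, 512]
72 |     features = [torch.randn(1, ch, 192 // 2 ** (i + 1), 640 // 2 ** (i + 1))
73 |                 for i, ch in enumerate(num_ch_enc)]
74 |     outputs = DepthDecoder(num_ch_enc)(features)
75 |     print(outputs[("disp", 0)].shape)  # full-resolution sigmoid disparity, (1, 1, 192, 640)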
--------------------------------------------------------------------------------
/networks/pose_cnn.py:
--------------------------------------------------------------------------------
1 | # Copyright Niantic 2020. Patent Pending. All rights reserved.
2 | #
3 | # This software is licensed under the terms of the DepthHints licence
4 | # which allows for non-commercial use only, the full terms of which are made
5 | # available in the LICENSE file.
6 |
7 | from __future__ import absolute_import, division, print_function
8 |
9 | import torch
10 | import torch.nn as nn
11 |
12 |
13 | class PoseCNN(nn.Module):
14 | def __init__(self, num_input_frames):
15 | super(PoseCNN, self).__init__()
16 |
17 | self.num_input_frames = num_input_frames
18 |
19 | self.convs = {}
20 | self.convs[0] = nn.Conv2d(3 * num_input_frames, 16, 7, 2, 3)
21 | self.convs[1] = nn.Conv2d(16, 32, 5, 2, 2)
22 | self.convs[2] = nn.Conv2d(32, 64, 3, 2, 1)
23 | self.convs[3] = nn.Conv2d(64, 128, 3, 2, 1)
24 | self.convs[4] = nn.Conv2d(128, 256, 3, 2, 1)
25 | self.convs[5] = nn.Conv2d(256, 256, 3, 2, 1)
26 | self.convs[6] = nn.Conv2d(256, 256, 3, 2, 1)
27 |
28 | self.pose_conv = nn.Conv2d(256, 6 * (num_input_frames - 1), 1)
29 |
30 | self.num_convs = len(self.convs)
31 |
32 | self.relu = nn.ReLU(True)
33 |
34 | self.net = nn.ModuleList(list(self.convs.values()))
35 |
36 | def forward(self, out):
37 |
38 | for i in range(self.num_convs):
39 | out = self.convs[i](out)
40 | out = self.relu(out)
41 |
42 | out = self.pose_conv(out)
43 | out = out.mean(3).mean(2)
44 |
45 | out = 0.01 * out.view(-1, self.num_input_frames - 1, 1, 6)
46 |
47 | axisangle = out[..., :3]
48 | translation = out[..., 3:]
49 |
50 | return axisangle, translation
51 |
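52 | 
53 | if __name__ == "__main__":
54 |     # Illustrative shape check (a sketch): two RGB frames stacked along the channel
55 |     # axis; the 192x640 resolution is the training default from options.py.
56 |     axisangle, translation = PoseCNN(num_input_frames=2)(torch.randn(4, 6, 192, 640))
57 |     print(axisangle.shape, translation.shape)  # torch.Size([4, 1, 1, 3]) each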
--------------------------------------------------------------------------------
/networks/pose_decoder.py:
--------------------------------------------------------------------------------
1 | # Copyright Niantic 2020. Patent Pending. All rights reserved.
2 | #
3 | # This software is licensed under the terms of the DepthHints licence
4 | # which allows for non-commercial use only, the full terms of which are made
5 | # available in the LICENSE file.
6 |
7 | from __future__ import absolute_import, division, print_function
8 |
9 | import torch
10 | import torch.nn as nn
11 | from collections import OrderedDict
12 |
13 |
14 | class PoseDecoder(nn.Module):
15 | def __init__(self, num_ch_enc, num_input_features, num_frames_to_predict_for=None, stride=1):
16 | super(PoseDecoder, self).__init__()
17 |
18 | self.num_ch_enc = num_ch_enc
19 | self.num_input_features = num_input_features
20 |
21 | if num_frames_to_predict_for is None:
22 | num_frames_to_predict_for = num_input_features - 1
23 | self.num_frames_to_predict_for = num_frames_to_predict_for
24 |
25 | self.convs = OrderedDict()
26 | self.convs[("squeeze")] = nn.Conv2d(self.num_ch_enc[-1], 256, 1)
27 | self.convs[("pose", 0)] = nn.Conv2d(num_input_features * 256, 256, 3, stride, 1)
28 | self.convs[("pose", 1)] = nn.Conv2d(256, 256, 3, stride, 1)
29 | self.convs[("pose", 2)] = nn.Conv2d(256, 6 * num_frames_to_predict_for, 1)
30 |
31 | self.relu = nn.ReLU()
32 |
33 | self.net = nn.ModuleList(list(self.convs.values()))
34 |
35 | def forward(self, input_features):
36 | last_features = [f[-1] for f in input_features]
37 |
38 | cat_features = [self.relu(self.convs["squeeze"](f)) for f in last_features]
39 | cat_features = torch.cat(cat_features, 1)
40 |
41 | out = cat_features
42 | for i in range(3):
43 | out = self.convs[("pose", i)](out)
44 | if i != 2:
45 | out = self.relu(out)
46 |
47 | out = out.mean(3).mean(2)
48 |
49 | out = 0.01 * out.view(-1, self.num_frames_to_predict_for, 1, 6)
50 |
51 | axisangle = out[..., :3]
52 | translation = out[..., 3:]
53 |
54 | return axisangle, translation
55 |
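56 | 
57 | if __name__ == "__main__":
58 |     # Illustrative shape check (a sketch): a single feature pyramid from a ResNet18
59 |     # pose encoder fed with a pair of frames; the channel counts and the 192x640
60 |     # resolution are assumptions carried over from the rest of the codebase.
61 |     num_ch_enc = [64, 64, 128, 256, 512]
62 |     pyramid = [torch.randn(2, ch, 192 // 2 ** (i + 1), 640 // 2 ** (i + 1))
63 |                for i, ch in enumerate(num_ch_enc)]
64 |     decoder = PoseDecoder(num_ch_enc, num_input_features=1, num_frames_to_predict_for=2)
65 |     axisangle, translation = decoder([pyramid])
66 |     print(axisangle.shape, translation.shape)  # torch.Size([2, 2, 1, 3]) each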
--------------------------------------------------------------------------------
/networks/resnet_encoder.py:
--------------------------------------------------------------------------------
1 | # Copyright Niantic 2020. Patent Pending. All rights reserved.
2 | #
3 | # This software is licensed under the terms of the DepthHints licence
4 | # which allows for non-commercial use only, the full terms of which are made
5 | # available in the LICENSE file.
6 |
7 | from __future__ import absolute_import, division, print_function
8 |
9 | import numpy as np
10 |
11 | import torch
12 | import torch.nn as nn
13 | import torchvision.models as models
14 | import torch.utils.model_zoo as model_zoo
15 |
16 |
17 | class ResNetMultiImageInput(models.ResNet):
18 | """Constructs a resnet model with varying number of input images.
19 | Adapted from https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
20 | """
21 | def __init__(self, block, layers, num_classes=1000, num_input_images=1):
22 | super(ResNetMultiImageInput, self).__init__(block, layers)
23 | self.inplanes = 64
24 | self.conv1 = nn.Conv2d(
25 | num_input_images * 3, 64, kernel_size=7, stride=2, padding=3, bias=False)
26 | self.bn1 = nn.BatchNorm2d(64)
27 | self.relu = nn.ReLU(inplace=True)
28 | self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
29 | self.layer1 = self._make_layer(block, 64, layers[0])
30 | self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
31 | self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
32 | self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
33 |
34 | for m in self.modules():
35 | if isinstance(m, nn.Conv2d):
36 | nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
37 | elif isinstance(m, nn.BatchNorm2d):
38 | nn.init.constant_(m.weight, 1)
39 | nn.init.constant_(m.bias, 0)
40 |
41 |
42 | def resnet_multiimage_input(num_layers, pretrained=False, num_input_images=1):
43 | """Constructs a ResNet model.
44 | Args:
45 | num_layers (int): Number of resnet layers. Must be 18 or 50
46 | pretrained (bool): If True, returns a model pre-trained on ImageNet
47 | num_input_images (int): Number of frames stacked as input
48 | """
49 | assert num_layers in [18, 50], "Can only run with 18 or 50 layer resnet"
50 | blocks = {18: [2, 2, 2, 2], 50: [3, 4, 6, 3]}[num_layers]
51 | block_type = {18: models.resnet.BasicBlock, 50: models.resnet.Bottleneck}[num_layers]
52 | model = ResNetMultiImageInput(block_type, blocks, num_input_images=num_input_images)
53 |
54 | if pretrained:
55 | loaded = model_zoo.load_url(models.resnet.model_urls['resnet{}'.format(num_layers)])
56 | loaded['conv1.weight'] = torch.cat(
57 | [loaded['conv1.weight']] * num_input_images, 1) / num_input_images
58 | model.load_state_dict(loaded)
59 | return model
60 |
61 |
62 | class ResnetEncoder(nn.Module):
63 | """Pytorch module for a resnet encoder
64 | """
65 | def __init__(self, num_layers, pretrained, num_input_images=1):
66 | super(ResnetEncoder, self).__init__()
67 |
68 | self.num_ch_enc = np.array([64, 64, 128, 256, 512])
69 |
70 | resnets = {18: models.resnet18,
71 | 34: models.resnet34,
72 | 50: models.resnet50,
73 | 101: models.resnet101,
74 | 152: models.resnet152}
75 |
76 | if num_layers not in resnets:
77 | raise ValueError("{} is not a valid number of resnet layers".format(num_layers))
78 |
79 | if num_input_images > 1:
80 | self.encoder = resnet_multiimage_input(num_layers, pretrained, num_input_images)
81 | else:
82 | self.encoder = resnets[num_layers](pretrained)
83 |
84 | if num_layers > 34:
85 | self.num_ch_enc[1:] *= 4
86 |
87 | def forward(self, input_image):
88 | self.features = []
89 | x = (input_image - 0.45) / 0.225
90 | x = self.encoder.conv1(x)
91 | x = self.encoder.bn1(x)
92 | self.features.append(self.encoder.relu(x))
93 | self.features.append(self.encoder.layer1(self.encoder.maxpool(self.features[-1])))
94 | self.features.append(self.encoder.layer2(self.features[-1]))
95 | self.features.append(self.encoder.layer3(self.features[-1]))
96 | self.features.append(self.encoder.layer4(self.features[-1]))
97 |
98 | return self.features
99 |
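100 | 
101 | if __name__ == "__main__":
102 |     # Illustrative shape check (a sketch; no pretrained weights are downloaded and
103 |     # the 192x640 resolution is the training default from options.py).
104 |     features = ResnetEncoder(num_layers=18, pretrained=False)(torch.randn(1, 3, 192, 640))
105 |     print([tuple(f.shape) for f in features])
106 |     # channels 64, 64, 128, 256, 512 at 1/2, 1/4, 1/8, 1/16 and 1/32 resolution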
--------------------------------------------------------------------------------
/options.py:
--------------------------------------------------------------------------------
1 | # Copyright Niantic 2020. Patent Pending. All rights reserved.
2 | #
3 | # This software is licensed under the terms of the DepthHints licence
4 | # which allows for non-commercial use only, the full terms of which are made
5 | # available in the LICENSE file.
6 |
7 | from __future__ import absolute_import, division, print_function
8 |
9 | import os
10 | import argparse
11 |
12 | file_dir = os.path.dirname(__file__) # the directory that options.py resides in
13 |
14 |
15 | class MonodepthOptions:
16 | def __init__(self):
17 | self.parser = argparse.ArgumentParser(description="Monodepthv2 options")
18 |
19 | # PATHS
20 | self.parser.add_argument("--data_path",
21 | type=str,
22 | help="path to the training data",
23 | default=os.path.join(file_dir, "kitti_data"))
24 | self.parser.add_argument("--log_dir",
25 | type=str,
26 | help="log directory",
27 | default=os.path.join(os.path.expanduser("~"), "tmp"))
28 |
29 | # TRAINING options
30 | self.parser.add_argument("--model_name",
31 | type=str,
32 | help="the name of the folder to save the model in",
33 | default="mdp")
34 | self.parser.add_argument("--split",
35 | type=str,
36 | help="which training split to use",
37 | choices=["eigen_zhou", "eigen_full", "odom", "benchmark"],
38 | default="eigen_zhou")
39 | self.parser.add_argument("--num_layers",
40 | type=int,
41 | help="number of resnet layers",
42 | default=18,
43 | choices=[18, 34, 50, 101, 152])
44 | self.parser.add_argument("--dataset",
45 | type=str,
46 | help="dataset to train on",
47 | default="kitti",
48 | choices=["kitti", "kitti_odom", "kitti_depth", "kitti_test"])
49 | self.parser.add_argument("--png",
50 | help="if set, trains from raw KITTI png files (instead of jpgs)",
51 | action="store_true")
52 | self.parser.add_argument("--height",
53 | type=int,
54 | help="input image height",
55 | default=192)
56 | self.parser.add_argument("--width",
57 | type=int,
58 | help="input image width",
59 | default=640)
60 | self.parser.add_argument("--disparity_smoothness",
61 | type=float,
62 | help="disparity smoothness weight",
63 | default=1e-3)
64 | self.parser.add_argument("--scales",
65 | nargs="+",
66 | type=int,
67 | help="scales used in the loss",
68 | default=[0, 1, 2, 3])
69 | self.parser.add_argument("--min_depth",
70 | type=float,
71 | help="minimum depth",
72 | default=0.1)
73 | self.parser.add_argument("--max_depth",
74 | type=float,
75 | help="maximum depth",
76 | default=100.0)
77 | self.parser.add_argument("--use_stereo",
78 | help="if set, uses stereo pair for training",
79 | action="store_true")
80 | self.parser.add_argument("--frame_ids",
81 | nargs="+",
82 | type=int,
83 | help="frames to load",
84 | default=[0, -1, 1])
85 | # DEPTH HINT options
86 | self.parser.add_argument("--use_depth_hints",
87 | help="if set, apply depth hints during training",
88 | action="store_true")
89 | self.parser.add_argument("--depth_hint_path",
90 | help="path to load precomputed depth hints from. If not set will"
91 | "be assumed to be data_path/depth_hints",
92 | type=str)
93 |
94 | # OPTIMIZATION options
95 | self.parser.add_argument("--batch_size",
96 | type=int,
97 | help="batch size",
98 | default=12)
99 | self.parser.add_argument("--learning_rate",
100 | type=float,
101 | help="learning rate",
102 | default=1e-4)
103 | self.parser.add_argument("--num_epochs",
104 | type=int,
105 | help="number of epochs",
106 | default=20)
107 | self.parser.add_argument("--scheduler_step_size",
108 | type=int,
109 | help="step size of the scheduler",
110 | default=15)
111 |
112 | # ABLATION options
113 | self.parser.add_argument("--v1_multiscale",
114 | help="if set, uses monodepth v1 multiscale",
115 | action="store_true")
116 | self.parser.add_argument("--avg_reprojection",
117 | help="if set, uses average reprojection loss",
118 | action="store_true")
119 | self.parser.add_argument("--disable_automasking",
120 | help="if set, doesn't do auto-masking",
121 | action="store_true")
122 | self.parser.add_argument("--predictive_mask",
123 | help="if set, uses a predictive masking scheme as in Zhou et al",
124 | action="store_true")
125 | self.parser.add_argument("--no_ssim",
126 | help="if set, disables ssim in the loss",
127 | action="store_true")
128 | self.parser.add_argument("--weights_init",
129 | type=str,
130 | help="pretrained or scratch",
131 | default="pretrained",
132 | choices=["pretrained", "scratch"])
133 | self.parser.add_argument("--pose_model_input",
134 | type=str,
135 | help="how many images the pose network gets",
136 | default="pairs",
137 | choices=["pairs", "all"])
138 | self.parser.add_argument("--pose_model_type",
139 | type=str,
140 | help="normal or shared",
141 | default="separate_resnet",
142 | choices=["posecnn", "separate_resnet", "shared"])
143 |
144 | # SYSTEM options
145 | self.parser.add_argument("--no_cuda",
146 | help="if set disables CUDA",
147 | action="store_true")
148 | self.parser.add_argument("--num_workers",
149 | type=int,
150 | help="number of dataloader workers",
151 | default=12)
152 |
153 | # LOADING options
154 | self.parser.add_argument("--load_weights_folder",
155 | type=str,
156 | help="name of model to load")
157 | self.parser.add_argument("--models_to_load",
158 | nargs="+",
159 | type=str,
160 | help="models to load",
161 | default=["encoder", "depth", "pose_encoder", "pose"])
162 |
163 | # LOGGING options
164 | self.parser.add_argument("--log_frequency",
165 | type=int,
166 | help="number of batches between each tensorboard log",
167 | default=250)
168 | self.parser.add_argument("--save_frequency",
169 | type=int,
170 | help="number of epochs between each save",
171 | default=1)
172 |
173 | # EVALUATION options
174 | self.parser.add_argument("--eval_stereo",
175 | help="if set evaluates in stereo mode",
176 | action="store_true")
177 | self.parser.add_argument("--eval_mono",
178 | help="if set evaluates in mono mode",
179 | action="store_true")
180 | self.parser.add_argument("--disable_median_scaling",
181 | help="if set disables median scaling in evaluation",
182 | action="store_true")
183 | self.parser.add_argument("--pred_depth_scale_factor",
184 | help="if set multiplies predictions by this number",
185 | type=float,
186 | default=1)
187 | self.parser.add_argument("--ext_disp_to_eval",
188 | type=str,
189 | help="optional path to a .npy disparities file to evaluate")
190 | self.parser.add_argument("--eval_split",
191 | type=str,
192 | default="eigen",
193 | choices=[
194 | "eigen", "eigen_benchmark", "benchmark", "odom_9", "odom_10"],
195 | help="which split to run eval on")
196 | self.parser.add_argument("--save_pred_disps",
197 | help="if set saves predicted disparities",
198 | action="store_true")
199 | self.parser.add_argument("--no_eval",
200 | help="if set disables evaluation",
201 | action="store_true")
202 | self.parser.add_argument("--eval_eigen_to_benchmark",
203 | help="if set assume we are loading eigen results from npy but "
204 | "we want to evaluate using the new benchmark.",
205 | action="store_true")
206 | self.parser.add_argument("--eval_out_dir",
207 | help="if set will output the disparities to this folder",
208 | type=str)
209 | self.parser.add_argument("--post_process",
210 | help="if set will perform the flipping post processing "
211 | "from the original monodepth paper",
212 | action="store_true")
213 |
214 | def parse(self):
215 | self.options = self.parser.parse_args()
216 | return self.options
217 |
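218 | 
219 | # A typical depth-hints training invocation built from the flags above (a sketch;
220 | # the paths are placeholders and the remaining options keep their defaults):
221 | #
222 | #     python train.py --data_path kitti_data --log_dir logs \
223 | #         --model_name stereo_depth_hints --use_stereo --frame_ids 0 \
224 | #         --use_depth_hints --depth_hint_path kitti_data/depth_hints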
--------------------------------------------------------------------------------
/precompute_depth_hints.py:
--------------------------------------------------------------------------------
1 | # Copyright Niantic 2020. Patent Pending. All rights reserved.
2 | #
3 | # This software is licensed under the terms of the DepthHints licence
4 | # which allows for non-commercial use only, the full terms of which are made
5 | # available in the LICENSE file.
6 |
7 | """ Script to precompute depth hints using the 'fused SGM' method """
8 |
9 | from __future__ import absolute_import, division, print_function
10 |
11 | import os
12 | import random
13 | import numpy as np
14 | import copy
15 | from PIL import Image # using pillow-simd for increased speed
16 |
17 | import argparse
18 | import time
19 |
20 | import torch
21 | from torch.utils.data import DataLoader
22 | from torchvision import transforms
23 |
24 | import torch.nn.functional as F
25 |
26 |
27 | import cv2
28 | cv2.setNumThreads(0)
29 |
30 | from utils import *
31 | from layers import *
32 |
33 |
34 | def pil_loader(path):
35 | # open path as file to avoid ResourceWarning
36 | # (https://github.com/python-pillow/Pillow/issues/835)
37 | with open(path, 'rb') as f:
38 | with Image.open(f) as img:
39 | return img.convert('RGB')
40 |
41 |
42 | def generate_stereo_matchers():
43 | """ Instantiate stereo matchers with different hyperparameters to build fused depth hints"""
44 | numDisparities = [64, 96, 128, 160]
45 | stereo_matchers = []
46 | for blockSize in [1, 2, 3]:
47 | for numDisparity in numDisparities:
48 |
49 | sad_window_size = 3
50 | stereo_params = dict(
51 | preFilterCap=63,
52 | P1=sad_window_size * sad_window_size * 4,
53 | P2=sad_window_size * sad_window_size * 32,
54 | minDisparity=0,
55 | numDisparities=numDisparity,
56 | uniquenessRatio=10,
57 | speckleWindowSize=100,
58 | speckleRange=16,
59 | blockSize=blockSize)
60 | stereo_matcher = cv2.StereoSGBM_create(**stereo_params)
61 | stereo_matchers.append(stereo_matcher)
62 |
63 | return stereo_matchers
64 |
65 |
66 | def compute_reprojection_loss(pred, target):
67 | """Computes reprojection loss between a batch of predicted and target images
68 | """
69 |
70 | ssim = SSIM()
71 | abs_diff = torch.abs(target - pred)
72 | l1_loss = abs_diff.mean(1, True)
73 |
74 | ssim_loss = ssim(pred, target).mean(1, True)
75 | reprojection_loss = 0.85 * ssim_loss + 0.15 * l1_loss
76 |
77 | return reprojection_loss
78 |
79 |
80 | class DepthHintDataset:
81 | """
82 | Class to load data to precompute depth hints.
83 |
84 | Set up as a pytorch dataset to make use of pytorch DataLoader multithreading.
85 | """
86 | def __init__(self,
87 | data_path,
88 | filenames,
89 | height, width,
90 | save_path,
91 | overwrite):
92 |
93 | self.data_path = data_path
94 | self.filenames = filenames
95 | self.save_path = save_path
96 | self.overwrite = overwrite
97 |
98 | self.height, self.width = height, width
99 |
100 | self.interp = Image.ANTIALIAS
101 | self.resizer = transforms.Resize((self.height, self.width), interpolation=self.interp)
102 |
103 | self.stereo_matchers = generate_stereo_matchers()
104 | self.data_size = len(self.stereo_matchers)
105 |
106 | # setup intrinsics and extrinsics for reprojection
107 | self.K = np.array([[0.58, 0, 0.5, 0],
108 | [0, 1.92, 0.5, 0],
109 | [0, 0, 1, 0],
110 | [0, 0, 0, 1]], dtype=np.float32)
111 | self.K[0] *= self.width
112 | self.K[1] *= self.height
113 | self.invK = np.linalg.pinv(self.K)
114 |
115 | # convert everything to tensors and reshape into a batch
116 | self.K = \
117 | torch.from_numpy(self.K).unsqueeze(0).expand(self.data_size, -1, -1).float()
118 | self.invK = \
119 | torch.from_numpy(self.invK).unsqueeze(0).expand(self.data_size, -1, -1).float()
120 |
121 | self.baseline = 0.1 # the same baseline in datasets/mono_dataset.py
122 | self.T = torch.eye(4).unsqueeze(0).float()
123 | self.T[0, 0, 3] = self.baseline
124 |
125 | def __len__(self):
126 | return len(self.filenames)
127 |
128 | def compute_depths(self, base_image, lookup_image, reverse=False):
129 | """ For a given stereo pair, compute multiple depth maps using stereo matching
130 | (OpenCV Semi-Global Block Matching). Raw pixel disparities are converted to depth using
131 | focal length and baseline.
132 |
133 | Set the reverse flag to True if the base image is on the right and the lookup image is
134 | on the left, since OpenCV SGBM computes disparity for the left image of a pair."""
135 |
136 | if reverse:
137 | base_image = base_image[:, ::-1]
138 | lookup_image = lookup_image[:, ::-1]
139 |
140 | disps = []
141 | for matcher in self.stereo_matchers:
142 | disp = matcher.compute(base_image, lookup_image) / 16 # convert to pixel disparity
143 | if reverse:
144 | disp = disp[:, ::-1]
145 | disps.append(disp)
146 |
147 | disps = np.stack(disps)
148 | disps = torch.from_numpy(disps).float()
149 |
150 | # convert disp to depth ignoring missing pixels
151 | depths = self.K[0, 0, 0] * self.baseline / (disps + 1e-7) * (disps > 0).float()
152 |
153 | return depths
154 |
155 | def __getitem__(self, index):
156 | """ For a given image, get multiple depth maps, intrinsics, extrinsics and images. """
157 | inputs = {}
158 |
159 | sequence, frame, side = self.filenames[index].split()
160 |
161 | if side == 'l':
162 | side, otherside = 'image_02', 'image_03'
163 | baseline_sign = -1
164 | else:
165 | side, otherside = 'image_03', 'image_02'
166 | baseline_sign = 1
167 |
168 | if not self.overwrite:
169 | # if depth exists, then skip this image
170 | if os.path.isfile(os.path.join(self.save_path, sequence, side,
171 | '{}.npy'.format(str(frame).zfill(10)))):
172 | return inputs
173 |
174 | # flip extrinsics if necessary
175 | T = self.T
176 | T[0, 0, 3] = baseline_sign * self.baseline
177 |
178 | base_image = pil_loader(os.path.join(self.data_path, sequence, side,
179 | 'data/{}.jpg'.format(str(frame).zfill(10))))
180 | lookup_image = pil_loader(os.path.join(self.data_path, sequence, otherside,
181 | 'data/{}.jpg'.format(str(frame).zfill(10))))
182 |
183 | base_image = np.array(self.resizer(base_image))
184 | lookup_image = np.array(self.resizer(lookup_image))
185 |
186 | depths = self.compute_depths(base_image, lookup_image, reverse=side == 'image_03')
187 |
188 | # convert to tensors and reshape into batch
189 | base_image = torch.from_numpy(base_image).permute(2, 0, 1).float().unsqueeze(0)\
190 | .expand(self.data_size, -1, -1, -1) / 255
191 | lookup_image = torch.from_numpy(lookup_image).permute(2, 0, 1).float().unsqueeze(0)\
192 | .expand(self.data_size, -1, -1, -1) / 255
193 |
194 | inputs['base_image'] = base_image
195 | inputs['lookup_image'] = lookup_image
196 | inputs['K'] = self.K
197 | inputs['invK'] = self.invK
198 | inputs['depths'] = depths
199 | inputs['T'] = T
200 |
201 | return inputs
202 |
203 |
204 | def run(opt):
205 | """ Computes depth hints for all files in opt.filenames.
206 |
207 | Makes use of pytorch DataLoader multithreading.
208 | """
209 |
210 | print('Computing depth hints...')
211 |
212 | if opt.save_path is None:
213 | opt.save_path = os.path.join(opt.data_path, 'depth_hints')
214 | print('Saving depth hints to {}'.format(opt.save_path))
215 |
216 | # setup projection mechanism - batch size of 12 as we have 4 x 3 stereo matchers
217 | cam_to_world = BackprojectDepth(12, opt.height, opt.width).cuda()
218 | world_to_cam = Project3D(12, opt.height, opt.width).cuda()
219 |
220 | # setup dataloader
221 | # batch size hardcoded to 1 as each item will contain multiple depth maps for a single image
222 | filenames = readlines(opt.filenames)
223 | dataset = DepthHintDataset(opt.data_path, filenames, opt.height, opt.width, opt.save_path,
224 | opt.overwrite_saved_depths)
225 | dataloader = DataLoader(dataset, batch_size=1, shuffle=False, drop_last=False, num_workers=12)
226 |
227 | time_before = time.time()
228 | for i, data in enumerate(dataloader):
229 |
230 | # log timings
231 | if i % 50 == 0:
232 | print('image {} of {}'.format(i, len(dataloader)))
233 | if i != 0:
234 | time_taken = time.time() - time_before
235 | print('time for 50 imgs: {}s'.format(time_taken))
236 | print('imgs/s: {}'.format(50/time_taken))
237 | time_before = time.time()
238 |
239 | # check the dataloader actually returned something; an empty dict means the image was skipped
240 | if data:
241 | for key in data:
242 | if torch.cuda.is_available():
243 | data[key] = data[key].cuda()
244 | data[key] = data[key][0] # dataloader returns batch of size 1
245 |
246 | # for each pixel, find 'best' depth which gives the lowest reprojection loss
247 | world_points = cam_to_world(data['depths'], data['invK'])
248 | cam_pix = world_to_cam(world_points, data['K'], data['T'])
249 | sample = F.grid_sample(data['lookup_image'], cam_pix, padding_mode='border')
250 | losses = compute_reprojection_loss(sample, data['base_image'])
251 | best_index = torch.argmin(losses, dim=0)
252 | best_depth = torch.gather(data['depths'], dim=0, index=best_index).cpu().numpy()
253 |
254 | sequence, frame, side = filenames[i].split(' ')
255 | if side == 'l':
256 | side = 'image_02'
257 | else:
258 | side = 'image_03'
259 |
260 | savepath = os.path.join(opt.save_path, sequence, side)
261 | os.makedirs(savepath, exist_ok=True)
262 | np.save(os.path.join(savepath, '{}.npy'.format(str(frame).zfill(10))), best_depth)
263 |
264 |
265 | def get_opts():
266 | """ parse command line options """
267 | parser = argparse.ArgumentParser()
268 |
269 | parser.add_argument('--data_path',
270 | help='path to images',
271 | type=str)
272 | parser.add_argument('--filenames',
273 | help='path to textfile containing list of images. Each line is expected to '
274 | 'be of the form "sequence_name frame_number side"',
275 | type=str,
276 | default='splits/eigen_full/all_files.txt')
277 | parser.add_argument('--save_path',
278 | help='path to save resulting depth hints to. If not set will save to '
279 | 'data_path/depth_hints',
280 | type=str)
281 | parser.add_argument('--height',
282 | help='height of computed depth hints',
283 | default=320,
284 | type=int)
285 | parser.add_argument('--width',
286 | help='width of computed depth hints',
287 | default=1024,
288 | type=int)
289 | parser.add_argument('--overwrite_saved_depths',
290 | help='if set, will overwrite any existing depth hints rather than skipping',
291 | action='store_true')
292 |
293 | return parser.parse_args()
294 |
295 |
296 | if __name__ == '__main__':
297 |
298 | opts = get_opts()
299 | run(opts)
300 |
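301 | 
302 | # A typical invocation (a sketch; --data_path is a placeholder and the remaining
303 | # flags keep the defaults defined in get_opts above):
304 | #
305 | #     python precompute_depth_hints.py --data_path kitti_data
306 | #
307 | # Hints are written to <save_path>/<sequence>/<image_0X>/<frame>.npy, the layout
308 | # expected when training with --use_depth_hints.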
--------------------------------------------------------------------------------
/splits/benchmark/eigen_to_benchmark_ids.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nianticlabs/depth-hints/aa2ecf7bc88ef2edbd434fbf064ac024cec8e85d/splits/benchmark/eigen_to_benchmark_ids.npy
--------------------------------------------------------------------------------
/splits/benchmark/test_files.txt:
--------------------------------------------------------------------------------
1 | image 0
2 | image 1
3 | image 2
4 | image 3
5 | image 4
6 | image 5
7 | image 6
8 | image 7
9 | image 8
10 | image 9
11 | image 10
12 | image 11
13 | image 12
14 | image 13
15 | image 14
16 | image 15
17 | image 16
18 | image 17
19 | image 18
20 | image 19
21 | image 20
22 | image 21
23 | image 22
24 | image 23
25 | image 24
26 | image 25
27 | image 26
28 | image 27
29 | image 28
30 | image 29
31 | image 30
32 | image 31
33 | image 32
34 | image 33
35 | image 34
36 | image 35
37 | image 36
38 | image 37
39 | image 38
40 | image 39
41 | image 40
42 | image 41
43 | image 42
44 | image 43
45 | image 44
46 | image 45
47 | image 46
48 | image 47
49 | image 48
50 | image 49
51 | image 50
52 | image 51
53 | image 52
54 | image 53
55 | image 54
56 | image 55
57 | image 56
58 | image 57
59 | image 58
60 | image 59
61 | image 60
62 | image 61
63 | image 62
64 | image 63
65 | image 64
66 | image 65
67 | image 66
68 | image 67
69 | image 68
70 | image 69
71 | image 70
72 | image 71
73 | image 72
74 | image 73
75 | image 74
76 | image 75
77 | image 76
78 | image 77
79 | image 78
80 | image 79
81 | image 80
82 | image 81
83 | image 82
84 | image 83
85 | image 84
86 | image 85
87 | image 86
88 | image 87
89 | image 88
90 | image 89
91 | image 90
92 | image 91
93 | image 92
94 | image 93
95 | image 94
96 | image 95
97 | image 96
98 | image 97
99 | image 98
100 | image 99
101 | image 100
102 | image 101
103 | image 102
104 | image 103
105 | image 104
106 | image 105
107 | image 106
108 | image 107
109 | image 108
110 | image 109
111 | image 110
112 | image 111
113 | image 112
114 | image 113
115 | image 114
116 | image 115
117 | image 116
118 | image 117
119 | image 118
120 | image 119
121 | image 120
122 | image 121
123 | image 122
124 | image 123
125 | image 124
126 | image 125
127 | image 126
128 | image 127
129 | image 128
130 | image 129
131 | image 130
132 | image 131
133 | image 132
134 | image 133
135 | image 134
136 | image 135
137 | image 136
138 | image 137
139 | image 138
140 | image 139
141 | image 140
142 | image 141
143 | image 142
144 | image 143
145 | image 144
146 | image 145
147 | image 146
148 | image 147
149 | image 148
150 | image 149
151 | image 150
152 | image 151
153 | image 152
154 | image 153
155 | image 154
156 | image 155
157 | image 156
158 | image 157
159 | image 158
160 | image 159
161 | image 160
162 | image 161
163 | image 162
164 | image 163
165 | image 164
166 | image 165
167 | image 166
168 | image 167
169 | image 168
170 | image 169
171 | image 170
172 | image 171
173 | image 172
174 | image 173
175 | image 174
176 | image 175
177 | image 176
178 | image 177
179 | image 178
180 | image 179
181 | image 180
182 | image 181
183 | image 182
184 | image 183
185 | image 184
186 | image 185
187 | image 186
188 | image 187
189 | image 188
190 | image 189
191 | image 190
192 | image 191
193 | image 192
194 | image 193
195 | image 194
196 | image 195
197 | image 196
198 | image 197
199 | image 198
200 | image 199
201 | image 200
202 | image 201
203 | image 202
204 | image 203
205 | image 204
206 | image 205
207 | image 206
208 | image 207
209 | image 208
210 | image 209
211 | image 210
212 | image 211
213 | image 212
214 | image 213
215 | image 214
216 | image 215
217 | image 216
218 | image 217
219 | image 218
220 | image 219
221 | image 220
222 | image 221
223 | image 222
224 | image 223
225 | image 224
226 | image 225
227 | image 226
228 | image 227
229 | image 228
230 | image 229
231 | image 230
232 | image 231
233 | image 232
234 | image 233
235 | image 234
236 | image 235
237 | image 236
238 | image 237
239 | image 238
240 | image 239
241 | image 240
242 | image 241
243 | image 242
244 | image 243
245 | image 244
246 | image 245
247 | image 246
248 | image 247
249 | image 248
250 | image 249
251 | image 250
252 | image 251
253 | image 252
254 | image 253
255 | image 254
256 | image 255
257 | image 256
258 | image 257
259 | image 258
260 | image 259
261 | image 260
262 | image 261
263 | image 262
264 | image 263
265 | image 264
266 | image 265
267 | image 266
268 | image 267
269 | image 268
270 | image 269
271 | image 270
272 | image 271
273 | image 272
274 | image 273
275 | image 274
276 | image 275
277 | image 276
278 | image 277
279 | image 278
280 | image 279
281 | image 280
282 | image 281
283 | image 282
284 | image 283
285 | image 284
286 | image 285
287 | image 286
288 | image 287
289 | image 288
290 | image 289
291 | image 290
292 | image 291
293 | image 292
294 | image 293
295 | image 294
296 | image 295
297 | image 296
298 | image 297
299 | image 298
300 | image 299
301 | image 300
302 | image 301
303 | image 302
304 | image 303
305 | image 304
306 | image 305
307 | image 306
308 | image 307
309 | image 308
310 | image 309
311 | image 310
312 | image 311
313 | image 312
314 | image 313
315 | image 314
316 | image 315
317 | image 316
318 | image 317
319 | image 318
320 | image 319
321 | image 320
322 | image 321
323 | image 322
324 | image 323
325 | image 324
326 | image 325
327 | image 326
328 | image 327
329 | image 328
330 | image 329
331 | image 330
332 | image 331
333 | image 332
334 | image 333
335 | image 334
336 | image 335
337 | image 336
338 | image 337
339 | image 338
340 | image 339
341 | image 340
342 | image 341
343 | image 342
344 | image 343
345 | image 344
346 | image 345
347 | image 346
348 | image 347
349 | image 348
350 | image 349
351 | image 350
352 | image 351
353 | image 352
354 | image 353
355 | image 354
356 | image 355
357 | image 356
358 | image 357
359 | image 358
360 | image 359
361 | image 360
362 | image 361
363 | image 362
364 | image 363
365 | image 364
366 | image 365
367 | image 366
368 | image 367
369 | image 368
370 | image 369
371 | image 370
372 | image 371
373 | image 372
374 | image 373
375 | image 374
376 | image 375
377 | image 376
378 | image 377
379 | image 378
380 | image 379
381 | image 380
382 | image 381
383 | image 382
384 | image 383
385 | image 384
386 | image 385
387 | image 386
388 | image 387
389 | image 388
390 | image 389
391 | image 390
392 | image 391
393 | image 392
394 | image 393
395 | image 394
396 | image 395
397 | image 396
398 | image 397
399 | image 398
400 | image 399
401 | image 400
402 | image 401
403 | image 402
404 | image 403
405 | image 404
406 | image 405
407 | image 406
408 | image 407
409 | image 408
410 | image 409
411 | image 410
412 | image 411
413 | image 412
414 | image 413
415 | image 414
416 | image 415
417 | image 416
418 | image 417
419 | image 418
420 | image 419
421 | image 420
422 | image 421
423 | image 422
424 | image 423
425 | image 424
426 | image 425
427 | image 426
428 | image 427
429 | image 428
430 | image 429
431 | image 430
432 | image 431
433 | image 432
434 | image 433
435 | image 434
436 | image 435
437 | image 436
438 | image 437
439 | image 438
440 | image 439
441 | image 440
442 | image 441
443 | image 442
444 | image 443
445 | image 444
446 | image 445
447 | image 446
448 | image 447
449 | image 448
450 | image 449
451 | image 450
452 | image 451
453 | image 452
454 | image 453
455 | image 454
456 | image 455
457 | image 456
458 | image 457
459 | image 458
460 | image 459
461 | image 460
462 | image 461
463 | image 462
464 | image 463
465 | image 464
466 | image 465
467 | image 466
468 | image 467
469 | image 468
470 | image 469
471 | image 470
472 | image 471
473 | image 472
474 | image 473
475 | image 474
476 | image 475
477 | image 476
478 | image 477
479 | image 478
480 | image 479
481 | image 480
482 | image 481
483 | image 482
484 | image 483
485 | image 484
486 | image 485
487 | image 486
488 | image 487
489 | image 488
490 | image 489
491 | image 490
492 | image 491
493 | image 492
494 | image 493
495 | image 494
496 | image 495
497 | image 496
498 | image 497
499 | image 498
500 | image 499
501 |
--------------------------------------------------------------------------------
/splits/eigen/gt_depths.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nianticlabs/depth-hints/aa2ecf7bc88ef2edbd434fbf064ac024cec8e85d/splits/eigen/gt_depths.npz
--------------------------------------------------------------------------------
/splits/eigen_benchmark/test_files.txt:
--------------------------------------------------------------------------------
1 | 2011_09_26/2011_09_26_drive_0002_sync 69 l
2 | 2011_09_26/2011_09_26_drive_0002_sync 54 l
3 | 2011_09_26/2011_09_26_drive_0002_sync 42 l
4 | 2011_09_26/2011_09_26_drive_0002_sync 57 l
5 | 2011_09_26/2011_09_26_drive_0002_sync 30 l
6 | 2011_09_26/2011_09_26_drive_0002_sync 27 l
7 | 2011_09_26/2011_09_26_drive_0002_sync 12 l
8 | 2011_09_26/2011_09_26_drive_0002_sync 36 l
9 | 2011_09_26/2011_09_26_drive_0002_sync 33 l
10 | 2011_09_26/2011_09_26_drive_0002_sync 15 l
11 | 2011_09_26/2011_09_26_drive_0002_sync 39 l
12 | 2011_09_26/2011_09_26_drive_0002_sync 9 l
13 | 2011_09_26/2011_09_26_drive_0002_sync 51 l
14 | 2011_09_26/2011_09_26_drive_0002_sync 60 l
15 | 2011_09_26/2011_09_26_drive_0002_sync 21 l
16 | 2011_09_26/2011_09_26_drive_0002_sync 24 l
17 | 2011_09_26/2011_09_26_drive_0002_sync 45 l
18 | 2011_09_26/2011_09_26_drive_0002_sync 18 l
19 | 2011_09_26/2011_09_26_drive_0002_sync 48 l
20 | 2011_09_26/2011_09_26_drive_0002_sync 6 l
21 | 2011_09_26/2011_09_26_drive_0002_sync 63 l
22 | 2011_09_26/2011_09_26_drive_0009_sync 16 l
23 | 2011_09_26/2011_09_26_drive_0009_sync 32 l
24 | 2011_09_26/2011_09_26_drive_0009_sync 48 l
25 | 2011_09_26/2011_09_26_drive_0009_sync 64 l
26 | 2011_09_26/2011_09_26_drive_0009_sync 80 l
27 | 2011_09_26/2011_09_26_drive_0009_sync 96 l
28 | 2011_09_26/2011_09_26_drive_0009_sync 112 l
29 | 2011_09_26/2011_09_26_drive_0009_sync 128 l
30 | 2011_09_26/2011_09_26_drive_0009_sync 144 l
31 | 2011_09_26/2011_09_26_drive_0009_sync 160 l
32 | 2011_09_26/2011_09_26_drive_0009_sync 176 l
33 | 2011_09_26/2011_09_26_drive_0009_sync 196 l
34 | 2011_09_26/2011_09_26_drive_0009_sync 212 l
35 | 2011_09_26/2011_09_26_drive_0009_sync 228 l
36 | 2011_09_26/2011_09_26_drive_0009_sync 244 l
37 | 2011_09_26/2011_09_26_drive_0009_sync 260 l
38 | 2011_09_26/2011_09_26_drive_0009_sync 276 l
39 | 2011_09_26/2011_09_26_drive_0009_sync 292 l
40 | 2011_09_26/2011_09_26_drive_0009_sync 308 l
41 | 2011_09_26/2011_09_26_drive_0009_sync 324 l
42 | 2011_09_26/2011_09_26_drive_0009_sync 340 l
43 | 2011_09_26/2011_09_26_drive_0009_sync 356 l
44 | 2011_09_26/2011_09_26_drive_0009_sync 372 l
45 | 2011_09_26/2011_09_26_drive_0009_sync 388 l
46 | 2011_09_26/2011_09_26_drive_0013_sync 90 l
47 | 2011_09_26/2011_09_26_drive_0013_sync 50 l
48 | 2011_09_26/2011_09_26_drive_0013_sync 110 l
49 | 2011_09_26/2011_09_26_drive_0013_sync 115 l
50 | 2011_09_26/2011_09_26_drive_0013_sync 60 l
51 | 2011_09_26/2011_09_26_drive_0013_sync 105 l
52 | 2011_09_26/2011_09_26_drive_0013_sync 125 l
53 | 2011_09_26/2011_09_26_drive_0013_sync 20 l
54 | 2011_09_26/2011_09_26_drive_0013_sync 85 l
55 | 2011_09_26/2011_09_26_drive_0013_sync 70 l
56 | 2011_09_26/2011_09_26_drive_0013_sync 80 l
57 | 2011_09_26/2011_09_26_drive_0013_sync 65 l
58 | 2011_09_26/2011_09_26_drive_0013_sync 95 l
59 | 2011_09_26/2011_09_26_drive_0013_sync 130 l
60 | 2011_09_26/2011_09_26_drive_0013_sync 100 l
61 | 2011_09_26/2011_09_26_drive_0013_sync 10 l
62 | 2011_09_26/2011_09_26_drive_0013_sync 30 l
63 | 2011_09_26/2011_09_26_drive_0013_sync 135 l
64 | 2011_09_26/2011_09_26_drive_0013_sync 40 l
65 | 2011_09_26/2011_09_26_drive_0013_sync 5 l
66 | 2011_09_26/2011_09_26_drive_0013_sync 120 l
67 | 2011_09_26/2011_09_26_drive_0013_sync 45 l
68 | 2011_09_26/2011_09_26_drive_0013_sync 35 l
69 | 2011_09_26/2011_09_26_drive_0020_sync 69 l
70 | 2011_09_26/2011_09_26_drive_0020_sync 57 l
71 | 2011_09_26/2011_09_26_drive_0020_sync 12 l
72 | 2011_09_26/2011_09_26_drive_0020_sync 72 l
73 | 2011_09_26/2011_09_26_drive_0020_sync 18 l
74 | 2011_09_26/2011_09_26_drive_0020_sync 63 l
75 | 2011_09_26/2011_09_26_drive_0020_sync 15 l
76 | 2011_09_26/2011_09_26_drive_0020_sync 66 l
77 | 2011_09_26/2011_09_26_drive_0020_sync 6 l
78 | 2011_09_26/2011_09_26_drive_0020_sync 48 l
79 | 2011_09_26/2011_09_26_drive_0020_sync 60 l
80 | 2011_09_26/2011_09_26_drive_0020_sync 9 l
81 | 2011_09_26/2011_09_26_drive_0020_sync 33 l
82 | 2011_09_26/2011_09_26_drive_0020_sync 21 l
83 | 2011_09_26/2011_09_26_drive_0020_sync 75 l
84 | 2011_09_26/2011_09_26_drive_0020_sync 27 l
85 | 2011_09_26/2011_09_26_drive_0020_sync 45 l
86 | 2011_09_26/2011_09_26_drive_0020_sync 78 l
87 | 2011_09_26/2011_09_26_drive_0020_sync 36 l
88 | 2011_09_26/2011_09_26_drive_0020_sync 51 l
89 | 2011_09_26/2011_09_26_drive_0020_sync 54 l
90 | 2011_09_26/2011_09_26_drive_0020_sync 42 l
91 | 2011_09_26/2011_09_26_drive_0023_sync 18 l
92 | 2011_09_26/2011_09_26_drive_0023_sync 90 l
93 | 2011_09_26/2011_09_26_drive_0023_sync 126 l
94 | 2011_09_26/2011_09_26_drive_0023_sync 378 l
95 | 2011_09_26/2011_09_26_drive_0023_sync 36 l
96 | 2011_09_26/2011_09_26_drive_0023_sync 288 l
97 | 2011_09_26/2011_09_26_drive_0023_sync 198 l
98 | 2011_09_26/2011_09_26_drive_0023_sync 450 l
99 | 2011_09_26/2011_09_26_drive_0023_sync 144 l
100 | 2011_09_26/2011_09_26_drive_0023_sync 72 l
101 | 2011_09_26/2011_09_26_drive_0023_sync 252 l
102 | 2011_09_26/2011_09_26_drive_0023_sync 180 l
103 | 2011_09_26/2011_09_26_drive_0023_sync 432 l
104 | 2011_09_26/2011_09_26_drive_0023_sync 396 l
105 | 2011_09_26/2011_09_26_drive_0023_sync 54 l
106 | 2011_09_26/2011_09_26_drive_0023_sync 468 l
107 | 2011_09_26/2011_09_26_drive_0023_sync 306 l
108 | 2011_09_26/2011_09_26_drive_0023_sync 108 l
109 | 2011_09_26/2011_09_26_drive_0023_sync 162 l
110 | 2011_09_26/2011_09_26_drive_0023_sync 342 l
111 | 2011_09_26/2011_09_26_drive_0023_sync 270 l
112 | 2011_09_26/2011_09_26_drive_0023_sync 414 l
113 | 2011_09_26/2011_09_26_drive_0023_sync 216 l
114 | 2011_09_26/2011_09_26_drive_0023_sync 360 l
115 | 2011_09_26/2011_09_26_drive_0023_sync 324 l
116 | 2011_09_26/2011_09_26_drive_0027_sync 77 l
117 | 2011_09_26/2011_09_26_drive_0027_sync 35 l
118 | 2011_09_26/2011_09_26_drive_0027_sync 91 l
119 | 2011_09_26/2011_09_26_drive_0027_sync 112 l
120 | 2011_09_26/2011_09_26_drive_0027_sync 7 l
121 | 2011_09_26/2011_09_26_drive_0027_sync 175 l
122 | 2011_09_26/2011_09_26_drive_0027_sync 42 l
123 | 2011_09_26/2011_09_26_drive_0027_sync 98 l
124 | 2011_09_26/2011_09_26_drive_0027_sync 133 l
125 | 2011_09_26/2011_09_26_drive_0027_sync 161 l
126 | 2011_09_26/2011_09_26_drive_0027_sync 14 l
127 | 2011_09_26/2011_09_26_drive_0027_sync 126 l
128 | 2011_09_26/2011_09_26_drive_0027_sync 168 l
129 | 2011_09_26/2011_09_26_drive_0027_sync 70 l
130 | 2011_09_26/2011_09_26_drive_0027_sync 84 l
131 | 2011_09_26/2011_09_26_drive_0027_sync 140 l
132 | 2011_09_26/2011_09_26_drive_0027_sync 49 l
133 | 2011_09_26/2011_09_26_drive_0027_sync 182 l
134 | 2011_09_26/2011_09_26_drive_0027_sync 147 l
135 | 2011_09_26/2011_09_26_drive_0027_sync 56 l
136 | 2011_09_26/2011_09_26_drive_0027_sync 63 l
137 | 2011_09_26/2011_09_26_drive_0027_sync 21 l
138 | 2011_09_26/2011_09_26_drive_0027_sync 119 l
139 | 2011_09_26/2011_09_26_drive_0027_sync 28 l
140 | 2011_09_26/2011_09_26_drive_0029_sync 380 l
141 | 2011_09_26/2011_09_26_drive_0029_sync 394 l
142 | 2011_09_26/2011_09_26_drive_0029_sync 324 l
143 | 2011_09_26/2011_09_26_drive_0029_sync 268 l
144 | 2011_09_26/2011_09_26_drive_0029_sync 366 l
145 | 2011_09_26/2011_09_26_drive_0029_sync 296 l
146 | 2011_09_26/2011_09_26_drive_0029_sync 14 l
147 | 2011_09_26/2011_09_26_drive_0029_sync 28 l
148 | 2011_09_26/2011_09_26_drive_0029_sync 182 l
149 | 2011_09_26/2011_09_26_drive_0029_sync 168 l
150 | 2011_09_26/2011_09_26_drive_0029_sync 196 l
151 | 2011_09_26/2011_09_26_drive_0029_sync 140 l
152 | 2011_09_26/2011_09_26_drive_0029_sync 84 l
153 | 2011_09_26/2011_09_26_drive_0029_sync 56 l
154 | 2011_09_26/2011_09_26_drive_0029_sync 112 l
155 | 2011_09_26/2011_09_26_drive_0029_sync 352 l
156 | 2011_09_26/2011_09_26_drive_0029_sync 126 l
157 | 2011_09_26/2011_09_26_drive_0029_sync 70 l
158 | 2011_09_26/2011_09_26_drive_0029_sync 310 l
159 | 2011_09_26/2011_09_26_drive_0029_sync 154 l
160 | 2011_09_26/2011_09_26_drive_0029_sync 98 l
161 | 2011_09_26/2011_09_26_drive_0029_sync 408 l
162 | 2011_09_26/2011_09_26_drive_0029_sync 42 l
163 | 2011_09_26/2011_09_26_drive_0029_sync 338 l
164 | 2011_09_26/2011_09_26_drive_0036_sync 128 l
165 | 2011_09_26/2011_09_26_drive_0036_sync 192 l
166 | 2011_09_26/2011_09_26_drive_0036_sync 32 l
167 | 2011_09_26/2011_09_26_drive_0036_sync 352 l
168 | 2011_09_26/2011_09_26_drive_0036_sync 608 l
169 | 2011_09_26/2011_09_26_drive_0036_sync 224 l
170 | 2011_09_26/2011_09_26_drive_0036_sync 576 l
171 | 2011_09_26/2011_09_26_drive_0036_sync 672 l
172 | 2011_09_26/2011_09_26_drive_0036_sync 64 l
173 | 2011_09_26/2011_09_26_drive_0036_sync 448 l
174 | 2011_09_26/2011_09_26_drive_0036_sync 704 l
175 | 2011_09_26/2011_09_26_drive_0036_sync 640 l
176 | 2011_09_26/2011_09_26_drive_0036_sync 512 l
177 | 2011_09_26/2011_09_26_drive_0036_sync 768 l
178 | 2011_09_26/2011_09_26_drive_0036_sync 160 l
179 | 2011_09_26/2011_09_26_drive_0036_sync 416 l
180 | 2011_09_26/2011_09_26_drive_0036_sync 480 l
181 | 2011_09_26/2011_09_26_drive_0036_sync 288 l
182 | 2011_09_26/2011_09_26_drive_0036_sync 544 l
183 | 2011_09_26/2011_09_26_drive_0036_sync 96 l
184 | 2011_09_26/2011_09_26_drive_0036_sync 384 l
185 | 2011_09_26/2011_09_26_drive_0036_sync 256 l
186 | 2011_09_26/2011_09_26_drive_0036_sync 320 l
187 | 2011_09_26/2011_09_26_drive_0046_sync 5 l
188 | 2011_09_26/2011_09_26_drive_0046_sync 10 l
189 | 2011_09_26/2011_09_26_drive_0046_sync 15 l
190 | 2011_09_26/2011_09_26_drive_0046_sync 20 l
191 | 2011_09_26/2011_09_26_drive_0046_sync 25 l
192 | 2011_09_26/2011_09_26_drive_0046_sync 30 l
193 | 2011_09_26/2011_09_26_drive_0046_sync 35 l
194 | 2011_09_26/2011_09_26_drive_0046_sync 40 l
195 | 2011_09_26/2011_09_26_drive_0046_sync 45 l
196 | 2011_09_26/2011_09_26_drive_0046_sync 50 l
197 | 2011_09_26/2011_09_26_drive_0046_sync 55 l
198 | 2011_09_26/2011_09_26_drive_0046_sync 60 l
199 | 2011_09_26/2011_09_26_drive_0046_sync 65 l
200 | 2011_09_26/2011_09_26_drive_0046_sync 70 l
201 | 2011_09_26/2011_09_26_drive_0046_sync 75 l
202 | 2011_09_26/2011_09_26_drive_0046_sync 80 l
203 | 2011_09_26/2011_09_26_drive_0046_sync 85 l
204 | 2011_09_26/2011_09_26_drive_0046_sync 90 l
205 | 2011_09_26/2011_09_26_drive_0046_sync 95 l
206 | 2011_09_26/2011_09_26_drive_0046_sync 100 l
207 | 2011_09_26/2011_09_26_drive_0046_sync 105 l
208 | 2011_09_26/2011_09_26_drive_0046_sync 110 l
209 | 2011_09_26/2011_09_26_drive_0046_sync 115 l
210 | 2011_09_26/2011_09_26_drive_0048_sync 5 l
211 | 2011_09_26/2011_09_26_drive_0048_sync 6 l
212 | 2011_09_26/2011_09_26_drive_0048_sync 7 l
213 | 2011_09_26/2011_09_26_drive_0048_sync 8 l
214 | 2011_09_26/2011_09_26_drive_0048_sync 9 l
215 | 2011_09_26/2011_09_26_drive_0048_sync 10 l
216 | 2011_09_26/2011_09_26_drive_0048_sync 11 l
217 | 2011_09_26/2011_09_26_drive_0048_sync 12 l
218 | 2011_09_26/2011_09_26_drive_0048_sync 13 l
219 | 2011_09_26/2011_09_26_drive_0048_sync 14 l
220 | 2011_09_26/2011_09_26_drive_0048_sync 15 l
221 | 2011_09_26/2011_09_26_drive_0048_sync 16 l
222 | 2011_09_26/2011_09_26_drive_0052_sync 46 l
223 | 2011_09_26/2011_09_26_drive_0052_sync 14 l
224 | 2011_09_26/2011_09_26_drive_0052_sync 36 l
225 | 2011_09_26/2011_09_26_drive_0052_sync 28 l
226 | 2011_09_26/2011_09_26_drive_0052_sync 26 l
227 | 2011_09_26/2011_09_26_drive_0052_sync 50 l
228 | 2011_09_26/2011_09_26_drive_0052_sync 40 l
229 | 2011_09_26/2011_09_26_drive_0052_sync 8 l
230 | 2011_09_26/2011_09_26_drive_0052_sync 16 l
231 | 2011_09_26/2011_09_26_drive_0052_sync 44 l
232 | 2011_09_26/2011_09_26_drive_0052_sync 18 l
233 | 2011_09_26/2011_09_26_drive_0052_sync 32 l
234 | 2011_09_26/2011_09_26_drive_0052_sync 42 l
235 | 2011_09_26/2011_09_26_drive_0052_sync 10 l
236 | 2011_09_26/2011_09_26_drive_0052_sync 20 l
237 | 2011_09_26/2011_09_26_drive_0052_sync 48 l
238 | 2011_09_26/2011_09_26_drive_0052_sync 52 l
239 | 2011_09_26/2011_09_26_drive_0052_sync 6 l
240 | 2011_09_26/2011_09_26_drive_0052_sync 30 l
241 | 2011_09_26/2011_09_26_drive_0052_sync 12 l
242 | 2011_09_26/2011_09_26_drive_0052_sync 38 l
243 | 2011_09_26/2011_09_26_drive_0052_sync 22 l
244 | 2011_09_26/2011_09_26_drive_0056_sync 11 l
245 | 2011_09_26/2011_09_26_drive_0056_sync 33 l
246 | 2011_09_26/2011_09_26_drive_0056_sync 242 l
247 | 2011_09_26/2011_09_26_drive_0056_sync 253 l
248 | 2011_09_26/2011_09_26_drive_0056_sync 286 l
249 | 2011_09_26/2011_09_26_drive_0056_sync 154 l
250 | 2011_09_26/2011_09_26_drive_0056_sync 99 l
251 | 2011_09_26/2011_09_26_drive_0056_sync 220 l
252 | 2011_09_26/2011_09_26_drive_0056_sync 22 l
253 | 2011_09_26/2011_09_26_drive_0056_sync 77 l
254 | 2011_09_26/2011_09_26_drive_0056_sync 187 l
255 | 2011_09_26/2011_09_26_drive_0056_sync 143 l
256 | 2011_09_26/2011_09_26_drive_0056_sync 66 l
257 | 2011_09_26/2011_09_26_drive_0056_sync 176 l
258 | 2011_09_26/2011_09_26_drive_0056_sync 110 l
259 | 2011_09_26/2011_09_26_drive_0056_sync 275 l
260 | 2011_09_26/2011_09_26_drive_0056_sync 264 l
261 | 2011_09_26/2011_09_26_drive_0056_sync 198 l
262 | 2011_09_26/2011_09_26_drive_0056_sync 55 l
263 | 2011_09_26/2011_09_26_drive_0056_sync 88 l
264 | 2011_09_26/2011_09_26_drive_0056_sync 121 l
265 | 2011_09_26/2011_09_26_drive_0056_sync 209 l
266 | 2011_09_26/2011_09_26_drive_0056_sync 165 l
267 | 2011_09_26/2011_09_26_drive_0056_sync 231 l
268 | 2011_09_26/2011_09_26_drive_0056_sync 44 l
269 | 2011_09_26/2011_09_26_drive_0059_sync 56 l
270 | 2011_09_26/2011_09_26_drive_0059_sync 344 l
271 | 2011_09_26/2011_09_26_drive_0059_sync 358 l
272 | 2011_09_26/2011_09_26_drive_0059_sync 316 l
273 | 2011_09_26/2011_09_26_drive_0059_sync 238 l
274 | 2011_09_26/2011_09_26_drive_0059_sync 98 l
275 | 2011_09_26/2011_09_26_drive_0059_sync 112 l
276 | 2011_09_26/2011_09_26_drive_0059_sync 28 l
277 | 2011_09_26/2011_09_26_drive_0059_sync 14 l
278 | 2011_09_26/2011_09_26_drive_0059_sync 330 l
279 | 2011_09_26/2011_09_26_drive_0059_sync 154 l
280 | 2011_09_26/2011_09_26_drive_0059_sync 42 l
281 | 2011_09_26/2011_09_26_drive_0059_sync 302 l
282 | 2011_09_26/2011_09_26_drive_0059_sync 182 l
283 | 2011_09_26/2011_09_26_drive_0059_sync 288 l
284 | 2011_09_26/2011_09_26_drive_0059_sync 140 l
285 | 2011_09_26/2011_09_26_drive_0059_sync 274 l
286 | 2011_09_26/2011_09_26_drive_0059_sync 224 l
287 | 2011_09_26/2011_09_26_drive_0059_sync 196 l
288 | 2011_09_26/2011_09_26_drive_0059_sync 126 l
289 | 2011_09_26/2011_09_26_drive_0059_sync 84 l
290 | 2011_09_26/2011_09_26_drive_0059_sync 210 l
291 | 2011_09_26/2011_09_26_drive_0059_sync 70 l
292 | 2011_09_26/2011_09_26_drive_0064_sync 528 l
293 | 2011_09_26/2011_09_26_drive_0064_sync 308 l
294 | 2011_09_26/2011_09_26_drive_0064_sync 44 l
295 | 2011_09_26/2011_09_26_drive_0064_sync 352 l
296 | 2011_09_26/2011_09_26_drive_0064_sync 66 l
297 | 2011_09_26/2011_09_26_drive_0064_sync 506 l
298 | 2011_09_26/2011_09_26_drive_0064_sync 176 l
299 | 2011_09_26/2011_09_26_drive_0064_sync 22 l
300 | 2011_09_26/2011_09_26_drive_0064_sync 242 l
301 | 2011_09_26/2011_09_26_drive_0064_sync 462 l
302 | 2011_09_26/2011_09_26_drive_0064_sync 418 l
303 | 2011_09_26/2011_09_26_drive_0064_sync 110 l
304 | 2011_09_26/2011_09_26_drive_0064_sync 440 l
305 | 2011_09_26/2011_09_26_drive_0064_sync 396 l
306 | 2011_09_26/2011_09_26_drive_0064_sync 154 l
307 | 2011_09_26/2011_09_26_drive_0064_sync 374 l
308 | 2011_09_26/2011_09_26_drive_0064_sync 88 l
309 | 2011_09_26/2011_09_26_drive_0064_sync 286 l
310 | 2011_09_26/2011_09_26_drive_0064_sync 550 l
311 | 2011_09_26/2011_09_26_drive_0064_sync 264 l
312 | 2011_09_26/2011_09_26_drive_0064_sync 220 l
313 | 2011_09_26/2011_09_26_drive_0064_sync 330 l
314 | 2011_09_26/2011_09_26_drive_0064_sync 484 l
315 | 2011_09_26/2011_09_26_drive_0064_sync 198 l
316 | 2011_09_26/2011_09_26_drive_0084_sync 283 l
317 | 2011_09_26/2011_09_26_drive_0084_sync 361 l
318 | 2011_09_26/2011_09_26_drive_0084_sync 270 l
319 | 2011_09_26/2011_09_26_drive_0084_sync 127 l
320 | 2011_09_26/2011_09_26_drive_0084_sync 205 l
321 | 2011_09_26/2011_09_26_drive_0084_sync 218 l
322 | 2011_09_26/2011_09_26_drive_0084_sync 153 l
323 | 2011_09_26/2011_09_26_drive_0084_sync 335 l
324 | 2011_09_26/2011_09_26_drive_0084_sync 192 l
325 | 2011_09_26/2011_09_26_drive_0084_sync 348 l
326 | 2011_09_26/2011_09_26_drive_0084_sync 101 l
327 | 2011_09_26/2011_09_26_drive_0084_sync 49 l
328 | 2011_09_26/2011_09_26_drive_0084_sync 179 l
329 | 2011_09_26/2011_09_26_drive_0084_sync 140 l
330 | 2011_09_26/2011_09_26_drive_0084_sync 374 l
331 | 2011_09_26/2011_09_26_drive_0084_sync 322 l
332 | 2011_09_26/2011_09_26_drive_0084_sync 309 l
333 | 2011_09_26/2011_09_26_drive_0084_sync 244 l
334 | 2011_09_26/2011_09_26_drive_0084_sync 62 l
335 | 2011_09_26/2011_09_26_drive_0084_sync 257 l
336 | 2011_09_26/2011_09_26_drive_0084_sync 88 l
337 | 2011_09_26/2011_09_26_drive_0084_sync 114 l
338 | 2011_09_26/2011_09_26_drive_0084_sync 75 l
339 | 2011_09_26/2011_09_26_drive_0084_sync 296 l
340 | 2011_09_26/2011_09_26_drive_0084_sync 231 l
341 | 2011_09_26/2011_09_26_drive_0086_sync 7 l
342 | 2011_09_26/2011_09_26_drive_0086_sync 196 l
343 | 2011_09_26/2011_09_26_drive_0086_sync 439 l
344 | 2011_09_26/2011_09_26_drive_0086_sync 169 l
345 | 2011_09_26/2011_09_26_drive_0086_sync 115 l
346 | 2011_09_26/2011_09_26_drive_0086_sync 34 l
347 | 2011_09_26/2011_09_26_drive_0086_sync 304 l
348 | 2011_09_26/2011_09_26_drive_0086_sync 331 l
349 | 2011_09_26/2011_09_26_drive_0086_sync 277 l
350 | 2011_09_26/2011_09_26_drive_0086_sync 520 l
351 | 2011_09_26/2011_09_26_drive_0086_sync 682 l
352 | 2011_09_26/2011_09_26_drive_0086_sync 628 l
353 | 2011_09_26/2011_09_26_drive_0086_sync 88 l
354 | 2011_09_26/2011_09_26_drive_0086_sync 601 l
355 | 2011_09_26/2011_09_26_drive_0086_sync 574 l
356 | 2011_09_26/2011_09_26_drive_0086_sync 223 l
357 | 2011_09_26/2011_09_26_drive_0086_sync 655 l
358 | 2011_09_26/2011_09_26_drive_0086_sync 358 l
359 | 2011_09_26/2011_09_26_drive_0086_sync 412 l
360 | 2011_09_26/2011_09_26_drive_0086_sync 142 l
361 | 2011_09_26/2011_09_26_drive_0086_sync 385 l
362 | 2011_09_26/2011_09_26_drive_0086_sync 61 l
363 | 2011_09_26/2011_09_26_drive_0086_sync 493 l
364 | 2011_09_26/2011_09_26_drive_0086_sync 466 l
365 | 2011_09_26/2011_09_26_drive_0086_sync 250 l
366 | 2011_09_26/2011_09_26_drive_0093_sync 16 l
367 | 2011_09_26/2011_09_26_drive_0093_sync 32 l
368 | 2011_09_26/2011_09_26_drive_0093_sync 48 l
369 | 2011_09_26/2011_09_26_drive_0093_sync 64 l
370 | 2011_09_26/2011_09_26_drive_0093_sync 80 l
371 | 2011_09_26/2011_09_26_drive_0093_sync 96 l
372 | 2011_09_26/2011_09_26_drive_0093_sync 112 l
373 | 2011_09_26/2011_09_26_drive_0093_sync 128 l
374 | 2011_09_26/2011_09_26_drive_0093_sync 144 l
375 | 2011_09_26/2011_09_26_drive_0093_sync 160 l
376 | 2011_09_26/2011_09_26_drive_0093_sync 176 l
377 | 2011_09_26/2011_09_26_drive_0093_sync 192 l
378 | 2011_09_26/2011_09_26_drive_0093_sync 208 l
379 | 2011_09_26/2011_09_26_drive_0093_sync 224 l
380 | 2011_09_26/2011_09_26_drive_0093_sync 240 l
381 | 2011_09_26/2011_09_26_drive_0093_sync 256 l
382 | 2011_09_26/2011_09_26_drive_0093_sync 305 l
383 | 2011_09_26/2011_09_26_drive_0093_sync 321 l
384 | 2011_09_26/2011_09_26_drive_0093_sync 337 l
385 | 2011_09_26/2011_09_26_drive_0093_sync 353 l
386 | 2011_09_26/2011_09_26_drive_0093_sync 369 l
387 | 2011_09_26/2011_09_26_drive_0093_sync 385 l
388 | 2011_09_26/2011_09_26_drive_0093_sync 401 l
389 | 2011_09_26/2011_09_26_drive_0093_sync 417 l
390 | 2011_09_26/2011_09_26_drive_0096_sync 19 l
391 | 2011_09_26/2011_09_26_drive_0096_sync 38 l
392 | 2011_09_26/2011_09_26_drive_0096_sync 57 l
393 | 2011_09_26/2011_09_26_drive_0096_sync 76 l
394 | 2011_09_26/2011_09_26_drive_0096_sync 95 l
395 | 2011_09_26/2011_09_26_drive_0096_sync 114 l
396 | 2011_09_26/2011_09_26_drive_0096_sync 133 l
397 | 2011_09_26/2011_09_26_drive_0096_sync 152 l
398 | 2011_09_26/2011_09_26_drive_0096_sync 171 l
399 | 2011_09_26/2011_09_26_drive_0096_sync 190 l
400 | 2011_09_26/2011_09_26_drive_0096_sync 209 l
401 | 2011_09_26/2011_09_26_drive_0096_sync 228 l
402 | 2011_09_26/2011_09_26_drive_0096_sync 247 l
403 | 2011_09_26/2011_09_26_drive_0096_sync 266 l
404 | 2011_09_26/2011_09_26_drive_0096_sync 285 l
405 | 2011_09_26/2011_09_26_drive_0096_sync 304 l
406 | 2011_09_26/2011_09_26_drive_0096_sync 323 l
407 | 2011_09_26/2011_09_26_drive_0096_sync 342 l
408 | 2011_09_26/2011_09_26_drive_0096_sync 361 l
409 | 2011_09_26/2011_09_26_drive_0096_sync 380 l
410 | 2011_09_26/2011_09_26_drive_0096_sync 399 l
411 | 2011_09_26/2011_09_26_drive_0096_sync 418 l
412 | 2011_09_26/2011_09_26_drive_0096_sync 437 l
413 | 2011_09_26/2011_09_26_drive_0096_sync 456 l
414 | 2011_09_26/2011_09_26_drive_0101_sync 692 l
415 | 2011_09_26/2011_09_26_drive_0101_sync 930 l
416 | 2011_09_26/2011_09_26_drive_0101_sync 760 l
417 | 2011_09_26/2011_09_26_drive_0101_sync 896 l
418 | 2011_09_26/2011_09_26_drive_0101_sync 284 l
419 | 2011_09_26/2011_09_26_drive_0101_sync 148 l
420 | 2011_09_26/2011_09_26_drive_0101_sync 522 l
421 | 2011_09_26/2011_09_26_drive_0101_sync 794 l
422 | 2011_09_26/2011_09_26_drive_0101_sync 624 l
423 | 2011_09_26/2011_09_26_drive_0101_sync 726 l
424 | 2011_09_26/2011_09_26_drive_0101_sync 216 l
425 | 2011_09_26/2011_09_26_drive_0101_sync 318 l
426 | 2011_09_26/2011_09_26_drive_0101_sync 488 l
427 | 2011_09_26/2011_09_26_drive_0101_sync 590 l
428 | 2011_09_26/2011_09_26_drive_0101_sync 454 l
429 | 2011_09_26/2011_09_26_drive_0101_sync 862 l
430 | 2011_09_26/2011_09_26_drive_0101_sync 386 l
431 | 2011_09_26/2011_09_26_drive_0101_sync 352 l
432 | 2011_09_26/2011_09_26_drive_0101_sync 420 l
433 | 2011_09_26/2011_09_26_drive_0101_sync 658 l
434 | 2011_09_26/2011_09_26_drive_0101_sync 828 l
435 | 2011_09_26/2011_09_26_drive_0101_sync 556 l
436 | 2011_09_26/2011_09_26_drive_0101_sync 114 l
437 | 2011_09_26/2011_09_26_drive_0101_sync 182 l
438 | 2011_09_26/2011_09_26_drive_0101_sync 80 l
439 | 2011_09_26/2011_09_26_drive_0106_sync 15 l
440 | 2011_09_26/2011_09_26_drive_0106_sync 35 l
441 | 2011_09_26/2011_09_26_drive_0106_sync 43 l
442 | 2011_09_26/2011_09_26_drive_0106_sync 51 l
443 | 2011_09_26/2011_09_26_drive_0106_sync 59 l
444 | 2011_09_26/2011_09_26_drive_0106_sync 67 l
445 | 2011_09_26/2011_09_26_drive_0106_sync 75 l
446 | 2011_09_26/2011_09_26_drive_0106_sync 83 l
447 | 2011_09_26/2011_09_26_drive_0106_sync 91 l
448 | 2011_09_26/2011_09_26_drive_0106_sync 99 l
449 | 2011_09_26/2011_09_26_drive_0106_sync 107 l
450 | 2011_09_26/2011_09_26_drive_0106_sync 115 l
451 | 2011_09_26/2011_09_26_drive_0106_sync 123 l
452 | 2011_09_26/2011_09_26_drive_0106_sync 131 l
453 | 2011_09_26/2011_09_26_drive_0106_sync 139 l
454 | 2011_09_26/2011_09_26_drive_0106_sync 147 l
455 | 2011_09_26/2011_09_26_drive_0106_sync 155 l
456 | 2011_09_26/2011_09_26_drive_0106_sync 163 l
457 | 2011_09_26/2011_09_26_drive_0106_sync 171 l
458 | 2011_09_26/2011_09_26_drive_0106_sync 179 l
459 | 2011_09_26/2011_09_26_drive_0106_sync 187 l
460 | 2011_09_26/2011_09_26_drive_0106_sync 195 l
461 | 2011_09_26/2011_09_26_drive_0106_sync 203 l
462 | 2011_09_26/2011_09_26_drive_0106_sync 211 l
463 | 2011_09_26/2011_09_26_drive_0106_sync 219 l
464 | 2011_09_26/2011_09_26_drive_0117_sync 312 l
465 | 2011_09_26/2011_09_26_drive_0117_sync 494 l
466 | 2011_09_26/2011_09_26_drive_0117_sync 104 l
467 | 2011_09_26/2011_09_26_drive_0117_sync 130 l
468 | 2011_09_26/2011_09_26_drive_0117_sync 156 l
469 | 2011_09_26/2011_09_26_drive_0117_sync 182 l
470 | 2011_09_26/2011_09_26_drive_0117_sync 598 l
471 | 2011_09_26/2011_09_26_drive_0117_sync 416 l
472 | 2011_09_26/2011_09_26_drive_0117_sync 364 l
473 | 2011_09_26/2011_09_26_drive_0117_sync 26 l
474 | 2011_09_26/2011_09_26_drive_0117_sync 78 l
475 | 2011_09_26/2011_09_26_drive_0117_sync 572 l
476 | 2011_09_26/2011_09_26_drive_0117_sync 468 l
477 | 2011_09_26/2011_09_26_drive_0117_sync 260 l
478 | 2011_09_26/2011_09_26_drive_0117_sync 624 l
479 | 2011_09_26/2011_09_26_drive_0117_sync 234 l
480 | 2011_09_26/2011_09_26_drive_0117_sync 442 l
481 | 2011_09_26/2011_09_26_drive_0117_sync 390 l
482 | 2011_09_26/2011_09_26_drive_0117_sync 546 l
483 | 2011_09_26/2011_09_26_drive_0117_sync 286 l
484 | 2011_09_26/2011_09_26_drive_0117_sync 338 l
485 | 2011_09_26/2011_09_26_drive_0117_sync 208 l
486 | 2011_09_26/2011_09_26_drive_0117_sync 650 l
487 | 2011_09_26/2011_09_26_drive_0117_sync 52 l
488 | 2011_09_28/2011_09_28_drive_0002_sync 24 l
489 | 2011_09_28/2011_09_28_drive_0002_sync 21 l
490 | 2011_09_28/2011_09_28_drive_0002_sync 36 l
491 | 2011_09_28/2011_09_28_drive_0002_sync 51 l
492 | 2011_09_28/2011_09_28_drive_0002_sync 18 l
493 | 2011_09_28/2011_09_28_drive_0002_sync 33 l
494 | 2011_09_28/2011_09_28_drive_0002_sync 90 l
495 | 2011_09_28/2011_09_28_drive_0002_sync 45 l
496 | 2011_09_28/2011_09_28_drive_0002_sync 54 l
497 | 2011_09_28/2011_09_28_drive_0002_sync 12 l
498 | 2011_09_28/2011_09_28_drive_0002_sync 39 l
499 | 2011_09_28/2011_09_28_drive_0002_sync 9 l
500 | 2011_09_28/2011_09_28_drive_0002_sync 30 l
501 | 2011_09_28/2011_09_28_drive_0002_sync 78 l
502 | 2011_09_28/2011_09_28_drive_0002_sync 60 l
503 | 2011_09_28/2011_09_28_drive_0002_sync 48 l
504 | 2011_09_28/2011_09_28_drive_0002_sync 84 l
505 | 2011_09_28/2011_09_28_drive_0002_sync 81 l
506 | 2011_09_28/2011_09_28_drive_0002_sync 6 l
507 | 2011_09_28/2011_09_28_drive_0002_sync 57 l
508 | 2011_09_28/2011_09_28_drive_0002_sync 72 l
509 | 2011_09_28/2011_09_28_drive_0002_sync 87 l
510 | 2011_09_28/2011_09_28_drive_0002_sync 63 l
511 | 2011_09_29/2011_09_29_drive_0071_sync 252 l
512 | 2011_09_29/2011_09_29_drive_0071_sync 540 l
513 | 2011_09_29/2011_09_29_drive_0071_sync 36 l
514 | 2011_09_29/2011_09_29_drive_0071_sync 360 l
515 | 2011_09_29/2011_09_29_drive_0071_sync 807 l
516 | 2011_09_29/2011_09_29_drive_0071_sync 879 l
517 | 2011_09_29/2011_09_29_drive_0071_sync 288 l
518 | 2011_09_29/2011_09_29_drive_0071_sync 771 l
519 | 2011_09_29/2011_09_29_drive_0071_sync 216 l
520 | 2011_09_29/2011_09_29_drive_0071_sync 951 l
521 | 2011_09_29/2011_09_29_drive_0071_sync 324 l
522 | 2011_09_29/2011_09_29_drive_0071_sync 432 l
523 | 2011_09_29/2011_09_29_drive_0071_sync 504 l
524 | 2011_09_29/2011_09_29_drive_0071_sync 576 l
525 | 2011_09_29/2011_09_29_drive_0071_sync 108 l
526 | 2011_09_29/2011_09_29_drive_0071_sync 180 l
527 | 2011_09_29/2011_09_29_drive_0071_sync 72 l
528 | 2011_09_29/2011_09_29_drive_0071_sync 612 l
529 | 2011_09_29/2011_09_29_drive_0071_sync 915 l
530 | 2011_09_29/2011_09_29_drive_0071_sync 735 l
531 | 2011_09_29/2011_09_29_drive_0071_sync 144 l
532 | 2011_09_29/2011_09_29_drive_0071_sync 396 l
533 | 2011_09_29/2011_09_29_drive_0071_sync 468 l
534 | 2011_09_30/2011_09_30_drive_0016_sync 132 l
535 | 2011_09_30/2011_09_30_drive_0016_sync 11 l
536 | 2011_09_30/2011_09_30_drive_0016_sync 154 l
537 | 2011_09_30/2011_09_30_drive_0016_sync 22 l
538 | 2011_09_30/2011_09_30_drive_0016_sync 242 l
539 | 2011_09_30/2011_09_30_drive_0016_sync 198 l
540 | 2011_09_30/2011_09_30_drive_0016_sync 176 l
541 | 2011_09_30/2011_09_30_drive_0016_sync 231 l
542 | 2011_09_30/2011_09_30_drive_0016_sync 220 l
543 | 2011_09_30/2011_09_30_drive_0016_sync 88 l
544 | 2011_09_30/2011_09_30_drive_0016_sync 143 l
545 | 2011_09_30/2011_09_30_drive_0016_sync 55 l
546 | 2011_09_30/2011_09_30_drive_0016_sync 33 l
547 | 2011_09_30/2011_09_30_drive_0016_sync 187 l
548 | 2011_09_30/2011_09_30_drive_0016_sync 110 l
549 | 2011_09_30/2011_09_30_drive_0016_sync 44 l
550 | 2011_09_30/2011_09_30_drive_0016_sync 77 l
551 | 2011_09_30/2011_09_30_drive_0016_sync 66 l
552 | 2011_09_30/2011_09_30_drive_0016_sync 165 l
553 | 2011_09_30/2011_09_30_drive_0016_sync 264 l
554 | 2011_09_30/2011_09_30_drive_0016_sync 253 l
555 | 2011_09_30/2011_09_30_drive_0016_sync 209 l
556 | 2011_09_30/2011_09_30_drive_0016_sync 121 l
557 | 2011_09_30/2011_09_30_drive_0018_sync 107 l
558 | 2011_09_30/2011_09_30_drive_0018_sync 2247 l
559 | 2011_09_30/2011_09_30_drive_0018_sync 1391 l
560 | 2011_09_30/2011_09_30_drive_0018_sync 535 l
561 | 2011_09_30/2011_09_30_drive_0018_sync 1819 l
562 | 2011_09_30/2011_09_30_drive_0018_sync 1177 l
563 | 2011_09_30/2011_09_30_drive_0018_sync 428 l
564 | 2011_09_30/2011_09_30_drive_0018_sync 1926 l
565 | 2011_09_30/2011_09_30_drive_0018_sync 749 l
566 | 2011_09_30/2011_09_30_drive_0018_sync 1284 l
567 | 2011_09_30/2011_09_30_drive_0018_sync 2140 l
568 | 2011_09_30/2011_09_30_drive_0018_sync 1605 l
569 | 2011_09_30/2011_09_30_drive_0018_sync 1498 l
570 | 2011_09_30/2011_09_30_drive_0018_sync 642 l
571 | 2011_09_30/2011_09_30_drive_0018_sync 2740 l
572 | 2011_09_30/2011_09_30_drive_0018_sync 2419 l
573 | 2011_09_30/2011_09_30_drive_0018_sync 856 l
574 | 2011_09_30/2011_09_30_drive_0018_sync 2526 l
575 | 2011_09_30/2011_09_30_drive_0018_sync 1712 l
576 | 2011_09_30/2011_09_30_drive_0018_sync 1070 l
577 | 2011_09_30/2011_09_30_drive_0018_sync 2033 l
578 | 2011_09_30/2011_09_30_drive_0018_sync 214 l
579 | 2011_09_30/2011_09_30_drive_0018_sync 963 l
580 | 2011_09_30/2011_09_30_drive_0018_sync 2633 l
581 | 2011_09_30/2011_09_30_drive_0027_sync 533 l
582 | 2011_09_30/2011_09_30_drive_0027_sync 1040 l
583 | 2011_09_30/2011_09_30_drive_0027_sync 82 l
584 | 2011_09_30/2011_09_30_drive_0027_sync 205 l
585 | 2011_09_30/2011_09_30_drive_0027_sync 835 l
586 | 2011_09_30/2011_09_30_drive_0027_sync 451 l
587 | 2011_09_30/2011_09_30_drive_0027_sync 164 l
588 | 2011_09_30/2011_09_30_drive_0027_sync 794 l
589 | 2011_09_30/2011_09_30_drive_0027_sync 328 l
590 | 2011_09_30/2011_09_30_drive_0027_sync 615 l
591 | 2011_09_30/2011_09_30_drive_0027_sync 917 l
592 | 2011_09_30/2011_09_30_drive_0027_sync 369 l
593 | 2011_09_30/2011_09_30_drive_0027_sync 287 l
594 | 2011_09_30/2011_09_30_drive_0027_sync 123 l
595 | 2011_09_30/2011_09_30_drive_0027_sync 876 l
596 | 2011_09_30/2011_09_30_drive_0027_sync 410 l
597 | 2011_09_30/2011_09_30_drive_0027_sync 492 l
598 | 2011_09_30/2011_09_30_drive_0027_sync 958 l
599 | 2011_09_30/2011_09_30_drive_0027_sync 656 l
600 | 2011_09_30/2011_09_30_drive_0027_sync 753 l
601 | 2011_09_30/2011_09_30_drive_0027_sync 574 l
602 | 2011_09_30/2011_09_30_drive_0027_sync 1081 l
603 | 2011_09_30/2011_09_30_drive_0027_sync 41 l
604 | 2011_09_30/2011_09_30_drive_0027_sync 246 l
605 | 2011_10_03/2011_10_03_drive_0027_sync 2906 l
606 | 2011_10_03/2011_10_03_drive_0027_sync 2544 l
607 | 2011_10_03/2011_10_03_drive_0027_sync 362 l
608 | 2011_10_03/2011_10_03_drive_0027_sync 4535 l
609 | 2011_10_03/2011_10_03_drive_0027_sync 734 l
610 | 2011_10_03/2011_10_03_drive_0027_sync 1096 l
611 | 2011_10_03/2011_10_03_drive_0027_sync 4173 l
612 | 2011_10_03/2011_10_03_drive_0027_sync 543 l
613 | 2011_10_03/2011_10_03_drive_0027_sync 1277 l
614 | 2011_10_03/2011_10_03_drive_0027_sync 4354 l
615 | 2011_10_03/2011_10_03_drive_0027_sync 1458 l
616 | 2011_10_03/2011_10_03_drive_0027_sync 1820 l
617 | 2011_10_03/2011_10_03_drive_0027_sync 3449 l
618 | 2011_10_03/2011_10_03_drive_0027_sync 3268 l
619 | 2011_10_03/2011_10_03_drive_0027_sync 915 l
620 | 2011_10_03/2011_10_03_drive_0027_sync 2363 l
621 | 2011_10_03/2011_10_03_drive_0027_sync 2725 l
622 | 2011_10_03/2011_10_03_drive_0027_sync 181 l
623 | 2011_10_03/2011_10_03_drive_0027_sync 1639 l
624 | 2011_10_03/2011_10_03_drive_0027_sync 3992 l
625 | 2011_10_03/2011_10_03_drive_0027_sync 3087 l
626 | 2011_10_03/2011_10_03_drive_0027_sync 2001 l
627 | 2011_10_03/2011_10_03_drive_0027_sync 3811 l
628 | 2011_10_03/2011_10_03_drive_0027_sync 3630 l
629 | 2011_10_03/2011_10_03_drive_0047_sync 96 l
630 | 2011_10_03/2011_10_03_drive_0047_sync 800 l
631 | 2011_10_03/2011_10_03_drive_0047_sync 320 l
632 | 2011_10_03/2011_10_03_drive_0047_sync 576 l
633 | 2011_10_03/2011_10_03_drive_0047_sync 480 l
634 | 2011_10_03/2011_10_03_drive_0047_sync 640 l
635 | 2011_10_03/2011_10_03_drive_0047_sync 32 l
636 | 2011_10_03/2011_10_03_drive_0047_sync 384 l
637 | 2011_10_03/2011_10_03_drive_0047_sync 160 l
638 | 2011_10_03/2011_10_03_drive_0047_sync 704 l
639 | 2011_10_03/2011_10_03_drive_0047_sync 736 l
640 | 2011_10_03/2011_10_03_drive_0047_sync 672 l
641 | 2011_10_03/2011_10_03_drive_0047_sync 64 l
642 | 2011_10_03/2011_10_03_drive_0047_sync 288 l
643 | 2011_10_03/2011_10_03_drive_0047_sync 352 l
644 | 2011_10_03/2011_10_03_drive_0047_sync 512 l
645 | 2011_10_03/2011_10_03_drive_0047_sync 544 l
646 | 2011_10_03/2011_10_03_drive_0047_sync 608 l
647 | 2011_10_03/2011_10_03_drive_0047_sync 128 l
648 | 2011_10_03/2011_10_03_drive_0047_sync 224 l
649 | 2011_10_03/2011_10_03_drive_0047_sync 416 l
650 | 2011_10_03/2011_10_03_drive_0047_sync 192 l
651 | 2011_10_03/2011_10_03_drive_0047_sync 448 l
652 | 2011_10_03/2011_10_03_drive_0047_sync 768 l
653 |
--------------------------------------------------------------------------------
/splits/kitti_archives_to_download.txt:
--------------------------------------------------------------------------------
1 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_calib.zip
2 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0001/2011_09_26_drive_0001_sync.zip
3 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0002/2011_09_26_drive_0002_sync.zip
4 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0005/2011_09_26_drive_0005_sync.zip
5 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0009/2011_09_26_drive_0009_sync.zip
6 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0011/2011_09_26_drive_0011_sync.zip
7 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0013/2011_09_26_drive_0013_sync.zip
8 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0014/2011_09_26_drive_0014_sync.zip
9 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0015/2011_09_26_drive_0015_sync.zip
10 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0017/2011_09_26_drive_0017_sync.zip
11 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0018/2011_09_26_drive_0018_sync.zip
12 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0019/2011_09_26_drive_0019_sync.zip
13 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0020/2011_09_26_drive_0020_sync.zip
14 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0022/2011_09_26_drive_0022_sync.zip
15 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0023/2011_09_26_drive_0023_sync.zip
16 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0027/2011_09_26_drive_0027_sync.zip
17 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0028/2011_09_26_drive_0028_sync.zip
18 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0029/2011_09_26_drive_0029_sync.zip
19 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0032/2011_09_26_drive_0032_sync.zip
20 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0035/2011_09_26_drive_0035_sync.zip
21 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0036/2011_09_26_drive_0036_sync.zip
22 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0039/2011_09_26_drive_0039_sync.zip
23 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0046/2011_09_26_drive_0046_sync.zip
24 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0048/2011_09_26_drive_0048_sync.zip
25 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0051/2011_09_26_drive_0051_sync.zip
26 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0052/2011_09_26_drive_0052_sync.zip
27 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0056/2011_09_26_drive_0056_sync.zip
28 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0057/2011_09_26_drive_0057_sync.zip
29 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0059/2011_09_26_drive_0059_sync.zip
30 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0060/2011_09_26_drive_0060_sync.zip
31 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0061/2011_09_26_drive_0061_sync.zip
32 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0064/2011_09_26_drive_0064_sync.zip
33 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0070/2011_09_26_drive_0070_sync.zip
34 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0079/2011_09_26_drive_0079_sync.zip
35 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0084/2011_09_26_drive_0084_sync.zip
36 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0086/2011_09_26_drive_0086_sync.zip
37 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0087/2011_09_26_drive_0087_sync.zip
38 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0091/2011_09_26_drive_0091_sync.zip
39 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0093/2011_09_26_drive_0093_sync.zip
40 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0095/2011_09_26_drive_0095_sync.zip
41 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0096/2011_09_26_drive_0096_sync.zip
42 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0101/2011_09_26_drive_0101_sync.zip
43 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0104/2011_09_26_drive_0104_sync.zip
44 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0106/2011_09_26_drive_0106_sync.zip
45 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0113/2011_09_26_drive_0113_sync.zip
46 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0117/2011_09_26_drive_0117_sync.zip
47 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_28_calib.zip
48 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_28_drive_0001/2011_09_28_drive_0001_sync.zip
49 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_28_drive_0002/2011_09_28_drive_0002_sync.zip
50 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_29_calib.zip
51 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_29_drive_0004/2011_09_29_drive_0004_sync.zip
52 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_29_drive_0026/2011_09_29_drive_0026_sync.zip
53 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_29_drive_0071/2011_09_29_drive_0071_sync.zip
54 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_30_calib.zip
55 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_30_drive_0016/2011_09_30_drive_0016_sync.zip
56 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_30_drive_0018/2011_09_30_drive_0018_sync.zip
57 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_30_drive_0020/2011_09_30_drive_0020_sync.zip
58 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_30_drive_0027/2011_09_30_drive_0027_sync.zip
59 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_30_drive_0028/2011_09_30_drive_0028_sync.zip
60 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_30_drive_0033/2011_09_30_drive_0033_sync.zip
61 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_30_drive_0034/2011_09_30_drive_0034_sync.zip
62 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_10_03_calib.zip
63 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_10_03_drive_0027/2011_10_03_drive_0027_sync.zip
64 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_10_03_drive_0034/2011_10_03_drive_0034_sync.zip
65 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_10_03_drive_0042/2011_10_03_drive_0042_sync.zip
66 | https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_10_03_drive_0047/2011_10_03_drive_0047_sync.zip
67 |
--------------------------------------------------------------------------------
/splits/odom/test_files_09.txt:
--------------------------------------------------------------------------------
1 | 9 0 l
2 | 9 1 l
3 | 9 2 l
4 | 9 3 l
5 | 9 4 l
6 | 9 5 l
7 | 9 6 l
8 | 9 7 l
9 | 9 8 l
10 | 9 9 l
11 | 9 10 l
12 | 9 11 l
13 | 9 12 l
14 | 9 13 l
15 | 9 14 l
16 | 9 15 l
17 | 9 16 l
18 | 9 17 l
19 | 9 18 l
20 | 9 19 l
21 | 9 20 l
22 | 9 21 l
23 | 9 22 l
24 | 9 23 l
25 | 9 24 l
26 | 9 25 l
27 | 9 26 l
28 | 9 27 l
29 | 9 28 l
30 | 9 29 l
31 | 9 30 l
32 | 9 31 l
33 | 9 32 l
34 | 9 33 l
35 | 9 34 l
36 | 9 35 l
37 | 9 36 l
38 | 9 37 l
39 | 9 38 l
40 | 9 39 l
41 | 9 40 l
42 | 9 41 l
43 | 9 42 l
44 | 9 43 l
45 | 9 44 l
46 | 9 45 l
47 | 9 46 l
48 | 9 47 l
49 | 9 48 l
50 | 9 49 l
51 | 9 50 l
52 | 9 51 l
53 | 9 52 l
54 | 9 53 l
55 | 9 54 l
56 | 9 55 l
57 | 9 56 l
58 | 9 57 l
59 | 9 58 l
60 | 9 59 l
61 | 9 60 l
62 | 9 61 l
63 | 9 62 l
64 | 9 63 l
65 | 9 64 l
66 | 9 65 l
67 | 9 66 l
68 | 9 67 l
69 | 9 68 l
70 | 9 69 l
71 | 9 70 l
72 | 9 71 l
73 | 9 72 l
74 | 9 73 l
75 | 9 74 l
76 | 9 75 l
77 | 9 76 l
78 | 9 77 l
79 | 9 78 l
80 | 9 79 l
81 | 9 80 l
82 | 9 81 l
83 | 9 82 l
84 | 9 83 l
85 | 9 84 l
86 | 9 85 l
87 | 9 86 l
88 | 9 87 l
89 | 9 88 l
90 | 9 89 l
91 | 9 90 l
92 | 9 91 l
93 | 9 92 l
94 | 9 93 l
95 | 9 94 l
96 | 9 95 l
97 | 9 96 l
98 | 9 97 l
99 | 9 98 l
100 | 9 99 l
101 | 9 100 l
102 | 9 101 l
103 | 9 102 l
104 | 9 103 l
105 | 9 104 l
106 | 9 105 l
107 | 9 106 l
108 | 9 107 l
109 | 9 108 l
110 | 9 109 l
111 | 9 110 l
112 | 9 111 l
113 | 9 112 l
114 | 9 113 l
115 | 9 114 l
116 | 9 115 l
117 | 9 116 l
118 | 9 117 l
119 | 9 118 l
120 | 9 119 l
121 | 9 120 l
122 | 9 121 l
123 | 9 122 l
124 | 9 123 l
125 | 9 124 l
126 | 9 125 l
127 | 9 126 l
128 | 9 127 l
129 | 9 128 l
130 | 9 129 l
131 | 9 130 l
132 | 9 131 l
133 | 9 132 l
134 | 9 133 l
135 | 9 134 l
136 | 9 135 l
137 | 9 136 l
138 | 9 137 l
139 | 9 138 l
140 | 9 139 l
141 | 9 140 l
142 | 9 141 l
143 | 9 142 l
144 | 9 143 l
145 | 9 144 l
146 | 9 145 l
147 | 9 146 l
148 | 9 147 l
149 | 9 148 l
150 | 9 149 l
151 | 9 150 l
152 | 9 151 l
153 | 9 152 l
154 | 9 153 l
155 | 9 154 l
156 | 9 155 l
157 | 9 156 l
158 | 9 157 l
159 | 9 158 l
160 | 9 159 l
161 | 9 160 l
162 | 9 161 l
163 | 9 162 l
164 | 9 163 l
165 | 9 164 l
166 | 9 165 l
167 | 9 166 l
168 | 9 167 l
169 | 9 168 l
170 | 9 169 l
171 | 9 170 l
172 | 9 171 l
173 | 9 172 l
174 | 9 173 l
175 | 9 174 l
176 | 9 175 l
177 | 9 176 l
178 | 9 177 l
179 | 9 178 l
180 | 9 179 l
181 | 9 180 l
182 | 9 181 l
183 | 9 182 l
184 | 9 183 l
185 | 9 184 l
186 | 9 185 l
187 | 9 186 l
188 | 9 187 l
189 | 9 188 l
190 | 9 189 l
191 | 9 190 l
192 | 9 191 l
193 | 9 192 l
194 | 9 193 l
195 | 9 194 l
196 | 9 195 l
197 | 9 196 l
198 | 9 197 l
199 | 9 198 l
200 | 9 199 l
201 | 9 200 l
202 | 9 201 l
203 | 9 202 l
204 | 9 203 l
205 | 9 204 l
206 | 9 205 l
207 | 9 206 l
208 | 9 207 l
209 | 9 208 l
210 | 9 209 l
211 | 9 210 l
212 | 9 211 l
213 | 9 212 l
214 | 9 213 l
215 | 9 214 l
216 | 9 215 l
217 | 9 216 l
218 | 9 217 l
219 | 9 218 l
220 | 9 219 l
221 | 9 220 l
222 | 9 221 l
223 | 9 222 l
224 | 9 223 l
225 | 9 224 l
226 | 9 225 l
227 | 9 226 l
228 | 9 227 l
229 | 9 228 l
230 | 9 229 l
231 | 9 230 l
232 | 9 231 l
233 | 9 232 l
234 | 9 233 l
235 | 9 234 l
236 | 9 235 l
237 | 9 236 l
238 | 9 237 l
239 | 9 238 l
240 | 9 239 l
241 | 9 240 l
242 | 9 241 l
243 | 9 242 l
244 | 9 243 l
245 | 9 244 l
246 | 9 245 l
247 | 9 246 l
248 | 9 247 l
249 | 9 248 l
250 | 9 249 l
251 | 9 250 l
252 | 9 251 l
253 | 9 252 l
254 | 9 253 l
255 | 9 254 l
256 | 9 255 l
257 | 9 256 l
258 | 9 257 l
259 | 9 258 l
260 | 9 259 l
261 | 9 260 l
262 | 9 261 l
263 | 9 262 l
264 | 9 263 l
265 | 9 264 l
266 | 9 265 l
267 | 9 266 l
268 | 9 267 l
269 | 9 268 l
270 | 9 269 l
271 | 9 270 l
272 | 9 271 l
273 | 9 272 l
274 | 9 273 l
275 | 9 274 l
276 | 9 275 l
277 | 9 276 l
278 | 9 277 l
279 | 9 278 l
280 | 9 279 l
281 | 9 280 l
282 | 9 281 l
283 | 9 282 l
284 | 9 283 l
285 | 9 284 l
286 | 9 285 l
287 | 9 286 l
288 | 9 287 l
289 | 9 288 l
290 | 9 289 l
291 | 9 290 l
292 | 9 291 l
293 | 9 292 l
294 | 9 293 l
295 | 9 294 l
296 | 9 295 l
297 | 9 296 l
298 | 9 297 l
299 | 9 298 l
300 | 9 299 l
301 | 9 300 l
302 | 9 301 l
303 | 9 302 l
304 | 9 303 l
305 | 9 304 l
306 | 9 305 l
307 | 9 306 l
308 | 9 307 l
309 | 9 308 l
310 | 9 309 l
311 | 9 310 l
312 | 9 311 l
313 | 9 312 l
314 | 9 313 l
315 | 9 314 l
316 | 9 315 l
317 | 9 316 l
318 | 9 317 l
319 | 9 318 l
320 | 9 319 l
321 | 9 320 l
322 | 9 321 l
323 | 9 322 l
324 | 9 323 l
325 | 9 324 l
326 | 9 325 l
327 | 9 326 l
328 | 9 327 l
329 | 9 328 l
330 | 9 329 l
331 | 9 330 l
332 | 9 331 l
333 | 9 332 l
334 | 9 333 l
335 | 9 334 l
336 | 9 335 l
337 | 9 336 l
338 | 9 337 l
339 | 9 338 l
340 | 9 339 l
341 | 9 340 l
342 | 9 341 l
343 | 9 342 l
344 | 9 343 l
345 | 9 344 l
346 | 9 345 l
347 | 9 346 l
348 | 9 347 l
349 | 9 348 l
350 | 9 349 l
351 | 9 350 l
352 | 9 351 l
353 | 9 352 l
354 | 9 353 l
355 | 9 354 l
356 | 9 355 l
357 | 9 356 l
358 | 9 357 l
359 | 9 358 l
360 | 9 359 l
361 | 9 360 l
362 | 9 361 l
363 | 9 362 l
364 | 9 363 l
365 | 9 364 l
366 | 9 365 l
367 | 9 366 l
368 | 9 367 l
369 | 9 368 l
370 | 9 369 l
371 | 9 370 l
372 | 9 371 l
373 | 9 372 l
374 | 9 373 l
375 | 9 374 l
376 | 9 375 l
377 | 9 376 l
378 | 9 377 l
379 | 9 378 l
380 | 9 379 l
381 | 9 380 l
382 | 9 381 l
383 | 9 382 l
384 | 9 383 l
385 | 9 384 l
386 | 9 385 l
387 | 9 386 l
388 | 9 387 l
389 | 9 388 l
390 | 9 389 l
391 | 9 390 l
392 | 9 391 l
393 | 9 392 l
394 | 9 393 l
395 | 9 394 l
396 | 9 395 l
397 | 9 396 l
398 | 9 397 l
399 | 9 398 l
400 | 9 399 l
401 | 9 400 l
402 | 9 401 l
403 | 9 402 l
404 | 9 403 l
405 | 9 404 l
406 | 9 405 l
407 | 9 406 l
408 | 9 407 l
409 | 9 408 l
410 | 9 409 l
411 | 9 410 l
412 | 9 411 l
413 | 9 412 l
414 | 9 413 l
415 | 9 414 l
416 | 9 415 l
417 | 9 416 l
418 | 9 417 l
419 | 9 418 l
420 | 9 419 l
421 | 9 420 l
422 | 9 421 l
423 | 9 422 l
424 | 9 423 l
425 | 9 424 l
426 | 9 425 l
427 | 9 426 l
428 | 9 427 l
429 | 9 428 l
430 | 9 429 l
431 | 9 430 l
432 | 9 431 l
433 | 9 432 l
434 | 9 433 l
435 | 9 434 l
436 | 9 435 l
437 | 9 436 l
438 | 9 437 l
439 | 9 438 l
440 | 9 439 l
441 | 9 440 l
442 | 9 441 l
443 | 9 442 l
444 | 9 443 l
445 | 9 444 l
446 | 9 445 l
447 | 9 446 l
448 | 9 447 l
449 | 9 448 l
450 | 9 449 l
451 | 9 450 l
452 | 9 451 l
453 | 9 452 l
454 | 9 453 l
455 | 9 454 l
456 | 9 455 l
457 | 9 456 l
458 | 9 457 l
459 | 9 458 l
460 | 9 459 l
461 | 9 460 l
462 | 9 461 l
463 | 9 462 l
464 | 9 463 l
465 | 9 464 l
466 | 9 465 l
467 | 9 466 l
468 | 9 467 l
469 | 9 468 l
470 | 9 469 l
471 | 9 470 l
472 | 9 471 l
473 | 9 472 l
474 | 9 473 l
475 | 9 474 l
476 | 9 475 l
477 | 9 476 l
478 | 9 477 l
479 | 9 478 l
480 | 9 479 l
481 | 9 480 l
482 | 9 481 l
483 | 9 482 l
484 | 9 483 l
485 | 9 484 l
486 | 9 485 l
487 | 9 486 l
488 | 9 487 l
489 | 9 488 l
490 | 9 489 l
491 | 9 490 l
492 | 9 491 l
493 | 9 492 l
494 | 9 493 l
495 | 9 494 l
496 | 9 495 l
497 | 9 496 l
498 | 9 497 l
499 | 9 498 l
500 | 9 499 l
501 | 9 500 l
502 | 9 501 l
503 | 9 502 l
504 | 9 503 l
505 | 9 504 l
506 | 9 505 l
507 | 9 506 l
508 | 9 507 l
509 | 9 508 l
510 | 9 509 l
511 | 9 510 l
512 | 9 511 l
513 | 9 512 l
514 | 9 513 l
515 | 9 514 l
516 | 9 515 l
517 | 9 516 l
518 | 9 517 l
519 | 9 518 l
520 | 9 519 l
521 | 9 520 l
522 | 9 521 l
523 | 9 522 l
524 | 9 523 l
525 | 9 524 l
526 | 9 525 l
527 | 9 526 l
528 | 9 527 l
529 | 9 528 l
530 | 9 529 l
531 | 9 530 l
532 | 9 531 l
533 | 9 532 l
534 | 9 533 l
535 | 9 534 l
536 | 9 535 l
537 | 9 536 l
538 | 9 537 l
539 | 9 538 l
540 | 9 539 l
541 | 9 540 l
542 | 9 541 l
543 | 9 542 l
544 | 9 543 l
545 | 9 544 l
546 | 9 545 l
547 | 9 546 l
548 | 9 547 l
549 | 9 548 l
550 | 9 549 l
551 | 9 550 l
552 | 9 551 l
553 | 9 552 l
554 | 9 553 l
555 | 9 554 l
556 | 9 555 l
557 | 9 556 l
558 | 9 557 l
559 | 9 558 l
560 | 9 559 l
561 | 9 560 l
562 | 9 561 l
563 | 9 562 l
564 | 9 563 l
565 | 9 564 l
566 | 9 565 l
567 | 9 566 l
568 | 9 567 l
569 | 9 568 l
570 | 9 569 l
571 | 9 570 l
572 | 9 571 l
573 | 9 572 l
574 | 9 573 l
575 | 9 574 l
576 | 9 575 l
577 | 9 576 l
578 | 9 577 l
579 | 9 578 l
580 | 9 579 l
581 | 9 580 l
582 | 9 581 l
583 | 9 582 l
584 | 9 583 l
585 | 9 584 l
586 | 9 585 l
587 | 9 586 l
588 | 9 587 l
589 | 9 588 l
590 | 9 589 l
591 | 9 590 l
592 | 9 591 l
593 | 9 592 l
594 | 9 593 l
595 | 9 594 l
596 | 9 595 l
597 | 9 596 l
598 | 9 597 l
599 | 9 598 l
600 | 9 599 l
601 | 9 600 l
602 | 9 601 l
603 | 9 602 l
604 | 9 603 l
605 | 9 604 l
606 | 9 605 l
607 | 9 606 l
608 | 9 607 l
609 | 9 608 l
610 | 9 609 l
611 | 9 610 l
612 | 9 611 l
613 | 9 612 l
614 | 9 613 l
615 | 9 614 l
616 | 9 615 l
617 | 9 616 l
618 | 9 617 l
619 | 9 618 l
620 | 9 619 l
621 | 9 620 l
622 | 9 621 l
623 | 9 622 l
624 | 9 623 l
625 | 9 624 l
626 | 9 625 l
627 | 9 626 l
628 | 9 627 l
629 | 9 628 l
630 | 9 629 l
631 | 9 630 l
632 | 9 631 l
633 | 9 632 l
634 | 9 633 l
635 | 9 634 l
636 | 9 635 l
637 | 9 636 l
638 | 9 637 l
639 | 9 638 l
640 | 9 639 l
641 | 9 640 l
642 | 9 641 l
643 | 9 642 l
644 | 9 643 l
645 | 9 644 l
646 | 9 645 l
647 | 9 646 l
648 | 9 647 l
649 | 9 648 l
650 | 9 649 l
651 | 9 650 l
652 | 9 651 l
653 | 9 652 l
654 | 9 653 l
655 | 9 654 l
656 | 9 655 l
657 | 9 656 l
658 | 9 657 l
659 | 9 658 l
660 | 9 659 l
661 | 9 660 l
662 | 9 661 l
663 | 9 662 l
664 | 9 663 l
665 | 9 664 l
666 | 9 665 l
667 | 9 666 l
668 | 9 667 l
669 | 9 668 l
670 | 9 669 l
671 | 9 670 l
672 | 9 671 l
673 | 9 672 l
674 | 9 673 l
675 | 9 674 l
676 | 9 675 l
677 | 9 676 l
678 | 9 677 l
679 | 9 678 l
680 | 9 679 l
681 | 9 680 l
682 | 9 681 l
683 | 9 682 l
684 | 9 683 l
685 | 9 684 l
686 | 9 685 l
687 | 9 686 l
688 | 9 687 l
689 | 9 688 l
690 | 9 689 l
691 | 9 690 l
692 | 9 691 l
693 | 9 692 l
694 | 9 693 l
695 | 9 694 l
696 | 9 695 l
697 | 9 696 l
698 | 9 697 l
699 | 9 698 l
700 | 9 699 l
701 | 9 700 l
702 | 9 701 l
703 | 9 702 l
704 | 9 703 l
705 | 9 704 l
706 | 9 705 l
707 | 9 706 l
708 | 9 707 l
709 | 9 708 l
710 | 9 709 l
711 | 9 710 l
712 | 9 711 l
713 | 9 712 l
714 | 9 713 l
715 | 9 714 l
716 | 9 715 l
717 | 9 716 l
718 | 9 717 l
719 | 9 718 l
720 | 9 719 l
721 | 9 720 l
722 | 9 721 l
723 | 9 722 l
724 | 9 723 l
725 | 9 724 l
726 | 9 725 l
727 | 9 726 l
728 | 9 727 l
729 | 9 728 l
730 | 9 729 l
731 | 9 730 l
732 | 9 731 l
733 | 9 732 l
734 | 9 733 l
735 | 9 734 l
736 | 9 735 l
737 | 9 736 l
738 | 9 737 l
739 | 9 738 l
740 | 9 739 l
741 | 9 740 l
742 | 9 741 l
743 | 9 742 l
744 | 9 743 l
745 | 9 744 l
746 | 9 745 l
747 | 9 746 l
748 | 9 747 l
749 | 9 748 l
750 | 9 749 l
751 | 9 750 l
752 | 9 751 l
753 | 9 752 l
754 | 9 753 l
755 | 9 754 l
756 | 9 755 l
757 | 9 756 l
758 | 9 757 l
759 | 9 758 l
760 | 9 759 l
761 | 9 760 l
762 | 9 761 l
763 | 9 762 l
764 | 9 763 l
765 | 9 764 l
766 | 9 765 l
767 | 9 766 l
768 | 9 767 l
769 | 9 768 l
770 | 9 769 l
771 | 9 770 l
772 | 9 771 l
773 | 9 772 l
774 | 9 773 l
775 | 9 774 l
776 | 9 775 l
777 | 9 776 l
778 | 9 777 l
779 | 9 778 l
780 | 9 779 l
781 | 9 780 l
782 | 9 781 l
783 | 9 782 l
784 | 9 783 l
785 | 9 784 l
786 | 9 785 l
787 | 9 786 l
788 | 9 787 l
789 | 9 788 l
790 | 9 789 l
791 | 9 790 l
792 | 9 791 l
793 | 9 792 l
794 | 9 793 l
795 | 9 794 l
796 | 9 795 l
797 | 9 796 l
798 | 9 797 l
799 | 9 798 l
800 | 9 799 l
801 | 9 800 l
802 | 9 801 l
803 | 9 802 l
804 | 9 803 l
805 | 9 804 l
806 | 9 805 l
807 | 9 806 l
808 | 9 807 l
809 | 9 808 l
810 | 9 809 l
811 | 9 810 l
812 | 9 811 l
813 | 9 812 l
814 | 9 813 l
815 | 9 814 l
816 | 9 815 l
817 | 9 816 l
818 | 9 817 l
819 | 9 818 l
820 | 9 819 l
821 | 9 820 l
822 | 9 821 l
823 | 9 822 l
824 | 9 823 l
825 | 9 824 l
826 | 9 825 l
827 | 9 826 l
828 | 9 827 l
829 | 9 828 l
830 | 9 829 l
831 | 9 830 l
832 | 9 831 l
833 | 9 832 l
834 | 9 833 l
835 | 9 834 l
836 | 9 835 l
837 | 9 836 l
838 | 9 837 l
839 | 9 838 l
840 | 9 839 l
841 | 9 840 l
842 | 9 841 l
843 | 9 842 l
844 | 9 843 l
845 | 9 844 l
846 | 9 845 l
847 | 9 846 l
848 | 9 847 l
849 | 9 848 l
850 | 9 849 l
851 | 9 850 l
852 | 9 851 l
853 | 9 852 l
854 | 9 853 l
855 | 9 854 l
856 | 9 855 l
857 | 9 856 l
858 | 9 857 l
859 | 9 858 l
860 | 9 859 l
861 | 9 860 l
862 | 9 861 l
863 | 9 862 l
864 | 9 863 l
865 | 9 864 l
866 | 9 865 l
867 | 9 866 l
868 | 9 867 l
869 | 9 868 l
870 | 9 869 l
871 | 9 870 l
872 | 9 871 l
873 | 9 872 l
874 | 9 873 l
875 | 9 874 l
876 | 9 875 l
877 | 9 876 l
878 | 9 877 l
879 | 9 878 l
880 | 9 879 l
881 | 9 880 l
882 | 9 881 l
883 | 9 882 l
884 | 9 883 l
885 | 9 884 l
886 | 9 885 l
887 | 9 886 l
888 | 9 887 l
889 | 9 888 l
890 | 9 889 l
891 | 9 890 l
892 | 9 891 l
893 | 9 892 l
894 | 9 893 l
895 | 9 894 l
896 | 9 895 l
897 | 9 896 l
898 | 9 897 l
899 | 9 898 l
900 | 9 899 l
901 | 9 900 l
902 | 9 901 l
903 | 9 902 l
904 | 9 903 l
905 | 9 904 l
906 | 9 905 l
907 | 9 906 l
908 | 9 907 l
909 | 9 908 l
910 | 9 909 l
911 | 9 910 l
912 | 9 911 l
913 | 9 912 l
914 | 9 913 l
915 | 9 914 l
916 | 9 915 l
917 | 9 916 l
918 | 9 917 l
919 | 9 918 l
920 | 9 919 l
921 | 9 920 l
922 | 9 921 l
923 | 9 922 l
924 | 9 923 l
925 | 9 924 l
926 | 9 925 l
927 | 9 926 l
928 | 9 927 l
929 | 9 928 l
930 | 9 929 l
931 | 9 930 l
932 | 9 931 l
933 | 9 932 l
934 | 9 933 l
935 | 9 934 l
936 | 9 935 l
937 | 9 936 l
938 | 9 937 l
939 | 9 938 l
940 | 9 939 l
941 | 9 940 l
942 | 9 941 l
943 | 9 942 l
944 | 9 943 l
945 | 9 944 l
946 | 9 945 l
947 | 9 946 l
948 | 9 947 l
949 | 9 948 l
950 | 9 949 l
951 | 9 950 l
952 | 9 951 l
953 | 9 952 l
954 | 9 953 l
955 | 9 954 l
956 | 9 955 l
957 | 9 956 l
958 | 9 957 l
959 | 9 958 l
960 | 9 959 l
961 | 9 960 l
962 | 9 961 l
963 | 9 962 l
964 | 9 963 l
965 | 9 964 l
966 | 9 965 l
967 | 9 966 l
968 | 9 967 l
969 | 9 968 l
970 | 9 969 l
971 | 9 970 l
972 | 9 971 l
973 | 9 972 l
974 | 9 973 l
975 | 9 974 l
976 | 9 975 l
977 | 9 976 l
978 | 9 977 l
979 | 9 978 l
980 | 9 979 l
981 | 9 980 l
982 | 9 981 l
983 | 9 982 l
984 | 9 983 l
985 | 9 984 l
986 | 9 985 l
987 | 9 986 l
988 | 9 987 l
989 | 9 988 l
990 | 9 989 l
991 | 9 990 l
992 | 9 991 l
993 | 9 992 l
994 | 9 993 l
995 | 9 994 l
996 | 9 995 l
997 | 9 996 l
998 | 9 997 l
999 | 9 998 l
1000 | 9 999 l
1001 | 9 1000 l
1002 | 9 1001 l
1003 | 9 1002 l
1004 | 9 1003 l
1005 | 9 1004 l
1006 | 9 1005 l
1007 | 9 1006 l
1008 | 9 1007 l
1009 | 9 1008 l
1010 | 9 1009 l
1011 | 9 1010 l
1012 | 9 1011 l
1013 | 9 1012 l
1014 | 9 1013 l
1015 | 9 1014 l
1016 | 9 1015 l
1017 | 9 1016 l
1018 | 9 1017 l
1019 | 9 1018 l
1020 | 9 1019 l
1021 | 9 1020 l
1022 | 9 1021 l
1023 | 9 1022 l
1024 | 9 1023 l
1025 | 9 1024 l
1026 | 9 1025 l
1027 | 9 1026 l
1028 | 9 1027 l
1029 | 9 1028 l
1030 | 9 1029 l
1031 | 9 1030 l
1032 | 9 1031 l
1033 | 9 1032 l
1034 | 9 1033 l
1035 | 9 1034 l
1036 | 9 1035 l
1037 | 9 1036 l
1038 | 9 1037 l
1039 | 9 1038 l
1040 | 9 1039 l
1041 | 9 1040 l
1042 | 9 1041 l
1043 | 9 1042 l
1044 | 9 1043 l
1045 | 9 1044 l
1046 | 9 1045 l
1047 | 9 1046 l
1048 | 9 1047 l
1049 | 9 1048 l
1050 | 9 1049 l
1051 | 9 1050 l
1052 | 9 1051 l
1053 | 9 1052 l
1054 | 9 1053 l
1055 | 9 1054 l
1056 | 9 1055 l
1057 | 9 1056 l
1058 | 9 1057 l
1059 | 9 1058 l
1060 | 9 1059 l
1061 | 9 1060 l
1062 | 9 1061 l
1063 | 9 1062 l
1064 | 9 1063 l
1065 | 9 1064 l
1066 | 9 1065 l
1067 | 9 1066 l
1068 | 9 1067 l
1069 | 9 1068 l
1070 | 9 1069 l
1071 | 9 1070 l
1072 | 9 1071 l
1073 | 9 1072 l
1074 | 9 1073 l
1075 | 9 1074 l
1076 | 9 1075 l
1077 | 9 1076 l
1078 | 9 1077 l
1079 | 9 1078 l
1080 | 9 1079 l
1081 | 9 1080 l
1082 | 9 1081 l
1083 | 9 1082 l
1084 | 9 1083 l
1085 | 9 1084 l
1086 | 9 1085 l
1087 | 9 1086 l
1088 | 9 1087 l
1089 | 9 1088 l
1090 | 9 1089 l
1091 | 9 1090 l
1092 | 9 1091 l
1093 | 9 1092 l
1094 | 9 1093 l
1095 | 9 1094 l
1096 | 9 1095 l
1097 | 9 1096 l
1098 | 9 1097 l
1099 | 9 1098 l
1100 | 9 1099 l
1101 | 9 1100 l
1102 | 9 1101 l
1103 | 9 1102 l
1104 | 9 1103 l
1105 | 9 1104 l
1106 | 9 1105 l
1107 | 9 1106 l
1108 | 9 1107 l
1109 | 9 1108 l
1110 | 9 1109 l
1111 | 9 1110 l
1112 | 9 1111 l
1113 | 9 1112 l
1114 | 9 1113 l
1115 | 9 1114 l
1116 | 9 1115 l
1117 | 9 1116 l
1118 | 9 1117 l
1119 | 9 1118 l
1120 | 9 1119 l
1121 | 9 1120 l
1122 | 9 1121 l
1123 | 9 1122 l
1124 | 9 1123 l
1125 | 9 1124 l
1126 | 9 1125 l
1127 | 9 1126 l
1128 | 9 1127 l
1129 | 9 1128 l
1130 | 9 1129 l
1131 | 9 1130 l
1132 | 9 1131 l
1133 | 9 1132 l
1134 | 9 1133 l
1135 | 9 1134 l
1136 | 9 1135 l
1137 | 9 1136 l
1138 | 9 1137 l
1139 | 9 1138 l
1140 | 9 1139 l
1141 | 9 1140 l
1142 | 9 1141 l
1143 | 9 1142 l
1144 | 9 1143 l
1145 | 9 1144 l
1146 | 9 1145 l
1147 | 9 1146 l
1148 | 9 1147 l
1149 | 9 1148 l
1150 | 9 1149 l
1151 | 9 1150 l
1152 | 9 1151 l
1153 | 9 1152 l
1154 | 9 1153 l
1155 | 9 1154 l
1156 | 9 1155 l
1157 | 9 1156 l
1158 | 9 1157 l
1159 | 9 1158 l
1160 | 9 1159 l
1161 | 9 1160 l
1162 | 9 1161 l
1163 | 9 1162 l
1164 | 9 1163 l
1165 | 9 1164 l
1166 | 9 1165 l
1167 | 9 1166 l
1168 | 9 1167 l
1169 | 9 1168 l
1170 | 9 1169 l
1171 | 9 1170 l
1172 | 9 1171 l
1173 | 9 1172 l
1174 | 9 1173 l
1175 | 9 1174 l
1176 | 9 1175 l
1177 | 9 1176 l
1178 | 9 1177 l
1179 | 9 1178 l
1180 | 9 1179 l
1181 | 9 1180 l
1182 | 9 1181 l
1183 | 9 1182 l
1184 | 9 1183 l
1185 | 9 1184 l
1186 | 9 1185 l
1187 | 9 1186 l
1188 | 9 1187 l
1189 | 9 1188 l
1190 | 9 1189 l
1191 | 9 1190 l
1192 | 9 1191 l
1193 | 9 1192 l
1194 | 9 1193 l
1195 | 9 1194 l
1196 | 9 1195 l
1197 | 9 1196 l
1198 | 9 1197 l
1199 | 9 1198 l
1200 | 9 1199 l
1201 | 9 1200 l
1202 | 9 1201 l
1203 | 9 1202 l
1204 | 9 1203 l
1205 | 9 1204 l
1206 | 9 1205 l
1207 | 9 1206 l
1208 | 9 1207 l
1209 | 9 1208 l
1210 | 9 1209 l
1211 | 9 1210 l
1212 | 9 1211 l
1213 | 9 1212 l
1214 | 9 1213 l
1215 | 9 1214 l
1216 | 9 1215 l
1217 | 9 1216 l
1218 | 9 1217 l
1219 | 9 1218 l
1220 | 9 1219 l
1221 | 9 1220 l
1222 | 9 1221 l
1223 | 9 1222 l
1224 | 9 1223 l
1225 | 9 1224 l
1226 | 9 1225 l
1227 | 9 1226 l
1228 | 9 1227 l
1229 | 9 1228 l
1230 | 9 1229 l
1231 | 9 1230 l
1232 | 9 1231 l
1233 | 9 1232 l
1234 | 9 1233 l
1235 | 9 1234 l
1236 | 9 1235 l
1237 | 9 1236 l
1238 | 9 1237 l
1239 | 9 1238 l
1240 | 9 1239 l
1241 | 9 1240 l
1242 | 9 1241 l
1243 | 9 1242 l
1244 | 9 1243 l
1245 | 9 1244 l
1246 | 9 1245 l
1247 | 9 1246 l
1248 | 9 1247 l
1249 | 9 1248 l
1250 | 9 1249 l
1251 | 9 1250 l
1252 | 9 1251 l
1253 | 9 1252 l
1254 | 9 1253 l
1255 | 9 1254 l
1256 | 9 1255 l
1257 | 9 1256 l
1258 | 9 1257 l
1259 | 9 1258 l
1260 | 9 1259 l
1261 | 9 1260 l
1262 | 9 1261 l
1263 | 9 1262 l
1264 | 9 1263 l
1265 | 9 1264 l
1266 | 9 1265 l
1267 | 9 1266 l
1268 | 9 1267 l
1269 | 9 1268 l
1270 | 9 1269 l
1271 | 9 1270 l
1272 | 9 1271 l
1273 | 9 1272 l
1274 | 9 1273 l
1275 | 9 1274 l
1276 | 9 1275 l
1277 | 9 1276 l
1278 | 9 1277 l
1279 | 9 1278 l
1280 | 9 1279 l
1281 | 9 1280 l
1282 | 9 1281 l
1283 | 9 1282 l
1284 | 9 1283 l
1285 | 9 1284 l
1286 | 9 1285 l
1287 | 9 1286 l
1288 | 9 1287 l
1289 | 9 1288 l
1290 | 9 1289 l
1291 | 9 1290 l
1292 | 9 1291 l
1293 | 9 1292 l
1294 | 9 1293 l
1295 | 9 1294 l
1296 | 9 1295 l
1297 | 9 1296 l
1298 | 9 1297 l
1299 | 9 1298 l
1300 | 9 1299 l
1301 | 9 1300 l
1302 | 9 1301 l
1303 | 9 1302 l
1304 | 9 1303 l
1305 | 9 1304 l
1306 | 9 1305 l
1307 | 9 1306 l
1308 | 9 1307 l
1309 | 9 1308 l
1310 | 9 1309 l
1311 | 9 1310 l
1312 | 9 1311 l
1313 | 9 1312 l
1314 | 9 1313 l
1315 | 9 1314 l
1316 | 9 1315 l
1317 | 9 1316 l
1318 | 9 1317 l
1319 | 9 1318 l
1320 | 9 1319 l
1321 | 9 1320 l
1322 | 9 1321 l
1323 | 9 1322 l
1324 | 9 1323 l
1325 | 9 1324 l
1326 | 9 1325 l
1327 | 9 1326 l
1328 | 9 1327 l
1329 | 9 1328 l
1330 | 9 1329 l
1331 | 9 1330 l
1332 | 9 1331 l
1333 | 9 1332 l
1334 | 9 1333 l
1335 | 9 1334 l
1336 | 9 1335 l
1337 | 9 1336 l
1338 | 9 1337 l
1339 | 9 1338 l
1340 | 9 1339 l
1341 | 9 1340 l
1342 | 9 1341 l
1343 | 9 1342 l
1344 | 9 1343 l
1345 | 9 1344 l
1346 | 9 1345 l
1347 | 9 1346 l
1348 | 9 1347 l
1349 | 9 1348 l
1350 | 9 1349 l
1351 | 9 1350 l
1352 | 9 1351 l
1353 | 9 1352 l
1354 | 9 1353 l
1355 | 9 1354 l
1356 | 9 1355 l
1357 | 9 1356 l
1358 | 9 1357 l
1359 | 9 1358 l
1360 | 9 1359 l
1361 | 9 1360 l
1362 | 9 1361 l
1363 | 9 1362 l
1364 | 9 1363 l
1365 | 9 1364 l
1366 | 9 1365 l
1367 | 9 1366 l
1368 | 9 1367 l
1369 | 9 1368 l
1370 | 9 1369 l
1371 | 9 1370 l
1372 | 9 1371 l
1373 | 9 1372 l
1374 | 9 1373 l
1375 | 9 1374 l
1376 | 9 1375 l
1377 | 9 1376 l
1378 | 9 1377 l
1379 | 9 1378 l
1380 | 9 1379 l
1381 | 9 1380 l
1382 | 9 1381 l
1383 | 9 1382 l
1384 | 9 1383 l
1385 | 9 1384 l
1386 | 9 1385 l
1387 | 9 1386 l
1388 | 9 1387 l
1389 | 9 1388 l
1390 | 9 1389 l
1391 | 9 1390 l
1392 | 9 1391 l
1393 | 9 1392 l
1394 | 9 1393 l
1395 | 9 1394 l
1396 | 9 1395 l
1397 | 9 1396 l
1398 | 9 1397 l
1399 | 9 1398 l
1400 | 9 1399 l
1401 | 9 1400 l
1402 | 9 1401 l
1403 | 9 1402 l
1404 | 9 1403 l
1405 | 9 1404 l
1406 | 9 1405 l
1407 | 9 1406 l
1408 | 9 1407 l
1409 | 9 1408 l
1410 | 9 1409 l
1411 | 9 1410 l
1412 | 9 1411 l
1413 | 9 1412 l
1414 | 9 1413 l
1415 | 9 1414 l
1416 | 9 1415 l
1417 | 9 1416 l
1418 | 9 1417 l
1419 | 9 1418 l
1420 | 9 1419 l
1421 | 9 1420 l
1422 | 9 1421 l
1423 | 9 1422 l
1424 | 9 1423 l
1425 | 9 1424 l
1426 | 9 1425 l
1427 | 9 1426 l
1428 | 9 1427 l
1429 | 9 1428 l
1430 | 9 1429 l
1431 | 9 1430 l
1432 | 9 1431 l
1433 | 9 1432 l
1434 | 9 1433 l
1435 | 9 1434 l
1436 | 9 1435 l
1437 | 9 1436 l
1438 | 9 1437 l
1439 | 9 1438 l
1440 | 9 1439 l
1441 | 9 1440 l
1442 | 9 1441 l
1443 | 9 1442 l
1444 | 9 1443 l
1445 | 9 1444 l
1446 | 9 1445 l
1447 | 9 1446 l
1448 | 9 1447 l
1449 | 9 1448 l
1450 | 9 1449 l
1451 | 9 1450 l
1452 | 9 1451 l
1453 | 9 1452 l
1454 | 9 1453 l
1455 | 9 1454 l
1456 | 9 1455 l
1457 | 9 1456 l
1458 | 9 1457 l
1459 | 9 1458 l
1460 | 9 1459 l
1461 | 9 1460 l
1462 | 9 1461 l
1463 | 9 1462 l
1464 | 9 1463 l
1465 | 9 1464 l
1466 | 9 1465 l
1467 | 9 1466 l
1468 | 9 1467 l
1469 | 9 1468 l
1470 | 9 1469 l
1471 | 9 1470 l
1472 | 9 1471 l
1473 | 9 1472 l
1474 | 9 1473 l
1475 | 9 1474 l
1476 | 9 1475 l
1477 | 9 1476 l
1478 | 9 1477 l
1479 | 9 1478 l
1480 | 9 1479 l
1481 | 9 1480 l
1482 | 9 1481 l
1483 | 9 1482 l
1484 | 9 1483 l
1485 | 9 1484 l
1486 | 9 1485 l
1487 | 9 1486 l
1488 | 9 1487 l
1489 | 9 1488 l
1490 | 9 1489 l
1491 | 9 1490 l
1492 | 9 1491 l
1493 | 9 1492 l
1494 | 9 1493 l
1495 | 9 1494 l
1496 | 9 1495 l
1497 | 9 1496 l
1498 | 9 1497 l
1499 | 9 1498 l
1500 | 9 1499 l
1501 | 9 1500 l
1502 | 9 1501 l
1503 | 9 1502 l
1504 | 9 1503 l
1505 | 9 1504 l
1506 | 9 1505 l
1507 | 9 1506 l
1508 | 9 1507 l
1509 | 9 1508 l
1510 | 9 1509 l
1511 | 9 1510 l
1512 | 9 1511 l
1513 | 9 1512 l
1514 | 9 1513 l
1515 | 9 1514 l
1516 | 9 1515 l
1517 | 9 1516 l
1518 | 9 1517 l
1519 | 9 1518 l
1520 | 9 1519 l
1521 | 9 1520 l
1522 | 9 1521 l
1523 | 9 1522 l
1524 | 9 1523 l
1525 | 9 1524 l
1526 | 9 1525 l
1527 | 9 1526 l
1528 | 9 1527 l
1529 | 9 1528 l
1530 | 9 1529 l
1531 | 9 1530 l
1532 | 9 1531 l
1533 | 9 1532 l
1534 | 9 1533 l
1535 | 9 1534 l
1536 | 9 1535 l
1537 | 9 1536 l
1538 | 9 1537 l
1539 | 9 1538 l
1540 | 9 1539 l
1541 | 9 1540 l
1542 | 9 1541 l
1543 | 9 1542 l
1544 | 9 1543 l
1545 | 9 1544 l
1546 | 9 1545 l
1547 | 9 1546 l
1548 | 9 1547 l
1549 | 9 1548 l
1550 | 9 1549 l
1551 | 9 1550 l
1552 | 9 1551 l
1553 | 9 1552 l
1554 | 9 1553 l
1555 | 9 1554 l
1556 | 9 1555 l
1557 | 9 1556 l
1558 | 9 1557 l
1559 | 9 1558 l
1560 | 9 1559 l
1561 | 9 1560 l
1562 | 9 1561 l
1563 | 9 1562 l
1564 | 9 1563 l
1565 | 9 1564 l
1566 | 9 1565 l
1567 | 9 1566 l
1568 | 9 1567 l
1569 | 9 1568 l
1570 | 9 1569 l
1571 | 9 1570 l
1572 | 9 1571 l
1573 | 9 1572 l
1574 | 9 1573 l
1575 | 9 1574 l
1576 | 9 1575 l
1577 | 9 1576 l
1578 | 9 1577 l
1579 | 9 1578 l
1580 | 9 1579 l
1581 | 9 1580 l
1582 | 9 1581 l
1583 | 9 1582 l
1584 | 9 1583 l
1585 | 9 1584 l
1586 | 9 1585 l
1587 | 9 1586 l
1588 | 9 1587 l
1589 | 9 1588 l
1590 | 9 1589 l
1591 |
--------------------------------------------------------------------------------
/splits/odom/test_files_10.txt:
--------------------------------------------------------------------------------
1 | 10 0 l
2 | 10 1 l
3 | 10 2 l
4 | 10 3 l
5 | 10 4 l
6 | 10 5 l
7 | 10 6 l
8 | 10 7 l
9 | 10 8 l
10 | 10 9 l
11 | 10 10 l
12 | 10 11 l
13 | 10 12 l
14 | 10 13 l
15 | 10 14 l
16 | 10 15 l
17 | 10 16 l
18 | 10 17 l
19 | 10 18 l
20 | 10 19 l
21 | 10 20 l
22 | 10 21 l
23 | 10 22 l
24 | 10 23 l
25 | 10 24 l
26 | 10 25 l
27 | 10 26 l
28 | 10 27 l
29 | 10 28 l
30 | 10 29 l
31 | 10 30 l
32 | 10 31 l
33 | 10 32 l
34 | 10 33 l
35 | 10 34 l
36 | 10 35 l
37 | 10 36 l
38 | 10 37 l
39 | 10 38 l
40 | 10 39 l
41 | 10 40 l
42 | 10 41 l
43 | 10 42 l
44 | 10 43 l
45 | 10 44 l
46 | 10 45 l
47 | 10 46 l
48 | 10 47 l
49 | 10 48 l
50 | 10 49 l
51 | 10 50 l
52 | 10 51 l
53 | 10 52 l
54 | 10 53 l
55 | 10 54 l
56 | 10 55 l
57 | 10 56 l
58 | 10 57 l
59 | 10 58 l
60 | 10 59 l
61 | 10 60 l
62 | 10 61 l
63 | 10 62 l
64 | 10 63 l
65 | 10 64 l
66 | 10 65 l
67 | 10 66 l
68 | 10 67 l
69 | 10 68 l
70 | 10 69 l
71 | 10 70 l
72 | 10 71 l
73 | 10 72 l
74 | 10 73 l
75 | 10 74 l
76 | 10 75 l
77 | 10 76 l
78 | 10 77 l
79 | 10 78 l
80 | 10 79 l
81 | 10 80 l
82 | 10 81 l
83 | 10 82 l
84 | 10 83 l
85 | 10 84 l
86 | 10 85 l
87 | 10 86 l
88 | 10 87 l
89 | 10 88 l
90 | 10 89 l
91 | 10 90 l
92 | 10 91 l
93 | 10 92 l
94 | 10 93 l
95 | 10 94 l
96 | 10 95 l
97 | 10 96 l
98 | 10 97 l
99 | 10 98 l
100 | 10 99 l
101 | 10 100 l
102 | 10 101 l
103 | 10 102 l
104 | 10 103 l
105 | 10 104 l
106 | 10 105 l
107 | 10 106 l
108 | 10 107 l
109 | 10 108 l
110 | 10 109 l
111 | 10 110 l
112 | 10 111 l
113 | 10 112 l
114 | 10 113 l
115 | 10 114 l
116 | 10 115 l
117 | 10 116 l
118 | 10 117 l
119 | 10 118 l
120 | 10 119 l
121 | 10 120 l
122 | 10 121 l
123 | 10 122 l
124 | 10 123 l
125 | 10 124 l
126 | 10 125 l
127 | 10 126 l
128 | 10 127 l
129 | 10 128 l
130 | 10 129 l
131 | 10 130 l
132 | 10 131 l
133 | 10 132 l
134 | 10 133 l
135 | 10 134 l
136 | 10 135 l
137 | 10 136 l
138 | 10 137 l
139 | 10 138 l
140 | 10 139 l
141 | 10 140 l
142 | 10 141 l
143 | 10 142 l
144 | 10 143 l
145 | 10 144 l
146 | 10 145 l
147 | 10 146 l
148 | 10 147 l
149 | 10 148 l
150 | 10 149 l
151 | 10 150 l
152 | 10 151 l
153 | 10 152 l
154 | 10 153 l
155 | 10 154 l
156 | 10 155 l
157 | 10 156 l
158 | 10 157 l
159 | 10 158 l
160 | 10 159 l
161 | 10 160 l
162 | 10 161 l
163 | 10 162 l
164 | 10 163 l
165 | 10 164 l
166 | 10 165 l
167 | 10 166 l
168 | 10 167 l
169 | 10 168 l
170 | 10 169 l
171 | 10 170 l
172 | 10 171 l
173 | 10 172 l
174 | 10 173 l
175 | 10 174 l
176 | 10 175 l
177 | 10 176 l
178 | 10 177 l
179 | 10 178 l
180 | 10 179 l
181 | 10 180 l
182 | 10 181 l
183 | 10 182 l
184 | 10 183 l
185 | 10 184 l
186 | 10 185 l
187 | 10 186 l
188 | 10 187 l
189 | 10 188 l
190 | 10 189 l
191 | 10 190 l
192 | 10 191 l
193 | 10 192 l
194 | 10 193 l
195 | 10 194 l
196 | 10 195 l
197 | 10 196 l
198 | 10 197 l
199 | 10 198 l
200 | 10 199 l
201 | 10 200 l
202 | 10 201 l
203 | 10 202 l
204 | 10 203 l
205 | 10 204 l
206 | 10 205 l
207 | 10 206 l
208 | 10 207 l
209 | 10 208 l
210 | 10 209 l
211 | 10 210 l
212 | 10 211 l
213 | 10 212 l
214 | 10 213 l
215 | 10 214 l
216 | 10 215 l
217 | 10 216 l
218 | 10 217 l
219 | 10 218 l
220 | 10 219 l
221 | 10 220 l
222 | 10 221 l
223 | 10 222 l
224 | 10 223 l
225 | 10 224 l
226 | 10 225 l
227 | 10 226 l
228 | 10 227 l
229 | 10 228 l
230 | 10 229 l
231 | 10 230 l
232 | 10 231 l
233 | 10 232 l
234 | 10 233 l
235 | 10 234 l
236 | 10 235 l
237 | 10 236 l
238 | 10 237 l
239 | 10 238 l
240 | 10 239 l
241 | 10 240 l
242 | 10 241 l
243 | 10 242 l
244 | 10 243 l
245 | 10 244 l
246 | 10 245 l
247 | 10 246 l
248 | 10 247 l
249 | 10 248 l
250 | 10 249 l
251 | 10 250 l
252 | 10 251 l
253 | 10 252 l
254 | 10 253 l
255 | 10 254 l
256 | 10 255 l
257 | 10 256 l
258 | 10 257 l
259 | 10 258 l
260 | 10 259 l
261 | 10 260 l
262 | 10 261 l
263 | 10 262 l
264 | 10 263 l
265 | 10 264 l
266 | 10 265 l
267 | 10 266 l
268 | 10 267 l
269 | 10 268 l
270 | 10 269 l
271 | 10 270 l
272 | 10 271 l
273 | 10 272 l
274 | 10 273 l
275 | 10 274 l
276 | 10 275 l
277 | 10 276 l
278 | 10 277 l
279 | 10 278 l
280 | 10 279 l
281 | 10 280 l
282 | 10 281 l
283 | 10 282 l
284 | 10 283 l
285 | 10 284 l
286 | 10 285 l
287 | 10 286 l
288 | 10 287 l
289 | 10 288 l
290 | 10 289 l
291 | 10 290 l
292 | 10 291 l
293 | 10 292 l
294 | 10 293 l
295 | 10 294 l
296 | 10 295 l
297 | 10 296 l
298 | 10 297 l
299 | 10 298 l
300 | 10 299 l
301 | 10 300 l
302 | 10 301 l
303 | 10 302 l
304 | 10 303 l
305 | 10 304 l
306 | 10 305 l
307 | 10 306 l
308 | 10 307 l
309 | 10 308 l
310 | 10 309 l
311 | 10 310 l
312 | 10 311 l
313 | 10 312 l
314 | 10 313 l
315 | 10 314 l
316 | 10 315 l
317 | 10 316 l
318 | 10 317 l
319 | 10 318 l
320 | 10 319 l
321 | 10 320 l
322 | 10 321 l
323 | 10 322 l
324 | 10 323 l
325 | 10 324 l
326 | 10 325 l
327 | 10 326 l
328 | 10 327 l
329 | 10 328 l
330 | 10 329 l
331 | 10 330 l
332 | 10 331 l
333 | 10 332 l
334 | 10 333 l
335 | 10 334 l
336 | 10 335 l
337 | 10 336 l
338 | 10 337 l
339 | 10 338 l
340 | 10 339 l
341 | 10 340 l
342 | 10 341 l
343 | 10 342 l
344 | 10 343 l
345 | 10 344 l
346 | 10 345 l
347 | 10 346 l
348 | 10 347 l
349 | 10 348 l
350 | 10 349 l
351 | 10 350 l
352 | 10 351 l
353 | 10 352 l
354 | 10 353 l
355 | 10 354 l
356 | 10 355 l
357 | 10 356 l
358 | 10 357 l
359 | 10 358 l
360 | 10 359 l
361 | 10 360 l
362 | 10 361 l
363 | 10 362 l
364 | 10 363 l
365 | 10 364 l
366 | 10 365 l
367 | 10 366 l
368 | 10 367 l
369 | 10 368 l
370 | 10 369 l
371 | 10 370 l
372 | 10 371 l
373 | 10 372 l
374 | 10 373 l
375 | 10 374 l
376 | 10 375 l
377 | 10 376 l
378 | 10 377 l
379 | 10 378 l
380 | 10 379 l
381 | 10 380 l
382 | 10 381 l
383 | 10 382 l
384 | 10 383 l
385 | 10 384 l
386 | 10 385 l
387 | 10 386 l
388 | 10 387 l
389 | 10 388 l
390 | 10 389 l
391 | 10 390 l
392 | 10 391 l
393 | 10 392 l
394 | 10 393 l
395 | 10 394 l
396 | 10 395 l
397 | 10 396 l
398 | 10 397 l
399 | 10 398 l
400 | 10 399 l
401 | 10 400 l
402 | 10 401 l
403 | 10 402 l
404 | 10 403 l
405 | 10 404 l
406 | 10 405 l
407 | 10 406 l
408 | 10 407 l
409 | 10 408 l
410 | 10 409 l
411 | 10 410 l
412 | 10 411 l
413 | 10 412 l
414 | 10 413 l
415 | 10 414 l
416 | 10 415 l
417 | 10 416 l
418 | 10 417 l
419 | 10 418 l
420 | 10 419 l
421 | 10 420 l
422 | 10 421 l
423 | 10 422 l
424 | 10 423 l
425 | 10 424 l
426 | 10 425 l
427 | 10 426 l
428 | 10 427 l
429 | 10 428 l
430 | 10 429 l
431 | 10 430 l
432 | 10 431 l
433 | 10 432 l
434 | 10 433 l
435 | 10 434 l
436 | 10 435 l
437 | 10 436 l
438 | 10 437 l
439 | 10 438 l
440 | 10 439 l
441 | 10 440 l
442 | 10 441 l
443 | 10 442 l
444 | 10 443 l
445 | 10 444 l
446 | 10 445 l
447 | 10 446 l
448 | 10 447 l
449 | 10 448 l
450 | 10 449 l
451 | 10 450 l
452 | 10 451 l
453 | 10 452 l
454 | 10 453 l
455 | 10 454 l
456 | 10 455 l
457 | 10 456 l
458 | 10 457 l
459 | 10 458 l
460 | 10 459 l
461 | 10 460 l
462 | 10 461 l
463 | 10 462 l
464 | 10 463 l
465 | 10 464 l
466 | 10 465 l
467 | 10 466 l
468 | 10 467 l
469 | 10 468 l
470 | 10 469 l
471 | 10 470 l
472 | 10 471 l
473 | 10 472 l
474 | 10 473 l
475 | 10 474 l
476 | 10 475 l
477 | 10 476 l
478 | 10 477 l
479 | 10 478 l
480 | 10 479 l
481 | 10 480 l
482 | 10 481 l
483 | 10 482 l
484 | 10 483 l
485 | 10 484 l
486 | 10 485 l
487 | 10 486 l
488 | 10 487 l
489 | 10 488 l
490 | 10 489 l
491 | 10 490 l
492 | 10 491 l
493 | 10 492 l
494 | 10 493 l
495 | 10 494 l
496 | 10 495 l
497 | 10 496 l
498 | 10 497 l
499 | 10 498 l
500 | 10 499 l
501 | 10 500 l
502 | 10 501 l
503 | 10 502 l
504 | 10 503 l
505 | 10 504 l
506 | 10 505 l
507 | 10 506 l
508 | 10 507 l
509 | 10 508 l
510 | 10 509 l
511 | 10 510 l
512 | 10 511 l
513 | 10 512 l
514 | 10 513 l
515 | 10 514 l
516 | 10 515 l
517 | 10 516 l
518 | 10 517 l
519 | 10 518 l
520 | 10 519 l
521 | 10 520 l
522 | 10 521 l
523 | 10 522 l
524 | 10 523 l
525 | 10 524 l
526 | 10 525 l
527 | 10 526 l
528 | 10 527 l
529 | 10 528 l
530 | 10 529 l
531 | 10 530 l
532 | 10 531 l
533 | 10 532 l
534 | 10 533 l
535 | 10 534 l
536 | 10 535 l
537 | 10 536 l
538 | 10 537 l
539 | 10 538 l
540 | 10 539 l
541 | 10 540 l
542 | 10 541 l
543 | 10 542 l
544 | 10 543 l
545 | 10 544 l
546 | 10 545 l
547 | 10 546 l
548 | 10 547 l
549 | 10 548 l
550 | 10 549 l
551 | 10 550 l
552 | 10 551 l
553 | 10 552 l
554 | 10 553 l
555 | 10 554 l
556 | 10 555 l
557 | 10 556 l
558 | 10 557 l
559 | 10 558 l
560 | 10 559 l
561 | 10 560 l
562 | 10 561 l
563 | 10 562 l
564 | 10 563 l
565 | 10 564 l
566 | 10 565 l
567 | 10 566 l
568 | 10 567 l
569 | 10 568 l
570 | 10 569 l
571 | 10 570 l
572 | 10 571 l
573 | 10 572 l
574 | 10 573 l
575 | 10 574 l
576 | 10 575 l
577 | 10 576 l
578 | 10 577 l
579 | 10 578 l
580 | 10 579 l
581 | 10 580 l
582 | 10 581 l
583 | 10 582 l
584 | 10 583 l
585 | 10 584 l
586 | 10 585 l
587 | 10 586 l
588 | 10 587 l
589 | 10 588 l
590 | 10 589 l
591 | 10 590 l
592 | 10 591 l
593 | 10 592 l
594 | 10 593 l
595 | 10 594 l
596 | 10 595 l
597 | 10 596 l
598 | 10 597 l
599 | 10 598 l
600 | 10 599 l
601 | 10 600 l
602 | 10 601 l
603 | 10 602 l
604 | 10 603 l
605 | 10 604 l
606 | 10 605 l
607 | 10 606 l
608 | 10 607 l
609 | 10 608 l
610 | 10 609 l
611 | 10 610 l
612 | 10 611 l
613 | 10 612 l
614 | 10 613 l
615 | 10 614 l
616 | 10 615 l
617 | 10 616 l
618 | 10 617 l
619 | 10 618 l
620 | 10 619 l
621 | 10 620 l
622 | 10 621 l
623 | 10 622 l
624 | 10 623 l
625 | 10 624 l
626 | 10 625 l
627 | 10 626 l
628 | 10 627 l
629 | 10 628 l
630 | 10 629 l
631 | 10 630 l
632 | 10 631 l
633 | 10 632 l
634 | 10 633 l
635 | 10 634 l
636 | 10 635 l
637 | 10 636 l
638 | 10 637 l
639 | 10 638 l
640 | 10 639 l
641 | 10 640 l
642 | 10 641 l
643 | 10 642 l
644 | 10 643 l
645 | 10 644 l
646 | 10 645 l
647 | 10 646 l
648 | 10 647 l
649 | 10 648 l
650 | 10 649 l
651 | 10 650 l
652 | 10 651 l
653 | 10 652 l
654 | 10 653 l
655 | 10 654 l
656 | 10 655 l
657 | 10 656 l
658 | 10 657 l
659 | 10 658 l
660 | 10 659 l
661 | 10 660 l
662 | 10 661 l
663 | 10 662 l
664 | 10 663 l
665 | 10 664 l
666 | 10 665 l
667 | 10 666 l
668 | 10 667 l
669 | 10 668 l
670 | 10 669 l
671 | 10 670 l
672 | 10 671 l
673 | 10 672 l
674 | 10 673 l
675 | 10 674 l
676 | 10 675 l
677 | 10 676 l
678 | 10 677 l
679 | 10 678 l
680 | 10 679 l
681 | 10 680 l
682 | 10 681 l
683 | 10 682 l
684 | 10 683 l
685 | 10 684 l
686 | 10 685 l
687 | 10 686 l
688 | 10 687 l
689 | 10 688 l
690 | 10 689 l
691 | 10 690 l
692 | 10 691 l
693 | 10 692 l
694 | 10 693 l
695 | 10 694 l
696 | 10 695 l
697 | 10 696 l
698 | 10 697 l
699 | 10 698 l
700 | 10 699 l
701 | 10 700 l
702 | 10 701 l
703 | 10 702 l
704 | 10 703 l
705 | 10 704 l
706 | 10 705 l
707 | 10 706 l
708 | 10 707 l
709 | 10 708 l
710 | 10 709 l
711 | 10 710 l
712 | 10 711 l
713 | 10 712 l
714 | 10 713 l
715 | 10 714 l
716 | 10 715 l
717 | 10 716 l
718 | 10 717 l
719 | 10 718 l
720 | 10 719 l
721 | 10 720 l
722 | 10 721 l
723 | 10 722 l
724 | 10 723 l
725 | 10 724 l
726 | 10 725 l
727 | 10 726 l
728 | 10 727 l
729 | 10 728 l
730 | 10 729 l
731 | 10 730 l
732 | 10 731 l
733 | 10 732 l
734 | 10 733 l
735 | 10 734 l
736 | 10 735 l
737 | 10 736 l
738 | 10 737 l
739 | 10 738 l
740 | 10 739 l
741 | 10 740 l
742 | 10 741 l
743 | 10 742 l
744 | 10 743 l
745 | 10 744 l
746 | 10 745 l
747 | 10 746 l
748 | 10 747 l
749 | 10 748 l
750 | 10 749 l
751 | 10 750 l
752 | 10 751 l
753 | 10 752 l
754 | 10 753 l
755 | 10 754 l
756 | 10 755 l
757 | 10 756 l
758 | 10 757 l
759 | 10 758 l
760 | 10 759 l
761 | 10 760 l
762 | 10 761 l
763 | 10 762 l
764 | 10 763 l
765 | 10 764 l
766 | 10 765 l
767 | 10 766 l
768 | 10 767 l
769 | 10 768 l
770 | 10 769 l
771 | 10 770 l
772 | 10 771 l
773 | 10 772 l
774 | 10 773 l
775 | 10 774 l
776 | 10 775 l
777 | 10 776 l
778 | 10 777 l
779 | 10 778 l
780 | 10 779 l
781 | 10 780 l
782 | 10 781 l
783 | 10 782 l
784 | 10 783 l
785 | 10 784 l
786 | 10 785 l
787 | 10 786 l
788 | 10 787 l
789 | 10 788 l
790 | 10 789 l
791 | 10 790 l
792 | 10 791 l
793 | 10 792 l
794 | 10 793 l
795 | 10 794 l
796 | 10 795 l
797 | 10 796 l
798 | 10 797 l
799 | 10 798 l
800 | 10 799 l
801 | 10 800 l
802 | 10 801 l
803 | 10 802 l
804 | 10 803 l
805 | 10 804 l
806 | 10 805 l
807 | 10 806 l
808 | 10 807 l
809 | 10 808 l
810 | 10 809 l
811 | 10 810 l
812 | 10 811 l
813 | 10 812 l
814 | 10 813 l
815 | 10 814 l
816 | 10 815 l
817 | 10 816 l
818 | 10 817 l
819 | 10 818 l
820 | 10 819 l
821 | 10 820 l
822 | 10 821 l
823 | 10 822 l
824 | 10 823 l
825 | 10 824 l
826 | 10 825 l
827 | 10 826 l
828 | 10 827 l
829 | 10 828 l
830 | 10 829 l
831 | 10 830 l
832 | 10 831 l
833 | 10 832 l
834 | 10 833 l
835 | 10 834 l
836 | 10 835 l
837 | 10 836 l
838 | 10 837 l
839 | 10 838 l
840 | 10 839 l
841 | 10 840 l
842 | 10 841 l
843 | 10 842 l
844 | 10 843 l
845 | 10 844 l
846 | 10 845 l
847 | 10 846 l
848 | 10 847 l
849 | 10 848 l
850 | 10 849 l
851 | 10 850 l
852 | 10 851 l
853 | 10 852 l
854 | 10 853 l
855 | 10 854 l
856 | 10 855 l
857 | 10 856 l
858 | 10 857 l
859 | 10 858 l
860 | 10 859 l
861 | 10 860 l
862 | 10 861 l
863 | 10 862 l
864 | 10 863 l
865 | 10 864 l
866 | 10 865 l
867 | 10 866 l
868 | 10 867 l
869 | 10 868 l
870 | 10 869 l
871 | 10 870 l
872 | 10 871 l
873 | 10 872 l
874 | 10 873 l
875 | 10 874 l
876 | 10 875 l
877 | 10 876 l
878 | 10 877 l
879 | 10 878 l
880 | 10 879 l
881 | 10 880 l
882 | 10 881 l
883 | 10 882 l
884 | 10 883 l
885 | 10 884 l
886 | 10 885 l
887 | 10 886 l
888 | 10 887 l
889 | 10 888 l
890 | 10 889 l
891 | 10 890 l
892 | 10 891 l
893 | 10 892 l
894 | 10 893 l
895 | 10 894 l
896 | 10 895 l
897 | 10 896 l
898 | 10 897 l
899 | 10 898 l
900 | 10 899 l
901 | 10 900 l
902 | 10 901 l
903 | 10 902 l
904 | 10 903 l
905 | 10 904 l
906 | 10 905 l
907 | 10 906 l
908 | 10 907 l
909 | 10 908 l
910 | 10 909 l
911 | 10 910 l
912 | 10 911 l
913 | 10 912 l
914 | 10 913 l
915 | 10 914 l
916 | 10 915 l
917 | 10 916 l
918 | 10 917 l
919 | 10 918 l
920 | 10 919 l
921 | 10 920 l
922 | 10 921 l
923 | 10 922 l
924 | 10 923 l
925 | 10 924 l
926 | 10 925 l
927 | 10 926 l
928 | 10 927 l
929 | 10 928 l
930 | 10 929 l
931 | 10 930 l
932 | 10 931 l
933 | 10 932 l
934 | 10 933 l
935 | 10 934 l
936 | 10 935 l
937 | 10 936 l
938 | 10 937 l
939 | 10 938 l
940 | 10 939 l
941 | 10 940 l
942 | 10 941 l
943 | 10 942 l
944 | 10 943 l
945 | 10 944 l
946 | 10 945 l
947 | 10 946 l
948 | 10 947 l
949 | 10 948 l
950 | 10 949 l
951 | 10 950 l
952 | 10 951 l
953 | 10 952 l
954 | 10 953 l
955 | 10 954 l
956 | 10 955 l
957 | 10 956 l
958 | 10 957 l
959 | 10 958 l
960 | 10 959 l
961 | 10 960 l
962 | 10 961 l
963 | 10 962 l
964 | 10 963 l
965 | 10 964 l
966 | 10 965 l
967 | 10 966 l
968 | 10 967 l
969 | 10 968 l
970 | 10 969 l
971 | 10 970 l
972 | 10 971 l
973 | 10 972 l
974 | 10 973 l
975 | 10 974 l
976 | 10 975 l
977 | 10 976 l
978 | 10 977 l
979 | 10 978 l
980 | 10 979 l
981 | 10 980 l
982 | 10 981 l
983 | 10 982 l
984 | 10 983 l
985 | 10 984 l
986 | 10 985 l
987 | 10 986 l
988 | 10 987 l
989 | 10 988 l
990 | 10 989 l
991 | 10 990 l
992 | 10 991 l
993 | 10 992 l
994 | 10 993 l
995 | 10 994 l
996 | 10 995 l
997 | 10 996 l
998 | 10 997 l
999 | 10 998 l
1000 | 10 999 l
1001 | 10 1000 l
1002 | 10 1001 l
1003 | 10 1002 l
1004 | 10 1003 l
1005 | 10 1004 l
1006 | 10 1005 l
1007 | 10 1006 l
1008 | 10 1007 l
1009 | 10 1008 l
1010 | 10 1009 l
1011 | 10 1010 l
1012 | 10 1011 l
1013 | 10 1012 l
1014 | 10 1013 l
1015 | 10 1014 l
1016 | 10 1015 l
1017 | 10 1016 l
1018 | 10 1017 l
1019 | 10 1018 l
1020 | 10 1019 l
1021 | 10 1020 l
1022 | 10 1021 l
1023 | 10 1022 l
1024 | 10 1023 l
1025 | 10 1024 l
1026 | 10 1025 l
1027 | 10 1026 l
1028 | 10 1027 l
1029 | 10 1028 l
1030 | 10 1029 l
1031 | 10 1030 l
1032 | 10 1031 l
1033 | 10 1032 l
1034 | 10 1033 l
1035 | 10 1034 l
1036 | 10 1035 l
1037 | 10 1036 l
1038 | 10 1037 l
1039 | 10 1038 l
1040 | 10 1039 l
1041 | 10 1040 l
1042 | 10 1041 l
1043 | 10 1042 l
1044 | 10 1043 l
1045 | 10 1044 l
1046 | 10 1045 l
1047 | 10 1046 l
1048 | 10 1047 l
1049 | 10 1048 l
1050 | 10 1049 l
1051 | 10 1050 l
1052 | 10 1051 l
1053 | 10 1052 l
1054 | 10 1053 l
1055 | 10 1054 l
1056 | 10 1055 l
1057 | 10 1056 l
1058 | 10 1057 l
1059 | 10 1058 l
1060 | 10 1059 l
1061 | 10 1060 l
1062 | 10 1061 l
1063 | 10 1062 l
1064 | 10 1063 l
1065 | 10 1064 l
1066 | 10 1065 l
1067 | 10 1066 l
1068 | 10 1067 l
1069 | 10 1068 l
1070 | 10 1069 l
1071 | 10 1070 l
1072 | 10 1071 l
1073 | 10 1072 l
1074 | 10 1073 l
1075 | 10 1074 l
1076 | 10 1075 l
1077 | 10 1076 l
1078 | 10 1077 l
1079 | 10 1078 l
1080 | 10 1079 l
1081 | 10 1080 l
1082 | 10 1081 l
1083 | 10 1082 l
1084 | 10 1083 l
1085 | 10 1084 l
1086 | 10 1085 l
1087 | 10 1086 l
1088 | 10 1087 l
1089 | 10 1088 l
1090 | 10 1089 l
1091 | 10 1090 l
1092 | 10 1091 l
1093 | 10 1092 l
1094 | 10 1093 l
1095 | 10 1094 l
1096 | 10 1095 l
1097 | 10 1096 l
1098 | 10 1097 l
1099 | 10 1098 l
1100 | 10 1099 l
1101 | 10 1100 l
1102 | 10 1101 l
1103 | 10 1102 l
1104 | 10 1103 l
1105 | 10 1104 l
1106 | 10 1105 l
1107 | 10 1106 l
1108 | 10 1107 l
1109 | 10 1108 l
1110 | 10 1109 l
1111 | 10 1110 l
1112 | 10 1111 l
1113 | 10 1112 l
1114 | 10 1113 l
1115 | 10 1114 l
1116 | 10 1115 l
1117 | 10 1116 l
1118 | 10 1117 l
1119 | 10 1118 l
1120 | 10 1119 l
1121 | 10 1120 l
1122 | 10 1121 l
1123 | 10 1122 l
1124 | 10 1123 l
1125 | 10 1124 l
1126 | 10 1125 l
1127 | 10 1126 l
1128 | 10 1127 l
1129 | 10 1128 l
1130 | 10 1129 l
1131 | 10 1130 l
1132 | 10 1131 l
1133 | 10 1132 l
1134 | 10 1133 l
1135 | 10 1134 l
1136 | 10 1135 l
1137 | 10 1136 l
1138 | 10 1137 l
1139 | 10 1138 l
1140 | 10 1139 l
1141 | 10 1140 l
1142 | 10 1141 l
1143 | 10 1142 l
1144 | 10 1143 l
1145 | 10 1144 l
1146 | 10 1145 l
1147 | 10 1146 l
1148 | 10 1147 l
1149 | 10 1148 l
1150 | 10 1149 l
1151 | 10 1150 l
1152 | 10 1151 l
1153 | 10 1152 l
1154 | 10 1153 l
1155 | 10 1154 l
1156 | 10 1155 l
1157 | 10 1156 l
1158 | 10 1157 l
1159 | 10 1158 l
1160 | 10 1159 l
1161 | 10 1160 l
1162 | 10 1161 l
1163 | 10 1162 l
1164 | 10 1163 l
1165 | 10 1164 l
1166 | 10 1165 l
1167 | 10 1166 l
1168 | 10 1167 l
1169 | 10 1168 l
1170 | 10 1169 l
1171 | 10 1170 l
1172 | 10 1171 l
1173 | 10 1172 l
1174 | 10 1173 l
1175 | 10 1174 l
1176 | 10 1175 l
1177 | 10 1176 l
1178 | 10 1177 l
1179 | 10 1178 l
1180 | 10 1179 l
1181 | 10 1180 l
1182 | 10 1181 l
1183 | 10 1182 l
1184 | 10 1183 l
1185 | 10 1184 l
1186 | 10 1185 l
1187 | 10 1186 l
1188 | 10 1187 l
1189 | 10 1188 l
1190 | 10 1189 l
1191 | 10 1190 l
1192 | 10 1191 l
1193 | 10 1192 l
1194 | 10 1193 l
1195 | 10 1194 l
1196 | 10 1195 l
1197 | 10 1196 l
1198 | 10 1197 l
1199 | 10 1198 l
1200 | 10 1199 l
1201 |
--------------------------------------------------------------------------------
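Note on the odometry split files above: each record is a whitespace-separated "<sequence> <frame_index> <side>" triple, e.g. "10 0 l". A minimal parsing sketch follows; interpreting the third field as the left camera is an assumption based on the usual KITTI loader conventions, and parse_split_line is a hypothetical helper, not part of this repository.

# Minimal sketch: parse one record of an odometry split file such as
# splits/odom/test_files_10.txt. Each line is "<sequence> <frame_index> <side>",
# e.g. "10 0 l" -- presumably sequence 10, frame 0, left ("l") camera.
# parse_split_line is a hypothetical helper, not part of this repository.
def parse_split_line(line):
    sequence, frame_index, side = line.split()
    return int(sequence), int(frame_index), side

assert parse_split_line("10 0 l") == (10, 0, "l")
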
/test_simple.py:
--------------------------------------------------------------------------------
1 | # Copyright Niantic 2020. Patent Pending. All rights reserved.
2 | #
3 | # This software is licensed under the terms of the DepthHints licence
4 | # which allows for non-commercial use only, the full terms of which are made
5 | # available in the LICENSE file.
6 |
7 | from __future__ import absolute_import, division, print_function
8 |
9 | import os
10 | import sys
11 | import glob
12 | import argparse
13 | import numpy as np
14 | import PIL.Image as pil
15 | import matplotlib as mpl
16 | import matplotlib.cm as cm
17 |
18 | import torch
19 | from torchvision import transforms, datasets
20 |
21 | import networks
22 | from layers import disp_to_depth
23 | from utils import download_model_if_doesnt_exist
24 |
25 |
26 | def parse_args():
27 | parser = argparse.ArgumentParser(
28 | description='Simple testing function for Depth Hints models.')
29 |
30 | parser.add_argument('--image_path', type=str,
31 | help='path to a test image or folder of images', required=True)
32 | parser.add_argument('--model_path', type=str,
33 | help='path to a pretrained model to use', required=True)
34 | parser.add_argument('--num_layers', help='number of resnet layers in the model',
35 | type=int, default=18, choices=[18, 50])
36 | parser.add_argument('--ext', type=str,
37 | help='image extension to search for in folder', default="jpg")
38 | parser.add_argument("--no_cuda",
39 | help='if set, disables CUDA',
40 | action='store_true')
41 |
42 | return parser.parse_args()
43 |
44 |
45 | def test_simple(args):
46 | """Function to predict for a single image or folder of images
47 | """
48 | assert args.model_path is not None, \
49 | "You must specify the --model_path parameter"
50 |
51 | if torch.cuda.is_available() and not args.no_cuda:
52 | device = torch.device("cuda")
53 | else:
54 | device = torch.device("cpu")
55 |
56 | model_path = args.model_path
57 | print("-> Loading model from ", model_path)
58 | encoder_path = os.path.join(model_path, "encoder.pth")
59 | depth_decoder_path = os.path.join(model_path, "depth.pth")
60 |
61 | # LOADING PRETRAINED MODEL
62 | print(" Loading pretrained encoder with {} layers".format(args.num_layers))
63 | encoder = networks.ResnetEncoder(args.num_layers, False)
64 | loaded_dict_enc = torch.load(encoder_path, map_location=device)
65 |
66 | # extract the height and width of image that this model was trained with
67 | feed_height = loaded_dict_enc['height']
68 | feed_width = loaded_dict_enc['width']
69 | filtered_dict_enc = {k: v for k, v in loaded_dict_enc.items() if k in encoder.state_dict()}
70 | encoder.load_state_dict(filtered_dict_enc)
71 | encoder.to(device)
72 | encoder.eval()
73 |
74 | print(" Loading pretrained decoder")
75 | depth_decoder = networks.DepthDecoder(
76 | num_ch_enc=encoder.num_ch_enc, scales=range(4))
77 |
78 | loaded_dict = torch.load(depth_decoder_path, map_location=device)
79 | depth_decoder.load_state_dict(loaded_dict)
80 |
81 | depth_decoder.to(device)
82 | depth_decoder.eval()
83 |
84 | # FINDING INPUT IMAGES
85 | if os.path.isfile(args.image_path):
86 | # Only testing on a single image
87 | paths = [args.image_path]
88 | output_directory = os.path.dirname(args.image_path)
89 | elif os.path.isdir(args.image_path):
90 | # Searching folder for images
91 | paths = glob.glob(os.path.join(args.image_path, '*.{}'.format(args.ext)))
92 | output_directory = args.image_path
93 | else:
94 | raise Exception("Cannot find args.image_path: {}".format(args.image_path))
95 |
96 | print("-> Predicting on {:d} test images".format(len(paths)))
97 |
98 | # PREDICTING ON EACH IMAGE IN TURN
99 | with torch.no_grad():
100 | for idx, image_path in enumerate(paths):
101 |
102 | if image_path.endswith("_disp.jpg"):
103 | # don't try to predict disparity for a disparity image!
104 | continue
105 |
106 | # Load image and preprocess
107 | input_image = pil.open(image_path).convert('RGB')
108 | original_width, original_height = input_image.size
109 | input_image = input_image.resize((feed_width, feed_height), pil.LANCZOS)
110 | input_image = transforms.ToTensor()(input_image).unsqueeze(0)
111 |
112 | # PREDICTION
113 | input_image = input_image.to(device)
114 | features = encoder(input_image)
115 | outputs = depth_decoder(features)
116 |
117 | disp = outputs[("disp", 0)]
118 | disp_resized = torch.nn.functional.interpolate(
119 | disp, (original_height, original_width), mode="bilinear", align_corners=False)
120 |
121 | # Saving numpy file
122 | output_name = os.path.splitext(os.path.basename(image_path))[0]
123 | name_dest_npy = os.path.join(output_directory, "{}_disp.npy".format(output_name))
124 | scaled_disp, _ = disp_to_depth(disp, 0.1, 100)
125 | np.save(name_dest_npy, scaled_disp.cpu().numpy())
126 |
127 | # Saving colormapped depth image
128 | disp_resized_np = disp_resized.squeeze().cpu().numpy()
129 | vmax = np.percentile(disp_resized_np, 95)
130 | normalizer = mpl.colors.Normalize(vmin=disp_resized_np.min(), vmax=vmax)
131 | mapper = cm.ScalarMappable(norm=normalizer, cmap='magma')
132 | colormapped_im = (mapper.to_rgba(disp_resized_np)[:, :, :3] * 255).astype(np.uint8)
133 | im = pil.fromarray(colormapped_im)
134 |
135 | name_dest_im = os.path.join(output_directory, "{}_disp.jpeg".format(output_name))
136 | im.save(name_dest_im)
137 |
138 | print(" Processed {:d} of {:d} images - saved prediction to {}".format(
139 | idx + 1, len(paths), name_dest_im))
140 |
141 | print('-> Done!')
142 |
143 |
144 | if __name__ == '__main__':
145 | args = parse_args()
146 | test_simple(args)
147 |
--------------------------------------------------------------------------------
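Hedged usage note for test_simple.py above: after a run it writes a scaled-disparity array "<name>_disp.npy" and a colormapped preview "<name>_disp.jpeg" next to the input image. The sketch below reloads the .npy; the reciprocal relation between scaled disparity and depth is an assumption based on the usual monodepth-style disp_to_depth in layers.py, and the file name is only an example.

# Minimal sketch (assumptions noted above): reload a prediction saved by
# test_simple.py and convert it to depth.
import numpy as np

scaled_disp = np.load("test_image_disp.npy")   # example file name; shape (1, 1, feed_height, feed_width)
depth = 1.0 / scaled_disp                      # assumes depth is the reciprocal of the scaled disparity
print("median depth (unscaled units):", float(np.median(depth)))
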
/train.py:
--------------------------------------------------------------------------------
1 | # Copyright Niantic 2020. Patent Pending. All rights reserved.
2 | #
3 | # This software is licensed under the terms of the DepthHints licence
4 | # which allows for non-commercial use only, the full terms of which are made
5 | # available in the LICENSE file.
6 |
7 | from __future__ import absolute_import, division, print_function
8 |
9 | from trainer import Trainer
10 | from options import MonodepthOptions
11 |
12 | options = MonodepthOptions()
13 | opts = options.parse()
14 |
15 |
16 | if __name__ == "__main__":
17 | trainer = Trainer(opts)
18 | trainer.train()
19 |
--------------------------------------------------------------------------------
/utils.py:
--------------------------------------------------------------------------------
1 | # Copyright Niantic 2020. Patent Pending. All rights reserved.
2 | #
3 | # This software is licensed under the terms of the DepthHints licence
4 | # which allows for non-commercial use only, the full terms of which are made
5 | # available in the LICENSE file.
6 |
7 | from __future__ import absolute_import, division, print_function
8 | import os
9 | import hashlib
10 | import zipfile
11 | from six.moves import urllib
12 |
13 |
14 | def readlines(filename):
15 | """Read all the lines in a text file and return as a list
16 | """
17 | with open(filename, 'r') as f:
18 | lines = f.read().splitlines()
19 | return lines
20 |
21 |
22 | def normalize_image(x):
23 | """Rescale image pixels to span range [0, 1]
24 | """
25 | ma = float(x.max().cpu().data)
26 | mi = float(x.min().cpu().data)
27 | d = ma - mi if ma != mi else 1e5
28 | return (x - mi) / d
29 |
30 |
31 | def sec_to_hm(t):
32 | """Convert time in seconds to time in hours, minutes and seconds
33 | e.g. 10239 -> (2, 50, 39)
34 | """
35 | t = int(t)
36 | s = t % 60
37 | t //= 60
38 | m = t % 60
39 | t //= 60
40 | return t, m, s
41 |
42 |
43 | def sec_to_hm_str(t):
44 | """Convert time in seconds to a nice string
45 | e.g. 10239 -> '02h50m39s'
46 | """
47 | h, m, s = sec_to_hm(t)
48 | return "{:02d}h{:02d}m{:02d}s".format(h, m, s)
49 |
50 |
51 | def download_model_if_doesnt_exist(model_name):
52 | """If pretrained kitti model doesn't exist, download and unzip it
53 | """
54 | # values are tuples of (<model URL>, <md5 checksum>)
55 | download_paths = {
56 | "mono_640x192":
57 | ("https://storage.googleapis.com/niantic-lon-static/research/monodepth2/mono_640x192.zip",
58 | "a964b8356e08a02d009609d9e3928f7c"),
59 | "stereo_640x192":
60 | ("https://storage.googleapis.com/niantic-lon-static/research/monodepth2/stereo_640x192.zip",
61 | "3dfb76bcff0786e4ec07ac00f658dd07"),
62 | "mono+stereo_640x192":
63 | ("https://storage.googleapis.com/niantic-lon-static/research/monodepth2/mono%2Bstereo_640x192.zip",
64 | "c024d69012485ed05d7eaa9617a96b81"),
65 | "mono_no_pt_640x192":
66 | ("https://storage.googleapis.com/niantic-lon-static/research/monodepth2/mono_no_pt_640x192.zip",
67 | "9c2f071e35027c895a4728358ffc913a"),
68 | "stereo_no_pt_640x192":
69 | ("https://storage.googleapis.com/niantic-lon-static/research/monodepth2/stereo_no_pt_640x192.zip",
70 | "41ec2de112905f85541ac33a854742d1"),
71 | "mono+stereo_no_pt_640x192":
72 | ("https://storage.googleapis.com/niantic-lon-static/research/monodepth2/mono%2Bstereo_no_pt_640x192.zip",
73 | "46c3b824f541d143a45c37df65fbab0a"),
74 | "mono_1024x320":
75 | ("https://storage.googleapis.com/niantic-lon-static/research/monodepth2/mono_1024x320.zip",
76 | "0ab0766efdfeea89a0d9ea8ba90e1e63"),
77 | "stereo_1024x320":
78 | ("https://storage.googleapis.com/niantic-lon-static/research/monodepth2/stereo_1024x320.zip",
79 | "afc2f2126d70cf3fdf26b550898b501a"),
80 | "mono+stereo_1024x320":
81 | ("https://storage.googleapis.com/niantic-lon-static/research/monodepth2/mono%2Bstereo_1024x320.zip",
82 | "cdc5fc9b23513c07d5b19235d9ef08f7"),
83 | }
84 |
85 | if not os.path.exists("models"):
86 | os.makedirs("models")
87 |
88 | model_path = os.path.join("models", model_name)
89 |
90 | def check_file_matches_md5(checksum, fpath):
91 | if not os.path.exists(fpath):
92 | return False
93 | with open(fpath, 'rb') as f:
94 | current_md5checksum = hashlib.md5(f.read()).hexdigest()
95 | return current_md5checksum == checksum
96 |
97 | # see if we have the model already downloaded...
98 | if not os.path.exists(os.path.join(model_path, "encoder.pth")):
99 |
100 | model_url, required_md5checksum = download_paths[model_name]
101 |
102 | if not check_file_matches_md5(required_md5checksum, model_path + ".zip"):
103 | print("-> Downloading pretrained model to {}".format(model_path + ".zip"))
104 | urllib.request.urlretrieve(model_url, model_path + ".zip")
105 |
106 | if not check_file_matches_md5(required_md5checksum, model_path + ".zip"):
107 | print(" Failed to download a file which matches the checksum - quitting")
108 | quit()
109 |
110 | print(" Unzipping model...")
111 | with zipfile.ZipFile(model_path + ".zip", 'r') as f:
112 | f.extractall(model_path)
113 |
114 | print(" Model unzipped to {}".format(model_path))
115 |
--------------------------------------------------------------------------------
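A short, hedged usage sketch for download_model_if_doesnt_exist above: it fetches one of the monodepth2 model zips listed in download_paths, verifies the MD5 checksum, and unzips it under models/<model_name>; test_simple.py then expects encoder.pth and depth.pth inside that folder.

# Minimal usage sketch for the helper above.
import os
from utils import download_model_if_doesnt_exist

model_name = "mono_640x192"                  # any key of download_paths
download_model_if_doesnt_exist(model_name)
model_path = os.path.join("models", model_name)
print("weights in:", model_path)             # test_simple.py loads encoder.pth / depth.pth from here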