├── .github └── workflows │ └── test.yml ├── .gitignore ├── CHANGELOG.md ├── LICENSE.md ├── README.rst ├── docs ├── Makefile ├── api │ ├── index.rst │ ├── pylas.compression.rst │ ├── pylas.errors.rst │ ├── pylas.header.rst │ ├── pylas.lasappender.rst │ ├── pylas.lasdata.rst │ ├── pylas.lasmmap.rst │ ├── pylas.lasreader.rst │ ├── pylas.laswriter.rst │ ├── pylas.lib.rst │ ├── pylas.point.format.rst │ ├── pylas.point.record.rst │ ├── pylas.vlrs.known.rst │ ├── pylas.vlrs.vlr.rst │ └── pylas.vlrs.vlrlist.rst ├── basic.rst ├── conf.py ├── examples.rst ├── index.rst ├── installation.rst ├── intro.rst ├── lessbasic.rst └── migration.rst ├── examples ├── field-randomizer.py └── recursive-split.py ├── noxfile.py ├── pylas ├── __init__.py ├── compression.py ├── errors.py ├── extradims.py ├── header.py ├── lasappender.py ├── lasdata.py ├── lasmmap.py ├── lasreader.py ├── laswriter.py ├── lib.py ├── lib.pyi ├── point │ ├── __init__.py │ ├── dims.py │ ├── format.py │ ├── packing.py │ └── record.py ├── typehints.py ├── utils.py └── vlrs │ ├── __init__.py │ ├── geotiff.py │ ├── known.py │ ├── vlr.py │ └── vlrlist.py ├── pylastests ├── 1_4_w_evlr.las ├── 1_4_w_evlr.laz ├── __init__.py ├── conftest.py ├── extra.laz ├── extrabytes.las ├── plane.laz ├── simple.las ├── simple.laz ├── test1_4.las ├── test_append_mode.py ├── test_chunk_read_write.py ├── test_common.py ├── test_constants.py ├── test_conversion.py ├── test_creation.py ├── test_extrabytes.py ├── test_field_views.py ├── test_header.py ├── test_mmap.py ├── test_modif_1_2.py ├── test_modif_1_4.py ├── test_point_format.py ├── test_reading_1_2.py ├── test_reading_1_4.py ├── test_vlrs.py └── vegetation_1_3.las ├── pytest.ini ├── requirements.txt └── setup.py /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | on: [push, pull_request] 2 | 3 | jobs: 4 | Formatting: 5 | runs-on: ubuntu-latest 6 | 7 | steps: 8 | - name: Clone 9 | uses: actions/checkout@v2 10 | 11 | - 
name: Install black 12 | run: | 13 | python3 -m pip install setuptools 14 | python3 -m pip install black 15 | 16 | - name: Run black check 17 | run: python3 -m black --check pylas 18 | 19 | 20 | Tests: 21 | runs-on: ubuntu-latest 22 | strategy: 23 | matrix: 24 | python-version: [ 3.6, 3.7, 3.8, 3.9] 25 | laz-backend: [ None, lazrs, laszip ] 26 | 27 | steps: 28 | - name: Clone 29 | uses: actions/checkout@v2 30 | 31 | - name: Setup python 32 | uses: actions/setup-python@v2 33 | with: 34 | python-version: ${{ matrix.python-version }} 35 | architecture: x64 36 | 37 | - name: Install With LAZ backend 38 | if: matrix.laz-backend != 'None' 39 | run: pip install .[${{ matrix.laz-backend }}] 40 | 41 | - name: Install Without LAZ backend 42 | if: matrix.laz-backend == 'None' 43 | run: pip install . 44 | 45 | - name: Run Tests 46 | run: | 47 | pip install pytest 48 | pytest pylastests 49 | pytest pylas 50 | pytest docs 51 | 52 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | **/*.las 2 | **/*.laz 3 | **/*.pyc 4 | 5 | .idea/ 6 | .vscode/ 7 | pylastests/.cache/ 8 | pylastests/.pytest_cache/ 9 | pylas.egg-info/ 10 | .pytest_cache/ 11 | .cache/ 12 | .mypy_cache/ 13 | .nox 14 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Not released, target: 1.0.0 2 | 3 | - Added Support for Scaled Extra bytes 4 | 5 | - Added more type hints, which in combination to others changes 6 | should help IDEs provide better autocompletion. 
7 | 8 | - Added pylas.LazBackend to be able to select the backend to use if 9 | many are installed 10 | 11 | - Changed support for the laszip backend to now use 12 | bindings to the laszip c++ API instead of using 13 | the laszip executable found in the PATH 14 | 15 | - Changed: simplified EVLR 16 | Previously we had an EVLR class which was just a VLR subclass. 17 | Now EVLRs are just VLRs, kept in a different list, and written 18 | as EVLRs if there are any. 19 | 20 | - Changed: VLRList now subclasses list 21 | 22 | - Changed Header classes & Vlr 23 | The hierarchy of header classes (RawHeader1_1, RawHeader1_2, etc) 24 | is removed and now only one class exists, LasHeader, where most 25 | of the fields that readers/writers care about are removed and 26 | fields that users care about are kept and put in user-friendly classes. 27 | 28 | The vlrs are now also a part of the header 29 | (it simplifies synchronizing the header, vlrs, point_format and extra bytes vlr) 30 | 31 | - Removed pylas merge 32 | 33 | 34 | # 0.5.0b 35 | 36 | - Added write ('w') and append ('a') mode to pylas.open 37 | - Added ability to read a LAS/LAZ file chunk by chunk 38 | - Added ability to write a LAS/LAZ file chunk by chunk 39 | - Added ability to append points to LAS/LAZ (LAZ only with lazrs) 40 | 41 | - Added SubFieldView class to handle LAS fields which are bit fields 42 | (e.g. return_number) in a more consistent way than was done before. 43 | 44 | - Removed lazperf support for compression/decompression 45 | 46 | 47 | # 0.4.3 48 | 49 | - Added a maximum version (0.2.0) to the lazrs optional dependency.
50 | 51 | # 0.4.2 52 | 53 | - Fixed writing LAZ file with EVLR when piping through laszip.exe 54 | 55 | # 0.4.0 56 | 57 | - Added support for compressing & decompressing with laz-rs (supports parallel processing) 58 | - Added support for compressing using laszip.exe 59 | - Added Overflow & Underflow checks to the scaled x, y, z setters 60 | 61 | # 0.3.4 62 | 63 | - Allow adding extra bytes to all las versions 64 | - Added rounding of x, y, z coordinates when they are set 65 | 66 | # 0.3.2 67 | 68 | - Changed: the x, y, z offsets are no longer changed when new x, y or z 69 | are set 70 | 71 | # 0.3.0 72 | 73 | - Added `supported_version` function which returns the LAS version 74 | supported by pylas 75 | - Added a new `PointFormat` class 76 | - Added a `merge_las` function 77 | - Added `mins` & `maxs` properties to the header, they 78 | provide access to the x, y, z mins and maxs as numpy arrays 79 | 80 | - Fixed: initialize the `header.date` to `date.today()` 81 | 82 | # 0.2.0 83 | 84 | - Updated lazperf to handle lazperf extrabytes 85 | 86 | - Changed: all pylas specific exceptions now inherit the `PylasError` 87 | exception class 88 | 89 | - Fixed extra dimension bug where the extra field was added to 90 | the LasData attribute but not to the points array 91 | 92 | 93 | # 0.1.4 94 | 95 | # 0.1.0 96 | 97 | - Initial version 98 | 99 | 100 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2018, Thomas Montaigu 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer.
11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | * Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | pylas 2 | ----- 3 | 4 | .. important:: 5 | 6 | pylas is deprecated in favor of laspy_ 2.0 7 | 8 | laspy 2.0 being essentially what pylas 1.0 was meant to be, 9 | moving from pylas >= 0.5 to laspy 2.0 should be as simple as 10 | doing a text replace of 'pylas' with 'laspy' 11 | 12 | moving from pylas 0.4.x to laspy 2.0 may require small adjustments 13 | (the same as would have been needed to upgrade to pylas 0.5/1.0) 14 | 15 | .. _laspy: https://github.com/laspy/laspy 16 | 17 | Another way of reading point clouds in the LAS/LAZ in Python. 18 | 19 | .. 
image:: https://readthedocs.org/projects/pylas/badge/?version=latest 20 | :target: https://pylas.readthedocs.io/en/latest/?badge=latest 21 | :alt: Documentation Status 22 | 23 | 24 | .. image:: https://github.com/tmontaigu/pylas/workflows/.github/workflows/test.yml/badge.svg 25 | :target: https://github.com/tmontaigu/pylas/actions?query=workflow%3A.github%2Fworkflows%2Ftest.yml 26 | :alt: CI status 27 | 28 | 29 | Examples 30 | -------- 31 | 32 | Directly read and write las 33 | 34 | .. code:: python 35 | 36 | import pylas 37 | 38 | las = pylas.read('filename.las') 39 | las.points = las.points[las.classification == 2] 40 | las.write('ground.laz') 41 | 42 | Open data to inspect header (opening only reads the header and vlrs) 43 | 44 | .. code:: python 45 | 46 | import pylas 47 | 48 | with pylas.open('filename.las') as f: 49 | print(f"Point format: {f.header.point_format}") 50 | print(f"Number of points: {f.header.point_count}") 51 | print(f"Number of vlrs: {len(f.header.vlrs)}") 52 | 53 | Use the 'chunked' reading & writing features 54 | 55 | .. code:: python 56 | 57 | import pylas 58 | 59 | with pylas.open('big.laz') as input_las: 60 | with pylas.open('ground.laz', mode="w", header=input_las.header) as ground_las: 61 | for points in input_las.chunk_iterator(2_000_000): 62 | ground_las.write_points(points[points.classification == 2]) 63 | 64 | Appending points to existing file 65 | 66 | .. code:: python 67 | 68 | import pylas 69 | 70 | with pylas.open('big.laz') as input_las: 71 | with pylas.open('ground.laz', mode="a") as ground_las: 72 | for points in input_las.chunk_iterator(2_000_000): 73 | ground_las.append_points(points[points.classification == 2]) 74 | 75 | Documentation 76 | ------------- 77 | 78 | Documentation is hosted on ReadTheDocs_ . 79 | 80 | .. 
_ReadTheDocs: http://pylas.readthedocs.io/en/latest/index.html 81 | 82 | 83 | Dependencies & Requirements 84 | --------------------------- 85 | 86 | Supported CPython versions are: 3.6, 3.7, 3.8, 3.9 87 | 88 | pylas supports LAS natively, to support LAZ it needs one of its supported backend to be installed: 89 | 90 | - lazrs 91 | - laszip 92 | 93 | 94 | Installation 95 | ------------ 96 | 97 | .. code-block:: shell 98 | 99 | pip install pylas # without LAZ support 100 | # Or 101 | pip install pylas[laszip] # with LAZ support via LASzip 102 | # Or 103 | pip install pylas[lazrs] # with LAZ support via lazrs 104 | 105 | 106 | See the Installation_ section of the documentation for details: 107 | 108 | .. _Installation: https://pylas.readthedocs.io/en/latest/installation.html 109 | 110 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = pylas 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /docs/api/index.rst: -------------------------------------------------------------------------------- 1 | pylas package 2 | ============= 3 | 4 | .. module:: pylas 5 | 6 | Re-exported functions 7 | --------------------- 8 | 9 | .. autofunction:: read 10 | .. 
autofunction:: open 11 | .. autofunction:: create 12 | .. autofunction:: convert 13 | 14 | 15 | Re-exported classes 16 | ------------------- 17 | 18 | - :class:`.LasHeader` 19 | - :class:`.LasData` 20 | - :class:`.PointFormat` 21 | - :class:`.VLR` 22 | - :class:`.ExtraBytesParams` 23 | - :class:`.LasReader` 24 | - :class:`.LasWriter` 25 | - :class:`.LasAppender` 26 | 27 | 28 | Submodules 29 | ---------- 30 | 31 | .. toctree:: 32 | 33 | pylas.lib 34 | pylas.header 35 | pylas.lasreader 36 | pylas.lasdata 37 | pylas.vlrs.vlrlist 38 | pylas.vlrs.known 39 | pylas.vlrs.vlr 40 | pylas.point.record 41 | pylas.errors 42 | pylas.compression 43 | pylas.point.format 44 | pylas.lasmmap 45 | pylas.lasappender 46 | pylas.laswriter 47 | 48 | -------------------------------------------------------------------------------- /docs/api/pylas.compression.rst: -------------------------------------------------------------------------------- 1 | pylas.compression module 2 | ======================== 3 | 4 | LazBackend 5 | ---------- 6 | 7 | .. autoclass:: pylas.compression.LazBackend 8 | :members: 9 | :undoc-members: 10 | :show-inheritance: -------------------------------------------------------------------------------- /docs/api/pylas.errors.rst: -------------------------------------------------------------------------------- 1 | pylas.errors module 2 | =================== 3 | 4 | .. automodule:: pylas.errors 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: -------------------------------------------------------------------------------- /docs/api/pylas.header.rst: -------------------------------------------------------------------------------- 1 | pylas.header module 2 | ====================== 3 | 4 | 5 | LasHeader 6 | --------- 7 | 8 | .. 
autoclass:: pylas.header.LasHeader 9 | :members: 10 | :undoc-members: 11 | :show-inheritance: 12 | :exclude-members: write_to, read_from, set_compressed, update, partial_reset 13 | -------------------------------------------------------------------------------- /docs/api/pylas.lasappender.rst: -------------------------------------------------------------------------------- 1 | pylas.lasappender 2 | ================= 3 | 4 | 5 | LasAppender 6 | ----------- 7 | 8 | .. autoclass:: pylas.lasappender.LasAppender 9 | :members: -------------------------------------------------------------------------------- /docs/api/pylas.lasdata.rst: -------------------------------------------------------------------------------- 1 | pylas.lasdata module 2 | ====================== 3 | 4 | 5 | LasData 6 | ------- 7 | 8 | .. autoclass:: pylas.lasdata.LasData 9 | :members: 10 | :undoc-members: 11 | :show-inheritance: 12 | -------------------------------------------------------------------------------- /docs/api/pylas.lasmmap.rst: -------------------------------------------------------------------------------- 1 | pylas.lasmmap module 2 | ====================== 3 | 4 | .. automodule:: pylas.lasmmap 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: -------------------------------------------------------------------------------- /docs/api/pylas.lasreader.rst: -------------------------------------------------------------------------------- 1 | pylas.lasreader module 2 | ====================== 3 | 4 | LasReader 5 | --------- 6 | 7 | .. autoclass:: pylas.lasreader.LasReader 8 | :members: 9 | :undoc-members: 10 | :show-inheritance: -------------------------------------------------------------------------------- /docs/api/pylas.laswriter.rst: -------------------------------------------------------------------------------- 1 | pylas.laswriter module 2 | ====================== 3 | 4 | 5 | LasWriter 6 | --------- 7 | 8 | .. 
autoclass:: pylas.laswriter.LasWriter 9 | :members: 10 | :undoc-members: 11 | :show-inheritance: 12 | -------------------------------------------------------------------------------- /docs/api/pylas.lib.rst: -------------------------------------------------------------------------------- 1 | pylas.lib module 2 | ================ 3 | 4 | .. automodule:: pylas.lib 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | -------------------------------------------------------------------------------- /docs/api/pylas.point.format.rst: -------------------------------------------------------------------------------- 1 | pylas.point.format module 2 | ========================= 3 | 4 | .. automodule:: pylas.point.format 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: -------------------------------------------------------------------------------- /docs/api/pylas.point.record.rst: -------------------------------------------------------------------------------- 1 | pylas.point.record module 2 | ========================= 3 | 4 | .. automodule:: pylas.point.record 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | -------------------------------------------------------------------------------- /docs/api/pylas.vlrs.known.rst: -------------------------------------------------------------------------------- 1 | pylas.vlrs.known module 2 | ======================= 3 | 4 | .. automodule:: pylas.vlrs.known 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/api/pylas.vlrs.vlr.rst: -------------------------------------------------------------------------------- 1 | pylas.vlrs.vlr 2 | -------------- 3 | 4 | .. 
autoclass:: pylas.VLR 5 | :members: 6 | :inherited-members: 7 | :undoc-members: 8 | :exclude-members: read_from 9 | -------------------------------------------------------------------------------- /docs/api/pylas.vlrs.vlrlist.rst: -------------------------------------------------------------------------------- 1 | pylas.vlrs.vlrlist module 2 | ========================= 3 | 4 | .. automodule:: pylas.vlrs.vlrlist 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | -------------------------------------------------------------------------------- /docs/basic.rst: -------------------------------------------------------------------------------- 1 | ================== 2 | Basic Manipulation 3 | ================== 4 | 5 | Opening & Reading 6 | ================= 7 | 8 | Reading 9 | ------- 10 | 11 | Reading is done using :func:`pylas.read` function. 12 | This function will read everything in the file (Header, vlrs, point records, ...) and return an object 13 | that you can use to access to the data. 14 | 15 | .. code:: python 16 | 17 | import pylas 18 | 19 | las = pylas.read('somefile.las') 20 | print(np.unique(las.classification)) 21 | 22 | Opening 23 | ------- 24 | 25 | pylas can also :func:`pylas.open` files reading just the header and vlrs but not the points, this is useful 26 | if you are interested in metadata that are contained in the header and do not need to read the points. 27 | 28 | .. code:: python 29 | 30 | import s3fs 31 | import pylas 32 | 33 | fs = s3fs.S3FileSystem() 34 | with fs.open('my-bucket/some_file.las', 'rb') as f: 35 | if f.header.point_count < 100_000_000: 36 | las = pylas.read(f) 37 | 38 | Chunked reading 39 | --------------- 40 | 41 | Sometimes files are big, too big to be read entirely and fit into your RAM. 
42 | The object returned by the :func:`pylas.open` function, :class:`.LasReader` 43 | can also be used to read points chunk by chunk by using :meth:`.LasReader.chunk_iterator`, which will allow you to do some 44 | processing on large files (splitting, filtering, etc) 45 | 46 | .. code:: python 47 | 48 | import pylas 49 | 50 | with pylas.open("some_big_file.laz") as f: 51 | for points in f.chunk_iterator(1_000_000): 52 | do_something_with(points) 53 | 54 | 55 | Writing 56 | ======= 57 | 58 | 59 | To be able to write a las file you will need a :class:`.LasData`. 60 | You obtain this type of object by using one of the function described in the section above 61 | use its method :meth:`.LasData.write` to write to a file or a stream. 62 | 63 | Chunked Writing 64 | --------------- 65 | 66 | Similar to :class:`.LasReader` there exists a way to write a file 67 | chunk by chunk. 68 | 69 | .. code:: python 70 | 71 | import pylas 72 | 73 | with pylas.open("some_big_file.laz") as f: 74 | with pylas.open("grounds.laz", mode="w", header=f.header) as writer: 75 | for points in f.chunk_iterator(1_234_567): 76 | writer.write_points(points[points.classification == 2]) 77 | 78 | .. _accessing_header: 79 | 80 | Creating 81 | ======== 82 | 83 | Creating a new Las from scratch is simple. 84 | Use :func:`pylas.create`. 85 | 86 | 87 | Converting 88 | ========== 89 | 90 | pylas also offers the ability to convert a file between the different version and point format available 91 | (as long as they are compatible). 92 | 93 | To convert, use the :func:`pylas.convert` 94 | 95 | Accessing the file header 96 | ========================= 97 | 98 | You can access the header of a las file you read or opened by retrieving the 'header' attribute: 99 | 100 | >>> import pylas 101 | >>> las = pylas.read('pylastests/simple.las') 102 | >>> las.header 103 | )> 104 | >>> las.header.point_count 105 | 1065 106 | 107 | 108 | >>> with pylas.open('pylastests/simple.las') as f: 109 | ... 
f.header.point_count 110 | 1065 111 | 112 | 113 | 114 | you can see the accessible fields in :class:`.LasHeader`. 115 | 116 | 117 | Accessing Points Records 118 | ======================== 119 | 120 | To access point records using the dimension name, you have 2 options: 121 | 122 | 1) regular attribute access using the `las.dimension_name` syntax 123 | 2) dict-like attribute access `las[dimension_name]`. 124 | 125 | >>> import numpy as np 126 | >>> las = pylas.read('pylastests/simple.las') 127 | >>> np.all(las.user_data == las['user_data']) 128 | True 129 | 130 | Point Format 131 | ------------ 132 | 133 | The dimensions available in a file are dictated by the point format id. 134 | The tables in the introduction section contains the list of dimensions for each of the 135 | point format. 136 | To get the point format of a file you have to access it through the las object: 137 | 138 | >>> point_format = las.point_format 139 | >>> point_format 140 | 141 | >>> point_format.id 142 | 3 143 | 144 | If you don't want to remember the dimensions for each point format, 145 | you can access the list of available dimensions in the file you read just like that: 146 | 147 | >>> list(point_format.dimension_names) 148 | ['X', 'Y', 'Z', 'intensity', 'return_number', 'number_of_returns', 'scan_direction_flag', 'edge_of_flight_line', 'classification', 'synthetic', 'key_point', 'withheld', 'scan_angle_rank', 'user_data', 'point_source_id', 'gps_time', 'red', 'green', 'blue'] 149 | 150 | This gives you all the dimension names, including extra dimensions if any. 
151 | If you wish to get only the extra dimension names the point format can give them to you: 152 | 153 | >>> list(point_format.standard_dimension_names) 154 | ['X', 'Y', 'Z', 'intensity', 'return_number', 'number_of_returns', 'scan_direction_flag', 'edge_of_flight_line', 'classification', 'synthetic', 'key_point', 'withheld', 'scan_angle_rank', 'user_data', 'point_source_id', 'gps_time', 'red', 'green', 'blue'] 155 | >>> list(point_format.extra_dimension_names) 156 | [] 157 | >>> las = pylas.read('pylastests/extrabytes.las') 158 | >>> list(las.point_format.extra_dimension_names) 159 | ['Colors', 'Reserved', 'Flags', 'Intensity', 'Time'] 160 | 161 | You can also have more information: 162 | 163 | >>> point_format[3].name 164 | 'intensity' 165 | >>> point_format[3].num_bits 166 | 16 167 | >>> point_format[3].kind 168 | 169 | >>> point_format[3].max 170 | 65535 171 | 172 | 173 | 174 | 175 | 176 | .. _manipulating_vlrs: 177 | 178 | Manipulating VLRs 179 | ================= 180 | 181 | To access the VLRs stored in a file, simply access the `vlr` member of the las object. 182 | 183 | >>> las = pylas.read('pylastests/extrabytes.las') 184 | >>> las.vlrs 185 | [] 186 | 187 | >>> with pylas.open('pylastests/extrabytes.las') as f: 188 | ... f.header.vlrs 189 | [] 190 | 191 | 192 | To retrieve a particular vlr from the list there are 2 ways: :meth:`.VLRList.get` and 193 | :meth:`.VLRList.get_by_id` 194 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Configuration file for the Sphinx documentation builder. 4 | # 5 | # This file does only contain a selection of the most common options. 
For a 6 | # full list see the documentation: 7 | # http://www.sphinx-doc.org/en/stable/config 8 | 9 | # -- Path setup -------------------------------------------------------------- 10 | 11 | # If extensions (or modules to document with autodoc) are in another directory, 12 | # add these directories to sys.path here. If the directory is relative to the 13 | # documentation root, use os.path.abspath to make it absolute, like shown here. 14 | # 15 | # import os 16 | # import sys 17 | # sys.path.insert(0, os.path.abspath('.')) 18 | 19 | 20 | # -- Project information ----------------------------------------------------- 21 | 22 | project = 'pylas' 23 | copyright = '2018-2020, pylas' 24 | author = 'Thomas Montaigu' 25 | 26 | # Parse the version from setup.py. 27 | with open('../setup.py') as f: 28 | for line in f: 29 | if line.find("version") >= 0: 30 | version = line.split("=")[1].strip() 31 | version = version.strip(',') 32 | version = version.strip('"') 33 | version = version.strip("'") 34 | continue 35 | release = version 36 | 37 | # -- General configuration --------------------------------------------------- 38 | 39 | # If your documentation needs a minimal Sphinx version, state it here. 40 | # 41 | # needs_sphinx = '1.0' 42 | 43 | # Add any Sphinx extension module names here, as strings. They can be 44 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 45 | # ones. 46 | extensions = [ 47 | 'sphinx.ext.githubpages', 48 | 'sphinx.ext.autodoc', 49 | 'sphinx.ext.autosummary', 50 | 'sphinx.ext.todo', 51 | 'sphinx.ext.mathjax', 52 | 'sphinx.ext.viewcode', 53 | 'sphinx.ext.napoleon', 54 | 'sphinx.ext.intersphinx', 55 | 'sphinx.ext.doctest', 56 | # 'sphinx_autodoc_typehints' 57 | ] 58 | 59 | napoleon_use_param = True 60 | 61 | # Add any paths that contain templates here, relative to this directory. 62 | templates_path = ['_templates'] 63 | 64 | # The suffix(es) of source filenames. 
65 | # You can specify multiple suffix as a list of string: 66 | # 67 | # source_suffix = ['.rst', '.md'] 68 | source_suffix = '.rst' 69 | 70 | # The master toctree document. 71 | master_doc = 'index' 72 | 73 | # The language for content autogenerated by Sphinx. Refer to documentation 74 | # for a list of supported languages. 75 | # 76 | # This is also used if you do content translation via gettext catalogs. 77 | # Usually you set "language" from the command line for these cases. 78 | language = None 79 | 80 | # List of patterns, relative to source directory, that match files and 81 | # directories to ignore when looking for source files. 82 | # This pattern also affects html_static_path and html_extra_path . 83 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 84 | 85 | # The name of the Pygments (syntax highlighting) style to use. 86 | pygments_style = 'sphinx' 87 | 88 | autodoc_member_order = 'bysource' 89 | 90 | # -- Options for HTML output ------------------------------------------------- 91 | 92 | # The theme to use for HTML and HTML Help pages. See the documentation for 93 | # a list of builtin themes. 94 | # 95 | html_theme = 'sphinx_rtd_theme' 96 | 97 | # Theme options are theme-specific and customize the look and feel of a theme 98 | # further. For a list of options available for each theme, see the 99 | # documentation. 100 | # 101 | # html_theme_options = {} 102 | 103 | # Add any paths that contain custom static files (such as style sheets) here, 104 | # relative to this directory. They are copied after the builtin static files, 105 | # so a file named "default.css" will overwrite the builtin "default.css". 106 | html_static_path = [] 107 | 108 | # Custom sidebar templates, must be a dictionary that maps document names 109 | # to template names. 110 | # 111 | # The default sidebars (for documents that don't match any pattern) are 112 | # defined by theme itself. 
Builtin themes are using these templates by 113 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 114 | # 'searchbox.html']``. 115 | # 116 | # html_sidebars = {} 117 | 118 | 119 | # -- Options for HTMLHelp output --------------------------------------------- 120 | 121 | # Output file base name for HTML help builder. 122 | htmlhelp_basename = 'pylasdoc' 123 | 124 | 125 | # -- Options for LaTeX output ------------------------------------------------ 126 | 127 | latex_elements = { 128 | # The paper size ('letterpaper' or 'a4paper'). 129 | # 130 | # 'papersize': 'letterpaper', 131 | 132 | # The font size ('10pt', '11pt' or '12pt'). 133 | # 134 | # 'pointsize': '10pt', 135 | 136 | # Additional stuff for the LaTeX preamble. 137 | # 138 | # 'preamble': '', 139 | 140 | # Latex figure (float) alignment 141 | # 142 | # 'figure_align': 'htbp', 143 | } 144 | 145 | # Grouping the document tree into LaTeX files. List of tuples 146 | # (source start file, target name, title, 147 | # author, documentclass [howto, manual, or own class]). 148 | latex_documents = [ 149 | (master_doc, 'pylas.tex', 'pylas Documentation', 150 | 'Ryan McCarthy', 'manual'), 151 | ] 152 | 153 | 154 | # -- Options for manual page output ------------------------------------------ 155 | 156 | # One entry per manual page. List of tuples 157 | # (source start file, name, description, authors, manual section). 158 | man_pages = [ 159 | (master_doc, 'pylas', 'pylas Documentation', 160 | [author], 1) 161 | ] 162 | 163 | 164 | # -- Options for Texinfo output ---------------------------------------------- 165 | 166 | # Grouping the document tree into Texinfo files. 
List of tuples 167 | # (source start file, target name, title, author, 168 | # dir menu entry, description, category) 169 | texinfo_documents = [ 170 | (master_doc, 'pylas', 'pylas Documentation', 171 | author, 'pylas', 'One line description of project.', 172 | 'Miscellaneous'), 173 | ] 174 | 175 | 176 | # -- Extension configuration ------------------------------------------------- 177 | intersphinx_mapping = { 178 | 'python': ('https://docs.python.org/3', None), 179 | } 180 | 181 | 182 | -------------------------------------------------------------------------------- /docs/examples.rst: -------------------------------------------------------------------------------- 1 | ================== 2 | Examples 3 | ================== 4 | 5 | 6 | Filtering 7 | --------- 8 | 9 | This example shows how you can extract points from a file and write them into a new one. 10 | We use the classification field to filter points, but this can work with the other fields. 11 | 12 | .. code-block:: python 13 | 14 | import pylas 15 | 16 | las = pylas.read('pylastests/simple.las') 17 | 18 | new_file = pylas.create(point_format=las.header.point_format_id, file_version=las.header.version) 19 | new_file.points = las.points[las.classification == 1] 20 | 21 | new_file.write('extracted_points.las') 22 | 23 | 24 | 25 | Creating from scratch 26 | --------------------- 27 | 28 | This example shows how you can create a new LAS file from scratch. 29 | 30 | .. code-block:: python 31 | 32 | import pylas 33 | import numpy as np 34 | 35 | las = pylas.create() 36 | 37 | array = np.linspace(0.0, 15.0, 10000) 38 | las.x = array 39 | las.y = array 40 | las.z = array 41 | 42 | las.write('diagonal.las') 43 | 44 | 45 | Using chunked reading & writing 46 | ------------------------------- 47 | 48 | This example shows how to use the 'chunked' reading and writing feature 49 | to split potentially large LAS/LAZ file into multiple smaller file. 50 | 51 | .. 
literalinclude:: ../examples/recursive-split.py 52 | :language: Python 53 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. pylas documentation master file, created by 2 | sphinx-quickstart on Wed Mar 28 09:00:58 2018. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | =========================================== 7 | pylas: Python library for lidar LAS/LAZ IO. 8 | =========================================== 9 | 10 | `LAS`_ (and it's compressed counterpart LAZ), is a popular format for lidar pointcloud and full waveform, 11 | pylas reads and writes these formats and provides a Python API via Numpy Arrays. 12 | 13 | .. _LAS: https://www.asprs.org/committee-general/laser-las-file-format-exchange-activities.html 14 | 15 | Here is an example of reading in LAZ data and getting some simple summaries of the pointcloud: 16 | 17 | .. testcode:: 18 | 19 | import numpy as np 20 | import pylas 21 | 22 | with pylas.open('pylastests/simple.laz') as fh: 23 | print('Points from Header:', fh.header.point_count) 24 | las = fh.read() 25 | print(las) 26 | print('Points from data:', len(las.points)) 27 | ground_pts = las.classification == 2 28 | bins, counts = np.unique(las.return_number[ground_pts], return_counts=True) 29 | print('Ground Point Return Number distribution:') 30 | for r,c in zip(bins,counts): 31 | print(' {}:{}'.format(r,c)) 32 | 33 | 34 | Would output: 35 | 36 | .. testoutput:: 37 | 38 | Points from Header: 1065 39 | , 1065 points, 0 vlrs)> 40 | Points from data: 1065 41 | Ground Point Return Number distribution: 42 | 1:239 43 | 2:25 44 | 3:11 45 | 4:1 46 | 47 | 48 | User Guide 49 | ========== 50 | 51 | .. 
toctree:: 52 | :maxdepth: 2 53 | 54 | installation 55 | intro 56 | basic 57 | examples 58 | lessbasic 59 | migration 60 | 61 | API Documentation 62 | ================= 63 | 64 | .. toctree:: 65 | :maxdepth: 2 66 | 67 | api/index 68 | 69 | Indices and tables 70 | ================== 71 | 72 | * :ref:`genindex` 73 | * :ref:`modindex` 74 | * :ref:`search` 75 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Installation 3 | ============ 4 | 5 | Installing from PyPi 6 | ==================== 7 | 8 | .. code:: shell 9 | 10 | pip install pylas 11 | 12 | 13 | Optional dependencies for LAZ support 14 | ===================================== 15 | 16 | pylas does not support LAZ (.laz) file by itself but can use one of several optional dependencies 17 | to support compressed LAZ files. 18 | 19 | The 2 supported options are: 20 | 21 | 1) `lazrs`_ `[lazrs PyPi]`_ 22 | 23 | 2) `laszip-python`_ (bindings to `laszip`_) 24 | 25 | When encountering LAZ data, pylas will try to use one of the backend in the order described above. 26 | (Example: if lazrs is not installed or if it fails during, the process, pylas will try laszip) 27 | 28 | `lazrs`_ is a Rust port of the laszip compression and decompression. 29 | Its main advantage is that it is able to compress/decompress using multiple threads which can 30 | greatly speed up things. 31 | 32 | `laszip`_ is the official and original LAZ implementation by Martin Isenburg. 33 | The advantage of the `laszip` backend is that its the official implementation, 34 | but does not offer multi-threaded compression/decompression. 35 | 36 | 37 | Both the laszip bindings and lazrs are available on pip. 38 | 39 | To install pylas with one of its supported backend use one of the following commands 40 | 41 | .. 
code-block:: shell 42 | 43 | # To install with lazrs only 44 | pip install pylas[lazrs] 45 | 46 | # To install with laszip only 47 | pip install pylas[laszip] 48 | 49 | # To install with both 50 | pip install pylas[lazrs,laszip] 51 | 52 | 53 | .. _lazrs: https://github.com/tmontaigu/laz-rs 54 | .. _laszip-python: https://github.com/tmontaigu/laszip-python 55 | .. _laszip: https://github.com/LASzip/LASzip 56 | .. _[lazrs PyPi]: https://pypi.org/project/lazrs/ 57 | 58 | 59 | 60 | 61 | 62 | -------------------------------------------------------------------------------- /docs/intro.rst: -------------------------------------------------------------------------------- 1 | ==================== 2 | What is a LAS file ? 3 | ==================== 4 | 5 | LAS is a public file format meant to exchange 3D point data, mostly used to exchange lidar point clouds. 6 | LAZ is a **lossless** compression of the LAS format. 7 | 8 | The latest LAS specification is the `LAS 1.4`_. pylas supports LAS files from Version 1.2 to 1.4. 9 | 10 | .. _LAS 1.4: https://www.asprs.org/wp-content/uploads/2010/12/LAS_1_4_r13.pdf 11 | 12 | LAS files are organized in 3 main parts: 13 | 14 | 1) Header 15 | 2) VLRs 16 | 3) Point Records 17 | 18 | Header 19 | ------ 20 | 21 | The header contains information about the data such as its version, the point format (which tells the different 22 | dimensions stored for each points). 23 | 24 | See :ref:`accessing_header` 25 | 26 | VLRs 27 | ---- 28 | 29 | After the header, LAS files may contain VLRs (Variable Length Record). 30 | VLRs are meant to store additional information such as the SRS (Spatial Reference System), 31 | description on extra dimensions added to the points. 32 | 33 | VLRs are divided in two parts: 34 | 35 | 1) header 36 | 2) payload 37 | 38 | The payload is limited to 65,535 bytes (Because in the header, the length of the payload is stored on a uint16). 
39 | 40 | See :ref:`manipulating_vlrs` 41 | 42 | 43 | 44 | Point Records 45 | ------------- 46 | The last chunk of data (and the biggest one) contains the point records. In a LAS file, points are stored sequentially. 47 | 48 | The point records holds the point cloud data the LAS Spec specifies 10 point formats. 49 | A point format describe the dimensions stored for each point in the record. 50 | 51 | Each LAS specification added new point formats, the table below describe the compatibility between point formats 52 | and LAS file version. 53 | 54 | +-----------------+-----------------------------------+ 55 | |LAS file version + Compatible point formats | 56 | +=================+===================================+ 57 | |1.2 | 0, 1, 2, 3 | 58 | +-----------------+-----------------------------------+ 59 | |1.3 | 0, 1, 2, 3, 4, 5 | 60 | +-----------------+-----------------------------------+ 61 | |1.4 | 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 | 62 | +-----------------+-----------------------------------+ 63 | 64 | The names written in the tables below are the one you will have to use in 65 | your code. 66 | 67 | .. note:: 68 | 69 | The dimensions 'X', 'Y', 'Z' are signed integers without the scale and 70 | offset applied. 
To access the coordinates as doubles simply use 'x', 'y' , 'z' 71 | 72 | 73 | Point Format 0 74 | ++++++++++++++ 75 | 76 | +----------------------+-----------+--------------+ 77 | | Dimensions | Type | Size (bit) | 78 | +======================+===========+==============+ 79 | | X | signed | 32 | 80 | +----------------------+-----------+--------------+ 81 | | Y | signed | 32 | 82 | +----------------------+-----------+--------------+ 83 | | Z | signed | 32 | 84 | +----------------------+-----------+--------------+ 85 | | intensity | unsigned | 16 | 86 | +----------------------+-----------+--------------+ 87 | | return_number | unsigned | 3 | 88 | +----------------------+-----------+--------------+ 89 | | number_of_returns | unsigned | 3 | 90 | +----------------------+-----------+--------------+ 91 | | scan_direction_flag | bool | 1 | 92 | +----------------------+-----------+--------------+ 93 | | edge_of_flight_line | bool | 1 | 94 | +----------------------+-----------+--------------+ 95 | | classification | unsigned | 5 | 96 | +----------------------+-----------+--------------+ 97 | | synthetic | bool | 1 | 98 | +----------------------+-----------+--------------+ 99 | | key_point | bool | 1 | 100 | +----------------------+-----------+--------------+ 101 | | withheld | bool | 1 | 102 | +----------------------+-----------+--------------+ 103 | | scan_angle_rank | signed | 8 | 104 | +----------------------+-----------+--------------+ 105 | | user_data | unsigned | 8 | 106 | +----------------------+-----------+--------------+ 107 | | point_source_id | unsigned | 8 | 108 | +----------------------+-----------+--------------+ 109 | 110 | 111 | The point formats 1, 2, 3, 4, 5 are based on the point format 0, meaning that they have 112 | the same dimensions plus some additional dimensions: 113 | 114 | Point Format 1 115 | ++++++++++++++ 116 | 117 | +----------------------+-----------+--------------+ 118 | | Added dimensions | Type | Size (bit) | 119 | 
+======================+===========+==============+ 120 | | gps_time | Floating | 64 | 121 | +----------------------+-----------+--------------+ 122 | 123 | 124 | Point Format 2 125 | ++++++++++++++ 126 | 127 | +----------------------+-----------+--------------+ 128 | | Added dimensions | Type | Size (bit) | 129 | +======================+===========+==============+ 130 | | red | unsigned | 16 | 131 | +----------------------+-----------+--------------+ 132 | | green | unsigned | 16 | 133 | +----------------------+-----------+--------------+ 134 | | blue | unsigned | 16 | 135 | +----------------------+-----------+--------------+ 136 | 137 | Point Format 3 138 | ++++++++++++++ 139 | 140 | +----------------------+-----------+--------------+ 141 | | Added dimensions | Type | Size (bit) | 142 | +======================+===========+==============+ 143 | | gps_time | Floating | 64 | 144 | +----------------------+-----------+--------------+ 145 | | red | unsigned | 16 | 146 | +----------------------+-----------+--------------+ 147 | | green | unsigned | 16 | 148 | +----------------------+-----------+--------------+ 149 | | blue | unsigned | 16 | 150 | +----------------------+-----------+--------------+ 151 | 152 | 153 | Point Format 4 154 | ++++++++++++++ 155 | 156 | +----------------------------+-----------+--------------+ 157 | | Added dimensions | Type | Size (bit) | 158 | +============================+===========+==============+ 159 | | gps_time | Floating | 64 | 160 | +----------------------------+-----------+--------------+ 161 | | wavepacket_index | unsigned | 8 | 162 | +----------------------------+-----------+--------------+ 163 | | wavepacket_offset | unsigned | 64 | 164 | +----------------------------+-----------+--------------+ 165 | | wavepacket_size | unsigned | 32 | 166 | +----------------------------+-----------+--------------+ 167 | | return_point_wave_location | unsigned | 32 | 168 | +----------------------------+-----------+--------------+ 169 | | x_t | 
floating | 32 | 170 | +----------------------------+-----------+--------------+ 171 | | y_t | floating | 32 | 172 | +----------------------------+-----------+--------------+ 173 | | z_t | floating | 32 | 174 | +----------------------------+-----------+--------------+ 175 | 176 | Point Format 5 177 | ++++++++++++++ 178 | 179 | +----------------------------+-----------+--------------+ 180 | | Added dimensions | Type | Size (bit) | 181 | +============================+===========+==============+ 182 | | gps_time | Floating | 64 | 183 | +----------------------------+-----------+--------------+ 184 | | red | unsigned | 16 | 185 | +----------------------------+-----------+--------------+ 186 | | green | unsigned | 16 | 187 | +----------------------------+-----------+--------------+ 188 | | blue | unsigned | 16 | 189 | +----------------------------+-----------+--------------+ 190 | | wavepacket_index | unsigned | 8 | 191 | +----------------------------+-----------+--------------+ 192 | | wavepacket_offset | unsigned | 64 | 193 | +----------------------------+-----------+--------------+ 194 | | wavepacket_size | unsigned | 32 | 195 | +----------------------------+-----------+--------------+ 196 | | return_point_wave_location | unsigned | 32 | 197 | +----------------------------+-----------+--------------+ 198 | | x_t | floating | 32 | 199 | +----------------------------+-----------+--------------+ 200 | | y_t | floating | 32 | 201 | +----------------------------+-----------+--------------+ 202 | | z_t | floating | 32 | 203 | +----------------------------+-----------+--------------+ 204 | 205 | 206 | Point Format 6 207 | ++++++++++++++ 208 | 209 | The Point Format 6, is the new base point format (6, 7, 8, 9, 10) introduced in the LAS specification 1.4. 210 | The main modifications from point format 0 and point format 6 are that now the gps_time is baseline 211 | and some fields takes more bits, for example the classification is now stored on 8 bits (previously 5). 
212 | 213 | 214 | +----------------------+-----------+--------------+ 215 | | Dimensions | Type | Size (bit) | 216 | +======================+===========+==============+ 217 | | X | signed | 32 | 218 | +----------------------+-----------+--------------+ 219 | | Y | signed | 32 | 220 | +----------------------+-----------+--------------+ 221 | | Z | signed | 32 | 222 | +----------------------+-----------+--------------+ 223 | | intensity | unsigned | 16 | 224 | +----------------------+-----------+--------------+ 225 | | return_number | unsigned | 4 | 226 | +----------------------+-----------+--------------+ 227 | | number_of_returns | unsigned | 4 | 228 | +----------------------+-----------+--------------+ 229 | | synthetic | bool | 1 | 230 | +----------------------+-----------+--------------+ 231 | | key_point | bool | 1 | 232 | +----------------------+-----------+--------------+ 233 | | withheld | bool | 1 | 234 | +----------------------+-----------+--------------+ 235 | | overlap | bool | 1 | 236 | +----------------------+-----------+--------------+ 237 | | scanner_channel | unsigned | 2 | 238 | +----------------------+-----------+--------------+ 239 | | scan_direction_flag | bool | 1 | 240 | +----------------------+-----------+--------------+ 241 | | edge_of_flight_line | bool | 1 | 242 | +----------------------+-----------+--------------+ 243 | | classification | unsigned | 8 | 244 | +----------------------+-----------+--------------+ 245 | | user_data | unsigned | 8 | 246 | +----------------------+-----------+--------------+ 247 | | scan_angle_rank | signed | 16 | 248 | +----------------------+-----------+--------------+ 249 | | point_source_id | unsigned | 8 | 250 | +----------------------+-----------+--------------+ 251 | | gps_time | Floating | 64 | 252 | +----------------------+-----------+--------------+ 253 | 254 | Point Format 7 255 | ++++++++++++++ 256 | 257 | Add RGB to point format 6. 
258 | 259 | +----------------------------+-----------+--------------+ 260 | | Added dimensions | Type | Size (bit) | 261 | +============================+===========+==============+ 262 | | red | unsigned | 16 | 263 | +----------------------------+-----------+--------------+ 264 | | green | unsigned | 16 | 265 | +----------------------------+-----------+--------------+ 266 | | blue | unsigned | 16 | 267 | +----------------------------+-----------+--------------+ 268 | 269 | 270 | Point Format 8 271 | ++++++++++++++ 272 | 273 | Adds RGB and Nir (Near Infrared) to point format 6. 274 | 275 | +----------------------------+-----------+--------------+ 276 | | Added dimensions | Type | Size (bit) | 277 | +============================+===========+==============+ 278 | | red | unsigned | 16 | 279 | +----------------------------+-----------+--------------+ 280 | | green | unsigned | 16 | 281 | +----------------------------+-----------+--------------+ 282 | | blue | unsigned | 16 | 283 | +----------------------------+-----------+--------------+ 284 | | nir | unsigned | 16 | 285 | +----------------------------+-----------+--------------+ 286 | 287 | 288 | Point Format 9 289 | ++++++++++++++ 290 | 291 | Add waveform data to points 292 | 293 | +----------------------------+-----------+--------------+ 294 | | Added dimensions | Type | Size (bit) | 295 | +============================+===========+==============+ 296 | | wavepacket_index | unsigned | 8 | 297 | +----------------------------+-----------+--------------+ 298 | | wavepacket_offset | unsigned | 64 | 299 | +----------------------------+-----------+--------------+ 300 | | wavepacket_size | unsigned | 32 | 301 | +----------------------------+-----------+--------------+ 302 | | return_point_wave_location | unsigned | 32 | 303 | +----------------------------+-----------+--------------+ 304 | | x_t | floating | 32 | 305 | +----------------------------+-----------+--------------+ 306 | | y_t | floating | 32 | 307 | 
+----------------------------+-----------+--------------+ 308 | | z_t | floating | 32 | 309 | +----------------------------+-----------+--------------+ 310 | 311 | 312 | Point Format 10 313 | +++++++++++++++ 314 | 315 | Adds RGB, Nir (near infrared), waveform data to point format 6 316 | 317 | +----------------------------+-----------+--------------+ 318 | | Added dimensions | Type | Size (bit) | 319 | +============================+===========+==============+ 320 | | red | unsigned | 16 | 321 | +----------------------------+-----------+--------------+ 322 | | green | unsigned | 16 | 323 | +----------------------------+-----------+--------------+ 324 | | blue | unsigned | 16 | 325 | +----------------------------+-----------+--------------+ 326 | | nir | unsigned | 16 | 327 | +----------------------------+-----------+--------------+ 328 | | wavepacket_index | unsigned | 8 | 329 | +----------------------------+-----------+--------------+ 330 | | wavepacket_offset | unsigned | 64 | 331 | +----------------------------+-----------+--------------+ 332 | | wavepacket_size | unsigned | 32 | 333 | +----------------------------+-----------+--------------+ 334 | | return_point_wave_location | unsigned | 32 | 335 | +----------------------------+-----------+--------------+ 336 | | x_t | floating | 32 | 337 | +----------------------------+-----------+--------------+ 338 | | y_t | floating | 32 | 339 | +----------------------------+-----------+--------------+ 340 | | z_t | floating | 32 | 341 | +----------------------------+-----------+--------------+ 342 | 343 | 344 | EVLRs 345 | ----- 346 | 347 | Version 1.4 of the LAS specification added a last block following the point records: EVLRs (Extended Variable 348 | Length Record) which are the same thing as VLRs but they can carry a higher payload (length of the payload is stored 349 | on a uint64) 350 | -------------------------------------------------------------------------------- /docs/lessbasic.rst: 
-------------------------------------------------------------------------------- 1 | ================= 2 | Less Basic Things 3 | ================= 4 | 5 | 6 | Extra Dimensions 7 | ================ 8 | 9 | The LAS Specification version 1.4 defines a standard way to add extra dimensions to 10 | a LAS file. 11 | 12 | In pylas you can add extra dimensions using the :meth:`.LasData.add_extra_dim` function 13 | 14 | 15 | The Allowed base types for an extra dimensions are: 16 | 17 | +-------------------------+-------------+-------------+ 18 | | pylas name | size (bits) | type | 19 | +=========================+=============+=============+ 20 | | u1 or uint8 | 8 | unsigned | 21 | +-------------------------+-------------+-------------+ 22 | | i1 or int8 | 8 | signed | 23 | +-------------------------+-------------+-------------+ 24 | | u2 or uint16 | 16 | unsigned | 25 | +-------------------------+-------------+-------------+ 26 | | i2 or int16 | 16 | signed | 27 | +-------------------------+-------------+-------------+ 28 | | u4 or uint32 | 32 | unsigned | 29 | +-------------------------+-------------+-------------+ 30 | | i4 or int32 | 32 | signed | 31 | +-------------------------+-------------+-------------+ 32 | | u8 or uint64 | 64 | unsigned | 33 | +-------------------------+-------------+-------------+ 34 | | i8 or int64 | 64 | signed | 35 | +-------------------------+-------------+-------------+ 36 | | f4 or float32 | 32 | floating | 37 | +-------------------------+-------------+-------------+ 38 | | f8 or float64 | 64 | floating | 39 | +-------------------------+-------------+-------------+ 40 | 41 | You can prepend the number '2' or '3' to one of the above base type to define an extra dimension 42 | that is array of 2 or 3 elements per points. 
43 | Example: 3u2 -> each points will have an extra dimension that is an array of 3 * 16 bits 44 | 45 | 46 | Here we are adding a new dimension called "codification" where each value is stored on a 64 bit unsigned integer 47 | and an array field of 3 doubles for each points. 48 | 49 | 50 | .. code-block:: python 51 | 52 | import pylas 53 | las = pylas.read("somefile.las") 54 | 55 | las.add_extra_dim(pylas.ExtraBytesParams( 56 | name="codification", 57 | type="uint64", 58 | description="More classes available" 59 | )) 60 | 61 | las.add_extra_dim(pylas.ExtraBytesParams(name="mysterious", type="3f8")) 62 | 63 | 64 | 65 | .. note:: 66 | 67 | Although the specification of the ExtraBytesVlr appeared in the 1.4 LAS Spec, pylas allows to 68 | add new dimensions to file with version < 1.4 69 | 70 | .. note:: 71 | 72 | If you are adding multiple extra dimensions use :meth:`.LasData.add_extra_dims` 73 | as it is more efficient (it allows to allocate all the dimensions at once instead 74 | of re-allocating each time a new dimension is added. 75 | 76 | 77 | Custom VLRs 78 | =========== 79 | 80 | Provided you have a valid user_id and record_id (meaning that they are not taken by a VLR described in the LAS specification) 81 | You can add you own VLRs to a file 82 | 83 | Fast & Easy way 84 | --------------- 85 | 86 | The fastest and easiest way to add your custom VLR to a file is to create a :class:`.VLR`, 87 | set its record_data (which must be bytes) and add it to the VLR list. 88 | 89 | 90 | >>> import pylas 91 | >>> vlr = pylas.vlrs.VLR(user_id='A UserId', record_id=1, description='Example VLR') 92 | >>> vlr 93 | 94 | >>> vlr.record_data = b'somebytes' 95 | >>> vlr 96 | 97 | >>> las = pylas.create() 98 | >>> las.vlrs 99 | [] 100 | >>> las.vlrs.append(vlr) 101 | >>> las.vlrs 102 | [] 103 | 104 | 105 | Complete & Harder way 106 | --------------------- 107 | 108 | While the way shown above is quick & easy it might not be perfect for complex VLRs. 
Also, when reading a file that has a custom VLR, pylas won't be able to automatically parse its data 110 | into a better structure, so you will have to find the VLR in the vlrs list and parse it yourself 111 | once pylas is done.
return "" 138 | 139 | >>> import numpy as np 140 | >>> cvlr = CustomVLR() 141 | >>> cvlr.numbers 142 | [] 143 | >>> cvlr.numbers = [1,2, 3] 144 | >>> las = pylas.create() 145 | >>> las.vlrs.append(cvlr) 146 | >>> las.vlrs 147 | [] 148 | >>> las.x = np.array([1.0, 2.0]) 149 | >>> las = pylas.lib.write_then_read_again(las) 150 | >>> las.vlrs 151 | [] 152 | >>> las.vlrs[0].numbers 153 | [1, 2, 3] 154 | 155 | -------------------------------------------------------------------------------- /docs/migration.rst: -------------------------------------------------------------------------------- 1 | Migration guides 2 | ================ 3 | 4 | From 0.4.x to 1.0.0 5 | ------------------- 6 | 7 | Changes in LAZ backend 8 | ______________________ 9 | 10 | With pylas 1.0.0, the lazperf backend 11 | support was dropped, and the laszip backend 12 | changed from using the laszip executable 13 | to using laszip python bindings. 14 | 15 | If you used lazperf or relied on the laszip executable 16 | you'll have to choose between the available backends. 17 | (see Installation section). 18 | 19 | 20 | Changes in bit fields 21 | _____________________ 22 | 23 | Some fields in LAS are 'bit fields'. 24 | 25 | with pylas 0.4.x, there was a inconsistency between 26 | 'normal' fields and 'bit' fields, when getting a bit field, 27 | pylas returned a copy of the values in a new numpy array whereas 28 | when getting a normal field, the array you got acted as a 'view' 29 | on the real array where the values where stored. 30 | 31 | That meant that modifying the values of the array you got from 32 | a bit field would no propagate to the real array. 33 | 34 | .. 
code-block:: python 35 | 36 | import pylas 37 | import numpy as np 38 | 39 | las = pylas.read("pylastests/simple.las") 40 | 41 | # return number is a bit field 42 | print(las.return_number) 43 | # array([1, 1, 1, ..., 1, 1, 1], dtype=uint8) 44 | 45 | ascending_order = np.argsort(las.return_number)[::-1] 46 | print(las.return_number[ascending_order]) 47 | # array([4, 4, 4, ..., 1, 1, 1], dtype=uint8) 48 | las.return_number[:] = las.return_number[ascending_order] 49 | print(las.return_number) 50 | # array([1, 1, 1, ..., 1, 1, 1], dtype=uint8) # bif oof 51 | las.return_number[0] = 7 52 | print(las.return_number) 53 | # array([1, 1, 1, ..., 1, 1, 1], dtype=uint8) # again value not updated 54 | 55 | 56 | # To actually update you have to do 57 | las.return_number = las.return_number[ascending_order] 58 | print(las.return_number) 59 | # array([4, 4, 4, ..., 1, 1, 1], dtype=uint8) 60 | 61 | rn = las.return_number[ascending_order] 62 | rn[0] = 7 63 | las.return_number = rn 64 | print(las.return_number) 65 | # array([7, 4, 4, ..., 1, 1, 1], dtype=uint8) 66 | 67 | 68 | In order to try to solve this inconsistency, pylas >= 0.5.0 69 | introduced the :class:`.SubFieldView` class that is meant to propagate 70 | modifications to the real array, and tries to act like a real numpy array. 71 | 72 | .. code-block:: python 73 | 74 | import pylas 75 | import numpy as np 76 | 77 | las = pylas.read("pylastests/simple.las") 78 | 79 | print(las.return_number) 80 | # 81 | 82 | ascending_order = np.argsort(las.return_number)[::-1] 83 | las.return_number[:] = las.return_number[ascending_order] 84 | print(las.return_number) 85 | # 86 | las.return_number[0] = 7 87 | print(las.return_number) 88 | # 89 | 90 | It may be possible that some operation on SubFieldView fail, in that case 91 | it is easy to copy them to numpy arrays: 92 | 93 | .. 
code-block:: python 94 | 95 | 96 | import pylas 97 | import numpy as np 98 | 99 | las = pylas.read("pylastests/simple.las") 100 | print(las.return_number) 101 | # 102 | 103 | array = np.array(las.return_number) 104 | # array([1, 1, 1, ..., 1, 1, 1], dtype=uint8) 105 | 106 | 107 | The logic is also the same for 'Scaled dimensions' such as x, y, z and scaled extra bytes, 108 | where a ScaledArrayView class has been introduced 109 | 110 | .. code-block:: python 111 | 112 | import pylas 113 | import numpy as np 114 | 115 | las = pylas.read("pylastests/simple.las") 116 | print(las.x) 117 | # > 118 | 119 | # ScaledArray view should behave as much as possible as a numpy array 120 | # However if something breaks in your code when upgrading, and / or 121 | # you need a true numpy array you can get one by doing 122 | 123 | array = np.array(las.x) 124 | # array([637012.24, 636896.33, 636784.74, ..., 637501.67, 637433.27, 125 | # 637342.85]) 126 | 127 | 128 | 129 | Changes in extra bytes creation 130 | _______________________________ 131 | 132 | The API to create extra bytes changed slightly, now the parameters needed 133 | (and the optional ones) are coupled into :class:`.ExtraBytesParams` 134 | 135 | 136 | Other changes 137 | _____________ 138 | 139 | The `points` attribute of as :class:`.LasData` used to return a numpy array 140 | it now returns a :class:`.PackedPointRecord` to get the same array as before, 141 | use the `array` property of the point record. 142 | 143 | .. 
code-block:: python 144 | 145 | # pylas <= 0.4.3 146 | las = pylas.read("somefile.las") 147 | array = las.points 148 | 149 | # pylas 1.0.0 150 | las = pylas.read("somefile.las") 151 | array = las.points.array 152 | -------------------------------------------------------------------------------- /examples/field-randomizer.py: -------------------------------------------------------------------------------- 1 | from argparse import ArgumentParser 2 | from pathlib import Path 3 | 4 | import numpy as np 5 | 6 | import pylas 7 | 8 | 9 | def main(args): 10 | files = [args.in_path] if args.in_path.is_file() else list(args.in_path.glob("*.la[s-z]")) 11 | 12 | if args.out_path.suffix and len(files) > 1: 13 | raise SystemExit("in_path is a directory and out path is a file") 14 | 15 | for i, path in enumerate(files, start=1): 16 | print(f"{i} / {len(files)} -> {path}") 17 | 18 | las = pylas.read(path) 19 | 20 | dimensions = (dim for dim in las.point_format.dimensions if dim.name not in args.exclude) 21 | for dimension in dimensions: 22 | print(f"\t{dimension.name}", end='') 23 | if dimension.kind == pylas.DimensionKind.FloatingPoint: 24 | print("...skipped because it is floating point") 25 | continue 26 | 27 | if np.any(las[dimension.name] != 0) and args.keep_existing: 28 | print("...skipped because it is already populated") 29 | continue 30 | 31 | type_str = dimension.type_str() if dimension.kind != pylas.DimensionKind.BitField else 'u1' 32 | las[dimension.name] = np.random.randint(dimension.min, dimension.max + 1, len(las.points), type_str) 33 | print() 34 | 35 | if args.out_path.suffix: 36 | las.write(args.out_path) 37 | else: 38 | las.write(args.out_path / path.name) 39 | 40 | 41 | if __name__ == '__main__': 42 | parser = ArgumentParser('Randomize fields of your las file') 43 | parser.add_argument('in_path', type=Path) 44 | parser.add_argument('out_path', type=Path) 45 | parser.add_argument('--keep-existing', action='store_true') 46 | parser.add_argument('--exclude', 
nargs='*', default=["X", "Y", "Z"]) 47 | 48 | args = parser.parse_args() 49 | 50 | main(args) 51 | -------------------------------------------------------------------------------- /examples/recursive-split.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import sys 3 | from pathlib import Path 4 | from typing import List 5 | from typing import Optional 6 | 7 | import numpy as np 8 | 9 | import pylas 10 | 11 | 12 | def recursive_split(x_min, y_min, x_max, y_max, max_x_size, max_y_size): 13 | x_size = x_max - x_min 14 | y_size = y_max - y_min 15 | 16 | if x_size > max_x_size: 17 | left = recursive_split(x_min, y_min, x_min + (x_size // 2), y_max, max_x_size, max_y_size) 18 | right = recursive_split(x_min + (x_size // 2), y_min, x_max, y_max, max_x_size, max_y_size) 19 | return left + right 20 | elif y_size > max_y_size: 21 | up = recursive_split(x_min, y_min, x_max, y_min + (y_size // 2), max_x_size, max_y_size) 22 | down = recursive_split(x_min, y_min + (y_size // 2), x_max, y_max, max_x_size, max_y_size) 23 | return up + down 24 | else: 25 | return [(x_min, y_min, x_max, y_max)] 26 | 27 | 28 | def tuple_size(string): 29 | try: 30 | return tuple(map(float, string.split("x"))) 31 | except: 32 | raise ValueError("Size must be in the form of numberxnumber eg: 50.0x65.14") 33 | 34 | 35 | def main(): 36 | parser = argparse.ArgumentParser("LAS recursive splitter", description="Splits a las file bounds recursively") 37 | parser.add_argument("input_file") 38 | parser.add_argument("output_dir") 39 | parser.add_argument("size", type=tuple_size, help="eg: 50x64.17") 40 | parser.add_argument("--points-per-iter", default=10**6, type=int) 41 | 42 | args = parser.parse_args() 43 | 44 | with pylas.open(sys.argv[1]) as file: 45 | sub_bounds = recursive_split( 46 | file.header.x_min, 47 | file.header.y_min, 48 | file.header.x_max, 49 | file.header.y_max, 50 | args.size[0], 51 | args.size[1] 52 | ) 53 | 54 | writers: 
List[Optional[pylas.LasWriter]] = [None] * len(sub_bounds) 55 | try: 56 | count = 0 57 | for points in file.chunk_iterator(args.points_per_iter): 58 | print(f"{count / file.header.point_count * 100}%") 59 | 60 | # For performance we need to use copy 61 | # so that the underlying arrays are contiguous 62 | x, y = points.x.copy(), points.y.copy() 63 | 64 | point_piped = 0 65 | 66 | for i, (x_min, y_min, x_max, y_max) in enumerate(sub_bounds): 67 | mask = (x >= x_min) & (x <= x_max) & (y >= y_min) & (y <= y_max) 68 | 69 | if np.any(mask): 70 | if writers[i] is None: 71 | output_path = Path(sys.argv[2]) / f"output_{i}.laz" 72 | writers[i] = pylas.open(output_path, 73 | mode='w', 74 | header=file.header) 75 | sub_points = points[mask] 76 | writers[i].write_points(sub_points) 77 | 78 | point_piped += np.sum(mask) 79 | if point_piped == len(points): 80 | break 81 | count += len(points) 82 | print(f"{count / file.header.point_count * 100}%") 83 | finally: 84 | for writer in writers: 85 | if writer is not None: 86 | writer.close() 87 | 88 | 89 | if __name__ == '__main__': 90 | main() 91 | -------------------------------------------------------------------------------- /noxfile.py: -------------------------------------------------------------------------------- 1 | import nox 2 | 3 | 4 | @nox.session(python=["3.6", "3.7", "3.8", "3.9"]) 5 | @nox.parametrize("laz_backend", [None, "lazrs", "laszip"]) 6 | def tests(session, laz_backend): 7 | session.install("pytest") 8 | if laz_backend is None: 9 | session.install(".") 10 | else: 11 | session.install(f".[{laz_backend}]") 12 | session.run("pytest", "-q", "pylastests") 13 | 14 | 15 | 16 | -------------------------------------------------------------------------------- /pylas/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "0.5.0a1" 2 | 3 | import logging 4 | 5 | from . 
""" The functions related to the LAZ format (compressed LAS)
"""
import enum
from typing import Tuple


class LazBackend(enum.Enum):
    """Supported backends for reading and writing LAS/LAZ"""

    LazrsParallel = 0
    """lazrs in multi-thread mode"""
    Lazrs = 1
    """lazrs in single-thread mode"""
    Laszip = 2
    """laszip backend"""

    def is_available(self) -> bool:
        """Returns true if the backend is available"""
        # Both lazrs variants rely on the same module.
        required_module = {
            LazBackend.LazrsParallel: "lazrs",
            LazBackend.Lazrs: "lazrs",
            LazBackend.Laszip: "laszip",
        }.get(self)
        if required_module is None:
            return False
        try:
            __import__(required_module)
        except ModuleNotFoundError:
            return False
        return True

    @staticmethod
    def detect_available() -> Tuple["LazBackend", ...]:
        """Returns a tuple containing the available backends in the current
        python environment
        """
        found = []
        if LazBackend.LazrsParallel.is_available():
            # lazrs provides both the parallel and single-threaded modes.
            found.extend((LazBackend.LazrsParallel, LazBackend.Lazrs))
        if LazBackend.Laszip.is_available():
            found.append(LazBackend.Laszip)
        return tuple(found)


def is_point_format_compressed(point_format_id: int) -> bool:
    """A LAZ file marks compression by setting bit 7 — and only bit 7 —
    of the point format id stored in the header.
    """
    bit_7_set = bool(point_format_id & 0x80)
    bit_6_set = bool(point_format_id & 0x40)
    return bit_7_set and not bit_6_set


def compressed_id_to_uncompressed(point_format_id: int) -> int:
    """Strips the compression bits, giving back the plain point format id."""
    return point_format_id & 0x3F


def uncompressed_id_to_compressed(point_format_id: int) -> int:
    """Sets bit 7 to mark the point format id as compressed."""
    return 0x80 | point_format_id


""" All the custom exceptions types
"""


class PylasError(Exception):
    pass


class UnknownExtraType(PylasError):
    pass


class PointFormatNotSupported(PylasError):
    pass


class FileVersionNotSupported(PylasError):
    pass


class LazError(PylasError):
    pass


class IncompatibleDataFormat(PylasError):
    pass
from . import errors
from .point.dims import DimensionKind

# numpy type strings indexed by the extra-bytes `data_type` id of the
# LAS specification; index 0 ("") means "undocumented extra bytes".
_extra_dims_base = (
    "",
    "u1",
    "i1",
    "u2",
    "i2",
    "u4",
    "i4",
    "u8",
    "i8",
    "f4",
    "f8",
)

# Ids 11..20 and 21..30 are 2- and 3-element arrays of the base types.
_extra_dims_array_2 = tuple("2{}".format(_type) for _type in _extra_dims_base[1:])
_extra_dims_array_3 = tuple("3{}".format(_type) for _type in _extra_dims_base[1:])

_extra_dims = _extra_dims_base + _extra_dims_array_2 + _extra_dims_array_3

_type_to_extra_dim_id = {type_str: i for i, type_str in enumerate(_extra_dims)}


def get_kind_of_extra_dim(type_index: int) -> DimensionKind:
    """Returns the kind (signedness) for the given type index

    Parameters
    ----------
    type_index: int
        index of the type as defined in the LAS Specification

    Returns
    -------
    DimensionKind,
        the enum variant
    """
    try:
        t = _extra_dims[type_index]
        # BUGFIX: the mapping was inverted. In numpy dtype codes,
        # 'i' is a *signed* integer and 'u' an *unsigned* integer.
        if t[0] == "i":
            return DimensionKind.SignedInteger
        elif t[0] == "u":
            return DimensionKind.UnsignedInteger
        else:
            return DimensionKind.FloatingPoint
    except IndexError:
        # Also reached for index 0: t is "" and t[0] raises IndexError.
        raise errors.UnknownExtraType(type_index) from None


def get_type_for_extra_dim(type_index: int) -> str:
    """Returns the type str ('u1" or "u2", etc) for the given type index
    Parameters
    ----------
    type_index: int
        index of the type as defined in the LAS Specification

    Returns
    -------
    str,
        a string representing the type, can be understood by numpy

    """
    try:
        return _extra_dims[type_index]
    except IndexError:
        raise errors.UnknownExtraType(type_index) from None


def get_id_for_extra_dim_type(type_str: str) -> int:
    """Returns the index of the type as defined in the LAS Specification

    Parameters
    ----------
    type_str: str

    Returns
    -------
    int
        index of the type

    """
    try:
        return _type_to_extra_dim_id[type_str]
    except KeyError:
        raise errors.UnknownExtraType(type_str) from None
import io
import math
from typing import Union, Iterable, BinaryIO, Optional

import numpy as np

from .compression import LazBackend
from .errors import PylasError
from .header import LasHeader
from .laswriter import UncompressedPointWriter
from .point.record import PackedPointRecord
from .vlrs.vlrlist import VLRList

try:
    import lazrs
except ModuleNotFoundError:
    pass


class LasAppender:
    """Allows to append points to and existing LAS/LAZ file.

    Appending to LAZ is only supported by the lazrs backend
    """

    def __init__(
        self,
        dest: BinaryIO,
        laz_backend: Optional[Union[LazBackend, Iterable[LazBackend]]] = None,
        closefd: bool = True,
    ) -> None:
        """
        :param dest: seekable binary stream of the file to append to
        :param laz_backend: backend(s) to try for LAZ files,
            defaults to the backends detected in the environment
        :param closefd: whether closing the appender also closes `dest`
        """
        if not dest.seekable():
            raise TypeError("Expected the 'dest' to be a seekable file object")
        header = LasHeader.read_from(dest)
        if laz_backend is None:
            laz_backend = LazBackend.detect_available()

        self.dest = dest
        self.header = header

        if not header.are_points_compressed:
            self.points_writer = UncompressedPointWriter(self.dest)
            # Position the stream just after the last point record.
            self.dest.seek(
                (self.header.point_count * self.header.point_format.size)
                + self.header.offset_to_point_data,
                io.SEEK_SET,
            )
        else:
            self.points_writer = self._create_laz_backend(laz_backend)

        if header.version.minor >= 4 and header.number_of_evlrs > 0:
            assert (
                self.dest.tell() <= self.header.start_of_first_evlr
            ), "The position is past the start of evlrs"
            pos = self.dest.tell()
            self.dest.seek(self.header.start_of_first_evlr, io.SEEK_SET)
            self.evlrs: Optional[VLRList] = VLRList.read_from(
                self.dest, self.header.number_of_evlrs, extended=True
            )
            # Restore the write position: appended points overwrite the
            # old EVLRs, which are re-written at close().
            # (A redundant seek back to start_of_first_evlr that was
            # immediately overridden has been removed.)
            self.dest.seek(pos, io.SEEK_SET)
        else:
            self.evlrs: Optional[VLRList] = None

        self.closefd = closefd

    def append_points(self, points: PackedPointRecord) -> None:
        """Append the points to the file, the points
        must have the same point format as the points
        already contained within the file.

        :param points: The points to append
        """
        if points.point_format != self.header.point_format:
            raise PylasError("Point formats do not match")

        self.points_writer.write_points(points)
        self.header.update(points)

    def close(self) -> None:
        """Finalize the point data, rewrite the EVLRs and the updated header."""
        self.points_writer.done()
        self._write_evlrs()
        self._write_updated_header()

        if self.closefd:
            self.dest.close()

    def _write_evlrs(self) -> None:
        """Re-write the EVLRs after the appended points (LAS >= 1.4 only)."""
        if (
            self.header.version.minor >= 4
            and self.evlrs is not None
            and len(self.evlrs) > 0
        ):
            # BUGFIX: the header attribute is `number_of_evlrs`; the
            # previous `number_of_evlr` assignment silently created a
            # stray attribute and left the stored EVLR count stale.
            self.header.number_of_evlrs = len(self.evlrs)
            self.header.start_of_first_evlr = self.dest.tell()
            self.evlrs.write_to(self.dest, as_extended=True)

    def _write_updated_header(self) -> None:
        pos = self.dest.tell()
        self.dest.seek(0, io.SEEK_SET)
        # we don't want to rewrite the vlrs
        # as written vlrs may not have the exact
        # same number of bytes (e.g. if we remove
        # extra spurious null bytes)
        self.header.write_to(self.dest, write_vlrs=False)
        self.dest.seek(pos, io.SEEK_SET)

    def _create_laz_backend(
        self,
        laz_backend: Union[LazBackend, Iterable[LazBackend]] = (
            LazBackend.LazrsParallel,
            LazBackend.Lazrs,
        ),
    ) -> "LazrsAppender":
        """Try the given backend(s) in order, returning the first
        LazrsAppender that could be created.

        :raises PylasError: if no backend could be initialized
        """
        try:
            laz_backend = iter(laz_backend)
        except TypeError:
            laz_backend = (laz_backend,)

        last_error = None
        for backend in laz_backend:
            if backend == LazBackend.Laszip:
                raise PylasError("Laszip backend does not support appending")
            elif backend == LazBackend.LazrsParallel:
                try:
                    return LazrsAppender(self.dest, self.header, parallel=True)
                except Exception as e:
                    last_error = e
            elif backend == LazBackend.Lazrs:
                try:
                    return LazrsAppender(self.dest, self.header, parallel=False)
                except Exception as e:
                    last_error = e

        if last_error is not None:
            raise PylasError(f"Could not initialize a laz backend: {last_error}")
        else:
            # Plain string: the previous f-string had no placeholders.
            raise PylasError("No valid laz backend selected")

    def __enter__(self) -> "LasAppender":
        return self

    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        self.close()
156 | """ 157 | 158 | def __init__(self, dest: BinaryIO, header: LasHeader, parallel: bool) -> None: 159 | self.dest = dest 160 | self.offset_to_point_data = header.offset_to_point_data 161 | laszip_vlr = header.vlrs.get("LasZipVlr")[0] 162 | 163 | self.dest.seek(header.offset_to_point_data, io.SEEK_SET) 164 | decompressor = lazrs.LasZipDecompressor(self.dest, laszip_vlr.record_data) 165 | vlr = decompressor.vlr() 166 | number_of_complete_chunk = int( 167 | math.floor(header.point_count / vlr.chunk_size()) 168 | ) 169 | 170 | self.dest.seek(header.offset_to_point_data, io.SEEK_SET) 171 | chunk_table = lazrs.read_chunk_table(self.dest) 172 | if chunk_table is None: 173 | # The file does not have a chunk table 174 | # we cannot seek to the last chunk, so instead, we will 175 | # decompress points (which is slower) and build the chunk table 176 | # to write it later 177 | 178 | self.chunk_table = [] 179 | start_of_chunk = self.dest.tell() 180 | point_buf = bytearray(vlr.chunk_size() * vlr.item_size()) 181 | 182 | for _ in range(number_of_complete_chunk): 183 | decompressor.decompress_many(point_buf) 184 | pos = self.dest.tell() 185 | self.chunk_table.append(pos - start_of_chunk) 186 | start_of_chunk = pos 187 | else: 188 | self.chunk_table = chunk_table[:-1] 189 | idx_first_point_of_last_chunk = number_of_complete_chunk * vlr.chunk_size() 190 | decompressor.seek(idx_first_point_of_last_chunk) 191 | 192 | points_of_last_chunk = bytearray( 193 | (header.point_count % vlr.chunk_size()) * vlr.item_size() 194 | ) 195 | decompressor.decompress_many(points_of_last_chunk) 196 | 197 | self.dest.seek(header.offset_to_point_data, io.SEEK_SET) 198 | if parallel: 199 | self.compressor = lazrs.ParLasZipCompressor( 200 | self.dest, vlr 201 | ) # This overwrites old offset 202 | else: 203 | self.compressor = lazrs.LasZipCompressor( 204 | self.dest, vlr 205 | ) # This overwrites the old offset 206 | self.dest.seek(sum(self.chunk_table), io.SEEK_CUR) 207 | 
self.compressor.compress_many(points_of_last_chunk) 208 | 209 | def write_points(self, points: PackedPointRecord) -> None: 210 | points_bytes = np.frombuffer(points.array, np.uint8) 211 | self.compressor.compress_many(points_bytes) 212 | 213 | def done(self) -> None: 214 | # The chunk table written is at the good position 215 | # but it is incomplete (it's missing the chunk_table of 216 | # chunks before the one we appended) 217 | self.compressor.done() 218 | 219 | # So we update it 220 | self.dest.seek(self.offset_to_point_data, io.SEEK_SET) 221 | offset_to_chunk_table = int.from_bytes(self.dest.read(8), "little", signed=True) 222 | self.dest.seek(-8, io.SEEK_CUR) 223 | chunk_table = self.chunk_table + lazrs.read_chunk_table(self.dest) 224 | self.dest.seek(offset_to_chunk_table, io.SEEK_SET) 225 | lazrs.write_chunk_table(self.dest, chunk_table) 226 | -------------------------------------------------------------------------------- /pylas/lasdata.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import pathlib 3 | from typing import Union, Optional, List, Sequence, overload, BinaryIO 4 | 5 | import numpy as np 6 | 7 | from . import errors 8 | from .compression import LazBackend 9 | from .header import LasHeader 10 | from .laswriter import LasWriter 11 | from .point import record, dims, ExtraBytesParams, PointFormat 12 | from .point.dims import ScaledArrayView 13 | from .vlrs.vlrlist import VLRList 14 | 15 | logger = logging.getLogger(__name__) 16 | 17 | 18 | class LasData: 19 | """Class synchronizing all the moving parts of LAS files. 20 | 21 | It connects the point record, header, vlrs together. 22 | 23 | To access points dimensions using this class you have two possibilities 24 | 25 | .. 
class LasData:
    """Class synchronizing all the moving parts of LAS files.

    It connects the point record, header, vlrs together.

    To access points dimensions using this class you have two possibilities

    .. code:: python

        las = pylas.read('some_file.las')
        las.classification
        # or
        las['classification']
    """

    def __init__(
        self, header: LasHeader, points: Optional[record.PackedPointRecord] = None
    ) -> None:
        if points is None:
            points = record.PackedPointRecord.zeros(
                header.point_format, header.point_count
            )
        elif points.point_format != header.point_format:
            raise errors.PylasError("Incompatible Point Formats")
        # Stored through __dict__ to avoid triggering our own __setattr__.
        self.__dict__["_points"] = points
        self.points: record.PackedPointRecord
        self.header: LasHeader = header
        if header.version.minor >= 4:
            self.evlrs: Optional[VLRList] = VLRList()
        else:
            self.evlrs: Optional[VLRList] = None

    @property
    def x(self) -> ScaledArrayView:
        """Returns the scaled x positions of the points as doubles"""
        return ScaledArrayView(self.X, self.header.x_scale, self.header.x_offset)

    @x.setter
    def x(self, value) -> None:
        if len(value) > len(self.points):
            self.points.resize(len(value))
        self.x[:] = value

    @property
    def y(self) -> ScaledArrayView:
        """Returns the scaled y positions of the points as doubles"""
        return ScaledArrayView(self.Y, self.header.y_scale, self.header.y_offset)

    @y.setter
    def y(self, value) -> None:
        if len(value) > len(self.points):
            self.points.resize(len(value))
        self.y[:] = value

    @property
    def z(self) -> ScaledArrayView:
        """Returns the scaled z positions of the points as doubles"""
        return ScaledArrayView(self.Z, self.header.z_scale, self.header.z_offset)

    @z.setter
    def z(self, value) -> None:
        if len(value) > len(self.points):
            self.points.resize(len(value))
        self.z[:] = value

    @property
    def point_format(self) -> PointFormat:
        """Shortcut to get the point format"""
        return self.points.point_format

    @property
    def points(self) -> record.PackedPointRecord:
        """Returns the point record"""
        return self._points

    @points.setter
    def points(self, new_points: record.PackedPointRecord) -> None:
        if new_points.point_format != self._points.point_format:
            raise errors.IncompatibleDataFormat(
                "Cannot set points with a different point format, convert first"
            )
        self._points = new_points
        self.update_header()

    @property
    def vlrs(self) -> VLRList:
        return self.header.vlrs

    @vlrs.setter
    def vlrs(self, vlrs) -> None:
        self.header.vlrs = VLRList(vlrs)

    def add_extra_dim(self, params: ExtraBytesParams) -> None:
        """Adds a new extra dimension to the point record

        .. note::

            If you plan on adding multiple extra dimensions,
            prefer :meth:`.add_extra_dims` as it will
            save re-allocations and data copy

        Parameters
        ----------
        params : ExtraBytesParams
            parameters of the new extra dimension to add

        """
        self.add_extra_dims([params])

    def add_extra_dims(self, params: List[ExtraBytesParams]) -> None:
        """Add multiple extra dimensions at once

        Parameters
        ----------

        params: list of parameters of the new extra dimensions to add
        """
        self.header.add_extra_dims(params)
        new_point_record = record.PackedPointRecord.from_point_record(
            self.points, self.header.point_format
        )
        self.points = new_point_record

    def update_header(self) -> None:
        """Update the information stored in the header
        to be in sync with the actual data.

        This method is called automatically when you save a file using
        :meth:`pylas.lasdatas.base.LasBase.write`
        """
        self.header.point_format_id = self.points.point_format.id
        self.header.point_count = len(self.points)
        self.header.point_data_record_length = self.points.point_size

        if len(self.points) > 0:
            self.header.x_max = self.x.max()
            self.header.y_max = self.y.max()
            self.header.z_max = self.z.max()

            self.header.x_min = self.x.min()
            self.header.y_min = self.y.min()
            self.header.z_min = self.z.min()

            # NOTE(review): `counts` is aligned with the *unique* return
            # numbers present, not with return numbers 1..n — if some
            # return number is absent the counts may be misaligned in the
            # header. TODO confirm against the header setter's contract.
            unique, counts = np.unique(self.return_number, return_counts=True)
            self.header.number_of_points_by_return = counts

        if self.header.version.minor >= 4:
            if self.evlrs is not None:
                self.header.number_of_evlrs = len(self.evlrs)
            self.header.start_of_waveform_data_packet_record = 0
            # TODO
            # if len(self.vlrs.get("WktCoordinateSystemVlr")) == 1:
            #     self.header.global_encoding.wkt = 1
        else:
            self.header.number_of_evlrs = 0

    @overload
    def write(
        self,
        destination: str,
        laz_backend: Optional[Union[LazBackend, Sequence[LazBackend]]] = ...,
    ) -> None:
        ...

    @overload
    def write(
        self,
        destination: BinaryIO,
        do_compress: Optional[bool] = ...,
        laz_backend: Optional[Union[LazBackend, Sequence[LazBackend]]] = ...,
    ) -> None:
        ...

    def write(self, destination, do_compress=None, laz_backend=None):
        """Writes to a stream or file

        .. note::

            When destination is a string, it will be interpreted as the path were the file should be written to,
            and whether the file will be compressed depends on the extension used (case insensitive):

            - .laz -> compressed
            - .las -> uncompressed

            And the do_compress option will be ignored


        Parameters
        ----------
        destination: str or file object
            filename or stream to write to
        do_compress: bool, optional
            Flags to indicate if you want to compress the data
        laz_backend: optional, the laz backend to use
            By default, pylas detect available backends
        """
        if isinstance(destination, (str, pathlib.Path)):
            do_compress = pathlib.Path(destination).suffix.lower() == ".laz"

            with open(destination, mode="wb+") as out:
                self._write_to(out, do_compress=do_compress, laz_backend=laz_backend)
        else:
            self._write_to(
                destination, do_compress=do_compress, laz_backend=laz_backend
            )

    def _write_to(
        self,
        out_stream: BinaryIO,
        do_compress: Optional[bool] = None,
        laz_backend: Optional[Union[LazBackend, Sequence[LazBackend]]] = None,
    ) -> None:
        with LasWriter(
            out_stream,
            self.header,
            do_compress=do_compress,
            closefd=False,
            laz_backend=laz_backend,
        ) as writer:
            writer.write_points(self.points)
            if self.header.version.minor >= 4 and self.evlrs is not None:
                writer.write_evlrs(self.evlrs)

    def change_scaling(self, scales=None, offsets=None) -> None:
        """Re-scale the point coordinates, keeping the header in sync."""
        if scales is None:
            scales = self.header.scales
        if offsets is None:
            offsets = self.header.offsets

        record.apply_new_scaling(self, scales, offsets)

        self.header.scales = scales
        self.header.offsets = offsets

    def __getattr__(self, item):
        """Automatically called by Python when the attribute
        named 'item' is not found. We use this function to forward the call the
        point record. This is the mechanism used to allow the users to access
        the points dimensions directly through a LasData.

        Parameters
        ----------
        item: str
            name of the attribute, should be a dimension name

        Returns
        -------
        The requested dimension if it exists

        """
        try:
            return self.points[item]
        except ValueError:
            raise AttributeError(
                f"{self.__class__.__name__} object has no attribute '{item}'"
            ) from None

    def __setattr__(self, key, value):
        """This is called on every access to an attribute of the instance.
        Again we use this to forward the call the the points record

        But this time checking if the key is actually a dimension name
        so that an error is raised if the user tries to set a valid
        LAS dimension even if it is not present in the field.
        eg: user tries to set the red field of a file with point format 0:
        an error is raised

        """
        if key in self.point_format.dimension_names:
            self.points[key] = value
        elif key in dims.DIMENSIONS_TO_TYPE:
            raise ValueError(
                f"Point format {self.point_format} does not support {key} dimension"
            )
        else:
            super().__setattr__(key, value)

    def __getitem__(self, item):
        return self.points[item]

    def __setitem__(self, key, value):
        self.points[key] = value

    def __repr__(self) -> str:
        # BUGFIX: the format string was empty (`"".format(...)`), which
        # discarded every argument and always produced "".
        return "<LasData({}.{}, point fmt: {}, {} points, {} vlrs)>".format(
            self.header.version.major,
            self.header.version.minor,
            self.points.point_format,
            len(self.points),
            len(self.vlrs),
        )
import io
import mmap

from . import lasdata
from .header import LasHeader
from .point import record
from .typehints import PathLike

# Passing length 0 to mmap.mmap maps the whole file.
WHOLE_FILE = 0


class LasMMAP(lasdata.LasData):
    """Memory map a LAS file.
    It works like a regular LasData however the data is not actually read in memory,

    Access to dimensions are made directly from the file itself, changes made to the points
    are directly reflected in the mmap file.

    Vlrs cannot be modified.

    This can be useful if you want to be able to process a big LAS file

    .. note::
        A LAZ (compressed LAS) cannot be mmapped
    """

    def __init__(self, filename: PathLike) -> None:
        fileref = open(filename, mode="r+b")

        m = None
        try:
            m = mmap.mmap(fileref.fileno(), length=WHOLE_FILE, access=mmap.ACCESS_WRITE)
            header = LasHeader.read_from(m)
            if header.are_points_compressed:
                raise ValueError("Cannot mmap a compressed LAZ file")
        except Exception:
            # BUGFIX: previously the mapping and the file object were
            # leaked when mapping/header reading failed or when the file
            # turned out to be compressed.
            if m is not None:
                m.close()
            fileref.close()
            raise

        points_data = record.PackedPointRecord.from_buffer(
            m,
            header.point_format,
            count=header.point_count,
            offset=header.offset_to_point_data,
        )
        super().__init__(header=header, points=points_data)

        self.fileref, self.mmap = fileref, m
        self.mmap.seek(0, io.SEEK_SET)

    def close(self) -> None:
        # These need to be set to None, so that
        # mmap.close() does not give an error because
        # there are still exported pointers
        self._points = None
        self.mmap.close()
        self.fileref.close()

    def __enter__(self) -> "LasMMAP":
        return self

    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        self.close()
import abc
import io
import logging
from typing import Optional, BinaryIO, Iterable, Union

from . import errors
from .compression import LazBackend
from .header import LasHeader
from .lasdata import LasData
from .point import record
from .vlrs.known import LasZipVlr
from .vlrs.vlrlist import VLRList

try:
    import lazrs
except ModuleNotFoundError:
    pass

try:
    import laszip
except ModuleNotFoundError:
    pass

logger = logging.getLogger(__name__)


class LasReader:
    """The reader class handles LAS and LAZ via one of the supported backend"""

    def __init__(
        self,
        source: BinaryIO,
        closefd: bool = True,
        laz_backend: Optional[Union[LazBackend, Iterable[LazBackend]]] = None,
    ):
        self.closefd = closefd
        self.laz_backend = (
            LazBackend.detect_available() if laz_backend is None else laz_backend
        )
        self.header = LasHeader.read_from(source)

        if not self.header.are_points_compressed:
            self.point_source = UncompressedPointReader(
                source, self.header.point_format.size
            )
        else:
            if not self.laz_backend:
                raise errors.PylasError(
                    "No LazBackend selected, cannot decompress data"
                )
            self.point_source = self._create_laz_backend(source)
            if self.point_source is None:
                raise errors.PylasError(
                    "Data is compressed, but no LazBacked could be initialized"
                )

        self.points_read = 0

    def read_points(self, n: int) -> Optional[record.ScaleAwarePointRecord]:
        """Read n points from the file

        If there are no points left to read, returns None.

        Parameters
        ----------
        n: The number of points to read
            if n is less than 0, this function will read the remaining points
        """
        remaining = self.header.point_count - self.points_read
        if remaining <= 0:
            return None

        # Negative n means "everything that is left".
        n = remaining if n < 0 else min(n, remaining)

        raw = record.PackedPointRecord.from_buffer(
            self.point_source.read_n_points(n), self.header.point_format, n
        )
        scaled = record.ScaleAwarePointRecord(
            raw.array, raw.point_format, self.header.scales, self.header.offsets
        )
        self.points_read += n
        return scaled

    def read(self) -> LasData:
        """Reads all the points not read and returns a LasData object"""
        points = self.read_points(-1)
        if points is None:
            points = record.PackedPointRecord.empty(self.header.point_format)
        else:
            points = record.PackedPointRecord(points.array, points.point_format)

        las_data = LasData(header=self.header, points=points)
        if self.header.version.minor >= 4:
            stream = self.point_source.source
            if self.header.are_points_compressed and not stream.seekable():
                # We explicitly require seekable stream because we have to seek
                # past the chunk table of LAZ file
                raise errors.PylasError(
                    "source must be seekable, to read evlrs form LAZ file"
                )
            stream.seek(self.header.start_of_first_evlr, io.SEEK_SET)
            las_data.evlrs = self._read_evlrs(stream, seekable=True)

        return las_data

    def chunk_iterator(self, points_per_iteration: int) -> "PointChunkIterator":
        """Returns an iterator, that will read points by chunks
        of the requested size

        :param points_per_iteration: number of points to be read with each iteration
        :return:
        """
        return PointChunkIterator(self, points_per_iteration)

    def close(self) -> None:
        """closes the file object used by the reader"""
        if self.closefd:
            self.point_source.close()

    def _create_laz_backend(self, source) -> Optional["IPointReader"]:
        """Try each configured backend in turn, returning the first reader
        that could be constructed (or None if all of them failed)."""
        try:
            backends = iter(self.laz_backend)
        except TypeError:
            backends = (self.laz_backend,)

        laszip_vlr = self.header.vlrs.pop(self.header.vlrs.index("LasZipVlr"))
        for backend in backends:
            try:
                if not backend.is_available():
                    raise errors.PylasError(f"The '{backend}' is not available")

                if backend == LazBackend.LazrsParallel:
                    return LazrsPointReader(source, laszip_vlr, parallel=True)
                elif backend == LazBackend.Lazrs:
                    return LazrsPointReader(source, laszip_vlr, parallel=False)
                elif backend == LazBackend.Laszip:
                    return LaszipPointReader(source, self.header)
                else:
                    raise errors.PylasError("Unknown LazBackend: {}".format(backend))

            except errors.LazError as e:
                # Log and fall through to the next candidate backend.
                logger.error(e)

    def _read_evlrs(self, source, seekable=False) -> Optional[VLRList]:
        """Reads the EVLRs of the file, will fail if the file version
        does not support evlrs
        """
        should_read = (
            self.header.version.minor >= 4
            and self.points_read == self.header.point_count
        )
        if not should_read:
            return None
        if seekable:
            source.seek(self.header.start_of_first_evlr)
        return VLRList.read_from(source, self.header.number_of_evlrs, extended=True)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()


class PointChunkIterator:
    """Iterator yielding successive chunks of points from a LasReader."""

    def __init__(self, reader: LasReader, points_per_iteration: int) -> None:
        self.reader = reader
        self.points_per_iteration = points_per_iteration

    def __next__(self) -> record.ScaleAwarePointRecord:
        points = self.reader.read_points(self.points_per_iteration)
        if points is None:
            raise StopIteration
        return points

    def __iter__(self) -> "PointChunkIterator":
        return self
-> "PointChunkIterator": 182 | return self 183 | 184 | 185 | class IPointReader(abc.ABC): 186 | """The interface to be implemented by the class that actually reads 187 | points from as LAS/LAZ file so that the LasReader can use it. 188 | 189 | It is used to manipulate LAS/LAZ (with different LAZ backends) in the 190 | reader 191 | """ 192 | 193 | @abc.abstractmethod 194 | def read_n_points(self, n: int) -> bytearray: 195 | ... 196 | 197 | @abc.abstractmethod 198 | def close(self) -> None: 199 | ... 200 | 201 | 202 | class UncompressedPointReader(IPointReader): 203 | """Implementation of IPointReader for the simple uncompressed case""" 204 | 205 | def __init__(self, source, point_size) -> None: 206 | self.source = source 207 | self.point_size = point_size 208 | 209 | def read_n_points(self, n: int) -> bytearray: 210 | try: 211 | readinto = self.source.readinto 212 | except AttributeError: 213 | data = bytearray(self.source.read(n * self.point_size)) 214 | else: 215 | data = bytearray(n * self.point_size) 216 | readinto(data) 217 | 218 | return data 219 | 220 | def close(self): 221 | self.source.close() 222 | 223 | 224 | class LaszipPointReader(IPointReader): 225 | """Implementation for the laszip backend""" 226 | 227 | def __init__(self, source: BinaryIO, header: LasHeader) -> None: 228 | self.source = source 229 | self.source.seek(0) 230 | self.unzipper = laszip.LasUnZipper(source) 231 | unzipper_header = self.unzipper.header 232 | assert unzipper_header.point_data_format == header.point_format.id 233 | assert unzipper_header.point_data_record_length == header.point_format.size 234 | self.point_size = header.point_format.size 235 | 236 | def read_n_points(self, n: int) -> bytearray: 237 | points_data = bytearray(n * self.point_size) 238 | self.unzipper.decompress_into(points_data) 239 | return points_data 240 | 241 | def close(self) -> None: 242 | self.source.close() 243 | 244 | 245 | class LazrsPointReader(IPointReader): 246 | """Implementation for the laz-rs 
backend, supports single-threaded decompression 247 | as well as multi-threaded decompression 248 | """ 249 | 250 | def __init__(self, source, laszip_vlr: LasZipVlr, parallel: bool) -> None: 251 | self.source = source 252 | self.vlr = lazrs.LazVlr(laszip_vlr.record_data) 253 | if parallel: 254 | self.decompressor = lazrs.ParLasZipDecompressor( 255 | source, laszip_vlr.record_data 256 | ) 257 | else: 258 | self.decompressor = lazrs.LasZipDecompressor(source, laszip_vlr.record_data) 259 | 260 | def read_n_points(self, n: int) -> bytearray: 261 | point_bytes = bytearray(n * self.vlr.item_size()) 262 | self.decompressor.decompress_many(point_bytes) 263 | return point_bytes 264 | 265 | def close(self) -> None: 266 | self.source.close() 267 | -------------------------------------------------------------------------------- /pylas/laswriter.py: -------------------------------------------------------------------------------- 1 | import abc 2 | import io 3 | import logging 4 | from copy import copy 5 | from typing import BinaryIO, Optional, Union, Iterable 6 | 7 | import numpy as np 8 | 9 | from .compression import LazBackend 10 | from .errors import PylasError 11 | from .header import LasHeader 12 | from .point import dims 13 | from .point.format import PointFormat 14 | from .point.record import PackedPointRecord 15 | from .vlrs.known import LasZipVlr 16 | from .vlrs.vlrlist import VLRList 17 | 18 | logger = logging.getLogger(__name__) 19 | 20 | try: 21 | import lazrs 22 | except ModuleNotFoundError: 23 | pass 24 | 25 | try: 26 | import laszip 27 | except ModuleNotFoundError: 28 | pass 29 | 30 | 31 | class LasWriter: 32 | """ 33 | Allows to write a complete LAS/LAZ file to the destination. 
34 | """ 35 | 36 | def __init__( 37 | self, 38 | dest: BinaryIO, 39 | header: LasHeader, 40 | do_compress: Optional[bool] = None, 41 | laz_backend: Optional[Union[LazBackend, Iterable[LazBackend]]] = None, 42 | closefd: bool = True, 43 | ) -> None: 44 | """ 45 | Parameters 46 | ---------- 47 | dest: 48 | file object where the LAS/LAZ will be written 49 | 50 | header: 51 | The header of the file to be written 52 | 53 | do_compress: optional bool 54 | whether the file data should be written as LAS (uncompressed) 55 | or LAZ (compressed). 56 | If None, the file won't be compressed, unless a laz_backend is provided 57 | 58 | laz_backend: optional LazBackend or sequence of LazBackend 59 | The LazBackend to use (or if it is a sequence the LazBackend to try) 60 | for the compression 61 | 62 | closefd: default True 63 | should the `dest` be closed when the writer is closed 64 | """ 65 | self.closefd = closefd 66 | self.header = copy(header) 67 | self.header.partial_reset() 68 | self.header.maxs = [np.finfo("f8").min] * 3 69 | self.header.mins = [np.finfo("f8").max] * 3 70 | 71 | self.dest = dest 72 | self.done = False 73 | 74 | dims.raise_if_version_not_compatible_with_fmt( 75 | header.point_format.id, str(self.header.version) 76 | ) 77 | 78 | if laz_backend is not None: 79 | if do_compress is None: 80 | do_compress = True 81 | self.laz_backend = laz_backend 82 | else: 83 | if do_compress is None: 84 | do_compress = False 85 | self.laz_backend = LazBackend.detect_available() 86 | self.header.are_points_compressed = do_compress 87 | 88 | if do_compress: 89 | self.point_writer: IPointWriter = self._create_laz_backend(self.laz_backend) 90 | else: 91 | self.point_writer: IPointWriter = UncompressedPointWriter(self.dest) 92 | 93 | self.point_writer.write_initial_header_and_vlrs(self.header) 94 | 95 | def write_points(self, points: PackedPointRecord) -> None: 96 | if not points: 97 | return 98 | 99 | if self.done: 100 | raise PylasError("Cannot write points anymore") 101 | 102 | 
if points.point_format != self.header.point_format: 103 | raise PylasError("Incompatible point formats") 104 | 105 | self.header.update(points) 106 | self.point_writer.write_points(points) 107 | 108 | def write_evlrs(self, evlrs: VLRList) -> None: 109 | if self.header.version.minor < 4: 110 | raise PylasError( 111 | "EVLRs are not supported on files with version less than 1.4" 112 | ) 113 | 114 | if len(evlrs) > 0: 115 | self.point_writer.done() 116 | self.done = True 117 | self.header.number_of_evlrs = len(evlrs) 118 | self.header.start_of_first_evlr = self.dest.tell() 119 | evlrs.write_to(self.dest, as_extended=True) 120 | 121 | def close(self) -> None: 122 | if self.point_writer is not None: 123 | if not self.done: 124 | self.point_writer.done() 125 | self.point_writer.write_updated_header(self.header) 126 | if self.closefd: 127 | self.dest.close() 128 | 129 | def _create_laz_backend( 130 | self, laz_backends: Union[LazBackend, Iterable[LazBackend]] 131 | ) -> "IPointWriter": 132 | try: 133 | laz_backends = iter(laz_backends) 134 | except TypeError: 135 | laz_backends = (laz_backends,) 136 | 137 | last_error: Optional[Exception] = None 138 | for backend in laz_backends: 139 | try: 140 | if not backend.is_available(): 141 | raise PylasError(f"The '{backend}' is not available") 142 | 143 | if backend == LazBackend.Laszip: 144 | return LaszipPointWriter(self.dest, self.header) 145 | elif backend == LazBackend.LazrsParallel: 146 | return LazrsPointWriter( 147 | self.dest, self.header.point_format, parallel=True 148 | ) 149 | elif backend == LazBackend.Lazrs: 150 | return LazrsPointWriter( 151 | self.dest, self.header.point_format, parallel=False 152 | ) 153 | else: 154 | raise PylasError("Unknown LazBacked: {}".format(backend)) 155 | except Exception as e: 156 | logger.error(e) 157 | last_error = e 158 | 159 | if last_error is not None: 160 | raise PylasError("No LazBackend selected, cannot compress") 161 | else: 162 | raise PylasError(f"No LazBackend could be 
initialized: {last_error}") 163 | 164 | def __enter__(self): 165 | return self 166 | 167 | def __exit__(self, exc_type, exc_val, exc_tb): 168 | self.close() 169 | 170 | 171 | class IPointWriter(abc.ABC): 172 | """Interface to be implemented by the actual 173 | PointWriter backend 174 | 175 | """ 176 | 177 | @property 178 | @abc.abstractmethod 179 | def destination(self) -> BinaryIO: 180 | ... 181 | 182 | def write_initial_header_and_vlrs(self, header: LasHeader) -> None: 183 | header.write_to(self.destination) 184 | 185 | @abc.abstractmethod 186 | def write_points(self, points: PackedPointRecord) -> None: 187 | ... 188 | 189 | @abc.abstractmethod 190 | def done(self) -> None: 191 | ... 192 | 193 | def write_updated_header(self, header): 194 | self.destination.seek(0, io.SEEK_SET) 195 | header.write_to(self.destination) 196 | 197 | 198 | class UncompressedPointWriter(IPointWriter): 199 | """ 200 | Writing points in the simple uncompressed case. 201 | """ 202 | 203 | def __init__(self, dest: BinaryIO) -> None: 204 | self.dest = dest 205 | 206 | @property 207 | def destination(self) -> BinaryIO: 208 | return self.dest 209 | 210 | def write_points(self, points: PackedPointRecord) -> None: 211 | self.dest.write(points.memoryview()) 212 | 213 | def done(self) -> None: 214 | pass 215 | 216 | 217 | class LaszipPointWriter(IPointWriter): 218 | """ 219 | Compressed point writer using laszip backend 220 | """ 221 | 222 | def __init__(self, dest: BinaryIO, header: LasHeader) -> None: 223 | self.dest = dest 224 | header.set_compressed(False) 225 | with io.BytesIO() as tmp: 226 | header.write_to(tmp) 227 | header_bytes = tmp.getvalue() 228 | 229 | self.zipper = laszip.LasZipper(self.dest, header_bytes) 230 | zipper_header = self.zipper.header 231 | assert zipper_header.point_data_format == header.point_format.id 232 | assert zipper_header.point_data_record_length == header.point_format.size 233 | 234 | header.set_compressed(True) 235 | 236 | @property 237 | def destination(self) 
-> BinaryIO: 238 | return self.dest 239 | 240 | def write_points(self, points: PackedPointRecord) -> None: 241 | points_bytes = np.frombuffer(points.array, np.uint8) 242 | self.zipper.compress(points_bytes) 243 | 244 | def done(self) -> None: 245 | self.zipper.done() 246 | 247 | def write_initial_header_and_vlrs(self, header: LasHeader) -> None: 248 | # Do nothing as creating the laszip zipper writes the header and vlrs 249 | pass 250 | 251 | def write_updated_header(self, header: LasHeader) -> None: 252 | if header.number_of_evlrs != 0: 253 | # We wrote some evlrs, we have to update the header 254 | self.dest.seek(0, io.SEEK_SET) 255 | file_header = LasHeader.read_from(self.dest) 256 | end_of_header_pos = self.dest.tell() 257 | file_header.number_of_evlrs = header.number_of_evlrs 258 | file_header.start_of_first_evlr = header.start_of_first_evlr 259 | self.dest.seek(0, io.SEEK_SET) 260 | file_header.write_to(self.dest) 261 | assert self.dest.tell() == end_of_header_pos 262 | 263 | 264 | class LazrsPointWriter(IPointWriter): 265 | """ 266 | Compressed point writer using lazrs backend 267 | """ 268 | 269 | def __init__( 270 | self, dest: BinaryIO, point_format: PointFormat, parallel: bool 271 | ) -> None: 272 | self.dest = dest 273 | self.vlr = lazrs.LazVlr.new_for_compression( 274 | point_format.id, point_format.num_extra_bytes 275 | ) 276 | self.parallel = parallel 277 | self.compressor: Optional[ 278 | Union[lazrs.ParLasZipCompressor, lazrs.LasZipCompressor] 279 | ] = None 280 | 281 | def write_initial_header_and_vlrs(self, header: LasHeader) -> None: 282 | laszip_vlr = LasZipVlr(self.vlr.record_data()) 283 | header.vlrs.append(laszip_vlr) 284 | super().write_initial_header_and_vlrs(header) 285 | # We have to initialize our compressor here 286 | # because on init, it writes the offset to chunk table 287 | # so the header and vlrs have to be written 288 | if self.parallel: 289 | self.compressor = lazrs.ParLasZipCompressor(self.dest, self.vlr) 290 | else: 291 | 
self.compressor = lazrs.LasZipCompressor(self.dest, self.vlr) 292 | 293 | @property 294 | def destination(self) -> BinaryIO: 295 | return self.dest 296 | 297 | def write_points(self, points: PackedPointRecord) -> None: 298 | assert ( 299 | self.compressor is not None 300 | ), "Trying to write points without having written header" 301 | points_bytes = np.frombuffer(points.array, np.uint8) 302 | self.compressor.compress_many(points_bytes) 303 | 304 | def done(self) -> None: 305 | if self.compressor is not None: 306 | self.compressor.done() 307 | -------------------------------------------------------------------------------- /pylas/lib.py: -------------------------------------------------------------------------------- 1 | """ 'Entry point' of the library, Contains the various functions meant to be 2 | used directly by a user 3 | """ 4 | import copy 5 | import io 6 | import logging 7 | import os 8 | from pathlib import Path 9 | from typing import Union, Optional 10 | 11 | from .compression import LazBackend 12 | from .errors import PylasError 13 | from .header import LasHeader, Version 14 | from .lasappender import LasAppender 15 | from .lasdata import LasData 16 | from .lasmmap import LasMMAP 17 | from .lasreader import LasReader 18 | from .laswriter import LasWriter 19 | from .point import dims, record, PointFormat 20 | 21 | logger = logging.getLogger(__name__) 22 | 23 | 24 | def open_las( 25 | source, 26 | mode="r", 27 | closefd=True, 28 | laz_backend=None, 29 | header=None, 30 | do_compress=None, 31 | ) -> Union[LasReader, LasWriter, LasAppender]: 32 | """The pylas.open opens a LAS/LAZ file in one of the 3 supported 33 | mode: 34 | 35 | - "r" => Reading => a :class:`pylas.LasReader` will be returned 36 | - "w" => Writing => a :class:`pylas.LasWriter` will be returned 37 | - "a" => Appending => a :class:`pylas.LasAppender` will be returned 38 | 39 | 40 | When opening a file in 'w' mode, a header (:class:`pylas.LasHeader`) 41 | is required 42 | 43 | >>> with 
open_las('pylastests/simple.las') as f: 44 | ... print(f.header.point_format.id) 45 | 3 46 | 47 | 48 | >>> f = open('pylastests/simple.las', mode='rb') 49 | >>> with open_las(f,closefd=False) as flas: 50 | ... print(flas.header) 51 | )> 52 | >>> f.closed 53 | False 54 | >>> f.close() 55 | >>> f.closed 56 | True 57 | 58 | 59 | >>> f = open('pylastests/simple.las', mode='rb') 60 | >>> with open_las(f) as flas: 61 | ... las = flas.read() 62 | >>> f.closed 63 | True 64 | 65 | Parameters 66 | ---------- 67 | source: str or bytes or io.BytesIO 68 | if source is a str it must be a filename 69 | 70 | mode: Optional, the mode to open the file: 71 | - "r" for reading (default) 72 | - "w" for writing 73 | - "a" for appending 74 | 75 | laz_backend: Optional, the LAZ backend to use to handle decompression/comression 76 | 77 | By default available backends are detected, see LazBackend to see the 78 | preference order when multiple backends are available 79 | 80 | header: The header to use when opening in write mode. 81 | 82 | do_compress: optional, bool, only meaningful in writing mode: 83 | - None (default) guess if compression is needed using the file extension 84 | or if a laz_backend was explicitely provided 85 | - True compresses the file 86 | - False do not compress the file 87 | 88 | closefd: optional, bool, True by default 89 | Whether the stream/file object shall be closed, this only work 90 | when using open_las in a with statement. An exception is raised if 91 | closefd is specified and the source is a filename 92 | """ 93 | if mode == "r": 94 | if header is not None: 95 | raise PylasError( 96 | "header argument is not used when opening in read mode, " 97 | "did you meant to open in write mode ?" 98 | ) 99 | if do_compress is not None: 100 | raise PylasError( 101 | "do_compress argument is not used when opening in read mode, " 102 | "did you meant to open in write mode ?" 
103 | ) 104 | if isinstance(source, (str, Path)): 105 | stream = open(source, mode="rb", closefd=closefd) 106 | elif isinstance(source, bytes): 107 | stream = io.BytesIO(source) 108 | else: 109 | stream = source 110 | return LasReader(stream, closefd=closefd, laz_backend=laz_backend) 111 | elif mode == "w": 112 | if header is None: 113 | raise ValueError("A header is needed when opening a file for writing") 114 | 115 | if isinstance(source, (str, Path)): 116 | if do_compress is None: 117 | do_compress = os.path.splitext(source)[1].lower() == ".laz" 118 | stream = open(source, mode="wb+", closefd=closefd) 119 | elif isinstance(source, bytes): 120 | stream = io.BytesIO(source) 121 | else: 122 | assert source.seekable() 123 | stream = source 124 | 125 | return LasWriter( 126 | stream, 127 | header=header, 128 | do_compress=do_compress, 129 | laz_backend=laz_backend, 130 | closefd=closefd, 131 | ) 132 | elif mode == "a": 133 | if isinstance(source, (str, Path)): 134 | stream = open(source, mode="rb+", closefd=closefd) 135 | elif isinstance(source, bytes): 136 | stream = io.BytesIO(source) 137 | else: 138 | stream = source 139 | return LasAppender(stream, closefd=closefd, laz_backend=laz_backend) 140 | 141 | else: 142 | raise ValueError(f"Unknown mode '{mode}'") 143 | 144 | 145 | def read_las(source, closefd=True, laz_backend=LazBackend.detect_available()): 146 | """Entry point for reading las data in pylas 147 | 148 | Reads the whole file into memory. 149 | 150 | >>> las = read_las("pylastests/simple.las") 151 | >>> las.classification 152 | 153 | 154 | Parameters 155 | ---------- 156 | source : str or io.BytesIO 157 | The source to read data from 158 | 159 | laz_backend: Optional, the backend to use when the file is as LAZ file. 160 | By default pylas will find the backend to use by himself. 
161 | Use if you wan a specific backend to be used 162 | 163 | closefd: bool 164 | if True and the source is a stream, the function will close it 165 | after it is done reading 166 | 167 | 168 | Returns 169 | ------- 170 | pylas.lasdatas.base.LasBase 171 | The object you can interact with to get access to the LAS points & VLRs 172 | """ 173 | with open_las(source, closefd=closefd, laz_backend=laz_backend) as reader: 174 | return reader.read() 175 | 176 | 177 | def mmap_las(filename): 178 | """MMap a file, much like laspy did""" 179 | return LasMMAP(filename) 180 | 181 | 182 | def create_las( 183 | *, 184 | point_format: Optional[Union[int, PointFormat]] = None, 185 | file_version: Optional[Union[str, Version]] = None, 186 | ): 187 | """Function to create a new empty las data object 188 | 189 | .. note:: 190 | 191 | If you provide both point_format and file_version 192 | an exception will be raised if they are not compatible 193 | 194 | >>> las = create_las(point_format=6,file_version="1.2") 195 | Traceback (most recent call last): 196 | ... 197 | pylas.errors.PylasError: Point format 6 is not compatible with file version 1.2 198 | 199 | 200 | If you provide only the point_format the file_version will automatically 201 | selected for you. 
202 | 203 | >>> las = create_las(point_format=0) 204 | >>> las.header.version == '1.2' 205 | True 206 | 207 | >>> las = create_las(point_format=PointFormat(6)) 208 | >>> las.header.version == '1.4' 209 | True 210 | 211 | 212 | Parameters 213 | ---------- 214 | point_format: 215 | The point format you want the created file to have 216 | 217 | file_version: 218 | The las version you want the created las to have 219 | 220 | Returns 221 | ------- 222 | pylas.lasdatas.base.LasBase 223 | A new las data object 224 | 225 | """ 226 | header = LasHeader(point_format=point_format, version=file_version) 227 | return LasData(header=header) 228 | 229 | 230 | def convert(source_las, *, point_format_id=None, file_version=None): 231 | """Converts a Las from one point format to another 232 | Automatically upgrades the file version if source file version is not compatible with 233 | the new point_format_id 234 | 235 | 236 | convert to point format 0 237 | 238 | >>> las = read_las('pylastests/simple.las') 239 | >>> las.header.version 240 | Version(major=1, minor=2) 241 | >>> las = convert(las, point_format_id=0) 242 | >>> las.header.point_format.id 243 | 0 244 | >>> str(las.header.version) 245 | '1.2' 246 | 247 | convert to point format 6, which need version >= 1.4 248 | then convert back to point format 0, version is not downgraded 249 | 250 | >>> las = read_las('pylastests/simple.las') 251 | >>> str(las.header.version) 252 | '1.2' 253 | >>> las = convert(las, point_format_id=6) 254 | >>> las.header.point_format.id 255 | 6 256 | >>> str(las.header.version) 257 | '1.4' 258 | >>> las = convert(las, point_format_id=0) 259 | >>> str(las.header.version) 260 | '1.4' 261 | 262 | an exception is raised if the requested point format is not compatible 263 | with the file version 264 | 265 | >>> las = read_las('pylastests/simple.las') 266 | >>> convert(las, point_format_id=6, file_version='1.2') 267 | Traceback (most recent call last): 268 | ... 
269 | pylas.errors.PylasError: Point format 6 is not compatible with file version 1.2 270 | 271 | Parameters 272 | ---------- 273 | source_las : pylas.lasdatas.base.LasBase 274 | The source data to be converted 275 | 276 | point_format_id : int, optional 277 | The new point format id (the default is None, which won't change the source format id) 278 | 279 | file_version : str, optional, 280 | The new file version. None by default which means that the file_version 281 | may be upgraded for compatibility with the new point_format. The file version will not 282 | be downgraded. 283 | 284 | Returns 285 | ------- 286 | pylas.lasdatas.base.LasBase 287 | """ 288 | if point_format_id is None: 289 | point_format_id = source_las.point_format.id 290 | 291 | if file_version is None: 292 | file_version = max( 293 | str(source_las.header.version), 294 | dims.min_file_version_for_point_format(point_format_id), 295 | ) 296 | else: 297 | file_version = str(file_version) 298 | dims.raise_if_version_not_compatible_with_fmt(point_format_id, file_version) 299 | 300 | version = Version.from_str(file_version) 301 | 302 | point_format = PointFormat(point_format_id) 303 | point_format.dimensions.extend(source_las.point_format.extra_dimensions) 304 | 305 | header = copy.deepcopy(source_las.header) 306 | header.set_version_and_point_format(version, point_format) 307 | 308 | if source_las.evlrs is not None: 309 | evlrs = source_las.evlrs.copy() 310 | else: 311 | evlrs = None 312 | 313 | points = record.PackedPointRecord.from_point_record( 314 | source_las.points, header.point_format 315 | ) 316 | las = LasData(header=header, points=points) 317 | 318 | if file_version < "1.4" and evlrs is not None and evlrs: 319 | logger.warning( 320 | "The source contained {} EVLRs," 321 | " they will be lost as version {} doest not support them".format( 322 | len(evlrs), file_version 323 | ) 324 | ) 325 | else: 326 | las.evlrs = evlrs 327 | 328 | return las 329 | 330 | 331 | def write_then_read_again( 332 | 
las, do_compress=False, laz_backend=LazBackend.detect_available() 333 | ): 334 | """writes the given las into memory using BytesIO and 335 | reads it again, returning the newly read file. 336 | 337 | Mostly used for testing purposes, without having to write to disk 338 | """ 339 | out = io.BytesIO() 340 | las.write(out, do_compress=do_compress, laz_backend=laz_backend) 341 | out.seek(0) 342 | return read_las(out) 343 | -------------------------------------------------------------------------------- /pylas/lib.pyi: -------------------------------------------------------------------------------- 1 | from typing import Union, BinaryIO, Iterable, Optional, overload, Literal 2 | 3 | from . import LasWriter, PointFormat 4 | from .compression import LazBackend 5 | from .header import LasHeader 6 | from .lasappender import LasAppender 7 | from .lasdata import LasData 8 | from .lasmmap import LasMMAP 9 | from .lasreader import LasReader 10 | from .typehints import PathLike 11 | 12 | LazBackend = LazBackend 13 | @overload 14 | def open_las( 15 | source: PathLike, 16 | mode: Literal["r"] = ..., 17 | laz_backend: Union[LazBackend, Iterable[LazBackend]] = ..., 18 | ) -> LasReader: ... 19 | @overload 20 | def open_las( 21 | source: BinaryIO, 22 | mode: Literal["r"] = ..., 23 | closefd: bool = ..., 24 | laz_backend: Union[LazBackend, Iterable[LazBackend]] = ..., 25 | ) -> LasReader: ... 26 | @overload 27 | def open_las( 28 | source: PathLike, 29 | mode: Literal["w"], 30 | header: LasHeader, 31 | do_compress: Optional[bool] = ..., 32 | laz_backend: Union[LazBackend, Iterable[LazBackend]] = ..., 33 | ) -> LasWriter: ... 34 | @overload 35 | def open_las( 36 | source: BinaryIO, 37 | mode: Literal["w"], 38 | header: LasHeader, 39 | do_compress: Optional[bool] = ..., 40 | closefd: bool = ..., 41 | laz_backend: Union[LazBackend, Iterable[LazBackend]] = ..., 42 | ) -> LasWriter: ... 
43 | @overload 44 | def open_las( 45 | source: PathLike, 46 | mode: Literal["a"], 47 | laz_backend: Union[LazBackend, Iterable[LazBackend]] = ..., 48 | ) -> LasAppender: ... 49 | @overload 50 | def open_las( 51 | source: BinaryIO, 52 | mode: Literal["a"], 53 | closefd: bool = ..., 54 | laz_backend: Union[LazBackend, Iterable[LazBackend]] = ..., 55 | ) -> LasAppender: ... 56 | def read_las( 57 | source: Union[BinaryIO, PathLike], 58 | closefd: bool = True, 59 | laz_backend: Union[ 60 | LazBackend, Iterable[LazBackend] 61 | ] = LazBackend.detect_available(), 62 | ) -> LasData: ... 63 | def mmap_las(filename: PathLike) -> LasMMAP: ... 64 | def merge_las(las_files: Union[Iterable[LasData], LasData]) -> LasData: ... 65 | def create_las( 66 | *, point_format: Union[int, PointFormat] = 0, file_version: Optional[str] = 0 67 | ) -> LasData: ... 68 | def convert( 69 | source_las: LasData, 70 | *, 71 | point_format_id: Optional[int] = ..., 72 | file_version: Optional[str] = ... 73 | ) -> LasData: ... 74 | def create_from_header(header: LasHeader) -> LasData: ... 75 | def write_then_read_again( 76 | las: LasData, do_compress: bool = ..., laz_backend: LazBackend = ... 77 | ) -> LasData: ... 78 | -------------------------------------------------------------------------------- /pylas/point/__init__.py: -------------------------------------------------------------------------------- 1 | from .dims import DimensionKind, DimensionInfo 2 | from .format import PointFormat, ExtraBytesParams 3 | -------------------------------------------------------------------------------- /pylas/point/format.py: -------------------------------------------------------------------------------- 1 | from itertools import zip_longest 2 | from typing import Optional, Iterable 3 | 4 | import numpy as np 5 | 6 | from . 
import dims 7 | from ..errors import PylasError 8 | 9 | 10 | class ExtraBytesParams: 11 | """All parameters needed to create extra bytes""" 12 | 13 | def __init__( 14 | self, 15 | name: str, 16 | type: str, 17 | description: str = "", 18 | offsets: Optional[np.ndarray] = None, 19 | scales: Optional[np.ndarray] = None, 20 | ) -> None: 21 | self.name = name 22 | """ The name of the extra dimension """ 23 | self.type = type 24 | """ The type of the extra dimension """ 25 | self.description = description 26 | """ A description of the extra dimension """ 27 | self.offsets = offsets 28 | """ The offsets to use if its a 'scaled dimension', can be none """ 29 | self.scales = scales 30 | """ The scales to use if its a 'scaled dimension', can be none """ 31 | 32 | 33 | class PointFormat: 34 | """Class that contains the informations about the dimensions that forms a PointFormat. 35 | 36 | A PointFormat has 'standard' dimensions (dimensions defined in the LAS standard, each 37 | point format has its set of dimensions), but it can also have extra (non-standard) dimensions 38 | defined by the user) 39 | 40 | >>> fmt = PointFormat(3) 41 | >>> all(dim.is_standard for dim in fmt.dimensions) 42 | True 43 | >>> dim = fmt.dimension_by_name("classification") # or fmt["classification"] 44 | >>> dim.max 45 | 31 46 | >>> dim.min 47 | 0 48 | >>> dim.num_bits 49 | 5 50 | 51 | """ 52 | 53 | def __init__( 54 | self, 55 | point_format_id: int, 56 | ): 57 | """ 58 | Parameters 59 | ---------- 60 | point_format_id: int 61 | point format id 62 | """ 63 | self.id = point_format_id 64 | self.dimensions = [] 65 | composed_dims = dims.COMPOSED_FIELDS[self.id] 66 | for dim_name in dims.ALL_POINT_FORMATS_DIMENSIONS[self.id]: 67 | try: 68 | sub_fields = composed_dims[dim_name] 69 | except KeyError: 70 | dimension = dims.DimensionInfo.from_type_str( 71 | dim_name, dims.DIMENSIONS_TO_TYPE[dim_name], is_standard=True 72 | ) 73 | self.dimensions.append(dimension) 74 | else: 75 | for sub_field in sub_fields: 
76 | dimension = dims.DimensionInfo.from_bitmask( 77 | sub_field.name, sub_field.mask, is_standard=True 78 | ) 79 | self.dimensions.append(dimension) 80 | 81 | @property 82 | def standard_dimensions(self) -> Iterable[dims.DimensionInfo]: 83 | """Returns an iterable of the standard dimensions 84 | 85 | >>> fmt = PointFormat(0) 86 | >>> standard_dims = list(fmt.standard_dimensions) 87 | >>> len(standard_dims) 88 | 15 89 | >>> standard_dims[4].name 90 | 'return_number' 91 | 92 | 93 | """ 94 | return (dim for dim in self.dimensions if dim.is_standard) 95 | 96 | @property 97 | def extra_dimensions(self) -> Iterable[dims.DimensionInfo]: 98 | return (dim for dim in self.dimensions if dim.is_standard is False) 99 | 100 | @property 101 | def dimension_names(self) -> Iterable[str]: 102 | """Returns the names of the dimensions contained in the point format""" 103 | return (dim.name for dim in self.dimensions) 104 | 105 | @property 106 | def standard_dimension_names(self) -> Iterable[str]: 107 | """Returns the names of the extra dimensions in this point format""" 108 | return (dim.name for dim in self.standard_dimensions) 109 | 110 | @property 111 | def extra_dimension_names(self) -> Iterable[str]: 112 | """Returns the names of the extra dimensions in this point format""" 113 | return (dim.name for dim in self.extra_dimensions) 114 | 115 | @property 116 | def size(self) -> int: 117 | """Returns the number of bytes (standard + extra) a point takes 118 | 119 | >>> PointFormat(3).size 120 | 34 121 | 122 | >>> fmt = PointFormat(3) 123 | >>> fmt.add_extra_dimension(ExtraBytesParams("codification", "uint64")) 124 | >>> fmt.size 125 | 42 126 | """ 127 | return int(sum(dim.num_bits for dim in self.dimensions) // 8) 128 | 129 | @property 130 | def num_standard_bytes(self) -> int: 131 | """Returns the number of bytes used by standard dims 132 | 133 | >>> fmt = PointFormat(3) 134 | >>> fmt.add_extra_dimension(ExtraBytesParams("codification", "uint64")) 135 | >>> fmt.num_standard_bytes 
136 | 34 137 | """ 138 | return int(sum(dim.num_bits for dim in self.standard_dimensions) // 8) 139 | 140 | @property 141 | def num_extra_bytes(self) -> int: 142 | """Returns the number of extra bytes 143 | 144 | >>> fmt = PointFormat(3) 145 | >>> fmt.add_extra_dimension(ExtraBytesParams("codification", "uint64")) 146 | >>> fmt.num_extra_bytes 147 | 8 148 | """ 149 | return int(sum(dim.num_bits for dim in self.extra_dimensions) // 8) 150 | 151 | @property 152 | def has_waveform_packet(self): 153 | """Returns True if the point format has waveform packet dimensions""" 154 | dimensions = set(self.dimension_names) 155 | return all(name in dimensions for name in dims.WAVEFORM_FIELDS_NAMES) 156 | 157 | def dimension_by_name(self, name: str) -> dims.DimensionInfo: 158 | """Returns the dimension info for the dimension by name 159 | 160 | ValueError is raised if the dimension does not exist un the point format 161 | 162 | >>> info = PointFormat(2).dimension_by_name('number_of_returns') 163 | >>> info.name == 'number_of_returns' 164 | True 165 | >>> info.num_bits == 3 166 | True 167 | 168 | 169 | >>> info = PointFormat(2).dimension_by_name('gps_time') 170 | Traceback (most recent call last): 171 | ... 
172 | ValueError: Dimension 'gps_time' does not exist 173 | """ 174 | for dim in self.dimensions: 175 | if dim.name == name: 176 | return dim 177 | raise ValueError(f"Dimension '{name}' does not exist") 178 | 179 | def add_extra_dimension(self, param: ExtraBytesParams) -> None: 180 | """Add an extra, user-defined dimension""" 181 | dim_info = dims.DimensionInfo.from_type_str( 182 | param.name, 183 | param.type, 184 | is_standard=False, 185 | description=param.description, 186 | offsets=param.offsets, 187 | scales=param.scales, 188 | ) 189 | if ( 190 | dim_info.num_elements > 3 191 | and dim_info.kind != dims.DimensionKind.UnsignedInteger 192 | ): 193 | raise PylasError("Extra Dimensions do not support more than 3 elements") 194 | self.dimensions.append(dim_info) 195 | 196 | def dtype(self): 197 | """Returns the numpy.dtype used to store the point records in a numpy array 198 | 199 | .. note:: 200 | 201 | The dtype corresponds to the dtype with sub_fields *packed* into their 202 | composed fields 203 | 204 | """ 205 | dtype = dims.ALL_POINT_FORMATS_DTYPE[self.id] 206 | descr = dtype.descr 207 | for extra_dim in self.extra_dimensions: 208 | descr.append((extra_dim.name, extra_dim.type_str())) 209 | return np.dtype(descr) 210 | 211 | def __getitem__(self, item): 212 | if isinstance(item, str): 213 | return self.dimension_by_name(item) 214 | return self.dimensions[item] 215 | 216 | def __eq__(self, other): 217 | if self.id != other.id: 218 | return False 219 | 220 | for my_eb, ot_eb in zip_longest(self.extra_dimensions, other.extra_dimensions): 221 | if my_eb != ot_eb: 222 | return False 223 | 224 | return True 225 | 226 | def __repr__(self): 227 | return "".format( 228 | self.id, self.num_extra_bytes 229 | ) 230 | 231 | 232 | def lost_dimensions(point_fmt_in, point_fmt_out): 233 | """Returns a list of the names of the dimensions that will be lost 234 | when converting from point_fmt_in to point_fmt_out 235 | """ 236 | 237 | dimensions_in = 
set(PointFormat(point_fmt_in).dimension_names) 238 | dimensions_out = set(PointFormat(point_fmt_out).dimension_names) 239 | 240 | completely_lost = [] 241 | for dim_name in dimensions_in: 242 | if dim_name not in dimensions_out: 243 | completely_lost.append(dim_name) 244 | return completely_lost 245 | -------------------------------------------------------------------------------- /pylas/point/packing.py: -------------------------------------------------------------------------------- 1 | """ This module contains functions to pack and unpack point dimensions 2 | """ 3 | import numpy as np 4 | 5 | 6 | def least_significant_bit_set(mask: int) -> int: 7 | """Return the least significant bit set 8 | 9 | The index is 0-indexed. 10 | Returns -1 is no bit is set 11 | 12 | >>> least_significant_bit_set(0b0000_0001) 13 | 0 14 | >>> least_significant_bit_set(0b0001_0000) 15 | 4 16 | >>> least_significant_bit_set(0b0000_0000) 17 | -1 18 | """ 19 | return (mask & -mask).bit_length() - 1 20 | 21 | 22 | def pack(array, sub_field_array, mask, inplace=False): 23 | """Packs a sub field's array into another array using a mask 24 | 25 | Parameters: 26 | ---------- 27 | array : numpy.ndarray 28 | The array in which the sub field array will be packed into 29 | array_in : numpy.ndarray 30 | sub field array to pack 31 | mask : mask (ie: 0b00001111) 32 | Mask of the sub field 33 | inplace : {bool}, optional 34 | If true a new array is returned. 
(the default is False, which modifies the array in place) 35 | 36 | Raises 37 | ------ 38 | OverflowError 39 | If the values contained in the sub field array are greater than its mask's number of bits 40 | allows 41 | """ 42 | lsb = least_significant_bit_set(mask) 43 | max_value = int(mask >> lsb) 44 | if np.max(sub_field_array) > max_value: 45 | raise OverflowError( 46 | "value ({}) is greater than allowed (max: {})".format( 47 | sub_field_array.max(), max_value 48 | ) 49 | ) 50 | if inplace: 51 | array[:] = array & ~mask 52 | array[:] = array | ((sub_field_array << lsb) & mask).astype(array.dtype) 53 | else: 54 | array = array & ~mask 55 | return array | ((sub_field_array << lsb) & mask).astype(array.dtype) 56 | -------------------------------------------------------------------------------- /pylas/point/record.py: -------------------------------------------------------------------------------- 1 | """ Contains the classes that manages Las PointRecords 2 | Las PointRecords are represented using Numpy's structured arrays, 3 | The PointRecord classes provide a few extra things to manage these arrays 4 | in the context of Las point data 5 | """ 6 | import logging 7 | from typing import NoReturn 8 | 9 | import numpy as np 10 | 11 | from . import dims 12 | from .dims import ScaledArrayView 13 | from .. 
import errors 14 | from ..point import PointFormat 15 | 16 | logger = logging.getLogger(__name__) 17 | 18 | 19 | def scale_dimension(array_dim, scale, offset): 20 | return (array_dim * scale) + offset 21 | 22 | 23 | def unscale_dimension(array_dim, scale, offset): 24 | return np.round((np.array(array_dim) - offset) / scale) 25 | 26 | 27 | def raise_not_enough_bytes_error( 28 | expected_bytes_len, missing_bytes_len, point_data_buffer_len, points_dtype 29 | ) -> NoReturn: 30 | raise errors.PylasError( 31 | "The file does not contain enough bytes to store the expected number of points\n" 32 | "expected {} bytes, read {} bytes ({} bytes missing == {} points) and it cannot be corrected\n" 33 | "{} (bytes) / {} (point_size) = {} (points)".format( 34 | expected_bytes_len, 35 | point_data_buffer_len, 36 | missing_bytes_len, 37 | missing_bytes_len / points_dtype.itemsize, 38 | point_data_buffer_len, 39 | points_dtype.itemsize, 40 | point_data_buffer_len / points_dtype.itemsize, 41 | ) 42 | ) 43 | 44 | 45 | class PackedPointRecord: 46 | """ 47 | In the PackedPointRecord, fields that are a combinations of many sub-fields (fields stored on less than a byte) 48 | are still packed together and are only de-packed and re-packed when accessed. 
49 | 50 | This uses of less memory than if the sub-fields were unpacked 51 | 52 | >>> #return number is a sub-field 53 | >>> from pylas import PointFormat 54 | >>> packed_point_record = PackedPointRecord.zeros(PointFormat(0), 10) 55 | >>> return_number = packed_point_record['return_number'] 56 | >>> return_number 57 | 58 | >>> return_number[:] = 1 59 | >>> np.alltrue(packed_point_record['return_number'] == 1) 60 | True 61 | """ 62 | 63 | def __init__(self, data: np.ndarray, point_format: PointFormat): 64 | self.array = data 65 | self.point_format = point_format 66 | self.sub_fields_dict = dims.get_sub_fields_dict(point_format.id) 67 | 68 | @property 69 | def point_size(self): 70 | """Returns the point size in bytes taken by each points of the record 71 | 72 | Returns 73 | ------- 74 | int 75 | The point size in byte 76 | 77 | """ 78 | return self.array.dtype.itemsize 79 | 80 | @classmethod 81 | def zeros(cls, point_format, point_count): 82 | """Creates a new point record with all dimensions initialized to zero 83 | 84 | Parameters 85 | ---------- 86 | point_format: PointFormat 87 | The point format id the point record should have 88 | point_count : int 89 | The number of point the point record should have 90 | 91 | Returns 92 | ------- 93 | PackedPointRecord 94 | 95 | """ 96 | data = np.zeros(point_count, point_format.dtype()) 97 | return cls(data, point_format) 98 | 99 | @classmethod 100 | def empty(cls, point_format): 101 | """Creates an empty point record. 
102 | 103 | Parameters 104 | ---------- 105 | point_format: pylas.PointFormat 106 | The point format id the point record should have 107 | 108 | Returns 109 | ------- 110 | PackedPointRecord 111 | 112 | """ 113 | return cls.zeros(point_format, point_count=0) 114 | 115 | @classmethod 116 | def from_point_record( 117 | cls, other_point_record: "PackedPointRecord", new_point_format: PointFormat 118 | ) -> "PackedPointRecord": 119 | """Construct a new PackedPointRecord from an existing one with the ability to change 120 | to point format while doing so 121 | """ 122 | array = np.zeros_like(other_point_record.array, dtype=new_point_format.dtype()) 123 | new_record = cls(array, new_point_format) 124 | new_record.copy_fields_from(other_point_record) 125 | return new_record 126 | 127 | @classmethod 128 | def from_buffer(cls, buffer, point_format, count, offset=0): 129 | points_dtype = point_format.dtype() 130 | data = np.frombuffer(buffer, dtype=points_dtype, offset=offset, count=count) 131 | 132 | return cls(data, point_format) 133 | 134 | def copy_fields_from(self, other_record: "PackedPointRecord") -> None: 135 | """Tries to copy the values of the current dimensions from other_record""" 136 | for dim_name in self.point_format.dimension_names: 137 | try: 138 | self[dim_name] = np.array(other_record[dim_name]) 139 | except ValueError: 140 | pass 141 | 142 | def memoryview(self) -> memoryview: 143 | return memoryview(self.array) 144 | 145 | def resize(self, new_size: int) -> None: 146 | size_diff = new_size - len(self.array) 147 | if size_diff > 0: 148 | self.array = np.append( 149 | self.array, np.zeros(size_diff, dtype=self.array.dtype) 150 | ) 151 | elif size_diff < 0: 152 | self.array = self._array[:new_size].copy() 153 | 154 | def _append_zeros_if_too_small(self, value): 155 | """Appends zeros to the points stored if the value we are trying to 156 | fit is bigger 157 | """ 158 | size_diff = len(value) - len(self.array) 159 | if size_diff > 0: 160 | 
self.resize(size_diff) 161 | 162 | def __eq__(self, other): 163 | return self.point_format == other.point_format and np.all( 164 | self.array == other.array 165 | ) 166 | 167 | def __len__(self): 168 | return self.array.shape[0] 169 | 170 | def __getitem__(self, item): 171 | """Gives access to the underlying numpy array 172 | Unpack the dimension if item is the name a sub-field 173 | """ 174 | if isinstance(item, (int, slice, np.ndarray)): 175 | return PackedPointRecord(self.array[item], self.point_format) 176 | 177 | # 1) Is it a sub field ? 178 | try: 179 | composed_dim, sub_field = self.sub_fields_dict[item] 180 | return dims.SubFieldView(self.array[composed_dim], sub_field.mask) 181 | except KeyError: 182 | pass 183 | 184 | # 2) Is it a Scaled Extra Byte Dimension ? 185 | try: 186 | dim_info = self.point_format.dimension_by_name(item) 187 | if dim_info.is_standard is False: 188 | if dim_info.scales is not None or dim_info.offsets is not None: 189 | scale = ( 190 | np.ones(dim_info.num_elements, np.float64) 191 | if dim_info.scales is None 192 | else dim_info.scales[: dim_info.num_elements] 193 | ) 194 | offset = ( 195 | np.zeros(dim_info.num_elements, np.float64) 196 | if dim_info.offsets is None 197 | else dim_info.offsets[: dim_info.num_elements] 198 | ) 199 | return ScaledArrayView(self.array[item], scale, offset) 200 | except ValueError: 201 | pass 202 | 203 | return self.array[item] 204 | 205 | def __setitem__(self, key, value): 206 | """Sets elements in the array""" 207 | self._append_zeros_if_too_small(value) 208 | if isinstance(key, str): 209 | self[key][:] = value 210 | else: 211 | self.array[key] = value 212 | 213 | def __getattr__(self, item): 214 | try: 215 | return self[item] 216 | except ValueError: 217 | raise AttributeError("{} is not a valid dimension".format(item)) from None 218 | 219 | def __repr__(self): 220 | return "<{}(fmt: {}, len: {}, point size: {})>".format( 221 | self.__class__.__name__, 222 | self.point_format, 223 | len(self), 224 
| self.point_format.size, 225 | ) 226 | 227 | 228 | def apply_new_scaling(record, scales: np.ndarray, offsets: np.ndarray) -> None: 229 | record["X"] = unscale_dimension(np.asarray(record.x), scales[0], offsets[0]) 230 | record["Y"] = unscale_dimension(np.asarray(record.y), scales[1], offsets[1]) 231 | record["Z"] = unscale_dimension(np.asarray(record.x), scales[2], offsets[2]) 232 | 233 | 234 | class ScaleAwarePointRecord(PackedPointRecord): 235 | def __init__(self, array, point_format, scales, offsets): 236 | super().__init__(array, point_format) 237 | self.scales = scales 238 | self.offsets = offsets 239 | 240 | def change_scaling(self, scales=None, offsets=None) -> None: 241 | if scales is not None: 242 | self.scales = scales 243 | if offsets is not None: 244 | self.offsets = offsets 245 | 246 | apply_new_scaling(self, scales, offsets) 247 | 248 | self.scales = scales 249 | self.offsets = offsets 250 | 251 | def __getitem__(self, item): 252 | if isinstance(item, (slice, np.ndarray)): 253 | return ScaleAwarePointRecord( 254 | self.array[item], self.point_format, self.scales, self.offsets 255 | ) 256 | 257 | if item == "x": 258 | return ScaledArrayView(self.array["X"], self.scales[0], self.offsets[0]) 259 | elif item == "y": 260 | return ScaledArrayView(self.array["Y"], self.scales[1], self.offsets[1]) 261 | elif item == "z": 262 | return ScaledArrayView(self.array["Z"], self.scales[2], self.offsets[2]) 263 | else: 264 | return super().__getitem__(item) 265 | -------------------------------------------------------------------------------- /pylas/typehints.py: -------------------------------------------------------------------------------- 1 | import pathlib 2 | from typing import Union 3 | 4 | from .compression import LazBackend 5 | 6 | LazBackend = LazBackend 7 | 8 | PathLike = Union[str, pathlib.Path] 9 | -------------------------------------------------------------------------------- /pylas/utils.py: 
-------------------------------------------------------------------------------- 1 | def encode_to_len(string: str, wanted_len: int, codec="ascii") -> bytes: 2 | encoded_str = string.encode(codec) 3 | 4 | missing_bytes = wanted_len - len(encoded_str) 5 | if missing_bytes < 0: 6 | raise ValueError(f"encoded str does not fit in {wanted_len} bytes") 7 | return encoded_str + (b"\0" * missing_bytes) 8 | 9 | 10 | def encode_to_null_terminated(string: str, codec: str = "utf-8") -> bytes: 11 | b = string.encode(codec) 12 | if b[-1] != 0: 13 | b += b"\0" 14 | return b 15 | -------------------------------------------------------------------------------- /pylas/vlrs/__init__.py: -------------------------------------------------------------------------------- 1 | from . import geotiff 2 | from .known import BaseKnownVLR 3 | from .vlr import VLR 4 | -------------------------------------------------------------------------------- /pylas/vlrs/geotiff.py: -------------------------------------------------------------------------------- 1 | from collections import namedtuple 2 | from typing import List 3 | 4 | from pylas.vlrs import vlrlist 5 | from pylas.vlrs.known import GeoAsciiParamsVlr, GeoDoubleParamsVlr, GeoKeyDirectoryVlr 6 | 7 | GeoTiffKey = namedtuple("GeoTiffKey", ("id", "value")) 8 | 9 | import logging 10 | 11 | logger = logging.getLogger(__name__) 12 | 13 | GTModelTypeGeoKey = 1024 14 | GTRasterTypeGeoKey = 1025 15 | GTCitationGeoKey = 1026 16 | GeogCitationGeoKey = 2049 17 | GeogAngularUnitsGeoKey = 2054 18 | ProjectedCSTypeGeoKey = 3072 19 | ProjLinearUnitsGeoKey = 3076 20 | 21 | 22 | def parse_geo_tiff_keys_from_vlrs(vlr_list: vlrlist.VLRList) -> List[GeoTiffKey]: 23 | """Gets the 3 GeoTiff vlrs from the vlr_list and parse them into 24 | a nicer structure 25 | 26 | Parameters 27 | ---------- 28 | vlr_list: pylas.vrls.vlrslist.VLRList list of vlrs from a las file 29 | 30 | Raises 31 | ------ 32 | IndexError if any of the needed GeoTiffVLR is not found in the list 33 | 
34 | Returns 35 | ------- 36 | List of GeoTiff keys parsed from the VLRs 37 | 38 | """ 39 | geo_key_dir = vlr_list.get_by_id( 40 | GeoKeyDirectoryVlr.official_user_id(), GeoKeyDirectoryVlr.official_record_ids() 41 | )[0] 42 | geo_doubles = vlr_list.get_by_id( 43 | GeoDoubleParamsVlr.official_user_id(), GeoDoubleParamsVlr.official_record_ids() 44 | )[0] 45 | geo_ascii = vlr_list.get_by_id( 46 | GeoAsciiParamsVlr.official_user_id(), GeoAsciiParamsVlr.official_record_ids() 47 | )[0] 48 | return parse_geo_tiff(geo_key_dir, geo_doubles, geo_ascii) 49 | 50 | 51 | def parse_geo_tiff( 52 | key_dir_vlr: GeoKeyDirectoryVlr, 53 | double_vlr: GeoDoubleParamsVlr, 54 | ascii_vlr: GeoAsciiParamsVlr, 55 | ) -> List[GeoTiffKey]: 56 | """Parses the GeoTiff VLRs information into nicer structs""" 57 | geotiff_keys = [] 58 | 59 | for k in key_dir_vlr.geo_keys: 60 | if k.tiff_tag_location == 0: 61 | value = k.value_offset 62 | elif k.tiff_tag_location == 34736: 63 | value = double_vlr.doubles[k.value_offset] 64 | elif k.tiff_tag_location == 34737: 65 | try: 66 | value = ascii_vlr.strings[k.value_offset][k.count :] 67 | except IndexError: 68 | # Maybe I'm just misunderstanding the specification :thinking: 69 | value = ascii_vlr.strings[0][k.value_offset : k.value_offset + k.count] 70 | else: 71 | logger.warning( 72 | "GeoTiffKey with unknown tiff tag location ({})".format( 73 | k.tiff_tag_location 74 | ) 75 | ) 76 | continue 77 | 78 | geotiff_keys.append(GeoTiffKey(k.id, value)) 79 | return geotiff_keys 80 | -------------------------------------------------------------------------------- /pylas/vlrs/vlr.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from typing import BinaryIO 3 | 4 | 5 | class IVLR(ABC): 6 | @property 7 | @abstractmethod 8 | def user_id(self) -> str: 9 | ... 10 | 11 | @property 12 | @abstractmethod 13 | def record_id(self) -> int: 14 | ... 
15 | 16 | @property 17 | @abstractmethod 18 | def description(self) -> str: 19 | ... 20 | 21 | @abstractmethod 22 | def record_data_bytes(self) -> bytes: 23 | ... 24 | 25 | 26 | class BaseVLR(IVLR, ABC): 27 | def __init__(self, user_id, record_id, description=""): 28 | self._user_id = user_id 29 | self._record_id = record_id 30 | self._description = description 31 | 32 | @property 33 | def user_id(self) -> str: 34 | return self._user_id 35 | 36 | @property 37 | def record_id(self) -> int: 38 | return self._record_id 39 | 40 | @property 41 | def description(self) -> str: 42 | return self._description 43 | 44 | 45 | class VLR(BaseVLR): 46 | def __init__(self, user_id, record_id, description="", record_data=b""): 47 | super().__init__(user_id, record_id, description=description) 48 | #: The record_data as bytes 49 | self.record_data = record_data 50 | 51 | def record_data_bytes(self) -> bytes: 52 | return self.record_data 53 | 54 | @classmethod 55 | def read_from(cls, stream: BinaryIO): 56 | stream.read(2) 57 | user_id = stream.read(16).decode().rstrip("\0") 58 | record_id = int.from_bytes(stream.read(2), byteorder="little", signed=False) 59 | description = stream.read(16).decode().rstrip("\0") 60 | record_length = int.from_bytes(stream.read(2), byteorder="little", signed=False) 61 | record_data = stream.read(record_length) 62 | vlr = cls(user_id, record_id, description, record_data) 63 | return vlr 64 | 65 | def __eq__(self, other): 66 | return ( 67 | self.record_id == other.record_id 68 | and self.user_id == other.user_id 69 | and self.description == other.description 70 | and self.record_data == other.record_data 71 | ) 72 | 73 | def __repr__(self): 74 | return "<{}(user_id: '{}', record_id: '{}', data len: {})>".format( 75 | self.__class__.__name__, self.user_id, self.record_id, len(self.record_data) 76 | ) 77 | -------------------------------------------------------------------------------- /pylas/vlrs/vlrlist.py: 
-------------------------------------------------------------------------------- 1 | import logging 2 | from typing import BinaryIO, List 3 | 4 | import numpy as np 5 | 6 | from .known import vlr_factory, IKnownVLR 7 | from .vlr import VLR 8 | from ..utils import encode_to_len 9 | 10 | logger = logging.getLogger(__name__) 11 | 12 | RESERVED_LEN = 2 13 | USER_ID_LEN = 16 14 | DESCRIPTION_LEN = 32 15 | 16 | 17 | class VLRList(list): 18 | """Class responsible for managing the vlrs""" 19 | 20 | def __init__(self, *args, **kwargs): 21 | super().__init__(*args, **kwargs) 22 | 23 | def index(self, value, start: int = 0, stop: int = None) -> int: 24 | if stop is None: 25 | stop = len(self) 26 | if isinstance(value, str): 27 | for i, vlr in enumerate(self[start:stop]): 28 | if vlr.__class__.__name__ == value: 29 | return i + start 30 | else: 31 | return super().index(value, start, stop) 32 | 33 | def get_by_id(self, user_id="", record_ids=(None,)): 34 | """Function to get vlrs by user_id and/or record_ids. 
35 | Always returns a list even if only one vlr matches the user_id and record_id 36 | 37 | >>> import pylas 38 | >>> from pylas.vlrs.known import ExtraBytesVlr, WktCoordinateSystemVlr 39 | >>> las = pylas.read("pylastests/extrabytes.las") 40 | >>> las.vlrs 41 | [] 42 | >>> las.vlrs.get(WktCoordinateSystemVlr.official_user_id()) 43 | [] 44 | >>> las.vlrs.get(WktCoordinateSystemVlr.official_user_id())[0] 45 | Traceback (most recent call last): 46 | IndexError: list index out of range 47 | >>> las.vlrs.get_by_id(ExtraBytesVlr.official_user_id()) 48 | [] 49 | >>> las.vlrs.get_by_id(ExtraBytesVlr.official_user_id())[0] 50 | 51 | 52 | Parameters 53 | ---------- 54 | user_id: str, optional 55 | the user id 56 | record_ids: iterable of int, optional 57 | THe record ids of the vlr(s) you wish to get 58 | 59 | Returns 60 | ------- 61 | :py:class:`list` 62 | a list of vlrs matching the user_id and records_ids 63 | 64 | """ 65 | if user_id != "" and record_ids != (None,): 66 | return [ 67 | vlr 68 | for vlr in self 69 | if vlr.user_id == user_id and vlr.record_id in record_ids 70 | ] 71 | else: 72 | return [ 73 | vlr 74 | for vlr in self 75 | if vlr.user_id == user_id or vlr.record_id in record_ids 76 | ] 77 | 78 | def get(self, vlr_type: str) -> List[IKnownVLR]: 79 | """Returns the list of vlrs of the requested type 80 | Always returns a list even if there is only one VLR of type vlr_type. 
81 | 82 | >>> import pylas 83 | >>> las = pylas.read("pylastests/extrabytes.las") 84 | >>> las.vlrs 85 | [] 86 | >>> las.vlrs.get("WktCoordinateSystemVlr") 87 | [] 88 | >>> las.vlrs.get("WktCoordinateSystemVlr")[0] 89 | Traceback (most recent call last): 90 | IndexError: list index out of range 91 | >>> las.vlrs.get('ExtraBytesVlr') 92 | [] 93 | >>> las.vlrs.get('ExtraBytesVlr')[0] 94 | 95 | 96 | 97 | Parameters 98 | ---------- 99 | vlr_type: str 100 | the class name of the vlr 101 | 102 | Returns 103 | ------- 104 | :py:class:`list` 105 | a List of vlrs matching the user_id and records_ids 106 | 107 | """ 108 | return [v for v in self if v.__class__.__name__ == vlr_type] 109 | 110 | def extract(self, vlr_type: str) -> List[IKnownVLR]: 111 | """Returns the list of vlrs of the requested type 112 | The difference with get is that the returned vlrs will be removed from the list 113 | 114 | Parameters 115 | ---------- 116 | vlr_type: str 117 | the class name of the vlr 118 | 119 | Returns 120 | ------- 121 | list 122 | a List of vlrs matching the user_id and records_ids 123 | 124 | """ 125 | kept_vlrs, extracted_vlrs = [], [] 126 | for vlr in self: 127 | if vlr.__class__.__name__ == vlr_type: 128 | extracted_vlrs.append(vlr) 129 | else: 130 | kept_vlrs.append(vlr) 131 | self.clear() 132 | self.extend(kept_vlrs) 133 | return extracted_vlrs 134 | 135 | def __repr__(self): 136 | return "[{}]".format(", ".join(repr(vlr) for vlr in self)) 137 | 138 | @classmethod 139 | def read_from( 140 | cls, data_stream: BinaryIO, num_to_read: int, extended: bool = False 141 | ) -> "VLRList": 142 | """Reads vlrs and parse them if possible from the stream 143 | 144 | Parameters 145 | ---------- 146 | data_stream : io.BytesIO 147 | stream to read from 148 | num_to_read : int 149 | number of vlrs to be read 150 | 151 | extended : bool 152 | whether the vlrs are regular vlr or extended vlr 153 | 154 | Returns 155 | ------- 156 | pylas.vlrs.vlrlist.VLRList 157 | List of vlrs 158 | 159 | """ 
160 | vlrlist = cls() 161 | for _ in range(num_to_read): 162 | data_stream.read(RESERVED_LEN) 163 | user_id = data_stream.read(USER_ID_LEN).decode().rstrip("\0") 164 | record_id = int.from_bytes( 165 | data_stream.read(2), byteorder="little", signed=False 166 | ) 167 | if extended: 168 | record_data_len = int.from_bytes( 169 | data_stream.read(8), byteorder="little", signed=False 170 | ) 171 | else: 172 | record_data_len = int.from_bytes( 173 | data_stream.read(2), byteorder="little", signed=False 174 | ) 175 | description = data_stream.read(DESCRIPTION_LEN).decode().rstrip("\0") 176 | record_data_bytes = data_stream.read(record_data_len) 177 | 178 | vlr = VLR(user_id, record_id, description, record_data_bytes) 179 | 180 | vlrlist.append(vlr_factory(vlr)) 181 | 182 | return vlrlist 183 | 184 | def write_to(self, stream: BinaryIO, as_extended: bool = False) -> int: 185 | bytes_written = 0 186 | for vlr in self: 187 | record_data = vlr.record_data_bytes() 188 | 189 | stream.write(b"\0\0") 190 | stream.write(encode_to_len(vlr.user_id, USER_ID_LEN)) 191 | stream.write(vlr.record_id.to_bytes(2, byteorder="little", signed=False)) 192 | if as_extended: 193 | if len(record_data) > np.iinfo("uint16").max: 194 | raise ValueError("vlr record_data is too long") 195 | stream.write( 196 | len(record_data).to_bytes(8, byteorder="little", signed=False) 197 | ) 198 | else: 199 | stream.write( 200 | len(record_data).to_bytes(2, byteorder="little", signed=False) 201 | ) 202 | stream.write(encode_to_len(vlr.description, DESCRIPTION_LEN)) 203 | stream.write(record_data) 204 | 205 | bytes_written += 54 if not as_extended else 60 206 | bytes_written += len(record_data) 207 | 208 | return bytes_written 209 | -------------------------------------------------------------------------------- /pylastests/1_4_w_evlr.las: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/tmontaigu/pylas/8be16f1d420769cf4fc3c8d9932c93baa64d6247/pylastests/1_4_w_evlr.las -------------------------------------------------------------------------------- /pylastests/1_4_w_evlr.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmontaigu/pylas/8be16f1d420769cf4fc3c8d9932c93baa64d6247/pylastests/1_4_w_evlr.laz -------------------------------------------------------------------------------- /pylastests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmontaigu/pylas/8be16f1d420769cf4fc3c8d9932c93baa64d6247/pylastests/__init__.py -------------------------------------------------------------------------------- /pylastests/conftest.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import pytest 4 | import pylas 5 | 6 | SIMPLE_LAS_FILE_PATH = Path(__file__).parent / "simple.las" 7 | VEGETATION1_3_LAS_FILE_PATH = Path(__file__).parent / "vegetation_1_3.las" 8 | TEST1_4_LAS_FILE_PATH = Path(__file__).parent / "test1_4.las" 9 | EXTRA_BYTES_LAS_FILE_PATH = Path(__file__).parent / "extrabytes.las" 10 | 11 | SIMPLE_LAZ_FILE_PATH = Path(__file__).parent / "simple.laz" 12 | EXTRA_BYTES_LAZ_FILE_PATH = Path(__file__).parent / "extra.laz" 13 | PLANE_LAZ_FILE_PATH = Path(__file__).parent / "plane.laz" 14 | 15 | ALL_LAS_FILE_PATH = [SIMPLE_LAS_FILE_PATH, VEGETATION1_3_LAS_FILE_PATH, TEST1_4_LAS_FILE_PATH, 16 | EXTRA_BYTES_LAS_FILE_PATH] 17 | 18 | ALL_LAZ_FILE_PATH = [ 19 | SIMPLE_LAZ_FILE_PATH, EXTRA_BYTES_LAZ_FILE_PATH, PLANE_LAZ_FILE_PATH 20 | ] 21 | 22 | ALL_LAZ_BACKEND = pylas.LazBackend.detect_available() 23 | 24 | 25 | SUPPORTED_SINGULAR_EXTRA_BYTES_TYPE = ['u1', 'u2', 'u4', 'u8', 'i1', 'i2', 'i4', 'i8', 'f4', 'f8', 'uint8', 'uint16', 26 | 'uint32', 27 | 'uint64', 'int8', 28 | 'int16', 'int32', 'int64', 'float32', 
'float64'] 29 | 30 | SUPPORTED_ARRAY_2_EXTRA_BYTES_TYPE = [f'2{base_type}' for base_type in SUPPORTED_SINGULAR_EXTRA_BYTES_TYPE] 31 | 32 | SUPPORTED_ARRAY_3_EXTRA_BYTES_TYPE = [f'3{base_type}' for base_type in SUPPORTED_SINGULAR_EXTRA_BYTES_TYPE] 33 | 34 | SUPPORTED_EXTRA_BYTES_TYPE = SUPPORTED_SINGULAR_EXTRA_BYTES_TYPE + SUPPORTED_ARRAY_2_EXTRA_BYTES_TYPE + SUPPORTED_ARRAY_3_EXTRA_BYTES_TYPE 35 | 36 | 37 | @pytest.fixture() 38 | def simple_las_path(): 39 | return SIMPLE_LAS_FILE_PATH 40 | 41 | 42 | @pytest.fixture(params=SUPPORTED_EXTRA_BYTES_TYPE) 43 | def extra_bytes_params(request): 44 | return pylas.ExtraBytesParams( 45 | name="just_a_name", 46 | type=request.param, 47 | description="pylas test ExtraBytes" 48 | ) 49 | 50 | 51 | @pytest.fixture(params=[EXTRA_BYTES_LAS_FILE_PATH, EXTRA_BYTES_LAZ_FILE_PATH], ids=repr) 52 | def las_file_path_with_extra_bytes(request): 53 | if request.param.suffix == '.laz' and len(pylas.LazBackend.detect_available()) == 0: 54 | return pytest.skip("No Laz Backend") 55 | else: 56 | return request.param 57 | 58 | 59 | @pytest.fixture(params=ALL_LAS_FILE_PATH, ids=repr) 60 | def las_file_path(request): 61 | return request.param 62 | 63 | 64 | @pytest.fixture(params=ALL_LAZ_FILE_PATH, ids=repr) 65 | def laz_file_path(request): 66 | if len(pylas.LazBackend.detect_available()) == 0: 67 | return pytest.skip('No Laz Backend') 68 | return request.param 69 | 70 | 71 | @pytest.fixture(params=ALL_LAS_FILE_PATH + ALL_LAZ_FILE_PATH, ids=repr) 72 | def file_path(request): 73 | if len(pylas.LazBackend.detect_available()) == 0: 74 | return pytest.skip('No Laz Backend') 75 | return request.param 76 | 77 | 78 | @pytest.fixture(params=ALL_LAZ_BACKEND if ALL_LAZ_BACKEND else [pytest.mark.skip("No Laz Backend installed")]) 79 | def laz_backend(request): 80 | return request.param 81 | 82 | 83 | def all_las_file_path(): 84 | return all_las_file_path() 85 | 86 | 87 | @pytest.fixture() 88 | def mmapped_file_path(tmp_path): 89 | import shutil 90 | 
copied_file = shutil.copy(SIMPLE_LAS_FILE_PATH, tmp_path) 91 | yield copied_file 92 | 93 | 94 | 95 | -------------------------------------------------------------------------------- /pylastests/extra.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmontaigu/pylas/8be16f1d420769cf4fc3c8d9932c93baa64d6247/pylastests/extra.laz -------------------------------------------------------------------------------- /pylastests/extrabytes.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmontaigu/pylas/8be16f1d420769cf4fc3c8d9932c93baa64d6247/pylastests/extrabytes.las -------------------------------------------------------------------------------- /pylastests/plane.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmontaigu/pylas/8be16f1d420769cf4fc3c8d9932c93baa64d6247/pylastests/plane.laz -------------------------------------------------------------------------------- /pylastests/simple.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmontaigu/pylas/8be16f1d420769cf4fc3c8d9932c93baa64d6247/pylastests/simple.las -------------------------------------------------------------------------------- /pylastests/simple.laz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmontaigu/pylas/8be16f1d420769cf4fc3c8d9932c93baa64d6247/pylastests/simple.laz -------------------------------------------------------------------------------- /pylastests/test1_4.las: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmontaigu/pylas/8be16f1d420769cf4fc3c8d9932c93baa64d6247/pylastests/test1_4.las -------------------------------------------------------------------------------- 
/pylastests/test_append_mode.py: -------------------------------------------------------------------------------- 1 | import io 2 | import os 3 | 4 | import pytest 5 | 6 | import pylas 7 | from pylastests.test_common import simple_laz 8 | 9 | 10 | def test_append(file_path): 11 | """ 12 | Test appending 13 | """ 14 | if file_path.suffix == '.laz' and not pylas.LazBackend.Lazrs.is_available(): 15 | pytest.skip("Only Lazrs backed supports appending") 16 | append_self_and_check(file_path) 17 | 18 | 19 | def test_raises_for_laszip_backend(): 20 | with pytest.raises(pylas.PylasError): 21 | with pylas.open(simple_laz, mode="a", laz_backend=pylas.LazBackend.Laszip): 22 | ... 23 | 24 | 25 | def test_append_las_with_evlrs(): 26 | las = append_self_and_check(os.path.dirname(__file__) + "/1_4_w_evlr.las") 27 | 28 | expected_evlr = pylas.VLR(user_id="pylastest", record_id=42, description="just a test evlr") 29 | expected_evlr.record_data = b"Test 1 2 ... 1 2" 30 | 31 | assert len(las.evlrs) == 1 32 | evlr = las.evlrs[0] 33 | assert evlr.description == expected_evlr.description 34 | assert evlr.record_id == expected_evlr.record_id 35 | assert evlr.user_id == expected_evlr.user_id 36 | assert evlr.record_data == expected_evlr.record_data 37 | 38 | 39 | @pytest.mark.skipif(not pylas.LazBackend.Lazrs.is_available(), reason="Lazrs is not installed") 40 | def test_append_laz_with_evlrs(): 41 | las = append_self_and_check(os.path.dirname(__file__) + "/1_4_w_evlr.laz") 42 | 43 | expected_evlr = pylas.VLR(user_id="pylastest", record_id=42, description="just a test evlr") 44 | expected_evlr.record_data = b"Test 1 2 ... 
1 2" 45 | 46 | assert len(las.evlrs) == 1 47 | evlr = las.evlrs[0] 48 | assert evlr.description == expected_evlr.description 49 | assert evlr.record_id == expected_evlr.record_id 50 | assert evlr.user_id == expected_evlr.user_id 51 | assert evlr.record_data == expected_evlr.record_data 52 | 53 | 54 | def append_self_and_check(las_path_fixture): 55 | with open(las_path_fixture, mode="rb") as f: 56 | file = io.BytesIO(f.read()) 57 | las = pylas.read(las_path_fixture) 58 | with pylas.open(file, mode='a', closefd=False) as laz_file: 59 | laz_file.append_points(las.points) 60 | file.seek(0, io.SEEK_SET) 61 | rlas = pylas.read(file) 62 | assert rlas.header.point_count == 2 * las.header.point_count 63 | assert rlas.points[:rlas.header.point_count // 2] == las.points 64 | assert rlas.points[rlas.header.point_count // 2:] == las.points 65 | 66 | return rlas 67 | -------------------------------------------------------------------------------- /pylastests/test_chunk_read_write.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests related to the 'chunked' reading and writing 3 | """ 4 | import io 5 | import math 6 | 7 | import numpy as np 8 | import pytest 9 | 10 | import pylas 11 | 12 | 13 | def test_chunked_las_reading_gives_expected_points(las_file_path): 14 | """ 15 | Test chunked LAS reading 16 | """ 17 | with pylas.open(las_file_path) as las_reader: 18 | with pylas.open(las_file_path) as reader: 19 | las = las_reader.read() 20 | check_chunked_reading_is_gives_expected_points( 21 | las, reader, iter_size=50) 22 | 23 | 24 | def test_chunked_laz_reading_gives_expected_points(laz_file_path, laz_backend): 25 | """ 26 | Test LAZ reading in chunked mode with different backends 27 | """ 28 | with pylas.open(laz_file_path) as las_reader: 29 | with pylas.open(laz_file_path, laz_backend=laz_backend) as laz_reader: 30 | expected_las = las_reader.read() 31 | check_chunked_reading_is_gives_expected_points( 32 | expected_las, laz_reader, 
iter_size=50 33 | ) 34 | 35 | 36 | @pytest.mark.parametrize("backend", pylas.LazBackend.detect_available() + (None,)) 37 | def test_chunked_writing_gives_expected_points(file_path, backend): 38 | """ 39 | Write in chunked mode then test that the points are correct 40 | """ 41 | original_las = pylas.read(file_path) 42 | iter_size = 51 43 | 44 | do_compress = True if backend is not None else False 45 | 46 | with io.BytesIO() as tmp_output: 47 | with pylas.open( 48 | tmp_output, 49 | mode="w", 50 | closefd=False, 51 | header=original_las.header, 52 | do_compress=do_compress, 53 | laz_backend=backend 54 | ) as las: 55 | for i in range(int(math.ceil(len(original_las.points) / iter_size))): 56 | original_points = original_las.points[ 57 | i * iter_size: (i + 1) * iter_size 58 | ] 59 | las.write_points(original_points) 60 | 61 | tmp_output.seek(0) 62 | with pylas.open(tmp_output, closefd=False) as reader: 63 | check_chunked_reading_is_gives_expected_points( 64 | original_las, reader, iter_size 65 | ) 66 | 67 | 68 | def check_chunked_reading_is_gives_expected_points(groundtruth_las, reader, iter_size): 69 | """Checks that the points read by the reader are the same as groundtruth points.""" 70 | assert groundtruth_las.point_format == reader.header.point_format 71 | for i, points in enumerate(reader.chunk_iterator(iter_size)): 72 | expected_points = groundtruth_las.points[i * iter_size: (i + 1) * iter_size] 73 | for dim_name in points.array.dtype.names: 74 | assert np.allclose(expected_points[dim_name], points[dim_name]), f"{dim_name} not equal" 75 | -------------------------------------------------------------------------------- /pylastests/test_common.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | import pylas 7 | from pylas.lib import write_then_read_again 8 | 9 | simple_las = Path(__file__).parent / "simple.las" 10 | simple_laz = Path(__file__).parent / 
def dim_does_not_exists(las, dim_name):
    """Return True when `las` does not expose an attribute named `dim_name`.

    The original implementation hand-rolled a try/getattr/except AttributeError
    dance; `hasattr` performs exactly that check.
    """
    return not hasattr(las, dim_name)


def dim_does_exists(las, dim_name):
    """Return True when `las` exposes an attribute named `dim_name`."""
    return hasattr(las, dim_name)
write_then_read_again(las) 81 | assert las.points.point_format.id == 1 82 | assert las.header.point_format.id == 1 83 | assert las.header.version == in_version 84 | assert dim_does_not_exists(las, "red") 85 | assert dim_does_not_exists(las, "green") 86 | assert dim_does_not_exists(las, "blue") 87 | 88 | las = pylas.convert(las, point_format_id=0) 89 | las = write_then_read_again(las) 90 | assert las.points.point_format.id == 0 91 | assert las.header.point_format.id == 0 92 | assert las.header.version == in_version 93 | assert dim_does_not_exists(las, "red") 94 | assert dim_does_not_exists(las, "green") 95 | assert dim_does_not_exists(las, "blue") 96 | assert dim_does_not_exists(las, "gps_time") 97 | 98 | las = pylas.convert(las, point_format_id=8) 99 | las = write_then_read_again(las) 100 | assert str(las.header.version) == "1.4" 101 | assert las.points.point_format.id == 8 102 | assert las.header.point_format.id == 8 103 | assert dim_does_exists(las, "red") 104 | assert dim_does_exists(las, "green") 105 | assert dim_does_exists(las, "blue") 106 | assert dim_does_exists(las, "nir") 107 | 108 | las = pylas.convert(las, point_format_id=7) 109 | las = write_then_read_again(las) 110 | assert str(las.header.version) == "1.4" 111 | assert las.points.point_format.id == 7 112 | assert las.header.point_format.id == 7 113 | assert dim_does_exists(las, "red") 114 | assert dim_does_exists(las, "green") 115 | assert dim_does_exists(las, "blue") 116 | assert dim_does_not_exists(las, "nir") 117 | 118 | las = pylas.convert(las, point_format_id=6) 119 | las = write_then_read_again(las) 120 | assert str(las.header.version) == "1.4" 121 | assert las.points.point_format.id == 6 122 | assert las.header.point_format.id == 6 123 | assert dim_does_not_exists(las, "red") 124 | assert dim_does_not_exists(las, "green") 125 | assert dim_does_not_exists(las, "blue") 126 | assert dim_does_not_exists(las, "nir") 127 | 128 | 129 | def test_rw_all_set_one(las): 130 | for dim_name in 
def test_coords_do_not_break(las):
    """Assigning x/y/z back to themselves must not alter the coordinates."""
    original = {axis: getattr(las, axis) for axis in ("x", "y", "z")}

    for axis, values in original.items():
        setattr(las, axis, values)

    for axis, values in original.items():
        assert np.allclose(values, getattr(las, axis))
def test_has_waveform():
    """Point formats 4, 5, 9 and 10 are the only ones carrying waveform packets."""
    waveform_ids = (4, 5, 9, 10)
    plain_ids = (0, 1, 2, 3, 6, 7, 8)

    for fmt_id in waveform_ids:
        msg = "Point format {} should have waveform".format(fmt_id)
        assert PointFormat(fmt_id).has_waveform_packet, msg

    for fmt_id in plain_ids:
        msg = "Point format {} should not have waveform".format(fmt_id)
        assert not PointFormat(fmt_id).has_waveform_packet, msg
def test_good_version_is_used():
    """Creating a file with point formats 6 or 7 must default to LAS version 1.4."""
    for point_format_id in (6, 7):
        header_version = pylas.create(point_format=point_format_id).header.version
        assert header_version.major == 1
        assert header_version.minor == 4
def test_create_fmt_1():
    """Point format 1 has gps_time but no color dimensions.

    Setting any of red/green/blue must raise ValueError, while gps_time
    must round-trip through a write/read cycle unchanged.
    """
    new = pylas.create(point_format=1)

    # Bug fix: the original asserted the *same* `new.red` assignment three
    # times; the intent was clearly to cover red, green and blue, none of
    # which exist in point format 1.
    for color in ("red", "green", "blue"):
        with pytest.raises(ValueError):
            setattr(new, color, np.zeros(len(new.points), np.uint16))

    gps_time = np.random.uniform(0, 25641, len(new.points))
    new.gps_time = gps_time
    assert np.allclose(new.gps_time, gps_time)

    new = write_then_read_again(new)
    assert np.allclose(new.gps_time, gps_time)
@pytest.mark.parametrize("laz_backend", (None,) + pylas.LazBackend.detect_available())
def test_writing_empty_file(laz_backend):
    """Writing a freshly created, point-less file must not fail, with or without LAZ."""
    empty_las = pylas.create()
    # Passing no `laz_backend` kwarg is equivalent to the original's plain write().
    extra_kwargs = {} if laz_backend is None else {"laz_backend": laz_backend}
    with io.BytesIO() as destination:
        empty_las.write(destination, **extra_kwargs)
def test_read_write_example_extra_bytes_file(las_file_path_with_extra_bytes):
    """Round-tripping a file with extra bytes must preserve every dimension."""
    source = pylas.read(las_file_path_with_extra_bytes)
    round_tripped = write_then_read_again(source)

    for dim_name in source.point_format.dimension_names:
        assert np.allclose(round_tripped[dim_name], source[dim_name])
def test_creating_scaled_extra_bytes(extra_bytes_params, simple_las_path):
    """Scaled extra bytes must apply their offset on creation and survive a round trip."""
    las = pylas.read(simple_las_path)

    # "3int32"-style type strings encode the element count in their first char.
    try:
        num_elements = int(extra_bytes_params.type[0])
    except ValueError:
        num_elements = 1

    scaled_params = pylas.ExtraBytesParams(
        extra_bytes_params.name,
        extra_bytes_params.type,
        offsets=np.array([2.0] * num_elements),
        scales=np.array([1.0] * num_elements),
    )
    las.add_extra_dim(scaled_params)

    # With scale 1.0 and offset 2.0, a zero-filled raw array reads back as 2.0.
    assert np.allclose(las[scaled_params.name], 2.0)

    las[scaled_params.name][:] = 42.0
    assert np.allclose(las[scaled_params.name], 42.0)

    las = write_then_read_again(las)
    assert np.allclose(las[scaled_params.name], 42.0)
def test_extra_bytes_with_spaces_in_name(simple_las_path):
    """
    Test that we can create extra bytes with spaces in their name
    and that they can be accessed using __getitem__ ( [] )
    as the normal '.name' attribute access won't work
    """
    las = pylas.read(simple_las_path)
    las.add_extra_dim(pylas.ExtraBytesParams(name="Name With Spaces", type="int32"))

    assert np.alltrue(las["Name With Spaces"] == 0)
    las["Name With Spaces"][:] = 789_464

    las = write_then_read_again(las)
    # Bug fix: the original final line called np.alltrue(...) without `assert`,
    # so the round-trip value was computed but never actually checked.
    assert np.alltrue(las["Name With Spaces"] == 789_464)
def test_creating_bytes_with_name_too_long(simple_las_path):
    """An extra-bytes name longer than 32 bytes must be rejected with ValueError."""
    las = pylas.read(simple_las_path)
    too_long_name = (
        "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed non risus"
    )

    with pytest.raises(ValueError) as exc_info:
        las.add_extra_dim(pylas.ExtraBytesParams(name=too_long_name, type="int32"))

    assert str(exc_info.value) == "bytes too long (70, maximum length 32)"
def test_cant_create_scaled_extra_bytes_without_both_offsets_and_scales():
    """Providing only scales or only offsets (but not both) must raise ValueError."""
    las = pylas.create()

    for partial_kwargs in ({"scales": np.array([0.1])}, {"offsets": np.array([0.1])}):
        with pytest.raises(ValueError):
            las.add_extra_dim(
                pylas.ExtraBytesParams("must fail", "int64", **partial_kwargs)
            )
def test_array_view_int_index_return_singular_elements():
    """Indexing a field view with an int must yield a numpy scalar, not a sub-view."""
    backing = np.array([1, 2, 3, 4], np.int32)

    sub_view = SubFieldView(backing, 0x00_00_00_FF)
    for idx in range(len(sub_view)):
        element = sub_view[idx]
        assert type(element) in (np.int32, np.int64)
        assert backing[idx] == element

    scaled_view = ScaledArrayView(backing, scale=2.0, offset=0.0)
    for idx in range(len(scaled_view)):
        element = scaled_view[idx]
        assert type(element) == np.float64
        assert (backing[idx] * 2.0) == element
def test_sub_field_as_array():
    """np.array(view) must be a copy: writes to the copy don't reach the view,
    and writing out-of-range values back into the view overflows."""
    backing = np.zeros(10, np.uint8)
    view = SubFieldView(backing, 0b0000_0010)

    detached = np.array(view)
    detached[:] = 1

    # The copy changed; the view over the backing array did not.
    assert np.all(detached == 1)
    assert np.all(view != 1)

    # 17 does not fit in the sub field, so writing it back must overflow.
    detached[:] = 17
    with pytest.raises(OverflowError):
        view[:] = detached[:]
def test_set_offsets():
    """Setting header.offsets must update x/y/z offsets and round-trip as a list."""
    expected = [0.5, 0.6, 0.7]
    header = pylas.header.LasHeader()
    header.offsets = expected

    assert header.x_offset == expected[0]
    assert header.y_offset == expected[1]
    assert header.z_offset == expected[2]
    assert list(header.offsets) == expected
def test_point_count_stays_synchronized():
    """header.point_count must track len(las.points), even after slicing points."""
    las = pylas.read(test_common.simple_las)
    assert len(las.points) == las.header.point_count

    kept = 120
    las.points = las.points[:kept]
    assert las.header.point_count == kept
    assert len(las.points) == las.header.point_count
@pytest.mark.parametrize("do_compress", do_compression)
def test_withheld_changes(las, do_compress):
    """The withheld flag must keep its values through assignment and a round trip."""
    flags = las.withheld
    flags[:] = False
    flags[180] = True

    las.withheld = flags
    assert np.allclose(flags, las.withheld)

    reread = write_then_read_again(las, do_compress=do_compress)
    assert np.allclose(flags, reread.withheld)
import numpy as np
import pytest

import pylas
from pylas import LazBackend
from pylas.errors import PylasError
from pylastests.test_common import test1_4_las, write_then_read_again


@pytest.fixture(scope="session")
def las():
    """The 1.4 test file, parsed once and shared across the session."""
    return pylas.read(test1_4_las)


@pytest.fixture(params=LazBackend.detect_available())
def laz_backend(request):
    """Yield each available LAZ backend (no tests run if none is installed)."""
    return request.param


def test_classification(las):
    """1.4 formats store classification on a full byte, so 234 is storable
    and must survive a write/read round trip."""
    las.classification[:] = 234
    # np.all replaces np.alltrue, a deprecated alias removed in NumPy 2.0.
    assert np.all(las.classification == 234)

    res = write_then_read_again(las)

    assert np.all(las.classification == res.classification)


def test_intensity(las):
    """Intensity edits must survive a write/read round trip."""
    las.intensity[:] = 89
    assert np.all(las.intensity == 89)
    res = write_then_read_again(las)

    assert np.all(las.intensity == res.intensity)


def test_writing_las_with_evlrs():
    """An appended EVLR must be written out and read back for uncompressed LAS."""
    las = pylas.read(test1_4_las)
    assert las.evlrs == []

    evlr = pylas.VLR(
        user_id="pylastest",
        record_id=42,
        description="Just a test",
        record_data=b"And so he grinds his own hands",
    )
    las.evlrs.append(evlr)

    las_1 = write_then_read_again(las, do_compress=False)
    assert las_1.evlrs == [evlr]


def test_writing_laz_with_evlrs(laz_backend):
    """An appended EVLR must be written out and read back for compressed LAZ."""
    las = pylas.read(test1_4_las)
    assert las.evlrs == []

    evlr = pylas.VLR(
        user_id="pylastest",
        record_id=42,
        description="Just a test",
        record_data=b"And so he grinds he own hands",
    )
    las.evlrs.append(evlr)

    las_1 = write_then_read_again(las, do_compress=True, laz_backend=laz_backend)
    assert las_1.evlrs == [evlr]
@pytest.mark.skipif(
    len(pylas.LazBackend.detect_available()) == 0, reason="No Laz Backend installed"
)
def test_extra_dims_not_equal():
    """Test to confirm that two point format with same id but
    not same extra dimension are not equal
    """
    las = pylas.read(extra_bytes_laz)
    i = las.points.point_format.id
    # A freshly constructed PointFormat(i) carries no extra dimensions, so it
    # must compare unequal to the format read from the extra-bytes file.
    assert las.points.point_format != PointFormat(i)


@pytest.fixture(
    params=[simple_las, simple_laz]
    if pylas.LazBackend.detect_available()
    else [simple_las],
    scope="session",
)
def read_simple(request):
    # Session-scoped: parse once, share across all read-only value tests.
    return pylas.read(request.param)


@pytest.fixture()
def open_simple():
    # Returns a raw binary file object; the consuming test is responsible
    # for closing it (see test_every_byte_has_been_read).
    return open(simple_las, mode="rb")


@pytest.fixture()
def read_uncompressed():
    # NOTE(review): despite the name, this reads the *compressed* file
    # (simple_laz); the result holds decompressed points — confirm intent.
    return pylas.read(simple_laz)


@pytest.fixture()
def get_header():
    # Open lazily so only the header is parsed; the file closes on exit.
    with pylas.open(simple_las) as fin:
        return fin.header
def test_no_vlr_for_simple(read_simple):
    """simple.las carries no VLR records."""
    las = read_simple
    assert las.vlrs == []


def test_every_byte_has_been_read(open_simple):
    """After a full parse the stream must sit exactly at end-of-file."""
    stream = open_simple
    _ = pylas.read(stream, closefd=False)
    assert stream.tell() == os.path.getsize(simple_las)
    stream.close()


def test_unscaled_x(read_simple):
    """Raw integer X range of the simple file."""
    las = read_simple
    assert las.X.min() == 63561985
    assert las.X.max() == 63898255


def test_unscaled_y(read_simple):
    """Raw integer Y range of the simple file."""
    las = read_simple
    assert las.Y.min() == 84889970
    assert las.Y.max() == 85353543


def test_unscaled_z(read_simple):
    """Raw integer Z range of the simple file."""
    las = read_simple
    assert las.Z.min() == 40659
    assert las.Z.max() == 58638


def test_intensity(read_simple):
    """Known intensity bounds."""
    las = read_simple
    assert las.intensity.min() == 0
    assert las.intensity.max() == 254


def test_return_number(read_simple):
    """Return numbers span 1 through 4."""
    las = read_simple
    assert las.return_number.min() == 1
    assert las.return_number.max() == 4


def test_number_of_returns(read_simple):
    """Return counts span 1 through 4."""
    las = read_simple
    assert las.number_of_returns.min() == 1
    assert las.number_of_returns.max() == 4


def test_edge_of_flight_line(read_simple):
    """No point in this file is flagged as edge-of-flight-line."""
    las = read_simple
    assert las.edge_of_flight_line.min() == 0
    assert las.edge_of_flight_line.max() == 0


def test_scan_direction_flag(read_simple):
    """Both scan directions occur in the file."""
    las = read_simple
    assert las.scan_direction_flag.min() == 0
    assert las.scan_direction_flag.max() == 1


def test_scan_angle_rank(read_simple):
    """Known scan-angle-rank bounds (signed field)."""
    las = read_simple
    assert las.scan_angle_rank.min() == -19
    assert las.scan_angle_rank.max() == 18


def test_classification_max_min(read_simple):
    """Only class codes 1 and 2 appear."""
    las = read_simple
    assert las.classification.min() == 1
    assert las.classification.max() == 2


def test_classification_count(read_simple):
    """Exactly two classes are present, with the known per-class counts."""
    las = read_simple
    uniques, counts = np.unique(las.classification, return_counts=True)
    # class code 1 -> 789 points, class code 2 -> 276 points
    assert dict(zip(uniques.tolist(), counts.tolist())) == {1: 789, 2: 276}


def test_user_data(read_simple):
    """Known user-data bounds."""
    las = read_simple
    assert las.user_data.min() == 117
    assert las.user_data.max() == 149


def test_point_source_id(read_simple):
    """Known point-source-id bounds."""
    las = read_simple
    assert las.point_source_id.min() == 7326
    assert las.point_source_id.max() == 7334


def test_gps_time(read_simple):
    """Known GPS time bounds (floating point, compared approximately)."""
    las = read_simple
    assert las.gps_time.min() == pytest.approx(245370.417075)
    assert las.gps_time.max() == pytest.approx(249783.162158)


def test_red(read_simple):
    """Known red channel bounds."""
    las = read_simple
    assert las.red.min() == 39
    assert las.red.max() == 249


def test_green(read_simple):
    """Known green channel bounds."""
    las = read_simple
    assert las.green.min() == 57
    assert las.green.max() == 239


def test_blue(read_simple):
    """Known blue channel bounds."""
    las = read_simple
    assert las.blue.min() == 56
    assert las.blue.max() == 249
import pytest

import pylas
from pylastests.test_common import test1_4_las


@pytest.fixture()
def file():
    """A freshly parsed copy of the 1.4 test file for each test."""
    return pylas.read(test1_4_las)


def test_unscaled_x(file):
    """Raw integer X range of the 1.4 test file."""
    assert file.X.max() == 1751224820
    assert file.X.min() == 1320803567


def test_unscaled_y(file):
    """Raw integer Y range (negative: south of the coordinate origin)."""
    assert file.Y.max() == -860121188
    assert file.Y.min() == -864646690


def test_unscaled_z(file):
    """Raw integer Z range."""
    assert file.Z.max() == -1745638014
    assert file.Z.min() == -1751937981


def test_intensity(file):
    """Known intensity bounds."""
    assert file.intensity.max() == 68
    assert file.intensity.min() == 2


def test_return_number(file):
    """Return numbers span 1 through 4."""
    assert file.return_number.max() == 4
    assert file.return_number.min() == 1


def test_number_of_returns(file):
    """Return counts span 1 through 4."""
    assert file.number_of_returns.max() == 4
    assert file.number_of_returns.min() == 1


def test_edge_of_flight_line(file):
    """Both edge-of-flight-line states occur in this file."""
    assert file.edge_of_flight_line.max() == 1
    assert file.edge_of_flight_line.min() == 0


def test_scan_direction_flag(file):
    # Renamed from `scan_direction_flag`: without the `test_` prefix pytest
    # never collected this function, so its assertions were silently skipped.
    assert file.scan_direction_flag.max() == 1
    assert file.scan_direction_flag.min() == 0


def test_classification(file):
    """Every point is class 2 (ground)."""
    assert file.classification.max() == 2
    assert file.classification.min() == 2


def test_scan_angle_rank(file):
    """Known bounds of the 1.4 extended (16-bit) scan angle field."""
    assert file.scan_angle.max() == 3173
    assert file.scan_angle.min() == 1837


def test_user_data(file):
    """User data is unused (all zeros)."""
    assert file.user_data.max() == 0
    assert file.user_data.min() == 0


def test_point_source_id(file):
    """Every point came from source id 202."""
    assert file.point_source_id.max() == 202
    assert file.point_source_id.min() == 202
def test_scanner_channel(file):
    """Every point in the 1.4 test file sits on scanner channel 0."""
    assert file.scanner_channel.min() == 0
    assert file.scanner_channel.max() == 0


# --- test_vlrs.py ---
import pytest

import pylas
from pylastests import test_common


def test_adding_classification_lookup():
    """A hand-built classification lookup VLR survives a write/read cycle."""
    las = pylas.read(test_common.simple_las)
    lookup_vlr = pylas.vlrs.known.ClassificationLookupVlr()

    # The VLR starts empty and grows as class codes are mapped to names.
    assert len(lookup_vlr.lookups) == 0
    lookup_vlr[20] = "computer"
    assert len(lookup_vlr.lookups) == 1
    lookup_vlr[17] = "car"

    las.vlrs.append(lookup_vlr)

    las = test_common.write_then_read_again(las)
    read_back = las.vlrs.get("ClassificationLookupVlr")[0]

    assert read_back[20] == "computer"
    assert read_back[17] == "car"


def test_lookup_out_of_range():
    """Class codes outside the valid unsigned-byte range are rejected."""
    lookup_vlr = pylas.vlrs.known.ClassificationLookupVlr()
    for bad_code, name in ((541, "LiquidWater"), (-42, "SolidWater")):
        with pytest.raises(ValueError):
            lookup_vlr[bad_code] = name
from setuptools import setup, find_packages

# Read the long description from the README. The explicit encoding avoids
# locale-dependent decoding errors (e.g. cp1252 on Windows) if the file
# ever contains non-ASCII characters.
with open("README.rst", encoding="utf-8") as f:
    readme = f.read()

setup(
    name="pylas",
    version="0.6.0a2",
    description="Las/Laz reading and writing in python",
    long_description=readme,
    url="https://github.com/tmontaigu/pylas",
    author="Thomas Montaigu",
    author_email="thomas.montaigu@laposte.net",
    python_requires=">=3.6",
    keywords="las laz lidar",
    license="BSD 3-Clause",
    # Test data lives in pylastests; keep it out of the installed package.
    packages=find_packages(exclude=("pylastests",)),
    zip_safe=False,
    install_requires=["numpy"],
    extras_require={
        "dev": [
            "pytest",
            "sphinx",
            "sphinx-rtd-theme",
            "nox",
            "black"
        ],
        # Optional LAZ (compressed) backends; at least one is needed to
        # read or write .laz files.
        "lazrs": [
            "lazrs>=0.2.3, < 0.3.0"
        ],
        "laszip": [
            "laszip >= 0.0.1, < 0.1.0"
        ]
    }
)
--------------------------------------------------------------------------------