├── .gitignore
├── CHANGES.txt
├── LICENSE
├── PKG-INFO
├── README.md
├── WC1.las
├── forestlas
│   ├── __init__.py
│   ├── __init__.pyc
│   ├── canopyComplexity.py
│   ├── extractPlots.py
│   ├── geometricTree.py
│   ├── global_header.py
│   ├── lasIO.py
│   ├── lasIO_.py
│   ├── lasIO_pool.py
│   ├── lasStructure.py
│   ├── lasStructure.pyc
│   ├── recover_temp_files_.py
│   ├── thinCloud.py
│   ├── woodyAttribute.py
│   ├── woodyAttribute_.py
│   └── woodyAttribute_pool.py
├── forestlas_intro.ipynb
└── setup.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 |
50 | # Translations
51 | *.mo
52 | *.pot
53 |
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 |
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 |
63 | # Scrapy stuff:
64 | .scrapy
65 |
66 | # Sphinx documentation
67 | docs/_build/
68 |
69 | # PyBuilder
70 | target/
71 |
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 |
75 | # pyenv
76 | .python-version
77 |
78 | # celery beat schedule file
79 | celerybeat-schedule
80 |
81 | # SageMath parsed files
82 | *.sage.py
83 |
84 | # Environments
85 | .env
86 | .venv
87 | env/
88 | venv/
89 | ENV/
90 | env.bak/
91 | venv.bak/
92 |
93 | # Spyder project settings
94 | .spyderproject
95 | .spyproject
96 |
97 | # Rope project settings
98 | .ropeproject
99 |
100 | # mkdocs documentation
101 | /site
102 |
103 | # mypy
104 | .mypy_cache/
105 |
106 |
107 | build/*
108 |
--------------------------------------------------------------------------------
/CHANGES.txt:
--------------------------------------------------------------------------------
1 | v0.1.0, 1/10/2013 -- Initial release.
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU General Public License is a free, copyleft license for
11 | software and other kinds of works.
12 |
13 | The licenses for most software and other practical works are designed
14 | to take away your freedom to share and change the works. By contrast,
15 | the GNU General Public License is intended to guarantee your freedom to
16 | share and change all versions of a program--to make sure it remains free
17 | software for all its users. We, the Free Software Foundation, use the
18 | GNU General Public License for most of our software; it applies also to
19 | any other work released this way by its authors. You can apply it to
20 | your programs, too.
21 |
22 | When we speak of free software, we are referring to freedom, not
23 | price. Our General Public Licenses are designed to make sure that you
24 | have the freedom to distribute copies of free software (and charge for
25 | them if you wish), that you receive source code or can get it if you
26 | want it, that you can change the software or use pieces of it in new
27 | free programs, and that you know you can do these things.
28 |
29 | To protect your rights, we need to prevent others from denying you
30 | these rights or asking you to surrender the rights. Therefore, you have
31 | certain responsibilities if you distribute copies of the software, or if
32 | you modify it: responsibilities to respect the freedom of others.
33 |
34 | For example, if you distribute copies of such a program, whether
35 | gratis or for a fee, you must pass on to the recipients the same
36 | freedoms that you received. You must make sure that they, too, receive
37 | or can get the source code. And you must show them these terms so they
38 | know their rights.
39 |
40 | Developers that use the GNU GPL protect your rights with two steps:
41 | (1) assert copyright on the software, and (2) offer you this License
42 | giving you legal permission to copy, distribute and/or modify it.
43 |
44 | For the developers' and authors' protection, the GPL clearly explains
45 | that there is no warranty for this free software. For both users' and
46 | authors' sake, the GPL requires that modified versions be marked as
47 | changed, so that their problems will not be attributed erroneously to
48 | authors of previous versions.
49 |
50 | Some devices are designed to deny users access to install or run
51 | modified versions of the software inside them, although the manufacturer
52 | can do so. This is fundamentally incompatible with the aim of
53 | protecting users' freedom to change the software. The systematic
54 | pattern of such abuse occurs in the area of products for individuals to
55 | use, which is precisely where it is most unacceptable. Therefore, we
56 | have designed this version of the GPL to prohibit the practice for those
57 | products. If such problems arise substantially in other domains, we
58 | stand ready to extend this provision to those domains in future versions
59 | of the GPL, as needed to protect the freedom of users.
60 |
61 | Finally, every program is threatened constantly by software patents.
62 | States should not allow patents to restrict development and use of
63 | software on general-purpose computers, but in those that do, we wish to
64 | avoid the special danger that patents applied to a free program could
65 | make it effectively proprietary. To prevent this, the GPL assures that
66 | patents cannot be used to render the program non-free.
67 |
68 | The precise terms and conditions for copying, distribution and
69 | modification follow.
70 |
71 | TERMS AND CONDITIONS
72 |
73 | 0. Definitions.
74 |
75 | "This License" refers to version 3 of the GNU General Public License.
76 |
77 | "Copyright" also means copyright-like laws that apply to other kinds of
78 | works, such as semiconductor masks.
79 |
80 | "The Program" refers to any copyrightable work licensed under this
81 | License. Each licensee is addressed as "you". "Licensees" and
82 | "recipients" may be individuals or organizations.
83 |
84 | To "modify" a work means to copy from or adapt all or part of the work
85 | in a fashion requiring copyright permission, other than the making of an
86 | exact copy. The resulting work is called a "modified version" of the
87 | earlier work or a work "based on" the earlier work.
88 |
89 | A "covered work" means either the unmodified Program or a work based
90 | on the Program.
91 |
92 | To "propagate" a work means to do anything with it that, without
93 | permission, would make you directly or secondarily liable for
94 | infringement under applicable copyright law, except executing it on a
95 | computer or modifying a private copy. Propagation includes copying,
96 | distribution (with or without modification), making available to the
97 | public, and in some countries other activities as well.
98 |
99 | To "convey" a work means any kind of propagation that enables other
100 | parties to make or receive copies. Mere interaction with a user through
101 | a computer network, with no transfer of a copy, is not conveying.
102 |
103 | An interactive user interface displays "Appropriate Legal Notices"
104 | to the extent that it includes a convenient and prominently visible
105 | feature that (1) displays an appropriate copyright notice, and (2)
106 | tells the user that there is no warranty for the work (except to the
107 | extent that warranties are provided), that licensees may convey the
108 | work under this License, and how to view a copy of this License. If
109 | the interface presents a list of user commands or options, such as a
110 | menu, a prominent item in the list meets this criterion.
111 |
112 | 1. Source Code.
113 |
114 | The "source code" for a work means the preferred form of the work
115 | for making modifications to it. "Object code" means any non-source
116 | form of a work.
117 |
118 | A "Standard Interface" means an interface that either is an official
119 | standard defined by a recognized standards body, or, in the case of
120 | interfaces specified for a particular programming language, one that
121 | is widely used among developers working in that language.
122 |
123 | The "System Libraries" of an executable work include anything, other
124 | than the work as a whole, that (a) is included in the normal form of
125 | packaging a Major Component, but which is not part of that Major
126 | Component, and (b) serves only to enable use of the work with that
127 | Major Component, or to implement a Standard Interface for which an
128 | implementation is available to the public in source code form. A
129 | "Major Component", in this context, means a major essential component
130 | (kernel, window system, and so on) of the specific operating system
131 | (if any) on which the executable work runs, or a compiler used to
132 | produce the work, or an object code interpreter used to run it.
133 |
134 | The "Corresponding Source" for a work in object code form means all
135 | the source code needed to generate, install, and (for an executable
136 | work) run the object code and to modify the work, including scripts to
137 | control those activities. However, it does not include the work's
138 | System Libraries, or general-purpose tools or generally available free
139 | programs which are used unmodified in performing those activities but
140 | which are not part of the work. For example, Corresponding Source
141 | includes interface definition files associated with source files for
142 | the work, and the source code for shared libraries and dynamically
143 | linked subprograms that the work is specifically designed to require,
144 | such as by intimate data communication or control flow between those
145 | subprograms and other parts of the work.
146 |
147 | The Corresponding Source need not include anything that users
148 | can regenerate automatically from other parts of the Corresponding
149 | Source.
150 |
151 | The Corresponding Source for a work in source code form is that
152 | same work.
153 |
154 | 2. Basic Permissions.
155 |
156 | All rights granted under this License are granted for the term of
157 | copyright on the Program, and are irrevocable provided the stated
158 | conditions are met. This License explicitly affirms your unlimited
159 | permission to run the unmodified Program. The output from running a
160 | covered work is covered by this License only if the output, given its
161 | content, constitutes a covered work. This License acknowledges your
162 | rights of fair use or other equivalent, as provided by copyright law.
163 |
164 | You may make, run and propagate covered works that you do not
165 | convey, without conditions so long as your license otherwise remains
166 | in force. You may convey covered works to others for the sole purpose
167 | of having them make modifications exclusively for you, or provide you
168 | with facilities for running those works, provided that you comply with
169 | the terms of this License in conveying all material for which you do
170 | not control copyright. Those thus making or running the covered works
171 | for you must do so exclusively on your behalf, under your direction
172 | and control, on terms that prohibit them from making any copies of
173 | your copyrighted material outside their relationship with you.
174 |
175 | Conveying under any other circumstances is permitted solely under
176 | the conditions stated below. Sublicensing is not allowed; section 10
177 | makes it unnecessary.
178 |
179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
180 |
181 | No covered work shall be deemed part of an effective technological
182 | measure under any applicable law fulfilling obligations under article
183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
184 | similar laws prohibiting or restricting circumvention of such
185 | measures.
186 |
187 | When you convey a covered work, you waive any legal power to forbid
188 | circumvention of technological measures to the extent such circumvention
189 | is effected by exercising rights under this License with respect to
190 | the covered work, and you disclaim any intention to limit operation or
191 | modification of the work as a means of enforcing, against the work's
192 | users, your or third parties' legal rights to forbid circumvention of
193 | technological measures.
194 |
195 | 4. Conveying Verbatim Copies.
196 |
197 | You may convey verbatim copies of the Program's source code as you
198 | receive it, in any medium, provided that you conspicuously and
199 | appropriately publish on each copy an appropriate copyright notice;
200 | keep intact all notices stating that this License and any
201 | non-permissive terms added in accord with section 7 apply to the code;
202 | keep intact all notices of the absence of any warranty; and give all
203 | recipients a copy of this License along with the Program.
204 |
205 | You may charge any price or no price for each copy that you convey,
206 | and you may offer support or warranty protection for a fee.
207 |
208 | 5. Conveying Modified Source Versions.
209 |
210 | You may convey a work based on the Program, or the modifications to
211 | produce it from the Program, in the form of source code under the
212 | terms of section 4, provided that you also meet all of these conditions:
213 |
214 | a) The work must carry prominent notices stating that you modified
215 | it, and giving a relevant date.
216 |
217 | b) The work must carry prominent notices stating that it is
218 | released under this License and any conditions added under section
219 | 7. This requirement modifies the requirement in section 4 to
220 | "keep intact all notices".
221 |
222 | c) You must license the entire work, as a whole, under this
223 | License to anyone who comes into possession of a copy. This
224 | License will therefore apply, along with any applicable section 7
225 | additional terms, to the whole of the work, and all its parts,
226 | regardless of how they are packaged. This License gives no
227 | permission to license the work in any other way, but it does not
228 | invalidate such permission if you have separately received it.
229 |
230 | d) If the work has interactive user interfaces, each must display
231 | Appropriate Legal Notices; however, if the Program has interactive
232 | interfaces that do not display Appropriate Legal Notices, your
233 | work need not make them do so.
234 |
235 | A compilation of a covered work with other separate and independent
236 | works, which are not by their nature extensions of the covered work,
237 | and which are not combined with it such as to form a larger program,
238 | in or on a volume of a storage or distribution medium, is called an
239 | "aggregate" if the compilation and its resulting copyright are not
240 | used to limit the access or legal rights of the compilation's users
241 | beyond what the individual works permit. Inclusion of a covered work
242 | in an aggregate does not cause this License to apply to the other
243 | parts of the aggregate.
244 |
245 | 6. Conveying Non-Source Forms.
246 |
247 | You may convey a covered work in object code form under the terms
248 | of sections 4 and 5, provided that you also convey the
249 | machine-readable Corresponding Source under the terms of this License,
250 | in one of these ways:
251 |
252 | a) Convey the object code in, or embodied in, a physical product
253 | (including a physical distribution medium), accompanied by the
254 | Corresponding Source fixed on a durable physical medium
255 | customarily used for software interchange.
256 |
257 | b) Convey the object code in, or embodied in, a physical product
258 | (including a physical distribution medium), accompanied by a
259 | written offer, valid for at least three years and valid for as
260 | long as you offer spare parts or customer support for that product
261 | model, to give anyone who possesses the object code either (1) a
262 | copy of the Corresponding Source for all the software in the
263 | product that is covered by this License, on a durable physical
264 | medium customarily used for software interchange, for a price no
265 | more than your reasonable cost of physically performing this
266 | conveying of source, or (2) access to copy the
267 | Corresponding Source from a network server at no charge.
268 |
269 | c) Convey individual copies of the object code with a copy of the
270 | written offer to provide the Corresponding Source. This
271 | alternative is allowed only occasionally and noncommercially, and
272 | only if you received the object code with such an offer, in accord
273 | with subsection 6b.
274 |
275 | d) Convey the object code by offering access from a designated
276 | place (gratis or for a charge), and offer equivalent access to the
277 | Corresponding Source in the same way through the same place at no
278 | further charge. You need not require recipients to copy the
279 | Corresponding Source along with the object code. If the place to
280 | copy the object code is a network server, the Corresponding Source
281 | may be on a different server (operated by you or a third party)
282 | that supports equivalent copying facilities, provided you maintain
283 | clear directions next to the object code saying where to find the
284 | Corresponding Source. Regardless of what server hosts the
285 | Corresponding Source, you remain obligated to ensure that it is
286 | available for as long as needed to satisfy these requirements.
287 |
288 | e) Convey the object code using peer-to-peer transmission, provided
289 | you inform other peers where the object code and Corresponding
290 | Source of the work are being offered to the general public at no
291 | charge under subsection 6d.
292 |
293 | A separable portion of the object code, whose source code is excluded
294 | from the Corresponding Source as a System Library, need not be
295 | included in conveying the object code work.
296 |
297 | A "User Product" is either (1) a "consumer product", which means any
298 | tangible personal property which is normally used for personal, family,
299 | or household purposes, or (2) anything designed or sold for incorporation
300 | into a dwelling. In determining whether a product is a consumer product,
301 | doubtful cases shall be resolved in favor of coverage. For a particular
302 | product received by a particular user, "normally used" refers to a
303 | typical or common use of that class of product, regardless of the status
304 | of the particular user or of the way in which the particular user
305 | actually uses, or expects or is expected to use, the product. A product
306 | is a consumer product regardless of whether the product has substantial
307 | commercial, industrial or non-consumer uses, unless such uses represent
308 | the only significant mode of use of the product.
309 |
310 | "Installation Information" for a User Product means any methods,
311 | procedures, authorization keys, or other information required to install
312 | and execute modified versions of a covered work in that User Product from
313 | a modified version of its Corresponding Source. The information must
314 | suffice to ensure that the continued functioning of the modified object
315 | code is in no case prevented or interfered with solely because
316 | modification has been made.
317 |
318 | If you convey an object code work under this section in, or with, or
319 | specifically for use in, a User Product, and the conveying occurs as
320 | part of a transaction in which the right of possession and use of the
321 | User Product is transferred to the recipient in perpetuity or for a
322 | fixed term (regardless of how the transaction is characterized), the
323 | Corresponding Source conveyed under this section must be accompanied
324 | by the Installation Information. But this requirement does not apply
325 | if neither you nor any third party retains the ability to install
326 | modified object code on the User Product (for example, the work has
327 | been installed in ROM).
328 |
329 | The requirement to provide Installation Information does not include a
330 | requirement to continue to provide support service, warranty, or updates
331 | for a work that has been modified or installed by the recipient, or for
332 | the User Product in which it has been modified or installed. Access to a
333 | network may be denied when the modification itself materially and
334 | adversely affects the operation of the network or violates the rules and
335 | protocols for communication across the network.
336 |
337 | Corresponding Source conveyed, and Installation Information provided,
338 | in accord with this section must be in a format that is publicly
339 | documented (and with an implementation available to the public in
340 | source code form), and must require no special password or key for
341 | unpacking, reading or copying.
342 |
343 | 7. Additional Terms.
344 |
345 | "Additional permissions" are terms that supplement the terms of this
346 | License by making exceptions from one or more of its conditions.
347 | Additional permissions that are applicable to the entire Program shall
348 | be treated as though they were included in this License, to the extent
349 | that they are valid under applicable law. If additional permissions
350 | apply only to part of the Program, that part may be used separately
351 | under those permissions, but the entire Program remains governed by
352 | this License without regard to the additional permissions.
353 |
354 | When you convey a copy of a covered work, you may at your option
355 | remove any additional permissions from that copy, or from any part of
356 | it. (Additional permissions may be written to require their own
357 | removal in certain cases when you modify the work.) You may place
358 | additional permissions on material, added by you to a covered work,
359 | for which you have or can give appropriate copyright permission.
360 |
361 | Notwithstanding any other provision of this License, for material you
362 | add to a covered work, you may (if authorized by the copyright holders of
363 | that material) supplement the terms of this License with terms:
364 |
365 | a) Disclaiming warranty or limiting liability differently from the
366 | terms of sections 15 and 16 of this License; or
367 |
368 | b) Requiring preservation of specified reasonable legal notices or
369 | author attributions in that material or in the Appropriate Legal
370 | Notices displayed by works containing it; or
371 |
372 | c) Prohibiting misrepresentation of the origin of that material, or
373 | requiring that modified versions of such material be marked in
374 | reasonable ways as different from the original version; or
375 |
376 | d) Limiting the use for publicity purposes of names of licensors or
377 | authors of the material; or
378 |
379 | e) Declining to grant rights under trademark law for use of some
380 | trade names, trademarks, or service marks; or
381 |
382 | f) Requiring indemnification of licensors and authors of that
383 | material by anyone who conveys the material (or modified versions of
384 | it) with contractual assumptions of liability to the recipient, for
385 | any liability that these contractual assumptions directly impose on
386 | those licensors and authors.
387 |
388 | All other non-permissive additional terms are considered "further
389 | restrictions" within the meaning of section 10. If the Program as you
390 | received it, or any part of it, contains a notice stating that it is
391 | governed by this License along with a term that is a further
392 | restriction, you may remove that term. If a license document contains
393 | a further restriction but permits relicensing or conveying under this
394 | License, you may add to a covered work material governed by the terms
395 | of that license document, provided that the further restriction does
396 | not survive such relicensing or conveying.
397 |
398 | If you add terms to a covered work in accord with this section, you
399 | must place, in the relevant source files, a statement of the
400 | additional terms that apply to those files, or a notice indicating
401 | where to find the applicable terms.
402 |
403 | Additional terms, permissive or non-permissive, may be stated in the
404 | form of a separately written license, or stated as exceptions;
405 | the above requirements apply either way.
406 |
407 | 8. Termination.
408 |
409 | You may not propagate or modify a covered work except as expressly
410 | provided under this License. Any attempt otherwise to propagate or
411 | modify it is void, and will automatically terminate your rights under
412 | this License (including any patent licenses granted under the third
413 | paragraph of section 11).
414 |
415 | However, if you cease all violation of this License, then your
416 | license from a particular copyright holder is reinstated (a)
417 | provisionally, unless and until the copyright holder explicitly and
418 | finally terminates your license, and (b) permanently, if the copyright
419 | holder fails to notify you of the violation by some reasonable means
420 | prior to 60 days after the cessation.
421 |
422 | Moreover, your license from a particular copyright holder is
423 | reinstated permanently if the copyright holder notifies you of the
424 | violation by some reasonable means, this is the first time you have
425 | received notice of violation of this License (for any work) from that
426 | copyright holder, and you cure the violation prior to 30 days after
427 | your receipt of the notice.
428 |
429 | Termination of your rights under this section does not terminate the
430 | licenses of parties who have received copies or rights from you under
431 | this License. If your rights have been terminated and not permanently
432 | reinstated, you do not qualify to receive new licenses for the same
433 | material under section 10.
434 |
435 | 9. Acceptance Not Required for Having Copies.
436 |
437 | You are not required to accept this License in order to receive or
438 | run a copy of the Program. Ancillary propagation of a covered work
439 | occurring solely as a consequence of using peer-to-peer transmission
440 | to receive a copy likewise does not require acceptance. However,
441 | nothing other than this License grants you permission to propagate or
442 | modify any covered work. These actions infringe copyright if you do
443 | not accept this License. Therefore, by modifying or propagating a
444 | covered work, you indicate your acceptance of this License to do so.
445 |
446 | 10. Automatic Licensing of Downstream Recipients.
447 |
448 | Each time you convey a covered work, the recipient automatically
449 | receives a license from the original licensors, to run, modify and
450 | propagate that work, subject to this License. You are not responsible
451 | for enforcing compliance by third parties with this License.
452 |
453 | An "entity transaction" is a transaction transferring control of an
454 | organization, or substantially all assets of one, or subdividing an
455 | organization, or merging organizations. If propagation of a covered
456 | work results from an entity transaction, each party to that
457 | transaction who receives a copy of the work also receives whatever
458 | licenses to the work the party's predecessor in interest had or could
459 | give under the previous paragraph, plus a right to possession of the
460 | Corresponding Source of the work from the predecessor in interest, if
461 | the predecessor has it or can get it with reasonable efforts.
462 |
463 | You may not impose any further restrictions on the exercise of the
464 | rights granted or affirmed under this License. For example, you may
465 | not impose a license fee, royalty, or other charge for exercise of
466 | rights granted under this License, and you may not initiate litigation
467 | (including a cross-claim or counterclaim in a lawsuit) alleging that
468 | any patent claim is infringed by making, using, selling, offering for
469 | sale, or importing the Program or any portion of it.
470 |
471 | 11. Patents.
472 |
473 | A "contributor" is a copyright holder who authorizes use under this
474 | License of the Program or a work on which the Program is based. The
475 | work thus licensed is called the contributor's "contributor version".
476 |
477 | A contributor's "essential patent claims" are all patent claims
478 | owned or controlled by the contributor, whether already acquired or
479 | hereafter acquired, that would be infringed by some manner, permitted
480 | by this License, of making, using, or selling its contributor version,
481 | but do not include claims that would be infringed only as a
482 | consequence of further modification of the contributor version. For
483 | purposes of this definition, "control" includes the right to grant
484 | patent sublicenses in a manner consistent with the requirements of
485 | this License.
486 |
487 | Each contributor grants you a non-exclusive, worldwide, royalty-free
488 | patent license under the contributor's essential patent claims, to
489 | make, use, sell, offer for sale, import and otherwise run, modify and
490 | propagate the contents of its contributor version.
491 |
492 | In the following three paragraphs, a "patent license" is any express
493 | agreement or commitment, however denominated, not to enforce a patent
494 | (such as an express permission to practice a patent or covenant not to
495 | sue for patent infringement). To "grant" such a patent license to a
496 | party means to make such an agreement or commitment not to enforce a
497 | patent against the party.
498 |
499 | If you convey a covered work, knowingly relying on a patent license,
500 | and the Corresponding Source of the work is not available for anyone
501 | to copy, free of charge and under the terms of this License, through a
502 | publicly available network server or other readily accessible means,
503 | then you must either (1) cause the Corresponding Source to be so
504 | available, or (2) arrange to deprive yourself of the benefit of the
505 | patent license for this particular work, or (3) arrange, in a manner
506 | consistent with the requirements of this License, to extend the patent
507 | license to downstream recipients. "Knowingly relying" means you have
508 | actual knowledge that, but for the patent license, your conveying the
509 | covered work in a country, or your recipient's use of the covered work
510 | in a country, would infringe one or more identifiable patents in that
511 | country that you have reason to believe are valid.
512 |
513 | If, pursuant to or in connection with a single transaction or
514 | arrangement, you convey, or propagate by procuring conveyance of, a
515 | covered work, and grant a patent license to some of the parties
516 | receiving the covered work authorizing them to use, propagate, modify
517 | or convey a specific copy of the covered work, then the patent license
518 | you grant is automatically extended to all recipients of the covered
519 | work and works based on it.
520 |
521 | A patent license is "discriminatory" if it does not include within
522 | the scope of its coverage, prohibits the exercise of, or is
523 | conditioned on the non-exercise of one or more of the rights that are
524 | specifically granted under this License. You may not convey a covered
525 | work if you are a party to an arrangement with a third party that is
526 | in the business of distributing software, under which you make payment
527 | to the third party based on the extent of your activity of conveying
528 | the work, and under which the third party grants, to any of the
529 | parties who would receive the covered work from you, a discriminatory
530 | patent license (a) in connection with copies of the covered work
531 | conveyed by you (or copies made from those copies), or (b) primarily
532 | for and in connection with specific products or compilations that
533 | contain the covered work, unless you entered into that arrangement,
534 | or that patent license was granted, prior to 28 March 2007.
535 |
536 | Nothing in this License shall be construed as excluding or limiting
537 | any implied license or other defenses to infringement that may
538 | otherwise be available to you under applicable patent law.
539 |
540 | 12. No Surrender of Others' Freedom.
541 |
542 | If conditions are imposed on you (whether by court order, agreement or
543 | otherwise) that contradict the conditions of this License, they do not
544 | excuse you from the conditions of this License. If you cannot convey a
545 | covered work so as to satisfy simultaneously your obligations under this
546 | License and any other pertinent obligations, then as a consequence you may
547 | not convey it at all. For example, if you agree to terms that obligate you
548 | to collect a royalty for further conveying from those to whom you convey
549 | the Program, the only way you could satisfy both those terms and this
550 | License would be to refrain entirely from conveying the Program.
551 |
552 | 13. Use with the GNU Affero General Public License.
553 |
554 | Notwithstanding any other provision of this License, you have
555 | permission to link or combine any covered work with a work licensed
556 | under version 3 of the GNU Affero General Public License into a single
557 | combined work, and to convey the resulting work. The terms of this
558 | License will continue to apply to the part which is the covered work,
559 | but the special requirements of the GNU Affero General Public License,
560 | section 13, concerning interaction through a network will apply to the
561 | combination as such.
562 |
563 | 14. Revised Versions of this License.
564 |
565 | The Free Software Foundation may publish revised and/or new versions of
566 | the GNU General Public License from time to time. Such new versions will
567 | be similar in spirit to the present version, but may differ in detail to
568 | address new problems or concerns.
569 |
570 | Each version is given a distinguishing version number. If the
571 | Program specifies that a certain numbered version of the GNU General
572 | Public License "or any later version" applies to it, you have the
573 | option of following the terms and conditions either of that numbered
574 | version or of any later version published by the Free Software
575 | Foundation. If the Program does not specify a version number of the
576 | GNU General Public License, you may choose any version ever published
577 | by the Free Software Foundation.
578 |
579 | If the Program specifies that a proxy can decide which future
580 | versions of the GNU General Public License can be used, that proxy's
581 | public statement of acceptance of a version permanently authorizes you
582 | to choose that version for the Program.
583 |
584 | Later license versions may give you additional or different
585 | permissions. However, no additional obligations are imposed on any
586 | author or copyright holder as a result of your choosing to follow a
587 | later version.
588 |
589 | 15. Disclaimer of Warranty.
590 |
591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
599 |
600 | 16. Limitation of Liability.
601 |
602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
610 | SUCH DAMAGES.
611 |
612 | 17. Interpretation of Sections 15 and 16.
613 |
614 | If the disclaimer of warranty and limitation of liability provided
615 | above cannot be given local legal effect according to their terms,
616 | reviewing courts shall apply local law that most closely approximates
617 | an absolute waiver of all civil liability in connection with the
618 | Program, unless a warranty or assumption of liability accompanies a
619 | copy of the Program in return for a fee.
620 |
621 | END OF TERMS AND CONDITIONS
622 |
623 | How to Apply These Terms to Your New Programs
624 |
625 | If you develop a new program, and you want it to be of the greatest
626 | possible use to the public, the best way to achieve this is to make it
627 | free software which everyone can redistribute and change under these terms.
628 |
629 | To do so, attach the following notices to the program. It is safest
630 | to attach them to the start of each source file to most effectively
631 | state the exclusion of warranty; and each file should have at least
632 | the "copyright" line and a pointer to where the full notice is found.
633 |
634 |     <one line to give the program's name and a brief idea of what it does.>
635 |     Copyright (C) <year>  <name of author>
636 |
637 | This program is free software: you can redistribute it and/or modify
638 | it under the terms of the GNU General Public License as published by
639 | the Free Software Foundation, either version 3 of the License, or
640 | (at your option) any later version.
641 |
642 | This program is distributed in the hope that it will be useful,
643 | but WITHOUT ANY WARRANTY; without even the implied warranty of
644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
645 | GNU General Public License for more details.
646 |
647 | You should have received a copy of the GNU General Public License
648 |     along with this program. If not, see <https://www.gnu.org/licenses/>.
649 |
650 | Also add information on how to contact you by electronic and paper mail.
651 |
652 | If the program does terminal interaction, make it output a short
653 | notice like this when it starts in an interactive mode:
654 |
655 |     <program>  Copyright (C) <year>  <name of author>
656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
657 | This is free software, and you are welcome to redistribute it
658 | under certain conditions; type `show c' for details.
659 |
660 | The hypothetical commands `show w' and `show c' should show the appropriate
661 | parts of the General Public License. Of course, your program's commands
662 | might be different; for a GUI interface, you would use an "about box".
663 |
664 | You should also get your employer (if you work as a programmer) or school,
665 | if any, to sign a "copyright disclaimer" for the program, if necessary.
666 | For more information on this, and how to apply and follow the GNU GPL, see
667 | <https://www.gnu.org/licenses/>.
668 |
669 | The GNU General Public License does not permit incorporating your program
670 | into proprietary programs. If your program is a subroutine library, you
671 | may consider it more useful to permit linking proprietary applications with
672 | the library. If this is what you want to do, use the GNU Lesser General
673 | Public License instead of this License. But first, please read
674 | <https://www.gnu.org/philosophy/why-not-lgpl.html>.
675 |
--------------------------------------------------------------------------------
/PKG-INFO:
--------------------------------------------------------------------------------
1 | Metadata-Version: 1.0
2 | Name: ForestLAS
3 | Version: 0.1.3.5
4 | Summary: Tools for generating woody attribution features
5 | Home-page: http://www.crcsi.com.au/Research/2-Feature-Extraction/2-07-Woody-Vegetation
6 | Author: Phil Wilkes
7 | Author-email: phil.wilkes@rmit.edu.au
8 | License: LICENSE.txt
9 | Description: UNKNOWN
10 | Platform: UNKNOWN
11 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # forestlas
2 | [](https://www.gnu.org/licenses/gpl-3.0)
3 |
4 | 
5 | Python code for generating metrics of forest vertical structure from airborne LiDAR data. This code was developed as
6 | part of my PhD (completed in 2016, can be viewed
7 | here)
8 | and was developed over the forests of Victoria, Australia.
9 | The aim was to develop a suite of metrics that are robust to forest type i.e. can be applied without prior information of
10 | forest structure.
11 |
12 | There are a number of methods available, check this
13 | Jupyter notebook for an introduction.
14 | Functions include reading `.las` files to numpy array, writing to `.las` as well as a number of methods to dice, slice and tile
15 | LiDAR data.
16 | The main set of functions is found in `forestlas.canopyComplexity`.
17 | These allow you to derive metrics of vertical canopy structure such as Pgap and also estimate number of canopy layers.
18 | More information can be found in this paper Wilkes, P. et al. (2016). Using discrete-return airborne laser scanning to
19 | quantify number of canopy strata across diverse forest types. Methods in Ecology and Evolution, 7(6), 700–712.
20 |
21 |
22 | #### Funding
23 | This research was funded by the Australian Postgraduate Award, Cooperative Research Centre for Spatial Information
24 | under Project 2.07, TERN/AusCover and Commonwealth Scientific and Industrial Research Organisation (CSIRO) Postgraduate
25 | Scholarship.
26 |
--------------------------------------------------------------------------------
/WC1.las:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/philwilkes/forestlas/5d0439adab918b51b00881186f1b1d670216b4cf/WC1.las
--------------------------------------------------------------------------------
/forestlas/__init__.py:
--------------------------------------------------------------------------------
1 | # http://www.diveintopython3.net/case-study-porting-chardet-to-python-3.html#multifile-modules
2 |
def detect(aBuf):
    """Run chardet-style universal detection on *aBuf* and return the result dict.

    Creates a fresh UniversalDetector, feeds it the whole buffer in one go,
    finalises the detection and hands back its ``result`` mapping.
    """
    from . import universaldetector
    detector = universaldetector.UniversalDetector()
    detector.reset()
    detector.feed(aBuf)
    detector.close()
    return detector.result
--------------------------------------------------------------------------------
/forestlas/__init__.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/philwilkes/forestlas/5d0439adab918b51b00881186f1b1d670216b4cf/forestlas/__init__.pyc
--------------------------------------------------------------------------------
/forestlas/canopyComplexity.py:
--------------------------------------------------------------------------------
1 | # Wilkes et al. in review. Using discrete-return ALS for the assessment
2 | # of vertical canopy structure across diverse forest types
3 | # Methods in Ecology and Evolution
4 |
5 | from scipy.stats import *
6 | import numpy as np
7 | from scipy.interpolate import UnivariateSpline
8 | import glob
9 | import os
10 | import tempfile
11 | import multiprocessing
12 | import numpy.lib.recfunctions as rfn
13 |
14 | from forestlas.lasIO import *
15 |
16 | np.seterr(invalid="ignore")
17 |
class CanopyComplexity:

    """
    Derives vertical canopy structure metrics from discrete-return ALS data:
    gap fraction (Pgap), the canopy height profile (CHP) and an estimate of
    the number of canopy layers, following Wilkes et al. (2016), Methods in
    Ecology and Evolution.
    """

    def __init__(self, verbose=False, mp=False):

        # verbose is stored for API consistency with the rest of the package
        self.verbose = verbose

        # mp: path of a per-process temporary directory when run under
        # multiprocessing; False means no dedicated temp dir
        if mp is not False:
            self.tempDir = mp
        else:
            self.tempDir = False

    def fromLAS(self, las, threshold=2.0, top_threshold=99, z_scale=False):

        """
        Loads return data from a .las file path or from a structured numpy
        array with 'z' (height) and 'rtn_tot' (returns per pulse) fields.

        threshold     : heights <= this (m) are dropped as ground
        top_threshold : percentile above which heights are dropped as outliers
        z_scale       : vertical resolution; defaults to the file header value
                        for file input or 0.01 for array input
        """

        self.threshold = threshold

        if isinstance(las, str):
            lasRAW = lasIO(las, tmpDir=self.tempDir, keepTemp=False).all().asArray() # reads las file

            # guard against files where rtn_tot was never populated
            if lasRAW['rtn_tot'].max() == 0:
                lasRAW['rtn_tot'][:] = 1

            self.las = lasRAW[lasRAW['rtn_tot'] > 0] # finds lines where rtn_tot == 0 and removes them
            self.z_scale = parseHeader(las)['zscale']

        elif isinstance(las, np.ndarray):

            self.las = las
            # fix: honour a caller-supplied z_scale -- previously a truthy
            # z_scale was ignored and self.z_scale was left unset
            self.z_scale = z_scale if z_scale else .01

        else:
            raise Exception('input needs to path to LAS file or a LAS file array')

        self.z = self.las['z']
        self.zw = self.las['rtn_tot']
        self.lenLAS = len(self.las)

        # each return is down-weighted by the number of returns in its pulse
        rtn_weight = np.around(1. / self.las['rtn_tot'], decimals=2)
        self.total = np.sum(rtn_weight) # sum of weighted returns

        # removes ground and outliers (plain boolean mask; the original
        # list-wrapped mask is deprecated fancy indexing in numpy)
        idx = (self.z > threshold) & (self.z < np.percentile(self.z, top_threshold))
        self.z = self.z[idx] # ...from z
        self.zw = rtn_weight[idx] # ... and from weighted heights

        # fix: an empty filtered cloud previously crashed in _create_bins
        # (max() of an empty array) before the len(z)==0 handling in
        # Pgap()/CHP() could ever run
        if len(self.z) > 0:
            self._create_bins()
        else:
            self.bins = 0
            self.zxOrig = np.array([])

        return self

    def _create_bins(self):

        """Builds the height-bin axis (self.zxOrig) at z_scale resolution."""

        # create bin array
        #### integer arithmetic sidesteps numpy float precision issues ###
        factor = 1 / self.z_scale
        z_min, z_max = self.threshold * factor, int((self.z.max() + (self.z_scale*1000)) * factor)
        ##############################################
        self.bins = int(z_max-(z_min-1)) # number of bins
        self.zxOrig = np.linspace(z_min, z_max, self.bins) / factor # "x-axis"

        return self

    def Pgap(self, frequency=False):

        """
        Computes the gap fraction profile (self.pgap) over the height bins;
        frequency=True uses raw return counts instead of pulse weights.
        """

        if len(self.z) == 0:
            # no above-ground returns: complete gap
            self.pgap = np.array([1])
        else:
            # calculate Pgap
            if not frequency:
                self.pgap = np.zeros(self.bins) # array for populating with pgap
                for (i, height_below_toc) in enumerate(self.zxOrig):
                    idx = self.z >= height_below_toc # index all rows >= height z
                    weight_of_returns_above_z = sum(self.zw[idx]) # calculates sum of weighted returns above height z
                    self.pgap[i] = 1. - (weight_of_returns_above_z / self.total) # populates pgap with proportion of weight
            else:
                # frequency-based Pgap
                self.pgap = np.zeros(self.bins) # array for populating with pgap
                for (i, height_below_toc) in enumerate(sorted(self.zxOrig)):
                    idx = self.z >= height_below_toc # index all rows >= height z
                    num_of_returns_above_z = len(self.z[idx]) # calculates len of z array above height z
                    self.pgap[i] = 1. - (num_of_returns_above_z / float(self.lenLAS)) # populates pgap with proportion of returns

        return self

    def CHP(self, method="sample", alpha=.3, frequency=False, normalise=False, noise=0.05):

        """
        Derives the canopy height profile and counts canopy layers.

        method is "model" for a log transformed and normalised CHP and
        "sample" for anything else; "log_transform" log-transforms without
        normalising. Modes whose amplitude is below noise * max amplitude
        are discarded.
        """

        if len(self.z) == 0:
            # no above-ground returns: empty profile, zero layers
            self.pgap = np.array([1])
            self.fd = np.array([])
            self.sd = np.array([])
            self.spline = None
            self.ps = np.array([])
            self.zx = np.array([1])
            self.layerLocation = np.array([])
            self.layerCount = 0
            self.crownBase = np.array([])

        else:
            if method == 'sample':
                normalise = False
                self.alpha = alpha
                log_transform = False
            elif method == "model":
                normalise = True
                self.alpha = 0
                log_transform = True
            elif method == 'log_transform':
                normalise = False
                self.alpha = alpha
                log_transform = True
            else:
                raise Exception('method not recognised')

            self.Pgap()

            # smooth pgap with a univariate spline (s = alpha)
            self.spline = UnivariateSpline(self.zxOrig, self.pgap, s=self.alpha)
            self.ps = self.spline(self.zxOrig)

            # clips ps and zx vectors to maximum height
            self.ps = self.ps[np.where(self.zxOrig < self.z.max() * 1)]
            self.pgap = self.pgap[np.where(self.zxOrig < self.z.max() * 1)]
            self.zx = self.zxOrig[np.where(self.zxOrig < self.z.max() * 1)]

            # log transformation ... or not
            if log_transform:
                self.cc = -np.log(self.ps)
            else:
                self.cc = 1 - self.ps

            # first derivative of the (transformed) cover profile
            self.fd = -(np.gradient(self.cc) / self.z_scale)

            # removes any lingering negative values
            if self.fd.min() < 0:
                self.fd = self.fd + (-self.fd.min())

            # normalise so sum of vector == 1, required for probability
            if normalise:
                self.fd = self.fd * ((1-self.pgap.min())/np.sum(self.fd))

            # second derivative
            self.sd = np.gradient(self.fd) / self.z_scale

            # number of modes: -2 steps in sign(sd) mark zero crossings
            signs = np.diff(self.sd/abs(self.sd)) # finds zero crossings
            idx = np.where(signs == -2)[0]
            potentialLayerLocation = self.zx[idx] # and their height
            layerAmplitude = self.fd[idx] # and the signal amplitude
            maxAmplitude = self.fd.max() # and the maximum amplitude for the CHP
            self.layerLocation = [layer for i, layer in enumerate(potentialLayerLocation) if layerAmplitude[i] > maxAmplitude * noise] # and filters noise
            self.layerCount = len(self.layerLocation)
            self.crownBase = self.zx[idx] # and their height

        return self

    def simulateCloud(self):

        """
        Simulates a height vector by sampling from the CHP (run CHP() first)
        and assigns each simulated height a return weight. Returns a
        structured array with 'z' and 'rtn_tot' fields.
        """

        self.zAll = np.hstack([0, self.zx]) # add ground to height bins

        # height weighting vector
        self.heightWeight = np.hstack([self.pgap.min(), self.fd]) # weights for each height bin and add ground weight

        # return weighting vector: probability of each rtn_tot value per 1 m bin
        self.returnWeight = {} # dictionary to store self.returnWeight
        for h in np.unique(np.floor(self.zAll)): # rounds xs down to 1 metre bins
            # selects returns with heights in bin h
            self.returnWeight[h] = {} # creates dictionary within self.returnWeight to store count of NoR values
            idx = (self.z > h) & (self.z <= h + 1)
            NoR = self.zw[idx] # total number of returns for returns in range h to h+1
            for rtn in np.unique(NoR):
                self.returnWeight[h][rtn] = len(NoR[NoR == rtn]) # counts number of returns by rtn_tot
            # list() so the dict views also work under Python 3 numpy
            sumNoR = np.sum(list(self.returnWeight[h].values())) # counts number of returns in bin
            for rtn in list(self.returnWeight[h].keys()): # return values in height bin
                self.returnWeight[h][rtn] /= float(sumNoR) # calculates weight

        # Simulated height (at least 100 draws)
        self.simHeightAll = np.random.choice(self.zAll, max(self.lenLAS, 100), p=self.heightWeight)

        # Simulated weights
        self.simRtnWeight = np.zeros(len(self.simHeightAll)) # array to store weights
        for i, z in enumerate(self.simHeightAll):
            hgt_bin = np.floor(z)
            for offset in [0, 1, -1, 2, -2, 3, -3]: # if weighting bin is empty selects a neighbour
                try:
                    rtn_num = list(self.returnWeight[hgt_bin + offset].keys()) # return numbers
                    rtn_weights = list(self.returnWeight[hgt_bin + offset].values()) # probability of return number
                    # return number randomly chosen (weighted by probability of return)
                    self.simRtnWeight[i] = np.random.choice(rtn_num, 1, p=rtn_weights)
                    break
                except Exception:
                    # missing neighbour bin or invalid weights: best-effort
                    # fall back to a weight of 1 and try the next offset
                    self.simRtnWeight[i] = 1

        return rfn.merge_arrays([np.array(self.simHeightAll, dtype=[('z', float)]),
                                 np.array(self.simRtnWeight, dtype=[('rtn_tot', float)])])
224 |
class bootstrapComplexity:

    """
    Bootstraps the CanopyComplexity layer-count estimate: for each input
    .las/.znr file, fits a model CHP, simulates N point clouds from it and
    records the layer count of each, fanning files across worker processes.
    """

    # NOTE(review): class-level import; the module also imports
    # multiprocessing at the top of the file, which is what the methods use
    import multiprocessing

    def __init__(self, las, verbose=False, processes=1, N=100):

        # las: directory containing .znr/.las files, a single file path,
        #      or an explicit list of paths
        # N: number of bootstrap iterations per file
        self.N = N
        self.verbose = verbose

        if type(las) is str:
            if os.path.isdir(las):
                self.l = glob.glob(os.path.join(las, "*.znr"))
                if len(self.l) == 0: self.l = glob.glob(os.path.join(las, "*.las"))
            elif os.path.isfile(las): self.l = [las]
        elif type(las) is list:
            self.l = las
        else:
            # NOTE(review): a string that is neither a directory nor a file
            # falls through with self.l unset instead of reaching this error
            raise IOError("No .las or .znr files in {}".format(las))

        # processing starts immediately on construction
        self.mp(processes)

    def chp(self, las):

        """Bootstraps the layer count for one file; runs in a worker process."""

        if self.verbose: print 'processing:', las

        # per-worker temp directory name seeded from the worker identity so
        # concurrent processes do not collide
        pid = multiprocessing.current_process()._identity[0]
        tempDirectoryName = "lidar.processing." + str(np.random.mtrand.RandomState(pid).randint(0, 9999999)) + ".tmp"
        tempDirectory = os.path.join(tempfile.gettempdir(), tempDirectoryName)

        self.bsCHP = np.zeros(self.N)

        # NOTE(review): this rebinds self.chp from the bound method to the
        # fitted CanopyComplexity instance within this worker process
        self.chp = CanopyComplexity(mp=tempDirectory).fromLAS(las).CHP("model")

        for i in range(self.N):

            z = self.chp.simulateCloud()

            # degenerate simulations (nothing above 2 m, fewer than 2 points,
            # or a zero return weight) count as zero layers
            if z['z'].max() < 2 or len(z) < 2 or 0 in z['rtn_tot']:
                self.bsCHP[i] = 0
            else:
                sample = CanopyComplexity().fromLAS(z).CHP()
                self.bsCHP[i] = sample.layerCount

        # results keyed by the file's basename (or 'array' for array input)
        if type(las) is str: plotName = os.path.split(os.path.splitext(las)[0])[1]
        else: plotName = 'array'
        self.chp_dictionary[plotName] = self.bsCHP
        return self

    def mp(self, maxProcess):

        """Runs self.chp over self.l with at most maxProcess concurrent workers."""

        listI = 0
        manager = multiprocessing.Manager()
        # manager dict shared with the workers for reporting results back
        self.chp_dictionary = manager.dict()

        # NOTE(review): relies on Python 2 integer division for the batch count
        for i in range((len(self.l) / maxProcess) + 1):

            jobs = []

            # slice out the next batch of at most maxProcess files
            if (maxProcess * listI) + maxProcess < len(self.l):
                processingList = self.l[maxProcess * listI: (maxProcess * listI) + maxProcess]
            else: processingList = self.l[maxProcess * listI:]

            for j, las in enumerate(processingList): # limits number of images run at once
                p = multiprocessing.Process(target=self.chp, args=(las, ))
                jobs.append(p)
                p.start()

            # wait for the whole batch before starting the next
            for proc in jobs:
                proc.join()

            listI += 1

        # plain-dict snapshot of the bootstrap results
        self.bsCHPmutiple = dict(self.chp_dictionary)
298 |
if __name__ == '__main__':

    # Ad-hoc smoke test: fit a model CHP on a developer-local tile, simulate
    # a cloud from it, refit, and print the simulated return weights.
    # NOTE(review): hard-coded path only exists on the author's machine.
    path = '/Users/phil/ALS/WC/spl/tile_20/WC1_5m_TILES/383475.0_5828910.0.las'
    las = lasIO(path).all().asArray()
    # plot = 'PE2744N2556'
    # las_path = os.path.join(path, plot + '.las')
    # las = CanopyComplexity().fromSample(las['z'], las['rtn_tot']).CHP('model')
    las = CanopyComplexity().fromLAS(las).CHP('model')
    chp = CanopyComplexity().fromLAS(las.simulateCloud()).CHP()
    print chp.zw
310 |
311 |
--------------------------------------------------------------------------------
/forestlas/extractPlots.py:
--------------------------------------------------------------------------------
1 | """
2 | Extract points for a given trap size around a given coordinate
3 | """
4 |
5 | import os, sys, subprocess, csv, shutil, tempfile
6 | import MySQLdb as mdb
7 | import numpy as np
8 | from lasIO import *
9 | import multiprocessing
10 | import traceback
11 |
class extract:

    """
    Queries a MySQL tile index for ALS tiles intersecting plot coordinates
    read from a points file, then clips a square of LiDAR data around each
    point to a .las file (optionally across multiple processes).
    """

    def __init__(self, verbose=False, processes=4):

        self.verbose = verbose
        # remember the launch directory so query() can restore it on exit
        self.cwd = os.getcwd()
        self.maxProcess = int(processes)

    def query(self, points_raw, src="point", out=False, extent=24,
              round=True, flightlines=False, search=".", tmpDir=True,
              namePrefix=''):

        """
        Is it a points query or an extent?

        points_raw : path to a CSV of "name,x,y" rows ('#' lines skipped)
        src        : "point" buffers each coordinate by extent/2; anything
                     else is routed to read_extent
        out        : False (derive from points_raw), an output directory,
                     or a full .las output path
        extent     : side length (m) of the square clipped around each point
        """
        try:
            self.fl = flightlines
            self.round = round
            self.extent = extent
            # wrapped in SQL wildcards for the LIKE clauses below
            self.search = "%" + search + "%"
            self.tmpDir = tmpDir
            self.namePrefix = namePrefix

            # resolve the output directory and (optionally) output filename
            if not out:
                parentDir, outDir = os.path.split(points_raw)
                if len(parentDir) == 0: parentDir = os.getcwd()
                self.outDir = os.path.join(parentDir, os.path.splitext(outDir)[0])
                self.outLAS = False
            elif os.path.isdir(out):
                self.outDir = out
                self.outLAS = False
            elif out.endswith('.las'):
                self.outDir, self.outLAS = os.path.split(out)
            else:
                raise IOError('output destination not recognised')

            if os.path.isdir(self.outDir) is False:
                os.makedirs(self.outDir)

            if self.verbose == True:
                print "src: {}".format(src)
                print "extent: {}".format(self.extent)
                print "round: {}".format(round)
                print "flightlines selected: {}".format(self.fl)

            if src == "point":
                self.read_points(points_raw)
            else:
                # NOTE(review): read_extent is not defined in this module
                plots = read_extent(points_raw)

        except Exception as err:
            print traceback.format_exc()
        finally:
            # always restore the working directory
            os.chdir(self.cwd)

    def read_points(self, points_raw):

        """Parses the points CSV and dispatches tile selection (serial or MP)."""

        self.tiles_to_process = []

        with open(points_raw) as pr:

            for i in pr.read().split('\n'):
                # a '#' anywhere in the line marks a comment row
                if i.find('#') != -1:
                    continue
                elif i != "":
                    i = str.split(i, ',')
                    point = str(i[0])
                    x = float(i[1])
                    y = float(i[2])
                else: continue

                #names point if not
                if point == None:
                    point = str(x) + '_' + str(y)

                #print "processing point: {} x: {} y: {}".format(point, x, y)

                # calculates search self.extent
                xmin = float(x)-(self.extent/2.)
                ymin = float(y)-(self.extent/2.)
                xmax = float(x)+(self.extent/2.)
                ymax = float(y)+(self.extent/2.)

                # NOTE(review): swaps the x and y bounds whenever xmin > ymin,
                # which holds for most projected coordinates -- confirm the
                # intended axis order against the tile index schema
                if xmin > ymin:
                    xmin, ymin, xmax, ymax = ymin, xmin, ymax, xmax

                self.tiles_to_process.append({"xmin":xmin, "ymin":ymin,
                                              "xmax":xmax, "ymax":ymax,
                                              "point":point})

            # maxProcess == 0 means process serially (last point only here)
            if self.maxProcess == 0:
                self.select_tiles(xmin, ymin, xmax, ymax, point)
            else: self.mp()

    def mp(self):

        """Runs select_tiles over the queued points in batches of maxProcess."""

        listI = 0

        # NOTE(review): relies on Python 2 integer division for the batch count
        for i in range((len(self.tiles_to_process) / self.maxProcess) + 1):

            jobs = []

            try:
                if (self.maxProcess * listI) + self.maxProcess < len(self.tiles_to_process):
                    processingList = self.tiles_to_process[self.maxProcess * listI: (self.maxProcess * listI) + self.maxProcess]
                else: processingList = self.tiles_to_process[self.maxProcess * listI:]



                for j, t in enumerate(processingList): # limits number of images run at once
                    p = multiprocessing.Process(target=self.select_tiles, args=(t["xmin"],
                                                                                t["ymin"],
                                                                                t["xmax"],
                                                                                t["ymax"],
                                                                                t["point"], ))
                    jobs.append(p)
                    p.start()

                # wait for the whole batch before starting the next
                for proc in jobs:
                    proc.join()

            except:
                # debug dump before failing the run
                print self.tiles_to_process[0:1]
                print self.maxProcess, type(self.maxProcess)
                print listI, type(listI)
                raise NameError

            listI += 1

    def select_tiles(self, xmin, ymin, xmax, ymax, point):

        """Queries the tile DB for tiles overlapping the bounding box, then clips."""

        ## connects to db
        try:
            con = mdb.connect('127.0.0.1', 'seo', 'lidar', 'lidar') # server, user, pw, db
            cur = con.cursor()
        except Exception, err:
            raise Exception(err)

        ## selects either raw flightlines or processed .las files
        # NOTE(review): fl is computed but never used in the query below
        if self.fl == True:
            fl = '"%classed"'
        else:
            fl = '"%_height"'

        # queries db for available als tiles
        # NOTE(review): the SQL is built via % string interpolation; if any
        # of these inputs can come from an untrusted source this is open to
        # SQL injection -- prefer parameterized queries
        query = ('select concat(path, "/", tilename, ".", format) from tiles where \
                (xmin > %(xmin)s and xmin < %(xmax)s and ymin > %(ymin)s and ymin < %(ymax)s and (path like "%(name)s" or tilename like "%(name)s")) or \
                (xmax > %(xmin)s and xmax < %(xmax)s and ymax > %(ymin)s and ymax < %(ymax)s and (path like "%(name)s" or tilename like "%(name)s")) or \
                (xmin > %(xmin)s and xmin < %(xmax)s and ymax > %(ymin)s and ymax < %(ymax)s and (path like "%(name)s" or tilename like "%(name)s")) or \
                (xmax > %(xmin)s and xmax < %(xmax)s and ymin > %(ymin)s and ymin < %(ymax)s and (path like "%(name)s" or tilename like "%(name)s")) or \
                (xmax > %(xmax)s and xmin < %(xmin)s and ymax > %(ymin)s and ymax < %(ymax)s and (path like "%(name)s" or tilename like "%(name)s")) or \
                (xmax > %(xmax)s and xmin < %(xmin)s and ymin > %(ymin)s and ymin < %(ymax)s and (path like "%(name)s" or tilename like "%(name)s")) or \
                (xmax < %(xmax)s and xmax > %(xmin)s and ymax > %(ymax)s and ymin < %(ymin)s and (path like "%(name)s" or tilename like "%(name)s")) or \
                (xmin < %(xmax)s and xmin > %(xmin)s and ymax > %(ymax)s and ymin < %(ymin)s and (path like "%(name)s" or tilename like "%(name)s")) or \
                (xmax > %(xmax)s and xmin < %(xmin)s and ymax > %(ymax)s and ymin < %(ymin)s and (path like "%(name)s" or tilename like "%(name)s"))\
                ' % {'xmin':xmin, 'xmax':xmax, 'ymin':ymin, 'ymax':ymax, 'name':self.search})

        # if self.verbose is True:
        #     print query

        # creates list of tiles from query
        cur.execute(query)
        ans = cur.fetchall()
        self.tile_list = [a[0] for a in ans]
        cur.close() # closes cursor
        con.close() # closes db connection

        if len(self.tile_list) == 0 and self.verbose == True:
            print query

        ## self.extent of tile
        print 'processing %s tile(s) for point %s' % (len(self.tile_list), point)
        if self.verbose:
            for tile in self.tile_list:
                print tile

        if len(self.tile_list) > 0:
            if not self.outLAS:
                self.outLAS = os.path.join(self.outDir,
                                           '{}_'.format(self.namePrefix) + point + '.las')
            self.clipLAS(xmin, ymin, xmax, ymax)
        else:
            # point fell outside the indexed coverage: record it and move on
            print "outside of bounds: {}".format(point)
            with open(os.path.join(self.outDir, "log.csv"), "a") as log:
                log.write(",".join([point, "Out of bounds", str(np.mean([xmax, xmin])),
                                    str(np.mean([ymax, ymin])), "\n"]))

    def clipLAS(self, xmin, ymin, xmax, ymax):

        """Clips the selected tiles to the bounding box and writes the .las."""

        x = np.mean([xmax, xmin])
        y = np.mean([ymax, ymin])

        # choose a temp directory name; under multiprocessing the worker id
        # seeds the RNG so concurrent workers do not collide
        if not self.tmpDir or self.maxProcess == 0:
            tempDirectoryName = "lidar.processing." + str(np.random.randint(0, 9999999)) + ".tmp"
        else:
            pid = multiprocessing.current_process()._identity[0]
            tempDirectoryName = "lidar.processing." + str(np.random.mtrand.RandomState(pid).randint(0, 9999999)) + ".tmp"
        self.tmpDir = os.path.join(tempfile.gettempdir(), tempDirectoryName)

        lasIO(self.tile_list,
              out=self.outLAS,
              verbose=self.verbose,
              znr=False,
              search=self.search,
              tmpDir=self.tmpDir,
              keepTemp=False).plot(x, y,
                                   extent=self.extent,
                                   round=self.round).exportLAS()

        if self.verbose:
            print ".las exported to: {}".format(self.outLAS)

        if not os.path.isfile(self.outLAS):
            with open(os.path.join(self.outDir, "log.csv"), "a") as log:
                # NOTE(review): 'point' is not defined in this scope, so this
                # failure-logging line would raise NameError if reached
                log.write(",".join([point, ",".join(self.tile_list), str(x), str(y), "\n"]))
227 |
if __name__=='__main__':
    # CLI entry point: argv[1] is the points CSV to query
    extract(verbose=False).query(sys.argv[1])
230 |
--------------------------------------------------------------------------------
/forestlas/geometricTree.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 | import math
4 |
class crownDim:
    """
    Estimates tree crown dimensions (total height, crown radius and crown
    base height) from stem diameter at breast height (dbh) using a choice
    of allometric models, and derives a vertical profile of crown area
    from an ellipsoid crown model.
    """

    def __init__(self, verbose=False, plot=False):
        """
        Parameters
        ----------
        verbose: Boolean, default False
            Print the fitted dimensions after genus_params() is run.
        plot: matplotlib axes instance or False, default False
            If an axes is supplied, the crown outline is scattered onto it
            by ellipsoid_volume().
        """
        self.verbose = verbose
        self.plot = plot

        if plot is not False:
            self.a = plot  # axes handle used for drawing the crown outline

    def genus_params(self, allometric, dbh, height=None, genus=None, species=None, height_class=None, position="none"):
        """
        Sets self.height, self.radius and self.base from dbh using the
        chosen allometric model.

        Parameters
        ----------
        allometric: str
            One of "scanlan", "rushworth" or "field".
        dbh: float
            Diameter at breast height.
        height: float, optional
            Measured tree height. In the "field" branch this always
            overrides the modelled height.
        genus, species: str, optional
            Taxonomic information used to select model coefficients.
        height_class: int, optional
            0 (low), 1 (medium) or 2 (tall); used by the "field" branch.
        position: int or str, default "none"
            Canopy position: 1 dominant, 2 suppressed.

        Returns
        -------
        out: self

        Raises
        ------
        NameError
            If `allometric` is not a recognised keyword.
        """

        # treat a missing genus as an empty string so the substring tests
        # here and in ellipsoid_volume() do not fail on None
        if genus is None:
            genus = ""

        self.genus = genus
        self.position = position

        if allometric == 'scanlan':
            ### Scanlan 2010 ###
            self.height = 15.868 + (0.411 * dbh) - 0.003 * (dbh - 53.346)**2
            self.base = 13.622 + (0.255 * dbh) - 0.002 * (dbh - 53.346)**2
            self.radius = 1.966 + (0.115 * dbh)

        elif allometric == 'rushworth':
            # species- and canopy-position-specific regressions fitted to
            # the Rushworth field data
            if species == "polyanthemos" and position == 1:
                self.height = 6.73589841952 + 0.199401565137 * dbh
                self.radius = 2.44438391898 + 0.0968236918828 * dbh
                self.base = 5.64863434095 + 0.0508669633267 * dbh
            elif species == "polyanthemos" and position == 2:
                self.height = 8.03659852077 + 0.0980603043708 * dbh
                self.radius = 1.53665512596 + 0.153832682204 * dbh
                self.base = 4.77895559438 + -0.0501664079081 * dbh
            elif species == "melliodora" and position == 1:
                self.height = 13.1074336283 + 0.181415929204 * dbh
                self.radius = -0.0258262268705 + 0.261383748994 * dbh
                self.base = -3.84514883347 + 0.421962992759 * dbh
            elif species == "melliodora" and position == 2:
                self.height = 4.62598991008 + 0.592333644974 * dbh
                self.radius = -2.06257925378 + 0.404778261046 * dbh
                self.base = 4.13866554657 + 0.0782895004268 * dbh
            elif species == "tricarpa" and position == 1:
                self.height = 11.2973396649 + 0.118405746654 * dbh
                self.radius = 1.90859934333 + 0.11935877297 * dbh
                self.base = 7.15742290658 + -0.0309056611952 * dbh
            elif species == "tricarpa" and position == 2:
                self.height = 5.05200102811 + 0.277243417286 * dbh
                self.radius = -0.306595001588 + 0.223036436419 * dbh
                self.base = 5.20539453592 + -0.00489743984257 * dbh
            elif species == "microcarpa":
                self.height = 6.14528846053 + 0.541168944217 * dbh
                self.radius = 0.937658231167 + 0.180962968401 * dbh
                self.base = 1.10902140512 + 0.3315224801 * dbh
            elif species == "macrorrhyncha":
                self.height = 3.56849545646 + 0.489722695707 * dbh
                self.radius = 0.192442248096 + 0.16049402261 * dbh
                self.base = 4.13546317166 + 0.10044532714 * dbh
            elif position == 1:
                # generic dominant-tree fallback
                self.height = 3.05768707007 + 3.75103882462 * np.log(dbh)
                self.radius = -8.08923000772 + 4.14669504717 * np.log(dbh)
                self.base = 7.26517208273 + -0.0301284879315 * dbh
            elif position == 2:
                # generic suppressed-tree fallback
                self.height = 0.481343823925 + 3.74241504451 * np.log(dbh)
                self.radius = -6.32450180289 + 3.57228666209 * np.log(dbh)
                self.base = 5.34980523765 + -0.0219543192422 * dbh
            else:
                # NOTE(review): no model matched -- height/radius/base are
                # left unset in this case, as in the original code
                print(species)

        elif allometric == 'field':
            # field-survey regressions; trailing comments give sample size
            # and fit statistics of each regression
            if "eucalypt" in genus.lower():
                if height_class == 0 and position <= 1: # low height and dominant
                    self.height = 1.86064881159 + 4.33064331125 * np.log(dbh) # n:306, rmse: 1.45, r2: 0.68
                    self.radius = -4.40385183196 + 2.99180690117 * np.log(dbh) # n:196, rmse: 0.51, r2: 0.84
                    self.base = 3.77967694134 + 0.293670137086 * self.height # n:169, rmse: 1.01, r2: 0.22
                if height_class == 1 and position <= 1: # med height and dominant
                    self.height = -6.3614102534 + 11.5476586081 * np.log(dbh) # n:285, rmse: 3.98, r2: 0.40
                    self.radius = -30.9357885227 + 9.7432187604 * np.log(dbh)# n:272, rmse: 1.40, r2: 0.75
                    self.base = 11.0629490818 + 0.410104416904 * self.height # n:233, rmse: 3.08, r2: 0.39
                if height_class == 2 and position == 1: # tall height and dominant (regnans only)
                    self.height = height # n:22, rmse: 5.78, r2: 0.50
                    self.radius = -17.7715368458 + 7.04971631466 * np.log(dbh) # combined with height class >0 as n = 2
                    self.base = 0.6 * height # ditto
                if height_class == 0 and position == 2: # low height and suppressed
                    self.height = -2.20552357108 + 4.98820033612 * np.log(dbh) # n:246, rmse: 1.77, r2: 0.39
                    self.radius = -6.02277337577 + 3.42232555079 * np.log(dbh) # n:132, rmse: 0.76, r2: 0.48
                    self.base = 0.84309744471 + 0.39574379931 * self.height # n:132, rmse: 0.99, r2: 0.35
                if height_class > 0 and position == 2: # med height and suppressed
                    self.height = -10.3155403958 + 9.37056330951 * np.log(dbh) # n:23, rmse: 2.51, r2: 0.70
                    self.radius = -6.02277337577 + 3.42232555079 * np.log(dbh) # combined with height class = 0 as n = 2
                    self.base = 0.84309744471 + 0.39574379931 * self.height # ditto
            else: # non-eucalypt
                if height_class == 0: # tall height and dominant
                    self.height = -7.25044287906 + 6.92940716229 * np.log(dbh) # n:97, rmse: 2.07, r2: 0.55
                    self.radius = -7.42177845525 + 3.955374589 * np.log(dbh) # n:77, rmse: 0.63, r2: 0.75
                    self.base = 1.99951178234 + 0.459496058702 * self.height # n:77, rmse: 1.00, r2: 0.69
                if height_class == 1: # low height and suppressed
                    self.height = -11.4257385832 + 8.43514568771 * np.log(dbh) # n:86, rmse: 1.58, r2: 0.73
                    self.radius = -4.94874693431 + 3.05793566579 * np.log(dbh) # n:11, rmse: 0.56, r2: 0.69
                    self.base = 2.02235653003 + 0.485452812202 * self.height # n:11, rmse: 0.77, r2: 0.54
                if height_class == 2: # med height and suppressed
                    self.height = -13.944895656 + 9.01486400668 * np.log(dbh) # n:75, rmse: 1.53, r2: 0.82
                    self.radius = -10.1429095331 + 5.06708994335 * np.log(dbh) # n:32, rmse: 0.54, r2: 0.72
                    self.base = 2.37924762553 + 0.421325043146 * self.height # n:32, rmse: 0.49, r2: 0.75

            # measured height always overrides the modelled height in the
            # field branch, and crown base is taken as a fixed fraction of
            # it.  NOTE(review): assumes `height` is supplied here.
            self.height = height

            if height > 50 or species == "regnans":
                self.base = (1-0.29) * height
            elif height > 10:
                self.base = 0.55 * height
            else: self.base = 0.7 * height

        else: raise NameError("allometric relationship keyword not understood")

        if self.verbose:
            # BUG FIX: the original referenced an undefined name `xxx`
            # here, raising NameError whenever verbose was on
            print("dbh: {}, h: {}, r: {}, b: {}, a:{}".format(dbh, self.height, self.radius, self.base, allometric))

        return self

    def ellipsoid_volume(self):
        """
        Models the crown as an ellipsoid and returns a dict mapping height
        above ground (1 m bins) -> horizontal crown area at that height.

        Eucalypt crowns are "hollowed" towards the base, suppressed trees
        (position == 2) are halved and Nothofagus crowns inflated by 20%.
        If an axes was supplied to __init__, the crown outline is drawn.

        Returns
        -------
        out: dict {height: area}
        """
        crown_depth = self.height - self.base
        crown_centre = self.height - (crown_depth / 2)
        a = crown_depth / 2  # semimajor (vertical) axis
        b = self.radius / 2  # semiminor (horizontal) axis

        genus = self.genus if self.genus else ""

        L = {}

        if self.plot is not False:
            # draw the crown outline on the supplied axes
            for t in range(0, 360):
                x = a * math.sin(math.radians(t))
                y = b * math.cos(math.radians(t))
                self.a.scatter(y, x + crown_centre, s=1, c="g", edgecolor="none")

        # NOTE(review): the eccentricity of an ellipse is usually
        # sqrt(1 - b**2/a**2); this reproduces the original b/a form so
        # results are unchanged
        eccentricity = np.sqrt(b**2. / a**2.)
        hyp = a * eccentricity
        # BUG FIX: np.linspace requires an integer sample count on modern
        # NumPy -- the original passed np.floor(a) * 2 + 1 (a float)
        n_bins = int(np.floor(a)) * 2 + 1
        bins = np.linspace(-np.floor(a), np.floor(a), n_bins)

        for i, opp in enumerate(bins):
            # cross-sectional radius of the ellipsoid at offset `opp`
            angle = math.asin((opp * eccentricity) / hyp)
            adj = math.cos(angle) * hyp
            area = np.pi * adj**2
            # calculates "hollow" eucalypt crowns
            if "ucalypt" in genus:
                j = float(i) / len(bins)
                if j <= 0.75:
                    factor = self.exp(j, 0.26229889, -1.52996682)
                    area = area * factor
            # suppressed trees carry roughly half the crown area
            if self.position == 2:
                area *= 0.5
            if "othofagus" in genus:
                area *= 1.2
            opp += crown_centre
            L[opp] = area

        return L

    def exp(self, x, a, b):
        """Exponential helper: a * e^(-b * x)."""
        return a * np.exp(-b * x)
--------------------------------------------------------------------------------
/forestlas/global_header.py:
--------------------------------------------------------------------------------
1 | __author__ = 'phil'
2 |
def h():
    """Return a template LAS global header (as a dict) for recovered
    temporary point files that have lost their own header.

    The spatial extents are initialised to impossible min/max sentinels so
    that real point coordinates always override them on the first update.
    """
    header = {}

    # file identification
    header['filesig'] = 'LASF'
    header['sysid'] = 'RECOVERED TEMP FILE '
    header['gensoftware'] = 'CRC207 LiDAR analysis software '
    header['filesourceid'] = 0
    header['guid1'] = 0
    header['guid2'] = 0
    header['guid3'] = 0
    header['guid4'] = (0, 0, 0, 0, 0, 0, 0, 0)
    header['reserved'] = 0
    header['infile'] = None

    # version / format / layout
    header['vermajor'] = 1
    header['verminor'] = 2
    header['pointformat'] = 1
    header['pointreclen'] = 28
    header['headersize'] = 227
    header['numvlrecords'] = 0
    header['offset'] = 313 - 54  # point-data offset after dropping the EVLR
    header['fileyear'] = 3000    # sentinel year marks a recovered file
    header['fileday'] = 226

    # point counts
    header['numptrecords'] = 0
    header['numptbyreturn'] = (0, 0, 0, 0, 0)

    # scale / offset
    header['xscale'] = 0.01
    header['yscale'] = 0.01
    header['zscale'] = 0.01
    header['xoffset'] = 0.0
    header['yoffset'] = 0.0
    header['zoffset'] = 0.0

    # extent sentinels (min > max so any real point overrides them)
    header['xmin'] = 9999999.
    header['xmax'] = -9999999.
    header['ymin'] = 9999999.
    header['ymax'] = -9999999.
    header['zmin'] = 9999.
    header['zmax'] = -9999.

    return header
38 |
--------------------------------------------------------------------------------
/forestlas/lasIO.py:
--------------------------------------------------------------------------------
1 | import sys, os, struct, array, math, datetime, tempfile, shutil, subprocess
2 | import multiprocessing
3 | import numpy as np
4 |
5 | ### from forestlas.lasStructure import *
6 | from lasStructure import *
7 |
8 | class lasIO:
9 |
10 | """
11 | This allows reading and writing of .las files (currently supports 1.1 and 1.2).
12 | .las files can be read as one file(s), gridded or a plot of data extracted.
13 | Output types include .las, a numpy array or as .znr file where only the height
14 | and the "number of returns" metadata remain.
15 | """
16 |
17 | def __init__(self, path, out=False, znr=False, verbose=False, search=".",
18 | tmpDir=False, keepTemp=False):
19 |
20 | """
21 | Functions creates holder for .las file and setups required dictionaries
22 | etc.
23 |
24 | Parameters
25 | ----------
26 | path: File path or list
27 | file path to tile .las file or directory containing .las files,
28 | also excepts a list of file paths.
29 |
30 | out: Path to directory or path to save .las to, Default None
31 | If None then saves output to os.getcwd()
32 |
33 | znr: Boolean. Default True
34 | Save output as a .znr file where only the height and "number of
35 | returns metadata is retained.
36 |
37 | search: String. Default "."
38 | Can be used to filter .las files if a directory is supplied for
39 | "path"
40 |
41 | tmpDir: File path to temporary directory. Default False
42 | Multiprocessing is not available with this module but is by others
43 | that call lasIO. This specifies the temporary directory.
44 |
45 | keepTemp: Boolean. Default True
46 | When False all temporary files are kept. It is important to run
47 | removeTemp() to clean up before exiting
48 |
49 | Returns
50 | -------
51 | out: self
52 |
53 | """
54 |
55 | self.verbose = verbose
56 | self.keepTemp = keepTemp
57 |
58 | ### parse args and create file structure
59 | # is path a directory or file
60 | if isinstance(path, list) :
61 | self.tileList = path
62 | self.dir = os.path.split(path[0])[0]
63 | self.tile = None
64 | elif os.path.isfile(path) :
65 | self.dir = os.path.split(path)[0]
66 | if self.dir == '': self.dir = os.getcwd()
67 | self.tile = os.path.split(path)[1]
68 | self.tileList = [os.path.join(self.dir, self.tile)]
69 | elif os.path.isdir(path) :
70 | self.dir = path
71 | self.tile = None
72 | tileList = os.listdir(self.dir)
73 | self.tileList = [tile for tile in tileList if (tile.endswith(".las")
74 | or tile.endswith(".laz")
75 | or tile.endswith(".znr"))
76 | and tile.find(search) != -1]
77 | else:
78 | raise NameError ("path not recognised")
79 |
80 | if len(self.tileList) == 0:
81 | raise IOError("There are no .las, .laz or .znr tiles in {}".format(path))
82 |
83 | # create temporary directory in %temp%
84 | if tmpDir:
85 | self.tempDirectory = tmpDir
86 | if os.path.isdir(self.tempDirectory) is False:
87 | os.makedirs(self.tempDirectory)
88 | else:
89 | tempDirectoryName = "lidar.processing." + str(np.random.randint(0, 9999999)) + ".tmp"
90 | self.tempDirectory = os.path.join(tempfile.gettempdir(), tempDirectoryName)
91 | os.makedirs(self.tempDirectory)
92 |
93 | #tempDirectoryName = "lidar.processing." + str(np.random.randint(0, 9999999)) + ".tmp"
94 | #self.tempDirectory = os.path.join(tempfile.gettempdir(), tempDirectoryName)
95 | #os.makedirs(self.tempDirectory)
96 |
97 | if self.verbose: print "temporary directory at: {}".format(self.tempDirectory)
98 |
99 | # create output directory
100 | if not out:
101 | self.savePath = self.dir
102 | self.saveLAS = None
103 | elif os.path.isdir(out) :
104 | self.savePath = out
105 | self.saveLAS = None
106 | else:
107 | self.savePath = os.path.split(out)[0]
108 | self.saveLAS = os.path.split(out)[1]
109 |
110 | # znr
111 | if znr:
112 | self.znr = True
113 | else: self.znr = False
114 |
115 | # global variables
116 | self.numberTiles = 1
117 | for i, las in enumerate(self.tileList):
118 | las = os.path.join(self.dir, las)
119 | if i == 0:
120 | self.globalHeader = parseHeader(las)
121 | # self.globalHeader["numptrecords"] -= 1
122 | if self.globalHeader['filesig'] == "ZNRF":
123 | self.ptFrmt, self.dt = znrStruct()
124 | self.znr = True
125 | else:
126 | self.ptFrmt, self.dt = getPtFrmt(self.globalHeader, verbose=True)
127 | self.vlr = getVLR(self.globalHeader["headersize"], las)
128 | self.h = parseHeader(las)
129 | else:
130 | lasHeader = parseHeader(os.path.join(path, las))
131 | self.globalHeader["numptrecords"] += lasHeader["numptrecords"]
132 | if lasHeader["xmax"] > self.globalHeader["xmax"]:
133 | self.globalHeader["xmax"] = lasHeader["xmax"]
134 | if lasHeader["xmin"] < self.globalHeader["xmin"]:
135 | self.globalHeader["xmin"] = lasHeader["xmin"]
136 | if lasHeader["ymax"] > self.globalHeader["ymax"]:
137 | self.globalHeader["ymax"] = lasHeader["ymax"]
138 | if lasHeader["ymin"] < self.globalHeader["ymin"]:
139 | self.globalHeader["ymin"] = lasHeader["ymin"]
140 | self.numberTiles += 1
141 |
142 | self.x_centre = np.mean([self.globalHeader["xmax"], self.globalHeader["xmin"]])
143 | self.y_centre = np.mean([self.globalHeader["ymax"], self.globalHeader["ymin"]])
144 |
145 | if self.verbose: print "number of tiles to process: {}".format(self.numberTiles)
146 | self.area = (self.globalHeader["xmax"] - self.globalHeader["xmin"]) * (self.globalHeader["ymax"] - self.globalHeader["ymin"])
147 | # self.pointDensity = self.globalHeader["numptrecords"] / self.area
148 |
149 | # admin!
150 | self.counter = self.globalHeader["numptrecords"] // 20
151 | if self.globalHeader["numptrecords"] > 100000000:
152 | self.counter = self.globalHeader["numptrecords"] // 100
153 | self.badPoints = 0
154 | self.filesOpen = 0
155 |
156 | def laz2las(self, tile):
157 |
158 | """
159 | If input file is a .laz file then calls a LAStools to
160 | decompress to .las. Requires LAStools to be installed and in
161 | the PATH to be in the system environment variables
162 | """
163 |
164 | print "converting to .las: {}".format(os.path.abspath(tile))
165 | os.chdir(self.dir)
166 | tileName = os.path.splitext(os.path.split(tile)[1])[0] + ".las"
167 | tempFile = os.path.join(self.tempDirectory, tileName)
168 | cmd = ["las2las", "-i", tile, "-o", tempFile]
169 | subprocess.call(cmd)
170 | return tempFile
171 |
    def tiling(self, resolution, takeSample=False, buffer=0):

        """
        Used to tile .las file(s) into square tiles of the given
        resolution.

        Parameters
        ----------
        resolution: int
            Tile resolution

        takeSample: int, Default False
            Samples data at specified factor

        buffer: int or float, Default 0
            Overlap added around each tile so points near an edge also
            fall into the neighbouring tile(s)

        Returns
        -------
        out: self

        """

        # .znr files keep only height / number-of-returns, so there is
        # nothing spatial to tile on
        if self.globalHeader["filesig"] == "ZNRF":
            raise NameError("Can not tile a .znr file as there are no x y coordinates")

        self.totPoints = 0
        self.globalHeader["guid1"] = resolution

        # snap the global bounding box to multiples of `resolution`
        self.xmax = (float(np.ceil(self.globalHeader["xmax"])) // resolution) * resolution # to make nice neat boxes!
        self.xmin = (float(np.floor(self.globalHeader["xmin"])) // resolution) * resolution
        self.ymax = (float(np.ceil(self.globalHeader["ymax"])) // resolution) * resolution
        self.ymin = (float(np.floor(self.globalHeader["ymin"])) // resolution) * resolution

        # output directory for the tiles (recreated from scratch)
        if self.tile == None:
            dirName = str(resolution) + "m_TILES"
        else:
            dirName = os.path.splitext(self.tile)[0] + "_" + str(resolution) + "m_TILES"
        self.tilePath = os.path.join(self.savePath, dirName)
        if os.path.isdir(self.tilePath) :
            shutil.rmtree(self.tilePath)
        os.makedirs(self.tilePath)

        # tile dictionary and other variables
        self.xy_dictionary = {}
        for i, x in enumerate(np.arange(self.xmin, self.xmax + resolution, resolution)):
            for j, y in enumerate(np.arange(self.ymin, self.ymax + resolution, resolution)):
                self.xy_dictionary[(x, y)] = {"xmin":x - buffer,
                                              "ymin":y - buffer,
                                              "zmin":999,
                                              "xmax":x + resolution + buffer,
                                              "ymax":y + resolution + buffer,
                                              "zmax": -999,
                                              "num_rtn": {1:0, 2:0, 3:0, 4:0, 5:0},
                                              "i":0,
                                              "tempFile": os.path.abspath(os.path.join(self.tempDirectory, str(x) + "_" + str(y) + ".temp")),
                                              "outFile": os.path.abspath(os.path.join(self.tilePath, str(x) + "_" + str(y) + ".las")),
                                              "isOpen": False,
                                              "lastTouched": None
                                              }

        # structured array of tile origins for vectorised point-in-tile
        # lookup.  NOTE(review): dict.keys() returns a list in Python 2;
        # wrap in list() if porting to Python 3
        keys = np.array(self.xy_dictionary.keys(), dtype=[('x', int), ('y', int)])

        if self.verbose: print "number of plots: {}".format(len(self.xy_dictionary))

        for tile in self.tileList:

            tile = os.path.join(self.dir, tile)
            self.h = parseHeader(tile)

            # guid2 == 1 marks a .laz-compressed tile; decompress first
            if self.h["filesig"] == "LASF":
                if self.h["guid2"] == 1:
                    tile = self.laz2las(tile)

            self.h["num_rtn"] = {}

            if takeSample:
                sample = self.generateSample(takeSample)
                if self.verbose:
                    print "random sample produced: {}".format(len(sample))
            else:
                sample = range(self.h['numptrecords'])

            with open(os.path.join(self.dir, tile), 'rb') as fh:

                fh.seek(self.h["offset"])
                numPoints = 0

                for i in sample: # loops through all points

                    if i%self.counter == 0 and self.verbose:
                        print "{}% | {} of {} points selected | {}".format(np.round((np.float(self.totPoints)/self.globalHeader['numptrecords'])*100), numPoints, self.globalHeader['numptrecords'], datetime.datetime.now())

                    fh.seek(self.h["offset"] + (numPoints * self.h['pointreclen'])) # searches to beginning of point

                    point_dictionary = extract_return(self, fh)
                    X, Y = point_dictionary['x'], point_dictionary['y']
                    # every tile whose buffered extent contains the point
                    # (a point can land in several tiles when buffer > 0)
                    KEYS = keys[(X >= keys['x'] - buffer) & (X < keys['x'] + resolution + buffer) &
                                (Y >= keys['y'] - buffer) & (Y < keys['y'] + resolution + buffer)]

                    for key in KEYS:

                        self.xy_dictionary[tuple(key)] = self.writePoint(self.xy_dictionary[tuple(key)], point_dictionary)

                        if point_dictionary["rtn_num"] < 6 and point_dictionary["rtn_num"] > 0:
                            self.xy_dictionary[tuple(key)]["num_rtn"][point_dictionary["rtn_num"]] += 1

                    numPoints += 1
                    self.totPoints += 1

                    # closes file handles that have been unused for a while
                    if self.filesOpen == 40:
                        self.closeFiles()
                        self.filesOpen = 20

            # deletes .las tiles that were converted from .laz
            if self.tempDirectory in tile:
                os.unlink(tile)

        # flush and close any tile files still open
        for key in self.xy_dictionary.keys():
            if self.xy_dictionary[key]["isOpen"] is not False:
                self.xy_dictionary[key]["isOpen"].close()

        if self.verbose : print "number of bad points = {}".format(self.badPoints)
        if self.znr :
            # switch the point format to the compact znr record
            self.ptFrmt, self.dt = znrStruct()
            self.globalHeader["pointreclen"] = 5
        else:
            if resolution >= 1: self.h['guid1'] = resolution

        self.outTileCount = 0

        return self
301 |
    def all(self, takeSample=False):

        """
        Reads every point from every tile into a single output dataset.

        Parameters
        ----------

        takeSample: int, Default False
            Samples data at specified factor

        Returns
        -------
        out: self

        """

        self.totPoints = 0
        self.globalHeader["guid1"] = 0

        # default output name is derived from the global minimum x, y
        x, y = self.globalHeader["xmin"], self.globalHeader["ymin"]
        if self.saveLAS == None:
            self.saveLAS = str(int(x)) + "_" + str(int(y)) + "_OUT.las"
        self.xy_dictionary = {}
        self.xy_dictionary['all'] = { "xmin":x, \
                                      "ymin":y, \
                                      "zmin":999, \
                                      "xmax":self.globalHeader["xmax"], \
                                      "ymax":self.globalHeader["ymax"], \
                                      "zmax": -999, \
                                      "num_rtn": {1:0, 2:0, 3:0, 4:0, 5:0}, \
                                      "i":0, \
                                      "outFile": os.path.abspath(os.path.join(self.savePath, self.saveLAS)), \
                                      "tempFile": os.path.abspath(os.path.join(self.tempDirectory, str(x) + "_" + str(y) + ".temp")), \
                                      "isOpen": False, \
                                      "lastTouched": None
                                      }

        for tile in self.tileList:

            tile = os.path.join(self.dir, tile)
            self.h = parseHeader(tile)

            # guid2 == 1 marks a .laz-compressed tile; decompress first
            if self.h["filesig"] == "LASF":
                if self.h["guid2"] == 1:
                    tile = self.laz2las(tile)

            self.h["num_rtn"] = {}

            if takeSample:
                sample = self.generateSample(takeSample)
                if self.verbose :
                    print "random sample produced: {}".format(len(sample))
            else:
                sample = range(self.h['numptrecords'])

            with open(os.path.join(self.dir, tile), 'rb') as fh:

                fh.seek(self.h["offset"])
                numPoints = 0

                for i in sample: # loops through all points

                    # progress report; guarded because counter may be 0
                    # for very small files
                    try:
                        if i%self.counter == 0 and self.verbose:
                            self.printProgress()
                    except: pass

                    fh.seek(self.h["offset"] + (numPoints * self.h['pointreclen'])) # searches to beginning of point

                    try:
                        point_dictionary = extract_return(self, fh)

                        if self.znr is False:
                            self.xy_dictionary["all"] = self.writePoint(self.xy_dictionary["all"], point_dictionary)
                            # populates number of returns
                            if point_dictionary["rtn_num"] < 6 and point_dictionary["rtn_num"] > 0:
                                self.xy_dictionary["all"]["num_rtn"][point_dictionary["rtn_num"]] += 1

                        else:
                            self.xy_dictionary["all"] = self.writeZNR(self.xy_dictionary["all"], point_dictionary)

                        numPoints += 1
                        self.totPoints += 1
                    except:
                        # NOTE(review): bare except counts any failure as a
                        # bad point -- narrowing it would aid debugging
                        self.badPoints += 1
                        continue

        # flush and close any output files still open
        for key in self.xy_dictionary.keys():
            if self.xy_dictionary[key]["isOpen"] is not False:
                self.xy_dictionary[key]["isOpen"].close()

        if self.verbose : print "number of bad points: {}".format(self.badPoints)
        if self.znr :
            # switch the point format to the compact znr record
            self.ptFrmt, self.dt = znrStruct()
            self.globalHeader["pointreclen"] = 5

        return self
401 |
    def plot(self, centre_x, centre_y, extent=24, round=False):
        """

        Returns a plotwise set of points from the tile(s) defined with
        lasIO, with plot centre at centre_x and centre_y.  Plots are
        square by default but can be circular with round=True.

        Returns self; the result can be fetched as a numpy array with
        asArray, or saved as .las, .txt or .xyz by calling exportLAS,
        exportTXT or exportXYZ respectively.

        Parameters
        ----------
        centre_x, centre_y : int or float
            Cartesian coordinates of plot centre (in the same coordinate
            system as data)

        extent : int, float or tuple with length of 2
            Diameter of round plot or extent of square plot. Will accept
            a tuple of two ints or floats.

        round : Boolean, default False
            If False a square plot is returned, if True a round plot is
            returned

        Returns
        -------
        out : self

        """

        # half-extent in each axis
        if isinstance(extent, tuple):
            extent_x = extent[0] / 2.
            extent_y = extent[1] / 2.
        else:
            extent_x, extent_y = extent / 2., extent / 2.

        xmin = centre_x - extent_x
        xmax = centre_x + extent_x
        ymin = centre_y - extent_y
        ymax = centre_y + extent_y

        self.totPoints = 0
        self.globalHeader["guid1"] = 0

        x, y = self.globalHeader["xmin"], self.globalHeader["ymin"]
        if self.saveLAS == None:
            self.saveLAS = str(int(x)) + "_" + str(int(y)) + "_OUT.las"
        self.xy_dictionary = {}
        self.xy_dictionary['plot'] = {"xmin":xmin,
                                      "ymin":ymin,
                                      "zmin":999,
                                      "xmax":xmax,
                                      "ymax":ymax,
                                      "zmax": -999,
                                      "num_rtn": {1:0, 2:0, 3:0, 4:0, 5:0},
                                      "i":0,
                                      "outFile": os.path.abspath(os.path.join(self.savePath, self.saveLAS)),
                                      "tempFile": os.path.abspath(os.path.join(self.tempDirectory, str(x) + "_" + str(y) + ".PLOT.temp")),
                                      "isOpen": False,
                                      "lastTouched": None
                                      }

        for tile in self.tileList:

            tile = os.path.join(self.dir, tile)
            self.h = parseHeader(tile)
            self.tile = tile

            # skip tiles that do not intersect the plot bounding box
            if self.h["xmax"] < xmin or self.h["xmin"] > xmax or self.h["ymax"] < ymin or self.h["ymin"] > ymax:
                #print "skipping {}: out of bounds".format(tile)
                continue

            # guid2 == 1 marks a .laz-compressed tile; decompress first
            if self.h["filesig"] == "LASF":
                if self.h["guid2"] == 1:
                    tile = self.laz2las(tile)

            self.h["num_rtn"] = {}

            with open(tile, 'rb') as fh:

                fh.seek(self.h["offset"])

                for i in range(self.h['numptrecords']): # loops through plot points

                    if i%self.counter == 0 and self.verbose :
                        self.printProgress()

                    fh.seek(self.h["offset"] + (i * self.h['pointreclen'])) # searches to beginning of point

                    try:

                        # test x point first ...
                        # NOTE(review): '=L' decodes an unsigned long; the
                        # LAS spec stores X/Y as signed, so negative
                        # coordinates would decode incorrectly -- confirm
                        fh.seek(self.h['offset'] + (i * self.h['pointreclen']))
                        x = fh.read(4)
                        x = struct.unpack('=' + 'L', x)[0]
                        x = (x * self.h['xscale'] ) + self.h['xoffset']
                        if x < xmin or x > xmax:
                            continue

                        # test y point next ...
                        fh.seek(self.h['offset'] + (i * self.h['pointreclen'] + 4))
                        y = fh.read(4)
                        y = struct.unpack('=' + 'L', y)[0]
                        y = (y * self.h['yscale']) + self.h['yoffset']
                        if y < ymin or y > ymax:
                            continue

                        # extract round plot
                        if round and round_plot((x, y), centre_x, centre_y, xmax, xmin, ymax, ymin, extent_x) == 0:
                            continue

                        fh.seek(self.h["offset"] + (i * self.h['pointreclen'])) # searches to beginning of point

                        if not self.znr:
                            point_dictionary = extract_return(self, fh)

                            self.xy_dictionary["plot"] = self.writePoint(self.xy_dictionary["plot"], point_dictionary)

                            # populates number of returns
                            if point_dictionary["rtn_num"] < 6 and point_dictionary["rtn_num"] > 0:
                                self.xy_dictionary["plot"]["num_rtn"][point_dictionary["rtn_num"]] += 1

                        else:
                            point_dictionary = extract_return(self, fh)
                            self.xy_dictionary["plot"] = self.writeZNR(self.xy_dictionary["plot"], point_dictionary)

                        self.totPoints += 1

                    except:
                        # NOTE(review): bare except counts any failure as a
                        # bad point -- narrowing it would aid debugging
                        self.badPoints += 1
                        continue

        # flush and close the plot output if it was opened
        if self.xy_dictionary["plot"]["isOpen"]:
            self.xy_dictionary["plot"]["isOpen"].close()

        if self.verbose : print "number of bad points = {}".format(self.badPoints)
        if self.znr :
            # switch the point format to the compact znr record
            self.ptFrmt, self.dt = znrStruct()
            self.globalHeader["pointreclen"] = 5

        return self
547 |
    def fromGrid(self, csv, resolution=None):
        """

        Extracts a square plot around every x,y location listed in a .csv
        grid file and writes one .las per grid cell.

        Parameters
        ----------
        csv : file path
            .csv with a one-line header and comma-separated x,y plot
            centre coordinates

        resolution : int or float, optional
            Grid cell size; if None it is inferred from the spacing of
            the first two x values in the .csv

        Returns
        -------
        out : self

        """

        self.totPoints = 0
        self.globalHeader["guid1"] = 0

        # NOTE(review): np.float is deprecated/removed in modern numpy --
        # use float when porting
        grid = np.loadtxt(csv, skiprows=1, delimiter=',', dtype=([('x', np.float), ('y', np.float)]))
        grid['x'] = grid['x'].astype(int)
        grid['y'] = grid['y'].astype(int)

        # infer resolution from the x spacing of the first two rows
        if not resolution:
            resolution = grid['x'][1] - grid['x'][0]

        # overall bounding box of all grid cells
        xmin = grid['x'].min() - (resolution / 2.)
        xmax = grid['x'].max() + (resolution / 2.)
        ymin = grid['y'].min() - (resolution / 2.)
        ymax = grid['y'].max() + (resolution / 2.)

        if self.verbose:
            print 'grid resolution:', resolution
            print 'aiming to produce {} tiles'.format(len(grid))

        self.xy_dictionary = {}
        for x, y in grid:
            self.xy_dictionary[(x, y)] = {"xmin":x - (resolution / 2.),
                                          "ymin":y - (resolution / 2.),
                                          "zmin":999,
                                          "xmax":x + (resolution / 2.),
                                          "ymax":y + (resolution / 2.),
                                          "zmax": -999,
                                          "num_rtn": {1:0, 2:0, 3:0, 4:0, 5:0},
                                          "i":0,
                                          "outFile": os.path.abspath(os.path.join(self.savePath, "{}_{}.PLOT.las".format(x, y))),
                                          "tempFile": os.path.abspath(os.path.join(self.tempDirectory, "{}_{}.PLOT.temp".format(x, y))),
                                          "isOpen": False,
                                          "lastTouched": None
                                          }

        # structured array of cell centres for vectorised point-in-cell
        # lookup.  NOTE(review): dict.keys() returns a list in Python 2;
        # wrap in list() if porting to Python 3
        keys = np.array(self.xy_dictionary.keys(), dtype=[('x', int), ('y', int)])

        for tile in self.tileList:

            tile = os.path.join(self.dir, tile)
            self.h = parseHeader(tile)
            self.tile = tile

            # skip tiles that do not intersect the grid bounding box
            if self.h["xmax"] < xmin or self.h["xmin"] > xmax or self.h["ymax"] < ymin or self.h["ymin"] > ymax:
                continue

            # guid2 == 1 marks a .laz-compressed tile; decompress first
            if self.h["filesig"] == "LASF":
                if self.h["guid2"] == 1:
                    tile = self.laz2las(tile)

            self.h["num_rtn"] = {}

            with open(tile, 'rb') as fh:

                fh.seek(self.h["offset"])

                for i in range(self.h['numptrecords']): # loops through plot points

                    if i%self.counter == 0 and self.verbose :
                        self.printProgress()

                    fh.seek(self.h["offset"] + (i * self.h['pointreclen'])) # searches to beginning of point

                    # test x point first ...
                    # NOTE(review): '=L' decodes an unsigned long; the LAS
                    # spec stores X/Y as signed -- negative coordinates
                    # would decode incorrectly
                    fh.seek(self.h['offset'] + (i * self.h['pointreclen']))
                    x = fh.read(4)
                    x = struct.unpack('=' + 'L', x)[0]
                    x = (x * self.h['xscale'] ) + self.h['xoffset']
                    if not xmin < x < xmax:
                        continue

                    # test y point next ...
                    fh.seek(self.h['offset'] + (i * self.h['pointreclen'] + 4))
                    y = fh.read(4)
                    y = struct.unpack('=' + 'L', y)[0]
                    y = (y * self.h['yscale']) + self.h['yoffset']
                    if not ymin < y < ymax:
                        continue

                    # extract round plot
                    # NOTE(review): these conditions are OR-ed so idx
                    # selects every grid row; the loop below does the real
                    # point-in-cell test -- confirm '&' was not intended
                    inGrid = False
                    idx = [(grid['x'] > self.h["xmin"]) | (grid['x'] <= self.h["xmax"]) |
                           (grid['y'] > self.h["ymin"]) | (grid['y'] <= self.h["ymax"])]
                    for row in grid[idx]:
                        if ((row[0] - (resolution / 2.)) < x < (row[0] + (resolution / 2.)) and
                            (row[1] - (resolution / 2.)) < y < (row[1] + (resolution / 2.))):
                            inGrid = True
                            break

                    if not inGrid: continue

                    fh.seek(self.h["offset"] + (i * self.h['pointreclen'])) # searches to beginning of point

                    # try:
                    point_dictionary = extract_return(self, fh)
                    X, Y = point_dictionary['x'], point_dictionary['y']
                    # every grid cell whose extent contains the point
                    KEYS = keys[(X >= keys['x'] - (resolution / 2.)) & (X <= keys['x'] + (resolution / 2.)) &
                                (Y >= keys['y'] - (resolution / 2.)) & (Y <= keys['y'] + (resolution / 2.))]

                    for key in KEYS:

                        self.xy_dictionary[tuple(key)] = self.writePoint(self.xy_dictionary[tuple(key)], point_dictionary)

                        if point_dictionary["rtn_num"] < 6 and point_dictionary["rtn_num"] > 0:
                            self.xy_dictionary[tuple(key)]["num_rtn"][point_dictionary["rtn_num"]] += 1

                    self.totPoints += 1

                    # except:
                    #     self.badPoints += 1
                    #     continue

                    # closes file handles that have been unused for a while
                    if self.filesOpen == 40:
                        self.closeFiles()
                        self.filesOpen = 20

            # deletes .las tiles that were converted from .laz
            if self.tempDirectory in tile:
                os.unlink(tile)

        # flush and close any cell files still open
        for key in self.xy_dictionary.keys():
            if self.xy_dictionary[key]["isOpen"] is not False:
                self.xy_dictionary[key]["isOpen"].close()

        if self.verbose : print "number of bad points = {}".format(self.badPoints)
        if self.znr :
            # switch the point format to the compact znr record
            self.ptFrmt, self.dt = znrStruct()
            self.globalHeader["pointreclen"] = 5
        else:
            if resolution >= 1: self.h['guid1'] = resolution

        self.outTileCount = 0

        return self
717 |
718 | def writePoint(self, tile, d):
719 |
720 | # opens file for writing points to
721 |
722 | if not type(tile["isOpen"]) == 'file':
723 | tile["isOpen"] = open(tile["tempFile"], "ab")
724 | self.filesOpen += 1
725 |
726 | # writes point
727 | for i in self.ptFrmt:
728 | if i[0] == 'return_grp':
729 | byte = ((d['scan_edge'] & 1) << 7) | ((d['scan_dir'] & 1) << 6) | ((d['rtn_tot'] & 7) << 3) | (d['rtn_num'] & 7)
730 | elif i[0] == 'x':
731 | byte = (d['x'] - self.h['xoffset']) / self.h['xscale']
732 | elif i[0] == 'y':
733 | byte = (d['y'] - self.h['yoffset']) / self.h['yscale']
734 | elif i[0] == 'z':
735 | byte = (d['z'] - self.h['zoffset']) / self.h['zscale']
736 | else:
737 | byte = d[i[0]]
738 |
739 | tile["isOpen"].write(struct.pack('=' + i[2], byte))
740 |
741 | # updates header information
742 | if d['z'] > tile["zmax"]:
743 | tile["zmax"] = d['z']
744 | if d['z'] < tile["zmin"]:
745 | tile["zmin"] = d['z']
746 | tile["i"] += 1
747 |
748 | tile["lastTouched"] = datetime.datetime.now()
749 |
750 | return tile
751 |
752 | def writeZNR(self, tile, d):
753 |
754 | # opens file for writing points to
755 | if tile["isOpen"] == False:
756 | tile["isOpen"] = open(tile["tempFile"], "ab")
757 | self.filesOpen += 1
758 |
759 | # writes point
760 | for i in znrStruct()[0]: # retrieves struct from lasStructure
761 | if i[0] == 'z':
762 | byte = (d['z'] / .01 )
763 | else:
764 | byte = d[i[0]]
765 | tile["isOpen"].write(struct.pack('=' + i[2], byte))
766 |
767 | # updates header information
768 | if d['z'] > tile["zmax"]:
769 | tile["zmax"] = d['z']
770 | if d['z'] < tile["zmin"]:
771 | tile["zmin"] = d['z']
772 | tile["i"] += 1
773 |
774 | tile["lastTouched"] = datetime.datetime.now()
775 |
776 | return tile
777 |
778 | def asDic(self):
779 |
780 | """
781 | Returns array as a dictionary where the keys are
782 | the central coordinates and the values are a list of
783 | tuples (height, number of returns). This is useful
784 | for plotting or calculating continuous variables across
785 | a plot.
786 | """
787 |
788 | arr = {}
789 | for i, key in enumerate(self.xy_dictionary.keys()):
790 | tile = self.xy_dictionary[key]
791 | if tile["i"] == 0:
792 | del tile
793 | continue
794 | a = np.zeros(tile["i"], dtype = self.dt)
795 | with open(tile["tempFile"], "rb") as fh:
796 | for j, line in enumerate(range(tile["i"])):
797 | fh.seek(j * self.globalHeader['pointreclen'])
798 | d = extract_return(self, fh)
799 | for field in d:
800 | a[j][field] = d[field]
801 | arr[(tile["xmin"], tile["ymin"])] = a
802 | if self.keepTemp is False: os.unlink(tile["tempFile"])
803 |
804 | if self.keepTemp is False: shutil.rmtree(self.tempDirectory)
805 |
806 | return arr
807 |
808 | def asArray(self):
809 |
810 | """
811 | Returns all points in a single array or as a list of
812 | arrays if there is more than one tile. If there is
813 | more than one tile then it may be preferable to use
814 | asDic command.
815 | """
816 |
817 | arr = []
818 | for i, key in enumerate(self.xy_dictionary.keys()):
819 | tile = self.xy_dictionary[key]
820 | if tile["i"] == 0:
821 | del tile
822 | continue
823 | if self.znr:
824 | a = np.zeros(tile["i"], dtype = znrStruct()[1])
825 | else:
826 | a = np.zeros(tile["i"], dtype = self.dt)
827 | with open(tile["tempFile"], "rb") as fh:
828 | for j, line in enumerate(range(tile["i"])):
829 | fh.seek(j * self.globalHeader['pointreclen'])
830 | d = extract_return(self, fh)
831 | for field in d:
832 | a[j][field] = d[field]
833 | arr.append(a)
834 | if self.keepTemp is False: os.unlink(tile["tempFile"])
835 |
836 | if self.keepTemp is False: shutil.rmtree(self.tempDirectory)
837 |
838 | if len(arr) == 1: # if list arr has only one array...
839 | arr = arr[0] # ...return only the array
840 |
841 | return arr
842 |
    def exportLAS(self, out=False):

        """
        Writes tile(s) to .las format. Requires znr=False or
        will throw an exception

        Parameters
        ----------
        out: str or bool, default False
            Either a path to a .las file, an existing directory, or False
            to use each tile's default "outFile" path.

        Returns
        -------
        self
        """

        if self.znr : raise NameError("Processed as ZNR, can not export to .LAS")
        # report progress roughly every 10% of tiles
        if len(self.xy_dictionary) > 10:
            nTileCounter = len(self.xy_dictionary) // 10
        else: nTileCounter = len(self.xy_dictionary)

        tileCount = 0

        for i, key in enumerate(self.xy_dictionary.keys()):
            if self.xy_dictionary[key]["i"] == 0:
                # drop empty tiles (.keys() returns a list in Python 2, so
                # deleting while iterating is safe here)
                del self.xy_dictionary[key]
                continue
            else:
                tileCount += 1

            if i%nTileCounter == 0 and i > 0 and self.verbose:
                print "{:.0f}% | {} of {} tiles exported | {}".format( np.float(i) / len(self.xy_dictionary) * 100, i, len(self.xy_dictionary), datetime.datetime.now())

            tile = self.xy_dictionary[key]
            # stamp per-tile header fields: software tags, bounds and counts
            self.h['gensoftware'] = 'CRC207 LiDAR analysis software '
            self.h['sysid'] = 'CRC207 LiDAR analysis software '
            self.h['xmin'] = tile['xmin']
            self.h['xmax'] = tile['xmax']
            self.h['ymin'] = tile['ymin']
            self.h['ymax'] = tile['ymax']
            self.h['zmin'] = tile['zmin']
            self.h['zmax'] = tile['zmax']
            # NOTE: the list comp rebinds `i` (Python 2 leaks comprehension
            # variables); harmless as enumerate resets it next iteration
            self.h['numptbyreturn'] = tuple([tile["num_rtn"][i] for i in range(1, 6)]) # sorting out the rtn_num tuple
            self.h['numptrecords'] = tile["i"]
            self.h['guid2'] = 0
            if self.h['numvlrecords'] > 0:
                # only one VLR is kept, so points start at a fixed offset
                self.h['offset'] = 313
                self.h['numvlrecords'] = 1

            # resolve the output path
            if out:
                if out.endswith('.las') and len(self.xy_dictionary) > 1:
                    # enumerate output names when one .las target is given
                    # for multiple tiles
                    outFile = out[:-4] + '.' + str(self.outTileCount) + '.las'
                elif out.endswith('.las') and os.path.isdir(os.path.split(out)[0]):
                    outFile = out
                elif os.path.isdir(out):
                    outFile = os.path.join(out, os.path.split(tile["outFile"])[1])
                else:
                    raise IOError('out path not recognised')
            else:
                outFile = tile["outFile"]
                self.savePath = os.path.split(outFile)[0]

            with open(outFile, "wb") as outOpen:
                # write the public header field by field
                for j in headerstruct():
                    if j[2] == 'c':
                        # raw character fields are written as-is
                        outOpen.write(self.h[j[0]])
                    elif j[3] > 1:
                        outOpen.write(struct.pack('=' + str(j[3]) + j[2], *self.h[j[0]]))
                    else:
                        outOpen.write(struct.pack('=' + j[2] , self.h[j[0]]))

                ## write VLR
                if self.h['numvlrecords'] > 0:
                    # keeps only the first VLR e.g. the projection data
                    outOpen.write(self.vlr[:86])

                ## write points: the temp file already holds packed records
                outOpen.seek(self.h['offset'])
                with open(tile["tempFile"], "rb") as o:
                    points = o.read()
                    outOpen.write(points)

            tile["isOpen"] = "written_to"

            if self.keepTemp is False: os.unlink(tile["tempFile"])

        if tileCount > 0:
            print "100% | {} of {} tiles exported | {}".format(len(self.xy_dictionary),
                                                               len(self.xy_dictionary),
                                                               datetime.datetime.now())

            if len(self.xy_dictionary) == 1:
                print ".las file written to {}".format(outFile)
            else:
                print ".las file(s) written to {}".format(os.path.split(outFile)[0])
        else:
            print "! no tiles to export !"

        if not self.keepTemp: shutil.rmtree(self.tempDirectory)

        return self
935 |
    def np2ZNR(self, arr):

        """
        Can be used to export a numpy array in .znr format

        Parameters
        ----------
        arr: numpy array
            Rows are read positionally: column 0 is treated as z and
            column 1 as the other ZNR field -- assumes a 2-column
            (z, number of returns) layout; TODO confirm against callers.
        """

        if self.znr == False: raise NameError ("Data processed as .LAS and therefore wcan not be saved as .ZNR")

        self.ptFrmt, self.dt = znrStruct()

        # report progress roughly every 10% of tiles
        if len(self.xy_dictionary) > 10:
            nTileCounter = len(self.xy_dictionary) // 10
        else: nTileCounter = len(self.xy_dictionary)

        for i, key in enumerate(self.xy_dictionary.keys()):
            if self.xy_dictionary[key]["i"] == 0:
                # drop empty tiles (.keys() returns a list in Python 2)
                del self.xy_dictionary[key]
                continue

            if i%nTileCounter == 0:
                print "{}% | {} of {} tiles exported | {}".format(np.round((i/np.float(len(self.xy_dictionary)))*100), i, len(self.xy_dictionary), datetime.datetime.now())

            tile = self.xy_dictionary[key]
            # fixed .znr header values: "ZNRF" signature, 5-byte records,
            # z stored at 0.01 scale
            self.h['filesig'] = "ZNRF"
            self.h['gensoftware'] = 'CRC207 LiDAR analysis software '
            self.h['sysid'] = 'CRC207 LiDAR analysis software '
            self.h['xmin'] = tile['xmin']
            self.h['xmax'] = tile['xmax']
            self.h['ymin'] = tile['ymin']
            self.h['ymax'] = tile['ymax']
            self.h['zmin'] = tile['zmin']
            self.h['zmax'] = tile['zmax']
            self.h['numptrecords'] = tile["i"]
            self.h['resolution'] = self.globalHeader['guid1']
            self.h['pointreclen'] = 5
            self.h['headersize'] = 92
            self.h['offset'] = 178
            self.h['zscale'] = 0.01
            self.h['zoffset'] = 0

            outFile = os.path.splitext(tile["outFile"])[0] + ".znr"
            with open(outFile, "wb") as out:
                # NOTE(review): both inner loops rebind `i` (the tile
                # index); harmless since enumerate resets it each iteration
                for i in znrHeaderStruct():
                    if i[2] == 'c':
                        out.write(self.h[i[0]])
                    else:
                        out.write(struct.pack('=' + i[2] , self.h[i[0]]))

                ## write VLR
                out.write(self.vlr)

                ## write points
                for line in arr:
                    for i in znrStruct()[0]: # retrieves struct from lasStructure
                        if i[0] == 'z':
                            # z back to scaled integer (0.01 scale)
                            byte = line[0] / .01
                        else:
                            byte = line[1]
                        out.write(struct.pack('=' + i[2], byte))

            tile["isOpen"] = "written_to"
            if self.keepTemp is False: os.unlink(tile["tempFile"])

        if self.verbose : print ".znr file written to {}".format(os.path.split(outFile)[0])

        if self.keepTemp is False: shutil.rmtree(self.tempDirectory)
1002 |
1003 | def np2LAS(self, arr, out=False):
1004 |
1005 | """
1006 | Can be used to export a numpy array in .las format
1007 | """
1008 |
1009 | if self.znr: raise NameError ("Data processed as .ZNR and therefore wcan not be saved as .LAS")
1010 |
1011 | self.h['gensoftware'] = 'CRC207 LiDAR analysis software '
1012 | self.h['sysid'] = 'CRC207 LiDAR analysis software '
1013 | self.h['xmin'] = arr['x'].min()
1014 | self.h['xmax'] = arr['x'].max()
1015 | self.h['ymin'] = arr['y'].min()
1016 | self.h['ymax'] = arr['y'].max()
1017 | self.h['zmin'] = arr['z'].min()
1018 | self.h['zmax'] = arr['z'].max()
1019 | ### sorting out the rtn_num tuple
1020 | rtn = np.zeros(5).astype(int)
1021 | for row in arr:
1022 | rtn_num = row['rtn_num'] - 1
1023 | if rtn_num < 5:
1024 | rtn[rtn_num] += 1
1025 | self.h['numptbyreturn'] = tuple(rtn)
1026 | self.h['numptrecords'] = len(arr)
1027 | self.h['guid2'] = 0
1028 |
1029 | if out:
1030 | if out.endswith('.las'):
1031 | outFile = out
1032 | elif os.path.isdir(out):
1033 | outFile = os.path.join(out, os.path.split(tile["outFile"])[1])
1034 | else:
1035 | raise IOError('out path not recognised')
1036 | else:
1037 | x = str(np.mean(arr['x']).astype(int))
1038 | y = str(np.mean(arr['y']).astype(int))
1039 | outFile = os.path.join(self.savePath, "{}_{}.las".format(x, y))
1040 |
1041 | with open(outFile, "wb") as out:
1042 | for j in headerstruct():
1043 | if j[2] == 'c':
1044 | out.write(self.h[j[0]])
1045 | elif j[3] > 1:
1046 | out.write(struct.pack('=' + str(j[3]) + j[2], *self.h[j[0]]))
1047 | else:
1048 | out.write(struct.pack('=' + j[2] , self.h[j[0]]))
1049 |
1050 | ## write VLR
1051 | if self.h['numvlrecords'] > 0:
1052 | out.write(self.vlr)
1053 |
1054 | ## write points
1055 | out.seek(self.h['offset'])
1056 | for d in arr:
1057 | for i in self.ptFrmt:
1058 | if i[0] == 'return_grp':
1059 | byte = ((d['scan_edge'] & 1) << 7) | ((d['scan_dir'] & 1) << 6) | ((d['rtn_tot'] & 7) << 3) | (d['rtn_num'] & 7)
1060 | elif i[0] == 'x':
1061 | byte = (d['x'] - self.h['xoffset']) / self.h['xscale']
1062 | elif i[0] == 'y':
1063 | byte = (d['y'] - self.h['yoffset']) / self.h['yscale']
1064 | elif i[0] == 'z':
1065 | byte = (d['z'] - self.h['zoffset']) / self.h['zscale']
1066 | else:
1067 | byte = d[i[0]]
1068 | out.write(struct.pack('=' + i[2], byte))
1069 |
1070 | if self.verbose: print ".las file written to {}".format(outFile)
1071 |
1072 | if self.keepTemp is False: shutil.rmtree(self.tempDirectory)
1073 |
1074 | return outFile
1075 |
1076 | def exportZNR(self):
1077 |
1078 | if self.znr == False: raise NameError ("Data processed as .LAS and therefore cannot be saved as .ZNR")
1079 |
1080 | self.ptFrmt, self.dt = znrStruct()
1081 | self.globalHeader["pointreclen"] = 5
1082 | if len(self.xy_dictionary) > 10:
1083 | nTileCounter = len(self.xy_dictionary) // 10
1084 | else: nTileCounter = len(self.xy_dictionary)
1085 |
1086 | for i, key in enumerate(self.xy_dictionary.keys()):
1087 | if self.xy_dictionary[key]["i"] == 0:
1088 | del self.xy_dictionary[key]
1089 | continue
1090 |
1091 | print len(self.xy_dictionary)
1092 | if i%nTileCounter == 0:
1093 | print "{}% | {} of {} tiles exported | {}".format(np.round((i/np.float(len(self.xy_dictionary)))*100), i, len(self.xy_dictionary), datetime.datetime.now())
1094 |
1095 | tile = self.xy_dictionary[key]
1096 | self.h['filesig'] = "ZNRF"
1097 | self.h['gensoftware'] = 'CRC207 LiDAR analysis software '
1098 | self.h['sysid'] = 'CRC207 LiDAR analysis software '
1099 | self.h['xmin'] = tile['xmin']
1100 | self.h['xmax'] = tile['xmax']
1101 | self.h['ymin'] = tile['ymin']
1102 | self.h['ymax'] = tile['ymax']
1103 | self.h['zmin'] = tile['zmin']
1104 | self.h['zmax'] = tile['zmax']
1105 | self.h['numptrecords'] = tile["i"]
1106 | self.h['resolution'] = self.globalHeader['guid1']
1107 | self.h['pointreclen'] = 5
1108 | self.h['headersize'] = 92
1109 | self.h['offset'] = 178
1110 | self.h['zscale'] = 0.01
1111 | self.h['zoffset'] = 0
1112 |
1113 | outFile = os.path.splitext(tile["outFile"])[0] + ".znr"
1114 | with open(outFile, "wb") as out:
1115 | for i in znrHeaderStruct():
1116 | if i[2] == 'c':
1117 | out.write(self.h[i[0]])
1118 | else:
1119 | out.write(struct.pack('=' + i[2] , self.h[i[0]]))
1120 |
1121 | ## write VLR
1122 | out.write(self.vlr)
1123 |
1124 | ## write points
1125 | with open(tile["tempFile"], "rb") as o:
1126 | points = o.read()
1127 | out.write(points)
1128 |
1129 | tile["isOpen"] = "written_to"
1130 | if self.keepTemp is False: os.unlink(tile["tempFile"])
1131 |
1132 | if len(self.xy_dictionary) == 0:
1133 | print "No tiles written"
1134 | elif len(self.xy_dictionary) == 1:
1135 | print ".znr file written to {}".format(tile["outFile"])
1136 | else:
1137 | if self.verbose : print ".znr files written to {}".format(os.path.split(outFile)[0])
1138 | if self.keepTemp is False: shutil.rmtree(self.tempDirectory)
1139 |
1140 | if self.keepTemp is False: shutil.rmtree(self.tempDirectory)
1141 |
1142 | def LAS2txt(self, enum=False, outFile=False):
1143 |
1144 | for i, key in enumerate(self.xy_dictionary.keys()):
1145 | tile = self.xy_dictionary[key]
1146 | if tile["i"] == 0:
1147 | del tile
1148 | continue
1149 | if enum:
1150 | savePath = os.path.join(os.path.split(tile["outFile"])[0], "{}.txt".format(i))
1151 | elif outFile:
1152 | if os.path.isdir(outFile):
1153 | savePath = os.path.join(outFile, "{}.txt".format(i))
1154 | else:
1155 | savePath = outFile
1156 | else: savePath = os.path.splitext(tile["outFile"])[0] + ".txt"
1157 | a = np.zeros(tile["i"], dtype = self.dt)
1158 | with open(tile["tempFile"], "rb") as fh:
1159 | for j, line in enumerate(range(tile["i"])):
1160 | fh.seek(j * self.globalHeader['pointreclen'])
1161 | d = extract_return(self, fh)
1162 | for field in d:
1163 | a[j][field] = d[field]
1164 | np.savetxt(savePath, np.transpose([a['x'], \
1165 | a['y'], \
1166 | a['z'], \
1167 | a['class'], \
1168 | a['i'], \
1169 | a['scan_ang'], \
1170 | a['rtn_num'], \
1171 | a['rtn_tot']]), \
1172 | fmt='%.1f', delimiter=',')
1173 | if self.verbose: print '.txt saved to:', savePath
1174 | if self.keepTemp is False: os.unlink(tile["tempFile"])
1175 |
1176 | if self.keepTemp is False: shutil.rmtree(self.tempDirectory)
1177 |
1178 | def exportXYZ(self):
1179 |
1180 | for i, key in enumerate(self.xy_dictionary.keys()):
1181 | tile = self.xy_dictionary[key]
1182 | if tile["i"] == 0:
1183 | del tile
1184 | continue
1185 | self.savePath = os.path.splitext(tile["outFile"])[0] + ".txt"
1186 | a = np.zeros(tile["i"], dtype = self.dt)
1187 | with open(tile["tempFile"], "rb") as fh:
1188 | for j, line in enumerate(range(tile["i"])):
1189 | fh.seek(j * self.globalHeader['pointreclen'])
1190 | d = extract_return(self, fh)
1191 | for field in d:
1192 | a[j][field] = d[field]
1193 | np.savetxt(self.savePath, np.transpose([a['x'], a['y'], a['z']]), fmt='%.1f', delimiter=',')
1194 |
1195 | if self.keepTemp is False: os.unlink(tile["tempFile"])
1196 |
1197 | if self.keepTemp is False: shutil.rmtree(self.tempDirectory)
1198 |
1199 | if self.verbose: print 'XYZ saved to:', self.savePath
1200 |
1201 | return self
1202 |
1203 | def closeFiles(self):
1204 |
1205 | l = {}
1206 | for key in self.xy_dictionary.keys():
1207 | if self.xy_dictionary[key]["lastTouched"] is not None:
1208 | l[key] = self.xy_dictionary[key]["lastTouched"]
1209 | for i, (key, val) in enumerate(zip(l.keys(), sorted(l.values()))):
1210 | if self.xy_dictionary[key]["isOpen"] != False:
1211 | self.xy_dictionary[key]["isOpen"].close()
1212 | self.xy_dictionary[key]["isOpen"] = False
1213 | if i == 20:
1214 | return
1215 |
    def printProgress(self):

        # report overall progress as a percentage of all points processed,
        # with a timestamp
        print "{}% | {} of {} points processed | {}".format(np.round((np.float(self.totPoints)/self.globalHeader['numptrecords'])*100), self.totPoints, self.globalHeader['numptrecords'], datetime.datetime.now())
1219 |
1220 | def generateSample(self, sample):
1221 |
1222 | return np.random.choice(self.h['numptrecords'],
1223 | size=int(self.h['numptrecords'] / sample),
1224 | replace=False)
1225 |
1226 | def removeTemp(self):
1227 |
1228 | if os.path.isdir(self.tempDirectory):
1229 | for file in os.listdir(self.tempDirectory):
1230 | os.unlink(os.path.join(self.tempDirectory, file))
1231 |
1232 | shutil.rmtree(self.tempDirectory)
1233 | if self.verbose: print "{} has been deleted".format(self.tempDirectory)
1234 |
1235 |
def parseHeader(filename):

    """
    Reads the header of `filename` and returns the fields as a
    dictionary, plus an 'infile' entry holding the path itself.
    """

    with open(filename, 'rb') as fh:
        header = {'infile': filename}

        # the first four bytes identify the flavour: "ZNRF" headers use a
        # different structure to standard "LASF" files
        if fh.read(4) == "ZNRF":
            headerStructType = znrHeaderStruct()
        else:
            headerStructType = headerstruct()

        fh.seek(0)

        # each struct entry is (name, byte length, format char, count)
        for field in headerStructType:
            if field[2] == 'c':
                # raw character fields are kept as byte strings
                value = fh.read(field[1])
            elif field[3] > 1:
                # multi-value fields are kept as tuples
                value = struct.unpack('=' + str(field[3]) + field[2], fh.read(field[1]))
            else:
                value = struct.unpack('=' + field[2], fh.read(field[1]))[0]
            header[field[0]] = value

    if headerStructType == headerstruct():
        # .laz files flag compression by offsetting the point format id
        if header["pointformat"] > 127:
            header["pointformat"] -= 128
            header["numvlrecords"] -= 1
            header["offset"] -= 100
            header["guid2"] = 1

    return header
1268 |
1269 |
def extract_return(self, fh):

    """
    Reads one point record from `fh` (positioned at the start of the
    record) and returns it as a dictionary. Scaled integer x/y/z values
    are converted to real-world coordinates and the packed LAS return
    byte is split into its component fields.
    """

    ptFrmt = znrStruct()[0] if self.znr else self.ptFrmt

    record = {} # dictionary for storing values in for each point

    for field in ptFrmt:
        raw = fh.read(field[1])
        value = struct.unpack('=' + field[2], raw)[0]

        if field[0] == 'return_grp':
            # unpack the LAS return byte: bits 0-2 return number,
            # 3-5 total returns, 6 scan direction, 7 edge of flight line
            record['rtn_num'] = value & 7
            record['rtn_tot'] = (value >> 3) & 7
            record['scan_dir'] = (value >> 6) & 1
            record['scan_edge'] = value >> 7
            continue # 'return_grp' itself is not stored

        if field[0] == 'x':
            value = (value * self.h['xscale']) + self.h['xoffset']
        elif field[0] == 'y':
            value = (value * self.h['yscale']) + self.h['yoffset']
        elif field[0] == 'z':
            value = (value * self.h['zscale']) + self.h['zoffset']

        record[field[0]] = value

    # sanity check against corrupt records
    if record["z"] > 1000000:
        raise NameError("z very high: {}".format(record["z"]))

    return record
1302 |
1303 |
def round_plot(point, plot_x, plot_y, xmax, xmin, ymax, ymin, r):

    """
    Tests whether `point` (an (x, y) pair) falls inside a circular plot
    of radius `r` centred on (plot_x, plot_y).

    Returns 0 when the point lies outside the circle, otherwise None
    (implicitly), matching the original contract. The xmax/xmin/ymax/ymin
    parameters are unused but retained for backward compatibility with
    existing callers.
    """

    x, y = point

    # The original's four quadrant branches all reduced to the same
    # Euclidean distance sqrt((x - plot_x)**2 + (y - plot_y)**2), so
    # compute it directly.
    if math.hypot(x - plot_x, y - plot_y) > r:
        return 0
1333 |
1334 |
def getPtFrmt(globalHeader, verbose=False):

    """
    Selects the point record structure appropriate to the header:
    las2txt-generated files get the special LT structure, otherwise the
    structure is chosen by the header's point format id (0, 1 or 3).

    Raises
    ------
    NameError
        If the point format id is not one of the supported values.
        (Previously an unsupported id crashed with UnboundLocalError.)
    """

    # return structure
    if "txt2las" in globalHeader["gensoftware"]:
        ptFrmt, dt = point_fmtLTstruct()
    elif globalHeader["pointformat"] == 0:
        ptFrmt, dt = point_fmt0struct()
    elif globalHeader["pointformat"] == 1:
        ptFrmt, dt = point_fmt1struct()
    elif globalHeader["pointformat"] == 3:
        ptFrmt, dt = point_fmt3struct()
    else:
        raise NameError("unsupported point format: {}".format(globalHeader["pointformat"]))

    return ptFrmt, dt
1348 |
1349 |
def getVLR(headerSize, las):

    """
    Reads the first variable length record (86 bytes) immediately
    following the public header of a .las file.

    Parameters
    ----------
    headerSize: int
        Byte offset at which the VLRs start (the header size).
    las: str
        Path to the .las file.

    Returns
    -------
    vlr: the 86 raw bytes of the first VLR.
    """

    # BUG FIX: open in binary mode and via a context manager -- the
    # default text mode corrupts binary VLR payloads on platforms that
    # translate line endings, and the handle was closed manually.
    with open(las, 'rb') as fh:
        fh.seek(headerSize)
        return fh.read(86)
1356 |
1357 |
if __name__ == '__main__':

    # ad-hoc development driver: the paths below are hard-coded to the
    # author's machine and should be edited before running

    # import glob
    #
    # A = []
    # for las in glob.glob('/Users/phil/ALS/RiverHealth/Las/originalLas/*.las'):
    #     name = os.path.split(las)[1]
    #     x, y = int(name[1:4]), int(name[5:9])
    #     # plt.scatter(x, y)
    #     if y > 5950: A.append(las)

    # T = lasIO('/Users/phil/ALS/WC/spl/tile_20/WC1_10m_TILES', verbose=True).tiling(5).exportLAS('/Users/phil/ALS/WC/spl/tile_20/WC1_5m_TILES')
    T = lasIO('/Users/phil/ALS/WC/spl/tile_20/ForestLAS_tutorial/LAS/large_tile/WC45_SUB.las', verbose=True, keepTemp=True).tiling(20).exportLAS()
    # T = lasIO('/Users/phil/ALS/WC/spl/tile_20/WC1_test', verbose=True).all().exportLAS('/Users/phil/ALS/WC/spl/tile_20/WC1_test/all.las')
    # T = lasIO('/Users/phil/ALS/RiverHealth/Las/originalLas', verbose=True).fromGrid('/Users/phil/Dropbox/regular_points_3_30_test.csv').exportLAS('/Users/phil/ALS/WC/spl/tile_20/WC1_grid/works')

    # lasIO('/Users/phil/ALS/WC/spl/tile_20/WC1.las', verbose=True, keepTemp=True).all().exportLAS()
--------------------------------------------------------------------------------
/forestlas/lasIO_pool.py:
--------------------------------------------------------------------------------
1 | import os
2 | import struct
3 | import math
4 | import datetime
5 | import tempfile
6 | import shutil
7 | import subprocess
8 | import multiprocessing
9 | import random
10 | import numpy as np
11 | from lasStructure import *
12 |
13 | class lasIO:
14 |
15 | """
16 | This allows reading and writing of .las files (currently supports 1.1 and 1.2).
17 | .las files can be read as one file(s), gridded or a plot of data extracted.
18 | Output types include .las, a numpy array or as .znr file where only the height
19 | and the "number of returns" metadata remain.
20 | """
21 |
22 | def __init__(self, path, out=False, verbose=False, search=".",
23 | tmpDir=False, keepTemp=False, number_of_processes=1,
24 | copy=False, create_temp=True):
25 |
26 | """
27 | Functions creates holder for .las file and setups required dictionaries
28 | etc.
29 |
30 | Parameters
31 | ----------
32 | path: File path or list
33 | file path to tile .las file or directory containing .las files,
34 | also excepts a list of file paths.
35 |
36 | out: Path to directory or path to save .las to, Default None
37 | If None then saves output to os.getcwd()
38 |
39 | search: String. Default "."
40 | Can be used to filter .las files if a directory is supplied for
41 | "path"
42 |
43 | tmpDir: File path to temporary directory. Default False
44 | Multiprocessing is not available with this module but is by others
45 | that call lasIO. This specifies the temporary directory.
46 |
47 | keepTemp: Boolean. Default True
48 | When False all temporary files are kept. It is important to run
49 | removeTemp() to clean up before exiting
50 |
51 | number_of_processes: Int, Default 1
52 | If processing a number of tiles or using a regular grid, Python's
53 | multiprocessing canbe envoked. This variable sets the number of
54 | cores that are utilised.
55 |
56 | copy: Boolean, Default False
57 | Wether to copy files to the temp directory before processing. This
58 | can speed thinks up if reading a number of large files simultaneously
59 | from a remote drive.
60 |
61 | Returns
62 | -------
63 | out: self
64 |
65 | """
66 |
67 | self.verbose = verbose
68 | self.keepTemp = keepTemp
69 | self.number_of_processes = number_of_processes
70 |
71 | ### parse args and create file structure
72 | # is path a directory or file
73 | if isinstance(path, list) :
74 | self.tile_list = path
75 | self.dir = os.path.split(path[0])[0]
76 | self.tile = None
77 | elif os.path.isfile(path) :
78 | self.dir = os.path.split(path)[0]
79 | if self.dir == '': self.dir = os.getcwd()
80 | self.tile = os.path.split(path)[1]
81 | self.tile_list = [os.path.join(self.dir, self.tile)]
82 | elif os.path.isdir(path) :
83 | self.dir = path
84 | self.tile = None
85 | tile_list = os.listdir(self.dir)
86 | self.tile_list = [tile for tile in tile_list if (tile.endswith(".las")
87 | or tile.endswith(".laz"))
88 | and tile.find(search) != -1]
89 | else:
90 | raise NameError ("path not recognised")
91 |
92 | if len(self.tile_list) == 0:
93 | raise IOError("There are no .las or .laz tiles in {}".format(path))
94 |
95 | # create temporary directory in %temp%
96 | if create_temp:
97 | if tmpDir:
98 | self.tempDirectory = tmpDir
99 | if os.path.isdir(self.tempDirectory) is False:
100 | os.makedirs(self.tempDirectory)
101 | else:
102 | tempDirectoryName = "lidar.processing." + str(np.random.randint(0, 9999999)) + ".tmp"
103 | self.tempDirectory = os.path.join(tempfile.gettempdir(), tempDirectoryName)
104 | os.makedirs(self.tempDirectory)
105 |
106 | if self.verbose: print "temporary directory at: {}".format(self.tempDirectory)
107 |
108 | # create output directory
109 | if not out:
110 | self.savePath = self.dir
111 | self.saveLAS = None
112 | elif os.path.isdir(out):
113 | self.savePath = out
114 | self.saveLAS = None
115 | else:
116 | self.savePath = os.path.split(out)[0]
117 | self.saveLAS = os.path.split(out)[1]
118 |
119 | # global variables
120 | self.numberTiles = 1
121 | for i, las in enumerate(self.tile_list):
122 | las = os.path.join(self.dir, las)
123 | if i == 0:
124 | self.globalHeader = parseHeader(las)
125 | # self.globalHeader["numptrecords"] = lasHeader["numptrecords"]
126 | if self.globalHeader['filesig'] == "ZNRF":
127 | raise Exception('ZNR is deprecated, use an older version of lasIO')
128 | else:
129 | self.ptFrmt, self.dt = getPtFrmt(self.globalHeader, verbose=True)
130 | self.vlr = getVLR(self.globalHeader["headersize"], las)
131 | self.h = parseHeader(las)
132 | else:
133 | lasHeader = parseHeader(os.path.join(path, las))
134 | self.globalHeader["numptrecords"] += lasHeader["numptrecords"]
135 | if lasHeader["xmax"] > self.globalHeader["xmax"]:
136 | self.globalHeader["xmax"] = lasHeader["xmax"]
137 | if lasHeader["xmin"] < self.globalHeader["xmin"]:
138 | self.globalHeader["xmin"] = lasHeader["xmin"]
139 | if lasHeader["ymax"] > self.globalHeader["ymax"]:
140 | self.globalHeader["ymax"] = lasHeader["ymax"]
141 | if lasHeader["ymin"] < self.globalHeader["ymin"]:
142 | self.globalHeader["ymin"] = lasHeader["ymin"]
143 | self.numberTiles += 1
144 |
145 | self.x_centre = np.mean([self.globalHeader["xmax"], self.globalHeader["xmin"]])
146 | self.y_centre = np.mean([self.globalHeader["ymax"], self.globalHeader["ymin"]])
147 |
148 | if self.verbose: print "number of tiles to process:", self.numberTiles
149 | if self.numberTiles > 1 and self.verbose: print "processing tiles from:", self.dir
150 | # self.area = (self.globalHeader["xmax"] - self.globalHeader["xmin"]) * (self.globalHeader["ymax"] - self.globalHeader["ymin"])
151 | # self.pointDensity = self.globalHeader["numptrecords"] / self.area
152 |
153 | # admin!
154 | self.counter = self.globalHeader["numptrecords"] // 20
155 | if self.globalHeader["numptrecords"] > 1e7:
156 | self.counter = self.globalHeader["numptrecords"] // 100
157 | self.badPoints = 0
158 | self.copy = copy
159 | if self.copy and self.verbose:
160 | print 'tiles are copied to temp'
161 | self.resolution = None
162 |
163 | def grid(self, csv, resolution=None, take_sample=False):
164 |
165 | """
166 |
167 | Returns a plotwise array of points from the tile defined with
168 | lasIO with plot centre at centre_x and centre_y and area equal
169 | to (radius*2)**2 if round=False and pi*radius**2 if round=True.
170 |
171 | The radius defaults to 10 metres. Plots are square by default
172 | but can be circular with round=True.
173 |
    Returns self, which returns a numpy array if asArray is called,
    or can be saved as .las, .txt or .xyz by calling exportLAS,
    exportTXT or exportXYZ respectively.
177 |
    Parameters
179 | ----------
180 | centre_x, centre_y : int or float
181 | Cartesian coordinates of plot centre (in the same coordinate
182 | system as data)
183 |
184 | extent : int, float or tuple with length of 2
185 | Diameter of round plot or extent of square plot. Will except
186 | a tuple of two ints or floats.
187 |
    round : Boolean, default False
189 | If False a square plot is returned, if True a round plot is
190 | returned
191 |
192 | Returns
193 | -------
194 | out : self
195 |
196 | """
197 |
198 | self.take_sample = take_sample
199 |
200 | self.Global_totPoints = 0
201 | self.Global_files_open = 0
202 | self.globalHeader["guid1"] = 0
203 |
204 | self.grid = np.loadtxt(csv, skiprows=1, delimiter=',', dtype=([('x', np.float), ('y', np.float)]))
205 | self.grid['x'] = self.grid['x'].astype(int)
206 | self.grid['y'] = self.grid['y'].astype(int)
207 |
208 | if not resolution:
209 | self.resolution = self.grid['x'][1] - self.grid['x'][0]
210 | else:
211 | self.resolution = resolution
212 |
213 | self.xmin = self.grid['x'].min() - (self.resolution / 2.)
214 | self.xmax = self.grid['x'].max() + (self.resolution / 2.)
215 | self.ymin = self.grid['y'].min() - (self.resolution / 2.)
216 | self.ymax = self.grid['y'].max() + (self.resolution / 2.)
217 |
218 | if self.tile == None:
219 | dirName = str(self.resolution) + "m_GRID"
220 | else:
221 | dirName = os.path.splitext(self.tile)[0] + "_" + str(self.resolution) + "m_GRID"
222 | self.savePath = os.path.join(self.savePath, dirName)
223 | if os.path.isdir(self.savePath):
224 | shutil.rmtree(self.savePath)
225 |
226 | if self.verbose:
227 | print 'grid resolution:', self.resolution
228 | print 'aiming to produce {} tiles'.format(len(self.grid))
229 |
230 | self.xy_dictionary = dict()
231 | for x, y in self.grid:
232 | self.xy_dictionary[(x, y)] = {"xmin":x - (self.resolution / 2.),
233 | "ymin":y - (self.resolution / 2.),
234 | "zmin":999,
235 | "xmax":x + (self.resolution / 2.),
236 | "ymax":y + (self.resolution / 2.),
237 | "zmax": -999,
238 | "num_rtn": {1:0, 2:0, 3:0, 4:0, 5:0},
239 | "i":0,
240 | "outFile": os.path.abspath(os.path.join(self.savePath, "{}_{}.PLOT.las".format(x, y))),
241 | "tempFile": os.path.abspath(os.path.join(self.tempDirectory, "{}_{}.PLOT.temp".format(x, y))),
242 | "isOpen": False,
243 | "lastTouched": None
244 |
245 | }
246 |
247 | self.keys = np.array(self.xy_dictionary.keys(), dtype=[('x', int), ('y', int)])
248 |
249 | for tile in self.tile_list:
250 |
251 | if len(os.path.split(tile)) > 1:
252 | tile = os.path.join(self.dir, tile)
253 |
254 | if self.copy:
255 | shutil.copyfile(tile, os.path.join(self.tempDirectory, os.path.split(tile)[1]))
256 | tile = os.path.join(self.tempDirectory, os.path.split(tile)[1])
257 | h = parseHeader(tile)
258 |
259 | if h["filesig"] == "LASF":
260 | if h["guid2"] == 1:
261 | tile = self.laz2las(tile)
262 |
263 | if self.take_sample:
264 | sample = self.generateSample(self.take_sample)
265 | if self.verbose:
266 | print "random sample produced: {}".format(len(sample))
267 | else:
268 | sample = range(h['numptrecords'])
269 |
270 | grid = self.grid[(self.grid['x'] >= h['xmin']) & (self.grid['x'] <= h['xmax']) &
271 | (self.grid['y'] >= h['ymin']) & (self.grid['y'] <= h['ymax'])]
272 |
273 | with open(os.path.join(self.dir, tile), 'rb') as fh:
274 |
275 | fh.seek(h["offset"])
276 |
277 | for i in sample: # loops through all points
278 |
279 | if self.Global_totPoints%self.counter == 0 and self.verbose:
280 | print "{:.0f}% | {} of {} new tiles created | {} | {}".format((self.Global_totPoints / float(self.globalHeader['numptrecords'])) * 100,
281 | len(os.listdir(self.tempDirectory)), len(self.grid),
282 | os.path.split(tile)[1],
283 | datetime.datetime.now())
284 | self.Global_totPoints += 1
285 |
286 | try:
287 | # test x point first ...
288 | fh.seek(h['offset'] + (i * h['pointreclen']))
289 | x = struct.unpack('=' + 'L', fh.read(4))[0]
290 | x = (x * h['xscale'] ) + h['xoffset']
291 | if not self.xmin < x < self.xmax:
292 | continue
293 |
294 | # test y point next ...
295 | fh.seek(h['offset'] + (i * h['pointreclen'] + 4))
296 | y = struct.unpack('=' + 'L', fh.read(4))[0]
297 | y = (y * h['yscale']) + h['yoffset']
298 | if not self.ymin < y < self.ymax:
299 | continue
300 |
301 | # extract round plot
302 | inGrid = False
303 | idx = [(grid['x'] > h["xmin"]) | (grid['x'] <= h["xmax"]) |
304 | (grid['y'] > h["ymin"]) | (grid['y'] <= h["ymax"])]
305 | for row in grid[idx]:
306 | if ((row[0] - (self.resolution / 2.)) < x < (row[0] + (self.resolution / 2.)) and
307 | (row[1] - (self.resolution / 2.)) < y < (row[1] + (self.resolution / 2.))):
308 | inGrid = True
309 | break
310 |
311 | if not inGrid: continue
312 |
313 | fh.seek(h["offset"] + (i * h['pointreclen'])) # searches to beginning of point
314 |
315 | point_dictionary = extract_return(self, fh)
316 | X, Y = point_dictionary['x'], point_dictionary['y']
317 | KEYS = self.keys[(X >= self.keys['x'] - (self.resolution / 2.)) & (X < self.keys['x'] + (self.resolution / 2.)) &
318 | (Y >= self.keys['y'] - (self.resolution / 2.)) & (Y < self.keys['y'] + (self.resolution / 2.))]
319 |
320 | for key in KEYS:
321 | self.xy_dictionary[tuple(key)] = self.writePoint(self.xy_dictionary[tuple(key)], point_dictionary, h)
322 |
323 | except:
324 | self.badPoints += 1
325 |
326 | # deletes .las tiles that were converted from .laz or copies
327 | if self.tempDirectory in tile: os.unlink(tile)
328 |
329 | if self.resolution >= 1: h['guid1'] = self.resolution
330 |
331 | return self
332 |
333 | def writePoint(self, tile, d, h):
334 |
335 | pointString = ''
336 | for i in self.ptFrmt:
337 | if i[0] == 'return_grp':
338 | byte = ((d['scan_edge'] & 1) << 7) | ((d['scan_dir'] & 1) << 6) | \
339 | ((d['rtn_tot'] & 7) << 3) | (d['rtn_num'] & 7)
340 | elif i[0] == 'x':
341 | byte = (d['x'] - h['xoffset']) / h['xscale']
342 | elif i[0] == 'y':
343 | byte = (d['y'] - h['yoffset']) / h['yscale']
344 | elif i[0] == 'z':
345 | byte = (d['z'] - h['zoffset']) / h['zscale']
346 | else:
347 | byte = d[i[0]]
348 | pointString += struct.pack('=' + i[2], byte)
349 |
350 | with open(tile["tempFile"], "ab") as o:
351 | o.write(pointString)
352 |
353 | # updates header information
354 | if d['z'] > tile["zmax"]:
355 | tile["zmax"] = d['z']
356 | if d['z'] < tile["zmin"]:
357 | tile["zmin"] = d['z']
358 | if 0 < d["rtn_num"] < 6:
359 | tile["num_rtn"][d["rtn_num"]] += 1
360 | tile["i"] += 1
361 |
362 | return tile
363 |
364 | def asArray(self):
365 |
366 | """
367 | Returns all points in a single array or as a list of
368 | arrays if there is more than one tile. If there is
369 | more than one tile then it may be preferable to use
370 | asDic command.
371 | """
372 |
373 | arr = []
374 | for i, key in enumerate(self.xy_dictionary.keys()):
375 | tile = self.xy_dictionary[key]
376 | if tile["i"] == 0:
377 | del tile
378 | continue
379 | a = np.zeros(tile["i"], dtype = self.dt)
380 | with open(tile["tempFile"], "rb") as fh:
381 | for j, line in enumerate(range(tile["i"])):
382 | fh.seek(j * self.globalHeader['pointreclen'])
383 | d = extract_return(self, fh)
384 | for field in d:
385 | a[j][field] = d[field]
386 | arr.append(a)
387 | if self.keepTemp is False: os.unlink(tile["tempFile"])
388 |
389 | if self.keepTemp is False: shutil.rmtree(self.tempDirectory)
390 |
391 | if len(arr) == 1: # if list arr has only one array...
392 | arr = arr[0] # ...return only the array
393 |
394 | return arr
395 |
    def exportLAS(self, out=False):

        """
        Writes tile(s) to .las format.

        Parameters
        ----------
        out : str or False
            Either a .las path (used as a stem when more than one tile
            exists), or an existing directory. If False, each tile's
            pre-computed outFile path under self.savePath is used.

        Returns
        -------
        out : self
        """

        # report progress roughly every 10% of tiles
        if len(self.xy_dictionary) > 10:
            nTileCounter = len(self.xy_dictionary) // 10
        else: nTileCounter = len(self.xy_dictionary)

        # drop tiles that received no points
        self.xy_dictionary = {key:values for key, values in self.xy_dictionary.items() if values['i'] > 0}

        if len(self.xy_dictionary) > 0:
            for i, key in enumerate(self.xy_dictionary.keys()):

                if i%nTileCounter == 0 and i > 0 and self.verbose:
                    print "{:.0f}% | {} of {} tiles exported | {}".format( np.float(i) / len(self.xy_dictionary) * 100, i, len(self.xy_dictionary), datetime.datetime.now())

                # copy this tile's extents and counts into the shared
                # header template before writing
                tile = self.xy_dictionary[key]
                self.h['gensoftware'] = 'CRC207 LiDAR analysis software '
                self.h['sysid'] = 'CRC207 LiDAR analysis software '
                self.h['xmin'] = tile['xmin']
                self.h['xmax'] = tile['xmax']
                self.h['ymin'] = tile['ymin']
                self.h['ymax'] = tile['ymax']
                self.h['zmin'] = tile['zmin']
                self.h['zmax'] = tile['zmax']
                self.h['numptbyreturn'] = tuple([tile["num_rtn"][i] for i in range(1, 6)]) # sorting out the rtn_num tuple
                self.h['numptrecords'] = tile["i"]
                self.h['guid2'] = 0
                # keep a single VLR; point data then starts at offset 313
                if self.h['numvlrecords'] > 0:
                    self.h['offset'] = 313
                    self.h['numvlrecords'] = 1
                if self.resolution != None:
                    self.h['guid1'] = self.resolution

                # resolve the output path for this tile
                if out:
                    if out.endswith('.las') and len(self.xy_dictionary) > 1:
                        outFile = out[:-4] + '.' + str(self.outTileCount) + '.las'
                    elif out.endswith('.las') and os.path.isdir(os.path.split(out)[0]):
                        outFile = out
                    elif os.path.isdir(out):
                        outFile = os.path.join(out, os.path.split(tile["outFile"])[1])
                    else:
                        raise IOError('out path not recognised')
                else:
                    if not os.path.isdir(self.savePath):
                        os.makedirs(self.savePath)
                    outFile = tile["outFile"]
                self.savePath = os.path.split(outFile)[0]

                with open(outFile, "wb") as outOpen:
                    # write the public header field by field
                    for j in headerstruct():
                        if j[2] == 'c':
                            outOpen.write(self.h[j[0]])
                        elif j[3] > 1:
                            outOpen.write(struct.pack('=' + str(j[3]) + j[2], *self.h[j[0]]))
                        else:
                            outOpen.write(struct.pack('=' + j[2] , self.h[j[0]]))

                    ## write VLR
                    if self.h['numvlrecords'] > 0:
                        # keeps only the first VLR e.g. the projection data
                        outOpen.write(self.vlr[:86])

                    ## write points (raw records copied from the temp file)
                    outOpen.seek(self.h['offset'])
                    with open(tile["tempFile"], "rb") as o:
                        points = o.read()
                        outOpen.write(points)

                tile["isOpen"] = "written_to"

                if self.keepTemp is False: os.unlink(tile["tempFile"])

            print "100% | {} of {} tiles exported | {}".format(len(self.xy_dictionary),
                                                               len(self.xy_dictionary),
                                                               datetime.datetime.now())

            if len(self.xy_dictionary) == 1:
                print ".las file written to {}".format(outFile)
            else:
                print ".las file(s) written to {}".format(os.path.split(outFile)[0])
        else:
            print "! no tiles to export !"

        if not self.keepTemp: shutil.rmtree(self.tempDirectory)

        return self
485 |
486 | def removeTemp(self):
487 |
488 | if os.path.isdir(self.tempDirectory):
489 | for file in os.listdir(self.tempDirectory):
490 | os.unlink(os.path.join(self.tempDirectory, file))
491 |
492 | shutil.rmtree(self.tempDirectory)
493 | if self.verbose: print "{} has been deleted".format(self.tempDirectory)
494 |
def parseHeader(filename):

    """
    Read the public header of *filename* and return it as a dictionary
    keyed by field name (plus 'infile' holding the path). Compressed
    .laz files are detected via the point-format flag and their header
    values normalised (guid2 is set to 1 to mark them).
    """

    header = {'infile': filename}

    with open(filename,'rb') as fh:
        # legacy ZNR containers are no longer supported
        if fh.read(4) == "ZNRF":
            raise Exception('ZNR is deprecated - use an older version of lasIO')
        else: headerStructType = headerstruct()

        fh.seek(0)

        # decode the header field by field
        for field in headerStructType:
            name, nbytes, code, count = field[0], field[1], field[2], field[3]
            if code == 'c':
                header[name] = fh.read(nbytes)
            elif count > 1:
                header[name] = struct.unpack('=' + str(count) + code, fh.read(nbytes))
            else:
                header[name] = struct.unpack('=' + code, fh.read(nbytes))[0]

        if headerStructType == headerstruct():
            # point formats > 127 flag laszip compression; adjust the
            # header so downstream code sees uncompressed-style values
            if header["pointformat"] > 127: # it is a .laz file
                header["pointformat"] -= 128
                header["numvlrecords"] -= 1
                header["offset"] -= 100
                header["guid2"] = 1

    return header
527 |
528 |
def extract_return(self, fh):

    """
    Decode one point record from *fh* (positioned at the record start)
    into a dictionary, applying the header scale/offset to x, y and z
    and unpacking the packed return-group byte into its bit fields.
    Raises NameError on implausibly large z values as a corruption
    guard.
    """

    point = {}

    for ent in self.ptFrmt:
        name, code = ent[0], ent[2]
        val = struct.unpack('=' + code, fh.read(ent[1]))[0]

        if name in ('x', 'y', 'z'):
            # convert the stored integer to real-world coordinates
            val = (val * self.h[name + 'scale']) + self.h[name + 'offset']
        elif name == 'return_grp':
            # split the packed byte into its component fields; the
            # composite itself is not stored
            point['rtn_num'] = val & 7
            point['rtn_tot'] = (val >> 3) & 7
            point['scan_dir'] = (val >> 6) & 1
            point['scan_edge'] = val >> 7
            continue

        point[name] = val

    # corruption guard
    if point["z"] > 1000000:
        raise NameError("z very high: {}".format(point["z"]))

    return point
559 |
560 |
def getPtFrmt(globalHeader, verbose=False):

    """
    Select the point-record structure and numpy dtype matching the
    supplied global header.

    Parameters
    ----------
    globalHeader : dict
        Parsed .las header (see parseHeader); 'gensoftware' and
        'pointformat' are consulted.
    verbose : bool
        Unused; kept for interface compatibility.

    Returns
    -------
    (ptFrmt, dt) : point structure tuple and numpy dtype.

    Raises
    ------
    ValueError
        For point formats with no matching structure (previously this
        fell through and raised an opaque UnboundLocalError).
    """

    # txt2las-generated files use their own layout regardless of the
    # declared point format
    if "txt2las" in globalHeader["gensoftware"]:
        ptFrmt, dt = point_fmtLTstruct()
    elif globalHeader["pointformat"] == 0:
        ptFrmt, dt = point_fmt0struct()
    elif globalHeader["pointformat"] == 1:
        ptFrmt, dt = point_fmt1struct()
    elif globalHeader["pointformat"] == 3:
        ptFrmt, dt = point_fmt3struct()
    else:
        raise ValueError('unsupported point format: {}'.format(globalHeader["pointformat"]))

    return ptFrmt, dt
574 |
575 |
def getVLR(headerSize, las):

    """
    Return the first variable length record (86 bytes) that follows the
    public header of the .las file at path *las*.

    Parameters
    ----------
    headerSize : int
        Size of the public header block in bytes (offset of the VLR).
    las : str
        Path to the .las file.

    Returns
    -------
    vlr : the 86 bytes read straight from the file.
    """

    # open in binary mode so the read is byte-exact on all platforms
    # (the previous text-mode open could corrupt or fail on binary
    # data), and use a context manager so the handle is always closed
    with open(las, 'rb') as fh:
        fh.seek(headerSize)
        return fh.read(86)
582 | #
583 | #
584 | # if __name__ == '__main__':
585 | #
586 | # import shutil
587 | # path = '/Users/phil/ALS/WC/spl/tile_20/ForestLAS_tutorial/LAS/large_tile'
588 | # shutil.rmtree(os.path.join(path, 'WC45_SUB_20m_TILES'))
589 | # os.makedirs(os.path.join(path, 'WC45_SUB_20m_TILES'))
590 | # start = datetime.datetime.now()
591 | # # lasIO(os.path.join(path, 'WC45_SUB_5m_SUBSET'), verbose=True, number_of_processes=4).all().exportLAS()
592 | # lasIO(os.path.join(path, 'WC45_SUB.las'), verbose=True, number_of_processes=1).grid(os.path.join(path, 'coords_10.csv'), resolution=10).exportLAS()
593 | # # lasIO(os.path.join(path, 'WC45_SUB_5m_SUBSET'), verbose=True, number_of_processes=8).tiling(1000).exportLAS(os.path.join(path, 'WC45_SUB_20m_TILES'))
594 | # print datetime.datetime.now() - start
--------------------------------------------------------------------------------
/forestlas/lasStructure.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
def headerstruct():
    """LAS 1.x public header layout as (name, bytes, struct code,
    count) records, in file order."""
    fields = [('filesig', 4, 'c', 4),
              ('filesourceid', 2, 'H', 1),
              ('reserved', 2, 'H', 1),
              ('guid1', 4, 'L', 1),
              ('guid2', 2, 'H', 1),
              ('guid3', 2, 'H', 1),
              ('guid4', 8, 'B', 8),
              ('vermajor', 1, 'B', 1),
              ('verminor', 1, 'B', 1),
              ('sysid', 32, 'c', 32),
              ('gensoftware', 32, 'c', 32),
              ('fileday', 2, 'H', 1),
              ('fileyear', 2, 'H', 1),
              ('headersize', 2, 'H', 1),
              ('offset', 4, 'L', 1),
              ('numvlrecords', 4, 'L', 1),
              ('pointformat', 1, 'B', 1),
              ('pointreclen', 2, 'H', 1),
              ('numptrecords', 4, 'L', 1),
              ('numptbyreturn', 20, 'L', 5),
              ('xscale', 8, 'd', 1),
              ('yscale', 8, 'd', 1),
              ('zscale', 8, 'd', 1),
              ('xoffset', 8, 'd', 1),
              ('yoffset', 8, 'd', 1),
              ('zoffset', 8, 'd', 1),
              ('xmax', 8, 'd', 1),
              ('xmin', 8, 'd', 1),
              ('ymax', 8, 'd', 1),
              ('ymin', 8, 'd', 1),
              ('zmax', 8, 'd', 1),
              ('zmin', 8, 'd', 1)]
    return tuple(fields)
36 |
def point_fmt0struct():
    """
    Point data record format 0: returns (ptFrmt, dtype) where ptFrmt
    gives the on-disk layout as (name, bytes, struct code, count)
    records and dtype describes the decoded in-memory array.

    np.float / np.int (removed in NumPy 1.24) are replaced with the
    builtin float / int they always aliased — behaviour is unchanged.
    """
    return ((('x', 4, 'l', 1),
             ('y', 4, 'l', 1),
             ('z', 4, 'l', 1),
             ('i', 2, 'H', 1),
             ('return_grp', 1, 'B', 1),
             ('class', 1, 'B', 1),
             ('scan_ang', 1, 'b', 1),
             ('user_data', 1, 'B', 1),
             ('point_src_id', 2, 'H', 1)),

            np.dtype([('x', float),
                      ('y', float),
                      ('z', float),
                      ('i', int),
                      ('rtn_num', int),
                      ('rtn_tot', int),
                      ('scan_dir', int),
                      ('scan_edge', int),
                      ('class', int),
                      ('scan_ang', int),
                      ('user_data', int),
                      ('point_src_id', int)]))
60 |
def point_fmt1struct():
    """
    Point data record format 1 (format 0 plus GPS time): returns
    (ptFrmt, dtype) — on-disk layout and decoded-array dtype.

    np.float / np.int (removed in NumPy 1.24) are replaced with the
    builtin float / int they always aliased — behaviour is unchanged.
    """
    return ((('x', 4, 'l', 1),
             ('y', 4, 'l', 1),
             ('z', 4, 'l', 1),
             ('i', 2, 'H', 1),
             ('return_grp', 1, 'B', 1),
             ('class', 1, 'B', 1),
             ('scan_ang', 1, 'b', 1),
             ('user_data', 1, 'B', 1),
             ('point_src_id', 2, 'H', 1),
             ('gps_time', 8, 'd', 1)),

            np.dtype([('x', float),
                      ('y', float),
                      ('z', float),
                      ('i', int),
                      ('rtn_num', int),
                      ('rtn_tot', int),
                      ('scan_dir', int),
                      ('scan_edge', int),
                      ('class', int),
                      ('scan_ang', int),
                      ('user_data', int),
                      ('point_src_id', int),
                      ('gps_time', float)]))
86 |
def point_fmt3struct():
    """
    Point data record format 3 (format 1 plus RGB): returns
    (ptFrmt, dtype) — on-disk layout and decoded-array dtype.

    np.float / np.int (removed in NumPy 1.24) are replaced with the
    builtin float / int they always aliased — behaviour is unchanged.
    """
    return ((('x', 4, 'l', 1),
             ('y', 4, 'l', 1),
             ('z', 4, 'l', 1),
             ('i', 2, 'H', 1),
             ('return_grp', 1, 'B', 1),
             ('class', 1, 'B', 1),
             ('scan_ang', 1, 'b', 1),
             ('user_data', 1, 'B', 1),
             ('point_src_id', 2, 'H', 1),
             ('gps_time', 8, 'd', 1),
             ('r', 2, 'H', 1),
             ('g', 2, 'H', 1),
             ('b', 2, 'H', 1)),

            np.dtype([('x', float),
                      ('y', float),
                      ('z', float),
                      ('i', int),
                      ('rtn_num', int),
                      ('rtn_tot', int),
                      ('scan_dir', int),
                      ('scan_edge', int),
                      ('class', int),
                      ('scan_ang', int),
                      ('user_data', int),
                      ('point_src_id', int),
                      ('gps_time', float),
                      ('r', int),
                      ('g', int),
                      ('b', int)]))
118 |
def znrHeaderStruct():
    """Legacy ZNR header layout as (name, bytes, struct code, count)
    records, in file order."""
    fields = [('filesig', 4, 'c', 4),
              ('filesourceid', 2, 'H', 1),
              ('reserved', 2, 'H', 1),
              ('resolution', 4, 'L', 1),
              ('headersize', 2, 'H', 1),
              ('offset', 4, 'L', 1),
              ('numvlrecords', 4, 'L', 1),
              ('pointreclen', 2, 'H', 1),
              ('numptrecords', 4, 'L', 1),
              ('zscale', 8, 'd', 1),
              ('zoffset', 8, 'd', 1),
              ('xmax', 8, 'd', 1),
              ('xmin', 8, 'd', 1),
              ('ymax', 8, 'd', 1),
              ('ymin', 8, 'd', 1),
              ('zmax', 8, 'd', 1),
              ('zmin', 8, 'd', 1)]
    return tuple(fields)
137 |
def znrStruct():
    """
    ZNR point record layout and matching dtype (height plus total
    returns).

    np.float / np.int (removed in NumPy 1.24) are replaced with the
    builtin float / int they always aliased — behaviour is unchanged.
    """
    return ((('z', 4, 'l', 1, float),
             ('rtn_tot', 1, 'B', 1, int)),

            np.dtype([('z', float),
                      ('rtn_tot', int)]))
144 |
def point_fmtLTstruct():
    """
    Point record layout for txt2las-generated files (same shape as
    format 1): returns (ptFrmt, dtype).

    np.float / np.int (removed in NumPy 1.24) are replaced with the
    builtin float / int they always aliased — behaviour is unchanged.
    """
    return ((('x', 4, 'l', 1),
             ('y', 4, 'l', 1),
             ('z', 4, 'l', 1),
             ('i', 2, 'H', 1),
             ('return_grp', 1, 'B', 1),
             ('class', 1, 'B', 1),
             ('scan_ang', 1, 'b', 1),
             ('user_data', 1, 'B', 1),
             ('point_src_id', 2, 'H', 1),
             ('gps_time', 8, 'd', 1)),

            np.dtype([('x', float),
                      ('y', float),
                      ('z', float),
                      ('i', int),
                      ('rtn_num', int),
                      ('rtn_tot', int),
                      ('scan_dir', int),
                      ('scan_edge', int),
                      ('class', int),
                      ('scan_ang', int),
                      ('user_data', int),
                      ('point_src_id', int),
                      ('gps_time', float)]))
--------------------------------------------------------------------------------
/forestlas/lasStructure.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/philwilkes/forestlas/5d0439adab918b51b00881186f1b1d670216b4cf/forestlas/lasStructure.pyc
--------------------------------------------------------------------------------
/forestlas/recover_temp_files_.py:
--------------------------------------------------------------------------------
1 | __author__ = 'phil'
2 | import os
3 | import struct
4 | import numpy as np
5 | from forestlas import lasIO_, lasStructure
6 | import global_header
7 |
class recover_temp_files:

    """
    Rebuild .las files from the .temp point files left behind in a
    processing temp directory (e.g. after a crash). All work happens
    in __init__: every file in *temp_directory* is decoded with
    point format 1 and re-exported as '<name>.recovered.las' in
    *out_dir* (default: the temp directory itself).
    """

    def __init__(self, temp_directory, out_dir=False, ptFrmt=1, ):

        self.temp_directory = temp_directory
        # default the output directory to the temp directory
        if not out_dir:
            out_dir = self.temp_directory
        else:
            out_dir = out_dir

        # only ptFrmt == 1 (LAS point format 1) is handled here
        if ptFrmt == 1:
            ptFrmt = lasStructure.point_fmt1struct()[0]
            dt = lasStructure.point_fmt1struct()[1]

        for las_temp in os.listdir(self.temp_directory):

            # fresh template header for each output file
            h = global_header.h()
            # .temp files hold raw point records, so file size divided
            # by the record length gives the point count
            num_records = os.path.getsize(os.path.join(self.temp_directory, las_temp)) / h['pointreclen']
            rtn_num = [0, 0, 0, 0, 0]
            a = np.zeros(num_records, dtype=dt)

            with open(os.path.join(self.temp_directory, las_temp), 'rb') as fh:
                for j, line in enumerate(range(num_records)):
                    fh.seek(j * h['pointreclen'])
                    d = extract_return(ptFrmt, fh, h)
                    for field in d:
                        # print field
                        a[j][field] = d[field]

                    # tally returns 1-5 for the output header
                    if 0 < d['rtn_num'] < 6:
                        rtn_num[d['rtn_num']-1] += 1

                    # break

            h['numptbyreturn'] = tuple(rtn_num)
            # '[:-6]' strips the '.temp' extension from the temp name
            np2LAS(a, h, ptFrmt, outFile=os.path.join(out_dir, '{}.recovered.las'.format(las_temp[:-6])))
44 |
class recover_temp_file:

    """
    Recover a single orphaned .temp point file as
    '<name>.recovered.las'. All work happens in __init__; point
    format 1 is assumed.
    """

    def __init__(self, las_temp, out_dir=False, verbose=False):

        # self.temp_directory = temp_directory
        # default the output directory to the temp file's own location
        if not out_dir:
            out_dir, tile = os.path.split(las_temp)
        else:
            tile = os.path.split(las_temp)[1]

        ptFrmt = lasStructure.point_fmt1struct()[0]
        dt = lasStructure.point_fmt1struct()[1]
        h = global_header.h()
        # .temp files hold raw point records, so file size divided by
        # the record length gives the point count
        num_records = os.path.getsize(las_temp) / h['pointreclen']
        rtn_num = [0, 0, 0, 0, 0]
        a = np.zeros(num_records, dtype=dt)

        with open(las_temp, 'rb') as fh:
            for j, line in enumerate(range(num_records)):
                fh.seek(j * h['pointreclen'])
                d = extract_return(ptFrmt, fh, h)
                for field in d:
                    # print field
                    a[j][field] = d[field]

                # tally returns 1-5 for the output header
                if 0 < d['rtn_num'] < 6:
                    rtn_num[d['rtn_num']-1] += 1

                # break

        h['numptbyreturn'] = tuple(rtn_num)
        np2LAS(a, h, ptFrmt,
               outFile=os.path.join(out_dir, '{}.recovered.las'.format(tile[:-5])),
               verbose=verbose)
79 |
def np2LAS(arr, h, ptFrmt, outFile=False, verbose=False):

    """
    Export a structured numpy point array in .las format.

    Parameters
    ----------
    arr : numpy structured array
        Points with at least x, y, z and the return fields.
    h : dict
        Global header dictionary (see global_header.h()); its extents
        and counts are overwritten from *arr* before writing.
    ptFrmt : tuple
        Point record structure (see lasStructure.point_fmt*struct).
    outFile : str
        Path of the .las file to write.
    verbose : bool
        Print the output path when done.
    """

    h['gensoftware'] = 'CRC207 LiDAR analysis software '
    h['sysid'] = 'CRC207 LiDAR analysis software '
    h['xmin'] = arr['x'].min()
    h['xmax'] = arr['x'].max()
    h['ymin'] = arr['y'].min()
    h['ymax'] = arr['y'].max()
    h['zmin'] = arr['z'].min()
    h['zmax'] = arr['z'].max()
    ### sorting out the rtn_num tuple
    # count points per return number 1-5; the lower bound matters:
    # previously a return number of 0 produced index -1 and was
    # miscounted as return 5
    rtn = np.zeros(5).astype(int)
    for row in arr:
        rtn_num = row['rtn_num'] - 1
        if 0 <= rtn_num < 5:
            rtn[rtn_num] += 1
    h['numptbyreturn'] = tuple(rtn)
    h['numptrecords'] = len(arr)
    h['guid2'] = 0

    with open(outFile, "wb") as out:
        # write the public header field by field
        for j in lasStructure.headerstruct():
            if j[2] == 'c':
                out.write(h[j[0]])
            elif j[3] > 1:
                out.write(struct.pack('=' + str(j[3]) + j[2], *h[j[0]]))
            else:
                out.write(struct.pack('=' + j[2] , h[j[0]]))

        ## write points
        out.seek(h['offset'])
        for d in arr:
            for i in ptFrmt:
                if i[0] == 'return_grp':
                    # pack the return bit fields back into one byte
                    byte = ((d['scan_edge'] & 1) << 7) | ((d['scan_dir'] & 1) << 6) | ((d['rtn_tot'] & 7) << 3) | (d['rtn_num'] & 7)
                elif i[0] == 'x':
                    byte = (d['x'] - h['xoffset']) / h['xscale']
                elif i[0] == 'y':
                    byte = (d['y'] - h['yoffset']) / h['yscale']
                elif i[0] == 'z':
                    byte = (d['z'] - h['zoffset']) / h['zscale']
                else:
                    byte = d[i[0]]
                out.write(struct.pack('=' + i[2], byte))

    # message typo fixed ("recoered" -> "recovered")
    if verbose: print(".las file recovered to {}".format(outFile))
130 |
def extract_return(ptFrmt, fh, h):

    """
    Decode one point record from *fh* (positioned at the record start)
    into a dictionary, applying the scale/offset from header *h* to x,
    y and z and unpacking the packed return-group byte into its bit
    fields. Raises NameError on implausibly large z values as a
    corruption guard.
    """

    point = {}

    for ent in ptFrmt:
        name, code = ent[0], ent[2]
        val = struct.unpack('=' + code, fh.read(ent[1]))[0]

        if name in ('x', 'y', 'z'):
            # convert the stored integer to real-world coordinates
            val = (val * h[name + 'scale']) + h[name + 'offset']
        elif name == 'return_grp':
            # split the packed byte into its component fields; the
            # composite itself is not stored
            point['rtn_num'] = val & 7
            point['rtn_tot'] = (val >> 3) & 7
            point['scan_dir'] = (val >> 6) & 1
            point['scan_edge'] = val >> 7
            continue

        point[name] = val

    # corruption guard
    if point["z"] > 1000000:
        raise NameError("z very high: {}".format(point["z"]))

    return point
159 |
if __name__ == '__main__':

    import multiprocessing
    import glob

    # orphaned temp files from a crashed processing run
    F = glob.glob('/var/folders/3g/x_8fg5zj7lvghkk1q3xz9d380000gp/T/lidar.processing.1402121.tmp/*.temp')
    kw = {'verbose':True, 'out_dir':'/User/phil/ALS/SCRATCH'}

    # recover each file in turn; recover_temp_file does all its work in
    # __init__.  (The previous loop indexed into the path string with
    # f[0] / f[1], and the pool mapped over an empty tuple — a no-op.)
    for f in F:
        recover_temp_file(f, **kw)
--------------------------------------------------------------------------------
/forestlas/thinCloud.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import os
3 | from forestlas.lasIO import *
4 |
5 | # Wilkes et al. 2015. Understanding the effects of ALS pulse density
6 | # for metric retrieval across diverse forest types. PE&RS
7 |
class thinCloud:

    """
    Thin a point cloud to a target pulse density (pulses per square
    metre), after Wilkes et al. 2015 (PE&RS): first returns are picked
    on a regular grid at the target spacing and their subsequent
    returns recovered from a search window around each.
    """

    def __init__(self, las, ppsm):

        # accept either a .las path or an already-loaded structured array
        if isinstance(las, str):
            las = lasIO(las).all().asArray()

        self.ppsm = ppsm  # target pulses per square metre
        self.las = las
        # grid spacing that yields the requested pulse density
        self.xy = 1. / np.sqrt(ppsm)

        # rounded data extent used to anchor the sampling grid
        self.xmin = np.round(self.las['x'].min())
        self.xmax = np.round(self.las['x'].max())
        self.ymin = np.round(self.las['y'].min())
        self.ymax = np.round(self.las['y'].max())


    def thinned(self, Ex=0, Ey=0, fsw=1):

        """
        fsw = first return search window, it is best to set this to the number of
        iterations but sometimes this is to small.

        Ex, Ey shift the sampling grid by a constant offset (map units).

        RETURNS self.thinnedCloud POINT CLOUD
        """

        # half a grid cell, shrunk by the supplied divisor
        fsw = (self.xy / 2 ) / fsw

        self.thinnedCloud = np.array([], dtype=self.las.dtype)
        # split the cloud into first returns and everything else
        first = self.las[self.las['rtn_num'] == 1]
        other = self.las[self.las['rtn_num'] != 1]
        # search radius for later returns, scaled by the height range of
        # the cloud at a 5 degree angle
        self.sw = (self.las['z'].max() - self.las['z'].min()) * np.tan(np.deg2rad(5))

        if self.sw == 0: self.sw = 1

        # visit each node of the (optionally offset) sampling grid
        for ii, i in enumerate(np.linspace(self.xmin, self.xmax,
                                           (self.xmax - self.xmin) / self.xy + 1) + Ex):
            for jj, j in enumerate(np.linspace(self.ymin, self.ymax,
                                               (self.ymax - self.ymin) / self.xy + 1) + Ey):

                # candidate first returns near the node, and candidate
                # later returns within the wider search window
                f = first[(first['x'] > (i - fsw)) &
                          (first['x'] <= (i + fsw)) &
                          (first['y'] > (j - fsw)) &
                          (first['y'] <= (j + fsw))]
                o = other[(other['x'] > (i - self.sw)) & (other['x'] <= (i + self.sw)) &
                          (other['y'] > (j - self.sw)) & (other['y'] <= (j + self.sw))]

                if len(f) > 0:
                    # keep the first return closest to the grid node
                    fx = f['x'] - i
                    fy = f['y'] - j
                    distance_m = np.hypot(fx, fy)
                    idx = [distance_m == distance_m.min()]
                    f = f[idx][0]
                    rtn_tot = f["rtn_tot"]
                    self.thinnedCloud = np.hstack([self.thinnedCloud, f])
                    if rtn_tot > 1:
                        # recover each expected later return: take the
                        # nearest candidate with the matching number
                        for rtn_num in range(2, rtn_tot + 1):
                            rn = o[o["rtn_num"] == rtn_num]
                            if len(rn) > 0:
                                rx = rn['x'] - f['x']
                                ry = rn['y'] - f['y']
                                distance_m = np.hypot(rx, ry)
                                idx = [distance_m == distance_m.min()]
                                rn = rn[idx][0]
                                self.thinnedCloud = np.hstack([self.thinnedCloud, rn])

        return self.thinnedCloud
75 |
--------------------------------------------------------------------------------
/forestlas/woodyAttribute.py:
--------------------------------------------------------------------------------
1 | import sys, os, glob, multiprocessing, datetime, shutil, tempfile
2 | import numpy as np
3 | import matplotlib.pyplot as plt
4 | from osgeo import osr, gdal
5 |
6 | from forestlas.lasIO import *
7 | from forestlas.canopyComplexity import *
8 |
9 | class woodyAttribute:
10 |
11 | """
12 | Input is a directory containing tiled .las files and generates
13 | a raster of desired metrics, the defult is canopy height
14 |
15 | TO DO:
16 | 1: Get projection information from .las
17 | """
18 |
19 | def __init__(self, verbose=False, maxProcess=4):
20 | self.verbose = verbose
21 | self.maxProcess = maxProcess
22 |
23 | def canopyHeight(self, directory, resolution=None, height=95):
24 |
25 | self.metric = 'height'
26 | self.height = height
27 |
28 | self.processSetup(directory, resolution)
29 | return self
30 |
31 | def canopyComplexity(self, directory, alpha=.3, N=20, resolution=None):
32 |
33 | self.metric = 'complexity'
34 | self.alpha = alpha
35 | self.N = N
36 |
37 | self.processSetup(directory, resolution)
38 | return self
39 |
40 | def Pgap(self, directory, N=20, resolution=None, ):
41 |
42 | self.metric = 'Pgap'
43 | self.N = N
44 |
45 | self.processSetup(directory, resolution)
46 | return self
47 |
48 | def fhd(self, directory, resolution=None, ):
49 |
50 | self.metric = 'fhd'
51 |
52 | self.processSetup(directory, resolution)
53 | return self
54 |
55 | def Cv(self, directory, resolution=None, ):
56 |
57 | self.metric = 'Cv'
58 |
59 | self.processSetup(directory, resolution)
60 | return self
61 |
62 | def fracCover(self, directory, resolution=None, cellSize=1, threshold=.3):
63 |
64 | self.metric = 'FC'
65 | self.cellSize = cellSize
66 | self.threshold = threshold
67 |
68 | self.processSetup(directory, resolution)
69 | return self
70 |
71 | def processSetup(self, directory, resolution):
72 |
73 | self.dir = directory
74 | self.resolution = resolution
75 |
76 | self.LASlist = sorted(glob.glob(os.path.join(self.dir, '*.znr')))
77 | if len(self.LASlist) == 0:
78 | self.LASlist = sorted(glob.glob(os.path.join(self.dir, '*.las')))
79 | self.counter = len(self.LASlist) // 100
80 |
81 | if len(self.LASlist) == 0: raise Exception('No .las files in {}'.format(self.dir))
82 |
83 | self.createGrid()
84 | self.mp(self.LASlist)
85 | self.v, self.X, self.Y = self.populateGrid(self.plot_dictionary)
86 | return self
87 |
    def calculateComplexity(self, las):

        """
        Canopy complexity for one tile: the mean layer count over
        self.N simulated clouds, stored in self.plot_dictionary keyed
        by the tile centre. Runs inside a worker process.
        """

        ### Generate plot profile ###

        # calculate plot centre
        x, y = self.tileCentre(parseHeader(las)) # calculate plot centre
        # per-worker temp directory, seeded from the worker identity so
        # parallel processes do not collide
        pid = multiprocessing.current_process()._identity[0]
        tempDirectoryName = 'lidar.processing.' + str(np.random.mtrand.RandomState(pid).randint(0, 9999999)) + '.tmp'
        tempDirectory = os.path.join(tempfile.gettempdir(), tempDirectoryName)
        model = CanopyComplexity(mp=tempDirectory).fromLAS(las).CHP(method='model')

        # tiles with no canopy above 2 m score zero
        if model.z.max() < 2:
            self.plot_dictionary[x, y] = 0

        else:
            results = np.zeros(self.N)

            # average the layer count over N simulated point clouds
            for i in range(self.N):
                z, zw = model.simulateCloud
                chp = CanopyComplexity().fromSample(z, zw).CHP(alpha=self.alpha)
                results[i] = chp.layerCount

            if len(results) > 0:
                self.plot_dictionary[x, y] = results.mean()
            else: self.plot_dictionary[x, y] = 0
113 |
    def calculatePgap(self, las):

        """
        Minimum gap fraction (Pgap) for one tile, stored in
        self.plot_dictionary keyed by the tile centre. Runs inside a
        worker process.
        """

        ### Generate plot profile ###

        # calculate plot centre
        x, y = self.tileCentre(parseHeader(las)) # calculate plot centre
        # per-worker temp directory, seeded from the worker identity so
        # parallel processes do not collide
        pid = multiprocessing.current_process()._identity[0]
        tempDirectoryName = 'lidar.processing.' + str(np.random.mtrand.RandomState(pid).randint(0, 9999999)) + '.tmp'
        tempDirectory = os.path.join(tempfile.gettempdir(), tempDirectoryName)

        pgap = CanopyComplexity(mp=tempDirectory).fromLAS(las, threshold = 1).Pgap()

        self.plot_dictionary[x, y] = pgap.min()
127 |
128 | def calculateFC(self, las):
129 | LAS = lasIO.lasIO(las)
130 | x, y = self.tileCentre(parseHeader(las)) # calculate plot centre
131 | tiles = LAS.tiling(self.cellSize).asDic()
132 | o = np.zeros(len(tiles))
133 | for i, tile in enumerate(tiles.keys()):
134 | T = tiles[tile]
135 | if len(T) > 0 and T['z'].max() > 1.7:
136 | pgap = CanopyComplexity().fromSample(T['z'], T['rtn_tot'], threshold=0).Pgap()
137 | o[i] = pgap.pgap[np.where(pgap.z > 1.7)][0]
138 | else:
139 | o[i] = 0
140 | self.plot_dictionary[x, y] = len(o[o > self.threshold]) / float(len(o))
141 |
142 | def calculateHeight(self, las):
143 | x, y = self.tileCentre(parseHeader(las)) # calculate plot centre
144 | pid = multiprocessing.current_process()._identity[0]
145 | tempDirectoryName = 'lidar.processing.' + str(np.random.mtrand.RandomState(pid).randint(0, 9999999)) + '.tmp'
146 | tempDirectory = os.path.join(tempfile.gettempdir(), tempDirectoryName)
147 | las = lasIO.lasIO(las, tmpDir=tempDirectory, keepTemp=False).all().asArray()
148 | self.plot_dictionary[x, y] = np.percentile(las['z'], self.height)
149 |
150 | def calculateCv(self, las):
151 | x, y = self.tileCentre(parseHeader(las)) # calculate plot centre
152 | pid = multiprocessing.current_process()._identity[0]
153 | tempDirectoryName = 'lidar.processing.' + str(np.random.mtrand.RandomState(pid).randint(0, 9999999)) + '.tmp'
154 | tempDirectory = os.path.join(tempfile.gettempdir(), tempDirectoryName)
155 | las = lasIO(las, tmpDir=tempDirectory, keepTemp=False).all().asArray()
156 | self.plot_dictionary[x, y] = las['z'].std() / las['z'].mean()
157 |
    def calculateFHD(self, las):

        """
        Foliage height diversity (Shannon index over 2 m height bins)
        for one tile, stored in self.plot_dictionary keyed by the tile
        centre. Runs inside a worker process.
        """

        x, y = self.tileCentre(parseHeader(las)) # calculate plot centre
        # per-worker temp directory, seeded from the worker identity so
        # parallel processes do not collide
        pid = multiprocessing.current_process()._identity[0]
        tempDirectoryName = 'lidar.processing.' + str(np.random.mtrand.RandomState(pid).randint(0, 9999999)) + '.tmp'
        tempDirectory = os.path.join(tempfile.gettempdir(), tempDirectoryName)
        chp = CanopyComplexity(mp=tempDirectory, keepTemp=False).fromLAS(las)
        fhd = []
        # accumulate p*ln(p) over 2 m bins from 2 m up to the canopy top
        for h in np.linspace(2, chp.z.max() // 2 * 2, chp.z.max() // 2):
            pi = sum(chp.zw[(chp.z > h) & (chp.z < h + 2)]) / chp.zw.sum()
            if pi == 0:
                continue
            fhdh = pi * np.log(pi)
            if np.isnan(fhdh): fhdh = 0
            fhd.append(fhdh)
        self.plot_dictionary[x, y] = -sum(fhd)
173 |
174 | def mp(self, listItems):
175 |
176 | ''' Carries out the multiprocessing grunt work '''
177 |
178 | manager = multiprocessing.Manager()
179 | self.plot_dictionary = manager.dict()
180 | self.global_x = manager.list()
181 | self.global_y = manager.list()
182 |
183 | listI = 0
184 |
185 | for i in range((len(listItems) / self.maxProcess) + 1):
186 |
187 | if self.verbose:
188 | print '{:.2f}% | processing job {} of {} | {}'.format((float(i) / ((len(listItems) / self.maxProcess) + 1)) * 100, \
189 | i, (len(listItems) / self.maxProcess) + 1, \
190 | datetime.datetime.now())
191 | jobs = []
192 |
193 | if (self.maxProcess * listI) + self.maxProcess < len(listItems):
194 | processingList = listItems[self.maxProcess * listI: (self.maxProcess * listI) + self.maxProcess]
195 | else: processingList = listItems[self.maxProcess * listI:]
196 |
197 | for j, las in enumerate(processingList): # limits number of lass run at once
198 |
199 | p = False
200 | processMap = {'height':self.calculateHeight, 'complexity':self.calculateComplexity,
201 | 'fhd':self.calculateFHD, 'Cv':self.calculateCv, 'Pgap':self.calculatePgap,
202 | 'FC':self.calculateFC}
203 |
204 | p = multiprocessing.Process(target=processMap[self.metric], args=(las, ))
205 |
206 | if p:
207 | jobs.append(p)
208 | p.start()
209 |
210 | for proc in jobs:
211 | proc.join()
212 |
213 | listI += 1
214 |
215 | def createGrid(self):
216 |
217 | # generate parmaeters from .las header
218 | header = parseHeader(self.LASlist[0]) # .las header to dictionary
219 |
220 | # search for predefined resolution if not there calculate
221 | # tile resolution from data
222 | if self.resolution != None:
223 | pass
224 | elif 'guid1' in header.keys() and header['guid1'] > 0:
225 | self.resolution = header['guid1']
226 | elif 'resolution' in header.keys():
227 | self.resolution = header['resolution']
228 | else:
229 | self.resolution = header['xmax'] - header['xmin'] # a wild guess!
230 |
231 | if self.verbose == True: print 'grid resolution: {}'.format(self.resolution)
232 |
233 | # grabself.vlr info
234 | # will use this at a later date to grab projection info
235 | self.vlr = getVLR(header['headersize'], self.LASlist[0])
236 |
237 | return self
238 |
239 | def populateGrid(self, plot_dictionary):
240 |
241 | self.v, self.X, self.Y = np.meshgrid(0., np.unique(self.global_x), np.unique(self.global_y))
242 |
243 | for key in plot_dictionary.keys():
244 | idx = [(self.X == key[0]) & (self.Y == key[1])]
245 | self.v[idx] = np.float(plot_dictionary[key])
246 |
247 | self.v = np.rot90(self.v.reshape(np.shape(self.v)[0], np.shape(self.v)[2]))
248 |
249 | print self.X
250 | return self.v, self.X, self.Y
251 |
252 | def tileCentre(self, header):
253 |
254 | x_range = header['xmax'] - header['xmin']
255 | x_centre = (header['xmax'] - (x_range / 2.))
256 | x_min = x_centre - (self.resolution / 2.)
257 | x = x_min // self.resolution
258 | self.global_x.append(x)
259 |
260 | y_range = header['ymax'] - header['ymin']
261 | y_centre = (header['ymax'] - (y_range / 2.))
262 | y_max = y_centre + (self.resolution / 2.)
263 | y = y_max // self.resolution
264 | self.global_y.append(y)
265 |
266 | return x, y
267 |
268 | def exportTiff(self, saveTo=False):
269 |
270 | print 'writing to tiff'
271 |
272 | if saveTo:
273 | if os.path.isfile(saveTo):
274 | ans = raw_input('Image already exists, overwrite? (Y|N): ').lower()
275 | if ans == 'y':
276 | shutil.rmtree(path)
277 | else:
278 | raise NameError('Change save filepath')
279 | elif os.path.isdir(saveTo):
280 | saveTo = os.path.join(saveTo, '{}_{:.2f}.tif'.format(self.metric, self.resolution))
281 | else:
282 | saveTo = os.path.join(self.dir, '{}_{:.2f}.tif'.format(self.metric, self.resolution))
283 |
284 | driver = gdal.GetDriverByName('GTiff')
285 | xDim = int((self.X.max() - self.X.min()) + 1)
286 | yDim = int((self.Y.max() - self.Y.min()) + 1)
287 | dataset = driver.Create(saveTo, xDim, yDim, 1, gdal.GDT_Float32)
288 |
289 | # set projection
290 | srs = osr.SpatialReference()
291 | srs.ImportFromEPSG(28355)
292 | dataset.SetProjection(srs.ExportToWkt())
293 |
294 | # set transformation
295 | dataset.SetGeoTransform([self.X.min() * self.resolution, self.resolution, 0, self.Y.max() * self.resolution, 0, 0 - self.resolution])
296 |
297 | # write raster and close
298 | dataset.GetRasterBand(1).WriteArray(self.v.astype(float))
299 | dataset = None # closes .tif
300 |
301 | print '{} tif saved at {}'.format(self.metric, saveTo)
302 |
    def asArray(self):
        ''' Return the metric grid (numpy array) built by populateGrid. '''
        return self.v
305 |
if __name__ == '__main__':

    # example run with developer-local hard-coded paths; grids canopy height
    # over a directory of tiles and writes the result to a GeoTIFF
    G = woodyAttribute(verbose=True, maxProcess=4).canopyHeight('/Users/phil/ALS/WC/spl/tile_20/WC1_5m_TILES').exportTiff(saveTo='/Users/phil/ALS/WC/spl/tile_20/WC1_5m_TILES/height_5.00.old.tif')
--------------------------------------------------------------------------------
/forestlas/woodyAttribute_.py:
--------------------------------------------------------------------------------
1 | import os
2 | import glob
3 | import multiprocessing
4 | import datetime
5 | import tempfile
6 | import numpy as np
7 |
8 | from forestlas.lasIO import *
9 |
10 | class canopyStructure:
11 |
12 | """
13 | Input is a directory containing tiled .las files and generates
14 | a raster of desired metrics, the defult is canopy height
15 |
16 | TO DO:
17 | 1: Get projection information from .las
18 | """
19 |
20 | def __init__(self, las_files, alpha=.3, N=10, resolution=None,
21 | layers=False, canopyDensity=False, underDensity=False,
22 | height=False, pgap=False, baseHeight=False,
23 | canopyPgap=False, all=False, point_only=False,
24 | verbose=False, number_of_processes=4, threshold=2, points_per_voxel=2):
25 |
26 | self.verbose = verbose
27 | self.number_of_processes = number_of_processes
28 |
29 | if not layers \
30 | and not canopyDensity \
31 | and not underDensity \
32 | and not height \
33 | and not pgap \
34 | and not baseHeight \
35 | and not canopyPgap \
36 | and not all \
37 | and not point_only:
38 | raise Exception('no method chosen')
39 |
40 | if all:
41 | layers = True
42 | canopyDensity = True
43 | underDensity = True
44 | height = True
45 | pgap = True
46 | baseHeight = True
47 | canopyPgap = True
48 |
49 | self.alpha = alpha
50 | self.N = N
51 | self.threshold = threshold
52 | self.points_per_voxel = points_per_voxel
53 |
54 | self.metrics = {'layers':False,
55 | 'canopyDensity':False,
56 | 'underDensity':False,
57 | 'height': False,
58 | 'baseHeight': False,
59 | 'pgap': False,
60 | 'canopyPgap':False}
61 |
62 | if layers:
63 | self.metrics['layers'] = True
64 | if canopyDensity:
65 | self.metrics['canopyDensity'] = True
66 | if underDensity:
67 | self.metrics['underDensity'] = True
68 | if height:
69 | self.metrics['height'] = True
70 | if pgap:
71 | self.metrics['pgap'] = True
72 | if baseHeight:
73 | self.metrics['baseHeight'] = True
74 | if canopyPgap:
75 | self.metrics['canopyPgap'] = True
76 | if point_only:
77 | self.metrics['point_only'] = True
78 |
79 | if isinstance(las_files, list):
80 | self.dir = os.path.split(las_files[0])[0]
81 | if not os.path.isdir(self.dir):
82 | if os.path.isfile(os.path.join(os.getcwd(), las_files[0])):
83 | self.dir = os.getcwd()
84 | self.LASlist = las_files
85 | else:
86 | raise Exception('las files not in cwd and no directory supplied (suggest using glob)')
87 | else:
88 | self.LASlist = las_files
89 | elif os.path.isdir(las_files):
90 | self.dir = las_files
91 | self.LASlist = sorted(glob.glob(os.path.join(self.dir, '*.las')))
92 | else:
93 | raise Exception('input needs to be a directory or list of .las files')
94 |
95 | self.resolution = resolution
96 | self.counter = len(self.LASlist) // 100
97 |
98 | if len(self.LASlist) == 0: raise Exception('No .las files in {}'.format(self.dir))
99 | if self.verbose: print 'number of tiles to process:', len(self.LASlist)
100 |
101 | self.createGrid()
102 | self.mp(self.LASlist)
103 | self.populateGrid()
104 |
105 | def base(chp):
106 | return chp.threshold if len(chp.crownBase) == 0 else chp.crownBase[-1]
107 |
108 | def calculateStructure(self, las, x, y, lasF):
109 |
110 | from forestlas.canopyComplexity import CanopyComplexity
111 |
112 | ### Generate plot profile ###
113 |
114 | try:
115 | if las['z'].max() < self.threshold or len(las[las['z'] >= self.threshold]) < self.points_per_voxel:
116 | for metric in self.metrics.keys():
117 | if metric != 'height': self.metrics[metric][x, y] = np.nan
118 |
119 | else:
120 | model = CanopyComplexity().fromLAS(las).CHP(method='model')
121 | results = {metric:np.zeros(self.N) for metric in self.metrics.keys()}
122 | for i in range(self.N):
123 | sim = model.simulateCloud()
124 | if sim['z'].max() < self.threshold or len(sim[sim['z'] >= self.threshold]) < self.points_per_voxel:
125 | results['layers'][i] = 0
126 | results['canopyDensity'][i] = 0
127 | results['underDensity'][i] = 1
128 | results['baseHeight'][i] = np.nan
129 | results['pgap'] = 1
130 | results['canopyPgap'] = np.nan
131 | else:
132 | chp = CanopyComplexity().fromLAS(sim, top_threshold=100).CHP(method='sample', alpha=self.alpha)
133 | for metric in results.keys():
134 | if metric == 'layers':
135 | results[metric][i] = chp.layerCount
136 | if metric == 'canopyDensity':
137 | results[metric][i] = chp.fd[chp.zx >= self.base(chp)].sum() / float(chp.fd.sum())
138 | if metric == 'underDensity':
139 | results[metric][i] = chp.fd[chp.zx < self.base(chp)].sum() / float(chp.fd.sum())
140 | if metric == 'baseHeight':
141 | results[metric][i] = np.nan if len(chp.crownBase) == 0 else chp.crownBase[-1]
142 | if metric == 'pgap':
143 | results[metric][i] = chp.pgap.min()
144 | if metric == 'canopyPgap':
145 | results[metric][i] = chp.pgap[chp.zx == (chp.threshold if len(chp.crownBase) == 0
146 | else chp.crownBase[-1])][0]
147 |
148 | for metric in self.metrics.keys():
149 | if metric != 'height': self.metrics[metric][x, y] = results[metric].mean()
150 |
151 | except Exception as err:
152 | print '!!!', lasF, err, '!!!'
153 |
154 | return self
155 |
156 | def mp(self, listItems):
157 |
158 | ''' Carries out the multiprocessing grunt work '''
159 |
160 | manager = multiprocessing.Manager()
161 | for metric, do in self.metrics.items():
162 | if self.metrics[metric]:
163 | self.metrics[metric] = manager.dict()
164 | else:
165 | del self.metrics[metric]
166 |
167 | # self.plot_dictionary = manager.dict()
168 | self.global_x = manager.list()
169 | self.global_y = manager.list()
170 |
171 | listI = 0
172 |
173 | for i in range((len(listItems) / self.number_of_processes) + 1):
174 |
175 | if self.counter > 0 and i%self.counter == 0 and self.verbose:
176 | print '{:.2f}% | processing job {} of {} | {}'.format((float(i) / ((len(listItems) / self.number_of_processes) + 1)) * 100, \
177 | i, (len(listItems) / self.number_of_processes) + 1, \
178 | datetime.datetime.now())
179 | jobs = []
180 |
181 | if (self.number_of_processes * listI) + self.number_of_processes < len(listItems):
182 | processingList = listItems[self.number_of_processes * listI: (self.number_of_processes * listI) + self.number_of_processes]
183 | else: processingList = listItems[self.number_of_processes * listI:]
184 |
185 | for j, las in enumerate(processingList): # limits number of lass run at once
186 |
187 | p = multiprocessing.Process(target=self.readLAS, args=(las, ))
188 |
189 | if p:
190 | jobs.append(p)
191 | p.start()
192 |
193 | for proc in jobs:
194 | proc.join()
195 |
196 | listI += 1
197 |
198 | def readLAS(self, lasF):
199 |
200 | # read las file and send to different processes
201 | pid = multiprocessing.current_process()._identity[0]
202 | tempDirectoryName = 'lidar.processing.' + str(np.random.mtrand.RandomState(pid).randint(0, 9999999)) + '.tmp'
203 | tempDirectory = os.path.join(tempfile.gettempdir(), tempDirectoryName)
204 |
205 | las = lasIO(lasF, tmpDir=tempDirectory)
206 | X, Y = self.tileCentre(las)
207 |
208 | if 'point_only' in self.metrics.keys():
209 | self.metrics['point_only'][X, Y] = 1
210 |
211 | else:
212 | las = las.all().asArray()
213 |
214 | if 'height' in self.metrics.keys():
215 | self.metrics['height'][X, Y] = np.percentile(las['z'], 95)
216 |
217 | if len(set(self.metrics.keys()).intersection(['layers', 'canopyDensity',
218 | 'underDensity', 'pgap',
219 | 'baseHeight'])) > 0:
220 | self.calculateStructure(las, X, Y, lasF)
221 |
222 | def createGrid(self):
223 |
224 | # generate parmaeters from .las header
225 | header = parseHeader(self.LASlist[0]) # .las header to dictionary
226 |
227 | # search for predefined resolution if not there calculate
228 | # tile resolution from data
229 | if self.resolution != None:
230 | pass
231 | elif 'guid1' in header.keys() and header['guid1'] > 0:
232 | self.resolution = header['guid1']
233 | elif 'resolution' in header.keys():
234 | self.resolution = header['resolution']
235 | else:
236 | self.resolution = header['xmax'] - header['xmin'] # a wild guess!
237 |
238 | if self.verbose == True: print 'grid resolution: {}'.format(self.resolution)
239 |
240 | # grabself.vlr info
241 | # will use this at a later date to grab projection info
242 | self.vlr = getVLR(header['headersize'], self.LASlist[0])
243 |
244 | return self
245 |
246 | def populateGrid(self):
247 |
248 | self.metricGrids = {key:None for key in self.metrics.keys()}
249 |
250 | for metric in self.metrics.keys():
251 |
252 | # array, self.X, self.Y = np.meshgrid(np.nan, np.unique(self.global_x), np.unique(self.global_y))
253 | array, self.X, self.Y = np.meshgrid(np.nan,
254 | np.arange(min(self.global_x), max(self.global_x) + 1, self.resolution),
255 | np.arange(min(self.global_y), max(self.global_y) + 1, self.resolution))
256 |
257 | for key in self.metrics[metric].keys():
258 | idx = [(self.X == key[0]) & (self.Y == key[1])]
259 | array[idx] = np.float(self.metrics[metric][key])
260 |
261 | self.metricGrids[metric] = np.rot90(array.reshape(np.shape(array)[0], np.shape(array)[2]))
262 |
263 | return self
264 |
265 | def tileCentre(self, las):
266 |
267 |
268 | x = np.floor(las.x_centre)
269 | self.global_x.append(x)
270 |
271 | y = np.round(las.y_centre)
272 | self.global_y.append(y)
273 |
274 | return x, y
275 |
276 | def exportTiff(self, saveTo=False):
277 |
278 | from osgeo import osr, gdal
279 |
280 | for metric in self.metrics.keys():
281 | if saveTo:
282 | if os.path.isfile(saveTo):
283 | ans = raw_input('Image already exists, overwrite? (Y|N): ').lower()
284 | if ans == 'y':
285 | os.unlink(saveTo)
286 | else:
287 | raise NameError('Change save filepath')
288 | savePath = saveTo
289 | elif os.path.isdir(saveTo):
290 | savePath = os.path.join(saveTo, '{}_{:.2f}.tif'.format(metric, self.resolution))
291 | elif saveTo.endswith('.tif'):
292 | dir = os.path.split(saveTo)[0]
293 | if os.path.isdir(dir):
294 | savePath = saveTo
295 | else:
296 | raise Exception('{} is not a directory'.format(dir))
297 | else:
298 | savePath = os.path.join(self.dir, '{}_{:.2f}.tif'.format(metric, self.resolution))
299 |
300 | driver = gdal.GetDriverByName('GTiff')
301 | xDim = len(np.unique(self.X))
302 | yDim = len(np.unique(self.Y))
303 | dataset = driver.Create(savePath, xDim, yDim, 1, gdal.GDT_Float32)
304 |
305 | # set projection
306 | srs = osr.SpatialReference()
307 | srs.ImportFromEPSG(28355)
308 | dataset.SetProjection(srs.ExportToWkt())
309 |
310 | dataset.SetGeoTransform([self.X.min() - float(self.resolution) / 2.,
311 | self.resolution,
312 | 0,
313 | self.Y.max() + float(self.resolution) / 2.,
314 | 0,
315 | 0 - self.resolution])
316 |
317 | # set no data value
318 | dataset.GetRasterBand(1).SetNoDataValue(-1)
319 |
320 | # write raster and close
321 | dataset.GetRasterBand(1).WriteArray(self.metricGrids[metric])
322 | del dataset # closes .tif
323 |
324 | print '{} tif saved at {}'.format(metric, savePath)
325 |
326 | def asArray(self):
327 | return self.metricGrids
328 |
if __name__ == '__main__':

    import glob
    # example run with developer-local hard-coded paths: grids the
    # 95th-percentile height metric over a directory of tiles and times it
    # path = '/Users/phil/ALS/WC/spl/tile_20/ForestLAS_tutorial/LAS/large_tile'
    path = '/Users/phil/ALS/WC/spl/tile_20/ForestLAS_tutorial/LAS/large_tile/WC45_SUB_20m_TILES'
    L = glob.glob(os.path.join(path, '*.las'))
    # L = ['/Users/phil/Google_Drive/RiverHealth/DATA/AREA_A/10/309231.0_6004106.0.PLOT.las']
    # canopyStructure(L, point_only=True, resolution=10, verbose=True)
    start = datetime.datetime.now()
    canopyStructure(L, height=True, verbose=True, number_of_processes=4).exportTiff()
    print datetime.datetime.now() - start
    # G = canopyStructure(glob.glob('/Users/phil/Google_Drive/RiverHealth/DATA/AREA_C/100/*.las'), height=True, verbose=True, number_of_processes=4, resolution=100)#
--------------------------------------------------------------------------------
/forestlas/woodyAttribute_pool.py:
--------------------------------------------------------------------------------
1 | import os
2 | import glob
3 | import multiprocessing
4 | import datetime
5 | import tempfile
6 | import numpy as np
7 |
8 | from forestlas.lasIO import *
9 |
def base(chp):
    ''' Height of the lowest crown base in a canopy height profile; falls
    back to the profile threshold when no crown base was detected. '''
    if len(chp.crownBase) == 0:
        return chp.threshold
    return chp.crownBase[-1]
12 |
13 | class canopyStructure:
14 |
15 | """
16 | Input is a directory containing tiled .las files and generates
17 | a raster of desired metrics, the defult is canopy height
18 |
19 | TO DO:
20 | 1: Get projection information from .las
21 | """
22 |
23 | def __init__(self, las_files, alpha=.3, N=10, resolution=None,
24 | layers=False, canopyDensity=False, underDensity=False,
25 | height=False, pgap=False, baseHeight=False,
26 | canopyPgap=False, all=False, point_only=False,
27 | verbose=False, number_of_processes=4, threshold=2, points_per_voxel=2):
28 |
29 | self.verbose = verbose
30 | self.number_of_processes = number_of_processes
31 |
32 | if not layers \
33 | and not canopyDensity \
34 | and not underDensity \
35 | and not height \
36 | and not pgap \
37 | and not baseHeight \
38 | and not canopyPgap \
39 | and not all \
40 | and not point_only:
41 | raise Exception('no method chosen')
42 |
43 | if all:
44 | layers = True
45 | canopyDensity = True
46 | underDensity = True
47 | height = True
48 | pgap = True
49 | baseHeight = True
50 | canopyPgap = True
51 |
52 | self.alpha = alpha
53 | self.N = N
54 | self.threshold = threshold
55 | self.points_per_voxel = points_per_voxel
56 |
57 | self.metrics = {'layers':False,
58 | 'canopyDensity':False,
59 | 'underDensity':False,
60 | 'height': False,
61 | 'baseHeight': False,
62 | 'pgap': False,
63 | 'canopyPgap':False}
64 |
65 | if layers:
66 | self.metrics['layers'] = True
67 | if canopyDensity:
68 | self.metrics['canopyDensity'] = True
69 | if underDensity:
70 | self.metrics['underDensity'] = True
71 | if height:
72 | self.metrics['height'] = True
73 | if pgap:
74 | self.metrics['pgap'] = True
75 | if baseHeight:
76 | self.metrics['baseHeight'] = True
77 | if canopyPgap:
78 | self.metrics['canopyPgap'] = True
79 | if point_only:
80 | self.metrics['point_only'] = True
81 |
82 | if isinstance(las_files, list):
83 | self.dir = os.path.split(las_files[0])[0]
84 | if not os.path.isdir(self.dir):
85 | if os.path.isfile(os.path.join(os.getcwd(), las_files[0])):
86 | self.dir = os.getcwd()
87 | self.LASlist = las_files
88 | else:
89 | raise Exception('las files not in cwd and no directory supplied (suggest using glob)')
90 | else:
91 | self.LASlist = las_files
92 | elif os.path.isdir(las_files):
93 | self.dir = las_files
94 | self.LASlist = sorted(glob.glob(os.path.join(self.dir, '*.las')))
95 | else:
96 | raise Exception('input needs to be a directory or list of .las files')
97 |
98 | self.resolution = resolution
99 | self.counter = len(self.LASlist) // 100
100 |
101 | if len(self.LASlist) == 0: raise Exception('No .las files in {}'.format(self.dir))
102 | if self.verbose:
103 | print 'number of tiles to process:', len(self.LASlist)
104 | print 'number of CPUs:', self.number_of_processes
105 |
106 | self.createGrid()
107 | self.mp(self.LASlist)
108 | self.populateGrid()
109 |
110 | def calculateStructure(self, las, x, y, lasF):
111 |
112 | from forestlas.canopyComplexity import CanopyComplexity
113 |
114 | ### Generate plot profile ###
115 |
116 | try:
117 | if las['z'].max() < self.threshold or len(las[las['z'] >= self.threshold]) < self.points_per_voxel:
118 | for metric in self.metrics.keys():
119 | if metric != 'height': self.metrics[metric][x, y] = np.nan
120 |
121 | else:
122 | model = CanopyComplexity().fromLAS(las).CHP(method='model')
123 | results = {metric:np.zeros(self.N) for metric in self.metrics.keys()}
124 | for i in range(self.N):
125 | sim = model.simulateCloud()
126 | if sim['z'].max() < self.threshold or len(sim[sim['z'] >= self.threshold]) < self.points_per_voxel:
127 | results['layers'][i] = 0
128 | results['canopyDensity'][i] = 0
129 | results['underDensity'][i] = 1
130 | results['baseHeight'][i] = np.nan
131 | results['pgap'] = 1
132 | results['canopyPgap'] = np.nan
133 | else:
134 | chp = CanopyComplexity().fromLAS(sim, top_threshold=100).CHP(method='sample', alpha=self.alpha)
135 | for metric in results.keys():
136 | if metric == 'layers':
137 | results[metric][i] = chp.layerCount
138 | if metric == 'canopyDensity':
139 | results[metric][i] = chp.fd[chp.zx >= self.base(chp)].sum() / float(chp.fd.sum())
140 | if metric == 'underDensity':
141 | results[metric][i] = chp.fd[chp.zx < self.base(chp)].sum() / float(chp.fd.sum())
142 | if metric == 'baseHeight':
143 | results[metric][i] = np.nan if len(chp.crownBase) == 0 else chp.crownBase[-1]
144 | if metric == 'pgap':
145 | results[metric][i] = chp.pgap.min()
146 | if metric == 'canopyPgap':
147 | results[metric][i] = chp.pgap[chp.zx == (chp.threshold if len(chp.crownBase) == 0
148 | else chp.crownBase[-1])][0]
149 |
150 | for metric in self.metrics.keys():
151 | if metric != 'height': self.metrics[metric][x, y] = results[metric].mean()
152 |
153 | except Exception as err:
154 | print '!!!', lasF, err, '!!!'
155 |
156 | return self
157 |
158 | def mp(self, listItems):
159 |
160 | ''' Carries out the multiprocessing grunt work '''
161 |
162 | manager = multiprocessing.Manager()
163 | for metric, do in self.metrics.items():
164 | if self.metrics[metric]:
165 | self.metrics[metric] = manager.dict()
166 | else:
167 | del self.metrics[metric]
168 |
169 | # self.plot_dictionary = manager.dict()
170 | self.global_x = manager.list()
171 | self.global_y = manager.list()
172 |
173 | p = multiprocessing.Pool(self.number_of_processes)
174 | # [p.apply(recover_temp_file, (f,), kw) for f in F]
175 | [p.apply(readLAS, (self, f)) for f in listItems]
176 | p.close()
177 | p.join()
178 |
179 | return self
180 |
181 | def createGrid(self):
182 |
183 | # generate parmaeters from .las header
184 | header = parseHeader(self.LASlist[0]) # .las header to dictionary
185 |
186 | # search for predefined resolution if not there calculate
187 | # tile resolution from data
188 | if self.resolution != None:
189 | pass
190 | elif 'guid1' in header.keys() and header['guid1'] > 0:
191 | self.resolution = header['guid1']
192 | elif 'resolution' in header.keys():
193 | self.resolution = header['resolution']
194 | else:
195 | self.resolution = header['xmax'] - header['xmin'] # a wild guess!
196 |
197 | if self.verbose == True: print 'grid resolution: {}'.format(self.resolution)
198 |
199 | # grabself.vlr info
200 | # will use this at a later date to grab projection info
201 | self.vlr = getVLR(header['headersize'], self.LASlist[0])
202 |
203 | return self
204 |
205 | def populateGrid(self):
206 |
207 | self.metricGrids = {key:None for key in self.metrics.keys()}
208 |
209 | for metric in self.metrics.keys():
210 |
211 | # array, self.X, self.Y = np.meshgrid(np.nan, np.unique(self.global_x), np.unique(self.global_y))
212 | array, self.X, self.Y = np.meshgrid(np.nan,
213 | np.arange(min(self.global_x), max(self.global_x) + 1, self.resolution),
214 | np.arange(min(self.global_y), max(self.global_y) + 1, self.resolution))
215 |
216 | for key in self.metrics[metric].keys():
217 | idx = [(self.X == key[0]) & (self.Y == key[1])]
218 | array[idx] = np.float(self.metrics[metric][key])
219 |
220 | self.metricGrids[metric] = np.rot90(array.reshape(np.shape(array)[0], np.shape(array)[2]))
221 |
222 | return self
223 |
224 | def tileCentre(self, las):
225 |
226 | x = np.round(las.x_centre)
227 | self.global_x.append(x)
228 |
229 | y = np.round(las.y_centre)
230 | self.global_y.append(y)
231 |
232 | return x, y
233 |
234 | def exportTiff(self, saveTo=False):
235 |
236 | from osgeo import osr, gdal
237 |
238 | for metric in self.metrics.keys():
239 | if saveTo:
240 | if os.path.isfile(saveTo):
241 | ans = raw_input('Image already exists, overwrite? (Y|N): ').lower()
242 | if ans == 'y':
243 | os.unlink(saveTo)
244 | else:
245 | raise NameError('Change save filepath')
246 | savePath = saveTo
247 | elif os.path.isdir(saveTo):
248 | savePath = os.path.join(saveTo, '{}_{:.2f}.tif'.format(metric, self.resolution))
249 | elif saveTo.endswith('.tif'):
250 | dir = os.path.split(saveTo)[0]
251 | if os.path.isdir(dir):
252 | savePath = saveTo
253 | else:
254 | raise Exception('{} is not a directory'.format(dir))
255 | else:
256 | savePath = os.path.join(self.dir, '{}_{:.2f}.tif'.format(metric, self.resolution))
257 |
258 | driver = gdal.GetDriverByName('GTiff')
259 | xDim = len(np.unique(self.X))
260 | yDim = len(np.unique(self.Y))
261 | dataset = driver.Create(savePath, xDim, yDim, 1, gdal.GDT_Float32)
262 |
263 | # set projection
264 | srs = osr.SpatialReference()
265 | srs.ImportFromEPSG(28355)
266 | dataset.SetProjection(srs.ExportToWkt())
267 |
268 | dataset.SetGeoTransform([self.X.min() - float(self.resolution) / 2.,
269 | self.resolution,
270 | 0,
271 | self.Y.max() + float(self.resolution) / 2.,
272 | 0,
273 | 0 - self.resolution])
274 |
275 | # set no data value
276 | dataset.GetRasterBand(1).SetNoDataValue(-1)
277 |
278 | # write raster and close
279 | dataset.GetRasterBand(1).WriteArray(self.metricGrids[metric])
280 | del dataset # closes .tif
281 |
282 | print '{} tif saved at {}'.format(metric, savePath)
283 |
284 | def asArray(self):
285 | return self.metricGrids
286 |
def X(lasF):
    # debug helper left over from development; appears unused in this file
    print lasF
289 |
290 | def readLAS(self, lasF):
291 |
292 | # read las file and send to different processes
293 | pid = multiprocessing.current_process()._identity[0]
294 | tempDirectoryName = 'lidar.processing.' + str(np.random.mtrand.RandomState(pid).randint(0, 9999999)) + '.tmp'
295 | tempDirectory = os.path.join(tempfile.gettempdir(), tempDirectoryName)
296 |
297 | las = lasIO(lasF, tmpDir=tempDirectory)
298 | X, Y = self.tileCentre(las)
299 |
300 | if 'point_only' in self.metrics.keys():
301 | self.metrics['point_only'][X, Y] = 1
302 |
303 | else:
304 | try:
305 | las = las.all().asArray()
306 |
307 | if 'height' in self.metrics.keys():
308 | self.metrics['height'][X, Y] = np.percentile(las['z'], 95)
309 |
310 | if len(set(self.metrics.keys()).intersection(['layers', 'canopyDensity',
311 | 'underDensity', 'pgap',
312 | 'baseHeight'])) > 0:
313 | self.calculateStructure(las, X, Y, lasF)
314 | except:
315 | print lasF
316 |
317 | return self
318 |
if __name__ == '__main__':

    import glob
    # example run with developer-local hard-coded paths: grids the pgap
    # metric at 5 m resolution over a directory of tiles and times it
    # path = '/Users/phil/ALS/WC/spl/tile_20/ForestLAS_tutorial/LAS/large_tile'
    path = '/Users/phil/ALS/WC/spl/tile_20/ForestLAS_tutorial/LAS/large_tile/WC45_SUB_20m_TILES'
    L = glob.glob(os.path.join(path, '*.las'))
    # L = ['/Users/phil/Google_Drive/RiverHealth/DATA/AREA_A/10/309231.0_6004106.0.PLOT.las']
    # canopyStructure(L, point_only=True, resolution=10, verbose=True)
    start = datetime.datetime.now()
    canopyStructure(L, pgap=True, verbose=True, number_of_processes=4, resolution=5).exportTiff(path)
    print datetime.datetime.now() - start
    # G = canopyStructure(glob.glob('/Users/phil/Google_Drive/RiverHealth/DATA/AREA_C/100/*.las'), height=True, verbose=True, number_of_processes=4, resolution=100)#
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
# packaging script for the forestlas package
# NOTE(review): distutils is deprecated in modern Python (setuptools is the
# replacement); fine for the Python 2 era this package targets
from distutils.core import setup

setup(
    name='ForestLAS',
    version='0.1.3.5',
    author='Phil Wilkes',
    author_email='phil.wilkes@rmit.edu.au',
    packages=['forestlas'],
    url='http://www.crcsi.com.au/Research/2-Feature-Extraction/2-07-Woody-Vegetation',
    license='LICENSE.txt',  # NOTE(review): repo file is named LICENSE (no .txt) -- confirm
    description='Tools for generating woody attribution features',
    long_description=open('README.md').read(),
)
14 |
--------------------------------------------------------------------------------