├── .gitattributes
├── .gitignore
├── .travis.yml
├── COPYING
├── FFST
├── __init__.py
├── _fft.py
├── _inverseShearletTransformSpect.py
├── _scalesShearsAndSpectra.py
├── _shearletTransformSpect.py
├── _version.py
├── meyerShearlet.py
├── shearletScaleShear.py
└── tests
│ ├── __init__.py
│ └── test_ffst.py
├── MANIFEST.in
├── README.orig
├── README.rst
├── doc
├── Makefile
├── conf.py
├── index.rst
├── reference
│ └── .gitignore
├── sphinxext
│ ├── docscrape.py
│ ├── docscrape_sphinx.py
│ ├── github.py
│ ├── math_dollar.py
│ └── numpydoc.py
└── tools
│ ├── LICENSE.txt
│ ├── apigen.py
│ └── buildmodref.py
├── examples
└── example.py
├── setup.cfg
├── setup.py
└── versioneer.py
/.gitattributes:
--------------------------------------------------------------------------------
1 | FFST/_version.py export-subst
2 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /build
2 | /dist
3 | /bin/old
4 | /cmind-py-dev.egg-info
5 | /MANIFEST
6 | .project
7 | .settings
8 | .pydevproject
9 | .spyderproject
10 | .spyderworkspace
11 | .cache
12 | .coverage
13 | *.egg-info
14 |
15 | # binary data files
16 |
17 | # Sphinx documentation
18 | /doc/_build
19 | /doc/generated
20 | /doc/img_extra
21 |
22 | # Byte-compiled / optimized / DLL files
23 | __pycache__/
24 | *.py[cod]
25 |
26 | # Installer logs
27 | pip-log.txt
28 | pip-delete-this-directory.txt
29 |
30 | # Compiled source #
31 | ###################
32 | *.so
33 | *.o
34 | *.lib
35 | *.obj
36 | *.dll
37 | *.exe
38 |
39 | # Logs and databases #
40 | ######################
41 | *.log
42 | *.sql
43 | *.sqlite
44 |
45 | # OS generated files #
46 | ######################
47 | .DS_Store
48 | .DS_Store?
49 | Thumbs.db
50 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 |
3 | sudo: false
4 |
5 | python:
6 | - 2.7
7 | - 3.3
8 | - 3.4
9 | - 3.5
10 |
11 | # based on example from: https://repo.continuum.io/miniconda/
12 | install:
13 | - if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
14 | wget https://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O miniconda.sh;
15 | else
16 | wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
17 | fi
18 | - bash miniconda.sh -b -p $HOME/miniconda
19 | - export PATH="$HOME/miniconda/bin:$PATH"
20 | - hash -r
21 | - conda config --set always_yes yes --set changeps1 no
22 | - conda update -q conda
23 | - conda info -a
24 | - conda create -q -n test-environment python=$TRAVIS_PYTHON_VERSION numpy nose
25 | - source activate test-environment
26 | - python setup.py build_ext --inplace
27 |
28 | script: python -m nose FFST/tests/*.py
29 |
--------------------------------------------------------------------------------
/COPYING:
--------------------------------------------------------------------------------
1 | GNU GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 |  Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU General Public License is a free, copyleft license for
11 | software and other kinds of works.
12 |
13 | The licenses for most software and other practical works are designed
14 | to take away your freedom to share and change the works. By contrast,
15 | the GNU General Public License is intended to guarantee your freedom to
16 | share and change all versions of a program--to make sure it remains free
17 | software for all its users. We, the Free Software Foundation, use the
18 | GNU General Public License for most of our software; it applies also to
19 | any other work released this way by its authors. You can apply it to
20 | your programs, too.
21 |
22 | When we speak of free software, we are referring to freedom, not
23 | price. Our General Public Licenses are designed to make sure that you
24 | have the freedom to distribute copies of free software (and charge for
25 | them if you wish), that you receive source code or can get it if you
26 | want it, that you can change the software or use pieces of it in new
27 | free programs, and that you know you can do these things.
28 |
29 | To protect your rights, we need to prevent others from denying you
30 | these rights or asking you to surrender the rights. Therefore, you have
31 | certain responsibilities if you distribute copies of the software, or if
32 | you modify it: responsibilities to respect the freedom of others.
33 |
34 | For example, if you distribute copies of such a program, whether
35 | gratis or for a fee, you must pass on to the recipients the same
36 | freedoms that you received. You must make sure that they, too, receive
37 | or can get the source code. And you must show them these terms so they
38 | know their rights.
39 |
40 | Developers that use the GNU GPL protect your rights with two steps:
41 | (1) assert copyright on the software, and (2) offer you this License
42 | giving you legal permission to copy, distribute and/or modify it.
43 |
44 | For the developers' and authors' protection, the GPL clearly explains
45 | that there is no warranty for this free software. For both users' and
46 | authors' sake, the GPL requires that modified versions be marked as
47 | changed, so that their problems will not be attributed erroneously to
48 | authors of previous versions.
49 |
50 | Some devices are designed to deny users access to install or run
51 | modified versions of the software inside them, although the manufacturer
52 | can do so. This is fundamentally incompatible with the aim of
53 | protecting users' freedom to change the software. The systematic
54 | pattern of such abuse occurs in the area of products for individuals to
55 | use, which is precisely where it is most unacceptable. Therefore, we
56 | have designed this version of the GPL to prohibit the practice for those
57 | products. If such problems arise substantially in other domains, we
58 | stand ready to extend this provision to those domains in future versions
59 | of the GPL, as needed to protect the freedom of users.
60 |
61 | Finally, every program is threatened constantly by software patents.
62 | States should not allow patents to restrict development and use of
63 | software on general-purpose computers, but in those that do, we wish to
64 | avoid the special danger that patents applied to a free program could
65 | make it effectively proprietary. To prevent this, the GPL assures that
66 | patents cannot be used to render the program non-free.
67 |
68 | The precise terms and conditions for copying, distribution and
69 | modification follow.
70 |
71 | TERMS AND CONDITIONS
72 |
73 | 0. Definitions.
74 |
75 | "This License" refers to version 3 of the GNU General Public License.
76 |
77 | "Copyright" also means copyright-like laws that apply to other kinds of
78 | works, such as semiconductor masks.
79 |
80 | "The Program" refers to any copyrightable work licensed under this
81 | License. Each licensee is addressed as "you". "Licensees" and
82 | "recipients" may be individuals or organizations.
83 |
84 | To "modify" a work means to copy from or adapt all or part of the work
85 | in a fashion requiring copyright permission, other than the making of an
86 | exact copy. The resulting work is called a "modified version" of the
87 | earlier work or a work "based on" the earlier work.
88 |
89 | A "covered work" means either the unmodified Program or a work based
90 | on the Program.
91 |
92 | To "propagate" a work means to do anything with it that, without
93 | permission, would make you directly or secondarily liable for
94 | infringement under applicable copyright law, except executing it on a
95 | computer or modifying a private copy. Propagation includes copying,
96 | distribution (with or without modification), making available to the
97 | public, and in some countries other activities as well.
98 |
99 | To "convey" a work means any kind of propagation that enables other
100 | parties to make or receive copies. Mere interaction with a user through
101 | a computer network, with no transfer of a copy, is not conveying.
102 |
103 | An interactive user interface displays "Appropriate Legal Notices"
104 | to the extent that it includes a convenient and prominently visible
105 | feature that (1) displays an appropriate copyright notice, and (2)
106 | tells the user that there is no warranty for the work (except to the
107 | extent that warranties are provided), that licensees may convey the
108 | work under this License, and how to view a copy of this License. If
109 | the interface presents a list of user commands or options, such as a
110 | menu, a prominent item in the list meets this criterion.
111 |
112 | 1. Source Code.
113 |
114 | The "source code" for a work means the preferred form of the work
115 | for making modifications to it. "Object code" means any non-source
116 | form of a work.
117 |
118 | A "Standard Interface" means an interface that either is an official
119 | standard defined by a recognized standards body, or, in the case of
120 | interfaces specified for a particular programming language, one that
121 | is widely used among developers working in that language.
122 |
123 | The "System Libraries" of an executable work include anything, other
124 | than the work as a whole, that (a) is included in the normal form of
125 | packaging a Major Component, but which is not part of that Major
126 | Component, and (b) serves only to enable use of the work with that
127 | Major Component, or to implement a Standard Interface for which an
128 | implementation is available to the public in source code form. A
129 | "Major Component", in this context, means a major essential component
130 | (kernel, window system, and so on) of the specific operating system
131 | (if any) on which the executable work runs, or a compiler used to
132 | produce the work, or an object code interpreter used to run it.
133 |
134 | The "Corresponding Source" for a work in object code form means all
135 | the source code needed to generate, install, and (for an executable
136 | work) run the object code and to modify the work, including scripts to
137 | control those activities. However, it does not include the work's
138 | System Libraries, or general-purpose tools or generally available free
139 | programs which are used unmodified in performing those activities but
140 | which are not part of the work. For example, Corresponding Source
141 | includes interface definition files associated with source files for
142 | the work, and the source code for shared libraries and dynamically
143 | linked subprograms that the work is specifically designed to require,
144 | such as by intimate data communication or control flow between those
145 | subprograms and other parts of the work.
146 |
147 | The Corresponding Source need not include anything that users
148 | can regenerate automatically from other parts of the Corresponding
149 | Source.
150 |
151 | The Corresponding Source for a work in source code form is that
152 | same work.
153 |
154 | 2. Basic Permissions.
155 |
156 | All rights granted under this License are granted for the term of
157 | copyright on the Program, and are irrevocable provided the stated
158 | conditions are met. This License explicitly affirms your unlimited
159 | permission to run the unmodified Program. The output from running a
160 | covered work is covered by this License only if the output, given its
161 | content, constitutes a covered work. This License acknowledges your
162 | rights of fair use or other equivalent, as provided by copyright law.
163 |
164 | You may make, run and propagate covered works that you do not
165 | convey, without conditions so long as your license otherwise remains
166 | in force. You may convey covered works to others for the sole purpose
167 | of having them make modifications exclusively for you, or provide you
168 | with facilities for running those works, provided that you comply with
169 | the terms of this License in conveying all material for which you do
170 | not control copyright. Those thus making or running the covered works
171 | for you must do so exclusively on your behalf, under your direction
172 | and control, on terms that prohibit them from making any copies of
173 | your copyrighted material outside their relationship with you.
174 |
175 | Conveying under any other circumstances is permitted solely under
176 | the conditions stated below. Sublicensing is not allowed; section 10
177 | makes it unnecessary.
178 |
179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
180 |
181 | No covered work shall be deemed part of an effective technological
182 | measure under any applicable law fulfilling obligations under article
183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
184 | similar laws prohibiting or restricting circumvention of such
185 | measures.
186 |
187 | When you convey a covered work, you waive any legal power to forbid
188 | circumvention of technological measures to the extent such circumvention
189 | is effected by exercising rights under this License with respect to
190 | the covered work, and you disclaim any intention to limit operation or
191 | modification of the work as a means of enforcing, against the work's
192 | users, your or third parties' legal rights to forbid circumvention of
193 | technological measures.
194 |
195 | 4. Conveying Verbatim Copies.
196 |
197 | You may convey verbatim copies of the Program's source code as you
198 | receive it, in any medium, provided that you conspicuously and
199 | appropriately publish on each copy an appropriate copyright notice;
200 | keep intact all notices stating that this License and any
201 | non-permissive terms added in accord with section 7 apply to the code;
202 | keep intact all notices of the absence of any warranty; and give all
203 | recipients a copy of this License along with the Program.
204 |
205 | You may charge any price or no price for each copy that you convey,
206 | and you may offer support or warranty protection for a fee.
207 |
208 | 5. Conveying Modified Source Versions.
209 |
210 | You may convey a work based on the Program, or the modifications to
211 | produce it from the Program, in the form of source code under the
212 | terms of section 4, provided that you also meet all of these conditions:
213 |
214 | a) The work must carry prominent notices stating that you modified
215 | it, and giving a relevant date.
216 |
217 | b) The work must carry prominent notices stating that it is
218 | released under this License and any conditions added under section
219 | 7. This requirement modifies the requirement in section 4 to
220 | "keep intact all notices".
221 |
222 | c) You must license the entire work, as a whole, under this
223 | License to anyone who comes into possession of a copy. This
224 | License will therefore apply, along with any applicable section 7
225 | additional terms, to the whole of the work, and all its parts,
226 | regardless of how they are packaged. This License gives no
227 | permission to license the work in any other way, but it does not
228 | invalidate such permission if you have separately received it.
229 |
230 | d) If the work has interactive user interfaces, each must display
231 | Appropriate Legal Notices; however, if the Program has interactive
232 | interfaces that do not display Appropriate Legal Notices, your
233 | work need not make them do so.
234 |
235 | A compilation of a covered work with other separate and independent
236 | works, which are not by their nature extensions of the covered work,
237 | and which are not combined with it such as to form a larger program,
238 | in or on a volume of a storage or distribution medium, is called an
239 | "aggregate" if the compilation and its resulting copyright are not
240 | used to limit the access or legal rights of the compilation's users
241 | beyond what the individual works permit. Inclusion of a covered work
242 | in an aggregate does not cause this License to apply to the other
243 | parts of the aggregate.
244 |
245 | 6. Conveying Non-Source Forms.
246 |
247 | You may convey a covered work in object code form under the terms
248 | of sections 4 and 5, provided that you also convey the
249 | machine-readable Corresponding Source under the terms of this License,
250 | in one of these ways:
251 |
252 | a) Convey the object code in, or embodied in, a physical product
253 | (including a physical distribution medium), accompanied by the
254 | Corresponding Source fixed on a durable physical medium
255 | customarily used for software interchange.
256 |
257 | b) Convey the object code in, or embodied in, a physical product
258 | (including a physical distribution medium), accompanied by a
259 | written offer, valid for at least three years and valid for as
260 | long as you offer spare parts or customer support for that product
261 | model, to give anyone who possesses the object code either (1) a
262 | copy of the Corresponding Source for all the software in the
263 | product that is covered by this License, on a durable physical
264 | medium customarily used for software interchange, for a price no
265 | more than your reasonable cost of physically performing this
266 | conveying of source, or (2) access to copy the
267 | Corresponding Source from a network server at no charge.
268 |
269 | c) Convey individual copies of the object code with a copy of the
270 | written offer to provide the Corresponding Source. This
271 | alternative is allowed only occasionally and noncommercially, and
272 | only if you received the object code with such an offer, in accord
273 | with subsection 6b.
274 |
275 | d) Convey the object code by offering access from a designated
276 | place (gratis or for a charge), and offer equivalent access to the
277 | Corresponding Source in the same way through the same place at no
278 | further charge. You need not require recipients to copy the
279 | Corresponding Source along with the object code. If the place to
280 | copy the object code is a network server, the Corresponding Source
281 | may be on a different server (operated by you or a third party)
282 | that supports equivalent copying facilities, provided you maintain
283 | clear directions next to the object code saying where to find the
284 | Corresponding Source. Regardless of what server hosts the
285 | Corresponding Source, you remain obligated to ensure that it is
286 | available for as long as needed to satisfy these requirements.
287 |
288 | e) Convey the object code using peer-to-peer transmission, provided
289 | you inform other peers where the object code and Corresponding
290 | Source of the work are being offered to the general public at no
291 | charge under subsection 6d.
292 |
293 | A separable portion of the object code, whose source code is excluded
294 | from the Corresponding Source as a System Library, need not be
295 | included in conveying the object code work.
296 |
297 | A "User Product" is either (1) a "consumer product", which means any
298 | tangible personal property which is normally used for personal, family,
299 | or household purposes, or (2) anything designed or sold for incorporation
300 | into a dwelling. In determining whether a product is a consumer product,
301 | doubtful cases shall be resolved in favor of coverage. For a particular
302 | product received by a particular user, "normally used" refers to a
303 | typical or common use of that class of product, regardless of the status
304 | of the particular user or of the way in which the particular user
305 | actually uses, or expects or is expected to use, the product. A product
306 | is a consumer product regardless of whether the product has substantial
307 | commercial, industrial or non-consumer uses, unless such uses represent
308 | the only significant mode of use of the product.
309 |
310 | "Installation Information" for a User Product means any methods,
311 | procedures, authorization keys, or other information required to install
312 | and execute modified versions of a covered work in that User Product from
313 | a modified version of its Corresponding Source. The information must
314 | suffice to ensure that the continued functioning of the modified object
315 | code is in no case prevented or interfered with solely because
316 | modification has been made.
317 |
318 | If you convey an object code work under this section in, or with, or
319 | specifically for use in, a User Product, and the conveying occurs as
320 | part of a transaction in which the right of possession and use of the
321 | User Product is transferred to the recipient in perpetuity or for a
322 | fixed term (regardless of how the transaction is characterized), the
323 | Corresponding Source conveyed under this section must be accompanied
324 | by the Installation Information. But this requirement does not apply
325 | if neither you nor any third party retains the ability to install
326 | modified object code on the User Product (for example, the work has
327 | been installed in ROM).
328 |
329 | The requirement to provide Installation Information does not include a
330 | requirement to continue to provide support service, warranty, or updates
331 | for a work that has been modified or installed by the recipient, or for
332 | the User Product in which it has been modified or installed. Access to a
333 | network may be denied when the modification itself materially and
334 | adversely affects the operation of the network or violates the rules and
335 | protocols for communication across the network.
336 |
337 | Corresponding Source conveyed, and Installation Information provided,
338 | in accord with this section must be in a format that is publicly
339 | documented (and with an implementation available to the public in
340 | source code form), and must require no special password or key for
341 | unpacking, reading or copying.
342 |
343 | 7. Additional Terms.
344 |
345 | "Additional permissions" are terms that supplement the terms of this
346 | License by making exceptions from one or more of its conditions.
347 | Additional permissions that are applicable to the entire Program shall
348 | be treated as though they were included in this License, to the extent
349 | that they are valid under applicable law. If additional permissions
350 | apply only to part of the Program, that part may be used separately
351 | under those permissions, but the entire Program remains governed by
352 | this License without regard to the additional permissions.
353 |
354 | When you convey a copy of a covered work, you may at your option
355 | remove any additional permissions from that copy, or from any part of
356 | it. (Additional permissions may be written to require their own
357 | removal in certain cases when you modify the work.) You may place
358 | additional permissions on material, added by you to a covered work,
359 | for which you have or can give appropriate copyright permission.
360 |
361 | Notwithstanding any other provision of this License, for material you
362 | add to a covered work, you may (if authorized by the copyright holders of
363 | that material) supplement the terms of this License with terms:
364 |
365 | a) Disclaiming warranty or limiting liability differently from the
366 | terms of sections 15 and 16 of this License; or
367 |
368 | b) Requiring preservation of specified reasonable legal notices or
369 | author attributions in that material or in the Appropriate Legal
370 | Notices displayed by works containing it; or
371 |
372 | c) Prohibiting misrepresentation of the origin of that material, or
373 | requiring that modified versions of such material be marked in
374 | reasonable ways as different from the original version; or
375 |
376 | d) Limiting the use for publicity purposes of names of licensors or
377 | authors of the material; or
378 |
379 | e) Declining to grant rights under trademark law for use of some
380 | trade names, trademarks, or service marks; or
381 |
382 | f) Requiring indemnification of licensors and authors of that
383 | material by anyone who conveys the material (or modified versions of
384 | it) with contractual assumptions of liability to the recipient, for
385 | any liability that these contractual assumptions directly impose on
386 | those licensors and authors.
387 |
388 | All other non-permissive additional terms are considered "further
389 | restrictions" within the meaning of section 10. If the Program as you
390 | received it, or any part of it, contains a notice stating that it is
391 | governed by this License along with a term that is a further
392 | restriction, you may remove that term. If a license document contains
393 | a further restriction but permits relicensing or conveying under this
394 | License, you may add to a covered work material governed by the terms
395 | of that license document, provided that the further restriction does
396 | not survive such relicensing or conveying.
397 |
398 | If you add terms to a covered work in accord with this section, you
399 | must place, in the relevant source files, a statement of the
400 | additional terms that apply to those files, or a notice indicating
401 | where to find the applicable terms.
402 |
403 | Additional terms, permissive or non-permissive, may be stated in the
404 | form of a separately written license, or stated as exceptions;
405 | the above requirements apply either way.
406 |
407 | 8. Termination.
408 |
409 | You may not propagate or modify a covered work except as expressly
410 | provided under this License. Any attempt otherwise to propagate or
411 | modify it is void, and will automatically terminate your rights under
412 | this License (including any patent licenses granted under the third
413 | paragraph of section 11).
414 |
415 | However, if you cease all violation of this License, then your
416 | license from a particular copyright holder is reinstated (a)
417 | provisionally, unless and until the copyright holder explicitly and
418 | finally terminates your license, and (b) permanently, if the copyright
419 | holder fails to notify you of the violation by some reasonable means
420 | prior to 60 days after the cessation.
421 |
422 | Moreover, your license from a particular copyright holder is
423 | reinstated permanently if the copyright holder notifies you of the
424 | violation by some reasonable means, this is the first time you have
425 | received notice of violation of this License (for any work) from that
426 | copyright holder, and you cure the violation prior to 30 days after
427 | your receipt of the notice.
428 |
429 | Termination of your rights under this section does not terminate the
430 | licenses of parties who have received copies or rights from you under
431 | this License. If your rights have been terminated and not permanently
432 | reinstated, you do not qualify to receive new licenses for the same
433 | material under section 10.
434 |
435 | 9. Acceptance Not Required for Having Copies.
436 |
437 | You are not required to accept this License in order to receive or
438 | run a copy of the Program. Ancillary propagation of a covered work
439 | occurring solely as a consequence of using peer-to-peer transmission
440 | to receive a copy likewise does not require acceptance. However,
441 | nothing other than this License grants you permission to propagate or
442 | modify any covered work. These actions infringe copyright if you do
443 | not accept this License. Therefore, by modifying or propagating a
444 | covered work, you indicate your acceptance of this License to do so.
445 |
446 | 10. Automatic Licensing of Downstream Recipients.
447 |
448 | Each time you convey a covered work, the recipient automatically
449 | receives a license from the original licensors, to run, modify and
450 | propagate that work, subject to this License. You are not responsible
451 | for enforcing compliance by third parties with this License.
452 |
453 | An "entity transaction" is a transaction transferring control of an
454 | organization, or substantially all assets of one, or subdividing an
455 | organization, or merging organizations. If propagation of a covered
456 | work results from an entity transaction, each party to that
457 | transaction who receives a copy of the work also receives whatever
458 | licenses to the work the party's predecessor in interest had or could
459 | give under the previous paragraph, plus a right to possession of the
460 | Corresponding Source of the work from the predecessor in interest, if
461 | the predecessor has it or can get it with reasonable efforts.
462 |
463 | You may not impose any further restrictions on the exercise of the
464 | rights granted or affirmed under this License. For example, you may
465 | not impose a license fee, royalty, or other charge for exercise of
466 | rights granted under this License, and you may not initiate litigation
467 | (including a cross-claim or counterclaim in a lawsuit) alleging that
468 | any patent claim is infringed by making, using, selling, offering for
469 | sale, or importing the Program or any portion of it.
470 |
471 | 11. Patents.
472 |
473 | A "contributor" is a copyright holder who authorizes use under this
474 | License of the Program or a work on which the Program is based. The
475 | work thus licensed is called the contributor's "contributor version".
476 |
477 | A contributor's "essential patent claims" are all patent claims
478 | owned or controlled by the contributor, whether already acquired or
479 | hereafter acquired, that would be infringed by some manner, permitted
480 | by this License, of making, using, or selling its contributor version,
481 | but do not include claims that would be infringed only as a
482 | consequence of further modification of the contributor version. For
483 | purposes of this definition, "control" includes the right to grant
484 | patent sublicenses in a manner consistent with the requirements of
485 | this License.
486 |
487 | Each contributor grants you a non-exclusive, worldwide, royalty-free
488 | patent license under the contributor's essential patent claims, to
489 | make, use, sell, offer for sale, import and otherwise run, modify and
490 | propagate the contents of its contributor version.
491 |
492 | In the following three paragraphs, a "patent license" is any express
493 | agreement or commitment, however denominated, not to enforce a patent
494 | (such as an express permission to practice a patent or covenant not to
495 | sue for patent infringement). To "grant" such a patent license to a
496 | party means to make such an agreement or commitment not to enforce a
497 | patent against the party.
498 |
499 | If you convey a covered work, knowingly relying on a patent license,
500 | and the Corresponding Source of the work is not available for anyone
501 | to copy, free of charge and under the terms of this License, through a
502 | publicly available network server or other readily accessible means,
503 | then you must either (1) cause the Corresponding Source to be so
504 | available, or (2) arrange to deprive yourself of the benefit of the
505 | patent license for this particular work, or (3) arrange, in a manner
506 | consistent with the requirements of this License, to extend the patent
507 | license to downstream recipients. "Knowingly relying" means you have
508 | actual knowledge that, but for the patent license, your conveying the
509 | covered work in a country, or your recipient's use of the covered work
510 | in a country, would infringe one or more identifiable patents in that
511 | country that you have reason to believe are valid.
512 |
513 | If, pursuant to or in connection with a single transaction or
514 | arrangement, you convey, or propagate by procuring conveyance of, a
515 | covered work, and grant a patent license to some of the parties
516 | receiving the covered work authorizing them to use, propagate, modify
517 | or convey a specific copy of the covered work, then the patent license
518 | you grant is automatically extended to all recipients of the covered
519 | work and works based on it.
520 |
521 | A patent license is "discriminatory" if it does not include within
522 | the scope of its coverage, prohibits the exercise of, or is
523 | conditioned on the non-exercise of one or more of the rights that are
524 | specifically granted under this License. You may not convey a covered
525 | work if you are a party to an arrangement with a third party that is
526 | in the business of distributing software, under which you make payment
527 | to the third party based on the extent of your activity of conveying
528 | the work, and under which the third party grants, to any of the
529 | parties who would receive the covered work from you, a discriminatory
530 | patent license (a) in connection with copies of the covered work
531 | conveyed by you (or copies made from those copies), or (b) primarily
532 | for and in connection with specific products or compilations that
533 | contain the covered work, unless you entered into that arrangement,
534 | or that patent license was granted, prior to 28 March 2007.
535 |
536 | Nothing in this License shall be construed as excluding or limiting
537 | any implied license or other defenses to infringement that may
538 | otherwise be available to you under applicable patent law.
539 |
540 | 12. No Surrender of Others' Freedom.
541 |
542 | If conditions are imposed on you (whether by court order, agreement or
543 | otherwise) that contradict the conditions of this License, they do not
544 | excuse you from the conditions of this License. If you cannot convey a
545 | covered work so as to satisfy simultaneously your obligations under this
546 | License and any other pertinent obligations, then as a consequence you may
547 | not convey it at all. For example, if you agree to terms that obligate you
548 | to collect a royalty for further conveying from those to whom you convey
549 | the Program, the only way you could satisfy both those terms and this
550 | License would be to refrain entirely from conveying the Program.
551 |
552 | 13. Use with the GNU Affero General Public License.
553 |
554 | Notwithstanding any other provision of this License, you have
555 | permission to link or combine any covered work with a work licensed
556 | under version 3 of the GNU Affero General Public License into a single
557 | combined work, and to convey the resulting work. The terms of this
558 | License will continue to apply to the part which is the covered work,
559 | but the special requirements of the GNU Affero General Public License,
560 | section 13, concerning interaction through a network will apply to the
561 | combination as such.
562 |
563 | 14. Revised Versions of this License.
564 |
565 | The Free Software Foundation may publish revised and/or new versions of
566 | the GNU General Public License from time to time. Such new versions will
567 | be similar in spirit to the present version, but may differ in detail to
568 | address new problems or concerns.
569 |
570 | Each version is given a distinguishing version number. If the
571 | Program specifies that a certain numbered version of the GNU General
572 | Public License "or any later version" applies to it, you have the
573 | option of following the terms and conditions either of that numbered
574 | version or of any later version published by the Free Software
575 | Foundation. If the Program does not specify a version number of the
576 | GNU General Public License, you may choose any version ever published
577 | by the Free Software Foundation.
578 |
579 | If the Program specifies that a proxy can decide which future
580 | versions of the GNU General Public License can be used, that proxy's
581 | public statement of acceptance of a version permanently authorizes you
582 | to choose that version for the Program.
583 |
584 | Later license versions may give you additional or different
585 | permissions. However, no additional obligations are imposed on any
586 | author or copyright holder as a result of your choosing to follow a
587 | later version.
588 |
589 | 15. Disclaimer of Warranty.
590 |
591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
599 |
600 | 16. Limitation of Liability.
601 |
602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
610 | SUCH DAMAGES.
611 |
612 | 17. Interpretation of Sections 15 and 16.
613 |
614 | If the disclaimer of warranty and limitation of liability provided
615 | above cannot be given local legal effect according to their terms,
616 | reviewing courts shall apply local law that most closely approximates
617 | an absolute waiver of all civil liability in connection with the
618 | Program, unless a warranty or assumption of liability accompanies a
619 | copy of the Program in return for a fee.
620 |
621 | END OF TERMS AND CONDITIONS
622 |
623 | How to Apply These Terms to Your New Programs
624 |
625 | If you develop a new program, and you want it to be of the greatest
626 | possible use to the public, the best way to achieve this is to make it
627 | free software which everyone can redistribute and change under these terms.
628 |
629 | To do so, attach the following notices to the program. It is safest
630 | to attach them to the start of each source file to most effectively
631 | state the exclusion of warranty; and each file should have at least
632 | the "copyright" line and a pointer to where the full notice is found.
633 |
634 |
635 |     Copyright (C) <year>  <name of author>
636 |
637 | This program is free software: you can redistribute it and/or modify
638 | it under the terms of the GNU General Public License as published by
639 | the Free Software Foundation, either version 3 of the License, or
640 | (at your option) any later version.
641 |
642 | This program is distributed in the hope that it will be useful,
643 | but WITHOUT ANY WARRANTY; without even the implied warranty of
644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
645 | GNU General Public License for more details.
646 |
647 | You should have received a copy of the GNU General Public License
648 |     along with this program. If not, see <https://www.gnu.org/licenses/>.
649 |
650 | Also add information on how to contact you by electronic and paper mail.
651 |
652 | If the program does terminal interaction, make it output a short
653 | notice like this when it starts in an interactive mode:
654 |
655 |     <program>  Copyright (C) <year>  <name of author>
656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
657 | This is free software, and you are welcome to redistribute it
658 | under certain conditions; type `show c' for details.
659 |
660 | The hypothetical commands `show w' and `show c' should show the appropriate
661 | parts of the General Public License. Of course, your program's commands
662 | might be different; for a GUI interface, you would use an "about box".
663 |
664 | You should also get your employer (if you work as a programmer) or school,
665 | if any, to sign a "copyright disclaimer" for the program, if necessary.
666 | For more information on this, and how to apply and follow the GNU GPL, see
667 | <https://www.gnu.org/licenses/>.
668 |
669 | The GNU General Public License does not permit incorporating your program
670 | into proprietary programs. If your program is a subroutine library, you
671 | may consider it more useful to permit linking proprietary applications with
672 | the library. If this is what you want to do, use the GNU Lesser General
673 | Public License instead of this License. But first, please read
674 | <https://www.gnu.org/licenses/why-not-lgpl.html>.
675 |
--------------------------------------------------------------------------------
/FFST/__init__.py:
--------------------------------------------------------------------------------
from __future__ import absolute_import

from ._scalesShearsAndSpectra import scalesShearsAndSpectra
from ._shearletTransformSpect import shearletTransformSpect
from ._inverseShearletTransformSpect import inverseShearletTransformSpect

# numpy.testing.Tester was deprecated long ago and removed in recent NumPy
# releases; importing it unconditionally makes the whole package fail to
# import on modern NumPy.  Keep `test`/`bench` when available, degrade
# gracefully otherwise.
try:
    from numpy.testing import Tester
    test = Tester().test
    bench = Tester().bench
    del Tester
except ImportError:  # modern NumPy: Tester no longer exists
    test = None
    bench = None

from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
14 |
--------------------------------------------------------------------------------
/FFST/_fft.py:
--------------------------------------------------------------------------------
1 | """
2 | Try getting FFTs from pyFFTW, mklfft, numpy in that order of preference.
3 | """
4 |
5 | from __future__ import division, absolute_import, print_function
6 |
7 | import numpy as np
8 | import warnings
9 |
10 |
# Backend selection: prefer pyFFTW (multithreaded FFTW) if importable,
# else mklfft (Intel MKL FFT wrappers), else fall back to numpy.fft.
# All three expose numpy-compatible signatures under the same names.
try:
    import multiprocessing
    import pyfftw
    from functools import partial

    has_pyfftw = True

    # one FFTW thread per CPU core
    pyfftw_threads = multiprocessing.cpu_count()
    pyfftw_planner_effort = 'FFTW_MEASURE'

    # partial() pre-binds the planner/thread options so callers can use
    # these exactly like the numpy.fft functions of the same name
    fft2 = partial(pyfftw.interfaces.numpy_fft.fft2,
                   planner_effort=pyfftw_planner_effort,
                   threads=pyfftw_threads)
    ifft2 = partial(pyfftw.interfaces.numpy_fft.ifft2,
                    planner_effort=pyfftw_planner_effort,
                    threads=pyfftw_threads)
    fft = partial(pyfftw.interfaces.numpy_fft.fft,
                  planner_effort=pyfftw_planner_effort,
                  threads=pyfftw_threads)
    ifft = partial(pyfftw.interfaces.numpy_fft.ifft,
                   planner_effort=pyfftw_planner_effort,
                   threads=pyfftw_threads)
    fftn = partial(pyfftw.interfaces.numpy_fft.fftn,
                   planner_effort=pyfftw_planner_effort,
                   threads=pyfftw_threads)
    ifftn = partial(pyfftw.interfaces.numpy_fft.ifftn,
                    planner_effort=pyfftw_planner_effort,
                    threads=pyfftw_threads)
    # shift/freq helpers take no planner options
    fftshift = pyfftw.interfaces.numpy_fft.fftshift
    ifftshift = pyfftw.interfaces.numpy_fft.ifftshift
    fftfreq = pyfftw.interfaces.numpy_fft.fftfreq

    # Turn on the cache for optimum performance
    pyfftw.interfaces.cache.enable()

    # increase cache preservation time from default of 0.1 seconds
    pyfftw.interfaces.cache.set_keepalive_time(5)

except ImportError as e:
    has_pyfftw = False
    try:
        warnings.warn("pyFFTW not found. will try to use mklfft instead.")
        import mklfft
        fft = mklfft.fftpack.fft
        ifft = mklfft.fftpack.ifft
        fft2 = mklfft.fftpack.fft2
        ifft2 = mklfft.fftpack.ifft2
        fftn = mklfft.fftpack.fftn
        ifftn = mklfft.fftpack.ifftn
        # shift/freq helpers come from numpy regardless of the FFT backend
        fftshift = np.fft.fftshift
        ifftshift = np.fft.ifftshift
        fftfreq = np.fft.fftfreq
    except ImportError as e:
        warnings.warn("neither pyFFTW or mklfft found. will use numpy.fft.")
        # Numpy's n-dimensional FFT routines may be using MKL, so prefered
        # over scipy
        fft = np.fft.fft
        ifft = np.fft.ifft
        fft2 = np.fft.fft2
        ifft2 = np.fft.ifft2
        fftn = np.fft.fftn
        ifftn = np.fft.ifftn
        fftshift = np.fft.fftshift
        ifftshift = np.fft.ifftshift
        fftfreq = np.fft.fftfreq
76 |
77 |
# public API; fftnc/ifftnc are the centered transforms defined below
__all__ = ['fft', 'fft2', 'fftn', 'fftshift', 'fftfreq',
           'ifft', 'ifft2', 'ifftn', 'ifftshift',
           'fftnc', 'ifftnc', 'has_pyfftw']
if has_pyfftw:
    # the following functions are PyFFTW dependent
    __all__ += ['build_fftn', 'build_ifftn', 'pyfftw_threads']
84 |
85 |
86 | # centered versions of fftn for convenience
def fftnc(a, s=None, axes=None, pre_shift_axes=None, post_shift_axes=None):
    """Centered n-dimensional FFT.

    Un-centers ``a`` (ifftshift over ``pre_shift_axes``), applies ``fftn``
    over ``axes`` with output shape ``s``, and re-centers the result
    (fftshift over ``post_shift_axes``).
    """
    shifted_in = ifftshift(a, axes=pre_shift_axes)
    spectrum = fftn(shifted_in, s=s, axes=axes)
    return fftshift(spectrum, axes=post_shift_axes)
91 |
92 |
def ifftnc(a, s=None, axes=None, pre_shift_axes=None, post_shift_axes=None):
    """Centered n-dimensional inverse FFT.

    Mirror of :func:`fftnc` using ``ifftn``: ifftshift, inverse transform,
    then fftshift, with independently controllable shift axes.
    """
    shifted_in = ifftshift(a, axes=pre_shift_axes)
    img = ifftn(shifted_in, s=s, axes=axes)
    return fftshift(img, axes=post_shift_axes)
97 |
98 |
if has_pyfftw:
    def build_fftn(a, fft_axes=None, threads=pyfftw_threads,
                   overwrite_input=False, planner_effort=pyfftw_planner_effort,
                   **kwargs):
        """Pre-plan a forward n-dimensional FFT of ``a`` with pyfftw.

        Returns the FFTW builder object; calling it executes the planned
        transform.  Extra keyword arguments are forwarded to
        ``pyfftw.builders.fftn``.
        """
        if not has_pyfftw:
            # defensive guard (only reachable if has_pyfftw is rebound);
            # the message previously named a nonexistent "plan_fftn"
            raise ValueError("pyfftw is required by build_fftn")

        return pyfftw.builders.fftn(a,
                                    axes=fft_axes,
                                    threads=threads,
                                    overwrite_input=overwrite_input,
                                    planner_effort=planner_effort,
                                    **kwargs)

    def build_ifftn(a, fft_axes=None, threads=pyfftw_threads,
                    overwrite_input=False,
                    planner_effort=pyfftw_planner_effort, **kwargs):
        """Pre-plan an inverse n-dimensional FFT of ``a`` with pyfftw.

        Returns the FFTW builder object; calling it executes the planned
        transform.  Extra keyword arguments are forwarded to
        ``pyfftw.builders.ifftn``.
        """
        if not has_pyfftw:
            # defensive guard, see build_fftn
            raise ValueError("pyfftw is required by build_ifftn")

        return pyfftw.builders.ifftn(a,
                                     axes=fft_axes,
                                     threads=threads,
                                     overwrite_input=overwrite_input,
                                     planner_effort=planner_effort,
                                     **kwargs)
125 |
--------------------------------------------------------------------------------
/FFST/_inverseShearletTransformSpect.py:
--------------------------------------------------------------------------------
1 | from __future__ import division, print_function, absolute_import
2 |
3 | import numpy as np
4 |
5 | from .meyerShearlet import (meyerShearletSpect, meyeraux)
6 | from ._scalesShearsAndSpectra import scalesShearsAndSpectra
7 | from ._fft import fftshift, ifftshift, fftn, ifftn
8 |
9 |
def inverseShearletTransformSpect(ST, Psi=None, maxScale='max',
                                  shearletSpect=meyerShearletSpect,
                                  shearletArg=meyeraux):
    """Compute inverse shearlet transform.

    If the shearlet spectra, Psi, are not given they are computed using
    parameters guessed from the coefficients.

    Parameters
    ----------
    ST : array (3d)
        shearlet transform
    Psi : array (3d), optional
        3d spectrum of shearlets
    maxScale : {'min', 'max'}
        maximal or minimal finest scale
    shearletSpect : {meyerShearletSpect, meyerSmoothShearletSpect}
        shearlet spectrum to use
    shearletArg : function
        auxiliary function for the shearlet

    Returns
    -------
    A : array (2d)
        reconstructed image

    """

    if Psi is None:
        # infer the number of scales from the number of coefficient planes:
        # planes = 1 (lowpass) + sum_j 2^(j+2) = 4*(2^numOfScales - 1) + 1,
        # so (planes - 1)/4 + 1 == 2^numOfScales
        numOfScales = int(np.log2((ST.shape[-1] - 1) / 4 + 1))

        # guessed defaults: real shearlets, truly-real output
        realCoefficients = True
        realReal = True

        # compute spectra.  Previously maxScale/shearletSpect/shearletArg
        # were ignored here (the Meyer shearlet was hard-coded); forward
        # them so the keyword arguments actually take effect.
        Psi = scalesShearsAndSpectra((ST.shape[0], ST.shape[1]),
                                     numOfScales=numOfScales,
                                     realCoefficients=realCoefficients,
                                     realReal=realReal,
                                     maxScale=maxScale,
                                     shearletSpect=shearletSpect,
                                     shearletArg=shearletArg)

    # inverse shearlet transform: multiply each coefficient plane's spectrum
    # with its (unshifted) shearlet spectrum, sum over planes, inverse FFT
    A = fftn(ST, axes=(0, 1)) * Psi
    A = A.sum(axis=-1)
    A = ifftn(A)

    # real input -> the imaginary part is only round-off; drop it
    if np.isrealobj(ST):
        A = A.real

    return A
75 |
--------------------------------------------------------------------------------
/FFST/_scalesShearsAndSpectra.py:
--------------------------------------------------------------------------------
1 | from __future__ import division, print_function, absolute_import
2 |
3 | import numpy as np
4 | import warnings
5 | from .meyerShearlet import meyerShearletSpect, meyeraux
6 |
7 |
8 | def _defaultNumberOfScales(l):
9 | numOfScales = int(np.floor(0.5 * np.log2(np.max(l))))
10 | if numOfScales < 1:
11 | raise ValueError('image too small!')
12 | return numOfScales
13 |
14 |
def scalesShearsAndSpectra(shape, numOfScales=None,
                           realCoefficients=True, maxScale='max',
                           shearletSpect=meyerShearletSpect,
                           shearletArg=meyeraux, realReal=True,
                           fftshift_spectra=True):
    """ Compute the shearlet spectra of a given shape and number of scales.

    The number of scales and a boolean indicating real or complex shearlets
    are optional parameters.

    Parameters
    ----------
    shape : array-like
        dimensions of the image
    numOfScales : int
        number of scales
    realCoefficients : bool
        Controls whether real or complex shearlets are generated.
    shearletSpect : string or handle
        shearlet spectrum
    shearletArg : function
        auxiliary argument forwarded to ``shearletSpect``
    realReal : bool
        guarantee truly real shearlets
    maxScale : {'max', 'min'}, optional
        maximal or minimal finest scale
    fftshift_spectra : bool, optional
        if True, return the spectra with zero frequency at the corner
        (``np.fft.ifftshift`` applied over the first two axes)

    Returns
    -------
    Psi : ndarray
        Shearlets in the Fourier domain.
    """
    if len(shape) != 2:
        raise ValueError("2D image dimensions required")

    if numOfScales is None:
        numOfScales = _defaultNumberOfScales(shape)

    # rectangular images
    rectangular = (shape[1] != shape[0])

    # for better symmetry each dimension of the array should be odd
    shape = np.asarray(shape)
    shape_orig = shape.copy()
    # NOTE: the previous local names both_even/both_odd were swapped
    # relative to their meaning; harmless (only used symmetrically) but
    # renamed here for clarity.
    axis_is_even = np.mod(shape, 2) == 0  # True for even-sized axes
    both_even = np.all(axis_is_even)
    both_odd = not np.any(axis_is_even)
    shape[axis_is_even] += 1  # bump even dimensions to the next odd size

    if not realCoefficients:
        warnings.warn("Complex shearlet case may be buggy. Doesn't "
                      "currently give perfect reconstruction.")

    if not (both_even or both_odd):
        # for some reason reconstruction is not exact in this case, so don't
        # allow it for now.
        raise ValueError("Mixture of odd and even axis sizes is currently "
                         "unsupported.")

    # create meshgrid
    # largest value where psi_1 is equal to 1
    maxScale = maxScale.lower()
    if maxScale == 'max':
        X = 2**(2 * (numOfScales - 1) + 1)
    elif maxScale == 'min':
        X = 2**(2 * (numOfScales - 1))
    else:
        raise ValueError('Wrong option for maxScale, must be "max" or "min"')

    xi_x_init = np.linspace(0, X, (shape[1] + 1) // 2)
    xi_x_init = np.concatenate((-xi_x_init[-1:0:-1], xi_x_init), axis=0)
    if rectangular:
        xi_y_init = np.linspace(0, X, (shape[0] + 1) // 2)
        xi_y_init = np.concatenate((-xi_y_init[-1:0:-1], xi_y_init), axis=0)
    else:
        xi_y_init = xi_x_init

    # create grid, from left to right, bottom to top
    [xi_x, xi_y] = np.meshgrid(xi_x_init, xi_y_init[::-1], indexing='xy')

    # cones
    C_hor = np.abs(xi_x) >= np.abs(xi_y)  # with diag
    C_ver = np.abs(xi_x) < np.abs(xi_y)

    # number of shears: |-2^j,...,0,...,2^j| = 2 * 2^j + 1
    # now: inner shears for both cones:
    # |-(2^j-1),...,0,...,2^j-1|
    # = 2 * (2^j - 1) + 1
    # = 2^(j+1) - 2 + 1 = 2^(j+1) - 1
    # outer scales: 2 ("one" for each cone)
    # shears for each scale: hor: 2^(j+1) - 1, ver: 2^(j+1) - 1, diag: 2
    # -> hor + ver + diag = 2*(2^(j+1) - 1) +2 = 2^(j + 2)
    # + 1 for low-pass
    shearsPerScale = 2**(np.arange(numOfScales) + 2)
    numOfAllShears = 1 + shearsPerScale.sum()

    # init
    Psi = np.zeros(tuple(shape) + (numOfAllShears, ))
    # frequency domain:
    # k 2^j 0 -2^j
    #
    # 4 3 2 -2^j
    # \ | /
    # (5)- x -1 0
    # / | \
    # 2^j
    #
    # [0:-1:-2^j][-2^j:1:2^j][2^j:-1:1] (not 0)
    # hor ver hor
    #
    # start with shear -2^j (insert in index 2^j+1 (with transposed
    # added)) then continue with increasing scale. Save to index 2^j+1 +- k,
    # if + k save transposed. If shear 0 is reached save -k starting from
    # the end (thus modulo). For + k just continue.
    #
    # then in time domain:
    #
    # 2 1 8
    # \ | /
    # 3- x -7
    # / | \
    # 4 5 6
    #

    # lowpass (np.nan: the np.NaN alias was removed in NumPy 2.0)
    Psi[:, :, 0] = shearletSpect(xi_x, xi_y, np.nan, np.nan, realCoefficients,
                                 shearletArg, scaling_only=True)

    # loop for each scale
    for j in range(numOfScales):
        # starting index
        idx = 2**j
        start_index = 1 + shearsPerScale[:j].sum()
        shift = 1
        for k in range(-2**j, 2**j + 1):
            # shearlet spectrum
            P_hor = shearletSpect(xi_x, xi_y, 2**(-2 * j), k * 2**(-j),
                                  realCoefficients, shearletArg)
            if rectangular:
                P_ver = shearletSpect(xi_y, xi_x, 2**(-2 * j), k * 2**(-j),
                                      realCoefficients, shearletArg)
            else:
                # the matrix is supposed to be mirrored at the counter
                # diagonal
                # P_ver = fliplr(flipud(P_hor'))
                P_ver = np.rot90(P_hor, 2).T  # TODO: np.conj here too?
                if not realCoefficients:
                    # workaround to cover left-upper part
                    P_ver = np.rot90(P_ver, 2)

            if k == -2**j:
                Psi[:, :, start_index + idx] = P_hor * C_hor + P_ver * C_ver
            elif k == 2**j:
                Psi_idx = start_index + idx + shift
                Psi[:, :, Psi_idx] = P_hor * C_hor + P_ver * C_ver
            else:
                new_pos = np.mod(idx + 1 - shift, shearsPerScale[j]) - 1
                if new_pos == -1:
                    new_pos = shearsPerScale[j] - 1
                Psi[:, :, start_index + new_pos] = P_hor
                Psi[:, :, start_index + idx + shift] = P_ver

            # update shift
            shift += 1

    # generate output with size shape_orig
    Psi = Psi[:shape_orig[0], :shape_orig[1], :]

    # modify spectra at finest scales to obtain really real shearlets
    # the modification has only to be done for dimensions with even length
    if realCoefficients and realReal and (axis_is_even[0] or axis_is_even[1]):
        idx_finest_scale = (1 + np.sum(shearsPerScale[:-1]))
        scale_idx = idx_finest_scale + np.concatenate(
            (np.arange(1, (idx_finest_scale + 1) / 2 + 1),
             np.arange((idx_finest_scale + 1) / 2 + 2, shearsPerScale[-1])),
            axis=0)
        # builtin int: the np.int alias was removed in NumPy 1.24
        scale_idx = scale_idx.astype(int)
        if axis_is_even[0]:  # even number of rows -> modify first row:
            idx = slice(1, shape_orig[1])
            Psi[0, idx, scale_idx] = 1 / np.sqrt(2) * (
                Psi[0, idx, scale_idx] +
                Psi[0, shape_orig[1] - 1:0:-1, scale_idx])
        if axis_is_even[1]:  # even number of columns -> modify first column:
            idx = slice(1, shape_orig[0])
            Psi[idx, 0, scale_idx] = 1 / np.sqrt(2) * (
                Psi[idx, 0, scale_idx] +
                Psi[shape_orig[0] - 1:0:-1, 0, scale_idx])

    if fftshift_spectra:
        # Note: changed to ifftshift so roundtrip tests pass for odd sized
        # arrays
        Psi = np.fft.ifftshift(Psi, axes=(0, 1))
    return Psi
211 |
--------------------------------------------------------------------------------
/FFST/_shearletTransformSpect.py:
--------------------------------------------------------------------------------
1 | from __future__ import division, print_function, absolute_import
2 |
3 | import numpy as np
4 |
5 | from .meyerShearlet import (meyerShearletSpect, meyeraux)
6 |
7 | from ._scalesShearsAndSpectra import scalesShearsAndSpectra
8 | from ._fft import fftshift, ifftshift, fftn, ifftn
9 |
10 |
def shearletTransformSpect(A, Psi=None, numOfScales=None,
                           realCoefficients=True, maxScale='max',
                           shearletSpect=meyerShearletSpect,
                           shearletArg=meyeraux, realReal=True):
    """Compute the forward shearlet transform.

    If the shearlet spectra, Psi, are not given they are computed using
    parameters guessed from the coefficients.

    Parameters
    ----------
    A : array
        image to transform (2d)
    Psi : array (3d), optional
        spectrum of shearlets (3d)
    realCoefficients : bool, optional
        force real-valued coefficients
    maxScale : {'min', 'max'}
        maximal or minimal finest scale
    shearletSpect : {meyerShearletSpect, meyerSmoothShearletSpect}
        shearlet spectrum to use
    shearletArg : function
        auxiliary function for the shearlet
    realReal : bool, optional
        return coefficients with real dtype (truncate minimal imaginary
        component).

    Returns
    -------
    ST : array (3d)
        shearlet coefficients
    Psi : array (3d)
        spectrum of shearlets (3d)

    Notes
    -----
    example usage

    # compute shearlet transform of image A with default parameters
    ST, Psi = shearletTransformSpect(A)

    # compute shearlet transform of image A with precomputed shearlet spectrum
    ST, Psi = shearletTransformSpect(A, Psi)

    # compute sharlet transform of image A with specified number of scales
    ST, Psi = shearletTransformSpect(A, numOfScales=4)

    """
    # parse input
    A = np.asarray(A)
    if (A.ndim != 2) or np.any(np.asarray(A.shape) <= 1):
        raise ValueError("2D image required")

    # compute spectra
    if Psi is None:
        l = A.shape
        if numOfScales is None:
            numOfScales = int(np.floor(0.5 * np.log2(np.max(l))))
            if numOfScales < 1:
                raise ValueError('image too small!')
        # forward the user-supplied options; previously maxScale, realReal,
        # shearletSpect and shearletArg were silently ignored here (the
        # Meyer shearlet was always used), inconsistent with the inverse
        # transform.
        Psi = scalesShearsAndSpectra(l, numOfScales=numOfScales,
                                     realCoefficients=realCoefficients,
                                     maxScale=maxScale,
                                     realReal=realReal,
                                     shearletSpect=shearletSpect,
                                     shearletArg=shearletArg)

    # shearlet transform: multiply the image spectrum with each (unshifted)
    # shearlet spectrum, then inverse-transform every coefficient plane
    uST = Psi * fftn(A)[..., np.newaxis]
    ST = ifftn(uST, axes=(0, 1))

    # due to round-off errors the imaginary part is not zero but very small
    # -> neglect it
    if realCoefficients and realReal and np.isrealobj(A):
        ST = ST.real

    return (ST, Psi)
91 |
--------------------------------------------------------------------------------
/FFST/_version.py:
--------------------------------------------------------------------------------
1 |
2 | # This file helps to compute a version number in source trees obtained from
3 | # git-archive tarball (such as those provided by githubs download-from-tag
4 | # feature). Distribution tarballs (built by setup.py sdist) and build
5 | # directories (produced by setup.py build) will contain a much shorter file
6 | # that just contains the computed version number.
7 |
8 | # This file is released into the public domain. Generated by
9 | # versioneer-0.16 (https://github.com/warner/python-versioneer)
10 |
11 | """Git implementation of _version.py."""
12 |
13 | import errno
14 | import os
15 | import re
16 | import subprocess
17 | import sys
18 |
19 |
def get_keywords():
    """Get the keywords needed to look up the version information."""
    # these strings will be replaced by git during git-archive.
    # setup.py/versioneer.py will grep for the variable names, so they must
    # each be defined on a line of their own. _version.py will just call
    # get_keywords().
    git_refnames = " (HEAD -> master)"
    git_full = "6cdb1396170409dde9f41cf8865e7a9d95113719"
    return {"refnames": git_refnames, "full": git_full}
30 |
31 |
class VersioneerConfig:
    """Container for Versioneer configuration parameters.

    Attributes (VCS, style, tag_prefix, parentdir_prefix,
    versionfile_source, verbose) are assigned by :func:`get_config`.
    """
34 |
35 |
def get_config():
    """Create, populate and return the VersioneerConfig() object."""
    # these strings are filled in when 'setup.py versioneer' creates
    # _version.py
    cfg = VersioneerConfig()
    settings = {
        "VCS": "git",
        "style": "pep440",
        "tag_prefix": "",
        "parentdir_prefix": "FFST-",
        "versionfile_source": "FFST/_version.py",
        "verbose": False,
    }
    for attr, value in settings.items():
        setattr(cfg, attr, value)
    return cfg
48 |
49 |
class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario.

    Raised by version-discovery helpers (e.g. versions_from_parentdir) to
    signal that the caller should try the next discovery strategy.
    """
52 |
53 |
# NOTE(review): LONG_VERSION_PY appears unused within this generated module;
# kept for compatibility with the top-level versioneer.py tooling.
LONG_VERSION_PY = {}
# registry filled by @register_vcs_handler: {vcs_name: {method_name: func}}
HANDLERS = {}
56 |
57 |
def register_vcs_handler(vcs, method):  # decorator
    """Decorator to mark a method as the handler for a particular VCS."""
    def decorate(f):
        """Store f in HANDLERS[vcs][method]."""
        HANDLERS.setdefault(vcs, {})[method] = f
        return f
    return decorate
67 |
68 |
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
    """Call the given command(s)."""
    assert isinstance(commands, list)
    p = None
    # try each candidate executable name in turn (e.g. "git", then "git.cmd")
    for c in commands:
        try:
            dispcmd = str([c] + args)
            # remember shell=False, so use git.cmd on windows, not just git
            p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
                                 stderr=(subprocess.PIPE if hide_stderr
                                         else None))
            break
        except EnvironmentError:
            # py2-compatible way to get the active exception
            e = sys.exc_info()[1]
            if e.errno == errno.ENOENT:
                # this candidate name does not exist -> try the next one
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(e)
            return None
    else:
        # for/else: the loop never hit `break`, no candidate could be run
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None
    stdout = p.communicate()[0].strip()
    # communicate() returns bytes on Python 3; decode to str
    if sys.version_info[0] >= 3:
        stdout = stdout.decode()
    if p.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
        return None
    return stdout
101 |
102 |
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes
    both the project name and a version string.
    """
    dirname = os.path.basename(root)
    if dirname.startswith(parentdir_prefix):
        # everything after the prefix is the version string
        return {"version": dirname[len(parentdir_prefix):],
                "full-revisionid": None,
                "dirty": False, "error": None}
    if verbose:
        print("guessing rootdir is '%s', but '%s' doesn't start with "
              "prefix '%s'" % (root, dirname, parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
118 |
119 |
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file.

    Returns a dict with "refnames" and/or "full" keys (empty if the file
    is unreadable or contains neither assignment).
    """
    # the code embedded in _version.py can just fetch the value of these
    # keywords. When used from setup.py, we don't want to import _version.py,
    # so we do it with a regexp instead. This function is not used from
    # _version.py.
    keywords = {}
    try:
        # use a context manager so the file is closed even if a read
        # raises mid-iteration (the original leaked the handle then)
        with open(versionfile_abs, "r") as f:
            for line in f.readlines():
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["full"] = mo.group(1)
    except EnvironmentError:
        # unreadable/missing file -> return whatever was collected (empty)
        pass
    return keywords
143 |
144 |
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Get version information from git keywords.

    Raises NotThisMethod when the keywords dict is empty or still contains
    the unexpanded "$Format" placeholders (i.e. not a git-archive tarball).
    """
    if not keywords:
        raise NotThisMethod("no keywords at all, weird")
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    # refnames looks like "(HEAD -> master, tag: v1.0, ...)"
    refs = set([r.strip() for r in refnames.strip("()").split(",")])
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = set([r for r in refs if re.search(r'\d', r)])
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs-tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix):]
            if verbose:
                print("picking %s" % r)
            return {"version": r,
                    "full-revisionid": keywords["full"].strip(),
                    "dirty": False, "error": None
                    }
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags"}
189 |
190 |
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.

    Returns a "pieces" dict with keys "long", "short", "error", "dirty",
    and — when git-describe found a tag — "closest-tag" and "distance".
    Raises NotThisMethod when there is no .git directory or git fails.
    """
    if not os.path.exists(os.path.join(root, ".git")):
        if verbose:
            print("no .git in %s" % root)
        raise NotThisMethod("no .git directory")

    GITS = ["git"]
    if sys.platform == "win32":
        # on Windows the git entry point may be a .cmd or .exe wrapper
        GITS = ["git.cmd", "git.exe"]
    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out = run_command(GITS, ["describe", "--tags", "--dirty",
                                      "--always", "--long",
                                      "--match", "%s*" % tag_prefix],
                               cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        # run_command signals failure with None rather than raising
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()

    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None

    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out

    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        # strip the suffix so the TAG-NUM-gHEX parsing below is uniform
        git_describe = git_describe[:git_describe.rindex("-dirty")]

    # now we have TAG-NUM-gHEX or HEX

    if "-" in git_describe:
        # TAG-NUM-gHEX; greedy (.+) lets the TAG itself contain hyphens
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                               % describe_out)
            return pieces

        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                               % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]

        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))

        # commit: short hex revision ID
        pieces["short"] = mo.group(3)

    else:
        # HEX: no tags; distance becomes the total commit count instead
        pieces["closest-tag"] = None
        count_out = run_command(GITS, ["rev-list", "HEAD", "--count"],
                                cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits

    return pieces
273 |
274 |
def plus_or_dot(pieces):
    """Return "+" unless the closest tag already contains one, then ".".

    PEP 440 allows only a single "+" (the local-version separator) in a
    version string; later segments must be joined with ".".
    """
    # pieces.get(..., "") only guards against a *missing* key; "closest-tag"
    # can also be present with value None (no tag found), which would make
    # the `in` test raise TypeError.  `or ""` covers both cases.
    tag = pieces.get("closest-tag") or ""
    if "+" in tag:
        return "."
    return "+"
280 |
281 |
def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]].  A tagged build that is then
    dirtied renders as TAG+0.gHEX.dirty.

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: nothing tagged anywhere in history
        version = "0+untagged.%d.g%s" % (pieces["distance"],
                                         pieces["short"])
        if pieces["dirty"]:
            version += ".dirty"
        return version

    version = tag
    if pieces["distance"] or pieces["dirty"]:
        version += plus_or_dot(pieces)
        version += "%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            version += ".dirty"
    return version
305 |
306 |
def render_pep440_pre(pieces):
    """TAG[.post.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    tag = pieces["closest-tag"]
    distance = pieces["distance"]
    if not tag:
        # exception #1
        return "0.post.dev%d" % distance
    # a clean tagged commit renders as the bare tag
    return tag + (".post.dev%d" % distance if distance else "")
321 |
322 |
def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" marks a dirty tree.  Note that .dev0 sorts backwards
    (a dirty tree appears "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyway.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if tag:
        version = tag
        if pieces["distance"] or pieces["dirty"]:
            version += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                version += ".dev0"
            version += plus_or_dot(pieces)
            version += "g%s" % pieces["short"]
        return version

    # exception #1
    version = "0.post%d" % pieces["distance"]
    if pieces["dirty"]:
        version += ".dev0"
    version += "+g%s" % pieces["short"]
    return version
348 |
349 |
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" marks a dirty tree.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    # a clean tagged commit renders as the bare tag
    if tag and not (pieces["distance"] or pieces["dirty"]):
        return tag
    # exception #1: with no tag at all, the base is "0"
    version = "%s.post%d" % (tag if tag else "0", pieces["distance"])
    if pieces["dirty"]:
        version += ".dev0"
    return version
370 |
371 |
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        # the -DISTANCE-gHEX part is omitted for a clean tagged commit
        suffix = ("-%d-g%s" % (pieces["distance"], pieces["short"])
                  if pieces["distance"] else "")
        version = tag + suffix
    else:
        # exception #1: fall back to the bare short hash
        version = pieces["short"]
    if pieces["dirty"]:
        version += "-dirty"
    return version
390 |
391 |
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash part is emitted unconditionally.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        version = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        # exception #1: fall back to the bare short hash
        version = pieces["short"]
    if pieces["dirty"]:
        version += "-dirty"
    return version
410 |
411 |
def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        # an earlier stage failed to parse; report that instead of
        # inventing a version
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"]}

    if not style or style == "default":
        style = "pep440"  # the default

    # dispatch table instead of an if/elif chain
    renderers = {
        "pep440": render_pep440,
        "pep440-pre": render_pep440_pre,
        "pep440-post": render_pep440_post,
        "pep440-old": render_pep440_old,
        "git-describe": render_git_describe,
        "git-describe-long": render_git_describe_long,
    }
    if style not in renderers:
        raise ValueError("unknown style '%s'" % style)
    rendered = renderers[style](pieces)

    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None}
440 |
441 |
def get_versions():
    """Get version information or return default if unable to do so."""
    # This function lives in _version.py at ROOT/VERSIONFILE_SOURCE.  When
    # __file__ is available we can walk back up to the source root; frozen
    # interpreters (py2exe/bbfreeze/non-CPython) may lack __file__, in which
    # case only the expanded git-archive keywords can help.
    cfg = get_config()
    verbose = cfg.verbose

    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
                                          verbose)
    except NotThisMethod:
        pass

    try:
        root = os.path.realpath(__file__)
        # cfg.versionfile_source is the path of this file relative to the
        # source-tree root (where .git might live); strip one directory
        # level per path component to recover that root.
        for _ in cfg.versionfile_source.split('/'):
            root = os.path.dirname(root)
    except NameError:
        return {"version": "0+unknown", "full-revisionid": None,
                "dirty": None,
                "error": "unable to find root of source tree"}

    try:
        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
        return render(pieces, cfg.style)
    except NotThisMethod:
        pass

    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root,
                                           verbose)
    except NotThisMethod:
        pass

    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None,
            "error": "unable to compute version"}
485 |
--------------------------------------------------------------------------------
/FFST/meyerShearlet.py:
--------------------------------------------------------------------------------
1 | from __future__ import division, print_function, absolute_import
2 |
3 | import numpy as np
4 |
5 |
def meyeraux(x):
    """meyer wavelet auxiliary function.

    v(x) = 35*x^4 - 84*x^5 + 70*x^6 - 20*x^7.

    Parameters
    ----------
    x : array
        grid points

    Returns
    -------
    y : array
        values at x
    """
    # polynomial ramp on [0, 1]; v(0) = 0 and v(1) = 1
    coeffs = [-20, 70, -84, 35, 0, 0, 0, 0]
    on_support = (x >= 0) * (x <= 1)
    # clamp: 0 below the interval, 1 above it
    return np.polyval(coeffs, x) * on_support + (x > 1)
25 |
26 |
def meyerBump(x, meyeraux_func=meyeraux):
    """Auxiliary ramp: meyeraux_func(x) on [0, 1], 1 for x > 1, 0 below."""
    on_unit = (x >= 0) * (x <= 1)
    return meyeraux_func(x) * on_unit + (x > 1)
31 |
32 |
def bump(x, meyeraux_func=meyeraux):
    """compute the function psi_2^ at given points x.

    Parameters
    ----------
    x : array
        grid points
    meyeraux_func : function
        auxiliary function

    Returns
    -------
    y : array
        values at given points x
    """
    # mirror the ramp around 0: rising branch for x <= 0, falling for x > 0
    left = meyerBump(1 + x, meyeraux_func) * (x <= 0)
    right = meyerBump(1 - x, meyeraux_func) * (x > 0)
    return np.sqrt(left + right)
52 |
53 |
def meyerScaling(x, meyeraux_func=meyeraux):
    """mother scaling function for the meyer shearlet.

    Parameters
    ----------
    x : array
        grid points
    meyeraux_func : function
        auxiliary function

    Returns
    -------
    phihat : array
        values of the Fourier transform of phi at the points x

    """
    xa = np.abs(x)

    # support of the Fourier transform of phi:
    # flat passband on |x| < 1/2, smooth cosine roll-off on [1/2, 1)
    passband = xa < 1/2
    rolloff = (xa >= 1/2) & (xa < 1)

    phihat = passband + rolloff * np.cos(np.pi/2*meyeraux_func(2*xa-1))
    return phihat
82 |
83 |
def _meyerHelper(x, realCoefficients=True, meyeraux_func=meyeraux):
    """One frequency band of the Meyer wavelet spectrum (internal helper)."""
    if realCoefficients:
        xa = np.abs(x)
    else:
        # complex case: consider left and upper part of the image due to
        # first row and column
        xa = -x

    rising = (xa >= 1) & (xa < 2)
    falling = (xa >= 2) & (xa < 4)

    return (rising * np.sin(np.pi/2*meyeraux_func(xa-1)) +
            falling * np.cos(np.pi/2*meyeraux_func(1/2*xa-1)))
99 |
100 |
def meyerWavelet(x, realCoefficients=True, meyeraux_func=meyeraux):
    """compute the Meyer wavelet.

    Combines two adjacent frequency bands (at x and 2*x) so that
    neighbouring scales overlap smoothly.

    Parameters
    ----------
    x : array
        grid points

    Returns
    -------
    y : array
        values at given points x

    """
    lo = _meyerHelper(x, realCoefficients, meyeraux_func)
    hi = _meyerHelper(2*x, realCoefficients, meyeraux_func)
    return np.sqrt(np.abs(lo)**2 + np.abs(hi)**2)
118 |
119 |
def meyerShearletSpect(x, y, a, s, realCoefficients=True,
                       meyeraux_func=meyeraux, scaling_only=False):
    """Returns the spectrum of the shearlet "meyerShearlet".

    Parameters
    ----------
    x : array
        meshgrid for the x-axis
    y : array
        meshgrid for the y-axis
    a : float
        scale
    s : float
        shear
    realCoefficients : bool, optional
        enforce real-valued coefficients
    meyeraux_func : function
        auxiliary function
    scaling_only : bool, optional
        return the scalings instead of computing the spectrum

    Returns
    -------
    Psi : array
        The shearlet spectrum

    """
    if scaling_only:
        # low-pass part: scaling function on each of the two frequency
        # cones (horizontal cone includes the diagonal)
        horizontal = np.abs(x) >= np.abs(y)
        vertical = np.abs(x) < np.abs(y)
        return (meyerScaling(x, meyeraux_func) * horizontal +
                meyerScaling(y, meyeraux_func) * vertical)

    # apply scale a and shear s in the Fourier domain; y must be updated
    # first since it depends on the unscaled x
    y = s * np.sqrt(a) * x + np.sqrt(a) * y
    x = a * x

    # replace zeros in x by 1 so the division below is well defined
    safe_x = (np.abs(x) == 0) + (np.abs(x) > 0)*x

    # compute spectrum
    return (meyerWavelet(x, realCoefficients, meyeraux_func) *
            bump(y/safe_x, meyeraux_func))
166 |
167 |
def meyerSmoothShearletSpect(x, y, a, s, realCoefficients=True,
                             meyeraux_func=meyeraux, scaling_only=False):
    """Returns the spectrum of the smooth variant of the Meyer shearlet.

    Parameters
    ----------
    x : array
        meshgrid for the x-axis
    y : array
        meshgrid for the y-axis
    a : float
        scale
    s : float
        shear
    realCoefficients : bool, optional
        enforce real-valued coefficients
    meyeraux_func : function
        auxiliary function
    scaling_only : bool, optional
        return the scalings instead of computing the spectrum

    Returns
    -------
    Psi : array
        The shearlet spectrum

    Notes
    -----
    construction based on ideas from [1]_.

    References
    ----------
    .. [1] Kanghui Guo, and Demetrio Labate.
        The construction of smooth Parseval frames of shearlets.
        Mathematical Modelling of Natural Phenomena 8.01 (2013): 82-105.

    """
    if scaling_only:
        # tensor-product scaling function
        return meyerScaling(x, meyeraux_func) * meyerScaling(y, meyeraux_func)

    if not realCoefficients:
        raise ValueError('Complex shearlets not supported for smooth Meyer '
                         'shearlets!')

    # apply scale a and shear s in the Fourier domain; the sheared
    # coordinate uses the unscaled x and y
    sheared = s * np.sqrt(a) * x + np.sqrt(a) * y
    y = a * y
    x = a * x

    # window W: difference of two tensor-product scaling functions
    outer = (meyerScaling(2**(-2)*x, meyeraux_func) *
             meyerScaling(2**(-2)*y, meyeraux_func))
    inner = (meyerScaling(x, meyeraux_func) *
             meyerScaling(y, meyeraux_func))
    W = np.sqrt(outer**2 - inner**2)

    Psi = W * bump(sheared/x, meyeraux_func)

    # the division by zero above produces NaNs; the spectrum is zero there
    Psi[np.isnan(Psi)] = 0
    return Psi
231 |
--------------------------------------------------------------------------------
/FFST/shearletScaleShear.py:
--------------------------------------------------------------------------------
1 | from __future__ import division, print_function, absolute_import
2 |
3 | import numpy as np
4 |
5 |
def _jk2index(j, k, cone):
    """helper function, compute index from j, k and cone.

    Maps scale ``j`` (>= 0), shear ``k`` (-2**j <= k <= 2**j) and cone
    label (``'h'``, ``'v'``, ``'x'`` or ``'0'``) to the linear layer index
    used along the 3rd axis of the shearlet stack.  Inverse of
    ``_index2jk``.

    NOTE(review): for the lowpass case (j and k NaN, cone == '0') this
    returns None rather than an integer index — confirm that callers
    handle the lowpass layer specially.
    """
    # lowpass
    index = 1
    if np.isnan(j) and np.isnan(k) and cone == '0':
        return None
    else:
        # sum of lower scales: scale jj contributes 2**(jj+2) shears
        index = index + np.sum(2**(2+np.arange(j)))

    # get detail index from shear (and cone!)
    if cone == 'h':
        # horizontal cone: k = 0..-2**j counted forwards; positive k wraps
        # around from the end of this scale's block
        if k <= 0:
            index = index - k
        else:
            index = index + 4*2**j - k
    elif cone == 'v':
        # vertical cone occupies the middle of the block
        index = index + 2**j + (k + 2**j)
    elif cone == 'x':
        # seam between the cones (k = +-2**j); np.sign selects which side
        # TODO: if k can be complex, will need to fix this
        index = index + (2 + np.sign(k)) * 2**j

    # slight adjustment ( k=0 <=> index = 1)
    index += 1

    return index
32 |
33 |
34 | def _index2jk(index):
35 | """helper function: compute j, k and cone from index. """
36 | if index <= 1: # lowpass, j and k not needed:
37 | j = np.NaN
38 | k = np.NaN
39 | cone = '0'
40 | else:
41 | # substract 1 for the lowpass
42 | index = index - 1
43 |
44 | # determine scale j
45 | # substract number of shears in each scale:
46 | # 2**(j+0), 2**(j+1), 2**(j+2),
47 | j = 0
48 | while index > 2**(2 + j):
49 | index = index - 2**(j+2)
50 | j = j + 1
51 |
52 | # shift to zero (first index <=> k=0)
53 | index = index - 1
54 |
55 | # determine cone
56 | # index | 0 1 ... 2**j ... 2*2**j ... 3*2**j ... 4*2**j -1
57 | # k | 0 -1 -2**j 0 2**j 1
58 | # cone | h ... h x v ... v ... v x h ... h
59 | index2 = index / 2**j
60 |
61 | if index2 < 1:
62 | k = -index
63 | cone = 'h'
64 | elif index2 == 1:
65 | k = -2**j
66 | cone = 'x'
67 | elif index2 < 3:
68 | k = index - 2*2**j
69 | cone = 'v'
70 | elif index2 == 3:
71 | k = 2**j
72 | cone = 'x'
73 | else:
74 | k = -(index - 4*2**j)
75 | cone = 'h'
76 |
77 | return (j, k, cone)
78 |
79 |
def shearletScaleShear(a, b=None, c=None, d=None):
    """Convert between shearlet index and (scale j, shear k, cone).

    Matlab-style polymorphic helper; the meaning of the positional
    arguments depends on how many are given:

    * ``shearletScaleShear(index)`` -> ``(j, k, cone)``
      scale ``j`` (>= 0), shear ``k`` (-2**j <= k <= 2**j) and cone label
      (one of ``'h'``, ``'v'``, ``'x'``, ``'0'``) for a linear index.
    * ``shearletScaleShear(ST, index)`` -> ``ST[:, :, index]``
      the layer of the shearlet (coefficient) stack for a linear index.
    * ``shearletScaleShear(j, k, cone)`` -> ``[index]``
      the linear index for a scale/shear/cone triple.
    * ``shearletScaleShear(ST, j, k, cone)`` -> ``ST[:, :, index]``
      the layer of the stack for a scale/shear/cone triple.
    """
    # display informations (debug aid, off by default)
    disp = False

    def _show(index, j, k, cone):
        # print the (j, k, cone) interpretation of an index; shared by all
        # four branches instead of four duplicated print blocks
        print('index %d represents:\n' % index)
        print('scale j: %d (a = %.4f)\n' % (j, 4**(-j)))
        print('shear k: %d (s = %.4f)\n' % (k, 2**(-j)*k))
        # fixed: the original passed `cone` as a second print() argument
        # instead of %-formatting it into the string
        print('cone : %s\n' % cone)

    if b is None:
        # compute j and k from index
        index = a
        (j, k, cone) = _index2jk(index)
        varargout = (j, k, cone)
    elif c is None:
        # return data for index
        ST = a
        index = b
        varargout = ST[:, :, index]
        (j, k, cone) = _index2jk(index)
    elif d is None:
        # compute index from j and k and cone
        j, k, cone = a, b, c
        index = _jk2index(j, k, cone)
        varargout = [index]
    else:
        # return data for j and k and cone
        ST, j, k, cone = a, b, c, d
        index = _jk2index(j, k, cone)
        varargout = ST[:, :, index]

    if disp:
        _show(index, j, k, cone)

    return varargout
185 |
--------------------------------------------------------------------------------
/FFST/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/grlee77/PyShearlets/6cdb1396170409dde9f41cf8865e7a9d95113719/FFST/tests/__init__.py
--------------------------------------------------------------------------------
/FFST/tests/test_ffst.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from numpy.testing import (assert_,
4 | assert_raises,
5 | assert_equal,
6 | run_module_suite)
7 |
8 | from FFST import (scalesShearsAndSpectra,
9 | inverseShearletTransformSpect,
10 | shearletTransformSpect)
11 |
12 |
def test_tight_frame():
    """Spectra must form a Parseval (tight) frame for supported shapes."""
    # even square shapes
    for shape in [(32, 32), (64, 64), (128, 128), (256, 256)]:
        Psi = scalesShearsAndSpectra(shape)
        # FFT-shifted spectra have value 1 at the corner
        assert_equal(Psi[0, 0, 0], 1)
        # sum along last axis = 1 everywhere if it is a tight frame
        assert_(np.max(1 - np.sum(Psi**2, -1)) < 1e-14)

    # odd square shapes
    for shape in [(65, 65), (125, 125)]:
        Psi = scalesShearsAndSpectra(shape)
        assert_equal(Psi[0, 0, 0], 1)
        assert_(np.max(1 - np.sum(Psi**2, -1)) < 1e-14)

    # mixed odd/even shapes are currently rejected
    for shape in [(64, 65), (65, 64)]:
        assert_raises(ValueError, scalesShearsAndSpectra, shape)

    # non-default option combinations
    for shape in [(32, 32), (33, 33)]:
        for maxScale in ['max', 'min']:
            for realReal in [True, False]:
                Psi = scalesShearsAndSpectra(shape, maxScale=maxScale,
                                             realReal=realReal)
                assert_(np.max(1 - np.sum(Psi**2, -1)) < 1e-14)
39 |
40 |
def test_perfect_recon():
    """Forward followed by inverse transform must reproduce the input."""
    rstate = np.random.RandomState(1234)

    def _roundtrip(X, **kwargs):
        # transform, invert and check reconstruction error
        ST, Psi = shearletTransformSpect(X, realCoefficients=True, **kwargs)
        XX = inverseShearletTransformSpect(ST, Psi)
        assert_(np.max(X - XX) < 1e-13)

    # even and odd square shapes with default settings
    for shape in [(32, 32), (64, 64), (128, 128), (65, 65), (125, 125)]:
        _roundtrip(rstate.standard_normal(shape))

    # check some other non-default settings
    for shape in [(32, 32), (33, 33)]:
        for maxScale in ['max', 'min']:
            for realReal in [True, False]:
                _roundtrip(rstate.standard_normal(shape),
                           maxScale=maxScale, realReal=realReal)
79 |
80 |
# allow running this test module directly as a script
if __name__ == "__main__":
    run_module_suite()
83 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include versioneer.py
2 | include FFST/_version.py
3 |
--------------------------------------------------------------------------------
/README.orig:
--------------------------------------------------------------------------------
1 | FFST - Fast Finite Shearlet Transform
2 |
3 | Overview
4 | --------
5 | The FFST package provides a fast implementation of the Finite Shearlet Transform.
Following the path via the continuous shearlet transform,
7 | its counterpart on cones and finally its discretization on the full grid we obtain the
8 | translation invariant discrete shearlet transform. Our discrete shearlet transform
9 | can be efficiently computed by the fast Fourier transform (FFT).
The discrete shearlets constitute a Parseval frame of the finite Euclidean space, so the inverse transform is given by the adjoint.
11 |
More information can be found in the following papers:
13 | - S. Häuser and G. Steidl, "Convex Multilabel Segmentation with Shearlet Regularization",
14 | International Journal of Computer Mathematics. 90, (1), 62-81, 2013
15 | - S. Häuser and G. Steidl, "FFST: a tutorial", arXiv Preprint 1202.1773, 2014
16 |
17 | The Matlab-Version of the toolbox is available for free download at
18 |
19 | http://www.mathematik.uni-kl.de/imagepro/software/
20 |
21 | The zip-file contains all relevant files and folders. Simply unzip the archive
22 | and add the folder (with subfolders!) to your Matlab-path or run installFFST.m.
23 |
The folder FFST contains the main files for both directions of the transform.
25 | The included shearlets are stored in the folder shearlets. The folder helper
26 | contains some helper functions. To create simple geometric structures some
27 | functions are provided in create. See contents.m and the comments in each
28 | file for more information.
29 |
30 | Installation
31 | ------------
32 | Run installFFST.m to add all folders to your MATLAB path.
33 | To check if everything is installed correctly run simple_example.m for testing.
34 |
35 | Feedback
36 | --------
37 | If you have any questions, comments or suggestions feel free to contact
38 |
39 | Sören Häuser
40 | TU Kaiserslautern, Dept. of Mathematics
41 | Paul-Ehrlich-Str. 31
42 | 67663 Kaiserslautern, GERMANY
43 |
44 | Please keep track of any bugs you might encounter and send them to the author.
45 |
46 | If you find FFST useful, we would be delighted to hear about what application
47 | you are using FFST for!
48 |
49 | Legal Information & Credits
50 | ---------------------------
51 | Copyright (c) 2014 Sören Häuser
52 |
53 | This software was written by Sören Häuser.
54 | It was developed at the Department of Mathematics, TU Kaiserslautern
55 |
56 | FFST is free software. You can redistribute it and/or modify it under the
57 | terms of the GNU General Public License as published by the Free Software
58 | Foundation; either version 3 of the License, or (at your option) any later
59 | version. If not stated otherwise, this applies to all files contained in this
60 | package and its sub-directories.
61 |
62 | This program is distributed in the hope that it will be useful,
63 | but WITHOUT ANY WARRANTY; without even the implied warranty of
64 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
65 | GNU General Public License for more details.
66 |
67 | You should have received a copy of the GNU General Public License
68 | along with this program; if not, write to the Free Software
69 | Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
70 |
71 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | Fast Finite Shearlet Transform
2 | ==============================
3 |
4 | Python port of FFST copyright 2015-2016, Gregory R. Lee.
5 |
6 | The original Matlab implementation is copyright 2014 Sören Häuser and is
7 | covered by a GPL v3 license (see README.orig and COPYING for the license
8 | terms).
9 |
10 | The original Matlab implementation (v0.2) can be obtained here:
11 | http://www.mathematik.uni-kl.de/imagepro/software/ffst/
12 |
13 | The implementation is described in the following publications.
14 |
15 | S. Häuser, G. Steidl (2014).
16 | Fast Finite Shearlet Transform: a tutorial.
17 | ArXiv. (1202.1773),
18 |
19 | S. Häuser and G. Steidl (2013).
20 | Convex Multiclass Segmentation with Shearlet Regularization.
21 | International Journal of Computer Mathematics. 90, (1), 62-81.
22 |
23 | Alternative implementations of shearlets for Matlab are available here:
24 | ShearLab: www.shearlab.org
25 |
--------------------------------------------------------------------------------
/doc/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = _build
9 |
10 | # User-friendly check for sphinx-build
11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
13 | endif
14 |
15 | # Internal variables.
16 | PAPEROPT_a4 = -D latex_paper_size=a4
17 | PAPEROPT_letter = -D latex_paper_size=letter
18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
19 | # the i18n builder cannot share the environment and doctrees with the others
20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
21 |
22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
23 |
24 | help:
25 | 	@echo "Please use \`make <target>' where <target> is one of"
26 | @echo " html to make standalone HTML files"
27 | @echo " dirhtml to make HTML files named index.html in directories"
28 | @echo " singlehtml to make a single large HTML file"
29 | @echo " pickle to make pickle files"
30 | @echo " json to make JSON files"
31 | @echo " htmlhelp to make HTML files and a HTML help project"
32 | @echo " qthelp to make HTML files and a qthelp project"
33 | @echo " devhelp to make HTML files and a Devhelp project"
34 | @echo " epub to make an epub"
35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
36 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
38 | @echo " text to make text files"
39 | @echo " man to make manual pages"
40 | @echo " texinfo to make Texinfo files"
41 | @echo " info to make Texinfo files and run them through makeinfo"
42 | @echo " gettext to make PO message catalogs"
43 | @echo " changes to make an overview of all changed/added/deprecated items"
44 | @echo " xml to make Docutils-native XML files"
45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes"
46 | @echo " linkcheck to check all external links for integrity"
47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
48 |
49 |
50 | clean:
51 | rm -rf $(BUILDDIR)/*
52 | rm -rf reference/*
53 |
54 | html:
55 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
56 | @echo
57 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
58 |
59 | dirhtml:
60 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
61 | @echo
62 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
63 |
64 | singlehtml:
65 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
66 | @echo
67 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
68 |
69 | pickle:
70 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
71 | @echo
72 | @echo "Build finished; now you can process the pickle files."
73 |
74 | json:
75 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
76 | @echo
77 | @echo "Build finished; now you can process the JSON files."
78 |
79 | htmlhelp:
80 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
81 | @echo
82 | @echo "Build finished; now you can run HTML Help Workshop with the" \
83 | ".hhp project file in $(BUILDDIR)/htmlhelp."
84 |
85 | qthelp:
86 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
87 | @echo
88 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
89 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
90 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/PyShearlets.qhcp"
91 | @echo "To view the help file:"
92 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/PyShearlets.qhc"
93 |
94 | devhelp:
95 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
96 | @echo
97 | @echo "Build finished."
98 | @echo "To view the help file:"
99 | @echo "# mkdir -p $$HOME/.local/share/devhelp/PyShearlets"
100 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/PyShearlets"
101 | @echo "# devhelp"
102 |
103 | epub:
104 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
105 | @echo
106 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
107 |
108 | latex:
109 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
110 | @echo
111 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
112 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
113 | "(use \`make latexpdf' here to do that automatically)."
114 |
115 | latexpdf:
116 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
117 | @echo "Running LaTeX files through pdflatex..."
118 | $(MAKE) -C $(BUILDDIR)/latex all-pdf
119 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
120 |
121 | latexpdfja:
122 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
123 | @echo "Running LaTeX files through platex and dvipdfmx..."
124 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
125 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
126 |
127 | text:
128 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
129 | @echo
130 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
131 |
132 | man:
133 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
134 | @echo
135 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
136 |
137 | texinfo:
138 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
139 | @echo
140 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
141 | @echo "Run \`make' in that directory to run these through makeinfo" \
142 | "(use \`make info' here to do that automatically)."
143 |
144 | info:
145 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
146 | @echo "Running Texinfo files through makeinfo..."
147 | make -C $(BUILDDIR)/texinfo info
148 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
149 |
150 | gettext:
151 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
152 | @echo
153 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
154 |
155 | changes:
156 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
157 | @echo
158 | @echo "The overview file is in $(BUILDDIR)/changes."
159 |
160 | linkcheck:
161 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
162 | @echo
163 | @echo "Link check complete; look for any errors in the above output " \
164 | "or in $(BUILDDIR)/linkcheck/output.txt."
165 |
166 | doctest:
167 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
168 | @echo "Testing of doctests in the sources finished, look at the " \
169 | "results in $(BUILDDIR)/doctest/output.txt."
170 |
171 | xml:
172 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
173 | @echo
174 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
175 |
176 | pseudoxml:
177 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
178 | @echo
179 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
180 |
--------------------------------------------------------------------------------
/doc/conf.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | #
4 | # PyShearlets documentation build configuration file, created by
5 | # sphinx-quickstart on Tue Apr 14 10:29:06 2015.
6 | #
7 | # This file is execfile()d with the current directory set to its
8 | # containing dir.
9 | #
10 | # Note that not all possible configuration values are present in this
11 | # autogenerated file.
12 | #
13 | # All configuration values have a default; values that are commented out
14 | # serve to show the default.
15 |
16 | import sys
17 | import os
18 |
19 | # General information about the project.
20 | project = 'FFST'
21 | copyright = '2015-2016, Gregory R. Lee'
22 |
# Read the package version without importing the package: _version.py
# (generated by versioneer) defines get_versions(), which exec() brings
# into this module's namespace.
currentdir = os.path.abspath(os.path.dirname(__file__))
ver_file = os.path.join(currentdir, '..', project, '_version.py')
with open(ver_file) as f:
    exec(f.read())
source_version = get_versions()['version']

# Make the local documentation tools (buildmodref) importable.
sys.path.append(os.path.join(currentdir, 'tools'))
import buildmodref

# autogenerate api documentation
# (see https://github.com/rtfd/readthedocs.org/issues/1139)
def generateapidoc(_):
    """Regenerate the API reference pages under doc/reference."""
    output_path = os.path.join(currentdir, 'reference')
    buildmodref.writeapi(project, output_path, source_version, True)

def setup(app):
    # Sphinx extension entry point: regenerate API docs on every build.
    app.connect('builder-inited', generateapidoc)
41 |
42 | # If extensions (or modules to document with autodoc) are in another directory,
43 | # add these directories to sys.path here. If the directory is relative to the
44 | # documentation root, use os.path.abspath to make it absolute, like shown here.
45 | sys.path.insert(0, os.path.abspath('../'))
46 |
47 | # -- General configuration ------------------------------------------------
48 |
49 | # If your documentation needs a minimal Sphinx version, state it here.
50 | needs_sphinx = '1.0' # numpydoc requires sphinx >= 1.0
51 |
52 | # Add any Sphinx extension module names here, as strings. They can be
53 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
54 | # ones.
55 | sys.path.append(os.path.abspath('sphinxext'))
56 |
# NOTE: 'sphinx.ext.pngmath' was removed from this list.  It is
# deprecated (superseded by 'sphinx.ext.imgmath') and Sphinx refuses to
# enable two math renderers at once, so it conflicted with
# 'sphinx.ext.mathjax' below.
extensions = ['sphinx.ext.autodoc',
              'sphinx.ext.doctest',
              'sphinx.ext.intersphinx',
              'sphinx.ext.todo',
              'sphinx.ext.coverage',
              'sphinx.ext.ifconfig',
              'sphinx.ext.autosummary',
              'sphinx.ext.mathjax',
              'math_dollar', # has to go before numpydoc
              'numpydoc',
              'github']
69 |
70 | # Add any paths that contain templates here, relative to this directory.
71 | templates_path = ['_templates']
72 |
73 | # The suffix of source filenames.
74 | source_suffix = '.rst'
75 |
76 | # The encoding of source files.
77 | #source_encoding = 'utf-8-sig'
78 |
79 | # The master toctree document.
80 | master_doc = 'index'
81 |
82 |
83 | # The version info for the project you're documenting, acts as replacement for
84 | # |version| and |release|, also used in various other places throughout the
85 | # built documents.
86 | #
87 | # The short X.Y version.
88 | version = '0.1'
89 | # The full version, including alpha/beta/rc tags.
90 | release = '0.1'
91 |
92 | # The language for content autogenerated by Sphinx. Refer to documentation
93 | # for a list of supported languages.
94 | #language = None
95 |
96 | # There are two options for replacing |today|: either, you set today to some
97 | # non-false value, then it is used:
98 | #today = ''
99 | # Else, today_fmt is used as the format for a strftime call.
100 | #today_fmt = '%B %d, %Y'
101 |
102 | # List of patterns, relative to source directory, that match files and
103 | # directories to ignore when looking for source files.
104 | exclude_patterns = ['_build', ]
105 |
106 | # The reST default role (used for this markup: `text`) to use for all
107 | # documents.
108 | #default_role = None
109 |
110 | # If true, '()' will be appended to :func: etc. cross-reference text.
111 | #add_function_parentheses = True
112 |
113 | # If true, the current module name will be prepended to all description
114 | # unit titles (such as .. function::).
115 | #add_module_names = True
116 |
117 | # If true, sectionauthor and moduleauthor directives will be shown in the
118 | # output. They are ignored by default.
119 | #show_authors = False
120 |
121 | # The name of the Pygments (syntax highlighting) style to use.
122 | pygments_style = 'sphinx'
123 |
124 | # A list of ignored prefixes for module index sorting.
125 | #modindex_common_prefix = []
126 |
127 | # If true, keep warnings as "system message" paragraphs in the built documents.
128 | #keep_warnings = False
129 |
130 |
131 | # -- Options for HTML output ----------------------------------------------
132 |
133 | # The theme to use for HTML and HTML Help pages. See the documentation for
134 | # a list of builtin themes.
135 | html_theme = 'sphinxdoc'
136 |
137 | # Theme options are theme-specific and customize the look and feel of a theme
138 | # further. For a list of options available for each theme, see the
139 | # documentation.
140 | #html_theme_options = {}
141 |
142 | # Add any paths that contain custom themes here, relative to this directory.
143 | #html_theme_path = []
144 |
145 | # The name for this set of Sphinx documents. If None, it defaults to
146 | # "<project> v<release> documentation".
147 | #html_title = None
148 |
149 | # A shorter title for the navigation bar. Default is the same as html_title.
150 | #html_short_title = None
151 |
152 | # The name of an image file (relative to this directory) to place at the top
153 | # of the sidebar.
154 | #html_logo = None
155 |
156 | # The name of an image file (within the static path) to use as favicon of the
157 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
158 | # pixels large.
159 | #html_favicon = None
160 |
161 | # Add any paths that contain custom static files (such as style sheets) here,
162 | # relative to this directory. They are copied after the builtin static files,
163 | # so a file named "default.css" will overwrite the builtin "default.css".
164 | html_static_path = ['_static']
165 |
166 | # Add any extra paths that contain custom files (such as robots.txt or
167 | # .htaccess) here, relative to this directory. These files are copied
168 | # directly to the root of the documentation.
169 | #html_extra_path = []
170 |
171 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
172 | # using the given strftime format.
173 | #html_last_updated_fmt = '%b %d, %Y'
174 |
175 | # If true, SmartyPants will be used to convert quotes and dashes to
176 | # typographically correct entities.
177 | #html_use_smartypants = True
178 |
179 | # Custom sidebar templates, maps document names to template names.
180 | #html_sidebars = {}
181 |
182 | # Additional templates that should be rendered to pages, maps page names to
183 | # template names.
184 | #html_additional_pages = {}
185 |
186 | # If false, no module index is generated.
187 | #html_domain_indices = True
188 | html_domain_indices = False
189 |
190 | # If false, no index is generated.
191 | #html_use_index = True
192 |
193 | # If true, the index is split into individual pages for each letter.
194 | #html_split_index = False
195 |
196 | # If true, links to the reST sources are added to the pages.
197 | #html_show_sourcelink = True
198 |
199 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
200 | #html_show_sphinx = True
201 |
202 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
203 | #html_show_copyright = True
204 |
205 | # If true, an OpenSearch description file will be output, and all pages will
206 | # contain a <link> tag referring to it. The value of this option must be the
207 | # base URL from which the finished HTML is served.
208 | #html_use_opensearch = ''
209 |
210 | # This is the file name suffix for HTML files (e.g. ".xhtml").
211 | #html_file_suffix = None
212 |
213 | # Output file base name for HTML help builder.
214 | htmlhelp_basename = 'pyshearletsdoc'
215 |
216 |
217 | # -- Options for LaTeX output ---------------------------------------------
218 |
219 | latex_elements = {
220 | # The paper size ('letterpaper' or 'a4paper').
221 | #'papersize': 'letterpaper',
222 |
223 | # The font size ('10pt', '11pt' or '12pt').
224 | #'pointsize': '10pt',
225 |
226 | # Additional stuff for the LaTeX preamble.
227 | #'preamble': '',
228 | }
229 |
230 | # Grouping the document tree into LaTeX files. List of tuples
231 | # (source start file, target name, title,
232 | # author, documentclass [howto, manual, or own class]).
233 | latex_documents = [
234 | ('index', 'PyShearlets.tex', 'PyShearlets Documentation',
235 | 'Gregory R. Lee', 'manual'),
236 | ]
237 |
238 | # The name of an image file (relative to this directory) to place at the top of
239 | # the title page.
240 | #latex_logo = None
241 |
242 | # For "manual" documents, if this is true, then toplevel headings are parts,
243 | # not chapters.
244 | #latex_use_parts = False
245 |
246 | # If true, show page references after internal links.
247 | #latex_show_pagerefs = False
248 |
249 | # If true, show URL addresses after external links.
250 | #latex_show_urls = False
251 |
252 | # Documents to append as an appendix to all manuals.
253 | #latex_appendices = []
254 |
255 | # If false, no module index is generated.
256 | #latex_domain_indices = True
257 |
258 |
259 | # -- Options for manual page output ---------------------------------------
260 |
261 | # One entry per manual page. List of tuples
262 | # (source start file, name, description, authors, manual section).
263 | man_pages = [
264 | ('index', 'PyShearlets', 'PyShearlets Documentation',
265 | ['Gregory R. Lee'], 1)
266 | ]
267 |
268 | # If true, show URL addresses after external links.
269 | #man_show_urls = False
270 |
271 |
272 | # -- Options for Texinfo output -------------------------------------------
273 |
274 | # Grouping the document tree into Texinfo files. List of tuples
275 | # (source start file, target name, title, author,
276 | # dir menu entry, description, category)
277 | texinfo_documents = [
278 | ('index', 'PyShearlets', 'PyShearlets Documentation',
279 | 'Gregory R. Lee', 'PyShearlets', 'One line description of project.',
280 | 'Miscellaneous'),
281 | ]
282 |
283 | # Documents to append as an appendix to all manuals.
284 | #texinfo_appendices = []
285 |
286 | # If false, no module index is generated.
287 | texinfo_domain_indices = False
288 |
289 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
290 | #texinfo_show_urls = 'footnote'
291 |
292 | # If true, do not generate a @detailmenu in the "Top" node's menu.
293 | #texinfo_no_detailmenu = False
294 |
295 | # Example configuration for intersphinx: refer to the Python standard library.
296 | intersphinx_mapping = {'http://docs.python.org/': None}
297 |
--------------------------------------------------------------------------------
/doc/index.rst:
--------------------------------------------------------------------------------
1 | .. PyShearlets documentation master file, created by sphinx-quickstart on Tue Apr 14 10:29:06 2015. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive.
2 |
3 | Welcome to PyShearlets's documentation!
4 | =======================================
5 |
6 | `PyShearlets` is a package for performing the fast finite shearlet transform
7 | (FFST).
8 |
9 | Both forward and inverse 2D transforms are implemented.
10 |
11 | This package is a port of the FFST Matlab toolbox. The original Matlab
12 | implementation is copyright 2014 Sören Häuser and is covered by a GPL v3
13 | license. See the readme_orig_ and COPYING_ for the license
14 | terms.
15 |
16 | The original Matlab implementation (v0.2) can be obtained from the matlab_toolbox_ page.
17 |
18 | The implementation is described in the following publications.
19 |
20 | S. Häuser, G. Steidl (2014). `Fast Finite Shearlet Transform: a tutorial. ArXiv (1202.1773) <http://arxiv.org/abs/1202.1773>`_,
21 |
22 | S. Häuser and G. Steidl (2013). Convex Multiclass Segmentation with Shearlet Regularization. International Journal of Computer Mathematics. 90, (1), 62-81.
23 |
24 | For more information, please refer to the readme_ in the Github repository.
25 |
26 | This documentation was generated using sphinx_.
27 |
28 | .. _sphinx: http://sphinx-doc.org/
29 | .. _matlab_toolbox: http://www.mathematik.uni-kl.de/imagepro/software/ffst
30 | .. _readme: https://github.com/grlee77/PyShearlets/blob/master/README.rst
31 | .. _COPYING: https://github.com/grlee77/PyShearlets/blob/master/COPYING
32 | .. _readme_orig: https://github.com/grlee77/PyShearlets/blob/master/README.orig
33 | .. _ref1: http://arxiv.org/abs/1202.1773
34 |
35 | Contents:
36 |
37 | .. toctree::
38 | :maxdepth: 2
39 |
40 | reference/index
41 |
42 | Indices and tables
43 | ==================
44 |
45 | * :ref:`genindex`
46 | * :ref:`modindex`
47 | * :ref:`search`
48 |
49 |
--------------------------------------------------------------------------------
/doc/reference/.gitignore:
--------------------------------------------------------------------------------
1 | *.rst
2 |
--------------------------------------------------------------------------------
/doc/sphinxext/docscrape.py:
--------------------------------------------------------------------------------
1 | """Extract reference documentation from the NumPy source tree.
2 |
3 | """
4 | from __future__ import division, absolute_import, print_function
5 |
6 | import inspect
7 | import textwrap
8 | import re
9 | import pydoc
10 | from warnings import warn
11 | import collections
12 | import sys
13 |
14 |
class Reader(object):
    """Sequential, line-oriented access to a block of text.

    Wraps either a string (split on newlines) or a pre-split list of
    lines, and keeps a cursor so sections can be consumed in order.
    """

    def __init__(self, data):
        """
        Parameters
        ----------
        data : str or list of str
            Text with lines separated by newlines, or a list of lines.
        """
        # store the text as a list of lines
        self._str = data if isinstance(data, list) else data.split('\n')
        self.reset()

    def __getitem__(self, n):
        return self._str[n]

    def reset(self):
        # cursor: index of the next line to be read
        self._l = 0

    def read(self):
        """Return the current line and advance; return '' at end of input."""
        if self.eof():
            return ''
        line = self[self._l]
        self._l += 1
        return line

    def seek_next_non_empty_line(self):
        """Advance the cursor past any blank lines."""
        while not self.eof() and not self.peek().strip():
            self._l += 1

    def eof(self):
        return self._l >= len(self._str)

    def read_to_condition(self, condition_func):
        """Consume and return lines until ``condition_func(line)`` is true."""
        start = self._l
        while not self.eof():
            if condition_func(self.peek()):
                return self[start:self._l]
            self._l += 1
        # ran off the end: return everything from start onwards
        return self[start:self._l + 1]

    def read_to_next_empty_line(self):
        """Skip leading blanks, then read up to the next blank line."""
        self.seek_next_non_empty_line()
        return self.read_to_condition(lambda line: not line.strip())

    def read_to_next_unindented_line(self):
        """Read up to the next non-empty line without leading whitespace."""
        def is_unindented(line):
            return bool(line.strip() and len(line.lstrip()) == len(line))
        return self.read_to_condition(is_unindented)

    def peek(self, n=0):
        """Return the line ``n`` past the cursor without consuming it."""
        pos = self._l + n
        return self[pos] if pos < len(self._str) else ''

    def is_empty(self):
        """True when the wrapped text contains only whitespace."""
        return not ''.join(self._str).strip()
89 |
90 |
# collections.Mapping moved to collections.abc in Python 3.3 and the old
# alias was removed in Python 3.10.
try:
    from collections.abc import Mapping
except ImportError:  # Python 2
    from collections import Mapping


class NumpyDocString(Mapping):
    """Parse a numpydoc-style docstring into a mapping of named sections.

    Sections ('Parameters', 'Returns', 'See Also', ...) are accessed with
    dict-style indexing; ``str()`` re-renders the parsed docstring.
    """

    def __init__(self, docstring, config=None):
        # ``config`` is accepted for API compatibility with subclasses;
        # it is unused here.  (A mutable ``{}`` default was replaced.)
        docstring = textwrap.dedent(docstring).split('\n')

        self._doc = Reader(docstring)
        self._parsed_data = {
            'Signature': '',
            'Summary': [''],
            'Extended Summary': [],
            'Parameters': [],
            'Returns': [],
            'Yields': [],
            'Raises': [],
            'Warns': [],
            'Other Parameters': [],
            'Attributes': [],
            'Methods': [],
            'See Also': [],
            'Notes': [],
            'Warnings': [],
            'References': '',
            'Examples': '',
            'index': {}
        }

        self._parse()

    def __getitem__(self, key):
        return self._parsed_data[key]

    def __setitem__(self, key, val):
        if key not in self._parsed_data:
            warn("Unknown section %s" % key)
        else:
            self._parsed_data[key] = val

    def __iter__(self):
        return iter(self._parsed_data)

    def __len__(self):
        return len(self._parsed_data)

    def _is_at_section(self):
        """True when the reader is positioned at a section header."""
        self._doc.seek_next_non_empty_line()

        if self._doc.eof():
            return False

        l1 = self._doc.peek().strip()  # e.g. Parameters

        if l1.startswith('.. index::'):
            return True

        l2 = self._doc.peek(1).strip()  # ---------- or ==========
        return l2.startswith('-'*len(l1)) or l2.startswith('='*len(l1))

    def _strip(self, doc):
        # Trim leading and trailing blank lines from a list of lines.
        i = 0
        j = 0
        for i, line in enumerate(doc):
            if line.strip():
                break

        for j, line in enumerate(doc[::-1]):
            if line.strip():
                break

        return doc[i:len(doc)-j]

    def _read_to_next_section(self):
        section = self._doc.read_to_next_empty_line()

        while not self._is_at_section() and not self._doc.eof():
            if not self._doc.peek(-1).strip():  # previous line was empty
                section += ['']

            section += self._doc.read_to_next_empty_line()

        return section

    def _read_sections(self):
        while not self._doc.eof():
            data = self._read_to_next_section()
            name = data[0].strip()

            if name.startswith('..'):  # index section
                yield name, data[1:]
            elif len(data) < 2:
                # Malformed/truncated section: stop parsing.
                # (The old ``yield StopIteration`` yielded the exception
                # *class* as a value, which broke tuple unpacking in
                # _parse on Python 3.)
                return
            else:
                yield name, self._strip(data[2:])

    def _parse_param_list(self, content):
        """Parse 'name : type' entries with indented descriptions."""
        r = Reader(content)
        params = []
        while not r.eof():
            header = r.read().strip()
            if ' : ' in header:
                arg_name, arg_type = header.split(' : ')[:2]
            else:
                arg_name, arg_type = header, ''

            desc = r.read_to_next_unindented_line()
            desc = dedent_lines(desc)

            params.append((arg_name, arg_type, desc))

        return params

    # Matches ':role:`name`' or a plain 'name'.  The named groups
    # (<role>, <name>, <name2>) were restored; ``(?P\w+)`` without a
    # group name is invalid regex syntax and raised at import time.
    _name_rgx = re.compile(r"^\s*(:(?P<role>\w+):`(?P<name>[a-zA-Z0-9_.-]+)`|"
                           r" (?P<name2>[a-zA-Z0-9_.-]+))\s*", re.X)

    def _parse_see_also(self, content):
        """
        func_name : Descriptive text
            continued text
        another_func_name : Descriptive text
        func_name1, func_name2, :meth:`func_name`, func_name3

        """
        items = []

        def parse_item_name(text):
            """Match ':role:`name`' or 'name'"""
            m = self._name_rgx.match(text)
            if m:
                g = m.groups()
                if g[1] is None:
                    return g[3], None
                else:
                    return g[2], g[1]
            raise ValueError("%s is not a item name" % text)

        def push_item(name, rest):
            if not name:
                return
            name, role = parse_item_name(name)
            items.append((name, list(rest), role))
            del rest[:]

        current_func = None
        rest = []

        for line in content:
            if not line.strip():
                continue

            m = self._name_rgx.match(line)
            if m and line[m.end():].strip().startswith(':'):
                push_item(current_func, rest)
                current_func, line = line[:m.end()], line[m.end():]
                rest = [line.split(':', 1)[1].strip()]
                if not rest[0]:
                    rest = []
            elif not line.startswith(' '):
                push_item(current_func, rest)
                current_func = None
                if ',' in line:
                    for func in line.split(','):
                        if func.strip():
                            push_item(func, [])
                elif line.strip():
                    current_func = line
            elif current_func is not None:
                rest.append(line.strip())
        push_item(current_func, rest)
        return items

    def _parse_index(self, section, content):
        """
        .. index: default
           :refguide: something, else, and more

        """
        def strip_each_in(lst):
            return [s.strip() for s in lst]

        out = {}
        section = section.split('::')
        if len(section) > 1:
            out['default'] = strip_each_in(section[1].split(','))[0]
        for line in content:
            line = line.split(':')
            if len(line) > 2:
                out[line[1]] = strip_each_in(line[2].split(','))
        return out

    def _parse_summary(self):
        """Grab signature (if given) and summary"""
        if self._is_at_section():
            return

        # If several signatures present, take the last one
        while True:
            summary = self._doc.read_to_next_empty_line()
            summary_str = " ".join([s.strip() for s in summary]).strip()
            # raw string: '\s'/'\w' are invalid escapes in a plain literal
            if re.compile(r'^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str):
                self['Signature'] = summary_str
                if not self._is_at_section():
                    continue
            break

        if summary is not None:
            self['Summary'] = summary

        if not self._is_at_section():
            self['Extended Summary'] = self._read_to_next_section()

    def _parse(self):
        self._doc.reset()
        self._parse_summary()

        sections = list(self._read_sections())
        section_names = set([section for section, content in sections])

        has_returns = 'Returns' in section_names
        has_yields = 'Yields' in section_names
        # We could do more tests, but we are not. Arbitrarily.
        if has_returns and has_yields:
            msg = 'Docstring contains both a Returns and Yields section.'
            raise ValueError(msg)

        for (section, content) in sections:
            if not section.startswith('..'):
                section = (s.capitalize() for s in section.split(' '))
                section = ' '.join(section)
                if section in ('Parameters', 'Returns', 'Yields', 'Raises',
                               'Warns', 'Other Parameters', 'Attributes',
                               'Methods'):
                    self[section] = self._parse_param_list(content)
                elif section.startswith('.. index::'):
                    self['index'] = self._parse_index(section, content)
                elif section == 'See Also':
                    self['See Also'] = self._parse_see_also(content)
                else:
                    self[section] = content

    # string conversion routines

    def _str_header(self, name, symbol='-'):
        return [name, len(name)*symbol]

    def _str_indent(self, doc, indent=4):
        out = []
        for line in doc:
            out += [' '*indent + line]
        return out

    def _str_signature(self):
        if self['Signature']:
            return [self['Signature'].replace('*', '\\*')] + ['']
        else:
            return ['']

    def _str_summary(self):
        if self['Summary']:
            return self['Summary'] + ['']
        else:
            return []

    def _str_extended_summary(self):
        if self['Extended Summary']:
            return self['Extended Summary'] + ['']
        else:
            return []

    def _str_param_list(self, name):
        out = []
        if self[name]:
            out += self._str_header(name)
            for param, param_type, desc in self[name]:
                if param_type:
                    out += ['%s : %s' % (param, param_type)]
                else:
                    out += [param]
                out += self._str_indent(desc)
            out += ['']
        return out

    def _str_section(self, name):
        out = []
        if self[name]:
            out += self._str_header(name)
            out += self[name]
            out += ['']
        return out

    def _str_see_also(self, func_role):
        if not self['See Also']:
            return []
        out = []
        out += self._str_header("See Also")
        last_had_desc = True
        for func, desc, role in self['See Also']:
            if role:
                link = ':%s:`%s`' % (role, func)
            elif func_role:
                link = ':%s:`%s`' % (func_role, func)
            else:
                link = "`%s`_" % func
            if desc or last_had_desc:
                out += ['']
                out += [link]
            else:
                out[-1] += ", %s" % link
            if desc:
                out += self._str_indent([' '.join(desc)])
                last_had_desc = True
            else:
                last_had_desc = False
        out += ['']
        return out

    def _str_index(self):
        idx = self['index']
        out = []
        out += ['.. index:: %s' % idx.get('default', '')]
        for section, references in idx.items():
            if section == 'default':
                continue
            out += ['   :%s: %s' % (section, ', '.join(references))]
        return out

    def __str__(self, func_role=''):
        out = []
        out += self._str_signature()
        out += self._str_summary()
        out += self._str_extended_summary()
        for param_list in ('Parameters', 'Returns', 'Yields',
                           'Other Parameters', 'Raises', 'Warns'):
            out += self._str_param_list(param_list)
        out += self._str_section('Warnings')
        out += self._str_see_also(func_role)
        for s in ('Notes', 'References', 'Examples'):
            out += self._str_section(s)
        for param_list in ('Attributes', 'Methods'):
            out += self._str_param_list(param_list)
        out += self._str_index()
        return '\n'.join(out)
430 |
431 |
def indent(str, indent=4):
    """Prefix every line of *str* with *indent* spaces.

    ``None`` yields a single run of *indent* spaces.
    """
    pad = ' ' * indent
    if str is None:
        return pad
    return '\n'.join(pad + line for line in str.split('\n'))
438 |
439 |
def dedent_lines(lines):
    """Strip the longest common leading whitespace from a list of lines."""
    joined = "\n".join(lines)
    return textwrap.dedent(joined).split("\n")
443 |
444 |
def header(text, style='-'):
    """Return *text* followed by an underline of *style* characters."""
    underline = style * len(text)
    return '{0}\n{1}\n'.format(text, underline)
447 |
448 |
class FunctionDoc(NumpyDocString):
    """NumpyDocString for a function or method, recovering the signature
    by introspection when the docstring does not supply one."""

    def __init__(self, func, role='func', doc=None, config=None):
        # ``config`` is accepted for API compatibility; unused here.
        self._f = func
        self._role = role  # e.g. "func" or "meth"

        if doc is None:
            if func is None:
                raise ValueError("No function or docstring given")
            doc = inspect.getdoc(func) or ''
        NumpyDocString.__init__(self, doc)

        if not self['Signature'] and func is not None:
            func, func_name = self.get_func()
            try:
                # try to read the signature by introspection.
                # inspect.getargspec/formatargspec were removed in
                # Python 3.11; use inspect.signature on Python 3.
                if sys.version_info[0] >= 3:
                    argspec = str(inspect.signature(func))
                else:
                    argspec = inspect.formatargspec(
                        *inspect.getargspec(func))
                argspec = argspec.replace('*', '\\*')
                signature = '%s%s' % (func_name, argspec)
            except (TypeError, ValueError):
                # some builtins/extension callables are not introspectable
                # (inspect.signature raises ValueError for them)
                signature = '%s()' % func_name
            self['Signature'] = signature

    def get_func(self):
        """Return (callable, name); for classes, prefer __call__/__init__."""
        func_name = getattr(self._f, '__name__', self.__class__.__name__)
        if inspect.isclass(self._f):
            func = getattr(self._f, '__call__', self._f.__init__)
        else:
            func = self._f
        return func, func_name

    def __str__(self):
        out = ''

        func, func_name = self.get_func()
        signature = self['Signature'].replace('*', '\\*')

        roles = {'func': 'function',
                 'meth': 'method'}

        if self._role:
            if self._role not in roles:
                print("Warning: invalid role %s" % self._role)
            out += '.. %s:: %s\n    \n\n' % (roles.get(self._role, ''),
                                             func_name)

        out += super(FunctionDoc, self).__str__(func_role=self._role)
        return out
500 |
501 |
class ClassDoc(NumpyDocString):
    """Parsed numpydoc docstring of a class, including member listings."""

    # Dunder methods that should still be listed as public API.
    extra_public_methods = ['__call__']

    def __init__(self, cls, doc=None, modulename='', func_doc=FunctionDoc,
                 config={}):
        if not inspect.isclass(cls) and cls is not None:
            raise ValueError("Expected a class or None, but got %r" % cls)
        self._cls = cls

        self.show_inherited_members = config.get(
            'show_inherited_class_members', True)

        if modulename and not modulename.endswith('.'):
            modulename += '.'
        self._mod = modulename

        if doc is None:
            if cls is None:
                raise ValueError("No class or documentation string given")
            doc = pydoc.getdoc(cls)

        NumpyDocString.__init__(self, doc)

        if config.get('show_class_members', True):
            def splitlines_x(s):
                # Tolerate None / empty docstrings.
                if not s:
                    return []
                else:
                    return s.splitlines()

            # Auto-populate the Methods/Attributes sections when the class
            # docstring did not list them explicitly.
            for field, items in [('Methods', self.methods),
                                 ('Attributes', self.properties)]:
                if not self[field]:
                    doc_list = []
                    for name in sorted(items):
                        try:
                            doc_item = pydoc.getdoc(getattr(self._cls, name))
                            doc_list.append((name, '', splitlines_x(doc_item)))
                        except AttributeError:
                            pass  # method doesn't exist
                    self[field] = doc_list

    @property
    def methods(self):
        """Public (or explicitly whitelisted) callable members of the class."""
        if self._cls is None:
            return []
        # BUG FIX: ``collections.Callable`` was removed in Python 3.10; the
        # builtin ``callable`` performs the identical check portably.
        return [name for name, func in inspect.getmembers(self._cls)
                if ((not name.startswith('_')
                     or name in self.extra_public_methods)
                    and callable(func)
                    and self._is_show_member(name))]

    @property
    def properties(self):
        """Public properties / data descriptors of the class."""
        if self._cls is None:
            return []
        return [name for name, func in inspect.getmembers(self._cls)
                if (not name.startswith('_') and
                    (func is None or isinstance(func, property) or
                     inspect.isgetsetdescriptor(func))
                    and self._is_show_member(name))]

    def _is_show_member(self, name):
        # Filter out inherited members unless configuration says otherwise.
        if self.show_inherited_members:
            return True  # show all class members
        if name not in self._cls.__dict__:
            return False  # class member is inherited, we do not show it
        return True
571 |
--------------------------------------------------------------------------------
/doc/sphinxext/docscrape_sphinx.py:
--------------------------------------------------------------------------------
1 | import re, inspect, textwrap, pydoc
2 | import sphinx
3 | from docscrape import NumpyDocString, FunctionDoc, ClassDoc
4 |
class SphinxDocString(NumpyDocString):
    """NumpyDocString subclass that renders sections as Sphinx/reST markup."""

    def __init__(self, docstring, config={}):
        # ``use_plots`` controls whether Examples with matplotlib become
        # ``.. plot::`` directives (see _str_examples).
        self.use_plots = config.get('use_plots', False)
        NumpyDocString.__init__(self, docstring, config=config)

    # string conversion routines
    def _str_header(self, name, symbol='`'):
        return ['.. rubric:: ' + name, '']

    def _str_field_list(self, name):
        return [':' + name + ':']

    def _str_indent(self, doc, indent=4):
        out = []
        for line in doc:
            out += [' '*indent + line]
        return out

    def _str_signature(self):
        # Signature rendering is intentionally disabled here: Sphinx emits
        # the signature itself via autodoc.  (The dead code that previously
        # followed this unconditional return has been removed.)
        return ['']

    def _str_summary(self):
        return self['Summary'] + ['']

    def _str_extended_summary(self):
        return self['Extended Summary'] + ['']

    def _str_param_list(self, name):
        """Render a parameter-style section as a reST field list."""
        out = []
        if self[name]:
            out += self._str_field_list(name)
            out += ['']
            for param, param_type, desc in self[name]:
                out += self._str_indent(['**%s** : %s' % (param.strip(),
                                                          param_type)])
                out += ['']
                out += self._str_indent(desc, 8)
                out += ['']
        return out

    @property
    def _obj(self):
        # The documented object, regardless of whether this instance was
        # mixed with ClassDoc (``_cls``) or FunctionDoc (``_f``).
        if hasattr(self, '_cls'):
            return self._cls
        elif hasattr(self, '_f'):
            return self._f
        return None

    def _str_member_list(self, name):
        """
        Generate a member listing, autosummary:: table where possible,
        and a table where not.

        """
        out = []
        if self[name]:
            out += ['.. rubric:: %s' % name, '']
            prefix = getattr(self, '_name', '')

            if prefix:
                prefix = '~%s.' % prefix

            autosum = []
            others = []
            for param, param_type, desc in self[name]:
                param = param.strip()
                # Members that exist on the object can use autosummary;
                # everything else goes into a manually built table.
                if not self._obj or hasattr(self._obj, param):
                    autosum += ["   %s%s" % (prefix, param)]
                else:
                    others.append((param, param_type, desc))

            if autosum:
                out += ['.. autosummary::', '   :toctree:', '']
                out += autosum

            if others:
                # Build a fixed-width reST table sized to the longest entries.
                maxlen_0 = max([len(x[0]) for x in others])
                maxlen_1 = max([len(x[1]) for x in others])
                hdr = "="*maxlen_0 + "  " + "="*maxlen_1 + "  " + "="*10
                fmt = '%%%ds  %%%ds  ' % (maxlen_0, maxlen_1)
                n_indent = maxlen_0 + maxlen_1 + 4
                out += [hdr]
                for param, param_type, desc in others:
                    out += [fmt % (param.strip(), param_type)]
                    out += self._str_indent(desc, n_indent)
                out += [hdr]
            out += ['']
        return out

    def _str_section(self, name):
        """Render a free-text section under a rubric header."""
        out = []
        if self[name]:
            out += self._str_header(name)
            out += ['']
            content = textwrap.dedent("\n".join(self[name])).split("\n")
            out += content
            out += ['']
        return out

    def _str_see_also(self, func_role):
        out = []
        if self['See Also']:
            see_also = super(SphinxDocString, self)._str_see_also(func_role)
            out = ['.. seealso::', '']
            # Skip the plain-text header the base class emits.
            out += self._str_indent(see_also[2:])
        return out

    def _str_warnings(self):
        out = []
        if self['Warnings']:
            out = ['.. warning::', '']
            out += self._str_indent(self['Warnings'])
        return out

    def _str_index(self):
        idx = self['index']
        out = []
        if len(idx) == 0:
            return out

        out += ['.. index:: %s' % idx.get('default', '')]
        # BUG FIX: ``dict.iteritems()`` does not exist on Python 3; ``items()``
        # behaves identically for this read-only iteration.
        for section, references in idx.items():
            if section == 'default':
                continue
            elif section == 'refguide':
                out += ['   single: %s' % (', '.join(references))]
            else:
                out += ['   %s: %s' % (section, ','.join(references))]
        return out

    def _str_references(self):
        out = []
        if self['References']:
            out += self._str_header('References')
            if isinstance(self['References'], str):
                self['References'] = [self['References']]
            out.extend(self['References'])
            out += ['']
            # Latex collects all references to a separate bibliography,
            # so we need to insert links to it.  NOTE: this is a lexicographic
            # string comparison of the version; adequate for >= 1.x.
            if sphinx.__version__ >= "0.6":
                out += ['.. only:: latex', '']
            else:
                out += ['.. latexonly::', '']
            items = []
            for line in self['References']:
                m = re.match(r'.. \[([a-z0-9._-]+)\]', line, re.I)
                if m:
                    items.append(m.group(1))
            out += ['   ' + ", ".join(["[%s]_" % item for item in items]), '']
        return out

    def _str_examples(self):
        examples_str = "\n".join(self['Examples'])

        # When plotting is enabled and the example imports matplotlib but
        # does not already contain a plot directive, wrap it in one.
        if (self.use_plots and 'import matplotlib' in examples_str
                and 'plot::' not in examples_str):
            out = []
            out += self._str_header('Examples')
            out += ['.. plot::', '']
            out += self._str_indent(self['Examples'])
            out += ['']
            return out
        else:
            return self._str_section('Examples')

    def __str__(self, indent=0, func_role="obj"):
        """Assemble all sections into a single indented reST string."""
        out = []
        out += self._str_signature()
        out += self._str_index() + ['']
        out += self._str_summary()
        out += self._str_extended_summary()
        for param_list in ('Parameters', 'Returns', 'Other Parameters',
                           'Raises', 'Warns'):
            out += self._str_param_list(param_list)
        out += self._str_warnings()
        out += self._str_see_also(func_role)
        out += self._str_section('Notes')
        out += self._str_references()
        out += self._str_examples()
        for param_list in ('Attributes', 'Methods'):
            out += self._str_member_list(param_list)
        out = self._str_indent(out, indent)
        return '\n'.join(out)
193 |
class SphinxFunctionDoc(SphinxDocString, FunctionDoc):
    # Function documenter that renders through SphinxDocString's reST-flavoured
    # string-conversion methods (via the MRO).
    def __init__(self, obj, doc=None, config={}):
        # ``use_plots`` must be set before parsing; __str__ consults it later.
        self.use_plots = config.get('use_plots', False)
        FunctionDoc.__init__(self, obj, doc=doc, config=config)
198 |
class SphinxClassDoc(SphinxDocString, ClassDoc):
    # Class documenter that renders through SphinxDocString's reST-flavoured
    # string-conversion methods (via the MRO).
    def __init__(self, obj, doc=None, func_doc=None, config={}):
        self.use_plots = config.get('use_plots', False)
        # NOTE(review): the ``func_doc`` argument is accepted but discarded —
        # ``None`` is always forwarded to ClassDoc; presumably intentional,
        # confirm against get_doc_object which passes SphinxFunctionDoc.
        ClassDoc.__init__(self, obj, doc=doc, func_doc=None, config=config)
203 |
class SphinxObjDoc(SphinxDocString):
    # Fallback documenter for plain objects that are neither classes nor
    # callables; the docstring must be supplied explicitly.
    def __init__(self, obj, doc=None, config={}):
        self._f = obj
        SphinxDocString.__init__(self, doc, config=config)
208 |
def get_doc_object(obj, what=None, doc=None, config={}):
    """Pick the appropriate Sphinx doc wrapper for *obj*.

    When *what* is not given it is inferred from the object's type:
    class, module, callable, or plain object.
    """
    kind = what
    if kind is None:
        if inspect.isclass(obj):
            kind = 'class'
        elif inspect.ismodule(obj):
            kind = 'module'
        elif callable(obj):
            kind = 'function'
        else:
            kind = 'object'

    if kind == 'class':
        return SphinxClassDoc(obj, func_doc=SphinxFunctionDoc, doc=doc,
                              config=config)
    if kind in ('function', 'method'):
        return SphinxFunctionDoc(obj, doc=doc, config=config)
    if doc is None:
        doc = pydoc.getdoc(obj)
    return SphinxObjDoc(obj, doc, config=config)
228 |
--------------------------------------------------------------------------------
/doc/sphinxext/github.py:
--------------------------------------------------------------------------------
1 | """Define text roles for GitHub
2 |
3 | * ghissue - Issue
4 | * ghpull - Pull Request
5 | * ghuser - User
6 |
7 | Adapted from bitbucket example here:
8 | https://bitbucket.org/birkenfeld/sphinx-contrib/src/tip/bitbucket/sphinxcontrib/bitbucket.py
9 |
10 | Authors
11 | -------
12 |
13 | * Doug Hellmann
14 | * Min RK
15 | """
16 | #
17 | # Original Copyright (c) 2010 Doug Hellmann. All rights reserved.
18 | #
19 |
20 | from docutils import nodes, utils
21 | from docutils.parsers.rst.roles import set_classes
22 |
def make_link_node(rawtext, app, type, slug, options):
    """Build a docutils reference node pointing at a GitHub resource.

    :param rawtext: Text being replaced with link node.
    :param app: Sphinx application context
    :param type: Link type (issues, changeset, etc.)
    :param slug: ID of the thing to link to
    :param options: Options dictionary passed to role func.
    :raises ValueError: when ``github_project_url`` is missing or empty.
    """
    try:
        base = app.config.github_project_url
        if not base:
            # Treat an empty/None value the same as a missing attribute.
            raise AttributeError
        if not base.endswith('/'):
            base += '/'
    except AttributeError as err:
        raise ValueError('github_project_url configuration value is not set (%s)' % str(err))

    ref = '%s%s/%s/' % (base, type, slug)
    set_classes(options)
    label = "#"
    if type == 'pull':
        label = "PR " + label
    return nodes.reference(rawtext, label + utils.unescape(slug),
                           refuri=ref, **options)
50 |
def ghissue_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
    """Role handler linking to a GitHub issue or pull request.

    Returns the usual docutils role tuple ``(nodes, messages)``; on invalid
    input the node list holds a problematic node and messages hold the error.

    :param name: The role name used in the document.
    :param rawtext: The entire markup snippet, with role.
    :param text: The text marked with the role.
    :param lineno: The line number where rawtext appears in the input.
    :param inliner: The inliner instance that called us.
    :param options: Directive options for customization.
    :param content: The directive content for customization.
    """
    # The issue/PR number must be a strictly positive integer.
    try:
        issue_num = int(text)
        if issue_num <= 0:
            raise ValueError
    except ValueError:
        msg = inliner.reporter.error(
            'GitHub issue number must be a number greater than or equal to 1; '
            '"%s" is invalid.' % text, line=lineno)
        prb = inliner.problematic(rawtext, rawtext, msg)
        return [prb], [msg]

    app = inliner.document.settings.env.app
    lowered = name.lower()
    if 'pull' in lowered:
        category = 'pull'
    elif 'issue' in lowered:
        category = 'issues'
    else:
        msg = inliner.reporter.error(
            'GitHub roles include "ghpull" and "ghissue", '
            '"%s" is invalid.' % name, line=lineno)
        prb = inliner.problematic(rawtext, rawtext, msg)
        return [prb], [msg]

    node = make_link_node(rawtext, app, category, str(issue_num), options)
    return [node], []
91 |
def ghuser_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
    """Role handler linking to a GitHub user profile.

    Returns the usual docutils role tuple ``(nodes, messages)``.

    :param name: The role name used in the document.
    :param rawtext: The entire markup snippet, with role.
    :param text: The text marked with the role.
    :param lineno: The line number where rawtext appears in the input.
    :param inliner: The inliner instance that called us.
    :param options: Directive options for customization.
    :param content: The directive content for customization.
    """
    # The app handle is looked up for parity with the other roles; building
    # a user link requires no configuration.
    app = inliner.document.settings.env.app
    ref = 'https://www.github.com/' + text
    return [nodes.reference(rawtext, text, refuri=ref, **options)], []
112 |
def ghcommit_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
    """Role handler linking to a GitHub commit.

    Returns the usual docutils role tuple ``(nodes, messages)``.

    :param name: The role name used in the document.
    :param rawtext: The entire markup snippet, with role.
    :param text: The text marked with the role.
    :param lineno: The line number where rawtext appears in the input.
    :param inliner: The inliner instance that called us.
    :param options: Directive options for customization.
    :param content: The directive content for customization.
    :raises ValueError: when ``github_project_url`` is missing or empty.
    """
    app = inliner.document.settings.env.app
    try:
        base = app.config.github_project_url
        if not base:
            raise AttributeError
        if not base.endswith('/'):
            base += '/'
    except AttributeError as err:
        raise ValueError('github_project_url configuration value is not set (%s)' % str(err))

    ref = base + text
    # Display only the first six characters of the commit hash.
    return [nodes.reference(rawtext, text[:6], refuri=ref, **options)], []
142 |
143 |
def setup(app):
    """Install the plugin.

    :param app: Sphinx application context.
    """
    # BUG FIX: ``Sphinx.info()`` was deprecated in Sphinx 1.6 and removed in
    # Sphinx 2.0; guard the call so the extension still loads on modern
    # versions while keeping the log message on old ones.
    info = getattr(app, 'info', None)
    if info is not None:
        info('Initializing GitHub plugin')
    app.add_role('ghissue', ghissue_role)
    # ghpull shares the issue handler, which dispatches on the role name.
    app.add_role('ghpull', ghissue_role)
    app.add_role('ghuser', ghuser_role)
    app.add_role('ghcommit', ghcommit_role)
    app.add_config_value('github_project_url', None, 'env')
    return
156 |
--------------------------------------------------------------------------------
/doc/sphinxext/math_dollar.py:
--------------------------------------------------------------------------------
1 | import re
2 |
3 | def dollars_to_math(source):
4 | r"""
5 | Replace dollar signs with backticks.
6 |
7 | More precisely, do a regular expression search. Replace a plain
8 | dollar sign ($) by a backtick (`). Replace an escaped dollar sign
9 | (\$) by a dollar sign ($). Don't change a dollar sign preceded or
10 | followed by a backtick (`$ or $`), because of strings like
11 | "``$HOME``". Don't make any changes on lines starting with
12 | spaces, because those are indented and hence part of a block of
13 | code or examples.
14 |
15 | This also doesn't replaces dollar signs enclosed in curly braces,
16 | to avoid nested math environments, such as ::
17 |
18 | $f(n) = 0 \text{ if $n$ is prime}$
19 |
20 | Thus the above line would get changed to
21 |
22 | `f(n) = 0 \text{ if $n$ is prime}`
23 | """
24 | s = "\n".join(source)
25 | if s.find("$") == -1:
26 | return
27 | # This searches for "$blah$" inside a pair of curly braces --
28 | # don't change these, since they're probably coming from a nested
29 | # math environment. So for each match, we replace it with a temporary
30 | # string, and later on we substitute the original back.
31 | global _data
32 | _data = {}
33 | def repl(matchobj):
34 | global _data
35 | s = matchobj.group(0)
36 | t = "___XXX_REPL_%d___" % len(_data)
37 | _data[t] = s
38 | return t
39 | s = re.sub(r"({[^{}$]*\$[^{}$]*\$[^{}]*})", repl, s)
40 | # matches $...$
41 | dollars = re.compile(r"(?= 3:
35 | sixu = lambda s: s
36 | else:
37 | sixu = lambda s: unicode(s, 'unicode_escape')
38 |
39 |
def mangle_docstrings(app, what, name, obj, options, lines,
                      reference_offset=[0]):
    """autodoc-process-docstring hook: rewrite *lines* in place to reST.

    ``reference_offset`` is a deliberate mutable default: it persists across
    calls so numeric reference labels stay unique across the whole build.
    """

    cfg = {'use_plots': app.config.numpydoc_use_plots,
           'show_class_members': app.config.numpydoc_show_class_members,
           'show_inherited_class_members':
           app.config.numpydoc_show_inherited_class_members,
           'class_members_toctree': app.config.numpydoc_class_members_toctree}

    u_NL = sixu('\n')
    if what == 'module':
        # Strip top title
        pattern = '^\\s*[#*=]{4,}\\n[a-z0-9 -]+\\n[#*=]{4,}\\s*'
        title_re = re.compile(sixu(pattern), re.I | re.S)
        lines[:] = title_re.sub(sixu(''), u_NL.join(lines)).split(u_NL)
    else:
        doc = get_doc_object(obj, what, u_NL.join(lines), config=cfg)
        if sys.version_info[0] >= 3:
            doc = str(doc)
        else:
            doc = unicode(doc)
        lines[:] = doc.split(u_NL)

    if (app.config.numpydoc_edit_link and hasattr(obj, '__name__') and
            obj.__name__):
        if hasattr(obj, '__module__'):
            v = dict(full_name=sixu("%s.%s") % (obj.__module__, obj.__name__))
        else:
            v = dict(full_name=obj.__name__)
        lines += [sixu(''), sixu('.. htmlonly::'), sixu('')]
        lines += [sixu('    %s') % x for x in
                  (app.config.numpydoc_edit_link % v).split("\n")]

    # replace reference numbers so that there are no duplicates
    references = []
    for line in lines:
        line = line.strip()
        # BUG FIX: the label character class needs a '+' quantifier; without
        # it only single-character labels such as "[1]" were ever detected
        # (compare the same pattern in docscrape_sphinx._str_references).
        m = re.match(sixu('^.. \\[([a-z0-9_.-]+)\\]'), line, re.I)
        if m:
            references.append(m.group(1))

    # start renaming from the longest string, to avoid overwriting parts
    references.sort(key=lambda x: -len(x))
    if references:
        for i, line in enumerate(lines):
            for r in references:
                if re.match(sixu('^\\d+$'), r):
                    new_r = sixu("R%d") % (reference_offset[0] + int(r))
                else:
                    new_r = sixu("%s%d") % (r, reference_offset[0])
                lines[i] = lines[i].replace(sixu('[%s]_') % r,
                                            sixu('[%s]_') % new_r)
                lines[i] = lines[i].replace(sixu('.. [%s]') % r,
                                            sixu('.. [%s]') % new_r)

    reference_offset[0] += len(references)
96 |
97 |
def mangle_signature(app, what, name, obj, options, sig, retann):
    """autodoc-process-signature hook: prefer the docstring's own signature."""
    # Do not try to inspect classes that don't define `__init__`
    if (inspect.isclass(obj) and
            (not hasattr(obj, '__init__') or
             'initializes x; see ' in pydoc.getdoc(obj.__init__))):
        return '', ''

    # BUG FIX: ``collections.Callable`` was removed in Python 3.10; the
    # builtin ``callable`` performs the identical check portably.
    if not (callable(obj) or hasattr(obj, '__argspec_is_invalid_')):
        return

    if not hasattr(obj, '__doc__'):
        return

    doc = SphinxDocString(pydoc.getdoc(obj))
    if doc['Signature']:
        # Drop everything before the opening parenthesis (the object name).
        sig = re.sub(sixu("^[^(]*"), sixu(""), doc['Signature'])
        return sig, sixu('')
116 |
117 |
def setup(app, get_doc_object_=get_doc_object):
    """Register the numpydoc hooks, config values and mangling domains."""
    if not hasattr(app, 'add_config_value'):
        return  # probably called by nose, better bail out

    global get_doc_object
    get_doc_object = get_doc_object_

    app.connect('autodoc-process-docstring', mangle_docstrings)
    app.connect('autodoc-process-signature', mangle_signature)

    # (name, default, rebuild) triples for every numpydoc_* option.
    for cfg_name, default, rebuild in (
            ('numpydoc_edit_link', None, False),
            ('numpydoc_use_plots', None, False),
            ('numpydoc_show_class_members', True, True),
            ('numpydoc_show_inherited_class_members', True, True),
            ('numpydoc_class_members_toctree', True, True)):
        app.add_config_value(cfg_name, default, rebuild)

    # Extra mangling domains
    app.add_domain(NumpyPythonDomain)
    app.add_domain(NumpyCDomain)
136 |
137 | # ------------------------------------------------------------------------------
138 | # Docstring-mangling domains
139 | # ------------------------------------------------------------------------------
140 |
141 | from docutils.statemachine import ViewList
142 | from sphinx.domains.c import CDomain
143 | from sphinx.domains.python import PythonDomain
144 |
145 |
class ManglingDomainBase(object):
    """Mixin that swaps selected directives for docstring-mangling wrappers."""

    directive_mangling_map = {}

    def __init__(self, *a, **kw):
        super(ManglingDomainBase, self).__init__(*a, **kw)
        self.wrap_mangling_directives()

    def wrap_mangling_directives(self):
        # Replace each mapped directive with a wrapper that mangles the
        # numpydoc content before delegating to the original directive.
        for directive_name, objtype in list(
                self.directive_mangling_map.items()):
            original = self.directives[directive_name]
            self.directives[directive_name] = wrap_mangling_directive(
                original, objtype)
157 |
158 |
class NumpyPythonDomain(ManglingDomainBase, PythonDomain):
    # Clone of the Sphinx Python domain (registered as "np") whose directives
    # mangle numpydoc-format docstrings before rendering.
    name = 'np'
    # Maps each directive name to the object type passed to mangle_docstrings.
    directive_mangling_map = {
        'function': 'function',
        'class': 'class',
        'exception': 'class',
        'method': 'function',
        'classmethod': 'function',
        'staticmethod': 'function',
        'attribute': 'attribute',
    }
    # No domain-specific indices are provided.
    indices = []
171 |
172 |
class NumpyCDomain(ManglingDomainBase, CDomain):
    # Clone of the Sphinx C domain (registered as "np-c") whose directives
    # mangle numpydoc-format docstrings before rendering.
    name = 'np-c'
    # Maps each directive name to the object type passed to mangle_docstrings.
    directive_mangling_map = {
        'function': 'function',
        'member': 'attribute',
        'macro': 'function',
        'type': 'class',
        'var': 'object',
    }
182 |
183 |
def wrap_mangling_directive(base_directive, objtype):
    """Return a subclass of *base_directive* whose content is numpydoc-mangled
    before the original directive runs."""
    class directive(base_directive):
        def run(self):
            env = self.state.document.settings.env

            # Extract the bare object name from the directive argument,
            # dropping any module-path prefix and signature suffix.
            name = None
            if self.arguments:
                match = re.match(r'^(.*\s+)?(.*?)(\(.*)?', self.arguments[0])
                name = match.group(2).strip()
            if not name:
                name = self.arguments[0]

            content_lines = list(self.content)
            mangle_docstrings(env.app, objtype, name, None, None,
                              content_lines)
            self.content = ViewList(content_lines, self.content.parent)

            return base_directive.run(self)

    return directive
204 |
--------------------------------------------------------------------------------
/doc/tools/LICENSE.txt:
--------------------------------------------------------------------------------
1 | These files were obtained from
2 |
3 | https://www.mail-archive.com/sphinx-dev@googlegroups.com/msg02472.html
4 |
5 | and were released under a BSD/MIT license by Fernando Perez, Matthew Brett and
6 | the PyMVPA folks. Further cleanups by the scikit-image crew.
7 |
8 |
--------------------------------------------------------------------------------
/doc/tools/apigen.py:
--------------------------------------------------------------------------------
1 | """
2 | Attempt to generate templates for module reference with Sphinx
3 |
4 | To include extension modules, first identify them as valid in the
5 | ``_uri2path`` method, then handle them in the ``_parse_module_with_import``
6 | script.
7 |
8 | Notes
9 | -----
10 | This parsing is based on import and introspection of modules.
11 | Previously functions and classes were found by parsing the text of .py files.
12 |
13 | Extension modules should be discovered and included as well.
14 |
15 | This is a modified version of a script originally shipped with the PyMVPA
16 | project, then adapted for use first in NIPY and then in skimage. PyMVPA
17 | is an MIT-licensed project.
18 | """
19 |
20 | # Stdlib imports
21 | import os
22 | import re
23 | from inspect import getmodule
24 |
25 | from types import BuiltinFunctionType, FunctionType
26 |
# DEBUG enables warning printouts for empty modules; set to False to suppress them
28 | DEBUG = True
29 |
30 | class ApiDocWriter(object):
31 | ''' Class for automatic detection and parsing of API docs
32 | to Sphinx-parsable reST format'''
33 |
34 | # only separating first two levels
35 | rst_section_levels = ['*', '=', '-', '~', '^']
36 |
    def __init__(self,
                 package_name,
                 rst_extension='.txt',
                 package_skip_patterns=None,
                 module_skip_patterns=None,
                 other_defines = True
                 ):
        ''' Initialize package for parsing

        Parameters
        ----------
        package_name : string
            Name of the top-level package.  *package_name* must be the
            name of an importable package
        rst_extension : string, optional
            Extension for reST files, default '.txt'
        package_skip_patterns : None or sequence of {strings, regexps}
            Sequence of strings giving URIs of packages to be excluded
            Operates on the package path, starting at (including) the
            first dot in the package path, after *package_name* - so,
            if *package_name* is ``sphinx``, then ``sphinx.util`` will
            result in ``.util`` being passed for searching by these
            regexps.  If is None, gives default. Default is:
            ``['\\.tests$']``
        module_skip_patterns : None or sequence
            Sequence of strings giving URIs of modules to be excluded
            Operates on the module name including preceding URI path,
            back to the first dot after *package_name*.  For example
            ``sphinx.util.console`` results in the string to search of
            ``.util.console``
            If is None, gives default. Default is:
            ``['\\.setup$', '\\._']``
        other_defines : {True, False}, optional
            Whether to include classes and functions that are imported in a
            particular module but not defined there.
        '''
        if package_skip_patterns is None:
            package_skip_patterns = ['\\.tests$']
        if module_skip_patterns is None:
            module_skip_patterns = ['\\.setup$', '\\._']
        # Assigning package_name goes through the property setter below,
        # which imports the package and records its filesystem root.
        self.package_name = package_name
        self.rst_extension = rst_extension
        self.package_skip_patterns = package_skip_patterns
        self.module_skip_patterns = module_skip_patterns
        self.other_defines = other_defines
82 |
    def get_package_name(self):
        # Accessor backing the ``package_name`` property defined below.
        return self._package_name

    def set_package_name(self, package_name):
        ''' Set package_name

        >>> docwriter = ApiDocWriter('sphinx')
        >>> import sphinx
        >>> docwriter.root_path == sphinx.__path__[0]
        True
        >>> docwriter.package_name = 'docutils'
        >>> import docutils
        >>> docwriter.root_path == docutils.__path__[0]
        True
        '''
        # It's also possible to imagine caching the module parsing here
        self._package_name = package_name
        root_module = self._import(package_name)
        self.root_path = root_module.__path__[-1]
        # Reset: any previously written modules belong to the old package.
        self.written_modules = None

    package_name = property(get_package_name, set_package_name, None,
                            'get/set package_name')
106 |
107 | def _import(self, name):
108 | ''' Import namespace package '''
109 | mod = __import__(name)
110 | components = name.split('.')
111 | for comp in components[1:]:
112 | mod = getattr(mod, comp)
113 | return mod
114 |
115 | def _get_object_name(self, line):
116 | ''' Get second token in line
117 | >>> docwriter = ApiDocWriter('sphinx')
118 | >>> docwriter._get_object_name(" def func(): ")
119 | 'func'
120 | >>> docwriter._get_object_name(" class Klass(object): ")
121 | 'Klass'
122 | >>> docwriter._get_object_name(" class Klass: ")
123 | 'Klass'
124 | '''
125 | name = line.split()[1].split('(')[0].strip()
126 | # in case we have classes which are not derived from object
127 | # ie. old style classes
128 | return name.rstrip(':')
129 |
130 | def _uri2path(self, uri):
131 | ''' Convert uri to absolute filepath
132 |
133 | Parameters
134 | ----------
135 | uri : string
136 | URI of python module to return path for
137 |
138 | Returns
139 | -------
140 | path : None or string
141 | Returns None if there is no valid path for this URI
142 | Otherwise returns absolute file system path for URI
143 |
144 | Examples
145 | --------
146 | >>> docwriter = ApiDocWriter('sphinx')
147 | >>> import sphinx
148 | >>> modpath = sphinx.__path__[0]
149 | >>> res = docwriter._uri2path('sphinx.builder')
150 | >>> res == os.path.join(modpath, 'builder.py')
151 | True
152 | >>> res = docwriter._uri2path('sphinx')
153 | >>> res == os.path.join(modpath, '__init__.py')
154 | True
155 | >>> docwriter._uri2path('sphinx.does_not_exist')
156 |
157 | '''
158 | if uri == self.package_name:
159 | return os.path.join(self.root_path, '__init__.py')
160 | path = uri.replace(self.package_name + '.', '')
161 | path = path.replace('.', os.path.sep)
162 | path = os.path.join(self.root_path, path)
163 | # XXX maybe check for extensions as well?
164 | if os.path.exists(path + '.py'): # file
165 | path += '.py'
166 | elif os.path.exists(os.path.join(path, '__init__.py')):
167 | path = os.path.join(path, '__init__.py')
168 | else:
169 | return None
170 | return path
171 |
172 | def _path2uri(self, dirpath):
173 | ''' Convert directory path to uri '''
174 | package_dir = self.package_name.replace('.', os.path.sep)
175 | relpath = dirpath.replace(self.root_path, package_dir)
176 | if relpath.startswith(os.path.sep):
177 | relpath = relpath[1:]
178 | return relpath.replace(os.path.sep, '.')
179 |
180 | def _parse_module(self, uri):
181 | ''' Parse module defined in *uri* '''
182 | filename = self._uri2path(uri)
183 | if filename is None:
184 | print(filename, 'erk')
185 | # nothing that we could handle here.
186 | return ([],[])
187 |
188 | f = open(filename, 'rt')
189 | functions, classes = self._parse_lines(f)
190 | f.close()
191 | return functions, classes
192 |
193 | def _parse_module_with_import(self, uri):
194 | """Look for functions and classes in an importable module.
195 |
196 | Parameters
197 | ----------
198 | uri : str
199 | The name of the module to be parsed. This module needs to be
200 | importable.
201 |
202 | Returns
203 | -------
204 | functions : list of str
205 | A list of (public) function names in the module.
206 | classes : list of str
207 | A list of (public) class names in the module.
208 | """
209 | mod = __import__(uri, fromlist=[uri])
210 | # find all public objects in the module.
211 | obj_strs = [obj for obj in dir(mod) if not obj.startswith('_')]
212 | functions = []
213 | classes = []
214 | for obj_str in obj_strs:
215 | # find the actual object from its string representation
216 | if obj_str not in mod.__dict__:
217 | continue
218 | obj = mod.__dict__[obj_str]
219 | # Check if function / class defined in module
220 | if not self.other_defines and not getmodule(obj) == mod:
221 | continue
222 | # figure out if obj is a function or class
223 | if hasattr(obj, 'func_name') or \
224 | isinstance(obj, BuiltinFunctionType) or \
225 | isinstance(obj, FunctionType):
226 | functions.append(obj_str)
227 | else:
228 | try:
229 | issubclass(obj, object)
230 | classes.append(obj_str)
231 | except TypeError:
232 | # not a function or class
233 | pass
234 | return functions, classes
235 |
236 | def _parse_lines(self, linesource):
237 | ''' Parse lines of text for functions and classes '''
238 | functions = []
239 | classes = []
240 | for line in linesource:
241 | if line.startswith('def ') and line.count('('):
242 | # exclude private stuff
243 | name = self._get_object_name(line)
244 | if not name.startswith('_'):
245 | functions.append(name)
246 | elif line.startswith('class '):
247 | # exclude private stuff
248 | name = self._get_object_name(line)
249 | if not name.startswith('_'):
250 | classes.append(name)
251 | else:
252 | pass
253 | functions.sort()
254 | classes.sort()
255 | return functions, classes
256 |
257 | def generate_api_doc(self, uri):
258 | '''Make autodoc documentation template string for a module
259 |
260 | Parameters
261 | ----------
262 | uri : string
263 | python location of module - e.g 'sphinx.builder'
264 |
265 | Returns
266 | -------
267 | head : string
268 | Module name, table of contents.
269 | body : string
270 | Function and class docstrings.
271 | '''
272 | # get the names of all classes and functions
273 | functions, classes = self._parse_module_with_import(uri)
274 | if not len(functions) and not len(classes) and DEBUG:
275 | print('WARNING: Empty -', uri) # dbg
276 |
277 | # Make a shorter version of the uri that omits the package name for
278 | # titles
279 | uri_short = re.sub(r'^%s\.' % self.package_name,'',uri)
280 |
281 | head = '.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n'
282 | body = ''
283 |
284 | # Set the chapter title to read 'module' for all modules except for the
285 | # main packages
286 | if '.' in uri_short:
287 | title = 'Module: :mod:`' + uri_short + '`'
288 | head += title + '\n' + self.rst_section_levels[2] * len(title)
289 | else:
290 | title = ':mod:`' + uri_short + '`'
291 | head += title + '\n' + self.rst_section_levels[1] * len(title)
292 |
293 | head += '\n.. automodule:: ' + uri + '\n'
294 | head += '\n.. currentmodule:: ' + uri + '\n'
295 | body += '\n.. currentmodule:: ' + uri + '\n\n'
296 | for c in classes:
297 | body += '\n:class:`' + c + '`\n' \
298 | + self.rst_section_levels[3] * \
299 | (len(c)+9) + '\n\n'
300 | body += '\n.. autoclass:: ' + c + '\n'
301 | # must NOT exclude from index to keep cross-refs working
302 | body += ' :members:\n' \
303 | ' :undoc-members:\n' \
304 | ' :show-inheritance:\n' \
305 | '\n' \
306 | ' .. automethod:: __init__\n\n'
307 | head += '.. autosummary::\n\n'
308 | for f in classes + functions:
309 | head += ' ' + f + '\n'
310 | head += '\n'
311 |
312 | for f in functions:
313 | # must NOT exclude from index to keep cross-refs working
314 | body += f + '\n'
315 | body += self.rst_section_levels[3] * len(f) + '\n'
316 | body += '\n.. autofunction:: ' + f + '\n\n'
317 |
318 | return head, body
319 |
320 | def _survives_exclude(self, matchstr, match_type):
321 | ''' Returns True if *matchstr* does not match patterns
322 |
323 | ``self.package_name`` removed from front of string if present
324 |
325 | Examples
326 | --------
327 | >>> dw = ApiDocWriter('sphinx')
328 | >>> dw._survives_exclude('sphinx.okpkg', 'package')
329 | True
330 | >>> dw.package_skip_patterns.append('^\\.badpkg$')
331 | >>> dw._survives_exclude('sphinx.badpkg', 'package')
332 | False
333 | >>> dw._survives_exclude('sphinx.badpkg', 'module')
334 | True
335 | >>> dw._survives_exclude('sphinx.badmod', 'module')
336 | True
337 | >>> dw.module_skip_patterns.append('^\\.badmod$')
338 | >>> dw._survives_exclude('sphinx.badmod', 'module')
339 | False
340 | '''
341 | if match_type == 'module':
342 | patterns = self.module_skip_patterns
343 | elif match_type == 'package':
344 | patterns = self.package_skip_patterns
345 | else:
346 | raise ValueError('Cannot interpret match type "%s"'
347 | % match_type)
348 | # Match to URI without package name
349 | L = len(self.package_name)
350 | if matchstr[:L] == self.package_name:
351 | matchstr = matchstr[L:]
352 | for pat in patterns:
353 | try:
354 | pat.search
355 | except AttributeError:
356 | pat = re.compile(pat)
357 | if pat.search(matchstr):
358 | return False
359 |
360 | return True
361 |
362 | def discover_modules(self):
363 | ''' Return module sequence discovered from ``self.package_name``
364 |
365 |
366 | Parameters
367 | ----------
368 | None
369 |
370 | Returns
371 | -------
372 | mods : sequence
373 | Sequence of module names within ``self.package_name``
374 |
375 | Examples
376 | --------
377 | >>> dw = ApiDocWriter('sphinx')
378 | >>> mods = dw.discover_modules()
379 | >>> 'sphinx.util' in mods
380 | True
381 | >>> dw.package_skip_patterns.append('\.util$')
382 | >>> 'sphinx.util' in dw.discover_modules()
383 | False
384 | >>>
385 | '''
386 | modules = [self.package_name]
387 | # raw directory parsing
388 | for dirpath, dirnames, filenames in os.walk(self.root_path):
389 | # Check directory names for packages
390 | root_uri = self._path2uri(os.path.join(self.root_path,
391 | dirpath))
392 |
393 | # Normally, we'd only iterate over dirnames, but since
394 | # dipy does not import a whole bunch of modules we'll
395 | # include those here as well (the *.py filenames).
396 | filenames = [f[:-3] for f in filenames if
397 | f.endswith('.py') and not f.startswith('__init__')]
398 | for filename in filenames:
399 | package_uri = '/'.join((dirpath, filename))
400 |
401 | for subpkg_name in dirnames + filenames:
402 | package_uri = '.'.join((root_uri, subpkg_name))
403 | package_path = self._uri2path(package_uri)
404 | if (package_path and
405 | self._survives_exclude(package_uri, 'package')):
406 | modules.append(package_uri)
407 |
408 | return sorted(modules)
409 |
410 | def write_modules_api(self, modules, outdir):
411 | # upper-level modules
412 | main_module = modules[0].split('.')[0]
413 | ulms = ['.'.join(m.split('.')[:2]) if m.count('.') >= 1
414 | else m.split('.')[0] for m in modules]
415 |
416 | from collections import OrderedDict
417 | module_by_ulm = OrderedDict()
418 |
419 | for v, k in zip(modules, ulms):
420 | if k in module_by_ulm:
421 | module_by_ulm[k].append(v)
422 | else:
423 | module_by_ulm[k] = [v]
424 |
425 | written_modules = []
426 |
427 | for ulm, mods in module_by_ulm.items():
428 | print("Generating docs for %s:" % ulm)
429 | document_head = []
430 | document_body = []
431 |
432 | for m in mods:
433 | print(" -> " + m)
434 | head, body = self.generate_api_doc(m)
435 |
436 | document_head.append(head)
437 | document_body.append(body)
438 |
439 | out_module = ulm + self.rst_extension
440 | outfile = os.path.join(outdir, out_module)
441 | fileobj = open(outfile, 'wt')
442 |
443 | fileobj.writelines(document_head + document_body)
444 | fileobj.close()
445 | written_modules.append(out_module)
446 |
447 | self.written_modules = written_modules
448 |
449 | def write_api_docs(self, outdir):
450 | """Generate API reST files.
451 |
452 | Parameters
453 | ----------
454 | outdir : string
455 | Directory name in which to store files
456 | We create automatic filenames for each module
457 |
458 | Returns
459 | -------
460 | None
461 |
462 | Notes
463 | -----
464 | Sets self.written_modules to list of written modules
465 | """
466 | if not os.path.exists(outdir):
467 | os.mkdir(outdir)
468 | # compose list of modules
469 | modules = self.discover_modules()
470 | self.write_modules_api(modules,outdir)
471 |
472 | def write_index(self, outdir, froot='gen', relative_to=None):
473 | """Make a reST API index file from written files
474 |
475 | Parameters
476 | ----------
477 | path : string
478 | Filename to write index to
479 | outdir : string
480 | Directory to which to write generated index file
481 | froot : string, optional
482 | root (filename without extension) of filename to write to
483 | Defaults to 'gen'. We add ``self.rst_extension``.
484 | relative_to : string
485 | path to which written filenames are relative. This
486 | component of the written file path will be removed from
487 | outdir, in the generated index. Default is None, meaning,
488 | leave path as it is.
489 | """
490 | if self.written_modules is None:
491 | raise ValueError('No modules written')
492 | # Get full filename path
493 | path = os.path.join(outdir, froot+self.rst_extension)
494 | # Path written into index is relative to rootpath
495 | if relative_to is not None:
496 | relpath = (outdir + os.path.sep).replace(relative_to + os.path.sep, '')
497 | else:
498 | relpath = outdir
499 | idx = open(path,'wt')
500 | w = idx.write
501 | w('.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n')
502 |
503 | title = "API Reference"
504 | w(title + "\n")
505 | w("=" * len(title) + "\n\n")
506 | w('.. toctree::\n\n')
507 | for f in self.written_modules:
508 | w(' %s\n' % os.path.join(relpath,f))
509 | idx.close()
510 |
--------------------------------------------------------------------------------
/doc/tools/buildmodref.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """Script to auto-generate API docs.
3 | """
4 | from __future__ import print_function, division
5 |
6 | # stdlib imports
7 | import sys
8 | import re
9 |
10 | # local imports
11 | from apigen import ApiDocWriter
12 |
13 | # version comparison
14 | from distutils.version import LooseVersion as V
15 |
16 | #*****************************************************************************
17 |
def abort(error):
    """Print a warning and terminate the script.

    Uses ``sys.exit`` rather than the ``site``-provided ``exit`` builtin,
    which is not guaranteed to exist (e.g. under ``python -S``).
    """
    print('*WARNING* API documentation not generated: %s' % error)
    sys.exit()
21 |
22 |
def writeapi(package, outdir, source_version=None, other_defines=True):
    """Generate API reST docs for *package* into *outdir*.

    Parameters
    ----------
    package : str
        Importable package name to document.
    outdir : str
        Output directory for the generated reST files.
    source_version : str or LooseVersion, optional
        Expected version of the package sources.  When given, the docs are
        only (re)generated if it equals the installed version; when None
        the check is skipped.  Made optional (default None) because the
        ``__main__`` block never supplied it, which raised TypeError.
    other_defines : bool, optional
        Whether to document objects merely imported into the modules.
    """
    # Check that the package is available. If not, the API documentation is
    # not (re)generated and existing API documentation sources will be used.
    try:
        __import__(package)
    except ImportError:
        abort("Can not import " + package)

    module = sys.modules[package]

    # Check that the source version is equal to the installed
    # version. If the versions mismatch the API documentation sources
    # are not (re)generated. This avoids automatic generation of documentation
    # for older or newer versions if such versions are installed on the system.
    if source_version is not None:
        installed_version = V(module.__version__)
        if source_version != installed_version:
            abort("Installed version does not match source version")

    docwriter = ApiDocWriter(package, rst_extension='.rst',
                             other_defines=other_defines)

    docwriter.package_skip_patterns += [r'\.%s$' % package,
                                        r'.*test.*$',
                                        r'\.version.*$',
                                        r'\._version.*$']
    docwriter.write_api_docs(outdir)
    docwriter.write_index(outdir, 'index', relative_to=outdir)
    print('%d files written' % len(docwriter.written_modules))
53 |
54 |
if __name__ == '__main__':
    # Command line: buildmodref.py <package> <outdir> [<other_defines>]
    package = sys.argv[1]
    outdir = sys.argv[2]
    # Optional third argument toggles documentation of re-exported names.
    if len(sys.argv) > 3:
        other_defines = sys.argv[3] in ('True', 'true', '1')
    else:
        other_defines = True

    writeapi(package, outdir, other_defines=other_defines)
66 |
--------------------------------------------------------------------------------
/examples/example.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from matplotlib import pyplot as plt
3 | from mpl_toolkits.axes_grid1 import make_axes_locatable
4 |
5 | try:
6 | import skimage.data
7 | from skimage import img_as_float
8 | from skimage.transform import resize
9 | except ImportError:
10 | raise ImportError("This example requires scikit-image")
11 |
12 | from FFST import (scalesShearsAndSpectra,
13 | inverseShearletTransformSpect,
14 | shearletTransformSpect)
15 | from FFST._fft import ifftnc # centered nD inverse FFT
16 |
17 |
def add_cbar(im, ax):
    """Attach a colorbar for image *im* beside axes *ax* (10% of its width)."""
    cax = make_axes_locatable(ax).append_axes("right", size="10%", pad=0.05)
    plt.colorbar(im, cax=cax)
22 |
23 |
# Load the demo image as floats and shrink it to 256x256 for a quick run.
X = img_as_float(skimage.data.camera())
X = resize(X, (256, 256))

# compute shearlet transform
ST, Psi = shearletTransformSpect(X)

# Recompute the shearlet spectra Psi alone.  numOfScales=None presumably
# lets the library choose the scale count from the image size -- TODO
# confirm against FFST's scalesShearsAndSpectra docs.
Psi = scalesShearsAndSpectra(X.shape, numOfScales=None,
                             realCoefficients=True)

# Arbitrary coefficient index used for all single-shearlet plots below.
idx = 13
fig, axes = plt.subplots(2, 2)

axes[0, 0].imshow(X, interpolation='nearest', cmap=plt.cm.gray)
axes[0, 0].set_axis_off()
axes[0, 0].set_title('original image')

axes[0, 1].imshow(ST[..., idx], interpolation='nearest', cmap=plt.cm.gray)
axes[0, 1].set_axis_off()
axes[0, 1].set_title('shearlet coefficients')

# fftshift centers the zero-frequency component for display.
Psi_shifted = np.fft.fftshift(Psi[..., idx])
axes[1, 0].imshow(Psi_shifted, interpolation='nearest', cmap=plt.cm.gray)
axes[1, 0].set_axis_off()
axes[1, 0].set_title('shearlet in Fourier domain')

# Centered inverse FFT brings the shearlet back to the spatial domain.
axes[1, 1].imshow(np.abs(ifftnc(Psi_shifted)),
                  interpolation='nearest', cmap=plt.cm.gray)
axes[1, 1].set_axis_off()
axes[1, 1].set_title('shearlet in time domain')

# show frame tightness and exactness

# For a tight frame the squared spectra sum to 1, so this plot should be
# (near) zero everywhere.
plt.figure()
plt.imshow(1 - np.sum(Psi**2, -1), cmap=plt.cm.gray)
plt.colorbar()
plt.title('Frame Tightness')

# Round-trip: inverse transform of the coefficients should recover X.
XX = inverseShearletTransformSpect(ST, Psi)

plt.figure()
plt.imshow(np.abs(X-XX), cmap=plt.cm.gray)
plt.colorbar()
plt.title('Transform Exactness')
67 |
68 |
# Disabled demo: complex-valued shearlet coefficients.  Flip to True to run.
if False:
    # complex shearlet example

    # compute shearlet transform with complex coefficients
    ST, Psi = shearletTransformSpect(X, realCoefficients=False)
    # Arbitrary coefficient index used for the plots below.
    idx = 13
    fig, axes = plt.subplots(3, 2)

    cmap = plt.cm.jet
    im = axes[0, 0].imshow(X, interpolation='nearest', cmap=cmap)
    add_cbar(im, axes[0, 0])
    axes[0, 0].set_axis_off()
    axes[0, 0].set_title('original image')
    # Top-right panel intentionally left empty.
    axes[0, 1].set_axis_off()

    im = axes[1, 0].imshow(ST[..., idx].real, interpolation='nearest',
                           cmap=cmap)
    add_cbar(im, axes[1, 0])
    axes[1, 0].set_axis_off()
    axes[1, 0].set_title('shearlet coefficients (real part)')

    im = axes[1, 1].imshow(ST[..., idx].imag, interpolation='nearest',
                           cmap=cmap)
    add_cbar(im, axes[1, 1])
    axes[1, 1].set_axis_off()
    axes[1, 1].set_title('shearlet coefficients (imaginary part)')

    # Magnitude and phase views of the same complex coefficients.
    im = axes[2, 0].imshow(np.abs(ST[..., idx]), interpolation='nearest',
                           cmap=cmap)
    add_cbar(im, axes[2, 0])
    axes[2, 0].set_axis_off()
    axes[2, 0].set_title('shearlet coefficients (absolute value)')

    im = axes[2, 1].imshow(np.angle(ST[..., idx]), interpolation='nearest',
                           cmap=cmap)
    add_cbar(im, axes[2, 1])
    axes[2, 1].set_axis_off()
    axes[2, 1].set_title('shearlet coefficients (phase)')

plt.show()
109 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [versioneer]
2 | VCS = git
3 | style = pep440
4 | versionfile_source = FFST/_version.py
5 | versionfile_build = FFST/_version.py
6 | tag_prefix =
7 | parentdir_prefix = FFST-
8 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
from setuptools import setup, find_packages

import versioneer

# Package metadata; version and build commands are delegated to versioneer.
setup(name='PyShearlets',
      packages=find_packages(),
      version=versioneer.get_version(),
      cmdclass=versioneer.get_cmdclass(),
      description='2D fast finite shearlet transforms.',
      author='Gregory R. Lee',
      author_email='grlee77@gmail.com',
      url='https://bitbucket.org/grlee77/FFST',
      license='BSD 3-clause',
      zip_safe=False,
      # package_data={},
      )
18 |
--------------------------------------------------------------------------------