├── docs ├── rtd-pip-requirements ├── astroscrappy │ └── index.rst ├── _templates │ └── autosummary │ │ ├── base.rst │ │ ├── class.rst │ │ └── module.rst ├── index.rst ├── Makefile ├── make.bat └── conf.py ├── .gitmodules ├── astroscrappy ├── tests │ ├── setup_package.py │ ├── __init__.py │ ├── coveragerc │ ├── test_astroscrappy.py │ └── test_utils.py ├── utils │ ├── median_utils.pxd │ ├── __init__.py │ ├── setup_package.py │ ├── imutils.h │ ├── medutils.h │ ├── image_utils.pyx │ ├── median_utils.pyx │ └── imutils.c ├── conftest.py ├── __init__.py ├── _astropy_init.py └── astroscrappy.pyx ├── licenses ├── README.rst └── LICENSE.rst ├── MANIFEST.in ├── CHANGES.rst ├── setup.cfg ├── .gitignore ├── README.rst ├── setup.py ├── .travis.yml ├── ez_setup.py └── ah_bootstrap.py /docs/rtd-pip-requirements: -------------------------------------------------------------------------------- 1 | numpy 2 | matplotlib 3 | Cython 4 | astropy-helpers 5 | astropy 6 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "astropy_helpers"] 2 | path = astropy_helpers 3 | url = https://github.com/astropy/astropy-helpers.git 4 | -------------------------------------------------------------------------------- /astroscrappy/tests/setup_package.py: -------------------------------------------------------------------------------- 1 | def get_package_data(): 2 | return { 3 | _ASTROPY_PACKAGE_NAME_ + '.tests': ['coveragerc']} 4 | -------------------------------------------------------------------------------- /astroscrappy/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | """ 3 | This package contains affiliated package tests. 4 | """ 5 | -------------------------------------------------------------------------------- /docs/astroscrappy/index.rst: -------------------------------------------------------------------------------- 1 | **************** 2 | ASTROSCRAPPY 3 | **************** 4 | 5 | Reference/API 6 | ============= 7 | 8 | .. automodapi:: astroscrappy 9 | -------------------------------------------------------------------------------- /licenses/README.rst: -------------------------------------------------------------------------------- 1 | Licenses 2 | ======== 3 | 4 | This directory holds license and credit information for the affiliated package, 5 | works the affiliated package is derived from, and/or datasets. 6 | -------------------------------------------------------------------------------- /docs/_templates/autosummary/base.rst: -------------------------------------------------------------------------------- 1 | {% extends "autosummary_core/base.rst" %} 2 | {# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core. If you want to modify this template, it is strongly recommended that you still inherit from the astropy template. #} -------------------------------------------------------------------------------- /docs/_templates/autosummary/class.rst: -------------------------------------------------------------------------------- 1 | {% extends "autosummary_core/class.rst" %} 2 | {# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core. If you want to modify this template, it is strongly recommended that you still inherit from the astropy template.
#} -------------------------------------------------------------------------------- /docs/_templates/autosummary/module.rst: -------------------------------------------------------------------------------- 1 | {% extends "autosummary_core/module.rst" %} 2 | {# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core. If you want to modify this template, it is strongly recommended that you still inherit from the astropy template. #} -------------------------------------------------------------------------------- /astroscrappy/utils/median_utils.pxd: -------------------------------------------------------------------------------- 1 | """ 2 | Header file for Cython functions in the utils package. 3 | 4 | This allows the Cython code to call these routines directly 5 | without requiring the GIL. 6 | """ 7 | 8 | """ 9 | Calculate the median of the first n elements of a C float array 10 | without requiring the GIL. 11 | """ 12 | cdef float cymedian(float* aptr, int n) nogil -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst 2 | include CHANGES.rst 3 | 4 | include ez_setup.py 5 | include ah_bootstrap.py 6 | include setup.cfg 7 | 8 | global-include *.pyx *.c *.pxd *.h 9 | recursive-include docs * 10 | recursive-include licenses * 11 | recursive-include cextern * 12 | recursive-include scripts * 13 | 14 | prune build 15 | prune docs/_build 16 | prune docs/api 17 | 18 | recursive-include astropy_helpers * 19 | exclude astropy_helpers/.git 20 | exclude astropy_helpers/.gitignore 21 | 22 | global-exclude *.pyc *.o 23 | -------------------------------------------------------------------------------- /CHANGES.rst: -------------------------------------------------------------------------------- 1 | 1.1 (unreleased) 2 | ---------------- 3 | 4 | - Fixed setup_requires so that it doesn't install astropy when using egg_info. 5 | 6 | - Pinned coverage version to 3.7.1. 7 | 8 | 9 | 1.0.3 (2015-09-29) 10 | ------------------ 11 | 12 | - Updated URL in setup.cfg. 13 | 14 | 1.0.2 (2015-09-29) 15 | ------------------ 16 | 17 | - Added .h files to MANIFEST.in 18 | 19 | 1.0.1 (2015-09-29) 20 | ------------------ 21 | 22 | - Fixed bug in MANIFEST.in that was excluding *.pyx files. 23 | 24 | 1.0 (2015-09-29) 25 | ---------------- 26 | 27 | - Initial release. 28 | -------------------------------------------------------------------------------- /astroscrappy/utils/__init__.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | 3 | """ 4 | Utility functions for Astro-SCRAPPY 5 | 6 | These include fast implementations for calculating the median, 7 | median filters, and other image operations. 8 | """ 9 | 10 | from .median_utils import * 11 | from .image_utils import * 12 | 13 | __all__ = ['median', 'optmed3', 'optmed5', 'optmed7', 'optmed9', 14 | 'optmed25', 'medfilt3', 'medfilt5', 'medfilt7', 15 | 'sepmedfilt3', 'sepmedfilt5', 'sepmedfilt7', 'sepmedfilt9', 16 | 'subsample', 'rebin', 'convolve', 'laplaceconvolve', 17 | 'dilate3', 'dilate5'] 18 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Documentation 2 | ============= 3 | 4 | This is an affiliated package for the Astropy project.
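Astro-SCRAPPY's public interface is the single function ``detect_cosmics``.
A minimal usage sketch follows (hedged: only the keyword arguments exercised
by the test suite in this repository are shown, the values are illustrative,
and ``my_image`` is a placeholder for your own array)::

    import numpy as np
    from astroscrappy import detect_cosmics

    # The arrays must be C-contiguous floats (see the README).
    image = np.ascontiguousarray(my_image, dtype=np.float32)
    crmask, cleaned = detect_cosmics(image, gain=1.0, readnoise=10.0,
                                     sigclip=4.5, sigfrac=0.3)
    # crmask: boolean mask of detected cosmic ray pixels
    # cleaned: the cosmic-ray-cleaned image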
The documentation for 5 | this package is here: 6 | 7 | .. toctree:: 8 | :maxdepth: 2 9 | 10 | astroscrappy/index.rst 11 | 12 | .. note:: The layout of this directory is simply a suggestion. To follow 13 | traditional practice, do *not* edit this page, but instead place 14 | all documentation for the affiliated package inside ``astroscrappy/``. 15 | The traditional practice was intended to allow the affiliated 16 | package to eventually be merged into the main astropy package. 17 | You can follow this practice or choose your own layout. 18 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [build_sphinx] 2 | source-dir = docs 3 | build-dir = docs/_build 4 | all_files = 1 5 | 6 | [upload_docs] 7 | upload-dir = docs/_build/html 8 | show-response = 1 9 | 10 | [pytest] 11 | minversion = 2.2 12 | norecursedirs = build docs/_build 13 | doctest_plus = enabled 14 | 15 | [ah_bootstrap] 16 | auto_use = True 17 | 18 | [metadata] 19 | package_name = astroscrappy 20 | description = Speedy Cosmic Ray Annihilation Package in Python 21 | long_description = This package was designed to detect and clean cosmic rays in images, originally based on the LA Cosmic algorithm. 22 | author = Curtis McCully 23 | author_email = cmccully@lcogt.net 24 | license = BSD 25 | edit_on_github = True 26 | github_project = astropy/astroscrappy 27 | url = https://github.com/astropy/astroscrappy -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled files 2 | *.py[co] 3 | *.a 4 | *.o 5 | *.so 6 | a.out 7 | __pycache__ 8 | .idea 9 | 10 | # Ignore .c files by default to avoid including generated code. If you want to 11 | # add a non-generated .c extension, use `git add -f filename.c`. 
12 | *.c 13 | 14 | # Other generated files 15 | */version.py 16 | */cython_version.py 17 | htmlcov 18 | .coverage 19 | MANIFEST 20 | 21 | # Sphinx 22 | docs/api 23 | docs/_build 24 | 25 | # Eclipse editor project files 26 | .project 27 | .pydevproject 28 | .settings 29 | 30 | # Pycharm editor project files 31 | .idea 32 | 33 | # Packages/installer info 34 | *.egg 35 | *.egg-info 36 | dist 37 | build 38 | eggs 39 | parts 40 | bin 41 | var 42 | sdist 43 | develop-eggs 44 | .installed.cfg 45 | distribute-*.tar.gz 46 | 47 | # Other 48 | .*.swp 49 | *~ 50 | 51 | # Mac OSX 52 | .DS_Store 53 | -------------------------------------------------------------------------------- /astroscrappy/tests/coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = {packagename} 3 | omit = 4 | {packagename}/_astropy_init* 5 | {packagename}/conftest* 6 | {packagename}/cython_version* 7 | {packagename}/setup_package* 8 | {packagename}/*/setup_package* 9 | {packagename}/*/*/setup_package* 10 | {packagename}/tests/* 11 | {packagename}/*/tests/* 12 | {packagename}/*/*/tests/* 13 | {packagename}/version* 14 | 15 | [report] 16 | exclude_lines = 17 | # Have to re-enable the standard pragma 18 | pragma: no cover 19 | 20 | # Don't complain about packages we have installed 21 | except ImportError 22 | 23 | # Don't complain if tests don't hit assertions 24 | raise AssertionError 25 | raise NotImplementedError 26 | 27 | # Don't complain about script hooks 28 | def main\(.*\): 29 | 30 | # Ignore branches that don't pertain to this version of Python 31 | pragma: py{ignore_python_version} -------------------------------------------------------------------------------- /astroscrappy/conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | # This imports plugins that configure py.test for astropy tests. 4 | # By importing them here in conftest.py they are discoverable by py.test 5 | # no matter how it is invoked within the source tree. 6 | 7 | from astropy.tests.pytest_plugins import * 8 | 9 | ## Uncomment the following line to treat all DeprecationWarnings as 10 | ## exceptions 11 | # enable_deprecations_as_exceptions() 12 | 13 | try: 14 | PYTEST_HEADER_MODULES['Astropy'] = 'astropy' 15 | del PYTEST_HEADER_MODULES['h5py'] 16 | except NameError: # needed to support Astropy < 1.0 17 | pass 18 | 19 | # This is to figure out the affiliated package version, rather than 20 | # using Astropy's 21 | from . import version 22 | 23 | try: 24 | packagename = os.path.basename(os.path.dirname(__file__)) 25 | TESTED_VERSIONS[packagename] = version.version 26 | except NameError: # Needed to support Astropy <= 1.0.0 27 | pass 28 | -------------------------------------------------------------------------------- /licenses/LICENSE.rst: -------------------------------------------------------------------------------- 1 | Copyright (c) 2015, Curtis McCully 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without modification, 5 | are permitted provided that the following conditions are met: 6 | 7 | * Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 9 | * Redistributions in binary form must reproduce the above copyright notice, this 10 | list of conditions and the following disclaimer in the documentation and/or 11 | other materials provided with the distribution.
12 | * Neither the name of the Astropy Team nor the names of its contributors may be 13 | used to endorse or promote products derived from this software without 14 | specific prior written permission. 15 | 16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 17 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 18 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 19 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR 20 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 21 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 22 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 23 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 25 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 | -------------------------------------------------------------------------------- /astroscrappy/tests/test_astroscrappy.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | from __future__ import (absolute_import, division, print_function, 3 | unicode_literals) 4 | import numpy as np 5 | from ..astroscrappy import detect_cosmics 6 | 7 | # Make a simple Gaussian function for testing purposes 8 | def gaussian(image_shape, x0, y0, brightness, fwhm): 9 | x = np.arange(image_shape[1]) 10 | y = np.arange(image_shape[0]) 11 | x2d, y2d = np.meshgrid(x, y) 12 | 13 | sig = fwhm / 2.35482 14 | 15 | normfactor = brightness / 2.0 / np.pi * sig ** -2.0 16 | exponent = -0.5 * sig ** -2.0 17 | exponent *= (x2d - x0) ** 2.0 + (y2d - y0) ** 2.0 18 | 19 | return normfactor * np.exp(exponent) 20 | 21 | 22 | # Set a seed so that the tests are repeatable 23 | np.random.seed(200) 24 | 25 | # Create a simulated image to use in our tests 26 | imdata = np.zeros((1001, 1001), dtype=np.float32) 27 | 28 | # Add sky and sky noise 29 | imdata += 200 30 | 31 | # Add some fake sources 32 | for i in range(100): 33 | x = np.random.uniform(low=0.0, high=1001) 34 | y = np.random.uniform(low=0.0, high=1001) 35 | brightness = np.random.uniform(low=1000., high=30000.) 36 | imdata += gaussian(imdata.shape, x, y, brightness, 3.5) 37 | 38 | # Add the poisson noise 39 | imdata = np.random.poisson(imdata) 40 | 41 | # Add readnoise 42 | imdata += np.random.normal(0.0, 10.0, size=(1001, 1001)) 43 | 44 | # Add 100 fake cosmic rays 45 | cr_x = np.random.randint(low=5, high=995, size=100) 46 | cr_y = np.random.randint(low=5, high=995, size=100) 47 | 48 | cr_brightnesses = np.random.uniform(low=1000.0, high=30000.0, size=100) 49 | 50 | imdata[cr_y, cr_x] += cr_brightnesses 51 | imdata = imdata.astype('f4') 52 | 53 | # Make a mask where the detected cosmic rays should be 54 | expected_crmask = np.zeros((1001, 1001), dtype=np.bool) 55 | expected_crmask[cr_y, cr_x] = True 56 | 57 | def test_main(): 58 | # Because our image only contains single cosmics, turn off 59 | # neighbor detection. Also, our cosmic rays are high enough 60 | # contrast that we can turn our detection threshold up. 
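    # (Hedged note: as the unpacking below confirms, detect_cosmics returns a
    # (crmask, cleaned_image) tuple; all other tuning parameters, e.g. objlim
    # and niter, keep their defaults in this test.)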
61 | mask, _clean = detect_cosmics(imdata, readnoise=10., gain=1.0, 62 | sigclip=6, sigfrac=1.0) 63 | assert (mask == expected_crmask).sum() == (1001 * 1001) -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Astro-SCRAPPY: The Speedy Cosmic Ray Annihilation Package in Python 2 | =================================================================== 3 | 4 | Name : Astro-SCRAPPY 5 | 6 | Author : Curtis McCully 7 | 8 | Date : October 2014 9 | 10 | Optimized cosmic ray detector 11 | 12 | Astro-SCRAPPY is designed to detect cosmic rays in images (numpy arrays), 13 | based on Pieter van Dokkum's L.A.Cosmic algorithm. 14 | 15 | Much of this was originally adapted from cosmics.py written by Malte Tewes. 16 | I have ported all of the slow functions to Cython/C, and optimized 17 | where I can. This is designed to be as fast as possible so some of the 18 | readability has been sacrificed, specifically in the C code. 19 | 20 | If you use this code, please consider adding this repository address in a 21 | footnote: https://github.com/astropy/astroscrappy 22 | 23 | Please cite the original paper, which can be found at: 24 | http://www.astro.yale.edu/dokkum/lacosmic/ 25 | 26 | van Dokkum 2001, PASP, 113, 789, 1420 27 | (article : http://adsabs.harvard.edu/abs/2001PASP..113.1420V) 28 | 29 | This code requires Cython, preferably version >= 0.21. 30 | 31 | Parallelization is achieved using OpenMP. This code should compile (although 32 | the Cython files may have issues) using a compiler that does not support OMP, 33 | e.g. clang. 34 | 35 | Notes 36 | ----- 37 | There are some differences from the original LA Cosmic: 38 | 39 | - Automatic recognition of saturated stars. 40 | This avoids treating such stars as large cosmic rays. 41 | 42 | - I have tried to optimize all of the code as much as possible while 43 | maintaining the integrity of the algorithm. One of the key speedups is to 44 | use a separable median filter instead of the true median filter. While these 45 | are not identical, they produce comparable results and the separable version 46 | is much faster. 47 | 48 | - This implementation is much faster than the original Python version, by as much 49 | as a factor of ~17 depending on the given parameters, even without running multiple threads. 50 | With multiple threads, this can be increased easily by another factor of 2. 51 | It is also much faster than the original IRAF version, an improvement 52 | by a factor of ~90. 53 | 54 | The arrays must always be C-contiguous; thus all loops are y outer, x inner. 55 | This follows the PyFITS convention. 56 | 57 | scipy is required for certain tests to pass, but the code itself does not depend on 58 | scipy. 59 | 60 | .. image:: https://travis-ci.org/astropy/astroscrappy.png 61 | :target: https://travis-ci.org/astropy/astroscrappy 62 | ..
image:: https://coveralls.io/repos/astropy/astroscrappy/badge.png 63 | :target: https://coveralls.io/r/astropy/astroscrappy 64 | -------------------------------------------------------------------------------- /astroscrappy/utils/setup_package.py: -------------------------------------------------------------------------------- 1 | from __future__ import (absolute_import, division, print_function, 2 | unicode_literals) 3 | import os 4 | import sys 5 | import subprocess 6 | 7 | from distutils.core import Extension 8 | from distutils import log 9 | 10 | UTIL_DIR = os.path.relpath(os.path.dirname(__file__)) 11 | 12 | CODELINES = """ 13 | import sys 14 | from distutils.ccompiler import new_compiler 15 | ccompiler = new_compiler() 16 | ccompiler.add_library('gomp') 17 | sys.exit(int(ccompiler.has_function('omp_get_num_threads'))) 18 | """ 19 | 20 | 21 | def check_openmp(): 22 | s = subprocess.Popen([sys.executable], stdin=subprocess.PIPE, 23 | stdout=subprocess.PIPE, 24 | stderr=subprocess.PIPE) 25 | stdout, stderr = s.communicate(CODELINES.encode('utf-8')) 26 | s.wait() 27 | return bool(s.returncode), (stdout, stderr) 28 | 29 | 30 | def get_extensions(): 31 | 32 | med_sources = [str(os.path.join(UTIL_DIR, "median_utils.pyx")), 33 | str(os.path.join(UTIL_DIR, "medutils.c"))] 34 | 35 | im_sources = [str(os.path.join(UTIL_DIR, "image_utils.pyx")), 36 | str(os.path.join(UTIL_DIR, "imutils.c"))] 37 | 38 | include_dirs = ['numpy', UTIL_DIR] 39 | 40 | libraries = [] 41 | 42 | ext_med = Extension(name=str('astroscrappy.utils.median_utils'), 43 | sources=med_sources, 44 | include_dirs=include_dirs, 45 | libraries=libraries, 46 | language="c", 47 | extra_compile_args=['-g', '-O3', '-funroll-loops', 48 | '-ffast-math']) 49 | ext_im = Extension(name=str("astroscrappy.utils.image_utils"), 50 | sources=im_sources, 51 | include_dirs=include_dirs, 52 | libraries=libraries, 53 | language="c", 54 | extra_compile_args=['-g', '-O3', '-funroll-loops', 55 | '-ffast-math']) 56 | 57 | has_openmp, outputs = check_openmp() 58 | if has_openmp: 59 | ext_med.extra_compile_args.append('-fopenmp') 60 | ext_im.extra_compile_args.append('-fopenmp') 61 | ext_med.extra_link_args = ['-g', '-fopenmp'] 62 | ext_im.extra_link_args = ['-g', '-fopenmp'] 63 | else: 64 | log.warn('OpenMP was not found. ' 65 | 'astroscrappy will be compiled without OpenMP. ' 66 | '(Use the "-v" option of setup.py for more details.)') 67 | log.debug(('(Start of OpenMP info)\n' 68 | 'compiler stdout:\n{0}\n' 69 | 'compiler stderr:\n{1}\n' 70 | '(End of OpenMP info)').format(*outputs)) 71 | 72 | return [ext_med, ext_im] 73 | -------------------------------------------------------------------------------- /astroscrappy/__init__.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | 3 | """ 4 | Astro-SCRAPPY: The Speedy Cosmic Ray Annihilation Package in Python 5 | =================================================================== 6 | 7 | Name : Astro-SCRAPPY 8 | Author : Curtis McCully 9 | Date : October 2014 10 | 11 | Optimized Cosmic Ray Detector: 12 | 13 | Astro-SCRAPPY is designed to detect cosmic rays in images (numpy arrays), 14 | originally based on Pieter van Dokkum's L.A.Cosmic algorithm. 15 | 16 | Much of this was originally adapted from cosmics.py written by Malte Tewes. 17 | I have ported all of the slow functions to Cython/C, and optimized 18 | where I can. 
This is designed to be as fast as possible so some of the 19 | readability has been sacrificed, specifically in the C code. 20 | 21 | L.A.Cosmic = LAplacian Cosmic ray detection 22 | 23 | If you use this code, please consider adding this repository address in a 24 | footnote: https://github.com/astropy/astroscrappy. 25 | 26 | Please cite the original paper, which can be found at: 27 | http://www.astro.yale.edu/dokkum/lacosmic/ 28 | 29 | van Dokkum 2001, PASP, 113, 789, 1420 30 | (article : http://adsabs.harvard.edu/abs/2001PASP..113.1420V) 31 | 32 | This code requires Cython, preferably version >= 0.21. 33 | 34 | Parallelization is achieved using OpenMP. This code should compile (although 35 | the Cython files may have issues) using a compiler that does not support OMP, 36 | e.g. clang. 37 | 38 | Notes 39 | ----- 40 | There are some differences from the original LACosmic: 41 | 42 | - Automatic recognition of saturated stars. 43 | This avoids treating such stars as large cosmic rays. 44 | 45 | - I have tried to optimize all of the code as much as possible while 46 | maintaining the integrity of the algorithm. One of the key speedups is to 47 | use a separable median filter instead of the true median filter. While these 48 | are not identical, they produce comparable results and the separable version 49 | is much faster. 50 | 51 | - This implementation is much faster than the original Python version, by as much as a factor of 52 | 28 depending on the given parameters. 53 | This implementation is much faster than the original IRAF version, by a factor 54 | of ~90. 55 | 56 | Note that the arrays must always be C-contiguous; thus all loops are y outer, x inner. 57 | This follows the PyFITS convention. 58 | 59 | scipy is required for certain tests to pass, but the code itself does not depend on 60 | scipy. 61 | """ 62 | 63 | # Affiliated packages may add whatever they like to this file, but 64 | # should keep this content at the top. 65 | # ---------------------------------------------------------------------------- 66 | from ._astropy_init import * 67 | # ---------------------------------------------------------------------------- 68 | 69 | # For egg_info test builds to pass, put package imports here. 70 | if not _ASTROPY_SETUP_: 71 | from .astroscrappy import * 72 | from .utils import * 73 | 74 | __all__ = ['detect_cosmics'] 75 | -------------------------------------------------------------------------------- /astroscrappy/utils/imutils.h: -------------------------------------------------------------------------------- 1 | /* 2 | * imutils.h 3 | * 4 | * Author: Curtis McCully 5 | * October 2014 6 | * 7 | * Licensed under a 3-clause BSD style license - see LICENSE.rst 8 | */ 9 | 10 | #ifndef IMUTILS_H_ 11 | #define IMUTILS_H_ 12 | 13 | /* Define a bool type because there isn't one built in ANSI C */ 14 | typedef uint8_t bool; 15 | #define true 1 16 | #define false 0 17 | 18 | /* Subsample an array 2x2 given an input array data with size nx x ny. Each 19 | * pixel is replicated into 4 pixels; no averaging is performed. The results 20 | * are saved in the output array. The output array should already be allocated 21 | * as we work on it in place. Data should be striped in the x direction such 22 | * that the memory location of pixel i,j is data[nx *j + i]. 23 | */ 24 | void 25 | PySubsample(float* data, float* output, int nx, int ny); 26 | 27 | /* Rebin an array 2x2, with size (2 * nx) x (2 * ny). Rebin the array by block 28 | * averaging 4 pixels back into 1.
This is effectively the opposite of 29 | * subsample (although subsample does not do an average). The results are saved 30 | * in the output array. The output array should already be allocated as we work 31 | * on it in place. Data should be striped in the x direction such that the 32 | * memory location of pixel i,j is data[nx *j + i]. 33 | */ 34 | void 35 | PyRebin(float* data, float* output, int nx, int ny); 36 | 37 | /* Convolve an image of size nx x ny with a kernel of size kernx x kerny. The 38 | * results are saved in the output array. The output array should already be 39 | * allocated as we work on it in place. Data and kernel should both be striped 40 | * in the x direction such that the memory location of pixel i,j is 41 | * data[nx *j + i]. 42 | */ 43 | void 44 | PyConvolve(float* data, float* kernel, float* output, int nx, int ny, 45 | int kernx, int kerny); 46 | 47 | /* Convolve an image of size nx x ny the following kernel: 48 | * 0 -1 0 49 | * -1 4 -1 50 | * 0 -1 0 51 | * The results are saved in the output array. The output array should 52 | * already be allocated as we work on it in place. 53 | * This is a discrete version of the Laplacian operator. 54 | * Data should be striped in the x direction such that the memory location of 55 | * pixel i,j is data[nx *j + i]. 56 | */ 57 | void 58 | PyLaplaceConvolve(float* data, float* output, int nx, int ny); 59 | 60 | /* Perform a boolean dilation on an array of size nx x ny. The results are 61 | * saved in the output array. The output array should already be allocated as 62 | * we work on it in place. 63 | * Dilation is the boolean equivalent of a convolution but using logical ors 64 | * instead of a sum. 65 | * We apply the following kernel: 66 | * 1 1 1 67 | * 1 1 1 68 | * 1 1 1 69 | * The binary dilation is not computed for a 1 pixel border around the image. 70 | * These pixels are copied from the input data. Data should be striped along 71 | * the x direction such that the memory location of pixel i,j is 72 | * data[i + nx * j]. 73 | */ 74 | void 75 | PyDilate3(bool* data, bool* output, int nx, int ny); 76 | 77 | /* Do niter iterations of boolean dilation on an array of size nx x ny. The 78 | * results are saved in the output array. The output array should already be 79 | * allocated as we work on it in place. 80 | * Dilation is the boolean equivalent of a convolution but using logical ors 81 | * instead of a sum. 82 | * We apply the following kernel: 83 | * 0 1 1 1 0 84 | * 1 1 1 1 1 85 | * 1 1 1 1 1 86 | * 1 1 1 1 1 87 | * 0 1 1 1 0 88 | * The edges are padded with zeros so that the dilation operator is defined for 89 | * all pixels. Data should be striped along the x direction such that the 90 | * memory location of pixel i,j is data[i + nx * j]. 
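 *
 * Note: each pass of this kernel grows a masked region by up to two pixels
 * along each axis, so iter iterations dilate a cosmic ray hit by roughly
 * 2 * iter pixels, while the zeroed corners keep the growth approximately
 * circular rather than square.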
91 | */ 92 | void 93 | PyDilate5(bool* data, bool* output, int iter, int nx, int ny); 94 | 95 | #endif /* IMUTILS_H_ */ 96 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 3 | 4 | import glob 5 | import os 6 | import sys 7 | 8 | import ah_bootstrap 9 | from setuptools import setup 10 | 11 | #A dirty hack to get around some early import/configurations ambiguities 12 | if sys.version_info[0] >= 3: 13 | import builtins 14 | else: 15 | import __builtin__ as builtins 16 | builtins._ASTROPY_SETUP_ = True 17 | 18 | from astropy_helpers.setup_helpers import ( 19 | register_commands, adjust_compiler, get_debug_option, get_package_info) 20 | from astropy_helpers.git_helpers import get_git_devstr 21 | from astropy_helpers.version_helpers import generate_version_py 22 | from astropy_helpers.distutils_helpers import is_distutils_display_option 23 | 24 | # Get some values from the setup.cfg 25 | from distutils import config 26 | conf = config.ConfigParser() 27 | conf.read(['setup.cfg']) 28 | metadata = dict(conf.items('metadata')) 29 | 30 | PACKAGENAME = metadata.get('package_name', 'packagename') 31 | DESCRIPTION = metadata.get('description', 'Astropy affiliated package') 32 | AUTHOR = metadata.get('author', '') 33 | AUTHOR_EMAIL = metadata.get('author_email', '') 34 | LICENSE = metadata.get('license', 'unknown') 35 | URL = metadata.get('url', 'http://astropy.org') 36 | 37 | # Get the long description from the package's docstring 38 | __import__(PACKAGENAME) 39 | package = sys.modules[PACKAGENAME] 40 | LONG_DESCRIPTION = package.__doc__ 41 | 42 | # Store the package name in a built-in variable so it's easy 43 | # to get from other parts of the setup infrastructure 44 | builtins._ASTROPY_PACKAGE_NAME_ = PACKAGENAME 45 | 46 | # VERSION should be PEP386 compatible (http://www.python.org/dev/peps/pep-0386) 47 | VERSION = '1.1.dev' 48 | 49 | # Indicates if this version is a release version 50 | RELEASE = 'dev' not in VERSION 51 | 52 | if not RELEASE: 53 | VERSION += get_git_devstr(False) 54 | 55 | # Populate the dict of setup command overrides; this should be done before 56 | # invoking any other functionality from distutils since it can potentially 57 | # modify distutils' behavior. 58 | cmdclassd = register_commands(PACKAGENAME, VERSION, RELEASE) 59 | 60 | # Adjust the compiler in case the default on this platform is to use a 61 | # broken one. 62 | adjust_compiler(PACKAGENAME) 63 | 64 | # Freeze build information in version.py 65 | generate_version_py(PACKAGENAME, VERSION, RELEASE, 66 | get_debug_option(PACKAGENAME)) 67 | 68 | # Treat everything in scripts except README.rst as a script to be installed 69 | scripts = [fname for fname in glob.glob(os.path.join('scripts', '*')) 70 | if os.path.basename(fname) != 'README.rst'] 71 | 72 | 73 | # Get configuration information from all of the various subpackages. 74 | # See the docstring for setup_helpers.update_package_files for more 75 | # details. 
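# get_package_info() aggregates the hooks defined by each subpackage's
# setup_package.py in this repository: get_extensions() in
# astroscrappy/utils/setup_package.py supplies the Cython/C extension modules,
# and the get_package_data() hooks supply files such as the tests' coveragerc.
# The resulting dict is expanded into setup() below via **package_info.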
76 | package_info = get_package_info() 77 | 78 | # Add the project-global data 79 | package_info['package_data'].setdefault(PACKAGENAME, []) 80 | package_info['package_data'][PACKAGENAME].append('data/*') 81 | 82 | # Define entry points for command-line scripts 83 | entry_points = {} 84 | entry_points['console_scripts'] = [ 85 | 'astropy-package-template-example = packagename.example_mod:main', 86 | ] 87 | 88 | # Include all .c files, recursively, including those generated by 89 | # Cython, since we can not do this in MANIFEST.in with a "dynamic" 90 | # directory name. 91 | c_files = [] 92 | for root, dirs, files in os.walk(PACKAGENAME): 93 | for filename in files: 94 | if filename.endswith('.c'): 95 | c_files.append( 96 | os.path.join( 97 | os.path.relpath(root, PACKAGENAME), filename)) 98 | package_info['package_data'][PACKAGENAME].extend(c_files) 99 | 100 | # Avoid installing setup_requires dependencies if the user just 101 | # queries for information 102 | if is_distutils_display_option(): 103 | setup_requires = [] 104 | else: 105 | setup_requires = ['numpy', 'cython'] 106 | # Note that requires and provides should not be included in the call to 107 | # ``setup``, since these are now deprecated. See this link for more details: 108 | # https://groups.google.com/forum/#!topic/astropy-dev/urYO8ckB2uM 109 | 110 | setup(name=PACKAGENAME, 111 | version=VERSION, 112 | description=DESCRIPTION, 113 | scripts=scripts, 114 | setup_requires=setup_requires, 115 | install_requires=['astropy'], 116 | tests_require=['astropy', 'scipy'], 117 | author=AUTHOR, 118 | author_email=AUTHOR_EMAIL, 119 | license=LICENSE, 120 | url=URL, 121 | long_description=LONG_DESCRIPTION, 122 | cmdclass=cmdclassd, 123 | zip_safe=False, 124 | use_2to3=False, 125 | entry_points=entry_points, 126 | **package_info 127 | ) 128 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | 3 | # Setting sudo to false opts in to Travis-CI container-based builds. 4 | sudo: false 5 | 6 | # The apt packages below are needed for sphinx builds, which can no longer 7 | # be installed with sudo apt-get. 8 | addons: 9 | apt: 10 | packages: 11 | - graphviz 12 | - texlive-latex-extra 13 | - dvipng 14 | 15 | python: 16 | - 2.6 17 | - 2.7 18 | - 3.3 19 | - 3.4 20 | # This is just for "egg_info". All other builds are explicitly given in the matrix 21 | env: 22 | global: 23 | # The following versions are the 'default' for tests, unless 24 | # overridden underneath. They are defined here in order to save having 25 | # to repeat them for all configurations. 26 | - NUMPY_VERSION=1.9 27 | - ASTROPY_VERSION=stable 28 | - CONDA_INSTALL='conda install -c astropy-ci-extras --yes' 29 | - PIP_INSTALL='pip install' 30 | matrix: 31 | - SETUP_CMD='egg_info' 32 | 33 | matrix: 34 | include: 35 | 36 | # Do a coverage test in Python 2.
37 | - python: 2.7 38 | env: SETUP_CMD='test --coverage' 39 | 40 | # Check for sphinx doc build warnings - we do this first because it 41 | # may run for a long time 42 | - python: 2.7 43 | env: SETUP_CMD='build_sphinx -w' 44 | 45 | # Try Astropy development version 46 | - python: 2.7 47 | env: ASTROPY_VERSION=development SETUP_CMD='test' 48 | - python: 3.3 49 | env: ASTROPY_VERSION=development SETUP_CMD='test' 50 | 51 | # Try all python versions with the latest numpy 52 | - python: 2.6 53 | env: SETUP_CMD='test' 54 | - python: 2.7 55 | env: SETUP_CMD='test' 56 | - python: 3.3 57 | env: SETUP_CMD='test' 58 | - python: 3.4 59 | env: SETUP_CMD='test' 60 | 61 | # Try older numpy versions 62 | - python: 2.7 63 | env: NUMPY_VERSION=1.8 SETUP_CMD='test' 64 | - python: 2.7 65 | env: NUMPY_VERSION=1.7 SETUP_CMD='test' 66 | - python: 2.7 67 | env: NUMPY_VERSION=1.6 SETUP_CMD='test' 68 | 69 | before_install: 70 | 71 | # Use utf8 encoding. Should be default, but this is insurance against 72 | # future changes 73 | - export PYTHONIOENCODING=UTF8 74 | 75 | # http://conda.pydata.org/docs/travis.html#the-travis-yml-file 76 | - wget https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh; 77 | - bash miniconda.sh -b -p $HOME/miniconda 78 | - export PATH="$HOME/miniconda/bin:$PATH" 79 | - hash -r 80 | - conda config --set always_yes yes --set changeps1 no 81 | - conda update -q conda 82 | - conda info -a 83 | 84 | install: 85 | 86 | # CONDA 87 | - conda create --yes -n test -c astropy-ci-extras python=$TRAVIS_PYTHON_VERSION 88 | - source activate test 89 | 90 | # CORE DEPENDENCIES 91 | - if [[ $SETUP_CMD != egg_info ]]; then $CONDA_INSTALL numpy=$NUMPY_VERSION pytest pip Cython jinja2; fi 92 | - if [[ $SETUP_CMD != egg_info ]]; then $PIP_INSTALL pytest-xdist; fi 93 | 94 | # ASTROPY 95 | - if [[ $SETUP_CMD != egg_info ]] && [[ $ASTROPY_VERSION == development ]]; then $PIP_INSTALL git+http://github.com/astropy/astropy.git#egg=astropy; fi 96 | - if [[ $SETUP_CMD != egg_info ]] && [[ $ASTROPY_VERSION == stable ]]; then $CONDA_INSTALL numpy=$NUMPY_VERSION astropy; fi 97 | 98 | # OPTIONAL DEPENDENCIES 99 | # Here you can add any dependencies your package may have. You can use 100 | # conda for packages available through conda, or pip for any other 101 | # packages. You should leave the `numpy=$NUMPY_VERSION` in the `conda` 102 | # install since this ensures Numpy does not get automatically upgraded. 103 | - if [[ $SETUP_CMD != egg_info ]]; then $CONDA_INSTALL numpy=$NUMPY_VERSION scipy; fi 104 | - if [[ $SETUP_CMD != egg_info ]]; then $PIP_INSTALL scipy; fi 105 | 106 | # DOCUMENTATION DEPENDENCIES 107 | # build_sphinx needs sphinx and matplotlib (for plot_directive). Note that 108 | # this matplotlib will *not* work with py 3.x, but our sphinx build is 109 | # currently 2.7, so that's fine 110 | - if [[ $SETUP_CMD == build_sphinx* ]]; then $CONDA_INSTALL numpy=$NUMPY_VERSION Sphinx matplotlib; fi 111 | 112 | # COVERAGE DEPENDENCIES 113 | - if [[ $SETUP_CMD == 'test --coverage' ]]; then $PIP_INSTALL coverage==3.7.1 coveralls; fi 114 | 115 | script: 116 | - python setup.py $SETUP_CMD 117 | 118 | after_success: 119 | # If coveralls.io is set up for this package, uncomment the line 120 | # below and replace "packagename" with the name of your package. 121 | # The coveragerc file may be customized as needed for your package. 
122 | - if [[ $SETUP_CMD == 'test --coverage' ]]; then coveralls --rcfile='astroscrappy/tests/coveragerc'; fi 123 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 14 | 15 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest 16 | 17 | # This is needed with git because git doesn't create a dir if it's empty 18 | $(shell [ -d "_static" ] || mkdir -p _static) 19 | 20 | help: 21 | @echo "Please use \`make <target>' where <target> is one of" 22 | @echo " html to make standalone HTML files" 23 | @echo " dirhtml to make HTML files named index.html in directories" 24 | @echo " singlehtml to make a single large HTML file" 25 | @echo " pickle to make pickle files" 26 | @echo " json to make JSON files" 27 | @echo " htmlhelp to make HTML files and a HTML help project" 28 | @echo " qthelp to make HTML files and a qthelp project" 29 | @echo " devhelp to make HTML files and a Devhelp project" 30 | @echo " epub to make an epub" 31 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 32 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 33 | @echo " text to make text files" 34 | @echo " man to make manual pages" 35 | @echo " changes to make an overview of all changed/added/deprecated items" 36 | @echo " linkcheck to check all external links for integrity" 37 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 38 | 39 | clean: 40 | -rm -rf $(BUILDDIR) 41 | -rm -rf api 42 | 43 | html: 44 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 45 | @echo 46 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 47 | 48 | dirhtml: 49 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 50 | @echo 51 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 52 | 53 | singlehtml: 54 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 55 | @echo 56 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 57 | 58 | pickle: 59 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 60 | @echo 61 | @echo "Build finished; now you can process the pickle files." 62 | 63 | json: 64 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 65 | @echo 66 | @echo "Build finished; now you can process the JSON files." 67 | 68 | htmlhelp: 69 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 70 | @echo 71 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 72 | ".hhp project file in $(BUILDDIR)/htmlhelp."
73 | 74 | qthelp: 75 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 76 | @echo 77 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 78 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 79 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Astropy.qhcp" 80 | @echo "To view the help file:" 81 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Astropy.qhc" 82 | 83 | devhelp: 84 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 85 | @echo 86 | @echo "Build finished." 87 | @echo "To view the help file:" 88 | @echo "# mkdir -p $$HOME/.local/share/devhelp/Astropy" 89 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Astropy" 90 | @echo "# devhelp" 91 | 92 | epub: 93 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 94 | @echo 95 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 96 | 97 | latex: 98 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 99 | @echo 100 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 101 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 102 | "(use \`make latexpdf' here to do that automatically)." 103 | 104 | latexpdf: 105 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 106 | @echo "Running LaTeX files through pdflatex..." 107 | make -C $(BUILDDIR)/latex all-pdf 108 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 109 | 110 | text: 111 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 112 | @echo 113 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 114 | 115 | man: 116 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 117 | @echo 118 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 119 | 120 | changes: 121 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 122 | @echo 123 | @echo "The overview file is in $(BUILDDIR)/changes." 124 | 125 | linkcheck: 126 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 127 | @echo 128 | @echo "Link check complete; look for any errors in the above output " \ 129 | "or in $(BUILDDIR)/linkcheck/output.txt." 130 | 131 | doctest: 132 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 133 | @echo "Testing of doctests in the sources finished, look at the " \ 134 | "results in $(BUILDDIR)/doctest/output.txt." 135 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | if NOT "%PAPER%" == "" ( 11 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 12 | ) 13 | 14 | if "%1" == "" goto help 15 | 16 | if "%1" == "help" ( 17 | :help 18 | echo.Please use `make ^<target^>` where ^<target^> is one of 19 | echo. html to make standalone HTML files 20 | echo. dirhtml to make HTML files named index.html in directories 21 | echo. singlehtml to make a single large HTML file 22 | echo. pickle to make pickle files 23 | echo. json to make JSON files 24 | echo. htmlhelp to make HTML files and a HTML help project 25 | echo. qthelp to make HTML files and a qthelp project 26 | echo. devhelp to make HTML files and a Devhelp project 27 | echo. epub to make an epub 28 | echo.
latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 29 | echo. text to make text files 30 | echo. man to make manual pages 31 | echo. changes to make an overview of all changed/added/deprecated items 32 | echo. linkcheck to check all external links for integrity 33 | echo. doctest to run all doctests embedded in the documentation if enabled 34 | goto end 35 | ) 36 | 37 | if "%1" == "clean" ( 38 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 39 | del /q /s %BUILDDIR%\* 40 | goto end 41 | ) 42 | 43 | if "%1" == "html" ( 44 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 45 | if errorlevel 1 exit /b 1 46 | echo. 47 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 48 | goto end 49 | ) 50 | 51 | if "%1" == "dirhtml" ( 52 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 53 | if errorlevel 1 exit /b 1 54 | echo. 55 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 56 | goto end 57 | ) 58 | 59 | if "%1" == "singlehtml" ( 60 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 61 | if errorlevel 1 exit /b 1 62 | echo. 63 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 64 | goto end 65 | ) 66 | 67 | if "%1" == "pickle" ( 68 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 69 | if errorlevel 1 exit /b 1 70 | echo. 71 | echo.Build finished; now you can process the pickle files. 72 | goto end 73 | ) 74 | 75 | if "%1" == "json" ( 76 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 77 | if errorlevel 1 exit /b 1 78 | echo. 79 | echo.Build finished; now you can process the JSON files. 80 | goto end 81 | ) 82 | 83 | if "%1" == "htmlhelp" ( 84 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 85 | if errorlevel 1 exit /b 1 86 | echo. 87 | echo.Build finished; now you can run HTML Help Workshop with the ^ 88 | .hhp project file in %BUILDDIR%/htmlhelp. 89 | goto end 90 | ) 91 | 92 | if "%1" == "qthelp" ( 93 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 94 | if errorlevel 1 exit /b 1 95 | echo. 96 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 97 | .qhcp project file in %BUILDDIR%/qthelp, like this: 98 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Astropy.qhcp 99 | echo.To view the help file: 100 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Astropy.qhc 101 | goto end 102 | ) 103 | 104 | if "%1" == "devhelp" ( 105 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 106 | if errorlevel 1 exit /b 1 107 | echo. 108 | echo.Build finished. 109 | goto end 110 | ) 111 | 112 | if "%1" == "epub" ( 113 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 117 | goto end 118 | ) 119 | 120 | if "%1" == "latex" ( 121 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 122 | if errorlevel 1 exit /b 1 123 | echo. 124 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 125 | goto end 126 | ) 127 | 128 | if "%1" == "text" ( 129 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 130 | if errorlevel 1 exit /b 1 131 | echo. 132 | echo.Build finished. The text files are in %BUILDDIR%/text. 133 | goto end 134 | ) 135 | 136 | if "%1" == "man" ( 137 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 138 | if errorlevel 1 exit /b 1 139 | echo. 140 | echo.Build finished. The manual pages are in %BUILDDIR%/man.
141 | goto end 142 | ) 143 | 144 | if "%1" == "changes" ( 145 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 146 | if errorlevel 1 exit /b 1 147 | echo. 148 | echo.The overview file is in %BUILDDIR%/changes. 149 | goto end 150 | ) 151 | 152 | if "%1" == "linkcheck" ( 153 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 154 | if errorlevel 1 exit /b 1 155 | echo. 156 | echo.Link check complete; look for any errors in the above output ^ 157 | or in %BUILDDIR%/linkcheck/output.txt. 158 | goto end 159 | ) 160 | 161 | if "%1" == "doctest" ( 162 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 163 | if errorlevel 1 exit /b 1 164 | echo. 165 | echo.Testing of doctests in the sources finished, look at the ^ 166 | results in %BUILDDIR%/doctest/output.txt. 167 | goto end 168 | ) 169 | 170 | :end 171 | -------------------------------------------------------------------------------- /astroscrappy/utils/medutils.h: -------------------------------------------------------------------------------- 1 | /* 2 | * medutils.h 3 | * 4 | * Author: Curtis McCully 5 | * October 2014 6 | * 7 | * Licensed under a 3-clause BSD style license - see LICENSE.rst 8 | */ 9 | 10 | #ifndef MEDUTILS_H_ 11 | #define MEDUTILS_H_ 12 | 13 | /* Define a bool type because there isn't one built in ANSI C */ 14 | typedef uint8_t bool; 15 | #define true 1 16 | #define false 0 17 | 18 | /*Find the median value of an array "a" of length n. */ 19 | float 20 | PyMedian(float* a, int n); 21 | 22 | /*Optimized method to find the median value of an array "a" of length 3. */ 23 | float 24 | PyOptMed3(float* a); 25 | 26 | /*Optimized method to find the median value of an array "a" of length 5. */ 27 | float 28 | PyOptMed5(float* a); 29 | 30 | /*Optimized method to find the median value of an array "a" of length 7. */ 31 | float 32 | PyOptMed7(float* a); 33 | 34 | /*Optimized method to find the median value of an array "a" of length 9. */ 35 | float 36 | PyOptMed9(float* a); 37 | 38 | /*Optimized method to find the median value of an array "a" of length 25. */ 39 | float 40 | PyOptMed25(float* a); 41 | 42 | /* Calculate the 3x3 median filter of an array data that has dimensions 43 | * nx x ny. The results are saved in the output array. The output array should 44 | * already be allocated as we work on it in place. The median filter is not 45 | * calculated for a 1 pixel border around the image. These pixel values are 46 | * copied from the input data. The data should be striped along the x 47 | * direction, such that pixel i,j in the 2D image should have memory location 48 | * data[i + nx *j]. 49 | */ 50 | void 51 | PyMedFilt3(float* data, float* output, int nx, int ny); 52 | 53 | /* Calculate the 5x5 median filter of an array data that has dimensions 54 | * nx x ny. The results are saved in the output array. The output array should 55 | * already be allocated as we work on it in place. The median filter is not 56 | * calculated for a 2 pixel border around the image. These pixel values are 57 | * copied from the input data. The data should be striped along the 58 | * x direction, such that pixel i,j in the 2D image should have memory 59 | * location data[i + nx *j]. 60 | */ 61 | void 62 | PyMedFilt5(float* data, float* output, int nx, int ny); 63 | 64 | /* Calculate the 7x7 median filter of an array data that has dimensions 65 | * nx x ny. The results are saved in the output array. The output array should 66 | * already be allocated as we work on it in place. 
The median filter is not 67 | * calculated for a 3 pixel border around the image. These pixel values are 68 | * copied from the input data. The data should be striped along the 69 | * x direction, such that pixel i,j in the 2D image should have memory 70 | * location data[i + nx *j]. 71 | */ 72 | void 73 | PyMedFilt7(float* data, float* output, int nx, int ny); 74 | 75 | /* Calculate the 3x3 separable median filter of an array data that has 76 | * dimensions nx x ny. The results are saved in the output array. The output 77 | * array should already be allocated as we work on it in place. The median 78 | * filter is not calculated for a 1 pixel border around the image. These pixel 79 | * values are copied from the input data. The data should be striped along 80 | * the x direction, such that pixel i,j in the 2D image should have memory 81 | * location data[i + nx *j]. Note that the rows are median filtered first, 82 | * followed by the columns. 83 | */ 84 | void 85 | PySepMedFilt3(float* data, float* output, int nx, int ny); 86 | 87 | /* Calculate the 5x5 separable median filter of an array data that has 88 | * dimensions nx x ny. The results are saved in the output array. The output 89 | * array should already be allocated as we work on it in place. The median 90 | * filter is not calculated for a 2 pixel border around the image. These pixel 91 | * values are copied from the input data. The data should be striped along the 92 | * x direction, such that pixel i,j in the 2D image should have memory location 93 | * data[i + nx *j]. Note that the rows are median filtered first, followed by 94 | * the columns. 95 | */ 96 | void 97 | PySepMedFilt5(float* data, float* output, int nx, int ny); 98 | 99 | /* Calculate the 7x7 separable median filter of an array data that has 100 | * dimensions nx x ny. The results are saved in the output array. The output 101 | * array should already be allocated as we work on it in place. The median 102 | * filter is not calculated for a 3 pixel border around the image. These pixel 103 | * values are copied from the input data. The data should be striped along the 104 | * x direction, such that pixel i,j in the 2D image should have memory location 105 | * data[i + nx *j]. Note that the rows are median filtered first, followed by 106 | * the columns. 107 | */ 108 | void 109 | PySepMedFilt7(float* data, float* output, int nx, int ny); 110 | 111 | /* Calculate the 9x9 separable median filter of an array data that has 112 | * dimensions nx x ny. The results are saved in the output array. The output 113 | * array should already be allocated as we work on it in place. The median 114 | * filter is not calculated for a 4 pixel border around the image. These pixel 115 | * values are copied from the input data. The data should be striped along the 116 | * x direction, such that pixel i,j in the 2D image should have memory location 117 | * data[i + nx *j]. Note that the rows are median filtered first, followed by 118 | * the columns.
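 *
 * Note: a separable pass is an approximation. Median filtering the rows and
 * then the columns is not identical to the true 9x9 median, but, as the
 * README notes, it produces comparable results and is much faster.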
119 | */ 120 | void 121 | PySepMedFilt9(float* data, float* output, int nx, int ny); 122 | 123 | #endif /* MEDUTILS_H_ */ 124 | -------------------------------------------------------------------------------- /astroscrappy/_astropy_init.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | 3 | __all__ = ['__version__', '__githash__', 'test'] 4 | 5 | # this indicates whether or not we are in the package's setup.py 6 | try: 7 | _ASTROPY_SETUP_ 8 | except NameError: 9 | from sys import version_info 10 | if version_info[0] >= 3: 11 | import builtins 12 | else: 13 | import __builtin__ as builtins 14 | builtins._ASTROPY_SETUP_ = False 15 | 16 | try: 17 | from .version import version as __version__ 18 | except ImportError: 19 | __version__ = '' 20 | try: 21 | from .version import githash as __githash__ 22 | except ImportError: 23 | __githash__ = '' 24 | 25 | # set up the test command 26 | def _get_test_runner(): 27 | import os 28 | from astropy.tests.helper import TestRunner 29 | return TestRunner(os.path.dirname(__file__)) 30 | 31 | def test(package=None, test_path=None, args=None, plugins=None, 32 | verbose=False, pastebin=None, remote_data=False, pep8=False, 33 | pdb=False, coverage=False, open_files=False, **kwargs): 34 | """ 35 | Run the tests using `py.test <http://pytest.org/latest/>`__. A proper set 36 | of arguments is constructed and passed to `pytest.main`_. 37 | 38 | .. _py.test: http://pytest.org/latest/ 39 | .. _pytest.main: http://pytest.org/latest/builtin.html#pytest.main 40 | 41 | Parameters 42 | ---------- 43 | package : str, optional 44 | The name of a specific package to test, e.g. 'io.fits' or 'utils'. 45 | If nothing is specified all default tests are run. 46 | 47 | test_path : str, optional 48 | Specify location to test by path. May be a single file or 49 | directory. Must be specified absolutely or relative to the 50 | calling directory. 51 | 52 | args : str, optional 53 | Additional arguments to be passed to pytest.main_ in the ``args`` 54 | keyword argument. 55 | 56 | plugins : list, optional 57 | Plugins to be passed to pytest.main_ in the ``plugins`` keyword 58 | argument. 59 | 60 | verbose : bool, optional 61 | Convenience option to turn on verbose output from py.test_. Passing 62 | True is the same as specifying ``'-v'`` in ``args``. 63 | 64 | pastebin : {'failed','all',None}, optional 65 | Convenience option for turning on py.test_ pastebin output. Set to 66 | ``'failed'`` to upload info for failed tests, or ``'all'`` to upload 67 | info for all tests. 68 | 69 | remote_data : bool, optional 70 | Controls whether to run tests marked with @remote_data. These 71 | tests use online data and are not run by default. Set to True to 72 | run these tests. 73 | 74 | pep8 : bool, optional 75 | Turn on PEP8 checking via the `pytest-pep8 plugin 76 | <http://pypi.python.org/pypi/pytest-pep8>`_ and disable normal 77 | tests. Same as specifying ``'--pep8 -k pep8'`` in ``args``. 78 | 79 | pdb : bool, optional 80 | Turn on PDB post-mortem analysis for failing tests. Same as 81 | specifying ``'--pdb'`` in ``args``. 82 | 83 | coverage : bool, optional 84 | Generate a test coverage report. The result will be placed in 85 | the directory htmlcov. 86 | 87 | open_files : bool, optional 88 | Fail when any tests leave files open. Off by default, because 89 | this adds extra run time to the test suite. Requires the 90 | `psutil <https://pypi.python.org/pypi/psutil>`_ package. 91 | 92 | parallel : int, optional 93 | When provided, run the tests in parallel on the specified 94 | number of CPUs.
If parallel is negative, it will use all 95 | the cores on the machine. Requires the 96 | pytest-xdist plugin 97 | to be installed. Only available when using Astropy 0.3 or later. 98 | 99 | kwargs 100 | Any additional keywords passed into this function will be passed 101 | on to the astropy test runner. This allows use of test-related 102 | functionality implemented in later versions of astropy without 103 | explicitly updating the package template. 104 | 105 | """ 106 | test_runner = _get_test_runner() 107 | return test_runner.run_tests( 108 | package=package, test_path=test_path, args=args, 109 | plugins=plugins, verbose=verbose, pastebin=pastebin, 110 | remote_data=remote_data, pep8=pep8, pdb=pdb, 111 | coverage=coverage, open_files=open_files, **kwargs) 112 | 113 | if not _ASTROPY_SETUP_: 114 | import os 115 | from warnings import warn 116 | from astropy import config 117 | 118 | # add these here so we only need to cleanup the namespace at the end 119 | config_dir = None 120 | 121 | if not os.environ.get('ASTROPY_SKIP_CONFIG_UPDATE', False): 122 | config_dir = os.path.dirname(__file__) 123 | config_template = os.path.join(config_dir, __package__ + ".cfg") 124 | if os.path.isfile(config_template): 125 | try: 126 | config.configuration.update_default_config( 127 | __package__, config_dir, version=__version__) 128 | except TypeError as orig_error: 129 | try: 130 | config.configuration.update_default_config( 131 | __package__, config_dir) 132 | except config.configuration.ConfigurationDefaultMissingError as e: 133 | wmsg = (e.args[0] + " Cannot install default profile. If you are " 134 | "importing from source, this is expected.") 135 | warn(config.configuration.ConfigurationDefaultMissingWarning(wmsg)) 136 | del e 137 | except: 138 | raise orig_error 139 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 3 | # 4 | # Astropy documentation build configuration file. 5 | # 6 | # This file is execfile()d with the current directory set to its containing dir. 7 | # 8 | # Note that not all possible configuration values are present in this file. 9 | # 10 | # All configuration values have a default. Some values are defined in 11 | # the global Astropy configuration which is loaded here before anything else. 12 | # See astropy.sphinx.conf for which values are set there. 13 | 14 | # If extensions (or modules to document with autodoc) are in another directory, 15 | # add these directories to sys.path here. If the directory is relative to the 16 | # documentation root, use os.path.abspath to make it absolute, like shown here. 17 | # sys.path.insert(0, os.path.abspath('..')) 18 | # IMPORTANT: the above commented section was generated by sphinx-quickstart, but 19 | # is *NOT* appropriate for astropy or Astropy affiliated packages. It is left 20 | # commented out with this explanation to make it clear why this should not be 21 | # done. If the sys.path entry above is added, when the astropy.sphinx.conf 22 | # import occurs, it will import the *source* version of astropy instead of the 23 | # version installed (if invoked as "make html" or directly with sphinx), or the 24 | # version in the build directory (if "python setup.py build_sphinx" is used).
25 | # Thus, any C-extensions that are needed to build the documentation will *not* 26 | # be accessible, and the documentation will not build correctly. 27 | 28 | import datetime 29 | import os 30 | import sys 31 | 32 | try: 33 | import astropy_helpers 34 | except ImportError: 35 | # Building from inside the docs/ directory? 36 | if os.path.basename(os.getcwd()) == 'docs': 37 | a_h_path = os.path.abspath(os.path.join('..', 'astropy_helpers')) 38 | if os.path.isdir(a_h_path): 39 | sys.path.insert(1, a_h_path) 40 | 41 | # Load all of the global Astropy configuration 42 | from astropy_helpers.sphinx.conf import * 43 | 44 | # Get configuration information from setup.cfg 45 | from distutils import config 46 | conf = config.ConfigParser() 47 | conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')]) 48 | setup_cfg = dict(conf.items('metadata')) 49 | 50 | # -- General configuration ---------------------------------------------------- 51 | 52 | # If your documentation needs a minimal Sphinx version, state it here. 53 | #needs_sphinx = '1.2' 54 | 55 | # To perform a Sphinx version check that needs to be more specific than 56 | # major.minor, call `check_sphinx_version("x.y.z")` here. 57 | # check_sphinx_version("1.2.1") 58 | 59 | # List of patterns, relative to source directory, that match files and 60 | # directories to ignore when looking for source files. 61 | exclude_patterns.append('_templates') 62 | 63 | # This is added to the end of RST files - a good place to put substitutions to 64 | # be used globally. 65 | rst_epilog += """ 66 | """ 67 | 68 | # -- Project information ------------------------------------------------------ 69 | 70 | # This does not *have* to match the package name, but typically does 71 | project = setup_cfg['package_name'] 72 | author = setup_cfg['author'] 73 | copyright = '{0}, {1}'.format( 74 | datetime.datetime.now().year, setup_cfg['author']) 75 | 76 | # The version info for the project you're documenting, acts as replacement for 77 | # |version| and |release|, also used in various other places throughout the 78 | # built documents. 79 | 80 | __import__(setup_cfg['package_name']) 81 | package = sys.modules[setup_cfg['package_name']] 82 | 83 | # The short X.Y version. 84 | version = package.__version__.split('-', 1)[0] 85 | # The full version, including alpha/beta/rc tags. 86 | release = package.__version__ 87 | 88 | 89 | # -- Options for HTML output --------------------------------------------------- 90 | 91 | # A NOTE ON HTML THEMES 92 | # The global astropy configuration uses a custom theme, 'bootstrap-astropy', 93 | # which is installed along with astropy. A different theme can be used or 94 | # the options for this theme can be modified by overriding some of the 95 | # variables set in the global configuration. The variables set in the 96 | # global configuration are listed below, commented out. 97 | 98 | # Add any paths that contain custom themes here, relative to this directory. 99 | # To use a different custom theme, add the directory containing the theme. 100 | #html_theme_path = [] 101 | 102 | # The theme to use for HTML and HTML Help pages. See the documentation for 103 | # a list of builtin themes. To override the custom theme, set this to the 104 | # name of a builtin theme or the name of a custom theme in html_theme_path. 105 | #html_theme = None 106 | 107 | # Custom sidebar templates, maps document names to template names. 
108 | #html_sidebars = {} 109 | 110 | # The name of an image file (within the static path) to use as favicon of the 111 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 112 | # pixels large. 113 | #html_favicon = '' 114 | 115 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 116 | # using the given strftime format. 117 | #html_last_updated_fmt = '' 118 | 119 | # The name for this set of Sphinx documents. If None, it defaults to 120 | # " v documentation". 121 | html_title = '{0} v{1}'.format(project, release) 122 | 123 | # Output file base name for HTML help builder. 124 | htmlhelp_basename = project + 'doc' 125 | 126 | 127 | # -- Options for LaTeX output -------------------------------------------------- 128 | 129 | # Grouping the document tree into LaTeX files. List of tuples 130 | # (source start file, target name, title, author, documentclass [howto/manual]). 131 | latex_documents = [('index', project + '.tex', project + u' Documentation', 132 | author, 'manual')] 133 | 134 | 135 | # -- Options for manual page output -------------------------------------------- 136 | 137 | # One entry per manual page. List of tuples 138 | # (source start file, name, description, authors, manual section). 139 | man_pages = [('index', project.lower(), project + u' Documentation', 140 | [author], 1)] 141 | 142 | 143 | ## -- Options for the edit_on_github extension ---------------------------------------- 144 | 145 | if eval(setup_cfg.get('edit_on_github')): 146 | extensions += ['astropy_helpers.sphinx.ext.edit_on_github'] 147 | 148 | versionmod = __import__(setup_cfg['package_name'] + '.version') 149 | edit_on_github_project = setup_cfg['github_project'] 150 | if versionmod.version.release: 151 | edit_on_github_branch = "v" + versionmod.version.version 152 | else: 153 | edit_on_github_branch = "master" 154 | 155 | edit_on_github_source_root = "" 156 | edit_on_github_doc_root = "docs" 157 | -------------------------------------------------------------------------------- /astroscrappy/tests/test_utils.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | from __future__ import (absolute_import, division, print_function, 3 | unicode_literals) 4 | import numpy as np 5 | from numpy.testing import assert_allclose 6 | 7 | from ..utils import (median, optmed3, optmed5, optmed7, optmed9, optmed25, 8 | medfilt3, medfilt5, medfilt7, sepmedfilt3, sepmedfilt5, 9 | sepmedfilt7, sepmedfilt9, dilate3, dilate5, subsample, 10 | rebin, laplaceconvolve, convolve) 11 | 12 | from scipy.ndimage.morphology import binary_dilation 13 | from scipy import ndimage 14 | 15 | 16 | def test_median(): 17 | a = np.ascontiguousarray(np.random.random(1001)).astype('f4') 18 | assert np.float32(np.median(a)) == np.float32(median(a, 1001)) 19 | 20 | 21 | def test_optmed3(): 22 | a = np.ascontiguousarray(np.random.random(3)).astype('f4') 23 | assert np.float32(np.median(a)) == np.float32(optmed3(a)) 24 | 25 | 26 | def test_optmed5(): 27 | a = np.ascontiguousarray(np.random.random(5)).astype('f4') 28 | assert np.float32(np.median(a)) == np.float32(optmed5(a)) 29 | 30 | 31 | def test_optmed7(): 32 | a = np.ascontiguousarray(np.random.random(7)).astype('f4') 33 | assert np.float32(np.median(a)) == np.float32(optmed7(a)) 34 | 35 | 36 | def test_optmed9(): 37 | a = np.ascontiguousarray(np.random.random(9)).astype('f4') 38 | assert np.float32(np.median(a)) == np.float32(optmed9(a)) 39 | 40 | 
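# The optmedN tests above all follow one contract: an n-element, float32,
# C-contiguous array in, the float32 median out. A parametrized sketch of the
# same checks (hypothetical, not part of the original suite; assumes pytest):
#
#     import pytest
#
#     @pytest.mark.parametrize('func, n', [(optmed3, 3), (optmed5, 5),
#                                          (optmed7, 7), (optmed9, 9)])
#     def test_optmed_parametrized(func, n):
#         a = np.ascontiguousarray(np.random.random(n)).astype('f4')
#         assert np.float32(np.median(a)) == np.float32(func(a))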
41 | def test_optmed25(): 42 | a = np.ascontiguousarray(np.random.random(25)).astype('f4') 43 | assert np.float32(np.median(a)) == np.float32(optmed25(a)) 44 | 45 | 46 | def test_medfilt3(): 47 | a = np.ascontiguousarray(np.random.random((1001, 1001))).astype('f4') 48 | npmed3 = ndimage.filters.median_filter(a, size=(3, 3), mode='nearest') 49 | npmed3[:1, :] = a[:1, :] 50 | npmed3[-1:, :] = a[-1:, :] 51 | npmed3[:, :1] = a[:, :1] 52 | npmed3[:, -1:] = a[:, -1:] 53 | 54 | med3 = medfilt3(a) 55 | assert np.all(med3 == npmed3) 56 | 57 | 58 | def test_medfilt5(): 59 | a = np.ascontiguousarray(np.random.random((1001, 1001))).astype('f4') 60 | npmed5 = ndimage.filters.median_filter(a, size=(5, 5), mode='nearest') 61 | npmed5[:2, :] = a[:2, :] 62 | npmed5[-2:, :] = a[-2:, :] 63 | npmed5[:, :2] = a[:, :2] 64 | npmed5[:, -2:] = a[:, -2:] 65 | 66 | med5 = medfilt5(a) 67 | assert np.all(med5 == npmed5) 68 | 69 | 70 | def test_medfilt7(): 71 | a = np.ascontiguousarray(np.random.random((1001, 1001))).astype('f4') 72 | npmed7 = ndimage.filters.median_filter(a, size=(7, 7), mode='nearest') 73 | npmed7[:3, :] = a[:3, :] 74 | npmed7[-3:, :] = a[-3:, :] 75 | npmed7[:, :3] = a[:, :3] 76 | npmed7[:, -3:] = a[:, -3:] 77 | 78 | med7 = medfilt7(a) 79 | assert np.all(med7 == npmed7) 80 | 81 | 82 | def test_sepmedfilt3(): 83 | a = np.ascontiguousarray(np.random.random((1001, 1001))).astype('f4') 84 | npmed3 = ndimage.filters.median_filter(a, size=(1, 3), mode='nearest') 85 | npmed3[:, :1] = a[:, :1] 86 | npmed3[:, -1:] = a[:, -1:] 87 | npmed3 = ndimage.filters.median_filter(npmed3, size=(3, 1), mode='nearest') 88 | npmed3[:1, :] = a[:1, :] 89 | npmed3[-1:, :] = a[-1:, :] 90 | npmed3[:, :1] = a[:, :1] 91 | npmed3[:, -1:] = a[:, -1:] 92 | 93 | med3 = sepmedfilt3(a) 94 | assert np.all(med3 == npmed3) 95 | 96 | 97 | def test_sepmedfilt5(): 98 | a = np.ascontiguousarray(np.random.random((1001, 1001))).astype('f4') 99 | npmed5 = ndimage.filters.median_filter(a, size=(1, 5), mode='nearest') 100 | npmed5[:, :2] = a[:, :2] 101 | npmed5[:, -2:] = a[:, -2:] 102 | npmed5 = ndimage.filters.median_filter(npmed5, size=(5, 1), mode='nearest') 103 | npmed5[:2, :] = a[:2, :] 104 | npmed5[-2:, :] = a[-2:, :] 105 | npmed5[:, :2] = a[:, :2] 106 | npmed5[:, -2:] = a[:, -2:] 107 | 108 | med5 = sepmedfilt5(a) 109 | assert np.all(med5 == npmed5) 110 | 111 | 112 | def test_sepmedfilt7(): 113 | a = np.ascontiguousarray(np.random.random((1001, 1001))).astype('f4') 114 | npmed7 = ndimage.filters.median_filter(a, size=(1, 7), mode='nearest') 115 | npmed7[:, :3] = a[:, :3] 116 | npmed7[:, -3:] = a[:, -3:] 117 | npmed7 = ndimage.filters.median_filter(npmed7, size=(7, 1), mode='nearest') 118 | npmed7[:3, :] = a[:3, :] 119 | npmed7[-3:, :] = a[-3:, :] 120 | npmed7[:, :3] = a[:, :3] 121 | npmed7[:, -3:] = a[:, -3:] 122 | 123 | med7 = sepmedfilt7(a) 124 | assert np.all(med7 == npmed7) 125 | 126 | 127 | def test_sepmedfilt9(): 128 | a = np.ascontiguousarray(np.random.random((1001, 1001))).astype('f4') 129 | npmed9 = ndimage.filters.median_filter(a, size=(1, 9), mode='nearest') 130 | npmed9[:, :4] = a[:, :4] 131 | npmed9[:, -4:] = a[:, -4:] 132 | npmed9 = ndimage.filters.median_filter(npmed9, size=(9, 1), mode='nearest') 133 | npmed9[:4, :] = a[:4, :] 134 | npmed9[-4:, :] = a[-4:, :] 135 | npmed9[:, :4] = a[:, :4] 136 | npmed9[:, -4:] = a[:, -4:] 137 | 138 | med9 = sepmedfilt9(a) 139 | assert np.all(med9 == npmed9) 140 | 141 | 142 | def test_dilate5(): 143 | # Put 5% of the pixels into a mask 144 | a = np.zeros((1001, 1001), dtype=np.bool) 145 | 
a[np.random.random((1001, 1001)) < 0.05] = True 146 | kernel = np.ones((5, 5)) 147 | kernel[0, 0] = 0 148 | kernel[0, 4] = 0 149 | kernel[4, 0] = 0 150 | kernel[4, 4] = 0 151 | # Make a zero padded array for the numpy version to operate 152 | paddeda = np.zeros((1005, 1005), dtype=np.bool) 153 | paddeda[2:-2, 2:-2] = a[:, :] 154 | npdilate = binary_dilation(np.ascontiguousarray(paddeda), 155 | structure=kernel, iterations=2) 156 | cdilate = dilate5(a, 2) 157 | 158 | assert np.all(npdilate[2:-2, 2:-2] == cdilate) 159 | 160 | 161 | def test_dilate3(): 162 | # Put 5% of the pixels into a mask 163 | a = np.zeros((1001, 1001), dtype=np.bool) 164 | a[np.random.random((1001, 1001)) < 0.05] = True 165 | kernel = np.ones((3, 3)) 166 | npgrow = binary_dilation(np.ascontiguousarray(a), 167 | structure=kernel, iterations=1) 168 | cgrow = dilate3(a) 169 | npgrow[:, 0] = a[:, 0] 170 | npgrow[:, -1] = a[:, -1] 171 | npgrow[0, :] = a[0, :] 172 | npgrow[-1, :] = a[-1, :] 173 | assert np.all(npgrow == cgrow) 174 | 175 | 176 | def test_subsample(): 177 | a = np.ascontiguousarray(np.random.random((1001, 1001))).astype('f4') 178 | npsubsamp = np.zeros((a.shape[0] * 2, a.shape[1] * 2), dtype=np.float32) 179 | for i in range(a.shape[0]): 180 | for j in range(a.shape[1]): 181 | npsubsamp[2 * i, 2 * j] = a[i, j] 182 | npsubsamp[2 * i + 1, 2 * j] = a[i, j] 183 | npsubsamp[2 * i, 2 * j + 1] = a[i, j] 184 | npsubsamp[2 * i + 1, 2 * j + 1] = a[i, j] 185 | 186 | csubsamp = subsample(a) 187 | assert np.all(npsubsamp == csubsamp) 188 | 189 | 190 | def test_rebin(): 191 | a = np.ascontiguousarray(np.random.random((2002, 2002)), dtype=np.float32) 192 | a = a.astype(' np.PyArray_DATA(dsub) 61 | cdef float * outdsubptr = < float * > np.PyArray_DATA(output) 62 | with nogil: 63 | PySubsample(dsubptr, outdsubptr, nx, ny) 64 | return output 65 | 66 | 67 | def rebin(np.ndarray[np.float32_t, ndim=2, mode='c', cast=True] drebin): 68 | """rebin(drebin)\n 69 | Rebin an array 2x2. 70 | 71 | Rebin the array by block averaging 4 pixels back into 1. 72 | 73 | Parameters 74 | ---------- 75 | drebin : float numpy array 76 | Array to be rebinned 2x2. 77 | 78 | Returns 79 | ------- 80 | output : float numpy array 81 | Rebinned array. The size of the output array will be 2 times smaller 82 | than drebin. 83 | 84 | Notes 85 | ----- 86 | This is effectively the opposite of subsample (although subsample does not 87 | do an average). The array needs to be C-contiguous order. Wrapper for 88 | PyRebin in imutils. 89 | """ 90 | cdef int nx = drebin.shape[1] / 2 91 | cdef int ny = drebin.shape[0] / 2 92 | 93 | # Allocate the output array here so that Python tracks the memory and will 94 | # free the memory when we are finished with the output array. 95 | output = np.zeros((ny, nx), dtype=np.float32) 96 | 97 | cdef float * drebinptr = < float * > np.PyArray_DATA(drebin) 98 | cdef float * outdrebinptr = < float * > np.PyArray_DATA(output) 99 | with nogil: 100 | PyRebin(drebinptr, outdrebinptr, nx, ny) 101 | return output 102 | 103 | 104 | def convolve(np.ndarray[np.float32_t, ndim=2, mode='c', cast=True] dconv, 105 | np.ndarray[np.float32_t, ndim=2, mode='c', cast=True] kernel): 106 | """convolve(dconv, kernel)\n 107 | Convolve an array with a kernel. 108 | 109 | Parameters 110 | ---------- 111 | dconv : float numpy array 112 | Array to be convolved. 113 | 114 | kernel : float numpy array 115 | Kernel to use in the convolution. 116 | 117 | Returns 118 | ------- 119 | output : float numpy array 120 | Convolved array. 
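    A minimal usage sketch (not part of the original docstring; the image and
    kernel must both be float32 and C-contiguous)::

        >>> import numpy as np
        >>> from astroscrappy.utils import convolve
        >>> img = np.random.random((64, 64)).astype(np.float32)
        >>> kern = np.ones((3, 3), dtype=np.float32) / 9.0
        >>> smoothed = convolve(img, kern)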
121 | 122 | Notes 123 | ----- 124 | Both the data and kernel arrays need to be C-contiguous order. Wrapper for 125 | PyConvolve in imutils. 126 | """ 127 | cdef int nx = dconv.shape[1] 128 | cdef int ny = dconv.shape[0] 129 | 130 | # Allocate the output array here so that Python tracks the memory and will 131 | # free the memory when we are finished with the output array. 132 | output = np.zeros((ny, nx), dtype=np.float32) 133 | 134 | cdef float * dconvptr = < float * > np.PyArray_DATA(dconv) 135 | cdef float * outdconvptr = < float * > np.PyArray_DATA(output) 136 | 137 | cdef int knx = kernel.shape[1] 138 | cdef int kny = kernel.shape[0] 139 | cdef float * kernptr = < float * > np.PyArray_DATA(kernel) 140 | 141 | with nogil: 142 | PyConvolve(dconvptr, kernptr, outdconvptr, nx, ny, knx, kny) 143 | return output 144 | 145 | 146 | def laplaceconvolve(np.ndarray[np.float32_t, ndim=2, mode='c', cast=True] dl): 147 | """laplaceconvolve(dl)\n 148 | Convolve an array with the Laplacian kernel. 149 | 150 | Convolve with the discrete version of the Laplacian operator with kernel:\n 151 | 0 -1 0\n 152 | -1 4 -1\n 153 | 0 -1 0\n 154 | 155 | Parameters 156 | ---------- 157 | dl : float numpy array 158 | Array to be convolved. 159 | 160 | Returns 161 | ------- 162 | output: float numpy array 163 | Convolved array. 164 | 165 | Notes 166 | ----- 167 | The array needs to be C-contiguous order. Wrapper for PyLaplaceConvolve 168 | in imutils. 169 | """ 170 | cdef int nx = dl.shape[1] 171 | cdef int ny = dl.shape[0] 172 | 173 | # Allocate the output array here so that Python tracks the memory and will 174 | # free the memory when we are finished with the output array. 175 | output = np.zeros((ny, nx), dtype=np.float32) 176 | 177 | cdef float * dlapptr = < float * > np.PyArray_DATA(dl) 178 | cdef float * outdlapptr = < float * > np.PyArray_DATA(output) 179 | with nogil: 180 | PyLaplaceConvolve(dlapptr, outdlapptr, nx, ny) 181 | return output 182 | 183 | 184 | def dilate3(np.ndarray[np.uint8_t, ndim=2, mode='c', cast=True] dgrow): 185 | """dilate3(dgrow)\n 186 | Perform a boolean dilation on an array. 187 | 188 | Parameters 189 | ---------- 190 | dgrow : boolean numpy array 191 | Array to dilate. 192 | 193 | Returns 194 | ------- 195 | output : boolean numpy array 196 | Dilated array. 197 | 198 | Notes 199 | ----- 200 | Dilation is the boolean equivalent of a convolution but using logical ors 201 | instead of a sum. 202 | We apply the following kernel:\n 203 | 1 1 1\n 204 | 1 1 1\n 205 | 1 1 1\n 206 | The binary dilation is not computed for a 1 pixel border around the image. 207 | These pixels are copied from the input data. The array needs to be 208 | C-contiguous order. Wrapper for PyDilate3 in imutils. 209 | """ 210 | cdef int nx = dgrow.shape[1] 211 | cdef int ny = dgrow.shape[0] 212 | 213 | # Allocate the output array here so that Python tracks the memory and will 214 | # free the memory when we are finished with the output array. 215 | output = np.zeros((ny, nx), dtype=np.bool) 216 | 217 | cdef uint8_t * dgrowptr = < uint8_t * > np.PyArray_DATA(dgrow) 218 | cdef uint8_t * outdgrowptr = < uint8_t * > np.PyArray_DATA(output) 219 | with nogil: 220 | PyDilate3(dgrowptr, outdgrowptr, nx, ny) 221 | return output 222 | 223 | 224 | def dilate5(np.ndarray[np.uint8_t, ndim=2, mode='c', cast=True] ddilate, 225 | int niter): 226 | """dilate5(data, niter)\n 227 | Do niter iterations of boolean dilation on an array. 228 | 229 | Parameters 230 | ---------- 231 | ddilate : boolean numpy array 232 | Array to dilate. 
233 | 234 | niter : int 235 | Number of iterations. 236 | 237 | Returns 238 | ------- 239 | output : boolean numpy array 240 | Dilated array. 241 | 242 | Notes 243 | ----- 244 | Dilation is the boolean equivalent of a convolution but using logical ors 245 | instead of a sum. 246 | We apply the following kernel:\n 247 | 0 1 1 1 0\n 248 | 1 1 1 1 1\n 249 | 1 1 1 1 1\n 250 | 1 1 1 1 1\n 251 | 0 1 1 1 0\n 252 | The edges are padded with zeros so that the dilation operator is defined 253 | for all pixels. The array needs to be C-contiguous order. Wrapper for 254 | PyDilate5 in imutils. 255 | """ 256 | cdef int nx = ddilate.shape[1] 257 | cdef int ny = ddilate.shape[0] 258 | 259 | # Allocate the output array here so that Python tracks the memory and will 260 | # free the memory when we are finished with the output array. 261 | output = np.zeros((ny, nx), dtype=np.bool) 262 | 263 | cdef uint8_t * ddilateptr = < uint8_t * > np.PyArray_DATA(ddilate) 264 | cdef uint8_t * outddilateptr = < uint8_t * > np.PyArray_DATA(output) 265 | with nogil: 266 | PyDilate5(ddilateptr, outddilateptr, niter, nx, ny) 267 | return output 268 | -------------------------------------------------------------------------------- /ez_setup.py: -------------------------------------------------------------------------------- 1 | #!python 2 | """Bootstrap setuptools installation 3 | 4 | If you want to use setuptools in your package's setup.py, just include this 5 | file in the same directory with it, and add this to the top of your setup.py:: 6 | 7 | from ez_setup import use_setuptools 8 | use_setuptools() 9 | 10 | If you want to require a specific version of setuptools, set a download 11 | mirror, or use an alternate download directory, you can do so by supplying 12 | the appropriate options to ``use_setuptools()``. 13 | 14 | This file can also be run as a script to install or upgrade setuptools. 
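A command-line sketch (the flags shown are the ones defined by _parse_args
below; the mirror URL is a placeholder)::

    python ez_setup.py --user
    python ez_setup.py --download-base=https://mirror.example/setuptools/ --insecure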
15 | """ 16 | import os 17 | import shutil 18 | import sys 19 | import tempfile 20 | import tarfile 21 | import optparse 22 | import subprocess 23 | import platform 24 | 25 | from distutils import log 26 | 27 | try: 28 | from site import USER_SITE 29 | except ImportError: 30 | USER_SITE = None 31 | 32 | DEFAULT_VERSION = "1.4.2" 33 | DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/" 34 | 35 | def _python_cmd(*args): 36 | args = (sys.executable,) + args 37 | return subprocess.call(args) == 0 38 | 39 | def _check_call_py24(cmd, *args, **kwargs): 40 | res = subprocess.call(cmd, *args, **kwargs) 41 | class CalledProcessError(Exception): 42 | pass 43 | if not res == 0: 44 | msg = "Command '%s' return non-zero exit status %d" % (cmd, res) 45 | raise CalledProcessError(msg) 46 | vars(subprocess).setdefault('check_call', _check_call_py24) 47 | 48 | def _install(tarball, install_args=()): 49 | # extracting the tarball 50 | tmpdir = tempfile.mkdtemp() 51 | log.warn('Extracting in %s', tmpdir) 52 | old_wd = os.getcwd() 53 | try: 54 | os.chdir(tmpdir) 55 | tar = tarfile.open(tarball) 56 | _extractall(tar) 57 | tar.close() 58 | 59 | # going in the directory 60 | subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) 61 | os.chdir(subdir) 62 | log.warn('Now working in %s', subdir) 63 | 64 | # installing 65 | log.warn('Installing Setuptools') 66 | if not _python_cmd('setup.py', 'install', *install_args): 67 | log.warn('Something went wrong during the installation.') 68 | log.warn('See the error message above.') 69 | # exitcode will be 2 70 | return 2 71 | finally: 72 | os.chdir(old_wd) 73 | shutil.rmtree(tmpdir) 74 | 75 | 76 | def _build_egg(egg, tarball, to_dir): 77 | # extracting the tarball 78 | tmpdir = tempfile.mkdtemp() 79 | log.warn('Extracting in %s', tmpdir) 80 | old_wd = os.getcwd() 81 | try: 82 | os.chdir(tmpdir) 83 | tar = tarfile.open(tarball) 84 | _extractall(tar) 85 | tar.close() 86 | 87 | # going in the directory 88 | subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) 89 | os.chdir(subdir) 90 | log.warn('Now working in %s', subdir) 91 | 92 | # building an egg 93 | log.warn('Building a Setuptools egg in %s', to_dir) 94 | _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) 95 | 96 | finally: 97 | os.chdir(old_wd) 98 | shutil.rmtree(tmpdir) 99 | # returning the result 100 | log.warn(egg) 101 | if not os.path.exists(egg): 102 | raise IOError('Could not build the egg.') 103 | 104 | 105 | def _do_download(version, download_base, to_dir, download_delay): 106 | egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg' 107 | % (version, sys.version_info[0], sys.version_info[1])) 108 | if not os.path.exists(egg): 109 | tarball = download_setuptools(version, download_base, 110 | to_dir, download_delay) 111 | _build_egg(egg, tarball, to_dir) 112 | sys.path.insert(0, egg) 113 | 114 | # Remove previously-imported pkg_resources if present (see 115 | # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details). 
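# Note (annotation, not in the original file): dropping the cached module
# here forces the next import to resolve against the freshly built egg just
# prepended to sys.path, instead of reusing a stale pkg_resources imported
# earlier in the process.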
116 | if 'pkg_resources' in sys.modules: 117 | del sys.modules['pkg_resources'] 118 | 119 | import setuptools 120 | setuptools.bootstrap_install_from = egg 121 | 122 | 123 | def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, 124 | to_dir=os.curdir, download_delay=15): 125 | # making sure we use the absolute path 126 | to_dir = os.path.abspath(to_dir) 127 | was_imported = 'pkg_resources' in sys.modules or \ 128 | 'setuptools' in sys.modules 129 | try: 130 | import pkg_resources 131 | except ImportError: 132 | return _do_download(version, download_base, to_dir, download_delay) 133 | try: 134 | pkg_resources.require("setuptools>=" + version) 135 | return 136 | except pkg_resources.VersionConflict: 137 | e = sys.exc_info()[1] 138 | if was_imported: 139 | sys.stderr.write( 140 | "The required version of setuptools (>=%s) is not available,\n" 141 | "and can't be installed while this script is running. Please\n" 142 | "install a more recent version first, using\n" 143 | "'easy_install -U setuptools'." 144 | "\n\n(Currently using %r)\n" % (version, e.args[0])) 145 | sys.exit(2) 146 | else: 147 | del pkg_resources, sys.modules['pkg_resources'] # reload ok 148 | return _do_download(version, download_base, to_dir, 149 | download_delay) 150 | except pkg_resources.DistributionNotFound: 151 | return _do_download(version, download_base, to_dir, 152 | download_delay) 153 | 154 | def _clean_check(cmd, target): 155 | """ 156 | Run the command to download target. If the command fails, clean up before 157 | re-raising the error. 158 | """ 159 | try: 160 | subprocess.check_call(cmd) 161 | except subprocess.CalledProcessError: 162 | if os.access(target, os.F_OK): 163 | os.unlink(target) 164 | raise 165 | 166 | def download_file_powershell(url, target): 167 | """ 168 | Download the file at url to target using Powershell (which will validate 169 | trust). Raise an exception if the command cannot complete. 
170 | """ 171 | target = os.path.abspath(target) 172 | cmd = [ 173 | 'powershell', 174 | '-Command', 175 | "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)" % vars(), 176 | ] 177 | _clean_check(cmd, target) 178 | 179 | def has_powershell(): 180 | if platform.system() != 'Windows': 181 | return False 182 | cmd = ['powershell', '-Command', 'echo test'] 183 | devnull = open(os.path.devnull, 'wb') 184 | try: 185 | try: 186 | subprocess.check_call(cmd, stdout=devnull, stderr=devnull) 187 | except: 188 | return False 189 | finally: 190 | devnull.close() 191 | return True 192 | 193 | download_file_powershell.viable = has_powershell 194 | 195 | def download_file_curl(url, target): 196 | cmd = ['curl', url, '--silent', '--output', target] 197 | _clean_check(cmd, target) 198 | 199 | def has_curl(): 200 | cmd = ['curl', '--version'] 201 | devnull = open(os.path.devnull, 'wb') 202 | try: 203 | try: 204 | subprocess.check_call(cmd, stdout=devnull, stderr=devnull) 205 | except: 206 | return False 207 | finally: 208 | devnull.close() 209 | return True 210 | 211 | download_file_curl.viable = has_curl 212 | 213 | def download_file_wget(url, target): 214 | cmd = ['wget', url, '--quiet', '--output-document', target] 215 | _clean_check(cmd, target) 216 | 217 | def has_wget(): 218 | cmd = ['wget', '--version'] 219 | devnull = open(os.path.devnull, 'wb') 220 | try: 221 | try: 222 | subprocess.check_call(cmd, stdout=devnull, stderr=devnull) 223 | except: 224 | return False 225 | finally: 226 | devnull.close() 227 | return True 228 | 229 | download_file_wget.viable = has_wget 230 | 231 | def download_file_insecure(url, target): 232 | """ 233 | Use Python to download the file, even though it cannot authenticate the 234 | connection. 235 | """ 236 | try: 237 | from urllib.request import urlopen 238 | except ImportError: 239 | from urllib2 import urlopen 240 | src = dst = None 241 | try: 242 | src = urlopen(url) 243 | # Read/write all in one block, so we don't create a corrupt file 244 | # if the download is interrupted. 245 | data = src.read() 246 | dst = open(target, "wb") 247 | dst.write(data) 248 | finally: 249 | if src: 250 | src.close() 251 | if dst: 252 | dst.close() 253 | 254 | download_file_insecure.viable = lambda: True 255 | 256 | def get_best_downloader(): 257 | downloaders = [ 258 | download_file_powershell, 259 | download_file_curl, 260 | download_file_wget, 261 | download_file_insecure, 262 | ] 263 | 264 | for dl in downloaders: 265 | if dl.viable(): 266 | return dl 267 | 268 | def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, 269 | to_dir=os.curdir, delay=15, 270 | downloader_factory=get_best_downloader): 271 | """Download setuptools from a specified location and return its filename 272 | 273 | `version` should be a valid setuptools version number that is available 274 | as an egg for download under the `download_base` URL (which should end 275 | with a '/'). `to_dir` is the directory where the egg will be downloaded. 276 | `delay` is the number of seconds to pause before an actual download 277 | attempt. 278 | 279 | ``downloader_factory`` should be a function taking no arguments and 280 | returning a function for downloading a URL to a target. 
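    A direct-call sketch (illustrative values only; ``get_best_downloader``
    above is the default ``downloader_factory``)::

        tarball = download_setuptools(version="1.4.2", to_dir="/tmp")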
281 | """ 282 | # making sure we use the absolute path 283 | to_dir = os.path.abspath(to_dir) 284 | tgz_name = "setuptools-%s.tar.gz" % version 285 | url = download_base + tgz_name 286 | saveto = os.path.join(to_dir, tgz_name) 287 | if not os.path.exists(saveto): # Avoid repeated downloads 288 | log.warn("Downloading %s", url) 289 | downloader = downloader_factory() 290 | downloader(url, saveto) 291 | return os.path.realpath(saveto) 292 | 293 | 294 | def _extractall(self, path=".", members=None): 295 | """Extract all members from the archive to the current working 296 | directory and set owner, modification time and permissions on 297 | directories afterwards. `path' specifies a different directory 298 | to extract to. `members' is optional and must be a subset of the 299 | list returned by getmembers(). 300 | """ 301 | import copy 302 | import operator 303 | from tarfile import ExtractError 304 | directories = [] 305 | 306 | if members is None: 307 | members = self 308 | 309 | for tarinfo in members: 310 | if tarinfo.isdir(): 311 | # Extract directories with a safe mode. 312 | directories.append(tarinfo) 313 | tarinfo = copy.copy(tarinfo) 314 | tarinfo.mode = 448 # decimal for oct 0700 315 | self.extract(tarinfo, path) 316 | 317 | # Reverse sort directories. 318 | if sys.version_info < (2, 4): 319 | def sorter(dir1, dir2): 320 | return cmp(dir1.name, dir2.name) 321 | directories.sort(sorter) 322 | directories.reverse() 323 | else: 324 | directories.sort(key=operator.attrgetter('name'), reverse=True) 325 | 326 | # Set correct owner, mtime and filemode on directories. 327 | for tarinfo in directories: 328 | dirpath = os.path.join(path, tarinfo.name) 329 | try: 330 | self.chown(tarinfo, dirpath) 331 | self.utime(tarinfo, dirpath) 332 | self.chmod(tarinfo, dirpath) 333 | except ExtractError: 334 | e = sys.exc_info()[1] 335 | if self.errorlevel > 1: 336 | raise 337 | else: 338 | self._dbg(1, "tarfile: %s" % e) 339 | 340 | 341 | def _build_install_args(options): 342 | """ 343 | Build the arguments to 'python setup.py install' on the setuptools package 344 | """ 345 | install_args = [] 346 | if options.user_install: 347 | if sys.version_info < (2, 6): 348 | log.warn("--user requires Python 2.6 or later") 349 | raise SystemExit(1) 350 | install_args.append('--user') 351 | return install_args 352 | 353 | def _parse_args(): 354 | """ 355 | Parse the command line for options 356 | """ 357 | parser = optparse.OptionParser() 358 | parser.add_option( 359 | '--user', dest='user_install', action='store_true', default=False, 360 | help='install in user site package (requires Python 2.6 or later)') 361 | parser.add_option( 362 | '--download-base', dest='download_base', metavar="URL", 363 | default=DEFAULT_URL, 364 | help='alternative URL from where to download the setuptools package') 365 | parser.add_option( 366 | '--insecure', dest='downloader_factory', action='store_const', 367 | const=lambda: download_file_insecure, default=get_best_downloader, 368 | help='Use internal, non-validating downloader' 369 | ) 370 | options, args = parser.parse_args() 371 | # positional arguments are ignored 372 | return options 373 | 374 | def main(version=DEFAULT_VERSION): 375 | """Install or upgrade setuptools and EasyInstall""" 376 | options = _parse_args() 377 | tarball = download_setuptools(download_base=options.download_base, 378 | downloader_factory=options.downloader_factory) 379 | return _install(tarball, _build_install_args(options)) 380 | 381 | if __name__ == '__main__': 382 | sys.exit(main()) 383 | 
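The downloader helpers above form a simple priority chain: each candidate
exposes a ``viable`` probe and ``get_best_downloader`` returns the first one
that passes, falling back to the non-validating urllib downloader. A short
usage sketch (the URL is just DEFAULT_URL plus the tarball name that
``download_setuptools`` would construct):

    downloader = get_best_downloader()
    downloader("https://pypi.python.org/packages/source/s/setuptools/setuptools-1.4.2.tar.gz",
               "setuptools-1.4.2.tar.gz")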
-------------------------------------------------------------------------------- /astroscrappy/utils/median_utils.pyx: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | # cython: profile=True, boundscheck=False, nonecheck=False, wraparound=False 3 | # cython: cdivision=True 4 | from __future__ import (absolute_import, division, print_function, 5 | unicode_literals) 6 | """ 7 | Name : median_utils 8 | Author : Curtis McCully 9 | Date : October 2014 10 | """ 11 | import numpy as np 12 | cimport numpy as np 13 | 14 | np.import_array() 15 | 16 | 17 | cdef extern from "medutils.h": 18 | float PyMedian(float * a, int n) nogil 19 | float PyOptMed3(float * a) nogil 20 | float PyOptMed5(float * a) nogil 21 | float PyOptMed7(float * a) nogil 22 | float PyOptMed9(float * a) nogil 23 | float PyOptMed25(float * a) nogil 24 | void PyMedFilt3(float * data, float * output, int nx, int ny) nogil 25 | void PyMedFilt5(float * data, float * output, int nx, int ny) nogil 26 | void PyMedFilt7(float * data, float * output, int nx, int ny) nogil 27 | void PySepMedFilt3(float * data, float * output, int nx, int ny) nogil 28 | void PySepMedFilt5(float * data, float * output, int nx, int ny) nogil 29 | void PySepMedFilt7(float * data, float * output, int nx, int ny) nogil 30 | void PySepMedFilt9(float * data, float * output, int nx, int ny) nogil 31 | 32 | 33 | """ 34 | Wrappers for the C functions in medutils.c 35 | """ 36 | 37 | 38 | def median(np.ndarray[np.float32_t, mode='c', cast=True] a, int n): 39 | """median(a, n)\n 40 | Find the median of the first n elements of an array. 41 | 42 | Parameters 43 | ---------- 44 | a : float numpy array 45 | Input array to find the median. 46 | 47 | n : int 48 | Number of elements of the array to median. 49 | 50 | Returns 51 | ------- 52 | med : float 53 | The median value. 54 | 55 | Notes 56 | ----- 57 | Wrapper for PyMedian in medutils. 58 | """ 59 | cdef float * aptr = < float * > np.PyArray_DATA(a) 60 | cdef float med = 0.0 61 | with nogil: 62 | med = PyMedian(aptr, n) 63 | return med 64 | 65 | cdef float cymedian(float* a, int n) nogil: 66 | """cymedian(a, n)\n 67 | Cython function to calculate the median without requiring the GIL. 68 | :param a: 69 | :param n: 70 | :return: 71 | """ 72 | cdef float med = 0.0 73 | med = PyMedian(a, n) 74 | return med 75 | 76 | def optmed3(np.ndarray[np.float32_t, ndim=1, mode='c', cast=True] a): 77 | """optmed3(a)\n 78 | Optimized method to find the median value of an array of length 3. 79 | 80 | Parameters 81 | ---------- 82 | a : float numpy array 83 | Input array to find the median. Must be length 3. 84 | 85 | Returns 86 | ------- 87 | med3 : float 88 | The median of the 3-element array. 89 | 90 | Notes 91 | ----- 92 | Wrapper for PyOptMed3 in medutils. 93 | """ 94 | cdef float * aptr3 = < float * > np.PyArray_DATA(a) 95 | cdef float med3 = 0.0 96 | with nogil: 97 | med3 = PyOptMed3(aptr3) 98 | return med3 99 | 100 | 101 | def optmed5(np.ndarray[np.float32_t, ndim=1, mode='c', cast=True] a): 102 | """optmed5(a)\n 103 | Optimized method to find the median value of an array of length 5. 104 | 105 | Parameters 106 | ---------- 107 | a : float numpy array 108 | Input array to find the median. Must be length 5. 109 | 110 | Returns 111 | ------- 112 | med5 : float 113 | The median of the 5-element array. 114 | 115 | Notes 116 | ----- 117 | Wrapper for PyOptMed5 in medutils. 
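    A usage sketch (not in the original docstring; the input must be a
    length-5, float32, C-contiguous array, as the test suite exercises)::

        >>> import numpy as np
        >>> from astroscrappy.utils import optmed5
        >>> a = np.random.random(5).astype(np.float32)
        >>> optmed5(a) == np.float32(np.median(a))
        True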
118 | """ 119 | cdef float * aptr5 = < float * > np.PyArray_DATA(a) 120 | cdef float med5 = 0.0 121 | with nogil: 122 | med5 = PyOptMed5(aptr5) 123 | return med5 124 | 125 | 126 | def optmed7(np.ndarray[np.float32_t, ndim=1, mode='c', cast=True] a): 127 | """optmed7(a)\n 128 | Optimized method to find the median value of an array of length 7. 129 | 130 | Parameters 131 | ---------- 132 | a : float numpy array 133 | Input array to find the median. Must be length 7. 134 | 135 | Returns 136 | ------- 137 | med7 : float 138 | The median of the 7-element array. 139 | 140 | Notes 141 | ----- 142 | Wrapper for PyOptMed7 in medutils. 143 | """ 144 | cdef float * aptr7 = < float * > np.PyArray_DATA(a) 145 | cdef float med7 = 0.0 146 | with nogil: 147 | med7 = PyOptMed7(aptr7) 148 | return med7 149 | 150 | 151 | def optmed9(np.ndarray[np.float32_t, ndim=1, mode='c', cast=True] a): 152 | """optmed9(a)\n 153 | Optimized method to find the median value of an array of length 9. 154 | 155 | Parameters 156 | ---------- 157 | a : float numpy array 158 | Input array to find the median. Must be length 9. 159 | 160 | Returns 161 | ------- 162 | med9 : float 163 | The median of the 9-element array. 164 | 165 | Notes 166 | ----- 167 | Wrapper for PyOptMed9 in medutils. 168 | """ 169 | cdef float * aptr9 = < float * > np.PyArray_DATA(a) 170 | cdef float med9 = 0.0 171 | with nogil: 172 | med9 = PyOptMed9(aptr9) 173 | return med9 174 | 175 | 176 | def optmed25(np.ndarray[np.float32_t, ndim=1, mode='c', cast=True] a): 177 | """optmed25(a)\n 178 | Optimized method to find the median value of an array of length 25. 179 | 180 | Parameters 181 | ---------- 182 | a : float numpy array 183 | Input array to find the median. Must be length 25. 184 | 185 | Returns 186 | ------- 187 | med25 : float 188 | The median of the 25-element array. 189 | 190 | Notes 191 | ----- 192 | Wrapper for PyOptMed25 in medutils. 193 | """ 194 | cdef float * aptr25 = < float * > np.PyArray_DATA(a) 195 | cdef float med25 = 0.0 196 | with nogil: 197 | med25 = PyOptMed25(aptr25) 198 | return med25 199 | 200 | 201 | def medfilt3(np.ndarray[np.float32_t, ndim=2, mode='c', cast=True] d3): 202 | """medfilt3(d3)\n 203 | Calculate the 3x3 median filter of an array. 204 | 205 | Parameters 206 | ---------- 207 | d3 : float numpy array 208 | Array to median filter. 209 | 210 | Returns 211 | ------- 212 | output : float numpy array 213 | Median filtered array. 214 | 215 | Notes 216 | ----- 217 | The median filter is not calculated for a 1 pixel border around the image. 218 | These pixel values are copied from the input data. The array needs to be 219 | C-contiguous order. Wrapper for PyMedFilt3 in medutils. 220 | """ 221 | cdef int nx = d3.shape[1] 222 | cdef int ny = d3.shape[0] 223 | 224 | # Allocate the output array here so that Python tracks the memory and will 225 | # free the memory when we are finished with the output array. 226 | output = np.zeros((ny, nx), dtype=np.float32) 227 | cdef float * d3ptr = < float * > np.PyArray_DATA(d3) 228 | cdef float * outd3ptr = < float * > np.PyArray_DATA(output) 229 | with nogil: 230 | PyMedFilt3(d3ptr, outd3ptr, nx, ny) 231 | 232 | return output 233 | 234 | 235 | def medfilt5(np.ndarray[np.float32_t, ndim=2, mode='c', cast=True] d5): 236 | """medfilt5(d5)\n 237 | Calculate the 5x5 median filter of an array. 238 | 239 | Parameters 240 | ---------- 241 | d5 : float numpy array 242 | Array to median filter. 243 | 244 | Returns 245 | ------- 246 | output : float numpy array 247 | Median filtered array. 
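    A minimal usage sketch (not part of the original docstring; the input
    must be float32 and C-contiguous)::

        >>> import numpy as np
        >>> from astroscrappy.utils import medfilt5
        >>> img = np.random.random((128, 128)).astype(np.float32)
        >>> filtered = medfilt5(img)  # the 2 pixel border is copied from img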
248 | 249 | Notes 250 | ----- 251 | The median filter is not calculated for a 2 pixel border around the image. 252 | These pixel values are copied from the input data. The array needs to be 253 | C-contiguous order. Wrapper for PyMedFilt5 in medutils. 254 | """ 255 | cdef int nx = d5.shape[1] 256 | cdef int ny = d5.shape[0] 257 | 258 | # Allocate the output array here so that Python tracks the memory and will 259 | # free the memory when we are finished with the output array. 260 | output = np.zeros((ny, nx), dtype=np.float32) 261 | cdef float * d5ptr = < float * > np.PyArray_DATA(d5) 262 | cdef float * outd5ptr = < float * > np.PyArray_DATA(output) 263 | with nogil: 264 | PyMedFilt5(d5ptr, outd5ptr, nx, ny) 265 | return output 266 | 267 | 268 | def medfilt7(np.ndarray[np.float32_t, ndim=2, mode='c', cast=True] d7): 269 | """medfilt7(d7)\n 270 | Calculate the 7x7 median filter of an array. 271 | 272 | Parameters 273 | ---------- 274 | d7 : float numpy array 275 | Array to median filter. 276 | 277 | Returns 278 | ------- 279 | output : float numpy array 280 | Median filtered array. 281 | 282 | Notes 283 | ----- 284 | The median filter is not calculated for a 3 pixel border around the image. 285 | These pixel values are copied from the input data. The array needs to be 286 | C-contiguous order. Wrapper for PyMedFilt7 in medutils. 287 | """ 288 | cdef int nx = d7.shape[1] 289 | cdef int ny = d7.shape[0] 290 | 291 | # Allocate the output array here so that Python tracks the memory and will 292 | # free the memory when we are finished with the output array. 293 | output = np.zeros((ny, nx), dtype=np.float32) 294 | 295 | cdef float * d7ptr = < float * > np.PyArray_DATA(d7) 296 | cdef float * outd7ptr = < float * > np.PyArray_DATA(output) 297 | with nogil: 298 | PyMedFilt7(d7ptr, outd7ptr, nx, ny) 299 | return output 300 | 301 | 302 | def sepmedfilt3(np.ndarray[np.float32_t, ndim=2, mode='c', cast=True] dsep3): 303 | """sepmedfilt3(dsep3)\n 304 | Calculate the 3x3 separable median filter of an array. 305 | 306 | Parameters 307 | ---------- 308 | dsep3 : float numpy array 309 | Array to median filter. 310 | 311 | Returns 312 | ------- 313 | output : float numpy array 314 | Median filtered array. 315 | 316 | Notes 317 | ----- 318 | The separable median medians the rows followed by the columns instead of 319 | using a square window. Therefore it is not identical to the full median 320 | filter, but it is approximately the same and significantly faster. 321 | The median filter is not calculated for a 1 pixel border around the image. 322 | These pixel values are copied from the input data. The array needs to be 323 | C-contiguous order. Wrapper for PySepMedFilt3 in medutils. 324 | """ 325 | cdef int nx = dsep3.shape[1] 326 | cdef int ny = dsep3.shape[0] 327 | 328 | # Allocate the output array here so that Python tracks the memory and will 329 | # free the memory when we are finished with the output array. 330 | output = np.zeros((ny, nx), dtype=np.float32) 331 | 332 | cdef float * dsep3ptr = < float * > np.PyArray_DATA(dsep3) 333 | cdef float * outdsep3ptr = < float * > np.PyArray_DATA(output) 334 | with nogil: 335 | PySepMedFilt3(dsep3ptr, outdsep3ptr, nx, ny) 336 | return np.asarray(output) 337 | 338 | 339 | def sepmedfilt5(np.ndarray[np.float32_t, ndim=2, mode='c', cast=True] dsep5): 340 | """sepmedfilt5(dsep5)\n 341 | Calculate the 5x5 separable median filter of an array. 342 | 343 | Parameters 344 | ---------- 345 | dsep5 : float numpy array 346 | Array to median filter.
347 | 348 | Returns 349 | ------- 350 | output : float numpy array 351 | Median filtered array. 352 | 353 | Notes 354 | ----- 355 | The separable median medians the rows followed by the columns instead of 356 | using a square window. Therefore it is not identical to the full median 357 | filter, but it is approximately the same and significantly faster. 358 | The median filter is not calculated for a 2 pixel border around the image. 359 | These pixel values are copied from the input data. The array needs to be 360 | C-contiguous order. Wrapper for PySepMedFilt5 in medutils. 361 | """ 362 | cdef int nx = dsep5.shape[1] 363 | cdef int ny = dsep5.shape[0] 364 | 365 | # Allocate the output array here so that Python tracks the memory and will 366 | # free the memory when we are finished with the output array. 367 | output = np.zeros((ny, nx), dtype=np.float32) 368 | 369 | cdef float * dsep5ptr = < float * > np.PyArray_DATA(dsep5) 370 | cdef float * outdsep5ptr = < float * > np.PyArray_DATA(output) 371 | with nogil: 372 | PySepMedFilt5(dsep5ptr, outdsep5ptr, nx, ny) 373 | 374 | return output 375 | 376 | 377 | def sepmedfilt7(np.ndarray[np.float32_t, ndim=2, mode='c', cast=True] dsep7): 378 | """sepmedfilt7(dsep7)\n 379 | Calculate the 7x7 separable median filter of an array. 380 | 381 | Parameters 382 | ---------- 383 | dsep7 : float numpy array 384 | Array to median filter. 385 | 386 | Returns 387 | ------- 388 | output : float numpy array 389 | Median filtered array. 390 | 391 | Notes 392 | ----- 393 | The separable median medians the rows followed by the columns instead of 394 | using a square window. Therefore it is not identical to the full median 395 | filter, but it is approximately the same and significantly faster. 396 | The median filter is not calculated for a 3 pixel border around the image. 397 | These pixel values are copied from the input data. The array needs to be 398 | C-contiguous order. Wrapper for PySepMedFilt7 in medutils. 399 | """ 400 | cdef int nx = dsep7.shape[1] 401 | cdef int ny = dsep7.shape[0] 402 | 403 | # Allocate the output array here so that Python tracks the memory and will 404 | # free the memory when we are finished with the output array. 405 | output = np.zeros((ny, nx), dtype=np.float32) 406 | 407 | cdef float * dsep7ptr = < float * > np.PyArray_DATA(dsep7) 408 | cdef float * outdsep7ptr = < float * > np.PyArray_DATA(output) 409 | with nogil: 410 | PySepMedFilt7(dsep7ptr, outdsep7ptr, nx, ny) 411 | return output 412 | 413 | 414 | def sepmedfilt9(np.ndarray[np.float32_t, ndim=2, mode='c', cast=True] dsep9): 415 | """sepmedfilt9(dsep9)\n 416 | Calculate the 9x9 separable median filter of an array. 417 | 418 | Parameters 419 | ---------- 420 | dsep9 : float numpy array 421 | Array to median filter. 422 | 423 | Returns 424 | ------- 425 | output : float numpy array 426 | Median filtered array. 427 | 428 | Notes 429 | ----- 430 | The separable median medians the rows followed by the columns instead of 431 | using a square window. Therefore it is not identical to the full median 432 | filter, but it is approximately the same and significantly faster. 433 | The median filter is not calculated for a 4 pixel border around the image. 434 | These pixel values are copied from the input data. The array needs to be 435 | C-contiguous order. Wrapper for PySepMedFilt9 in medutils.
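    A usage sketch (not in the original docstring; it mirrors the comparison
    against two 1-D scipy median filters done in the test suite)::

        >>> import numpy as np
        >>> from astroscrappy.utils import sepmedfilt9
        >>> img = np.random.random((256, 256)).astype(np.float32)
        >>> filtered = sepmedfilt9(img)  # rows first, then columns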
436 | """ 437 | 438 | cdef int nx = dsep9.shape[1] 439 | cdef int ny = dsep9.shape[0] 440 | 441 | # Allocate the output array here so that Python tracks the memory and will 442 | # free the memory when we are finished with the output array. 443 | output = np.zeros((ny, nx), dtype=np.float32) 444 | 445 | cdef float * dsep9ptr = < float * > np.PyArray_DATA(dsep9) 446 | cdef float * outdsep9ptr = < float * > np.PyArray_DATA(output) 447 | with nogil: 448 | PySepMedFilt9(dsep9ptr, outdsep9ptr, nx, ny) 449 | return output 450 | -------------------------------------------------------------------------------- /astroscrappy/utils/imutils.c: -------------------------------------------------------------------------------- 1 | /* 2 | * Author: Curtis McCully 3 | * October 2014 4 | * Licensed under a 3-clause BSD style license - see LICENSE.rst 5 | * 6 | * Originally written in C++ in 2011 7 | * See also https://github.com/cmccully/lacosmicx 8 | * 9 | * This file contains image utility functions for SCRAPPY. These are the most 10 | * computationally expensive pieces of the calculation so they have been ported 11 | * to C. 12 | * 13 | * Many thanks to Nicolas Devillard who wrote the optimized methods for finding 14 | * the median and placed them in the public domain. I have noted in the 15 | * comments places that use Nicolas Devillard's code. 16 | * 17 | * Parallelization has been achieved using OpenMP. With a compiler that does 18 | * not support OpenMP, e.g. clang currently, the code should still compile and 19 | * run serially without issue. I have tried to be as explicit as possible about 20 | * specifying which variables are private and which should be shared, although 21 | * we never actually have any shared variables. We use firstprivate instead. 22 | * This does mean that it is important that we never have two threads write to 23 | * the same memory position at the same time. 24 | * 25 | * All calculations are done with 32 bit floats to keep the memory footprint 26 | * small. 27 | */ 28 | #include <Python.h> 29 | #include "imutils.h" 30 | 31 | /* Subsample an array 2x2 given an input array data with size nx x ny. Each 32 | * pixel is replicated into 4 pixels; no averaging is performed. The results 33 | * are saved in the output array. The output array should already be allocated 34 | * as we work on it in place. Data should be striped in the x direction such 35 | * that the memory location of pixel i,j is data[nx * j + i]. 36 | */ 37 | void 38 | PySubsample(float* data, float* output, int nx, int ny) 39 | { 40 | PyDoc_STRVAR(PySubsample__doc__, 41 | "PySubsample(data, output, nx, ny) -> void\n\n" 42 | "Subsample an array 2x2 given an input array data with size " 43 | "nx x ny. The results are saved in the output array. The output " 44 | "array should already be allocated as we work on it in place. Each" 45 | " pixel is replicated into 4 pixels; no averaging is performed.
" 46 | "Data should be striped in the x direction such that the memory " 47 | "location of pixel i,j is data[nx *j + i]."); 48 | 49 | /* Precalculate the new length; minor optimization */ 50 | int padnx = 2 * nx; 51 | 52 | /* Loop indices */ 53 | int i, j, nxj, padnxj; 54 | 55 | /* Loop over all pixels */ 56 | #pragma omp parallel for firstprivate(data, output, nx, ny, padnx) \ 57 | private(i, j, nxj, padnxj) 58 | for (j = 0; j < ny; j++) { 59 | nxj = nx * j; 60 | padnxj = 2 * padnx * j; 61 | for (i = 0; i < nx; i++) { 62 | /* Copy the pixel value into a 2x2 grid on the output image */ 63 | output[2 * i + padnxj] = data[i + nxj]; 64 | output[2 * i + padnxj + padnx] = data[i + nxj]; 65 | output[2 * i + 1 + padnxj + padnx] = data[i + nxj]; 66 | output[2 * i + 1 + padnxj] = data[i + nxj]; 67 | } 68 | } 69 | 70 | return; 71 | } 72 | 73 | /* Rebin an array 2x2, with size (2 * nx) x (2 * ny). Rebin the array by block 74 | * averaging 4 pixels back into 1. This is effectively the opposite of 75 | * subsample (although subsample does not do an average). The results are saved 76 | * in the output array. The output array should already be allocated as we work 77 | * on it in place. Data should be striped in the x direction such that the 78 | * memory location of pixel i,j is data[nx *j + i]. 79 | */ 80 | void 81 | PyRebin(float* data, float* output, int nx, int ny) 82 | { 83 | PyDoc_STRVAR(PyRebin__doc__, 84 | "PyRebin(data, output, nx, ny) -> void\n \n" 85 | "Rebin an array 2x2, with size (2 * nx) x (2 * ny). Rebin the " 86 | "array by block averaging 4 pixels back into 1. This is " 87 | "effectively the opposite of subsample (although subsample does " 88 | "not do an average). The results are saved in the output array. " 89 | "The output array should already be allocated as we work on it in " 90 | "place. Data should be striped in the x direction such that the " 91 | "memory location of pixel i,j is data[nx *j + i]."); 92 | 93 | /* Size of original array */ 94 | int padnx = nx * 2; 95 | 96 | /* Loop variables */ 97 | int i, j, nxj, padnxj; 98 | 99 | /* Pixel value p. Each thread needs its own copy of this variable so we 100 | * wait to initialize it until the pragma below */ 101 | float p; 102 | #pragma omp parallel for firstprivate(output, data, nx, ny, padnx) \ 103 | private(i, j, nxj, padnxj, p) 104 | /*Loop over all of the pixels */ 105 | for (j = 0; j < ny; j++) { 106 | nxj = nx * j; 107 | padnxj = 2 * padnx * j; 108 | for (i = 0; i < nx; i++) { 109 | p = data[2 * i + padnxj]; 110 | p += data[2 * i + padnxj + padnx]; 111 | p += data[2 * i + 1 + padnxj + padnx]; 112 | p += data[2 * i + 1 + padnxj]; 113 | p /= 4.0; 114 | output[i + nxj] = p; 115 | } 116 | } 117 | return; 118 | } 119 | 120 | /* Convolve an image of size nx x ny with a kernel of size kernx x kerny. The 121 | * results are saved in the output array. The output array should already be 122 | * allocated as we work on it in place. Data and kernel should both be striped 123 | * in the x direction such that the memory location of pixel i,j is 124 | * data[nx *j + i]. 125 | */ 126 | void 127 | PyConvolve(float* data, float* kernel, float* output, int nx, int ny, 128 | int kernx, int kerny) 129 | { 130 | PyDoc_STRVAR(PyConvolve__doc__, 131 | "PyConvolve(data, kernel, output, nx, ny, kernx, kerny) -> void\n\n" 132 | "Convolve an image of size nx x ny with a a kernel of size " 133 | "kernx x kerny. The results are saved in the output array. The " 134 | "output array should already be allocated as we work on it in " 135 | "place. 
Data and kernel should both be striped along the x " 136 | "direction such that the memory location of pixel i,j is " 137 | "data[nx * j + i]."); 138 | 139 | /* Get the width of the borders that we will pad with zeros */ 140 | int bnx = (kernx - 1) / 2; 141 | int bny = (kerny - 1) / 2; 142 | 143 | /* Calculate the dimensions of the array including padded border */ 144 | int padnx = nx + kernx - 1; 145 | int padny = ny + kerny - 1; 146 | /* Get the total number of pixels in the padded array */ 147 | int padnxny = padnx * padny; 148 | /*Get the total number of pixels in the output image */ 149 | int nxny = nx * ny; 150 | 151 | /*Allocate the padded array */ 152 | float* padarr = (float *) malloc(padnxny * sizeof(float)); 153 | 154 | /* Loop variables. These should all be thread private. */ 155 | int i, j; 156 | int nxj; 157 | int padnxj; 158 | /* Inner loop variables. Again thread private. */ 159 | int k, l; 160 | int kernxl, padnxl; 161 | 162 | /* Define a sum variable to use in the convolution calculation. Each 163 | * thread needs its own copy of this so it should be thread private. */ 164 | float sum; 165 | 166 | /* Precompute maximum good index in each dimension */ 167 | int xmaxgood = nx + bnx; 168 | int ymaxgood = ny + bny; 169 | 170 | /* Set the borders of padarr = 0.0 171 | * Fill the rest of the padded array with the input data. */ 172 | #pragma omp parallel for \ 173 | firstprivate(padarr, data, nx, padnx, padny, bnx, bny, xmaxgood, ymaxgood)\ 174 | private(nxj, padnxj, i, j) 175 | for (j = 0; j < padny; j++) { 176 | padnxj = padnx * j; 177 | nxj = nx * (j - bny); 178 | for (i = 0; i < padnx; i++) { 179 | if (i < bnx || j < bny || j >= ymaxgood || i >= xmaxgood) { 180 | padarr[padnxj + i] = 0.0; 181 | } 182 | else { 183 | padarr[padnxj + i] = data[nxj + i - bnx]; 184 | } 185 | } 186 | 187 | } 188 | 189 | /* Calculate the convolution */ 190 | /* Loop over all pixels */ 191 | #pragma omp parallel for \ 192 | firstprivate(padarr, output, nx, ny, padnx, bnx, bny, kernx) \ 193 | private(nxj, padnxj, kernxl, padnxl, i, j, k, l, sum) 194 | for (j = 0; j < ny; j++) { 195 | nxj = nx * j; 196 | /* Note the + bny in padnxj */ 197 | padnxj = padnx * (j + bny); 198 | for (i = 0; i < nx; i++) { 199 | sum = 0.0; 200 | /* Note that the sums in the definition of the convolution go from 201 | * -border width to + border width */ 202 | for (l = -bny; l <= bny; l++) { 203 | padnxl = padnx * (l + j + bny); 204 | kernxl = kernx * (-l + bny); 205 | for (k = -bnx; k <= bnx; k++) { 206 | sum += kernel[bnx - k + kernxl] 207 | * padarr[padnxl + k + i + bnx]; 208 | } 209 | } 210 | output[nxj + i] = sum; 211 | } 212 | } 213 | 214 | free(padarr); 215 | 216 | return; 217 | } 218 | 219 | /* Convolve an image of size nx x ny with the following kernel: 220 | * 0 -1 0 221 | * -1 4 -1 222 | * 0 -1 0 223 | * The results are saved in the output array. The output array should 224 | * already be allocated as we work on it in place. 225 | * This is a discrete version of the Laplacian operator. 226 | * Data should be striped in the x direction such that the memory location of 227 | * pixel i,j is data[nx * j + i]. 228 | */ 229 | void 230 | PyLaplaceConvolve(float* data, float* output, int nx, int ny) 231 | { 232 | PyDoc_STRVAR(PyLaplaceConvolve__doc__, 233 | "PyLaplaceConvolve(data, output, nx, ny) -> void\n\n" 234 | "Convolve an image of size nx x ny with the following kernel:\n" 235 | " 0 -1 0\n" 236 | "-1 4 -1\n" 237 | " 0 -1 0\n" 238 | "This is a discrete version of the Laplacian operator. 
The results" 239 | " are saved in the output array. The output array should already " 240 | "be allocated as we work on it in place.Data should be striped in " 241 | "the x direction such that the memory location of pixel i,j is " 242 | "data[nx *j + i]."); 243 | 244 | /* Precompute the total number of pixels in the image */ 245 | int nxny = nx * ny; 246 | 247 | /* Loop variables */ 248 | int i, j, nxj; 249 | 250 | /* Pixel value p. Each thread will need its own copy of this so we need to 251 | * make it private*/ 252 | float p; 253 | /* Because we know the form of the kernel, we can short circuit the 254 | * convolution and calculate the results with inner nest for loops. */ 255 | 256 | /*Loop over all of the pixels except the edges which we will do explicitly 257 | * below */ 258 | #pragma omp parallel for firstprivate(nx, ny, output, data) \ 259 | private(i, j, nxj, p) 260 | for (j = 1; j < ny - 1; j++) { 261 | nxj = nx * j; 262 | for (i = 1; i < nx - 1; i++) { 263 | p = 4.0 * data[nxj + i]; 264 | p -= data[i + 1 + nxj]; 265 | p -= data[i - 1 + nxj]; 266 | p -= data[i + nxj + nx]; 267 | p -= data[i + nxj - nx]; 268 | 269 | output[nxj + i] = p; 270 | } 271 | } 272 | 273 | /* Leave the corners until the very end */ 274 | 275 | #pragma omp parallel firstprivate(output, data, nx, nxny) private(i) 276 | /* Top and Bottom Rows */ 277 | for (i = 1; i < nx - 1; i++) { 278 | output[i] = 4.0 * data[i] - data[i + 1] - data[i - 1] - data[i + nx]; 279 | 280 | p = 4.0 * data[i + nxny - nx]; 281 | p -= data[i + 1 + nxny - nx]; 282 | p -= data[i + nxny - nx - 1]; 283 | p -= data[i - nx + nxny - nx]; 284 | output[i + nxny - nx] = p; 285 | } 286 | 287 | #pragma omp parallel firstprivate(output, data, nx, ny) private(j, nxj) 288 | /* First and Last Column */ 289 | for (j = 1; j < ny - 1; j++) { 290 | nxj = nx * j; 291 | p = 4.0 * data[nxj]; 292 | p -= data[nxj + 1]; 293 | p -= data[nxj + nx]; 294 | p -= data[nxj - nx]; 295 | output[nxj] = p; 296 | 297 | p = 4.0 * data[nxj + nx - 1]; 298 | p -= data[nxj + nx - 2]; 299 | p -= data[nxj + nx + nx - 1]; 300 | p -= data[nxj - 1]; 301 | output[nxj + nx - 1] = p; 302 | } 303 | 304 | /* Bottom Left Corner */ 305 | output[0] = 4.0 * data[0] - data[1] - data[nx]; 306 | /* Bottom Right Corner */ 307 | output[nx - 1] = 4.0 * data[nx - 1] - data[nx - 2] - data[nx + nx - 1]; 308 | /* Top Left Corner */ 309 | p = 4.0 * data[nxny - nx]; 310 | p -= data[nxny - nx + 1]; 311 | p -= data[nxny - nx - nx]; 312 | output[nxny - nx] = p; 313 | /* Top Right Corner */ 314 | p = 4.0 * data[nxny - 1]; 315 | p -= data[nxny - 2]; 316 | p -= data[nxny - 1 - nx]; 317 | output[nxny - 1] = p; 318 | 319 | return; 320 | } 321 | 322 | /* Perform a boolean dilation on an array of size nx x ny. The results are 323 | * saved in the output array. The output array should already be allocated as 324 | * we work on it in place. 325 | * Dilation is the boolean equivalent of a convolution but using logical ors 326 | * instead of a sum. 327 | * We apply the following kernel: 328 | * 1 1 1 329 | * 1 1 1 330 | * 1 1 1 331 | * The binary dilation is not computed for a 1 pixel border around the image. 332 | * These pixels are copied from the input data. Data should be striped along 333 | * the x direction such that the memory location of pixel i,j is 334 | * data[i + nx * j]. 
*/ 336 | void 337 | PyDilate3(bool* data, bool* output, int nx, int ny) 338 | { 339 | PyDoc_STRVAR(PyDilate3__doc__, 340 | "PyDilate3(data, output, nx, ny) -> void\n\n" 341 | "Perform a boolean dilation on an array of size nx x ny. The " 342 | "results are saved in the output array which should already be " 343 | "allocated as we work on it in place. " 344 | "Dilation is the boolean equivalent of a convolution but using " 345 | "logical or instead of a sum. We apply a 3x3 kernel of all ones. " 346 | "Dilation is not computed for a 1 pixel border which is copied " 347 | "from the input data. Data should be striped along the x-axis " 348 | "such that the location of pixel i,j is data[i + nx * j]."); 349 | 350 | /* Precompute the total number of pixels; minor optimization */ 351 | int nxny = nx * ny; 352 | 353 | /* Loop variables */ 354 | int i, j, nxj; 355 | 356 | /* Pixel value p. Each thread needs its own unique copy of this so we don't 357 | initialize this until the pragma below. */ 358 | bool p; 359 | 360 | #pragma omp parallel for firstprivate(output, data, nxny, nx, ny) \ 361 | private(i, j, nxj, p) 362 | 363 | /* Loop through all of the pixels excluding the border */ 364 | for (j = 1; j < ny - 1; j++) { 365 | nxj = nx * j; 366 | for (i = 1; i < nx - 1; i++) { 367 | /*Start in the middle and work out */ 368 | p = data[i + nxj]; 369 | /* Right 1 */ 370 | p = p || data[i + 1 + nxj]; 371 | /* Left 1 */ 372 | p = p || data[i - 1 + nxj]; 373 | /* Up 1 */ 374 | p = p || data[i + nx + nxj]; 375 | /* Down 1 */ 376 | p = p || data[i - nx + nxj]; 377 | /* Up 1 Right 1 */ 378 | p = p || data[i + 1 + nx + nxj]; 379 | /* Up 1 Left 1 */ 380 | p = p || data[i - 1 + nx + nxj]; 381 | /* Down 1 Right 1 */ 382 | p = p || data[i + 1 - nx + nxj]; 383 | /* Down 1 Left 1 */ 384 | p = p || data[i - 1 - nx + nxj]; 385 | 386 | output[i + nxj] = p; 387 | } 388 | } 389 | 390 | #pragma omp parallel firstprivate(output, data, nx, nxny) private(i) 391 | /* For the borders, copy the data from the input array */ 392 | for (i = 0; i < nx; i++) { 393 | output[i] = data[i]; 394 | output[nxny - nx + i] = data[nxny - nx + i]; 395 | } 396 | #pragma omp parallel firstprivate(output, data, nx, ny) private(j, nxj) 397 | for (j = 0; j < ny; j++) { 398 | nxj = nx * j; 399 | output[nxj] = data[nxj]; 400 | output[nxj - 1 + nx] = data[nxj - 1 + nx]; 401 | } 402 | 403 | return; 404 | } 405 | 406 | /* Do niter iterations of boolean dilation on an array of size nx x ny. The 407 | * results are saved in the output array. The output array should already be 408 | * allocated as we work on it in place. 409 | * Dilation is the boolean equivalent of a convolution but using logical ors 410 | * instead of a sum. 411 | * We apply the following kernel: 412 | * 0 1 1 1 0 413 | * 1 1 1 1 1 414 | * 1 1 1 1 1 415 | * 1 1 1 1 1 416 | * 0 1 1 1 0 417 | * The edges are padded with zeros so that the dilation operator is defined for 418 | * all pixels. Data should be striped along the x direction such that the 419 | * memory location of pixel i,j is data[i + nx * j]. 420 | */ 421 | void 422 | PyDilate5(bool* data, bool* output, int niter, int nx, int ny) 423 | { 424 | PyDoc_STRVAR(PyDilate5__doc__, 425 | "PyDilate5(data, output, niter, nx, ny) -> void\n\n" 426 | "Do niter iterations of boolean dilation on an array of size " 427 | "nx x ny. The results are saved in the output array. The output " 428 | "array should already be allocated as we work on it in place. 
" 429 | "Dilation is the boolean equivalent of a convolution but using " 430 | "logical ors instead of a sum. We apply the following kernel:\n" 431 | "0 1 1 1 0\n" 432 | "1 1 1 1 1\n" 433 | "1 1 1 1 1\n" 434 | "1 1 1 1 1\n" 435 | "0 1 1 1 0\n" 436 | "Data should be striped along the x direction such that the " 437 | "location of pixel i,j is data[i + nx * j]."); 438 | 439 | /* Pad the array with a border of zeros */ 440 | int padnx = nx + 4; 441 | int padny = ny + 4; 442 | 443 | /* Precompute the total number of pixels; minor optimization */ 444 | int padnxny = padnx * padny; 445 | int nxny = nx * ny; 446 | 447 | /* The padded array to work on */ 448 | bool* padarr = (bool *) malloc(padnxny * sizeof(bool)); 449 | 450 | /*Loop indices */ 451 | int i, j, nxj, padnxj; 452 | int iter; 453 | 454 | /* Pixel value p. This needs to be unique for each thread so we initialize 455 | * it below inside the pragma. */ 456 | bool p; 457 | 458 | #pragma omp parallel firstprivate(padarr, padnx, padnxny) private(i) 459 | /* Initialize the borders of the padded array to zero */ 460 | for (i = 0; i < padnx; i++) { 461 | padarr[i] = false; 462 | padarr[i + padnx] = false; 463 | padarr[padnxny - padnx + i] = false; 464 | padarr[padnxny - padnx - padnx + i] = false; 465 | } 466 | 467 | #pragma omp parallel firstprivate(padarr, padnx, padny) private(j, padnxj) 468 | for (j = 0; j < padny; j++) { 469 | padnxj = padnx * j; 470 | padarr[padnxj] = false; 471 | padarr[padnxj + 1] = false; 472 | padarr[padnxj + padnx - 1] = false; 473 | padarr[padnxj + padnx - 2] = false; 474 | } 475 | 476 | #pragma omp parallel firstprivate(output, data, nxny) private(i) 477 | /* Initialize the output array to the input data */ 478 | for (i = 0; i < nxny; i++) { 479 | output[i] = data[i]; 480 | } 481 | 482 | /* Outer iteration loop */ 483 | for (iter = 0; iter < niter; iter++) { 484 | #pragma omp parallel for firstprivate(padarr, output, nx, ny, padnx, iter) \ 485 | private(nxj, padnxj, i, j) 486 | /* Initialize the padded array to the output from the latest 487 | * iteration*/ 488 | for (j = 0; j < ny; j++) { 489 | padnxj = padnx * j; 490 | nxj = nx * j; 491 | for (i = 0; i < nx; i++) { 492 | padarr[i + 2 + padnx + padnx + padnxj] = output[i + nxj]; 493 | } 494 | } 495 | 496 | /* Loop over all pixels */ 497 | #pragma omp parallel for firstprivate(padarr, output, nx, ny, padnx, iter) \ 498 | private(nxj, padnxj, i, j, p) 499 | for (j = 0; j < ny; j++) { 500 | nxj = nx * j; 501 | /* Note the + 2 padding in padnxj */ 502 | padnxj = padnx * (j + 2); 503 | for (i = 0; i < nx; i++) { 504 | /* Start with the middle pixel and work out */ 505 | p = padarr[i + 2 + padnxj]; 506 | /* Right 1 */ 507 | p = p || padarr[i + 3 + padnxj]; 508 | /* Left 1 */ 509 | p = p || padarr[i + 1 + padnxj]; 510 | /* Up 1 */ 511 | p = p || padarr[i + 2 + padnx + padnxj]; 512 | /* Down 1 */ 513 | p = p || padarr[i + 2 - padnx + padnxj]; 514 | /* Up 1 Right 1 */ 515 | p = p || padarr[i + 3 + padnx + padnxj]; 516 | /* Up 1 Left 1 */ 517 | p = p || padarr[i + 1 + padnx + padnxj]; 518 | /* Down 1 Right 1 */ 519 | p = p || padarr[i + 3 - padnx + padnxj]; 520 | /* Down 1 Left 1 */ 521 | p = p || padarr[i + 1 - padnx + padnxj]; 522 | /* Right 2 */ 523 | p = p || padarr[i + 4 + padnxj]; 524 | /* Left 2 */ 525 | p = p || padarr[i + padnxj]; 526 | /* Up 2 */ 527 | p = p || padarr[i + 2 + padnx + padnx + padnxj]; 528 | /* Down 2 */ 529 | p = p || padarr[i + 2 - padnx - padnx + padnxj]; 530 | /* Right 2 Up 1 */ 531 | p = p || padarr[i + 4 + padnx + padnxj]; 532 | /* Right 2 
Down 1 */ 533 | p = p || padarr[i + 4 - padnx + padnxj]; 534 | /* Left 2 Up 1 */ 535 | p = p || padarr[i + padnx + padnxj]; 536 | /* Left 2 Down 1 */ 537 | p = p || padarr[i - padnx + padnxj]; 538 | /* Up 2 Right 1 */ 539 | p = p || padarr[i + 3 + padnx + padnx + padnxj]; 540 | /* Up 2 Left 1 */ 541 | p = p || padarr[i + 1 + padnx + padnx + padnxj]; 542 | /* Down 2 Right 1 */ 543 | p = p || padarr[i + 3 - padnx - padnx + padnxj]; 544 | /* Down 2 Left 1 */ 545 | p = p || padarr[i + 1 - padnx - padnx + padnxj]; 546 | 547 | output[i + nxj] = p; 548 | 549 | } 550 | } 551 | 552 | } 553 | free(padarr); 554 | 555 | return; 556 | } 557 | -------------------------------------------------------------------------------- /astroscrappy/astroscrappy.pyx: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | # cython: profile=True, boundscheck=False, nonecheck=False, wraparound=False 3 | # cython: cdivision=True 4 | from __future__ import (absolute_import, division, print_function, 5 | unicode_literals) 6 | """ 7 | Name : astroscrappy: The Speedy Cosmic Ray Annihilation Package in Python 8 | Author : Curtis McCully 9 | Date : October 2014 10 | """ 11 | import numpy as np 12 | cimport numpy as np 13 | 14 | np.import_array() 15 | 16 | cimport cython 17 | from cython.parallel cimport parallel, prange 18 | 19 | from .utils import * 20 | from .utils.median_utils cimport cymedian 21 | 22 | from libc.stdint cimport uint8_t 23 | 24 | ctypedef uint8_t bool 25 | 26 | from libc.stdlib cimport malloc, free 27 | 28 | def detect_cosmics(indat, inmask=None, float sigclip=4.5, float sigfrac=0.3, 29 | float objlim=5.0, float gain=1.0, float readnoise=6.5, 30 | float satlevel=65536.0, float pssl=0.0, int niter=4, 31 | sepmed=True, cleantype='meanmask', fsmode='median', 32 | psfmodel='gauss', float psffwhm=2.5, int psfsize=7, 33 | psfk=None, float psfbeta=4.765, verbose=False): 34 | """detect_cosmics(indat, inmask=None, sigclip=4.5, sigfrac=0.3, objlim=5.0, 35 | gain=1.0, readnoise=6.5, satlevel=65536.0, pssl=0.0, 36 | niter=4, sepmed=True, cleantype='meanmask', 37 | fsmode='median', psfmodel='gauss', psffwhm=2.5, 38 | psfsize=7, psfk=None, psfbeta=4.765, verbose=False)\n 39 | Detect cosmic rays in a numpy array. 40 | 41 | If you use this code, please add this repository address in a footnote: 42 | https://github.com/astropy/astroscrappy 43 | 44 | Please cite the original paper which can be found at: 45 | http://www.astro.yale.edu/dokkum/lacosmic/ 46 | 47 | van Dokkum 2001, PASP, 113, 789, 1420 48 | (article : http://adsabs.harvard.edu/abs/2001PASP..113.1420V) 49 | 50 | Parameters 51 | ---------- 52 | indat : float numpy array 53 | Input data array that will be used for cosmic ray detection. 54 | 55 | inmask : boolean numpy array, optional 56 | Input bad pixel mask. Values of True will be ignored in the cosmic ray 57 | detection/cleaning process. Default: None. 58 | 59 | sigclip : float, optional 60 | Laplacian-to-noise limit for cosmic ray detection. Lower values will 61 | flag more pixels as cosmic rays. Default: 4.5. 62 | 63 | sigfrac : float, optional 64 | Fractional detection limit for neighboring pixels. For cosmic ray 65 | neighbor pixels, a laplacian-to-noise detection limit of 66 | sigfrac * sigclip will be used. Default: 0.3. 67 | 68 | objlim : float, optional 69 | Minimum contrast between Laplacian image and the fine structure image. 70 | Increase this value if cores of bright stars are flagged as cosmic 71 | rays. 
Default: 5.0. 72 | 73 | pssl : float, optional 74 | Previously subtracted sky level in ADU. We always need to work in 75 | electrons for cosmic ray detection, so we need to know the sky level 76 | that has been subtracted so we can add it back in. Default: 0.0. 77 | 78 | gain : float, optional 79 | Gain of the image (electrons / ADU). We always need to work in 80 | electrons for cosmic ray detection. Default: 1.0. 81 | 82 | readnoise : float, optional 83 | Read noise of the image (electrons). Used to generate the noise model 84 | of the image. Default: 6.5. 85 | 86 | satlevel : float, optional 87 | Saturation level of the image (electrons). This value is used to 88 | detect saturated stars and pixels at or above this level are added to 89 | the mask. Default: 65536.0. 90 | 91 | niter : int, optional 92 | Number of iterations of the LA Cosmic algorithm to perform. Default: 4. 93 | 94 | sepmed : boolean, optional 95 | Use the separable median filter instead of the full median filter. 96 | The separable median is not identical to the full median filter, but 97 | they are approximately the same and the separable median filter is 98 | significantly faster and still detects cosmic rays well. Default: True. 99 | 100 | cleantype : {'median', 'medmask', 'meanmask', 'idw'}, optional 101 | Set which clean algorithm is used:\n 102 | 'median': An unmasked 5x5 median filter\n 103 | 'medmask': A masked 5x5 median filter\n 104 | 'meanmask': A masked 5x5 mean filter\n 105 | 'idw': A masked 5x5 inverse distance weighted interpolation\n 106 | Default: "meanmask". 107 | 108 | fsmode : {'median', 'convolve'}, optional 109 | Method to build the fine structure image:\n 110 | 'median': Use the median filter in the standard LA Cosmic algorithm 111 | 'convolve': Convolve the image with the psf kernel to calculate the 112 | fine structure image. 113 | Default: 'median'. 114 | 115 | psfmodel : {'gauss', 'gaussx', 'gaussy', 'moffat'}, optional 116 | Model to use to generate the psf kernel if fsmode == 'convolve' and 117 | psfk is None. The current choices are Gaussian and Moffat profiles. 118 | 'gauss' and 'moffat' produce circular PSF kernels. The 'gaussx' and 119 | 'gaussy' produce Gaussian kernels in the x and y directions 120 | respectively. Default: "gauss". 121 | 122 | psffwhm : float, optional 123 | Full Width Half Maximum of the PSF to use to generate the kernel. 124 | Default: 2.5. 125 | 126 | psfsize : int, optional 127 | Size of the kernel to calculate. Returned kernel will have size 128 | psfsize x psfsize. psfsize should be odd. Default: 7. 129 | 130 | psfk : float numpy array, optional 131 | PSF kernel array to use for the fine structure image if 132 | fsmode == 'convolve'. If None and fsmode == 'convolve', we calculate 133 | the psf kernel using 'psfmodel'. Default: None. 134 | 135 | psfbeta : float, optional 136 | Moffat beta parameter. Only used if fsmode=='convolve' and 137 | psfmodel=='moffat'. Default: 4.765. 138 | 139 | verbose : boolean, optional 140 | Print to the screen or not. Default: False. 141 | 142 | Returns 143 | ------- 144 | crmask : boolean numpy array 145 | The cosmic ray mask (boolean) array with values of True where there are 146 | cosmic ray detections. 147 | 148 | cleanarr : float numpy array 149 | The cleaned data array. 150 | 151 | Notes 152 | ----- 153 | To reproduce the most similar behavior to the original LA Cosmic 154 | (written in IRAF), set inmask = None, satlevel = np.inf, sepmed=False, 155 | cleantype='medmask', and fsmode='median'. 
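[Annotation] As a usage sketch of the interface documented above (synthetic data; the keyword values shown are simply the documented defaults):

```python
import numpy as np
from astroscrappy import detect_cosmics

# Synthetic frame: flat sky plus two single-pixel "cosmic ray" hits.
image = np.random.normal(1000.0, 10.0, (256, 256)).astype(np.float32)
image[50, 60] += 20000.0
image[120, 30] += 15000.0

crmask, cleaned = detect_cosmics(image, gain=1.0, readnoise=6.5,
                                 sigclip=4.5, sigfrac=0.3, objlim=5.0)
print(crmask.sum(), "pixels flagged as cosmic rays")
```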
156 | 157 | The original IRAF version distinguishes between spectroscopic and imaging 158 | data. This version does not. After sky subtracting the spectroscopic data, 159 | this version will work well. The 1-d 'gaussx' and 'gaussy' values for 160 | psfmodel can also be used for spectroscopic data (and may even alleviate 161 | the need to do sky subtraction, but this still requires more testing). 162 | """ 163 | 164 | # Grab the sizes of the input array 165 | cdef int nx = indat.shape[1] 166 | cdef int ny = indat.shape[0] 167 | 168 | # Tell the compiler about the loop indices so it can optimize them. 169 | cdef int i, j = 0 170 | 171 | # Make a copy of the data as the cleanarr that we work on 172 | # This guarantees that the data will be contiguous and makes sure we 173 | # don't edit the input data. 174 | cleanarr = np.empty((ny, nx), dtype=np.float32) 175 | # Set the initial values to those of the data array 176 | cleanarr[:, :] = indat[:, :] 177 | 178 | # Setup the mask 179 | if inmask is None: 180 | # By default don't mask anything 181 | mask = np.zeros((ny, nx), dtype=np.uint8, order='C') 182 | else: 183 | # Make a copy of the input mask 184 | mask = np.empty((ny, nx), dtype=np.uint8, order='C') 185 | mask[:, :] = inmask[:, :] 186 | 187 | # Add back in the previously subtracted sky level and multiply by the gain 188 | # The statistics only work properly with electrons. 189 | cleanarr += pssl 190 | cleanarr *= gain 191 | 192 | # Find the saturated stars and add them to the mask 193 | update_mask(np.asarray(cleanarr), np.asarray(mask), satlevel, sepmed) 194 | 195 | # Find the unmasked pixels to calculate the sky. 196 | gooddata = np.zeros(int(nx * ny - np.asarray(mask).sum()), dtype=np.float32, 197 | order='c') 198 | 199 | igoodpix = 0 200 | 201 | gooddata[:] = cleanarr[np.logical_not(mask)] 202 | 203 | # Get the default background level for large cosmic rays. 204 | background_level = median(gooddata, len(gooddata)) 205 | del gooddata 206 | 207 | # Set up the psf kernel if necessary. 
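[Annotation] The bookkeeping above, restated in plain NumPy for reference (np.median stands in for the package's own fast median routine; the array values here are made up):

```python
import numpy as np

indat = np.random.normal(100.0, 5.0, (64, 64)).astype(np.float32)
mask = np.zeros(indat.shape, dtype=bool)
pssl, gain = 50.0, 2.0

cleanarr = (indat + pssl) * gain               # statistics require electrons
background_level = np.median(cleanarr[~mask])  # sky from unmasked pixels only
```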
208 | if psfk is None and fsmode == 'convolve': 209 | # calculate the psf kernel psfk 210 | if psfmodel == 'gauss': 211 | psfk = gausskernel(psffwhm, psfsize) 212 | elif psfmodel == 'gaussx': 213 | psfk = gaussxkernel(psffwhm, psfsize) 214 | elif psfmodel == 'gaussy': 215 | psfk = gaussykernel(psffwhm, psfsize) 216 | elif psfmodel == 'moffat': 217 | psfk = moffatkernel(psffwhm, psfbeta, psfsize) 218 | else: 219 | raise ValueError('Please choose a supported PSF model.') 220 | 221 | # Define a cosmic ray mask 222 | # This is what will be returned at the end 223 | crmask = np.zeros((ny, nx), dtype=np.uint8, order='C') 224 | 225 | # Calculate the detection limit for neighbor pixels 226 | cdef float sigcliplow = sigfrac * sigclip 227 | 228 | # Run LA Cosmic for up to niter iterations 229 | # We stop if no more cosmic ray pixels are found (quite rare) 230 | if verbose: 231 | print("Starting {} L.A.Cosmic iterations".format(niter)) 232 | for i in range(niter): 233 | if verbose: 234 | print("Iteration {}:".format(i + 1)) 235 | 236 | # Detect the cosmic rays 237 | 238 | # We subsample, convolve, clip negative values, 239 | # and rebin to original size 240 | subsam = subsample(cleanarr) 241 | 242 | conved = laplaceconvolve(subsam) 243 | del subsam 244 | 245 | conved[conved < 0] = 0.0 246 | # This is called L+ in the original LA Cosmic/cosmics.py 247 | s = rebin(conved) 248 | del conved 249 | 250 | # Build the noise map, to compare the Laplacian to 251 | if sepmed: 252 | m5 = sepmedfilt7(cleanarr) 253 | else: 254 | m5 = medfilt5(cleanarr) 255 | 256 | # Clip noise so that we can take a square root 257 | m5[m5 < 0.00001] = 0.00001 258 | noise = np.sqrt(m5 + readnoise * readnoise) 259 | 260 | if cleantype != 'median': 261 | del m5 262 | 263 | # Laplacian signal to noise ratio : 264 | s /= 2.0 * noise 265 | # the 2.0 is from the 2x2 subsampling 266 | # This s is called sigmap in the original lacosmic.cl 267 | 268 | if sepmed: 269 | sp = sepmedfilt7(s) 270 | else: 271 | sp = medfilt5(s) 272 | 273 | # Remove the large structures (s prime) : 274 | sp = s - sp 275 | del s 276 | 277 | # Build the fine structure image : 278 | if fsmode == 'convolve': 279 | f = convolve(cleanarr, psfk) 280 | elif fsmode == 'median': 281 | if sepmed: 282 | f = sepmedfilt5(cleanarr) 283 | else: 284 | f = medfilt3(cleanarr) 285 | else: 286 | raise ValueError('Please choose a valid fine structure mode.') 287 | 288 | if sepmed: 289 | m7 = sepmedfilt9(f) 290 | else: 291 | m7 = medfilt7(f) 292 | 293 | f = (f - m7) / noise 294 | # Clip f as we will divide by f. Similar to the IRAF version. 295 | f[f < 0.01] = 0.01 296 | 297 | del m7 298 | del noise 299 | 300 | # Find the candidate cosmic rays 301 | goodpix = np.logical_not(mask) 302 | cosmics = np.logical_and(sp > sigclip, goodpix) 303 | # Note the sp/f and not lplus/f due to the f = f/noise above. 304 | cosmics = np.logical_and(cosmics, (sp / f) > objlim) 305 | del f 306 | 307 | # What follows is a special treatment for neighbors, with more relaxed 308 | # constraints. 309 | # We grow these cosmics a first time to determine the immediate 310 | # neighborhood. 
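[Annotation] The detection loop continues below. Schematically, one pass builds the Laplacian S/N map and candidate mask as in this sketch, where scipy.ndimage stands in for the package's fast (separable) median filters and convolution, so the numerics are approximate rather than the package's own:

```python
import numpy as np
from scipy.ndimage import convolve, median_filter

def lacosmic_snr_pass(clean, readnoise, sigclip, objlim):
    lap = np.array([[0, -1, 0], [-1, 4, -1], [0, -1, 0]], dtype=np.float32)
    ny, nx = clean.shape
    # L+: subsample 2x2, Laplacian, clip negatives, rebin back down
    sub = np.repeat(np.repeat(clean, 2, axis=0), 2, axis=1)
    lplus = np.clip(convolve(sub, lap), 0.0, None)
    lplus = lplus.reshape(ny, 2, nx, 2).mean(axis=(1, 3))
    # Noise model from a 5x5 median, then the Laplacian S/N map
    m5 = np.clip(median_filter(clean, 5), 1e-5, None)
    noise = np.sqrt(m5 + readnoise ** 2)
    s = lplus / (2.0 * noise)           # 2.0 from the 2x2 subsampling
    sp = s - median_filter(s, 5)        # remove smooth large-scale structure
    f = median_filter(clean, 3)         # fine structure image
    f = np.clip((f - median_filter(f, 7)) / noise, 0.01, None)
    return (sp > sigclip) & (sp / f > objlim)
```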
311 | cosmics = dilate3(cosmics) 312 | cosmics = np.logical_and(cosmics, goodpix) 313 | # From this grown set, we keep those that have sp > sigclip 314 | cosmics = np.logical_and(sp > sigclip, cosmics) 315 | 316 | # Now we repeat this procedure, but lower the detection limit to sigcliplow 317 | cosmics = dilate3(cosmics) 318 | cosmics = np.logical_and(cosmics, goodpix) 319 | 320 | del goodpix 321 | cosmics = np.logical_and(sp > sigcliplow, cosmics) 322 | del sp 323 | 324 | # Our CR counter 325 | numcr = cosmics.sum() 326 | 327 | # Update the crmask with the cosmics we have found 328 | crmask[:, :] = np.logical_or(crmask, cosmics)[:, :] 329 | del cosmics 330 | if verbose: 331 | print("{} cosmic pixels this iteration".format(numcr)) 332 | 333 | # If we didn't find anything, we're done. 334 | if numcr == 0: 335 | break 336 | 337 | # otherwise clean the image and iterate 338 | if cleantype == 'median': 339 | # Unmasked median filter 340 | cleanarr[crmask] = m5[crmask] 341 | del m5 342 | # Masked mean filter 343 | elif cleantype == 'meanmask': 344 | clean_meanmask(cleanarr, crmask, mask, nx, ny, background_level) 345 | # Masked median filter 346 | elif cleantype == 'medmask': 347 | clean_medmask(cleanarr, crmask, mask, nx, ny, background_level) 348 | # Inverse distance weighted interpolation 349 | elif cleantype == 'idw': 350 | clean_idwinterp(cleanarr, crmask, mask, nx, ny, background_level) 351 | else: 352 | raise ValueError("""cleantype must be one of the following values: 353 | [median, meanmask, medmask, idw]""") 354 | 355 | return (crmask.astype(np.bool), cleanarr) 356 | 357 | 358 | def update_mask(np.ndarray[np.float32_t, ndim=2, mode='c', cast=True] data, 359 | np.ndarray[np.uint8_t, ndim=2, mode='c', cast=True] mask, 360 | float satlevel, bool sepmed): 361 | """update_mask(data, mask, satlevel, sepmed)\n 362 | Find saturated stars and put them in the mask. 363 | 364 | This can then be used to avoid these regions in cosmic detection and 365 | cleaning procedures. The median filter is used to find large symmetric 366 | regions of saturated pixels (i.e. saturated stars). 367 | 368 | Parameters 369 | ---------- 370 | data : float numpy array 371 | The data array in which we look for saturated stars. 372 | 373 | mask : boolean numpy array 374 | Bad pixel mask. This mask will be dilated using dilate3 and then 375 | combined with the saturated star mask. 376 | 377 | satlevel : float 378 | Saturation level of the image. This value can be lowered if the cores 379 | of bright (saturated) stars are not being masked. 380 | 381 | sepmed : boolean 382 | Use the separable median or not. The separable median is not identical 383 | to the full median filter, but they are approximately the same and the 384 | separable median filter is significantly faster. 385 | """ 386 | 387 | # Find all of the saturated pixels 388 | satpixels = data >= satlevel 389 | 390 | # Use the median filter to estimate the large scale structure 391 | if sepmed: 392 | m5 = sepmedfilt7(data) 393 | else: 394 | m5 = medfilt5(data) 395 | 396 | # Use the median filtered image to find the cores of saturated stars 397 | # The 10 here is arbitrary. Malte Tewes uses 2.0 in cosmics.py, but I 398 | # wanted to get more of the cores of saturated stars. 
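[Annotation] The function body continues below. For intuition, the same saturated-star masking expressed in SciPy terms; note that binary_dilation's default cross-shaped structuring element only approximates the dilate3/dilate5 kernels the package actually uses:

```python
import numpy as np
from scipy.ndimage import binary_dilation, median_filter

def saturated_star_mask(data, satlevel):
    # Saturated pixels whose 5x5 median is also high mark star cores.
    cores = (data >= satlevel) & (median_filter(data, 5) > satlevel / 10.0)
    # Grow the core mask to cover the wings, roughly like dilate5(..., 2).
    return binary_dilation(cores, iterations=2)
```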
399 | satpixels = np.logical_and(satpixels, m5 > (satlevel / 10.0)) 400 | 401 | # Grow the input mask by one pixel to make sure we cover bad pixels 402 | grow_mask = dilate3(mask) 403 | 404 | # Dilate the saturated star mask to remove edge effects in the mask 405 | dilsatpixels = dilate5(satpixels, 2) 406 | del satpixels 407 | # Combine the saturated pixels with the given input mask 408 | # Note, we work on the mask pixels in place 409 | mask[:, :] = np.logical_or(dilsatpixels, grow_mask)[:, :] 410 | del grow_mask 411 | 412 | 413 | cdef void clean_meanmask(float[:, ::1] cleanarr, bool[:, ::1] crmask, 414 | bool[:, ::1] mask, int nx, int ny, 415 | float background_level): 416 | """clean_meanmask(cleanarr, crmask, mask, nx, ny, background_level)\n 417 | Clean the bad pixels in cleanarr using a 5x5 masked mean filter. 418 | 419 | Parameters 420 | ---------- 421 | cleanarr : float numpy array 422 | The array to be cleaned. 423 | 424 | crmask : boolean numpy array 425 | Cosmic ray mask. Pixels with a value of True in this mask will be 426 | cleaned. 427 | 428 | mask : boolean numpy array 429 | Bad pixel mask. Values of True indicate bad pixels. 430 | 431 | nx : int 432 | Size of cleanarr in the x-direction. Note cleanarr has dimensions 433 | ny x nx. 434 | 435 | ny : int 436 | Size of cleanarr in the y-direction. Note cleanarr has dimensions 437 | ny x nx. 438 | 439 | background_level : float 440 | Average value of the background. This value will be used if there are 441 | no good pixels in a 5x5 region. 442 | """ 443 | 444 | # Go through all of the pixels, ignore the borders 445 | cdef int i, j, k, l, numpix 446 | cdef float s 447 | cdef bool badpix 448 | 449 | with nogil, parallel(): 450 | # For each pixel 451 | for j in prange(2, ny - 2): 452 | for i in range(2, nx - 2): 453 | # if the pixel is in the crmask 454 | if crmask[j, i]: 455 | numpix = 0 456 | s = 0.0 457 | 458 | # sum the 25 pixels around the pixel 459 | # ignoring any pixels that are masked 460 | for l in range(-2, 3): 461 | for k in range(-2, 3): 462 | badpix = crmask[j + l, i + k] 463 | badpix = badpix or mask[j + l, i + k] 464 | if not badpix: 465 | s = s + cleanarr[j + l, i + k] 466 | numpix = numpix + 1 467 | 468 | # if the pixel count is 0 469 | # then put in the background of the image 470 | if numpix == 0: 471 | s = background_level 472 | else: 473 | # else take the mean 474 | s = s / float(numpix) 475 | 476 | cleanarr[j, i] = s 477 | 478 | 479 | cdef void clean_medmask(float[:, ::1] cleanarr, bool[:, ::1] crmask, 480 | bool[:, ::1] mask, int nx, int ny, 481 | float background_level): 482 | """clean_medmask(cleanarr, crmask, mask, nx, ny, background_level)\n 483 | Clean the bad pixels in cleanarr using a 5x5 masked median filter. 484 | 485 | Parameters 486 | ---------- 487 | cleanarr : float numpy array 488 | The array to be cleaned. 489 | 490 | crmask : boolean numpy array 491 | Cosmic ray mask. Pixels with a value of True in this mask will be 492 | cleaned. 493 | 494 | mask : boolean numpy array 495 | Bad pixel mask. Values of True indicate bad pixels. 496 | 497 | nx : int 498 | Size of cleanarr in the x-direction. Note cleanarr has dimensions 499 | ny x nx. 500 | 501 | ny : int 502 | Size of cleanarr in the y-direction. Note cleanarr has dimensions 503 | ny x nx. 504 | 505 | background_level : float 506 | Average value of the background. This value will be used if there are 507 | no good pixels in a 5x5 region. 
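[Annotation] The masked 5x5 mean implemented in clean_meanmask above can be written compactly in Python. A slow reference version; unlike the nogil loop, it reads from a copy rather than updating cleanarr as it sweeps:

```python
import numpy as np

def masked_mean_clean(cleanarr, crmask, mask, background_level):
    src = cleanarr.copy()
    bad = crmask | mask
    ys, xs = np.nonzero(crmask[2:-2, 2:-2])
    for j, i in zip(ys + 2, xs + 2):
        # Good pixels in the surrounding 5x5 window
        win = src[j - 2:j + 3, i - 2:i + 3][~bad[j - 2:j + 3, i - 2:i + 3]]
        cleanarr[j, i] = win.mean() if win.size else background_level
```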
508 | """ 509 | # Go through all of the pixels, ignore the borders 510 | cdef int k, l, i, j, numpix 511 | cdef float * medarr 512 | cdef bool badpixel 513 | 514 | # For each pixel 515 | with nogil, parallel(): 516 | medarr = < float * > malloc(25 * sizeof(float)) 517 | for j in prange(2, ny - 2): 518 | for i in range(2, nx - 2): 519 | # if the pixel is in the crmask 520 | if crmask[j, i]: 521 | numpix = 0 522 | # median the 25 pixels around the pixel ignoring 523 | # any pixels that are masked 524 | for l in range(-2, 3): 525 | for k in range(-2, 3): 526 | badpixel = crmask[j + l, i + k] 527 | badpixel = badpixel or mask[j + l, i + k] 528 | if not badpixel: 529 | medarr[numpix] = cleanarr[j + l, i + k] 530 | numpix = numpix + 1 531 | 532 | # if the pixels count is 0 then put in the background 533 | # of the image 534 | if numpix == 0: 535 | cleanarr[j, i] = background_level 536 | else: 537 | # else take the mean 538 | cleanarr[j, i] = cymedian(medarr, numpix) 539 | #cleanarr[j, i] = PyMedian(medarr, numpix) 540 | free(medarr) 541 | 542 | 543 | cdef void clean_idwinterp(float[:, ::1] cleanarr, bool[:, ::1] crmask, 544 | bool[:, ::1] mask, int nx, int ny, 545 | float background_level): 546 | """clean_idwinterp(cleanarr, crmask, mask, nx, ny, background_level)\n 547 | Clean the bad pixels in cleanarr using a 5x5 using inverse distance 548 | weighted interpolation. 549 | 550 | Parameters 551 | ---------- 552 | cleanarr : float numpy array 553 | The array to be cleaned. 554 | 555 | crmask : boolean numpy array 556 | Cosmic ray mask. Pixels with a value of True in this mask will be 557 | cleaned. 558 | 559 | mask : boolean numpy array 560 | Bad pixel mask. Values of True indicate bad pixels. 561 | 562 | nx : int 563 | Size of cleanarr in the x-direction (int). Note cleanarr has dimensions 564 | ny x nx. 565 | 566 | ny : int 567 | Size of cleanarr in the y-direction (int). Note cleanarr has dimensions 568 | ny x nx. 569 | 570 | background_level : float 571 | Average value of the background. This value will be used if there are 572 | no good pixels in a 5x5 region. 573 | """ 574 | 575 | # Go through all of the pixels, ignore the borders 576 | cdef int i, j, k, l 577 | cdef float f11, f12, f21, f22 = background_level 578 | cdef int x1, x2, y1, y2 579 | weightsarr = np.array([[0.35355339, 0.4472136, 0.5, 0.4472136, 0.35355339], 580 | [0.4472136, 0.70710678, 1., 0.70710678, 0.4472136], 581 | [0.5, 1., 0., 1., 0.5], 582 | [0.4472136, 0.70710678, 1., 0.70710678, 0.4472136], 583 | [0.35355339, 0.4472136, 0.5, 0.4472136, 0.35355339]], 584 | dtype=np.float32) 585 | cdef float[:, ::1] weights = weightsarr 586 | cdef float wsum 587 | cdef float val 588 | cdef int x, y 589 | # For each pixel 590 | with nogil, parallel(): 591 | 592 | for j in prange(2, ny - 2): 593 | for i in range(2, nx - 2): 594 | # if the pixel is in the crmask 595 | if crmask[j, i]: 596 | wsum = 0.0 597 | val = 0.0 598 | for l in range(-2, 3): 599 | y = j + l 600 | for k in range(-2, 3): 601 | x = i + k 602 | if not (crmask[y, x] or mask[y, x]): 603 | val = val + weights[l, k] * cleanarr[y, x] 604 | wsum = wsum + weights[l, k] 605 | if wsum < 1e-6: 606 | cleanarr[j, i] = background_level 607 | else: 608 | cleanarr[j, i] = val / wsum 609 | 610 | 611 | def gausskernel(float psffwhm, int kernsize): 612 | """gausskernel(psffwhm, kernsize)\n 613 | Calculate a circular Gaussian psf kernel. 614 | 615 | Parameters 616 | ---------- 617 | psffwhm : float 618 | Full Width Half Maximum of the PSF to use to generate the kernel. 
619 | 620 | kernsize : int 621 | Size of the kernel to calculate. kernsize should be odd. 622 | Returned kernel will have size kernsize x kernsize. 623 | 624 | Returns 625 | ------- 626 | kernel : float numpy array 627 | Gaussian PSF kernel with size kernsize x kernsize. 628 | """ 629 | kernel = np.zeros((kernsize, kernsize), dtype=np.float32) 630 | # Make a grid of x and y values 631 | x = np.tile(np.arange(kernsize) - kernsize // 2, (kernsize, 1)) 632 | y = x.transpose().copy() 633 | # Calculate the offset, r 634 | r2 = x * x + y * y 635 | # Calculate the kernel 636 | sigma2 = psffwhm * psffwhm / 2.35482 / 2.35482 637 | kernel[:, :] = np.exp(-0.5 * r2 / sigma2)[:, :] 638 | # Normalize the kernel 639 | kernel /= kernel.sum() 640 | return kernel 641 | 642 | 643 | def gaussxkernel(float psffwhm, int kernsize): 644 | """gaussxkernel(psffwhm, kernsize)\n 645 | Calculate a Gaussian kernel in the x-direction. 646 | 647 | This can be used for spectroscopic data. 648 | 649 | Parameters 650 | ---------- 651 | psffwhm : float 652 | Full Width Half Maximum of the PSF to use to generate the kernel. 653 | 654 | kernsize : int 655 | Size of the kernel to calculate. kernsize should be odd. 656 | Returned kernel will have size kernsize x kernsize. 657 | 658 | Returns 659 | ------- 660 | kernel : float numpy array 661 | Gaussian(x) kernel with size kernsize x kernsize. 662 | """ 663 | kernel = np.zeros((kernsize, kernsize), dtype=np.float32) 664 | # Make a grid of x and y values 665 | x = np.tile(np.arange(kernsize) - kernsize // 2, (kernsize, 1)) 666 | # Calculate the kernel 667 | sigma2 = psffwhm * psffwhm / 2.35482 / 2.35482 668 | kernel[:, :] = np.exp(-0.5 * x * x / sigma2)[:, :] 669 | # Normalize the kernel 670 | kernel /= kernel.sum() 671 | return kernel 672 | 673 | 674 | def gaussykernel(float psffwhm, int kernsize): 675 | """gaussykernel(psffwhm, kernsize)\n 676 | Calculate a Gaussian kernel in the y-direction. 677 | 678 | This can be used for spectroscopic data. 679 | 680 | Parameters 681 | ---------- 682 | psffwhm : float 683 | Full Width Half Maximum of the PSF to use to generate the kernel. 684 | 685 | kernsize : int 686 | Size of the kernel to calculate. kernsize should be odd. 687 | Returned kernel will have size kernsize x kernsize. 688 | 689 | Returns 690 | ------- 691 | kernel : float numpy array 692 | Gaussian(y) kernel with size kernsize x kernsize. 693 | """ 694 | kernel = np.zeros((kernsize, kernsize), dtype=np.float32) 695 | # Make a grid of x and y values 696 | x = np.tile(np.arange(kernsize) - kernsize // 2, (kernsize, 1)) 697 | y = x.transpose().copy() 698 | # Calculate the kernel 699 | sigma2 = psffwhm * psffwhm / 2.35482 / 2.35482 700 | kernel[:, :] = np.exp(-0.5 * y * y / sigma2)[:, :] 701 | # Normalize the kernel 702 | kernel /= kernel.sum() 703 | return kernel 704 | 705 | 706 | cdef moffatkernel(float psffwhm, float beta, int kernsize): 707 | """moffatkernel(psffwhm, beta, kernsize)\n 708 | Calculate a Moffat psf kernel. 709 | 710 | Parameters 711 | ---------- 712 | psffwhm : float 713 | Full Width Half Maximum of the PSF to use to generate the kernel. 714 | 715 | beta : float 716 | Moffat beta parameter 717 | 718 | kernsize : int 719 | Size of the kernel to calculate. Returned kernel will have size 720 | kernsize x kernsize. kernsize should be odd. 721 | 722 | Returns 723 | ------- 724 | kernel : float numpy array 725 | Moffat kernel with size kernsize x kernsize. 
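[Annotation] All of the kernel builders here share the same FWHM-to-sigma conversion, 2.35482 ≈ 2√(2 ln 2). A quick standalone check of the Gaussian case, recomputed with NumPy rather than calling the package:

```python
import numpy as np

fwhm, size = 2.5, 7
x = np.tile(np.arange(size) - size // 2, (size, 1))
sigma2 = (fwhm / 2.35482) ** 2          # 2.35482 ~ 2 * sqrt(2 * ln 2)
kernel = np.exp(-0.5 * (x * x + x.T * x.T) / sigma2)
kernel /= kernel.sum()
assert abs(2.0 * np.sqrt(2.0 * np.log(2.0)) - 2.35482) < 1e-5
assert kernel[size // 2, size // 2] == kernel.max()  # centered for odd sizes
```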
726 | """ 727 | kernel = np.zeros((kernsize, kernsize), dtype=np.float32) 728 | # Make a grid of x and y values 729 | x = np.tile(np.arange(kernsize) - kernsize / 2, (kernsize, 1)) 730 | y = x.transpose().copy() 731 | # Calculate the offset r 732 | r = np.sqrt(x * x + y * y) 733 | # Calculate the kernel 734 | hwhm = psffwhm / 2.0 735 | alpha = hwhm / np.sqrt(np.power(2.0, (1.0 / beta)) - 1.0) 736 | kernel[:, :] = (np.power(1.0 + (r * r / alpha / alpha), -1.0 * beta))[:, :] 737 | # Normalize the kernel. 738 | kernel /= kernel.sum() 739 | return kernel 740 | -------------------------------------------------------------------------------- /ah_bootstrap.py: -------------------------------------------------------------------------------- 1 | """ 2 | This bootstrap module contains code for ensuring that the astropy_helpers 3 | package will be importable by the time the setup.py script runs. It also 4 | includes some workarounds to ensure that a recent-enough version of setuptools 5 | is being used for the installation. 6 | 7 | This module should be the first thing imported in the setup.py of distributions 8 | that make use of the utilities in astropy_helpers. If the distribution ships 9 | with its own copy of astropy_helpers, this module will first attempt to import 10 | from the shipped copy. However, it will also check PyPI to see if there are 11 | any bug-fix releases on top of the current version that may be useful to get 12 | past platform-specific bugs that have been fixed. When running setup.py, use 13 | the ``--offline`` command-line option to disable the auto-upgrade checks. 14 | 15 | When this module is imported or otherwise executed it automatically calls a 16 | main function that attempts to read the project's setup.cfg file, which it 17 | checks for a configuration section called ``[ah_bootstrap]`` the presences of 18 | that section, and options therein, determine the next step taken: If it 19 | contains an option called ``auto_use`` with a value of ``True``, it will 20 | automatically call the main function of this module called 21 | `use_astropy_helpers` (see that function's docstring for full details). 22 | Otherwise no further action is taken (however, 23 | ``ah_bootstrap.use_astropy_helpers`` may be called manually from within the 24 | setup.py script). 25 | 26 | Additional options in the ``[ah_boostrap]`` section of setup.cfg have the same 27 | names as the arguments to `use_astropy_helpers`, and can be used to configure 28 | the bootstrap script when ``auto_use = True``. 29 | 30 | See https://github.com/astropy/astropy-helpers for more details, and for the 31 | latest version of this module. 
32 | """ 33 | 34 | import contextlib 35 | import errno 36 | import imp 37 | import io 38 | import locale 39 | import os 40 | import re 41 | import subprocess as sp 42 | import sys 43 | 44 | try: 45 | from ConfigParser import ConfigParser, RawConfigParser 46 | except ImportError: 47 | from configparser import ConfigParser, RawConfigParser 48 | 49 | 50 | if sys.version_info[0] < 3: 51 | _str_types = (str, unicode) 52 | _text_type = unicode 53 | PY3 = False 54 | else: 55 | _str_types = (str, bytes) 56 | _text_type = str 57 | PY3 = True 58 | 59 | 60 | # What follows are several import statements meant to deal with install-time 61 | # issues with either missing or misbehaving pacakges (including making sure 62 | # setuptools itself is installed): 63 | 64 | 65 | # Some pre-setuptools checks to ensure that either distribute or setuptools >= 66 | # 0.7 is used (over pre-distribute setuptools) if it is available on the path; 67 | # otherwise the latest setuptools will be downloaded and bootstrapped with 68 | # ``ez_setup.py``. This used to be included in a separate file called 69 | # setuptools_bootstrap.py; but it was combined into ah_bootstrap.py 70 | try: 71 | import pkg_resources 72 | _setuptools_req = pkg_resources.Requirement.parse('setuptools>=0.7') 73 | # This may raise a DistributionNotFound in which case no version of 74 | # setuptools or distribute is properly installed 75 | _setuptools = pkg_resources.get_distribution('setuptools') 76 | if _setuptools not in _setuptools_req: 77 | # Older version of setuptools; check if we have distribute; again if 78 | # this results in DistributionNotFound we want to give up 79 | _distribute = pkg_resources.get_distribution('distribute') 80 | if _setuptools != _distribute: 81 | # It's possible on some pathological systems to have an old version 82 | # of setuptools and distribute on sys.path simultaneously; make 83 | # sure distribute is the one that's used 84 | sys.path.insert(1, _distribute.location) 85 | _distribute.activate() 86 | imp.reload(pkg_resources) 87 | except: 88 | # There are several types of exceptions that can occur here; if all else 89 | # fails bootstrap and use the bootstrapped version 90 | from ez_setup import use_setuptools 91 | use_setuptools() 92 | 93 | 94 | # Note: The following import is required as a workaround to 95 | # https://github.com/astropy/astropy-helpers/issues/89; if we don't import this 96 | # module now, it will get cleaned up after `run_setup` is called, but that will 97 | # later cause the TemporaryDirectory class defined in it to stop working when 98 | # used later on by setuptools 99 | try: 100 | import setuptools.py31compat 101 | except ImportError: 102 | pass 103 | 104 | 105 | # matplotlib can cause problems if it is imported from within a call of 106 | # run_setup(), because in some circumstances it will try to write to the user's 107 | # home directory, resulting in a SandboxViolation. See 108 | # https://github.com/matplotlib/matplotlib/pull/4165 109 | # Making sure matplotlib, if it is available, is imported early in the setup 110 | # process can mitigate this (note importing matplotlib.pyplot has the same 111 | # issue) 112 | try: 113 | import matplotlib 114 | matplotlib.use('Agg') 115 | import matplotlib.pyplot 116 | except: 117 | # Ignore if this fails for *any* reason* 118 | pass 119 | 120 | 121 | # End compatibility imports... 
122 | 123 | 124 | # In case it didn't successfully import before the ez_setup checks 125 | import pkg_resources 126 | 127 | from setuptools import Distribution 128 | from setuptools.package_index import PackageIndex 129 | from setuptools.sandbox import run_setup 130 | 131 | from distutils import log 132 | from distutils.debug import DEBUG 133 | 134 | 135 | # TODO: Maybe enable checking for a specific version of astropy_helpers? 136 | DIST_NAME = 'astropy-helpers' 137 | PACKAGE_NAME = 'astropy_helpers' 138 | 139 | # Defaults for other options 140 | DOWNLOAD_IF_NEEDED = True 141 | INDEX_URL = 'https://pypi.python.org/simple' 142 | USE_GIT = True 143 | OFFLINE = False 144 | AUTO_UPGRADE = True 145 | 146 | # A list of all the configuration options and their required types 147 | CFG_OPTIONS = [ 148 | ('auto_use', bool), ('path', str), ('download_if_needed', bool), 149 | ('index_url', str), ('use_git', bool), ('offline', bool), 150 | ('auto_upgrade', bool) 151 | ] 152 | 153 | 154 | class _Bootstrapper(object): 155 | """ 156 | Bootstrapper implementation. See ``use_astropy_helpers`` for parameter 157 | documentation. 158 | """ 159 | 160 | def __init__(self, path=None, index_url=None, use_git=None, offline=None, 161 | download_if_needed=None, auto_upgrade=None): 162 | 163 | if path is None: 164 | path = PACKAGE_NAME 165 | 166 | if not (isinstance(path, _str_types) or path is False): 167 | raise TypeError('path must be a string or False') 168 | 169 | if PY3 and not isinstance(path, _text_type): 170 | fs_encoding = sys.getfilesystemencoding() 171 | path = path.decode(fs_encoding) # path to unicode 172 | 173 | self.path = path 174 | 175 | # Set other option attributes, using defaults where necessary 176 | self.index_url = index_url if index_url is not None else INDEX_URL 177 | self.offline = offline if offline is not None else OFFLINE 178 | 179 | # If offline=True, override download and auto-upgrade 180 | if self.offline: 181 | download_if_needed = False 182 | auto_upgrade = False 183 | 184 | self.download = (download_if_needed 185 | if download_if_needed is not None 186 | else DOWNLOAD_IF_NEEDED) 187 | self.auto_upgrade = (auto_upgrade 188 | if auto_upgrade is not None else AUTO_UPGRADE) 189 | 190 | # If this is a release then the .git directory will not exist so we 191 | # should not use git. 192 | git_dir_exists = os.path.exists(os.path.join(os.path.dirname(__file__), '.git')) 193 | if use_git is None and not git_dir_exists: 194 | use_git = False 195 | 196 | self.use_git = use_git if use_git is not None else USE_GIT 197 | # Declared as False by default--later we check if astropy-helpers can be 198 | # upgraded from PyPI, but only if not using a source distribution (as in 199 | # the case of import from a git submodule) 200 | self.is_submodule = False 201 | 202 | @classmethod 203 | def main(cls, argv=None): 204 | if argv is None: 205 | argv = sys.argv 206 | 207 | config = cls.parse_config() 208 | config.update(cls.parse_command_line(argv)) 209 | 210 | auto_use = config.pop('auto_use', False) 211 | bootstrapper = cls(**config) 212 | 213 | if auto_use: 214 | # Run the bootstrapper, otherwise the setup.py is using the old 215 | # use_astropy_helpers() interface, in which case it will run the 216 | # bootstrapper manually after reconfiguring it. 
217 | bootstrapper.run() 218 | 219 | return bootstrapper 220 | 221 | @classmethod 222 | def parse_config(cls): 223 | if not os.path.exists('setup.cfg'): 224 | return {} 225 | 226 | cfg = ConfigParser() 227 | 228 | try: 229 | cfg.read('setup.cfg') 230 | except Exception as e: 231 | if DEBUG: 232 | raise 233 | 234 | log.error( 235 | "Error reading setup.cfg: {0!r}\n{1} will not be " 236 | "automatically bootstrapped and package installation may fail." 237 | "\n{2}".format(e, PACKAGE_NAME, _err_help_msg)) 238 | return {} 239 | 240 | if not cfg.has_section('ah_bootstrap'): 241 | return {} 242 | 243 | config = {} 244 | 245 | for option, type_ in CFG_OPTIONS: 246 | if not cfg.has_option('ah_bootstrap', option): 247 | continue 248 | 249 | if type_ is bool: 250 | value = cfg.getboolean('ah_bootstrap', option) 251 | else: 252 | value = cfg.get('ah_bootstrap', option) 253 | 254 | config[option] = value 255 | 256 | return config 257 | 258 | @classmethod 259 | def parse_command_line(cls, argv=None): 260 | if argv is None: 261 | argv = sys.argv 262 | 263 | config = {} 264 | 265 | # For now we just pop recognized ah_bootstrap options out of the 266 | # arg list. This is imperfect; in the unlikely case that a setup.py 267 | # custom command or even custom Distribution class defines an argument 268 | # of the same name then we will break that. However there's a catch22 269 | # here that we can't just do full argument parsing right here, because 270 | # we don't yet know *how* to parse all possible command-line arguments. 271 | if '--no-git' in argv: 272 | config['use_git'] = False 273 | argv.remove('--no-git') 274 | 275 | if '--offline' in argv: 276 | config['offline'] = True 277 | argv.remove('--offline') 278 | 279 | return config 280 | 281 | def run(self): 282 | strategies = ['local_directory', 'local_file', 'index'] 283 | dist = None 284 | 285 | # First, remove any previously imported versions of astropy_helpers; 286 | # this is necessary for nested installs where one package's installer 287 | # is installing another package via setuptools.sandbox.run_setup, as in 288 | # the case of setup_requires 289 | for key in list(sys.modules): 290 | try: 291 | if key == PACKAGE_NAME or key.startswith(PACKAGE_NAME + '.'): 292 | del sys.modules[key] 293 | except AttributeError: 294 | # Sometimes mysterious non-string things can turn up in 295 | # sys.modules 296 | continue 297 | 298 | # Check to see if the path is a submodule 299 | self.is_submodule = self._check_submodule() 300 | 301 | for strategy in strategies: 302 | method = getattr(self, 'get_{0}_dist'.format(strategy)) 303 | dist = method() 304 | if dist is not None: 305 | break 306 | else: 307 | raise _AHBootstrapSystemExit( 308 | "No source found for the {0!r} package; {0} must be " 309 | "available and importable as a prerequisite to building " 310 | "or installing this package.".format(PACKAGE_NAME)) 311 | 312 | # This is a bit hacky, but if astropy_helpers was loaded from a 313 | # directory/submodule its Distribution object gets a "precedence" of 314 | # "DEVELOP_DIST". However, in other cases it gets a precedence of 315 | # "EGG_DIST". 
However, when activating the distribution it will only be 316 | # placed early on sys.path if it is treated as an EGG_DIST, so always 317 | # do that 318 | dist = dist.clone(precedence=pkg_resources.EGG_DIST) 319 | 320 | # Otherwise we found a version of astropy-helpers, so we're done 321 | # Just activate the found distribution on sys.path--if we did a 322 | # download this usually happens automatically but it doesn't hurt to 323 | # do it again 324 | # Note: Adding the dist to the global working set also activates it 325 | # (makes it importable on sys.path) by default. 326 | 327 | try: 328 | pkg_resources.working_set.add(dist, replace=True) 329 | except TypeError: 330 | # Some (much) older versions of setuptools do not have the 331 | # replace=True option here. These versions are old enough that all 332 | # bets may be off anyways, but it's easy enough to work around just 333 | # in case... 334 | if dist.key in pkg_resources.working_set.by_key: 335 | del pkg_resources.working_set.by_key[dist.key] 336 | pkg_resources.working_set.add(dist) 337 | 338 | @property 339 | def config(self): 340 | """ 341 | A `dict` containing the options this `_Bootstrapper` was configured 342 | with. 343 | """ 344 | 345 | return dict((optname, getattr(self, optname)) 346 | for optname, _ in CFG_OPTIONS if hasattr(self, optname)) 347 | 348 | def get_local_directory_dist(self): 349 | """ 350 | Handle importing a vendored package from a subdirectory of the source 351 | distribution. 352 | """ 353 | 354 | if not os.path.isdir(self.path): 355 | return 356 | 357 | log.info('Attempting to import astropy_helpers from {0} {1!r}'.format( 358 | 'submodule' if self.is_submodule else 'directory', 359 | self.path)) 360 | 361 | dist = self._directory_import() 362 | 363 | if dist is None: 364 | log.warn( 365 | 'The requested path {0!r} for importing {1} does not ' 366 | 'exist, or does not contain a copy of the {1} ' 367 | 'package.'.format(self.path, PACKAGE_NAME)) 368 | elif self.auto_upgrade and not self.is_submodule: 369 | # A version of astropy-helpers was found on the available path, but 370 | # check to see if a bugfix release is available on PyPI 371 | upgrade = self._do_upgrade(dist) 372 | if upgrade is not None: 373 | dist = upgrade 374 | 375 | return dist 376 | 377 | def get_local_file_dist(self): 378 | """ 379 | Handle importing from a source archive; this also uses setup_requires 380 | but points easy_install directly to the source archive. 
381 | """ 382 | 383 | if not os.path.isfile(self.path): 384 | return 385 | 386 | log.info('Attempting to unpack and import astropy_helpers from ' 387 | '{0!r}'.format(self.path)) 388 | 389 | try: 390 | dist = self._do_download(find_links=[self.path]) 391 | except Exception as e: 392 | if DEBUG: 393 | raise 394 | 395 | log.warn( 396 | 'Failed to import {0} from the specified archive {1!r}: ' 397 | '{2}'.format(PACKAGE_NAME, self.path, str(e))) 398 | dist = None 399 | 400 | if dist is not None and self.auto_upgrade: 401 | # A version of astropy-helpers was found on the available path, but 402 | # check to see if a bugfix release is available on PyPI 403 | upgrade = self._do_upgrade(dist) 404 | if upgrade is not None: 405 | dist = upgrade 406 | 407 | return dist 408 | 409 | def get_index_dist(self): 410 | if not self.download: 411 | log.warn('Downloading {0!r} disabled.'.format(DIST_NAME)) 412 | return None 413 | 414 | log.warn( 415 | "Downloading {0!r}; run setup.py with the --offline option to " 416 | "force offline installation.".format(DIST_NAME)) 417 | 418 | try: 419 | dist = self._do_download() 420 | except Exception as e: 421 | if DEBUG: 422 | raise 423 | log.warn( 424 | 'Failed to download and/or install {0!r} from {1!r}:\n' 425 | '{2}'.format(DIST_NAME, self.index_url, str(e))) 426 | dist = None 427 | 428 | # No need to run auto-upgrade here since we've already presumably 429 | # gotten the most up-to-date version from the package index 430 | return dist 431 | 432 | def _directory_import(self): 433 | """ 434 | Import astropy_helpers from the given path, which will be added to 435 | sys.path. 436 | 437 | Must return True if the import succeeded, and False otherwise. 438 | """ 439 | 440 | # Return True on success, False on failure but download is allowed, and 441 | # otherwise raise SystemExit 442 | path = os.path.abspath(self.path) 443 | 444 | # Use an empty WorkingSet rather than the man 445 | # pkg_resources.working_set, since on older versions of setuptools this 446 | # will invoke a VersionConflict when trying to install an upgrade 447 | ws = pkg_resources.WorkingSet([]) 448 | ws.add_entry(path) 449 | dist = ws.by_key.get(DIST_NAME) 450 | 451 | if dist is None: 452 | # We didn't find an egg-info/dist-info in the given path, but if a 453 | # setup.py exists we can generate it 454 | setup_py = os.path.join(path, 'setup.py') 455 | if os.path.isfile(setup_py): 456 | with _silence(): 457 | run_setup(os.path.join(path, 'setup.py'), 458 | ['egg_info']) 459 | 460 | for dist in pkg_resources.find_distributions(path, True): 461 | # There should be only one... 
462 | return dist 463 | 464 | return dist 465 | 466 | def _do_download(self, version='', find_links=None): 467 | if find_links: 468 | allow_hosts = '' 469 | index_url = None 470 | else: 471 | allow_hosts = None 472 | index_url = self.index_url 473 | 474 | # Annoyingly, setuptools will not handle other arguments to 475 | # Distribution (such as options) before handling setup_requires, so it 476 | # is not straightforward to programmatically augment the arguments which 477 | # are passed to easy_install 478 | class _Distribution(Distribution): 479 | def get_option_dict(self, command_name): 480 | opts = Distribution.get_option_dict(self, command_name) 481 | if command_name == 'easy_install': 482 | if find_links is not None: 483 | opts['find_links'] = ('setup script', find_links) 484 | if index_url is not None: 485 | opts['index_url'] = ('setup script', index_url) 486 | if allow_hosts is not None: 487 | opts['allow_hosts'] = ('setup script', allow_hosts) 488 | return opts 489 | 490 | if version: 491 | req = '{0}=={1}'.format(DIST_NAME, version) 492 | else: 493 | req = DIST_NAME 494 | 495 | attrs = {'setup_requires': [req]} 496 | 497 | try: 498 | if DEBUG: 499 | _Distribution(attrs=attrs) 500 | else: 501 | with _silence(): 502 | _Distribution(attrs=attrs) 503 | 504 | # If the setup_requires succeeded it will have added the new dist to 505 | # the main working_set 506 | return pkg_resources.working_set.by_key.get(DIST_NAME) 507 | except Exception as e: 508 | if DEBUG: 509 | raise 510 | 511 | msg = 'Error retrieving {0} from {1}:\n{2}' 512 | if find_links: 513 | source = find_links[0] 514 | elif index_url != INDEX_URL: 515 | source = index_url 516 | else: 517 | source = 'PyPI' 518 | 519 | raise Exception(msg.format(DIST_NAME, source, repr(e))) 520 | 521 | def _do_upgrade(self, dist): 522 | # Build up a requirement for a higher bugfix release but a lower minor 523 | # release (so API compatibility is guaranteed) 524 | next_version = _next_version(dist.parsed_version) 525 | 526 | req = pkg_resources.Requirement.parse( 527 | '{0}>{1},<{2}'.format(DIST_NAME, dist.version, next_version)) 528 | 529 | package_index = PackageIndex(index_url=self.index_url) 530 | 531 | upgrade = package_index.obtain(req) 532 | 533 | if upgrade is not None: 534 | return self._do_download(version=upgrade.version) 535 | 536 | def _check_submodule(self): 537 | """ 538 | Check if the given path is a git submodule. 539 | 540 | See the docstrings for ``_check_submodule_using_git`` and 541 | ``_check_submodule_no_git`` for further details. 542 | """ 543 | 544 | if (self.path is None or 545 | (os.path.exists(self.path) and not os.path.isdir(self.path))): 546 | return False 547 | 548 | if self.use_git: 549 | return self._check_submodule_using_git() 550 | else: 551 | return self._check_submodule_no_git() 552 | 553 | def _check_submodule_using_git(self): 554 | """ 555 | Check if the given path is a git submodule. If so, attempt to initialize 556 | and/or update the submodule if needed. 557 | 558 | This function makes calls to the ``git`` command in subprocesses. The 559 | ``_check_submodule_no_git`` option uses pure Python to check if the given 560 | path looks like a git submodule, but it cannot perform updates. 
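[Annotation] The version bound that ``_do_upgrade`` above constructs can be checked standalone; here ``_next_version`` is assumed to bump the minor version (e.g. 1.0.2 to 1.1), which is what the comment about API compatibility implies:

```python
import pkg_resources

req = pkg_resources.Requirement.parse('astropy-helpers>1.0.2,<1.1')
print('1.0.3' in req)  # True:  bugfix releases satisfy the bound
print('1.1.0' in req)  # False: minor releases are excluded
```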
561 | """ 562 | 563 | cmd = ['git', 'submodule', 'status', '--', self.path] 564 | 565 | try: 566 | log.info('Running `{0}`; use the --no-git option to disable git ' 567 | 'commands'.format(' '.join(cmd))) 568 | returncode, stdout, stderr = run_cmd(cmd) 569 | except _CommandNotFound: 570 | # The git command simply wasn't found; this is most likely the 571 | # case on user systems that don't have git and are simply 572 | # trying to install the package from PyPI or a source 573 | # distribution. Silently ignore this case and simply don't try 574 | # to use submodules 575 | return False 576 | 577 | stderr = stderr.strip() 578 | 579 | if returncode != 0 and stderr: 580 | # Unfortunately the return code alone cannot be relied on, as 581 | # earlier versions of git returned 0 even if the requested submodule 582 | # does not exist 583 | 584 | # This is a warning that occurs in perl (from running git submodule) 585 | # which only occurs with a malformatted locale setting which can 586 | # happen sometimes on OSX. See again 587 | # https://github.com/astropy/astropy/issues/2749 588 | perl_warning = ('perl: warning: Falling back to the standard locale ' 589 | '("C").') 590 | if not stderr.strip().endswith(perl_warning): 591 | # Some other unknown error condition occurred 592 | log.warn('git submodule command failed ' 593 | 'unexpectedly:\n{0}'.format(stderr)) 594 | return False 595 | 596 | # Output of `git submodule status` is as follows: 597 | # 598 | # 1: Status indicator: '-' for submodule is uninitialized, '+' if 599 | # submodule is initialized but is not at the commit currently indicated 600 | # in .gitmodules (and thus needs to be updated), or 'U' if the 601 | # submodule is in an unstable state (i.e. has merge conflicts) 602 | # 603 | # 2. SHA-1 hash of the current commit of the submodule (we don't really 604 | # need this information but it's useful for checking that the output is 605 | # correct) 606 | # 607 | # 3. The output of `git describe` for the submodule's current commit 608 | # hash (this includes for example what branches the commit is on) but 609 | # only if the submodule is initialized. We ignore this information for 610 | # now 611 | _git_submodule_status_re = re.compile( 612 | '^(?P[+-U ])(?P[0-9a-f]{40}) ' 613 | '(?P\S+)( .*)?$') 614 | 615 | # The stdout should only contain one line--the status of the 616 | # requested submodule 617 | m = _git_submodule_status_re.match(stdout) 618 | if m: 619 | # Yes, the path *is* a git submodule 620 | self._update_submodule(m.group('submodule'), m.group('status')) 621 | return True 622 | else: 623 | log.warn( 624 | 'Unexpected output from `git submodule status`:\n{0}\n' 625 | 'Will attempt import from {1!r} regardless.'.format( 626 | stdout, self.path)) 627 | return False 628 | 629 | def _check_submodule_no_git(self): 630 | """ 631 | Like ``_check_submodule_using_git``, but simply parses the .gitmodules file 632 | to determine if the supplied path is a git submodule, and does not exec any 633 | subprocesses. 634 | 635 | This can only determine if a path is a submodule--it does not perform 636 | updates, etc. This function may need to be updated if the format of the 637 | .gitmodules file is changed between git versions. 638 | """ 639 | 640 | gitmodules_path = os.path.abspath('.gitmodules') 641 | 642 | if not os.path.isfile(gitmodules_path): 643 | return False 644 | 645 | # This is a minimal reader for gitconfig-style files. 
649 |         gitmodules_fileobj = io.StringIO()
650 | 
651 |         # Must use io.open for cross-Python-compatible behavior wrt unicode
652 |         with io.open(gitmodules_path) as f:
653 |             for line in f:
654 |                 # gitconfig files are more flexible with leading whitespace;
655 |                 # just go ahead and remove it
656 |                 line = line.lstrip()
657 | 
658 |                 # comments can start with either # or ;
659 |                 if line and line[0] in ('#', ';'):
660 |                     continue
661 | 
662 |                 gitmodules_fileobj.write(line)
663 | 
664 |         gitmodules_fileobj.seek(0)
665 | 
666 |         cfg = RawConfigParser()
667 | 
668 |         try:
669 |             cfg.readfp(gitmodules_fileobj)
670 |         except Exception as exc:
671 |             log.warn('Malformed .gitmodules file: {0}\n'
672 |                      '{1} cannot be assumed to be a git submodule.'.format(
673 |                          exc, self.path))
674 |             return False
675 | 
676 |         for section in cfg.sections():
677 |             if not cfg.has_option(section, 'path'):
678 |                 continue
679 | 
680 |             submodule_path = cfg.get(section, 'path').rstrip(os.sep)
681 | 
682 |             if submodule_path == self.path.rstrip(os.sep):
683 |                 return True
684 | 
685 |         return False
686 | 
687 |     def _update_submodule(self, submodule, status):
688 |         if status == ' ':
689 |             # The submodule is up to date; no action necessary
690 |             return
691 |         elif status == '-':
692 |             if self.offline:
693 |                 raise _AHBootstrapSystemExit(
694 |                     "Cannot initialize the {0} submodule in --offline mode; "
695 |                     "this requires being able to clone the submodule from an "
696 |                     "online repository.".format(submodule))
697 |             cmd = ['update', '--init']
698 |             action = 'Initializing'
699 |         elif status == '+':
700 |             cmd = ['update']
701 |             action = 'Updating'
702 |             if self.offline:
703 |                 cmd.append('--no-fetch')
704 |         elif status == 'U':
705 |             raise _AHBootstrapSystemExit(
706 |                 'Error: Submodule {0} contains unresolved merge conflicts. '
707 |                 'Please complete or abandon any changes in the submodule so that '
708 |                 'it is in a usable state, then try again.'.format(submodule))
709 |         else:
710 |             log.warn('Unknown status {0!r} for git submodule {1!r}. Will '
711 |                      'attempt to use the submodule as-is, but try to ensure '
712 |                      'that the submodule is in a clean state and contains no '
713 |                      'conflicts or errors.\n{2}'.format(status, submodule,
714 |                                                        _err_help_msg))
715 |             return
716 | 
717 |         err_msg = None
718 |         cmd = ['git', 'submodule'] + cmd + ['--', submodule]
719 |         log.warn('{0} {1} submodule with: `{2}`'.format(
720 |             action, submodule, ' '.join(cmd)))
721 | 
722 |         try:
723 |             log.info('Running `{0}`; use the --no-git option to disable git '
724 |                      'commands'.format(' '.join(cmd)))
725 |             returncode, stdout, stderr = run_cmd(cmd)
726 |         except OSError as e:
727 |             err_msg = str(e)
728 |         else:
729 |             if returncode != 0:
730 |                 err_msg = stderr
731 | 
732 |         if err_msg is not None:
733 |             log.warn('An unexpected error occurred updating the git submodule '
734 |                      '{0!r}:\n{1}\n{2}'.format(submodule, err_msg,
735 |                                                _err_help_msg))
736 | 
737 | class _CommandNotFound(OSError):
738 |     """
739 |     An exception raised when a command run with run_cmd is not found on the
740 |     system.
741 |     """
742 | 
743 | 
744 | def run_cmd(cmd):
745 |     """
746 |     Run a command in a subprocess, given as a list of command-line
747 |     arguments.
748 | 
749 |     Returns a ``(returncode, stdout, stderr)`` tuple.
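
    Illustrative usage (assuming a ``git`` executable is on the PATH)::

        returncode, stdout, stderr = run_cmd(['git', '--version'])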
750 | """ 751 | 752 | try: 753 | p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE) 754 | # XXX: May block if either stdout or stderr fill their buffers; 755 | # however for the commands this is currently used for that is 756 | # unlikely (they should have very brief output) 757 | stdout, stderr = p.communicate() 758 | except OSError as e: 759 | if DEBUG: 760 | raise 761 | 762 | if e.errno == errno.ENOENT: 763 | msg = 'Command not found: `{0}`'.format(' '.join(cmd)) 764 | raise _CommandNotFound(msg, cmd) 765 | else: 766 | raise _AHBoostrapSystemExit( 767 | 'An unexpected error occurred when running the ' 768 | '`{0}` command:\n{1}'.format(' '.join(cmd), str(e))) 769 | 770 | 771 | # Can fail of the default locale is not configured properly. See 772 | # https://github.com/astropy/astropy/issues/2749. For the purposes under 773 | # consideration 'latin1' is an acceptable fallback. 774 | try: 775 | stdio_encoding = locale.getdefaultlocale()[1] or 'latin1' 776 | except ValueError: 777 | # Due to an OSX oddity locale.getdefaultlocale() can also crash 778 | # depending on the user's locale/language settings. See: 779 | # http://bugs.python.org/issue18378 780 | stdio_encoding = 'latin1' 781 | 782 | # Unlikely to fail at this point but even then let's be flexible 783 | if not isinstance(stdout, _text_type): 784 | stdout = stdout.decode(stdio_encoding, 'replace') 785 | if not isinstance(stderr, _text_type): 786 | stderr = stderr.decode(stdio_encoding, 'replace') 787 | 788 | return (p.returncode, stdout, stderr) 789 | 790 | 791 | def _next_version(version): 792 | """ 793 | Given a parsed version from pkg_resources.parse_version, returns a new 794 | version string with the next minor version. 795 | 796 | Examples 797 | ======== 798 | >>> _next_version(pkg_resources.parse_version('1.2.3')) 799 | '1.3.0' 800 | """ 801 | 802 | if hasattr(version, 'base_version'): 803 | # New version parsing from setuptools >= 8.0 804 | if version.base_version: 805 | parts = version.base_version.split('.') 806 | else: 807 | parts = [] 808 | else: 809 | parts = [] 810 | for part in version: 811 | if part.startswith('*'): 812 | break 813 | parts.append(part) 814 | 815 | parts = [int(p) for p in parts] 816 | 817 | if len(parts) < 3: 818 | parts += [0] * (3 - len(parts)) 819 | 820 | major, minor, micro = parts[:3] 821 | 822 | return '{0}.{1}.{2}'.format(major, minor + 1, 0) 823 | 824 | 825 | class _DummyFile(object): 826 | """A noop writeable object.""" 827 | 828 | errors = '' # Required for Python 3.x 829 | encoding = 'utf-8' 830 | 831 | def write(self, s): 832 | pass 833 | 834 | def flush(self): 835 | pass 836 | 837 | 838 | @contextlib.contextmanager 839 | def _silence(): 840 | """A context manager that silences sys.stdout and sys.stderr.""" 841 | 842 | old_stdout = sys.stdout 843 | old_stderr = sys.stderr 844 | sys.stdout = _DummyFile() 845 | sys.stderr = _DummyFile() 846 | exception_occurred = False 847 | try: 848 | yield 849 | except: 850 | exception_occurred = True 851 | # Go ahead and clean up so that exception handling can work normally 852 | sys.stdout = old_stdout 853 | sys.stderr = old_stderr 854 | raise 855 | 856 | if not exception_occurred: 857 | sys.stdout = old_stdout 858 | sys.stderr = old_stderr 859 | 860 | 861 | _err_help_msg = """ 862 | If the problem persists consider installing astropy_helpers manually using pip 863 | (`pip install astropy_helpers`) or by manually downloading the source archive, 864 | extracting it, and installing by running `python setup.py install` from the 865 | root of the extracted 
866 | """
867 | 
868 | 
869 | class _AHBootstrapSystemExit(SystemExit):
870 |     def __init__(self, *args):
871 |         if not args:
872 |             msg = 'An unknown problem occurred bootstrapping astropy_helpers.'
873 |         else:
874 |             msg = args[0]
875 | 
876 |         msg += '\n' + _err_help_msg
877 | 
878 |         super(_AHBootstrapSystemExit, self).__init__(msg, *args[1:])
879 | 
880 | 
881 | if sys.version_info[:2] < (2, 7):
882 |     # In Python 2.6 the distutils log does not log warnings, errors, etc. to
883 |     # stderr so we have to wrap it to ensure consistency at least in this
884 |     # module
885 |     import distutils
886 | 
887 |     class log(object):
888 |         def __getattr__(self, attr):
889 |             return getattr(distutils.log, attr)
890 | 
891 |         def warn(self, msg, *args):
892 |             self._log_to_stderr(distutils.log.WARN, msg, *args)
893 | 
894 |         def error(self, msg, *args):
895 |             self._log_to_stderr(distutils.log.ERROR, msg, *args)
896 | 
897 |         def fatal(self, msg, *args):
898 |             self._log_to_stderr(distutils.log.FATAL, msg, *args)
899 | 
900 |         def log(self, level, msg, *args):
901 |             if level in (distutils.log.WARN, distutils.log.ERROR,
902 |                          distutils.log.FATAL):
903 |                 self._log_to_stderr(level, msg, *args)
904 |             else:
905 |                 distutils.log.log(level, msg, *args)
906 | 
907 |         def _log_to_stderr(self, level, msg, *args):
908 |             # This is the only truly 'public' way to get the current threshold
909 |             # of the log
910 |             current_threshold = distutils.log.set_threshold(distutils.log.WARN)
911 |             distutils.log.set_threshold(current_threshold)
912 |             if level >= current_threshold:
913 |                 if args:
914 |                     msg = msg % args
915 |                 sys.stderr.write('%s\n' % msg)
916 |                 sys.stderr.flush()
917 | 
918 |     log = log()
919 | 
920 | 
921 | BOOTSTRAPPER = _Bootstrapper.main()
922 | 
923 | 
924 | def use_astropy_helpers(**kwargs):
925 |     """
926 |     Ensure that the `astropy_helpers` module is available and importable.
927 |     This supports automatic submodule initialization if astropy_helpers is
928 |     included in a project as a git submodule, or will download it from PyPI if
929 |     necessary.
930 | 
931 |     Parameters
932 |     ----------
933 | 
934 |     path : str or None, optional
935 |         A filesystem path relative to the root of the project's source code
936 |         that should be added to `sys.path` so that `astropy_helpers` can be
937 |         imported from that path.
938 | 
939 |         If the path is a git submodule it will automatically be initialized
940 |         and/or updated.
941 | 
942 |         The path may also be to a ``.tar.gz`` archive of the astropy_helpers
943 |         source distribution. In this case the archive is automatically
944 |         unpacked and made temporarily available on `sys.path` as a ``.egg``
945 |         archive.
946 | 
947 |         If `None` skip straight to downloading.
948 | 
949 |     download_if_needed : bool, optional
950 |         If the provided filesystem path is not found an attempt will be made to
951 |         download astropy_helpers from PyPI. It will then be made temporarily
952 |         available on `sys.path` as a ``.egg`` archive (using the
953 |         ``setup_requires`` feature of setuptools). If the ``--offline`` option
954 |         is given at the command line the value of this argument is overridden
955 |         to `False`.
956 | 
957 |     index_url : str, optional
958 |         If provided, use a different URL for the Python package index than the
959 |         main PyPI server.
960 | 
961 |     use_git : bool, optional
962 |         If `False` no git commands will be used--this effectively disables
963 |         support for git submodules. If the ``--no-git`` option is given at the
964 |         command line the value of this argument is overridden to `False`.
965 | 
966 |     auto_upgrade : bool, optional
967 |         By default, when installing a package from a non-development source
968 |         distribution ah_bootstrap will try to automatically check for patch
969 |         releases to astropy-helpers on PyPI and use the patched version over
970 |         any bundled versions. Setting this to `False` will disable that
971 |         functionality. If the ``--offline`` option is given at the command line
972 |         the value of this argument is overridden to `False`.
973 | 
974 |     offline : bool, optional
975 |         If `True`, disable all actions that require an internet connection,
976 |         including downloading packages from the package index and fetching
977 |         updates to any git submodule. Defaults to `False`.
978 |     """
979 | 
980 |     global BOOTSTRAPPER
981 | 
982 |     config = BOOTSTRAPPER.config
983 |     config.update(**kwargs)
984 | 
985 |     # Create a new bootstrapper with the updated configuration and run it
986 |     BOOTSTRAPPER = _Bootstrapper(**config)
987 |     BOOTSTRAPPER.run()
988 | 
--------------------------------------------------------------------------------