├── .gitmodules ├── ccdproc ├── utils │ ├── tests │ │ ├── __init__.py │ │ └── test_slices.py │ ├── __init__.py │ ├── sample_directory.py │ └── slices.py ├── extern │ └── __init__.py ├── tests │ ├── data │ │ ├── sip-wcs.fit │ │ ├── a8280271.fits │ │ ├── flat-mef.fits │ │ ├── science-mef.fits │ │ ├── README.rst │ │ └── expected_ifc_file_properties.csv │ ├── __init__.py │ ├── coveragerc │ ├── test_combine_open_files.py │ ├── test_gain.py │ ├── make_mef.py │ ├── test_keyword.py │ ├── test_bitfield.py │ ├── pytest_fixtures.py │ ├── test_memory_use.py │ ├── test_wrapped_external_funcs.py │ ├── test_rebin.py │ ├── test_ccdproc_logging.py │ ├── run_for_memory_profile.py │ ├── run_profile.ipynb │ ├── run_with_file_number_limit.py │ ├── test_ccdmask.py │ └── test_cosmicray.py ├── ccddata.py ├── __init__.py ├── conftest.py ├── _astropy_init.py └── log_meta.py ├── docs ├── citation.rst ├── conduct.rst ├── authors_for_sphinx.rst ├── _static │ ├── ccd_proc.ico │ ├── ccd_proc.png │ ├── ccdproc_banner.pdf │ ├── ccdproc_banner.png │ └── ccdproc.css ├── rtd-pip-requirements ├── changelog.rst ├── license.rst ├── _templates │ └── autosummary │ │ ├── base.rst │ │ ├── class.rst │ │ └── module.rst ├── api.rst ├── overview.rst ├── reduction_examples.rst ├── contributing.rst ├── index.rst ├── install.rst ├── getting_started.rst ├── Makefile ├── make.bat ├── conf.py ├── ccddata.rst ├── image_management.rst └── image_combination.rst ├── pyproject.toml ├── .readthedocs.yml ├── CODE_OF_CONDUCT.rst ├── ccdproc.cfg ├── .github ├── CONTRIBUTING.md ├── ISSUE_TEMPLATE.md ├── PULL_REQUEST_TEMPLATE.md └── workflows │ └── ci_tests.yml ├── licenses ├── README.rst └── LICENSE_STSCI_TOOLS.txt ├── MANIFEST.in ├── .gitignore ├── .mailmap ├── LICENSE.rst ├── setup.py ├── CITATION.rst ├── tox.ini ├── AUTHORS.rst ├── setup.cfg ├── README.rst └── CHANGES.rst /.gitmodules: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /ccdproc/utils/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/citation.rst: -------------------------------------------------------------------------------- 1 | .. _ccdproc_citation: 2 | 3 | .. include:: ../CITATION.rst 4 | -------------------------------------------------------------------------------- /docs/conduct.rst: -------------------------------------------------------------------------------- 1 | .. _ccdproc_coc: 2 | 3 | .. include:: ../CODE_OF_CONDUCT.rst 4 | -------------------------------------------------------------------------------- /ccdproc/extern/__init__.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | -------------------------------------------------------------------------------- /docs/authors_for_sphinx.rst: -------------------------------------------------------------------------------- 1 | Contributors 2 | ************ 3 | 4 | .. 
include:: ../AUTHORS.rst 5 | -------------------------------------------------------------------------------- /docs/_static/ccd_proc.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mwcraig/ccdproc/main/docs/_static/ccd_proc.ico -------------------------------------------------------------------------------- /docs/_static/ccd_proc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mwcraig/ccdproc/main/docs/_static/ccd_proc.png -------------------------------------------------------------------------------- /ccdproc/tests/data/sip-wcs.fit: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mwcraig/ccdproc/main/ccdproc/tests/data/sip-wcs.fit -------------------------------------------------------------------------------- /docs/_static/ccdproc_banner.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mwcraig/ccdproc/main/docs/_static/ccdproc_banner.pdf -------------------------------------------------------------------------------- /docs/_static/ccdproc_banner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mwcraig/ccdproc/main/docs/_static/ccdproc_banner.png -------------------------------------------------------------------------------- /ccdproc/tests/data/a8280271.fits: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mwcraig/ccdproc/main/ccdproc/tests/data/a8280271.fits -------------------------------------------------------------------------------- /ccdproc/tests/data/flat-mef.fits: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mwcraig/ccdproc/main/ccdproc/tests/data/flat-mef.fits 
-------------------------------------------------------------------------------- /docs/rtd-pip-requirements: -------------------------------------------------------------------------------- 1 | numpy>=1.9 2 | scipy 3 | scikit-image 4 | numpydoc 5 | astropy>=3.0 6 | sphinx-astropy 7 | -------------------------------------------------------------------------------- /ccdproc/tests/data/science-mef.fits: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mwcraig/ccdproc/main/ccdproc/tests/data/science-mef.fits -------------------------------------------------------------------------------- /docs/changelog.rst: -------------------------------------------------------------------------------- 1 | .. _changelog: 2 | 3 | ************** 4 | Full Changelog 5 | ************** 6 | 7 | .. include:: ../CHANGES.rst 8 | -------------------------------------------------------------------------------- /ccdproc/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | """ 3 | This packages contains affiliated package tests. 4 | """ 5 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools", 3 | "setuptools_scm", 4 | "wheel"] 5 | build-backend = 'setuptools.build_meta' 6 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | image: latest 5 | 6 | python: 7 | version: 3.7 8 | install: 9 | - method: pip 10 | path: . 
11 | extra_requirements: 12 | - docs 13 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.rst: -------------------------------------------------------------------------------- 1 | Code of Conduct 2 | =============== 3 | 4 | Ccdproc is an `Astropy`_ affiliated 5 | package and we follow the `Astropy Community Code of Conduct 6 | `_. 7 | -------------------------------------------------------------------------------- /ccdproc.cfg: -------------------------------------------------------------------------------- 1 | [ccdproc] 2 | # auto_logging = True 3 | # Whether to automatically log operations to metadata 4 | # If set to False, there is no need to specify add_keyword=False 5 | # when calling processing operations 6 | -------------------------------------------------------------------------------- /docs/license.rst: -------------------------------------------------------------------------------- 1 | .. _license: 2 | 3 | ******* 4 | License 5 | ******* 6 | 7 | Ccdproc License 8 | =============== 9 | 10 | Ccdproc is licensed under a 3-clause BSD style license: 11 | 12 | .. include:: ../LICENSE.rst 13 | -------------------------------------------------------------------------------- /.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | Contributing to ccdproc 2 | ----------------------- 3 | 4 | Contributions for ccdproc should follow the [guidelines for contributing to 5 | astropy](https://github.com/astropy/astropy/blob/master/CONTRIBUTING.md). 
6 | -------------------------------------------------------------------------------- /ccdproc/utils/__init__.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | 3 | # This sub-module is destined for common non-package specific utility 4 | # functions that will ultimately be merged into `astropy.utils` 5 | -------------------------------------------------------------------------------- /docs/_static/ccdproc.css: -------------------------------------------------------------------------------- 1 | 2 | 3 | @import url("bootstrap-astropy.css"); 4 | 5 | div.topbar a.brand { 6 | background: transparent url("ccd_proc.png") no-repeat 10px 4px; 7 | background-image: url("ccdproc.svg"), none; 8 | background-size: 32px 32px; 9 | 10 | } 11 | 12 | -------------------------------------------------------------------------------- /docs/_templates/autosummary/base.rst: -------------------------------------------------------------------------------- 1 | {% extends "autosummary_core/base.rst" %} 2 | {# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core. If you want to modify this template, it is strongly recommended that you still inherit from the astropy template. #} -------------------------------------------------------------------------------- /docs/_templates/autosummary/class.rst: -------------------------------------------------------------------------------- 1 | {% extends "autosummary_core/class.rst" %} 2 | {# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core. If you want to modify this template, it is strongly recommended that you still inherit from the astropy template. #} -------------------------------------------------------------------------------- /docs/api.rst: -------------------------------------------------------------------------------- 1 | API Reference 2 | ============= 3 | 4 | .. 
automodapi:: ccdproc 5 | :skip: CCDData 6 | :skip: fits_ccddata_writer 7 | :skip: fits_ccddata_reader 8 | 9 | .. automodapi:: ccdproc.utils.slices 10 | 11 | .. _GitHub repo: https://github.com/astropy/ccdproc 12 | -------------------------------------------------------------------------------- /docs/_templates/autosummary/module.rst: -------------------------------------------------------------------------------- 1 | {% extends "autosummary_core/module.rst" %} 2 | {# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core. If you want to modify this template, it is strongly recommended that you still inherit from the astropy template. #} -------------------------------------------------------------------------------- /licenses/README.rst: -------------------------------------------------------------------------------- 1 | Licenses 2 | ======== 3 | 4 | This directory holds license and credit information for works the ccdproc 5 | package is derived from or distributes, and/or datasets. 6 | 7 | The license file for the ccdproc package itself is placed in the root 8 | directory of this repository. 9 | -------------------------------------------------------------------------------- /ccdproc/tests/data/README.rst: -------------------------------------------------------------------------------- 1 | Data directory 2 | ============== 3 | 4 | This directory contains data files included with the affiliated package source 5 | code distribution. Note that this is intended only for relatively small files 6 | - large files should be externally hosted and downloaded as needed. 
7 | 8 | -------------------------------------------------------------------------------- /ccdproc/ccddata.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | 3 | """This module implements the base CCDData class.""" 4 | 5 | from astropy.nddata import fits_ccddata_reader, fits_ccddata_writer, CCDData 6 | 7 | 8 | __all__ = ['CCDData', 'fits_ccddata_reader', 'fits_ccddata_writer'] 9 | 10 | 11 | # This should be be a tuple to ensure it isn't inadvertently changed 12 | # elsewhere. 13 | _recognized_fits_file_extensions = ('fit', 'fits', 'fts') 14 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include CITATION.rst 2 | include CHANGES.rst 3 | include CODE_OF_CONDUCT.rst 4 | include LICENSE.rst 5 | include README.rst 6 | include pyproject.toml 7 | 8 | include ccdproc/tests/coveragerc 9 | 10 | include setup.cfg 11 | 12 | recursive-include ccdproc *.pyx *.c *.pxd 13 | recursive-include docs * 14 | recursive-include licenses * 15 | recursive-include scripts * 16 | 17 | prune build 18 | prune docs/_build 19 | prune docs/api 20 | 21 | 22 | global-exclude *.pyc *.o 23 | -------------------------------------------------------------------------------- /ccdproc/tests/data/expected_ifc_file_properties.csv: -------------------------------------------------------------------------------- 1 | file,simple,bitpix,naxis,naxis1,extend,bscale,bzero,imagetyp,filter,exposure 2 | filter_no_object_light.fit,True,16,1,100,True,1,32768,LIGHT,R,1.0 3 | filter_object_light.fit,True,16,1,100,True,1,32768,LIGHT,R,1.0 4 | filter_object_light.fit.gz,True,16,1,100,True,1,32768,LIGHT,R,1.0 5 | no_filter_no_object_bias.fit,True,16,1,100,True,1,32768,BIAS,,0.0 6 | no_filter_no_object_light.fit,True,16,1,100,True,1,32768,LIGHT,,1.0 7 | 
test.fits.fz,True,16,1,100,True,1,32768,LIGHT,R,15.0 8 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | This is the template for bug reports, if you have a feature request or question 2 | you can safely ignore and delete this prefilled text. 3 | 4 | Include a description of the problem: What are you trying to do (include your 5 | code and the **full** traceback)? What did you expect? 6 | 7 | ``` 8 | Include a minimal example to reproduce the issue including output and 9 | traceback. The triple backticks make github render this as a multi-line code 10 | block. 11 | ``` 12 | 13 | Don't forget to include the version of astropy, ccdproc and numpy, just copy 14 | this into your Python interpreter (without the backticks): 15 | 16 | ``` 17 | import astropy 18 | print(astropy.__version__) 19 | import ccdproc 20 | print(ccdproc.__version__) 21 | import numpy 22 | print(numpy.__version__) 23 | ``` 24 | -------------------------------------------------------------------------------- /ccdproc/tests/coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = ccdproc 3 | branch = False 4 | omit = 5 | ccdproc/_astropy_init* 6 | ccdproc/conftest* 7 | ccdproc/cython_version* 8 | ccdproc/setup_package* 9 | ccdproc/*/setup_package* 10 | ccdproc/*/*/setup_package* 11 | ccdproc/tests/* 12 | ccdproc/*/tests/* 13 | ccdproc/*/*/tests/* 14 | ccdproc/*version* 15 | 16 | [report] 17 | exclude_lines = 18 | # Have to re-enable the standard pragma 19 | pragma: no cover 20 | 21 | # Don't complain about packages we have installed 22 | except ImportError 23 | 24 | # Don't complain if tests don't hit assertions 25 | raise AssertionError 26 | raise NotImplementedError 27 | 28 | # Don't complain about script hooks 29 | def main\(.*\): 30 | 31 | # Ignore branches that don't pertain to this version of 
Python 32 | pragma: py{ignore_python_version} 33 | -------------------------------------------------------------------------------- /docs/overview.rst: -------------------------------------------------------------------------------- 1 | Overview 2 | ======== 3 | 4 | .. note:: 5 | `ccdproc` works only with astropy version 2.0 or later. 6 | 7 | The `ccdproc` package provides: 8 | 9 | + An image class, `~astropy.nddata.CCDData`, that includes an uncertainty for the 10 | data, units and methods for performing arithmetic with images including the 11 | propagation of uncertainties. 12 | + A set of functions performing common CCD data reduction steps (e.g. dark 13 | subtraction, flat field correction) with a flexible mechanism for logging 14 | reduction steps in the image metadata. 15 | + A function for reprojecting an image onto another WCS, useful for stacking 16 | science images. The actual reprojection is done by the 17 | `reproject package `_. 18 | + A class for combining and/or clipping images, `~ccdproc.Combiner`, and 19 | associated functions. 20 | + A class, `~ccdproc.ImageFileCollection`, for working with a directory of 21 | images. 22 | -------------------------------------------------------------------------------- /docs/reduction_examples.rst: -------------------------------------------------------------------------------- 1 | Reduction examples and tutorial 2 | =============================== 3 | 4 | Here are some examples and different repositories using `ccdproc`. 5 | 6 | * `Extended guide to image calibration using ccdproc`_ 7 | * `ipython notebook`_ 8 | * `WHT basic reductions`_ 9 | * `pyhrs`_ 10 | * `reduceccd`_ 11 | * `astrolib`_ 12 | * `mont4k_reduction`_ *Processes multi-image-extension FITS files* 13 | 14 | .. _Extended guide to image calibration using ccdproc: https://mwcraig.github.io/ccd-as-book/00-00-Preface 15 | .. _ipython notebook: http://nbviewer.ipython.org/gist/mwcraig/06060d789cc298bbb08e 16 | .. 
_WHT basic reductions: https://github.com/crawfordsm/wht_reduction_scripts/blob/master/wht_basic_reductions.py 17 | .. _pyhrs: https://github.com/saltastro/pyhrs 18 | .. _reduceccd: https://github.com/rgbIAA/reduceccd 19 | .. _astrolib: https://github.com/yucelkilic/astrolib 20 | .. _mont4k_reduction: https://github.com/bjweiner/ARTN/tree/master/mont4k_pipeline 21 | 22 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled files 2 | *.py[cod] 3 | *.a 4 | *.o 5 | *.so 6 | __pycache__ 7 | 8 | # Ignore .c files by default to avoid including generated code. If you want to 9 | # add a non-generated .c extension, use `git add -f filename.c`. 10 | *.c 11 | 12 | # Other generated files 13 | */version.py 14 | */cython_version.py 15 | htmlcov 16 | .coverage 17 | MANIFEST 18 | .ipynb_checkpoints 19 | 20 | # Sphinx 21 | docs/api 22 | docs/_build 23 | 24 | # Eclipse editor project files 25 | .project 26 | .pydevproject 27 | .settings 28 | 29 | # Pycharm editor project files 30 | .idea 31 | 32 | # VSCode editor files 33 | .vscode 34 | 35 | # Packages/installer info 36 | .eggs 37 | *.egg 38 | *.egg-info 39 | dist 40 | build 41 | eggs 42 | parts 43 | bin 44 | var 45 | sdist 46 | develop-eggs 47 | .installed.cfg 48 | lib 49 | distribute-*.tar.gz 50 | pip-wheel-metadata 51 | 52 | # Other 53 | .cache 54 | .tox 55 | .*.sw[op] 56 | *~ 57 | *.asv 58 | 59 | # Mac OSX 60 | .DS_Store 61 | nosetests.xml 62 | 63 | # Translations 64 | *.mo 65 | 66 | # Mr Developer 67 | .mr.developer.cfg 68 | .pytest_cache 69 | -------------------------------------------------------------------------------- /.mailmap: -------------------------------------------------------------------------------- 1 | Steve Crawford 2 | Matthew Craig 3 | Hans Moritz Günther 4 | Hans Moritz Günther 5 | Anthony Horton 6 | Forrest Gasdia 7 | Nathan Walker 8 | Erik M. Bray 9 | Erik M. 
Bray 10 | Erik M. Bray Erik Bray 11 | James McCormac 12 | Larry Bradley 13 | Jennifer Karr 14 | Javier Blosco 15 | Punyaslok Pattnaik 16 | Connor Stotts 17 | Connor Stotts 18 | JVSN Reddy 19 | Yoonsoo P. Bach 20 | Jaime A. Alvarado-Montes 21 | Julio C. N. Campagnolo 22 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | Please have a look at the following list and replace the "[ ]" with a "[x]" if 2 | the answer to this question is yes. 3 | 4 | - [ ] For new contributors: Did you add yourself to the "Authors.rst" file? 5 | 6 | For documentation changes: 7 | 8 | - [ ] For documentation changes: Does your commit message include a "[skip ci]"? 9 | Note that it should not if you changed any examples! 10 | 11 | For bugfixes: 12 | 13 | - [ ] Did you add an entry to the "Changes.rst" file? 14 | - [ ] Did you add a regression test? 15 | - [ ] Does the commit message include a "Fixes #issue_number" (replace "issue_number"). 16 | - [ ] Does this PR add, rename, move or remove any existing functions or parameters? 17 | 18 | For new functionality: 19 | 20 | - [ ] Did you add an entry to the "Changes.rst" file? 21 | - [ ] Did you include a meaningful docstring with Parameters, Returns and Examples? 22 | - [ ] Does the commit message include a "Fixes #issue_number" (replace "issue_number"). 23 | - [ ] Did you include tests for the new functionality? 24 | - [ ] Does this PR add, rename, move or remove any existing functions or parameters? 25 | 26 | Please note that the last point is not a requirement. It is meant as a check if 27 | the pull request potentially breaks backwards-compatibility. 
28 | 29 | ----------------------------------------- 30 | -------------------------------------------------------------------------------- /licenses/LICENSE_STSCI_TOOLS.txt: -------------------------------------------------------------------------------- 1 | Copyright (C) 2005 Association of Universities for Research in Astronomy (AURA) 2 | 3 | Redistribution and use in source and binary forms, with or without 4 | modification, are permitted provided that the following conditions are met: 5 | 6 | 1. Redistributions of source code must retain the above copyright 7 | notice, this list of conditions and the following disclaimer. 8 | 9 | 2. Redistributions in binary form must reproduce the above 10 | copyright notice, this list of conditions and the following 11 | disclaimer in the documentation and/or other materials provided 12 | with the distribution. 13 | 14 | 3. The name of AURA and its representatives may not be used to 15 | endorse or promote products derived from this software without 16 | specific prior written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED 19 | WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 20 | MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 21 | DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT, 22 | INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 23 | BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS 24 | OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 25 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR 26 | TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE 27 | USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH 28 | DAMAGE. 
29 | -------------------------------------------------------------------------------- /LICENSE.rst: -------------------------------------------------------------------------------- 1 | Copyright (c) 2011-2017, Astropy-ccdproc Developers 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without modification, 5 | are permitted provided that the following conditions are met: 6 | 7 | * Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 9 | * Redistributions in binary form must reproduce the above copyright notice, this 10 | list of conditions and the following disclaimer in the documentation and/or 11 | other materials provided with the distribution. 12 | * Neither the name of the Astropy Team nor the names of its contributors may be 13 | used to endorse or promote products derived from this software without 14 | specific prior written permission. 15 | 16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 17 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 18 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 19 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR 20 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 21 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 22 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 23 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 25 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
26 | -------------------------------------------------------------------------------- /ccdproc/__init__.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | """ 3 | The ccdproc package is a collection of code that will be helpful in basic CCD 4 | processing. These steps will allow reduction of basic CCD data as either a 5 | stand-alone processing or as part of a pipeline. 6 | """ 7 | 8 | # Affiliated packages may add whatever they like to this file, but 9 | # should keep this content at the top. 10 | # ---------------------------------------------------------------------------- 11 | from ._astropy_init import * 12 | # ---------------------------------------------------------------------------- 13 | 14 | # set up the version 15 | from pkg_resources import get_distribution, DistributionNotFound 16 | 17 | try: 18 | __version__ = get_distribution(__name__).version 19 | except DistributionNotFound: 20 | # package is not installed 21 | __version__ = 'unknown' 22 | 23 | # set up namespace, unless we are in setup... 24 | if not _ASTROPY_SETUP_: 25 | from .core import * 26 | from .ccddata import * 27 | from .combiner import * 28 | from .image_collection import * 29 | from astropy import config as _config 30 | 31 | class Conf(_config.ConfigNamespace): 32 | """ 33 | Configuration parameters for ccdproc. 34 | """ 35 | auto_logging = _config.ConfigItem( 36 | True, 37 | 'Whether to automatically log operations to metadata' 38 | 'If set to False, there is no need to specify add_keyword=False' 39 | 'when calling processing operations.' 
40 | ) 41 | conf = Conf() 42 | 43 | # Clean up the name space 44 | del get_distribution, DistributionNotFound 45 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | Reporting Issues and contributing code 2 | ====================================== 3 | 4 | Reporting Issues 5 | ---------------- 6 | 7 | If you have found a bug in ccdproc please report it by creating a 8 | new issue on the `ccdproc GitHub issue tracker 9 | `_. That requires 10 | creating a `free Github account `_ if you do not 11 | have one. 12 | 13 | Please include an example that demonstrates the issue and will allow the 14 | developers to reproduce and fix the problem, if possible. You may be asked to 15 | also provide information about your operating system and a full Python stack 16 | trace. The developers will walk you through obtaining a stack trace if it is 17 | necessary. 18 | 19 | 20 | Contributing code 21 | ----------------- 22 | 23 | Like the `Astropy`_ project, `ccdproc `_ is made both by and for its 24 | users. We accept contributions at all levels, spanning the gamut from 25 | fixing a typo in the documentation to developing a major new feature. 26 | We welcome contributors who will abide by the `Astropy Code of Conduct 27 | `_. 28 | 29 | Ccdproc follows the same workflow and coding guidelines as 30 | `Astropy`_. 
The following pages will help you get started with 31 | contributing fixes, code, or documentation (no git or GitHub 32 | experience necessary): 33 | 34 | * `How to make a code contribution `_ 35 | 36 | * `Coding Guidelines `_ 37 | 38 | * `Developer Documentation `_ 39 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 3 | 4 | import sys 5 | from setuptools import setup 6 | from pathlib import Path 7 | 8 | # First provide helpful messages if contributors try and run legacy commands 9 | # for tests or docs. 10 | 11 | TEST_HELP = """ 12 | Note: running tests is no longer done using 'python setup.py test'. Instead 13 | you will need to run: 14 | tox -e test 15 | If you don't already have tox installed, you can install it with: 16 | pip install tox 17 | If you only want to run part of the test suite, you can also use pytest 18 | directly with:: 19 | pip install -e . 20 | pytest 21 | For more information, see: 22 | http://docs.astropy.org/en/latest/development/testguide.html#running-tests 23 | """ 24 | 25 | if 'test' in sys.argv: 26 | print(TEST_HELP) 27 | sys.exit(1) 28 | 29 | DOCS_HELP = """ 30 | Note: building the documentation is no longer done using 31 | 'python setup.py build_docs'. Instead you will need to run: 32 | tox -e build_docs 33 | If you don't already have tox installed, you can install it with: 34 | pip install tox 35 | For more information, see: 36 | http://docs.astropy.org/en/latest/install.html#builddocs 37 | """ 38 | 39 | if 'build_docs' in sys.argv or 'build_sphinx' in sys.argv: 40 | print(DOCS_HELP) 41 | sys.exit(1) 42 | 43 | # NOTE: The configuration for the package, including the name, version, and 44 | # other information are set in the setup.cfg file. 
Here we mainly set up 45 | # setup_requires and install_requires since these are determined 46 | # programmatically. 47 | 48 | setup(use_scm_version={'write_to': Path('ccdproc') / 'version.py'}) 49 | 50 | # If compiled extensions are added to the package, add the argument below 51 | # to setup: 52 | # ext_modules=get_extensions()) 53 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. the "raw" directive below is used to hide the title in favor of 2 | just the logo being visible 3 | .. raw:: html 4 | 5 | 8 | 9 | ======= 10 | ccdproc 11 | ======= 12 | 13 | .. raw:: html 14 | 15 | 16 | 17 | .. only:: latex 18 | 19 | .. image:: _static/ccdproc_banner.pdf 20 | 21 | **Ccdproc** is is an `Astropy`_ `affiliated package 22 | `_ for basic data reductions 23 | of CCD images. It provides the essential tools for processing of CCD images 24 | in a framework that provides error propagation and bad pixel tracking 25 | throughout the reduction process. 26 | 27 | .. Important:: 28 | If you use `ccdproc`_ for a project that leads to a publication, 29 | whether directly or as a dependency of another package, please 30 | include an :doc:`acknowledgment and/or citation `. 31 | 32 | Detailed, step-by-step guide 33 | ---------------------------- 34 | 35 | In addition to the documentation here, a detailed guide to the topic of CCD 36 | data reduction using ``ccdproc`` and other `astropy`_ tools is available here: 37 | https://mwcraig.github.io/ccd-as-book/00-00-Preface 38 | 39 | Getting started 40 | --------------- 41 | 42 | .. toctree:: 43 | :maxdepth: 1 44 | 45 | install 46 | overview 47 | getting_started 48 | citation 49 | contributing 50 | conduct 51 | authors_for_sphinx 52 | changelog 53 | license 54 | 55 | Using `ccdproc` 56 | --------------- 57 | 58 | .. 
toctree:: 59 | :maxdepth: 2 60 | 61 | ccddata 62 | image_combination 63 | reduction_toolbox 64 | image_management 65 | reduction_examples 66 | 67 | .. toctree:: 68 | :maxdepth: 1 69 | 70 | api 71 | 72 | -------------------------------------------------------------------------------- /CITATION.rst: -------------------------------------------------------------------------------- 1 | Citing ccdproc 2 | -------------- 3 | 4 | If you use ccdproc for a project that leads to a publication, 5 | whether directly or as a dependency of another package, please include 6 | the following acknowledgment: 7 | 8 | .. code-block:: text 9 | 10 | This research made use of ccdproc, an Astropy package for 11 | image reduction (Craig et al. 20XX). 12 | 13 | where (Craig et al. 20XX) is a citation to the `Zenodo record 14 | `_ of the ccdproc version 15 | that was used. We also encourage citations in the main text wherever 16 | appropriate. 17 | 18 | For example, for ccdprpoc v1.3.0.post1 one would cite Craig et al. 2017 19 | with the BibTeX entry (https://zenodo.org/record/1069648/export/hx): 20 | 21 | .. code-block:: text 22 | 23 | 24 | @misc{matt_craig_2017_1069648, 25 | author = {Matt Craig and Steve Crawford and Michael Seifert and 26 | Thomas Robitaille and Brigitta Sip{\H o}cz and 27 | Josh Walawender and Z\`e Vin{\'{\i}}cius and Joe Philip Ninan and Michael Droettboom and Jiyong Youn and 28 | Erik Tollerud and Erik Bray and 29 | Nathan Walker and VSN Reddy Janga and 30 | Connor Stotts and Hans Moritz G{\"u}nther and Evert Rol and 31 | Yoonsoo P. 
Bach and Larry Bradley and Christoph Deil and 32 | Adrian Price-Whelan and Kyle Barbary and Anthony Horton and 33 | William Schoenell and Nathan Heidt and Forrest Gasdia and 34 | Stefan Nelson and Ole Streicher}, 35 | title = {astropy/ccdproc: v1.3.0.post1}, 36 | month = dec, 37 | year = 2017, 38 | doi = {10.5281/zenodo.1069648}, 39 | url = {https://doi.org/10.5281/zenodo.1069648} 40 | } 41 | 42 | All ccdproc versions (and more citation formats) can be found at 43 | https://doi.org/10.5281/zenodo.1069648. 44 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | requires = 3 | setuptools >= 30.3.0 4 | pip >= 19.3.1 5 | isolated_build = true 6 | 7 | 8 | [testenv] 9 | extras = test 10 | 11 | # Run the tests in a temporary directory to make sure that we don't 12 | # import this package from the source tree 13 | changedir = 14 | test: .tmp/{envname} 15 | 16 | description = 17 | run tests 18 | alldeps: with all optional dependencies 19 | devdeps: with the latest developer version of key dependencies 20 | oldestdeps: with the oldest supported version of key dependencies 21 | cov: and test coverage 22 | numpy117: with numpy 1.17.* 23 | numpy118: with numpy 1.18.* 24 | numpy119: with numpy 1.19.* 25 | astropylts: with the latest astropy LTS 26 | 27 | # The following provides some specific pinnings for key packages 28 | deps = 29 | cov: coverage 30 | 31 | numpy117: numpy==1.17.* 32 | numpy118: numpy==1.18.* 33 | numpy119: numpy==1.19.* 34 | 35 | astropylts: astropy==4.0.* 36 | 37 | devdeps: git+https://github.com/astropy/astropy.git#egg=astropy 38 | 39 | oldestdeps: numpy==1.17 40 | oldestdeps: astropy==4.0 41 | oldestdeps: scipy==0.19 42 | oldestdeps: matplotlib==2.2 43 | oldestdeps: scikit-image==0.14.2 44 | oldestdeps: scikit-learn==0.19 45 | oldestdeps: gwcs==0.12 46 | oldestdeps: pytest-astropy==0.4 47 | 48 | commands = 49 | pip freeze 50 
| !cov: pytest --pyargs ccdproc {toxinidir}/docs {posargs} 51 | cov: pytest --pyargs ccdproc {toxinidir}/docs --cov ccdproc --cov-config={toxinidir}/setup.cfg {posargs} 52 | cov: coverage xml -o {toxinidir}/coverage.xml 53 | 54 | [testenv:build_docs] 55 | extras = docs 56 | setenv = 57 | HOME = {envtmpdir} 58 | changedir = docs 59 | commands = 60 | sphinx-build . _build/html -b html {posargs} 61 | 62 | [testenv:pycodestyle] 63 | skip_install = true 64 | changedir = . 65 | description = check code style with pycodestyle 66 | deps = pycodestyle 67 | commands = pycodestyle ccdproc --count --show-source --show-pep8 68 | -------------------------------------------------------------------------------- /docs/install.rst: -------------------------------------------------------------------------------- 1 | ************ 2 | Installation 3 | ************ 4 | 5 | Requirements 6 | ============ 7 | 8 | Ccdproc has the following requirements: 9 | 10 | - `Astropy`_ v2.0 or later 11 | - `NumPy `_ 12 | - `SciPy `_ 13 | - `scikit-image `_ 14 | - `astroscrappy `_ 15 | - `reproject `_ 16 | 17 | One easy way to get these dependencies is to install a python distribution 18 | like `anaconda`_. 19 | 20 | Installing ccdproc 21 | ================== 22 | 23 | Using pip 24 | ------------- 25 | 26 | To install ccdproc with `pip `_, simply run:: 27 | 28 | pip install ccdproc 29 | 30 | Using conda 31 | ------------- 32 | 33 | To install ccdproc with `anaconda`_, run:: 34 | 35 | conda install -c conda-forge ccdproc 36 | 37 | 38 | Building from source 39 | ==================== 40 | 41 | Obtaining the source packages 42 | ----------------------------- 43 | 44 | Source packages 45 | ^^^^^^^^^^^^^^^ 46 | 47 | The latest stable source package for ccdproc can be `downloaded here 48 | `_. 
49 | 50 | Development repository 51 | ^^^^^^^^^^^^^^^^^^^^^^ 52 | 53 | The latest development version of ccdproc can be cloned from github 54 | using this command:: 55 | 56 | git clone git://github.com/astropy/ccdproc.git 57 | 58 | Building and Installing 59 | ----------------------- 60 | 61 | To build ccdproc (from the root of the source tree):: 62 | 63 | python setup.py build 64 | 65 | To install ccdproc (from the root of the source tree):: 66 | 67 | pip install . 68 | 69 | To set up a development install in which changes to the source are immediately 70 | reflected in the installed package (from the root of the source tree):: 71 | 72 | pip install -e . 73 | 74 | Testing a source code build of ccdproc 75 | -------------------------------------- 76 | 77 | The easiest way to test that your ccdproc built correctly (without 78 | installing ccdproc) is to run this from the root of the source tree:: 79 | 80 | python setup.py test 81 | 82 | .. _anaconda: https://anaconda.com/ 83 | -------------------------------------------------------------------------------- /ccdproc/tests/test_combine_open_files.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | import subprocess 3 | import sys 4 | import os 5 | 6 | import pytest 7 | 8 | run_dir = Path(__file__).parent 9 | 10 | # Why? So that we get up to the file above ccdproc, so that in the 11 | # subprocess we can add that direction to sys.path. 
# Regression test for #629
@pytest.mark.skipif(os.environ.get('APPVEYOR') or os.sys.platform == 'win32',
                    reason='Test relies on linux/osx features of psutil')
@pytest.mark.skipif(sys.version_info < (3, 5),
                    reason='Test requires subprocess.run, introduced in 3.5')
def test_open_files_combine_no_chunks():
    """
    Check that combining without chunking does not open (much) more
    than the number of files being processed.
    """
    run_args = common_args + ['--open-by', 'combine-nochunk', NUM_FILE_LIMIT]
    completed = subprocess.run(args=run_args, stderr=subprocess.PIPE,
                               cwd=str(subprocess_dir))
    # The child process enforces the file-number limit itself; a zero
    # exit status means we stayed under it, which is all we check here.
    assert completed.returncode == 0
57 | assert p.returncode == 0 58 | -------------------------------------------------------------------------------- /ccdproc/tests/test_gain.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | import astropy.units as u 7 | 8 | from ccdproc.core import create_deviation, gain_correct, Keyword 9 | from ccdproc.tests.pytest_fixtures import ccd_data as ccd_data_func 10 | 11 | 12 | # tests for gain 13 | @pytest.mark.parametrize('gain', [ 14 | 3.0, 15 | 3.0 * u.photon / u.adu, 16 | 3.0 * u.electron / u.adu, 17 | Keyword('gainval', unit=u.electron / u.adu)]) 18 | def test_linear_gain_correct(gain): 19 | ccd_data = ccd_data_func() 20 | # The data values should be positive, so the poisson noise calculation 21 | # works without throwing warnings 22 | ccd_data.data = np.absolute(ccd_data.data) 23 | ccd_data = create_deviation(ccd_data, readnoise=1.0 * u.adu) 24 | ccd_data.meta['gainval'] = 3.0 25 | orig_data = ccd_data.data 26 | ccd = gain_correct(ccd_data, gain) 27 | if isinstance(gain, Keyword): 28 | gain = gain.value # convert to Quantity... 
def make_sample_mef(science_name, flat_name, size=10, dtype='float32'):
    """
    Make a multi-extension FITS image with random data
    and a MEF flat.

    Parameters
    ----------

    science_name : str
        Name of the science image created by this function.

    flat_name : str
        Name of the flat image created by this function.

    size : int, optional
        Size of each dimension of the image; images created are square.

    dtype : str or numpy dtype, optional
        dtype of the generated images.
    """
    n_extensions = 3
    science_hdus = [fits.PrimaryHDU()]
    flat_hdus = [fits.PrimaryHDU()]

    # Fixed seed so the generated sample files are reproducible.
    with NumpyRNGContext(1234):
        for _ in range(n_extensions):
            # Simulate a cloudy night: average pixel value of 100 with
            # a read_noise of 1 electron.
            data = np.random.normal(100., 1.0, [size, size]).astype(dtype)
            sci_hdu = fits.ImageHDU(data=data)
            # A header that is at least somewhat realistic.
            for key, value in [('unit', 'electron'),
                               ('object', 'clouds'),
                               ('exptime', 30.0),
                               ('date-obs', '1928-07-23T21:03:27'),
                               ('filter', 'B'),
                               ('imagetyp', 'LIGHT')]:
                sci_hdu.header[key] = value
            science_hdus.append(sci_hdu)

            # A perfect (all-ones) flat for the matching extension.
            flat_hdu = fits.ImageHDU(data=np.ones_like(data, dtype=dtype))
            for key, value in [('unit', 'electron'),
                               ('filter', 'B'),
                               ('imagetyp', 'FLAT'),
                               ('date-obs', '1928-07-23T21:03:27')]:
                flat_hdu.header[key] = value
            flat_hdus.append(flat_hdu)

        fits.HDUList(science_hdus).writeto(science_name)
        fits.HDUList(flat_hdus).writeto(flat_name)


if __name__ == '__main__':
    make_sample_mef('data/science-mef.fits', 'data/flat-mef.fits')
def test_keyword_properties_read_only():
    """A Keyword's name and unit cannot be reassigned after construction."""
    key = Keyword('observer')
    with pytest.raises(AttributeError):
        key.name = 'error'
    with pytest.raises(AttributeError):
        key.unit = u.hour


unit = u.second
numerical_value = 30


# The "expected" entry in each case below is one of:
#   * True, meaning the expected result is key.value == numerical_value * key.unit
#   * an exception class, if an error is expected
#   * a string, if the expected value is that string
@pytest.mark.parametrize('value,unit,expected', [
    (numerical_value, unit, True),
    (numerical_value, None, ValueError),
    (numerical_value * unit, None, True),
    (numerical_value * unit, unit, True),
    (numerical_value * unit, u.km, True),
    ('some string', None, 'some string'),
    ('no strings with unit', unit, ValueError)
])
def test_value_setting(value, unit, expected):
    name = 'exposure'
    # Setting at initialization time.
    expected_is_error = (isinstance(expected, type)
                         and issubclass(expected, Exception))
    if expected_is_error:
        with pytest.raises(expected):
            Keyword(name, unit=unit, value=value)
        return
    key = Keyword(name, unit=unit, value=value)
    if isinstance(expected, str):
        assert key.value == expected
    else:
        assert key.value == numerical_value * key.unit


def test_keyword_value_from_header():
    name = 'exposure'
    numerical_value = 30
    unit = u.second
    header = fits.Header()
    header[name] = numerical_value

    key = Keyword(name, unit=unit)
    # value_from both returns the value and caches it on the Keyword.
    assert key.value_from(header) == numerical_value * unit
    assert key.value == numerical_value * unit
-------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | 3 | # this contains imports plugins that configure py.test for astropy tests. 4 | # by importing them here in conftest.py they are discoverable by py.test 5 | # no matter how it is invoked within the source tree. 6 | 7 | import os 8 | 9 | try: 10 | # When the pytest_astropy_header package is installed 11 | from pytest_astropy_header.display import (PYTEST_HEADER_MODULES, 12 | TESTED_VERSIONS) 13 | 14 | def pytest_configure(config): 15 | config.option.astropy_header = True 16 | except ImportError: 17 | # TODO: Remove this when astropy 2.x and 3.x support is dropped. 18 | # Probably an old pytest-astropy package where the pytest_astropy_header 19 | # is not a dependency. 20 | try: 21 | from astropy.tests.plugins.display import (pytest_report_header, 22 | PYTEST_HEADER_MODULES, 23 | TESTED_VERSIONS) 24 | except ImportError: 25 | # TODO: Remove this when astropy 2.x support is dropped. 26 | # If that also did not work we're probably using astropy 2.0 27 | from astropy.tests.pytest_plugins import (pytest_report_header, 28 | PYTEST_HEADER_MODULES, 29 | TESTED_VERSIONS) 30 | 31 | try: 32 | # TODO: Remove this when astropy 2.x support is dropped. 33 | # This is the way to get plugins in astropy 2.x 34 | from astropy.tests.pytest_plugins import * 35 | except ImportError: 36 | # Otherwise they are installed as separate packages that pytest 37 | # automagically finds. 
38 | pass 39 | 40 | from .tests.pytest_fixtures import * 41 | 42 | # This is to figure out ccdproc version, rather than using Astropy's 43 | try: 44 | from .version import version 45 | except ImportError: 46 | version = 'dev' 47 | 48 | packagename = os.path.basename(os.path.dirname(__file__)) 49 | TESTED_VERSIONS[packagename] = version 50 | 51 | # Uncomment the following line to treat all DeprecationWarnings as 52 | # exceptions 53 | # enable_deprecations_as_exceptions() 54 | 55 | # Add astropy to test header information and remove unused packages. 56 | 57 | try: 58 | PYTEST_HEADER_MODULES['Astropy'] = 'astropy' 59 | PYTEST_HEADER_MODULES['astroscrappy'] = 'astroscrappy' 60 | PYTEST_HEADER_MODULES['reproject'] = 'reproject' 61 | del PYTEST_HEADER_MODULES['h5py'] 62 | except KeyError: 63 | pass 64 | -------------------------------------------------------------------------------- /ccdproc/tests/test_bitfield.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | 3 | import numpy as np 4 | import pytest 5 | 6 | from astropy.tests.helper import catch_warnings 7 | 8 | from ccdproc.core import bitfield_to_boolean_mask 9 | 10 | 11 | def test_bitfield_not_integer(): 12 | with pytest.raises(TypeError): 13 | bitfield_to_boolean_mask(np.random.random((10, 10))) 14 | 15 | 16 | def test_bitfield_negative_flags(): 17 | bm = np.random.randint(0, 10, (10, 10)) 18 | with pytest.raises(ValueError): 19 | bitfield_to_boolean_mask(bm, [-1]) 20 | 21 | 22 | def test_bitfield_non_poweroftwo_flags(): 23 | bm = np.random.randint(0, 10, (10, 10)) 24 | with pytest.raises(ValueError): 25 | bitfield_to_boolean_mask(bm, [3]) 26 | 27 | 28 | def test_bitfield_flipbits_when_no_bits(): 29 | bm = np.random.randint(0, 10, (10, 10)) 30 | with pytest.raises(TypeError): 31 | bitfield_to_boolean_mask(bm, None, flip_bits=1) 32 | 33 | 34 | def test_bitfield_flipbits_when_stringbits(): 35 | bm = 
np.random.randint(0, 10, (10, 10)) 36 | with pytest.raises(TypeError): 37 | bitfield_to_boolean_mask(bm, '3', flip_bits=1) 38 | 39 | 40 | def test_bitfield_string_flag_flip_not_start_of_string(): 41 | bm = np.random.randint(0, 10, (10, 10)) 42 | with pytest.raises(ValueError): 43 | bitfield_to_boolean_mask(bm, '1, ~4') 44 | 45 | 46 | def test_bitfield_string_flag_unbalanced_parens(): 47 | bm = np.random.randint(0, 10, (10, 10)) 48 | with pytest.raises(ValueError): 49 | bitfield_to_boolean_mask(bm, '(1, 4))') 50 | 51 | 52 | def test_bitfield_string_flag_wrong_positioned_parens(): 53 | bm = np.random.randint(0, 10, (10, 10)) 54 | with pytest.raises(ValueError): 55 | bitfield_to_boolean_mask(bm, '((1, )4)') 56 | 57 | 58 | def test_bitfield_string_flag_empty(): 59 | bm = np.random.randint(0, 10, (10, 10)) 60 | with pytest.raises(ValueError): 61 | bitfield_to_boolean_mask(bm, '~') 62 | 63 | 64 | def test_bitfield_flag_non_integer(): 65 | bm = np.random.randint(0, 10, (10, 10)) 66 | with pytest.raises(TypeError): 67 | bitfield_to_boolean_mask(bm, [1.3]) 68 | 69 | 70 | def test_bitfield_duplicate_flag_throws_warning(): 71 | bm = np.random.randint(0, 10, (10, 10)) 72 | with catch_warnings(UserWarning) as w: 73 | bitfield_to_boolean_mask(bm, [1, 1]) 74 | assert len(w) 75 | 76 | 77 | def test_bitfield_none_identical_to_strNone(): 78 | bm = np.random.randint(0, 10, (10, 10)) 79 | m1 = bitfield_to_boolean_mask(bm, None) 80 | m2 = bitfield_to_boolean_mask(bm, 'None') 81 | np.testing.assert_array_equal(m1, m2) 82 | -------------------------------------------------------------------------------- /ccdproc/tests/pytest_fixtures.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | 3 | import gzip 4 | from tempfile import mkdtemp 5 | import os 6 | from shutil import rmtree 7 | 8 | import numpy as np 9 | 10 | import pytest 11 | from astropy import units as u 12 | from astropy.utils 
# If additional pytest markers are defined the key in the dictionary below
# should be the name of the marker.
DEFAULTS = {
    'seed': 123,
    'data_size': 100,
    'data_scale': 1.0,
    'data_mean': 0.0
}

DEFAULT_SEED = 123
DEFAULT_DATA_SIZE = 100
DEFAULT_DATA_SCALE = 1.0
DEFAULT_DATA_MEAN = 0.0


def value_from_markers(key, request):
    """
    Return the first argument of the pytest marker named ``key`` on the
    current test, falling back to the module-wide ``DEFAULTS`` entry.
    """
    m = request.node.get_closest_marker(key)
    if m is not None:
        return m.args[0]
    else:
        return DEFAULTS[key]


def ccd_data(data_size=DEFAULT_DATA_SIZE,
             data_scale=DEFAULT_DATA_SCALE,
             data_mean=DEFAULT_DATA_MEAN):
    """
    Return a CCDData object with units of ADU.

    Parameters
    ----------
    data_size : int, optional
        Dimension of the square data array; the array generated is
        ``data_size x data_size``. Default is 100.
    data_scale : float, optional
        Scale (standard deviation) of the normal distribution from which
        the data values are drawn. Default is 1.0.
    data_mean : float, optional
        Mean of the normal distribution from which the data values are
        drawn. Default is 0.0.

    Notes
    -----
    The random data are generated with a fixed seed
    (``DEFAULTS['seed']``), so repeated calls with the same arguments
    return identical data.
    """
    size = data_size
    scale = data_scale
    mean = data_mean

    with NumpyRNGContext(DEFAULTS['seed']):
        data = np.random.normal(loc=mean, size=[size, size], scale=scale)

    fake_meta = {'my_key': 42, 'your_key': 'not 42'}
    ccd = CCDData(data, unit=u.adu)
    ccd.header = fake_meta
    return ccd


@pytest.fixture
def triage_setup(request):
    """
    Create a temporary directory of sample files for image-collection
    tests; the directory is removed again when the test finishes.
    """
    n_test, test_dir = directory_for_testing()

    def teardown():
        try:
            rmtree(test_dir)
        except OSError:
            # If we cannot clean up just keep going.
            pass

    request.addfinalizer(teardown)

    class Result:
        def __init__(self, n, directory):
            self.n_test = n
            self.test_dir = directory
    return Result(n_test, test_dir)
Ellsworth-Bowers (@tbowers7) 32 | * Forrest Gasdia (@fgasdia) 33 | * Carlos Gomez (@carlgogo) 34 | * Yash Gondhalekar (@Yash-10) 35 | * Hans Moritz Günther (@hamogu) 36 | * Nathan Heidt (@heidtha) 37 | * Michael Hlabathe (@hlabathems) 38 | * Elias Holte (@Sondanaa) 39 | * Anthony Horton (@AnthonyHorton) 40 | * Jennifer Karr (@JenniferKarr) 41 | * Yücel Kılıç (@yucelkilic) 42 | * Kelvin Lee (@laserkelvin) 43 | * Pey Lian Lim (@pllim) 44 | * James McCormac (@jmccormac01) 45 | * Stefan Nelson (@stefannelson) 46 | * Joe Philip Ninan (@indiajoe) 47 | * Punyaslok Pattnaik (@Punyaslok) 48 | * Adrian Price-Whelan (@adrn) 49 | * JVSN Reddy (@janga1997) 50 | * Luca Rizzi (@lucarizzi) 51 | * Evert Rol (@evertrol) 52 | * Jenna Ryon (@jryon) 53 | * William Schoenell (@wschoenell) 54 | * Sourav Singh (@souravsingh) 55 | * Brigitta Sipőcz (@bsipocz) 56 | * Connor Stotts (@stottsco) 57 | * Ole Streicher (@olebole) 58 | * Erik Tollerud (@eteq) 59 | * Simon Torres (@simontorres) 60 | * Zè Vinícius (@mirca) 61 | * Josh Walawender (@joshwalawender) 62 | * Nathan Walker (@walkerna22) 63 | * Benjamin Weiner (@bjweiner) 64 | * Jiyong Youn (@hletrd) 65 | 66 | Additional contributors 67 | ----------------------- 68 | 69 | The people below have helped the project by opening multiple issues, suggesting 70 | improvements outside of GitHub, or otherwise assisted the project. 
image_size = 2000   # Square image, so 4000 x 4000
num_files = 10


def setup_module():
    # Only build the (large) fixture files when the profiler is present;
    # otherwise the test below is skipped anyway.
    if memory_profile_present:
        generate_fits_files(num_files, size=image_size)


def teardown_module():
    if memory_profile_present:
        for fil in TMPPATH.glob('*.fit'):
            fil.unlink()


@pytest.mark.skipif(not platform.startswith('linux'),
                    reason='memory tests only work on linux')
@pytest.mark.skipif(not memory_profile_present,
                    reason='memory_profiler not installed')
@pytest.mark.parametrize('combine_method',
                         ['average', 'sum', 'median'])
def test_memory_use_in_combine(combine_method):
    """
    Regression test for https://github.com/astropy/ccdproc/issues/638:
    memory use during combination should stay close to the requested
    memory limit.
    """
    sampling_interval = 0.01  # sec
    memory_limit = 500000000  # bytes, roughly 0.5GB

    mem_use, _ = run_memory_profile(num_files, sampling_interval,
                                    size=image_size,
                                    memory_limit=memory_limit,
                                    combine_method=combine_method)

    # Memory use is not expected to be strictly below memory_limit
    # throughout the combination; this factor gives the peak some
    # headroom. It may need to be raised in the future...that is fine,
    # there is a separate check for average memory use.
    overhead_allowance = 1.75

    # memory_profile reports in MB (no, this is not the correct conversion)
    memory_limit_mb = memory_limit / 1e6

    peak = np.max(mem_use)
    average = np.mean(mem_use)

    # Checks for TOO MUCH MEMORY USED

    assert peak <= overhead_allowance * memory_limit_mb
    # The average gets no allowance at all.
    assert average < memory_limit_mb

    # Checks for NOT ENOUGH MEMORY USED; if these fail it means that
    # memory_factor in the combine function should perhaps be modified.

    # If the peak is coming in under the limit something needs to be fixed.
    assert peak >= 0.95 * memory_limit_mb

    # If the average is really low perhaps we should look at reducing
    # peak usage. Nothing special, really, about the factor 0.4 below.
    assert average > 0.4 * memory_limit_mb
39 | assert not result.meta 40 | 41 | 42 | def test_medianfilter_unusued(): 43 | ccd = CCDData(np.ones((3, 3)), unit='adu', 44 | mask=np.ones((3, 3)), 45 | uncertainty=StdDevUncertainty(np.ones((3, 3))), 46 | flags=np.ones((3, 3))) 47 | result = core.median_filter(ccd, 3) 48 | assert isinstance(result, CCDData) 49 | assert result.unit == 'adu' 50 | assert all(getattr(result, attr) is None 51 | for attr in ['mask', 'uncertainty', 'wcs', 'flags']) 52 | # The following test could be deleted if log_to_metadata is also applied. 53 | assert not result.meta 54 | 55 | 56 | def test_medianfilter_ndarray(): 57 | arr = np.random.random((5, 5)) 58 | result = core.median_filter(arr, 3) 59 | reference = ndimage.median_filter(arr, 3) 60 | # It's a wrapped function so we can use the equal comparison. 61 | np.testing.assert_array_equal(result, reference) 62 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | 2 | [tool:pytest] 3 | minversion = 2.2 4 | testpaths = "ccdproc" "docs" 5 | norecursedirs = build docs/_build 6 | doctest_plus = enabled 7 | addopts = --doctest-rst 8 | markers = 9 | data_size(N): set dimension of square data array for ccd_data fixture 10 | data_scale(s): set the scale of the normal distribution used to generate data 11 | data_mean(m): set the center of the normal distribution used to generate data 12 | 13 | [metadata] 14 | name = ccdproc 15 | description = Astropy affiliated package 16 | long_description = This is a package for reducing optical/IR CCD data that relies on astropy 17 | author = Steve Crawford, Matt Craig, and Michael Seifert 18 | author_email = ccdproc@gmail.com 19 | license = BSD 20 | url = http://ccdproc.readthedocs.io/ 21 | edit_on_github = False 22 | github_project = astropy/ccdproc 23 | 24 | [options] 25 | packages = find: 26 | zip_safe = False 27 | setup_requires = setuptools_scm 28 | install_requires = 
numpy>=1.16 29 | astropy>=2.0 30 | scipy 31 | astroscrappy>=1.0.5 32 | reproject>=0.5 33 | scikit-image 34 | python_requires = >=3.6 35 | 36 | [options.package_data] 37 | * = data/* 38 | 39 | [options.extras_require] 40 | test = 41 | pytest-astropy 42 | memory_profiler 43 | docs = 44 | sphinx-astropy 45 | 46 | 47 | [pycodestyle] 48 | # PEP8 errors/warnings: 49 | # (partially) taken from 50 | # https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes 51 | # E101 - mix of tabs and spaces 52 | # E111 - 4 spaces per indentation level 53 | # E112 - 4 spaces per indentation level 54 | # E113 - 4 spaces per indentation level 55 | # E221 - multiple spaces before operator 56 | # E222 - multiple spaces after operator 57 | # E223 - tab before operator 58 | # E224 - tab after operator 59 | # E225 - missing whitespace around operator 60 | # E241 - multiple whitespace after ',' 61 | # E242 - tab after ',' 62 | # E251 - unexpected spaces around keyword / parameter equals 63 | # E271 - multiple spaces after keyword 64 | # E272 - multiple spaces before keyword 65 | # E303 - too many blank lines 66 | # E304 - blank lines found after function decorator 67 | # E502 - the backslash is redundant between brackets 68 | # E703 - statement ends with a semicolon 69 | # E901 - SyntaxError or IndentationError 70 | # E902 - IOError 71 | # W191 - indentation contains tabs 72 | # W291 - trailing whitespace 73 | # W292 - no newline at end of file 74 | # W293 - blank line contains whitespace 75 | # W391 - blank line at end of file 76 | select = E101,E111,E112,E113,E221,E222,E223,E224,E225,E241,E242,E251,E271,E272,E303,E304,E502,E703,E901,E902,W191,W291,W292,W293,W391 77 | 78 | # PEP errors to ignore 79 | # ignore = ... 
# test rebinning ndarray
def test_rebin_ndarray():
    # A scalar is not array-like, so rebin must raise TypeError; warnings
    # are captured because rebin itself is deprecated.
    with pytest.raises(TypeError):
        with catch_warnings(AstropyDeprecationWarning):
            rebin(1, (5, 5))


# test rebinning dimensions
def test_rebin_dimensions():
    # A target shape whose rank differs from the data's must be rejected.
    data = ccd_data_func(data_size=10).data
    with pytest.raises(ValueError):
        with catch_warnings(AstropyDeprecationWarning):
            rebin(data, (5,))


# test rebinning dimensions
def test_rebin_ccddata_dimensions():
    # Same dimensionality check, but passing the CCDData object itself.
    ccd = ccd_data_func(data_size=10)
    with pytest.raises(ValueError):
        with catch_warnings(AstropyDeprecationWarning):
            rebin(ccd, (5,))


# test rebinning works
def test_rebin_larger():
    original = ccd_data_func(data_size=10).data
    with catch_warnings(AstropyDeprecationWarning) as warned:
        enlarged = rebin(original, (20, 20))
        assert len(warned) >= 1

    assert enlarged.shape == (20, 20)
    # Doubling each axis quadruples the total flux.
    np.testing.assert_almost_equal(enlarged.sum(), 4 * original.sum())
def _make_file_for_testing(file_name='', **kwd):
    """
    Write a minimal FITS file named ``file_name``.

    The image payload is a fixed 100-element ``uint16`` ramp; every
    keyword argument is copied verbatim into the primary header.

    Parameters
    ----------
    file_name : str
        Path of the FITS file to create.
    **kwd
        Header keyword/value pairs to set on the primary HDU.
    """
    img = np.uint16(np.arange(100))

    hdu = fits.PrimaryHDU(img)

    for k, v in kwd.items():
        hdu.header[k] = v

    hdu.writeto(file_name)


def directory_for_testing():
    """
    Set up directory with these contents:

    One file with imagetyp BIAS. It has the keyword EXPOSURE in
    the header, but no others beyond IMAGETYP and the bare minimum
    created with the FITS file.

    File name(s)
    ------------

    no_filter_no_object_bias.fit

    Five (5) files with imagetyp LIGHT, including two compressed
    files.

    + One file for each compression type, currently .gz and .fz.
    + ALL of the files will have the keyword EXPOSURE
      in the header.
    + Only ONE of them will have the value EXPOSURE=15.0.
    + All of the files EXCEPT ONE will have the keyword
      FILTER with the value 'R'.
    + NONE of the files have the keyword OBJECT

    File names
    ----------

    test.fits.fz
    filter_no_object_light.fit
    filter_object_light.fit.gz
    filter_object_light.fit
    no_filter_no_object_light.fit <---- this one has no filter

    Returns
    -------
    n_test : dict
        Expected counts (total files, missing filter, bias, compressed,
        light) for assertions in the collection tests.
    test_dir : str
        Path of the temporary directory containing the files.
    """
    n_test = {
        'files': 6,
        'missing_filter_value': 1,
        'bias': 1,
        'compressed': 2,
        'light': 5
    }

    test_dir = mkdtemp()

    # The sample files are created relative to the temporary directory.
    # Restore the original working directory in a ``finally`` clause so
    # that an error while writing any of the files cannot leave the
    # process stranded in the temporary directory.
    original_dir = os.getcwd()
    os.chdir(test_dir)
    try:
        _make_file_for_testing(file_name='no_filter_no_object_bias.fit',
                               imagetyp='BIAS',
                               EXPOSURE=0.0)

        _make_file_for_testing(file_name='no_filter_no_object_light.fit',
                               imagetyp='LIGHT',
                               EXPOSURE=1.0)

        _make_file_for_testing(file_name='filter_no_object_light.fit',
                               imagetyp='LIGHT',
                               EXPOSURE=1.0,
                               filter='R')

        _make_file_for_testing(file_name='filter_object_light.fit',
                               imagetyp='LIGHT',
                               EXPOSURE=1.0,
                               filter='R')

        # The .gz file is a compressed copy of an uncompressed light file.
        with open('filter_object_light.fit', 'rb') as f_in:
            with gzip.open('filter_object_light.fit.gz', 'wb') as f_out:
                f_out.write(f_in.read())

        _make_file_for_testing(file_name='test.fits.fz',
                               imagetyp='LIGHT',
                               EXPOSURE=15.0,
                               filter='R')
    finally:
        os.chdir(original_dir)

    return n_test, test_dir


def sample_directory_with_files():
    """
    Returns the path to the small sample directory used
    in the tests of ``ImageFileCollection``. Primarily intended
    for use in the doctests.
    """
    _, tmpdir = directory_for_testing()
    return tmpdir
If you are interested in financially supporting the project, please
consider donating to `NumFOCUS`_ that provides financial
management for the Astropy Project.
If you have found ccdproc useful to your research, please consider adding a
When a ``CCDData`` object is initialized from a FITS file, its metadata is a FITS header.
gain correction simply multiplies by a constant.
The example below was used to find an image in which the sky background was high for use in a talk: 77 | 78 | >>> from ccdproc import ImageFileCollection 79 | >>> import numpy as np 80 | >>> from glob import glob 81 | >>> dirs = glob('/Users/mcraig/Documents/Data/feder-images/fixed_headers/20*-??-??') 82 | 83 | >>> for d in dirs: 84 | ... print(d) 85 | ... ic = ImageFileCollection(d, keywords='*') 86 | ... for data, fname in ic.data(imagetyp='LIGHT', return_fname=True): 87 | ... if data.mean() > 4000.: 88 | ... print(fname) 89 | -------------------------------------------------------------------------------- /.github/workflows/ci_tests.yml: -------------------------------------------------------------------------------- 1 | name: CI Tests 2 | 3 | on: 4 | push: 5 | pull_request: 6 | schedule: 7 | # run every Monday at 6am UTC 8 | - cron: '0 6 * * 1' 9 | 10 | env: 11 | SETUP_XVFB: True # avoid issues if mpl tries to open a GUI window 12 | TOXARGS: '-v' 13 | 14 | jobs: 15 | ci-tests: 16 | name: ${{ matrix.os }}, ${{ matrix.tox_env }} 17 | runs-on: ${{ matrix.os }} 18 | if: "!(contains(github.event.head_commit.message, '[skip ci]') || contains(github.event.head_commit.message, '[ci skip]'))" 19 | strategy: 20 | matrix: 21 | name: [ 22 | 'ubuntu-py37', 23 | 'ubuntu-py38', 24 | # 'ubuntu-py39', # Skip until astroscrappy is working 25 | 'macos-py38', 26 | 'windows-py38', 27 | # 'ubuntu-py38-test', 28 | 'ubuntu-codestyle', 29 | 'ubuntu-build_docs', 30 | # 'ubuntu-linkcheck', 31 | # 'ubuntu-bandit', 32 | # 'ubuntu-py36-test-alldeps-astropylts-numpy117', 33 | # 'ubuntu-py37-test-alldeps-astropylts-numpy118', 34 | 'ubuntu-py38-test-alldeps-devdeps', 35 | ] 36 | 37 | include: 38 | - name: 'ubuntu-py37' 39 | os: ubuntu-latest 40 | python: '3.7' 41 | tox_env: 'py37-test-alldeps-numpy117' 42 | - name: 'ubuntu-py38' 43 | os: ubuntu-latest 44 | python: '3.8' 45 | tox_env: 'py38-test-alldeps-numpy118-cov' 46 | # - name: 'ubuntu-py39' 47 | # os: ubuntu-latest 48 | # python: '3.9' 49 
| # tox_env: 'py39-test-alldeps-numpy119' 50 | - name: 'macos-py38' 51 | os: macos-latest 52 | python: '3.8' 53 | tox_env: 'py38-test-alldeps' 54 | - name: 'windows-py38' 55 | os: windows-latest 56 | python: '3.8' 57 | tox_env: 'py38-test-alldeps' 58 | # - name: 'ubuntu-py38-test' 59 | # os: ubuntu-latest 60 | # python: '3.8' 61 | # tox_env: 'py38-test' 62 | - name: 'ubuntu-codestyle' 63 | os: ubuntu-latest 64 | python: '3.8' 65 | tox_env: 'pycodestyle' 66 | - name: 'ubuntu-build_docs' 67 | os: ubuntu-latest 68 | python: '3.8' 69 | tox_env: 'build_docs' 70 | # - name: 'ubuntu-linkcheck' 71 | # os: ubuntu-latest 72 | # python: '3.8' 73 | # tox_env: 'linkcheck' 74 | # - name: 'ubuntu-bandit' 75 | # os: ubuntu-latest 76 | # python: '3.8' 77 | # tox_env: 'bandit' 78 | # - name: 'ubuntu-py36-test-alldeps-astropylts-numpy117' 79 | # os: ubuntu-latest 80 | # python: '3.6' 81 | # tox_env: 'py36-test-alldeps-astropylts-numpy117' 82 | # - name: 'ubuntu-py37-test-alldeps-astropylts-numpy118' 83 | # os: ubuntu-latest 84 | # python: '3.7' 85 | # tox_env: 'py37-test-alldeps-astropylts-numpy118' 86 | - name: 'ubuntu-py38-test-alldeps-devdeps' 87 | os: ubuntu-latest 88 | python: '3.8' 89 | tox_env: 'py38-test-alldeps-devdeps' 90 | 91 | steps: 92 | - name: Check out repository 93 | uses: actions/checkout@v2 94 | - name: Set up Python ${{ matrix.python }} 95 | uses: actions/setup-python@v2 96 | with: 97 | python-version: ${{ matrix.python }} 98 | - name: Install base dependencies 99 | run: | 100 | python -m pip install --upgrade pip 101 | python -m pip install tox wheel 102 | - name: Install graphviz dependency 103 | if: "endsWith(matrix.tox_env, 'build_docs')" 104 | run: sudo apt-get -y install graphviz 105 | - name: Print Python, pip, setuptools, and tox versions 106 | run: | 107 | python -c "import sys; print(f'Python {sys.version}')" 108 | python -c "import pip; print(f'pip {pip.__version__}')" 109 | python -c "import setuptools; print(f'setuptools {setuptools.__version__}')" 
110 | python -c "import tox; print(f'tox {tox.__version__}')" 111 | - name: Run tests 112 | if: "! matrix.use_remote_data" 113 | run: | 114 | tox -e ${{ matrix.tox_env }} -- ${{ matrix.toxposargs }} 115 | # - name: Run tests with remote data 116 | # if: "matrix.use_remote_data" 117 | # run: tox -e ${{ matrix.tox_env }} -- --remote-data=any 118 | - name: Upload coverage to codecov 119 | if: "endsWith(matrix.tox_env, '-cov')" 120 | uses: codecov/codecov-action@v1.0.13 121 | -------------------------------------------------------------------------------- /ccdproc/tests/test_ccdproc_logging.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | 3 | import numpy as np 4 | 5 | from astropy.nddata import CCDData 6 | import pytest 7 | 8 | from ccdproc import subtract_bias, create_deviation, Keyword, trim_image 9 | from ccdproc.core import _short_names 10 | from ccdproc.tests.pytest_fixtures import ccd_data as ccd_data_func 11 | 12 | 13 | @pytest.mark.parametrize('key', [ 14 | 'short', 15 | 'toolongforfits']) 16 | def test_log_string(key): 17 | ccd_data = ccd_data_func() 18 | add_key = key 19 | new = create_deviation(ccd_data, readnoise=3 * ccd_data.unit, 20 | add_keyword=add_key) 21 | # Keys should be added to new but not to ccd_data and should have 22 | # no value. 23 | assert add_key in new.meta 24 | assert add_key not in ccd_data.meta 25 | # Long keyword names should be accessible with just the keyword name 26 | # without HIERARCH -- is it? 27 | assert new.meta[add_key] is None 28 | 29 | 30 | def test_log_keyword(): 31 | ccd_data = ccd_data_func() 32 | key = 'filter' 33 | key_val = 'V' 34 | kwd = Keyword(key, value=key_val) 35 | new = create_deviation(ccd_data, readnoise=3 * ccd_data.unit, 36 | add_keyword=kwd) 37 | # Was the Keyword added with the correct value? 
def test_log_dict():
    # Every key/value pair of a dict passed via ``add_keyword`` must land
    # in the result's metadata, while the input stays untouched.
    ccd = ccd_data_func()
    extra_meta = {
        'process': 'Added deviation',
        'n_images_input': 1,
        'current_temp': 42.9
    }
    processed = create_deviation(ccd, readnoise=3 * ccd.unit,
                                 add_keyword=extra_meta)
    for name, value in extra_meta.items():
        # Were all dictionary items added?
        assert name in processed.meta
        assert name not in ccd.meta
        assert processed.meta[name] == value


def test_log_bad_type_fails():
    ccd = ccd_data_func()
    # Anything that is neither string, Keyword, nor dict-like should make
    # the logging machinery raise AttributeError.
    bad_keyword = 15
    with pytest.raises(AttributeError):
        create_deviation(ccd, readnoise=3 * ccd.unit,
                         add_keyword=bad_keyword)


def test_log_set_to_None_does_not_change_header():
    ccd = ccd_data_func()
    # add_keyword=None disables logging entirely, so the set of metadata
    # keys must be unchanged.
    processed = create_deviation(ccd, readnoise=3 * ccd.unit,
                                 add_keyword=None)
    assert processed.meta.keys() == ccd.header.keys()
# Licensed under a 3-clause BSD style license - see LICENSE.rst

"""
Define utility functions and classes for ccdproc
"""

__all__ = ["slice_from_string"]


def slice_from_string(string, fits_convention=False):
    """
    Convert a string to a tuple of slices.

    Parameters
    ----------

    string : str
        A string that can be converted to a slice.

    fits_convention : bool, optional
        If True, assume the input string follows the FITS convention for
        indexing: the indexing is one-based (not zero-based) and the first
        axis is that which changes most rapidly as the index increases.

    Returns
    -------

    slice_tuple : tuple of slice objects
        A tuple able to be used to index a numpy.array

    Notes
    -----

    The ``string`` argument can be anything that would work as a valid way to
    slice an array in Numpy. It must be enclosed in matching brackets; all
    spaces are stripped from the string before processing.

    Examples
    --------

    >>> import numpy as np
    >>> arr1d = np.arange(5)
    >>> arr1d[slice_from_string('[2:5]')]
    array([2, 3, 4])
    >>> arr1d[slice_from_string('[ : : -2] ')]
    array([4, 2, 0])
    >>> arr2d = np.arange(15).reshape(3, 5)
    >>> arr2d[slice_from_string('[0:2,0:3]')]
    array([[0, 1, 2],
           [5, 6, 7]])
    """
    compact = string.replace(' ', '')

    # An empty specification means "no slicing at all".
    if not compact:
        return ()

    if not (compact.startswith('[') and compact.endswith(']')):
        raise ValueError('Slice string must be enclosed in square brackets.')

    compact = compact.strip('[]')

    if fits_convention:
        # Rewrite the FITS wildcard notation as numpy-style slice text
        # before parsing. Order matters: the flipped-with-step form must
        # be handled before the plain flipped form, and the plain
        # wildcard last.
        for fits_token, numpy_token in (('-*:', '::-'),
                                        ('-*', '::-1'),
                                        ('*', ':')):
            compact = compact.replace(fits_token, numpy_token)

    parsed = []
    for axis_spec in compact.split(','):
        # Empty pieces (e.g. in '::2') become None, just as in python.
        pieces = [int(piece) if piece else None
                  for piece in axis_spec.split(':')]
        parsed.append(slice(*pieces))

    if fits_convention:
        parsed = _defitsify_slice(parsed)

    return tuple(parsed)


def _defitsify_slice(slices):
    """
    Convert a FITS-style slice specification into a python slice.

    This means two things:
    + Subtract 1 from starting index because in the FITS
      specification arrays are one-based.
    + Do **not** subtract 1 from the ending index because the python
      convention for a slice is for the last value to be one less than the
      stop value. In other words, this subtraction is already built into
      python.
    + Reverse the order of the slices, because the FITS specification dictates
      that the first axis is the one along which the index varies most rapidly
      (aka FORTRAN order).
    """

    converted = []
    # FITS stores the fastest-varying axis first, numpy last: reverse.
    for fits_slice in slices[::-1]:
        start, stop, step = fits_slice.start, fits_slice.stop, fits_slice.step
        # FITS indices are one-based, python's are zero-based.
        begin = start - 1 if start is not None else None
        if begin is not None and begin < 0:
            raise ValueError("Smallest permissible FITS index is 1")
        if stop is not None and stop < 0:
            raise ValueError("Negative final index not allowed for FITS slice")
        result = slice(begin, stop, step)
        if start is not None and stop is not None and start > stop:
            # FITS uses a positive step index even when a dimension is
            # inverted, so flip the sign here.
            flipped_step = -step if step is not None else -1
            # A stop of 1 would map to -1, which python would read as
            # "count from the end" -- use None instead.
            flipped_stop = stop - 2 if stop != 1 else None
            result = slice(begin, flipped_stop, flipped_step)
        converted.append(result)

    return converted
# Licensed under a 3-clause BSD style license - see LICENSE.rst

# none of these are properly enclosed in brackets; is an error raised?
@pytest.mark.parametrize('arg', ['1:2', '[1:2', '1:2]'])
def test_slice_from_string_needs_enclosing_brackets(arg):
    with pytest.raises(ValueError):
        slice_from_string(arg)


@pytest.mark.parametrize('start,stop,step', [
    (None, None, -1),
    (5, 10, None),
    (None, 25, None),
    (2, 30, 3),
    (30, None, -2),
    (None, None, None)
])
def test_slice_from_string_1d(start, stop, step):
    data = np.zeros([100])

    def as_text(value):
        # None renders as the empty string, matching bare ':' syntax.
        return str(value) if value else ''

    pieces = [as_text(start), as_text(stop)]
    if as_text(step):
        pieces.append(as_text(step))

    parsed = slice_from_string('[' + ':'.join(pieces) + ']')
    np.testing.assert_array_equal(data[slice(start, stop, step)],
                                  data[parsed])


@pytest.mark.parametrize('arg', [' [ 1: 45]', '[ 1 :4 5]', ' [1:45] '])
def test_slice_from_string_spaces(arg):
    # Whitespace anywhere in the string must be ignored.
    data = np.zeros([100])
    np.testing.assert_array_equal(data[1:45], data[slice_from_string(arg)])


def test_slice_from_string_2d():
    data = np.zeros([100, 200])

    # manually writing a few cases here rather than parametrizing because
    # the latter seems not worth the trouble.
    two_d_cases = {
        '[:-1:2, :]': (slice(None, -1, 2), slice(None)),
        '[:, 15:90]': (slice(None), slice(15, 90)),
        '[10:80:5, 15:90:-1]': (slice(10, 80, 5), slice(15, 90, -1)),
    }
    for text, expected in two_d_cases.items():
        np.testing.assert_array_equal(data[expected],
                                      data[slice_from_string(text)])


def test_slice_from_string_fits_style():
    converted = slice_from_string('[1:5, :]', fits_convention=True)
    # order is reversed, so the *first* slice is the all-inclusive one.
    assert converted[0] == slice(None, None, None)
    # In the second slice, the start index is reduced by 1 while the
    # stop index is left unchanged.
    assert converted[1].start == 0 and converted[1].stop == 5
    converted = slice_from_string('[1:10:2, 4:5:2]', fits_convention=True)
    assert converted == (slice(3, 5, 2), slice(0, 10, 2))


def test_slice_from_string_fits_inverted():
    converted = slice_from_string('[20:10:2, 10:5, 5:4]',
                                  fits_convention=True)
    assert converted == (slice(4, 2, -1), slice(9, 3, -1), slice(19, 8, -2))
    # Handle a bunch of special cases for inverted slices, when the
    # stop index is 1 or 2
    converted = slice_from_string('[20:1:4, 21:1:4, 22:2:4, 2:1]',
                                  fits_convention=True)
    assert converted == (slice(1, None, -1), slice(21, 0, -4),
                         slice(20, None, -4), slice(19, None, -4))


def test_slice_from_string_empty():
    # The empty string means "no slicing".
    assert len(slice_from_string('')) == 0


def test_slice_from_string_bad_fits_slice():
    # A lower bound of 0 is illegal (FITS is one-based) in either axis
    # position, and a negative ending index is rejected too.
    for bad_spec in ('[0:10, 1:5]', '[1:5, 0:10]', '[1:10, 10:-1]'):
        with pytest.raises(ValueError):
            slice_from_string(bad_spec, fits_convention=True)


def test_slice_from_string_fits_wildcard():
    converted = slice_from_string('[*,-*]', fits_convention=True)
    assert converted[0] == slice(None, None, -1)
    assert converted[1] == slice(None, None, None)
    converted = slice_from_string('[*:2,-*:2]', fits_convention=True)
    assert converted[0] == slice(None, None, -2)
    assert converted[1] == slice(None, None, 2)
14 | 15 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest 16 | 17 | #This is needed with git because git doesn't create a dir if it's empty 18 | $(shell [ -d "_static" ] || mkdir -p _static) 19 | 20 | help: 21 | @echo "Please use \`make ' where is one of" 22 | @echo " html to make standalone HTML files" 23 | @echo " dirhtml to make HTML files named index.html in directories" 24 | @echo " singlehtml to make a single large HTML file" 25 | @echo " pickle to make pickle files" 26 | @echo " json to make JSON files" 27 | @echo " htmlhelp to make HTML files and a HTML help project" 28 | @echo " qthelp to make HTML files and a qthelp project" 29 | @echo " devhelp to make HTML files and a Devhelp project" 30 | @echo " epub to make an epub" 31 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 32 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 33 | @echo " text to make text files" 34 | @echo " man to make manual pages" 35 | @echo " changes to make an overview of all changed/added/deprecated items" 36 | @echo " linkcheck to check all external links for integrity" 37 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 38 | 39 | clean: 40 | -rm -rf $(BUILDDIR) 41 | -rm -rf api 42 | 43 | html: 44 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 45 | @echo 46 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 47 | 48 | dirhtml: 49 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 50 | @echo 51 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 52 | 53 | singlehtml: 54 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 55 | @echo 56 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 
57 | 58 | pickle: 59 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 60 | @echo 61 | @echo "Build finished; now you can process the pickle files." 62 | 63 | json: 64 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 65 | @echo 66 | @echo "Build finished; now you can process the JSON files." 67 | 68 | htmlhelp: 69 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 70 | @echo 71 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 72 | ".hhp project file in $(BUILDDIR)/htmlhelp." 73 | 74 | qthelp: 75 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 76 | @echo 77 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 78 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 79 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Astropy.qhcp" 80 | @echo "To view the help file:" 81 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Astropy.qhc" 82 | 83 | devhelp: 84 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 85 | @echo 86 | @echo "Build finished." 87 | @echo "To view the help file:" 88 | @echo "# mkdir -p $$HOME/.local/share/devhelp/Astropy" 89 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Astropy" 90 | @echo "# devhelp" 91 | 92 | epub: 93 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 94 | @echo 95 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 96 | 97 | latex: 98 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 99 | @echo 100 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 101 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 102 | "(use \`make latexpdf' here to do that automatically)." 103 | 104 | latexpdf: 105 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 106 | @echo "Running LaTeX files through pdflatex..." 107 | make -C $(BUILDDIR)/latex all-pdf 108 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 
109 | 110 | text: 111 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 112 | @echo 113 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 114 | 115 | man: 116 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 117 | @echo 118 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 119 | 120 | changes: 121 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 122 | @echo 123 | @echo "The overview file is in $(BUILDDIR)/changes." 124 | 125 | linkcheck: 126 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 127 | @echo 128 | @echo "Link check complete; look for any errors in the above output " \ 129 | "or in $(BUILDDIR)/linkcheck/output.txt." 130 | 131 | doctest: 132 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 133 | @echo "Testing of doctests in the sources finished, look at the " \ 134 | "results in $(BUILDDIR)/doctest/output.txt." 135 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | if NOT "%PAPER%" == "" ( 11 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 12 | ) 13 | 14 | if "%1" == "" goto help 15 | 16 | if "%1" == "help" ( 17 | :help 18 | echo.Please use `make ^` where ^ is one of 19 | echo. html to make standalone HTML files 20 | echo. dirhtml to make HTML files named index.html in directories 21 | echo. singlehtml to make a single large HTML file 22 | echo. pickle to make pickle files 23 | echo. json to make JSON files 24 | echo. htmlhelp to make HTML files and a HTML help project 25 | echo. qthelp to make HTML files and a qthelp project 26 | echo. 
devhelp to make HTML files and a Devhelp project 27 | echo. epub to make an epub 28 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 29 | echo. text to make text files 30 | echo. man to make manual pages 31 | echo. changes to make an overview over all changed/added/deprecated items 32 | echo. linkcheck to check all external links for integrity 33 | echo. doctest to run all doctests embedded in the documentation if enabled 34 | goto end 35 | ) 36 | 37 | if "%1" == "clean" ( 38 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 39 | del /q /s %BUILDDIR%\* 40 | goto end 41 | ) 42 | 43 | if "%1" == "html" ( 44 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 45 | if errorlevel 1 exit /b 1 46 | echo. 47 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 48 | goto end 49 | ) 50 | 51 | if "%1" == "dirhtml" ( 52 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 53 | if errorlevel 1 exit /b 1 54 | echo. 55 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 56 | goto end 57 | ) 58 | 59 | if "%1" == "singlehtml" ( 60 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 61 | if errorlevel 1 exit /b 1 62 | echo. 63 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 64 | goto end 65 | ) 66 | 67 | if "%1" == "pickle" ( 68 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 69 | if errorlevel 1 exit /b 1 70 | echo. 71 | echo.Build finished; now you can process the pickle files. 72 | goto end 73 | ) 74 | 75 | if "%1" == "json" ( 76 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 77 | if errorlevel 1 exit /b 1 78 | echo. 79 | echo.Build finished; now you can process the JSON files. 80 | goto end 81 | ) 82 | 83 | if "%1" == "htmlhelp" ( 84 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 85 | if errorlevel 1 exit /b 1 86 | echo. 87 | echo.Build finished; now you can run HTML Help Workshop with the ^ 88 | .hhp project file in %BUILDDIR%/htmlhelp. 
89 | goto end 90 | ) 91 | 92 | if "%1" == "qthelp" ( 93 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 94 | if errorlevel 1 exit /b 1 95 | echo. 96 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 97 | .qhcp project file in %BUILDDIR%/qthelp, like this: 98 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Astropy.qhcp 99 | echo.To view the help file: 100 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Astropy.ghc 101 | goto end 102 | ) 103 | 104 | if "%1" == "devhelp" ( 105 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 106 | if errorlevel 1 exit /b 1 107 | echo. 108 | echo.Build finished. 109 | goto end 110 | ) 111 | 112 | if "%1" == "epub" ( 113 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 117 | goto end 118 | ) 119 | 120 | if "%1" == "latex" ( 121 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 122 | if errorlevel 1 exit /b 1 123 | echo. 124 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 125 | goto end 126 | ) 127 | 128 | if "%1" == "text" ( 129 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 130 | if errorlevel 1 exit /b 1 131 | echo. 132 | echo.Build finished. The text files are in %BUILDDIR%/text. 133 | goto end 134 | ) 135 | 136 | if "%1" == "man" ( 137 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 138 | if errorlevel 1 exit /b 1 139 | echo. 140 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 141 | goto end 142 | ) 143 | 144 | if "%1" == "changes" ( 145 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 146 | if errorlevel 1 exit /b 1 147 | echo. 148 | echo.The overview file is in %BUILDDIR%/changes. 149 | goto end 150 | ) 151 | 152 | if "%1" == "linkcheck" ( 153 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 154 | if errorlevel 1 exit /b 1 155 | echo. 
156 | echo.Link check complete; look for any errors in the above output ^ 157 | or in %BUILDDIR%/linkcheck/output.txt. 158 | goto end 159 | ) 160 | 161 | if "%1" == "doctest" ( 162 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 163 | if errorlevel 1 exit /b 1 164 | echo. 165 | echo.Testing of doctests in the sources finished, look at the ^ 166 | results in %BUILDDIR%/doctest/output.txt. 167 | goto end 168 | ) 169 | 170 | :end 171 | -------------------------------------------------------------------------------- /ccdproc/_astropy_init.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | 3 | __all__ = ['__version__', 'test'] 4 | 5 | # this indicates whether or not we are in the package's setup.py 6 | try: 7 | _ASTROPY_SETUP_ 8 | except NameError: 9 | import builtins 10 | builtins._ASTROPY_SETUP_ = False 11 | 12 | try: 13 | from .version import version as __version__ 14 | except ImportError: 15 | __version__ = '' 16 | 17 | 18 | # set up the test command 19 | def _get_test_runner(): 20 | import os 21 | from astropy.tests.helper import TestRunner 22 | return TestRunner(os.path.dirname(__file__)) 23 | 24 | 25 | def test(package=None, test_path=None, args=None, plugins=None, 26 | verbose=False, pastebin=None, remote_data=False, pep8=False, 27 | pdb=False, coverage=False, open_files=False, **kwargs): 28 | """ 29 | Run the tests using `py.test `__. A proper set 30 | of arguments is constructed and passed to `pytest.main`_. 31 | 32 | .. _py.test: http://pytest.org/latest/ 33 | .. _pytest.main: http://pytest.org/latest/builtin.html#pytest.main 34 | 35 | Parameters 36 | ---------- 37 | package : str, optional 38 | The name of a specific package to test, e.g. 'io.fits' or 'utils'. 39 | If nothing is specified all default tests are run. 40 | 41 | test_path : str, optional 42 | Specify location to test by path. May be a single file or 43 | directory. 
Must be specified absolutely or relative to the 44 | calling directory. 45 | 46 | args : str, optional 47 | Additional arguments to be passed to pytest.main_ in the ``args`` 48 | keyword argument. 49 | 50 | plugins : list, optional 51 | Plugins to be passed to pytest.main_ in the ``plugins`` keyword 52 | argument. 53 | 54 | verbose : bool, optional 55 | Convenience option to turn on verbose output from py.test_. Passing 56 | True is the same as specifying ``'-v'`` in ``args``. 57 | 58 | pastebin : {'failed','all',None}, optional 59 | Convenience option for turning on py.test_ pastebin output. Set to 60 | ``'failed'`` to upload info for failed tests, or ``'all'`` to upload 61 | info for all tests. 62 | 63 | remote_data : bool, optional 64 | Controls whether to run tests marked with @remote_data. These 65 | tests use online data and are not run by default. Set to True to 66 | run these tests. 67 | 68 | pep8 : bool, optional 69 | Turn on PEP8 checking via the `pytest-pep8 plugin 70 | `_ and disable normal 71 | tests. Same as specifying ``'--pep8 -k pep8'`` in ``args``. 72 | 73 | pdb : bool, optional 74 | Turn on PDB post-mortem analysis for failing tests. Same as 75 | specifying ``'--pdb'`` in ``args``. 76 | 77 | coverage : bool, optional 78 | Generate a test coverage report. The result will be placed in 79 | the directory htmlcov. 80 | 81 | open_files : bool, optional 82 | Fail when any tests leave files open. Off by default, because 83 | this adds extra run time to the test suite. Works only on 84 | platforms with a working ``lsof`` command. 85 | 86 | parallel : int, optional 87 | When provided, run the tests in parallel on the specified 88 | number of CPUs. If parallel is negative, it will use the all 89 | the cores on the machine. Requires the 90 | `pytest-xdist `_ plugin 91 | installed. Only available when using Astropy 0.3 or later. 92 | 93 | kwargs 94 | Any additional keywords passed into this function will be passed 95 | on to the astropy test runner. 
This allows use of test-related 96 | functionality implemented in later versions of astropy without 97 | explicitly updating the package template. 98 | 99 | """ 100 | test_runner = _get_test_runner() 101 | return test_runner.run_tests( 102 | package=package, test_path=test_path, args=args, 103 | plugins=plugins, verbose=verbose, pastebin=pastebin, 104 | remote_data=remote_data, pep8=pep8, pdb=pdb, 105 | coverage=coverage, open_files=open_files, **kwargs) 106 | 107 | 108 | if not _ASTROPY_SETUP_: 109 | import os 110 | from warnings import warn 111 | from astropy import config 112 | 113 | # add these here so we only need to cleanup the namespace at the end 114 | config_dir = None 115 | 116 | if not os.environ.get('ASTROPY_SKIP_CONFIG_UPDATE', False): 117 | config_dir = os.path.dirname(__file__) 118 | config_template = os.path.join(config_dir, __package__ + ".cfg") 119 | if os.path.isfile(config_template): 120 | try: 121 | config.configuration.update_default_config( 122 | __package__, config_dir, version=__version__) 123 | except TypeError as orig_error: 124 | try: 125 | config.configuration.update_default_config( 126 | __package__, config_dir) 127 | except config.configuration.ConfigurationDefaultMissingError as e: 128 | wmsg = (e.args[0] + " Cannot install default profile. 
If you are " 129 | "importing from source, this is expected.") 130 | warn(config.configuration.ConfigurationDefaultMissingWarning(wmsg)) 131 | del e 132 | except: 133 | raise orig_error 134 | -------------------------------------------------------------------------------- /ccdproc/log_meta.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | 3 | from functools import wraps 4 | import inspect 5 | from itertools import chain 6 | 7 | import numpy as np 8 | 9 | from astropy.nddata import NDData 10 | from astropy import units as u 11 | from astropy.io import fits 12 | 13 | import ccdproc # Really only need Keyword from ccdproc 14 | 15 | __all__ = [] 16 | 17 | _LOG_ARGUMENT = 'add_keyword' 18 | 19 | _LOG_ARG_HELP = \ 20 | """ 21 | {arg} : str, `~ccdproc.Keyword` or dict-like, optional 22 | Item(s) to add to metadata of result. Set to False or None to 23 | completely disable logging. 24 | Default is to add a dictionary with a single item: 25 | The key is the name of this function and the value is a string 26 | containing the arguments the function was called with, except the 27 | value of this argument. 28 | """.format(arg=_LOG_ARGUMENT) 29 | 30 | 31 | def _insert_in_metadata_fits_safe(ccd, key, value): 32 | from .core import _short_names 33 | 34 | if key in _short_names: 35 | # This keyword was (hopefully) added by autologging but the 36 | # combination of it and its value not FITS-compliant in two 37 | # ways: the keyword name may be more than 8 characters and 38 | # the value may be too long. FITS cannot handle both of 39 | # those problems at once, so this fixes one of those 40 | # problems... 41 | # Shorten, sort of... 
42 | short_name = _short_names[key] 43 | if isinstance(ccd.meta, fits.Header): 44 | ccd.meta['HIERARCH {0}'.format(key.upper())] = ( 45 | short_name, "Shortened name for ccdproc command") 46 | else: 47 | ccd.meta[key] = ( 48 | short_name, "Shortened name for ccdproc command") 49 | ccd.meta[short_name] = value 50 | else: 51 | ccd.meta[key] = value 52 | 53 | 54 | def log_to_metadata(func): 55 | """ 56 | Decorator that adds logging to ccdproc functions. 57 | 58 | The decorator adds the optional argument _LOG_ARGUMENT to function 59 | signature and updates the function's docstring to reflect that. 60 | 61 | It also sets the default value of the argument to the name of the function 62 | and the arguments it was called with. 63 | """ 64 | func.__doc__ = func.__doc__.format(log=_LOG_ARG_HELP) 65 | 66 | argspec = inspect.getfullargspec(func) 67 | original_args, varargs, keywords, defaults = (argspec.args, argspec.varargs, 68 | argspec.varkw, argspec.defaults) 69 | # original_args = argspec.args 70 | # varargs = argspec.varargs 71 | # keywords = argspec.varkw 72 | # defaults = argspec.defaults 73 | 74 | # Grab the names of positional arguments for use in automatic logging 75 | try: 76 | original_positional_args = original_args[:-len(defaults)] 77 | except TypeError: 78 | original_positional_args = original_args 79 | 80 | # Add logging keyword and its default value for docstring 81 | original_args.append(_LOG_ARGUMENT) 82 | try: 83 | defaults = list(defaults) 84 | except TypeError: 85 | defaults = [] 86 | defaults.append(True) 87 | 88 | signature_with_arg_added = inspect.signature(func) 89 | signature_with_arg_added = "{0}{1}".format(func.__name__, 90 | signature_with_arg_added) 91 | func.__doc__ = "\n".join([signature_with_arg_added, func.__doc__]) 92 | 93 | @wraps(func) 94 | def wrapper(*args, **kwd): 95 | # Grab the logging keyword, if it is present. 
96 | log_result = kwd.pop(_LOG_ARGUMENT, True) 97 | result = func(*args, **kwd) 98 | 99 | if not log_result: 100 | # No need to add metadata.... 101 | meta_dict = {} 102 | elif log_result is not True: 103 | meta_dict = _metadata_to_dict(log_result) 104 | else: 105 | # Logging is not turned off, but user did not provide a value 106 | # so construct one unless the config parameter auto_logging is set to False 107 | if ccdproc.conf.auto_logging: 108 | key = func.__name__ 109 | # Get names of arguments, which may or may not have 110 | # been called as keywords. 111 | positional_args = original_args[:len(args)] 112 | 113 | all_args = chain(zip(positional_args, args), kwd.items()) 114 | all_args = ["{0}={1}".format(name, 115 | _replace_array_with_placeholder(val)) 116 | for name, val in all_args] 117 | log_val = ", ".join(all_args) 118 | log_val = log_val.replace("\n", "") 119 | meta_dict = {key: log_val} 120 | else: 121 | meta_dict = {} 122 | 123 | for k, v in meta_dict.items(): 124 | _insert_in_metadata_fits_safe(result, k, v) 125 | return result 126 | 127 | return wrapper 128 | 129 | 130 | def _metadata_to_dict(arg): 131 | if isinstance(arg, str): 132 | # add the key, no value 133 | return {arg: None} 134 | elif isinstance(arg, ccdproc.Keyword): 135 | return {arg.name: arg.value} 136 | else: 137 | return arg 138 | 139 | 140 | def _replace_array_with_placeholder(value): 141 | return_type_not_value = False 142 | if isinstance(value, u.Quantity): 143 | return_type_not_value = not value.isscalar 144 | elif isinstance(value, (NDData, np.ndarray)): 145 | try: 146 | length = len(value) 147 | except TypeError: 148 | # Value has no length... 
149 | try: 150 | # ...but if it is NDData its .data will have a length 151 | length = len(value.data) 152 | except TypeError: 153 | # No idea what this data is, assume length is not 1 154 | length = 42 155 | return_type_not_value = length > 1 156 | 157 | if return_type_not_value: 158 | return "<{0}>".format(value.__class__.__name__) 159 | else: 160 | return value 161 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 3 | # 4 | # Astropy documentation build configuration file. 5 | # 6 | # This file is execfile()d with the current directory set to its containing dir. 7 | # 8 | # Note that not all possible configuration values are present in this file. 9 | # 10 | # All configuration values have a default. Some values are defined in 11 | # the global Astropy configuration which is loaded here before anything else. 12 | # See astropy.sphinx.conf for which values are set there. 13 | 14 | # If extensions (or modules to document with autodoc) are in another directory, 15 | # add these directories to sys.path here. If the directory is relative to the 16 | # documentation root, use os.path.abspath to make it absolute, like shown here. 17 | # sys.path.insert(0, os.path.abspath('..')) 18 | # IMPORTANT: the above commented section was generated by sphinx-quickstart, but 19 | # is *NOT* appropriate for astropy or Astropy affiliated packages. It is left 20 | # commented out with this explanation to make it clear why this should not be 21 | # done. If the sys.path entry above is added, when the astropy.sphinx.conf 22 | # import occurs, it will import the *source* version of astropy instead of the 23 | # version installed (if invoked as "make html" or directly with sphinx), or the 24 | # version in the build directory (if "python setup.py build_sphinx" is used). 
25 | # Thus, any C-extensions that are needed to build the documentation will *not* 26 | # be accessible, and the documentation will not build correctly. 27 | 28 | import datetime 29 | import os 30 | import sys 31 | 32 | try: 33 | from sphinx_astropy.conf.v1 import * # noqa 34 | except ImportError: 35 | print('ERROR: the documentation requires the sphinx-astropy package to be installed') 36 | sys.exit(1) 37 | 38 | # Get configuration information from setup.cfg 39 | try: 40 | from ConfigParser import ConfigParser 41 | except ImportError: 42 | from configparser import ConfigParser 43 | conf = ConfigParser() 44 | 45 | conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')]) 46 | setup_cfg = dict(conf.items('metadata')) 47 | 48 | # -- General configuration ---------------------------------------------------- 49 | 50 | # By default, highlight as Python 3. 51 | highlight_language = 'python3' 52 | 53 | # If your documentation needs a minimal Sphinx version, state it here. 54 | #needs_sphinx = '1.2' 55 | 56 | # To perform a Sphinx version check that needs to be more specific than 57 | # major.minor, call `check_sphinx_version("x.y.z")` here. 58 | # check_sphinx_version("1.2.1") 59 | 60 | # List of patterns, relative to source directory, that match files and 61 | # directories to ignore when looking for source files. 62 | exclude_patterns.append('_templates') 63 | 64 | # This is added to the end of RST files - a good place to put substitutions to 65 | # be used globally. 
rst_epilog += """
"""

# -- Project information ------------------------------------------------------

# This does not *have* to match the package name, but typically does
project = setup_cfg['name']
author = setup_cfg['author']
copyright = '{0}, {1}'.format(
    datetime.datetime.now().year, setup_cfg['author'])

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
__import__(project)
package = sys.modules[project]

ver = package.__version__
# Short X.Y version for |version|; |release| carries the full string.
# The previous expression, ``'.'.join(ver.split('.'))[:5]``, re-joined the
# full version unchanged and then truncated it to 5 *characters*, which
# mangles versions like '2.10.0' (-> '2.10.'). Slice the version
# *components* instead, as the astropy package template does.
version = '.'.join(ver.split('.')[:2])
release = ver

# -- Options for HTML output --------------------------------------------------

# A NOTE ON HTML THEMES
# The global astropy configuration uses a custom theme, 'bootstrap-astropy',
# which is installed along with astropy. A different theme can be used or
# the options for this theme can be modified by overriding some of the
# variables set in the global configuration. The variables set in the
# global configuration are listed below, commented out.


# Add any paths that contain custom themes here, relative to this directory.
# To use a different custom theme, add the directory containing the theme.
#html_theme_path = []

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes. To override the custom theme, set this to the
# name of a builtin theme or the name of a custom theme in html_theme_path.
#html_theme = 'bootstrap-ccdproc'


html_theme_options = {
    'logotext1': 'ccd',  # white,  semi-bold
    'logotext2': 'proc',  # orange, light
    'logotext3': ':docs'  # white,  light
}


# Custom sidebar templates, maps document names to template names.
115 | #html_sidebars = {} 116 | 117 | # The name of an image file (relative to this directory) to place at the top 118 | # of the sidebar. 119 | #html_logo = '' 120 | 121 | # The name of an image file (within the static path) to use as favicon of the 122 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 123 | # pixels large. 124 | #html_favicon = '' 125 | from os.path import join 126 | html_favicon = join('_static', 'ccd_proc.ico') 127 | 128 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 129 | # using the given strftime format. 130 | #html_last_updated_fmt = '' 131 | 132 | # The name for this set of Sphinx documents. If None, it defaults to 133 | # " v documentation". 134 | html_title = '{0} v{1}'.format(project, release) 135 | 136 | # Output file base name for HTML help builder. 137 | htmlhelp_basename = project + 'doc' 138 | 139 | # Static files to copy after template files 140 | html_static_path = ['_static'] 141 | html_style = 'ccdproc.css' 142 | 143 | # -- Options for LaTeX output ------------------------------------------------- 144 | 145 | # Grouping the document tree into LaTeX files. List of tuples 146 | # (source start file, target name, title, author, documentclass [howto/manual]). 147 | latex_documents = [('index', project + '.tex', project + u' Documentation', 148 | author, 'manual')] 149 | 150 | 151 | # -- Options for manual page output ------------------------------------------- 152 | 153 | # One entry per manual page. List of tuples 154 | # (source start file, name, description, authors, manual section). 
155 | man_pages = [('index', project.lower(), project + u' Documentation', 156 | [author], 1)] 157 | 158 | 159 | # -- Options for the edit_on_github extension --------------------------------- 160 | 161 | if eval(setup_cfg.get('edit_on_github')): 162 | extensions += ['sphinx_astropy.ext.edit_on_github'] 163 | 164 | versionmod = __import__(setup_cfg['name'] + '.version') 165 | edit_on_github_project = setup_cfg['github_project'] 166 | if versionmod.version.release: 167 | edit_on_github_branch = "v" + versionmod.version.version 168 | else: 169 | edit_on_github_branch = "master" 170 | 171 | edit_on_github_source_root = "" 172 | edit_on_github_doc_root = "docs" 173 | 174 | # -- Resolving issue number to links in changelog ----------------------------- 175 | github_issues_url = 'https://github.com/astropy/ccdproc/issues/' 176 | 177 | # -- Turn on nitpicky mode for sphinx (to warn about references not found) ---- 178 | # 179 | nitpicky = True 180 | # nitpick_ignore = [] 181 | # 182 | # for line in open('nitpick-exceptions'): 183 | # if line.strip() == "" or line.startswith("#"): 184 | # continue 185 | # dtype, target = line.split(None, 1) 186 | # target = target.strip() 187 | # nitpick_ignore.append((dtype, six.u(target))) 188 | -------------------------------------------------------------------------------- /ccdproc/tests/run_for_memory_profile.py: -------------------------------------------------------------------------------- 1 | from argparse import ArgumentParser 2 | from tempfile import TemporaryDirectory 3 | from pathlib import Path 4 | import sys 5 | import gc 6 | 7 | import psutil 8 | from memory_profiler import memory_usage 9 | 10 | import numpy as np 11 | from astropy.io import fits 12 | from astropy.stats import median_absolute_deviation 13 | from astropy.nddata import CCDData 14 | 15 | # This bit of hackery ensures that we can see ccdproc from within 16 | # the test suite 17 | sys.path.append(str(Path().cwd())) 18 | from ccdproc import combine, 
ImageFileCollection

try:
    from ccdproc.combiner import _calculate_size_of_image
except ImportError:
    def _calculate_size_of_image(ccd,
                                 combine_uncertainty_function):
        """
        Return the approximate in-memory size, in bytes, of one image.

        Counts the data array plus, when present, the uncertainty array,
        the mask and any flags attached to ``ccd``.

        Parameters
        ----------
        ccd : `~astropy.nddata.CCDData`
            Image whose memory footprint is being estimated.
        combine_uncertainty_function : callable or None
            Uncertainty function that will be passed to ``combine``. When it
            is not None, combining will create an uncertainty even if the
            originals had none, so a placeholder is attached here to account
            for that extra memory.
        """
        # If uncertainty_func is given for combine this will create an
        # uncertainty even if the originals did not have one. In that case
        # we need to create an empty placeholder.
        if ccd.uncertainty is None and combine_uncertainty_function is not None:
            # Import locally: StdDevUncertainty is not imported at module
            # level, so the bare name previously raised NameError whenever
            # this branch actually ran.
            from astropy.nddata import StdDevUncertainty
            ccd.uncertainty = StdDevUncertainty(np.zeros(ccd.data.shape))

        size_of_an_img = ccd.data.nbytes
        try:
            size_of_an_img += ccd.uncertainty.array.nbytes
        # In case uncertainty is None it has no "array" and in case the
        # "array" is not a numpy array:
        except AttributeError:
            pass
        # Mask is enforced to be a numpy.array across astropy versions
        if ccd.mask is not None:
            size_of_an_img += ccd.mask.nbytes
        # flags is not necessarily a numpy array so do not fail with an
        # AttributeError in case something was set!
        # TODO: Flags are not taken into account in Combiner. This number is
        # added nevertheless for future compatibility.
        try:
            size_of_an_img += ccd.flags.nbytes
        except AttributeError:
            pass

        return size_of_an_img


# Do not combine these into one statement. When all references are lost
# to a TemporaryDirectory the directory is automatically deleted. _TMPDIR
# creates a reference that will stick around.
def generate_fits_files(n_images, size=None, seed=1523):
    """
    Write ``n_images`` simulated FITS images into the module temp directory.

    Each image is Gaussian noise with a set of outlier pixels injected so
    that clipping during combination has something to reject.  Files are
    named ``test-combine-000.fits`` and so on, and carry the ``for_prof``
    and ``bunit`` header keywords the profiling helpers filter on.
    """
    shape = (2024, 2031) if size is None else (size, size)

    # Seed once so repeated profiling runs operate on identical data.
    np.random.seed(seed)

    name_template = 'test-combine-{num:03d}.fits'

    for index in range(n_images):
        image = np.random.normal(size=shape)
        # Inject outlying pixels so sigma/extrema clipping has work to do.
        n_outliers = 50000
        rows = np.random.randint(0, high=shape[0] - 1, size=n_outliers)
        cols = np.random.randint(0, high=shape[1] - 1, size=n_outliers)
        signs = np.random.choice([-1, 1], size=n_outliers)
        image[rows, cols] = signs * (10 + np.random.rand(n_outliers))
        hdu = fits.PrimaryHDU(data=np.asarray(image, dtype='float32'))
        hdu.header['for_prof'] = 'yes'
        hdu.header['bunit'] = 'adu'
        destination = TMPPATH.resolve() / name_template.format(num=index)
        hdu.writeto(destination, overwrite=True)
def run_memory_profile(n_files, sampling_interval, size=None, sigma_clip=False,
                       combine_method=None, memory_limit=None):
    """
    Combine a set of files while sampling the memory use of this process.

    Parameters
    ----------

    n_files : int
        Number of files to combine.  Must be positive.

    sampling_interval : float
        Time, in seconds, between memory samples.

    size : int, optional
        Size of one side of the image (the image is always square).

    sigma_clip : bool, optional
        If true, sigma clip the data before combining.

    combine_method : str, optional
        Should be one of the combine methods accepted by
        ccdproc.combine

    memory_limit : int, optional
        Cap on memory use during image combination.

    Returns
    -------
    mem_use : list of float
        Memory samples in MB, relative to the pre-run baseline.

    expected_img_size : int
        Estimated in-memory size, in bytes, of a single image.
    """
    # Do a little input validation.
    # BUG FIX: the error message previously referred to a nonexistent
    # argument "n"; the parameter is n_files.
    if n_files <= 0:
        raise ValueError("Argument 'n_files' must be a positive integer")

    proc = psutil.Process()

    print('Process ID is: ', proc.pid, flush=True)
    # Select only the files generate_fits_files tagged with for_prof='yes'.
    ic = ImageFileCollection(str(TMPPATH))
    files = ic.files_filtered(for_prof='yes', include_path=True)

    kwargs = {'method': combine_method}

    if sigma_clip:
        kwargs.update(
            {'sigma_clip': True,
             'sigma_clip_low_thresh': 5,
             'sigma_clip_high_thresh': 5,
             'sigma_clip_func': np.ma.median,
             'sigma_clip_dev_func': median_absolute_deviation}
        )

    ccd = CCDData.read(files[0])
    expected_img_size = _calculate_size_of_image(ccd, None)

    if memory_limit:
        kwargs['mem_limit'] = memory_limit

    # Sample memory before combining so results can be reported relative
    # to this baseline.
    pre_mem_use = memory_usage(-1, interval=sampling_interval, timeout=1)
    baseline = np.mean(pre_mem_use)
    print('Subtracting baseline memory before profile: {}'.format(baseline))
    mem_use = memory_usage((combine, (files,), kwargs),
                           interval=sampling_interval, timeout=None)
    mem_use = [m - baseline for m in mem_use]
    return mem_use, expected_img_size
' 165 | 'The central function is the median, the ' 166 | 'deviation is the median_absolute_deviation.') 167 | parser.add_argument('--sampling-freq', type=float, default=0.05, 168 | help='Time, in seconds, between memory samples.') 169 | parser.add_argument('--frequent-gc', action='store_true', 170 | help='If set, perform garbage collection ' 171 | 'much more frequently than the default.') 172 | args = parser.parse_args() 173 | 174 | if args.frequent_gc: 175 | gc.set_threshold(10, 10, 10) 176 | 177 | print("Garbage collection thresholds: ", gc.get_threshold()) 178 | 179 | mem_use = run_with_limit(args.number, args.sampling_freq, 180 | size=args.size, 181 | sigma_clip=args.sigma_clip, 182 | combine_method=args.combine_method, 183 | memory_limit=args.memory_limit) 184 | print('Max memory usage (MB): ', np.max(mem_use)) 185 | print('Baseline memory usage (MB): ', mem_use[0]) 186 | -------------------------------------------------------------------------------- /docs/ccddata.rst: -------------------------------------------------------------------------------- 1 | .. _ccddata: 2 | 3 | Using the ``CCDData`` image class: I/O, properties and arithmetic 4 | ================================================================= 5 | 6 | Input and output 7 | ---------------- 8 | 9 | Getting data in 10 | +++++++++++++++ 11 | 12 | The tools in `ccdproc` accept only `~astropy.nddata.CCDData` objects, a 13 | subclass of `~astropy.nddata.NDData`. 14 | 15 | Creating a `~astropy.nddata.CCDData` object from any array-like data is easy: 16 | 17 | >>> import numpy as np 18 | >>> from astropy.nddata import CCDData 19 | >>> import ccdproc 20 | >>> ccd = CCDData(np.arange(10), unit="adu") 21 | 22 | Note that behind the scenes, `~astropy.nddata.NDData` creates references to 23 | (not copies of) your data when possible, so modifying the data in ``ccd`` will 24 | modify the underlying data. 25 | 26 | You are **required** to provide a unit for your data. 
The most frequently used 27 | units for these objects are likely to be ``adu``, ``photon`` and ``electron``, which 28 | can be set either by providing the string name of the unit (as in the example 29 | above) or from unit objects: 30 | 31 | >>> from astropy import units as u 32 | >>> ccd_photon = CCDData([1, 2, 3], unit=u.photon) 33 | >>> ccd_electron = CCDData([1, 2, 3], unit="electron") 34 | 35 | If you prefer *not* to use the unit functionality then use the special unit 36 | ``u.dimensionless_unscaled`` when you create your `~astropy.nddata.CCDData` 37 | images: 38 | 39 | >>> ccd_unitless = CCDData(np.zeros((10, 10)), 40 | ... unit=u.dimensionless_unscaled) 41 | 42 | A `~astropy.nddata.CCDData` object can also be initialized from a FITS file: 43 | 44 | >>> ccd = CCDData.read('my_file.fits', unit="adu") # doctest: +SKIP 45 | 46 | If there is a unit in the FITS file (in the ``BUNIT`` keyword), that will be 47 | used, but a unit explicitly provided in ``read`` will override any unit in the 48 | FITS file. 49 | 50 | There is no restriction at all on what the unit can be -- any unit in 51 | `astropy.units` or that you create yourself will work. 52 | 53 | In addition, the user can specify the extension in a FITS file to use: 54 | 55 | >>> ccd = CCDData.read('my_file.fits', hdu=1, unit="adu") # doctest: +SKIP 56 | 57 | If ``hdu`` is not specified, it will assume the data is in the primary 58 | extension. If there is no data in the primary extension, the first extension 59 | with data will be used. 
60 | 61 | Getting data out 62 | ++++++++++++++++ 63 | 64 | A `~astropy.nddata.CCDData` object behaves like a numpy array (masked if the 65 | `~astropy.nddata.CCDData` mask is set) in expressions, and the underlying 66 | data (ignoring any mask) is accessed through ``data`` attribute: 67 | 68 | >>> ccd_masked = CCDData([1, 2, 3], unit="adu", mask=[0, 0, 1]) 69 | >>> res = 2 * np.ones(3) * ccd_masked 70 | >>> res.mask # one return value will be masked 71 | array([False, False, True]...) 72 | >>> 2 * np.ones(3) * ccd_masked.data # doctest: +FLOAT_CMP 73 | array([ 2., 4., 6.]) 74 | 75 | You can force conversion to a numpy array with: 76 | 77 | >>> np.asarray(ccd_masked) 78 | array([1, 2, 3]) 79 | >>> np.ma.array(ccd_masked.data, mask=ccd_masked.mask) # doctest: +SKIP 80 | 81 | A method for converting a `~astropy.nddata.CCDData` object to a FITS HDU list 82 | is also available. It converts the metadata to a FITS header: 83 | 84 | >>> hdulist = ccd_masked.to_hdu() 85 | 86 | You can also write directly to a FITS file: 87 | 88 | >>> ccd_masked.write('my_image.fits') 89 | 90 | Essential properties 91 | -------------------- 92 | 93 | Metadata 94 | ++++++++ 95 | 96 | When initializing from a FITS file, the ``header`` property is initialized using 97 | the header of the FITS file. Metadata is optional, and can be provided by any 98 | dictionary or dict-like object: 99 | 100 | >>> ccd_simple = CCDData(np.arange(10), unit="adu") 101 | >>> my_meta = {'observer': 'Edwin Hubble', 'exposure': 30.0} 102 | >>> ccd_simple.header = my_meta # or use ccd_simple.meta = my_meta 103 | 104 | Whether the metadata is case sensitive or not depends on how it is 105 | initialized. A FITS header, for example, is not case sensitive, but a python 106 | dictionary is. 107 | 108 | Masks and flags 109 | +++++++++++++++ 110 | 111 | Although not required when a `~astropy.nddata.CCDData` image is created you 112 | can also specify a mask and/or flags. 
113 | 114 | A mask is a boolean array the same size as the data in which a value of 115 | ``True`` indicates that a particular pixel should be masked, *i.e.* not be 116 | included in arithmetic operations or aggregation. 117 | 118 | Flags are one or more additional arrays (of any type) whose shape matches the 119 | shape of the data. For more details on setting flags see 120 | `astropy.nddata.NDData`. 121 | 122 | WCS 123 | +++ 124 | 125 | The ``wcs`` attribute of `~astropy.nddata.CCDData` object can be set two ways. 126 | 127 | + If the `~astropy.nddata.CCDData` object is created from a FITS file that has 128 | WCS keywords in the header, the ``wcs`` attribute is set to a 129 | `astropy.wcs.WCS` object using the information in the FITS header. 130 | 131 | + The WCS can also be provided when the `~astropy.nddata.CCDData` object is 132 | constructed with the ``wcs`` argument. 133 | 134 | Either way, the ``wcs`` attribute is kept up to date if the 135 | `~astropy.nddata.CCDData` image is trimmed. 136 | 137 | Uncertainty 138 | +++++++++++ 139 | 140 | Pixel-by-pixel uncertainty can be calculated for you: 141 | 142 | >>> data = np.random.normal(size=(10, 10), loc=1.0, scale=0.1) 143 | >>> ccd = CCDData(data, unit="electron") 144 | >>> ccd_new = ccdproc.create_deviation(ccd, readnoise=5 * u.electron) 145 | 146 | See :ref:`create_deviation` for more details. 147 | 148 | You can also set the uncertainty directly, either by creating a 149 | `~astropy.nddata.StdDevUncertainty` object first: 150 | 151 | >>> from astropy.nddata.nduncertainty import StdDevUncertainty 152 | >>> uncertainty = 0.1 * ccd.data # can be any array whose shape matches the data 153 | >>> my_uncertainty = StdDevUncertainty(uncertainty) 154 | >>> ccd.uncertainty = my_uncertainty 155 | 156 | or by providing a `~numpy.ndarray` with the same shape as the data: 157 | 158 | >>> ccd.uncertainty = 0.1 * ccd.data # doctest: +ELLIPSIS 159 | INFO: array provided for uncertainty; assuming it is a StdDevUncertainty. 
[...] 160 | 161 | In this case the uncertainty is assumed to be 162 | `~astropy.nddata.StdDevUncertainty`. Using `~astropy.nddata.StdDevUncertainty` 163 | is required to enable error propagation in `~astropy.nddata.CCDData` 164 | 165 | If you want access to the underlying uncertainty use its ``.array`` attribute: 166 | 167 | >>> ccd.uncertainty.array # doctest: +ELLIPSIS 168 | array(...) 169 | 170 | Arithmetic with images 171 | ---------------------- 172 | 173 | Methods are provided to perform arithmetic operations with a 174 | `~astropy.nddata.CCDData` image and a number, an astropy 175 | `~astropy.units.Quantity` (a number with units) or another 176 | `~astropy.nddata.CCDData` image. 177 | 178 | Using these methods propagates errors correctly (if the errors are 179 | uncorrelated), take care of any necessary unit conversions, and apply masks 180 | appropriately. Note that the metadata of the result is *not* set if the operation 181 | is between two `~astropy.nddata.CCDData` objects. 182 | 183 | >>> result = ccd.multiply(0.2 * u.adu) 184 | >>> uncertainty_ratio = result.uncertainty.array[0, 0]/ccd.uncertainty.array[0, 0] 185 | >>> round(uncertainty_ratio, 5) # doctest: +FLOAT_CMP 186 | 0.2 187 | >>> result.unit 188 | Unit("adu electron") 189 | 190 | .. note:: 191 | In most cases you should use the functions described in 192 | :ref:`reduction_toolbox` to perform common operations like scaling by gain or 193 | doing dark or sky subtraction. Those functions try to construct a sensible 194 | header for the result and provide a mechanism for logging the action of the 195 | function in the header. 196 | 197 | 198 | The arithmetic operators ``*``, ``/``, ``+`` and ``-`` are *not* overridden. 199 | 200 | .. note:: 201 | If two images have different WCS values, the wcs on the first 202 | `~astropy.nddata.CCDData` object will be used for the resultant object. 
203 | -------------------------------------------------------------------------------- /ccdproc/tests/run_profile.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import gc\n", 10 | "from copy import deepcopy\n", 11 | "\n", 12 | "%matplotlib inline \n", 13 | "from matplotlib import pyplot as plt\n", 14 | "import numpy as np\n", 15 | "\n", 16 | "try:\n", 17 | " from run_for_memory_profile import run_memory_profile, generate_fits_files\n", 18 | "except ImportError:\n", 19 | " raise ImportError('Please install memory_profiler before running this notebook.')\n", 20 | "\n", 21 | "from ccdproc.version import get_git_devstr\n", 22 | "from astropy import __version__ as apy_version" 23 | ] 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": null, 28 | "metadata": {}, 29 | "outputs": [], 30 | "source": [ 31 | "print('Astropy version: ', apy_version)" 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": null, 37 | "metadata": {}, 38 | "outputs": [], 39 | "source": [ 40 | "image_size = 4000 # Square image, so 4000 x 4000\n", 41 | "num_files = 10\n", 42 | "sampling_interval = 0.01 # sec\n", 43 | "memory_limit = 1000000000 # bytes, roughly 1GB\n", 44 | "\n", 45 | "commit = get_git_devstr(sha=True)[:7]\n", 46 | "print(commit)" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": null, 52 | "metadata": {}, 53 | "outputs": [], 54 | "source": [ 55 | "generate_fits_files(num_files, size=image_size)" 56 | ] 57 | }, 58 | { 59 | "cell_type": "code", 60 | "execution_count": null, 61 | "metadata": {}, 62 | "outputs": [], 63 | "source": [ 64 | "runs = {\n", 65 | " 'average': {\n", 66 | " 'times': [],\n", 67 | " 'memory': [],\n", 68 | " 'image_size': 0.\n", 69 | " },\n", 70 | " 'median': {\n", 71 | " 'times': [],\n", 72 | " 'memory': [],\n", 73 | " 'image_size': 
0.\n", 74 | " },\n", 75 | " 'sum': {\n", 76 | " 'times': [],\n", 77 | " 'memory': [],\n", 78 | " 'image_size': 0.\n", 79 | " }\n", 80 | "}\n", 81 | "runs_clip = deepcopy(runs)" 82 | ] 83 | }, 84 | { 85 | "cell_type": "markdown", 86 | "metadata": {}, 87 | "source": [ 88 | "# Seem to need to do one run before the profiling\n", 89 | "\n", 90 | "Every time the first run looks different than the rest, so we run one and throw it out." 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": null, 96 | "metadata": {}, 97 | "outputs": [], 98 | "source": [ 99 | "_, _ = run_memory_profile(num_files, sampling_interval, size=image_size, \n", 100 | " memory_limit=memory_limit, combine_method='average')" 101 | ] 102 | }, 103 | { 104 | "cell_type": "markdown", 105 | "metadata": {}, 106 | "source": [ 107 | "# Memory profile without sigma clipping" 108 | ] 109 | }, 110 | { 111 | "cell_type": "code", 112 | "execution_count": null, 113 | "metadata": {}, 114 | "outputs": [], 115 | "source": [ 116 | "n_repetitions = 4" 117 | ] 118 | }, 119 | { 120 | "cell_type": "code", 121 | "execution_count": null, 122 | "metadata": {}, 123 | "outputs": [], 124 | "source": [ 125 | "def run_them(runs, clipping=False):\n", 126 | " for combine_method in runs.keys():\n", 127 | " for _ in range(n_repetitions):\n", 128 | " mem_use, img_size = run_memory_profile(num_files, sampling_interval, size=image_size, \n", 129 | " memory_limit=memory_limit, combine_method=combine_method,\n", 130 | " sigma_clip=clipping)\n", 131 | " gc.collect()\n", 132 | " runs[combine_method]['times'].append(np.arange(len(mem_use)) * sampling_interval)\n", 133 | " runs[combine_method]['memory'].append(mem_use)\n", 134 | " runs[combine_method]['image_size'] = img_size\n", 135 | " runs[combine_method]['memory_limit'] = memory_limit\n", 136 | " runs[combine_method]['clipping'] = clipping" 137 | ] 138 | }, 139 | { 140 | "cell_type": "code", 141 | "execution_count": null, 142 | "metadata": {}, 143 | "outputs": [], 144 | 
"source": [ 145 | "run_them(runs)" 146 | ] 147 | }, 148 | { 149 | "cell_type": "code", 150 | "execution_count": null, 151 | "metadata": {}, 152 | "outputs": [], 153 | "source": [ 154 | "styles = ['solid', 'dashed', 'dotted']" 155 | ] 156 | }, 157 | { 158 | "cell_type": "code", 159 | "execution_count": null, 160 | "metadata": {}, 161 | "outputs": [], 162 | "source": [ 163 | "plt.figure(figsize=(20, 10))\n", 164 | "\n", 165 | "for idx, method in enumerate(runs.keys()):\n", 166 | " style = styles[idx % len(styles)]\n", 167 | " for i, data in enumerate(zip(runs[method]['times'], runs[method]['memory'])):\n", 168 | " time, mem_use = data \n", 169 | " if i == 0:\n", 170 | " label = 'Memory use in {} combine (repeated runs same style)'.format(method)\n", 171 | " alpha = 1.0\n", 172 | " else:\n", 173 | " label = ''\n", 174 | " alpha = 0.4\n", 175 | " plt.plot(time, mem_use, linestyle=style, label=label, alpha=alpha)\n", 176 | "\n", 177 | "plt.vlines(-40 * sampling_interval, mem_use[0], mem_use[0] + memory_limit/1e6, colors='red', label='Memory use limit')\n", 178 | "plt.vlines(-20 * sampling_interval, mem_use[0], mem_use[0] + runs[method]['image_size']/1e6, label='size of one image')\n", 179 | "\n", 180 | "plt.grid()\n", 181 | "clipped = 'ON' if runs[method]['clipping'] else 'OFF'\n", 182 | "\n", 183 | "plt.title('ccdproc commit {}; {} repetitions per method; sigma_clip {}'.format(commit, n_repetitions, clipped),\n", 184 | " fontsize=20)\n", 185 | "plt.xlabel('Time (sec)', fontsize=20)\n", 186 | "plt.ylabel('Memory use (MB)', fontsize=20)\n", 187 | "\n", 188 | "plt.legend(fontsize=20)\n", 189 | "plt.savefig('commit_{}_reps_{}_clip_{}_memlim_{}GB.png'.format(commit, n_repetitions, clipped, memory_limit/1e9))" 190 | ] 191 | }, 192 | { 193 | "cell_type": "markdown", 194 | "metadata": {}, 195 | "source": [ 196 | "# Memory profile with sigma clipping" 197 | ] 198 | }, 199 | { 200 | "cell_type": "code", 201 | "execution_count": null, 202 | "metadata": {}, 203 | "outputs": [], 
204 | "source": [ 205 | "run_them(runs_clip, clipping=True)" 206 | ] 207 | }, 208 | { 209 | "cell_type": "code", 210 | "execution_count": null, 211 | "metadata": {}, 212 | "outputs": [], 213 | "source": [ 214 | "plt.figure(figsize=(20, 10))\n", 215 | "\n", 216 | "for idx, method in enumerate(runs_clip.keys()):\n", 217 | " style = styles[idx % len(styles)]\n", 218 | " for i, data in enumerate(zip(runs_clip[method]['times'], runs_clip[method]['memory'])):\n", 219 | " time, mem_use = data \n", 220 | " if i == 0:\n", 221 | " label = 'Memory use in {} combine (repeated runs same style)'.format(method)\n", 222 | " alpha = 1.0\n", 223 | " else:\n", 224 | " label = ''\n", 225 | " alpha = 0.4\n", 226 | " plt.plot(time, mem_use, linestyle=style, label=label, alpha=alpha)\n", 227 | "\n", 228 | "plt.vlines(-40 * sampling_interval, mem_use[0], mem_use[0] + memory_limit/1e6, colors='red', label='Memory use limit')\n", 229 | "plt.vlines(-20 * sampling_interval, mem_use[0], mem_use[0] + runs_clip[method]['image_size']/1e6, label='size of one image')\n", 230 | "\n", 231 | "plt.grid()\n", 232 | "clipped = 'ON' if runs_clip[method]['clipping'] else 'OFF'\n", 233 | "\n", 234 | "plt.title('ccdproc commit {}; {} repetitions per method; sigma_clip {}'.format(commit, n_repetitions, clipped),\n", 235 | " fontsize=20)\n", 236 | "plt.xlabel('Time (sec)', fontsize=20)\n", 237 | "plt.ylabel('Memory use (MB)', fontsize=20)\n", 238 | "\n", 239 | "plt.legend(fontsize=20)\n", 240 | "plt.savefig('commit_{}_reps_{}_clip_{}_memlim_{}GB.png'.format(commit, n_repetitions, clipped, memory_limit/1e9))" 241 | ] 242 | } 243 | ], 244 | "metadata": { 245 | "kernelspec": { 246 | "display_name": "Python 3", 247 | "language": "python", 248 | "name": "python3" 249 | }, 250 | "language_info": { 251 | "codemirror_mode": { 252 | "name": "ipython", 253 | "version": 3 254 | }, 255 | "file_extension": ".py", 256 | "mimetype": "text/x-python", 257 | "name": "python", 258 | "nbconvert_exporter": "python", 259 | 
"pygments_lexer": "ipython3", 260 | "version": "3.6.7" 261 | } 262 | }, 263 | "nbformat": 4, 264 | "nbformat_minor": 2 265 | } 266 | -------------------------------------------------------------------------------- /docs/image_management.rst: -------------------------------------------------------------------------------- 1 | .. _image_management: 2 | 3 | Image Management 4 | ================ 5 | 6 | 7 | .. _image_collection: 8 | 9 | Working with a directory of images 10 | ---------------------------------- 11 | 12 | For the sake of argument all of the examples below assume you are working in 13 | a directory that contains FITS images. 14 | 15 | The class :class:`~ccdproc.image_collection.ImageFileCollection` is meant to 16 | make working with a directory of FITS images easier by allowing you select the 17 | files you act on based on the values of FITS keywords in their headers or based 18 | on Unix shell-style filename matching. 19 | 20 | It is initialized with the name of a directory containing FITS images and a 21 | list of FITS keywords you want the 22 | :class:`~ccdproc.image_collection.ImageFileCollection` to be aware of. An 23 | example initialization looks like:: 24 | 25 | >>> from ccdproc import ImageFileCollection 26 | >>> from ccdproc.utils.sample_directory import sample_directory_with_files 27 | >>> keys = ['imagetyp', 'object', 'filter', 'exposure'] 28 | >>> dir = sample_directory_with_files() 29 | >>> ic1 = ImageFileCollection(dir, keywords=keys) # only keep track of keys 30 | 31 | You can use the wildcard ``*`` in place of a list to indicate you want the 32 | collection to use all keywords in the headers:: 33 | 34 | >>> ic_all = ImageFileCollection(dir, keywords='*') 35 | 36 | Normally identification of FITS files is done by looking at the file extension 37 | and including all files with the correct extension. 38 | 39 | If the files are not compressed (e.g. 
not gzipped) then you can force the image 40 | collection to open each file and check from its contents whether it is FITS by 41 | using the ``find_fits_by_reading`` argument:: 42 | 43 | >> ic_from_content = ImageFileCollection(dir, find_fits_by_reading=True) 44 | 45 | You can indicate filename patterns to include or exclude using Unix shell-style 46 | expressions. For example, to include all filenames that begin with ``1d_`` but 47 | not ones that include the word ``bad``, you could do:: 48 | 49 | >>> ic_all = ImageFileCollection(dir, glob_include='1d_*', 50 | ... glob_exclude='*bad*') 51 | 52 | Alternatively, you can create the collection with an explicit list of file names:: 53 | 54 | >>> ic_names = ImageFileCollection(filenames=['a.fits', '/some/path/b.fits.gz']) 55 | 56 | Most of the useful interaction with the image collection is via its 57 | ``.summary`` property, a :class:`~astropy.table.Table` of the value of each keyword for each 58 | file in the collection:: 59 | 60 | >>> ic1.summary.colnames 61 | ['file', 'imagetyp', 'object', 'filter', 'exposure'] 62 | >>> ic_all.summary.colnames # doctest: +SKIP 63 | # long list of keyword names omitted 64 | 65 | Note that the name of the file is automatically added to the table as a 66 | column named ``file``. 67 | 68 | Selecting files 69 | --------------- 70 | 71 | Selecting the files that match a set of criteria, for example all images in 72 | the I band with exposure time less than 60 seconds you could do:: 73 | 74 | >>> matches = (ic1.summary['filter'] == 'R') & (ic1.summary['exposure'] < 15) 75 | >>> my_files = ic1.summary['file'][matches] 76 | 77 | The column ``file`` is added automatically when the image collection is created. 
For simpler selection, when you just want files whose keywords exactly
match particular values, say all R band images with exposure time of 15
seconds, there is a convenience method ``.files_filtered``::
To do this, the images in a collection can be sorted with 118 | the ``sort`` method using the fits header keys in the same way you would sort a 119 | :class:`~astropy.table.Table`:: 120 | 121 | >>> ic1.sort(['exposure', 'imagetyp']) 122 | 123 | Iterating over hdus, headers, data, or ccds 124 | ------------------------------------------- 125 | 126 | Four methods are provided for iterating over the images in the collection, 127 | optionally filtered by keyword values. 128 | 129 | For example, to iterate over all of the I band images with exposure of 130 | 30 seconds, performing some basic operation on the data (very contrived 131 | example):: 132 | 133 | >>> for hdu in ic1.hdus(imagetyp='LiGhT', filter='R', exposure=15): 134 | ... hdu.header['exposure'] 135 | ... new_data = hdu.data - hdu.data.mean() 136 | 15.0 137 | 138 | Note that the names of the arguments to ``hdus`` here are the names of FITS 139 | keywords in the collection and the values are the values of those keywords you 140 | want to select. Note also that string comparisons are not case sensitive. 141 | 142 | The other iterators are ``headers``, ``data``, and ``ccds``. 143 | 144 | All of them have the option to also provide the file name in addition to the 145 | hdu (or header or data):: 146 | 147 | >>> for hdu, fname in ic1.hdus(return_fname=True, 148 | ... imagetyp='LiGhT', filter='R', exposure=15): 149 | ... hdu.header['meansub'] = True 150 | ... hdu.data = hdu.data - hdu.data.mean() 151 | ... hdu.writeto(fname + '.new') 152 | 153 | That last use case, doing something to several files and saving them 154 | somewhere afterwards, is common enough that the iterators provide arguments to 155 | automate it. 156 | 157 | Automatic saving from the iterators 158 | ----------------------------------- 159 | 160 | There are three ways of triggering automatic saving. 161 | 162 | 1. 
One is with the argument ``save_with_name``; it adds the value of the 163 | argument to the file name between the original base name and extension. The 164 | example below has (almost) the same effect of the example above, subtracting 165 | the mean from each image and saving to a new file:: 166 | 167 | >>> for hdu in ic1.hdus(save_with_name='_new', 168 | ... imagetyp='LiGhT', filter='R', exposure=15): 169 | ... hdu.header['meansub'] = True 170 | ... hdu.data = hdu.data - hdu.data.mean() 171 | 172 | It saves, in the ``location`` of the image collection, a new FITS file with 173 | the mean subtracted from the data, with ``_new`` added to the name; as an 174 | example, if one of the files iterated over was ``intput001.fit`` then a new 175 | file, in the same directory, called ``input001_new.fit`` would be created. 176 | 177 | 2. You can also provide the directory to which you want to save the files with 178 | ``save_location``; note that you do not need to actually do anything to the 179 | hdu (or header or data) to cause the copy to be made. The example below copies 180 | all of the I band images with 30 second exposure from the original 181 | location to ``other_dir``:: 182 | 183 | >>> for hdu in ic1.hdus(save_location='other_dir', 184 | ... imagetyp='LiGhT', filter='I', exposure=30): # doctest: +SKIP 185 | ... pass 186 | 187 | This option can be combined with the previous one to also give the files a 188 | new name. 189 | 190 | 3. Finally, if you want to live dangerously, you can overwrite the files in 191 | the same location with the ``overwrite`` argument; use it carefully because it 192 | preserves no backup. The example below replaces each of the I band images 193 | with 30 second exposure with a file that has had the mean subtracted:: 194 | 195 | >>> for hdu in ic1.hdus(overwrite=True, 196 | ... imagetyp='LiGhT', filter='R', exposure=15): # doctest: +SKIP 197 | ... hdu.header['meansub'] = True 198 | ... hdu.data = hdu.data - hdu.data.mean() 199 | 200 | .. 
note:: 201 | This functionality is not currently available on Windows. 202 | -------------------------------------------------------------------------------- /docs/image_combination.rst: -------------------------------------------------------------------------------- 1 | .. _image_combination: 2 | 3 | Combining images and generating masks from clipping 4 | =================================================== 5 | 6 | .. note:: 7 | There are currently two interfaces to image combination. One is through 8 | the `~ccdproc.Combiner` class, the other through the `~ccdproc.combine` 9 | function. They offer *almost* identical capabilities. The primary 10 | difference is that `~ccdproc.combine` allows you to place an upper 11 | limit on the amount of memory used. 12 | 13 | Work to improve the performance of image combination is ongoing. 14 | 15 | 16 | The first step in combining a set of images is creating a 17 | `~ccdproc.Combiner` instance: 18 | 19 | >>> from astropy import units as u 20 | >>> from astropy.nddata import CCDData 21 | >>> from ccdproc import Combiner 22 | >>> import numpy as np 23 | >>> ccd1 = CCDData(np.random.normal(size=(10,10)), 24 | ... unit=u.adu) 25 | >>> ccd2 = ccd1.copy() 26 | >>> ccd3 = ccd1.copy() 27 | >>> combiner = Combiner([ccd1, ccd2, ccd3]) 28 | 29 | The combiner task really combines two things: generation of masks for 30 | individual images via several clipping techniques and combination of images. 31 | 32 | .. _clipping: 33 | 34 | Image masks and clipping 35 | ------------------------ 36 | 37 | There are currently three methods of clipping. None affect the data 38 | directly; instead each constructs a mask that is applied when images are 39 | combined. 40 | 41 | Masking done by clipping operations is combined with the image mask provided 42 | when the `~ccdproc.Combiner` is created. 43 | 44 | Min/max clipping 45 | ++++++++++++++++ 46 | 47 | `~ccdproc.Combiner.minmax_clipping` masks all pixels above or below 48 | user-specified levels. 
For example, to mask all values above the value 49 | ``0.1`` and below the value ``-0.3``: 50 | 51 | >>> combiner.minmax_clipping(min_clip=-0.3, max_clip=0.1) 52 | 53 | Either ``min_clip`` or ``max_clip`` can be omitted. 54 | 55 | Sigma clipping 56 | ++++++++++++++ 57 | 58 | For each pixel of an image in the combiner, 59 | `~ccdproc.combiner.Combiner.sigma_clipping` masks the pixel if it is more than a 60 | user-specified number of deviations from the central value of that pixel in 61 | the list of images. 62 | 63 | The `~ccdproc.combiner.Combiner.sigma_clipping` method is very flexible: you can 64 | specify both the function for calculating the central value and the function 65 | for calculating the deviation. The default is to use the mean (ignoring any 66 | masked pixels) for the central value and the standard deviation (again 67 | ignoring any masked values) for the deviation. 68 | 69 | You can mask pixels more than 5 standard deviations above or 2 standard 70 | deviations below the median with 71 | 72 | >>> combiner.sigma_clipping(low_thresh=2, high_thresh=5, func=np.ma.median) 73 | 74 | .. note:: 75 | Numpy masked median can be very slow in exactly the situation typically 76 | encountered in reducing ccd data: a cube of data in which one dimension 77 | (in this case the number of frames in the combiner) is much smaller than 78 | the number of pixels. 79 | 80 | 81 | Extrema clipping 82 | ++++++++++++++++ 83 | 84 | For each pixel position in the input arrays, the algorithm will mask the 85 | highest ``nhigh`` and lowest ``nlow`` pixel values. The resulting image will be 86 | a combination of ``Nimages-nlow-nhigh`` pixel values instead of the combination 87 | of ``Nimages`` worth of pixel values. 
88 | 89 | You can mask the lowest pixel value and the highest two pixel values with: 90 | 91 | >>> combiner.clip_extrema(nlow=1, nhigh=2) 92 | 93 | 94 | Iterative clipping 95 | ++++++++++++++++++ 96 | 97 | To clip iteratively, continuing the clipping process until no more pixels are 98 | rejected, loop in the code calling the clipping method: 99 | 100 | >>> old_n_masked = 0  # dummy value to make loop execute at least once 101 | >>> new_n_masked = combiner.data_arr.mask.sum() 102 | >>> while (new_n_masked > old_n_masked): 103 | ...     combiner.sigma_clipping(func=np.ma.median) 104 | ...     old_n_masked = new_n_masked 105 | ...     new_n_masked = combiner.data_arr.mask.sum() 106 | 107 | Note that the default values for the high and low thresholds for rejection are 108 | 3 standard deviations. 109 | 110 | Image combination 111 | ----------------- 112 | 113 | Image combination is straightforward; to combine by taking the average, 114 | excluding any pixels masked by clipping: 115 | 116 | >>> combined_average = combiner.average_combine() 117 | 118 | Performing a median combination is also straightforward, 119 | 120 | >>> combined_median = combiner.median_combine()  # can be slow, see below 121 | 122 | 123 | 124 | Combination with image scaling 125 | ++++++++++++++++++++++++++++++ 126 | 127 | In some circumstances it may be convenient to scale all images to some value 128 | before combining them. Do so by setting `~ccdproc.Combiner.scaling`: 129 | 130 | >>> scaling_func = lambda arr: 1/np.ma.average(arr) 131 | >>> combiner.scaling = scaling_func 132 | >>> combined_average_scaled = combiner.average_combine() 133 | 134 | This will normalize each image by its mean before combining (note that the 135 | underlying images are *not* scaled; scaling is only done as part of combining 136 | using `~ccdproc.Combiner.average_combine` or 137 | `~ccdproc.Combiner.median_combine`). 138 | 139 | 140 | .. 
_combination_with_IFC 141 | Image combination using `~ccdproc.ImageFileCollection` 142 | ------------------------------------------------------ 143 | 144 | There are a couple of ways that image combination can be done if you are using 145 | `~ccdproc.ImageFileCollection` to 146 | :ref:`manage a folder of images <image_management>`. 147 | 148 | For this example, a temporary folder with images in it is created: 149 | 150 | >>> from tempfile import mkdtemp 151 | >>> from pathlib import Path 152 | >>> import numpy as np 153 | >>> from astropy.nddata import CCDData 154 | >>> from ccdproc import ImageFileCollection, Combiner, combine 155 | >>> 156 | >>> ccd = CCDData(np.ones([5, 5]), unit='adu') 157 | >>> 158 | >>> # Make a temporary folder as a path object 159 | >>> image_folder = Path(mkdtemp()) 160 | >>> # Put several copies of ccd in the temporary folder 161 | >>> _ = [ccd.write(image_folder / f"ccd-{i}.fits") for i in range(3)] 162 | >>> ifc = ImageFileCollection(image_folder) 163 | 164 | To combine images using the `~ccdproc.Combiner` class you can use the ``ccds`` 165 | method of the `~ccdproc.ImageFileCollection`: 166 | 167 | >>> c = Combiner(ifc.ccds()) 168 | >>> avg_combined = c.average_combine() 169 | 170 | There are two ways to combine images using the `~ccdproc.combine` function. If the 171 | images are large enough to combine in memory, then use the file names as the argument to `~ccdproc.combine`, like this: 172 | 173 | >>> avg_combo_mem_lim = combine(ifc.files_filtered(include_path=True), 174 | ...                             mem_limit=1e9) 175 | 176 | If memory use is not an issue, then the ``ccds`` method can be used here too: 177 | 178 | >>> avg_combo = combine(ifc.ccds()) 179 | 180 | 181 | 182 | .. _reprojection: 183 | 184 | Combination with image transformation and alignment 185 | --------------------------------------------------- 186 | 187 | .. 
note:: 188 | 189 | **Flux conservation** Whether flux is conserved in performing the 190 | reprojection depends on the method you use for reprojecting and the 191 | extent to which pixel area varies across the image. 192 | `~ccdproc.wcs_project` rescales counts by the ratio of pixel area 193 | *of the pixel indicated by the keywords* ``CRPIX`` of the input and 194 | output images. 195 | 196 | The reprojection methods available are described in detail in the 197 | documentation for the `reproject project`_; consult those 198 | documents for details. 199 | 200 | You should carefully check whether flux conservation provided in CCDPROC 201 | is adequate for your needs. Suggestions for improvement are welcome! 202 | 203 | Align and then combine images based on World Coordinate System (WCS) 204 | information in the image headers in two steps. 205 | 206 | First, reproject each image onto the same footprint using 207 | `~ccdproc.wcs_project`. The example below assumes you have an image with WCS 208 | information and another image (or WCS) onto which you want to project your 209 | images: 210 | 211 | .. doctest-skip:: 212 | 213 | >>> from ccdproc import wcs_project 214 | >>> reprojected_image = wcs_project(input_image, target_wcs) 215 | 216 | Repeat this for each of the images you want to combine, building up a list of 217 | reprojected images: 218 | 219 | .. doctest-skip:: 220 | 221 | >>> reprojected = [] 222 | >>> for img in my_list_of_images: 223 | ... new_image = wcs_project(img, target_wcs) 224 | ... reprojected.append(new_image) 225 | 226 | Then, combine the images as described above for any set of images: 227 | 228 | .. doctest-skip:: 229 | 230 | >>> combiner = Combiner(reprojected) 231 | >>> stacked_image = combiner.average_combine() 232 | 233 | .. 
_reproject project: http://reproject.readthedocs.io/ 234 | -------------------------------------------------------------------------------- /ccdproc/tests/run_with_file_number_limit.py: -------------------------------------------------------------------------------- 1 | from argparse import ArgumentParser 2 | from tempfile import TemporaryDirectory 3 | from pathlib import Path 4 | import mmap 5 | import sys 6 | import gc 7 | 8 | import psutil 9 | 10 | import numpy as np 11 | from astropy.io import fits 12 | 13 | # This bit of hackery ensures that we can see ccdproc from within 14 | # the test suite 15 | sys.path.append(str(Path().cwd())) 16 | from ccdproc import combine 17 | 18 | # Do not combine these into one statement. When all references are lost 19 | # to a TemporaryDirectory the directory is automatically deleted. _TMPDIR 20 | # creates a reference that will stick around. 21 | _TMPDIR = TemporaryDirectory() 22 | TMPPATH = Path(_TMPDIR.name) 23 | 24 | ALLOWED_EXTENSIONS = { 25 | 'fits': 'fits', 26 | 'plain': 'txt' 27 | } 28 | 29 | 30 | def generate_fits_files(number, size=None): 31 | if size is None: 32 | use_size = [250, 250] 33 | else: 34 | int_size = int(size) 35 | use_size = [int_size, int_size] 36 | 37 | base_name = 'test-combine-{num:03d}.' + ALLOWED_EXTENSIONS['fits'] 38 | 39 | for num in range(number): 40 | data = np.zeros(shape=use_size) 41 | hdu = fits.PrimaryHDU(data=data) 42 | hdu.header['bunit'] = 'adu' 43 | name = base_name.format(num=num) 44 | path = TMPPATH / name 45 | hdu.writeto(path, overwrite=True) 46 | 47 | 48 | def generate_plain_files(number): 49 | for i in range(number): 50 | file = TMPPATH / ("{i:03d}.".format(i=i) + ALLOWED_EXTENSIONS['plain']) 51 | file.write_bytes(np.random.random(100)) 52 | 53 | 54 | def open_files_with_open(kind): 55 | """ 56 | Open files with plain open. 57 | """ 58 | # Ensure the file references persist until end of script. Not really 59 | # necessary, but convenient while debugging the script. 
60 | global fds 61 | fds = [] 62 | 63 | paths = TMPPATH.glob('**/*.' + ALLOWED_EXTENSIONS[kind]) 64 | 65 | for p in paths: 66 | fds.append(p.open()) 67 | 68 | 69 | def open_files_as_mmap(kind): 70 | """ 71 | Open files as mmaps. 72 | """ 73 | # Ensure the file references persist until end of script. Not really 74 | # necessary, but convenient while debugging the script. 75 | global fds 76 | fds = [] 77 | 78 | paths = TMPPATH.glob('**/*.' + ALLOWED_EXTENSIONS[kind]) 79 | 80 | for p in paths: 81 | with p.open() as f: 82 | fds.append(mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_COPY)) 83 | 84 | 85 | def open_files_ccdproc_combine_chunk(kind): 86 | """ 87 | Open files indirectly as part of ccdproc.combine, ensuring that the 88 | task is broken into chunks. 89 | """ 90 | global combo 91 | paths = sorted(list(TMPPATH.glob('**/*.' + ALLOWED_EXTENSIONS[kind]))) 92 | # We want to force combine to break the task into chunks even 93 | # if the task really would fit in memory; it is in that case that 94 | # we end up with too many open files. We'll open one file, determine 95 | # the size of the data in bytes, and set the memory limit to that. 96 | # That will mean lots of chunks (however many files there are plus one), 97 | # but lots of chunks is fine. 98 | with fits.open(paths[0]) as hdulist: 99 | array_size = hdulist[0].data.nbytes 100 | 101 | combo = combine(paths, mem_limit=array_size) 102 | 103 | 104 | def open_files_ccdproc_combine_nochunk(kind): 105 | """ 106 | Open files indirectly as part of ccdproc.combine, ensuring that the 107 | task is not broken into chunks. 108 | """ 109 | global combo 110 | paths = sorted(list(TMPPATH.glob('**/*.' + ALLOWED_EXTENSIONS[kind]))) 111 | 112 | # We ensure there are no chunks by setting a memory limit large 113 | # enough to hold everything. 114 | with fits.open(paths[0]) as hdulist: 115 | array_size = hdulist[0].data.nbytes 116 | 117 | # Why 2x the number of files? To make absolutely sure we don't 118 | # end up chunking the job. 
119 | array_size *= 2 * len(paths) 120 | combo = combine(paths) 121 | 122 | 123 | ALLOWED_OPENERS = { 124 | 'open': open_files_with_open, 125 | 'mmap': open_files_as_mmap, 126 | 'combine-chunk': open_files_ccdproc_combine_chunk, 127 | 'combine-nochunk': open_files_ccdproc_combine_nochunk 128 | } 129 | 130 | 131 | def run_with_limit(n, kind='fits', size=None, overhead=6, 132 | open_method='mmap'): 133 | """ 134 | Try opening a bunch of files with a relatively low limit on the number 135 | of open files. 136 | 137 | Parameters 138 | ---------- 139 | 140 | n : int 141 | Limit on number of open files in this function. The number of files 142 | to create is calculated from this to be just below the maximum number 143 | of files controlled by this function that can be opened. 144 | 145 | kind : one of 'fits', 'plain', optional 146 | The type of file to generate. The plain files are intended mainly for 147 | testing this script, while the FITS files are for testing 148 | ccdproc.combine. 149 | 150 | size : int, optional 151 | Size of file to create. If the kind is 'plain; this is the size 152 | of the file, in bytes. If the kind is 'fits', this is the size 153 | of one side of the image (the image is always square). 154 | 155 | overhead : int, optional 156 | Number of open files to assume the OS is using for this process. The 157 | default value is chosen so that this succeeds on MacOS or Linux. 158 | Setting it to a value lower than default should cause a SystemExit 159 | exception to be raised because of too many open files. This is meant 160 | for testing that this script is actually testing something. 161 | 162 | Notes 163 | ----- 164 | 165 | .. warning:: 166 | 167 | You should run this in a subprocess. Running as part of a larger python 168 | process will lower the limit on the number of open files for that 169 | **entire python process** which will almost certainly lead to nasty 170 | side effects. 
171 | """ 172 | # Keep the resource import here so that it is skipped on windows 173 | import resource 174 | 175 | # Do a little input validation 176 | if n <= 0: 177 | raise ValueError("Argument 'n' must be a positive integer") 178 | 179 | if kind not in ALLOWED_EXTENSIONS.keys(): 180 | raise ValueError("Argument 'kind' must be one of " 181 | "{}".format(ALLOWED_EXTENSIONS.keys())) 182 | 183 | # Set the limit on the number of open files to n. The try/except 184 | # is the catch the case where this change would *increase*, rather than 185 | # decrease, the limit. That apparently can only be done by a superuser. 186 | try: 187 | resource.setrlimit(resource.RLIMIT_NOFILE, (n, n)) 188 | except ValueError as e: 189 | if 'not allowed to raise maximum limit' not in str(e): 190 | raise 191 | max_n_this_process = resource.getrlimit(resource.RLIMIT_NOFILE) 192 | raise ValueError('Maximum number of open ' 193 | 'files is {}'.format(max_n_this_process)) 194 | 195 | # The "-1" is to leave a little wiggle room. overhead is based on the 196 | # the number of open files that a process running on linux has open. 197 | # These typically include stdin and stout, and apparently others. 198 | n_files = n - 1 - overhead 199 | 200 | proc = psutil.Process() 201 | 202 | print('Process ID is: ', proc.pid, flush=True) 203 | print("Making {} files".format(n_files)) 204 | if kind == 'plain': 205 | generate_plain_files(n_files) 206 | elif kind == 'fits': 207 | generate_fits_files(n_files, size=size) 208 | 209 | # Print number of open files before we try opening anything for debugging 210 | # purposes. 211 | print("Before opening, files open is {}".format(len(proc.open_files())), 212 | flush=True) 213 | print(" Note well: this number is different than what lsof reports.") 214 | 215 | try: 216 | ALLOWED_OPENERS[open_method](kind) 217 | # fds.append(p.open()) 218 | except OSError as e: 219 | # Capture the error and re-raise as a SystemExit because this is 220 | # run in a subprocess. 
This ensures that the original error message 221 | # is reported back to the calling process; we add on the number of 222 | # open files. 223 | raise SystemExit(str(e) + '; number of open files: ' + 224 | '{}, with target {}'.format(len(proc.open_files()), 225 | n_files)) 226 | else: 227 | print('Opens succeeded, files currently open:', 228 | len(proc.open_files()), 229 | flush=True) 230 | 231 | 232 | if __name__ == '__main__': 233 | parser = ArgumentParser() 234 | parser.add_argument('number', type=int, 235 | help='Limit on number of open files.') 236 | parser.add_argument('--kind', action='store', default='plain', 237 | choices=ALLOWED_EXTENSIONS.keys(), 238 | help='Kind of file to generate for test; ' 239 | 'default is plain') 240 | parser.add_argument('--overhead', type=int, action='store', 241 | help='Number of files to assume the OS is using.', 242 | default=6) 243 | parser.add_argument('--open-by', action='store', default='mmap', 244 | choices=ALLOWED_OPENERS.keys(), 245 | help='How to open the files. Default is mmap') 246 | parser.add_argument('--size', type=int, action='store', 247 | help='Size of one side of image to create. 
' 248 | 'All images are square, so only give ' 249 | 'a single number for the size.') 250 | parser.add_argument('--frequent-gc', action='store_true', 251 | help='If set, perform garbage collection ' 252 | 'much more frequently than the default.') 253 | args = parser.parse_args() 254 | if args.frequent_gc: 255 | gc.set_threshold(10, 10, 10) 256 | print("Garbage collection thresholds: ", gc.get_threshold()) 257 | run_with_limit(args.number, kind=args.kind, overhead=args.overhead, 258 | open_method=args.open_by, size=args.size) 259 | -------------------------------------------------------------------------------- /ccdproc/tests/test_ccdmask.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | 3 | from numpy.testing import assert_array_equal 4 | import numpy as np 5 | 6 | import pytest 7 | 8 | from ccdproc.core import ccdmask 9 | from astropy.nddata import CCDData 10 | 11 | 12 | def test_ccdmask_no_ccddata(): 13 | # Fails when a simple list is given. 
14 | with pytest.raises(ValueError): 15 | ccdmask([[0, 0, 0], [0, 0, 0], [0, 0, 0]]) 16 | 17 | 18 | def test_ccdmask_not_2d(): 19 | # Fails when a CCDData has less than 2 dimensions 20 | with pytest.raises(ValueError): 21 | ccdmask(CCDData(np.ones(3), unit='adu')) 22 | 23 | # Fails when scalar 24 | with pytest.raises(ValueError): 25 | ccdmask(CCDData(np.array(10), unit='adu')) 26 | 27 | # Fails when more than 2d 28 | with pytest.raises(ValueError): 29 | ccdmask(CCDData(np.ones((3, 3, 3)), unit='adu')) 30 | 31 | 32 | def test_ccdmask_pixels(): 33 | flat1 = CCDData(np.array([[ 34 | 20044, 19829, 19936, 20162, 19948, 19965, 19919, 20004, 19951, 35 | 20002, 19926, 20151, 19886, 20014, 19928, 20025, 19921, 19996, 36 | 19912, 20017, 19969, 20103, 20161, 20110, 19977, 19922, 20004, 37 | 19802, 20079, 19981, 20083, 19871], 38 | [20068, 20204, 20085, 20027, 20103, 19866, 20089, 19914, 20160, 39 | 19884, 19956, 20095, 20004, 20075, 19899, 20016, 19995, 20178, 40 | 19963, 20030, 20055, 20005, 20073, 19969, 19958, 20040, 19979, 41 | 19938, 19986, 19957, 20172, 20054], 42 | [20099, 20180, 19912, 20050, 19930, 19930, 20036, 20006, 19833, 43 | 19984, 19879, 19815, 20105, 20011, 19949, 20062, 19837, 20070, 44 | 20047, 19855, 19956, 19928, 19878, 20102, 19940, 20001, 20082, 45 | 20080, 20019, 19991, 19919, 20121], 46 | [20014, 20262, 19953, 20077, 19928, 20271, 19962, 20048, 20011, 47 | 20054, 20112, 19931, 20125, 19899, 19993, 19939, 19916, 19998, 48 | 19921, 19949, 20246, 20160, 19881, 19863, 19874, 19979, 19989, 49 | 19901, 19850, 19931, 20001, 20167], 50 | [20131, 19991, 20073, 19945, 19980, 20021, 19938, 19964, 20002, 51 | 20177, 19888, 19901, 19919, 19977, 20280, 20035, 20045, 19849, 52 | 20169, 20074, 20113, 19993, 19965, 20026, 20018, 19966, 20023, 53 | 19965, 19962, 20082, 20027, 20145], 54 | [20106, 20025, 19846, 19865, 19913, 20046, 19998, 20037, 19986, 55 | 20048, 20005, 19790, 20011, 19985, 19959, 19882, 20085, 19978, 56 | 19881, 19960, 20111, 19936, 19983, 19863, 
19819, 19896, 19968, 57 | 20134, 19824, 19990, 20146, 19886], 58 | [20162, 19997, 19966, 20110, 19822, 19923, 20029, 20129, 19936, 59 | 19882, 20077, 20112, 20040, 20051, 20177, 19763, 20097, 19898, 60 | 19832, 20061, 19919, 20056, 20010, 19929, 20010, 19995, 20124, 61 | 19965, 19922, 19860, 20021, 19989], 62 | [20088, 20104, 19956, 19959, 20018, 19948, 19836, 20107, 19920, 63 | 20117, 19882, 20039, 20206, 20067, 19784, 20087, 20117, 19990, 64 | 20242, 19861, 19923, 19779, 20024, 20024, 19981, 19915, 20017, 65 | 20053, 19932, 20179, 20062, 19908], 66 | [19993, 20047, 20008, 20172, 19977, 20054, 19980, 19952, 20138, 67 | 19940, 19995, 20029, 19888, 20191, 19958, 20007, 19938, 19959, 68 | 19933, 20139, 20069, 19905, 20101, 20086, 19904, 19807, 20131, 69 | 20048, 19927, 19905, 19939, 20030], 70 | [20040, 20051, 19997, 20013, 19942, 20130, 19983, 19603, 19934, 71 | 19944, 19961, 19979, 20164, 19855, 20157, 20010, 20020, 19902, 72 | 20134, 19971, 20228, 19967, 19879, 20022, 19915, 20063, 19768, 73 | 19976, 19860, 20041, 19955, 19984], 74 | [19807, 20066, 19986, 19999, 19975, 20115, 19998, 20056, 20059, 75 | 20016, 19970, 19964, 20053, 19975, 19985, 19973, 20041, 19918, 76 | 19875, 19997, 19954, 19777, 20117, 20248, 20034, 20019, 20018, 77 | 20058, 20027, 20121, 19909, 20094], 78 | [19890, 20018, 20032, 20058, 19909, 19906, 19812, 20206, 19908, 79 | 19767, 20127, 20015, 19959, 20026, 20021, 19964, 19824, 19934, 80 | 20147, 19984, 20026, 20168, 19992, 20175, 20040, 20208, 20077, 81 | 19897, 20037, 19996, 19998, 20019], 82 | [19966, 19897, 20062, 19914, 19780, 20004, 20029, 20140, 20057, 83 | 20134, 20125, 19973, 19894, 19929, 19876, 20135, 19981, 20057, 84 | 20015, 20113, 20107, 20115, 19924, 19987, 19926, 19885, 20013, 85 | 20058, 19950, 20155, 19825, 20092], 86 | [19889, 20046, 20113, 19991, 19829, 20180, 19949, 20011, 20014, 87 | 20123, 19980, 19770, 20086, 20041, 19957, 19949, 20026, 19918, 88 | 19777, 20062, 19862, 20085, 20090, 20122, 19692, 19937, 19897, 89 | 
20018, 19935, 20037, 19946, 19998], 90 | [20001, 19940, 19994, 19835, 19959, 19895, 20017, 20002, 20007, 91 | 19851, 19900, 20044, 20354, 19814, 19869, 20148, 20001, 20143, 92 | 19778, 20146, 19975, 19859, 20008, 20041, 19937, 20072, 20203, 93 | 19778, 20027, 20075, 19877, 19999], 94 | [19753, 19866, 20037, 20149, 20020, 20071, 19955, 20164, 19837, 95 | 19967, 19959, 20163, 20003, 20127, 20065, 20118, 20104, 19839, 96 | 20124, 20057, 19943, 20023, 20138, 19996, 19910, 20048, 20070, 97 | 19833, 19913, 20012, 19897, 19983]]), unit='adu') 98 | flat2 = CCDData(np.array([[ 99 | 20129, 20027, 19945, 20085, 19951, 20015, 20102, 19957, 20100, 100 | 19865, 19878, 20111, 20047, 19882, 19929, 20079, 19937, 19999, 101 | 20109, 19929, 19985, 19970, 19941, 19868, 20191, 20142, 19948, 102 | 20079, 19975, 19949, 19972, 20053], 103 | [20075, 19980, 20035, 20014, 19865, 20058, 20091, 20030, 19931, 104 | 19806, 19990, 19902, 19895, 19789, 20079, 20048, 20040, 19968, 105 | 20049, 19946, 19982, 19865, 19766, 19903, 20025, 19916, 19904, 106 | 20128, 19865, 20103, 19864, 19832], 107 | [20008, 19989, 20032, 19891, 20063, 20061, 20179, 19920, 19960, 108 | 19655, 19897, 19943, 20015, 20123, 20009, 19940, 19876, 19964, 109 | 20097, 19814, 20086, 20096, 20030, 20140, 19903, 19858, 19978, 110 | 19817, 20107, 19893, 19988, 19956], 111 | [20105, 19873, 20003, 19671, 19993, 19981, 20234, 19976, 20079, 112 | 19882, 19982, 19959, 19882, 20103, 20008, 19960, 20084, 20025, 113 | 19864, 19969, 19945, 19979, 19937, 19965, 19981, 19957, 19906, 114 | 19959, 19839, 19679, 19988, 20154], 115 | [20053, 20152, 19858, 20134, 19867, 20027, 20024, 19884, 20015, 116 | 19904, 19992, 20137, 19981, 20147, 19814, 20035, 19992, 19921, 117 | 20007, 20103, 19920, 19889, 20182, 19964, 19859, 20016, 20011, 118 | 20203, 19761, 19954, 20151, 19973], 119 | [20029, 19863, 20217, 19819, 19984, 19950, 19914, 20028, 19980, 120 | 20033, 20016, 19796, 19901, 20027, 20078, 20136, 19995, 19915, 121 | 20014, 19920, 19996, 20216, 
19939, 19967, 19949, 20023, 20024, 122 | 19949, 19949, 19902, 19980, 19895], 123 | [19962, 19872, 19926, 20047, 20136, 19944, 20151, 19956, 19958, 124 | 20054, 19942, 20010, 19972, 19936, 20062, 20259, 20230, 19927, 125 | 20004, 19963, 20095, 19866, 19942, 19958, 20149, 19956, 20000, 126 | 19979, 19949, 19892, 20249, 20050], 127 | [20019, 19999, 19954, 20095, 20045, 20002, 19761, 20187, 20113, 128 | 20048, 20117, 20002, 19938, 19968, 19993, 19995, 20094, 19913, 129 | 19963, 19813, 20040, 19950, 19992, 19958, 20043, 19925, 20036, 130 | 19930, 20057, 20055, 20040, 19937], 131 | [19958, 19984, 19842, 19990, 19985, 19958, 20070, 19850, 20026, 132 | 20047, 20081, 20094, 20048, 20048, 19917, 19893, 19766, 19765, 133 | 20109, 20067, 19905, 19870, 19832, 20019, 19868, 20075, 20132, 134 | 19916, 19944, 19840, 20140, 20117], 135 | [19995, 20122, 19998, 20039, 20125, 19879, 19911, 20010, 19944, 136 | 19994, 19903, 20057, 20021, 20139, 19972, 20026, 19922, 20132, 137 | 19976, 20025, 19948, 20038, 19807, 19809, 20145, 20003, 20090, 138 | 19848, 19884, 19936, 19997, 19944], 139 | [19839, 19990, 20005, 19826, 20070, 19987, 20015, 19835, 20083, 140 | 19908, 19910, 20218, 19960, 19937, 19987, 19808, 19893, 19929, 141 | 20004, 20055, 19973, 19794, 20242, 20082, 20110, 20058, 19876, 142 | 20042, 20064, 19966, 20041, 20015], 143 | [20048, 20203, 19855, 20011, 19888, 19926, 19973, 19893, 19986, 144 | 20152, 20030, 19880, 20012, 19848, 19959, 20002, 20027, 19935, 145 | 19975, 19905, 19932, 20190, 20188, 19903, 20012, 19943, 19954, 146 | 19891, 19947, 19939, 19974, 19808], 147 | [20102, 20041, 20013, 20097, 20101, 19859, 20011, 20144, 19920, 148 | 19880, 20134, 19963, 19980, 20090, 20027, 19822, 20051, 19903, 149 | 19784, 19845, 20014, 19974, 20043, 20141, 19968, 20055, 20066, 150 | 20045, 20182, 20104, 20008, 19999], 151 | [19932, 20023, 20042, 19894, 20070, 20015, 20172, 20024, 19988, 152 | 20181, 20180, 20023, 19978, 19989, 19976, 19870, 20152, 20003, 153 | 19984, 19903, 19904, 19940, 
19990, 19922, 19911, 19976, 19841, 154 | 19946, 20273, 20085, 20142, 20122], 155 | [19959, 20071, 20020, 20037, 20024, 19967, 20044, 20009, 19997, 156 | 20045, 19995, 19831, 20035, 19976, 20049, 19958, 20021, 19887, 157 | 19961, 19928, 19805, 20173, 19928, 19939, 19826, 20096, 20078, 158 | 20100, 19935, 19942, 19969, 19941], 159 | [19876, 20056, 20071, 19886, 19979, 20174, 19978, 20037, 19933, 160 | 20184, 19948, 20034, 19896, 19905, 20138, 19870, 19936, 20085, 161 | 19971, 20063, 19936, 19941, 19928, 19937, 19970, 19931, 20036, 162 | 19965, 19855, 19949, 19965, 19821]]), unit='adu') 163 | 164 | target_mask = np.zeros(flat1.shape, dtype=bool) 165 | 166 | # No bad pixels in this scenario 167 | ratio = flat1.divide(flat2) 168 | mask = ccdmask(ratio, ncsig=9, nlsig=11) 169 | assert mask.shape == ratio.shape 170 | assert_array_equal(mask, target_mask) 171 | 172 | # Check again with different ncsig and nlsig 173 | ratio = flat1.divide(flat2) 174 | mask = ccdmask(ratio, ncsig=11, nlsig=15) 175 | assert mask.shape == ratio.shape 176 | assert_array_equal(mask, target_mask) 177 | 178 | # Add single bad pixel 179 | flat1.data[14][3] = 65535 180 | flat2.data[14][3] = 1 181 | ratio = flat1.divide(flat2) 182 | mask = ccdmask(ratio, ncsig=11, nlsig=15) 183 | target_mask[14][3] = True 184 | assert_array_equal(mask, target_mask) 185 | 186 | # Add single bad column 187 | flat1.data[:, 7] = 65535 188 | flat2.data[:, 7] = 1 189 | ratio = flat1.divide(flat2) 190 | target_mask[:, 7] = True 191 | 192 | mask = ccdmask(ratio, ncsig=11, nlsig=15) 193 | assert_array_equal(mask, target_mask) 194 | 195 | mask = ccdmask(ratio, ncsig=11, nlsig=15, byblocks=True) 196 | assert_array_equal(mask, target_mask) 197 | 198 | mask = ccdmask(ratio, ncsig=11, nlsig=15, findbadcolumns=True) 199 | assert_array_equal(mask, target_mask) 200 | 201 | mask = ccdmask(ratio, ncsig=11, nlsig=15, findbadcolumns=True, 202 | byblocks=True) 203 | assert_array_equal(mask, target_mask) 204 | 205 | # Add bad column with 
gaps 206 | flat1.data[0:8, 2] = 65535 207 | flat1.data[11:, 2] = 65535 208 | flat2.data[0:8, 2] = 1 209 | flat2.data[11:, 2] = 1 210 | ratio = flat1.divide(flat2) 211 | mask = ccdmask(ratio, ncsig=11, nlsig=15, findbadcolumns=False) 212 | target_mask[0:8, 2] = True 213 | target_mask[11:, 2] = True 214 | assert_array_equal(mask, target_mask) 215 | 216 | mask = ccdmask(ratio, ncsig=11, nlsig=15, findbadcolumns=True) 217 | target_mask[:, 2] = True 218 | assert_array_equal(mask, target_mask) 219 | -------------------------------------------------------------------------------- /ccdproc/tests/test_cosmicray.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | 3 | import numpy as np 4 | 5 | from numpy.testing import assert_allclose 6 | import pytest 7 | from astropy.utils import NumpyRNGContext 8 | from astropy.nddata import StdDevUncertainty 9 | from astropy import units as u 10 | 11 | from ccdproc.core import (cosmicray_lacosmic, cosmicray_median, 12 | background_deviation_box, background_deviation_filter) 13 | from ccdproc.tests.pytest_fixtures import ccd_data as ccd_data_func 14 | 15 | 16 | DATA_SCALE = 5.3 17 | NCRAYS = 30 18 | 19 | 20 | def add_cosmicrays(data, scale, threshold, ncrays=NCRAYS): 21 | size = data.shape[0] 22 | with NumpyRNGContext(125): 23 | crrays = np.random.randint(0, size, size=(ncrays, 2)) 24 | # use (threshold + 1) below to make sure cosmic ray is well above the 25 | # threshold no matter what the random number generator returns 26 | crflux = (10 * scale * np.random.random(NCRAYS) + 27 | (threshold + 5) * scale) 28 | for i in range(ncrays): 29 | y, x = crrays[i] 30 | data.data[y, x] = crflux[i] 31 | 32 | 33 | def test_cosmicray_lacosmic(): 34 | ccd_data = ccd_data_func(data_scale=DATA_SCALE) 35 | threshold = 5 36 | add_cosmicrays(ccd_data, DATA_SCALE, threshold, ncrays=NCRAYS) 37 | noise = DATA_SCALE * np.ones_like(ccd_data.data) 38 | data, 
crarr = cosmicray_lacosmic(ccd_data.data, sigclip=5) 39 | 40 | # check the number of cosmic rays detected 41 | # currently commented out while checking on issues 42 | # in astroscrappy 43 | # assert crarr.sum() == NCRAYS 44 | 45 | 46 | def test_cosmicray_lacosmic_ccddata(): 47 | ccd_data = ccd_data_func(data_scale=DATA_SCALE) 48 | threshold = 5 49 | add_cosmicrays(ccd_data, DATA_SCALE, threshold, ncrays=NCRAYS) 50 | noise = DATA_SCALE * np.ones_like(ccd_data.data) 51 | ccd_data.uncertainty = noise 52 | nccd_data = cosmicray_lacosmic(ccd_data, sigclip=5) 53 | 54 | # check the number of cosmic rays detected 55 | # currently commented out while checking on issues 56 | # in astroscrappy 57 | # assert nccd_data.mask.sum() == NCRAYS 58 | 59 | 60 | def test_cosmicray_lacosmic_check_data(): 61 | ccd_data = ccd_data_func(data_scale=DATA_SCALE) 62 | with pytest.raises(TypeError): 63 | noise = DATA_SCALE * np.ones_like(ccd_data.data) 64 | cosmicray_lacosmic(10, noise) 65 | 66 | 67 | @pytest.mark.parametrize('array_input', [True, False]) 68 | @pytest.mark.parametrize('gain_correct_data', [True, False]) 69 | def test_cosmicray_gain_correct(array_input, gain_correct_data): 70 | # Add regression check for #705 and for the new gain_correct 71 | # argument. 72 | # The issue is that cosmicray_lacosmic gain-corrects the 73 | # data and returns that gain corrected data. That is not the 74 | # intent... 75 | ccd_data = ccd_data_func(data_scale=DATA_SCALE) 76 | threshold = 5 77 | add_cosmicrays(ccd_data, DATA_SCALE, threshold, ncrays=NCRAYS) 78 | noise = DATA_SCALE * np.ones_like(ccd_data.data) 79 | ccd_data.uncertainty = noise 80 | # No units here on purpose. 81 | gain = 2.0 82 | # Don't really need to set this (6.5 is the default value) but want to 83 | # make lack of units explicit. 
84 | readnoise = 6.5 85 | if array_input: 86 | new_data, cr_mask = cosmicray_lacosmic(ccd_data.data, 87 | gain=gain, 88 | gain_apply=gain_correct_data) 89 | else: 90 | new_ccd = cosmicray_lacosmic(ccd_data, 91 | gain=gain, 92 | gain_apply=gain_correct_data) 93 | new_data = new_ccd.data 94 | cr_mask = new_ccd.mask 95 | # Fill masked locations with 0 since there is no simple relationship 96 | # between the original value and the corrected value. 97 | orig_data = np.ma.array(ccd_data.data, mask=cr_mask).filled(0) 98 | new_data = np.ma.array(new_data.data, mask=cr_mask).filled(0) 99 | if gain_correct_data: 100 | gain_for_test = gain 101 | else: 102 | gain_for_test = 1.0 103 | 104 | np.testing.assert_allclose(gain_for_test * orig_data, new_data) 105 | 106 | 107 | def test_cosmicray_lacosmic_accepts_quantity_gain(): 108 | ccd_data = ccd_data_func(data_scale=DATA_SCALE) 109 | threshold = 5 110 | add_cosmicrays(ccd_data, DATA_SCALE, threshold, ncrays=NCRAYS) 111 | noise = DATA_SCALE * np.ones_like(ccd_data.data) 112 | ccd_data.uncertainty = noise 113 | # The units below are the point of the test 114 | gain = 2.0 * u.electron / u.adu 115 | 116 | # Since gain and ccd_data have units, the readnoise should too. 
117 | readnoise = 6.5 * u.electron 118 | new_ccd = cosmicray_lacosmic(ccd_data, 119 | gain=gain, 120 | gain_apply=True) 121 | 122 | 123 | def test_cosmicray_lacosmic_accepts_quantity_readnoise(): 124 | ccd_data = ccd_data_func(data_scale=DATA_SCALE) 125 | threshold = 5 126 | add_cosmicrays(ccd_data, DATA_SCALE, threshold, ncrays=NCRAYS) 127 | noise = DATA_SCALE * np.ones_like(ccd_data.data) 128 | ccd_data.uncertainty = noise 129 | gain = 2.0 * u.electron / u.adu 130 | # The units below are the point of this test 131 | readnoise = 6.5 * u.electron 132 | new_ccd = cosmicray_lacosmic(ccd_data, 133 | gain=gain, 134 | gain_apply=True, 135 | readnoise=readnoise) 136 | 137 | 138 | def test_cosmicray_lacosmic_detects_inconsistent_units(): 139 | # This is intended to detect cases like a ccd with units 140 | # of adu, a readnoise in electrons and a gain in adu / electron. 141 | # That is not internally inconsistent. 142 | ccd_data = ccd_data_func(data_scale=DATA_SCALE) 143 | ccd_data.unit = 'adu' 144 | threshold = 5 145 | add_cosmicrays(ccd_data, DATA_SCALE, threshold, ncrays=NCRAYS) 146 | noise = DATA_SCALE * np.ones_like(ccd_data.data) 147 | ccd_data.uncertainty = noise 148 | readnoise = 6.5 * u.electron 149 | 150 | # The units below are deliberately incorrect. 151 | gain = 2.0 * u.adu / u.electron 152 | with pytest.raises(ValueError) as e: 153 | cosmicray_lacosmic(ccd_data, 154 | gain=gain, 155 | gain_apply=True, 156 | readnoise=readnoise) 157 | assert 'Inconsistent units' in str(e.value) 158 | 159 | 160 | def test_cosmicray_lacosmic_warns_on_ccd_in_electrons(recwarn): 161 | # Check that an input ccd in electrons raises a warning. 162 | ccd_data = ccd_data_func(data_scale=DATA_SCALE) 163 | # The unit below is important for the test; this unit on 164 | # input is supposed to raise an error. 
165 | ccd_data.unit = u.electron 166 | threshold = 5 167 | add_cosmicrays(ccd_data, DATA_SCALE, threshold, ncrays=NCRAYS) 168 | noise = DATA_SCALE * np.ones_like(ccd_data.data) 169 | ccd_data.uncertainty = noise 170 | # No units here on purpose. 171 | gain = 2.0 172 | # Don't really need to set this (6.5 is the default value) but want to 173 | # make lack of units explicit. 174 | readnoise = 6.5 175 | new_ccd = cosmicray_lacosmic(ccd_data, 176 | gain=gain, 177 | gain_apply=True, 178 | readnoise=readnoise) 179 | 180 | assert "Image unit is electron" in str(recwarn.pop()) 181 | 182 | 183 | def test_cosmicray_median_check_data(): 184 | with pytest.raises(TypeError): 185 | ndata, crarr = cosmicray_median(10, thresh=5, mbox=11, 186 | error_image=DATA_SCALE) 187 | 188 | 189 | def test_cosmicray_median(): 190 | ccd_data = ccd_data_func(data_scale=DATA_SCALE) 191 | threshold = 5 192 | add_cosmicrays(ccd_data, DATA_SCALE, threshold, ncrays=NCRAYS) 193 | ndata, crarr = cosmicray_median(ccd_data.data, thresh=5, mbox=11, 194 | error_image=DATA_SCALE) 195 | 196 | # check the number of cosmic rays detected 197 | assert crarr.sum() == NCRAYS 198 | 199 | 200 | def test_cosmicray_median_ccddata(): 201 | ccd_data = ccd_data_func(data_scale=DATA_SCALE) 202 | threshold = 5 203 | add_cosmicrays(ccd_data, DATA_SCALE, threshold, ncrays=NCRAYS) 204 | ccd_data.uncertainty = ccd_data.data*0.0+DATA_SCALE 205 | nccd = cosmicray_median(ccd_data, thresh=5, mbox=11, 206 | error_image=None) 207 | 208 | # check the number of cosmic rays detected 209 | assert nccd.mask.sum() == NCRAYS 210 | 211 | 212 | def test_cosmicray_median_masked(): 213 | ccd_data = ccd_data_func(data_scale=DATA_SCALE) 214 | threshold = 5 215 | add_cosmicrays(ccd_data, DATA_SCALE, threshold, ncrays=NCRAYS) 216 | data = np.ma.masked_array(ccd_data.data, (ccd_data.data > -1e6)) 217 | ndata, crarr = cosmicray_median(data, thresh=5, mbox=11, 218 | error_image=DATA_SCALE) 219 | 220 | # check the number of cosmic rays detected 221 | 
assert crarr.sum() == NCRAYS 222 | 223 | 224 | def test_cosmicray_median_background_None(): 225 | ccd_data = ccd_data_func(data_scale=DATA_SCALE) 226 | threshold = 5 227 | add_cosmicrays(ccd_data, DATA_SCALE, threshold, ncrays=NCRAYS) 228 | data, crarr = cosmicray_median(ccd_data.data, thresh=5, mbox=11, 229 | error_image=None) 230 | 231 | # check the number of cosmic rays detected 232 | assert crarr.sum() == NCRAYS 233 | 234 | 235 | def test_cosmicray_median_gbox(): 236 | ccd_data = ccd_data_func(data_scale=DATA_SCALE) 237 | scale = DATA_SCALE # yuck. Maybe use pytest.parametrize? 238 | threshold = 5 239 | add_cosmicrays(ccd_data, scale, threshold, ncrays=NCRAYS) 240 | error = ccd_data.data*0.0+DATA_SCALE 241 | data, crarr = cosmicray_median(ccd_data.data, error_image=error, 242 | thresh=5, mbox=11, rbox=0, gbox=5) 243 | data = np.ma.masked_array(data, crarr) 244 | assert crarr.sum() > NCRAYS 245 | assert abs(data.std() - scale) < 0.1 246 | 247 | 248 | def test_cosmicray_median_rbox(): 249 | ccd_data = ccd_data_func(data_scale=DATA_SCALE) 250 | scale = DATA_SCALE # yuck. Maybe use pytest.parametrize? 
251 | threshold = 5 252 | add_cosmicrays(ccd_data, scale, threshold, ncrays=NCRAYS) 253 | error = ccd_data.data*0.0+DATA_SCALE 254 | data, crarr = cosmicray_median(ccd_data.data, error_image=error, 255 | thresh=5, mbox=11, rbox=21, gbox=5) 256 | assert data[crarr].mean() < ccd_data.data[crarr].mean() 257 | assert crarr.sum() > NCRAYS 258 | 259 | 260 | def test_cosmicray_median_background_deviation(): 261 | ccd_data = ccd_data_func(data_scale=DATA_SCALE) 262 | with pytest.raises(TypeError): 263 | cosmicray_median(ccd_data.data, thresh=5, mbox=11, 264 | error_image='blank') 265 | 266 | 267 | def test_background_deviation_box(): 268 | with NumpyRNGContext(123): 269 | scale = 5.3 270 | cd = np.random.normal(loc=0, size=(100, 100), scale=scale) 271 | bd = background_deviation_box(cd, 25) 272 | assert abs(bd.mean() - scale) < 0.10 273 | 274 | 275 | def test_background_deviation_box_fail(): 276 | with NumpyRNGContext(123): 277 | scale = 5.3 278 | cd = np.random.normal(loc=0, size=(100, 100), scale=scale) 279 | with pytest.raises(ValueError): 280 | background_deviation_box(cd, 0.5) 281 | 282 | 283 | def test_background_deviation_filter(): 284 | with NumpyRNGContext(123): 285 | scale = 5.3 286 | cd = np.random.normal(loc=0, size=(100, 100), scale=scale) 287 | bd = background_deviation_filter(cd, 25) 288 | assert abs(bd.mean() - scale) < 0.10 289 | 290 | 291 | def test_background_deviation_filter_fail(): 292 | with NumpyRNGContext(123): 293 | scale = 5.3 294 | cd = np.random.normal(loc=0, size=(100, 100), scale=scale) 295 | with pytest.raises(ValueError): 296 | background_deviation_filter(cd, 0.5) 297 | -------------------------------------------------------------------------------- /CHANGES.rst: -------------------------------------------------------------------------------- 1 | 2.1.1 (unreleased) 2 | ------------------ 3 | 4 | New Features 5 | ^^^^^^^^^^^^ 6 | 7 | - Improve integration of ``ImageFileCollection`` with image combination 8 | and document that integration 
[#762] 9 | 10 | Other Changes and Additions 11 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 12 | - Add memory_profiler as a test requirement [#739] 13 | 14 | - Updated test suite to use absolute, not relative imports [#735] 15 | 16 | Bug Fixes 17 | ^^^^^^^^^ 18 | 19 | - ``test_image_collection.py`` in the test suite no longer produces 20 | permanent files on disk and cleans up after itself. [#738] 21 | 22 | - Change ``Combiner`` to allow accepting either a list or a generator [#757] 23 | 24 | - ``ImageFileCollection`` now correctly returns an empty collection when 25 | an existing collection is filtered restrictively enough to remove all 26 | files. [#750] 27 | 28 | - Logging now preserves all of the arguments when the keyword argument 29 | names are not used. [#756] 30 | 31 | 2.1.0 (2019-12-24) 32 | ------------------ 33 | 34 | New Features 35 | ^^^^^^^^^^^^ 36 | 37 | Other Changes and Additions 38 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 39 | 40 | - Remove astropy_helpers from the package infrastructure, which also changes 41 | how the tests are run and how the documentation is built. [#717] 42 | 43 | Bug Fixes 44 | ^^^^^^^^^ 45 | 46 | - Update units if gain is applied in ``cosmicray_lacosmic``. [#716, #705] 47 | 48 | 2.0.1 (2019-09-05) 49 | ------------------ 50 | 51 | New Features 52 | ^^^^^^^^^^^^ 53 | 54 | Other Changes and Additions 55 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 56 | 57 | Bug Fixes 58 | ^^^^^^^^^ 59 | 60 | - Move generation of sample directory of images to avoid importing pytest in 61 | user installation. [#699, #700] 62 | 63 | 2.0.0 (2019-09-02) 64 | ------------------ 65 | 66 | New Features 67 | ^^^^^^^^^^^^ 68 | 69 | - Allow initialization of ``ImageFileCollection`` from a list of files with no 70 | location set. [#374, #661, #680] 71 | 72 | - Allow identification of FITS files in ``ImageFileCollection`` based on content 73 | of the files instead of file name extension. 
[#620, #680] 74 | 75 | - Add option to use regular expression matching when filtering items in 76 | ``ImageFileCollection``. [#480, #595, #682] 77 | 78 | - Added an option to disregard negative values passed to ``create_deviation`` 79 | and assume the error is represented by the read noise [#688] 80 | 81 | - Add ``filter`` method to ``ImageFileCollection`` that creates a new 82 | collection by filtering based on header keywords. [#596, #690] 83 | 84 | Other Changes and Additions 85 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 86 | 87 | - Dropped support for Python 2.x and Astropy 1.x. 88 | 89 | - Removed deprecated property ``summary_info`` of ``ImageFileCollection``. 90 | 91 | - Improved handling of large flags in the ``bitfield`` module. [#610, #611] 92 | 93 | - Improved the performance of several ``ImageFileCollection`` methods. [#599] 94 | 95 | - Added auto_logging configuration parameter [#622, #90] 96 | 97 | - Added support for .fz, .bz2, .Z and .zip file formats in ``ImageFileCollection``. [#623, #644] 98 | 99 | - Modified weights function to also accept 1D array in ``Combiner``. [#634, #670] 100 | 101 | - Added warning that ``transform_image`` does not apply the transformation to 102 | the WCS [#684] 103 | 104 | - When creating a new object in ``wcs_transform``, WCS keywords in the header 105 | are removed so that they are only stored in the WCS object [#685] 106 | 107 | - Improved warning for negative values in the array passed to 108 | ``create_deviation`` [#688] 109 | 110 | - Removed support for initializing ``ImageFileCollection`` from a table instead 111 | of files. [#680] 112 | 113 | - More consistent typing of ``ImageFileCollection.summary`` when the collection 114 | is empty. [#601, #680] 115 | 116 | Bug Fixes 117 | ^^^^^^^^^ 118 | 119 | - Function ``median_combine`` now correctly calculates the uncertainty for 120 | masked ``CCDData``. [#608] 121 | 122 | - Function ``combine`` avoids keeping files open unnecessarily.
[#629, #630] 123 | 124 | - Function ``combine`` more accurately estimates memory use 125 | when deciding how to chunk files. [#638, #642] 126 | 127 | - Raise ``ValueError`` error in ``subtract_dark`` for when the errors have 128 | different shapes [#674, #677] 129 | 130 | - Fix problem with column dtypes when initializing ``ImageFileCollection`` from 131 | a list of file names. [#662, #680] 132 | 133 | 1.3.0 (2017-11-1) 134 | ----------------- 135 | 136 | New Features 137 | ^^^^^^^^^^^^ 138 | 139 | - Add representation for ImageFileCollection. [#475, #515] 140 | 141 | - Added ext parameter and property to ImageFileCollection to specify the FITS 142 | extension. [#463] 143 | 144 | - Add keywords.deleter method to ImageFileCollection. [#474] 145 | 146 | - Added ``glob_include`` and ``glob_exclude`` parameter to 147 | ``ImageFileCollection``. [#484] 148 | 149 | - Add ``bitfield_to_boolean_mask`` function to convert a ``bitfield`` to a 150 | boolean mask (following the numpy conventions). [#460] 151 | 152 | - Added ``gain_corrected`` option in ccd_process so that calibration 153 | files do not need to previously been gain corrected. [#491] 154 | 155 | - Add a new ``wcs_relax`` argument to ``CCDData.to_header()`` that is passed 156 | through to the ``WCS`` method of the same name to allow more flexible 157 | handing of headers with SIP distortion. [#501] 158 | 159 | - ``combine`` now accepts ``numpy.ndarray`` as the input ``img_list``. 160 | [#493, #503] 161 | 162 | - Added ``sum`` option in method for ``combime``. [#500, #508] 163 | 164 | - Add ``norm_value`` argument to ``flat_correct`` that allows the normalization 165 | of the flat frame to be manually specified. [#584, #577] 166 | 167 | 168 | Other Changes and Additions 169 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 170 | 171 | - removed ability to set unit of CCDData to None. [#451] 172 | 173 | - deprecated ``summary_info`` property of ``ImageFileCollection`` now raises 174 | a deprecation warning. 
[#486] 175 | 176 | - Logging will include the abbreviation even if the ``meta`` attribute of 177 | the processed ``CCDData`` isn't a ``fits.Header``. [#528] 178 | 179 | - The ``CCDData`` class and the functions ``fits_ccddata_reader`` and 180 | ``fits_ccddata_writer`` will be imported from ``astropy.nddata`` if 181 | astropy >= 2.0 is installed (instead of the one defined in ``ccdproc``). [#528] 182 | 183 | - Building the documentation requires astropy >= 2.0. [#528] 184 | 185 | - When reading a ``CCDData`` from a file the WCS-related keywords are removed 186 | from the header. [#568] 187 | 188 | - The ``info_file`` argument for ``ImageFileCollection`` is now deprecated. 189 | [#585] 190 | 191 | 192 | Bug Fixes 193 | ^^^^^^^^^ 194 | 195 | - ``ImageFileCollection`` now handles Headers with duplicated keywords 196 | (other than ``COMMENT`` and ``HISTORY``) by ignoring all but the first. [#467] 197 | 198 | - The ``ccd`` method of ``ImageFileCollection`` will raise an 199 | ``NotImplementedError`` in case the parameter ``overwrite=True`` or 200 | ``clobber=True`` is used instead of silently ignoring the parameter. [#527] 201 | 202 | - The ``sort`` method of ``ImageFileCollection`` now requires an explicitly 203 | given ``keys`` argument. [#534] 204 | 205 | - Fixed a problem with ``CCDData.read`` when the extension wasn't given and the 206 | primary HDU contained no ``data`` but another HDU did. In that case the header 207 | were not correctly combined. [#541] 208 | 209 | - Suppress errors during WCS creation in CCDData.read(). [#552] 210 | 211 | - The generator methods in ``ImageFileCollection`` now don't leave open file 212 | handles in case the iterator wasn't advanced or an exception was raised 213 | either inside the method itself or during the loop. [#553] 214 | 215 | - Allow non-string columns when filtering an ``ImageFileCollection`` with a 216 | string value. 
[#567] 217 | 218 | 219 | 1.2.0 (2016-12-13) 220 | ------------------ 221 | 222 | ccdproc has now the following additional dependency: 223 | 224 | - scikit-image. 225 | 226 | 227 | New Features 228 | ^^^^^^^^^^^^ 229 | 230 | - Add an optional attribute named ``filenames`` to ``ImageFileCollection``, 231 | so that users can pass a list of FITS files to the collection. [#374, #403] 232 | 233 | - Added ``block_replicate``, ``block_reduce`` and ``block_average`` functions. 234 | [#402] 235 | 236 | - Added ``median_filter`` function. [#420] 237 | 238 | - ``combine`` now takes an additional ``combine_uncertainty_function`` argument 239 | which is passed as ``uncertainty_func`` parameter to 240 | ``Combiner.median_combine`` or ``Combiner.average_combine``. [#416] 241 | 242 | - Added ``ccdmask`` function. [#414, #432] 243 | 244 | 245 | Other Changes and Additions 246 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 247 | 248 | - ccdprocs core functions now explicitly add HIERARCH cards. [#359, #399, #413] 249 | 250 | - ``combine`` now accepts a ``dtype`` argument which is passed to 251 | ``Combiner.__init__``. [#391, #392] 252 | 253 | - Removed ``CaseInsensitiveOrderedDict`` because it is not used in the current 254 | code base. [#428] 255 | 256 | 257 | Bug Fixes 258 | ^^^^^^^^^ 259 | 260 | - The default dtype of the ``combine``-result doesn't depend on the dtype 261 | of the first CCDData anymore. This also corrects the memory consumption 262 | calculation. [#391, #392] 263 | 264 | - ``ccd_process`` now copies the meta of the input when subtracting the 265 | master bias. [#404] 266 | 267 | - Fixed ``combine`` with ``CCDData`` objects using ``StdDevUncertainty`` as 268 | uncertainty. [#416, #424] 269 | 270 | - ``ccds`` generator from ``ImageFileCollection`` now uses the full path to the 271 | file when calling ``fits_ccddata_reader``. 
[#421 #422] 272 | 273 | 1.1.0 (2016-08-01) 274 | ------------------ 275 | 276 | New Features 277 | ^^^^^^^^^^^^ 278 | 279 | - Add an additional combination method, ``clip_extrema``, that drops the highest 280 | and/or lowest pixels in an image stack. [#356, #358] 281 | 282 | Other Changes and Additions 283 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 284 | 285 | - ``cosmicray_lacosmic`` default ``satlevel`` changed from 65536 to 65535. [#347] 286 | 287 | - Auto-identify files with extension ``fts`` as FITS files. [#355, #364] 288 | 289 | - Raise more explicit exception if unit of uncalibrated image and master do 290 | not match in ``subtract_bias`` or ``subtract_dark``. [#361, #366] 291 | 292 | - Updated the ``Combiner`` class so that it could process images with >2 293 | dimensions. [#340, #375] 294 | 295 | Bug Fixes 296 | ^^^^^^^^^ 297 | 298 | - ``Combiner`` creates plain array uncertainties when using``average_combine`` 299 | or ``median_combine``. [#351] 300 | 301 | - ``flat_correct`` does not properly scale uncertainty in the flat. [#345, #363] 302 | 303 | - Error message in weights setter fixed. [#376] 304 | 305 | 306 | 1.0.1 (2016-03-15) 307 | ------------------ 308 | 309 | The 1.0.1 release was a release to fix some minor packaging issues. 310 | 311 | 312 | 1.0.0 (2016-03-15) 313 | ------------------ 314 | 315 | General 316 | ^^^^^^^ 317 | 318 | - ccdproc has now the following requirements: 319 | 320 | - Python 2.7 or 3.4 or later. 321 | - astropy 1.0 or later 322 | - numpy 1.9 or later 323 | - scipy 324 | - astroscrappy 325 | - reproject 326 | 327 | New Features 328 | ^^^^^^^^^^^^ 329 | 330 | - Add a WCS setter for ``CCDData``. [#256] 331 | - Allow user to set the function used for uncertainty calculation in 332 | ``average_combine`` and ``median_combine``. [#258] 333 | - Add a new keyword to ImageFileCollection.files_filtered to return the full 334 | path to a file [#275] 335 | - Added ccd_process for handling multiple steps. 
[#211] 336 | - CCDData.write now writes multi-extension-FITS files. The mask and uncertainty 337 | are saved as extensions if these attributes were set. The name of the 338 | extensions can be altered with the parameters ``hdu_mask`` (default extension 339 | name ``'MASK'``) and ``hdu_uncertainty`` (default ``'UNCERT'``). 340 | CCDData.read can read these files and has the same optional parameters. [#302] 341 | 342 | Other Changes and Additions 343 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 344 | 345 | - Issue warning if there are no FITS images in an ``ImageFileCollection``. [#246] 346 | - The overscan_axis argument in subtract_overscan can now be set to 347 | None, to let subtract_overscan provide a best guess for the axis. [#263] 348 | - Add support for wildcard and reversed FITS style slicing. [#265] 349 | - When reading a FITS file with CCDData.read, if no data exists in the 350 | primary hdu, the resultant header object is a combination of the 351 | header information in the primary hdu and the first hdu with data. [#271] 352 | - Changed cosmicray_lacosmic to use astroscrappy for cleaning cosmic rays. [#272] 353 | - CCDData arithmetic with number/Quantity now preserves any existing WCS. [#278] 354 | - Update astropy_helpers to 1.1.1. [#287] 355 | - Drop support for Python 2.6. [#300] 356 | - The ``add_keyword`` parameter now has a default of ``True``, to be more 357 | explicit. [#310] 358 | - Return name of file instead of full path in ``ImageFileCollection`` 359 | generators. [#315] 360 | 361 | 362 | Bug Fixes 363 | ^^^^^^^^^ 364 | 365 | - Adding/Subtracting a CCDData instance with a Quantity with a different unit 366 | produced wrong results. [#291] 367 | - The uncertainty resulting when combining CCDData will be divided by the 368 | square root of the number of combined pixel [#309] 369 | - Improve documentation for read/write methods on ``CCDData`` [#320] 370 | - Add correct path separator when returning full path from 371 | ``ImageFileCollection.files_filtered``. 
[#325] 372 | 373 | 374 | 0.3.3 (2015-10-24) 375 | ------------------ 376 | 377 | New Features 378 | ^^^^^^^^^^^^ 379 | 380 | - add a ``sort`` method to ImageFileCollection [#274] 381 | 382 | Other Changes and Additions 383 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 384 | 385 | - Opt in to new container-based builds on travis. [#227] 386 | 387 | - Update astropy_helpers to 1.0.5. [#245] 388 | 389 | Bug Fixes 390 | ^^^^^^^^^ 391 | 392 | - Ensure that creating a WCS from a header that contains list-like keywords 393 | (e.g. ``BLANK`` or ``HISTORY``) succeeds. [#229, #231] 394 | 395 | 0.3.2 (never released) 396 | ---------------------- 397 | 398 | There was no 0.3.2 release because of a packaging error. 399 | 400 | 0.3.1 (2015-05-12) 401 | ------------------ 402 | 403 | New Features 404 | ^^^^^^^^^^^^ 405 | 406 | - Add CCDData generator for ImageCollection [#405] 407 | 408 | Other Changes and Additions 409 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 410 | 411 | - Add extensive tests to ensure ``ccdproc`` functions do not modify the input 412 | data. [#208] 413 | 414 | - Remove red-box warning about API stability from docs. [#210] 415 | 416 | - Support astropy 1.0.5, which made changes to ``NDData``. [#242] 417 | 418 | Bug Fixes 419 | ^^^^^^^^^ 420 | 421 | - Make ``subtract_overscan`` act on a copy of the input data. [#206] 422 | 423 | - Overscan subtraction failed on non-square images if the overscan axis was the 424 | first index, ``0``. [#240, #244] 425 | 426 | 0.3.0 (2015-03-17) 427 | ------------------ 428 | 429 | New Features 430 | ^^^^^^^^^^^^ 431 | 432 | - When reading in a FITS file, the extension to be used can be specified. If 433 | it is not and there is no data in the primary extension, the first extension 434 | with data will be used. 435 | 436 | - Set wcs attribute when reading from a FITS file that contains WCS keywords 437 | and write WCS keywords to header when converting to an HDU. 
[#195] 438 | 439 | Other Changes and Additions 440 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 441 | 442 | - Updated CCDData to use the new version of NDDATA in astropy v1.0. This 443 | breaks backward compatibility with earlier versions of astropy. 444 | 445 | Bug Fixes 446 | ^^^^^^^^^ 447 | 448 | - Ensure ``dtype`` of combined images matches the ``dtype`` of the 449 | ``Combiner`` object. [#189] 450 | 451 | 0.2.2 (2014-11-05) 452 | ------------------ 453 | 454 | New Features 455 | ^^^^^^^^^^^^ 456 | 457 | - Add dtype argument to `ccdproc.Combiner` to help control memory use [#178] 458 | 459 | Other Changes and Additions 460 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 461 | - Added Changes to the docs [#183] 462 | 463 | Bug Fixes 464 | ^^^^^^^^^ 465 | 466 | - Allow the unit string "adu" to be upper or lower case in a FITS header [#182] 467 | 468 | 0.2.1 (2014-09-09) 469 | ------------------ 470 | 471 | New Features 472 | ^^^^^^^^^^^^ 473 | 474 | - Add a unit directly from BUNIT if it is available in the FITS header [#169] 475 | 476 | Other Changes and Additions 477 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 478 | 479 | - Relaxed the requirements on what the metadata must be. It can be anything dict-like, e.g. an astropy.io.fits.Header, a python dict, an OrderedDict or some custom object created by the user. [#167] 480 | 481 | Bug Fixes 482 | ^^^^^^^^^ 483 | 484 | - Fixed a new-style formating issue in the logging [#170] 485 | 486 | 487 | 0.2 (2014-07-28) 488 | ---------------- 489 | 490 | - Initial release. 491 | --------------------------------------------------------------------------------