├── astrocut
│ ├── utils
│ │ ├── tests
│ │ │ └── __init__.py
│ │ ├── __init__.py
│ │ ├── logger.py
│ │ └── utils.py
│ ├── tests
│ │ ├── __init__.py
│ │ ├── setup_package.py
│ │ ├── test_image_cutout.py
│ │ ├── data
│ │ │ └── ex_ffi_wcs.txt
│ │ ├── test_utils.py
│ │ ├── test_cube_factory.py
│ │ ├── utils_for_test.py
│ │ ├── test_tess_footprint_cutout.py
│ │ ├── test_cutouts.py
│ │ └── test_cutout_processing.py
│ ├── data
│ │ └── README.rst
│ ├── extern
│ │ └── __init__.py
│ ├── _astropy_init.py
│ ├── exceptions.py
│ ├── conftest.py
│ ├── __init__.py
│ ├── tica_cube_factory.py
│ ├── cutout_factory.py
│ ├── cutout.py
│ └── footprint_cutout.py
├── docs
│ ├── _static
│ │ ├── AstroCut_medium.png
│ │ ├── AstroCut_thumb.png
│ │ ├── astrocut.css
│ │ └── main.css
│ ├── astrocut
│ │ ├── imgs
│ │ │ ├── hapcut_left.png
│ │ │ ├── img_cutout.jpg
│ │ │ ├── hapcut_right.png
│ │ │ ├── hapcut_combined.png
│ │ │ ├── img_cutout_color.png
│ │ │ └── img_cutout_invert.png
│ │ ├── api.rst
│ │ ├── contents.rst
│ │ ├── license.rst
│ │ ├── install.rst
│ │ └── file_formats.rst
│ ├── _templates
│ │ ├── autosummary
│ │ │ ├── base.rst
│ │ │ ├── class.rst
│ │ │ └── module.rst
│ │ └── layout.html
│ ├── index.html
│ ├── Makefile
│ ├── make.bat
│ └── conf.py
├── pyproject.toml
├── .readthedocs.yml
├── .codecov.yml
├── licenses
│ ├── README.rst
│ ├── LICENSE.rst
│ └── TEMPLATE_LICENCE.rst
├── MANIFEST.in
├── .github
│ ├── workflows
│ │ ├── changelog.yml
│ │ ├── pypi-package.yml
│ │ └── ci_workflows.yml
│ └── dependabot.yml
├── .gitignore
├── setup.py
├── setup.cfg
├── tox.ini
├── README.rst
└── CHANGES.rst
/astrocut/utils/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/_static/AstroCut_medium.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/spacetelescope/astrocut/main/docs/_static/AstroCut_medium.png
--------------------------------------------------------------------------------
/docs/_static/AstroCut_thumb.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/spacetelescope/astrocut/main/docs/_static/AstroCut_thumb.png
--------------------------------------------------------------------------------
/docs/astrocut/imgs/hapcut_left.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/spacetelescope/astrocut/main/docs/astrocut/imgs/hapcut_left.png
--------------------------------------------------------------------------------
/docs/astrocut/imgs/img_cutout.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/spacetelescope/astrocut/main/docs/astrocut/imgs/img_cutout.jpg
--------------------------------------------------------------------------------
/docs/astrocut/imgs/hapcut_right.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/spacetelescope/astrocut/main/docs/astrocut/imgs/hapcut_right.png
--------------------------------------------------------------------------------
/docs/astrocut/imgs/hapcut_combined.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/spacetelescope/astrocut/main/docs/astrocut/imgs/hapcut_combined.png
--------------------------------------------------------------------------------
/docs/astrocut/imgs/img_cutout_color.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/spacetelescope/astrocut/main/docs/astrocut/imgs/img_cutout_color.png
--------------------------------------------------------------------------------
/docs/astrocut/imgs/img_cutout_invert.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/spacetelescope/astrocut/main/docs/astrocut/imgs/img_cutout_invert.png
--------------------------------------------------------------------------------
/astrocut/tests/__init__.py:
--------------------------------------------------------------------------------
1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
2 | """
3 | This module contains package tests.
4 | """
5 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools",
3 | "setuptools_scm",
4 | "wheel"]
5 | build-backend = 'setuptools.build_meta'
6 |
--------------------------------------------------------------------------------
/astrocut/utils/__init__.py:
--------------------------------------------------------------------------------
1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
2 |
3 | # This sub-module is intended for common, non-package-specific utility
4 | # functions.
5 |
--------------------------------------------------------------------------------
/docs/astrocut/api.rst:
--------------------------------------------------------------------------------
1 |
2 | ************
3 | Astrocut API
4 | ************
5 |
6 | .. automodapi:: astrocut
7 | :skip: UnsupportedPythonError
8 | :no-inheritance-diagram:
9 | :inherited-members:
10 |
--------------------------------------------------------------------------------
/docs/astrocut/contents.rst:
--------------------------------------------------------------------------------
1 | *************
2 | Site Contents
3 | *************
4 |
5 | .. toctree::
6 | :maxdepth: 2
7 |
8 | Home <../index.html#http://>
9 | install
10 | index
11 | file_formats
12 | api
13 | license
14 |
--------------------------------------------------------------------------------
/docs/astrocut/license.rst:
--------------------------------------------------------------------------------
1 | .. _license:
2 |
3 | *******
4 | License
5 | *******
6 |
7 | AstroCut License
8 | ================
9 |
10 | AstroCut is licensed under a 3-clause BSD style license:
11 |
12 | .. include:: ../../licenses/LICENSE.rst
13 |
--------------------------------------------------------------------------------
/docs/_templates/autosummary/base.rst:
--------------------------------------------------------------------------------
1 | {% extends "autosummary_core/base.rst" %}
2 | {# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core. If you want to modify this template, it is strongly recommended that you still inherit from the astropy template. #}
--------------------------------------------------------------------------------
/docs/_templates/autosummary/class.rst:
--------------------------------------------------------------------------------
1 | {% extends "autosummary_core/class.rst" %}
2 | {# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core. If you want to modify this template, it is strongly recommended that you still inherit from the astropy template. #}
--------------------------------------------------------------------------------
/astrocut/data/README.rst:
--------------------------------------------------------------------------------
1 | Data directory
2 | ==============
3 |
4 | This directory contains data files included with the package source
5 | code distribution. Note that this is intended only for relatively small files
6 | - large files should be externally hosted and downloaded as needed.
7 |
8 |
--------------------------------------------------------------------------------
/docs/_templates/autosummary/module.rst:
--------------------------------------------------------------------------------
1 | {% extends "autosummary_core/module.rst" %}
2 | {# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core. If you want to modify this template, it is strongly recommended that you still inherit from the astropy template. #}
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | build:
4 | os: ubuntu-22.04
5 | tools:
6 | python: "3.11"
7 |
8 | python:
9 | install:
10 | - method: pip
11 | path: .
12 | extra_requirements:
13 | - docs
14 |
15 | formats: []
16 |
17 | sphinx:
18 | # Path to your Sphinx configuration file.
19 | configuration: docs/conf.py
20 |
--------------------------------------------------------------------------------
/astrocut/tests/setup_package.py:
--------------------------------------------------------------------------------
1 | # import os
2 |
3 | # If this package has test data in the tests/data directory, add it to
4 | # the paths here; see the commented example.
5 | paths = ['coveragerc',
6 | # os.path.join('data', '*fits')
7 | ]
8 |
9 |
10 | def get_package_data():
11 | return {
12 | _ASTROPY_PACKAGE_NAME_ + '.tests': paths} # noqa
13 |
--------------------------------------------------------------------------------
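At build time, `_ASTROPY_PACKAGE_NAME_` is substituted by the (legacy) astropy package-template build machinery rather than defined in this module, so for this package the function would effectively return (illustrative only, assuming the substituted name is 'astrocut'):

    # Illustrative rendering, not package code:
    # get_package_data() -> {'astrocut.tests': ['coveragerc']}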
/.codecov.yml:
--------------------------------------------------------------------------------
1 | coverage:
2 | range: 50..90
3 | round: down
4 | precision: 2
5 | status:
6 | project:
7 | # full project
8 | default:
9 | target: 90%
10 | threshold: 5%
11 | base: auto
12 | patch:
13 | # just the lines changed
14 | default:
15 | target: 50%
16 | threshold: 10%
17 | base: auto
18 | only_pulls: false
--------------------------------------------------------------------------------
/astrocut/extern/__init__.py:
--------------------------------------------------------------------------------
1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
2 | """
3 | This package contains Python packages that are bundled with the package but
4 | are external to it, and hence are developed in a separate source tree. Note
5 | that this package is distinct from the /cextern directory of the source code
6 | distribution, as that directory only contains C extension code.
7 | """
8 |
--------------------------------------------------------------------------------
/licenses/README.rst:
--------------------------------------------------------------------------------
1 | Licenses
2 | ========
3 |
4 | This directory holds license and credit information for the package,
5 | works the package is derived from, and/or datasets.
6 |
7 | Ensure that the package licence you pick is in this folder and that it matches
8 | the one mentioned in the top-level README.rst file. If you are using the
9 | pre-rendered version of this template, check for the word 'Other' in the README.
10 |
--------------------------------------------------------------------------------
/astrocut/_astropy_init.py:
--------------------------------------------------------------------------------
1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
2 |
3 | __all__ = ['__version__']
4 |
5 | # this indicates whether or not we are in the package's setup.py
6 | try:
7 | _ASTROPY_SETUP_
8 | except NameError:
9 | import builtins
10 | builtins._ASTROPY_SETUP_ = False
11 |
12 | try:
13 | from .version import version as __version__
14 | except ImportError:
15 | __version__ = ''
16 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include README.rst
2 | include CHANGES.rst
3 | include setup.cfg
4 | include LICENSE.rst
5 | include pyproject.toml
6 |
7 | include astrocut/tests/coveragerc
8 |
9 | recursive-include astrocut *.pyx *.c *.pxd
10 | recursive-include docs *
11 | recursive-include licenses *
12 | recursive-include cextern *
13 | recursive-include scripts *
14 |
15 | prune build
16 | prune docs/_build
17 | prune docs/api
18 |
19 | global-exclude *.pyc *.o
20 |
--------------------------------------------------------------------------------
/astrocut/utils/logger.py:
--------------------------------------------------------------------------------
1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
2 |
3 | """This module configures the astrocut logger."""
4 |
5 | import logging
6 |
7 |
8 | def setup_logger():
9 | """Set up a logger for astrocut"""
10 | log = logging.getLogger(__name__)
11 | log.setLevel(logging.INFO) # default logging level
12 |
13 | # Create a console handler with format
14 | sh = logging.StreamHandler()
15 | formatter = logging.Formatter('%(levelname)s: %(message)s [%(module)s]')
16 | sh.setFormatter(formatter)
17 | log.addHandler(sh)
18 |
19 | return log
20 |
--------------------------------------------------------------------------------
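A minimal usage sketch of the logger configured above, using the package-level `log` that astrocut/__init__.py (shown later in this dump) creates at import time:

    import logging
    from astrocut import log  # created via setup_logger() on package import

    log.info("making a cutout")   # e.g. "INFO: making a cutout [<module>]"
    log.setLevel(logging.DEBUG)   # raise verbosity for debugging

Note that calling setup_logger() a second time would attach another StreamHandler and duplicate every message, which is why the package calls it exactly once at import.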
/docs/_static/astrocut.css:
--------------------------------------------------------------------------------
1 |
2 |
3 | div.wy-side-nav-search{
4 | background: #00617E;
5 | }
6 |
7 | div.wy-side-scroll{
8 | background: #0D4960;
9 | }
10 |
11 | a.reference:hover{
12 | background: rgb(11, 61, 78);
13 | }
14 |
15 | .logo{
16 | width:120px !important;
17 | margin-left:auto !important;
18 | margin-right: auto !important;
19 | padding-bottom: 20px !important;
20 | margin-top: 15px !important;
21 | }
22 |
23 | .intro{
24 | color: aqua;
25 | }
26 |
27 | .heading{
28 | color: #0D4960;
29 | font-size: 25px;
30 | font-weight: 800;
31 | line-height: 30px;
32 |
33 | }
34 |
35 |
--------------------------------------------------------------------------------
/.github/workflows/changelog.yml:
--------------------------------------------------------------------------------
1 | name: Changelog check
2 |
3 | on:
4 | pull_request:
5 | types: [labeled, unlabeled, opened, synchronize, reopened]
6 |
7 | concurrency:
8 | group: ${{ github.workflow }}-${{ github.ref }}
9 | cancel-in-progress: true
10 |
11 | jobs:
12 | changelog:
13 | name: Check changelog entry
14 | runs-on: ubuntu-latest
15 | steps:
16 | - name: Check change log entry
17 | uses: scientific-python/action-check-changelogfile@1fc669db9618167166d5a16c10282044f51805c0 # 0.3
18 | env:
19 | CHANGELOG_FILENAME: CHANGES.rst
20 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
21 | CHECK_MILESTONE: false
--------------------------------------------------------------------------------
/docs/_templates/layout.html:
--------------------------------------------------------------------------------
1 | {% extends "!layout.html" %}
2 | {% block sidebartitle %}
3 |
4 |
5 |
6 |
7 |
8 | {% if theme_display_version %}
9 | {%- set nav_version = version %}
10 | {% if READTHEDOCS and current_version %}
11 | {%- set nav_version = current_version %}
12 | {% endif %}
13 | {% if nav_version %}
14 |
15 | {{ nav_version }}
16 |
17 | {% endif %}
18 | {% endif %}
19 |
20 | {% include "searchbox.html" %}
21 |
22 | {% endblock %}
23 | {% set css_files = css_files + [ "_static/astrocut.css" ] %}
24 |
25 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # To get started with Dependabot version updates, you'll need to specify which
2 | # package ecosystems to update and where the package manifests are located.
3 | # Please see the documentation for all configuration options:
4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
5 |
6 | version: 2
7 | updates:
8 | - package-ecosystem: "github-actions" # See documentation for possible values
9 | directory: ".github/workflows" # Location of package manifests
10 | schedule:
11 | interval: "monthly"
12 | groups:
13 | actions:
14 | patterns:
15 | - "*"
16 | labels:
17 | - "no-changelog-entry-needed"
18 |
--------------------------------------------------------------------------------
/docs/astrocut/install.rst:
--------------------------------------------------------------------------------
1 |
2 | *******************
3 | Installing Astrocut
4 | *******************
5 |
6 | Using pip
7 | =========
8 |
9 | The easiest way to install Astrocut is using pip::
10 |
11 | pip install astrocut
12 |
13 |
14 | From source
15 | ===========
16 |
17 | To install the bleeding edge version from github without downloading,
18 | run the following command::
19 |
20 | pip install git+https://github.com/spacetelescope/astrocut.git
21 |
22 | The latest development version of astrocut can be cloned from github
23 | using this command::
24 |
25 | git clone https://github.com/spacetelescope/astrocut.git
26 |
27 | To install astrocut (from the root of the source tree)::
28 |
29 | pip install .
30 |
31 |
32 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Compiled files
2 | *.py[cod]
3 | *.a
4 | *.o
5 | *.so
6 | __pycache__
7 |
8 | # Ignore .c files by default to avoid including generated code. If you want to
9 | # add a non-generated .c extension, use `git add -f filename.c`.
10 | *.c
11 |
12 | # Other generated files
13 | */version.py
14 | */cython_version.py
15 | htmlcov
16 | .coverage
17 | MANIFEST
18 | .ipynb_checkpoints
19 | pip-wheel-metadata/
20 |
21 | # Sphinx
22 | docs/api
23 | docs/_build
24 |
25 | # Eclipse editor project files
26 | .project
27 | .pydevproject
28 | .settings
29 |
30 | # Pycharm editor project files
31 | .idea
32 |
33 | # Floobits project files
34 | .floo
35 | .flooignore
36 |
37 | # Packages/installer info
38 | *.egg
39 | *.egg-info
40 | *.eggs/
41 | dist
42 | build
43 | eggs
44 | parts
45 | bin
46 | var
47 | sdist
48 | develop-eggs
49 | .installed.cfg
50 | distribute-*.tar.gz
51 |
52 | # Other
53 | .cache
54 | .tox
55 | .*.sw[op]
56 | *~
57 | .project
58 | .pydevproject
59 | .settings
60 | .tmp/
61 |
62 | # Mac OSX
63 | .DS_Store
64 |
--------------------------------------------------------------------------------
/astrocut/exceptions.py:
--------------------------------------------------------------------------------
1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
2 |
3 | """
4 | Custom exceptions used in the astrocut classes
5 | """
6 |
7 | from astropy.utils.exceptions import AstropyWarning
8 |
9 |
10 | class InvalidQueryError(Exception):
11 | """
12 | Errors related to invalid queries.
13 | """
14 | pass
15 |
16 |
17 | class InvalidInputError(Exception):
18 | """
19 | Exception to be issued when user input is incorrect in a
20 | way that prevents the function from running.
21 | """
22 | pass
23 |
24 |
25 | class UnsupportedPythonError(Exception):
26 | """
27 | Exception to be issued when attempting to use astrocut with
28 | an unsupported version of Python.
29 | """
30 | pass
31 |
32 |
33 | class InputWarning(AstropyWarning):
34 | """
35 | Warning to be issued when user input is incorrect in
36 | some way but doesn't prevent the function from running.
37 | """
38 | pass
39 |
40 |
41 | class TypeWarning(AstropyWarning):
42 | """
43 | Warnings to do with data types.
44 | """
45 | pass
46 |
47 |
48 | class DataWarning(AstropyWarning):
49 | """
50 | Warnings to do with data content.
51 | """
52 | pass
53 |
54 |
55 | class ModuleWarning(AstropyWarning):
56 | """
57 | Warnings to do with optional modules.
58 | """
59 | pass
60 |
--------------------------------------------------------------------------------
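These classes split failure modes into hard errors (raised) and AstropyWarning subclasses (warned, then execution continues). A small sketch of the intended pattern; validate_size is a hypothetical helper, not part of astrocut:

    import warnings
    from astrocut.exceptions import InvalidInputError, InputWarning

    def validate_size(size):
        # Hypothetical example helper.
        if size <= 0:
            # Input is unusable: stop before doing any work.
            raise InvalidInputError("Cutout size must be positive.")
        if size > 1000:
            # Input is suspicious but workable: warn and continue.
            warnings.warn("Very large cutout requested; this may be slow.", InputWarning)
        return size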
/licenses/LICENSE.rst:
--------------------------------------------------------------------------------
1 | Copyright (c) 2018, MAST Archive Developers
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without modification,
5 | are permitted provided that the following conditions are met:
6 |
7 | * Redistributions of source code must retain the above copyright notice, this
8 | list of conditions and the following disclaimer.
9 | * Redistributions in binary form must reproduce the above copyright notice, this
10 | list of conditions and the following disclaimer in the documentation and/or
11 | other materials provided with the distribution.
12 | * Neither the name of the Astropy Team nor the names of its contributors may be
13 | used to endorse or promote products derived from this software without
14 | specific prior written permission.
15 |
16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
17 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
20 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
21 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
22 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
23 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 |
--------------------------------------------------------------------------------
/.github/workflows/pypi-package.yml:
--------------------------------------------------------------------------------
1 | name: Building & Publishing
2 | on:
3 | push:
4 | tags:
5 | - "v*"
6 |
7 | jobs:
8 | build:
9 | name: Build distribution 📦
10 | runs-on: ubuntu-latest
11 |
12 | steps:
13 | - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
14 | - name: Set up Python
15 | uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
16 | with:
17 | python-version: "3.x"
18 | - name: Install pypa/build
19 | run: >-
20 | python3 -m
21 | pip install
22 | build
23 | --user
24 | - name: Build a binary wheel and a source tarball
25 | run: python3 -m build
26 | - name: Store the distribution packages
27 | uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
28 | with:
29 | name: python-package-distributions
30 | path: dist/
31 |
32 | publish-to-pypi:
33 | name: >-
34 | Publish Python 🐍 distribution 📦 to PyPI
35 | if: startsWith(github.ref, 'refs/tags/') # only publish to PyPI on tag pushes
36 | needs:
37 | - build
38 | runs-on: ubuntu-latest
39 | environment:
40 | name: pypi
41 | url: https://pypi.org/p/astrocut
42 | permissions:
43 | id-token: write # IMPORTANT: mandatory for trusted publishing
44 |
45 | steps:
46 | - name: Download all the dists
47 | uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
48 | with:
49 | name: python-package-distributions
50 | path: dist/
51 | - name: Publish distribution 📦 to PyPI
52 | uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
53 |
--------------------------------------------------------------------------------
/licenses/TEMPLATE_LICENCE.rst:
--------------------------------------------------------------------------------
1 | This project is based upon the Astropy package template
2 | (https://github.com/astropy/package-template/) which is licenced under the terms
3 | of the following licence.
4 |
5 | ---
6 |
7 | Copyright (c) 2018, Astropy Developers
8 | All rights reserved.
9 |
10 | Redistribution and use in source and binary forms, with or without modification,
11 | are permitted provided that the following conditions are met:
12 |
13 | * Redistributions of source code must retain the above copyright notice, this
14 | list of conditions and the following disclaimer.
15 | * Redistributions in binary form must reproduce the above copyright notice, this
16 | list of conditions and the following disclaimer in the documentation and/or
17 | other materials provided with the distribution.
18 | * Neither the name of the Astropy Team nor the names of its contributors may be
19 | used to endorse or promote products derived from this software without
20 | specific prior written permission.
21 |
22 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
23 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
24 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
25 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
26 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
27 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
28 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
29 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
30 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
31 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 |
--------------------------------------------------------------------------------
/astrocut/conftest.py:
--------------------------------------------------------------------------------
1 | # This file is used to configure the behavior of pytest when using the Astropy
2 | # test infrastructure. It needs to live inside the package in order for it to
3 | # get picked up when running the tests inside an interpreter using
4 | # packagename.test
5 |
6 | import os
7 |
8 | try:
9 | from pytest_astropy_header.display import PYTEST_HEADER_MODULES, TESTED_VERSIONS
10 | ASTROPY_HEADER = True
11 | except ImportError:
12 | ASTROPY_HEADER = False
13 |
14 |
15 | def pytest_configure(config):
16 |
17 | if ASTROPY_HEADER:
18 |
19 | config.option.astropy_header = True
20 |
21 | # Customize the following lines to add/remove entries from the list of
22 | # packages for which version numbers are displayed when running the tests.
23 | PYTEST_HEADER_MODULES.pop('Pandas', None)
24 | PYTEST_HEADER_MODULES.pop('h5py', None)
25 | PYTEST_HEADER_MODULES.pop('Matplotlib', None)
26 |
27 | from . import __version__
28 | packagename = os.path.basename(os.path.dirname(__file__))
29 | TESTED_VERSIONS[packagename] = __version__
30 |
31 | # Uncomment the last two lines in this block to treat all DeprecationWarnings as
32 | # exceptions. For Astropy v2.0 or later, there are 2 additional keywords,
33 | # as follows (although the default should work for most cases).
34 | # To ignore some packages that produce deprecation warnings on import
35 | # (in addition to 'compiler', 'scipy', 'pygments', 'ipykernel', and
36 | # 'setuptools'), add:
37 | # modules_to_ignore_on_import=['module_1', 'module_2']
38 | # To ignore some specific deprecation warning messages for Python version
39 | # MAJOR.MINOR or later, add:
40 | # warnings_to_ignore_by_pyver={(MAJOR, MINOR): ['Message to ignore']}
41 | # from astropy.tests.helper import enable_deprecations_as_exceptions # noqa
42 | # enable_deprecations_as_exceptions()
43 |
--------------------------------------------------------------------------------
/docs/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Astrocut
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 | Astrocut
19 |
20 | A Python package for making astronomical cutouts.
21 |
22 | Installation
23 | Documentation
24 |
25 | Astrocut provides tools for making cutouts from sets of astronomical images with shared footprints. It is under active development.
26 |
27 | Three main areas of functionality are included:
28 |
29 |
30 | Solving the specific problem of creating image cutouts from sectors of Transiting Exoplanet Survey Satellite (TESS) full-frame images.
31 | General FITS file cutouts, including from single images and from sets of images with a shared WCS/pixel scale.
32 | Cutout post-processing functionality, including centering cutouts along a path (for moving targets) and combining cutouts.
33 |
34 |
35 |
36 | GitHub Repository
37 |
38 |
39 |
40 |
41 |
42 |
43 |
--------------------------------------------------------------------------------
/astrocut/__init__.py:
--------------------------------------------------------------------------------
1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
2 |
3 | # Packages may add whatever they like to this file, but
4 | # should keep this content at the top.
5 | # ----------------------------------------------------------------------------
6 | from ._astropy_init import * # noqa
7 | # ----------------------------------------------------------------------------
8 |
9 | """
10 | This module initializes the astrocut package and performs essential setup tasks, including:
11 | - Verifying the version of Python.
12 | - Setting up package-wide logging.
13 | - Importing key modules.
14 | """
15 |
16 | import sys
17 |
18 | from .exceptions import UnsupportedPythonError
19 | from .utils.logger import setup_logger
20 |
21 | # Enforce Python version check during package import.
22 | __minimum_python_version__ = "3.9" # minimum supported Python version
23 | if sys.version_info < tuple(map(int, __minimum_python_version__.split('.'))):
24 | raise UnsupportedPythonError(f"astrocut does not support Python < {__minimum_python_version__}")
25 |
26 | # Initialize the package-wide logger (configured with the standard logging module)
27 | log = setup_logger()
28 |
29 | # Import key submodules and functions if not in setup mode
30 | if not _ASTROPY_SETUP_: # noqa
31 | from .cube_factory import CubeFactory # noqa
32 | from .tica_cube_factory import TicaCubeFactory # noqa
33 | from .cutout_factory import CutoutFactory, cube_cut # noqa
34 | from .cutout_processing import ( # noqa
35 | path_to_footprints, center_on_path, CutoutsCombiner, build_default_combine_function # noqa
36 | ) # noqa
37 | from .image_cutout import normalize_img # noqa
38 | from .fits_cutout import FITSCutout, fits_cut, img_cut # noqa
39 | from .asdf_cutout import ASDFCutout, asdf_cut, get_center_pixel # noqa
40 | from .tess_cube_cutout import TessCubeCutout # noqa
41 | from .footprint_cutout import ra_dec_crossmatch # noqa
42 | from .tess_footprint_cutout import TessFootprintCutout, cube_cut_from_footprint, get_tess_sectors # noqa
--------------------------------------------------------------------------------
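The version gate above relies on Python's component-wise tuple comparison; a standalone sketch of the same check:

    import sys

    minimum = "3.9"
    required = tuple(map(int, minimum.split('.')))  # "3.9" -> (3, 9)

    # sys.version_info compares element-wise, so (3, 8, 10) < (3, 9)
    # is True while (3, 10, 0) < (3, 9) is False.
    if sys.version_info < required:
        raise RuntimeError(f"astrocut does not support Python < {minimum}")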
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
3 |
4 | # NOTE: The configuration for the package, including the name, version, and
5 | # other information are set in the setup.cfg file.
6 |
7 | import os
8 | import sys
9 |
10 | from setuptools import setup
11 |
12 |
13 | # First provide helpful messages if contributors try and run legacy commands
14 | # for tests or docs.
15 |
16 | TEST_HELP = """
17 | Note: running tests is no longer done using 'python setup.py test'. Instead
18 | you will need to run:
19 |
20 | tox -e test
21 |
22 | If you don't already have tox installed, you can install it with:
23 |
24 | pip install tox
25 |
26 | If you only want to run part of the test suite, you can also use pytest
27 | directly with::
28 |
29 | pip install -e .[test]
30 | pytest
31 |
32 | For more information, see:
33 |
34 | http://docs.astropy.org/en/latest/development/testguide.html#running-tests
35 | """
36 |
37 | if 'test' in sys.argv:
38 | print(TEST_HELP)
39 | sys.exit(1)
40 |
41 | DOCS_HELP = """
42 | Note: building the documentation is no longer done using
43 | 'python setup.py build_docs'. Instead you will need to run:
44 |
45 | tox -e build_docs
46 |
47 | If you don't already have tox installed, you can install it with:
48 |
49 | pip install tox
50 |
51 | You can also build the documentation with Sphinx directly using::
52 |
53 | pip install -e .[docs]
54 | cd docs
55 | make html
56 |
57 | For more information, see:
58 |
59 | http://docs.astropy.org/en/latest/install.html#builddocs
60 | """
61 |
62 | if 'build_docs' in sys.argv or 'build_sphinx' in sys.argv:
63 | print(DOCS_HELP)
64 | sys.exit(1)
65 |
66 | VERSION_TEMPLATE = """
67 | # Note that we need to fall back to the hard-coded version if either
68 | # setuptools_scm can't be imported or setuptools_scm can't determine the
69 | # version, so we catch the generic 'Exception'.
70 | try:
71 | from setuptools_scm import get_version
72 | version = get_version(root='..', relative_to=__file__)
73 | except Exception:
74 | version = '{version}'
75 | """.lstrip()
76 |
77 | current_path = os.path.abspath(os.path.dirname(__file__))
78 |
79 | def read_file(*parts):
80 | with open(os.path.join(current_path, *parts), encoding='utf-8') as reader:
81 | return reader.read()
82 |
83 | setup(use_scm_version={'write_to': os.path.join('astrocut', 'version.py'),
84 | 'write_to_template': VERSION_TEMPLATE},
85 | long_description=read_file('README.rst'),
86 | long_description_content_type='text/x-rst')
87 |
--------------------------------------------------------------------------------
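For reference, setuptools_scm renders VERSION_TEMPLATE into astrocut/version.py with the resolved version baked into the fallback. For a hypothetical build at tag v1.0.0, the generated file would read:

    # Note that we need to fall back to the hard-coded version if either
    # setuptools_scm can't be imported or setuptools_scm can't determine the
    # version, so we catch the generic 'Exception'.
    try:
        from setuptools_scm import get_version
        version = get_version(root='..', relative_to=__file__)
    except Exception:
        version = '1.0.0'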
/docs/_static/main.css:
--------------------------------------------------------------------------------
1 | /*div.documentwrapper{
2 | background-color: #00617E;
3 | }
4 |
5 | div.body{
6 | background-color: unset;
7 | }
8 |
9 | div.document{
10 | margin-top: 40px;
11 | }
12 | */
13 |
14 | body{
15 | background: linear-gradient(#0D4960,#00617E);
16 | font-family: 'Overpass', sans-serif;
17 | color: white;
18 | margin-left: auto;
19 | margin-right: auto;
20 | max-width: 800px;
21 | padding-left: 15px;
22 | padding-right: 15px;
23 | }
24 |
25 |
26 | .logo{
27 | margin-left: auto;
28 | margin-right: auto;
29 | max-width: 700px;
30 | max-width: 700px;
31 | display: block;
32 | }
33 |
34 | h1{
35 | text-align: center;
36 | max-width: 800px;
37 | margin-top: 0px;
38 | margin-left: auto;
39 | margin-right: auto;
40 | margin-bottom: 50px;
41 | }
42 |
43 | h2{
44 | text-align: center;
45 | font-size: 55px;
46 | margin-bottom: 25px;
47 | margin-top: 30px;
48 | font-weight: 400;
49 | }
50 |
51 | h3{
52 | font-size: 40px;
53 | margin-bottom: 15px;
54 | margin-right: 0px;
55 | }
56 |
57 | a{
58 | color: white;
59 | text-decoration: none;
60 | cursor: pointer;
61 | }
62 |
63 |
64 | p{
65 | font-size: 25px;
66 | line-height: 30px;
67 | }
68 |
69 | ul{
70 | font-size: 25px;
71 | line-height: 30px;
72 | }
73 |
74 | br{
75 | height: 10px;
76 | }
77 |
78 | .big-button{
79 | background: #0D4960;
80 | border: 2px;
81 | border-color: white;
82 | border-style: solid;
83 | padding-left: 25px;
84 | padding-right: 20px;
85 | padding-bottom: 20px;
86 | padding-top: 5px;
87 | margin-bottom: 20px;
88 | display: block;
89 | }
90 |
91 | a:hover .big-button {
92 | background-color: #00617E;
93 | }
94 |
95 | .text-link{
96 | text-decoration: underline;
97 |
98 | }
99 |
100 | .small-link{
101 | background: #0D4960;
102 | font-size: 25px;
103 | margin-bottom: 20px;
104 | border: 2px;
105 | border-color: white;
106 | border-style: solid;
107 | padding: 15px;
108 | max-width: 800px;
109 | text-align: center;
110 | font-weight: bold;
111 | display: block;
112 | }
113 |
114 |
115 | .small-link:hover{
116 | background-color: #00617E;
117 | }
118 |
119 | .big-button.orange{
120 | background: #C75109;
121 | cursor: pointer;
122 | padding-left: 20px;
123 | padding-right: 20px;
124 | padding-bottom: 20px;
125 | padding-top: 5px;
126 | margin-bottom: 20px;
127 | }
128 |
129 | .footer{
130 | margin-top: 100px;
131 | margin-bottom: 30px;
132 | }
133 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | name = astrocut
3 | author = MAST Archive Developers
4 | author_email = archive@stsci.edu
5 | license = BSD 3-Clause
6 | license_files = licenses/LICENSE.rst
7 | url = https://astrocut.readthedocs.io
8 | description = Cutout tools for astronomical images
9 | long_description = file: README.rst
10 | edit_on_github = False
11 | github_project = spacetelescope/astrocut
12 |
13 | [options]
14 | zip_safe = False
15 | packages = find:
16 | python_requires = >=3.9
17 | setup_requires = setuptools_scm
18 | install_requires =
19 | asdf>=4.1.0 # for ASDF file format
20 | astropy>=5.2 # astropy with s3fs support
21 | cachetools>=5.3.2 # for caching data
22 | fsspec[http]>=2022.8.2 # for remote cutouts
23 | s3fs>=2022.8.2 # for remote cutouts
24 | s3path>=0.5.7 # for remote file paths
25 | requests>=2.32.3 # for making HTTP requests
26 | spherical_geometry>=1.3.0
27 | gwcs>=0.21.0
28 | scipy
29 | Pillow
30 |
31 | [options.entry_points]
32 | console_scripts =
33 | astropy-package-template-example = packagename.example_mod:main
34 |
35 | [options.extras_require]
36 | test =
37 | pytest-astropy
38 | astroquery>=0.4.6
39 | docs =
40 | sphinx != 4.1.0
41 | docutils == 0.16
42 | sphinx-astropy
43 | sphinx_rtd_theme >= 0.5.2
44 | all =
45 | stdatamodels>=4.1.0 # stdatamodels is optional; required for ASDF-in-FITS embedding (Python>=3.11)
46 |
47 | [options.package_data]
48 | astrocut.tests = data/*
49 |
50 | [tool:pytest]
51 | testpaths = "astrocut" "docs"
52 | astropy_header = true
53 | doctest_plus = enabled
54 | text_file_format = rst
55 | addopts = --doctest-rst
56 |
57 | [flake8]
58 | exclude = extern,sphinx,*parsetab.py,astrocut
59 |
60 | [pycodestyle]
61 | exclude = extern,sphinx,*parsetab.py
62 |
63 | [coverage:run]
64 | omit =
65 | astrocut/_astropy_init*
66 | astrocut/conftest.py
67 | astrocut/*setup_package*
68 | astrocut/tests/*
69 | astrocut/*/tests/*
70 | astrocut/extern/*
71 | astrocut/version*
72 | */astrocut/_astropy_init*
73 | */astrocut/conftest.py
74 | */astrocut/*setup_package*
75 | */astrocut/tests/*
76 | */astrocut/*/tests/*
77 | */astrocut/extern/*
78 | */astrocut/version*
79 |
80 | [coverage:report]
81 | exclude_lines =
82 | # Have to re-enable the standard pragma
83 | pragma: no cover
84 | # Don't complain about packages we have installed
85 | except ImportError
86 | # Don't complain if tests don't hit assertions
87 | raise AssertionError
88 | raise NotImplementedError
89 | # Don't complain about script hooks
90 | def main\(.*\):
91 | # Ignore branches that don't pertain to this version of Python
92 | pragma: py{ignore_python_version}
93 | # Don't complain about IPython completion helper
94 | def _ipython_key_completions_
95 |
96 |
97 |
98 |
99 |
100 |
101 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist =
3 | py{38,39,310,311,312,313}-test{,-alldeps,-devdeps}{,-cov}
4 | py{38,39,310,311,312,313}-test-numpy{120,123}
5 | py{38,39,310,311,312,313}-test-astropy{52}
6 | build_docs
7 | linkcheck
8 | codestyle
9 | requires =
10 | setuptools >= 30.3.0
11 | pip >= 19.3.1
12 |
13 | isolated_build = true
14 |
15 | [testenv]
16 |
17 | # Pass through the following environment variables which may be needed for the CI
18 | passenv = HOME,WINDIR,LC_ALL,LC_CTYPE,CC,CI,TRAVIS
19 |
20 | # Run the tests in a temporary directory to make sure that we don't import
21 | # this package from the source tree
22 | changedir = .tmp/{envname}
23 |
24 | # tox environments are constructed with so-called 'factors' (or terms)
25 | # separated by hyphens, e.g. test-devdeps-cov. Lines below starting with factor:
26 | # will only take effect if that factor is included in the environment name. To
27 | # see a list of example environments that can be run, along with a description,
28 | # run:
29 | #
30 | # tox -l -v
31 | #
32 | description =
33 | run tests
34 | alldeps: with all optional dependencies
35 | devdeps: with the latest developer version of key dependencies
36 | oldestdeps: with the oldest supported version of key dependencies
37 | cov: and test coverage
38 | astropy52: with astropy 5.2.*
39 | numpy120: with numpy 1.20.*
40 | numpy123: with numpy 1.23.*
41 | numpy2: with numpy 2
42 | astroquery04: with astroquery 0.4.*
43 |
44 | # The following provides some specific pinnings for key packages
45 | deps =
46 | numpy120: numpy==1.20.*
47 | numpy123: numpy==1.23.*
48 | numpy2: numpy==2.0.*
49 |
50 | astropy52: astropy==5.2.*
51 |
52 | astroquery04: astroquery==0.4.*
53 |
54 | stdatamodels; python_version >= "3.11"
55 |
56 | devdeps: git+https://github.com/numpy/numpy.git#egg=numpy
57 | devdeps: git+https://github.com/astropy/astropy.git#egg=astropy
58 | devdeps: git+https://github.com/astropy/astroquery.git
59 |
60 | # The following indicates which extras_require from setup.cfg will be installed
61 | extras =
62 | test
63 | alldeps: docs
64 |
65 | commands =
66 | pip freeze
67 | !cov: pytest --pyargs astrocut {toxinidir}/docs {posargs}
68 | cov: pytest --pyargs astrocut {toxinidir}/docs --cov astrocut --cov-config={toxinidir}/setup.cfg {posargs}
69 | cov: coverage xml -o {toxinidir}/coverage.xml
70 |
71 | [testenv:build_docs]
72 | changedir = docs
73 | description = invoke sphinx-build to build the HTML docs
74 | extras = docs
75 | deps =
76 | sphinx_rtd_theme
77 | matplotlib # Sphinx uses matplotlib for plot_directive extension
78 | commands =
79 | pip freeze
80 | sphinx-build -W -b html . _build/html
81 |
82 | [testenv:linkcheck]
83 | changedir = docs
84 | description = check the links in the HTML docs
85 | extras = docs
86 | commands =
87 | pip freeze
88 | sphinx-build -W -b linkcheck . _build/html
89 |
90 | [testenv:codestyle]
91 | skip_install = true
92 | changedir = .
93 | description = check code style, e.g. with flake8
94 | deps = flake8
95 | commands = flake8 astrocut --count --show-source --statistics --ignore=W291,W293,W391,E303,E266,E226,W504 --max-line-length=120 --exclude=astrocut/conftest.py
96 |
--------------------------------------------------------------------------------
/astrocut/tests/test_image_cutout.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | import numpy as np
4 |
5 | from astrocut.image_cutout import ImageCutout
6 |
7 | from ..exceptions import InputWarning, InvalidInputError
8 |
9 |
10 | def test_normalize_img():
11 | # basic linear stretch
12 | img_arr = np.array([[1, 0], [.25, .75]])
13 | assert ((img_arr*255).astype(int) == ImageCutout.normalize_img(img_arr, stretch='linear')).all()
14 |
15 | # invert
16 | assert (255-(img_arr*255).astype(int) == ImageCutout.normalize_img(img_arr, stretch='linear', invert=True)).all()
17 |
18 | # linear stretch where input image must be scaled
19 | img_arr = np.array([[10, 5], [2.5, 7.5]])
20 | norm_img = ((img_arr - img_arr.min())/(img_arr.max()-img_arr.min())*255).astype(int)
21 | assert (norm_img == ImageCutout.normalize_img(img_arr, stretch='linear')).all()
22 |
23 | # min_max val
24 | minval, maxval = 0, 1
25 | img_arr = np.array([[1, 0], [-1, 2]])
26 | norm_img = ImageCutout.normalize_img(img_arr, stretch='linear', minmax_value=[minval, maxval])
27 | img_arr[img_arr < minval] = minval
28 | img_arr[img_arr > maxval] = maxval
29 | assert ((img_arr*255).astype(int) == norm_img).all()
30 |
31 | minval, maxval = 0, 1
32 | img_arr = np.array([[1, 0], [.1, .2]])
33 | norm_img = ImageCutout.normalize_img(img_arr, stretch='linear', minmax_value=[minval, maxval])
34 | img_arr[img_arr < minval] = minval
35 | img_arr[img_arr > maxval] = maxval
36 | assert ((img_arr*255).astype(int) == norm_img).all()
37 |
38 | # min_max percent
39 | img_arr = np.array([[1, 0], [0.1, 0.9], [.25, .75]])
40 | norm_img = ImageCutout.normalize_img(img_arr, stretch='linear', minmax_percent=[25, 75])
41 | assert (norm_img == [[255, 0], [0, 255], [39, 215]]).all()
42 |
43 | # asinh
44 | img_arr = np.array([[1, 0], [.25, .75]])
45 | norm_img = ImageCutout.normalize_img(img_arr)
46 | assert ((np.arcsinh(img_arr*10)/np.arcsinh(10)*255).astype(int) == norm_img).all()
47 |
48 | # sinh
49 | img_arr = np.array([[1, 0], [.25, .75]])
50 | norm_img = ImageCutout.normalize_img(img_arr, stretch='sinh')
51 | assert ((np.sinh(img_arr*3)/np.sinh(3)*255).astype(int) == norm_img).all()
52 |
53 | # sqrt
54 | img_arr = np.array([[1, 0], [.25, .75]])
55 | norm_img = ImageCutout.normalize_img(img_arr, stretch='sqrt')
56 | assert ((np.sqrt(img_arr)*255).astype(int) == norm_img).all()
57 |
58 | # log
59 | img_arr = np.array([[1, 0], [.25, .75]])
60 | norm_img = ImageCutout.normalize_img(img_arr, stretch='log')
61 | assert ((np.log(img_arr*1000+1)/np.log(1000)*255).astype(int) == norm_img).all()
62 |
63 |
64 | def test_normalize_img_errors():
65 | # Bad stretch
66 | with pytest.raises(InvalidInputError):
67 | img_arr = np.array([[1, 0], [.25, .75]])
68 | ImageCutout.normalize_img(img_arr, stretch='lin')
69 |
70 | # Giving both minmax percent and cut
71 | img_arr = np.array([[1, 0], [.25, .75]])
72 | norm_img = ImageCutout.normalize_img(img_arr, stretch='asinh', minmax_percent=[0.7, 99.3])
73 | with pytest.warns(InputWarning,
74 | match='Both minmax_percent and minmax_value are set, minmax_value will be ignored.'):
75 | test_img = ImageCutout.normalize_img(img_arr, stretch='asinh', minmax_value=[5, 2000],
76 | minmax_percent=[0.7, 99.3])
77 | assert (test_img == norm_img).all()
78 |
79 | # Raise error if image array is empty
80 | img_arr = np.array([])
81 | with pytest.raises(InvalidInputError):
82 | ImageCutout.normalize_img(img_arr)
83 |
--------------------------------------------------------------------------------
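The expected values asserted above all follow one recipe: optionally clip to minmax bounds, rescale to [0, 1], apply the stretch, scale to 0-255, and truncate to int. A minimal sketch of the linear case that reproduces the test arithmetic (not the package's actual implementation):

    import numpy as np

    def linear_normalize(img_arr, minmax_value=None, invert=False):
        # Sketch of the linear stretch the tests above assert against.
        arr = np.asarray(img_arr, dtype=float)
        if minmax_value is not None:
            lo, hi = minmax_value
            arr = np.clip(arr, lo, hi)      # clip to the given bounds
        else:
            lo, hi = arr.min(), arr.max()   # rescale by the data range
        norm = ((arr - lo) / (hi - lo) * 255).astype(int)
        return 255 - norm if invert else norm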
/.github/workflows/ci_workflows.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | tags:
8 | - "*"
9 | pull_request:
10 | schedule:
11 | # Nightly build triggered at 2 AM EST
12 | - cron: '0 7 * * *'
13 |
14 | jobs:
15 | tests:
16 | name: ${{ matrix.name }}
17 | runs-on: ${{ matrix.os }}
18 | strategy:
19 | fail-fast: true
20 | matrix:
21 | include:
22 | - name: Python 3.9 with minimal dependencies
23 | os: ubuntu-latest
24 | python: 3.9
25 | toxenv: py39-test
26 |
27 | - name: Python 3.9 with all optional dependencies
28 | os: ubuntu-latest
29 | python: 3.9
30 | toxenv: py39-test-alldeps
31 | toxargs: -v --develop
32 |
33 | - name: Python 3.10 with numpy 1.23 and full coverage
34 | os: ubuntu-latest
35 | python: "3.10"
36 | toxenv: py310-test-alldeps-numpy123-cov
37 |
38 | - name: Python 3.10 with all optional dependencies (MacOS X)
39 | os: macos-latest
40 | python: "3.10"
41 | toxenv: py310-test-alldeps
42 |
43 | - name: Python 3.10 with numpy 2
44 | os: ubuntu-latest
45 | python: "3.10"
46 | toxenv: py310-test-alldeps-numpy2
47 |
48 | - name: Python 3.11 with minimal dependencies
49 | os: ubuntu-latest
50 | python: 3.11
51 | toxenv: py311-test
52 |
53 | - name: Python 3.12 with minimal dependencies
54 | os: ubuntu-latest
55 | python: 3.12
56 | toxenv: py312-test
57 |
58 | - name: Python 3.13 with minimal dependencies
59 | os: ubuntu-latest
60 | python: 3.13
61 | toxenv: py313-test
62 |
63 | steps:
64 | - name: Checkout code
65 | uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
66 | with:
67 | fetch-depth: 0
68 | - name: Set up Python
69 | uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
70 | with:
71 | python-version: ${{ matrix.python }}
72 | - name: Install language-pack-de and tzdata
73 | if: startsWith(matrix.os, 'ubuntu')
74 | run: |
75 | sudo apt-get update
76 | sudo apt-get install language-pack-de tzdata
77 | - name: Install Python dependencies
78 | run: python -m pip install --upgrade tox codecov
79 | - name: Run tests
80 | run: tox ${{ matrix.toxargs }} -e ${{ matrix.toxenv }} -- ${{ matrix.toxposargs }}
81 | - name: Upload coverage to codecov
82 | if: ${{ contains(matrix.toxenv,'-cov') }}
83 | uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
84 | with:
85 | file: ./coverage.xml
86 | env:
87 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
88 |
89 | allowed_failures:
90 | name: ${{ matrix.name }}
91 | runs-on: ${{ matrix.os }}
92 | strategy:
93 | fail-fast: false
94 | matrix:
95 | include:
96 | - name: Code style checks
97 | os: ubuntu-latest
98 | python: 3.x
99 | toxenv: codestyle
100 |
101 | - name: (Allowed Failure) Python 3.12 with dev version of key dependencies
102 | os: ubuntu-latest
103 | python: 3.12
104 | toxenv: py312-test-devdeps
105 |
106 | steps:
107 | - name: Checkout code
108 | uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
109 | with:
110 | fetch-depth: 0
111 | - name: Set up Python
112 | uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
113 | with:
114 | python-version: ${{ matrix.python }}
115 | - name: Install language-pack-de and tzdata
116 | if: startsWith(matrix.os, 'ubuntu')
117 | run: |
118 | sudo apt-get update
119 | sudo apt-get install language-pack-de tzdata
120 | - name: Install Python dependencies
121 | run: python -m pip install --upgrade tox codecov
122 | - name: Run tests
123 | run: tox ${{ matrix.toxargs }} -e ${{ matrix.toxenv }} -- ${{ matrix.toxposargs }}
124 |
--------------------------------------------------------------------------------
/astrocut/tica_cube_factory.py:
--------------------------------------------------------------------------------
1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
2 |
3 | from typing import Optional, Union
4 |
5 | import numpy as np
6 | from astropy.io import fits
7 | from astropy.utils.decorators import deprecated
8 |
9 | from .cube_factory import CubeFactory
10 |
11 |
12 | @deprecated(since='1.1.0', message='The `TicaCubeFactory` class is deprecated and will be removed in a future version. '
13 | 'Use the `CubeFactory` class for creating image cubes from SPOC product files.')
14 | class TicaCubeFactory(CubeFactory):
15 | """
16 | Class for creating TICA image cubes.
17 |
18 | The TESS Image CAlibrator (TICA) products are high level science products (HLSPs)
19 | developed by the MIT Quick Look Pipeline (https://github.com/mmfausnaugh/tica). These
20 | images are produced and delivered up to 4x sooner than their SPOC counterparts (as of TESS EM2),
21 | and can therefore be used to produce the most up-to-date cutouts of a target.
22 | More information on TICA can be found here: https://archive.stsci.edu/hlsp/tica
23 |
24 | Parameters
25 | ----------
26 | max_memory : int
27 | The maximum amount of memory to make available for building the data cube in GB.
28 | Note, this is the maximum amount of space to be used for the cube array only,
29 | so should not be set to the full amount of memory on the system.
30 | """
31 | def __init__(self, max_memory: int = 50):
32 | """ Setting up the class members."""
33 | super().__init__(max_memory=max_memory)
34 |
35 | self._time_keyword = 'STARTTJD' # Start time in TJD. TICA-specific.
36 | self._last_file_keywords = ['ENDTJD'] # Stop time in TJD. TICA-specific (assumed to be in extension 0)
37 | self._image_header_keywords = ['CAMNUM', 'CCDNUM'] # Camera number and CCD number
38 | self._template_requirements = {'NAXIS': 2} # Using NAXIS instead of WCSAXES.
39 | self._img_ext = 0 # TICA has image data in the primary extension
40 | self._naxis1 = 1 # TICA has data values only
41 |
42 | def _get_img_start_time(self, img_data: fits.HDUList) -> float:
43 | """
44 | Get the start time of the image.
45 |
46 | Parameters
47 | ----------
48 | img_data : HDUList
49 | The image data.
50 |
51 | Returns
52 | -------
53 | float
54 | The start time of the image.
55 | """
56 | return img_data[self._img_ext].header.get(self._time_keyword)
57 |
58 | def _get_img_shape(self, img_data: fits.HDUList) -> tuple:
59 | """
60 | Get the shape of the image data.
61 |
62 | Parameters
63 | ----------
64 | img_data : HDUList
65 | The image data.
66 |
67 | Returns
68 | -------
69 | tuple
70 | The shape of the image data.
71 | """
72 | try:
73 | return img_data[self._img_ext].data.shape
74 | except AttributeError:
75 | # If data is not found in the image extension, raise an error
76 | raise ValueError(self.ERROR_MSG)
77 |
78 | def _write_to_sub_cube(self, sub_cube: np.ndarray, idx: int, img_data: fits.HDUList, start_row: int, end_row: int):
79 | """
80 | Write data from an input image to a sub-cube.
81 |
82 | Parameters
83 | ----------
84 | sub_cube : numpy.ndarray
85 | The sub-cube to write to.
86 | idx : int
87 | The index of the input file.
88 | img_data : HDUList
89 | The image data.
90 | start_row : int
91 | The starting row of the block.
92 | end_row : int
93 | The ending row of the block.
94 | """
95 | # Add image data to the sub-cube
96 | sub_cube[:, :, idx, 0] = img_data[0].data[start_row:end_row, :]
97 |
98 | # Remove the data from the input image to save memory
99 | del img_data[0].data
100 |
101 | def _get_header_keyword(self, kwd: str, img_data: fits.HDUList, nulval: Optional[Union[int, str]]):
102 | """
103 | Get a header keyword from an input image and save it to the info table.
104 |
105 | Parameters
106 | ----------
107 | kwd : str
108 | The keyword to get.
109 | img_data : HDUList
110 | The image data.
111 | nulval : int or str
112 | The null value for the keyword.
113 | """
114 | val = img_data[0].header.get(kwd, nulval)
115 |
116 | # The "COMMENT" keyword is in the form of a _HeaderCommentaryCard instead of a string
117 | return str(val) if isinstance(val, fits.header._HeaderCommentaryCards) else val
118 |
--------------------------------------------------------------------------------
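The subclass customizes cube building almost entirely through attributes set in __init__ plus a few small method overrides, so supporting another product family would follow the same pattern. A hedged sketch (MyProductCubeFactory and its keyword choices are hypothetical, not an astrocut API):

    from astrocut import CubeFactory

    class MyProductCubeFactory(CubeFactory):
        """Hypothetical product whose image data lives in extension 1."""
        def __init__(self, max_memory: int = 50):
            super().__init__(max_memory=max_memory)
            self._time_keyword = 'TSTART'              # header card holding the start time
            self._image_header_keywords = ['CAMERA']   # cards to carry into the info table
            self._img_ext = 1                          # extension containing the image array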
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = _build
9 |
10 | # Internal variables.
11 | PAPEROPT_a4 = -D latex_paper_size=a4
12 | PAPEROPT_letter = -D latex_paper_size=letter
13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
14 |
15 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
16 |
17 | #This is needed with git because git doesn't create a dir if it's empty
18 | $(shell [ -d "_static" ] || mkdir -p _static)
19 |
20 | help:
21 | @echo "Please use \`make ' where is one of"
22 | @echo " html to make standalone HTML files"
23 | @echo " dirhtml to make HTML files named index.html in directories"
24 | @echo " singlehtml to make a single large HTML file"
25 | @echo " pickle to make pickle files"
26 | @echo " json to make JSON files"
27 | @echo " htmlhelp to make HTML files and a HTML help project"
28 | @echo " qthelp to make HTML files and a qthelp project"
29 | @echo " devhelp to make HTML files and a Devhelp project"
30 | @echo " epub to make an epub"
31 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
32 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
33 | @echo " text to make text files"
34 | @echo " man to make manual pages"
35 | @echo " changes to make an overview of all changed/added/deprecated items"
36 | @echo " linkcheck to check all external links for integrity"
37 |
38 | clean:
39 | -rm -rf $(BUILDDIR)
40 | -rm -rf api
41 | -rm -rf generated
42 |
43 | html:
44 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
45 | @echo
46 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
47 |
48 | dirhtml:
49 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
50 | @echo
51 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
52 |
53 | singlehtml:
54 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
55 | @echo
56 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
57 |
58 | pickle:
59 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
60 | @echo
61 | @echo "Build finished; now you can process the pickle files."
62 |
63 | json:
64 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
65 | @echo
66 | @echo "Build finished; now you can process the JSON files."
67 |
68 | htmlhelp:
69 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
70 | @echo
71 | @echo "Build finished; now you can run HTML Help Workshop with the" \
72 | ".hhp project file in $(BUILDDIR)/htmlhelp."
73 |
74 | qthelp:
75 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
76 | @echo
77 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
78 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
79 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Astropy.qhcp"
80 | @echo "To view the help file:"
81 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Astropy.qhc"
82 |
83 | devhelp:
84 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
85 | @echo
86 | @echo "Build finished."
87 | @echo "To view the help file:"
88 | @echo "# mkdir -p $$HOME/.local/share/devhelp/Astropy"
89 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Astropy"
90 | @echo "# devhelp"
91 |
92 | epub:
93 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
94 | @echo
95 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
96 |
97 | latex:
98 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
99 | @echo
100 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
101 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
102 | "(use \`make latexpdf' here to do that automatically)."
103 |
104 | latexpdf:
105 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
106 | @echo "Running LaTeX files through pdflatex..."
107 | make -C $(BUILDDIR)/latex all-pdf
108 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
109 |
110 | text:
111 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
112 | @echo
113 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
114 |
115 | man:
116 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
117 | @echo
118 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
119 |
120 | changes:
121 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
122 | @echo
123 | @echo "The overview file is in $(BUILDDIR)/changes."
124 |
125 | linkcheck:
126 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
127 | @echo
128 | @echo "Link check complete; look for any errors in the above output " \
129 | "or in $(BUILDDIR)/linkcheck/output.txt."
130 |
131 | doctest:
132 | 	@echo "Run 'python setup.py test' in the root directory to run doctests" \
133 | 	"in the documentation."
134 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | REM Command file for Sphinx documentation
4 |
5 | if "%SPHINXBUILD%" == "" (
6 | set SPHINXBUILD=sphinx-build
7 | )
8 | set BUILDDIR=_build
9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
10 | if NOT "%PAPER%" == "" (
11 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
12 | )
13 |
14 | if "%1" == "" goto help
15 |
16 | if "%1" == "help" (
17 | :help
18 | echo.Please use `make ^<target^>` where ^<target^> is one of
19 | echo. html to make standalone HTML files
20 | echo. dirhtml to make HTML files named index.html in directories
21 | echo. singlehtml to make a single large HTML file
22 | echo. pickle to make pickle files
23 | echo. json to make JSON files
24 | echo. htmlhelp to make HTML files and a HTML help project
25 | echo. qthelp to make HTML files and a qthelp project
26 | echo. devhelp to make HTML files and a Devhelp project
27 | echo. epub to make an epub
28 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
29 | echo. text to make text files
30 | echo. man to make manual pages
31 | echo.  changes    to make an overview of all changed/added/deprecated items
32 | echo. linkcheck to check all external links for integrity
33 | echo. doctest to run all doctests embedded in the documentation if enabled
34 | goto end
35 | )
36 |
37 | if "%1" == "clean" (
38 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
39 | del /q /s %BUILDDIR%\*
40 | goto end
41 | )
42 |
43 | if "%1" == "html" (
44 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
45 | if errorlevel 1 exit /b 1
46 | echo.
47 | echo.Build finished. The HTML pages are in %BUILDDIR%/html.
48 | goto end
49 | )
50 |
51 | if "%1" == "dirhtml" (
52 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
53 | if errorlevel 1 exit /b 1
54 | echo.
55 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
56 | goto end
57 | )
58 |
59 | if "%1" == "singlehtml" (
60 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
61 | if errorlevel 1 exit /b 1
62 | echo.
63 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
64 | goto end
65 | )
66 |
67 | if "%1" == "pickle" (
68 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
69 | if errorlevel 1 exit /b 1
70 | echo.
71 | echo.Build finished; now you can process the pickle files.
72 | goto end
73 | )
74 |
75 | if "%1" == "json" (
76 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
77 | if errorlevel 1 exit /b 1
78 | echo.
79 | echo.Build finished; now you can process the JSON files.
80 | goto end
81 | )
82 |
83 | if "%1" == "htmlhelp" (
84 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
85 | if errorlevel 1 exit /b 1
86 | echo.
87 | echo.Build finished; now you can run HTML Help Workshop with the ^
88 | .hhp project file in %BUILDDIR%/htmlhelp.
89 | goto end
90 | )
91 |
92 | if "%1" == "qthelp" (
93 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
94 | if errorlevel 1 exit /b 1
95 | echo.
96 | echo.Build finished; now you can run "qcollectiongenerator" with the ^
97 | .qhcp project file in %BUILDDIR%/qthelp, like this:
98 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Astropy.qhcp
99 | echo.To view the help file:
100 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Astropy.qhc
101 | goto end
102 | )
103 |
104 | if "%1" == "devhelp" (
105 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
106 | if errorlevel 1 exit /b 1
107 | echo.
108 | echo.Build finished.
109 | goto end
110 | )
111 |
112 | if "%1" == "epub" (
113 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
114 | if errorlevel 1 exit /b 1
115 | echo.
116 | echo.Build finished. The epub file is in %BUILDDIR%/epub.
117 | goto end
118 | )
119 |
120 | if "%1" == "latex" (
121 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
122 | if errorlevel 1 exit /b 1
123 | echo.
124 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
125 | goto end
126 | )
127 |
128 | if "%1" == "text" (
129 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
130 | if errorlevel 1 exit /b 1
131 | echo.
132 | echo.Build finished. The text files are in %BUILDDIR%/text.
133 | goto end
134 | )
135 |
136 | if "%1" == "man" (
137 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
138 | if errorlevel 1 exit /b 1
139 | echo.
140 | echo.Build finished. The manual pages are in %BUILDDIR%/man.
141 | goto end
142 | )
143 |
144 | if "%1" == "changes" (
145 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
146 | if errorlevel 1 exit /b 1
147 | echo.
148 | echo.The overview file is in %BUILDDIR%/changes.
149 | goto end
150 | )
151 |
152 | if "%1" == "linkcheck" (
153 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
154 | if errorlevel 1 exit /b 1
155 | echo.
156 | echo.Link check complete; look for any errors in the above output ^
157 | or in %BUILDDIR%/linkcheck/output.txt.
158 | goto end
159 | )
160 |
161 | if "%1" == "doctest" (
162 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
163 | if errorlevel 1 exit /b 1
164 | echo.
165 | echo.Testing of doctests in the sources finished, look at the ^
166 | results in %BUILDDIR%/doctest/output.txt.
167 | goto end
168 | )
169 |
170 | :end
171 |
--------------------------------------------------------------------------------
/astrocut/tests/data/ex_ffi_wcs.txt:
--------------------------------------------------------------------------------
1 | NAXIS = 2 / number of array dimensions NAXIS1 = 2136 NAXIS2 = 2078 WCSAXES = 2 / Number of coordinate axes CRPIX1 = 1045.0 / Pixel coordinate of reference point CRPIX2 = 1001.0 / Pixel coordinate of reference point PC1_1 = 0.0054214440719814 / Coordinate transformation matrix element PC1_2 = 0.001882933847014 / Coordinate transformation matrix element PC2_1 = -0.001756958456204 / Coordinate transformation matrix element PC2_2 = 0.005370972142312 / Coordinate transformation matrix element CDELT1 = 1.0 / [deg] Coordinate increment at reference point CDELT2 = 1.0 / [deg] Coordinate increment at reference point CUNIT1 = 'deg' / Units of coordinate increment and value CUNIT2 = 'deg' / Units of coordinate increment and value CTYPE1 = 'RA---TAN-SIP' / TAN (gnomonic) projection + SIP distortions CTYPE2 = 'DEC--TAN-SIP' / TAN (gnomonic) projection + SIP distortions CRVAL1 = 259.36421299568 / [deg] Coordinate value at reference point CRVAL2 = 36.205189858442 / [deg] Coordinate value at reference point LONPOLE = 180.0 / [deg] Native longitude of celestial pole LATPOLE = 36.205189858442 / [deg] Native latitude of celestial pole TIMESYS = 'TDB' / Time scale TIMEUNIT= 'd' / Time units MJDREF = 0.0 / [d] MJD of fiducial time DATE-OBS= '2019-05-11T05:08:26.816Z' / ISO-8601 time of observation MJD-OBS = 58614.214199259 / [d] MJD of observation TSTART = 1614.715 / [d] Time elapsed since fiducial time at start DATE-END= '2019-05-11T05:38:26.816Z' / ISO-8601 time at end of observation MJD-END = 58614.235032593 / [d] MJD at end of observation TSTOP = 1614.7358333333 / [d] Time elapsed since fiducial time at end TELAPSE = 0.020833333333 / [d] Elapsed time (start to stop) TIMEDEL = 0.020833333333333 / [d] Time resolution TIMEPIXR= 0.5 / Reference position of timestamp in binned data RADESYS = 'ICRS' / Equatorial coordinate system A_ORDER = 2 A_0_2 = -3.149317908124E-06 A_1_1 = 1.730817428468E-05 A_2_0 = -1.933462721325E-05 B_ORDER = 2 B_0_2 = 2.042014645602E-05 B_1_1 = -1.636520970621E-05 B_2_0 = 3.30815059215721E-06 AP_ORDER= 2 AP_0_1 = -0.0005978141551887 AP_0_2 = 3.162944912935E-06 AP_1_0 = 0.000847639020400928 AP_1_1 = -1.72807349897E-05 AP_2_0 = 1.93271150683819E-05 BP_ORDER= 2 BP_0_1 = 0.00088615310847368 BP_0_2 = -2.041185722247E-05 BP_1_0 = -0.0005960802481639 BP_1_1 = 1.633823692359E-05 BP_2_0 = -3.322554744068E-06 END
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | Cutout tools for astronomical images
2 | ------------------------------------
3 |
4 | .. image:: http://img.shields.io/badge/powered%20by-AstroPy-orange.svg
5 | :target: http://www.astropy.org
6 | :alt: Powered by Astropy Badge
7 |
8 | .. image:: https://badge.fury.io/py/astrocut.svg
9 | :target: https://badge.fury.io/py/astrocut
10 | :alt: PyPi Status
11 |
12 | .. image:: https://readthedocs.org/projects/astrocut/badge/?version=latest
13 | :target: https://astrocut.readthedocs.io/en/latest/?badge=latest
14 | :alt: Documentation Status
15 |
16 | Astrocut provides tools for making cutouts from sets of astronomical images with shared footprints. It is under active development.
17 |
18 | Three main areas of functionality are included:
19 |
20 | - Solving the specific problem of creating image cutouts from sectors of Transiting Exoplanet Survey Satellite (TESS) full-frame images.
21 | - General FITS file cutouts, both from single images and from sets of images with a shared WCS/pixel scale.
22 | - Cutout post-processing functionality, including centering cutouts along a path (for moving targets) and combining cutouts.
23 |
24 | Documentation is at https://astrocut.readthedocs.io.
25 |
26 | Project Status
27 | --------------
28 | .. image:: https://github.com/spacetelescope/astrocut/workflows/CI/badge.svg?branch=main
29 | :target: https://github.com/spacetelescope/astrocut/actions
30 | :alt: Github actions CI status
31 |
32 | .. image:: https://codecov.io/gh/spacetelescope/astrocut/branch/main/graph/badge.svg
33 | :target: https://codecov.io/gh/spacetelescope/astrocut
34 | :alt: Codecov coverage status
35 |
36 |
37 | Developer Documentation
38 | -----------------------
39 |
40 | Installation
41 | ============
42 |
43 | .. code-block:: bash
44 |
45 | $ git clone https://github.com/spacetelescope/astrocut.git
46 | $ cd astrocut
47 | $ pip install .
48 |
49 | For active development, install in develop mode:
50 |
51 | .. code-block:: bash
52 |
53 | $ pip install -e .
54 |
55 | Testing
56 | =======
57 | Testing is now run with `tox <https://tox.wiki/en/latest/>`_ (``pip install tox``).
58 | Tests can be found in ``astrocut/tests/``.
59 |
60 | .. code-block:: bash
61 |
62 | $ tox -e test
63 |
64 | Tests can also be run directly with pytest:
65 |
66 | .. code-block:: bash
67 |
68 | $ pip install -e .[test]
69 | $ pytest
70 |
71 | Documentation
72 | =============
73 | Documentation files are found in ``docs/``.
74 |
75 | We now build the documentation with `tox <https://tox.wiki/en/latest/>`_ (``pip install tox``):
76 |
77 | .. code-block:: bash
78 |
79 | $ tox -e build_docs
80 |
81 | You can also build the documentation with Sphinx directly using:
82 |
83 | .. code-block:: bash
84 |
85 | $ pip install -e .[docs]
86 | $ cd docs
87 | $ make html
88 |
89 | The built docs will be in ``docs/_build/html/``. To view them, open ``file:///path/to/astrocut/repo/docs/_build/html/index.html`` in the browser of your choice.
90 |
91 |
92 | Release Protocol
93 | ================
94 |
95 | GitHub Action Releases
96 | ^^^^^^^^^^^^^^^^^^^^^^
97 |
98 | The `pypi-package.yml <.github/workflows/pypi-package.yml>`_ GitHub workflow creates a PyPI release. The job in this workflow is triggered when a tag is pushed or a GitHub release (+tag) is created, and uses `OpenAstronomy's GitHub action workflow <https://github.com/OpenAstronomy/github-actions-workflows>`_
99 | for publishing pure Python packages (see that workflow's documentation for details).
100 |
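  | As a sketch, a release might be triggered by pushing an annotated tag (the tag name here is illustrative):
  | 
  | .. code-block:: bash
  | 
  |     $ git tag -a v1.2.3 -m "Release v1.2.3"
  |     $ git push origin v1.2.3
  | 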
101 | Manual Releases
102 | ^^^^^^^^^^^^^^^
103 |
104 | For making releases manually, follow the release instructions from the `Astropy package template <https://github.com/astropy/package-template>`_.
105 |
106 | *Requirements:*
107 |
108 | - build (``pip install build``)
109 | - twine (``pip install twine``)
110 |
111 | *Notes:*
112 |
113 | - Astrocut uses ``setuptools_scm`` to manage version numbers.
114 | - Astrocut has a ``pyproject.toml`` file.
115 | - If the given twine command doesn't work, you likely need ``python -m twine upload dist/*``.
116 | - You shouldn't have to trigger a Read the Docs build manually; one should run on its own in ~20 minutes.
117 |
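  | As a sketch, a manual build and upload using the requirements above might look like:
  | 
  | .. code-block:: bash
  | 
  |     $ python -m build
  |     $ python -m twine upload dist/*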
118 |
119 | Contributing
120 | ------------
121 |
122 | We love contributions! Astrocut is open source,
123 | built on open source, and we'd love to have you hang out in our community.
124 |
125 | **Imposter syndrome disclaimer**: We want your help. No, really.
126 |
127 | There may be a little voice inside your head that is telling you that you're not
128 | ready to be an open source contributor; that your skills aren't nearly good
129 | enough to contribute. What could you possibly offer a project like this one?
130 |
131 | We assure you - the little voice in your head is wrong. If you can write code at
132 | all, you can contribute code to open source. Contributing to open source
133 | projects is a fantastic way to advance one's coding skills. Writing perfect code
134 | isn't the measure of a good developer (that would disqualify all of us!); it's
135 | trying to create something, making mistakes, and learning from those
136 | mistakes. That's how we all improve, and we are happy to help others learn.
137 |
138 | Being an open source contributor doesn't just mean writing code, either. You can
139 | help out by writing documentation, tests, or even giving feedback about the
140 | project (and yes - that includes giving feedback about the contribution
141 | process). Some of these contributions may be the most valuable to the project as
142 | a whole, because you're coming to the project with fresh eyes, so you can see
143 | the errors and assumptions that seasoned contributors have glossed over.
144 |
145 | Note: This disclaimer was originally written by
146 | `Adrienne Lowe `_ for a
147 | `PyCon talk `_, and was adapted by
148 | Astrocut based on its use in the README file for the
149 | `MetPy project `_.
150 |
151 |
152 | License
153 | -------
154 |
155 | This project is Copyright (c) MAST Archive Developers and licensed under
156 | the terms of the BSD 3-Clause license. This package is based upon
157 | the `Astropy package template `_
158 | which is licensed under the BSD 3-clause license. See the licenses folder for
159 | more information.
160 |
161 |
--------------------------------------------------------------------------------
/astrocut/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from astropy.io import fits
4 | from astropy import wcs
5 | from astropy.coordinates import SkyCoord
6 | from astropy import units as u
7 | from astropy.utils.data import get_pkg_data_filename
8 | import pytest
9 |
10 | from astrocut.exceptions import InputWarning, InvalidQueryError
11 |
12 | from ..utils import utils
13 |
14 |
15 | # Example FFI WCS for testing
16 | with open(get_pkg_data_filename('data/ex_ffi_wcs.txt'), "r") as FLE:
17 | WCS_STR = FLE.read()
18 |
19 |
20 | @pytest.mark.parametrize("input_value, expected", [
21 | (5, np.array((5, 5))), # scalar
22 | (10 * u.pix, np.array((10, 10)) * u.pix), # Astropy quantity
23 | ((5, 10), np.array((5, 10))), # tuple
24 | ([10, 5], np.array((10, 5))), # list
25 | (np.array((5, 10)), np.array((5, 10))), # array
26 | ])
27 | def test_parse_size_input(input_value, expected):
28 | """Test that different types of input are accurately parsed into cutout sizes."""
29 | cutout_size = utils.parse_size_input(input_value)
30 | assert np.array_equal(cutout_size, expected)
31 |
32 |
33 | def test_parse_size_input_dimension_warning():
34 | """Test that a warning is output when input has too many dimensions"""
35 | warning = "Too many dimensions in cutout size, only the first two will be used."
36 | with pytest.warns(InputWarning, match=warning):
37 | cutout_size = utils.parse_size_input((5, 5, 10))
38 | assert np.array_equal(cutout_size, np.array((5, 5)))
39 |
40 |
41 | def test_parse_size_input_invalid():
42 | """Test that an error is raised when one of the size dimensions is not positive"""
43 | err = ('Cutout size dimensions must be greater than zero.')
44 | with pytest.raises(InvalidQueryError, match=err):
45 | utils.parse_size_input(0)
46 |
47 | with pytest.raises(InvalidQueryError, match=err):
48 | utils.parse_size_input((0, 5))
49 |
50 | with pytest.raises(InvalidQueryError, match=err):
51 |         utils.parse_size_input((5, 0))
52 |
53 |
54 | def test_get_cutout_limits():
55 |
56 | test_img_wcs_kwds = fits.Header(cards=[('NAXIS', 2, 'number of array dimensions'),
57 | ('NAXIS1', 20, ''),
58 | ('NAXIS2', 30, ''),
59 | ('CTYPE1', 'RA---TAN', 'Right ascension, gnomonic projection'),
60 | ('CTYPE2', 'DEC--TAN', 'Declination, gnomonic projection'),
61 | ('CRVAL1', 100, '[deg] Coordinate value at reference point'),
62 | ('CRVAL2', 20, '[deg] Coordinate value at reference point'),
63 | ('CRPIX1', 10, 'Pixel coordinate of reference point'),
64 | ('CRPIX2', 15, 'Pixel coordinate of reference point'),
65 | ('CDELT1', 1.0, '[deg] Coordinate increment at reference point'),
66 | ('CDELT2', 1.0, '[deg] Coordinate increment at reference point'),
67 | ('WCSAXES', 2, 'Number of coordinate axes'),
68 | ('PC1_1', 1, 'Coordinate transformation matrix element'),
69 | ('PC2_2', 1, 'Coordinate transformation matrix element'),
70 | ('CUNIT1', 'deg', 'Units of coordinate increment and value'),
71 | ('CUNIT2', 'deg', 'Units of coordinate increment and value')])
72 |
73 | test_img_wcs = wcs.WCS(test_img_wcs_kwds)
74 |
75 | center_coord = SkyCoord("100 20", unit='deg')
76 | cutout_size = [10, 10]
77 |
78 | lims = utils.get_cutout_limits(test_img_wcs, center_coord, cutout_size)
79 | assert (lims[0, 1] - lims[0, 0]) == (lims[1, 1] - lims[1, 0])
80 | assert (lims == np.array([[4, 14], [9, 19]])).all()
81 |
82 | cutout_size = [10, 5]
83 | lims = utils.get_cutout_limits(test_img_wcs, center_coord, cutout_size)
84 | assert (lims[0, 1] - lims[0, 0]) == 10
85 | assert (lims[1, 1] - lims[1, 0]) == 5
86 |
87 | cutout_size = [.1, .1]*u.deg
88 | lims = utils.get_cutout_limits(test_img_wcs, center_coord, cutout_size)
89 | assert (lims[0, 1] - lims[0, 0]) == (lims[1, 1] - lims[1, 0])
90 | assert (lims[0, 1] - lims[0, 0]) == 1
91 |
92 | cutout_size = [4, 5]*u.deg
93 | lims = utils.get_cutout_limits(test_img_wcs, center_coord, cutout_size)
94 | assert (lims[0, 1] - lims[0, 0]) == 4
95 | assert (lims[1, 1] - lims[1, 0]) == 5
96 |
97 | center_coord = SkyCoord("90 20", unit='deg')
98 | cutout_size = [4, 5]*u.deg
99 | lims = utils.get_cutout_limits(test_img_wcs, center_coord, cutout_size)
100 | assert lims[0, 0] < 0
101 |
102 | center_coord = SkyCoord("100 5", unit='deg')
103 | cutout_size = [4, 5]*u.pixel
104 | lims = utils.get_cutout_limits(test_img_wcs, center_coord, cutout_size)
105 | assert lims[1, 0] < 0
106 |
107 |
108 | def test_get_cutout_wcs():
109 | test_img_wcs_kwds = fits.Header(cards=[('NAXIS', 2, 'number of array dimensions'),
110 | ('NAXIS1', 20, ''),
111 | ('NAXIS2', 30, ''),
112 | ('CTYPE1', 'RA---TAN', 'Right ascension, gnomonic projection'),
113 | ('CTYPE2', 'DEC--TAN', 'Declination, gnomonic projection'),
114 | ('CRVAL1', 100, '[deg] Coordinate value at reference point'),
115 | ('CRVAL2', 20, '[deg] Coordinate value at reference point'),
116 | ('CRPIX1', 10, 'Pixel coordinate of reference point'),
117 | ('CRPIX2', 15, 'Pixel coordinate of reference point'),
118 | ('CDELT1', 1.0, '[deg] Coordinate increment at reference point'),
119 | ('CDELT2', 1.0, '[deg] Coordinate increment at reference point'),
120 | ('WCSAXES', 2, 'Number of coordinate axes'),
121 | ('PC1_1', 1, 'Coordinate transformation matrix element'),
122 | ('PC2_2', 1, 'Coordinate transformation matrix element'),
123 | ('CUNIT1', 'deg', 'Units of coordinate increment and value'),
124 | ('CUNIT2', 'deg', 'Units of coordinate increment and value')])
125 |
126 | test_img_wcs = wcs.WCS(test_img_wcs_kwds)
127 |
128 | center_coord = SkyCoord("100 20", unit='deg')
129 | cutout_size = [4, 5]*u.deg
130 | lims = utils.get_cutout_limits(test_img_wcs, center_coord, cutout_size)
131 | cutout_wcs = utils.get_cutout_wcs(test_img_wcs, lims)
132 | assert (cutout_wcs.wcs.crval == [100, 20]).all()
133 | assert (cutout_wcs.wcs.crpix == [3, 4]).all()
134 |
135 | center_coord = SkyCoord("100 5", unit='deg')
136 | cutout_size = [4, 5]*u.deg
137 | lims = utils.get_cutout_limits(test_img_wcs, center_coord, cutout_size)
138 | cutout_wcs = utils.get_cutout_wcs(test_img_wcs, lims)
139 | assert (cutout_wcs.wcs.crval == [100, 20]).all()
140 | assert (cutout_wcs.wcs.crpix == [3, 19]).all()
141 |
142 | center_coord = SkyCoord("110 20", unit='deg')
143 | cutout_size = [10, 10]*u.deg
144 | lims = utils.get_cutout_limits(test_img_wcs, center_coord, cutout_size)
145 | cutout_wcs = utils.get_cutout_wcs(test_img_wcs, lims)
146 | assert (cutout_wcs.wcs.crval == [100, 20]).all()
147 | assert (cutout_wcs.wcs.crpix == [-3, 6]).all()
148 |
149 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
3 | #
4 | # Astropy documentation build configuration file.
5 | #
6 | # This file is execfile()d with the current directory set to its containing dir.
7 | #
8 | # Note that not all possible configuration values are present in this file.
9 | #
10 | # All configuration values have a default. Some values are defined in
11 | # the global Astropy configuration which is loaded here before anything else.
12 | # See astropy.sphinx.conf for which values are set there.
13 |
14 | # If extensions (or modules to document with autodoc) are in another directory,
15 | # add these directories to sys.path here. If the directory is relative to the
16 | # documentation root, use os.path.abspath to make it absolute, like shown here.
17 | # sys.path.insert(0, os.path.abspath('..'))
18 | # IMPORTANT: the above commented section was generated by sphinx-quickstart, but
19 | # is *NOT* appropriate for astropy or Astropy affiliated packages. It is left
20 | # commented out with this explanation to make it clear why this should not be
21 | # done. If the sys.path entry above is added, when the astropy.sphinx.conf
22 | # import occurs, it will import the *source* version of astropy instead of the
23 | # version installed (if invoked as "make html" or directly with sphinx), or the
24 | # version in the build directory (if "python setup.py build_sphinx" is used).
25 | # Thus, any C-extensions that are needed to build the documentation will *not*
26 | # be accessible, and the documentation will not build correctly.
27 |
28 | import datetime
29 | import os
30 | import sys
31 | from importlib import import_module
32 |
33 | try:
34 | from sphinx_astropy.conf.v1 import * # noqa
35 | except ImportError:
36 | print('ERROR: the documentation requires the sphinx-astropy package to be installed')
37 | sys.exit(1)
38 |
39 | # Get configuration information from setup.cfg
40 | from configparser import ConfigParser
41 | conf = ConfigParser()
42 |
43 | conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')])
44 | setup_cfg = dict(conf.items('metadata'))
45 |
46 | # -- General configuration ----------------------------------------------------
47 |
48 | # By default, highlight as Python 3.
49 | highlight_language = 'python3'
50 |
51 | # If your documentation needs a minimal Sphinx version, state it here.
52 | #needs_sphinx = '1.2'
53 |
54 | # To perform a Sphinx version check that needs to be more specific than
55 | # major.minor, call `check_sphinx_version("x.y.z")` here.
56 | # check_sphinx_version("1.2.1")
57 |
58 | # List of patterns, relative to source directory, that match files and
59 | # directories to ignore when looking for source files.
60 |
61 | # This is added to the end of RST files - a good place to put substitutions to
62 | # be used globally.
63 | rst_epilog += """
64 | """
65 |
66 | # -- Project information ------------------------------------------------------
67 |
68 | # This does not *have* to match the package name, but typically does
69 | project = setup_cfg['name']
70 | author = setup_cfg['author']
71 | copyright = '{0}, {1}'.format(
72 | datetime.datetime.now().year, setup_cfg['author'])
73 |
74 | # The version info for the project you're documenting, acts as replacement for
75 | # |version| and |release|, also used in various other places throughout the
76 | # built documents.
77 |
78 | import_module(setup_cfg['name'])
79 | package = sys.modules[setup_cfg['name']]
80 |
81 | # The short X.Y version.
82 | version = package.__version__.split('-', 1)[0]
83 | # The full version, including alpha/beta/rc tags.
84 | release = package.__version__
85 |
86 |
87 | # -- Options for HTML output --------------------------------------------------
88 |
89 | # A NOTE ON HTML THEMES
90 | # The global astropy configuration uses a custom theme, 'bootstrap-astropy',
91 | # which is installed along with astropy. A different theme can be used or
92 | # the options for this theme can be modified by overriding some of the
93 | # variables set in the global configuration. The variables set in the
94 | # global configuration are listed below, commented out.
95 |
96 |
97 | # Add any paths that contain custom themes here, relative to this directory.
98 | # To use a different custom theme, add the directory containing the theme.
99 | #html_theme_path = ["_themes",]
100 |
101 | # Custom template path, adding custom css and home link
102 | templates_path = ["_templates"]
103 |
104 | # The theme to use for HTML and HTML Help pages. See the documentation for
105 | # a list of builtin themes. To override the custom theme, set this to the
106 | # name of a builtin theme or the name of a custom theme in html_theme_path.
107 | html_theme = 'sphinx_rtd_theme'
108 |
109 | def setup_style(app):  # NOTE: unused legacy helper; styling is applied via html_css_files below
110 |     app.add_css_file("astrocut.css")  # add_css_file replaced the deprecated add_stylesheet
111 |
112 | master_doc = 'astrocut/contents'
113 | html_extra_path = ['index.html']
114 |
115 | # Custom sidebar templates, maps document names to template names.
116 | html_sidebars = { '**': ['globaltoc.html', 'localtoc.html', 'searchbox.html'] }
117 |
118 | # The name of an image file (relative to this directory) to place at the top
119 | # of the sidebar.
120 | html_logo = '_static/AstroCut_thumb.png'
121 |
122 | # The name of an image file (within the static path) to use as favicon of the
123 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
124 | # pixels large.
125 | html_favicon = '_static/AstroCut_thumb.png'
126 |
127 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
128 | # using the given strftime format.
129 | #html_last_updated_fmt = ''
130 |
131 | # The name for this set of Sphinx documents. If None, it defaults to
132 | # " v documentation".
133 | html_title = '{0} v{1}'.format(project, release)
134 |
135 | # Output file base name for HTML help builder.
136 | htmlhelp_basename = project + 'doc'
137 |
138 | # Static files to copy after template files
139 | html_static_path = ['_static']
140 | html_css_files = ['astrocut.css']
141 |
142 |
143 | # -- Options for LaTeX output -------------------------------------------------
144 |
145 | # Grouping the document tree into LaTeX files. List of tuples
146 | # (source start file, target name, title, author, documentclass [howto/manual]).
147 | latex_documents = [('index', project + '.tex', project + u' Documentation',
148 | author, 'manual')]
149 |
150 |
151 | # -- Options for manual page output -------------------------------------------
152 |
153 | # One entry per manual page. List of tuples
154 | # (source start file, name, description, authors, manual section).
155 | man_pages = [('index', project.lower(), project + u' Documentation',
156 | [author], 1)]
157 |
158 |
159 | # -- Options for the edit_on_github extension ---------------------------------
160 |
161 | if setup_cfg.get('edit_on_github', 'false').lower() == 'true':
162 |
163 | extensions += ['sphinx_astropy.ext.edit_on_github']
164 |
165 | edit_on_github_project = setup_cfg['github_project']
166 | edit_on_github_branch = "main"
167 |
168 | edit_on_github_source_root = ""
169 | edit_on_github_doc_root = "docs"
170 |
171 | # -- Resolving issue number to links in changelog -----------------------------
172 | github_issues_url = 'https://github.com/{0}/issues/'.format(setup_cfg['github_project'])
173 |
174 | # -- Turn on nitpicky mode for sphinx (to warn about references not found) ----
175 | #
176 | # nitpicky = True
177 | # nitpick_ignore = []
178 | #
179 | # Some warnings are impossible to suppress, and you can list specific references
180 | # that should be ignored in a nitpick-exceptions file which should be inside
181 | # the docs/ directory. The format of the file should be:
182 | #
183 | #
184 | #
185 | # for example:
186 | #
187 | # py:class astropy.io.votable.tree.Element
188 | # py:class astropy.io.votable.tree.SimpleElement
189 | # py:class astropy.io.votable.tree.SimpleElementWithContent
190 | #
191 | # Uncomment the following lines to enable the exceptions:
192 | #
193 | # for line in open('nitpick-exceptions'):
194 | # if line.strip() == "" or line.startswith("#"):
195 | # continue
196 | # dtype, target = line.split(None, 1)
197 | # target = target.strip()
198 | # nitpick_ignore.append((dtype, six.u(target)))
199 |
--------------------------------------------------------------------------------
/astrocut/utils/utils.py:
--------------------------------------------------------------------------------
1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
2 |
3 | """This module includes a variety of functions that may be used by multiple modules."""
4 |
5 | import warnings
6 | import numpy as np
7 | import logging
8 |
9 | from datetime import date
10 |
11 | from astropy import wcs
12 | from astropy.io import fits
13 | from astropy import units as u
14 | from astropy.utils import deprecated
15 |
16 | from .. import __version__, log
17 | from ..exceptions import InvalidQueryError, InputWarning
18 |
19 |
20 | def parse_size_input(cutout_size):
21 | """
22 | Makes the given cutout size into a length 2 array.
23 |
24 | Parameters
25 | ----------
26 | cutout_size : int, array-like, `~astropy.units.Quantity`
27 | The size of the cutout array. If ``cutout_size`` is a scalar number or a scalar
28 | `~astropy.units.Quantity`, then a square cutout of ``cutout_size`` will be created.
29 | If ``cutout_size`` has two elements, they should be in ``(ny, nx)`` order. Scalar numbers
30 | in ``cutout_size`` are assumed to be in units of pixels. `~astropy.units.Quantity` objects
31 | must be in pixel or angular units.
32 |
33 | Returns
34 | -------
35 | response : array
36 | Length two cutout size array, in the form [ny, nx].
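  | 
  |     Examples
  |     --------
  |     A short illustrative sketch, mirroring the package's own tests::
  | 
  |         >>> parse_size_input(5)        # doctest: +SKIP
  |         array([5, 5])
  |         >>> parse_size_input((5, 10))  # doctest: +SKIP
  |         array([ 5, 10])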
37 | """
38 |
39 | # Making size into an array [ny, nx]
40 | if np.isscalar(cutout_size):
41 | cutout_size = np.repeat(cutout_size, 2)
42 |
43 | if isinstance(cutout_size, u.Quantity):
44 | cutout_size = np.atleast_1d(cutout_size)
45 | if len(cutout_size) == 1:
46 | cutout_size = np.repeat(cutout_size, 2)
47 | elif not isinstance(cutout_size, np.ndarray):
48 | cutout_size = np.array(cutout_size)
49 |
50 | if len(cutout_size) > 2:
51 | warnings.warn("Too many dimensions in cutout size, only the first two will be used.",
52 | InputWarning)
53 | cutout_size = cutout_size[:2]
54 |
55 | ny, nx = cutout_size
56 | if ny <= 0 or nx <= 0:
57 | raise InvalidQueryError('Cutout size dimensions must be greater than zero. '
58 | f'Provided size: ({cutout_size[0]}, {cutout_size[1]})')
59 |
60 | return cutout_size
61 |
62 |
63 | def get_cutout_limits(img_wcs, center_coord, cutout_size):
64 | """
65 | Takes the center coordinates, cutout size, and the wcs from
66 | which the cutout is being taken and returns the x and y pixel limits
67 | for the cutout.
68 |
69 | Note: This function does no bounds checking, so the returned limits are not
70 | guaranteed to overlap the original image.
71 |
72 | Parameters
73 | ----------
74 | img_wcs : `~astropy.wcs.WCS`
75 | The WCS for the image that the cutout is being cut from.
76 | center_coord : `~astropy.coordinates.SkyCoord`
77 | The central coordinate for the cutout
78 | cutout_size : array
79 |         [nx, ny] array, with ints (pixels) or astropy quantities
80 |
81 | Returns
82 | -------
83 | response : `numpy.array`
84 | The cutout pixel limits in an array of the form [[xmin,xmax],[ymin,ymax]]
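  | 
  |     Examples
  |     --------
  |     A short sketch mirroring the package's own tests, assuming ``img_wcs`` is a
  |     simple TAN WCS whose reference pixel (10, 15) maps to (100, 20) degrees, and
  |     that `~astropy.coordinates.SkyCoord` has been imported::
  | 
  |         >>> center = SkyCoord('100 20', unit='deg')       # doctest: +SKIP
  |         >>> get_cutout_limits(img_wcs, center, [10, 10])  # doctest: +SKIP
  |         array([[ 4, 14],
  |                [ 9, 19]])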
85 | """
86 |
87 | # Note: This is returning the center pixel in 1-up
88 | try:
89 | center_pixel = center_coord.to_pixel(img_wcs, 1)
90 | except wcs.NoConvergence: # If wcs can't converge, center coordinate is far from the footprint
91 | raise InvalidQueryError("Cutout location is not in image footprint!")
92 |
93 |     # For some reason you can sometimes get NaNs without a NoConvergence error
94 | if np.isnan(center_pixel).all():
95 | raise InvalidQueryError("Cutout location is not in image footprint!")
96 |
97 | lims = np.zeros((2, 2), dtype=int)
98 |
99 | for axis, size in enumerate(cutout_size):
100 |
101 | if not isinstance(size, u.Quantity): # assume pixels
102 | dim = size / 2
103 | elif size.unit == u.pixel: # also pixels
104 | dim = size.value / 2
105 | elif size.unit.physical_type == 'angle':
106 | pixel_scale = u.Quantity(wcs.utils.proj_plane_pixel_scales(img_wcs)[axis],
107 | img_wcs.wcs.cunit[axis])
108 | dim = (size / pixel_scale).decompose() / 2
109 |
110 | lims[axis, 0] = int(np.round(center_pixel[axis] - 1 - dim))
111 | lims[axis, 1] = int(np.round(center_pixel[axis] - 1 + dim))
112 |
113 | # The case where the requested area is so small it rounds to zero
114 | if lims[axis, 0] == lims[axis, 1]:
115 | lims[axis, 0] = int(np.floor(center_pixel[axis] - 1))
116 | lims[axis, 1] = lims[axis, 0] + 1
117 |
118 | return lims
119 |
120 |
121 | def get_cutout_wcs(img_wcs, cutout_lims):
122 | """
123 |     Takes the full image WCS and adjusts it for the cutout:
124 |     shifts the CRPIX values and adds physical WCS keywords.
125 |
126 | Parameters
127 | ----------
128 | img_wcs : `~astropy.wcs.WCS`
129 | WCS for the image the cutout is being cut from.
130 | cutout_lims : `numpy.array`
131 |         The cutout pixel limits in an array of the form [[xmin,xmax],[ymin,ymax]]
132 |
133 | Returns
134 | --------
135 | response : `~astropy.wcs.WCS`
136 | The cutout WCS object including SIP distortions if present.
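  | 
  |     Examples
  |     --------
  |     A short sketch mirroring the package's own tests, where ``lims`` came from
  |     `get_cutout_limits` on a simple TAN WCS::
  | 
  |         >>> cutout_wcs = get_cutout_wcs(img_wcs, lims)  # doctest: +SKIP
  |         >>> cutout_wcs.wcs.crval                        # doctest: +SKIP
  |         array([100.,  20.])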
137 | """
138 |
139 | # relax = True is important when the WCS has sip distortions, otherwise it has no effect
140 | wcs_header = img_wcs.to_header(relax=True)
141 |
142 | # Adjusting the CRPIX values
143 | wcs_header["CRPIX1"] -= cutout_lims[0, 0]
144 | wcs_header["CRPIX2"] -= cutout_lims[1, 0]
145 |
146 | # Adding the physical wcs keywords
147 | wcs_header.set("WCSNAMEP", "PHYSICAL", "name of world coordinate system alternate P")
148 | wcs_header.set("WCSAXESP", 2, "number of WCS physical axes")
149 |
150 | wcs_header.set("CTYPE1P", "RAWX", "physical WCS axis 1 type CCD col")
151 | wcs_header.set("CUNIT1P", "PIXEL", "physical WCS axis 1 unit")
152 | wcs_header.set("CRPIX1P", 1, "reference CCD column")
153 | wcs_header.set("CRVAL1P", cutout_lims[0, 0] + 1, "value at reference CCD column")
154 | wcs_header.set("CDELT1P", 1.0, "physical WCS axis 1 step")
155 |
156 |     wcs_header.set("CTYPE2P", "RAWY", "physical WCS axis 2 type CCD row")
157 | wcs_header.set("CUNIT2P", "PIXEL", "physical WCS axis 2 unit")
158 | wcs_header.set("CRPIX2P", 1, "reference CCD row")
159 | wcs_header.set("CRVAL2P", cutout_lims[1, 0] + 1, "value at reference CCD row")
160 | wcs_header.set("CDELT2P", 1.0, "physical WCS axis 2 step")
161 |
162 | return wcs.WCS(wcs_header)
163 |
164 |
165 | def _build_astrocut_primaryhdu(**keywords):
166 | """
167 |     Build a FITS PrimaryHDU with astrocut provenance keywords (ORIGIN, DATE, PROCVER), plus any given keyword/value pairs as additional header cards.
168 | """
169 |
170 | primary_hdu = fits.PrimaryHDU()
171 | primary_hdu.header.extend([("ORIGIN", 'STScI/MAST', "institution responsible for creating this file"),
172 | ("DATE", str(date.today()), "file creation date"),
173 | ('PROCVER', __version__, 'software version')])
174 | for kwd in keywords:
175 | primary_hdu.header[kwd] = keywords[kwd]
176 |
177 | return primary_hdu
178 |
179 |
180 | @deprecated(since="v0.9", alternative="make_fits")
181 | def save_fits(cutout_hdus, output_path, center_coord):
182 | return get_fits(cutout_hdus, center_coord=center_coord, output_path=output_path)
183 |
184 |
185 | def get_fits(cutout_hdus, center_coord=None, output_path=None):
186 | """
187 |     Combine one or more cutout HDUs into a single FITS object, and optionally save the file to disk.
188 |
189 | Parameters
190 | ----------
191 | cutout_hdus : list or `~astropy.io.fits.hdu.image.ImageHDU`
192 | The `~astropy.io.fits.hdu.image.ImageHDU` object(s) to be written to the fits file.
193 |     center_coord : `~astropy.coordinates.sky_coordinate.SkyCoord`
194 |         Optional. The center coordinate of the image cutouts. TODO: make more general?
195 |     output_path : str
196 |         Optional. The full path to the output FITS file.
  | 
  |     Returns
  |     -------
  |     response : `~astropy.io.fits.HDUList`
  |         The HDUList containing the primary HDU followed by the cutout HDUs.
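  | 
  |     Examples
  |     --------
  |     A minimal sketch, assuming ``cutout_hdu`` is an existing cutout
  |     `~astropy.io.fits.hdu.image.ImageHDU`::
  | 
  |         >>> hdulist = get_fits(cutout_hdu, output_path='cutout.fits')  # doctest: +SKIP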
197 | """
198 |
199 | if isinstance(cutout_hdus, fits.hdu.image.ImageHDU):
200 | cutout_hdus = [cutout_hdus]
201 |
202 | # Setting up the Primary HDU
203 | keywords = dict()
204 | if center_coord:
205 | keywords = {"RA_OBJ": (center_coord.ra.deg, '[deg] right ascension'),
206 | "DEC_OBJ": (center_coord.dec.deg, '[deg] declination')}
207 | primary_hdu = _build_astrocut_primaryhdu(**keywords)
208 |
209 | cutout_hdulist = fits.HDUList([primary_hdu] + cutout_hdus)
210 |
211 | if output_path:
212 | # Writing out the hdu often causes a warning as the ORIG_FLE card description is truncated
213 | with warnings.catch_warnings():
214 | warnings.simplefilter("ignore")
215 | cutout_hdulist.writeto(output_path, overwrite=True, checksum=True)
216 |
217 | return cutout_hdulist
218 |
219 |
220 | def _handle_verbose(verbose: bool):
221 | """Set the log level according to the verbose parameter"""
222 | level = logging.DEBUG if verbose else logging.INFO
223 | log.setLevel(level)
224 |
--------------------------------------------------------------------------------
/astrocut/tests/test_cube_factory.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | import numpy as np
3 | import pytest
4 |
5 | from astropy.io import fits
6 | from astropy.table import Table
7 | from re import findall
8 |
9 | from astrocut.exceptions import DataWarning, InvalidInputError
10 |
11 | from .utils_for_test import create_test_ffis
12 | from ..cube_factory import CubeFactory
13 | from ..tica_cube_factory import TicaCubeFactory
14 |
15 |
16 | @pytest.fixture
17 | def img_size():
18 | """ Fixture for the size of the test images to be created. """
19 | return 10
20 |
21 |
22 | @pytest.fixture
23 | def num_images():
24 | """ Fixture for the number of test images to be created. """
25 | return 100
26 |
27 |
28 | def test_make_cube(tmpdir, img_size, num_images):
29 | """
30 | Testing the make cube functionality by making a bunch of test FFIs,
31 | making the cube, and checking the results.
32 | """
33 | cube_maker = CubeFactory()
34 | tmp_path = Path(tmpdir)
35 |
36 | # Create test FFIs
37 | ffi_files = create_test_ffis(img_size, num_images, dir_name=tmpdir)
38 | cube_path = tmp_path / 'out_dir' / 'test_cube.fits'
39 |
40 | # Generate cube
41 | cube_file = cube_maker.make_cube(ffi_files, cube_path, verbose=False)
42 |
43 | # Open FITS file and extract cube data
44 | with fits.open(cube_file) as hdu:
45 | cube = hdu[1].data
46 | tab = Table(hdu[2].data)
47 | filenames = np.array([Path(x).name for x in ffi_files])
48 |
49 | # Expected cube shape and values
50 | ecube = np.zeros((img_size, img_size, num_images, 2))
51 | plane = np.arange(img_size*img_size, dtype=np.float32).reshape((img_size, img_size))
52 | assert cube.shape == ecube.shape, 'Mismatch between cube shape and expected shape'
53 |
54 | for i in range(num_images):
55 | ecube[:, :, i, 0] = -plane
56 | ecube[:, :, i, 1] = plane
57 | plane += img_size * img_size
58 | assert np.all(cube == ecube), 'Cube values do not match expected values'
59 |
60 | assert np.all(tab['TSTART'] == np.arange(num_images)), 'TSTART mismatch in table'
61 | assert np.all(tab['TSTOP'] == np.arange(num_images)+1), 'TSTOP mismatch in table'
62 | assert np.all(tab['FFI_FILE'] == np.array(filenames)), 'FFI_FILE mismatch in table'
63 |
64 |
65 | def test_make_and_update_cube(tmpdir, img_size, num_images):
66 | """
67 |     Testing the make cube and update cube functionality for TicaCubeFactory by making a bunch of test FFIs,
68 | making the cube with first half of the FFIs, updating the same cube with the second half,
69 | and checking the results.
70 | """
71 | cube_maker = TicaCubeFactory()
72 | tmp_path = Path(tmpdir)
73 |
74 | # Create test FFIs
75 | ffi_files = create_test_ffis(img_size, num_images, product='TICA', dir_name=tmpdir)
76 | cube_path = tmp_path / 'out_dir' / 'test_update_cube.fits'
77 |
78 | # Generate cube
79 | cube_file = cube_maker.make_cube(ffi_files[:num_images // 2], cube_path, verbose=False)
80 |
81 | with fits.open(cube_file) as hdu:
82 | cube = hdu[1].data
83 |
84 | # Expected values for cube before update_cube
85 | ecube = np.zeros((img_size, img_size, num_images // 2, 1))
86 | plane = np.arange(img_size*img_size, dtype=np.float32).reshape((img_size, img_size))
87 |
88 | assert cube.shape == ecube.shape, 'Mismatch between cube shape and expected shape'
89 |
90 | for i in range(num_images // 2):
91 | ecube[:, :, i, 0] = -plane
92 |         # we don't need to test error array because TICA doesn't come with error arrays
93 | # so index 1 will always be blank
94 | # ecube[:, :, i, 1] = plane
95 | plane += img_size * img_size
96 |
97 | assert np.all(cube == ecube), 'Cube values do not match expected values'
98 |
99 | # Update cube
100 | cube_file = cube_maker.update_cube(ffi_files[num_images // 2:], cube_file, verbose=False)
101 |
102 | with fits.open(cube_file) as hdu:
103 | cube = hdu[1].data
104 | tab = Table(hdu[2].data)
105 | filenames = np.array([Path(x).name for x in ffi_files])
106 |
107 | # Expected values for cube after update_cube
108 | ecube = np.zeros((img_size, img_size, num_images, 1))
109 | plane = np.arange(img_size*img_size, dtype=np.float32).reshape((img_size, img_size))
110 |
111 | assert cube.shape == ecube.shape, 'Mismatch between cube shape and expected shape'
112 |
113 | for i in range(num_images):
114 | ecube[:, :, i, 0] = -plane
115 |         # we don't need to test error array because TICA doesn't come with error arrays
116 | # so index 1 will always be blank
117 | # ecube[:, :, i, 1] = plane
118 | plane += img_size * img_size
119 |
120 | assert np.all(cube == ecube), 'Cube values do not match expected values'
121 | assert np.all(tab['STARTTJD'] == np.arange(num_images)), 'STARTTJD mismatch in table'
122 | assert np.all(tab['ENDTJD'] == np.arange(num_images)+1), 'ENDTJD mismatch in table'
123 | assert np.all(tab['FFI_FILE'] == np.array(filenames)), 'FFI_FILE mismatch in table'
124 |
125 | # Fail if trying to update a cube with no new files
126 | with pytest.raises(InvalidInputError, match='No new images were found'):
127 | with pytest.warns(DataWarning, match='Removed duplicate file'):
128 | cube_maker.update_cube(ffi_files[:1], cube_file)
129 |
130 |
131 | def test_iteration(tmpdir, caplog):
132 | """
133 | Testing cubes made with different numbers of iterations against each other.
134 | """
135 | cube_maker = CubeFactory()
136 | tmp_path = Path(tmpdir)
137 | img_size = 1000
138 | num_images = 10
139 |
140 | # Create test FFIs
141 | ffi_files = create_test_ffis(img_size, num_images, dir_name=tmpdir)
142 |
143 | # Single iteration (higher memory usage)
144 | cube_file_1 = cube_maker.make_cube(ffi_files, tmp_path / 'iterated_cube_1.fits',
145 | max_memory=0.5, verbose=True)
146 | assert len(findall('Completed block', caplog.text)) == 1, 'Incorrect number of iterations'
147 | assert len(findall('Completed file', caplog.text)) == num_images, 'Incorrect number of complete files'
148 | caplog.clear()
149 |
150 | # Multiple iterations (lower memory usage)
151 | cube_file_2 = cube_maker.make_cube(ffi_files, tmp_path / 'iterated_cube_2.fits',
152 | max_memory=0.05, verbose=True)
154 | assert len(findall('Completed block', caplog.text)) == 2, 'Incorrect number of iterations'
155 | assert len(findall('Completed file', caplog.text)) == num_images * 2, 'Incorrect number of complete files'
156 |
157 | # Open FITS files and compare cubes
158 | with fits.open(cube_file_1) as hdu_1, fits.open(cube_file_2) as hdu_2:
159 | cube_1 = hdu_1[1].data
160 | cube_2 = hdu_2[1].data
161 |
162 | assert cube_1.shape == cube_2.shape, 'Mismatch between cube shape for 1 vs 2 iterations'
163 | assert np.all(cube_1 == cube_2), 'Cubes made in 1 vs 2 iterations do not match'
164 |
165 | # Expected values for cube
166 | ecube = np.zeros((img_size, img_size, num_images, 2))
167 | plane = np.arange(img_size * img_size, dtype=np.float32).reshape((img_size, img_size))
168 | assert cube_1.shape == ecube.shape, 'Mismatch between cube shape and expected shape'
169 |
170 | for i in range(num_images):
171 | ecube[:, :, i, 0] = -plane
172 | ecube[:, :, i, 1] = plane
173 | plane += img_size * img_size
174 |
175 | assert np.all(cube_1 == ecube), 'Cube values do not match expected values'
176 |
177 |
178 | @pytest.mark.parametrize('ffi_type', ['TICA', 'SPOC'])
179 | def test_invalid_inputs(tmpdir, ffi_type, img_size, num_images):
180 | """
181 | Test that an error is raised when users attempt to make cubes with an invalid file type.
182 | """
183 | # Assigning some variables
184 |     product = ffi_type  # parametrize supplies either 'TICA' or 'SPOC'
185 | value_error = ('One or more incorrect file types were input.')
186 |
187 | # Create test FFI files
188 | ffi_files = create_test_ffis(img_size=img_size,
189 | num_images=num_images,
190 | dir_name=tmpdir,
191 | product=product)
192 |
193 | # Create opposite cube factory of input
194 | cube_maker = CubeFactory() if ffi_type == 'TICA' else TicaCubeFactory()
195 |
196 | # Should raise a Value Error due to incorrect file type
197 | with pytest.raises(ValueError, match=value_error):
198 | cube_maker.make_cube(ffi_files)
199 |
200 | # Fail if trying to update a cube file that doesn't exist
201 | new_ffi_files = create_test_ffis(img_size=10,
202 | num_images=10,
203 | dir_name=tmpdir,
204 | product=product)
205 | with pytest.raises(InvalidInputError, match='Cube file was not found'):
206 | cube_maker.update_cube(new_ffi_files, 'non_existent_file.fits')
207 |
--------------------------------------------------------------------------------
/astrocut/cutout_factory.py:
--------------------------------------------------------------------------------
1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
2 |
3 | """This module implements the cutout functionality."""
4 |
5 | from pathlib import Path
6 | from typing import Literal, Optional, Union, List, Tuple
7 |
8 | import astropy.units as u
9 | import numpy as np
10 | from astropy.coordinates import SkyCoord
11 | from astropy.utils.decorators import deprecated_renamed_argument
12 | from s3path import S3Path
13 |
14 | from .tess_cube_cutout import TessCubeCutout
15 |
16 |
17 | class CutoutFactory():
18 | """
19 | Class for creating image cutouts from TESS image cube files.
20 |
21 | This class encompasses all of the cutout functionality.
22 | In the current version, this means creating cutout target pixel files from
23 |     TESS full frame image cubes.
24 |
25 | This class is maintained for backwards compatibility. For maximum flexibility, we recommend using the
26 | `~astrocut.TessCubeCutout` class.
27 | """
28 |
29 | @deprecated_renamed_argument('product', None, since='1.1.0', message='The `product` argument is deprecated and '
30 | 'will be removed in a future version. Astrocut will only support cutouts from '
31 | 'SPOC products.')
32 | def cube_cut(self, cube_file: Union[str, Path, S3Path], coordinates: Union[SkyCoord, str],
33 | cutout_size: Union[int, np.ndarray, u.Quantity, List[int], Tuple[int]],
34 | product: str = 'SPOC', target_pixel_file: Optional[str] = None,
35 | output_path: Union[str, Path] = '.', memory_only: bool = False,
36 | threads: Union[int, Literal["auto"]] = 1, verbose: bool = False):
37 | """
38 | Takes a cube file (as created by `~astrocut.CubeFactory`), and makes a cutout target pixel
39 | file of the given size around the given coordinates. The target pixel file is formatted like
40 | a TESS pipeline target pixel file.
41 |
42 | This function is maintained for backwards compatibility. For maximum flexibility, we recommend using the
43 | `~astrocut.TessCubeCutout` class.
44 |
45 | Parameters
46 | ----------
47 | cube_file : str
48 | The cube file containing all the images to be cutout.
49 | Must be in the format returned by ~astrocut.make_cube.
50 | coordinates : str or `astropy.coordinates.SkyCoord` object
51 | The position around which to cutout.
52 | It may be specified as a string ("ra dec" in degrees)
53 | or as the appropriate `~astropy.coordinates.SkyCoord` object.
54 | cutout_size : int, array-like, `~astropy.units.Quantity`
55 | The size of the cutout array. If ``cutout_size``
56 | is a scalar number or a scalar `~astropy.units.Quantity`,
57 | then a square cutout of ``cutout_size`` will be created. If
58 | ``cutout_size`` has two elements, they should be in ``(ny, nx)``
59 | order. Scalar numbers in ``cutout_size`` are assumed to be in
60 | units of pixels. `~astropy.units.Quantity` objects must be in pixel or
61 | angular units.
62 | product : str
63 | .. deprecated:: 1.1.0
64 | This parameter is deprecated and will be removed in a future release.
65 | Only "SPOC" products will be supported.
66 | target_pixel_file : str
67 | Optional. The name for the output target pixel file.
68 | If no name is supplied, the file will be named:
69 |             ``<cube_file_base>_<ra>_<dec>_<cutout_size>_astrocut.fits``
70 | output_path : str
71 | Optional. The path where the output file is saved.
72 | The current directory is default.
73 | memory_only : bool
74 | Optional. If true, the cutout is made in memory only and not saved to disk.
75 | Default is False.
76 | threads : int, "auto", default=1
77 |             Number of threads to use when making remote (e.g., S3) cutouts; threads are not used for local access.
78 |             ``<=1`` disables the threadpool, ``>1`` sets the threadpool to the specified number of threads, and
79 |             ``"auto"`` uses the `concurrent.futures.ThreadPoolExecutor` default of ``cpu_count + 4``, capped at 32.
80 | verbose : bool
81 | Optional. If true intermediate information is printed.
82 |
83 | Returns
84 | -------
85 | response: string or `~astropy.io.fits.HDUList` or None
86 | If successful, returns the target pixel file as an `~astropy.io.fits.HDUList` object,
87 | or the path to the target pixel file if saved to disk.
88 |             If unsuccessful, returns None.
89 | """
90 | cube_cutout = TessCubeCutout(input_files=cube_file,
91 | coordinates=coordinates,
92 | cutout_size=cutout_size,
93 | product=product,
94 | threads=threads,
95 | verbose=verbose)
96 |
97 | # Assign these attributes to be backwards compatible
98 | cutout_obj = cube_cutout.cutouts_by_file[cube_file]
99 | self.cube_wcs = cutout_obj.cube_wcs
100 | self.center_coord = cube_cutout._coordinates
101 | self.cutout_lims = cutout_obj.cutout_lims
102 | self.cutout_wcs = cutout_obj.wcs
103 |
104 | if memory_only:
105 | return cube_cutout.tpf_cutouts[0]
106 |
107 | return cube_cutout.write_as_tpf(output_dir=output_path,
108 | output_file=target_pixel_file)[0]
109 |
110 |
111 | @deprecated_renamed_argument('product', None, since='1.1.0', message='The `product` argument is deprecated and will be '
112 | 'removed in a future version. Astrocut will only support cutouts from SPOC products.')
113 | def cube_cut(cube_file: Union[str, Path, S3Path], coordinates: Union[SkyCoord, str],
114 | cutout_size: Union[int, np.ndarray, u.Quantity, List[int], Tuple[int]],
115 | product: str = 'SPOC', target_pixel_file: Optional[str] = None,
116 | output_path: Union[str, Path] = '.', memory_only: bool = False,
117 | threads: Union[int, Literal["auto"]] = 1, verbose: bool = False):
118 | """
119 | Takes a cube file (as created by `~astrocut.CubeFactory`), and makes a cutout target pixel
120 | file of the given size around the given coordinates. The target pixel file is formatted like
121 | a TESS pipeline target pixel file.
122 |
123 | This function is maintained for backwards compatibility. For maximum flexibility, we recommend using the
124 | `~astrocut.TessCubeCutout` class.
125 |
126 | Parameters
127 | ----------
128 | cube_file : str
129 | The cube file containing all the images to be cutout.
130 | Must be in the format returned by ~astrocut.make_cube.
131 | coordinates : str or `astropy.coordinates.SkyCoord` object
132 | The position around which to cutout.
133 | It may be specified as a string ("ra dec" in degrees)
134 | or as the appropriate `~astropy.coordinates.SkyCoord` object.
135 | cutout_size : int, array-like, `~astropy.units.Quantity`
136 | The size of the cutout array. If ``cutout_size``
137 | is a scalar number or a scalar `~astropy.units.Quantity`,
138 | then a square cutout of ``cutout_size`` will be created. If
139 | ``cutout_size`` has two elements, they should be in ``(ny, nx)``
140 | order. Scalar numbers in ``cutout_size`` are assumed to be in
141 | units of pixels. `~astropy.units.Quantity` objects must be in pixel or
142 | angular units.
143 | product : str
144 | .. deprecated:: 1.1.0
145 | This parameter is deprecated and will be removed in a future release.
146 | Only "SPOC" products will be supported.
147 | target_pixel_file : str
148 | Optional. The name for the output target pixel file.
149 | If no name is supplied, the file will be named:
150 |         ``<cube_file_base>_<ra>_<dec>_<cutout_size>_astrocut.fits``
151 | output_path : str
152 | Optional. The path where the output file is saved.
153 | The current directory is default.
154 | memory_only : bool
155 | Optional. If true, the cutout is made in memory only and not saved to disk.
156 | Default is False.
157 | threads : int, "auto", default=1
158 |         Number of threads to use when making remote (e.g., S3) cutouts; threads are not used for local access.
159 |         ``<=1`` disables the threadpool, ``>1`` sets the threadpool to the specified number of threads, and
160 |         ``"auto"`` uses the `concurrent.futures.ThreadPoolExecutor` default of ``cpu_count + 4``, capped at 32.
161 | verbose : bool
162 | Optional. If true intermediate information is printed.
163 |
164 | Returns
165 | -------
166 | response: string or `~astropy.io.fits.HDUList` or None
167 | If successful, returns the target pixel file as an `~astropy.io.fits.HDUList` object,
168 | or the path to the target pixel file if saved to disk.
169 | If unsuccessful, returns None.
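  | 
  |     Examples
  |     --------
  |     A minimal sketch (the cube path and coordinates are illustrative)::
  | 
  |         >>> from astrocut import cube_cut                                            # doctest: +SKIP
  |         >>> tpf_path = cube_cut('tess-cube.fits', '259.36 36.21', cutout_size=10)    # doctest: +SKIP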
170 | """
171 | cube_cutout = TessCubeCutout(input_files=cube_file,
172 | coordinates=coordinates,
173 | cutout_size=cutout_size,
174 | product=product,
175 | threads=threads,
176 | verbose=verbose)
177 |
178 | if memory_only:
179 | return cube_cutout.tpf_cutouts[0]
180 |
181 | return cube_cutout.write_as_tpf(output_dir=output_path,
182 | output_file=target_pixel_file)[0]
183 |
--------------------------------------------------------------------------------
/CHANGES.rst:
--------------------------------------------------------------------------------
1 | Unreleased
2 | ----------
3 |
4 | - Added support in ``ra_dec_crossmatch`` for a cutout size of zero, enabling single-point matching to FFIs that contain
5 | the specified coordinates. [#166]
6 | - Added ``write_as_zip`` method to ``ASDFCutout``, ``FITSCutout``, ``TessCubeCutout``, and ``TessFootprintCutout`` classes to facilitate
7 | writing multiple cutouts into a single ZIP archive. [#167]
8 | - Added ``get_tess_sectors`` function to return TESS sector information for sectors whose footprints overlap with
9 | the given sky coordinates and cutout size. [#168]
10 | - Cutouts of ASDF data in FITS format now include embedded ASDF metadata in an "ASDF" extension within the FITS file for
11 | Python versions greater than or equal to 3.11. [#170]
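    |
    | A minimal sketch of the new ``write_as_zip`` method (the coordinates, size, and sequence numbers are
    | illustrative, and the import assumes the class's top-level export):
    |
    | .. code-block:: python
    |
    |     from astrocut import TessFootprintCutout
    |
    |     cutout = TessFootprintCutout('350 -80', cutout_size=5, sequence=[1, 13])
    |     zip_path = cutout.write_as_zip(output_dir='.', filename='tess_cutouts')  # -> 'tess_cutouts.zip'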
12 |
13 | Breaking Changes
14 | ^^^^^^^^^^^^^^^^
15 |
16 | - Cube cutout filenames now use a hyphen between dimensions (e.g., ``10-x-10`` instead of ``10x10``). They also include unit suffixes when
17 | users request sizes as an ``astropy.units.Quantity`` object (e.g., ``5arcmin-x-4arcmin`` or ``30arcsec-x-20arcsec``). RA/Dec formatting within
18 | filenames now uses 7 decimal places (``{:.7f}``) for consistency across classes. These changes may break code that parses filenames or relies on
19 | old glob patterns. [#167]
20 |
21 | Migration:
22 |
23 | - Update glob patterns from ``*_<ra>_<dec>_<x>x<y>_astrocut.fits`` to ``*_<ra>_<dec>_*-x-*_astrocut.fits``.
24 | - If parsing filenames, switch to flexible regex patterns (a parsing sketch follows this list):
25 |
26 | - RA/Dec: ``_(?P<ra>[-+]?\\d+(?:\\.\\d+)?)_(?P<dec>[-+]?\\d+(?:\\.\\d+)?)_``
27 | - Dimensions (with optional units): ``(?P<dim1>\\d+(?:\\.\\d+)?)(?P<unit1>arcsec|arcmin|deg|pixel|pix)?-x-(?P<dim2>\\d+(?:\\.\\d+)?)(?P<unit2>arcsec|arcmin|deg|pixel|pix)?``
28 | - Prefer reading RA/Dec, dimensions, and scales from file metadata (FITS headers/WCS) instead of relying on filenames.
29 | - Example transition:
30 |
31 | - Old: ``..._83.406310_-62.489771_64x64_astrocut.fits``
32 | - New (no unit - pixels assumed): ``..._83.4063100_-62.4897710_64-x-64_astrocut.fits``
33 | - New (with units): ``..._83.4063100_-62.4897710_5arcmin-x-4arcmin_astrocut.fits``
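    |
    | A minimal sketch of parsing a new-style filename with the patterns above (the filename and the
    | regex group names are illustrative):
    |
    | .. code-block:: python
    |
    |     import re
    |
    |     fname = 'tess_83.4063100_-62.4897710_5arcmin-x-4arcmin_astrocut.fits'
    |     coord_re = re.compile(r'_(?P<ra>[-+]?\d+(?:\.\d+)?)_(?P<dec>[-+]?\d+(?:\.\d+)?)_')
    |     dim_re = re.compile(r'(?P<dim1>\d+(?:\.\d+)?)(?P<unit1>arcsec|arcmin|deg|pixel|pix)?-x-'
    |                         r'(?P<dim2>\d+(?:\.\d+)?)(?P<unit2>arcsec|arcmin|deg|pixel|pix)?')
    |     print(coord_re.search(fname).groupdict())  # {'ra': '83.4063100', 'dec': '-62.4897710'}
    |     print(dim_re.search(fname).groupdict())    # {'dim1': '5', 'unit1': 'arcmin', 'dim2': '4', 'unit2': 'arcmin'}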
34 |
35 | - ASDF cutouts in FITS format now include cutout data in an ``ImageHDU`` extension called "CUTOUT". Code that reads ASDF cutouts from FITS files
36 | should be updated to access the "CUTOUT" extension for cutout data rather than the "PRIMARY" extension. [#170]
37 |
38 |
39 | 1.1.0 (2025-09-15)
40 | ------------------
41 |
42 | - Bugfix for transposed GWCS bounding box for ASDF cutouts. [#160]
43 | - Bugfix to correct ``array_shape`` and ``pixel_shape`` for GWCS objects. [#160]
44 | - By default, ``ASDFCutout`` makes cutouts of all arrays in the input file (e.g., data, error, uncertainty, variance, etc.)
45 | where the last two dimensions match the shape of the science data array. [#158]
46 | - By default, ASDF cutouts now preserve all metadata from the input file. [#158]
47 | - Add ``lite`` parameter to ``ASDFCutout`` to create minimal cutouts with only the science data and updated world coordinate system. [#158]
48 | - Add history entry to ASDF cutouts specifying the cutout shape and center coordinates. [#158]
49 | - Remove TICA (TESS Image Calibration) as an option for the ``product`` parameter in ``TessFootprintCutout``. [#161]
50 | - Deprecate the ``TicaCubeFactory`` class. [#161]
51 | - Deprecate the ``product`` parameter in the ``TessCubeCutout`` class, the ``TessFootprintCutout`` class, the ``cube_cut`` function,
52 | the ``CutoutFactory.cube_cut`` function, and the ``cube_cutout_from_footprint`` function. [#161]
53 |
54 |
55 | 1.0.1 (2025-05-12)
56 | -------------------
57 |
58 | - Bugfix so ``ASDFCutout.get_center_pixel`` preserves the GWCS bounding box. [#154]
59 | - Bugfix in ``ASDFCutout`` to use deep copies of data and GWCS to avoid links to original ASDF input.
60 |
61 |
62 | 1.0.0 (2025-04-28)
63 | -------------------
64 |
65 | - Introduce generalized cutout architecture with ``Cutout``, ``ImageCutout``, and ``FITSCutout`` classes. [#136]
66 | - Deprecate ``correct_wcs`` parameter in ``fits_cut`` as non-operational. [#136]
67 | - Add ``ASDFCutout`` class as a specialized cutout class for ASDF files. [#137]
68 | - Allow ``ASDFCutout`` and ``asdf_cut`` to accept multiple input files. [#137]
69 | - Deprecate ``output_file`` parameter in ``asdf_cut`` in favor of making outputs from a batch of input files. [#137]
70 | - Return ASDF cutouts in memory as ``astropy.nddata.Cutout2D`` objects, ``asdf.AsdfFile`` objects, or ``astropy.io.fits.HDUList`` objects. [#137]
71 | - Enable output of ASDF cutouts in image formats. [#137]
72 | - Refactor ``TicaCubeFactory`` to inherit from ``CubeFactory``. [#143]
73 | - Optimize ``CubeFactory._update_info_table`` to open FITS files only once. [#143]
74 | - Add ``TessCubeCutout`` class as a concrete implementation of abstract ``CubeCutout`` with TESS-specific logic. [#146]
75 | - Introduce ``TessCubeCutout.CubeCutoutInstance`` inner class for per-cutout attributes. [#146]
76 | - Enable in-memory output for ``TessCubeCutout`` instances. [#146]
77 | - Add ``TessFootprintCutout`` class as a concrete implementation of abstract ``FootprintCutout`` with TESS-specific logic. [#149]
78 | - Enable in-memory output for ``TessFootprintCutout`` instances. [#149]
79 | - Bugfix so ASDF cutouts store a copy of the cutout data rather than a view into the original data. [#153]
80 |
81 |
82 | 0.12.0 (2025-01-21)
83 | --------------------
84 |
85 | - Implement and document ``cube_cut_from_footprint`` function to generate cutouts from TESS image cube files hosted on the S3 cloud. [#127]
86 | - Bugfix to properly catch input TICA product files in ``CubeFactory``. [#129]
87 | - Add a logging framework. [#131]
88 | - Improve performance of FITS image cutouts by using the ``section`` attribute of ``ImageHDU`` objects to access data more efficiently. [#132]
89 | - Bugfix when writing multiple output files to memory in ``fits_cut``. [#132]
90 |
91 |
92 | 0.11.1 (2024-07-31)
93 | --------------------
94 |
95 | - ``asdf_cut`` function now accepts `pathlib.Path` and `s3path.S3Path` objects as an input file. [#119]
96 | - Bugfix for accessing private resources on the cloud in the ``asdf_cut`` function. [#121]
97 | - Add ``key``, ``secret``, and ``token`` parameters to ``asdf_cut`` for accessing private S3 buckets. [#124]
98 |
99 |
100 | 0.11.0 (2024-05-28)
101 | --------------------
102 |
103 | - Add functionality for creating cutouts from the ASDF file format [#105]
104 | - Update ASDF cutout function to support an `astropy.Quantity` object as input data [#114]
105 | - Return an `astropy.nddata.Cutout2D` object from ASDF cutout function [#114]
106 | - Preserve original cutout shape when requesting an ASDF cutout that is partially outside of image bounds [#115]
107 | - Output ASDF cutout as either a FITS file or an ASDF file [#116]
108 | - Support S3 URI strings as input to ASDF cutout function [#117]
109 | - Drop support for Python 3.8 [#112]
110 |
111 |
112 | 0.10.0 (2023-10-23)
113 | --------------------
114 |
115 | - Improve file checking prior to cutout creation to avoid errors [#52]
116 | - Fix broken tests from GitHub Actions CI run [#56]
117 | - Fix error resulting from forward slash in target name [#55]
118 | - MNT: Update codecov-action version to v2 [#53]
119 | - Make cubes out of TICA FFIs [#59]
120 | - Make cutouts out of TICA cubes [#60]
121 | - Fix bug for not catching duplicate ffis [#69]
122 | - Add max_memory arg to update_cube [#71]
123 | - Hotfix for cube_cut checking for valid WCS info [#70]
124 | - Add remote cutout functionality (requires astropy 5.2 or above) [#76]
125 | - Error handling for CubeFactory and TicaCubeFactory [#85]
126 | - Cutout in threadpool [#84]
127 | - Document multithreading enhancement [#86]
128 | - Remove error array dimension from TicaCubeFactory [#87]
129 | - Adapt CutoutFactory to account for error-less TICA Cubes [#88]
130 | - Update .readthedocs.yml with Python 3.11 [#89]
131 | - Update cube and cutout unit tests [#90]
132 | - Update docs to reflect changes in TICA cube format [#93]
133 | - Cloud functionality for astrocut.fits_cut() [#95]
134 | - Use GitHub Actions for publishing new releases to PyPI [#97]
135 | - Update deprecated license_file kwd [#103]
136 |
137 |
138 | 0.9 (2021-08-10)
139 | ----------------
140 |
141 | - Add cutout combine functionality [#45]
142 |
143 |
144 | 0.8 (2021-07-02)
145 | ----------------
146 |
147 | - Add moving target cutout functionality [#40]
148 |
149 |
150 | 0.7 (2020-08-19)
151 | ----------------
152 |
153 | - Add iterative cubing and user selected max memory [#35]
154 |
155 |
156 | 0.6 (2020-05-20)
157 | ----------------
158 | - Update wcs fitting to match Astropy (and use Astropy when available) [#29]
159 | - Limit the number of pixels used for WCS fitting to 100 [#30]
160 | - Deprecate drop_after and handle inconsistent wcs keywords automatically [#31]
161 | - Change the memmap access mode from ACCESS_COPY to ACCESS_READ to lower memory usage. [#33]
162 |
163 |
164 | 0.5 (2020-01-13)
165 | ----------------
166 | - Adding fits_cut function [#17]
167 | - Doc update (explain time column) [#19]
168 | - Adding img_cut and normalize_img [#21]
169 | - Improve cutout filenames, change minmax_cut to minmax_value [#24]
170 | - Add error handling when reading data raises an exception [#28]
171 |
172 | 0.4 (2019-06-21)
173 | ----------------
174 |
175 | - Adding more unit tests and coveralls setup [#11]
176 | - Adding workaround for FFIs with bad WCS info [#12]
177 | - Adding linear WCS approximation for cutouts [#14]
178 |
179 |
180 | 0.3 (2019-05-03)
181 | ----------------
182 |
183 | - Formatting update. [#5]
184 | - Making the aperture extension use integers. [#6]
185 | - Setting the creator keyword to astrocut. [#7]
186 | - Adding automated testing. [#8]
187 | - Uniform formatting on target pixel file names. [#10]
188 |
189 | 0.2 (2018-12-05)
190 | ----------------
191 |
192 | - Improved WCS handling
193 | - Additional TESS keywords
194 | - A handful of bugfixes
195 |
196 |
197 | 0.1 (2018-10-26)
198 | ----------------
199 |
200 | - Initial release. Includes features!
201 |
--------------------------------------------------------------------------------
/astrocut/tests/utils_for_test.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from os import path
3 | from astropy.io import fits
4 |
5 |
6 | def add_keywords(hdu, extname, time_increment, primary=False):
7 | """
8 | Add a bunch of required keywords (mostly fake values).
9 | """
10 |
11 | hdu.header['extname'] = extname
12 | hdu.header['camera'] = 1
13 | hdu.header['ccd'] = 1
14 | hdu.header['tstart'] = float(time_increment)
15 | hdu.header['tstop'] = float(time_increment+1)
16 | hdu.header['date-obs'] = '2019-05-11T00:08:26.816Z'
17 | hdu.header['date-end'] = '2019-05-11T00:38:26.816Z'
18 | hdu.header['barycorr'] = 5.0085597E-03
19 | hdu.header['dquality'] = 0
20 | hdu.header['FFIINDEX'] = 151696
21 |
22 | if not primary:
23 | # WCS keywords just copied from example
24 | hdu.header['RADESYS'] = 'ICRS '
25 | hdu.header['EQUINOX'] = 2000.0
26 | hdu.header['WCSAXES'] = 2
27 | hdu.header['CTYPE1'] = ('RA---TAN-SIP', "Gnomonic projection + SIP distortions")
28 | hdu.header['CTYPE2'] = ('DEC--TAN-SIP', "Gnomonic projection + SIP distortions")
29 | hdu.header['CRVAL1'] = 250.3497414839765200
30 | hdu.header['CRVAL2'] = 2.2809255996090630
31 | hdu.header['CRPIX1'] = 1045.0
32 | hdu.header['CRPIX2'] = 1001.0
33 | hdu.header['CD1_1'] = -0.005564478186178
34 | hdu.header['CD1_2'] = -0.001042099258152
35 | hdu.header['CD2_1'] = 0.001181441465850
36 | hdu.header['CD2_2'] = -0.005590816683583
37 | hdu.header['A_ORDER'] = 2
38 | hdu.header['B_ORDER'] = 2
39 | hdu.header['A_2_0'] = 2.024511892340E-05
40 | hdu.header['A_0_2'] = 3.317603337918E-06
41 | hdu.header['A_1_1'] = 1.73456334971071E-5
42 | hdu.header['B_2_0'] = 3.331330003472E-06
43 | hdu.header['B_0_2'] = 2.042474824825892E-5
44 | hdu.header['B_1_1'] = 1.714767108041439E-5
45 | hdu.header['AP_ORDER'] = 2
46 | hdu.header['BP_ORDER'] = 2
47 | hdu.header['AP_1_0'] = 9.047002963896363E-4
48 | hdu.header['AP_0_1'] = 6.276607155847164E-4
49 | hdu.header['AP_2_0'] = -2.023482905861E-05
50 | hdu.header['AP_0_2'] = -3.332285841011E-06
51 | hdu.header['AP_1_1'] = -1.731636633824E-05
52 | hdu.header['BP_1_0'] = 6.279608820532116E-4
53 | hdu.header['BP_0_1'] = 9.112228860848081E-4
54 | hdu.header['BP_2_0'] = -3.343918167224E-06
55 | hdu.header['BP_0_2'] = -2.041598249021E-05
56 | hdu.header['BP_1_1'] = -1.711876336719E-05
57 | hdu.header['A_DMAX'] = 44.72893589844534
58 | hdu.header['B_DMAX'] = 44.62692873032506
59 |
60 |
61 | def add_tica_keywords(hdu, time_increment):
62 | """
63 | Add a bunch of required keywords (mostly fake values). TICA specific.
64 | """
65 |
66 | hdu.header['CAMNUM'] = 1
67 | hdu.header['CCDNUM'] = 1
68 | hdu.header['STARTTJD'] = float(time_increment)
69 | hdu.header['ENDTJD'] = float(time_increment+1)
70 | hdu.header['QUAL_BIT'] = 0
71 | hdu.header['CADENCE'] = 151696
72 | hdu.header['CRM'] = True
73 | hdu.header['DEADC'] = 0.792
74 |
75 | # WCS keywords just copied from example
76 | # hdu.header['RADESYS'] = 'ICRS '
77 | hdu.header['EQUINOX'] = 2000.0
78 | hdu.header['WCAX3'] = 2
79 | hdu.header['CTYPE1'] = ('RA---TAN-SIP', "Gnomonic projection + SIP distortions")
80 | hdu.header['CTYPE2'] = ('DEC--TAN-SIP', "Gnomonic projection + SIP distortions")
81 | hdu.header['CRVAL1'] = 250.3497414839765200
82 | hdu.header['CRVAL2'] = 2.2809255996090630
83 | hdu.header['CRPIX1'] = 1045.0
84 | hdu.header['CRPIX2'] = 1001.0
85 | hdu.header['CD1_1'] = -0.005564478186178
86 | hdu.header['CD1_2'] = -0.001042099258152
87 | hdu.header['CD2_1'] = 0.001181441465850
88 | hdu.header['CD2_2'] = -0.005590816683583
89 | hdu.header['A_ORDER'] = 2
90 | hdu.header['B_ORDER'] = 2
91 | hdu.header['A_2_0'] = 2.024511892340E-05
92 | hdu.header['A_0_2'] = 3.317603337918E-06
93 | hdu.header['A_1_1'] = 1.73456334971071E-5
94 | hdu.header['B_2_0'] = 3.331330003472E-06
95 | hdu.header['B_0_2'] = 2.042474824825892E-5
96 | hdu.header['B_1_1'] = 1.714767108041439E-5
97 | hdu.header['AP_ORDER'] = 2
98 | hdu.header['BP_ORDER'] = 2
99 | hdu.header['AP_1_0'] = 9.047002963896363E-4
100 | hdu.header['AP_0_1'] = 6.276607155847164E-4
101 | hdu.header['AP_2_0'] = -2.023482905861E-05
102 | hdu.header['AP_0_2'] = -3.332285841011E-06
103 | hdu.header['AP_1_1'] = -1.731636633824E-05
104 | hdu.header['BP_1_0'] = 6.279608820532116E-4
105 | hdu.header['BP_0_1'] = 9.112228860848081E-4
106 | hdu.header['BP_2_0'] = -3.343918167224E-06
107 | hdu.header['BP_0_2'] = -2.041598249021E-05
108 | hdu.header['BP_1_1'] = -1.711876336719E-05
109 | hdu.header['A_DMAX'] = 44.72893589844534
110 | hdu.header['B_DMAX'] = 44.62692873032506
111 | hdu.header['COMMENT'] = 'TICA TEST'
112 |
113 |
114 | def create_test_ffis(img_size, num_images, dir_name=".", product='SPOC', basename='make_cube-test{:04d}.fits'):
115 | """
116 | Create test FITS files
117 |
118 | Write negative values for data array and positive values for error array,
119 | with unique values for all the pixels.
120 | """
121 |
122 | img = np.arange(img_size*img_size, dtype=np.float32).reshape((img_size, img_size))
123 | file_list = []
124 |
125 | basename = path.join(dir_name, basename)
126 | for i in range(num_images):
127 |
128 | file_list.append(basename.format(i))
129 |
130 | if product == 'SPOC':
131 | primary_hdu = fits.PrimaryHDU()
132 | elif product == 'TICA':
133 | primary_hdu = fits.PrimaryHDU(-img)
134 |
135 | if product == 'SPOC':
136 | add_keywords(primary_hdu, "PRIMARY", i, primary=True)
137 | elif product == 'TICA':
138 | add_tica_keywords(primary_hdu, i)
139 |
140 | if product == 'SPOC':
141 | hdu = fits.ImageHDU(-img)
142 | add_keywords(hdu, 'CAMERA.CCD 1.1 cal', i)
143 |
144 | ehdu = fits.ImageHDU(img)
145 | add_keywords(ehdu, 'CAMERA.CCD 1.1 uncert', i)
146 |
147 | hdulist = fits.HDUList([primary_hdu, hdu, ehdu])
148 |
149 | elif product == 'TICA':
150 | hdulist = fits.HDUList([primary_hdu])
151 |
152 | hdulist.writeto(file_list[-1], overwrite=True, checksum=True)
153 |
154 | img = img + img_size*img_size
155 |
156 | return file_list
157 |
158 |
159 | def add_wcs_nosip_keywords(hdu, img_size, product='SPOC'):
160 | """
161 | Adds example WCS keywords without SIP distortions to the given header.
162 |
163 | Center coordinate is: 150.1163213, 2.200973097
164 | """
165 |
166 | wcsaxes = 'WCSAXES' if product == 'SPOC' else 'WCAX3'
167 | hdu.header.extend([(wcsaxes, 2, 'Number of coordinate axes'),
168 | ('CRPIX1', img_size/2, 'Pixel coordinate of reference point'),
169 | ('CRPIX2', img_size/2, 'Pixel coordinate of reference point'),
170 | ('PC1_1', -1.666667e-05, 'Coordinate transformation matrix element'),
171 | ('PC2_2', 1.666667e-05, 'Coordinate transformation matrix element'),
172 | ('CDELT1', 1.0, '[deg] Coordinate increment at reference point'),
173 | ('CDELT2', 1.0, '[deg] Coordinate increment at reference point'),
174 | ('CUNIT1', 'deg', 'Units of coordinate increment and value'),
175 | ('CUNIT2', 'deg', 'Units of coordinate increment and value'),
176 | ('CTYPE1', 'RA---TAN', 'Right ascension, gnomonic projection'),
177 | ('CTYPE2', 'DEC--TAN', 'Declination, gnomonic projection'),
178 | ('CRVAL1', 150.1163213, '[deg] Coordinate value at reference point'),
179 | ('CRVAL2', 2.200973097, '[deg] Coordinate value at reference point')])
180 |
181 |
182 | def add_bad_sip_keywords(hdu):
183 | """
184 | Adds a number of dummy SIP keywords so that the drop_after argument to fits_cut can be tested.
185 | """
186 | hdu.header['A_ORDER'] = 2
187 | hdu.header['B_ORDER'] = 2
188 | hdu.header['A_2_0'] = 2.024511892340E-05
189 | hdu.header['A_0_2'] = 3.317603337918E-06
190 | hdu.header['A_1_1'] = 1.73456334971071E-5
191 | hdu.header['B_2_0'] = 3.331330003472E-06
192 | hdu.header['B_0_2'] = 2.042474824825892E-5
193 | hdu.header['B_1_1'] = 1.714767108041439E-5
194 | hdu.header['AP_ORDER'] = 2
195 | hdu.header['BP_ORDER'] = 2
196 | hdu.header['AP_1_0'] = 9.047002963896363E-4
197 | hdu.header['AP_0_1'] = 6.276607155847164E-4
198 | hdu.header['AP_2_0'] = -2.023482905861E-05
199 | hdu.header['AP_0_2'] = -3.332285841011E-06
200 | hdu.header['AP_1_1'] = -1.731636633824E-05
201 | hdu.header['BP_1_0'] = 6.279608820532116E-4
202 | hdu.header['BP_0_1'] = 9.112228860848081E-4
203 | hdu.header['BP_2_0'] = -3.343918167224E-06
204 | hdu.header['BP_0_2'] = -2.041598249021E-05
205 | hdu.header['BP_1_1'] = -1.711876336719E-05
206 | hdu.header['A_DMAX'] = 44.72893589844534
207 | hdu.header['B_DMAX'] = 44.62692873032506
208 |
209 |
210 | def create_test_imgs(product, img_size, num_images, bad_sip_keywords=False, dir_name=".", basename='img_{:04d}.fits'):
211 | """
212 | Create test FITS image files, single extension.
213 |
214 | Write unique values for all the pixels.
215 | The header keywords are populated with a simple WCS for testing.
216 | """
217 |
218 | img = np.arange(img_size*img_size, dtype=np.float32).reshape((img_size, img_size))
219 | file_list = []
220 |
221 | basename = path.join(dir_name, basename)
222 | for i in range(num_images):
223 |
224 | file_list.append(basename.format(i))
225 |
226 | primary_hdu = fits.PrimaryHDU(data=img)
227 | add_wcs_nosip_keywords(primary_hdu, img_size, product)
228 |
229 | if bad_sip_keywords:
230 | add_bad_sip_keywords(primary_hdu)
231 |
232 | hdulist = fits.HDUList([primary_hdu])
233 | hdulist.writeto(file_list[-1], overwrite=True, checksum=True)
234 |
235 | img = img + img_size*img_size
236 |
237 | return file_list
238 |
--------------------------------------------------------------------------------
/astrocut/cutout.py:
--------------------------------------------------------------------------------
1 | import warnings
2 | import io
3 | import zipfile
4 | from abc import abstractmethod, ABC
5 | from pathlib import Path
6 | from typing import List, Union, Tuple, Iterable, Callable, Any, Optional
7 |
8 | import asdf
9 | import astropy.units as u
10 | import numpy as np
11 | from astropy import wcs
12 | from astropy.io import fits
13 | from s3path import S3Path
14 | from astropy.coordinates import SkyCoord
15 |
16 | from astrocut.exceptions import InputWarning, InvalidInputError, InvalidQueryError
17 |
18 | from . import log
19 | from .utils.utils import _handle_verbose
20 |
21 |
22 | class Cutout(ABC):
23 | """
24 | Abstract class for creating cutouts. This class defines attributes and methods that are common to all
25 | cutout classes.
26 |
27 | Parameters
28 | ----------
29 | input_files : list
30 | List of input image files.
31 | coordinates : str | `~astropy.coordinates.SkyCoord`
32 | Coordinates of the center of the cutout.
33 | cutout_size : int | array | list | tuple | `~astropy.units.Quantity`
34 | Size of the cutout array.
35 | fill_value : int | float
36 | Value to fill the cutout with if the cutout is outside the image.
37 | limit_rounding_method : str
38 | Method to use for rounding the cutout limits. Options are 'round', 'ceil', and 'floor'.
39 | verbose : bool
40 | If True, log messages are printed to the console.
41 |
42 | Methods
43 | -------
44 | cutout()
45 | Generate the cutouts.
46 | """
47 |
48 | def __init__(self, input_files: List[Union[str, Path, S3Path]], coordinates: Union[SkyCoord, str],
49 | cutout_size: Union[int, np.ndarray, u.Quantity, List[int], Tuple[int]] = 25,
50 | fill_value: Union[int, float] = np.nan, limit_rounding_method: str = 'round',
51 | verbose: bool = False):
52 |
53 | # Log messages according to verbosity
54 | _handle_verbose(verbose)
55 |
56 | # Ensure that input files are in a list
57 | if isinstance(input_files, (str, Path)):
58 | input_files = [input_files]
59 | self._input_files = input_files
60 |
61 | # Get coordinates as a SkyCoord object
62 | if not isinstance(coordinates, SkyCoord):
63 | coordinates = SkyCoord(coordinates, unit='deg')
64 | self._coordinates = coordinates
65 | log.debug('Coordinates: %s', self._coordinates)
66 |
67 | # Turning the cutout size into an array of two values
68 | self._cutout_size = self.parse_size_input(cutout_size)
69 | log.debug('Cutout size: %s', self._cutout_size)
70 |
71 | # Assigning other attributes
72 | valid_rounding = ['round', 'ceil', 'floor']
73 | if not isinstance(limit_rounding_method, str) or limit_rounding_method.lower() not in valid_rounding:
74 | raise InvalidInputError(f'Limit rounding method {limit_rounding_method} is not recognized. '
75 | f'Valid options are {valid_rounding}.')
76 | self._limit_rounding_method = limit_rounding_method
77 |
78 | if not isinstance(fill_value, int) and not isinstance(fill_value, float):
79 | raise InvalidInputError('Fill value must be an integer or a float.')
80 | self._fill_value = fill_value
81 |
82 | self._verbose = verbose
83 |
84 | # Initialize cutout dictionary
85 | self.cutouts_by_file = {}
86 |
87 | def _get_cutout_limits(self, img_wcs: wcs.WCS) -> np.ndarray:
88 | """
89 | Returns the x and y pixel limits for the cutout.
90 |
91 | Note: This function does no bounds checking, so the returned limits are not
92 | guaranteed to overlap the original image.
93 |
94 | Parameters
95 | ----------
96 | img_wcs : `~astropy.wcs.WCS`
97 | The WCS for the image or cube that the cutout is being cut from.
98 |
99 | Returns
100 | -------
101 | response : `numpy.array`
102 | The cutout pixel limits in an array of the form [[xmin,xmax],[ymin,ymax]]
103 | """
104 | # Calculate pixel corresponding to coordinate
105 | try:
106 | with warnings.catch_warnings():
107 | warnings.filterwarnings('ignore', message='All-NaN slice encountered')
108 | center_pixel = self._coordinates.to_pixel(img_wcs)
109 | except wcs.NoConvergence: # If wcs can't converge, center coordinate is far from the footprint
110 | raise InvalidQueryError('Cutout location is not in image footprint!')
111 |
112 | # We may get nans without a NoConvergence error
113 | if np.isnan(center_pixel).any():
114 | raise InvalidQueryError('Cutout location is not in image footprint!')
115 |
116 | lims = np.zeros((2, 2), dtype=int)
117 | for axis, size in enumerate(self._cutout_size):
118 |
119 | if not isinstance(size, u.Quantity): # assume pixels
120 | dim = size / 2
121 | elif size.unit == u.pixel: # also pixels
122 | dim = size.value / 2
123 | elif size.unit.physical_type == 'angle': # angular size
124 | pixel_scale = u.Quantity(wcs.utils.proj_plane_pixel_scales(img_wcs)[axis],
125 | img_wcs.wcs.cunit[axis])
126 | dim = (size / pixel_scale).decompose() / 2
127 | else:
128 | raise InvalidInputError(f'Cutout size unit {size.unit.aliases[0]} is not supported.')
129 |
130 | # Round the limits according to the requested method
131 | rounding_funcs = {
132 | 'round': np.round,
133 | 'ceil': np.ceil,
134 | 'floor': np.floor
135 | }
136 | round_func = rounding_funcs[self._limit_rounding_method]
137 |
138 | lims[axis, 0] = int(round_func(center_pixel[axis] - dim))
139 | lims[axis, 1] = int(round_func(center_pixel[axis] + dim))
140 |
141 | # The case where the requested area is so small it rounds to zero
142 | if lims[axis, 0] == lims[axis, 1]:
143 | lims[axis, 0] = int(np.floor(center_pixel[axis]))
144 | lims[axis, 1] = lims[axis, 0] + 1
145 | return lims
146 |
147 | @abstractmethod
148 | def cutout(self):
149 | """
150 | Generate the cutout(s).
151 |
152 | This method is abstract and should be defined in subclasses.
153 | """
154 | raise NotImplementedError('Subclasses must implement this method.')
155 |
156 | def _make_cutout_filename(self, file_stem: str) -> str:
157 | """
158 | Create a cutout filename based on a file stem, coordinates, and cutout size.
159 |
160 | Parameters
161 | ----------
162 | file_stem : str
163 | The stem of the input file to use in the cutout filename.
164 |
165 | Returns
166 | -------
167 | filename : str
168 | The generated cutout filename.
169 | """
170 | return '{}_{:.7f}_{:.7f}_{}-x-{}_astrocut.fits'.format(
171 | file_stem,
172 | self._coordinates.ra.value,
173 | self._coordinates.dec.value,
174 | str(self._cutout_size[0]).replace(' ', ''),
175 | str(self._cutout_size[1]).replace(' ', ''))
176 |
177 | def _obj_to_bytes(self, obj: Union[fits.HDUList, asdf.AsdfFile]) -> bytes:
178 | """
179 | Convert a supported object into bytes for writing into a zip stream.
180 |
181 | Parameters
182 | ----------
183 | obj : `astropy.io.fits.HDUList` | `asdf.AsdfFile`
184 | The object to convert to bytes.
185 |
186 | Returns
187 | -------
188 | bytes
189 | The byte representation of the object.
190 | """
191 | # HDUList to bytes
192 | if isinstance(obj, fits.HDUList):
193 | buf = io.BytesIO()
194 | with warnings.catch_warnings():
195 | warnings.simplefilter('ignore', fits.verify.VerifyWarning)
196 | obj.writeto(buf, overwrite=True, checksum=True)
197 | # `AsdfFile` to bytes
198 | elif isinstance(obj, asdf.AsdfFile):
199 | buf = io.BytesIO()
200 | obj.write_to(buf)
201 | else:
202 | raise TypeError(
203 | 'Unsupported payload type for zip entry. Expected `HDUList` or `AsdfFile`.'
204 | )
205 |
206 | return buf.getvalue()
207 |
208 | def _write_cutouts_to_zip(
209 | self,
210 | output_dir: Union[str, Path] = ".",
211 | filename: Optional[Union[str, Path]] = None,
212 | build_entries: Optional[Callable[[], Iterable[Tuple[str, Any]]]] = None
213 | ) -> str:
214 | """
215 | Create a zip archive containing all cutout files without writing intermediate files.
216 |
217 | Parameters
218 | ----------
219 | output_dir : str | Path, optional
220 | Directory where the zip will be created. Default '.'
221 | filename : str | Path | None, optional
222 | Name (or path) of the output zip file. If not provided, defaults to
223 | 'astrocut_{ra}_{dec}_{size}.zip'. If provided without a '.zip' suffix,
224 | the suffix is added automatically.
225 | build_entries : callable -> iterable of (arcname, payload), optional
226 | Function that yields entries lazily. Useful to build streams on demand.
227 |
228 | Returns
229 | -------
230 | str
231 | Path to the created zip file.
232 | """
233 | # Resolve zip path and ensure directory exists
234 | if filename is None:
235 | filename = 'astrocut_{:.7f}_{:.7f}_{}-x-{}.zip'.format(
236 | self._coordinates.ra.value,
237 | self._coordinates.dec.value,
238 | str(self._cutout_size[0]).replace(' ', ''),
239 | str(self._cutout_size[1]).replace(' ', ''))
240 | filename = Path(filename)
241 | if filename.suffix.lower() != '.zip':
242 | filename = filename.with_suffix('.zip')
243 |
244 | output_dir = Path(output_dir)
245 | output_dir.mkdir(parents=True, exist_ok=True)
246 |
247 | zip_path = filename if filename.is_absolute() else output_dir / filename
248 |
249 | # Stream entries directly into the zip
250 | with zipfile.ZipFile(zip_path, mode='w', compression=zipfile.ZIP_DEFLATED) as zf:
251 | for arcname, payload in build_entries():
252 | data = self._obj_to_bytes(payload)
253 | zf.writestr(arcname, data)
254 |
255 | return zip_path.as_posix()
256 |
257 | @staticmethod
258 | def parse_size_input(cutout_size, *, allow_zero: bool = False) -> np.ndarray:
259 | """
260 | Makes the given cutout size into a length 2 array.
261 |
262 | Parameters
263 | ----------
264 | cutout_size : int, array-like, `~astropy.units.Quantity`
265 | The size of the cutout array. If ``cutout_size`` is a scalar number or a scalar
266 | `~astropy.units.Quantity`, then a square cutout of ``cutout_size`` will be created.
267 | If ``cutout_size`` has two elements, they should be in ``(ny, nx)`` order. Scalar numbers
268 | in ``cutout_size`` are assumed to be in units of pixels. `~astropy.units.Quantity` objects
269 | must be in pixel or angular units.
270 | allow_zero : bool, optional
271 | If True, allows cutout dimensions to be zero. Default is False.
272 |
273 | Returns
274 | -------
275 | response : array
276 | Length two cutout size array, in the form [ny, nx].
277 | """
278 |
279 | # Making size into an array [ny, nx]
280 | if np.isscalar(cutout_size):
281 | cutout_size = np.repeat(cutout_size, 2)
282 |
283 | if isinstance(cutout_size, u.Quantity):
284 | cutout_size = np.atleast_1d(cutout_size)
285 | if len(cutout_size) == 1:
286 | cutout_size = np.repeat(cutout_size, 2)
287 |
288 | if len(cutout_size) > 2:
289 | warnings.warn('Too many dimensions in cutout size, only the first two will be used.',
290 | InputWarning)
291 | cutout_size = cutout_size[:2]
292 |
293 |
294 | for dim in cutout_size:
295 | # Raise error if either dimension is not a positive number
296 | if dim < 0 or (not allow_zero and dim == 0):
297 | raise InvalidInputError('Cutout size dimensions must be greater than zero. '
298 | f'Provided size: ({cutout_size[0]}, {cutout_size[1]})')
299 |
300 | # Raise error if either dimension is not a pixel or angular Quantity
301 | if isinstance(dim, u.Quantity) and dim.unit != u.pixel and dim.unit.physical_type != 'angle':
302 | raise InvalidInputError(f'Cutout size unit {dim.unit.aliases[0]} is not supported.')
303 |
304 | return cutout_size
305 |
--------------------------------------------------------------------------------
/docs/astrocut/file_formats.rst:
--------------------------------------------------------------------------------
1 | :orphan:
2 |
3 | *********************
4 | Astrocut File Formats
5 | *********************
6 |
7 | FITS Cutout Files
8 | =================
9 |
10 | FITS files output by image cutout classes consist of a PrimaryHDU extension
11 | and one or more ImageHDU extensions, each containing a single cutout.
12 |
13 | PRIMARY PrimaryHDU (Extension 0)
14 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
15 |
16 | ========= ===================================================
17 | Keyword Value
18 | ========= ===================================================
19 | SIMPLE T (conforms to FITS standard)
20 | BITPIX 8 (array data type)
21 | NAXIS 0 (number of array dimensions)
22 | EXTEND Number of standard extensions
23 | ORIGIN STScI/MAST
24 | DATE File creation date
25 | PROCVER Software version
26 | RA_OBJ Center coordinate right ascension (deg)
27 | DEC_OBJ Center coordinate declination (deg)
28 | CHECKSUM HDU checksum
29 | DATASUM Data unit checksum
30 | ========= ===================================================
31 |
32 | CUTOUT ImageHDU (Subsequent extension(s))
33 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
34 |
35 | The data in each CUTOUT extension is the cutout image. The header includes all of the
36 | keywords from the extension that the cutout image was drawn from, with WCS keywords
37 | updated to match the cutout image. Additionally, the keyword ``ORIG_FLE`` has been added;
38 | it contains the name of the file the cutout comes from.
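    |
    | A minimal sketch of reading a cutout and its updated WCS (the filename is hypothetical):
    |
    | .. code-block:: python
    |
    |     from astropy.io import fits
    |     from astropy.wcs import WCS
    |
    |     with fits.open('cutout_astrocut.fits') as hdul:
    |         cutout = hdul[1].data                   # the cutout image
    |         cutout_wcs = WCS(hdul[1].header)        # WCS keywords updated to the cutout
    |         orig_file = hdul[1].header['ORIG_FLE']  # file the cutout was drawn from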
39 |
40 |
41 |
42 | ASDF Cutout Files
43 | ==================
44 |
45 | ASDF files output by `~astrocut.ASDFCutout` are a minimal tree structure that mirrors the format of the original Roman image file.
46 |
47 | .. code-block:: python
48 |
49 | asdf_cutout = {
50 | "roman": {
51 | "meta": {
52 | "wcs" - the gwcs of the cutout
53 | },
54 | "data" - the cutout data
55 | }
56 | }
57 |
58 | ``wcs`` is the original ``gwcs`` object from the input ASDF file that has been sliced into the shape of the cutout.
59 |
60 |
61 |
62 | Cube Files
63 | ==========
64 |
65 | See the TESS Science Data Products Description Document
66 | for detailed information on the TESS full-frame image file format.
67 |
68 |
69 | PrimaryHDU (Extension 0)
70 | ^^^^^^^^^^^^^^^^^^^^^^^^
71 |
72 | The Primary Header of a TESS Mission SPOC cube FITS file is the same as that of
73 | an individual FFI, with the following exceptions:
74 |
75 | ========= ===================================================
76 | Keyword Value
77 | ========= ===================================================
78 | ORIGIN STScI/MAST
79 | DATE Date the cube was created
80 | CAMERA From the ImageHDU (EXT 1) of an FFI
81 | CCD From the ImageHDU (EXT 1) of an FFI
82 | SECTOR The TESS observing Sector, passed by the user
83 | DATE-OBS From the ImageHDU (EXT 1) of the Sector's first FFI
84 | DATE-END From the ImageHDU (EXT 1) of the Sector's last FFI
85 | TSTART From the ImageHDU (EXT 1) of the Sector's first FFI
86 | TSTOP From the ImageHDU (EXT 1) of the Sector's last FFI
87 | ========= ===================================================
88 |
89 |
90 | ImageHDU (Extension 1)
91 | ^^^^^^^^^^^^^^^^^^^^^^
92 |
93 | The ImageHDU extension contains the TESS FFI data cube.
94 | It is 4-dimensional: two spatial dimensions, time, and a final axis that holds the data
95 | and error flux values. Pixel values are 32-bit floats.
96 | The cube dimensions are ordered in the FITS format as follows (a small access sketch follows the table):
97 |
98 | ========= ===================================================
99 | Keyword Value
100 | ========= ===================================================
101 | NAXIS 4 (number of array dimensions)
102 | NAXIS1 2 (data value, error value)
103 | NAXIS2 Total number of FFIs
104 | NAXIS3 Length of first array dimension (NAXIS1 from FFIs)
105 | NAXIS4 Length of second array dimension (NAXIS2 from FFIs)
106 | ========= ===================================================
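    |
    | In NumPy (C) axis order the data array is therefore ``(NAXIS4, NAXIS3, NAXIS2, NAXIS1)``.
    | A minimal access sketch (the filename and pixel position are illustrative):
    |
    | .. code-block:: python
    |
    |     from astropy.io import fits
    |
    |     with fits.open('tess_cube.fits', memmap=True) as cube:
    |         flux_series = cube[1].data[100, 200, :, 0]  # flux time series for one pixel
    |         err_series = cube[1].data[100, 200, :, 1]   # matching error time series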
107 |
108 |
109 | BinTableHDU (Extension 2)
110 | ^^^^^^^^^^^^^^^^^^^^^^^^^
111 |
112 | The BinTableHDU extension contains a table that
113 | holds all of the image extension header keywords from the individual FFIs. There
114 | is one column for each keyword plus one additional column called "FFI_FILE" that
115 | contains the FFI filename for each row. Each column-name keyword also has an entry in the
116 | image extension header, with the value being the keyword value from the FFI header.
117 | Together, these columns allow the FFI image extension headers to be recreated completely if desired.
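    |
    | A minimal sketch of rebuilding one FFI's image header from the table (the filename is hypothetical):
    |
    | .. code-block:: python
    |
    |     from astropy.io import fits
    |
    |     with fits.open('tess_cube.fits') as cube:
    |         info = cube[2].data
    |         row = info[0]  # first FFI in the cube
    |         header = fits.Header()
    |         for col in info.columns.names:
    |             if col != 'FFI_FILE':
    |                 header[col] = row[col]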
118 |
119 |
120 | Target Pixel Files
121 | ==================
122 |
123 | The Astrocut target pixel file (TPF) format conforms as closely as possible to the
124 | TESS Mission TPFs. See the TESS Science Data Products Description Document
125 | for detailed information on the TESS Mission TPF format; here we describe
126 | how Astrocut TPFs differ from Mission pipeline TPFs.
127 |
128 | PRIMARY PrimaryHDU (Extension 0)
129 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
130 |
131 | The Primary Header of an Astrocut TPF is the same as that from
132 | a Mission TPF with the following exceptions:
133 |
134 | ========= ====================================================
135 | Keyword Value
136 | ========= ====================================================
137 | ORIGIN STScI/MAST
138 | CREATOR astrocut
139 | PROCVER Astrocut version
140 | SECTOR Depends on this value having been filled in the cube
141 |
142 | **Mission pipeline header values Astrocut cannot populate**
143 | --------------------------------------------------------------
144 | OBJECT ""
145 | TCID 0
146 | PXTABLE 0
147 | PMRA 0.0
148 | PMDEC 0.0
149 | PMTOTAL 0.0
150 | TESSMAG 0.0
151 | TEFF 0.0
152 | LOGG 0.0
153 | MH 0.0
154 | RADIUS 0.0
155 | TICVER 0
156 | TICID None
157 | ========= ====================================================
158 |
159 | PIXELS BinTableHDU (Extension 1)
160 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
161 |
162 | The Astrocut PIXELS BinTableHDU comprises the same columns as those included in
163 | the Mission pipeline TPFs, with one addition: an extra column, ``FFI_FILE``, contains
164 | the name of the FFI file that the row's pixels come from.
165 |
166 | While all of the columns present in Mission pipeline TPFs are present in cutouts created
167 | from SPOC cubes, they do not all contain data. The columns that are empty in Astrocut TPFs are:
168 |
169 | ============ ====================================================
170 | Column Value
171 | ============ ====================================================
172 | CADENCENO 0 filled array in cutout shape
173 | RAW_CNTS -1 filled array in cutout shape
174 | FLUX_BKG 0 filled array in cutout shape
175 | FLUX_BKG_ERR 0 filled array in cutout shape
176 | POS_CORR1 0
177 | POS_CORR2 0
178 | ============ ====================================================
179 |
180 | The ``TIME`` column is formed by taking the average of the ``TSTART`` and ``TSTOP`` values
181 | from the corresponding FFI for each row. The ``QUALITY`` column is taken from the ``DQUALITY``
182 | image keyword in the individual SPOC FFI files.
183 |
184 | Three keywords have also been added to the PIXELS extension header to give additional information
185 | about the cutout world coordinate system (WCS). TESS FFIs are large and therefore are described
186 | by WCS objects that have many non-linear terms. Astrocut creates a new simpler (linear) WCS
187 | object from the matched set of cutout pixel coordinates and sky coordinates (from the FFI WCS).
188 | This linear WCS object will generally work very well; however, at larger cutout sizes (100-200
189 | pixels per side and above) the linear WCS fit will start to be noticeably incorrect at the edges
190 | of the cutout. The extra keywords allow the user to determine whether the linear WCS is accurate
191 | enough for their purpose, and to retrieve the original WCS with distortion coefficients if needed
192 | (a usage sketch follows the table).
192 |
193 |
194 | +---------+----------------------------------------------------------------+
195 | | Keyword | Value |
196 | +=========+================================================================+
197 | | WCS_FFI | | The name of the FFI file used to build the original WCS |
198 | | | | from which the cutout and cutout WCS were calculated. |
199 | +---------+----------------------------------------------------------------+
200 | | WCS_MSEP| | The maximum separation in degrees between the cutout's |
201 | | | | linear WCS and the FFI's full WCS. |
202 | +---------+----------------------------------------------------------------+
203 | | WCS_SIG | | The error in the cutout's linear WCS, calculated as           |
204 | |         | | ``sqrt(sum(dist(Po_ij, Pl_ij)^2))``, where                    |
205 | |         | | ``dist(Po_ij, Pl_ij)`` is the angular distance in degrees     |
206 | |         | | between the sky position of pixel i,j in the original full    |
207 | |         | | WCS and the new linear WCS.                                   |
208 | +---------+----------------------------------------------------------------+
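    |
    | A minimal sketch of using these keywords (the filename and threshold are illustrative):
    |
    | .. code-block:: python
    |
    |     from astropy.io import fits
    |
    |     with fits.open('target_pixel_astrocut.fits') as tpf:
    |         max_sep_deg = tpf[1].header['WCS_MSEP']
    |         if max_sep_deg > 1.0 / 3600:  # worse than ~1 arcsecond at the cutout edge
    |             print('Linear WCS may be too coarse; original WCS came from', tpf[1].header['WCS_FFI'])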
209 |
210 |
211 | APERTURE ImageHDU (Extension 2)
212 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
213 |
214 | The APERTURE ImageHDU extension is similar to that of Mission pipeline TPFs, but contains
215 | slightly different data. For Mission pipeline files, the aperture image gives information about
216 | each pixel, such as whether it was collected and whether it was used in calculating, e.g., the background flux.
217 | Because Astrocut does not do any of the more complex calculations used in the Mission pipeline, each pixel in the
218 | aperture image will either be 1 (pixel was collected and contains data in the cutout) or 0
219 | (pixel is off the edge of the detector and contains no data in the cutout).
220 |
221 |
222 | Cosmic Ray Binary Table Extension
223 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
224 |
225 | This extension is not present in Astrocut TPFs, although it is a part of the Mission pipeline TPFs.
226 |
227 |
228 | Path Focused Target Pixel Files
229 | ===============================
230 |
231 | When the `~astrocut.center_on_path` function is used to create cutout TPFs
232 | where the individual image cutouts move along a path in time and space, the TPF format has to be
233 | adjusted accordingly. It still conforms as closely as possible to the TESS Mission pipeline TPF
234 | file format, but differs in several crucial ways. The `~astrocut.center_on_path` function works
235 | on Astrocut TPFs, so that is the baseline file format. Only the differences
236 | between path focused Astrocut TPFs and regular Astrocut TPFs are described here (see `Target Pixel Files`_ for
237 | regular Astrocut TPF format).
238 |
239 | PRIMARY PrimaryHDU (Extension 0)
240 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
241 |
242 | Additional or updated keywords:
243 |
244 | ========= =======================================================
245 | Keyword Value
246 | ========= =======================================================
247 | DATE Set to the time the path focused cutout was performed
248 | OBJECT Moving target object name/identifier, only present if
249 | set by the user
250 | ========= =======================================================
251 |
252 | Removed keywords:
253 |
254 | ========= =======================================================
255 | Keyword Reason
256 | ========= =======================================================
257 | RA_OBJ Cutout is no longer centered on a sky position
258 | DEC_OBJ Cutout is no longer centered on a sky position
259 | ========= =======================================================
260 |
261 |
262 | PIXELS BinTableHDU (Extension 1)
263 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
264 |
265 | Additional columns:
266 |
267 | ============ ========================================================
268 | Column Value
269 | ============ ========================================================
270 | TGT_X X position of the target in the cutout array at row time
271 | TGT_Y Y position of the target in the cutout array at row time
272 | TGT_RA Right ascension (deg) of the target at row time
273 | TGT_DEC Declination (deg) of the target at row time
274 | ============ ========================================================
275 |
276 | No world coordinate system (WCS) information is present, since it is no
277 | longer common across all cutout images.
278 |
279 |
280 | APERTURE ImageHDU (Extension 2)
281 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
282 |
283 | The APERTURE extension may or may not be present in a path focused TPF; for it to be present,
284 | the user must have passed an FFI WCS object into the `~astrocut.center_on_path` function.
285 |
286 | The APERTURE ImageHDU extension of path focused TPFs is very different from other
287 | TESS TPFs. The aperture image, instead of being the size and shape of an individual cutout,
288 | is the size of the full FFI image the cutouts were drawn from. All pixels used in any
289 | individual cutout are marked with 1, while the rest of the pixels are 0, so the entire
290 | trajectory of the cutout path is captured. Additionally the WCS information in the header
291 | is the WCS for the original FFI, including all distortion coefficients. This can be
292 | used in combination with the TGT_RA/DEC and TGT_X/Y columns to trace the path of the
293 | target across the FFI footprint and calculate the WCS object for individual cutout images
294 | if necessary.
295 |
--------------------------------------------------------------------------------
/astrocut/tests/test_tess_footprint_cutout.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | import pytest
3 | import re
4 | import zipfile
5 | from unittest.mock import MagicMock
6 |
7 | import astropy.units as u
8 | import numpy as np
9 | from astropy.coordinates import SkyCoord
10 | from astropy.io import fits
11 | from astropy.table import Table
12 | from spherical_geometry.polygon import SphericalPolygon
13 |
14 | from .. import footprint_cutout
15 | from ..cube_cutout import CubeCutout
16 | from ..exceptions import InvalidInputError, InvalidQueryError
17 | from ..footprint_cutout import get_ffis, ra_dec_crossmatch
18 | from ..tess_footprint_cutout import (TessFootprintCutout, cube_cut_from_footprint, get_tess_sectors,
19 | _extract_sequence_information, _create_sequence_list)
20 | from ..tess_cube_cutout import TessCubeCutout
21 |
22 |
23 | @pytest.fixture
24 | def cutout_size():
25 | """Fixture to return the cutout size"""
26 | return 5
27 |
28 |
29 | @pytest.fixture
30 | def coordinates():
31 | """Fixture to return the coordinates at the center of the images"""
32 | return SkyCoord('350 -80', unit='deg')
33 |
34 |
35 | @pytest.fixture(scope='module')
36 | def all_ffis():
37 | """Fixture to return the table of all FFIs"""
38 | return get_ffis('s3://stpubdata/tess/public/footprints/tess_ffi_footprint_cache.json')
39 |
40 |
41 | @pytest.fixture
42 | def crossmatch_spies(monkeypatch):
43 | # wrap real functions with MagicMocks that call the real implementation
44 | real_point = footprint_cutout._crossmatch_point
45 | real_poly = footprint_cutout._crossmatch_polygon
46 |
47 | spy_point = MagicMock(side_effect=real_point)
48 | spy_poly = MagicMock(side_effect=real_poly)
49 |
50 | monkeypatch.setattr(footprint_cutout, "_crossmatch_point", spy_point)
51 | monkeypatch.setattr(footprint_cutout, "_crossmatch_polygon", spy_poly)
52 |
53 | yield spy_point, spy_poly
54 |
55 |
56 | def check_output_tpf(tpf, sequences=[], cutout_size=5):
57 | """Helper function to check the validity of output cutout files"""
58 | tpf_table = tpf[1].data
59 |
60 | # SPOC cutouts have 1 extra column in EXT 1
61 | assert len(tpf_table.columns) == 12
62 | assert tpf_table[0]['FLUX'].shape == (cutout_size, cutout_size)
63 |
64 | # Check that sector matches a provided sequence
65 | if sequences:
66 | assert tpf[0].header['SECTOR'] in sequences
67 |
68 | # Close TPF
69 | tpf.close()
70 |
71 |
72 | def test_s_region_to_polygon_unsupported_region():
73 | """Test that ValueError is raised if s_region is not a polygon"""
74 | s_region = 'CIRCLE'
75 | err = f'Unsupported s_region type: {s_region}'
76 | with pytest.raises(ValueError, match=err):
77 | TessFootprintCutout._s_region_to_polygon(s_region)
78 |
79 |
80 | @pytest.mark.parametrize("lon, lat, center, expected", [
81 | ((345, 355, 355, 345), (-15, -15, -5, -5), (350, -10), True), # intersecting
82 | ((335, 345, 345, 335), (-15, -15, -5, -5), (340, -10), False), # non-intersecting
83 | ((340, 350, 350, 340), (-15, -15, -5, -5), (345, -10), True), # edge object that intersects
84 | ((340, 349, 349, 340), (-15, -15, -5, -5), (345, -10), False), # edge object that does not intersect
85 | ])
86 | def test_ffi_intersect(lon, lat, center, expected):
87 | """Test that FFI intersection with cutout outputs proper results."""
88 | # SphericalPolygon object for cutout
89 | cutout_sp = SphericalPolygon.from_radec(lon=(350, 10, 10, 350),
90 | lat=(-10, -10, 10, 10),
91 | center=(0, 0))
92 |
93 | # Create a SphericalPolygon with the parametrized lon, lat, and center
94 | polygon = SphericalPolygon.from_radec(lon=lon, lat=lat, center=center)
95 |
96 | # Create a table with this polygon
97 | polygon_table = Table(names=['polygon'], dtype=[SphericalPolygon])
98 | polygon_table['polygon'] = [polygon]
99 |
100 | # Perform the intersection check
101 | intersection = TessFootprintCutout._ffi_intersect(polygon_table, cutout_sp)
102 |
103 | # Assert the intersection result matches the expected value
104 | assert intersection.value[0] == expected
105 |
106 |
107 | @pytest.mark.parametrize("cutout_size", [0, 0 * u.arcmin, [0, 0], (0, 0), (0*u.pix, 0*u.pix),
108 | [0*u.arcsec, 0*u.arcsec], np.array([0, 0])])
109 | def test_ra_dec_crossmatch_point(coordinates, all_ffis, cutout_size, crossmatch_spies):
110 | spy_point, spy_poly = crossmatch_spies
111 |
112 | # Cutout size of 0 should do a point match
113 | results = ra_dec_crossmatch(all_ffis, coordinates, cutout_size)
114 | assert isinstance(results, Table)
115 | spy_point.assert_called_once()
116 | spy_poly.assert_not_called()
117 |
118 |
119 | @pytest.mark.parametrize("cutout_size", [5, 5 * u.arcmin, [5, 5], [5*u.arcsec, 5*u.arcsec], (5, 0), (0, 5)])
120 | def test_ra_dec_crossmatch_poly(all_ffis, cutout_size, crossmatch_spies):
121 | spy_point, spy_poly = crossmatch_spies
122 |
123 | # Nonzero cutout size should trigger a polygon match
124 | results = ra_dec_crossmatch(all_ffis, '350 -80', cutout_size)
125 | assert isinstance(results, Table)
126 | spy_poly.assert_called_once()
127 | spy_point.assert_not_called()
128 |
129 |
130 | def test_tess_footprint_cutout(cutout_size, caplog):
131 | """Test that a single data cube is created for a given sequence"""
132 | cutout = TessFootprintCutout('130 30', cutout_size, sequence=44, verbose=True)
133 |
134 | # Check cutouts attribute
135 | cutouts = cutout.cutouts
136 | assert len(cutouts) == 1
137 | assert isinstance(cutouts, list)
138 | assert isinstance(cutouts[0], CubeCutout.CubeCutoutInstance)
139 | assert cutouts[0].shape[1:] == (5, 5)
140 |
141 | # Check cutouts_by_file attribute
142 | cutouts_by_file = cutout.cutouts_by_file
143 | assert len(cutouts_by_file) == 1
144 | assert isinstance(cutouts_by_file, dict)
145 | assert isinstance(list(cutouts_by_file.values())[0], CubeCutout.CubeCutoutInstance)
146 |
147 | # Check tpf_cutouts attribute
148 | tpf_cutouts = cutout.tpf_cutouts
149 | assert len(tpf_cutouts) == 1
150 | assert isinstance(tpf_cutouts, list)
151 | assert isinstance(tpf_cutouts[0], fits.HDUList)
152 | check_output_tpf(tpf_cutouts[0], [44])
153 |
154 | # Check tpf_cutouts_by_file
155 | tpf_cutouts_by_file = cutout.tpf_cutouts_by_file
156 | tpf_cutout = list(tpf_cutouts_by_file.values())[0]
157 | assert len(tpf_cutouts_by_file) == 1
158 | assert isinstance(tpf_cutouts_by_file, dict)
159 | assert isinstance(tpf_cutout, fits.HDUList)
160 | check_output_tpf(tpf_cutout, [44])
161 |
162 | # Check tess_cube attribute
163 | tess_cube = cutout.tess_cube_cutout
164 | assert isinstance(tess_cube, TessCubeCutout)
165 |
166 | # Assert that messages were printed
167 | captured = caplog.text
168 | assert 'Coordinates:' in captured
169 | assert 'Cutout size: [5 5]' in captured
170 | assert re.search(r'Found \d+ footprint files.', captured)
171 | assert re.search(r'Filtered to \d+ footprints for sequences: 44', captured)
172 | assert re.search(r'Found \d+ matching files.', captured)
173 | assert 'Generating cutouts...' in captured
174 |
175 | # Check that _extract_sequence_information works correctly
176 | # Should return empty dict if sector name does not match format
177 | sector_name = 'invalid_s'
178 | sector_name += '0044-4-1'
179 | info = _extract_sequence_information(sector_name)
180 | assert info == {}
181 |
182 |
183 | def test_tess_footprint_cutout_multi_sequence(coordinates, cutout_size):
184 | """Test that a cube is created for each sequence when multiple are provided"""
185 | sequences = [1, 13]
186 | cutout = TessFootprintCutout(coordinates, cutout_size, sequence=sequences)
187 | cutout_tpfs = cutout.tpf_cutouts
188 | assert len(cutout_tpfs) == 2
189 |
190 | for tpf in cutout_tpfs:
191 | check_output_tpf(tpf, sequences)
192 |
193 |
194 | def test_tess_footprint_cutout_all_sequences(coordinates, cutout_size):
195 | """Test that cubes are created for all sequences that intersect the cutout"""
196 | # Create cutouts for all possible sequences
197 | cutout = TessFootprintCutout(coordinates, cutout_size)
198 | cutout_tpfs = cutout.tpf_cutouts
199 | assert len(cutout_tpfs) >= 5
200 |
201 | # Crossmatch to get sectors that contain cutout
202 | all_ffis = get_ffis(cutout.S3_FOOTPRINT_CACHE)
203 | cone_results = ra_dec_crossmatch(all_ffis, '350 -80', cutout_size, 21)
204 | seq_list = _create_sequence_list(cone_results)
205 | sequences = [int(seq['sector']) for seq in seq_list]
206 |
207 | # Assert non-empty results
208 | assert len(seq_list) == len(cutout_tpfs)
209 | for tpf in cutout_tpfs:
210 | check_output_tpf(tpf, sequences)
211 |
212 |
213 | def test_tess_footprint_cutout_write_as_tpf(coordinates, cutout_size, tmpdir):
214 | """Test that TPF files are written to disk"""
215 | cutout = TessFootprintCutout(coordinates, cutout_size, sequence=[1, 13])
216 | paths = cutout.write_as_tpf(output_dir=tmpdir)
217 |
218 | for cutout_path in paths:
219 | path = Path(cutout_path)
220 | assert path.exists()
221 | assert path.suffix == '.fits'
222 |
223 | # Check that file can be opened
224 | with fits.open(path) as hdu:
225 | hdu.info()
226 |
227 |
228 | def test_tess_footprint_cutout_write_to_zip(coordinates, cutout_size, tmpdir):
229 | """Test that TPF cutouts are written to a ZIP archive"""
230 | cutout = TessFootprintCutout(coordinates, cutout_size, sequence=[1, 13])
231 | zip_path = cutout.write_as_zip(output_dir=tmpdir, filename='tess_cutouts')
232 |
233 | path = Path(zip_path)
234 | assert path.exists()
235 | assert path.name == 'tess_cutouts.zip'
236 | assert path.suffix == '.zip'
237 |
238 | # Check contents of ZIP file
239 | with zipfile.ZipFile(path, 'r') as zf:
240 | namelist = zf.namelist()
241 | assert len(namelist) == 2 # Two sequences
242 | for name in namelist:
243 | assert name.endswith('.fits')
244 |
245 |
246 | def test_tess_footprint_cutout_invalid_sequence(coordinates, cutout_size):
247 | """Test that InvalidQueryError is raised if sequence does not have cube files"""
248 | err = 'No files were found for sequences: -1'
249 | with pytest.raises(InvalidQueryError, match=err):
250 | TessFootprintCutout(coordinates, cutout_size, sequence=-1)
251 |
252 |
253 | def test_tess_footprint_cutout_outside_coords(coordinates, cutout_size):
254 | """Test that InvalidQueryError is raised if coordinates are not found in sequence"""
255 | err = 'The given coordinates were not found within the specified sequence(s).'
256 | with pytest.raises(InvalidQueryError, match=re.escape(err)):
257 | TessFootprintCutout(coordinates, cutout_size, sequence=2)
258 |
259 |
260 | def test_tess_footprint_cutout_invalid_product(coordinates, cutout_size):
261 | """Test that InvalidQueryError is raised if an invalid product is given"""
262 | err = 'Product for TESS cube cutouts must be "SPOC".'
263 | with pytest.raises(InvalidInputError, match=err):
264 | TessFootprintCutout(coordinates, cutout_size, product='invalid')
265 |
266 | with pytest.raises(InvalidInputError, match=err):
267 | TessFootprintCutout(coordinates, cutout_size, product='TICA')
268 |
269 |
270 | def test_cube_cut_from_footprint(coordinates, cutout_size, tmpdir):
271 | """Test that data cube is cut from FFI file using parallel processing"""
272 | # Writing to memory, should return cutouts as memory objects
273 | cutouts = cube_cut_from_footprint(coordinates,
274 | cutout_size,
275 | sequence=13,
276 | memory_only=True)
277 | assert len(cutouts) == 1
278 | assert isinstance(cutouts, list)
279 | assert isinstance(cutouts[0], fits.HDUList)
280 |
281 | # Writing to disk, should return cutout filepaths
282 | cutouts = cube_cut_from_footprint(coordinates,
283 | cutout_size,
284 | sequence=13,
285 | output_dir=tmpdir)
286 | assert len(cutouts) == 1
287 | assert isinstance(cutouts, list)
288 | assert isinstance(cutouts[0], str)
289 | assert str(tmpdir) in cutouts[0]
290 |
291 |
292 | @pytest.mark.parametrize("cutout_size", [0, 5, 1 * u.arcmin, [5, 5], (3*u.arcsec, 3*u.arcsec)])
293 | def test_get_tess_sectors(coordinates, cutout_size):
294 | """Test that get_tess_sectors returns sector list"""
295 | sector_table = get_tess_sectors(coordinates, cutout_size)
296 | assert isinstance(sector_table, Table)
297 | assert 'sectorName' in sector_table.colnames
298 | assert 'sector' in sector_table.colnames
299 | assert 'camera' in sector_table.colnames
300 | assert 'ccd' in sector_table.colnames
301 | assert len(sector_table) >= 7
302 |
303 |
304 | def test_get_tess_sectors_invalid_coordinates():
305 | """Test that InvalidInputError is raised for invalid coordinates input"""
306 | with pytest.raises(InvalidInputError, match='Invalid coordinates input'):
307 | get_tess_sectors('400 -120', 0)
308 |
309 |
310 | def test_get_tess_sectors_no_matches(monkeypatch, coordinates):
311 | """When no FFIs overlap the cutout, the sectors table should be empty."""
312 | empty_table = Table(names=['sectorName', 'sector', 'camera', 'ccd'], dtype=['S10', 'i4', 'i4', 'i4'])
313 | monkeypatch.setattr('astrocut.tess_footprint_cutout.ra_dec_crossmatch', lambda *_a, **_k: empty_table)
314 |
315 | sector_table = get_tess_sectors(coordinates, 0)
316 | assert isinstance(sector_table, Table)
317 | assert len(sector_table) == 0
318 |
--------------------------------------------------------------------------------
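
The tests above trace the full footprint-cutout workflow. As a minimal sketch of that
workflow (the target position and cutout size are illustrative, and the import path is
assumed to match the test module):

    from astropy.coordinates import SkyCoord
    from astrocut import cube_cut_from_footprint  # import path assumed

    coordinates = SkyCoord(217.42893801, -62.67949189, unit='deg')  # hypothetical target

    # In-memory cutouts come back as a list of fits.HDUList objects
    cutouts = cube_cut_from_footprint(coordinates, 25, sequence=13, memory_only=True)

    # Writing to disk returns the cutout file paths instead
    paths = cube_cut_from_footprint(coordinates, 25, sequence=13, output_dir='.')
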
/astrocut/footprint_cutout.py:
--------------------------------------------------------------------------------
1 | import json
2 | from abc import ABC, abstractmethod
3 | from threading import Lock
4 | from typing import List, Tuple, Union
5 |
6 | import astropy.units as u
7 | import fsspec
8 | import numpy as np
9 | from astropy.coordinates import SkyCoord
10 | from astropy.table import Table, Column
11 | from cachetools import TTLCache, cached
12 | from spherical_geometry.polygon import SphericalPolygon
13 | from spherical_geometry.vector import radec_to_vector
14 |
15 | from .cutout import Cutout
16 | from .exceptions import InvalidInputError
17 |
18 | FFI_TTLCACHE = TTLCache(maxsize=10, ttl=900) # Cache for FFI footprint files
19 |
20 |
21 | class FootprintCutout(Cutout, ABC):
22 | """
23 |     Abstract class for creating cutouts from data files hosted in the cloud on S3.
24 |
25 | Parameters
26 | ----------
27 | coordinates : str | `~astropy.coordinates.SkyCoord`
28 | Coordinates of the center of the cutout.
29 | cutout_size : int | array | list | tuple | `~astropy.units.Quantity`
30 | Size of the cutout array.
31 | fill_value : int | float
32 | Value to fill the cutout with if the cutout is outside the image.
33 | limit_rounding_method : str
34 | Method to use for rounding the cutout limits. Options are 'round', 'ceil', and 'floor'.
35 | sequence : int | list | None
36 | Default None. Sequence(s) from which to generate cutouts. Can provide a single
37 | sequence number as an int or a list of sequence numbers. If not specified,
38 | cutouts will be generated from all sequences that contain the cutout.
39 | verbose : bool
40 | If True, log messages are printed to the console.
41 |
42 | Methods
43 | -------
44 | cutout()
45 | Fetch the cloud files that contain the cutout and generate the cutouts.
46 | """
47 |
48 | def __init__(self, coordinates: Union[SkyCoord, str],
49 | cutout_size: Union[int, np.ndarray, u.Quantity, List[int], Tuple[int]] = 25,
50 | fill_value: Union[int, float] = np.nan, limit_rounding_method: str = 'round',
51 | sequence: Union[int, List[int], None] = None, verbose: bool = False):
52 | super().__init__([], coordinates, cutout_size, fill_value, limit_rounding_method, verbose)
53 |
54 | # Assigning other attributes
55 | if isinstance(sequence, int):
56 | sequence = [sequence] # Convert to list
57 | self._sequence = sequence
58 |
59 | @abstractmethod
60 | def cutout(self):
61 | """
62 | Generate cutouts from the cloud files that contain the cutout's footprint.
63 |
64 | This method is abstract and should be implemented in subclasses.
65 | """
66 | raise NotImplementedError('Subclasses must implement this method.')
67 |
68 | @staticmethod
69 | def _s_region_to_polygon(s_region: Column) -> Column:
70 | """
71 | Returns a column of `~spherical_geometry.polygon.SphericalPolygon` objects from a column of
72 | s_region strings.
73 |
74 | Parameters
75 | ----------
76 | s_region : `~astropy.table.Column`
77 | Column containing the s_region string. Example input: 'POLYGON 229.80771900 -75.17048500
78 | 241.67788000 -63.95992300 269.94872000 -64.39276400 277.87862300 -75.57754400'
79 |
80 | Returns
81 | -------
82 | polygon : `~astropy.table.Column`
83 | Column containing `~spherical_geometry.polygon.SphericalPolygon` objects representing each s_region.
84 | """
85 | def ind_sregion_to_polygon(s_reg):
86 | """
87 | Helper function to convert s_region string to a `~spherical_geometry.polygon.SphericalPolygon` object.
88 |
89 | Parameters
90 | ----------
91 | s_reg : str
92 | A string defining a spatial region, expected to be in the 'POLYGON' format.
93 |
94 | Returns
95 | -------
96 | `~spherical_geometry.polygon.SphericalPolygon`
97 | A SphericalPolygon object created from the provided coordinates.
98 |
99 | Raises
100 | ------
101 | ValueError
102 | If the S_REGION type is not 'POLYGON'.
103 |
104 | """
105 | # Split input string into individual components
106 | sr_list = s_reg.strip().split()
107 |
108 | # Extract the region type (first element of list)
109 | reg_type = sr_list[0].upper()
110 |
111 | if reg_type == 'POLYGON':
112 | # Extract RA and Dec values
113 | # RAs are at odd indices
114 | ras = np.array(sr_list[1::2], dtype=float)
115 |
116 | # Convert negative RAs to the 0-360 range
117 | ras[ras < 0] = ras[ras < 0] + 360
118 |
119 | # Decs are at even indices
120 | decs = np.array(sr_list[2::2], dtype=float)
121 |
122 | # Create SphericalPolygon object
123 | return SphericalPolygon.from_radec(ras, decs)
124 | else:
125 | raise ValueError(f'Unsupported s_region type: {reg_type}.')
126 |
127 | return np.vectorize(ind_sregion_to_polygon)(s_region)
128 |
129 | @staticmethod
130 | def _ffi_intersect(ffi_list: Table, polygon: SphericalPolygon) -> np.ndarray:
131 | """
132 |     Vectorized wrapper around the `~spherical_geometry.polygon.SphericalPolygon.intersects_poly` method.
133 |
134 | Parameters
135 | ----------
136 | ffi_list : `~astropy.table.Table`
137 | Table containing information about FFIs and their footprints.
138 | polygon : `~spherical_geometry.polygon.SphericalPolygon`
139 | SphericalPolygon object representing the cutout's footprint.
140 |
141 | Returns
142 | -------
143 | intersect : `~numpy.ndarray`
144 | Boolean array indicating whether each FFI intersects with the cutout.
145 | """
146 | def single_intersect(ffi, polygon):
147 | return ffi.intersects_poly(polygon)
148 |
149 | return np.vectorize(single_intersect)(ffi_list['polygon'], polygon)
150 |
151 |
152 | @cached(cache=FFI_TTLCACHE, lock=Lock())
153 | def get_ffis(s3_footprint_cache: str) -> Table:
154 |     Fetches footprints for Full Frame Images (FFIs) from S3. The resulting
155 |     table contains a row for each FFI, including a 'polygon' column that describes
156 |     the image's footprint as a spherical polygon.
157 | 
158 |     This function is defined outside the class so that its results can be cached.
159 | This method is outside the class definition to allow for caching.
160 |
161 | Parameters
162 | ----------
163 | s3_footprint_cache : str
164 | S3 URI to the footprint cache file.
165 |
166 | Returns
167 | -------
168 | ffis : `~astropy.table.Table`
169 | Table containing information about FFIs and their footprints.
170 | """
171 | # Open footprint file with fsspec
172 | with fsspec.open(s3_footprint_cache, s3={'anon': True}) as f:
173 | ffis = json.load(f)
174 |
175 | # Compute spherical polygons
176 | ffis['polygon'] = FootprintCutout._s_region_to_polygon(ffis['s_region'])
177 |
178 | # Convert to Astropy table
179 | ffis = Table(ffis)
180 |
181 | return ffis
182 |
183 |
184 | def _crossmatch_point(ra: u.Quantity, dec: u.Quantity, all_ffis: Table) -> np.ndarray:
185 | """
186 | Returns the indices of the Full Frame Images (FFIs) that contain the given RA and
187 | Dec coordinates by checking which FFI polygons contain the point.
188 |
189 | Parameters
190 | ----------
191 |     ra : `~astropy.units.Quantity`
192 |         Right Ascension in degrees.
193 |     dec : `~astropy.units.Quantity`
194 |         Declination in degrees.
195 | all_ffis : `~astropy.table.Table`
196 | Table of FFIs to crossmatch with the point.
197 |
198 | Returns
199 | -------
200 | ffi_inds : `~numpy.ndarray`
201 | Indices of FFIs that contain the given RA and Dec coordinates.
202 | """
203 | ffi_inds = []
204 | vector_coord = radec_to_vector(ra, dec)
205 | for sector in np.unique(all_ffis['sequence_number']):
206 |         # np.where returns a tuple; take its first element, the array of matching row indices
207 | sector_ffi_inds = np.where(all_ffis['sequence_number'] == sector)[0]
208 |
209 | for ind in sector_ffi_inds:
210 | if all_ffis[ind]["polygon"].contains_point(vector_coord):
211 | ffi_inds.append(ind)
212 | break # the ra/dec will only be on one ccd per sector
213 | return np.array(ffi_inds, dtype=int)
214 |
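# For illustration, the containment test above behaves like the following
# sketch (the corner values are made up; only calls already imported in this
# module are used):
#
#     poly = SphericalPolygon.from_radec([10, 20, 20, 10], [-5, -5, 5, 5])
#     poly.contains_point(radec_to_vector(15, 0))   # True: inside the corners
#     poly.contains_point(radec_to_vector(50, 0))   # False: outside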
215 |
216 | def _crossmatch_polygon(ra: u.Quantity, dec: u.Quantity, all_ffis: Table, px_size: np.ndarray,
217 | arcsec_per_px: int = 21) -> np.ndarray:
218 | """
219 | Returns the indices of the Full Frame Images (FFIs) that intersect with the given cutout footprint
220 | by checking which FFI polygons intersect with the cutout polygon.
221 |
222 | Parameters
223 | ----------
224 |     ra : `~astropy.units.Quantity`
225 |         Right Ascension in degrees.
226 |     dec : `~astropy.units.Quantity`
227 |         Declination in degrees.
228 |     all_ffis : `~astropy.table.Table`
229 |         Table of FFIs to crossmatch with the cutout footprint.
230 | px_size : array-like
231 | Size of the cutout in pixels, in the form [ny, nx].
232 | arcsec_per_px : int, optional
233 | Default 21. The number of arcseconds per pixel in an image. Used to determine
234 | the footprint of the cutout. Default is the number of arcseconds per pixel in
235 | a TESS image.
236 |
237 | Returns
238 | -------
239 | ffi_inds : `~numpy.ndarray`
240 | Boolean array indicating whether each FFI intersects with the cutout.
241 | """
242 | # Create polygon for intersection
243 | # Convert dimensions from pixels to arcseconds and divide by 2 to get offset from center
244 | # If one of the dimensions is 0, use a very small value to avoid issues with SphericalPolygon
245 | min_offset = 0.1 # pixels
246 | ra_offset = ((max(px_size[0], min_offset) * arcsec_per_px) / 2) * u.arcsec
247 | dec_offset = ((max(px_size[1], min_offset) * arcsec_per_px) / 2) * u.arcsec
248 |
249 | # Calculate RA and Dec boundaries
250 | ra_bounds = [ra - ra_offset, ra + ra_offset]
251 | dec_bounds = [dec - dec_offset, dec + dec_offset]
252 |
253 | # Get RA and Dec for four corners of rectangle
254 | ras = [ra_bounds[0].value, ra_bounds[1].value, ra_bounds[1].value, ra_bounds[0].value]
255 | decs = [dec_bounds[0].value, dec_bounds[0].value, dec_bounds[1].value, dec_bounds[1].value]
256 |
257 | # Create SphericalPolygon for comparison
258 | cutout_fp = SphericalPolygon.from_radec(ras, decs, center=(ra, dec))
259 |
260 | # Find indices of FFIs that intersect with the cutout
262 | ffi_inds = FootprintCutout._ffi_intersect(all_ffis, cutout_fp)
263 |
264 | return ffi_inds
265 |
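# Worked example of the footprint construction above: a [10, 10] pixel cutout
# at 21 arcsec/px spans 210 arcsec on each side, so each offset is 105 arcsec
# (~0.0292 deg) and the corners sit at (ra +/- 0.0292, dec +/- 0.0292) deg.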
266 |
267 | def ra_dec_crossmatch(all_ffis: Table, coordinates: Union[SkyCoord, str], cutout_size,
268 | arcsec_per_px: int = 21) -> Table:
269 | """
270 | Returns the Full Frame Images (FFIs) whose footprints overlap with a cutout of a given position and size.
271 |
272 | Parameters
273 | ----------
274 | all_ffis : `~astropy.table.Table`
275 | Table of FFIs to crossmatch with the cutout.
276 |     coordinates : str or `~astropy.coordinates.SkyCoord` object
277 | The position around which to cutout.
278 | It may be specified as a string ("ra dec" in degrees)
279 | or as the appropriate `~astropy.coordinates.SkyCoord` object.
280 | cutout_size : int, array-like, `~astropy.units.Quantity`
281 | The size of the cutout array. If ``cutout_size``
282 | is a scalar number or a scalar `~astropy.units.Quantity`,
283 | then a square cutout of ``cutout_size`` will be used. If
284 | ``cutout_size`` has two elements, they should be in ``(ny, nx)``
285 | order. Scalar numbers in ``cutout_size`` are assumed to be in
286 | units of pixels. `~astropy.units.Quantity` objects must be in pixel or
287 | angular units.
288 |
289 | If a cutout size of zero is provided, the function will return FFIs that contain
290 | the exact RA and Dec position. If a non-zero cutout size is provided, the function
291 | will return FFIs whose footprints overlap with the cutout area.
292 | arcsec_per_px : int, optional
293 | Default 21. The number of arcseconds per pixel in an image. Used to determine
294 | the footprint of the cutout. Default is the number of arcseconds per pixel in
295 | a TESS image.
296 |
297 | Returns
298 | -------
299 | matching_ffis : `~astropy.table.Table`
300 |         Table containing information about FFIs whose footprints overlap the cutout footprint.
301 | """
302 | # Convert coordinates to SkyCoord
303 | if not isinstance(coordinates, SkyCoord):
304 | try:
305 | coordinates = SkyCoord(coordinates, unit='deg')
306 | except ValueError as e:
307 | raise InvalidInputError(f'Invalid coordinates input: {e}')
308 | ra, dec = coordinates.ra, coordinates.dec
309 |
310 | px_size = np.zeros(2, dtype=object)
311 | for axis, size in enumerate(Cutout.parse_size_input(cutout_size, allow_zero=True)):
312 | if isinstance(size, u.Quantity): # If Quantity, convert to pixels
313 | if size.unit == u.pixel:
314 | px_size[axis] = size.value
315 | else: # Angular size
316 | # Convert angular size to pixels
317 | px_size[axis] = (size.to_value(u.arcsec)) / arcsec_per_px
318 | else: # Assume pixels
319 | px_size[axis] = size
320 |
321 | if np.all(px_size == 0):
322 | # Cross match with point
323 | ffi_inds = _crossmatch_point(ra, dec, all_ffis)
324 | else:
325 | # Cross match with polygon
326 | ffi_inds = _crossmatch_polygon(ra, dec, all_ffis, px_size, arcsec_per_px)
327 |
328 | return all_ffis[ffi_inds]
329 |
--------------------------------------------------------------------------------
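
The pieces above compose into a short footprint-matching flow. A minimal sketch,
building a one-row footprint table by hand in place of the S3 cache that `get_ffis`
would normally fetch (the s_region string is the example from the
`_s_region_to_polygon` docstring; the sequence number is an illustrative stand-in):

    from astropy.table import Table
    from astrocut.footprint_cutout import FootprintCutout, ra_dec_crossmatch

    s_region = ('POLYGON 229.80771900 -75.17048500 241.67788000 -63.95992300 '
                '269.94872000 -64.39276400 277.87862300 -75.57754400')
    ffis = Table({'s_region': [s_region], 'sequence_number': [13]})
    ffis['polygon'] = FootprintCutout._s_region_to_polygon(ffis['s_region'])

    # A size of 0 matches FFIs containing the exact position; a non-zero size
    # matches FFIs whose footprints overlap the cutout area.
    matches = ra_dec_crossmatch(ffis, '250.0 -70.0', cutout_size=0)
    assert len(matches) == 1  # the point falls inside the example footprint
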
/astrocut/tests/test_cutouts.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | import numpy as np
4 | from os import path
5 | from re import findall
6 |
7 | from astropy.io import fits
8 | from astropy import wcs
9 | from astropy.coordinates import SkyCoord
10 | from astropy import units as u
11 |
12 | from PIL import Image
13 |
14 | from .utils_for_test import create_test_imgs
15 | from .. import fits_cut, img_cut, normalize_img
16 | from ..exceptions import DataWarning, InputWarning, InvalidInputError, InvalidQueryError
17 |
18 |
19 | @pytest.mark.parametrize('ffi_type', ['SPOC', 'TICA'])
20 | def test_fits_cut(tmpdir, caplog, ffi_type):
21 |
22 | test_images = create_test_imgs(ffi_type, 50, 6, dir_name=tmpdir)
23 |
24 | # Single file
25 | center_coord = SkyCoord("150.1163213 2.200973097", unit='deg')
26 | cutout_size = 10
27 | cutout_file = fits_cut(test_images, center_coord, cutout_size, single_outfile=True, output_dir=tmpdir)
28 | assert isinstance(cutout_file, str)
29 |
30 | cutout_hdulist = fits.open(cutout_file)
31 | assert len(cutout_hdulist) == len(test_images) + 1 # num imgs + primary header
32 |
33 | cut1 = cutout_hdulist[1].data
34 | assert cut1.shape == (cutout_size, cutout_size)
35 | assert cutout_hdulist[1].data.shape == cutout_hdulist[2].data.shape
36 | assert cutout_hdulist[2].data.shape == cutout_hdulist[3].data.shape
37 | assert cutout_hdulist[3].data.shape == cutout_hdulist[4].data.shape
38 | assert cutout_hdulist[4].data.shape == cutout_hdulist[5].data.shape
39 | assert cutout_hdulist[5].data.shape == cutout_hdulist[6].data.shape
40 |
41 | cut_wcs = wcs.WCS(cutout_hdulist[1].header)
42 | sra, sdec = cut_wcs.all_pix2world(cutout_size/2, cutout_size/2, 0)
43 | assert round(float(sra), 4) == round(center_coord.ra.deg, 4)
44 | assert round(float(sdec), 4) == round(center_coord.dec.deg, 4)
45 |
46 | cutout_hdulist.close()
47 |
48 | # Multiple files
49 | cutout_files = fits_cut(test_images, center_coord, cutout_size, single_outfile=False, output_dir=tmpdir)
50 |
51 | assert isinstance(cutout_files, list)
52 | assert len(cutout_files) == len(test_images)
53 |
54 | cutout_hdulist = fits.open(cutout_files[0])
55 | assert len(cutout_hdulist) == 2
56 |
57 | cut1 = cutout_hdulist[1].data
58 | assert cut1.shape == (cutout_size, cutout_size)
59 |
60 | cut_wcs = wcs.WCS(cutout_hdulist[1].header)
61 | sra, sdec = cut_wcs.all_pix2world(cutout_size/2, cutout_size/2, 0)
62 | assert round(float(sra), 4) == round(center_coord.ra.deg, 4)
63 | assert round(float(sdec), 4) == round(center_coord.dec.deg, 4)
64 |
65 | cutout_hdulist.close()
66 |
67 | # Memory only, single file
68 | nonexisting_dir = "nonexisting" # non-existing directory to check that no files are written
69 | cutout_list = fits_cut(test_images, center_coord, cutout_size,
70 | output_dir=nonexisting_dir, single_outfile=True, memory_only=True)
71 | assert isinstance(cutout_list, list)
72 | assert len(cutout_list) == 1
73 | assert isinstance(cutout_list[0], fits.HDUList)
74 | assert not path.exists(nonexisting_dir) # no files should be written
75 |
76 | # Memory only, multiple files
77 | cutout_list = fits_cut(test_images, center_coord, cutout_size,
78 | output_dir=nonexisting_dir, single_outfile=False, memory_only=True)
79 | assert isinstance(cutout_list, list)
80 | assert len(cutout_list) == len(test_images)
81 | assert isinstance(cutout_list[0], fits.HDUList)
82 | assert not path.exists(nonexisting_dir) # no files should be written
83 |
84 | # Output directory that has to be created
85 | new_dir = path.join(tmpdir, "cutout_files") # non-existing directory to write files to
86 | cutout_files = fits_cut(test_images, center_coord, cutout_size,
87 | output_dir=new_dir, single_outfile=False)
88 |
89 | assert isinstance(cutout_files, list)
90 | assert len(cutout_files) == len(test_images)
91 | assert new_dir in cutout_files[0]
92 | assert path.exists(new_dir) # new directory should now exist
93 |
94 | # Do an off the edge test
95 | center_coord = SkyCoord("150.1163213 2.2005731", unit='deg')
96 | cutout_file = fits_cut(test_images, center_coord, cutout_size, single_outfile=True, output_dir=tmpdir)
97 | assert isinstance(cutout_file, str)
98 |
99 | cutout_hdulist = fits.open(cutout_file)
100 | assert len(cutout_hdulist) == len(test_images) + 1 # num imgs + primary header
101 |
102 | cut1 = cutout_hdulist[1].data
103 | assert cut1.shape == (cutout_size, cutout_size)
104 | assert np.isnan(cut1[:cutout_size//2, :]).all()
105 |
106 | cutout_hdulist.close()
107 |
108 | # Test when the requested cutout is not on the image
109 | center_coord = SkyCoord("140.1163213 2.2005731", unit='deg')
110 | with pytest.warns(DataWarning, match='does not overlap'):
111 | with pytest.warns(DataWarning, match='contains no data, skipping...'):
112 | with pytest.raises(InvalidQueryError, match='Cutout contains no data!'):
113 | cutout_file = fits_cut(test_images, center_coord, cutout_size, single_outfile=True)
114 |
115 | center_coord = SkyCoord("15.1163213 2.2005731", unit='deg')
116 | with pytest.warns(DataWarning, match='does not overlap'):
117 | with pytest.warns(DataWarning, match='contains no data, skipping...'):
118 | with pytest.raises(InvalidQueryError, match='Cutout contains no data!'):
119 | cutout_file = fits_cut(test_images, center_coord, cutout_size, single_outfile=True)
120 |
121 |
122 |     # Test when cutout is in some images but not others
123 | # Putting zeros into 2 images
124 | for img in test_images[:2]:
125 | hdu = fits.open(img, mode="update")
126 | hdu[0].data[:20, :] = 0
127 | hdu.flush()
128 | hdu.close()
129 |
130 | center_coord = SkyCoord("150.1163213 2.2007", unit='deg')
131 | with pytest.warns(DataWarning, match='contains no data, skipping...'):
132 | cutout_file = fits_cut(test_images, center_coord, cutout_size, single_outfile=True, output_dir=tmpdir)
133 |
134 | cutout_hdulist = fits.open(cutout_file)
135 | assert len(cutout_hdulist) == len(test_images) - 1 # 6 images - 2 empty + 1 primary header
136 |     assert not (cutout_hdulist[1].data == 0).any()
137 |     assert not (cutout_hdulist[2].data == 0).any()
138 |     assert not (cutout_hdulist[3].data == 0).any()
139 |     assert not (cutout_hdulist[4].data == 0).any()
140 |
141 | with pytest.warns(DataWarning, match='contains no data'):
142 | cutout_files = fits_cut(test_images, center_coord, cutout_size, single_outfile=False, output_dir=tmpdir)
143 | assert isinstance(cutout_files, list)
144 | assert len(cutout_files) == len(test_images) - 2
145 |
146 | # Test when cutout is in no images
147 | for img in test_images[2:]:
148 | hdu = fits.open(img, mode="update")
149 | hdu[0].data[:20, :] = 0
150 | hdu.flush()
151 | hdu.close()
152 |
153 | with pytest.warns(DataWarning, match='contains no data, skipping...'):
154 | with pytest.raises(InvalidQueryError, match='Cutout contains no data!'):
155 | cutout_file = fits_cut(test_images, center_coord, cutout_size, single_outfile=True,
156 | output_dir=tmpdir)
157 |
158 | # test single image and also conflicting sip keywords
159 | test_image = create_test_imgs(ffi_type, 50, 1, dir_name=tmpdir,
160 | basename="img_badsip_{:04d}.fits", bad_sip_keywords=True)[0]
161 |
162 | center_coord = SkyCoord("150.1163213 2.2007", unit='deg')
163 | cutout_size = [10, 15]
164 | cutout_file = fits_cut(test_image, center_coord, cutout_size, output_dir=tmpdir)
165 | assert isinstance(cutout_file, str)
166 | assert "10-x-15" in cutout_file
167 | cutout_hdulist = fits.open(cutout_file)
168 | assert cutout_hdulist[1].data.shape == (15, 10)
169 |
170 | center_coord = SkyCoord("150.1159 2.2006", unit='deg')
171 | cutout_size = [10, 15]*u.pixel
172 | cutout_file = fits_cut(test_image, center_coord, cutout_size, output_dir=tmpdir)
173 | assert isinstance(cutout_file, str)
174 | assert "10.0pix-x-15.0pix" in cutout_file
175 | cutout_hdulist = fits.open(cutout_file)
176 | assert cutout_hdulist[1].data.shape == (15, 10)
177 |
178 | cutout_size = [1, 2]*u.arcsec
179 | cutout_file = fits_cut(test_image, center_coord, cutout_size, output_dir=tmpdir, verbose=True)
180 | assert isinstance(cutout_file, str)
181 | assert "1.0arcsec-x-2.0arcsec" in cutout_file
182 | cutout_hdulist = fits.open(cutout_file)
183 | assert cutout_hdulist[1].data.shape == (33, 17)
184 | captured = caplog.text
185 | assert "Original image shape: (50, 50)" in captured
186 | assert "Image cutout shape: (33, 17)" in captured
187 | assert "Total time:" in captured
188 |
189 | center_coord = "150.1159 2.2006"
190 | cutout_size = [10, 15, 20]
191 | with pytest.warns(InputWarning):
192 | cutout_file = fits_cut(test_image, center_coord, cutout_size, output_dir=tmpdir)
193 | assert isinstance(cutout_file, str)
194 | assert "10-x-15" in cutout_file
195 | assert "x-20" not in cutout_file
196 |
197 | # Test single cloud image
198 | test_s3_uri = "s3://stpubdata/hst/public/j8pu/j8pu0y010/j8pu0y010_drc.fits"
199 | center_coord = SkyCoord("150.4275416667 2.42155", unit='deg')
200 | cutout_size = [10, 15]
201 | cutout_file = fits_cut(test_s3_uri, center_coord, cutout_size, output_dir=tmpdir)
202 | assert isinstance(cutout_file, str)
203 | assert "10-x-15" in cutout_file
204 |
205 | with fits.open(cutout_file) as cutout_hdulist:
206 | assert cutout_hdulist[1].data.shape == (15, 10)
207 |
208 |
209 | def test_normalize_img():
210 |
211 | # basic linear stretch
212 | img_arr = np.array([[1, 0], [.25, .75]])
213 | assert ((img_arr*255).astype(int) == normalize_img(img_arr, stretch='linear')).all()
214 |
215 | # invert
216 | assert (255-(img_arr*255).astype(int) == normalize_img(img_arr, stretch='linear', invert=True)).all()
217 |
218 | # linear stretch where input image must be scaled
219 | img_arr = np.array([[10, 5], [2.5, 7.5]])
220 | norm_img = ((img_arr - img_arr.min())/(img_arr.max()-img_arr.min())*255).astype(int)
221 | assert (norm_img == normalize_img(img_arr, stretch='linear')).all()
222 |
223 | # min_max val
224 | minval, maxval = 0, 1
225 | img_arr = np.array([[1, 0], [-1, 2]])
226 | norm_img = normalize_img(img_arr, stretch='linear', minmax_value=[minval, maxval])
227 | img_arr[img_arr < minval] = minval
228 | img_arr[img_arr > maxval] = maxval
229 | assert ((img_arr*255).astype(int) == norm_img).all()
230 |
231 | minval, maxval = 0, 1
232 | img_arr = np.array([[1, 0], [.1, .2]])
233 | norm_img = normalize_img(img_arr, stretch='linear', minmax_value=[minval, maxval])
234 | img_arr[img_arr < minval] = minval
235 | img_arr[img_arr > maxval] = maxval
236 |     assert ((img_arr*255).astype(int) == norm_img).all()
237 |
238 | # min_max percent
239 | img_arr = np.array([[1, 0], [0.1, 0.9], [.25, .75]])
240 | norm_img = normalize_img(img_arr, stretch='linear', minmax_percent=[25, 75])
241 | assert (norm_img == [[255, 0], [0, 255], [39, 215]]).all()
242 |
243 | # asinh
244 | img_arr = np.array([[1, 0], [.25, .75]])
245 | norm_img = normalize_img(img_arr)
246 | assert ((np.arcsinh(img_arr*10)/np.arcsinh(10)*255).astype(int) == norm_img).all()
247 |
248 | # sinh
249 | img_arr = np.array([[1, 0], [.25, .75]])
250 | norm_img = normalize_img(img_arr, stretch='sinh')
251 | assert ((np.sinh(img_arr*3)/np.sinh(3)*255).astype(int) == norm_img).all()
252 |
253 | # sqrt
254 | img_arr = np.array([[1, 0], [.25, .75]])
255 | norm_img = normalize_img(img_arr, stretch='sqrt')
256 | assert ((np.sqrt(img_arr)*255).astype(int) == norm_img).all()
257 |
258 | # log
259 | img_arr = np.array([[1, 0], [.25, .75]])
260 | norm_img = normalize_img(img_arr, stretch='log')
261 | assert ((np.log(img_arr*1000+1)/np.log(1000)*255).astype(int) == norm_img).all()
262 |
263 | # Bad stretch
264 | with pytest.raises(InvalidInputError):
265 | img_arr = np.array([[1, 0], [.25, .75]])
266 | normalize_img(img_arr, stretch='lin')
267 |
268 | # Giving both minmax percent and cut
269 | img_arr = np.array([[1, 0], [.25, .75]])
270 | norm_img = normalize_img(img_arr, stretch='asinh', minmax_percent=[0.7, 99.3])
271 | with pytest.warns(InputWarning):
272 | test_img = normalize_img(img_arr, stretch='asinh', minmax_value=[5, 2000], minmax_percent=[0.7, 99.3])
273 | assert (test_img == norm_img).all()
274 |
275 |
276 | @pytest.mark.parametrize('ffi_type', ['SPOC', 'TICA'])
277 | def test_img_cut(tmpdir, caplog, ffi_type):
278 |
279 | test_images = create_test_imgs(ffi_type, 50, 6, dir_name=tmpdir)
280 | center_coord = SkyCoord("150.1163213 2.200973097", unit='deg')
281 | cutout_size = 10
282 |
283 | # Basic jpg image
284 | jpg_files = img_cut(test_images, center_coord, cutout_size, output_dir=tmpdir)
285 |
286 | assert len(jpg_files) == len(test_images)
287 | with open(jpg_files[0], 'rb') as IMGFLE:
288 | assert IMGFLE.read(3) == b'\xFF\xD8\xFF' # JPG
289 |
290 | # Png (single input file, not as list)
291 | img_files = img_cut(test_images[0], center_coord, cutout_size, img_format='png', output_dir=tmpdir)
292 | with open(img_files[0], 'rb') as IMGFLE:
293 | assert IMGFLE.read(8) == b'\x89\x50\x4E\x47\x0D\x0A\x1A\x0A' # PNG
294 | assert len(img_files) == 1
295 |
296 | # Color image
297 | color_jpg = img_cut(test_images[:3], center_coord, cutout_size, colorize=True, output_dir=tmpdir)
298 | img = Image.open(color_jpg)
299 | assert img.mode == 'RGB'
300 |
301 | # Too few input images
302 | with pytest.raises(InvalidInputError):
303 | img_cut(test_images[0], center_coord, cutout_size, colorize=True, output_dir=tmpdir)
304 |
305 | # Too many input images
306 | with pytest.warns(InputWarning):
307 | color_jpg = img_cut(test_images, center_coord, cutout_size, colorize=True, output_dir=tmpdir)
308 | img = Image.open(color_jpg)
309 | assert img.mode == 'RGB'
310 |
311 | # string coordinates and verbose
312 | center_coord = "150.1163213 2.200973097"
313 | jpg_files = img_cut(test_images, center_coord, cutout_size,
314 | output_dir=path.join(tmpdir, "image_path"), verbose=True)
315 | captured = caplog.text
316 | assert len(findall("Original image shape", captured)) == 6
317 | assert "Cutout filepaths:" in captured
318 | assert "Total time" in captured
319 |
320 | # test color image where one of the images is all zeros
321 | hdu = fits.open(test_images[0], mode='update')
322 | hdu[0].data[:, :] = 0
323 | hdu.flush()
324 | hdu.close()
325 |
326 | with pytest.warns(DataWarning, match='contains no data, skipping...'):
327 | with pytest.raises(InvalidInputError):
328 | img_cut(test_images[:3], center_coord, cutout_size,
329 | colorize=True, img_format='png', output_dir=tmpdir)
330 |
--------------------------------------------------------------------------------
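
test_normalize_img encodes the expected stretch math inline. The same formulas,
written out as a standalone NumPy sketch that mirrors the assertions above:

    import numpy as np

    img_arr = np.array([[1, 0], [.25, .75]])

    # Expected outputs for each stretch, as asserted in test_normalize_img
    linear = (img_arr * 255).astype(int)
    asinh = (np.arcsinh(img_arr * 10) / np.arcsinh(10) * 255).astype(int)
    sinh = (np.sinh(img_arr * 3) / np.sinh(3) * 255).astype(int)
    sqrt = (np.sqrt(img_arr) * 255).astype(int)
    log = (np.log(img_arr * 1000 + 1) / np.log(1000) * 255).astype(int)
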
/astrocut/tests/test_cutout_processing.py:
--------------------------------------------------------------------------------
1 | import os
2 | import pytest
3 |
4 | import numpy as np
5 |
6 | from astropy.io import fits
7 | from astropy.utils.data import get_pkg_data_filename
8 | from astropy.wcs import WCS
9 | from astropy.coordinates import SkyCoord
10 | from astropy.table import Table
11 | from astropy.time import Time
12 |
13 | from .utils_for_test import create_test_ffis, create_test_imgs
14 | from .. import cutout_processing, CubeFactory, CutoutFactory, fits_cut
15 |
16 |
17 | # Example FFI WCS for testing
18 | with open(get_pkg_data_filename('data/ex_ffi_wcs.txt'), "r") as FLE:
19 | WCS_STR = FLE.read()
20 |
21 |
22 | def test_combine_headers():
23 |
24 | header_1 = fits.Header(cards=[('KWD_SHR', 20, 'Shared keyword'),
25 | ('KWD_DIF', 'one', 'Different keyword'),
26 | ('CHECKSUM', 1283726182378, "Keyword to drop")])
27 | header_2 = fits.Header(cards=[('KWD_SHR', 20, 'Shared keyword'),
28 | ('KWD_DIF', 'two', 'Different keyword'),
29 | ('CHECKSUM', 1248721378218, "Keyword to drop")])
30 |
31 | combined_header = cutout_processing._combine_headers([header_1, header_2])
32 |
33 | assert len(combined_header) == 7
34 | assert 'KWD_SHR' in combined_header
35 | assert 'KWD_DIF' not in combined_header
36 | assert 'CHECKSUM' not in combined_header
37 | assert combined_header['F01_K01'] == combined_header['F02_K01']
38 | assert combined_header['F01_V01'] != combined_header['F02_V01']
39 | assert combined_header['F01_V01'] == header_1[combined_header['F01_K01']]
40 | assert 'F01_K02' not in combined_header
41 |
42 | combined_header = cutout_processing._combine_headers([header_1, header_2], constant_only=True)
43 | assert len(combined_header) == 1
44 | assert 'KWD_SHR' in combined_header
45 | assert 'KWD_DIF' not in combined_header
46 | assert 'F01_K01' not in combined_header
47 |
48 |
49 | def test_get_bounds():
50 |
51 | x = [5, 10]
52 | y = [2, 20]
53 | size = [3, 5]
54 | bounds = cutout_processing._get_bounds(x, y, size)
55 | assert (bounds == np.array([[[4, 7], [0, 5]], [[8, 11], [18, 23]]])).all()
56 |
57 | for nx, ny in bounds:
58 | assert nx[1]-nx[0] == size[0]
59 | assert ny[1]-ny[0] == size[1]
60 |
61 | # test that if we move the center a small amount, we still get the same integer bounds
62 | x = [5.9, 9.8]
63 | y = [2.2, 20.2]
64 | assert (cutout_processing._get_bounds(x, y, size) == bounds).all()
65 |
66 |
67 | def test_combine_bounds():
68 |
69 | x = [5, 10]
70 | y = [2, 20]
71 | size = [3, 5]
72 | bounds = cutout_processing._get_bounds(x, y, size)
73 |
74 | big_bounds = cutout_processing._combine_bounds(bounds[0], bounds[1])
75 |
76 | assert big_bounds.dtype == int
77 | for bx, by in bounds:
78 | assert big_bounds[0, 0] <= bx[0]
79 | assert big_bounds[0, 1] >= bx[1]
80 | assert big_bounds[1, 0] <= by[0]
81 | assert big_bounds[1, 1] >= by[1]
82 |
83 |
84 | def test_area():
85 |
86 | x = [5, 10]
87 | y = [2, 20]
88 | size = [3, 5]
89 | area = np.multiply(*size)
90 |
91 | bounds = cutout_processing._get_bounds(x, y, size)
92 | area_0 = cutout_processing._area(bounds[0])
93 | area_1 = cutout_processing._area(bounds[1])
94 |
95 | assert area_0 == area
96 | assert area_0 == area_1
97 |
98 |
99 | def test_get_args():
100 |
101 | wcs_obj = WCS(WCS_STR, relax=True)
102 | bounds = np.array([[0, 4], [0, 6]])
103 |
104 | args = cutout_processing._get_args(bounds, wcs_obj)
105 | assert args["coordinates"] == wcs_obj.pixel_to_world(2, 3)
106 | assert args["size"] == (6, 4)
107 |
108 |
109 | def test_moving_target_focus(tmpdir):
110 |
111 | # Making the test cube/cutout
112 | cube_maker = CubeFactory()
113 |
114 | img_sz = 1000
115 | num_im = 10
116 |
117 | ffi_files = create_test_ffis(img_size=img_sz, num_images=num_im, dir_name=tmpdir)
118 | cube_file = cube_maker.make_cube(ffi_files, os.path.join(tmpdir, "test_cube.fits"), verbose=False)
119 |
120 | cutout_file = CutoutFactory().cube_cut(cube_file, "250.3497414839765 2.280925599609063", 100,
121 | target_pixel_file="cutout_file.fits", output_path=tmpdir,
122 | verbose=False)
123 |
124 | cutout_wcs = WCS(fits.getheader(cutout_file, 2))
125 | cutout_data = Table(fits.getdata(cutout_file, 1))
126 |
127 | # Focusing on a path where the time points line up with cutout times
128 | coords = cutout_wcs.pixel_to_world([4, 5, 10, 20], [10, 10, 11, 12])
129 | times = Time(Table(fits.getdata(cutout_file, 1))["TIME"].data[:len(coords)] + 2457000, format="jd")
130 | path = Table({"time": times, "position": coords})
131 | size = [4, 4]
132 |
133 | mt_cutout_table = cutout_processing._moving_target_focus(path, size, [cutout_file])
134 | assert np.allclose(coords.ra.deg, mt_cutout_table["TGT_RA"])
135 | assert np.allclose(coords.dec.deg, mt_cutout_table["TGT_DEC"])
136 | assert (mt_cutout_table["TIME"] == cutout_data["TIME"][:len(coords)]).all()
137 | assert (mt_cutout_table["FFI_FILE"] == cutout_data["FFI_FILE"][:len(coords)]).all()
138 |
139 | # Focusing on a path where interpolation will actually have to be used
140 | times = Time(Table(fits.getdata(cutout_file, 1))["TIME"].data[:len(coords)*2:2] + 2457000, format="jd")
141 | path = Table({"time": times, "position": coords})
142 |
143 | mt_cutout_table = cutout_processing._moving_target_focus(path, size, [cutout_file])
144 | assert mt_cutout_table["TIME"].max() == (times.jd[-1] - 2457000)
145 | assert len(mt_cutout_table) > len(path)
146 |
147 |
148 | def test_path_to_footprints():
149 |
150 | img_wcs = WCS(WCS_STR, relax=True)
151 | size = [4, 5]
152 |
153 | xs = [10, 20, 30, 40, 50]
154 | ys = [1000, 950, 900, 810, 800]
155 | path = img_wcs.pixel_to_world(xs, ys)
156 |
157 | footprints = cutout_processing.path_to_footprints(path, size, img_wcs)
158 | assert len(footprints) == 1
159 |
160 | assert (np.max(xs) - np.min(xs) + size[0]) == footprints[0]["size"][1]
161 | assert (np.max(ys) - np.min(ys) + size[1]) == footprints[0]["size"][0]
162 |
163 | cent_x = (np.max(xs) - np.min(xs) + size[0])//2 + np.min(xs) - size[0]/2
164 | cent_y = (np.max(ys) - np.min(ys) + size[1])//2 + np.min(ys) - size[1]/2
165 | assert (img_wcs.pixel_to_world([cent_x], [cent_y]) == footprints[0]["coordinates"]).all()
166 |
167 | # Lowering the max pixels so we force >1 footprint
168 | max_pixels = 100
169 | footprints = cutout_processing.path_to_footprints(path, size, img_wcs, max_pixels)
170 |
171 | assert len(footprints) == 5
172 | for fp in footprints:
173 | assert np.multiply(*fp["size"]) <= max_pixels
174 |
175 |
176 | def test_configure_bintable_header(tmpdir):
177 |
179 | # Making the test cube/cutout/table we need
180 | cube_maker = CubeFactory()
181 |
182 | img_sz = 1000
183 | num_im = 10
184 |
185 | ffi_files = create_test_ffis(img_sz, num_im, dir_name=tmpdir)
186 | cube_file = cube_maker.make_cube(ffi_files, os.path.join(tmpdir, "test_cube.fits"), verbose=False)
187 |
188 | cutout_file = CutoutFactory().cube_cut(cube_file, "250.3497414839765 2.280925599609063", 100,
189 | target_pixel_file="cutout_file.fits", output_path=tmpdir,
190 | verbose=False)
191 |
192 | cutout_wcs = WCS(fits.getheader(cutout_file, 2))
193 | coords = cutout_wcs.pixel_to_world([4, 5, 10, 20], [10, 10, 11, 12])
194 | times = Time(Table(fits.getdata(cutout_file, 1))["TIME"].data[:len(coords)] + 2457000, format="jd")
195 | path = Table({"time": times, "position": coords})
196 | size = [4, 4]
197 |
198 | mt_cutout_table = cutout_processing._moving_target_focus(path, size, [cutout_file])
199 | mt_cutout_fits_table = fits.table_to_hdu(mt_cutout_table)
200 |
201 | new_header = mt_cutout_fits_table.header
202 | orig_header = new_header.copy()
203 | cutout_header = fits.getheader(cutout_file, 1)
204 |
205 | cutout_processing._configure_bintable_header(new_header, [cutout_header])
206 | for kwd in new_header:
207 | if kwd in orig_header:
208 | assert orig_header[kwd] == new_header[kwd]
209 | else:
210 | assert cutout_header[kwd] == new_header[kwd]
211 |
212 |     # TODO: add a test where there is more than one cutout header
213 |
214 |
215 | @pytest.mark.parametrize("in_target, out_file", [(None, "path_"), ("C/ Targetname", "C-_Targetname")])
216 | def test_center_on_path(tmpdir, in_target, out_file):
217 |
218 | # Making the test cube/cutout
219 | cube_maker = CubeFactory()
220 |
221 | img_sz = 1000
222 | num_im = 10
223 |
224 | ffi_files = create_test_ffis(img_sz, num_im, dir_name=tmpdir)
225 | cube_file = cube_maker.make_cube(ffi_files, os.path.join(tmpdir, "test_cube.fits"), verbose=False)
226 |
227 | cutout_maker = CutoutFactory()
228 | cutout_file = cutout_maker.cube_cut(cube_file, "250.3497414839765 2.280925599609063", 100,
229 | target_pixel_file="cutout_file.fits", output_path=tmpdir,
230 | verbose=False)
231 |
232 | cutout_wcs = WCS(fits.getheader(cutout_file, 2))
233 |
234 | coords = cutout_wcs.pixel_to_world([4, 5, 10, 20], [10, 10, 11, 12])
235 | times = Time(Table(fits.getdata(cutout_file, 1))["TIME"].data[:len(coords)] + 2457000, format="jd")
236 | path = Table({"time": times, "position": coords})
237 | size = [4, 4]
238 |
239 | # Parametrization of 2 tests
240 | # Test 1: Using the default output filename and not giving an image wcs
241 | # Test 2: Using target name with special characters and not giving an image wcs
242 | output = cutout_processing.center_on_path(path, size,
243 | cutout_fles=[cutout_file],
244 | target=in_target,
245 | output_path=tmpdir,
246 | verbose=False)
247 | output_file = os.path.basename(output)
248 | assert output_file.startswith(out_file)
249 |
250 | hdu = fits.open(output)
251 | assert len(hdu) == 2
252 | assert hdu[0].header["DATE"] == Time.now().to_value('iso', subfmt='date')
253 |     assert hdu[0].header["OBJECT"] == ('' if in_target is None else in_target)
254 | hdu.close()
255 |
256 |
257 | def test_center_on_path_input_tpf(tmpdir):
258 |
259 | # Making the test cube/cutout
260 | cube_maker = CubeFactory()
261 |
262 | img_sz = 1000
263 | num_im = 10
264 |
265 | ffi_files = create_test_ffis(img_sz, num_im, dir_name=tmpdir)
266 | cube_file = cube_maker.make_cube(ffi_files, os.path.join(tmpdir, "test_cube.fits"), verbose=False)
267 |
268 | cutout_maker = CutoutFactory()
269 | cutout_file = cutout_maker.cube_cut(cube_file, "250.3497414839765 2.280925599609063", 100,
270 | target_pixel_file="cutout_file.fits", output_path=tmpdir,
271 | verbose=False)
272 |
273 | cutout_wcs = WCS(fits.getheader(cutout_file, 2))
274 |
275 | coords = cutout_wcs.pixel_to_world([4, 5, 10, 20], [10, 10, 11, 12])
276 | times = Time(Table(fits.getdata(cutout_file, 1))["TIME"].data[:len(coords)] + 2457000, format="jd")
277 | path = Table({"time": times, "position": coords})
278 | size = [4, 4]
279 |
280 | # Giving both a target name and a specific output filename
281 | img_wcs = cutout_maker.cube_wcs
282 | output = cutout_processing.center_on_path(path,
283 | size,
284 | cutout_fles=[cutout_file],
285 | target="Target Name",
286 | img_wcs=img_wcs,
287 | target_pixel_file="mt_cutout.fits",
288 | output_path=tmpdir,
289 | verbose=False)
290 | assert "mt_cutout.fits" in output
291 |
292 | mt_wcs = WCS(fits.getheader(output, 2))
293 | assert img_wcs.to_header(relax=True) == mt_wcs.to_header(relax=True)
294 |
295 | primary_header = fits.getheader(output)
296 | assert primary_header["DATE"] == Time.now().to_value('iso', subfmt='date')
297 | assert primary_header["OBJECT"] == "Target Name"
298 |
299 |
300 | def test_default_combine():
301 | """
302 |     The build_default_combine_function function uses the input HDUs to
303 |     determine which pixels each image contributes when combining.
304 |     The returned combiner function applies that mask before taking the
305 |     mean of all non-masked pixels at each position to produce the
306 |     output image.
307 | """
308 |
309 | hdu_1 = fits.ImageHDU(np.array([[1, 1], [0, 0]]))
310 | hdu_2 = fits.ImageHDU(np.array([[0, 0], [1, 1]]))
311 |
312 | # Two input arrays no overlapping pixels
313 | combine_func = cutout_processing.build_default_combine_function([hdu_1, hdu_2], 0)
314 | assert (combine_func([hdu_1, hdu_2]) == 1).all()
315 | assert (combine_func([hdu_2, hdu_1]) == 0).all()
316 |
317 | # Three input arrays overlapping pixels
318 | hdu_3 = fits.ImageHDU(np.array([[0, 1], [1, 0]]))
319 | combine_func = cutout_processing.build_default_combine_function([hdu_1, hdu_2, hdu_3], 0)
320 |
321 | im4 = fits.ImageHDU(np.array([[4, 5], [0, 0]]))
322 | im5 = fits.ImageHDU(np.array([[0, 0], [4, 5]]))
323 | im6 = fits.ImageHDU(np.array([[0, 3], [8, 0]]))
324 | comb_img = combine_func([im4, im5, im6])
325 | assert (comb_img == [[4, 4], [6, 5]]).all()
326 |
327 | im4 = fits.ImageHDU(np.array([[4, 5], [-3, 8]]))
328 | im5 = fits.ImageHDU(np.array([[5, 2], [4, 5]]))
329 | im6 = fits.ImageHDU(np.array([[4, 3], [8, 9]]))
330 | assert (combine_func([im4, im5, im6]) == comb_img).all()
331 |
332 | # Two input arrays, with nans and a missing pixel
333 | hdu_1 = fits.ImageHDU(np.array([[1, np.nan], [np.nan, np.nan]]))
334 | hdu_2 = fits.ImageHDU(np.array([[np.nan, np.nan], [1, 1]]))
335 |
336 | combine_func = cutout_processing.build_default_combine_function([hdu_1, hdu_2])
337 | assert np.allclose(combine_func([hdu_1, hdu_2]), [[1, np.nan], [1, 1]], equal_nan=True)
338 |
339 |
340 | @pytest.mark.parametrize('ffi_type', ['SPOC', 'TICA'])
341 | def test_combiner(tmpdir, ffi_type):
342 |
343 | test_images = create_test_imgs(ffi_type, 50, 6, dir_name=tmpdir)
344 | center_coord = SkyCoord("150.1163213 2.200973097", unit='deg')
345 | cutout_size = 2
346 |
347 | cutout_file_1 = fits_cut(test_images[:3], center_coord, cutout_size,
348 | cutout_prefix="cutout_1", output_dir=tmpdir)
349 | cutout_file_2 = fits_cut(test_images[3:], center_coord, cutout_size,
350 | cutout_prefix="cutout_2", output_dir=tmpdir)
351 |
352 | combiner = cutout_processing.CutoutsCombiner([cutout_file_1, cutout_file_2])
353 |
354 | # Checking the load function
355 | assert center_coord.separation(combiner.center_coord) == 0
356 | assert len(combiner.input_hdulists) == 3
357 | assert len(combiner.input_hdulists[0]) == 2
358 |
359 | # Checking the combiner function was set properly
360 | comb_1 = combiner.combine_images(combiner.input_hdulists[0])
361 | combine_func = cutout_processing.build_default_combine_function(combiner.input_hdulists[0])
362 | assert (comb_1 == combine_func(combiner.input_hdulists[0])).all()
363 |
364 | # Running the combine function and checking the results
365 | out_fle = combiner.combine(os.path.join(tmpdir, "combination.fits"))
366 | comb_hdu = fits.open(out_fle)
367 | assert len(comb_hdu) == 4
368 | assert (comb_hdu[1].data == comb_1).all()
369 | assert np.isclose(comb_hdu[0].header['RA_OBJ'], center_coord.ra.deg)
370 | assert np.isclose(comb_hdu[0].header['DEC_OBJ'], center_coord.dec.deg)
371 | comb_hdu.close()
372 |
373 | # Checking memory only input and output
374 | input_fits_1 = fits.open(cutout_file_1)
375 | input_fits_2 = fits.open(cutout_file_2)
376 |
377 | combiner = cutout_processing.CutoutsCombiner([input_fits_1, input_fits_2])
378 | assert center_coord.separation(combiner.center_coord) == 0
379 | assert len(combiner.input_hdulists) == 3
380 | assert len(combiner.input_hdulists[0]) == 2
381 |
382 | comb_hdu = combiner.combine(memory_only=True)
383 |
--------------------------------------------------------------------------------