├── focalplane ├── fpalign │ ├── __init__.py │ └── prepare_fpa_data.py ├── example_subpkg │ ├── data │ │ └── .gitignore │ ├── tests │ │ └── __init__.py │ ├── setup_package.py │ └── __init__.py ├── tests │ ├── test_fpalign.py │ ├── __init__.py │ ├── setup_package.py │ ├── test_example.py │ ├── coveragerc │ └── test_utils.py ├── data │ └── README.rst ├── extern │ └── __init__.py ├── _astropy_init.py ├── __init__.py ├── example_mod.py ├── conftest.py └── utils.py ├── docs ├── rtd-pip-requirements ├── focalplane │ └── index.rst ├── _templates │ └── autosummary │ │ ├── base.rst │ │ ├── class.rst │ │ └── module.rst ├── index.rst ├── make.bat ├── exts │ └── numfig.py ├── Makefile └── conf.py ├── readthedocs.yml ├── CHANGES.rst ├── .gitmodules ├── .rtd-environment.yml ├── CITATION ├── licenses ├── README.rst ├── AURA.rst └── TEMPLATE_LICENCE.rst ├── CONTRIBUTING.md ├── .gitignore ├── MANIFEST.in ├── README.md ├── setup.cfg ├── _README.rst ├── CODE_OF_CONDUCT.md ├── setup.py └── ah_bootstrap.py /focalplane/fpalign/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /focalplane/example_subpkg/data/.gitignore: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /focalplane/example_subpkg/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /focalplane/tests/test_fpalign.py: -------------------------------------------------------------------------------- 1 | # to be written -------------------------------------------------------------------------------- /docs/rtd-pip-requirements: -------------------------------------------------------------------------------- 1 | stsci_rtd_theme 2 | numpy 3 | matplotlib 4 | -------------------------------------------------------------------------------- /readthedocs.yml: -------------------------------------------------------------------------------- 1 | conda: 2 | file: .rtd-environment.yml 3 | 4 | python: 5 | setup_py_install: true 6 | -------------------------------------------------------------------------------- /CHANGES.rst: -------------------------------------------------------------------------------- 1 | Changes included in this version 2 | -------------------------------- 3 | 4 | Edit this template to detail updates to the codebase 5 | -------------------------------------------------------------------------------- /focalplane/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | """ 3 | This module contains package tests. 
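The suite can be run with ``python setup.py test`` from the repository root, or via the ``focalplane.test()`` function provided by the astropy test runner (see ``_astropy_init.py``).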
4 | """ 5 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "astropy_helpers"] 2 | url = https://github.com/astropy/astropy-helpers.git 3 | path = astropy_helpers 4 | branch = refs/heads/v3.0.2 5 | -------------------------------------------------------------------------------- /focalplane/example_subpkg/setup_package.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | from __future__ import absolute_import 3 | 4 | 5 | def get_package_data(): 6 | return {'focalplane.example_subpkg': ['data/*']} 7 | -------------------------------------------------------------------------------- /docs/focalplane/index.rst: -------------------------------------------------------------------------------- 1 | ************************ 2 | focalplane Documentation 3 | ************************ 4 | 5 | This is the documentation for focalplane. 6 | 7 | Reference/API 8 | ============= 9 | 10 | .. automodapi:: focalplane 11 | -------------------------------------------------------------------------------- /.rtd-environment.yml: -------------------------------------------------------------------------------- 1 | name: packagename 2 | 3 | channels: 4 | - http://ssb.stsci.edu/astroconda 5 | 6 | dependencies: 7 | - setuptools 8 | - numpy 9 | - python>=3.0 10 | - pip: 11 | - sphinx-automodapi 12 | - stsci_rtd_theme 13 | -------------------------------------------------------------------------------- /focalplane/example_subpkg/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This is the docstring for the examplesubpkg package. Normally you would 3 | have whatever.py files in this directory implementing some modules, but this 4 | is just an example sub-package, so it doesn't actually do anything. 5 | """ 6 | -------------------------------------------------------------------------------- /docs/_templates/autosummary/base.rst: -------------------------------------------------------------------------------- 1 | {% extends "autosummary_core/base.rst" %} 2 | {# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core. If you want to modify this template, it is strongly recommended that you still inherit from the astropy template. #} -------------------------------------------------------------------------------- /docs/_templates/autosummary/class.rst: -------------------------------------------------------------------------------- 1 | {% extends "autosummary_core/class.rst" %} 2 | {# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core. If you want to modify this template, it is strongly recommended that you still inherit from the astropy template. #} -------------------------------------------------------------------------------- /CITATION: -------------------------------------------------------------------------------- 1 | See http://journals.aas.org/authors/references.html#Software on how to 2 | cite software. 3 | 4 | GitHub has Zenodo DOI integration: 5 | https://guides.github.com/activities/citable-code/ 6 | 7 | Also see https://swcarpentry.github.io/git-novice/12-citation/ 8 | for further reading. 
9 | -------------------------------------------------------------------------------- /docs/_templates/autosummary/module.rst: -------------------------------------------------------------------------------- 1 | {% extends "autosummary_core/module.rst" %} 2 | {# The template this is inherited from is in astropy/sphinx/ext/templates/autosummary_core. If you want to modify this template, it is strongly recommended that you still inherit from the astropy template. #} -------------------------------------------------------------------------------- /focalplane/data/README.rst: -------------------------------------------------------------------------------- 1 | Data directory 2 | ============== 3 | 4 | This directory contains data files included with the package source 5 | code distribution. Note that this is intended only for relatively small files 6 | - large files should be externally hosted and downloaded as needed. 7 | 8 | -------------------------------------------------------------------------------- /focalplane/tests/setup_package.py: -------------------------------------------------------------------------------- 1 | # import os 2 | 3 | # If this package has test data in the tests/data directory, add them to 4 | # the paths here, see the commented example 5 | paths = ['coveragerc', 6 | # os.path.join('data', '*fits') 7 | ] 8 | 9 | def get_package_data(): 10 | return { 11 | _ASTROPY_PACKAGE_NAME_ + '.tests': paths} 12 | -------------------------------------------------------------------------------- /focalplane/extern/__init__.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | """ 3 | This package contains Python packages that are bundled with the package but 4 | are external to it, and hence are developed in a separate source tree. Note 5 | that this package is distinct from the /cextern directory of the source code 6 | distribution, as that directory only contains C extension code. 7 | """ 8 | -------------------------------------------------------------------------------- /licenses/README.rst: -------------------------------------------------------------------------------- 1 | Licenses 2 | ======== 3 | 4 | This directory holds license and credit information for the package, 5 | works the package is derived from, and/or datasets. 6 | 7 | Ensure that you pick a package licence which is in this folder and that it 8 | matches the one mentioned in the top-level README.rst file. If you are using 9 | the pre-rendered version of this template, check for the word 'Other' in the README. 10 | -------------------------------------------------------------------------------- /focalplane/tests/test_example.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | def test_primes(): 5 | from ..example_mod import primes 6 | assert primes(10) == [2, 3, 5, 7, 11, 13, 17, 19, 23, 29] 7 | 8 | 9 | def test_deprecation(): 10 | import warnings 11 | warnings.warn( 12 | "This is deprecated, but shouldn't raise an exception, unless " 13 | "enable_deprecations_as_exceptions() is called from conftest.py", 14 | DeprecationWarning) 15 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Documentation 2 | ============= 3 | 4 | This is the documentation for focalplane. 5 | Tools for focal plane geometric calibration of astronomical telescopes. 
6 | 7 | .. toctree:: 8 | :maxdepth: 2 9 | 10 | focalplane/index.rst 11 | 12 | .. note:: The layout of this directory is simply a suggestion. To follow 13 | traditional practice, do *not* edit this page, but instead place 14 | all documentation for the package inside ``focalplane/``. 15 | You can follow this practice or choose your own layout. 16 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | Please open a new issue or new pull request for bugs, feedback, or new features you would like to see. If there is an issue you would like to work on, please leave a comment and we will be happy to assist. New contributions and contributors are very welcome! 2 | 3 | New to GitHub or open source projects? If you are unsure about where to start or haven't used GitHub before, please feel free to contact the package maintainers. 4 | 5 | Feedback and feature requests? Is there something missing you would like to see? Please open an issue or send an email to the maintainers. This package follows the Spacetelescope [Code of Conduct](CODE_OF_CONDUCT.md) and strives to provide a welcoming community to all of our users and contributors. 6 | -------------------------------------------------------------------------------- /focalplane/tests/coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = {packagename} 3 | omit = 4 | {packagename}/_astropy_init* 5 | {packagename}/conftest* 6 | {packagename}/cython_version* 7 | {packagename}/setup_package* 8 | {packagename}/*/setup_package* 9 | {packagename}/*/*/setup_package* 10 | {packagename}/tests/* 11 | {packagename}/*/tests/* 12 | {packagename}/*/*/tests/* 13 | {packagename}/version* 14 | 15 | [report] 16 | exclude_lines = 17 | # Have to re-enable the standard pragma 18 | pragma: no cover 19 | 20 | # Don't complain about packages we have installed 21 | except ImportError 22 | 23 | # Don't complain if tests don't hit assertions 24 | raise AssertionError 25 | raise NotImplementedError 26 | 27 | # Don't complain about script hooks 28 | def main\(.*\): 29 | 30 | # Ignore branches that don't pertain to this version of Python 31 | pragma: py{ignore_python_version} -------------------------------------------------------------------------------- /focalplane/_astropy_init.py: -------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | 3 | __all__ = ['__version__', '__githash__'] 4 | 5 | # this indicates whether or not we are in the package's setup.py 6 | try: 7 | _ASTROPY_SETUP_ 8 | except NameError: 9 | from sys import version_info 10 | if version_info[0] >= 3: 11 | import builtins 12 | else: 13 | import __builtin__ as builtins 14 | builtins._ASTROPY_SETUP_ = False 15 | 16 | try: 17 | from .version import version as __version__ 18 | except ImportError: 19 | __version__ = '' 20 | try: 21 | from .version import githash as __githash__ 22 | except ImportError: 23 | __githash__ = '' 24 | 25 | 26 | if not _ASTROPY_SETUP_: # noqa 27 | import os 28 | 29 | # Create the test function for self test 30 | from astropy.tests.runner import TestRunner 31 | test = TestRunner.make_test_runner_in(os.path.dirname(__file__)) 32 | __all__ += ['test'] 33 | -------------------------------------------------------------------------------- /focalplane/__init__.py: 
-------------------------------------------------------------------------------- 1 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 2 | 3 | # Packages may add whatever they like to this file, but 4 | # should keep this content at the top. 5 | # ---------------------------------------------------------------------------- 6 | from ._astropy_init import * 7 | # ---------------------------------------------------------------------------- 8 | 9 | # Enforce Python version check during package import. 10 | # This is the same check as the one at the top of setup.py 11 | import sys 12 | 13 | __minimum_python_version__ = "3.5" 14 | 15 | class UnsupportedPythonError(Exception): 16 | pass 17 | 18 | if sys.version_info < tuple((int(val) for val in __minimum_python_version__.split('.'))): 19 | raise UnsupportedPythonError("focalplane does not support Python < {}".format(__minimum_python_version__)) 20 | 21 | if not _ASTROPY_SETUP_: 22 | # For egg_info test builds to pass, put package imports here. 23 | from .example_mod import * 24 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled files 2 | *.py[cod] 3 | *.a 4 | *.o 5 | *.so 6 | __pycache__ 7 | 8 | # Ignore .c files by default to avoid including generated code. If you want to 9 | # add a non-generated .c extension, use `git add -f filename.c`. 10 | *.c 11 | 12 | # Other generated files 13 | */version.py 14 | */cython_version.py 15 | htmlcov 16 | .coverage 17 | MANIFEST 18 | .ipynb_checkpoints 19 | 20 | # Sphinx 21 | docs/api 22 | docs/_build 23 | 24 | # Eclipse editor project files 25 | .project 26 | .pydevproject 27 | .settings 28 | 29 | # Pycharm editor project files 30 | .idea 31 | 32 | # Floobits project files 33 | .floo 34 | .flooignore 35 | 36 | # Packages/installer info 37 | *.egg 38 | *.egg-info 39 | dist 40 | build 41 | eggs 42 | parts 43 | bin 44 | var 45 | sdist 46 | develop-eggs 47 | .installed.cfg 48 | distribute-*.tar.gz 49 | 50 | # Other 51 | .cache 52 | .tox 53 | .*.sw[op] 54 | *~ 55 | .project 56 | .pydevproject 57 | .settings 58 | 59 | # Mac OSX 60 | .DS_Store 61 | v 62 | nearby_sources.vot 63 | -------------------------------------------------------------------------------- /focalplane/tests/test_utils.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from astropy.table import Table 4 | from astropy.time import Time 5 | from astroquery.gaia import Gaia 6 | import pytest 7 | 8 | from ..utils import correct_for_proper_motion 9 | 10 | ON_TRAVIS = os.environ.get('TRAVIS') == 'true' 11 | 12 | local_dir = os.path.dirname(os.path.abspath(__file__)) 13 | 14 | @pytest.mark.skipif(ON_TRAVIS, reason='timeout issue.') 15 | def test_pm_correction(): 16 | query = """SELECT * FROM gaiadr2.gaia_source AS gaia WHERE gaia.parallax > 200 AND gaia.parallax < 205""" 17 | 18 | output_file = os.path.join(local_dir, 'nearby_sources.vot') 19 | overwrite = True 20 | 21 | if not os.path.isfile(output_file) or overwrite: 22 | job = Gaia.launch_job_async(query, dump_to_file=True, output_file=output_file) 23 | table = job.get_results() 24 | else: 25 | table = Table.read(output_file) 26 | print('Retrieved {} sources'.format(len(table))) 27 | 28 | target_epoch = Time(2017, format='jyear') 29 | corrected_table = correct_for_proper_motion(table, target_epoch) 30 | 31 | assert len(corrected_table) == len(table) 
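    # Additional sanity check (a hedged sketch, based on the column naming
    # implemented in focalplane.utils.correct_for_proper_motion): the corrected
    # table should carry the new epoch-propagated coordinate columns.
    for colname in ['ra', 'dec', 'ra_error', 'dec_error']:
        assert '{}_{:3.1f}'.format(colname, target_epoch.jyear) in corrected_table.colnames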
-------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst 2 | include CHANGES.rst 3 | 4 | include ah_bootstrap.py 5 | include setup.cfg 6 | include focalplane/tests/coveragerc 7 | 8 | recursive-include focalplane *.pyx *.c *.pxd 9 | recursive-include docs * 10 | recursive-include licenses * 11 | recursive-include cextern * 12 | recursive-include scripts * 13 | 14 | prune build 15 | prune docs/_build 16 | prune docs/api 17 | 18 | 19 | # the next few stanzas are for astropy_helpers. It's derived from the 20 | # astropy_helpers/MANIFEST.in, but requires additional includes for the actual 21 | # package directory and egg-info. 22 | 23 | include astropy_helpers/README.rst 24 | include astropy_helpers/CHANGES.rst 25 | include astropy_helpers/LICENSE.rst 26 | recursive-include astropy_helpers/licenses * 27 | 28 | include astropy_helpers/ah_bootstrap.py 29 | 30 | recursive-include astropy_helpers/astropy_helpers *.py *.pyx *.c *.h *.rst 31 | recursive-include astropy_helpers/astropy_helpers.egg-info * 32 | # include the sphinx stuff with "*" because there are css/html/rst/etc. 33 | recursive-include astropy_helpers/astropy_helpers/sphinx * 34 | 35 | prune astropy_helpers/build 36 | prune astropy_helpers/astropy_helpers/tests 37 | 38 | 39 | global-exclude *.pyc *.o 40 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Build Status](https://travis-ci.org/spacetelescope/focalplane.svg?branch=master)](https://travis-ci.org/spacetelescope/focalplane) 2 | [![PyPI version](https://badge.fury.io/py/focalplane.svg)](https://badge.fury.io/py/focalplane) 3 | [![PyPI - License](https://img.shields.io/pypi/l/focalplane.svg)](https://github.com/spacetelescope/focalplane/blob/master/LICENSE.md) 4 | [![DOI](https://zenodo.org/badge/208096894.svg)](https://zenodo.org/badge/latestdoi/208096894) 5 | 6 | > [!WARNING] 7 | > This repository has been archived and is no longer being maintained. 8 | > Please see the report below for more information. 9 | > 10 | > You can submit additional questions to our help desk at https://stsci.service-now.com/stars 11 | 12 | # Tools for focal plane geometric calibration of astronomical telescopes 13 | 14 | 15 | 16 | ### Example usage 17 | A minimal sketch is shown below, after the *Contributing* section. 18 | ### Documentation 19 | 20 | 21 | Sahlmann, J. et al., 2019, A comprehensive approach to HST focal plane geometric calibration, Instrument Science Report TEL 2019-1, STScI 22 | 23 | https://ui.adsabs.harvard.edu/abs/2019tel..rept....1S/abstract 24 | 25 | ### Contributing 26 | Please open a new issue or new pull request for bugs, feedback, or new features you would like to see. If there is an issue you would like to work on, please leave a comment and we will be happy to assist. New contributions and contributors are very welcome! 
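A minimal usage sketch (names taken from `focalplane/utils.py` and `focalplane/tests/test_utils.py`; the Gaia query is only illustrative):

```python
from astropy.time import Time
from astroquery.gaia import Gaia

from focalplane.utils import correct_for_proper_motion

# Retrieve a small Gaia DR2 sample of nearby stars (illustrative query).
job = Gaia.launch_job_async(
    "SELECT * FROM gaiadr2.gaia_source AS gaia "
    "WHERE gaia.parallax > 200 AND gaia.parallax < 205")
table = job.get_results()

# Propagate the catalog positions from the Gaia reference epoch to 2017.0;
# the returned table keeps the original coordinates in *_original_* columns.
corrected_table = correct_for_proper_motion(table, Time(2017, format='jyear'))
```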
27 | 28 | 29 | ### References 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [build_sphinx] 2 | source-dir = docs 3 | build-dir = docs/_build 4 | all_files = 1 5 | 6 | [build_docs] 7 | source-dir = docs 8 | build-dir = docs/_build 9 | all_files = 1 10 | 11 | [upload_docs] 12 | upload-dir = docs/_build/html 13 | show-response = 1 14 | 15 | [tool:pytest] 16 | minversion = 3.0 17 | norecursedirs = build docs/_build 18 | doctest_plus = enabled 19 | addopts = -p no:warnings 20 | 21 | [ah_bootstrap] 22 | auto_use = True 23 | 24 | [flake8] 25 | exclude = extern,sphinx,*parsetab.py 26 | 27 | [pycodestyle] 28 | exclude = extern,sphinx,*parsetab.py 29 | 30 | [metadata] 31 | package_name = focalplane 32 | description = Tools for focal plane geometric calibration of astronomical telescopes. 33 | long_description = Tools for focal plane geometric calibration of astronomical telescopes. 34 | author = Space Telescope Science Institute 35 | license = Aura 36 | url = http://stsci.edu 37 | edit_on_github = False 38 | github_project = https://github.com/spacetelescope/focalplane 39 | 40 | # install_requires should be formatted as a comma-separated list, e.g.: 41 | install_requires = astropy, pystortion, pysiaf, pystrometry, pyia, photutils<0.7, pytest 42 | 43 | # version should be PEP440 compatible (https://www.python.org/dev/peps/pep-0440/) 44 | version = 0.0.dev 45 | # Note: you will also need to change this in your package's __init__.py 46 | minimum_python_version = 3.5 47 | 48 | [entry_points] 49 | 50 | astropy-package-template-example = focalplane.example_mod:main 51 | 52 | -------------------------------------------------------------------------------- /licenses/AURA.rst: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2019, Space Telescope Science Institute, AURA 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | * Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /licenses/TEMPLATE_LICENCE.rst: -------------------------------------------------------------------------------- 1 | This project is based upon the Astropy package template 2 | (https://github.com/astropy/package-template/) which is licenced under the terms 3 | of the following licence. 4 | 5 | --- 6 | 7 | Copyright (c) 2018, Astropy Developers 8 | All rights reserved. 9 | 10 | Redistribution and use in source and binary forms, with or without modification, 11 | are permitted provided that the following conditions are met: 12 | 13 | * Redistributions of source code must retain the above copyright notice, this 14 | list of conditions and the following disclaimer. 15 | * Redistributions in binary form must reproduce the above copyright notice, this 16 | list of conditions and the following disclaimer in the documentation and/or 17 | other materials provided with the distribution. 18 | * Neither the name of the Astropy Team nor the names of its contributors may be 19 | used to endorse or promote products derived from this software without 20 | specific prior written permission. 21 | 22 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 23 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 24 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 25 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR 26 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 27 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 28 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 29 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 30 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 31 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 32 | -------------------------------------------------------------------------------- /focalplane/example_mod.py: -------------------------------------------------------------------------------- 1 | def primes(imax): 2 | """ 3 | Return the first imax prime numbers. 4 | 5 | Parameters 6 | ---------- 7 | imax : int 8 | The number of primes to return. This should be less than or equal to 10000. 9 | 10 | Returns 11 | ------- 12 | result : list 13 | The list of prime numbers. 
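    Examples
    --------
    A small doctest-style illustration:

    >>> primes(5)
    [2, 3, 5, 7, 11]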
14 | """ 15 | 16 | p = list(range(10000)) 17 | result = [] 18 | k = 0 19 | n = 2 20 | 21 | if imax > 10000: 22 | raise ValueError("imax should be <= 10000") 23 | 24 | while len(result) < imax: 25 | i = 0 26 | while i < k and n % p[i] != 0: 27 | i = i + 1 28 | if i == k: 29 | p[k] = n 30 | k = k + 1 31 | result.append(n) 32 | if k > 10000: 33 | break 34 | n = n + 1 35 | 36 | return result 37 | 38 | 39 | def do_primes(n, usecython=False): 40 | if usecython: 41 | 42 | raise Exception("This template does not have the example C code included.") 43 | 44 | else: 45 | print('Using pure python primes') 46 | return primes(n) 47 | 48 | 49 | def main(args=None): 50 | 51 | from astropy.utils.compat import argparse 52 | from time import time 53 | 54 | parser = argparse.ArgumentParser(description='Process some integers.') 55 | parser.add_argument('-c', '--use-cython', dest='cy', action='store_true', 56 | help='Use the Cython-based Prime number generator.') 57 | parser.add_argument('-t', '--timing', dest='time', action='store_true', 58 | help='Time the Fibonacci generator.') 59 | parser.add_argument('-p', '--print', dest='prnt', action='store_true', 60 | help='Print all of the Prime numbers.') 61 | parser.add_argument('n', metavar='N', type=int, 62 | help='Get Prime numbers up to this number.') 63 | 64 | res = parser.parse_args(args) 65 | 66 | pre = time() 67 | primes = do_primes(res.n, res.cy) 68 | post = time() 69 | 70 | print('Found {0} prime numbers'.format(len(primes))) 71 | print('Largest prime: {0}'.format(primes[-1])) 72 | 73 | if res.time: 74 | print('Running time: {0} s'.format(post - pre)) 75 | 76 | if res.prnt: 77 | print('Primes: {0}'.format(primes)) 78 | -------------------------------------------------------------------------------- /_README.rst: -------------------------------------------------------------------------------- 1 | Tools for focal plane geometric calibration of astronomical telescopes. 2 | ----------------------------------------------------------------------- 3 | 4 | .. image:: http://img.shields.io/badge/powered%20by-AstroPy-orange.svg?style=flat 5 | :target: http://www.astropy.org 6 | :alt: Powered by Astropy Badge 7 | 8 | Tools for focal plane geometric calibration of astronomical telescopes. 9 | 10 | 11 | License 12 | ------- 13 | 14 | This project is Copyright (c) Space Telescope Science Institute and licensed under 15 | the terms of the Aura license. This package is based upon 16 | the `Astropy package template `_ 17 | which is licensed under the BSD 3-clause licence. See the licenses folder for 18 | more information. 19 | 20 | 21 | Contributing 22 | ------------ 23 | 24 | We love contributions! focalplane is open source, 25 | built on open source, and we'd love to have you hang out in our community. 26 | 27 | **Imposter syndrome disclaimer**: We want your help. No, really. 28 | 29 | There may be a little voice inside your head that is telling you that you're not 30 | ready to be an open source contributor; that your skills aren't nearly good 31 | enough to contribute. What could you possibly offer a project like this one? 32 | 33 | We assure you - the little voice in your head is wrong. If you can write code at 34 | all, you can contribute code to open source. Contributing to open source 35 | projects is a fantastic way to advance one's coding skills. Writing perfect code 36 | isn't the measure of a good developer (that would disqualify all of us!); it's 37 | trying to create something, making mistakes, and learning from those 38 | mistakes. 
That's how we all improve, and we are happy to help others learn. 39 | 40 | Being an open source contributor doesn't just mean writing code, either. You can 41 | help out by writing documentation, tests, or even giving feedback about the 42 | project (and yes - that includes giving feedback about the contribution 43 | process). Some of these contributions may be the most valuable to the project as 44 | a whole, because you're coming to the project with fresh eyes, so you can see 45 | the errors and assumptions that seasoned contributors have glossed over. 46 | 47 | *This disclaimer was originally written by 48 | `Adrienne Lowe <https://github.com/adriennefriend>`_ for a 49 | `PyCon talk <https://www.youtube.com/watch?v=6Uj746j9Heo>`_, and was adapted by 50 | focalplane based on its use in the README file for the 51 | `MetPy project <https://github.com/Unidata/MetPy>`_.* 52 | -------------------------------------------------------------------------------- /focalplane/conftest.py: -------------------------------------------------------------------------------- 1 | # This file is used to configure the behavior of pytest when using the Astropy 2 | # test infrastructure. 3 | 4 | from astropy.version import version as astropy_version 5 | if astropy_version < '3.0': 6 | # With older versions of Astropy, we actually need to import the pytest 7 | # plugins themselves in order to make them discoverable by pytest. 8 | from astropy.tests.pytest_plugins import * 9 | else: 10 | # As of Astropy 3.0, the pytest plugins provided by Astropy are 11 | # automatically made available when Astropy is installed. This means it's 12 | # not necessary to import them here, but we still need to import global 13 | # variables that are used for configuration. 14 | from astropy.tests.plugins.display import PYTEST_HEADER_MODULES, TESTED_VERSIONS 15 | 16 | from astropy.tests.helper import enable_deprecations_as_exceptions 17 | 18 | ## Uncomment the following line to treat all DeprecationWarnings as 19 | ## exceptions. For Astropy v2.0 or later, there are 2 additional keywords, 20 | ## as follows (although the default should work for most cases). 21 | ## To ignore some packages that produce deprecation warnings on import 22 | ## (in addition to 'compiler', 'scipy', 'pygments', 'ipykernel', and 23 | ## 'setuptools'), add: 24 | ## modules_to_ignore_on_import=['module_1', 'module_2'] 25 | ## To ignore some specific deprecation warning messages for Python version 26 | ## MAJOR.MINOR or later, add: 27 | ## warnings_to_ignore_by_pyver={(MAJOR, MINOR): ['Message to ignore']} 28 | # enable_deprecations_as_exceptions() 29 | 30 | ## Uncomment and customize the following lines to add/remove entries from 31 | ## the list of packages for which version numbers are displayed when running 32 | ## the tests. Making it pass for KeyError is essential in some cases when 33 | ## the package uses other astropy affiliated packages. 34 | # try: 35 | # PYTEST_HEADER_MODULES['Astropy'] = 'astropy' 36 | # PYTEST_HEADER_MODULES['scikit-image'] = 'skimage' 37 | # del PYTEST_HEADER_MODULES['h5py'] 38 | # except (NameError, KeyError): # NameError is needed to support Astropy < 1.0 39 | # pass 40 | 41 | ## Uncomment the following lines to display the version number of the 42 | ## package rather than the version number of Astropy in the top line when 43 | ## running the tests. 
44 | # import os 45 | # 46 | ## This is to figure out the package version, rather than 47 | ## using Astropy's 48 | # try: 49 | # from .version import version 50 | # except ImportError: 51 | # version = 'dev' 52 | # 53 | # try: 54 | # packagename = os.path.basename(os.path.dirname(__file__)) 55 | # TESTED_VERSIONS[packagename] = version 56 | # except NameError: # Needed to support Astropy <= 1.0.0 57 | # pass 58 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Spacetelescope Open Source Code of Conduct 2 | 3 | We expect all "spacetelescope" organization projects to adopt a code of conduct that ensures a productive, respectful environment for all open source contributors and participants. We are committed to providing a strong and enforced code of conduct and expect everyone in our community to follow these guidelines when interacting with others in all forums. Our goal is to keep ours a positive, inclusive, successful, and growing community. The community of participants in open source Astronomy projects is made up of members from around the globe with a diverse set of skills, personalities, and experiences. It is through these differences that our community experiences success and continued growth. 4 | 5 | 6 | As members of the community, 7 | 8 | - We pledge to treat all people with respect and provide a harassment- and bullying-free environment, regardless of sex, sexual orientation and/or gender identity, disability, physical appearance, body size, race, nationality, ethnicity, and religion. In particular, sexual language and imagery, sexist, racist, or otherwise exclusionary jokes are not appropriate. 9 | 10 | - We pledge to respect the work of others by recognizing acknowledgment/citation requests of original authors. As authors, we pledge to be explicit about how we want our own work to be cited or acknowledged. 11 | 12 | - We pledge to welcome those interested in joining the community, and realize that including people with a variety of opinions and backgrounds will only serve to enrich our community. In particular, discussions relating to pros/cons of various technologies, programming languages, and so on are welcome, but these should be done with respect, taking proactive measures to ensure that all participants are heard and feel confident that they can freely express their opinions. 13 | 14 | - We pledge to welcome questions and answer them respectfully, paying particular attention to those new to the community. We pledge to provide respectful criticisms and feedback in forums, especially in discussion threads resulting from code contributions. 15 | 16 | - We pledge to be conscientious of the perceptions of the wider community and to respond to criticism respectfully. We will strive to model behaviors that encourage productive debate and disagreement, both within our community and where we are criticized. We will treat those outside our community with the same respect as people within our community. 17 | 18 | - We pledge to help the entire community follow the code of conduct, and to not remain silent when we see violations of the code of conduct. We will take action when members of our community violate this code, such as contacting conduct@stsci.edu (all emails sent to this address will be treated with the strictest confidence) or talking privately with the person. 
19 | 20 | This code of conduct applies to all community situations online and offline, including mailing lists, forums, social media, conferences, meetings, associated social events, and one-to-one interactions. 21 | 22 | Parts of this code of conduct have been adapted from the Astropy and Numfocus codes of conduct. 23 | http://www.astropy.org/code_of_conduct.html 24 | https://www.numfocus.org/about/code-of-conduct/ 25 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | if NOT "%PAPER%" == "" ( 11 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 12 | ) 13 | 14 | 15 | if "%1" == "" goto help 16 | 17 | if "%1" == "help" ( 18 | :help 19 | echo.Please use `make ^<target^>` where ^<target^> is one of 20 | echo. html to make standalone HTML files 21 | echo. dirhtml to make HTML files named index.html in directories 22 | echo. pickle to make pickle files 23 | echo. json to make JSON files 24 | echo. htmlhelp to make HTML files and an HTML help project 25 | echo. qthelp to make HTML files and a qthelp project 26 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 27 | echo. changes to make an overview over all changed/added/deprecated items 28 | echo. linkcheck to check all external links for integrity 29 | echo. doctest to run all doctests embedded in the documentation if enabled 30 | goto end 31 | ) 32 | 33 | if "%1" == "clean" ( 34 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 35 | del /q /s %BUILDDIR%\* 36 | goto end 37 | ) 38 | 39 | if "%1" == "html" ( 40 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 41 | if errorlevel 1 exit /b 1 42 | echo. 43 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 44 | goto end 45 | ) 46 | 47 | if "%1" == "dirhtml" ( 48 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 49 | echo. 50 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 51 | goto end 52 | ) 53 | 54 | if "%1" == "pickle" ( 55 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 56 | echo. 57 | echo.Build finished; now you can process the pickle files. 58 | goto end 59 | ) 60 | 61 | if "%1" == "json" ( 62 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 63 | echo. 64 | echo.Build finished; now you can process the JSON files. 65 | goto end 66 | ) 67 | 68 | if "%1" == "htmlhelp" ( 69 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 70 | echo. 71 | echo.Build finished; now you can run HTML Help Workshop with the ^ 72 | .hhp project file in %BUILDDIR%/htmlhelp. 73 | goto end 74 | ) 75 | 76 | if "%1" == "qthelp" ( 77 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 78 | echo. 79 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 80 | .qhcp project file in %BUILDDIR%/qthelp, like this: 81 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\packagename.qhcp 82 | echo.To view the help file: 83 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\packagename.ghc 84 | goto end 85 | ) 86 | 87 | if "%1" == "latex" ( 88 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 89 | echo. 90 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 
91 | goto end 92 | ) 93 | 94 | if "%1" == "changes" ( 95 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 96 | echo. 97 | echo.The overview file is in %BUILDDIR%/changes. 98 | goto end 99 | ) 100 | 101 | if "%1" == "linkcheck" ( 102 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 103 | echo. 104 | echo.Link check complete; look for any errors in the above output ^ 105 | or in %BUILDDIR%/linkcheck/output.txt. 106 | goto end 107 | ) 108 | 109 | if "%1" == "doctest" ( 110 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 111 | echo. 112 | echo.Testing of doctests in the sources finished, look at the ^ 113 | results in %BUILDDIR%/doctest/output.txt. 114 | goto end 115 | ) 116 | 117 | :end 118 | -------------------------------------------------------------------------------- /docs/exts/numfig.py: -------------------------------------------------------------------------------- 1 | from docutils.nodes import figure, caption, Text, reference, raw, SkipNode, Element 2 | from sphinx.roles import XRefRole 3 | 4 | 5 | # Element classes 6 | 7 | class page_ref(reference): 8 | pass 9 | 10 | class num_ref(reference): 11 | pass 12 | 13 | 14 | # Visit/depart functions 15 | 16 | def skip_page_ref(self, node): 17 | raise SkipNode 18 | 19 | def latex_visit_page_ref(self, node): 20 | self.body.append("\\pageref{%s:%s}" % (node['refdoc'], node['reftarget'])) 21 | raise SkipNode 22 | 23 | def latex_visit_num_ref(self, node): 24 | fields = node['reftarget'].split('#') 25 | if len(fields) > 1: 26 | label, target = fields 27 | ref_link = '%s:%s' % (node['refdoc'], target) 28 | latex = "\\hyperref[%s]{%s \\ref*{%s}}" % (ref_link, label, ref_link) 29 | self.body.append(latex) 30 | else: 31 | self.body.append('\\ref{%s:%s}' % (node['refdoc'], fields[0])) 32 | 33 | raise SkipNode 34 | 35 | 36 | def doctree_read(app, doctree): 37 | # first generate figure numbers for each figure 38 | env = app.builder.env 39 | figid_docname_map = getattr(env, 'figid_docname_map', {}) 40 | 41 | for figure_info in doctree.traverse(figure): 42 | for id in figure_info['ids']: 43 | figid_docname_map[id] = env.docname 44 | 45 | env.figid_docname_map = figid_docname_map 46 | 47 | 48 | def doctree_resolved(app, doctree, docname): 49 | i = 1 50 | figids = {} 51 | for figure_info in doctree.traverse(figure): 52 | if app.builder.name != 'latex' and app.config.number_figures: 53 | for cap in figure_info.traverse(caption): 54 | cap[0] = Text("%s %d: %s" % (app.config.figure_caption_prefix, i, cap[0])) 55 | 56 | for id in figure_info['ids']: 57 | figids[id] = i 58 | 59 | i += 1 60 | 61 | 62 | # replace numfig nodes with links 63 | if app.builder.name != 'latex': 64 | for ref_info in doctree.traverse(num_ref): 65 | if '#' in ref_info['reftarget']: 66 | label, target = ref_info['reftarget'].split('#') 67 | labelfmt = label + " %d" 68 | else: 69 | labelfmt = '%d' 70 | target = ref_info['reftarget'] 71 | 72 | if target not in figids: 73 | continue 74 | 75 | if app.builder.name == 'html': 76 | target_doc = app.builder.env.figid_docname_map[target] 77 | link = "%s#%s" % (app.builder.get_relative_uri(docname, target_doc), 78 | target) 79 | html = '<a href="%s">%s</a>' % (link, labelfmt % (figids[target])) 80 | ref_info.replace_self(raw(html, html, format='html')) 81 | else: 82 | ref_info.replace_self(Text(labelfmt % (figids[target]))) 83 | 84 | 85 | def clean_env(app): 86 | app.builder.env.i=1 87 | app.builder.env.figid_docname_map = {} 88 | 89 | def setup(app): 90 | app.add_config_value('number_figures', True, True) 91 | 
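    # 'number_figures' toggles the automatic numbering of figure captions in
    # non-LaTeX builders; 'figure_caption_prefix' is the word rendered before
    # the figure number in captions (see doctree_resolved above).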
app.add_config_value('figure_caption_prefix', "Figure", True) 92 | 93 | app.add_node(page_ref, 94 | text=(skip_page_ref, None), 95 | html=(skip_page_ref, None), 96 | latex=(latex_visit_page_ref, None)) 97 | 98 | app.add_role('page', XRefRole(nodeclass=page_ref)) 99 | 100 | app.add_node(num_ref, 101 | latex=(latex_visit_num_ref, None)) 102 | 103 | app.add_role('num', XRefRole(nodeclass=num_ref)) 104 | 105 | app.connect("builder-inited", clean_env) 106 | app.connect('doctree-read', doctree_read) 107 | app.connect('doctree-resolved', doctree_resolved) 108 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 14 | 15 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest 16 | 17 | #This is needed with git because git doesn't create a dir if it's empty 18 | $(shell [ -d "_static" ] || mkdir -p _static) 19 | 20 | help: 21 | @echo "Please use \`make <target>' where <target> is one of" 22 | @echo " html to make standalone HTML files" 23 | @echo " dirhtml to make HTML files named index.html in directories" 24 | @echo " singlehtml to make a single large HTML file" 25 | @echo " pickle to make pickle files" 26 | @echo " json to make JSON files" 27 | @echo " htmlhelp to make HTML files and an HTML help project" 28 | @echo " qthelp to make HTML files and a qthelp project" 29 | @echo " devhelp to make HTML files and a Devhelp project" 30 | @echo " epub to make an epub" 31 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 32 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 33 | @echo " text to make text files" 34 | @echo " man to make manual pages" 35 | @echo " changes to make an overview of all changed/added/deprecated items" 36 | @echo " linkcheck to check all external links for integrity" 37 | 38 | clean: 39 | -rm -rf $(BUILDDIR) 40 | -rm -rf api 41 | -rm -rf generated 42 | 43 | html: 44 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 45 | @echo 46 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 47 | 48 | dirhtml: 49 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 50 | @echo 51 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 52 | 53 | singlehtml: 54 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 55 | @echo 56 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 57 | 58 | pickle: 59 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 60 | @echo 61 | @echo "Build finished; now you can process the pickle files." 62 | 63 | json: 64 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 65 | @echo 66 | @echo "Build finished; now you can process the JSON files." 67 | 68 | htmlhelp: 69 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 70 | @echo 71 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 72 | ".hhp project file in $(BUILDDIR)/htmlhelp." 
73 | 74 | qthelp: 75 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 76 | @echo 77 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 78 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 79 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Astropy.qhcp" 80 | @echo "To view the help file:" 81 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Astropy.qhc" 82 | 83 | devhelp: 84 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 85 | @echo 86 | @echo "Build finished." 87 | @echo "To view the help file:" 88 | @echo "# mkdir -p $$HOME/.local/share/devhelp/Astropy" 89 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Astropy" 90 | @echo "# devhelp" 91 | 92 | epub: 93 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 94 | @echo 95 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 96 | 97 | latex: 98 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 99 | @echo 100 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 101 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 102 | "(use \`make latexpdf' here to do that automatically)." 103 | 104 | latexpdf: 105 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 106 | @echo "Running LaTeX files through pdflatex..." 107 | make -C $(BUILDDIR)/latex all-pdf 108 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 109 | 110 | text: 111 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 112 | @echo 113 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 114 | 115 | man: 116 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 117 | @echo 118 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 119 | 120 | changes: 121 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 122 | @echo 123 | @echo "The overview file is in $(BUILDDIR)/changes." 124 | 125 | linkcheck: 126 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 127 | @echo 128 | @echo "Link check complete; look for any errors in the above output " \ 129 | "or in $(BUILDDIR)/linkcheck/output.txt." 130 | 131 | doctest: 132 | @echo "Run 'python setup.py test' in the root directory to run doctests " \ 133 | "in the documentation." 134 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Licensed under a 3-clause BSD style license - see LICENSE.rst 3 | 4 | import glob 5 | import os 6 | import sys 7 | 8 | from configparser import ConfigParser 9 | 10 | # Get some values from the setup.cfg 11 | conf = ConfigParser() 12 | conf.read(['setup.cfg']) 13 | metadata = dict(conf.items('metadata')) 14 | 15 | PACKAGENAME = metadata.get('package_name', 'focalplane') 16 | DESCRIPTION = metadata.get('description', 'Tools for focal plane geometric calibration of astronomical telescopes.') 17 | AUTHOR = metadata.get('author', 'Space Telescope Science Institute') 18 | AUTHOR_EMAIL = metadata.get('author_email', '') 19 | LICENSE = metadata.get('license', 'unknown') 20 | URL = metadata.get('url', 'http://stsci.edu') 21 | __minimum_python_version__ = metadata.get("minimum_python_version", "3.5") 22 | 23 | # Enforce Python version check - this is the same check as in __init__.py but 24 | # this one has to happen before importing ah_bootstrap. 
25 | if sys.version_info < tuple((int(val) for val in __minimum_python_version__.split('.'))): 26 | sys.stderr.write("ERROR: focalplane requires Python {} or later\n".format(__minimum_python_version__)) 27 | sys.exit(1) 28 | 29 | # Import ah_bootstrap after the python version validation 30 | 31 | import ah_bootstrap 32 | from setuptools import setup 33 | 34 | import builtins 35 | builtins._ASTROPY_SETUP_ = True 36 | 37 | from astropy_helpers.setup_helpers import (register_commands, get_debug_option, 38 | get_package_info) 39 | from astropy_helpers.git_helpers import get_git_devstr 40 | from astropy_helpers.version_helpers import generate_version_py 41 | 42 | 43 | # order of priority for long_description: 44 | # (1) set in setup.cfg, 45 | # (2) load LONG_DESCRIPTION.rst, 46 | # (3) load README.rst, 47 | # (4) package docstring 48 | readme_glob = 'README*' 49 | _cfg_long_description = metadata.get('long_description', '') 50 | if _cfg_long_description: 51 | LONG_DESCRIPTION = _cfg_long_description 52 | 53 | elif os.path.exists('LONG_DESCRIPTION.rst'): 54 | with open('LONG_DESCRIPTION.rst') as f: 55 | LONG_DESCRIPTION = f.read() 56 | 57 | elif len(glob.glob(readme_glob)) > 0: 58 | with open(glob.glob(readme_glob)[0]) as f: 59 | LONG_DESCRIPTION = f.read() 60 | 61 | else: 62 | # Get the long description from the package's docstring 63 | __import__(PACKAGENAME) 64 | package = sys.modules[PACKAGENAME] 65 | LONG_DESCRIPTION = package.__doc__ 66 | 67 | # Store the package name in a built-in variable so it's easy 68 | # to get from other parts of the setup infrastructure 69 | builtins._ASTROPY_PACKAGE_NAME_ = PACKAGENAME 70 | 71 | # VERSION should be PEP440 compatible (http://www.python.org/dev/peps/pep-0440) 72 | VERSION = metadata.get('version', '0.0.dev') 73 | 74 | # Indicates if this version is a release version 75 | RELEASE = 'dev' not in VERSION 76 | 77 | if not RELEASE: 78 | VERSION += get_git_devstr(False) 79 | 80 | # Populate the dict of setup command overrides; this should be done before 81 | # invoking any other functionality from distutils since it can potentially 82 | # modify distutils' behavior. 83 | cmdclassd = register_commands(PACKAGENAME, VERSION, RELEASE) 84 | 85 | # Freeze build information in version.py 86 | generate_version_py(PACKAGENAME, VERSION, RELEASE, 87 | get_debug_option(PACKAGENAME)) 88 | 89 | # Treat everything in scripts except README* as a script to be installed 90 | scripts = [fname for fname in glob.glob(os.path.join('scripts', '*')) 91 | if not os.path.basename(fname).startswith('README')] 92 | 93 | 94 | # Get configuration information from all of the various subpackages. 95 | # See the docstring for setup_helpers.update_package_files for more 96 | # details. 97 | package_info = get_package_info() 98 | 99 | # Add the project-global data 100 | package_info['package_data'].setdefault(PACKAGENAME, []) 101 | package_info['package_data'][PACKAGENAME].append('data/*') 102 | 103 | # Define entry points for command-line scripts 104 | entry_points = {'console_scripts': []} 105 | 106 | if conf.has_section('entry_points'): 107 | entry_point_list = conf.items('entry_points') 108 | for entry_point in entry_point_list: 109 | entry_points['console_scripts'].append('{0} = {1}'.format( 110 | entry_point[0], entry_point[1])) 111 | 112 | # Include all .c files, recursively, including those generated by 113 | # Cython, since we can not do this in MANIFEST.in with a "dynamic" 114 | # directory name. 
115 | c_files = [] 116 | for root, dirs, files in os.walk(PACKAGENAME): 117 | for filename in files: 118 | if filename.endswith('.c'): 119 | c_files.append( 120 | os.path.join( 121 | os.path.relpath(root, PACKAGENAME), filename)) 122 | package_info['package_data'][PACKAGENAME].extend(c_files) 123 | 124 | # Note that requires and provides should not be included in the call to 125 | # ``setup``, since these are now deprecated. See this link for more details: 126 | # https://groups.google.com/forum/#!topic/astropy-dev/urYO8ckB2uM 127 | 128 | setup(name=PACKAGENAME, 129 | version=VERSION, 130 | description=DESCRIPTION, 131 | scripts=scripts, 132 | install_requires=[s.strip() for s in metadata.get('install_requires', 'astropy').split(',')], 133 | author=AUTHOR, 134 | author_email=AUTHOR_EMAIL, 135 | license=LICENSE, 136 | url=URL, 137 | long_description=LONG_DESCRIPTION, 138 | cmdclass=cmdclassd, 139 | zip_safe=False, 140 | use_2to3=False, 141 | entry_points=entry_points, 142 | python_requires='>={}'.format(__minimum_python_version__), 143 | **package_info 144 | ) 145 | -------------------------------------------------------------------------------- /focalplane/utils.py: -------------------------------------------------------------------------------- 1 | import copy 2 | 3 | from astropy.table import Table 4 | import astropy.units as u 5 | from astropy.time import Time 6 | import numpy as np 7 | 8 | from pyia.data import GaiaData 9 | from pystrometry import pystrometry 10 | 11 | 12 | def correct_for_proper_motion(gaia_table, target_epoch, verbose=False, ignore_parallax=True): 13 | """Apply proper motion correction to an input Gaia catalog. 14 | 15 | Compute positions and uncertainties at an epoch other than the catalog epoch. 16 | 17 | Supports only the Gaia input catalog format, i.e. an astropy table with Gaia-named columns. 18 | 19 | TODO: 20 | ----- 21 | Does corrected_values['ra_error'] need to be corrected for the cos(delta) effect? 
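    Notes
    -----
    Sketch of the computation implemented below: pystrometry provides the
    five-parameter design matrix A(t) for the target epoch; the epoch offsets
    follow from [delta_ra*, delta_dec] = A(t)^T (0, 0, parallax, pm_ra*, pm_dec),
    and the corrected coordinates are dec(t) = dec_ref + delta_dec and
    ra(t) = ra_ref + delta_ra* / cos(dec(t)).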
22 | 23 | 24 | Parameters 25 | ---------- 26 | gaia_table : astropy.table.Table, the input Gaia catalog with Gaia-named columns. 27 | target_epoch : astropy.time.Time, the epoch to which the catalog positions are propagated. 28 | verbose : bool, if True print diagnostic output. 29 | ignore_parallax : bool 30 | If True, set parallax to zero to ignore its contribution to the offset 31 | (that offset is observer-dependent) 32 | 33 | Returns 34 | ------- 35 | gaia_table : astropy.table.Table, a copy of the input table with positions and uncertainties propagated to target_epoch; the catalog-epoch values are preserved in *_original_* columns. 36 | """ 37 | gaia_table = copy.deepcopy(gaia_table) 38 | 39 | DR2_REF_EPOCH = gaia_table['ref_epoch'][0] 40 | 41 | for attribute_name in 'ra dec ra_error dec_error'.split(): 42 | gaia_table[ 43 | '{}_original_{}'.format(attribute_name, DR2_REF_EPOCH)] = np.full( 44 | len(gaia_table), np.nan) 45 | gaia_table['{}_{:3.1f}'.format(attribute_name, target_epoch.jyear)] = np.full( 46 | len(gaia_table), np.nan) 47 | 48 | 49 | gaia_data = GaiaData(gaia_table) 50 | 51 | for i in range(len(gaia_table)): 52 | if (not np.isnan(gaia_table['parallax'][i])) and (not np.ma.is_masked(gaia_table['parallax'][i])): 53 | gaia_star = gaia_data[i] 54 | covariance_matrix_mas = gaia_star.get_cov(units=dict(ra=u.milliarcsecond, 55 | dec=u.milliarcsecond, 56 | parallax=u.milliarcsecond, 57 | pm_ra=u.milliarcsecond/u.year, 58 | pm_dec=u.milliarcsecond/u.year)) 59 | 60 | # remove radial velocity component 61 | covariance_matrix_mas = np.squeeze(covariance_matrix_mas)[0:5, 0:5] 62 | 63 | if verbose: 64 | print(covariance_matrix_mas) 65 | print(np.diag(covariance_matrix_mas)) 66 | tbl_names = ['ra', 'dec', 'parallax', 'pmra', 'pmdec'] 67 | for colname in tbl_names: 68 | print('{} = {}'.format(colname, getattr(gaia_star, colname))) 69 | err_colname = '{}_error'.format(colname) 70 | print('{} = {}'.format(err_colname, getattr(gaia_star, err_colname))) 71 | 72 | # helper object to get PPM coefficients 73 | T = Table() 74 | T['MJD'] = [target_epoch.utc.mjd] 75 | T['frame'] = 1 76 | T['OB'] = 1 77 | iad = pystrometry.ImagingAstrometryData(T) 78 | iad.RA_deg = gaia_star.ra.to(u.deg).value 79 | iad.Dec_deg = gaia_star.dec.to(u.deg).value 80 | 81 | # this step depends on the observer when computing parallax factors 82 | # set reference epoch properly 83 | # https://gea.esac.esa.int/archive/documentation/GDR2/Gaia_archive/chap_datamodel/sec_dm_main_tables/ssec_dm_gaia_source.html 84 | # ref_epoch : Reference epoch (double, Time[Julian Years]) 85 | # Reference epoch to which the astrometric source parameters are referred, expressed as a Julian Year in TCB. 86 | # At DR2 this reference epoch is always J2015.5 but in future releases this will be different and not necessarily the same for all sources. 87 | iad.set_five_parameter_coefficients(verbose=False, overwrite=False, 88 | reference_epoch_MJD=Time(gaia_star.ref_epoch[0], format='jyear', scale='tcb').utc.mjd) 89 | if verbose: 90 | print(iad.five_parameter_coefficients_table) 91 | print(iad.five_parameter_coefficients_array) 92 | 93 | if ignore_parallax: 94 | gaia_star.parallax = 0. 
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # STSCI documentation build configuration file, created by
4 | # sphinx-quickstart on Thu Oct 22 17:25:41 2015.
5 | #
6 | # This file is execfile()d with the current directory set to its
7 | # containing dir.
8 | #
9 | # Note that not all possible configuration values are present in this
10 | # autogenerated file.
11 | #
12 | # All configuration values have a default; values that are commented out
13 | # serve to show the default.
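# (A typical local build of these docs, assuming sphinx and the
# stsci_rtd_theme package imported below are available, is `make html` from
# the docs/ directory; the Makefile and make.bat alongside this file wrap
# sphinx-build.)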
14 | 
15 | import datetime
16 | import importlib
17 | import sys
18 | import os
19 | import sphinx
20 | import stsci_rtd_theme
21 | 
22 | def setup(app):
23 |     app.add_stylesheet("stsci.css")
24 | 
25 | 
26 | from distutils.version import LooseVersion
27 | try:
28 |     from ConfigParser import ConfigParser
29 | except ImportError:
30 |     from configparser import ConfigParser
31 | conf = ConfigParser()
32 | 
33 | # If extensions (or modules to document with autodoc) are in another directory,
34 | # add these directories to sys.path here. If the directory is relative to the
35 | # documentation root, use os.path.abspath to make it absolute, like shown here.
36 | sys.path.insert(0, os.path.abspath('../'))
37 | sys.path.insert(0, os.path.abspath('packagename/'))
38 | sys.path.insert(0, os.path.abspath('exts/'))
39 | 
40 | # -- General configuration ------------------------------------------------
41 | conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')])
42 | setup_cfg = dict(conf.items('metadata'))
43 | 
44 | # If your documentation needs a minimal Sphinx version, state it here.
45 | needs_sphinx = '1.3'
46 | 
47 | on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
48 | 
49 | 
50 | def check_sphinx_version(expected_version):
51 |     sphinx_version = LooseVersion(sphinx.__version__)
52 |     expected_version = LooseVersion(expected_version)
53 |     if sphinx_version < expected_version:
54 |         raise RuntimeError(
55 |             "At least Sphinx version {0} is required to build this "
56 |             "documentation. Found {1}.".format(
57 |                 expected_version, sphinx_version))
58 | 
59 | # Configuration for intersphinx: refer to the Python standard library.
60 | intersphinx_mapping = {
61 |     'python': ('http://docs.python.org/3/', None),
62 |     'numpy': ('http://docs.scipy.org/doc/numpy/', None),
63 |     'scipy': ('http://docs.scipy.org/doc/scipy/reference/', None),
64 |     'matplotlib': ('http://matplotlib.org/', None),
65 | }
66 | 
67 | if sys.version_info[0] == 2:
68 |     intersphinx_mapping['python'] = ('http://docs.python.org/2/', None)
69 |     intersphinx_mapping['pythonloc'] = (
70 |         'http://docs.python.org/',
71 |         os.path.abspath(os.path.join(os.path.dirname(__file__),
72 |                                      'local/python2_local_links.inv')))
73 | 
74 | # Add any Sphinx extension module names here, as strings. They can be
75 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
76 | # ones.
80 | extensions = [
81 |     'numfig',
82 |     'sphinx.ext.autodoc',
83 |     'sphinx.ext.intersphinx',
84 |     'sphinx.ext.todo',
85 |     'sphinx.ext.inheritance_diagram',
86 |     'sphinx.ext.viewcode',
87 |     'sphinx.ext.autosummary',
88 | ]
89 | 
90 | if on_rtd:
91 |     extensions.append('sphinx.ext.mathjax')
92 | 
93 | elif LooseVersion(sphinx.__version__) < LooseVersion('1.4'):
94 |     extensions.append('sphinx.ext.pngmath')
95 | else:
96 |     extensions.append('sphinx.ext.imgmath')
97 | 
98 | 
99 | # Add any paths that contain templates here, relative to this directory.
100 | # templates_path = ['_templates']
101 | 
102 | # The suffix of source filenames.
103 | source_suffix = '.rst'
104 | 
105 | # The encoding of source files.
106 | # source_encoding = 'utf-8-sig'
107 | 
108 | # The master toctree document.
109 | master_doc = 'index'
110 | 
111 | # A list of warning types to suppress arbitrary warning messages.
We mean to 112 | # override directives in astropy_helpers.sphinx.ext.autodoc_enhancements, 113 | # thus need to ignore those warning. This can be removed once the patch gets 114 | # released in upstream Sphinx (https://github.com/sphinx-doc/sphinx/pull/1843). 115 | # Suppress the warnings requires Sphinx v1.4.2 116 | suppress_warnings = ['app.add_directive', ] 117 | 118 | 119 | # General information about the project 120 | project = setup_cfg['package_name'] 121 | author = setup_cfg['author'] 122 | copyright = '{0}, {1}'.format(datetime.datetime.now().year, author) 123 | 124 | # The version info for the project you're documenting, acts as replacement for 125 | # |version| and |release|, also used in various other places throughout the 126 | # build documents. 127 | # 128 | # The short X.Y version. 129 | package = importlib.import_module(setup_cfg['package_name']) 130 | version = package.__version__.split('-', 1)[0] 131 | # The full version, including alpha/beta/rc tags. 132 | release = package.__version__ 133 | 134 | # The language for content autogenerated by Sphinx. Refer to documentation 135 | # for a list of supported languages. 136 | # language = None 137 | 138 | # There are two options for replacing |today|: either, you set today to some 139 | # non-false value, then it is used: 140 | # today = '' 141 | # Else, today_fmt is used as the format for a strftime call. 142 | # today_fmt = '%B %d, %Y' 143 | 144 | # List of patterns, relative to source directory, that match files and 145 | # directories to ignore when looking for source files. 146 | exclude_patterns = ['_build'] 147 | 148 | # The reST default role (used for this markup: `text`) to use for all 149 | # documents. 150 | default_role = 'obj' 151 | 152 | 153 | # Don't show summaries of the members in each class along with the 154 | # class' docstring 155 | numpydoc_show_class_members = False 156 | 157 | autosummary_generate = True 158 | 159 | automodapi_toctreedirnm = 'api' 160 | 161 | # Class documentation should contain *both* the class docstring and 162 | # the __init__ docstring 163 | autoclass_content = "both" 164 | 165 | # Render inheritance diagrams in SVG 166 | graphviz_output_format = "svg" 167 | 168 | graphviz_dot_args = [ 169 | '-Nfontsize=10', 170 | '-Nfontname=Helvetica Neue, Helvetica, Arial, sans-serif', 171 | '-Efontsize=10', 172 | '-Efontname=Helvetica Neue, Helvetica, Arial, sans-serif', 173 | '-Gfontsize=10', 174 | '-Gfontname=Helvetica Neue, Helvetica, Arial, sans-serif' 175 | ] 176 | 177 | 178 | # If true, '()' will be appended to :func: etc. cross-reference text. 179 | # add_function_parentheses = True 180 | 181 | # If true, the current module name will be prepended to all description 182 | # unit titles (such as .. function::). 183 | # add_module_names = True 184 | 185 | # If true, sectionauthor and moduleauthor directives will be shown in the 186 | # output. They are ignored by default. 187 | # show_authors = False 188 | 189 | # The name of the Pygments (syntax highlighting) style to use. 190 | pygments_style = 'sphinx' 191 | 192 | # A list of ignored prefixes for module index sorting. 193 | # modindex_common_prefix = [] 194 | 195 | # If true, keep warnings as "system message" paragraphs in the built documents. 196 | # keep_warnings = False 197 | 198 | 199 | # -- Options for HTML output ---------------------------------------------- 200 | 201 | # The theme to use for HTML and HTML Help pages. See the documentation for 202 | # a list of builtin themes. 
203 | html_theme = 'stsci_rtd_theme'
204 | html_theme_path = [stsci_rtd_theme.get_html_theme_path()]
205 | 
206 | # The name for this set of Sphinx documents. If None, it defaults to
207 | # "<project> v<release> documentation".
208 | # html_title = None
209 | 
210 | # A shorter title for the navigation bar. Default is the same as html_title.
211 | # html_short_title = None
212 | 
213 | # The name of an image file (within the static path) to use as favicon of the
214 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
215 | # pixels large.
216 | # html_favicon = None
217 | 
218 | # Add any extra paths that contain custom files (such as robots.txt or
219 | # .htaccess) here, relative to this directory. These files are copied
220 | # directly to the root of the documentation.
221 | # html_extra_path = []
222 | 
223 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
224 | # using the given strftime format.
225 | html_last_updated_fmt = '%b %d, %Y'
226 | 
227 | # If true, SmartyPants will be used to convert quotes and dashes to
228 | # typographically correct entities.
229 | # html_use_smartypants = True
230 | 
231 | # Custom sidebar templates, maps document names to template names.
232 | html_sidebars = {'**': ['globaltoc.html', 'relations.html', 'searchbox.html']}
233 | 
234 | # Additional templates that should be rendered to pages, maps page names to
235 | # template names.
236 | # html_additional_pages = {}
237 | 
238 | # If false, no module index is generated.
239 | html_domain_indices = True
240 | 
241 | # If false, no index is generated.
242 | html_use_index = True
243 | 
244 | # If true, the index is split into individual pages for each letter.
245 | # html_split_index = False
246 | 
247 | # If true, links to the reST sources are added to the pages.
248 | # html_show_sourcelink = True
249 | 
250 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
251 | # html_show_sphinx = True
252 | 
253 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
254 | # html_show_copyright = True
255 | 
256 | # If true, an OpenSearch description file will be output, and all pages will
257 | # contain a <link> tag referring to it. The value of this option must be the
258 | # base URL from which the finished HTML is served.
259 | # html_use_opensearch = ''
260 | 
261 | # This is the file name suffix for HTML files (e.g. ".xhtml").
262 | # html_file_suffix = None
263 | 
264 | # Output file base name for HTML help builder.
265 | htmlhelp_basename = 'packagenamedoc'
266 | 
267 | 
268 | # -- Options for LaTeX output ---------------------------------------------
269 | 
270 | latex_elements = {
271 |     # The paper size ('letterpaper' or 'a4paper').
272 |     'papersize': 'letterpaper',
273 |     # The font size ('10pt', '11pt' or '12pt').
274 |     'pointsize': '14pt',
275 |     # Additional stuff for the LaTeX preamble.
276 |     'preamble': r'''\usepackage{enumitem} \setlistdepth{99}'''
277 | }
278 | 
279 | # Grouping the document tree into LaTeX files. List of tuples
280 | # (source start file, target name, title,
281 | #  author, documentclass [howto, manual, or own class]).
282 | latex_documents = [
283 |     ('index', 'packagename.tex', u'Packagename Documentation',
284 |      u'packagename', 'manual'),
285 | ]
286 | 
287 | # The name of an image file (relative to this directory) to place at the top of
288 | # the title page.
289 | 
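# (With the latex_documents entry above, a LaTeX build, e.g. `make latexpdf`
# from the docs/ directory, would write packagename.tex and the rendered PDF
# into the build directory.)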
290 | # For "manual" documents, if this is true, then toplevel headings are parts,
291 | # not chapters.
292 | # latex_use_parts = False
293 | 
294 | # If true, show page references after internal links.
295 | # latex_show_pagerefs = False
296 | 
297 | # If true, show URL addresses after external links.
298 | latex_show_urls = 'footnote'
299 | 
300 | # Documents to append as an appendix to all manuals.
301 | # latex_appendices = []
302 | 
303 | # If false, no module index is generated.
304 | latex_domain_indices = True
305 | 
306 | 
307 | # -- Options for manual page output ---------------------------------------
308 | 
309 | # One entry per manual page. List of tuples
310 | # (source start file, name, description, authors, manual section).
311 | man_pages = [
312 |     ('index', 'packagename', u'Packagename Documentation',
313 |      [u'packagename'], 1)
314 | ]
315 | 
316 | # If true, show URL addresses after external links.
317 | man_show_urls = True
318 | 
319 | 
320 | # -- Options for Texinfo output -------------------------------------------
321 | 
322 | # Grouping the document tree into Texinfo files. List of tuples
323 | # (source start file, target name, title, author,
324 | #  dir menu entry, description, category)
325 | texinfo_documents = [
326 |     ('index', 'packagename', u'Packagename Documentation',
327 |      u'packagename', 'packagename', 'Packagename',
328 |      'Miscellaneous'),
329 | ]
330 | 
331 | # Documents to append as an appendix to all manuals.
332 | # texinfo_appendices = []
333 | 
334 | # If false, no module index is generated.
335 | texinfo_domain_indices = True
336 | 
337 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
338 | texinfo_show_urls = 'inline'
339 | 
340 | # If true, do not generate a @detailmenu in the "Top" node's menu.
341 | # texinfo_no_detailmenu = False
342 | 
343 | 
344 | # -- Options for Epub output ----------------------------------------------
345 | 
346 | # Bibliographic Dublin Core info.
347 | epub_title = u'Packagename'
348 | epub_author = u'Author Name, STSCI'
349 | epub_publisher = u'STSCI'
350 | epub_copyright = u'2016 STScI'
351 | 
352 | # The basename for the epub file. It defaults to the project name.
353 | # epub_basename = u'wfc3tools'
354 | 
355 | # The HTML theme for the epub output. Since the default themes are not
356 | # optimized for small screen space, using the same theme for HTML and
357 | # epub output is usually not wise. This defaults to 'epub', a theme designed
358 | # to save visual space.
359 | epub_theme = 'epub'
360 | 
361 | # The language of the text. It defaults to the language option
362 | # or en if the language is not set.
363 | # epub_language = ''
364 | 
365 | # The scheme of the identifier. Typical schemes are ISBN or URL.
366 | # epub_scheme = ''
367 | 
368 | # The unique identifier of the text. This can be a ISBN number
369 | # or the project homepage.
370 | # epub_identifier = ''
371 | 
372 | # A unique identification for the text.
373 | # epub_uid = ''
374 | 
375 | # A tuple containing the cover image and cover page html template filenames.
376 | # epub_cover = ()
377 | 
378 | # A sequence of (type, uri, title) tuples for the guide element of content.opf.
379 | # epub_guide = ()
380 | 
381 | # HTML files that should be inserted before the pages created by sphinx.
382 | # The format is a list of tuples containing the path and title.
383 | # epub_pre_files = []
384 | 
385 | # HTML files that should be inserted after the pages created by sphinx.
386 | # The format is a list of tuples containing the path and title.
387 | # epub_post_files = []
388 | 
389 | # A list of files that should not be packed into the epub file.
390 | epub_exclude_files = ['search.html']
391 | 
392 | # The depth of the table of contents in toc.ncx.
393 | # epub_tocdepth = 3
394 | 
395 | # Allow duplicate toc entries.
396 | # epub_tocdup = True
397 | 
398 | # Choose between 'default' and 'includehidden'.
399 | # epub_tocscope = 'default'
400 | 
401 | # Fix unsupported image types using the PIL.
402 | # epub_fix_images = False
403 | 
404 | # Scale large images.
405 | # epub_max_image_width = 0
406 | 
407 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
408 | # epub_show_urls = 'inline'
409 | 
410 | # If false, no index is generated.
411 | # epub_use_index = True
412 | 
--------------------------------------------------------------------------------
/ah_bootstrap.py:
--------------------------------------------------------------------------------
1 | """
2 | This bootstrap module contains code for ensuring that the astropy_helpers
3 | package will be importable by the time the setup.py script runs. It also
4 | includes some workarounds to ensure that a recent-enough version of setuptools
5 | is being used for the installation.
6 | 
7 | This module should be the first thing imported in the setup.py of distributions
8 | that make use of the utilities in astropy_helpers. If the distribution ships
9 | with its own copy of astropy_helpers, this module will first attempt to import
10 | from the shipped copy. However, it will also check PyPI to see if there are
11 | any bug-fix releases on top of the current version that may be useful to get
12 | past platform-specific bugs that have been fixed. When running setup.py, use
13 | the ``--offline`` command-line option to disable the auto-upgrade checks.
14 | 
15 | When this module is imported or otherwise executed it automatically calls a
16 | main function that attempts to read the project's setup.cfg file, which it
17 | checks for a configuration section called ``[ah_bootstrap]``; the presence of
18 | that section, and options therein, determines the next step taken: If it
19 | contains an option called ``auto_use`` with a value of ``True``, it will
20 | automatically call the main function of this module called
21 | `use_astropy_helpers` (see that function's docstring for full details).
22 | Otherwise no further action is taken and by default the system-installed version
23 | of astropy-helpers will be used (however, ``ah_bootstrap.use_astropy_helpers``
24 | may be called manually from within the setup.py script).
25 | 
26 | This behavior can also be controlled using the ``--auto-use`` and
27 | ``--no-auto-use`` command-line flags. For clarity, an alias for
28 | ``--no-auto-use`` is ``--use-system-astropy-helpers``, and we recommend using
29 | the latter if needed.
30 | 
31 | Additional options in the ``[ah_bootstrap]`` section of setup.cfg have the same
32 | names as the arguments to `use_astropy_helpers`, and can be used to configure
33 | the bootstrap script when ``auto_use = True``.
34 | 
35 | See https://github.com/astropy/astropy-helpers for more details, and for the
36 | latest version of this module.
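A minimal example of such a section in setup.cfg (a sketch; ``path`` is
optional and defaults to ``astropy_helpers``)::

    [ah_bootstrap]
    auto_use = True
    path = astropy_helpers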
37 | """ 38 | 39 | import contextlib 40 | import errno 41 | import io 42 | import locale 43 | import os 44 | import re 45 | import subprocess as sp 46 | import sys 47 | 48 | __minimum_python_version__ = (3, 5) 49 | 50 | if sys.version_info < __minimum_python_version__: 51 | print("ERROR: Python {} or later is required by astropy-helpers".format( 52 | __minimum_python_version__)) 53 | sys.exit(1) 54 | 55 | try: 56 | from ConfigParser import ConfigParser, RawConfigParser 57 | except ImportError: 58 | from configparser import ConfigParser, RawConfigParser 59 | 60 | 61 | _str_types = (str, bytes) 62 | 63 | 64 | # What follows are several import statements meant to deal with install-time 65 | # issues with either missing or misbehaving pacakges (including making sure 66 | # setuptools itself is installed): 67 | 68 | # Check that setuptools 1.0 or later is present 69 | from distutils.version import LooseVersion 70 | 71 | try: 72 | import setuptools 73 | assert LooseVersion(setuptools.__version__) >= LooseVersion('1.0') 74 | except (ImportError, AssertionError): 75 | print("ERROR: setuptools 1.0 or later is required by astropy-helpers") 76 | sys.exit(1) 77 | 78 | # typing as a dependency for 1.6.1+ Sphinx causes issues when imported after 79 | # initializing submodule with ah_boostrap.py 80 | # See discussion and references in 81 | # https://github.com/astropy/astropy-helpers/issues/302 82 | 83 | try: 84 | import typing # noqa 85 | except ImportError: 86 | pass 87 | 88 | 89 | # Note: The following import is required as a workaround to 90 | # https://github.com/astropy/astropy-helpers/issues/89; if we don't import this 91 | # module now, it will get cleaned up after `run_setup` is called, but that will 92 | # later cause the TemporaryDirectory class defined in it to stop working when 93 | # used later on by setuptools 94 | try: 95 | import setuptools.py31compat # noqa 96 | except ImportError: 97 | pass 98 | 99 | 100 | # matplotlib can cause problems if it is imported from within a call of 101 | # run_setup(), because in some circumstances it will try to write to the user's 102 | # home directory, resulting in a SandboxViolation. See 103 | # https://github.com/matplotlib/matplotlib/pull/4165 104 | # Making sure matplotlib, if it is available, is imported early in the setup 105 | # process can mitigate this (note importing matplotlib.pyplot has the same 106 | # issue) 107 | try: 108 | import matplotlib 109 | matplotlib.use('Agg') 110 | import matplotlib.pyplot 111 | except: 112 | # Ignore if this fails for *any* reason* 113 | pass 114 | 115 | 116 | # End compatibility imports... 117 | 118 | 119 | # In case it didn't successfully import before the ez_setup checks 120 | import pkg_resources 121 | 122 | from setuptools import Distribution 123 | from setuptools.package_index import PackageIndex 124 | 125 | from distutils import log 126 | from distutils.debug import DEBUG 127 | 128 | 129 | # TODO: Maybe enable checking for a specific version of astropy_helpers? 
130 | DIST_NAME = 'astropy-helpers' 131 | PACKAGE_NAME = 'astropy_helpers' 132 | UPPER_VERSION_EXCLUSIVE = None 133 | 134 | # Defaults for other options 135 | DOWNLOAD_IF_NEEDED = True 136 | INDEX_URL = 'https://pypi.python.org/simple' 137 | USE_GIT = True 138 | OFFLINE = False 139 | AUTO_UPGRADE = True 140 | 141 | # A list of all the configuration options and their required types 142 | CFG_OPTIONS = [ 143 | ('auto_use', bool), ('path', str), ('download_if_needed', bool), 144 | ('index_url', str), ('use_git', bool), ('offline', bool), 145 | ('auto_upgrade', bool) 146 | ] 147 | 148 | 149 | class _Bootstrapper(object): 150 | """ 151 | Bootstrapper implementation. See ``use_astropy_helpers`` for parameter 152 | documentation. 153 | """ 154 | 155 | def __init__(self, path=None, index_url=None, use_git=None, offline=None, 156 | download_if_needed=None, auto_upgrade=None): 157 | 158 | if path is None: 159 | path = PACKAGE_NAME 160 | 161 | if not (isinstance(path, _str_types) or path is False): 162 | raise TypeError('path must be a string or False') 163 | 164 | if not isinstance(path, str): 165 | fs_encoding = sys.getfilesystemencoding() 166 | path = path.decode(fs_encoding) # path to unicode 167 | 168 | self.path = path 169 | 170 | # Set other option attributes, using defaults where necessary 171 | self.index_url = index_url if index_url is not None else INDEX_URL 172 | self.offline = offline if offline is not None else OFFLINE 173 | 174 | # If offline=True, override download and auto-upgrade 175 | if self.offline: 176 | download_if_needed = False 177 | auto_upgrade = False 178 | 179 | self.download = (download_if_needed 180 | if download_if_needed is not None 181 | else DOWNLOAD_IF_NEEDED) 182 | self.auto_upgrade = (auto_upgrade 183 | if auto_upgrade is not None else AUTO_UPGRADE) 184 | 185 | # If this is a release then the .git directory will not exist so we 186 | # should not use git. 187 | git_dir_exists = os.path.exists(os.path.join(os.path.dirname(__file__), '.git')) 188 | if use_git is None and not git_dir_exists: 189 | use_git = False 190 | 191 | self.use_git = use_git if use_git is not None else USE_GIT 192 | # Declared as False by default--later we check if astropy-helpers can be 193 | # upgraded from PyPI, but only if not using a source distribution (as in 194 | # the case of import from a git submodule) 195 | self.is_submodule = False 196 | 197 | @classmethod 198 | def main(cls, argv=None): 199 | if argv is None: 200 | argv = sys.argv 201 | 202 | config = cls.parse_config() 203 | config.update(cls.parse_command_line(argv)) 204 | 205 | auto_use = config.pop('auto_use', False) 206 | bootstrapper = cls(**config) 207 | 208 | if auto_use: 209 | # Run the bootstrapper, otherwise the setup.py is using the old 210 | # use_astropy_helpers() interface, in which case it will run the 211 | # bootstrapper manually after reconfiguring it. 212 | bootstrapper.run() 213 | 214 | return bootstrapper 215 | 216 | @classmethod 217 | def parse_config(cls): 218 | if not os.path.exists('setup.cfg'): 219 | return {} 220 | 221 | cfg = ConfigParser() 222 | 223 | try: 224 | cfg.read('setup.cfg') 225 | except Exception as e: 226 | if DEBUG: 227 | raise 228 | 229 | log.error( 230 | "Error reading setup.cfg: {0!r}\n{1} will not be " 231 | "automatically bootstrapped and package installation may fail." 
232 | "\n{2}".format(e, PACKAGE_NAME, _err_help_msg)) 233 | return {} 234 | 235 | if not cfg.has_section('ah_bootstrap'): 236 | return {} 237 | 238 | config = {} 239 | 240 | for option, type_ in CFG_OPTIONS: 241 | if not cfg.has_option('ah_bootstrap', option): 242 | continue 243 | 244 | if type_ is bool: 245 | value = cfg.getboolean('ah_bootstrap', option) 246 | else: 247 | value = cfg.get('ah_bootstrap', option) 248 | 249 | config[option] = value 250 | 251 | return config 252 | 253 | @classmethod 254 | def parse_command_line(cls, argv=None): 255 | if argv is None: 256 | argv = sys.argv 257 | 258 | config = {} 259 | 260 | # For now we just pop recognized ah_bootstrap options out of the 261 | # arg list. This is imperfect; in the unlikely case that a setup.py 262 | # custom command or even custom Distribution class defines an argument 263 | # of the same name then we will break that. However there's a catch22 264 | # here that we can't just do full argument parsing right here, because 265 | # we don't yet know *how* to parse all possible command-line arguments. 266 | if '--no-git' in argv: 267 | config['use_git'] = False 268 | argv.remove('--no-git') 269 | 270 | if '--offline' in argv: 271 | config['offline'] = True 272 | argv.remove('--offline') 273 | 274 | if '--auto-use' in argv: 275 | config['auto_use'] = True 276 | argv.remove('--auto-use') 277 | 278 | if '--no-auto-use' in argv: 279 | config['auto_use'] = False 280 | argv.remove('--no-auto-use') 281 | 282 | if '--use-system-astropy-helpers' in argv: 283 | config['auto_use'] = False 284 | argv.remove('--use-system-astropy-helpers') 285 | 286 | return config 287 | 288 | def run(self): 289 | strategies = ['local_directory', 'local_file', 'index'] 290 | dist = None 291 | 292 | # First, remove any previously imported versions of astropy_helpers; 293 | # this is necessary for nested installs where one package's installer 294 | # is installing another package via setuptools.sandbox.run_setup, as in 295 | # the case of setup_requires 296 | for key in list(sys.modules): 297 | try: 298 | if key == PACKAGE_NAME or key.startswith(PACKAGE_NAME + '.'): 299 | del sys.modules[key] 300 | except AttributeError: 301 | # Sometimes mysterious non-string things can turn up in 302 | # sys.modules 303 | continue 304 | 305 | # Check to see if the path is a submodule 306 | self.is_submodule = self._check_submodule() 307 | 308 | for strategy in strategies: 309 | method = getattr(self, 'get_{0}_dist'.format(strategy)) 310 | dist = method() 311 | if dist is not None: 312 | break 313 | else: 314 | raise _AHBootstrapSystemExit( 315 | "No source found for the {0!r} package; {0} must be " 316 | "available and importable as a prerequisite to building " 317 | "or installing this package.".format(PACKAGE_NAME)) 318 | 319 | # This is a bit hacky, but if astropy_helpers was loaded from a 320 | # directory/submodule its Distribution object gets a "precedence" of 321 | # "DEVELOP_DIST". However, in other cases it gets a precedence of 322 | # "EGG_DIST". 
However, when activing the distribution it will only be 323 | # placed early on sys.path if it is treated as an EGG_DIST, so always 324 | # do that 325 | dist = dist.clone(precedence=pkg_resources.EGG_DIST) 326 | 327 | # Otherwise we found a version of astropy-helpers, so we're done 328 | # Just active the found distribution on sys.path--if we did a 329 | # download this usually happens automatically but it doesn't hurt to 330 | # do it again 331 | # Note: Adding the dist to the global working set also activates it 332 | # (makes it importable on sys.path) by default. 333 | 334 | try: 335 | pkg_resources.working_set.add(dist, replace=True) 336 | except TypeError: 337 | # Some (much) older versions of setuptools do not have the 338 | # replace=True option here. These versions are old enough that all 339 | # bets may be off anyways, but it's easy enough to work around just 340 | # in case... 341 | if dist.key in pkg_resources.working_set.by_key: 342 | del pkg_resources.working_set.by_key[dist.key] 343 | pkg_resources.working_set.add(dist) 344 | 345 | @property 346 | def config(self): 347 | """ 348 | A `dict` containing the options this `_Bootstrapper` was configured 349 | with. 350 | """ 351 | 352 | return dict((optname, getattr(self, optname)) 353 | for optname, _ in CFG_OPTIONS if hasattr(self, optname)) 354 | 355 | def get_local_directory_dist(self): 356 | """ 357 | Handle importing a vendored package from a subdirectory of the source 358 | distribution. 359 | """ 360 | 361 | if not os.path.isdir(self.path): 362 | return 363 | 364 | log.info('Attempting to import astropy_helpers from {0} {1!r}'.format( 365 | 'submodule' if self.is_submodule else 'directory', 366 | self.path)) 367 | 368 | dist = self._directory_import() 369 | 370 | if dist is None: 371 | log.warn( 372 | 'The requested path {0!r} for importing {1} does not ' 373 | 'exist, or does not contain a copy of the {1} ' 374 | 'package.'.format(self.path, PACKAGE_NAME)) 375 | elif self.auto_upgrade and not self.is_submodule: 376 | # A version of astropy-helpers was found on the available path, but 377 | # check to see if a bugfix release is available on PyPI 378 | upgrade = self._do_upgrade(dist) 379 | if upgrade is not None: 380 | dist = upgrade 381 | 382 | return dist 383 | 384 | def get_local_file_dist(self): 385 | """ 386 | Handle importing from a source archive; this also uses setup_requires 387 | but points easy_install directly to the source archive. 
388 | """ 389 | 390 | if not os.path.isfile(self.path): 391 | return 392 | 393 | log.info('Attempting to unpack and import astropy_helpers from ' 394 | '{0!r}'.format(self.path)) 395 | 396 | try: 397 | dist = self._do_download(find_links=[self.path]) 398 | except Exception as e: 399 | if DEBUG: 400 | raise 401 | 402 | log.warn( 403 | 'Failed to import {0} from the specified archive {1!r}: ' 404 | '{2}'.format(PACKAGE_NAME, self.path, str(e))) 405 | dist = None 406 | 407 | if dist is not None and self.auto_upgrade: 408 | # A version of astropy-helpers was found on the available path, but 409 | # check to see if a bugfix release is available on PyPI 410 | upgrade = self._do_upgrade(dist) 411 | if upgrade is not None: 412 | dist = upgrade 413 | 414 | return dist 415 | 416 | def get_index_dist(self): 417 | if not self.download: 418 | log.warn('Downloading {0!r} disabled.'.format(DIST_NAME)) 419 | return None 420 | 421 | log.warn( 422 | "Downloading {0!r}; run setup.py with the --offline option to " 423 | "force offline installation.".format(DIST_NAME)) 424 | 425 | try: 426 | dist = self._do_download() 427 | except Exception as e: 428 | if DEBUG: 429 | raise 430 | log.warn( 431 | 'Failed to download and/or install {0!r} from {1!r}:\n' 432 | '{2}'.format(DIST_NAME, self.index_url, str(e))) 433 | dist = None 434 | 435 | # No need to run auto-upgrade here since we've already presumably 436 | # gotten the most up-to-date version from the package index 437 | return dist 438 | 439 | def _directory_import(self): 440 | """ 441 | Import astropy_helpers from the given path, which will be added to 442 | sys.path. 443 | 444 | Must return True if the import succeeded, and False otherwise. 445 | """ 446 | 447 | # Return True on success, False on failure but download is allowed, and 448 | # otherwise raise SystemExit 449 | path = os.path.abspath(self.path) 450 | 451 | # Use an empty WorkingSet rather than the man 452 | # pkg_resources.working_set, since on older versions of setuptools this 453 | # will invoke a VersionConflict when trying to install an upgrade 454 | ws = pkg_resources.WorkingSet([]) 455 | ws.add_entry(path) 456 | dist = ws.by_key.get(DIST_NAME) 457 | 458 | if dist is None: 459 | # We didn't find an egg-info/dist-info in the given path, but if a 460 | # setup.py exists we can generate it 461 | setup_py = os.path.join(path, 'setup.py') 462 | if os.path.isfile(setup_py): 463 | # We use subprocess instead of run_setup from setuptools to 464 | # avoid segmentation faults - see the following for more details: 465 | # https://github.com/cython/cython/issues/2104 466 | sp.check_output([sys.executable, 'setup.py', 'egg_info'], cwd=path) 467 | 468 | for dist in pkg_resources.find_distributions(path, True): 469 | # There should be only one... 
470 | return dist 471 | 472 | return dist 473 | 474 | def _do_download(self, version='', find_links=None): 475 | if find_links: 476 | allow_hosts = '' 477 | index_url = None 478 | else: 479 | allow_hosts = None 480 | index_url = self.index_url 481 | 482 | # Annoyingly, setuptools will not handle other arguments to 483 | # Distribution (such as options) before handling setup_requires, so it 484 | # is not straightforward to programmatically augment the arguments which 485 | # are passed to easy_install 486 | class _Distribution(Distribution): 487 | def get_option_dict(self, command_name): 488 | opts = Distribution.get_option_dict(self, command_name) 489 | if command_name == 'easy_install': 490 | if find_links is not None: 491 | opts['find_links'] = ('setup script', find_links) 492 | if index_url is not None: 493 | opts['index_url'] = ('setup script', index_url) 494 | if allow_hosts is not None: 495 | opts['allow_hosts'] = ('setup script', allow_hosts) 496 | return opts 497 | 498 | if version: 499 | req = '{0}=={1}'.format(DIST_NAME, version) 500 | else: 501 | if UPPER_VERSION_EXCLUSIVE is None: 502 | req = DIST_NAME 503 | else: 504 | req = '{0}<{1}'.format(DIST_NAME, UPPER_VERSION_EXCLUSIVE) 505 | 506 | attrs = {'setup_requires': [req]} 507 | 508 | # NOTE: we need to parse the config file (e.g. setup.cfg) to make sure 509 | # it honours the options set in the [easy_install] section, and we need 510 | # to explicitly fetch the requirement eggs as setup_requires does not 511 | # get honored in recent versions of setuptools: 512 | # https://github.com/pypa/setuptools/issues/1273 513 | 514 | try: 515 | 516 | context = _verbose if DEBUG else _silence 517 | with context(): 518 | dist = _Distribution(attrs=attrs) 519 | try: 520 | dist.parse_config_files(ignore_option_errors=True) 521 | dist.fetch_build_eggs(req) 522 | except TypeError: 523 | # On older versions of setuptools, ignore_option_errors 524 | # doesn't exist, and the above two lines are not needed 525 | # so we can just continue 526 | pass 527 | 528 | # If the setup_requires succeeded it will have added the new dist to 529 | # the main working_set 530 | return pkg_resources.working_set.by_key.get(DIST_NAME) 531 | except Exception as e: 532 | if DEBUG: 533 | raise 534 | 535 | msg = 'Error retrieving {0} from {1}:\n{2}' 536 | if find_links: 537 | source = find_links[0] 538 | elif index_url != INDEX_URL: 539 | source = index_url 540 | else: 541 | source = 'PyPI' 542 | 543 | raise Exception(msg.format(DIST_NAME, source, repr(e))) 544 | 545 | def _do_upgrade(self, dist): 546 | # Build up a requirement for a higher bugfix release but a lower minor 547 | # release (so API compatibility is guaranteed) 548 | next_version = _next_version(dist.parsed_version) 549 | 550 | req = pkg_resources.Requirement.parse( 551 | '{0}>{1},<{2}'.format(DIST_NAME, dist.version, next_version)) 552 | 553 | package_index = PackageIndex(index_url=self.index_url) 554 | 555 | upgrade = package_index.obtain(req) 556 | 557 | if upgrade is not None: 558 | return self._do_download(version=upgrade.version) 559 | 560 | def _check_submodule(self): 561 | """ 562 | Check if the given path is a git submodule. 563 | 564 | See the docstrings for ``_check_submodule_using_git`` and 565 | ``_check_submodule_no_git`` for further details. 
566 | """ 567 | 568 | if (self.path is None or 569 | (os.path.exists(self.path) and not os.path.isdir(self.path))): 570 | return False 571 | 572 | if self.use_git: 573 | return self._check_submodule_using_git() 574 | else: 575 | return self._check_submodule_no_git() 576 | 577 | def _check_submodule_using_git(self): 578 | """ 579 | Check if the given path is a git submodule. If so, attempt to initialize 580 | and/or update the submodule if needed. 581 | 582 | This function makes calls to the ``git`` command in subprocesses. The 583 | ``_check_submodule_no_git`` option uses pure Python to check if the given 584 | path looks like a git submodule, but it cannot perform updates. 585 | """ 586 | 587 | cmd = ['git', 'submodule', 'status', '--', self.path] 588 | 589 | try: 590 | log.info('Running `{0}`; use the --no-git option to disable git ' 591 | 'commands'.format(' '.join(cmd))) 592 | returncode, stdout, stderr = run_cmd(cmd) 593 | except _CommandNotFound: 594 | # The git command simply wasn't found; this is most likely the 595 | # case on user systems that don't have git and are simply 596 | # trying to install the package from PyPI or a source 597 | # distribution. Silently ignore this case and simply don't try 598 | # to use submodules 599 | return False 600 | 601 | stderr = stderr.strip() 602 | 603 | if returncode != 0 and stderr: 604 | # Unfortunately the return code alone cannot be relied on, as 605 | # earlier versions of git returned 0 even if the requested submodule 606 | # does not exist 607 | 608 | # This is a warning that occurs in perl (from running git submodule) 609 | # which only occurs with a malformatted locale setting which can 610 | # happen sometimes on OSX. See again 611 | # https://github.com/astropy/astropy/issues/2749 612 | perl_warning = ('perl: warning: Falling back to the standard locale ' 613 | '("C").') 614 | if not stderr.strip().endswith(perl_warning): 615 | # Some other unknown error condition occurred 616 | log.warn('git submodule command failed ' 617 | 'unexpectedly:\n{0}'.format(stderr)) 618 | return False 619 | 620 | # Output of `git submodule status` is as follows: 621 | # 622 | # 1: Status indicator: '-' for submodule is uninitialized, '+' if 623 | # submodule is initialized but is not at the commit currently indicated 624 | # in .gitmodules (and thus needs to be updated), or 'U' if the 625 | # submodule is in an unstable state (i.e. has merge conflicts) 626 | # 627 | # 2. SHA-1 hash of the current commit of the submodule (we don't really 628 | # need this information but it's useful for checking that the output is 629 | # correct) 630 | # 631 | # 3. The output of `git describe` for the submodule's current commit 632 | # hash (this includes for example what branches the commit is on) but 633 | # only if the submodule is initialized. 
We ignore this information for
634 |         # now
635 |         _git_submodule_status_re = re.compile(
636 |             '^(?P<status>[+-U ])(?P<commit>[0-9a-f]{40}) '
637 |             '(?P<submodule>\S+)( .*)?$')
638 | 
639 |         # The stdout should only contain one line--the status of the
640 |         # requested submodule
641 |         m = _git_submodule_status_re.match(stdout)
642 |         if m:
643 |             # Yes, the path *is* a git submodule
644 |             self._update_submodule(m.group('submodule'), m.group('status'))
645 |             return True
646 |         else:
647 |             log.warn(
648 |                 'Unexpected output from `git submodule status`:\n{0}\n'
649 |                 'Will attempt import from {1!r} regardless.'.format(
650 |                     stdout, self.path))
651 |             return False
652 | 
653 |     def _check_submodule_no_git(self):
654 |         """
655 |         Like ``_check_submodule_using_git``, but simply parses the .gitmodules file
656 |         to determine if the supplied path is a git submodule, and does not exec any
657 |         subprocesses.
658 | 
659 |         This can only determine if a path is a submodule--it does not perform
660 |         updates, etc. This function may need to be updated if the format of the
661 |         .gitmodules file is changed between git versions.
662 |         """
663 | 
664 |         gitmodules_path = os.path.abspath('.gitmodules')
665 | 
666 |         if not os.path.isfile(gitmodules_path):
667 |             return False
668 | 
669 |         # This is a minimal reader for gitconfig-style files. It handles a few of
670 |         # the quirks that make gitconfig files incompatible with ConfigParser-style
671 |         # files, but does not support the full gitconfig syntax (just enough
672 |         # needed to read a .gitmodules file).
673 |         gitmodules_fileobj = io.StringIO()
674 | 
675 |         # Must use io.open for cross-Python-compatible behavior wrt unicode
676 |         with io.open(gitmodules_path) as f:
677 |             for line in f:
678 |                 # gitconfig files are more flexible with leading whitespace; just
679 |                 # go ahead and remove it
680 |                 line = line.lstrip()
681 | 
682 |                 # comments can start with either # or ;
683 |                 if line and line[0] in ('#', ';'):
684 |                     continue
685 | 
686 |                 gitmodules_fileobj.write(line)
687 | 
688 |         gitmodules_fileobj.seek(0)
689 | 
690 |         cfg = RawConfigParser()
691 | 
692 |         try:
693 |             cfg.readfp(gitmodules_fileobj)
694 |         except Exception as exc:
695 |             log.warn('Malformatted .gitmodules file: {0}\n'
696 |                      '{1} cannot be assumed to be a git submodule.'.format(
697 |                          exc, self.path))
698 |             return False
699 | 
700 |         for section in cfg.sections():
701 |             if not cfg.has_option(section, 'path'):
702 |                 continue
703 | 
704 |             submodule_path = cfg.get(section, 'path').rstrip(os.sep)
705 | 
706 |             if submodule_path == self.path.rstrip(os.sep):
707 |                 return True
708 | 
709 |         return False
710 | 
711 |     def _update_submodule(self, submodule, status):
712 |         if status == ' ':
713 |             # The submodule is up to date; no action necessary
714 |             return
715 |         elif status == '-':
716 |             if self.offline:
717 |                 raise _AHBootstrapSystemExit(
718 |                     "Cannot initialize the {0} submodule in --offline mode; "
719 |                     "this requires being able to clone the submodule from an "
720 |                     "online repository.".format(submodule))
721 |             cmd = ['update', '--init']
722 |             action = 'Initializing'
723 |         elif status == '+':
724 |             cmd = ['update']
725 |             action = 'Updating'
726 |             if self.offline:
727 |                 cmd.append('--no-fetch')
728 |         elif status == 'U':
729 |             raise _AHBootstrapSystemExit(
730 |                 'Error: Submodule {0} contains unresolved merge conflicts. '
731 |                 'Please complete or abandon any changes in the submodule so that '
732 |                 'it is in a usable state, then try again.'.format(submodule))
733 |         else:
734 |             log.warn('Unknown status {0!r} for git submodule {1!r}.
Will ' 735 | 'attempt to use the submodule as-is, but try to ensure ' 736 | 'that the submodule is in a clean state and contains no ' 737 | 'conflicts or errors.\n{2}'.format(status, submodule, 738 | _err_help_msg)) 739 | return 740 | 741 | err_msg = None 742 | cmd = ['git', 'submodule'] + cmd + ['--', submodule] 743 | log.warn('{0} {1} submodule with: `{2}`'.format( 744 | action, submodule, ' '.join(cmd))) 745 | 746 | try: 747 | log.info('Running `{0}`; use the --no-git option to disable git ' 748 | 'commands'.format(' '.join(cmd))) 749 | returncode, stdout, stderr = run_cmd(cmd) 750 | except OSError as e: 751 | err_msg = str(e) 752 | else: 753 | if returncode != 0: 754 | err_msg = stderr 755 | 756 | if err_msg is not None: 757 | log.warn('An unexpected error occurred updating the git submodule ' 758 | '{0!r}:\n{1}\n{2}'.format(submodule, err_msg, 759 | _err_help_msg)) 760 | 761 | class _CommandNotFound(OSError): 762 | """ 763 | An exception raised when a command run with run_cmd is not found on the 764 | system. 765 | """ 766 | 767 | 768 | def run_cmd(cmd): 769 | """ 770 | Run a command in a subprocess, given as a list of command-line 771 | arguments. 772 | 773 | Returns a ``(returncode, stdout, stderr)`` tuple. 774 | """ 775 | 776 | try: 777 | p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE) 778 | # XXX: May block if either stdout or stderr fill their buffers; 779 | # however for the commands this is currently used for that is 780 | # unlikely (they should have very brief output) 781 | stdout, stderr = p.communicate() 782 | except OSError as e: 783 | if DEBUG: 784 | raise 785 | 786 | if e.errno == errno.ENOENT: 787 | msg = 'Command not found: `{0}`'.format(' '.join(cmd)) 788 | raise _CommandNotFound(msg, cmd) 789 | else: 790 | raise _AHBootstrapSystemExit( 791 | 'An unexpected error occurred when running the ' 792 | '`{0}` command:\n{1}'.format(' '.join(cmd), str(e))) 793 | 794 | 795 | # Can fail of the default locale is not configured properly. See 796 | # https://github.com/astropy/astropy/issues/2749. For the purposes under 797 | # consideration 'latin1' is an acceptable fallback. 798 | try: 799 | stdio_encoding = locale.getdefaultlocale()[1] or 'latin1' 800 | except ValueError: 801 | # Due to an OSX oddity locale.getdefaultlocale() can also crash 802 | # depending on the user's locale/language settings. See: 803 | # http://bugs.python.org/issue18378 804 | stdio_encoding = 'latin1' 805 | 806 | # Unlikely to fail at this point but even then let's be flexible 807 | if not isinstance(stdout, str): 808 | stdout = stdout.decode(stdio_encoding, 'replace') 809 | if not isinstance(stderr, str): 810 | stderr = stderr.decode(stdio_encoding, 'replace') 811 | 812 | return (p.returncode, stdout, stderr) 813 | 814 | 815 | def _next_version(version): 816 | """ 817 | Given a parsed version from pkg_resources.parse_version, returns a new 818 | version string with the next minor version. 
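    (Only the release segment of the parsed version is considered; any
    pre-release or development suffix is dropped, and the micro version is
    reset to zero.)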
819 | 820 | Examples 821 | ======== 822 | >>> _next_version(pkg_resources.parse_version('1.2.3')) 823 | '1.3.0' 824 | """ 825 | 826 | if hasattr(version, 'base_version'): 827 | # New version parsing from setuptools >= 8.0 828 | if version.base_version: 829 | parts = version.base_version.split('.') 830 | else: 831 | parts = [] 832 | else: 833 | parts = [] 834 | for part in version: 835 | if part.startswith('*'): 836 | break 837 | parts.append(part) 838 | 839 | parts = [int(p) for p in parts] 840 | 841 | if len(parts) < 3: 842 | parts += [0] * (3 - len(parts)) 843 | 844 | major, minor, micro = parts[:3] 845 | 846 | return '{0}.{1}.{2}'.format(major, minor + 1, 0) 847 | 848 | 849 | class _DummyFile(object): 850 | """A noop writeable object.""" 851 | 852 | errors = '' # Required for Python 3.x 853 | encoding = 'utf-8' 854 | 855 | def write(self, s): 856 | pass 857 | 858 | def flush(self): 859 | pass 860 | 861 | 862 | @contextlib.contextmanager 863 | def _verbose(): 864 | yield 865 | 866 | @contextlib.contextmanager 867 | def _silence(): 868 | """A context manager that silences sys.stdout and sys.stderr.""" 869 | 870 | old_stdout = sys.stdout 871 | old_stderr = sys.stderr 872 | sys.stdout = _DummyFile() 873 | sys.stderr = _DummyFile() 874 | exception_occurred = False 875 | try: 876 | yield 877 | except: 878 | exception_occurred = True 879 | # Go ahead and clean up so that exception handling can work normally 880 | sys.stdout = old_stdout 881 | sys.stderr = old_stderr 882 | raise 883 | 884 | if not exception_occurred: 885 | sys.stdout = old_stdout 886 | sys.stderr = old_stderr 887 | 888 | 889 | _err_help_msg = """ 890 | If the problem persists consider installing astropy_helpers manually using pip 891 | (`pip install astropy_helpers`) or by manually downloading the source archive, 892 | extracting it, and installing by running `python setup.py install` from the 893 | root of the extracted source code. 894 | """ 895 | 896 | 897 | class _AHBootstrapSystemExit(SystemExit): 898 | def __init__(self, *args): 899 | if not args: 900 | msg = 'An unknown problem occurred bootstrapping astropy_helpers.' 901 | else: 902 | msg = args[0] 903 | 904 | msg += '\n' + _err_help_msg 905 | 906 | super(_AHBootstrapSystemExit, self).__init__(msg, *args[1:]) 907 | 908 | 909 | BOOTSTRAPPER = _Bootstrapper.main() 910 | 911 | 912 | def use_astropy_helpers(**kwargs): 913 | """ 914 | Ensure that the `astropy_helpers` module is available and is importable. 915 | This supports automatic submodule initialization if astropy_helpers is 916 | included in a project as a git submodule, or will download it from PyPI if 917 | necessary. 918 | 919 | Parameters 920 | ---------- 921 | 922 | path : str or None, optional 923 | A filesystem path relative to the root of the project's source code 924 | that should be added to `sys.path` so that `astropy_helpers` can be 925 | imported from that path. 926 | 927 | If the path is a git submodule it will automatically be initialized 928 | and/or updated. 929 | 930 | The path may also be to a ``.tar.gz`` archive of the astropy_helpers 931 | source distribution. In this case the archive is automatically 932 | unpacked and made temporarily available on `sys.path` as a ``.egg`` 933 | archive. 934 | 935 | If `None` skip straight to downloading. 936 | 937 | download_if_needed : bool, optional 938 | If the provided filesystem path is not found an attempt will be made to 939 | download astropy_helpers from PyPI. 
It will then be made temporarily
940 |         available on `sys.path` as a ``.egg`` archive (using the
941 |         ``setup_requires`` feature of setuptools). If the ``--offline`` option
942 |         is given at the command line the value of this argument is overridden
943 |         to `False`.
944 | 
945 |     index_url : str, optional
946 |         If provided, use a different URL for the Python package index than the
947 |         main PyPI server.
948 | 
949 |     use_git : bool, optional
950 |         If `False` no git commands will be used--this effectively disables
951 |         support for git submodules. If the ``--no-git`` option is given at the
952 |         command line the value of this argument is overridden to `False`.
953 | 
954 |     auto_upgrade : bool, optional
955 |         By default, when installing a package from a non-development source
956 |         distribution ah_bootstrap will try to automatically check for patch
957 |         releases to astropy-helpers on PyPI and use the patched version over
958 |         any bundled versions. Setting this to `False` will disable that
959 |         functionality. If the ``--offline`` option is given at the command line
960 |         the value of this argument is overridden to `False`.
961 | 
962 |     offline : bool, optional
963 |         If `True` disable all actions that require an internet connection,
964 |         including downloading packages from the package index and fetching
965 |         updates to any git submodule. Defaults to `False`.
966 |     """
967 | 
968 |     global BOOTSTRAPPER
969 | 
970 |     config = BOOTSTRAPPER.config
971 |     config.update(**kwargs)
972 | 
973 |     # Create a new bootstrapper with the updated configuration and run it
974 |     BOOTSTRAPPER = _Bootstrapper(**config)
975 |     BOOTSTRAPPER.run()
976 | 
--------------------------------------------------------------------------------
/focalplane/fpalign/prepare_fpa_data.py:
--------------------------------------------------------------------------------
1 | import copy
2 | from collections import OrderedDict
3 | import os
4 | import glob
5 | import numpy as np
6 | import pickle
7 | import pylab as pl
8 | import sys
9 | import warnings
10 | 
11 | import astropy.io.fits as fits
12 | from astropy.table import Table, vstack
13 | from astropy.time import Time
14 | from astropy.coordinates import SkyCoord
15 | from astropy import units as u
16 | from astropy.utils.exceptions import AstropyWarning
17 | from astropy.stats import sigma_clipped_stats
18 | from astropy.visualization import SqrtStretch, LogStretch
19 | from astropy.visualization.mpl_normalize import ImageNormalize
20 | import astroquery.mast as mast
21 | 
22 | from jwst import datamodels
23 | from photutils import DAOStarFinder, CircularAperture, RectangularAperture
24 | import pysiaf
25 | from pystortion import crossmatch
26 | from scipy.spatial import cKDTree
27 | 
28 | from .alignment import AlignmentObservation, compute_idl_to_tel_in_table
29 | from ..utils import correct_for_proper_motion
30 | 
31 | def select_isolated_sources(extracted_sources, nearest_neighbour_distance_threshold_pix):
32 |     """
33 |     Select isolated stars, i.e. stars whose nearest neighbour is sufficiently far away.
34 |     https://stackoverflow.com/questions/57129448/find-distance-to-nearest-neighbor-in-2d-array
35 | 
36 |     Parameters
37 |     ----------
38 |     extracted_sources : astropy table with 'xcentroid' and 'ycentroid' columns
39 |     nearest_neighbour_distance_threshold_pix : minimum tolerated distance to the nearest neighbour, in pixels
40 | 
41 |     Returns
42 |     -------
43 |     extracted_sources : the input table with sources closer than the threshold to their nearest neighbour removed
44 |     """
45 |     stars_xy = np.array([extracted_sources['xcentroid'], extracted_sources['ycentroid']]).T
46 |     tree = cKDTree(stars_xy)
47 |     dists = tree.query(stars_xy, 2)
48 |     nearest_neighbour_distance = dists[0][:, 1]
49 | 
50 |     extracted_sources.remove_rows(
51 |         np.where(nearest_neighbour_distance < nearest_neighbour_distance_threshold_pix)[0])
52 | 
53 |     return extracted_sources
54 | 
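# A minimal usage sketch (hypothetical threshold; assumes `extracted_sources`
# is a DAOStarFinder-style table with 'xcentroid'/'ycentroid' columns):
#
#     isolated_sources = select_isolated_sources(
#         extracted_sources, nearest_neighbour_distance_threshold_pix=10.)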
55 | def crossmatch_fpa_data(parameters):
56 |     """Crossmatch FPA observations with the reference catalog.
57 | 
58 |     Parameters
59 |     ----------
60 |     parameters : dict of configuration values; missing keys are filled from `default_parameters` below
61 | 
62 |     Returns
63 |     -------
64 | 
65 |     """
66 |     print('\nCROSSMATCH OF FPA DATA WITH REFERENCE CATALOG')
67 | 
68 |     default_parameters = {'file_pattern': '*.fits',
69 |                           'camera_names': []}
70 | 
71 |     for key, value in default_parameters.items():
72 |         if key not in parameters.keys():
73 |             parameters[key] = value
74 | 
75 |     if (not os.path.isfile(parameters['pickle_file']) or parameters['overwrite']):
76 | 
77 |         fpa_data_files = glob.glob(os.path.join(parameters['standardized_data_dir'], parameters['file_pattern']))
78 |         verbose_figures = parameters['verbose_figures']
79 |         save_plot = parameters['save_plot']
80 |         plot_dir = parameters['plot_dir']
81 |         out_dir = parameters['out_dir']
82 |         q_max_cutoff = parameters['q_max_cutoff']
83 |         siaf = parameters['siaf']
84 |         gaia_tag = parameters['gaia_tag']
85 |         verbose = parameters['verbose']
86 |         idl_tel_method = parameters['idl_tel_method']
87 |         reference_catalog = parameters['reference_catalog']
88 |         xmatch_radius_camera = parameters['xmatch_radius_camera']
89 |         xmatch_radius_fgs = parameters['xmatch_radius_fgs']
90 |         rejection_level_sigma = parameters['rejection_level_sigma']
91 |         restrict_analysis_to_these_apertures = parameters['restrict_analysis_to_these_apertures']
92 | 
93 |         observations = []
94 | 
95 |         print('*'*100)
96 |         print('Running crossmatch procedure on {} input files'.format(len(fpa_data_files)))
97 |         for j, f in enumerate(fpa_data_files):
98 | 
99 |             # if 'FPA_data_HST_SUPERFGS2_FGS2_chip0_F583W_2019-04-09T06-39-23_fdyw24-fgs2.fits' not in f:
100 |             #     continue
101 | 
102 |             print('=' * 40)
103 |             fpa_data = Table.read(f)
104 | 
105 |             # handle HST FGS case
106 |             if (parameters['observatory'] == 'HST') & (fpa_data.meta['INSTRUME'] not in parameters['camera_names']):
107 |                 continue
108 | 
109 |             if parameters['observatory'] == 'JWST':
110 |                 pl.close('all')
111 |             print('Loading FPA observations in %s' % f)
112 |             fpa_name_seed = os.path.basename(f).split('.')[0]
113 | 
114 |             aperture_name = fpa_data.meta['SIAFAPER']
115 | 
116 |             if (restrict_analysis_to_these_apertures is not None):
117 |                 if (aperture_name not in restrict_analysis_to_these_apertures):
118 |                     continue
119 | 
120 |             aperture = copy.deepcopy(siaf[aperture_name])
121 |             reference_aperture = copy.deepcopy(aperture)
122 | 
123 |             print('using aperture: %s %s %s' % (aperture.observatory, aperture.InstrName, aperture.AperName))
124 | 
125 |             # compute v2 v3 coordinates of gaia catalog stars using reference aperture (using boresight)
126 |             attitude_ref = pysiaf.utils.rotations.attitude(0., 0., fpa_data.meta['pointing_ra_v1'],
127 |                                                            fpa_data.meta['pointing_dec_v1'],
128 |                                                            fpa_data.meta['pointing_pa_v3'])
129 | 
130 |             reference_catalog['v2_spherical_arcsec'], reference_catalog[
131 |                 'v3_spherical_arcsec'] = pysiaf.utils.rotations.getv2v3(attitude_ref, np.array(
132 |                 reference_catalog['ra']), np.array(reference_catalog['dec']))
133 | 
134 |             # reference_catalog['v2_spherical_arcsec'][reference_catalog['v2_spherical_arcsec']>180*3600] -= 360*3600
135 | 
136 |             reference_cat = SkyCoord(
137 |                 ra=np.array(reference_catalog['v2_spherical_arcsec']) * u.arcsec,
138 |                 dec=np.array(reference_catalog['v3_spherical_arcsec']) * u.arcsec)
139 | 
140 |             # generate alignment observation
141 |             obs = AlignmentObservation(aperture.observatory, aperture.InstrName)
142 |             obs.aperture = aperture
143 | 
144 |             star_catalog = fpa_data
145 | 
146 | 
star_catalog['star_id'] = star_catalog['id'] 147 | obs.star_catalog = star_catalog 148 | 149 | # SCI science frame (in pixels) -> IDL frame (in arcsec) 150 | obs.star_catalog['x_idl_arcsec'], obs.star_catalog['y_idl_arcsec'] = aperture.sci_to_idl(np.array(obs.star_catalog['x_SCI']), np.array(obs.star_catalog['y_SCI'])) 151 | 152 | # compute V2/V3 153 | # IDL frame in degrees -> V2/V3_tangent_plane in arcsec 154 | # obs.compute_v2v3(aperture, method=idl_tel_method, input_coordinates='tangent_plane') 155 | obs.star_catalog = compute_idl_to_tel_in_table(obs.star_catalog, aperture, method=idl_tel_method) 156 | 157 | # define Gaia catalog specific to every aperture to allow for local tangent-plane projection 158 | v2v3_reference = SkyCoord(ra=reference_aperture.V2Ref * u.arcsec, 159 | dec=reference_aperture.V3Ref * u.arcsec) 160 | selection_index = np.where(reference_cat.separation(v2v3_reference) < 3 * u.arcmin)[0] 161 | 162 | # pl.figure() 163 | # pl.plot(reference_cat.ra, reference_cat.dec, 'b.') 164 | # pl.plot(v2v3_reference.ra, v2v3_reference.dec, 'ro') 165 | # pl.show() 166 | # 1/0 167 | 168 | obs.gaia_catalog = reference_catalog[selection_index] 169 | # obs.gaia_reference_catalog = reference_catalog 170 | 171 | 172 | # determine which Gaia stars fall into the aperture 173 | path_Tel = aperture.path('tel') 174 | mask = path_Tel.contains_points(np.array( 175 | obs.gaia_catalog['v2_spherical_arcsec', 'v3_spherical_arcsec'].to_pandas())) 176 | 177 | # if verbose_figures: 178 | if 0: 179 | # if aperture.InstrName == 'NIRISS': 180 | pl.figure() 181 | pl.plot(obs.star_catalog['v2_spherical_arcsec'], 182 | obs.star_catalog['v3_spherical_arcsec'], 'ko', mfc='w', mew=1) 183 | pl.plot(obs.gaia_catalog['v2_spherical_arcsec'], 184 | obs.gaia_catalog['v3_spherical_arcsec'], 'b.') 185 | # pl.plot(reference_catalog['v2_spherical_arcsec'], reference_catalog['v3_spherical_arcsec'], 'k.') 186 | pl.axis('equal') 187 | pl.title(fpa_name_seed) 188 | aperture.plot() 189 | pl.show() 190 | 1/0 191 | # aperture_names = ['FGS1', 'FGS2', 'FGS3'] 192 | # instruments = aperture_names 193 | # for j, instrument in enumerate(instruments): 194 | # a = siaf.apertures[instrument] 195 | # a.plot(color='b') 196 | 197 | ax = pl.gca() 198 | # ax.invert_yaxis() 199 | if save_plot == 1: 200 | figure_name = os.path.join(plot_dir, '%s_v2v3_spherical.pdf' % fpa_name_seed) 201 | pl.savefig(figure_name, transparent=True, bbox_inches='tight', pad_inches=0) 202 | 203 | 1/0 204 | 205 | remove_multiple_matches = True 206 | retain_best_match = True 207 | 208 | # crossmatch star_catalog with gaia_catalog 209 | star_cat = SkyCoord(ra=np.array(obs.star_catalog['v2_spherical_arcsec']) * u.arcsec, 210 | dec=np.array( 211 | obs.star_catalog['v3_spherical_arcsec']) * u.arcsec) 212 | 213 | gaia_cat = SkyCoord(ra=np.array(obs.gaia_catalog['v2_spherical_arcsec']) * u.arcsec, 214 | dec=np.array( 215 | obs.gaia_catalog['v3_spherical_arcsec']) * u.arcsec) 216 | 217 | # # tackle wrapping or RA coordinates 218 | # if np.ptp(star_cat.ra).value > 350: 219 | # star_cat.ra[np.where(star_cat.ra > 180 * u.deg)[0]] -= 360 * u.deg 220 | # if np.ptp(gaia_cat.ra).value > 350: 221 | # gaia_cat.ra[np.where(gaia_cat.ra > 180 * u.deg)[0]] -= 360 * u.deg 222 | 223 | xmatch_radius = copy.deepcopy(xmatch_radius_camera) 224 | 225 | idx_gaia_cat, idx_star_cat, d2d, d3d, diff_raStar, diff_de = crossmatch.xmatch( 226 | gaia_cat, star_cat, xmatch_radius, rejection_level_sigma, verbose=verbose, 227 | verbose_figures=verbose_figures, saveplot=save_plot, out_dir=plot_dir, 228 
| name_seed=fpa_name_seed, retain_best_match=retain_best_match, 229 | remove_multiple_matches=remove_multiple_matches) 230 | 231 | print( 232 | '{:d} measured stars, {:d} reference catalog stars in the aperture, {:d} matches.'.format( 233 | len(obs.star_catalog), np.sum(mask), len(idx_gaia_cat))) 234 | # 1/0 235 | 236 | elif parameters['observatory'] == 'HST': 237 | 238 | 239 | if 1: 240 | camera_name = fpa_data.meta['INSTRUME'] 241 | # for camera_name in parameters['camera_names']: 242 | # if fpa_data.meta['INSTRUME'] != camera_name: 243 | # continue 244 | # else: 245 | pl.close('all') 246 | print('Loading FPA observations in %s' % f) 247 | print('Read {} rows from {}'.format(len(fpa_data), f)) 248 | 249 | fpa_name_seed = os.path.basename(f).split('.')[0] 250 | 251 | if 'FGS' not in camera_name: 252 | retain_best_match = 1 253 | 254 | # make and save q figure 255 | if verbose_figures: 256 | fig = pl.figure(figsize=(7, 7), facecolor='w', edgecolor='k') 257 | pl.clf() 258 | pl.plot(fpa_data['m'], fpa_data['q'], 'bo') 259 | pl.plot(fpa_data['m'][fpa_data['q'] > q_max_cutoff], 260 | fpa_data['q'][fpa_data['q'] > q_max_cutoff], 'ro', mfc='w') 261 | pl.title( 262 | '{}: {}'.format(fpa_data.meta['INSTRUME'], fpa_data.meta['DATAFILE'])) 263 | pl.xlabel('m') 264 | pl.ylabel('q') 265 | pl.axhline(q_max_cutoff, ls='--') 266 | # pl.ylim((0,q_max_cutoff)) 267 | # pl.hist(fpa_data['q'], 50) 268 | pl.show() 269 | if save_plot == 1: 270 | figure_name = os.path.join(plot_dir, '%s_onepass_q.pdf' % fpa_name_seed) 271 | pl.savefig(figure_name, transparent=True, bbox_inches='tight', 272 | pad_inches=0) 273 | 274 | fpa_data.remove_rows(np.where(fpa_data['q'] > q_max_cutoff)[0]) 275 | 276 | instrument = fpa_data.meta['INSTRUME'] 277 | 278 | image_file_name = os.path.join(fpa_data.meta['DATAPATH'], 279 | fpa_data.meta['DATAFILE']) 280 | primary_header = fits.getheader(image_file_name, ext=0) 281 | # chip2_header = fits.getheader(image_file_name, ext=1) 282 | # chip1_header = fits.getheader(image_file_name, ext=4) 283 | 284 | 285 | header_aperture_name = fpa_data.meta['APERTURE'].strip() 286 | 287 | # set SIAF reference aperture and aperture (aperture names in FITS header differ from SIAF names) 288 | if header_aperture_name == 'WFC-FIX': 289 | reference_aperture_name = 'JWFCFIX' 290 | if fpa_data.meta['CHIP'] == 1: 291 | aperture_name = 'JWFC1FIX' 292 | elif fpa_data.meta['CHIP'] == 2: 293 | aperture_name = 'JWFC2FIX' 294 | elif header_aperture_name == 'UVIS-CENTER': 295 | reference_aperture_name = 'IUVISCTR' 296 | if fpa_data.meta['CHIP'] == 1: 297 | aperture_name = 'IUVIS1FIX' 298 | elif fpa_data.meta['CHIP'] == 2: 299 | aperture_name = 'IUVIS2FIX' 300 | 301 | else: # FGS is in aperture name 302 | header_aperture_name = fpa_data.meta['SIAFAPER'] 303 | reference_aperture_name = header_aperture_name 304 | aperture_name = header_aperture_name 305 | 306 | # if aperture_name == 'FGS1': 307 | # verbose_figures = True 308 | 309 | 310 | if (restrict_analysis_to_these_apertures is not None): 311 | if (aperture_name not in restrict_analysis_to_these_apertures): 312 | continue 313 | 314 | aperture = copy.deepcopy(siaf.apertures[aperture_name]) 315 | reference_aperture = siaf.apertures[reference_aperture_name] 316 | print('using aperture: %s %s %s' % ( 317 | aperture.observatory, aperture.InstrName, aperture.AperName)) 318 | name_seed = fpa_name_seed 319 | 320 | # compute v2 v3 coordinates of gaia catalog stars using reference aperture (using boresight) 321 | attitude_ref = pysiaf.utils.rotations.attitude(0., 0., 322 | 
fpa_data.meta['RA_V1'], 323 | fpa_data.meta['DEC_V1'], 324 | fpa_data.meta['PA_V3']) 325 | 326 | reference_catalog['v2_spherical_arcsec'], reference_catalog[ 327 | 'v3_spherical_arcsec'] = pysiaf.utils.rotations.getv2v3(attitude_ref, np.array( 328 | reference_catalog['ra']), np.array(reference_catalog['dec'])) 329 | 330 | gaia_reference_cat = SkyCoord( 331 | ra=np.array(reference_catalog['v2_spherical_arcsec']) * u.arcsec, 332 | dec=np.array(reference_catalog['v3_spherical_arcsec']) * u.arcsec) 333 | 334 | # make overview plot with all relevant apertures and gaia stars 335 | if 0: 336 | fig = pl.figure(figsize=(6, 6), facecolor='w', edgecolor='k') 337 | for aperture_name, aperture in siaf.apertures.items(): 338 | # aperture.plot(label=True, color='k') 339 | show_label = True 340 | aperture.plot(color='k') 341 | if 'FGS' in aperture_name: 342 | label = aperture_name 343 | elif aperture_name == 'IUVISCTR': 344 | label = 'WFC3' 345 | elif aperture_name == 'JWFCFIX': 346 | label = 'ACS' 347 | else: 348 | show_label = False 349 | 350 | if show_label: 351 | pl.text(aperture.V2Ref, aperture.V3Ref, label , horizontalalignment='center') 352 | 353 | pl.plot(reference_catalog['v2_spherical_arcsec'], 354 | reference_catalog['v3_spherical_arcsec'], 'k.', ms=1, 355 | mfc='0.7') 356 | # pl.axis('tight') 357 | pl.axis('equal') 358 | pl.xlim((-1000, 1000)) 359 | pl.ylim((-1000, 1000)) 360 | pl.xlabel('V2 (arcsec)') 361 | pl.ylabel('V3 (arcsec)') 362 | ax = pl.gca() 363 | ax.invert_yaxis() 364 | pl.show() 365 | if save_plot == 1: 366 | figure_name = os.path.join(plot_dir, '%s_overview_v2v3_with_gaia.pdf' % name_seed) 367 | pl.savefig(figure_name, transparent=True, bbox_inches='tight', pad_inches=0) 368 | 1/0 369 | 370 | 371 | # generate alignment observation 372 | obs = AlignmentObservation(aperture.observatory, aperture.InstrName) 373 | obs.aperture = aperture 374 | 375 | star_catalog = fpa_data 376 | 377 | 378 | if 'FGS' not in camera_name: 379 | # assign star identifiers 380 | star_catalog['star_id'] = star_catalog['n'] 381 | obs.star_catalog = star_catalog 382 | 383 | if fpa_data.meta['INSTRUME'] == 'ACS': 384 | number_of_reference_pixels_x = 24 385 | elif fpa_data.meta['INSTRUME'] == 'WFC3': 386 | number_of_reference_pixels_x = 25 387 | 388 | # for HST the SIAF SciRef coordinates include the reference pixels. 
                        # see also Colin's excel worksheet and his email dated September 22 2017
                        # SCI science frame (in pixels) -> IDL frame (in arcsec)
                        obs.star_catalog['x_idl_arcsec'], obs.star_catalog['y_idl_arcsec'] = aperture.sci_to_idl(
                            np.array(obs.star_catalog['x_SCI']) + number_of_reference_pixels_x,
                            np.array(obs.star_catalog['y_SCI']))

                    else:
                        # assign star identifiers
                        star_catalog['star_id'] = [int(s.replace('_', '')) for s in star_catalog['TARGET_ID']]
                        obs.star_catalog = star_catalog

                    # compute V2/V3
                    # IDL frame (arcsec) -> V2/V3 tangent plane (arcsec)
                    obs.star_catalog = compute_idl_to_tel_in_table(obs.star_catalog, aperture, method=idl_tel_method)

                    # define Gaia catalog specific to every aperture to allow for local tangent-plane projection
                    v2v3_reference = SkyCoord(ra=reference_aperture.V2Ref * u.arcsec, dec=reference_aperture.V3Ref * u.arcsec)
                    if 'FGS' not in camera_name:
                        gaia_selection_index = np.where(gaia_reference_cat.separation(v2v3_reference) < 3 * u.arcmin)[0]
                    else:
                        # some FGS stars were observed several times to correct for drifts etc.
                        tmp, tmp_unique_index = np.unique(obs.star_catalog['STAR_NAME'].data,
                                                          return_index=True)
                        tmp_star_cat = SkyCoord(
                            ra=np.array(obs.star_catalog['RA'][tmp_unique_index]) * u.deg,
                            dec=np.array(obs.star_catalog['DEC'][tmp_unique_index]) * u.deg)

                        tmp_gaia_cat = SkyCoord(ra=np.array(reference_catalog['ra']) * u.deg,
                                                dec=np.array(reference_catalog['dec']) * u.deg)

                        xmatch_radius_sky = 1 * u.arcsecond
                        rejection_level_sigma = 5
                        retain_best_match = True
                        tmp_idx_gaia_cat, tmp_idx_star_cat, d2d, d3d, diff_raStar, diff_de = crossmatch.xmatch(
                            tmp_gaia_cat, tmp_star_cat,
                            xmatch_radius_sky,
                            verbose=0, rejection_level_sigma=rejection_level_sigma,
                            retain_best_match=retain_best_match, verbose_figures=0)
                        gaia_selection_index = tmp_idx_gaia_cat
                        print('{} unique stars in the FGS observations'.format(len(tmp_unique_index)))
                        print('{} unique FGS stars crossmatched with Gaia (in sky frame)'.format(len(gaia_selection_index)))

                    obs.gaia_catalog = reference_catalog[gaia_selection_index]
                    obs.gaia_reference_catalog = reference_catalog

                    # correct the Gaia positions for proper motion to the observation epoch
                    target_epoch = Time(fpa_data.meta['EPOCH'], format='isot')

                    if parameters['correct_gaia_for_proper_motion']:
                        file_format = '.fits'
                        output_name_seed = gaia_tag + '_{:.3f}'.format(target_epoch.decimalyear)
                        pm_corrected_file = os.path.join(out_dir,
                                                         'gaia_sources_{}_pm_corrected_{}'.format(
                                                             gaia_tag,
                                                             target_epoch.isot)) + file_format
                        if (parameters['overwrite_pm_correction']) or (not os.path.isfile(pm_corrected_file)):
                            with warnings.catch_warnings():
                                warnings.simplefilter('ignore', AstropyWarning, append=True)
                                warnings.simplefilter('ignore', UserWarning, append=True)
                                obs.gaia_catalog = correct_for_proper_motion(
                                    obs.gaia_catalog,
                                    target_epoch,
                                    verbose=False,
                                    ignore_parallax=False)
                            obs.gaia_catalog.write(pm_corrected_file, overwrite=True)
                        else:
                            obs.gaia_catalog = Table.read(pm_corrected_file)

                    # handle a possible RA wrap-around (disabled):
                    # overflow_index = np.where(obs.star_catalog['v2_spherical_arcsec'] / 3600 > 350.)[0]
                    #
obs.star_catalog['v2_spherical_arcsec'][overflow_index] -= 360. * 3600 460 | 461 | # determine which Gaia stars fall into the aperture 462 | path_Tel = aperture.path('tel') 463 | mask = path_Tel.contains_points(np.array(obs.gaia_catalog['v2_spherical_arcsec', 'v3_spherical_arcsec'].to_pandas())) 464 | 465 | if 'FGS' not in camera_name: 466 | if verbose_figures: 467 | pl.figure() 468 | pl.plot(obs.star_catalog['v2_spherical_arcsec'], 469 | obs.star_catalog['v3_spherical_arcsec'], 'ko', mfc='w', mew=1) 470 | pl.plot(obs.gaia_catalog['v2_spherical_arcsec'], 471 | obs.gaia_catalog['v3_spherical_arcsec'], 'b.') 472 | pl.axis('equal') 473 | pl.title(fpa_name_seed) 474 | pl.show() 475 | 476 | # plot the aperture contours in v2v3 477 | if fpa_data.meta['INSTRUME'] == 'ACS': 478 | aperture_names = [reference_aperture_name, 'JWFC1FIX', 'JWFC2FIX'] 479 | elif fpa_data.meta['INSTRUME'] == 'WFC3': 480 | aperture_names = [reference_aperture_name, 'IUVIS1FIX', 'IUVIS2FIX'] 481 | instruments = [fpa_data.meta['INSTRUME']] * len(aperture_names) 482 | 483 | for AperName in aperture_names: 484 | siaf.apertures[AperName].plot('tel') # , label=True) 485 | 486 | else: 487 | if verbose_figures: 488 | pl.figure() 489 | pl.plot(obs.star_catalog['v2_spherical_arcsec'], 490 | obs.star_catalog['v3_spherical_arcsec'], 'ko', mfc='w', 491 | mew=1) 492 | pl.plot(obs.gaia_catalog['v2_spherical_arcsec'], 493 | obs.gaia_catalog['v3_spherical_arcsec'], 'b.') 494 | pl.axis('equal') 495 | pl.title(fpa_name_seed) 496 | pl.show() 497 | 498 | aperture_names = ['FGS1', 'FGS2', 'FGS3'] 499 | instruments = aperture_names 500 | for j, instrument in enumerate(instruments): 501 | a = siaf.apertures[instrument] 502 | a.plot(color='b') 503 | 504 | ax = pl.gca() 505 | ax.invert_yaxis() 506 | if save_plot == 1: 507 | figure_name = os.path.join(plot_dir, 508 | '%s_v2v3_spherical.pdf' % fpa_name_seed) 509 | pl.savefig(figure_name, transparent=True, bbox_inches='tight', pad_inches=0) 510 | 511 | if 'FGS' not in camera_name: 512 | remove_multiple_matches = True 513 | else: 514 | # keep the FGS stars that were observed several times by the astrometer 515 | remove_multiple_matches = False 516 | 517 | # crossmatch star_catalog with gaia_catalog 518 | star_cat = SkyCoord(ra=np.array(obs.star_catalog['v2_spherical_arcsec']) * u.arcsec, 519 | dec=np.array(obs.star_catalog['v3_spherical_arcsec']) * u.arcsec) 520 | 521 | gaia_cat = SkyCoord(ra=np.array(obs.gaia_catalog['v2_spherical_arcsec']) * u.arcsec, 522 | dec=np.array(obs.gaia_catalog['v3_spherical_arcsec']) * u.arcsec) 523 | 524 | 525 | # if aperture_name == 'FGS1': 526 | # xmatch_radius = 1.*u.arcsec 527 | # verbose_figures = True 528 | # verbose=True 529 | # 1/0 530 | # xmatch_radius = 5.*u.arcsec 531 | # else: 532 | if 'FGS' in aperture_name: 533 | xmatch_radius = copy.deepcopy(xmatch_radius_fgs) 534 | else: 535 | xmatch_radius = copy.deepcopy(xmatch_radius_camera) 536 | 537 | # run xmatch 538 | if 0: 539 | idx_gaia_cat, idx_star_cat, d2d, d3d, diff_raStar, diff_de = distortion.crossmatch_sky_catalogues_with_iterative_distortion_correction( 540 | gaia_cat, star_cat, out_dir, verbose=1, 541 | initial_crossmatch_radius=xmatch_radius, max_iterations=10, 542 | n_interation_switch=1, k=6, overwrite=True, adaptive_xmatch_radius_factor=5) 543 | 1 / 0 544 | else: 545 | # verbose = 0 546 | # verbose_figures = 0 547 | idx_gaia_cat, idx_star_cat, d2d, d3d, diff_raStar, diff_de = \ 548 | crossmatch.xmatch(gaia_cat, star_cat, 549 | xmatch_radius, 550 | rejection_level_sigma, 551 | verbose=verbose, 552 | 
verbose_figures=verbose_figures, 553 | saveplot=save_plot, 554 | out_dir=plot_dir, 555 | name_seed=fpa_name_seed, 556 | retain_best_match=retain_best_match, 557 | remove_multiple_matches=remove_multiple_matches) 558 | 559 | print( 560 | '{:d} measured stars, {:d} Gaia catalog stars in the aperture, {:d} matched with Gaia.'.format( 561 | len(obs.star_catalog), np.sum(mask), len(idx_gaia_cat))) 562 | 563 | # if aperture_name == 'FGS1': 564 | # 1/0 565 | 566 | if ('FGS' in camera_name) & (len(obs.star_catalog) != len(idx_gaia_cat)): 567 | print('MISSING STAR:') 568 | obs.star_catalog[np.setdiff1d(np.arange(len(obs.star_catalog)), idx_star_cat)].pprint() 569 | 570 | if 0: 571 | pl.figure() 572 | pl.plot(obs.star_catalog['v2_spherical_arcsec'], 573 | obs.star_catalog['v3_spherical_arcsec'], 'ko', mfc='w', 574 | mew=1) 575 | pl.plot(obs.gaia_catalog['v2_spherical_arcsec'], 576 | obs.gaia_catalog['v3_spherical_arcsec'], 'b.') 577 | pl.axis('equal') 578 | pl.title(fpa_name_seed) 579 | pl.show() 580 | 581 | aperture_names = ['FGS1', 'FGS2', 'FGS3'] 582 | instruments = aperture_names # [fpa_data.meta['INSTRUME']] * len( 583 | # aperture_names) 584 | 585 | for j, instrument in enumerate(instruments): 586 | a = siaf.apertures[instrument] 587 | a.plot(color='b') 588 | 589 | ax = pl.gca() 590 | ax.invert_yaxis() 591 | 592 | 593 | obs.number_of_measured_stars = len(obs.star_catalog) 594 | obs.number_of_gaia_stars = np.sum(mask) 595 | obs.number_of_matched_stars = len(idx_gaia_cat) 596 | 597 | obs.gaia_catalog_matched = obs.gaia_catalog[idx_gaia_cat] 598 | obs.star_catalog_matched = obs.star_catalog[idx_star_cat] 599 | obs.gaia_catalog_matched['star_id'] = obs.star_catalog_matched['star_id'] 600 | 601 | # save space in pickle, speed up 602 | obs.gaia_catalog = [] 603 | obs.star_catalog = [] 604 | obs.gaia_reference_catalog = [] 605 | 606 | obs.siaf_aperture_name = aperture_name 607 | obs.fpa_data = fpa_data 608 | obs.fpa_name_seed = fpa_name_seed 609 | 610 | # dictionary that defines the names of columns in the star/gaia_catalog for use later on 611 | fieldname_dict = {} 612 | fieldname_dict['star_catalog'] = {} # observed 613 | fieldname_dict['reference_catalog'] = {} # Gaia 614 | 615 | if idl_tel_method == 'spherical': 616 | fieldname_dict['reference_catalog']['position_1'] = 'v2_spherical_arcsec' 617 | fieldname_dict['reference_catalog']['position_2'] = 'v3_spherical_arcsec' 618 | else: 619 | fieldname_dict['reference_catalog']['position_1'] = 'v2_tangent_arcsec' 620 | fieldname_dict['reference_catalog']['position_2'] = 'v3_tangent_arcsec' 621 | 622 | if 'Name: J/A+A/563/A80/jwstcf' in reference_catalog.meta['comments']: 623 | reference_catalog_identifier = 'ID' # HAWK-I 624 | fieldname_dict['reference_catalog']['sigma_position_1'] = 'ra_error_mas' 625 | fieldname_dict['reference_catalog']['sigma_position_2'] = 'dec_error_mas' 626 | else: 627 | reference_catalog_identifier = 'source_id' # Gaia 628 | fieldname_dict['reference_catalog']['sigma_position_1'] = 'ra_error' 629 | fieldname_dict['reference_catalog']['sigma_position_2'] = 'dec_error' 630 | fieldname_dict['reference_catalog']['identifier'] = reference_catalog_identifier 631 | fieldname_dict['reference_catalog']['position_unit'] = u.arcsecond 632 | fieldname_dict['reference_catalog']['sigma_position_unit'] = u.milliarcsecond 633 | 634 | if idl_tel_method == 'spherical': 635 | fieldname_dict['star_catalog']['position_1'] = 'v2_spherical_arcsec' 636 | fieldname_dict['star_catalog']['position_2'] = 'v3_spherical_arcsec' 637 | else: 638 | 
                fieldname_dict['star_catalog']['position_1'] = 'v2_tangent_arcsec'
                fieldname_dict['star_catalog']['position_2'] = 'v3_tangent_arcsec'

            fieldname_dict['star_catalog']['sigma_position_1'] = 'sigma_x_mas'
            fieldname_dict['star_catalog']['sigma_position_2'] = 'sigma_y_mas'
            fieldname_dict['star_catalog']['identifier'] = 'star_id'
            fieldname_dict['star_catalog']['position_unit'] = u.arcsecond
            fieldname_dict['star_catalog']['sigma_position_unit'] = u.milliarcsecond

            obs.fieldname_dict = fieldname_dict

            observations.append(obs)

        with open(parameters['pickle_file'], "wb") as pickle_fp:
            pickle.dump(observations, pickle_fp)
    else:
        print('Loading pickled file {}'.format(parameters['pickle_file']))
        with open(parameters['pickle_file'], "rb") as pickle_fp:
            observations = pickle.load(pickle_fp)

    return observations


def correct_dva(obs_collection, parameters):
    """Correct for the effects of differential velocity aberration (DVA).

    This routine provides the necessary input to the DVA calculations (as
    attributes of the aperture object). The DVA corrections themselves are
    performed within the aperture methods when necessary.

    Parameters
    ----------
    obs_collection : object
        Collection of alignment observations. Must provide a table `T` with
        'group_id' and 'INSTRUME' columns and an `observations` array.
    parameters : dict
        Configuration dictionary with keys 'dva_dir', 'dva_source_dir' and 'verbose'.

    Returns
    -------
    obs_collection : object
        The input collection with DVA parameters attached to the camera apertures.

    """
    print('\nCORRECT FOR DIFFERENTIAL VELOCITY ABERRATION')

    dva_dir = parameters['dva_dir']
    dva_source_dir = parameters['dva_source_dir']
    verbose = parameters['verbose']

    for group_id in np.unique(obs_collection.T['group_id']):
        if verbose:
            print('Working on group {}'.format(group_id))
        obs_indexes = np.where((obs_collection.T['group_id'] == group_id))[0]

        superfgs_observation_index = \
            np.where((obs_collection.T['group_id'] == group_id) & (
                obs_collection.T['INSTRUME'] == 'SUPERFGS'))[0]
        if superfgs_observation_index.size == 0:
            print('No FGS data in this group. Skipping.')
            continue
        superfgs_obs = obs_collection.observations[superfgs_observation_index][0]

        camera_observation_index = \
            np.where((obs_collection.T['group_id'] == group_id) & (
                obs_collection.T['INSTRUME'] != 'SUPERFGS'))[0]

        fgs_exposure_midtimes = np.mean(np.vstack(
            (superfgs_obs.fpa_data['EXPSTART'].data, superfgs_obs.fpa_data['EXPEND'].data)), axis=0)

        # exclude HST FGS because it has already been corrected
        for i in camera_observation_index:

            camera_obs = obs_collection.observations[i]
            camera_obs_name_seed = '{}_{}_{}_{}_{}'.format(camera_obs.fpa_data.meta['TELESCOP'],
                                                           camera_obs.fpa_data.meta['INSTRUME'],
                                                           camera_obs.fpa_data.meta['APERTURE'],
                                                           camera_obs.fpa_data.meta['EPOCH'],
                                                           camera_obs.fpa_data.meta[
                                                               'DATAFILE'].split('.')[0]).replace(':', '-')
            dva_filename = os.path.join(dva_dir, 'DVA_data_{}.txt'.format(camera_obs_name_seed))
            dva_file = open(dva_filename, 'w')

            camera_exposure_midtime = np.mean(
                [camera_obs.fpa_data.meta['EXPSTART'], camera_obs.fpa_data.meta['EXPEND']])
            matching_fgs_exposure_index = np.argmin(
                np.abs(camera_exposure_midtime - fgs_exposure_midtimes))
            if verbose:
                print('Camera-FGS match found with delta_time = {:2.3f} min'.format(np.abs(
                    camera_exposure_midtime - fgs_exposure_midtimes[
                        matching_fgs_exposure_index]) * 24 * 60.))
            print('Writing parameter file for DVA code')
            for key in 'PRIMESI V2Ref V3Ref FGSOFFV2 FGSOFFV3 RA_V1 DEC_V1 PA_V3 POSTNSTX POSTNSTY POSTNSTZ VELOCSTX VELOCSTY VELOCSTZ EXPSTART'.split():
                if key in 'V2Ref V3Ref'.split():
                    value = getattr(superfgs_obs.aperture, key)
                elif key == 'PRIMESI':
                    value = int(superfgs_obs.fpa_data[key][matching_fgs_exposure_index][-1])
                elif key in 'FGSOFFV2 FGSOFFV3'.split():
                    value = superfgs_obs.fpa_data[key][matching_fgs_exposure_index]
                elif key == 'EXPSTART':
                    # the scale of EXPSTART appears to be UTC, see
                    # http://www.stsci.edu/ftp/documents/calibration/podps.dict
                    fgs_time = Time(camera_obs.fpa_data.meta[key], format='mjd', scale='utc')
                    value = fgs_time.yday.replace(':', ' ')
                else:
                    value = camera_obs.fpa_data.meta[key]

                print('{:<30} {}'.format(value, key), file=dva_file)
            dva_file.close()
            camera_obs.aperture._correct_dva = True
            camera_obs.aperture._dva_parameters = {'parameter_file': dva_filename,
                                                   'dva_source_dir': dva_source_dir}

            obs_collection.observations[i] = camera_obs
            if 0:

                v2, v3 = camera_obs.aperture.correct_for_dva(
                    np.array(camera_obs.star_catalog_matched['v2_spherical_arcsec']),
                    np.array(camera_obs.star_catalog_matched['v3_spherical_arcsec']))

                v2v3_data_file = os.path.join(dva_dir, 'v2v3_data_{}_measured.txt'.format(
                    camera_obs_name_seed))
                camera_obs.star_catalog_matched['v2_spherical_arcsec', 'v3_spherical_arcsec'].write(
                    v2v3_data_file,
                    format='ascii.fixed_width_no_header',
                    delimiter=' ',
                    bookend=False,
                    overwrite=True)

                v2v3_corrected_file = v2v3_data_file.replace('_measured', '_corrected')
                import subprocess

                system_command = '{} {} {} {}'.format(os.path.join(dva_source_dir, 'compute-DVA.e'),
                                                      dva_filename,
                                                      v2v3_data_file, v2v3_corrected_file)
                print('Running system command \n{}'.format(system_command))
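                # (disabled experiment) this block sketches how the external DVA
                # executable 'compute-DVA.e' from dva_source_dir would be invoked
                # directly: it reads the parameter file written above plus a
                # two-column v2/v3 table, and the corrected positions are read back
                # in below; the exact interface of the executable is assumed here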
                subprocess.call(system_command, shell=True)

                v2v3_corrected = Table.read(v2v3_corrected_file, format='ascii.no_header',
                                            names=('v2_original', 'v3_original', 'v2_corrected',
                                                   'v3_corrected'))

                1 / 0

                # interpolate the ephemeris around the FGS exposure time
                start_time = fgs_time - TimeDelta(1, format='jd')
                stop_time = fgs_time + TimeDelta(1, format='jd')

                center = '500@0'  # solar system barycenter
                target = '399'  # Earth
                e = pystrometry.get_ephemeris(center=center, target=target, start_time=start_time,
                                              stop_time=stop_time,
                                              step_size='1h', verbose=True, out_dir=dva_dir,
                                              vector_table_output_type=2,
                                              output_units='KM-S', overwrite=True,
                                              reference_plane='FRAME')

                ip_values = []
                for colname in ['X', 'Y', 'Z', 'VX', 'VY', 'VZ']:
                    ip_fun = scipy.interpolate.interp1d(np.array(e['JDTDB']), np.array(e[colname]),
                                                        kind='linear', copy=True, bounds_error=True,
                                                        fill_value=np.nan)
                    # http://docs.astropy.org/en/stable/time/#id6
                    ip_val = ip_fun(fgs_time.tdb.jd)
                    ip_values.append(ip_val)

                e.add_row(np.hstack(([fgs_time.tdb.jd, 'N/A'], np.array(ip_values))))
                e[[-1]].pprint()

    return obs_collection


def hst_camera_fpa_data(data_dir, pattern, onepass_extension, standardized_data_dir, astrometry_uncertainty_mas):
    """Generate standardized focal plane alignment data based on HST camera data (WFC3, ACS)
    that were processed with Jay Anderson's hst1pass code to extract source pixel positions.

    ATTENTION: Pixel positions are extracted in individual chips (numbered 1 and 2), and the
    hst1pass numbering is inverted compared to the standard numbering scheme:
    chip 1 is the top chip, chip 2 is the bottom chip.

    :param data_dir: directory that is searched recursively for the HST camera FITS files
    :param pattern: filename pattern of the input FITS files
    :param onepass_extension: extension of the hst1pass output files; its characters also define the column names
    :param standardized_data_dir: directory where the standardized FPA data files are written
    :param astrometry_uncertainty_mas: scalar uncertainty in mas, or dict with 'shallow'/'deep' entries keyed by exposure time
    :return: None (standardized files are written to standardized_data_dir)

    TODO: add FITS keywords needed for DVA correction from _spt header

    """

    invert_chip_numbers = True

    file_list = glob.glob(os.path.join(data_dir, '**/**/**/*{}'.format(pattern)))
    if len(file_list) == 0:
        raise RuntimeError('No HST camera data found')

    for f in file_list:
        if '/logs/' in f:
            continue
        # get headers
        primary_header = fits.getheader(f, ext=0)
        first_ext_header = fits.getheader(f, ext=1)

        # STS (10/22/2019): Check if header keyword QUALITY includes the value GSFAIL; if so, ignore data set.
        quality = primary_header['QUALITY']
        if 'GSFAIL' in quality:
            continue

        # collect meta data
        telescope = primary_header['TELESCOP']
        instr = primary_header['INSTRUME'].strip()
        if instr == 'ACS':
            header_keyword_filter = 'FILTER1'
        elif instr == 'WFC3':
            header_keyword_filter = 'FILTER'
        filter = primary_header[header_keyword_filter].strip()
        aperture = primary_header['APERTURE'].strip()
        epoch_isot = '%sT%s' % (primary_header['DATE-OBS'], primary_header['TIME-OBS'])

        # hst1pass data file; the characters of the extension define the column names
        df = f.replace('.fits', '.%s' % onepass_extension).replace('mast_data', 'onepass_output')
        d = Table.read(df, format='ascii.basic', names=(list(onepass_extension)))

        print('Read {} stars from {}'.format(len(d), df))

        # extract keywords for DVA correction
        spt_file = f.replace('_flc.fits', '_spt.fits')

        d['dms_chip_number'] = np.zeros(len(d)).astype(int)
        onepass_chip_numbers = np.unique(d['k'].data)

        # construct DMS compliant chip numbers
        for chip_id in [1, 2]:
            chip_index = np.where(d['k'] == chip_id)[0]
            if chip_id == 1:
                if invert_chip_numbers:
                    d['dms_chip_number'][chip_index] = 2
                else:
                    d['dms_chip_number'][chip_index] = 1
            elif chip_id == 2:
                if invert_chip_numbers:
                    d['dms_chip_number'][chip_index] = 1
                else:
                    d['dms_chip_number'][chip_index] = 2

        dms_chip_numbers = np.unique(d['dms_chip_number'].data)
        chip_indices = []
        for chip_id in dms_chip_numbers:
            chip_index = np.where(d['dms_chip_number'] == chip_id)[0]
            chip_indices.append(chip_index)

        # flt images are in SIAS, so the hst1pass x,y coordinates are also in SIAS
        d['x_SCI'] = d['x']
        d['y_SCI'] = d['y']

        # index of chip 1
        if invert_chip_numbers:
            chip_correct_index = dms_chip_numbers.tolist().index(1)
        else:
            chip_correct_index = dms_chip_numbers.tolist().index(2)

        # correct y coordinates of chip 1
        d['y_SCI'][chip_indices[chip_correct_index]] = d['y'][chip_indices[chip_correct_index]] - 2048.
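        # illustration: hst1pass reports y over the full two-chip mosaic, so a source
        # measured at y = 3000 on the chip corrected here lands at
        # y_SCI = 3000 - 2048 = 952 in that chip's own science frame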
910 | 911 | d['RA_deg'] = d['r'] 912 | d['Dec_deg'] = d['d'] 913 | 914 | if type(astrometry_uncertainty_mas) is dict: 915 | exp_time = primary_header['EXPTIME'] 916 | if exp_time < astrometry_uncertainty_mas['shallow']['exptime_threshold_s']: 917 | uncertainty_mas = astrometry_uncertainty_mas['shallow']['uncertainty_mas'] 918 | elif exp_time > astrometry_uncertainty_mas['deep']['exptime_threshold_s']: 919 | uncertainty_mas = astrometry_uncertainty_mas['deep']['uncertainty_mas'] 920 | 921 | d['sigma_x_mas'] = np.ones(len(d)) * uncertainty_mas 922 | d['sigma_y_mas'] = np.ones(len(d)) * uncertainty_mas 923 | 924 | else: 925 | d['sigma_x_mas'] = np.ones(len(d)) * astrometry_uncertainty_mas 926 | d['sigma_y_mas'] = np.ones(len(d)) * astrometry_uncertainty_mas 927 | 928 | # clean Table 929 | d2 = Table() 930 | for col in d.colnames: 931 | d2[col] = d[col] 932 | 933 | header_keys = ['TELESCOP', 'INSTRUME', 'PA_V3', 'APERTURE', 'DATE-OBS', 'TIME-OBS', 'EXPSTART', 'EXPEND', 'PROPOSID', 'EXPTIME'] 934 | # PA_V3 = 269.998413 / position angle of V3-axis of HST (deg) 935 | for key in header_keys: 936 | d2.meta[key] = primary_header[key] 937 | 938 | spt_header = fits.getheader(spt_file) 939 | for key in 'RA_V1 DEC_V1 POSTNSTX POSTNSTY POSTNSTZ VELOCSTX VELOCSTY VELOCSTZ APER_REF APERTYPE DGESTAR SGESTAR'.split(): 940 | d2.meta[key] = spt_header[key] 941 | # RA_V1 = 9.230892548311E+01 / right ascension of v1 axis of st (deg) 942 | # DEC_V1 = 2.434359995048E+01 / declination of v1 axis of st (deg) 943 | # APER_REF= 'JWFCFIX ' / aperture used for reference position 944 | # APERTYPE= 'SICS ' / aperture type (SICS, SIAS, SIDS) 945 | 946 | d2.meta['FILTER'] = primary_header[header_keyword_filter].strip() 947 | d2.meta['DATAFILE'] = os.path.basename(f) 948 | d2.meta['DATAPATH'] = os.path.dirname(f) 949 | d2.meta['EPOCH'] = epoch_isot 950 | header2_keys = ['RA_APER', 'DEC_APER', 'PA_APER'] 951 | # first_ext_header 952 | # PA_APER = 87.5009 / Position Angle of reference aperture center (de 953 | # RA_APER = 9.223601092740E+01 / RA of aperture reference position 954 | # DEC_APER= 2.441575887367E+01 / Declination of aperture reference position 955 | 956 | # primary header 957 | # RA_TARG = 9.223601092740E+01 / right ascension of the target (deg) (J2000) 958 | for key in header2_keys: 959 | d2.meta[key] = first_ext_header[key] 960 | 961 | # == APER_REF ! 962 | if d2.meta['APERTURE'] == 'WFC-FIX': 963 | d2.meta['SIAFAPER'] = 'JWFCFIX' 964 | elif d2.meta['APERTURE'] == 'UVIS-CENTER': 965 | d2.meta['SIAFAPER'] = 'IUVISCTR' 966 | 967 | d2.meta['PROGRAM_VISIT'] = '{}_{}'.format(d2.meta['PROPOSID'], d2.meta['DATAFILE'][4:6]) 968 | 969 | for j, chip_id in enumerate(dms_chip_numbers): 970 | out_file = os.path.join(standardized_data_dir, 'FPA_data_%s_%s_%s_chip%d_%s_%s_%s.fits' % ( 971 | telescope, instr, aperture, chip_id, filter, epoch_isot, os.path.basename(f).split('.')[0])).replace(':', 972 | '-') 973 | print('Writing {} ({} stars)'.format(out_file, len(chip_indices[j]))) 974 | d2.meta['CHIP'] = chip_id 975 | d2[chip_indices[j]].write(out_file, overwrite=True) 976 | 977 | 978 | def jwst_camera_fpa_data(data_dir, pattern, standardized_data_dir, parameters, 979 | overwrite_source_extraction=False): 980 | """Generate standardized focal plane alignment data based on JWST camera data. 
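
    Parameters
    ----------
    data_dir : str
        Directory that is searched (non-recursively) for input files matching `pattern`.
    pattern : str
        Filename pattern of the JWST cal images to process.
    standardized_data_dir : str
        Directory where extraction products and standardized FPA data files are written.
    parameters : dict
        Configuration dictionary (detection thresholds, ePSF settings, naming tag, ...).
    overwrite_source_extraction : bool
        If True, redo the source extraction even if an extraction file already exists.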
    """

    file_list = glob.glob(os.path.join(data_dir, '*{}'.format(pattern)))

    if len(file_list) == 0:
        raise RuntimeError('No data found')

    print('*' * 100)
    print('Extracting sources from {} JWST files:'.format(len(file_list)))

    for f in file_list:

        pl.close('all')
        print('processing {}'.format(f))

        im = datamodels.open(f)
        if not hasattr(im, 'data'):
            im.data = fits.getdata(f)
            im.dq = np.zeros(im.data.shape)

        header_info = OrderedDict()

        for attribute in 'telescope'.split():
            header_info[attribute] = getattr(im.meta, attribute)

        # observation
        for attribute in 'date time visit_number visit_id visit_group activity_id program_number'.split():
            header_info['observation_{}'.format(attribute)] = getattr(im.meta.observation, attribute)

        header_info['epoch_isot'] = '{}T{}'.format(header_info['observation_date'], header_info['observation_time'])

        # instrument
        for attribute in 'name filter pupil detector'.split():
            header_info['instrument_{}'.format(attribute)] = getattr(im.meta.instrument, attribute)

        # subarray
        for attribute in 'name'.split():
            header_info['subarray_{}'.format(attribute)] = getattr(im.meta.subarray, attribute)

        # aperture
        for attribute in 'name position_angle pps_name'.split():
            try:
                value = getattr(im.meta.aperture, attribute)
            except AttributeError:
                value = None

            header_info['aperture_{}'.format(attribute)] = value

        # temporary solution, this should come from populated aperture attributes
        if header_info['subarray_name'] == 'FULL':
            master_apertures = pysiaf.read.read_siaf_detector_layout()
            if header_info['instrument_name'].lower() in ['niriss', 'miri']:
                header_info['SIAFAPER'] = master_apertures['AperName'][np.where(master_apertures['InstrName'] == header_info['instrument_name'])[0][0]]
            elif header_info['instrument_name'].lower() in ['fgs']:
                header_info['SIAFAPER'] = 'FGS{}_FULL'.format(header_info['instrument_detector'][-1])

        # target
        for attribute in 'ra dec catalog_name proposer_name'.split():
            header_info['target_{}'.format(attribute)] = getattr(im.meta.target, attribute)

        # pointing
        for attribute in 'ra_v1 dec_v1 pa_v3'.split():
            try:
                value = getattr(im.meta.pointing, attribute)
            except AttributeError:
                value = None
            header_info['pointing_{}'.format(attribute)] = value

        # add HST style keywords
        header_info['PROGRAM_VISIT'] = '{}_{}'.format(header_info['observation_program_number'], header_info['observation_visit_id'])
        header_info['PROPOSID'] = header_info['observation_program_number']
        header_info['DATE-OBS'] = header_info['observation_date']
        header_info['TELESCOP'] = header_info['telescope']
        header_info['INSTRUME'] = header_info['instrument_name']
        try:
            header_info['APERTURE'] = header_info['SIAFAPER']
        except KeyError:
            header_info['APERTURE'] = None
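        # a JWST cal image contains a single detector, so unlike the HST ACS/WFC3
        # two-chip case there is no chip distinction to track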
        header_info['CHIP'] = 0

        extracted_sources_dir = os.path.join(standardized_data_dir, 'extraction')
        if not os.path.isdir(extracted_sources_dir):
            os.makedirs(extracted_sources_dir)
        extracted_sources_file = os.path.join(extracted_sources_dir,
                                              '{}_extracted_sources.fits'.format(os.path.basename(f)))

        mask_extreme_slope_values = False
        parameters['maximum_slope_value'] = 1000.

        if (not os.path.isfile(extracted_sources_file)) or overwrite_source_extraction:
            show_figures = False
            data = copy.deepcopy(im.data)
            dq = copy.deepcopy(im.dq)

            if mask_extreme_slope_values:
                # clean up extreme slope values
                bad_index = np.where(np.abs(data) > parameters['maximum_slope_value'])
                data[bad_index] = 0.
                dq[bad_index] = -1

            mean, median, std = sigma_clipped_stats(data, sigma=3.0)

            name_seed = '{}_{}'.format(os.path.basename(f).split('.')[0], parameters['naming_tag'])

            import corner
            if parameters['use_epsf'] is False:
                daofind = DAOStarFinder(fwhm=2.0, threshold=parameters['dao_detection_threshold'] * std)
                dao_extracted_sources = daofind(data - median)
                if dao_extracted_sources is None:
                    dao_extracted_sources = Table()
                else:
                    print('Initial source extraction: {} sources'.format(len(dao_extracted_sources)))
                    if 0:
                        corner_plot_file = os.path.join(extracted_sources_dir,
                                                        '{}_corner_extraction.pdf'.format(os.path.basename(f).split('.')[0]))
                        selected_columns = [col for col in dao_extracted_sources.colnames if col not in 'npix sky'.split()]
                        samples = np.array([dao_extracted_sources[col] for col in selected_columns])
                        fig = corner.corner(samples.T, labels=selected_columns)
                        fig.savefig(corner_plot_file)
                        if show_figures:
                            pl.show()
                    # roundness/sharpness cut; the comparison operators of this line were
                    # lost in this copy of the source, and the form below is an assumed
                    # reconstruction that follows the per-instrument thresholds passed to
                    # DAOStarFinder further down
                    dao_extracted_sources = dao_extracted_sources[
                        (np.abs(dao_extracted_sources['roundness1']) < parameters['roundness_threshold'][header_info['INSTRUME']])
                        & (dao_extracted_sources['sharpness'] > parameters['sharpness_threshold'][header_info['INSTRUME']])]
                    # ... (the isolation cut applied at this point is not included in this copy)
                    print('Sharpness/roundness + isolation cut: {} sources'.format(len(dao_extracted_sources)))

                    if 1:
                        dao_extracted_sources.remove_rows(
                            np.where(dao_extracted_sources['flux'] <= 0)[0])
                        flux_threshold_lower = np.percentile(dao_extracted_sources['flux'], parameters['flux_threshold_percentile_lower'][header_info['INSTRUME']])
                        flux_threshold_upper = np.percentile(dao_extracted_sources['flux'], parameters['flux_threshold_percentile_upper'][header_info['INSTRUME']])
                        dao_extracted_sources.remove_rows(np.where(dao_extracted_sources['flux'] < flux_threshold_lower)[0])
                        dao_extracted_sources.remove_rows(np.where(dao_extracted_sources['flux'] > flux_threshold_upper)[0])
                        print('Only {} sources have positive flux with {:2.3f} > flux > {:2.3f}'.format(len(dao_extracted_sources), flux_threshold_upper, flux_threshold_lower))
            else:
                from photutils import find_peaks
                from photutils.centroids import centroid_2dg

                extracted_sources = find_peaks(data - median, threshold=10. * std, box_size=10,
                                               centroid_func=centroid_2dg, mask=im.dq != 0)
                extracted_sources.rename_column('x_centroid', 'xcentroid')
                extracted_sources.rename_column('y_centroid', 'ycentroid')
                print('Initial source extraction using find_peaks: {} sources'.format(len(extracted_sources)))

            # cutout size in pixels used for ePSF building; the value 25 is an assumption
            # restored from the commented-out alternatives in the source (25 or 15),
            # since the variable is otherwise undefined below
            epsf_psf_size_pix = 25

            # see https://photutils.readthedocs.io/en/stable/epsf.html
            size = epsf_psf_size_pix + 10
            hsize = (size - 1) / 2
            x = dao_extracted_sources['xcentroid']
            y = dao_extracted_sources['ycentroid']
            mask = ((x > hsize) & (x < (data.shape[1] - 1 - hsize)) & (y > hsize) & (y < (data.shape[0] - 1 - hsize)))

            stars_tbl = Table()
            stars_tbl['x'] = x[mask]
            stars_tbl['y'] = y[mask]
            mean_val, median_val, std_val = sigma_clipped_stats(data, sigma=2.)
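            # illustration: with epsf_psf_size_pix = 25 the selection box is size = 35
            # and hsize = 17, i.e. only sources at least 17 pixels from every detector
            # edge are retained for ePSF building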
            print('{}: mean {:2.3f} median {:2.3f} std {:2.3f}'.format(f, mean_val, median_val, std_val))

            from astropy.nddata import NDData
            nddata = NDData(data=data - median_val)

            if 0:
                from astropy.nddata import StdDevUncertainty, NDUncertainty
                std_uncertainty = StdDevUncertainty(data)

                weight_uncertainty = NDUncertainty(data=std_uncertainty.array, uncertainty_type='weights')
                weight_uncertainty.array = 1. / (std_uncertainty.array**2)
                weight_uncertainty.array[np.where(dq.data != 0)] = 0
                weight_uncertainty.uncertainty_type = 'weights'

                nddata = NDData(data=data - median_val, uncertainty=weight_uncertainty)

            if parameters['use_weights_for_epsf']:
                mask = dq != 0  # bool mask is True for bad values
                nddata = NDData(data=data - median_val, mask=mask)

            from photutils.psf import extract_stars
            stars = extract_stars(nddata, stars_tbl, size=epsf_psf_size_pix)

            n_dq_threshold = parameters['discard_stars_based_on_dq'][header_info['INSTRUME']]
            if n_dq_threshold is not None:
                dqs = extract_stars(NDData(data=dq), stars_tbl, size=epsf_psf_size_pix)
                discard_index = []
                # use a distinct name for the per-star DQ cutout (the full-frame array is `dq`)
                for j, dq_cutout in enumerate(dqs):
                    mask_index = np.where(dq_cutout.data != 0)[0]
                    if len(mask_index) > n_dq_threshold:
                        discard_index.append(j)

                print('Removing {} stars with too many bad pixel DQ values'.format(len(discard_index)))
                stars_tbl.remove_rows(discard_index)

                stars = extract_stars(nddata, stars_tbl, size=epsf_psf_size_pix)

            if 0:
                dqs = extract_stars(NDData(data=dq), stars_tbl, size=epsf_psf_size_pix)
                for j, star in enumerate(stars):
                    mask_index = np.where(dqs[j].data != 0)
                    star.weights[mask_index] = 0

            print('Using {} stars to build epsf'.format(len(stars_tbl)))

            import matplotlib.pyplot as plt
            from astropy.visualization import simple_norm
            nrows = 10
            ncols = nrows
            fig, ax = plt.subplots(nrows=nrows, ncols=ncols, figsize=(20, 20), squeeze=True)
            ax = ax.ravel()
            for i in range(nrows * ncols):
                if i <= len(stars) - 1:
                    norm = simple_norm(stars[i], 'log', percent=99.)
1286 | ax[i].imshow(stars[i], norm=norm, origin='lower', cmap='viridis') 1287 | # pl.show() 1288 | pl.title('{} sample stars for epsf'.format(header_info['APERTURE'])) 1289 | psf_plot_file = os.path.join(extracted_sources_dir, 1290 | '{}_sample_psfs.pdf'.format(name_seed)) 1291 | pl.savefig(psf_plot_file) 1292 | 1293 | from photutils import EPSFBuilder 1294 | from photutils.centroids import centroid_com 1295 | 1296 | #################### EPSF BUILDING #################### 1297 | 1298 | # epsf_builder = EPSFBuilder(oversampling=4, maxiters=10, recentering_maxiters=20, recentering_func=centroid_com, progress_bar=True) 1299 | epsf_builder = EPSFBuilder(oversampling=4, progress_bar=True) 1300 | print('Building epsf ...') 1301 | # epsf, fitted_stars = epsf_builder(stars) 1302 | epsf, fitted_stars = epsf_builder(stars) 1303 | 1304 | norm = simple_norm(epsf.data, 'log', percent=99.) 1305 | pl.figure() 1306 | pl.imshow(epsf.data, norm=norm, origin='lower', cmap='viridis') 1307 | pl.colorbar() 1308 | # pl.show() 1309 | filter_string = header_info['instrument_pupil'] if 'F'==header_info['instrument_pupil'][0] else header_info['instrument_filter'] 1310 | pl.title('{} {} epsf using {} stars'.format(header_info['APERTURE'], filter_string, len(stars_tbl))) 1311 | epsf_plot_file = os.path.join(extracted_sources_dir, '{}_epsf.pdf'.format(name_seed)) 1312 | pl.savefig(epsf_plot_file) 1313 | 1314 | from photutils.psf import IntegratedGaussianPRF, DAOGroup 1315 | from photutils.background import MMMBackground, MADStdBackgroundRMS 1316 | from astropy.modeling.fitting import LevMarLSQFitter 1317 | from astropy.stats import gaussian_sigma_to_fwhm 1318 | 1319 | sigma_psf = detection_fwhm 1320 | image = data 1321 | # bkgrms = MADStdBackgroundRMS() 1322 | # std = bkgrms(image) 1323 | # iraffind = IRAFStarFinder(threshold=20 * std, 1324 | # fwhm=sigma_psf * gaussian_sigma_to_fwhm, minsep_fwhm=1.0, 1325 | # roundhi=1.0, roundlo=-1.0, sharplo=0.5, sharphi=2.0, 1326 | # ) 1327 | # peakmax=parameters['maximum_slope_value']) 1328 | 1329 | daofind2 = DAOStarFinder(threshold=detection_threshold, fwhm=detection_fwhm, 1330 | roundhi=parameters['roundness_threshold'][header_info['INSTRUME']], sharplo=parameters['sharpness_threshold'][header_info['INSTRUME']]) 1331 | 1332 | 1333 | # daogroup = DAOGroup(2.0 * sigma_psf * gaussian_sigma_to_fwhm) 1334 | daogroup = DAOGroup(2.0 * detection_fwhm) 1335 | mmm_bkg = MMMBackground() 1336 | # fitter = LevMarLSQFitter() 1337 | # psf_model = IntegratedGaussianPRF(sigma=sigma_psf) 1338 | psf_model = epsf.copy() 1339 | from photutils.psf import IterativelySubtractedPSFPhotometry 1340 | photometry = IterativelySubtractedPSFPhotometry(finder=daofind2, #iraffind, 1341 | group_maker=daogroup, 1342 | bkg_estimator=mmm_bkg, 1343 | psf_model=psf_model, 1344 | fitter=LevMarLSQFitter(), niters=parameters['final_extraction_niters'], 1345 | fitshape=(11, 11), aperture_radius=5,) 1346 | # extra_output_cols=['roundness1', 'sharpness', 'roundness2']) 1347 | 1348 | print('Performing source extraction and photometry ...') 1349 | # data1 = image[0:200, 0:200] 1350 | epsf_extracted_sources = photometry(image=image) 1351 | # result_tab = photometry(image=data1) 1352 | # print(result_tab) 1353 | print('Final source extraction with epsf: {} sources'.format(len(epsf_extracted_sources))) 1354 | epsf_extracted_sources['xcentroid'] = epsf_extracted_sources['x_fit'] 1355 | epsf_extracted_sources['ycentroid'] = epsf_extracted_sources['y_fit'] 1356 | 1357 | extracted_sources = epsf_extracted_sources 1358 | if 0: 1359 
| corner_plot_file = os.path.join(extracted_sources_dir, 1360 | '{}_corner_epsf_extraction.pdf'.format(os.path.basename(f).split('.')[0])) 1361 | selected_columns = [col for col in epsf_extracted_sources.colnames if col not in 'npix sky iter_detected'.split()] 1362 | samples = np.array([epsf_extracted_sources[col] for col in selected_columns]) 1363 | title_string = '{}: {} epsf extracted sources'.format(os.path.basename(f), len(extracted_sources)) 1364 | # fig = pl.figure() 1365 | fig = corner.corner(samples.T, labels=selected_columns) 1366 | # pl.text(0.5, 0.95, title_string, horizontalalignment='center', verticalalignment = 'center', transform = pl.gca().transAxes) 1367 | fig.savefig(corner_plot_file) 1368 | # 1/0 1369 | 1370 | if 0: 1371 | print('Making residual image ...') 1372 | residual_image = photometry.get_residual_image() 1373 | pl.figure() 1374 | plt.subplot(1, 2, 1) 1375 | plt.imshow(image, cmap='viridis', aspect=1, interpolation='nearest') 1376 | plt.title('Simulated data') 1377 | plt.colorbar(orientation='horizontal', fraction=0.046, pad=0.04) 1378 | plt.subplot(1, 2, 2) 1379 | plt.imshow(residual_image, cmap='viridis', aspect=1, interpolation='nearest', 1380 | origin='lower') 1381 | plt.title('Residual Image') 1382 | plt.colorbar(orientation='horizontal', fraction=0.046, pad=0.04) 1383 | plt.show() 1384 | 1/0 1385 | 1386 | extracted_sources.write(extracted_sources_file, overwrite=True) 1387 | 1388 | if parameters['show_extracted_sources']: 1389 | # data[np.where(np.abs(data)>65000)] = np.nan 1390 | data[np.where(np.abs(data)>100)] = 0.1 1391 | # print(extracted_sources) 1392 | 1393 | positions = (extracted_sources['xcentroid'], extracted_sources['ycentroid']) 1394 | apertures = CircularAperture(positions, r=4.) 1395 | norm = ImageNormalize(stretch=SqrtStretch()) 1396 | # norm = ImageNormalize(stretch=LogStretch()) 1397 | extracted_plot_file = os.path.join(extracted_sources_dir, 1398 | '{}_extracted_sources.pdf'.format( 1399 | os.path.basename(f).split('.')[0])) 1400 | 1401 | fig = pl.figure() 1402 | pl.imshow(data, cmap='Greys', origin='lower', norm=norm) 1403 | apertures.plot(color='blue', lw=1.5, alpha=0.5) 1404 | 1405 | epsf_positions = (epsf_extracted_sources['xcentroid'], epsf_extracted_sources['ycentroid']) 1406 | epsf_apertures = RectangularAperture(epsf_positions, w=6, h=6) 1407 | epsf_apertures.plot(color='green', lw=1.5, alpha=0.5) 1408 | 1409 | title_string = '{}: {} selected sources'.format(os.path.basename(f), 1410 | len(extracted_sources)) 1411 | pl.title(title_string) 1412 | if show_figures: 1413 | pl.show() 1414 | # pl.show() 1415 | fig.savefig(extracted_plot_file) 1416 | # 1/0 1417 | else: 1418 | extracted_sources = Table.read(extracted_sources_file) 1419 | 1420 | 1421 | 1422 | print('Extracted {} sources from {}'.format(len(extracted_sources), f)) 1423 | impose_positive_flux = True 1424 | if impose_positive_flux and parameters['use_epsf']: 1425 | extracted_sources.remove_rows(np.where(extracted_sources['flux_fit']<0)[0]) 1426 | print('Only {} sources have positve flux'.format(len(extracted_sources))) 1427 | 1428 | if 0: 1429 | pl.figure() 1430 | pl.hist(extracted_sources['flux_fit'], 100) 1431 | pl.show() 1432 | 1433 | if 0: 1434 | flux_threshold_percentile = 50 1435 | flux_threshold = np.percentile(extracted_sources['flux_fit'], flux_threshold_percentile) 1436 | # extracted_sources.remove_rows(np.where(extracted_sources['flux_fit'] > flux_threshold)[0]) 1437 | extracted_sources.remove_rows(np.where(extracted_sources['flux_fit'] < flux_threshold)[0]) 1438 | 
# print('Only {} sources have positve flux < {:2.3f}'.format(len(extracted_sources), flux_threshold)) 1439 | print('Only {} sources have positve flux > {:2.3f}'.format(len(extracted_sources), flux_threshold)) 1440 | 1441 | # 1/0 1442 | 1443 | 1444 | # data file 1445 | # df = f.replace('.fits', '.%s' % onepass_extension) 1446 | # df = f.replace('.fits', '.%s' % onepass_extension).replace('mast_data', 'onepass_output') 1447 | # d = Table.read(df, format='ascii.basic', names=(list(onepass_extension))) 1448 | 1449 | 1450 | # extract keywords for DVA correction 1451 | # spt_file = f.replace('_flc.fits', '_spt.fits') 1452 | 1453 | 1454 | # d['dms_chip_number'] = np.zeros(len(d)).astype(np.int) 1455 | # onepass_chip_numbers = np.unique(d['k'].data) 1456 | astrometry_uncertainty_mas = 5 1457 | 1458 | if len(extracted_sources) > 0: 1459 | # cal images are in DMS coordinates. These correspond to the SIAF Science (SCI) frame 1460 | extracted_sources['x_SCI'], extracted_sources['y_SCI'] = extracted_sources['xcentroid'], extracted_sources['ycentroid'] 1461 | 1462 | # d['RA_deg'] = d['r'] 1463 | # d['Dec_deg'] = d['d'] 1464 | 1465 | # if type(astrometry_uncertainty_mas) is dict: 1466 | # exp_time = primary_header['EXPTIME'] 1467 | # if exp_time < astrometry_uncertainty_mas['shallow']['exptime_threshold_s']: 1468 | # uncertainty_mas = astrometry_uncertainty_mas['shallow']['uncertainty_mas'] 1469 | # elif exp_time > astrometry_uncertainty_mas['deep']['exptime_threshold_s']: 1470 | # uncertainty_mas = astrometry_uncertainty_mas['deep']['uncertainty_mas'] 1471 | # 1472 | # d['sigma_x_mas'] = np.ones(len(d)) * uncertainty_mas 1473 | # d['sigma_y_mas'] = np.ones(len(d)) * uncertainty_mas 1474 | # 1475 | # else: 1476 | extracted_sources['sigma_x_mas'] = np.ones(len(extracted_sources)) * astrometry_uncertainty_mas 1477 | extracted_sources['sigma_y_mas'] = np.ones(len(extracted_sources)) * astrometry_uncertainty_mas 1478 | 1479 | # transfer info to astropy table header 1480 | for key, value in header_info.items(): 1481 | extracted_sources.meta[key] = value 1482 | 1483 | 1484 | if 0: 1485 | # # clean Table 1486 | # d2 = Table() 1487 | # for col in d.colnames: 1488 | # d2[col] = d[col] 1489 | 1490 | header_keys = ['TELESCOP', 'INSTRUME', 'PA_V3', 'APERTURE', 'DATE-OBS', 'TIME-OBS', 'EXPSTART', 'EXPEND', 'PROPOSID', 'EXPTIME'] 1491 | # PA_V3 = 269.998413 / position angle of V3-axis of HST (deg) 1492 | for key in header_keys: 1493 | d2.meta[key] = primary_header[key] 1494 | 1495 | spt_header = fits.getheader(spt_file) 1496 | for key in 'RA_V1 DEC_V1 POSTNSTX POSTNSTY POSTNSTZ VELOCSTX VELOCSTY VELOCSTZ APER_REF APERTYPE DGESTAR SGESTAR'.split(): 1497 | d2.meta[key] = spt_header[key] 1498 | # RA_V1 = 9.230892548311E+01 / right ascension of v1 axis of st (deg) 1499 | # DEC_V1 = 2.434359995048E+01 / declination of v1 axis of st (deg) 1500 | # APER_REF= 'JWFCFIX ' / aperture used for reference position 1501 | # APERTYPE= 'SICS ' / aperture type (SICS, SIAS, SIDS) 1502 | 1503 | d2.meta['FILTER'] = primary_header[header_keyword_filter].strip() 1504 | extracted_sources.meta['DATAFILE'] = os.path.basename(f) 1505 | extracted_sources.meta['DATAPATH'] = os.path.dirname(f) 1506 | extracted_sources.meta['EPOCH'] = header_info['epoch_isot'] 1507 | 1508 | if 0: 1509 | header2_keys = ['RA_APER', 'DEC_APER', 'PA_APER'] 1510 | # first_ext_header 1511 | # PA_APER = 87.5009 / Position Angle of reference aperture center (de 1512 | # RA_APER = 9.223601092740E+01 / RA of aperture reference position 1513 | # DEC_APER= 2.441575887367E+01 / 
Declination of aperture reference position 1514 | 1515 | # primary header 1516 | # RA_TARG = 9.223601092740E+01 / right ascension of the target (deg) (J2000) 1517 | for key in header2_keys: 1518 | d2.meta[key] = first_ext_header[key] 1519 | 1520 | # == APER_REF ! 1521 | if d2.meta['APERTURE'] == 'WFC-FIX': 1522 | d2.meta['SIAFAPER'] = 'JWFCFIX' 1523 | elif d2.meta['APERTURE'] == 'UVIS-CENTER': 1524 | d2.meta['SIAFAPER'] = 'IUVISCTR' 1525 | 1526 | d2.meta['PROGRAM_VISIT'] = '{}_{}'.format(d2.meta['PROPOSID'], d2.meta['DATAFILE'][4:6]) 1527 | 1528 | out_file = os.path.join(standardized_data_dir, 'FPA_data_{}_{}_{}_{}_{}_{}_{}.fits'.format(extracted_sources.meta['telescope'], 1529 | extracted_sources.meta['instrument_name'], 1530 | extracted_sources.meta['subarray_name'], 1531 | extracted_sources.meta['instrument_filter'], 1532 | extracted_sources.meta['instrument_pupil'], 1533 | extracted_sources.meta['EPOCH'].replace(':','-').replace('.','-'), 1534 | extracted_sources.meta['DATAFILE'].split('.')[0]).replace('/','')) 1535 | print('Writing {}'.format(out_file)) 1536 | with warnings.catch_warnings(): 1537 | warnings.simplefilter('ignore', AstropyWarning, append=True) 1538 | extracted_sources.write(out_file, overwrite=True) 1539 | 1540 | return im 1541 | 1542 | def hst_guider_fpa_data(reduced_data_dir, mast_data_dir, pattern, standardized_data_dir, 1543 | verbose=True): 1544 | """Generate standardized focal plane alignment data based on HST guider data (FGS1, FGS2, FGS3). 1545 | 1546 | Parameters 1547 | ---------- 1548 | reduced_data_dir 1549 | mast_data_dir 1550 | pattern 1551 | standardized_data_dir 1552 | verbose 1553 | 1554 | Returns 1555 | ------- 1556 | 1557 | """ 1558 | file_list = glob.glob(os.path.join(reduced_data_dir, '*{}*'.format(pattern))) 1559 | 1560 | for f in file_list: 1561 | 1562 | d = Table.read(f, format='ascii.basic', delimiter= ',', guess=False, data_start=2) 1563 | if verbose: 1564 | print('Reading Guider data file: {}'.format(f)) 1565 | d.pprint() 1566 | 1567 | # remove entries with `failed` or non-nominal guide-star data 1568 | for colname in 'X_sd Y_sd Xd_sd Yd_sd Xs_sd Ys_sd'.split(): 1569 | bad_index = np.where(d[colname] == '******')[0] 1570 | if len(bad_index) != 0: 1571 | print('Found non-nominal guide-star data. removing {} observations.'.format(len(bad_index))) 1572 | d.remove_rows(bad_index) 1573 | 1574 | # convert column to float because it may have been read as string 1575 | d[colname] = d[colname].astype(np.float) 1576 | 1577 | if len(d) == 0: 1578 | print('No valid FGS data in this file. 
1542 | def hst_guider_fpa_data(reduced_data_dir, mast_data_dir, pattern, standardized_data_dir,
1543 |                         verbose=True):
1544 |     """Generate standardized focal plane alignment data based on HST guider data (FGS1, FGS2, FGS3).
1545 |
1546 |     Parameters
1547 |     ----------
1548 |     reduced_data_dir : str
1549 |     mast_data_dir : str
1550 |     pattern : str
1551 |     standardized_data_dir : str
1552 |     verbose : bool
1553 |
1554 |     Returns
1555 |     -------
1556 |
1557 |     """
1558 |     file_list = glob.glob(os.path.join(reduced_data_dir, '*{}*'.format(pattern)))
1559 |
1560 |     for f in file_list:
1561 |
1562 |         d = Table.read(f, format='ascii.basic', delimiter=',', guess=False, data_start=2)
1563 |         if verbose:
1564 |             print('Reading Guider data file: {}'.format(f))
1565 |             d.pprint()
1566 |
1567 |         # remove entries with `failed` or non-nominal guide-star data
1568 |         for colname in 'X_sd Y_sd Xd_sd Yd_sd Xs_sd Ys_sd'.split():
1569 |             bad_index = np.where(d[colname] == '******')[0]
1570 |             if len(bad_index) != 0:
1571 |                 print('Found non-nominal guide-star data; removing {} observations.'.format(len(bad_index)))
1572 |                 d.remove_rows(bad_index)
1573 |
1574 |             # convert column to float because it may have been read as string
1575 |             d[colname] = d[colname].astype(float)
1576 |
1577 |         if len(d) == 0:
1578 |             print('No valid FGS data in this file. Skipping.\n')
1579 |             continue
1580 |
1581 |         # loop over individual FGS frames
1582 |         for j, obs_id in enumerate(d['OBS_ID'].data):
1583 |
1584 |             # retrieve basic exposure data from MAST
1585 |             mast_data = mast.Observations.query_criteria(obs_id=obs_id.upper(), obstype='cal')
1586 |             download_dir = mast_data_dir
1587 |
1588 |             a1f_file = glob.glob(os.path.join(download_dir, '**/**/FGS/{}{}.fits'.format(obs_id.lower(), '_a1f')))
1589 |             if len(a1f_file) != 1:
1590 |                 raise RuntimeError('Identified no or multiple matches for {}'.format(obs_id))
1591 |             else:
1592 |                 a1f_file = a1f_file[0]
1593 |                 if verbose:
1594 |                     print('Identified _a1f file: {}'.format(a1f_file))
1595 |             a1f_header = fits.getheader(a1f_file)
1596 |
1597 |             # primary instrument
1598 |             instr = a1f_header['PRIMESI'].strip()
1599 |             fgs_number = int(instr.split('FGS')[1])
1600 |
1601 |             af_header = fits.getheader(a1f_file.replace('_a1f', '_a%df' % fgs_number))
1602 |
1603 |             # read DMF header to get RA/Dec of V1
1604 |             dmf_header = fits.getheader(a1f_file.replace('_a1f', '_dmf'))
1605 |
1606 |             # clean Table
1607 |             d2 = Table()
1608 |             for col in d.colnames:
1609 |                 d2[col] = d[col][[j]]
1610 |             for col in mast_data.colnames:
1611 |                 d2[col] = mast_data[col]
1612 |
1613 |
1614 |             d2['RA_deg'] = d2['RA']
1615 |             d2['Dec_deg'] = d2['DEC']
1616 |
1617 |             # nelan email dated 20 October 2017:
1618 |             # The (x,y) values are corrected for geometric distortion, differential velocity aberration, spacecraft jitter, and spacecraft drift.
1619 |             # open question: whether these are Cartesian or polar coordinates
1620 |             d2['x_idl_arcsec'] = d2['X']
1621 |             d2['y_idl_arcsec'] = d2['Y']
1622 |
1623 |             d2['sigma_x_mas'] = np.array(d2['X_sd']) * 1000.
1624 |             d2['sigma_y_mas'] = np.array(d2['Y_sd']) * 1000.  # the original used X_sd here, which looks like a copy-paste slip
1625 |
1626 |             header_keys = ['TELESCOP', 'INSTRUME', 'DATE-OBS', 'TIME-OBS']  # 'PA_V3',
1627 |             for key in header_keys:
1628 |                 d2.meta[key] = af_header[key]
1629 |
1630 |             dmf_keys = 'PA_V3 RA_V1 DEC_V1 DGESTAR SGESTAR'.split()
1631 |             for key in dmf_keys:
1632 |                 d2.meta[key] = dmf_header[key]
1633 |
1634 |             # d2.meta['PA_V3'] = dmf_header['PA_V3']  # position angle of V3-axis of HST (deg); this is at V2,V3=0,0
1635 |             # # see /Users/jsahlmann/jwst/code/github/spacetelescope/mirage/jwst/jwst/lib/set_telescope_pointing.py
1636 |             # # V3APERCE= 326.492096 / V3 offset of target from aper fiducial (arcsec)
1637 |             # d2.meta['RA_V1'] = dmf_header['RA_V1']  # right ascension of v1 axis of st (deg)
1638 |             # d2.meta['DEC_V1'] = dmf_header['DEC_V1']  # declination of v1 axis of st (deg)
1639 |             d2.meta['PA_APER'] = d['PA'][j]
1640 |
1641 |             telescope = af_header['TELESCOP']
1642 |             header_keyword_filter = 'filters'
1643 |             d2.meta['FILTER'] = d2[header_keyword_filter][0]
1644 |             d2.meta['DATAFILE'] = os.path.basename(f)
1645 |
1646 |             epoch_isot = '%sT%s' % (af_header['DATE-OBS'], af_header['TIME-OBS'])
1647 |             d2.meta['EPOCH'] = epoch_isot
1648 |
1649 |             d2.meta['SIAFAPER'] = instr
1650 |             aperture = d2.meta['SIAFAPER']
1651 |             d2.meta['APERTURE'] = aperture
1652 |             filter_name = d2.meta['FILTER']  # renamed from `filter` to avoid shadowing the builtin
1653 |             chip_id = 0
1654 |
1655 |             out_file = os.path.join(standardized_data_dir, 'FPA_data_%s_%s_%s_chip%d_%s_%s_%s.fits' % (
1656 |                 telescope, instr, aperture, chip_id, filter_name, epoch_isot,
1657 |                 os.path.basename(f).split('.')[0])).replace(':', '-')
1658 |             print('Writing %s' % out_file)
1659 |             d2.write(out_file, overwrite=True)
1660 |
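            # The header keywords below are copied into table columns rather than into
            # d2.meta, presumably so that each exposure keeps its own values once the
            # single-row tables are stacked into the SUPER table further down (the table
            # meta can hold only one value per key). The spacecraft position/velocity
            # keywords (POSTNST*, VELOCST*, VELABBRA) mirror the 'extract keywords for
            # DVA correction' note in the camera branch above.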
1661 |             for key in 'PRIMESI PROPOSID'.split():
1662 |                 d2[key] = a1f_header[key]
1663 |             for key in 'FGSOFFV2 FGSOFFV3 FGS_PAV3 FGSREFV2 FGSREFV3 PVEL_AB EXPTIME'.split():
1664 |                 d2[key] = af_header[key]
1665 |
1666 |             # VELABBRA 4.052049 / aberration in position of the target
1667 |             # V2APERCE= 10.000000 / V2 offset of target from aper fiducial (arcsec)
1668 |             # POSTNSTX= 4.755696683602E+03 / position of space telescope x axis (km)
1669 |             # EXPSTART= 57855.30789390 / exposure start time (Modified Julian Date)
1670 |             for key in 'VELOCSTX VELOCSTY VELOCSTZ VELABBRA V2APERCE V3APERCE POSTNSTX POSTNSTY POSTNSTZ EXPSTART EXPEND PA_V3 RA_V1 DEC_V1'.split():
1671 |                 d2[key] = dmf_header[key]
1672 |
1673 |             if j == 0:
1674 |                 d3 = d2.copy()
1675 |             else:
1676 |                 d3 = vstack((d3, d2))
1677 |
1678 |         out_file = os.path.join(standardized_data_dir, 'FPA_data_%s_SUPER%s_%s_chip%d_%s_%s_%s.fits' % (
1679 |             telescope, instr, aperture, chip_id, filter_name, epoch_isot,
1680 |             os.path.basename(f).split('.')[0])).replace(':', '-')
1681 |         d3.meta['PROPOSID'] = d2['PROPOSID'][0]
1682 |         d3.meta['PROGRAM_VISIT'] = '{}_{}'.format(d3.meta['PROPOSID'], d3.meta['DATAFILE'][4:6])
1683 |
1684 |         # Change the name of the instrument to indicate that these data originate from a
1685 |         # combination of FGS observations
1686 |         d3.meta['INSTRUME'] = 'SUPER' + d3.meta['INSTRUME']
1687 |
1688 |         print('Writing %s' % out_file)
1689 |         d3.write(out_file, overwrite=True)
1690 |
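# A minimal usage sketch of the guider branch; the directory paths and the pattern below
# are illustrative assumptions, not values taken from this repository.
if __name__ == '__main__':
    hst_guider_fpa_data(reduced_data_dir='/path/to/reduced_guider_data',
                        mast_data_dir='/path/to/mast_data',
                        pattern='FGS',
                        standardized_data_dir='/path/to/standardized_data',
                        verbose=True)

--------------------------------------------------------------------------------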