├── .bandit.yaml
├── corazon_runner
├── constants.py
├── datatypes.py
├── README.md
├── factory.py
└── utils.py
├── pyproject.toml
├── corazon
├── __init__.py
├── run_list.py
├── plateau.py
├── gen_lightcurve.py
├── run_pipeline.py
├── pipeline.py
└── planetSearch.py
├── MANIFEST.in
├── docs
├── index.rst
├── make.bat
├── Makefile
└── conf.py
├── .readthedocs.yml
├── CONTRIBUTING.md
├── conftest.py
├── .github
└── workflows
│ ├── publish.yml
│ └── ci_workflows.yml
├── setup.py
├── .gitignore
├── tests
└── test_pipeline.py
├── LICENSE.rst
├── README.rst
├── setup.cfg
├── tox.ini
└── CODE_OF_CONDUCT.md
/.bandit.yaml:
--------------------------------------------------------------------------------
1 | exclude_dirs:
2 | - tests
3 |
--------------------------------------------------------------------------------
/corazon_runner/constants.py:
--------------------------------------------------------------------------------
1 | import os
2 |
# Local cache directory for synced data files, resolved against the current
# working directory — so it depends on where the runner is launched from.
LOCAL_DATA_PATH = os.path.join(os.getcwd(), 'local-data')
# Remote data host read from the environment; None when DATA_HOST is unset.
# factory.py passes this to sync_data() in sync-data mode.
DATA_HOST = os.environ.get('DATA_HOST', None)
5 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools",
3 | "setuptools_scm",
4 | "wheel"]
5 | build-backend = "setuptools.build_meta"
6 |
--------------------------------------------------------------------------------
/corazon/__init__.py:
--------------------------------------------------------------------------------
# Licensed under a 3-clause BSD style license - see LICENSE.rst

# Expose the package version written by setuptools_scm
# (setup.py: use_scm_version={'write_to': 'corazon/version.py'}).
try:
    from .version import version as __version__
except ImportError:
    # version.py is generated at build/install time; fall back to an empty
    # string when running from a raw source checkout.
    __version__ = ''
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include README.rst
2 | include LICENSE.rst
3 |
4 | include setup.cfg
5 | include pyproject.toml
6 |
7 | recursive-include docs *
8 |
9 | prune build
10 | prune docs/_build
11 | prune docs/api
12 |
13 | global-exclude *.pyc *.o
14 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | *******
2 | corazon
3 | *******
4 |
5 | ``corazon`` is a Python package that runs
`exovetter <https://github.com/spacetelescope/exovetter>`_ on
7 | TESS lightcurves.
8 |
9 | Reference/API
10 | =============
11 |
12 | .. automodapi:: corazon.pipeline_search_vet
13 |
--------------------------------------------------------------------------------
/corazon_runner/datatypes.py:
--------------------------------------------------------------------------------
1 | import enum
2 | import typing
3 |
4 |
class Mode(enum.Enum):
    """Operating modes for the runner's ``-m/--mode`` option (see factory.py)."""

    SyncData = 'sync-data'              # dispatches to sync_data()
    RunCalculation = 'run-calc'         # dispatches to test_against_tess_data()
    RunConcurrently = 'run-multi-calc'  # dispatches to test_against_tess_data_multi()
9 |
10 |
class TESSLightCurveFile(typing.NamedTuple):
    """One TESS light-curve file for the runner to process.

    NOTE(review): field semantics below are inferred from the names —
    confirm against the consumers in utils.py.
    """

    sector: str        # observing sector (kept as a string here)
    tic: int           # TIC identifier
    rel_filename: str  # filename, presumably relative to rel_path
    rel_path: str      # relative path, presumably under local_dir
    local_dir: str     # local directory holding the data
    option: str        # opaque option string — purpose not visible here
    output_dir: str    # where results are written
19 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | # Read the Docs configuration file
2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
3 | version: 2
4 |
5 | sphinx:
6 | builder: html
7 | configuration: docs/conf.py
8 | fail_on_warning: true
9 |
10 | # Set the version of Python and requirements required to build your docs
11 | python:
12 | version: 3.8
13 | system_packages: true
14 | install:
15 | - method: pip
16 | path: .
17 | extra_requirements:
18 | - docs
19 |
20 | # Don't build any extra formats
21 | formats: []
22 |
--------------------------------------------------------------------------------
/corazon_runner/README.md:
--------------------------------------------------------------------------------
1 | # Running Corazon Runner
2 |
3 | This runner application encapsulates corazon with a commandline interface. The interface boils down manual steps into
4 | easily automatable procedures which can be initialized by bash scripts, or other automations.
5 |
6 | ### How to setup and run the first set of calculations
7 |
8 | ```
9 | PYTHONPATH='.' python corazon_runner/factory.py -i input-files/first-dataset -o output-files/first-dataset -m sync-data
10 | PYTHONPATH='.' python corazon_runner/factory.py -i input-files/first-dataset -o output-files/first-dataset -m run-calc
11 | ```
12 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | Please open a new issue or new pull request for bugs, feedback, or new features you would like to see. If there is an issue you would like to work on, please leave a comment and we will be happy to assist. New contributions and contributors are very welcome!
2 |
3 | New to github or open source projects? If you are unsure about where to start or haven't used github before, please feel free to contact the package maintainers.
4 |
Feedback and feature requests? Is there something missing you would like to see? Please open an issue or send an email to the maintainers. This package follows the Spacetelescope [Code of Conduct](CODE_OF_CONDUCT.md) and strives to provide a welcoming community to all of our users and contributors.
6 |
--------------------------------------------------------------------------------
/conftest.py:
--------------------------------------------------------------------------------
# Project-wide pytest configuration: wires up the astropy test header
# (package versions printed at the start of a run) and turns
# DeprecationWarnings into errors.
try:
    from pytest_astropy_header.display import (PYTEST_HEADER_MODULES,
                                               TESTED_VERSIONS)
except ImportError:
    # Plugin not installed: fall back to plain dicts so the assignments
    # below remain harmless no-ops.
    PYTEST_HEADER_MODULES = {}
    TESTED_VERSIONS = {}

try:
    from corazon import __version__ as version
except ImportError:
    # corazon not importable (e.g. not installed) — still report something.
    version = 'unknown'

# The following line treats all DeprecationWarnings as exceptions.
from astropy.tests.helper import enable_deprecations_as_exceptions
enable_deprecations_as_exceptions()

# Uncomment and customize the following lines to add/remove entries
# from the list of packages for which version numbers are displayed
# when running the tests.
PYTEST_HEADER_MODULES['astropy'] = 'astropy'
PYTEST_HEADER_MODULES.pop('Pandas', None)
PYTEST_HEADER_MODULES.pop('h5py', None)

TESTED_VERSIONS['corazon'] = version
--------------------------------------------------------------------------------
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
1 | name: Release
2 |
3 | on:
4 | release:
5 | types: [released]
6 |
7 | jobs:
8 | build-n-publish:
9 | name: Build and publish Python 🐍 distributions 📦 to PyPI
10 | runs-on: ubuntu-latest
11 | if: github.repository == 'spacetelescope/corazon'
12 |
13 | steps:
14 | - uses: actions/checkout@v2
15 | with:
16 | fetch-depth: 0
17 |
18 | - uses: actions/setup-python@v2
19 | with:
20 | python-version: 3.8
21 |
22 | - name: Install python-build and twine
23 | run: python -m pip install build "twine>=3.3"
24 |
25 | - name: Build package
26 | run: python -m build --sdist --wheel .
27 |
28 | - name: List result
29 | run: ls -l dist
30 |
31 | - name: Check dist
32 | run: python -m twine check --strict dist/*
33 |
34 | - name: Publish distribution 📦 to PyPI
35 | uses: pypa/gh-action-pypi-publish@master
36 | with:
37 | user: __token__
38 | password: ${{ secrets.PYPI_TOKEN }}
39 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Minimal setup.py shim; real configuration lives in setup.cfg/pyproject.toml."""

import sys
from setuptools import setup

TEST_HELP = """
Note: running tests is no longer done using 'python setup.py test'. Instead
you will need to run:

    pip install -e .
    pytest

"""

DOCS_HELP = """
Note: building the documentation is no longer done using
'python setup.py build_docs'. Instead you will need to run:

    cd docs
    make html

"""

# Retired distutils commands mapped to the message explaining their
# modern replacement. Checked in order; first match prints and exits.
_RETIRED_COMMANDS = (
    (('test',), TEST_HELP),
    (('build_docs', 'build_sphinx'), DOCS_HELP),
)

for retired_names, help_message in _RETIRED_COMMANDS:
    if any(name in sys.argv for name in retired_names):
        print(help_message)
        sys.exit(1)

# Note that requires and provides should not be included in the call to
# ``setup``, since these are now deprecated. See this link for more details:
# https://groups.google.com/forum/#!topic/astropy-dev/urYO8ckB2uM

setup(use_scm_version={'write_to': 'corazon/version.py'})
38 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Compiled files
2 | *.py[cod]
3 | *.a
4 | *.o
5 | *.so
6 | __pycache__
7 |
8 | # Ignore .c files by default to avoid including generated code. If you want to
9 | # add a non-generated .c extension, use `git add -f filename.c`.
10 | *.c
11 |
12 | # Other generated files
13 | */version.py
14 | */cython_version.py
15 | htmlcov
16 | .coverage
17 | MANIFEST
18 | .ipynb_checkpoints
19 |
20 | # Sphinx
21 | docs/api
22 | docs/_build
23 |
24 | # Eclipse editor project files
25 | .project
26 | .pydevproject
27 | .settings
28 |
29 | # Pycharm editor project files
30 | .idea
31 |
32 | # Floobits project files
33 | .floo
34 | .flooignore
35 |
36 | # Packages/installer info
37 | *.egg
38 | *.egg-info
39 | dist
40 | build
41 | eggs
42 | parts
43 | bin
44 | var
45 | sdist
46 | develop-eggs
47 | .installed.cfg
48 | distribute-*.tar.gz
49 | pip-wheel-metadata/
50 |
51 | # Other
52 | .cache
53 | .tox
54 | .*.sw[op]
55 | *~
56 | .project
57 | .pydevproject
58 | .settings
59 |
60 | # Mac OSX
61 | .DS_Store
62 |
63 | local-files
64 | input-files
65 | output-files
66 | 38venv
67 |
--------------------------------------------------------------------------------
/corazon/run_list.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | """
4 | Created on Fri Mar 5 16:47:59 2021
5 |
6 | @author: smullally
7 | """
8 | from corazon import run_pipeline
9 | import numpy as np
10 |
11 | filename = "/Users/smullally/Science/tess_false_alarms/keplerTargets/target_selection/rsync_target_lists/qlpFilenames_noebplanets_mag13.txt"
12 | filelist = list(np.loadtxt(filename, dtype=str))
13 |
14 | num = 20
15 |
16 | outdir = "/Users/smullally/Science/tess_false_alarms/vet_results/March122021/"
17 | run_tag = "2021Mar12qlp"
18 |
19 | for file in filelist[0:num]:
20 |
21 | sp = file.split("/")[-1].split("_")
22 | lc_author = sp[1]
23 | if lc_author == "tess-spoc":
24 | sector = int(sp[4].split("-")[1][-3:])
25 | ticid = int(sp[4].split("-")[0])
26 | if lc_author == "qlp":
27 | sector = int(sp[4].split("-")[0][-3:])
28 | ticid = int(sp[4].split("-")[1])
29 |
30 | run_pipeline.run_write_one(ticid, sector, outdir, lc_author=lc_author,
31 | plot=True, run_tag = run_tag)
32 |
33 |
34 | #%%
35 |
36 |
37 |
38 |
--------------------------------------------------------------------------------
/tests/test_pipeline.py:
--------------------------------------------------------------------------------
1 | import corazon.pipeline as pipe
2 | import lightkurve as lk
3 |
4 |
def test_pipeline_1():
    """End-to-end search-and-vet of TIC 377780790 (Kepler-10), sector 14.

    Downloads a real light curve from MAST, so this test needs network
    access.
    """
    ticid = 377780790
    sector = 14

    # Use the `sector` variable rather than repeating the literal 14.
    lcdata = lk.search_lightcurve("Kepler-10", mission='TESS',
                                  sector=sector)[0].download()

    config = pipe.load_def_config()
    vetter_list = pipe.load_def_vetter()
    thresholds = {'snr': 1,
                  'norm_lpp': 2.0,
                  'tp_cover': 0.6,
                  'oe_sigma': 3,
                  'sweet': 3}

    tce_tces, result_strings, metrics_list = pipe.search_and_vet_one(
        ticid, sector, lcdata, config, vetter_list, thresholds, plot=False)

    # The hard pin `assert lk.__version__ == '2.0b5'` was removed: it made
    # the test fail under every other lightkurve release and asserted
    # nothing about corazon itself.
    assert tce_tces[0]['snr'] < 1
27 |
28 |
import corazon.simulate as sim


def test_simulate_1():
    """Simulating N Gaussian-noise light curves yields N results and N TCEs."""
    n_lightcurves = 5
    noise_range = [100, 200]

    # No output file is written (third argument is None).
    results, tces = sim.simulate_gaussian_tces(n_lightcurves, noise_range, None)

    assert len(results) == n_lightcurves
    assert len(tces) == n_lightcurves
--------------------------------------------------------------------------------
/corazon_runner/factory.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import argparse
4 |
5 | from corazon_runner.constants import DATA_HOST
6 | from corazon_runner.datatypes import Mode
7 | from corazon_runner.utils import test_against_tess_data, sync_data, test_against_tess_data_multi
8 |
9 |
def capture_options() -> argparse.Namespace:
    """Parse the command line: output dir, input dir, and run mode (all required)."""
    cli = argparse.ArgumentParser()
    cli.add_argument('-o', '--data-output', required=True,
                     help="Where to store the output data")
    cli.add_argument('-i', '--data-input', required=True,
                     help="Where to find input data")
    # argparse feeds the raw string to Mode(), so invalid modes fail at parse time.
    cli.add_argument('-m', '--mode', type=Mode, required=True)
    return cli.parse_args()
19 |
20 |
def main() -> None:
    """Entry point: parse CLI options and dispatch on the requested mode."""
    options = capture_options()

    # One handler per supported mode; anything else falls through below.
    dispatch = {
        Mode.RunCalculation: lambda: test_against_tess_data(
            options.data_input, options.data_output),
        Mode.SyncData: lambda: sync_data(DATA_HOST, options.data_input),
        Mode.RunConcurrently: lambda: test_against_tess_data_multi(
            options.data_input, options.data_output),
    }

    handler = dispatch.get(options.mode)
    if handler is None:
        raise NotImplementedError(options.mode)
    handler()
34 |
35 |
# Script entry point — see corazon_runner/README.md for invocation examples.
if __name__ == '__main__':
    main()
38 |
--------------------------------------------------------------------------------
/LICENSE.rst:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2021, Association of Universities for Research in Astronomy.
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | * Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | * Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | * Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | corazon
2 | =======
3 | ***a simple BLS exoplanet search pipeline for TESS data***
4 |
5 | .. image:: https://readthedocs.org/projects/corazon/badge/?version=latest
6 | :target: https://corazon.readthedocs.io/en/latest/?badge=latest
7 | :alt: Documentation Status
8 |
9 | .. image:: https://github.com/spacetelescope/corazon/workflows/CI/badge.svg
10 | :target: https://github.com/spacetelescope/corazon/actions
11 | :alt: GitHub Actions CI Status
12 |
13 | .. image:: http://img.shields.io/badge/powered%20by-AstroPy-orange.svg?style=flat
14 | :target: http://www.astropy.org
15 | :alt: Powered by Astropy Badge
16 |
17 | `corazon` is a Python package to run a simple BLS exoplanet search on TESS FFIs.
It retrieves TESS light curves, detrends a light curve,
19 | runs a box-least-squares fit search algorithm and vets the signal using
20 | `exovetter` (https://github.com/spacetelescope/exovetter/).
21 |
This package depends on the not yet released (as of Jan 22, 2021) Lightkurve v2.0 (https://github.com/KeplerGO/lightkurve) to allow it to retrieve high-level-science
products from the MAST. It must be locally installed from git to run corazon.
24 |
25 | To run corazon on one target do the following:
26 |
27 | from corazon import run_pipeline
28 |
29 | ticid = 383724012
30 |
31 | sector = 14
32 |
33 | outdir = "/Users/username/local/directory/to/store/output/"
34 |
35 | run_pipeline.run_write_one(ticid, sector, outdir)
36 |
37 |
If you are getting the data from a local directory, do the following:
local_dir = "/Users/username/local/directory/of/data/files"
40 |
41 | run_pipeline.run_write_one(ticid, sector, outdir, local_dir)
--------------------------------------------------------------------------------
/corazon/plateau.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | import numpy as np
4 |
5 | __version__ = "$Id: plateau.py 18 2019-02-11 15:59:10Z fergalm $"
6 | __URL__ = "$URL: https://svn.code.sf.net/p/greataunttess/code/trunk/plateau.py $"
7 |
8 | """
9 | Find continuous regions in an array above some value
10 |
11 | This code gets called in lots of places, but doesn't really belong in
12 | any of my other modules
13 | """
14 |
def plateau(array, threshold):
    """Find continuous regions of *array* that exceed *threshold*.

    Returns a 2-column array: column 0 holds the indices where the data
    crosses the threshold from below, column 1 the index where it next
    drops back below. A plateau still open at the end of the array is
    closed at ``len(array)``.

    Inputs:
        array (1d numpy array)
        threshold (float or array) A scalar applies to every sample; an
            array must match the length of the first argument, and
            array[i] > threshold[i] marks sample i as above threshold.

    Returns:
        Numpy 2d array with 2 columns, or an empty list when no sample
        exceeds the threshold.

    Notes:
        Plateau lengths are out[:,1] - out[:,0]; the longest plateau is
        np.max(out[:,1] - out[:,0]).

        The algorithm fails when a value equals the threshold exactly,
        so a tiny epsilon is added to keep the floating point comparison
        from landing on exactly zero.
    """
    # Re-centre on the threshold; the epsilon keeps values off exact zero.
    values = array.astype(np.float32) - threshold + 1e-12
    previous = np.roll(values, 1)

    # A negative product between a sample and its predecessor marks a
    # threshold crossing (in either direction).
    crossing = values * previous

    # np.roll wraps the last element into slot 0, so slot 0 is bogus;
    # recompute it by hand from the sign of the first sample.
    crossing[0] = -1 if values[0] > 0 else +1

    edges = np.where(crossing < 0)[0].copy()

    # An odd number of edges means the final plateau runs off the end of
    # the array: close it at len(values).
    if len(edges) % 2 != 0:
        edges.resize((len(edges) + 1))
        edges[-1] = len(values)

    if len(edges) == 0:
        return []
    return edges.reshape((-1, 2))
71 |
--------------------------------------------------------------------------------
/corazon/gen_lightcurve.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | """
4 | Created on Fri May 29 09:18:01 2020
5 |
6 | @author: smullally
7 |
8 | Generate light curves. Each function is a different way to generate a light curve.
9 |
10 | """
11 |
12 |
13 |
14 | import lightkurve as lk
15 |
16 |
def eleanor_pca(ticid, sector, pxsize=19):
    """Return (time, pca_flux, quality) from an eleanor PCA light curve.

    pxsize sets both the height and width of the extraction postage stamp.
    """
    import eleanor

    source = eleanor.Source(tic=ticid, sector=sector)
    target = eleanor.TargetData(source, height=pxsize, width=pxsize,
                                bkg_size=31, do_psf=False, do_pca=True)
    return target.time, target.pca_flux, target.quality
25 |
26 | #Could do a single sector FFI light curve wiih lightkurve
27 |
def eleanor_corr(ticid, sector, pxsize=19):
    """Return (time, corr_flux, quality) from an eleanor corrected light curve.

    pxsize sets both the height and width of the extraction postage stamp.
    """
    import eleanor

    source = eleanor.Source(tic=ticid, sector=sector)
    target = eleanor.TargetData(source, height=pxsize, width=pxsize,
                                bkg_size=31, do_psf=False, do_pca=True)
    return target.time, target.corr_flux, target.quality
36 |
37 |
38 | def hlsp(ticid, sector, author="tess-spoc", local_dir = None):
39 | """
40 |
41 |
42 | Parameters
43 | ----------
44 | ticid : int
45 | DESCRIPTION.
46 | sector : int
47 | Sector of observations to vet
48 | author : string, OPTIONAL
49 | options include tess-spoc and tess-qlp.
50 | The default is "tess-spoc".
51 | loocaldir : string
52 | local directory to read from None: Default
53 |
54 | Returns
55 | -------
56 | lc : lightkurve object
57 | lightkurve object of the data requested.
58 |
59 | """
60 |
61 | #print(f'TIC {ticid}')
62 |
63 | if local_dir is None:
64 |
65 | lc = lk.search_lightcurve(f"TIC {ticid}", sector=sector,
66 | cadence="ffi",author=author).download()
67 | else:
68 |
69 | filename = get_hlsp_filename(ticid, sector, author)
70 |
71 | lc = lk.io.read(local_dir + "/" + filename)
72 |
73 |
74 | return lc
75 |
76 |
def get_hlsp_filename(ticid, sector, author):
    """Build the HLSP light-curve FITS filename for a TIC/sector/author.

    Parameters
    ----------
    ticid : int
        TIC identifier, zero-padded to 16 digits in the filename.
    sector : int
        Observing sector, zero-padded to 4 digits.
    author : str
        "tess-spoc" or "qlp"; the two products use different field orders.

    Returns
    -------
    str
        The HLSP filename (no directory component).

    Raises
    ------
    ValueError
        If ``author`` is not a recognized product. (Previously an unknown
        author surfaced as a confusing UnboundLocalError.)
    """
    if author == "tess-spoc":
        return "hlsp_tess-spoc_tess_phot_%016u-s%04u_tess_v1_lc.fits" % (ticid, sector)
    if author == "qlp":
        return "hlsp_qlp_tess_ffi_s%04u-%016u_tess_v01_llc.fits" % (sector, ticid)
    raise ValueError("Unknown HLSP author: %r (expected 'tess-spoc' or 'qlp')" % (author,))
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | name = corazon
3 | description = Pipeline to run exovetter
4 | long_description = file: README.rst
5 | long_description_content_type = text/x-rst
6 | keywords = astronomy, astrophysics
7 | author = Susan Mullally et al.
8 | author_email = smullally@stsci.edu
9 | license = BSD
10 | license_file = LICENSE.rst
11 | url = https://github.com/spacetelescope/corazon
12 | edit_on_github = False
13 | github_project = spacetelescope/corazon
14 | classifiers =
15 | Intended Audience :: Science/Research
16 | License :: OSI Approved :: BSD License
17 | Operating System :: OS Independent
18 | Programming Language :: Python :: 3
19 | Programming Language :: Python :: Implementation :: CPython
20 | Topic :: Scientific/Engineering :: Astronomy
21 | Topic :: Scientific/Engineering :: Physics
22 |
23 | [options]
24 | packages = find:
25 | zip_safe = False
26 | setup_requires =
27 | setuptools_scm
28 | install_requires =
29 | numpy>=1.17
30 | astropy>=4
31 | python_requires = >=3.7
32 |
33 | [options.extras_require]
34 | all =
35 | matplotlib
36 | lightkurve
37 | test =
38 | pytest-astropy-header
39 | pytest-doctestplus
40 | docs =
41 | sphinx-automodapi
42 | sphinx_rtd_theme
43 |
44 | [tool:pytest]
45 | minversion = 5.0
46 | testpaths = "tests" "docs" "corazon"
47 | norecursedirs = build docs/_build
48 | astropy_header = True
49 | doctest_plus = enabled
50 | filterwarnings =
51 | error
52 | ignore:numpy.ufunc size changed:RuntimeWarning
53 | ignore:Using or importing the ABCs:DeprecationWarning
54 | ignore:the imp module is deprecated:DeprecationWarning
55 | ignore:`LightCurveFile.header` is deprecated
56 | ignore:unclosed file:ResourceWarning
57 |
58 | [flake8]
59 | exclude = .git,__pycache__,build,dist
60 | max-line-length = 125
61 |
62 | [coverage:run]
63 | source = corazon
64 | omit =
65 | corazon/version*
66 | */corazon/version*
67 |
68 | [coverage:report]
69 | exclude_lines =
70 | # Have to re-enable the standard pragma
71 | pragma: no cover
72 | # Don't complain about packages we have installed
73 | except ImportError
74 | # Don't complain if tests don't hit assertions
75 | raise AssertionError
76 | raise NotImplementedError
77 | # Don't complain about script hooks
78 | def main\(.*\):
79 | # Ignore branches that don't pertain to this version of Python
80 | pragma: py{ignore_python_version}
81 | # Don't complain about IPython completion helper
82 | def _ipython_key_completions_
83 |
--------------------------------------------------------------------------------
/.github/workflows/ci_workflows.yml:
--------------------------------------------------------------------------------
1 | # GitHub Actions workflow for testing and continuous integration.
2 | #
3 | # This file performs testing using tox and tox.ini to define and configure the test environments.
4 |
5 | name: CI
6 |
7 | on:
8 | push:
9 | branches:
10 | - master
11 | pull_request:
12 | branches:
13 | - master
14 |
15 | jobs:
16 | # Github Actions supports ubuntu, windows, and macos virtual environments:
17 | # https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners
18 | ci_tests:
19 | name: ${{ matrix.name }}
20 | runs-on: ${{ matrix.os }}
21 | strategy:
22 | matrix:
23 | include:
24 | - name: Code style checks
25 | os: ubuntu-latest
26 | python: 3.x
27 | toxenv: codestyle
28 |
29 | - name: PEP 517
30 | os: ubuntu-latest
31 | python: 3.x
32 | toxenv: pep517
33 |
34 | - name: Security audit
35 | os: ubuntu-latest
36 | python: 3.x
37 | toxenv: securityaudit
38 |
39 | # UNCOMMENT TO ENABLE
40 | #- name: Python 3.7 with oldest supported dependencies
41 | # os: ubuntu-latest
42 | # python: 3.7
43 | # toxenv: py37-test-oldestdeps
44 |
45 | # APPEND -cov TO toxenv FOR COVERAGE
46 | - name: Linux - Python 3.9
47 | os: ubuntu-latest
48 | python: 3.9
49 | toxenv: py39-test
50 |
51 | - name: OS X - Python 3.7
52 | os: macos-latest
53 | python: 3.7
54 | toxenv: py37-test
55 |
56 | - name: Windows - Python 3.8
57 | os: windows-latest
58 | python: 3.8
59 | toxenv: py38-test
60 |
61 | - name: Python 3.9 with latest dev versions of key dependencies
62 | os: ubuntu-latest
63 | python: 3.9
64 | toxenv: py39-test-devdeps
65 |
66 | steps:
67 | - name: Checkout code
68 | uses: actions/checkout@v2
69 | with:
70 | fetch-depth: 0
71 | - name: Set up python ${{ matrix.python }} on ${{ matrix.os }}
72 | uses: actions/setup-python@v2
73 | with:
74 | python-version: ${{ matrix.python }}
75 | - name: Install base dependencies
76 | run: |
77 | python -m pip install --upgrade pip
78 | python -m pip install tox
79 | - name: Test with tox
80 | run: |
81 | tox -e ${{ matrix.toxenv }}
82 | # Activate your repo on codecov.io first.
83 | - name: Upload coverage to codecov
84 | if: "contains(matrix.toxenv, '-cov')"
85 | uses: codecov/codecov-action@v2
86 | with:
87 | file: ./coverage.xml
88 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist =
3 | py{37,38,39}-test{,-oldestdeps,-devdeps}{,-cov}
4 | linkcheck
5 | codestyle
6 | securityaudit
7 | pep517
8 | requires =
9 | setuptools >= 30.3.0
10 | pip >= 19.3.1
11 | isolated_build = true
12 |
13 | [testenv]
14 | # Suppress display of matplotlib plots generated during docs build, if any
15 | setenv = MPLBACKEND=agg
16 |
17 | # Pass through the following environment variables which may be needed for the CI
18 | passenv = HOME WINDIR LC_ALL LC_CTYPE CC CI
19 |
20 | # Run the tests in a temporary directory to make sure that we don't import
21 | # this package from the source tree
22 | changedir = .tmp/{envname}
23 |
24 | # tox environments are constructed with so-called 'factors' (or terms)
25 | # separated by hyphens, e.g. test-devdeps-cov. Lines below starting with factor:
26 | # will only take effect if that factor is included in the environment name. To
27 | # see a list of example environments that can be run, along with a description,
28 | # run:
29 | #
30 | # tox -l -v
31 | #
32 | description =
33 | run tests
34 | oldestdeps: with the oldest supported version of key dependencies
35 | devdeps: with the latest developer version of key dependencies
36 | cov: and test coverage
37 |
38 | # The following provides some specific pinnings for key packages
39 | deps =
40 |
41 | cov: codecov
42 | cov: coverage
43 | cov: pytest-cov
44 |
45 | oldestdeps: numpy==1.17.*
46 | oldestdeps: astropy==4.0.*
47 |
48 | devdeps: git+https://github.com/astropy/astropy.git#egg=astropy
49 |
50 | # The following indicates which extras_require from setup.cfg will be installed
51 | extras =
52 | test
53 |
54 | commands =
55 | pip freeze
56 | !cov: pytest --pyargs {toxinidir}/tests {toxinidir}/docs {posargs}
57 | cov: pytest --pyargs {toxinidir}/tests {toxinidir}/docs --cov corazon --cov-config={toxinidir}/setup.cfg {posargs}
58 | cov: coverage xml -o {toxinidir}/coverage.xml
59 |
60 | [testenv:linkcheck]
61 | changedir = docs
62 | description = check the links in the HTML docs
63 | extras = docs
64 | commands =
65 | pip freeze
66 | sphinx-build -W -b linkcheck . _build/html
67 |
68 | [testenv:codestyle]
69 | skip_install = true
70 | changedir = .
71 | description = check code style with flake8
72 | deps = flake8
73 | commands = flake8 corazon --count
74 |
75 | [testenv:securityaudit]
76 | skip_install = true
77 | changedir = .
78 | description = security audit with bandit
79 | deps = bandit
80 | commands = bandit -r corazon -c .bandit.yaml
81 |
82 | [testenv:pep517]
83 | skip_install = true
84 | changedir = .
85 | description = PEP 517
86 | deps =
87 | build
88 | twine >= 3.3
89 | commands =
90 | python -m build --sdist .
91 | twine check --strict dist/*
92 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Spacetelescope Open Source Code of Conduct
2 |
3 | We expect all "spacetelescope" organization projects to adopt a code of conduct
4 | that ensures a productive, respectful environment for all open source
5 | contributors and participants. We are committed to providing a strong and
6 | enforced code of conduct and expect everyone in our community to follow these
7 | guidelines when interacting with others in all forums. Our goal is to keep ours
8 | a positive, inclusive, successful, and growing community. The community of
9 | participants in open source Astronomy projects is made up of members from
10 | around the globe with a diverse set of skills, personalities, and experiences.
11 | It is through these differences that our community experiences success and
12 | continued growth.
13 |
14 | As members of the community:
15 |
16 | - We pledge to treat all people with respect and provide a harassment-
17 | and bullying-free environment, regardless of sex, sexual orientation and/or
18 | gender identity, disability, physical appearance, body size, race,
19 | nationality, ethnicity, and religion. In particular, sexual language and
20 | imagery, sexist, racist, or otherwise exclusionary jokes are not appropriate.
21 | - We pledge to respect the work of others by recognizing acknowledgment/citation
22 | requests of original authors. As authors, we pledge to be explicit about how
23 | we want our own work to be cited or acknowledged.
24 | - We pledge to welcome those interested in joining the community, and realize
25 | that including people with a variety of opinions and backgrounds will only
26 | serve to enrich our community. In particular, discussions relating to
27 | pros/cons of various technologies, programming languages, and so on are
28 | welcome, but these should be done with respect, taking proactive measures to
29 | ensure that all participants are heard and feel confident that they can
30 | freely express their opinions.
31 | - We pledge to welcome questions and answer them respectfully, paying
32 | particular attention to those new to the community. We pledge to provide
33 | respectful criticisms and feedback in forums, especially in discussion
34 | threads resulting from code contributions.
35 | - We pledge to be conscientious of the perceptions of the wider community and
36 | to respond to criticism respectfully. We will strive to model behaviors that
37 | encourage productive debate and disagreement, both within our community and
38 | where we are criticized. We will treat those outside our community with the
39 | same respect as people within our community.
40 | - We pledge to help the entire community follow the code of conduct, and to not
41 | remain silent when we see violations of the code of conduct. We will take
42 | action when members of our community violate this code, such as
43 | contacting conduct@stsci.edu (all emails sent to this address will be
44 | treated with the strictest confidence) or talking privately with the person.
45 |
46 | This code of conduct applies to all community situations online and offline,
47 | including mailing lists, forums, social media, conferences, meetings,
48 | associated social events, and one-to-one interactions.
49 |
50 | Parts of this code of conduct have been adapted from the Astropy and NumFOCUS
51 | codes of conduct:
52 |
53 | - https://www.astropy.org/code_of_conduct.html
54 | - https://www.numfocus.org/about/code-of-conduct/
55 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
@ECHO OFF

REM Command file for Sphinx documentation

REM Allow the caller to override the sphinx-build executable.
if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set BUILDDIR=_build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
if NOT "%PAPER%" == "" (
	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
)


if "%1" == "" goto help

if "%1" == "help" (
	:help
	echo.Please use `make ^` where ^ is one of
	echo. html to make standalone HTML files
	echo. dirhtml to make HTML files named index.html in directories
	echo. pickle to make pickle files
	echo. json to make JSON files
	echo. htmlhelp to make HTML files and a HTML help project
	echo. qthelp to make HTML files and a qthelp project
	echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
	echo. changes to make an overview over all changed/added/deprecated items
	echo. linkcheck to check all external links for integrity
	echo. doctest to run all doctests embedded in the documentation if enabled
	goto end
)

if "%1" == "clean" (
	for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
	del /q /s %BUILDDIR%\*
	goto end
)

REM NOTE(review): only the html target propagates a build failure via
REM "exit /b 1"; the other builders fall through silently -- confirm whether
REM CI relies on the html exit code only.
if "%1" == "html" (
	%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/html.
	goto end
)

if "%1" == "dirhtml" (
	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
	goto end
)

if "%1" == "pickle" (
	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
	echo.
	echo.Build finished; now you can process the pickle files.
	goto end
)

if "%1" == "json" (
	%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
	echo.
	echo.Build finished; now you can process the JSON files.
	goto end
)

if "%1" == "htmlhelp" (
	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
	echo.
	echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
	goto end
)

if "%1" == "qthelp" (
	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
	echo.
	echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\packagename.qhcp
	echo.To view the help file:
	REM BUG FIX: qcollectiongenerator produces a .qhc collection file, not
	REM .ghc, so the hint below now names the right extension.
	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\packagename.qhc
	goto end
)

if "%1" == "latex" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	echo.
	echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
	goto end
)

if "%1" == "changes" (
	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
	echo.
	echo.The overview file is in %BUILDDIR%/changes.
	goto end
)

if "%1" == "linkcheck" (
	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
	echo.
	echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
	goto end
)

if "%1" == "doctest" (
	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
	echo.
	echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
	goto end
)

:end
--------------------------------------------------------------------------------
/corazon_runner/utils.py:
--------------------------------------------------------------------------------
1 | import glob
2 | import os
3 | import subprocess
4 | import time
5 | import types
6 |
7 | from corazon_runner.constants import LOCAL_DATA_PATH
8 | from corazon_runner.datatypes import TESSLightCurveFile
9 |
10 |
def locate_and_resolve_tess_datums(input_dir: str) -> types.GeneratorType:
    """Scan ``input_dir`` for ``*.txt`` manifest files and yield a
    TESSLightCurveFile for every listed light-curve file present locally.

    Parameters
    ----------
    input_dir : str
        Directory containing manifest ``.txt`` files.  Each non-empty line
        of a manifest is a path, relative to ``LOCAL_DATA_PATH``, to a
        light-curve FITS file.

    Yields
    ------
    TESSLightCurveFile
        Sector, TIC id, paths and pipeline author parsed from the filename.

    Raises
    ------
    NotImplementedError
        If a manifest line is neither a ``hlsp_tess-spoc`` nor a
        ``hlsp_qlp_tess`` product.
    """
    for filepath in glob.glob(f'{input_dir}/*.txt'):
        filename = os.path.basename(filepath)
        # BUG FIX: the original printed the literal text "(unknown)" because
        # the f-string had no placeholder; report the actual file.
        print(f'Loading File: {filename}')
        with open(filepath, 'rb') as stream:
            lines = stream.read().decode('utf-8').split('\n')

        # "prefix_a_b_c.txt" -> "a-b-c"; used to namespace output folders.
        filepath_name = '-'.join(filename.rsplit('.', 1)[0].rsplit('_', 3)[1:])
        for line in lines:
            if line == '':
                continue

            rel_path = os.path.join(LOCAL_DATA_PATH, line.strip('/'))
            if not os.path.exists(rel_path):
                # Skip entries that have not been synced locally yet.
                continue

            rel_filename = os.path.basename(line)
            local_dir = os.path.dirname(rel_path)
            if rel_filename.startswith('hlsp_tess-spoc'):
                # Sector lives in the trailing "-sNNNN_..." chunk, the TIC id
                # in the trailing "_NNNN-..." chunk of the filename.
                hunk, sector = rel_filename.rsplit('-', 1)
                sector, hunk = sector.split('_', 1)
                sector = int(sector.strip('s'))
                tic, hunk = rel_filename.rsplit('-', 1)
                hunk, tic = tic.rsplit('_', 1)
                tic = int(tic)
                output_dir = os.path.join('spoc', filepath_name)
                yield TESSLightCurveFile(sector, tic, rel_filename, rel_path,
                                         local_dir, 'tess-spoc', output_dir)

            elif rel_filename.startswith('hlsp_qlp_tess'):
                sector, hunk = rel_filename.rsplit('-', 1)
                hunk, sector = sector.rsplit('_', 1)
                sector = int(sector.strip('s'))
                hunk, tic = rel_filename.rsplit('-', 1)
                tic, hunk = tic.split('_', 1)
                tic = int(tic)
                output_dir = os.path.join('qlp', filepath_name)
                yield TESSLightCurveFile(sector, tic, rel_filename, rel_path,
                                         local_dir, 'qlp', output_dir)

            else:
                raise NotImplementedError(line)
52 |
def test_against_tess_data(input_dir: str, output_dir: str) -> None:
    """Run the corazon pipeline over every light curve listed in ``input_dir``.

    Parameters
    ----------
    input_dir : str
        Directory of manifest ``.txt`` files (see
        ``locate_and_resolve_tess_datums``).
    output_dir : str
        Fresh directory to write per-TIC results into; must not exist.

    Raises
    ------
    IOError
        If ``output_dir`` already exists.
    """
    # Deferred import: pulling in the pipeline is expensive and only needed
    # when this entry point actually runs.
    from corazon import run_pipeline

    entries = list(locate_and_resolve_tess_datums(input_dir))

    if os.path.exists(output_dir):
        raise IOError(f'Output DIR Exists: {output_dir}')

    os.makedirs(output_dir)

    print(f'Light curves found: {len(entries)}')
    print(f'Output Directory: {output_dir}')
    for idx, entry in enumerate(entries):
        if idx % 100 == 0:
            # Coarse progress indicator for long batch runs.
            print(f'Chunk: {idx}')

        tic_folder = os.path.join(output_dir, entry.output_dir, str(entry.tic))
        os.makedirs(tic_folder, exist_ok=True)

        run_pipeline.run_write_one(entry.tic, entry.sector, tic_folder,
                                   entry.option, entry.local_dir)
84 |
85 |
def run_command(cmd: str) -> None:
    """Run *cmd* through the shell and block until it finishes.

    Exit code 0 succeeds silently; exit code 23 (rsync's "partial transfer")
    is tolerated as a best-effort success; any other non-zero exit raises.

    Parameters
    ----------
    cmd : str
        Full shell command line to execute.

    Raises
    ------
    NotImplementedError
        If the command exits with a non-zero status other than 23.
    """
    # NOTE(review): shell=True is required because callers pass a composed
    # command string; never feed untrusted input into this function.
    proc = subprocess.Popen(cmd, shell=True)
    # wait() replaces the original poll()/sleep busy loop and gives us the
    # exit status exactly once instead of re-polling it.
    returncode = proc.wait()

    if returncode == 23:
        # rsync exits 23 when some files vanished or were unreadable;
        # treat as success, matching the original behavior.
        return

    if returncode > 0:
        raise NotImplementedError(f'Program Error: {returncode}')
97 |
98 |
def sync_data(host: str, input_dir: str) -> None:
    """rsync the files named in every manifest under ``input_dir`` from
    *host* into ``LOCAL_DATA_PATH``.

    Parameters
    ----------
    host : str
        ssh host (or host alias) to pull the data from.
    input_dir : str
        Directory of manifest ``.txt`` files consumed via
        ``rsync --files-from``.
    """
    for filepath in glob.glob(f'{input_dir}/*.txt'):
        # BUG FIX: removed a leftover pdb.set_trace() breakpoint, and pass
        # the command as the plain string run_command expects rather than a
        # one-element list.
        cmd = f'rsync -avp --files-from {filepath} {host}: {LOCAL_DATA_PATH}'
        run_command(cmd)
104 |
def test_against_tess_data_multi(input_dir: str, output_dir: str) -> None:
    """Multi-run variant: enumerate light curves and prepare a fresh output
    directory (the per-entry processing loop is not implemented here).

    Parameters
    ----------
    input_dir : str
        Directory of manifest ``.txt`` files.
    output_dir : str
        Fresh directory for results; must not exist.

    Raises
    ------
    IOError
        If ``output_dir`` already exists.
    """
    entries = list(locate_and_resolve_tess_datums(input_dir))

    if os.path.exists(output_dir):
        # BUG FIX: the original string lacked the f-prefix, so the error
        # message contained the literal text '{output_dir}'.
        raise IOError(f'Output DIR Exists: {output_dir}')

    os.makedirs(output_dir)

    print(f'Light curves found: {len(entries)}')
    print(f'Output Directory: {output_dir}')
125 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
# Makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS    =
SPHINXBUILD   = sphinx-build
PAPER         =
BUILDDIR      = _build

# Internal variables.
PAPEROPT_a4     = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .

.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest

# This is needed with git because git doesn't create a dir if it's empty
$(shell [ -d "_static" ] || mkdir -p _static)

help:
	@echo "Please use \`make ' where is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  singlehtml to make a single large HTML file"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  devhelp    to make HTML files and a Devhelp project"
	@echo "  epub       to make an epub"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
	@echo "  text       to make text files"
	@echo "  man        to make manual pages"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  linkcheck  to check all external links for integrity"

clean:
	-rm -rf $(BUILDDIR)
	-rm -rf api
	-rm -rf generated

html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."

pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."

htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in $(BUILDDIR)/htmlhelp."

qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Astropy.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Astropy.qhc"

devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/Astropy"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Astropy"
	@echo "# devhelp"

epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."

latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	      "(use \`make latexpdf' here to do that automatically)."

latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	make -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

text:
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."

man:
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."

changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."

linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in $(BUILDDIR)/linkcheck/output.txt."

# BUG FIX: the original first @echo ended in a backslash continuation, so
# the second "@echo" became an argument of the first command and the literal
# text "@echo" was printed; the two lines are now independent commands.
doctest:
	@echo "Run 'python setup.py test' in the root directory to run doctests "
	@echo "in the documentation."
--------------------------------------------------------------------------------
/corazon/run_pipeline.py:
--------------------------------------------------------------------------------
1 | import corazon.pipeline as pipeline
2 | from datetime import datetime
3 | import os
4 | from exovetter import vetters
5 | import matplotlib.pyplot as plt
6 | import corazon.gen_lightcurve as genlc
7 | #sys.path[2] = '/Users/smullally/Python_Code/lightkurve/lightkurve'
8 |
9 |
def run_write_one(ticid, sector, out_dir, lc_author = 'qlp',local_dir = None,
                  run_tag = None, config_file = None, plot=False):
    """
    Run the full bls search and vetting for one ticid/sector and write the
    results (log file, per-TCE json files, csv summary) to disk.

    Parameters
    ----------
    ticid : int
        tess input catalog number
    sector : int
        tess sector to search
    out_dir : string
        directory to store all the results. One dir per ticid will be created.
    lc_author : string
        'qlp' or 'tess-spoc'
    lo cal_dir : string
        default is None and then pulls data from MAST API. Otherwise contains
        directory name for the location of the data files.
    run_tag : string, optional
        directory name and string to attach to output file names.
    config_file : string, optional
        not implemented; the default configuration is used regardless.
    plot : bool
        if True, save a diagnostic plot per target.

    Returns
    -------
    None.

    """
    if run_tag is None:
        now = datetime.now()
        run_tag = now.strftime("crz%m%d%Y") + "_" + lc_author

    if config_file is None:
        config = load_def_config()
    else:
        print("Not implemented read in config file")
        # BUG FIX: config was previously left unbound on this branch,
        # causing a NameError later; fall back to the defaults.
        config = load_def_config()

    vetter_list = load_def_vetter()
    thresholds = load_def_thresholds()

    target_dir = "/tic%09is%02i/" % (int(ticid), sector)
    log_name = out_dir + target_dir + "tic%09i-%s.log" % (ticid, run_tag)
    output_file = out_dir + target_dir + "tic%09i-%s-tcesum.csv" % (ticid, run_tag)

    if not os.path.exists(out_dir):
        os.mkdir(out_dir)

    try:
        os.mkdir(out_dir + target_dir)
    except FileExistsError:
        pass
    except PermissionError as e:
        # BUG FIX: the original passed the exception object itself to
        # write(), which raises TypeError; write its string form instead.
        with open(log_name, 'w+') as log_obj:
            log_obj.write("Permission Error on Target Directory ")
            log_obj.write(str(e))

    try:
        lcdata = genlc.hlsp(ticid, sector, author=lc_author, local_dir=local_dir)
        if lc_author == 'qlp':
            # Keep only the QLP quality-flag bits of interest (mask 2237).
            lcdata['quality'] = lcdata['quality'].value & 2237

        tce_list, result_strings, metrics_list = pipeline.search_and_vet_one(
            ticid, sector, lcdata, config, vetter_list, thresholds, plot=plot)

        if plot:
            plotfilename = "tic%09i-%s-plot.png" % (ticid, run_tag)
            plt.savefig(out_dir + target_dir + plotfilename, bbox_inches='tight')
            plt.close()

        # One summary line per TCE.
        with open(output_file, 'w') as output_obj:
            for r in result_strings:
                output_obj.write(r)

        # Write one json file per TCE.
        for tce in tce_list:
            tcefilename = "tic%09i-%02i-%s.json" % (ticid,
                                                    int(tce['event']),
                                                    run_tag)
            full_filename = out_dir + target_dir + tcefilename
            tce['lc_author'] = lc_author
            tce.to_json(full_filename)

        with open(log_name, 'w+') as log_obj:
            log_obj.write("Success.")

    except Exception as e:
        # Best-effort batch semantics: record the failure in the per-target
        # log rather than crashing the whole run.
        with open(log_name, 'w+') as log_obj:
            log_obj.write("Failed to create TCEs for TIC %i for Sector %i \n" % (ticid, sector))
            log_obj.write(str(e))
121 |
def load_def_config():
    """
    Get the default configuration dictionary.

    Returns
    -------
    config : dict
        dictionary of default values that are required to run corazon pipeline
    """
    # NOTE: this intentionally differs from corazon.pipeline.load_def_config
    # (wider detrend window, longer max period, extra 14 hr BLS duration).
    config = {
        "det_window" : 95,    # window used for detrending
        "noise_window" : 19,  # window used for running outlier rejection
        "n_sigma" : 4.5,      # noise/outlier reject sigma
        "max_period_days" : 11,
        "min_period_days" : 0.8,
        "bls_durs_hrs" : [1, 2, 4, 8, 12, 14],
        "minSnr" : [1],
        "maxTces" : 20,
        "fracRemain" : 0.7
    }

    return config
147 |
def load_def_vetter():
    """Return the default list of exovetter vetters to run on each TCE."""
    return [
        vetters.Lpp(),
        vetters.OddEven(),
        vetters.TransitPhaseCoverage(),
        vetters.Sweet(),
    ]
159 |
def load_def_thresholds():
    """
    Load a dictionary of the default threshold values for the vetters.

    Returns
    -------
    thresholds : dict
        Mapping of metric name to its pass/fail threshold value.
    """
    return {
        'snr': 1,
        'norm_lpp': 2.0,
        'tp_cover': 0.6,
        'oe_sigma': 3,
        'sweet': 3,
    }
--------------------------------------------------------------------------------
/corazon/pipeline.py:
--------------------------------------------------------------------------------
1 | __all__ = ['search_and_vet_one', 'vet_tce','vet_all_tces','get_disposition',
2 | 'load_def_config','load_def_vetter']
3 |
4 | import corazon.planetSearch as ps
5 | import corazon.gen_lightcurve as genlc
6 | import matplotlib.pyplot as plt
7 | import exovetter.tce as TCE
8 | import astropy.units as u
9 | import exovetter.const as const
10 | import lightkurve as lk
11 | from exovetter import vetters
12 |
def load_def_config():
    """
    Get the default configuration dictionary.

    Returns
    -------
    config : dict
        dictionary of default values that are required to run corazon pipeline
    """
    # BUG FIX: removed the dead `config = dict()` that was immediately
    # overwritten by the literal below.
    config = {
        "det_window" : 65,    # window used for detrending
        "noise_window" : 27,  # window used for running outlier rejection
        "n_sigma" : 4.5,      # noise reject sigma
        "max_period_days" : 10,
        "min_period_days" : 0.8,
        "bls_durs_hrs" : [1, 2, 4, 8, 12],
        "minSnr" : [1],
        "maxTces" : 20,
        "fracRemain" : 0.7
    }

    return config
38 |
def load_def_vetter():
    """Return the default list of exovetter vetters to run on each TCE."""
    default_vetters = [
        vetters.Lpp(),
        vetters.OddEven(),
        vetters.TransitPhaseCoverage(),
        vetters.Sweet(),
    ]
    return default_vetters
50 |
51 |
def search_and_vet_one(ticid, sector, lcdata, config, vetter_list,
                       thresholds, plot=True):
    """
    Search and vet one ticid using config and vetter list.

    Cleans the time series, runs the BLS search for TCEs, optionally plots,
    then vets every TCE found.

    Parameters
    ----------
    ticid : int
        TIC Identification number
    sector : int
        Sector of the TESS data to use for analysis
    lcdata : lightkurve object
        time and flux and quality flags populated
    config : dict
        configuration dictionary (see `load_def_config` for required keys)
    vetter_list : list
        list of vetters from exovetter to run
    thresholds : dict
        pass/fail threshold values applied to the vetter metrics
    plot : bool
        if True, draw the diagnostic lightcurve/TCE plot

    Returns
    -------
    tce_tces : list
        list of exovetter TCEs for this target
    result_strings : str
        string version of tce and decision

    metrics_list : list
        all metrics, one per tce
    """

    time = lcdata['time'].value
    flux = lcdata['flux'].value
    flags = lcdata['quality'].value

    # Detrend and remove flagged/outlier cadences before searching.
    good_time, meddet_flux = ps.clean_timeseries(time, flux, flags,
                                                 config["det_window"],
                                                 config["noise_window"],
                                                 config["n_sigma"],
                                                 sector)

    # BLS search: returns candidate ephemerides plus per-candidate stats.
    tce_list, stats = ps.identifyTces(good_time, meddet_flux,
                                      bls_durs_hrs=config["bls_durs_hrs"],
                                      minSnr=config["minSnr"],
                                      fracRemain=config["fracRemain"],
                                      maxTces=config["maxTces"],
                                      minP=config["min_period_days"],
                                      maxP=config["max_period_days"])

    if plot:
        plot_lc_tce(ticid, tce_list, time, flux, flags, good_time,
                    meddet_flux, stats, sector)

    # Rebuild a lightkurve object from the cleaned series; +1 restores a
    # ~unity baseline for the median-detrended flux.
    lcformat = lcdata['time'].format
    tce_lc = lk.LightCurve(time=good_time, flux=meddet_flux+1,
                           time_format=lcformat, meta={'sector':sector})

    result_strings, disp, reason, metrics_list, tce_tces = vet_all_tces(tce_lc,
                                                    tce_list, ticid,
                                                    vetter_list, thresholds,
                                                    plot=False)

    return tce_tces, result_strings, metrics_list
116 |
117 |
def vet_tce(tce, tce_lc, vetter_list, plot=False):
    """Run every vetter in *vetter_list* against a single TCE.

    A ValueError raised by an individual vetter is ignored so that one bad
    metric does not abort the rest; the vetter's instance attributes are
    still merged into the combined metrics dictionary.

    Returns
    -------
    metrics : dict
        Union of all vetters' attribute dictionaries.
    """
    metrics = {}
    for vetter in vetter_list:
        try:
            vetter.run(tce, tce_lc)
        except ValueError:
            # Best-effort: skip vetters that cannot handle this TCE.
            pass
        if plot:
            vetter.plot()
        metrics.update(vetter.__dict__)

    return metrics
133 |
def get_disposition(metrics, thresholds):
    """Apply the threshold dictionary to one TCE's metrics.

    Returns
    -------
    disp : str
        'PASS' if every metric is within threshold, otherwise 'FAIL'.
    reason : str
        Concatenated tags naming each threshold that failed ('' on PASS).
    """
    failures = []

    if metrics['snr'] < thresholds['snr']:
        failures.append("-LowSNR-")
    if metrics['norm_lpp'] > thresholds['norm_lpp']:
        failures.append("-NormLPP-")
    if metrics['tp_cover'] < thresholds['tp_cover']:
        failures.append("-PoorTransitCoverage-")
    if metrics['oe_sigma'] > thresholds['oe_sigma']:
        failures.append("-OddEvenDetected-")

    # SWEET amplitudes: rows are half/at/twice the detected period; the
    # last column is the significance compared against one threshold.
    sweet_amp = metrics['sweet']['amp']
    if sweet_amp[0, -1] > thresholds['sweet']:
        failures.append("-SWEETHalfPeriod")
    if sweet_amp[1, -1] > thresholds['sweet']:
        failures.append("-SWEETAtPeriod")
    if sweet_amp[2, -1] > thresholds['sweet']:
        failures.append("-SWEETTwicePeriod-")

    disp = 'FAIL' if failures else 'PASS'
    return disp, ''.join(failures)
163 |
def make_result_string(tce, disposition, reason):
    """
    Create a one-line CSV summary of a TCE and its disposition.

    Parameters
    ----------
    tce : exovetter TCE (dict-like)
        Must provide target, event, sector, period, epoch, depth, duration
        (quantities with a .value attribute) and snr entries.
    disposition : string
        'PASS' or 'FAIL'.
    reason : string
        Concatenated failure tags ('' when the TCE passed).

    Returns
    -------
    st : string
        Comma-separated summary line, newline terminated.  Depth is
        reported in ppm and duration in hours.
    """
    fields = (tce['target'], tce['event'],
              tce['sector'],
              tce['period'].value,
              tce['epoch'].value,
              tce['depth'].value*1e6,
              tce['duration'].value*24.0,
              tce['snr'],
              disposition, reason)
    st = "%s, %s, %i, %8.4f, %9.4f, %8.3f, %5.3f, %5.2f, %s, %s\n" % fields
    return st
191 |
192 |
def vet_all_tces(lc, tce_dict_list, ticid, vetter_list, thresholds, plot=False):
    """Build an exovetter TCE for each BLS candidate, vet it, and collect
    dispositions, reasons, summary strings and metrics.

    Parameters
    ----------
    lc : lightkurve LightCurve
        Cleaned lightcurve; its time format selects the epoch offset.
    tce_dict_list : list
        BLS candidates; each item is indexed as
        [period_days, epoch, depth_frac, duration_days, snr].
    ticid : int
        TIC Identification number.
    vetter_list : list
        Vetters from exovetter to run on each TCE.
    thresholds : dict
        Pass/fail threshold values for the metrics.
    plot : bool
        Forwarded to each vetter's plot step.

    Returns
    -------
    (result_list, disp_list, reason_list, metrics_list, tce_list) : tuple of lists
        One entry per candidate, in input order.
    """
    lcformat = lc['time'].format
    disp_list = []
    reason_list = []
    result_list = []
    metrics_list = []
    tce_list = []
    pn = 1  # planet-candidate number, 1-based, used as the event label
    for item in tce_dict_list:
        tce = TCE.Tce(period = item[0]*u.day, epoch=item[1]*u.day,
                      depth=item[2] * const.frac_amp,
                      duration=item[3]*u.day,
                      epoch_offset=const.string_to_offset[lcformat],
                      snr=item[4],
                      target = f"TIC {ticid}",
                      sector = lc.sector,
                      event = f"{pn}")

        metrics = vet_tce(tce, lc, vetter_list, plot=plot)
        # snr is produced by the search, not the vetters; carry it along so
        # get_disposition can threshold on it.
        metrics['snr'] = tce['snr']
        disposition, reason = get_disposition(metrics, thresholds)
        result_string = make_result_string(tce, disposition, reason)
        tce['disposition'] = disposition
        tce['reason'] = reason
        tce_list.append(tce)
        disp_list.append(disposition)
        reason_list.append(reason)
        result_list.append(result_string)
        metrics_list.append(metrics)
        pn=pn+1

    return result_list, disp_list, reason_list, metrics_list, tce_list
226 |
227 |
def plot_lc_tce(ticid, tce_list, time, flux, flags, good_time,
                good_flux, stats, sector):
    """Draw a two-panel diagnostic plot: the detrended lightcurve with
    vertical lines at each TCE's transit times (top), and the raw
    lightcurve with flagged cadences highlighted (bottom).

    Parameters
    ----------
    ticid, sector : int
        Used only for the plot title.
    tce_list : list
        Unused here; kept for interface symmetry with the caller.
    time, flux, flags : array-like
        Raw lightcurve and its quality flags.
    good_time, good_flux : array-like
        Cleaned/detrended lightcurve.
    stats : list of dict
        One dict per TCE; 'transit_times' marks transits on the top panel.
    """
    # One color per TCE; NOTE(review): more than 7 TCEs would exhaust this
    # list and raise IndexError — confirm maxTces keeps us below that.
    col = ['tab:orange','tab:green','tab:purple','tab:brown',
           'gold','magenta','lightpink']
    plt.figure(figsize=(10,6))
    plt.subplot(211)
    plt.plot(good_time, good_flux,'.')
    plt.title("Lightcurve for TIC %i in S%i" % (int(ticid), int(sector)))

    axes = plt.gca()
    y_min, y_max = axes.get_ylim()
    x_min, x_max = axes.get_xlim()
    for n,s in enumerate(stats):
        # Mark each TCE's transit times as labelled vertical lines.
        plt.vlines(stats[n]['transit_times'], y_min, y_max,
                   colors=col[n], zorder=1, label=str(n+1))
    plt.legend()
    plt.subplot(212)
    plt.plot(time, flux,'.', label="original lc")
    plt.plot(time[flags!=0], flux[flags!=0],'o', ms=3, label='flagged')
    plt.legend()
    # Keep the raw panel on the same time axis as the detrended panel.
    plt.xlim(x_min, x_max)
249 |
def open_output_file(filename, headerlist, thresholds):
    """Open *filename* for appending and write the thresholds comment line
    followed by the column-header line.

    Parameters
    ----------
    filename : str
        Path of the csv file to append to.
    headerlist : list of str
        Column names; must be non-empty.
    thresholds : dict
        Threshold values, recorded as a '#'-prefixed comment row.

    Returns
    -------
    fobj : file object
        The open file handle; the caller is responsible for closing it.
    """
    fobj = open(filename, 'a')

    fobj.write("# thresholds: " + str(thresholds) + "\n")

    # Join columns with ", "; indexing headerlist[0] deliberately raises on
    # an empty header list.
    header = headerlist[0]
    for column in headerlist[1:]:
        header += ", " + column

    fobj.write(header + '\n')

    return fobj
262 |
263 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # STSCI documentation build configuration file, created by
4 | # sphinx-quickstart on Thu Oct 22 17:25:41 2015.
5 | #
6 | # This file is execfile()d with the current directory set to its
7 | # containing dir.
8 | #
9 | # Note that not all possible configuration values are present in this
10 | # autogenerated file.
11 | #
12 | # All configuration values have a default; values that are commented out
13 | # serve to show the default.
14 |
15 | import datetime
16 | import os
17 | from configparser import ConfigParser
18 | from pkg_resources import get_distribution
19 |
20 | # -- General configuration ------------------------------------------------
21 | conf = ConfigParser()
22 | conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')])
23 | setup_cfg = dict(conf.items('metadata'))
24 |
25 | # Configuration for intersphinx: refer to the Python standard library.
26 | # Uncomment if you cross-ref to API doc from other packages.
27 | intersphinx_mapping = {
28 | 'python': ('https://docs.python.org/3/',
29 | (None, 'http://data.astropy.org/intersphinx/python3.inv')),
30 | 'numpy': ('https://numpy.org/doc/stable/',
31 | (None, 'http://data.astropy.org/intersphinx/numpy.inv')),
32 | 'astropy': ('https://docs.astropy.org/en/stable/', None)}
33 |
34 | # Add any Sphinx extension module names here, as strings. They can be
35 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
36 | # ones.
37 | extensions = [
38 | 'sphinx.ext.autodoc',
39 | 'sphinx.ext.intersphinx',
40 | 'sphinx.ext.todo',
41 | 'sphinx.ext.inheritance_diagram',
42 | 'sphinx.ext.viewcode',
43 | 'sphinx.ext.napoleon',
44 | 'sphinx_automodapi.automodapi',
45 | 'sphinx.ext.mathjax']
46 |
47 | # Add any paths that contain templates here, relative to this directory.
48 | # templates_path = ['_templates']
49 |
50 | # The suffix of source filenames.
51 | source_suffix = '.rst'
52 |
53 | # The encoding of source files.
54 | # source_encoding = 'utf-8-sig'
55 |
56 | # The master toctree document.
57 | master_doc = 'index'
58 |
59 | # General information about the project
60 | project = setup_cfg['name']
61 | author = setup_cfg['author']
62 | year = datetime.datetime.now().year
63 | copyright = f'{year}, {author}'
64 |
65 | # The version info for the project you're documenting, acts as replacement for
66 | # |version| and |release|, also used in various other places throughout the
67 | # build documents.
68 | #
69 | # The full version, including alpha/beta/rc tags.
70 | release = get_distribution(project).version
71 | # The short X.Y version.
72 | version = '.'.join(release.split('.')[:2])
73 |
74 | # The language for content autogenerated by Sphinx. Refer to documentation
75 | # for a list of supported languages.
76 | # language = None
77 |
78 | # There are two options for replacing |today|: either, you set today to some
79 | # non-false value, then it is used:
80 | # today = ''
81 | # Else, today_fmt is used as the format for a strftime call.
82 | # today_fmt = '%B %d, %Y'
83 |
84 | # List of patterns, relative to source directory, that match files and
85 | # directories to ignore when looking for source files.
86 | exclude_patterns = ['_build']
87 |
88 | # The reST default role (used for this markup: `text`) to use for all
89 | # documents.
90 | default_role = 'obj'
91 |
92 | # Don't show summaries of the members in each class along with the
93 | # class' docstring
94 | numpydoc_show_class_members = False
95 |
96 | autosummary_generate = True
97 |
98 | automodapi_toctreedirnm = 'api'
99 |
100 | # Class documentation should contain *both* the class docstring and
101 | # the __init__ docstring
102 | autoclass_content = "both"
103 |
104 | # Render inheritance diagrams in SVG
105 | graphviz_output_format = "svg"
106 |
107 | graphviz_dot_args = [
108 | '-Nfontsize=10',
109 | '-Nfontname=Helvetica Neue, Helvetica, Arial, sans-serif',
110 | '-Efontsize=10',
111 | '-Efontname=Helvetica Neue, Helvetica, Arial, sans-serif',
112 | '-Gfontsize=10',
113 | '-Gfontname=Helvetica Neue, Helvetica, Arial, sans-serif'
114 | ]
115 |
116 | # If true, '()' will be appended to :func: etc. cross-reference text.
117 | # add_function_parentheses = True
118 |
119 | # If true, the current module name will be prepended to all description
120 | # unit titles (such as .. function::).
121 | # add_module_names = True
122 |
123 | # If true, sectionauthor and moduleauthor directives will be shown in the
124 | # output. They are ignored by default.
125 | # show_authors = False
126 |
127 | # The name of the Pygments (syntax highlighting) style to use.
128 | pygments_style = 'sphinx'
129 |
130 | # A list of ignored prefixes for module index sorting.
131 | # modindex_common_prefix = []
132 |
133 | # If true, keep warnings as "system message" paragraphs in the built documents.
134 | # keep_warnings = False
135 |
136 |
137 | # -- Options for HTML output ----------------------------------------------
138 |
139 | # The theme to use for HTML and HTML Help pages. See the documentation for
140 | # a list of builtin themes.
141 | # html_theme = 'sphinx_rtd_theme'
142 |
143 | # The name for this set of Sphinx documents. If None, it defaults to
144 | # " v documentation".
145 | # html_title = None
146 |
147 | # A shorter title for the navigation bar. Default is the same as html_title.
148 | # html_short_title = None
149 |
150 | # The name of an image file (within the static path) to use as favicon of the
151 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
152 | # pixels large.
153 | # html_favicon = None
154 |
155 | # Add any extra paths that contain custom files (such as robots.txt or
156 | # .htaccess) here, relative to this directory. These files are copied
157 | # directly to the root of the documentation.
158 | # html_extra_path = []
159 |
160 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
161 | # using the given strftime format.
162 | html_last_updated_fmt = '%b %d, %Y'
163 |
164 | # If true, SmartyPants will be used to convert quotes and dashes to
165 | # typographically correct entities.
166 | # html_use_smartypants = True
167 |
168 | # Custom sidebar templates, maps document names to template names.
169 | html_sidebars = {'**': ['globaltoc.html', 'relations.html', 'searchbox.html']}
170 |
171 | # Additional templates that should be rendered to pages, maps page names to
172 | # template names.
173 | # html_additional_pages = {}
174 |
175 | # If false, no module index is generated.
176 | html_domain_indices = True
177 |
178 | # If false, no index is generated.
179 | html_use_index = True
180 |
181 | # If true, the index is split into individual pages for each letter.
182 | # html_split_index = False
183 |
184 | # If true, links to the reST sources are added to the pages.
185 | # html_show_sourcelink = True
186 |
187 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
188 | # html_show_sphinx = True
189 |
190 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
191 | # html_show_copyright = True
192 |
193 | # If true, an OpenSearch description file will be output, and all pages will
194 | # contain a tag referring to it. The value of this option must be the
195 | # base URL from which the finished HTML is served.
196 | # html_use_opensearch = ''
197 |
198 | # This is the file name suffix for HTML files (e.g. ".xhtml").
199 | # html_file_suffix = None
200 |
201 | # Output file base name for HTML help builder.
202 | htmlhelp_basename = f'{project}doc'
203 |
204 | # -- Options for LaTeX output ---------------------------------------------
205 |
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt'). Standard LaTeX document
    # classes accept only these three values; the previous '14pt' was
    # silently ignored by LaTeX (falling back to 10pt).
    'pointsize': '12pt',
    # Additional stuff for the LaTeX preamble.
    'preamble': r'''\usepackage{enumitem} \setlistdepth{99}'''
}
214 |
215 | # Grouping the document tree into LaTeX files. List of tuples
216 | # (source start file, target name, title,
217 | # author, documentclass [howto, manual, or own class]).
218 | latex_documents = [
219 | ('index', f'{project}.tex', f'{project} Documentation',
220 | f'{project}', 'manual'),
221 | ]
222 |
223 | # The name of an image file (relative to this directory) to place at the top of
224 | # the title page.
225 |
226 | # For "manual" documents, if this is true, then toplevel headings are parts,
227 | # not chapters.
228 | # latex_use_parts = False
229 |
230 | # If true, show page references after internal links.
231 | # latex_show_pagerefs = False
232 |
# Show URL addresses after external links. Sphinx accepts only
# 'no', 'footnote', or 'inline' here; the previous value 'True'
# is not a recognised setting.
latex_show_urls = 'footnote'
235 |
236 | # Documents to append as an appendix to all manuals.
237 | # latex_appendices = []
238 |
239 | # If false, no module index is generated.
240 | latex_domain_indices = True
241 |
242 | # -- Options for manual page output ---------------------------------------
243 |
244 | # One entry per manual page. List of tuples
245 | # (source start file, name, description, authors, manual section).
246 | man_pages = [
247 | ('index', f'{project}', f'{project} Documentation',
248 | [f'{project}'], 1)
249 | ]
250 |
251 | # If true, show URL addresses after external links.
252 | man_show_urls = True
253 |
254 | # -- Options for Texinfo output -------------------------------------------
255 |
256 | # Grouping the document tree into Texinfo files. List of tuples
257 | # (source start file, target name, title, author,
258 | # dir menu entry, description, category)
259 | texinfo_documents = [
260 | ('index', f'{project}', f'{project} Documentation',
261 | f'{author}', f'{project}', f'{project}',
262 | 'Miscellaneous'),
263 | ]
264 |
265 | # Documents to append as an appendix to all manuals.
266 | # texinfo_appendices = []
267 |
268 | # If false, no module index is generated.
269 | texinfo_domain_indices = True
270 |
271 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
272 | texinfo_show_urls = 'inline'
273 |
274 | # If true, do not generate a @detailmenu in the "Top" node's menu.
275 | # texinfo_no_detailmenu = False
276 |
277 | # -- Options for Epub output ----------------------------------------------
278 |
279 | # Bibliographic Dublin Core info.
280 | epub_title = f'{project}'
281 | epub_author = f'{author}'
282 | epub_publisher = f'{author}'
283 | epub_copyright = f'{year} {author}'
284 |
285 | # The basename for the epub file. It defaults to the project name.
286 | # epub_basename = u'wfc3tools'
287 |
288 | # The HTML theme for the epub output. Since the default themes are not
289 | # optimized for small screen space, using the same theme for HTML and
290 | # epub output is usually not wise. This defaults to 'epub', a theme designed
291 | # to save visual space.
292 | epub_theme = 'epub'
293 |
294 | # The language of the text. It defaults to the language option
295 | # or en if the language is not set.
296 | # epub_language = ''
297 |
298 | # The scheme of the identifier. Typical schemes are ISBN or URL.
299 | # epub_scheme = ''
300 |
301 | # The unique identifier of the text. This can be a ISBN number
302 | # or the project homepage.
303 | # epub_identifier = ''
304 |
305 | # A unique identification for the text.
306 | # epub_uid = ''
307 |
308 | # A tuple containing the cover image and cover page html template filenames.
309 | # epub_cover = ()
310 |
311 | # A sequence of (type, uri, title) tuples for the guide element of content.opf.
312 | # epub_guide = ()
313 |
314 | # HTML files that should be inserted before the pages created by sphinx.
315 | # The format is a list of tuples containing the path and title.
316 | # epub_pre_files = []
317 |
318 | # HTML files that should be inserted after the pages created by sphinx.
319 | # The format is a list of tuples containing the path and title.
320 | # epub_post_files = []
321 |
322 | # A list of files that should not be packed into the epub file.
323 | epub_exclude_files = ['search.html']
324 |
325 | # The depth of the table of contents in toc.ncx.
326 | # epub_tocdepth = 3
327 |
328 | # Allow duplicate toc entries.
329 | # epub_tocdup = True
330 |
331 | # Choose between 'default' and 'includehidden'.
332 | # epub_tocscope = 'default'
333 |
334 | # Fix unsupported image types using the PIL.
335 | # epub_fix_images = False
336 |
337 | # Scale large images.
338 | # epub_max_image_width = 0
339 |
340 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
341 | # epub_show_urls = 'inline'
342 |
343 | # If false, no index is generated.
344 | # epub_use_index = True
345 |
--------------------------------------------------------------------------------
/corazon/planetSearch.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | """
4 | Created on Sat Feb 9 16:47:22 2019
5 |
6 | @author: smullally
7 | """
8 |
9 | from astropy.timeseries import BoxLeastSquares
10 | import numpy as np
11 | from astropy.convolution import convolve, Box1DKernel
12 | from corazon import plateau
13 |
14 | #import matplotlib.pyplot as plt
15 |
16 |
def clean_timeseries(time, flux, qflags, det_window, noise_window, n_sigma, sector):
    """Clean a light curve for transit searching.

    Drops quality-flagged, sector-gapped and noisy cadences, median
    detrends the remainder, removes residual high-scatter stretches,
    and finally rejects isolated (non-periodic) single-point outliers.

    Inputs
    ---------
    time, flux
        (1d np arrays) cadence times (TJD) and flux values.
    qflags
        (1d np array) quality flags; nonzero marks a bad cadence.
    det_window
        (int) window, in cadences, for the median detrend.
    noise_window
        (int) window used when identifying noisy data.
    n_sigma
        (float) clipping threshold for the noisy-data identification.
    sector
        (int) TESS sector; selects the hand-curated gap list.

    Returns
    -----------
    (time, flux) tuple of 1d np arrays with the bad cadences removed.
    """
    quality_bad = qflags != 0
    sector_gaps = loadGapInfoBySector(time, sector)
    noisy = idNoisyData(flux, noise_window, Nsigma=n_sigma)

    rejected = noisy | quality_bad | sector_gaps
    detrended = median_detrend(flux[~rejected], det_window)
    kept_time = time[~rejected]

    # Flag remaining stretches (window of 70 cadences, roughly a day)
    # whose scatter is well above the typical level.
    scatter_bad, _med_std = running_std_gap(detrended, 70, N=2, nSigTimes=5)

    trimmed_time = kept_time[~scatter_bad]
    trimmed_flux = detrended[~scatter_bad]

    # Final pass: reject single-point outliers that are not periodic
    # (periodic ones may be short-duration transits).
    outlier_mask = findOutliers(trimmed_time, trimmed_flux, gap=None,
                                threshold_sigma=3.0,
                                precision_days=0.02085,
                                maxClusterLen=3)

    return trimmed_time[~outlier_mask], trimmed_flux[~outlier_mask]
49 |
def loadGapInfoBySector(time, sector):
    """Return a boolean mask flagging known-bad cadences for a TESS sector.

    TESS produces quality flags, but does not populate the FFIs with
    them, so the bad time ranges below were transcribed from the data
    release notes and modified by hand based on inspection of Wasp 126.

    Inputs
    ---------
    time
        (1d np array) Array of TJDs for the data. See `extractlc.loadSingleSector`.
    sector
        (int)

    Returns
    -----------
    1d boolean np array of length time
    """
    inf = np.inf
    # Sector -> list of closed [start, end] TJD intervals to exclude.
    # +/-inf marks an open-ended cut (before/after science data).
    bad_spans = {
        1: [(-inf, 1325.61),           # See p2 of sector 1 DRN
            (1338.52153, 1339.65310),  # Inter orbit gap
            (1346.95, 1349.75),        # See DRN 1 p3
            (1352.92, inf)],           # End of sector usually bad
        2: [(1367.15347, 1368.59406)],
        3: [(1381.1, 1385.89663),      # Pre "science start"
            (1394.47997, 1395.80497),  # Appears to be bad??
            (1395.47997, 1396.60497),  # Inter orbit gap
            (1406.2, 1409.38829)],     # Post science
        4: [(1410.89974, 1413.26468),  # Bad guide star (may be usable)
            (1418.53691, 1424.54897),  # Instr. anomaly + inter orbit gap
            (1436.0, 1439.8)],         # Not sure what this is
        5: [(1450.01, 1451.81),        # Inter orbit gap
            (1463.55, 1464.40056)],    # Camera 1 guiding
        6: [(1477.0, 1478.41),         # Inter orbit gap
            (1463.6, 1468.26998)],     # Before beginning of sector 6
        7: [(1502.5, 1505.01)],        # Inter sector gap
        14: [(1696.2, 1697.2)],        # Inter sector gap
        15: [(1723.25, 1725.6),
             (1736.01, inf)],
        16: [(-inf, 1738.65),
             (1763.31, inf),           # Orbit range
             (1750.25, 1751.659)],     # Inter sector gap
        26: [(-inf, 2010.26209),
             (2035.1343, inf),
             (2021.482, 2023.28936)],
    }

    try:
        spans = bad_spans[sector]
    except KeyError:
        raise ValueError("No gap info available for sector %i" %(sector))

    gaps = np.zeros_like(time, dtype=bool)
    for start, end in spans:
        gaps |= (start <= time) & (time <= end)

    return gaps
124 |
125 |
126 |
def median_detrend(flux, window):
    """
    Fergal's code to median detrend.

    Divides each point by the median of a 2*window-point neighborhood
    and subtracts 1, so the result is centered on zero.

    Parameters
    ----------
    flux : 1d array
    window : int
        Half-width of the sliding median window, in cadences.

    Returns
    -------
    1d np.array of the same length as flux.
    """
    size = len(flux)
    nPoints = window

    filtered = np.zeros(size)
    for i in range(size):
        #This two step ensures that lwr and upr lie in the range [0,size)
        lwr = max(i-nPoints, 0)
        upr = min(lwr + 2*nPoints, size)
        lwr = upr- 2*nPoints

        sub = flux[lwr:upr]

        offset = np.median(sub)
        # NOTE: numpy scalar division by zero returns inf/nan with a
        # RuntimeWarning instead of raising, so the original
        # try/except ZeroDivisionError never fired for array input.
        # Test for zero explicitly to get the intended fallback of 0.
        if offset != 0:
            filtered[i] = flux[i]/offset - 1
        else:
            filtered[i] = 0

    return filtered
150 |
def median_subtract(flux, window):
    """
    Fergal's code to median detrend (additive version).

    Subtracts the median of a 2*window-point neighborhood from each
    point, so the result is centered on zero.

    Parameters
    ----------
    flux : 1d array
    window : int
        Half-width of the sliding median window, in cadences.

    Returns
    -------
    1d np.array of the same length as flux.
    """
    size = len(flux)
    nPoints = window

    filtered = np.zeros(size)
    for i in range(size):
        #This two step ensures that lwr and upr lie in the range [0,size)
        lwr = max(i-nPoints, 0)
        upr = min(lwr + 2*nPoints, size)
        lwr = upr- 2*nPoints

        sub = flux[lwr:upr]

        offset = np.median(sub)
        # A subtraction cannot raise ZeroDivisionError; the original
        # try/except around this line was dead code and has been removed.
        filtered[i] = flux[i] - offset

    return filtered
174 |
def conv_detrend(flux, window, gap):
    """
    Boxcar-smoothed detrend.

    Divides the un-gapped flux by a running boxcar average and
    subtracts 1, so the returned array is centered on zero.
    Points where ``gap`` is True are dropped before smoothing.
    """
    keep = ~gap
    kernel = Box1DKernel(window, mode="linear_interp")
    smooth = convolve(flux[keep], kernel, boundary="extend")
    detrended = flux[keep]/smooth - 1

    return detrended
188 |
def idNoisyData(flux, window, Nsigma=4):
    """
    Determine sections of the data that are very noisy compared to the rest.

    The flux is median detrended with an odd-length window, then points
    more than Nsigma standard deviations from zero are flagged. The clip
    is repeated three times, recomputing the standard deviation from the
    surviving points each pass so extreme outliers do not inflate the
    threshold.

    Be careful, try not to cut out planets.
    I recommend using a window that is smaller than used for planet finding.

    Returns a 1d boolean np array (True == bad).
    """
    win = int(window)
    # Force an odd window. The original used "if ~is_odd(win)", but "~"
    # is bitwise NOT (~True == -2, ~False == -1, both truthy), so the
    # window was ALWAYS bumped by one, turning odd windows even.
    # "not" performs the intended parity check.
    if not is_odd(win):
        win = win + 1

    is_bad = np.zeros(len(flux), dtype=bool)

    mdFlux = median_detrend(flux, win)

    for _ in range(3):

        if np.all(is_bad):
            break

        # Exclude already-flagged points so each pass tightens the clip;
        # previously the same threshold was recomputed three times,
        # making the second and third iterations no-ops.
        sd = np.std(mdFlux[~is_bad])
        is_bad |= np.abs(mdFlux) > Nsigma * sd

    return is_bad
214 |
def is_odd(num):
    """Return True when ``num`` is odd."""
    return bool(num % 2)
217 |
218 |
def running_std_gap(flux, window, N=3, nSigTimes=3.3):
    """
    Flag up to N window-length stretches whose running standard
    deviation is more than nSigTimes the median running std.

    Returns (isbad, med_std): a boolean array (True == bad data) and
    the median of the running standard deviation.

    Be sure to set a wide enough window so you don't throw away planets.
    Probably N*duration(in points) of longest transit expected.
    """
    n_pts = len(flux)
    is_bad = np.zeros(n_pts, dtype=bool)
    roll_std = np.zeros(n_pts)

    # Running std of the trailing `window` points; the first `window`
    # entries stay zero (no full window available yet).
    for end in range(window, n_pts):
        roll_std[end] = np.nanstd(flux[end - window:end])

    med_std = np.median(roll_std)

    # Consider only the N windows with the largest scatter.
    worst = np.argsort(roll_std)[-N:]
    for end in worst:
        if roll_std[end] > med_std * nSigTimes:
            is_bad[end - window:end] = True

    return is_bad, med_std
254 |
255 |
def calcBls(flux, time, bls_durs, minP=None, maxP=None, min_trans=3):
    """
    Run a Box Least Squares search and return its power spectrum.

    Note the (flux, time) argument order, which is the reverse of
    BoxLeastSquares' own (time, flux).
    """
    searcher = BoxLeastSquares(time, flux)
    periods = searcher.autoperiod(bls_durs,
                                  minimum_period=minP,
                                  maximum_period=maxP,
                                  minimum_n_transit=min_trans,
                                  frequency_factor=0.8)

    return searcher.power(periods, bls_durs, oversample=20)
269 |
def findBlsSignal(time, flux, bls_durations, minP=None, maxP=None, min_trans=3):
    """Return [period, t0, depth, duration, depth_snr] of the strongest BLS peak."""
    spectrum = calcBls(flux, time, bls_durations, minP=minP, maxP=maxP,
                       min_trans=min_trans)

    best = np.argmax(spectrum.power)

    return np.array([spectrum.period[best],
                     spectrum.transit_time[best],
                     spectrum.depth[best],
                     spectrum.duration[best],
                     spectrum.depth_snr[best]])
282 |
def simpleSnr(time, flux, results):
    """
    Calculate a simple signal-to-noise ratio for a candidate: the BLS
    depth divided by the scatter left after removing the box model from
    the data and median detrending.

    ``results`` is the [period, t0, depth, duration, snr] array
    produced by findBlsSignal.
    """
    bls = BoxLeastSquares(time, flux)
    box_model = bls.model(time, results[0], results[3], results[1])
    residual = median_subtract(flux - box_model, 12)

    scatter = np.std(residual)
    return results[2]/scatter
297 |
298 |
def identifyTces(time, flux, bls_durs_hrs=[1,2,4,8,12], minSnr=3, fracRemain=0.5, \
                 maxTces=10, minP=None, maxP=None):
    """
    Iteratively find threshold crossing events (TCEs) with BLS.

    Find highest point in the bls.
    remove that signal, median detrend again
    Find the next signal.
    Stop when less than half the original data set remains.
    Or, when depth of signal is less than snr*running_std

    Inputs
    ---------
    time, flux
        (1d np arrays) light curve to search.
    bls_durs_hrs
        (list) transit durations to try, in hours.
        NOTE(review): mutable default argument -- harmless here only
        because it is never mutated.
    minSnr
        (float) stop once the latest signal's simple SNR drops below this.
    fracRemain
        (float) stop once less than this fraction of the original
        cadences survives the accumulated transit masks.
    maxTces
        (int) cap on the number of search iterations. Because the
        counter check is `<=` and the failing signal is appended before
        the check, up to maxTces+2 rows can appear in the output.
    minP, maxP
        (float) optional period search limits, in days.

    returns period, t0, depth, duration, snr for each signal found,
    plus the corresponding BLS stats (an object array of dicts).
    The last row is the signal that triggered the stop condition, so it
    may fall below minSnr.
    """

    keepLooking = True
    counter = 0
    results = []
    stats = []
    bls_durs_day=np.array(bls_durs_hrs)/24

    t=time.copy()
    f=flux.copy()


    while keepLooking:

        bls_results = findBlsSignal(t, f, bls_durs_day, minP=minP, maxP=maxP)
        # Replace the BLS depth_snr with the simple SNR because the BLS
        # depth snr is acting strangely.
        bls_results[4] = simpleSnr(t, f, bls_results)


        results.append(bls_results)
        bls = BoxLeastSquares(t,f)
        bls_stats = bls.compute_stats(bls_results[0], bls_results[3],bls_results[1])
        stats.append(bls_stats)
        # Mask out the found signal before the next search; the duration
        # is widened by 10% so transit edges are fully removed.
        transit_mask = bls.transit_mask(t, bls_results[0],\
                                        bls_results[3]*1.1, bls_results[1])

        t=t[~transit_mask]
        f=f[~transit_mask]

        #Conditions to keep looking
        if (len(t)/len(time) > fracRemain) & \
           (bls_results[4] >= minSnr) & \
           (counter <= maxTces) :

            counter=counter + 1
            keepLooking = True

        else:
            keepLooking = False


    return np.array(results), np.array(stats)
356 |
357 | #from pdb import set_trace as debug
358 |
def findOutliers(time, flux, gap=None,
                 threshold_sigma=4,
                 precision_days=0.0205,
                 maxClusterLen = 2
                 ):
    """
    Identify single point outliers.

    Preserves consecutive outliers, and those that are evenly spaced in
    time. This protects short duration transits.

    Inputs:
    ------------
    time, flux
        (1d numpy array) Input data. Flux should have mean (or median) value
        of zero. Units of time are assumed to be days.

    Optional Inputs
    ------------------
    gap
        (1d boolean numpy array) True for cadences to ignore when
        measuring the noise level. Defaults to no gaps.
    precision_days
        (float) Points that are evenly spaced to within this precision
        are considered periodic, and not marked as outliers. Setting
        this to zero turns off the search of periodicity.

    threshold_sigma
        (float) Points more than this many sigma from zero are considered
        potential outliers.
    maxClusterLen
        (int) Outliers are not marked if they are part of
        a contiguous cluster at least this long.

    Returns:
    ------------
    A 1d boolean numpy array the same length as ``flux``; True marks a
    single point outlier. (Note: despite earlier documentation, this is
    a boolean mask, not an array of integer indices -- see the
    ``indices[...] = True`` assignments below.)

    Notes
    ----------
    `precision_days` should be set to a value comparable to the cadence time.
    For Kepler long cadence, this is 29.52 minutes = 0.0205 days.

    If `time` is not in units of days, set the value of precision in the
    same units.
    """

    # NOTE(review): when gap is None, np.all(None) evaluates False so
    # this assert passes before the default is applied below.
    assert not np.all(gap), "Can't find outliers if all data is gapped"

    if gap is None:
        gap = np.zeros_like(flux, dtype=bool)
    # Boolean output mask (zeros_like a boolean gap gives dtype bool;
    # assumes callers pass a boolean gap array).
    indices = np.zeros_like(gap)

    # Remove as much signal as possible from the data by first
    # differencing; a lone outlier becomes two large consecutive diffs.
    fluxDetrended = np.diff(flux)
    fluxDetrended = np.append(fluxDetrended, [0])  #Keep the length the same
    assert len(fluxDetrended) == len(flux)

    # Find outliers as too far away from the mean. The sqrt(2) accounts
    # for the noise inflation introduced by differencing.
    rms = robustStd(fluxDetrended[~gap])
    threshold_counts = threshold_sigma * rms / np.sqrt(2)
    # plateau.plateau presumably returns [start, end) index pairs of
    # contiguous above-threshold regions -- TODO confirm against
    # corazon.plateau.
    rawOutliers = plateau.plateau(np.fabs(fluxDetrended), threshold_counts)


    if len(rawOutliers) == 0:
        return indices

    # Keep only short clusters; long contiguous runs are assumed to be
    # real signal (e.g. a transit), not consecutive outliers.
    span = rawOutliers[:,1] - rawOutliers[:,0]
    outliers = rawOutliers[span < maxClusterLen+2]
    for p1, p2 in outliers:
        # NOTE(review): the slice starts at p1+1, skipping the region's
        # first point -- presumably compensating for the index shift
        # introduced by np.diff; confirm intent before changing.
        indices[p1+1 :p2] = True


    # Unmark outliers that recur periodically -- they may be transits.
    if precision_days > 0:
        notOutliers = findPeriodicOutliers(time, indices, precision_days)
        indices[notOutliers] = False

    return indices
443 |
444 | #from pdb import set_trace as debug
445 |
def findPeriodicOutliers(time_days, singleOutlierIndex, precision_days):
    """Identify groups of outliers that are equally spaced in time

    Inputs
    ---------
    time_days
        (1d numpy array) Times of data points, in days

    singleOutlierIndex
        (1d boolean numpy array, same length as ``time_days``) True for
        points previously flagged as outliers. (Note: this is a boolean
        mask, not an array of ints -- the code converts it to indices
        with ``np.where`` below.)

    precision_days
        (float) How close to perfectly evenly spaced do points need to
        be to be marked as periodic?

    Returns
    ----------
    notOutliers
        A list of integer indices (into ``time_days``) of flagged points
        that appear periodic and therefore should NOT be treated as
        outliers. Always a subset of the flagged indices.
    """

    assert len(time_days) == len(singleOutlierIndex)

    #Convert the boolean mask to a list of indices
    singleOutliers = np.where(singleOutlierIndex)[0]
    notOutliers = []

    outlierTimes = time_days[singleOutliers]
    diffs = [outlierTimes[i+1] - outlierTimes[i] for i in range(0, len(outlierTimes)-1)]
    # Round so that near-identical spacings compare equal below.
    diffs = [round(d, 5) for d in diffs]

    if len(singleOutliers) >= 4:
        # All spacings distinct => no candidate period exists.
        if len(set(diffs)) == len(diffs):
            possibleTimes = np.array([])
        else:
            period = max(set(diffs), key = diffs.count) # period = most common difference
            epoch = outlierTimes[ diffs.index(period) ]
            possibleTimes = np.arange(epoch, outlierTimes[-1] + 0.5*period, period)

        notOutliers = []
        for i in range(len(outlierTimes)):
            if np.any((abs(possibleTimes - outlierTimes[i]) < precision_days)):
                notOutliers.append(singleOutliers[i])


    elif len(singleOutliers) == 3:
        #If we only have three outliers, and they are equally spaced
        #then they are periodic
        if abs(diffs[0] - diffs[1]) < precision_days:
            notOutliers.extend(singleOutliers)

    assert set(notOutliers).issubset(singleOutliers)
    return notOutliers
501 |
502 |
def robustStd(y):
    """Robust standard deviation estimate from the median absolute deviation.

    The 1.4826 factor makes the MAD consistent with the standard
    deviation for Gaussian data.
    See https://en.wikipedia.org/wiki/Median_absolute_deviation
    """
    assert len(y) > 0
    deviations = np.fabs(y - np.median(y))
    return 1.4826 * np.median(deviations)
--------------------------------------------------------------------------------