├── .github
│   └── workflows
│       ├── doc.yml
│       ├── pypi_upload.yaml
│       └── python-app.yml
├── .gitignore
├── LICENSE
├── README.md
├── docs
│   ├── example_header_parser_functions
│   │   └── parse_omegacam_header.py
│   ├── example_starred_notebooks
│   │   └── example_roi_modelling.ipynb
│   ├── flow_diagram
│   │   ├── flow_diagram.py
│   │   └── workflow_diagram.svg
│   └── mkdocs
│       ├── contents
│       │   ├── cookbook.md
│       │   ├── cutouts.png
│       │   ├── example_deconv.png
│       │   ├── example_lightcurve.png
│       │   ├── footprints_plot_example.jpg
│       │   ├── footprints_with_gaia_stars_plot_example.jpg
│       │   ├── index.md
│       │   ├── installation.md
│       │   ├── lightcurver_logo.svg
│       │   ├── norm_coeff_plot_example.png
│       │   ├── psf_plot_example.jpg
│       │   ├── references.bib
│       │   ├── single_frame_wide_field.jpg
│       │   ├── starphotom_plot_example.jpg
│       │   └── tutorial.md
│       └── mkdocs.yml
├── lightcurver
│   ├── __init__.py
│   ├── pipeline
│   │   ├── __init__.py
│   │   ├── example_config_file
│   │   │   └── config.yaml
│   │   ├── pipeline_dependency_graph.yaml
│   │   ├── state_checkers.py
│   │   ├── task_wrappers.py
│   │   └── workflow_manager.py
│   ├── plotting
│   │   ├── __init__.py
│   │   ├── footprint_plotting.py
│   │   ├── html_visualisation.py
│   │   ├── image_plotting.py
│   │   ├── joint_modelling_plotting.py
│   │   ├── normalization_plotting.py
│   │   ├── photometry_plotting.py
│   │   ├── plot_curves_template.html
│   │   ├── psf_plotting.py
│   │   └── sources_plotting.py
│   ├── processes
│   │   ├── __init__.py
│   │   ├── absolute_zeropoint_calculation.py
│   │   ├── alternate_plate_solving_adapt_existing_wcs.py
│   │   ├── alternate_plate_solving_with_gaia.py
│   │   ├── background_estimation.py
│   │   ├── cutout_making.py
│   │   ├── frame_characterization.py
│   │   ├── frame_importation.py
│   │   ├── frame_star_assignment.py
│   │   ├── normalization_calculation.py
│   │   ├── plate_solving.py
│   │   ├── psf_modelling.py
│   │   ├── roi_file_preparation.py
│   │   ├── roi_modelling.py
│   │   ├── star_extraction.py
│   │   ├── star_photometry.py
│   │   └── star_querying.py
│   ├── scripts
│   │   ├── initialize.py
│   │   └── run.py
│   ├── structure
│   │   ├── __init__.py
│   │   ├── database.py
│   │   ├── exceptions.py
│   │   ├── user_config.py
│   │   └── user_header_parser.py
│   └── utilities
│       ├── __init__.py
│       ├── absolute_magnitudes_from_gaia.py
│       ├── absolute_magnitudes_from_panstarrs.py
│       ├── chi2_selector.py
│       ├── footprint.py
│       ├── gaia.py
│       ├── image_coordinates.py
│       ├── lightcurves_postprocessing.py
│       ├── star_naming.py
│       └── starred_utilities.py
├── pyproject.toml
└── tests
    ├── __init__.py
    ├── test_database_queries
    │   ├── __init__.py
    │   └── test_queries.py
    ├── test_entire_pipeline
    │   ├── raw_frames
    │   │   ├── OMEGA.2021-08-22T07:25:56.281_13OFCS.fits
    │   │   └── OMEGA.2021-08-22T07:32:00.792_13OFCS.fits
    │   └── test_run_pipeline_example_config.py
    ├── test_processes
    │   ├── __init__.py
    │   ├── test_background_estimation.py
    │   ├── test_cutout_making.py
    │   ├── test_frame_characterization.py
    │   └── test_star_extraction.py
    ├── test_products_handling
    │   ├── __init__.py
    │   ├── test_grouping.py
    │   └── test_magnitude_errors.py
    └── test_starred_calls
        ├── __init__.py
        └── test_starred_calls.py
/.github/workflows/doc.yml:
--------------------------------------------------------------------------------
1 | name: doc
2 | on:
3 | push:
4 | branches:
5 | - main
6 | permissions:
7 | contents: write
8 | jobs:
9 | deploy:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - uses: actions/checkout@v4
13 | - name: Configure Git Credentials
14 | run: |
15 | git config user.name github-actions[bot]
16 | git config user.email 41898282+github-actions[bot]@users.noreply.github.com
17 | - uses: actions/setup-python@v5
18 | with:
19 | python-version: 3.x
20 | - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV
21 | - uses: actions/cache@v4
22 | with:
23 | key: mkdocs-material-${{ env.cache_id }}
24 | path: .cache
25 | restore-keys: |
26 | mkdocs-material-
27 | - run: pip install mkdocs-material mkdocs-nav-weight pymdown-extensions pdoc
28 | - run: mkdocs gh-deploy --force --config-file ./docs/mkdocs/mkdocs.yml
--------------------------------------------------------------------------------
/.github/workflows/pypi_upload.yaml:
--------------------------------------------------------------------------------
1 | name: Build and Publish to PyPI
2 |
3 | on:
4 | push:
5 | tags:
6 | - 'v*'
7 | workflow_dispatch:
8 |
9 | jobs:
10 | build-and-publish:
11 | runs-on: ubuntu-latest
12 | steps:
13 | - uses: actions/checkout@v2
14 |
15 | - name: Set up Python
16 | uses: actions/setup-python@v2
17 | with:
18 | python-version: '3.x'
19 | - name: Verify Version Match
20 | run: |
21 | VERSION=$(grep "^version" pyproject.toml | awk -F '"' '{print $2}')
22 | TAG_NAME=${GITHUB_REF#refs/tags/}
23 | echo "extracted version from setup.py: $VERSION"
24 | echo "Git tag is: $TAG_NAME"
25 | if [ "v$VERSION" != "$TAG_NAME" ]; then
26 | echo "Error: VERSION file does not match the git tag."
27 | exit 1
28 | fi
29 |
30 | - name: Install dependencies
31 | run: |
32 | python -m pip install --upgrade pip
33 | pip install build twine
34 |
35 | - name: Build package
36 | run: python -m build
37 |
38 | - name: Publish package
39 | if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
40 | env:
41 | TWINE_USERNAME: __token__
42 | TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
43 | run: twine upload dist/*
44 |
45 |
--------------------------------------------------------------------------------
/.github/workflows/python-app.yml:
--------------------------------------------------------------------------------
1 | name: lightcurver
2 |
3 | on:
4 | push:
5 | branches: [ "main" ]
6 | pull_request:
7 | branches: [ "main" ]
8 |
9 | permissions:
10 | contents: read
11 |
12 | jobs:
13 | build:
14 | runs-on: ubuntu-latest
15 |
16 | steps:
17 | - uses: actions/checkout@v3
18 | - name: Set up Python 3.10
19 | uses: actions/setup-python@v3
20 | with:
21 | python-version: "3.10"
22 | - name: Install dependencies
23 | run: |
24 | python -m pip install --upgrade pip
25 | pip install .[test] flake8 pytest
26 | # This installs your package along with the [test] dependencies if specified in pyproject.toml
27 | # If you don't have [test] dependencies, just use `pip install .`
28 | - name: Lint with flake8
29 | run: |
30 | # stop the build if there are Python syntax errors or undefined names
31 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
32 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
33 | flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
34 | - name: Test with pytest
35 | run: |
36 | pytest
37 |
38 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # poetry
98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99 | # This is especially recommended for binary packages to ensure reproducibility, and is more
100 | # commonly ignored for libraries.
101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102 | #poetry.lock
103 |
104 | # pdm
105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
106 | #pdm.lock
107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
108 | # in version control.
109 | # https://pdm.fming.dev/#use-with-ide
110 | .pdm.toml
111 |
112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
113 | __pypackages__/
114 |
115 | # Celery stuff
116 | celerybeat-schedule
117 | celerybeat.pid
118 |
119 | # SageMath parsed files
120 | *.sage.py
121 |
122 | # Environments
123 | .env
124 | .venv
125 | env/
126 | venv/
127 | ENV/
128 | env.bak/
129 | venv.bak/
130 |
131 | # Spyder project settings
132 | .spyderproject
133 | .spyproject
134 |
135 | # Rope project settings
136 | .ropeproject
137 |
138 | # mkdocs documentation
139 | /site
140 |
141 | # mypy
142 | .mypy_cache/
143 | .dmypy.json
144 | dmypy.json
145 |
146 | # Pyre type checker
147 | .pyre/
148 |
149 | # pytype static type analyzer
150 | .pytype/
151 |
152 | # Cython debug symbols
153 | cython_debug/
154 |
155 | # PyCharm
156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
158 | # and can be added to the global gitignore or merged into this file. For a more nuclear
159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
160 | .idea/
161 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | [](https://joss.theoj.org/papers/4c19a3f804d62aafd0ca23f297fce6c9)
4 | 
5 | [](https://duxfrederic.github.io/lightcurver/)
6 | [](https://www.python.org)
7 | [](https://pypi.org/project/lightcurver/)
8 | [](https://www.gnu.org/licenses/gpl-3.0)
9 |
10 |
11 | # `lightcurver`
12 | Welcome to `lightcurver`!
13 | This is a photometry library leveraging [STARRED](https://gitlab.com/cosmograil/starred),
14 | best used with time series of wide-field images. You can read more about it in the [documentation](https://duxfrederic.github.io/lightcurver/).
15 |
16 | Essentially, `lightcurver` provides the infrastructure to
17 | - prepare a Point Spread Function (PSF) model for each wide-field image,
18 | - precisely calibrate the relative zero point between each image.
19 |
20 | This enables `STARRED` to model the pixels of the region of interest (ROI),
21 | yielding high-quality light curves of the point sources in the ROI,
22 | but also recovering the subpixel information to provide a high signal-to-noise, high-resolution image of the ROI itself.
23 | The example below shows a cutout of a wide-field image (one in a set of a hundred),
24 | the fitted high resolution model, and the Hubble Space Telescope image of the same region.
25 |
26 | 
27 |
28 | ## Features
29 | * **Plate solving:** uses [Astrometry.net](https://astrometry.net/) to establish the footprint of each frame.
30 | * **_Gaia_ reference stars:** leverages _Gaia_ information to select the right reference stars in the field of view of each frame.
31 | * **Preserves sub-pixel information**: never interpolates, essential to preserve the sub-pixel information that can be recovered by `STARRED` in a multi-epoch forward modelling.
32 | * **Incremental:** uses `SQL` queries to dynamically determine which process needs to be executed on which frame.
33 | * **Semi-automatic:** create a `yaml` configuration file once for the first few frames, then run the
34 | pipeline whenever a new frame is added, providing auto-updating light curves (see the scheduling example below).
35 |
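Since the pipeline only processes what it has not seen yet, it lends itself well to being scheduled. A possible sketch (the paths are placeholders, adapt them to your environment):
```bash
# hypothetical cron entry: re-run the pipeline every night at 03:00,
# picking up any frames that appeared since the last run
0 3 * * * /path/to/lightcurver_env/bin/lc_run /path/to/config.yaml
```
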
36 | ## Getting Started
37 | 1. **Installation**: the short version, install via `pip`:
38 |
39 | ```
40 | pip install lightcurver
41 | ```
42 | [The slightly longer version](https://duxfrederic.github.io/lightcurver/installation/), in case you plan on using a GPU or the plate solving.
43 | 2. **Usage**:
44 | Adapt the [yaml config file template](https://github.com/duxfrederic/lightcurver/blob/main/lightcurver/pipeline/example_config_file/config.yaml), then run the pipeline with
45 | ```bash
46 | lc_run /path/to/config.yaml
47 | ```
48 | The pipeline is incremental, but when testing you can run specific steps only, for example:
49 | ```bash
50 | lc_run /path/to/config.yaml --start stamp_extraction --stop psf_modeling
51 | ```
52 | (The names of the pipeline steps/tasks are listed upon running `lc_run -h`.)
53 | 3. **Tutorial**: follow the [tutorial](https://duxfrederic.github.io/lightcurver/tutorial/) of the documentation, which provides a dataset you can experiment with.
54 | You can also test your installation with a subset of the dataset provided in the tutorial:
55 | ```bash
56 | cd /clone/of/lightcurver
57 | pytest .
58 | ```
59 |
60 |
61 | ## Contributing
62 | If you're encountering problems, then I would like to hear about them and will try to fix them. Feel free to create an issue
63 | in this repository.
64 | If you're finding this package useful and want to contribute, please create a pull request after forking the repository.
65 | If you need general help, feel free to [reach out](mailto:frederic.dux@epfl.ch)!
66 |
67 | ## License
68 | `lightcurver` is licensed under the GPL v3.0 License. See the [LICENSE](LICENSE) file for more details.
69 |
70 | ## The implemented processing steps
71 | This is an overview of the steps taken by the pipeline.
72 | 
73 |
--------------------------------------------------------------------------------
/docs/example_header_parser_functions/parse_omegacam_header.py:
--------------------------------------------------------------------------------
1 | def parse_header(header):
2 | from dateutil import parser
3 | from astropy.time import Time
4 | exptime = header['exptime']
5 | gain = header['gain']
6 | time = Time(parser.parse(header['obstart']))
7 | return {'exptime': exptime, 'gain': gain, 'mjd': time.mjd}
--------------------------------------------------------------------------------
/docs/flow_diagram/flow_diagram.py:
--------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 | import matplotlib.patches as mpatches
3 | from graphviz import Digraph
4 |
5 | dot = Digraph()
6 | dot.attr(bgcolor='transparent')
7 |
8 |
9 | #dot.node('A', 'Initialize Database\nJust making sure the database exists, with the right schema.', shape='box')
10 | dot.node('B', 'Read/Convert/SkySub/Character Catalog\nreads the images from a directory, converts them to electrons,\nsubtracts the sky, finds sources, measures noise, seeing,\ncalculates ephemeris, records all to database', shape='box', color='white', fontcolor='white')
11 | dot.node('C', 'Plate Solving\nUses astrometry.net\'s solve-field and initial guess of plate scale\nprovided in user config to add a reliable WCS to each image fast.\nIf user config states that images are already plate solved, skip.', shape='box', color='white', fontcolor='white')
12 | dot.node('D', 'Calculate Common and Total Footprint\nChecks the footprint of the images, see how big the common footprint is.\n', shape='box', fontcolor='white', color='white')
13 | dot.node('E', 'Query Gaia for Stars\nGiven the footprints above, finds stars in gaia for PSF modelling and normalization.\n', shape='box', fontcolor='white', color='white')
14 | dot.node('F', 'Stamp Extraction\nExtracts stamps of all good stars and all epochs.\nAlso extract stamps of the region of interest (ROI).\nSaves the stamps to an HDF5 file\n Also cleans the cosmics.', shape='box', color='white', fontcolor='white')
15 | dot.node('G', 'PSF Modeling\nCreates a PSF model for each frame', shape='box', color='white', fontcolor='white')
16 | dot.node('H', 'Star Photometry\nUses the PSF model to do PSF photometry of each star, using STARRED\n(simultaneous forward modelling). The fluxes (per frame and per star) are saved', shape='box', color='white', fontcolor='white')
17 | dot.node('I', 'Calculate Normalization Coefficient\nGiven the star photometry, calculates a representative relative flux for each image.\n', shape='box', color='white', fontcolor='white')
18 | dot.node('J', 'Prepare Calibrated Cutouts\nPrepares cutouts for each ROI and each frame, calibrated in flux by the normalization coefficient.', shape='box', color='white', fontcolor='white')
19 | dot.node('K', 'Forward modelling\nSTARRED can be run on the prepared cutouts.', shape='box', color='white', fontcolor='white')
20 |
21 | for edge in ['BC', 'CD', 'DE', 'EF', 'FG', 'GH', 'HI', 'IJ', 'JK']:
22 | dot.edge(*edge, color='white')
23 | #dot.edges(['BC', 'CD', 'DE', 'EF', 'FG', 'GH', 'HI', 'IJ', 'JK'], color='white')
24 |
25 | dot.render('workflow_diagram', format='svg', cleanup=True)
26 |
27 |
--------------------------------------------------------------------------------
/docs/flow_diagram/workflow_diagram.svg:
--------------------------------------------------------------------------------
[SVG markup lost in this dump. The file is the Graphviz rendering of the pipeline workflow produced by flow_diagram.py: nodes B through K, chained B→C→…→K, labelled Read/Convert/SkySub/Character Catalog, Plate Solving, Calculate Common and Total Footprint, Query Gaia for Stars, Stamp Extraction, PSF Modeling, Star Photometry, Calculate Normalization Coefficient, Prepare Calibrated Cutouts, and Deconvolution (STARRED run on the prepared cutouts).]
--------------------------------------------------------------------------------
/docs/mkdocs/contents/cookbook.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Cookbook
3 | weight: 7
4 | ---
5 | # Cookbook and random fixes
6 |
7 | `lightcurver` will absolutely fail you a lot. Sorry, astronomical data is just too messy and such is life.
8 | Here I will add example situations and how to fix them.
9 |
10 | ### Some of my images were imported, but cannot be plate solved due to low quality
11 | High airmass observations, clouds, tracking problems ...
12 | If you have such images and are confident that you will not be able to extract value from them,
13 | you can remove them from consideration by flagging them in the database:
14 | ```sql
15 | UPDATE frames SET comment = 'cannot be plate solved', eliminated = 1 WHERE plate_solved = 0;
16 | ```
17 |
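A minimal way to apply this from the shell, assuming your database sits at the default `database.sqlite3` inside your working directory (adjust the path otherwise):
```bash
# flag every frame that could not be plate solved as eliminated
sqlite3 /path/to/workdir/database.sqlite3 \
  "UPDATE frames SET comment = 'cannot be plate solved', eliminated = 1 WHERE plate_solved = 0;"
```
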
18 | ### I manually plate solved my images after importation, how can I make the pipeline aware of this?
19 | If the plate solving step failed you, and you managed to add a WCS yourself to the FITS files in your `frames` directory,
20 | then you will need to manually run the process that stores the footprints in the database and checks that your region of interest
21 | is in the frame.
22 | Here is how you might do that, with your current directory set to your working directory.
23 | ```python
24 | import os
25 | os.environ['LIGHTCURVER_CONFIG'] = "/path/to/config.yaml"
26 | from pathlib import Path
27 | from astropy.io import fits
28 | from astropy.wcs import WCS
29 |
30 | from lightcurver.processes.plate_solving import post_plate_solve_steps
31 | from lightcurver.structure.user_config import get_user_config
32 | from lightcurver.structure.database import execute_sqlite_query
33 | from lightcurver.processes.frame_star_assignment import populate_stars_in_frames
34 |
35 | user_config = get_user_config()
36 |
37 | solved = Path('frames').glob('*.fits')
38 |
39 | for s in solved:
40 | s = str(s)
41 | if 'sources.fits' in s:
42 | # this is a table of sources, skip
43 | continue
44 | wcs = WCS(fits.getheader(s))
45 | if not wcs.is_celestial:
46 | # this one wasn't solved then
47 | continue
48 | frame_id = execute_sqlite_query('select id from frames where image_relpath = ?',
49 | params=(s,), is_select=True)[0][0]
50 |
51 | try:
52 | post_plate_solve_steps(frame_path=s, user_config=user_config, frame_id=frame_id)
53 | except AssertionError:
54 | # already inserted
55 | pass
56 |
57 | execute_sqlite_query(query="UPDATE frames SET plate_solved = ? WHERE id = ?",
58 | params=(1, frame_id), is_select=False)
59 |
60 | # now that we know what our footprints are, populate the table telling us which frame has which star.
61 | populate_stars_in_frames()
62 | ```
63 |
64 | ### The sources were not correctly found by `sep`, how to re-run that part only after changing the config?
65 | ```python
66 | import os
67 | os.environ['LIGHTCURVER_CONFIG'] = "/path/to/config.yaml"
68 |
69 | from lightcurver.pipeline.task_wrappers import source_extract_all_images
70 |
71 | source_extract_all_images()
72 | ```
73 | You can also pass a list of strings to `source_extract_all_images`, filtering on the `frames` table of the database,
74 | for instance:
75 | ```python
76 | source_extract_all_images(['plate_solved = 0'])
77 | ```
78 |
--------------------------------------------------------------------------------
/docs/mkdocs/contents/cutouts.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/docs/mkdocs/contents/cutouts.png
--------------------------------------------------------------------------------
/docs/mkdocs/contents/example_deconv.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/docs/mkdocs/contents/example_deconv.png
--------------------------------------------------------------------------------
/docs/mkdocs/contents/example_lightcurve.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/docs/mkdocs/contents/example_lightcurve.png
--------------------------------------------------------------------------------
/docs/mkdocs/contents/footprints_plot_example.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/docs/mkdocs/contents/footprints_plot_example.jpg
--------------------------------------------------------------------------------
/docs/mkdocs/contents/footprints_with_gaia_stars_plot_example.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/docs/mkdocs/contents/footprints_with_gaia_stars_plot_example.jpg
--------------------------------------------------------------------------------
/docs/mkdocs/contents/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Introduction
3 | weight_index: 10
4 | ---
5 | # `lightcurver` Documentation
6 |
7 |
8 | ## What is `lightcurver`?
9 |
10 | `lightcurver` is a package / pipeline leveraging `STARRED` for precise photometry of blended sources in a large number of epochs.
11 | It specifically tackles the following type of problem:
12 |
13 | - The dataset has a large number of frames.
14 | - The frames are oversampled wide-field images (think modern sky surveys, such as the upcoming Vera Rubin LSST).
15 | - Only a small portion of each frame is of interest (think supernova embedded in a galaxy, or lensed quasars).
16 |
17 | The above type of problem has historically been hard to solve: obtaining high quality light curves for such datasets
18 | has mostly been a manual process, taking up to a few months of an investigator's time per light curve.
19 | Doubt would also always remain: have we extracted as much signal as possible from the data, or could the signal-to-noise
20 | ratio of the curves be improved if we re-process everything just one last time?
21 | Are there systematic errors in the normalization that bias the shape of the extracted light curves?
22 |
23 | `lightcurver` aims to make the process at least semi-automatic (set it up once for a few frames, then let the pipeline
24 | automatically handle any future frame), and builds on the experience of manually reducing such datasets to offer
25 | no-compromise photometric precision.
26 | The end goal is for the photometric uncertainty of the light curves to be dominated by
27 | the noise level of the original frames, and not by normalisation or deblending scatter or systematics.
28 |
29 |
30 | ## Example result
31 |
32 |
33 | The two figures below show `lightcurver` outputs for a dataset captured by the ESO 2.2-metre telescope. (1)
34 |
35 | 1. ESO program `0106.A-9005(A)`, PI Courbin, La Silla Chile
36 |
37 | The first image shows a calibrated cutout of the object of interest, a lensed quasar, from one of the wide-field images
38 | of the dataset. The second image is the `STARRED` model, a high resolution image cumulating the signal of all the frames.
39 | The last image is a Hubble Space Telescope image for comparison.
40 |
41 |
42 |
43 | The other product is the light curves of the point sources:
44 |
45 |
46 |
47 | ## The method
48 | `lightcurver` will prepare, for each frame, a precisely flux-calibrated cutout of the region of interest, together with a
49 | ready-to-use PSF model. The cutout pixels can then be modelled with `STARRED`, in a so-called joint forward modelling.
50 | The process fits a high resolution model, with a constant pixelated component and a variable-flux point source component,
51 | to all available epochs at once, minimizing one global chi-squared.
52 | This allows us to obtain both precise light curves of the point sources modelled with the PSF, and a high resolution
53 | model that accumulates the signal of all the frames of the region of interest.
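
Schematically (the notation below is ours, for illustration, and not lifted from the `STARRED` papers), the quantity minimized over all epochs $e$ and pixels $i$ is

$$
\chi^2 \;=\; \sum_{e}\sum_{i}\,\frac{\left(d_{e,i}-m_{e,i}\right)^{2}}{\sigma_{e,i}^{2}},
\qquad
m_{e} \;=\; \mathrm{PSF}_{e} \ast \Big(h \;+\; \sum_{k} a_{k,e}\,\delta_{x_{k}}\Big),
$$

where $d_e$ are the calibrated cutouts with noise maps $\sigma_e$, $h$ is the constant pixelated component, $\delta_{x_k}$ a point source at position $x_k$, and $a_{k,e}$ its flux in epoch $e$.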
54 |
55 |
--------------------------------------------------------------------------------
/docs/mkdocs/contents/installation.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Installation
3 | weight_index: 9
4 | ---
5 | # Installation
6 | `lightcurver` requires several components to function:
7 |
8 | - `STARRED`: it will be installed automatically with `pip install lightcurver`, but if you want to use it with a GPU there
9 | might be some more setup to do. See the [installation instructions](https://cosmograil.gitlab.io/starred/installation.html#) of the package itself.
10 | - The dependencies handled by your python package manager, such as `astropy`, `shapely`, `astroquery`, `pandas`, `pyyaml`
11 | ...these will be installed automatically by `pip install lightcurver`.
12 | - (optional) `Astrometry.net`: their [installation instructions](https://astrometry.net/doc/build.html) should get you started.
13 | Alternatively, you can get an API key from their [nova](https://nova.astrometry.net/) service. I would recommend against using it in production, so as not to overload their servers.
14 |
15 |
16 | So, I would suggest creating a Python (3.9+, ideally 3.11) environment, say `lightcurver_env`,
17 | and installing the present package in it.
18 |
19 | ## The quick version
20 | Chances are this will work:
21 | ```bash
22 | conda activate lightcurver_env # if using conda
23 | source lightcurver_env/bin/activate # if using python's venv
24 | pip install lightcurver
25 | ```
26 |
27 | Or for the `git` version (includes some minimal test data):
28 | ```bash
29 | git clone git@github.com:duxfrederic/lightcurver.git
30 | cd lightcurver
31 | conda activate lightcurver_env
32 | pip install .
33 | ```
34 |
35 | ## If the quick version fails: list of dependencies
36 | Should the above fail, there might be a dependency problem requiring manual handling of the individual packages.
37 | Here is the list of dependencies that need to be installed (a one-line `pip` command covering them is sketched after the list):
38 |
39 | 1. `numpy < 2.00` - as of June 2024, `sep` is not compatible with `numpy >= 2.00`
40 | 2. `scipy`
41 | 3. `matplotlib`
42 | 4. `pandas`
43 | 5. `astropy`
44 | 6. `astroquery` - for querying Gaia and VizieR
45 | 7. `h5py` - for storing cutouts and PSF models
46 | 8. `photutils` - for aperture photometry used as initial guess
47 | 9. `astroalign` - for finding transformations between frames
48 | 10. `shapely` - for computing footprints of frames
49 | 11. `ephem` - for calculating airmass, moon distance, etc.
50 | 12. `pytest` - for executing the automatic tests
51 | 13. `sep` - for source and background extraction
52 | 14. `astroscrappy` - for cleaning the cosmics
53 | 15. `pyyaml` - for reading the config file
54 | 16. `starred-astro` - use the latest version; it will install its own dependencies.
55 | 17. `widefield_plate_solver` - an astrometry.net wrapper
56 | 18. `ccdproc` - for identifying bad columns of pixels
57 |
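Should you need to install them by hand, something along these lines ought to work (a sketch only; the authoritative version constraints are the ones declared in `pyproject.toml`):
```bash
pip install "numpy<2" scipy matplotlib pandas astropy astroquery h5py photutils \
            astroalign shapely ephem pytest sep astroscrappy pyyaml \
            starred-astro widefield_plate_solver ccdproc
```
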
58 |
59 | ## Testing your installation
60 |
61 | You can test your installation by following the [tutorial](tutorial.md).
62 | The automated tests also include the processing of a subset of the dataset given in the tutorial; you can thus run them
63 | instead to check functionality (this should take 1-2 minutes).
64 | ```bash
65 | cd /your/clone/of/lightcurver
66 | pytest .
67 | ```
68 |
69 | If you are going to use a local installation of `Astrometry.net`, do not forget to download their index files as well! The combination of 4100 and 5200 should do the trick.
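
For reference, a minimal sketch of fetching one index series; the server layout and destination directory are assumptions on my part, so check the Astrometry.net documentation and your `astrometry.cfg` for the paths that apply to your installation:
```bash
# download the 4100-series index files into the directory scanned by solve-field
wget -r -nd -np -A 'index-*.fits' -P /usr/local/astrometry/data http://data.astrometry.net/4100/
```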
70 |
--------------------------------------------------------------------------------
/docs/mkdocs/contents/norm_coeff_plot_example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/docs/mkdocs/contents/norm_coeff_plot_example.png
--------------------------------------------------------------------------------
/docs/mkdocs/contents/psf_plot_example.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/docs/mkdocs/contents/psf_plot_example.jpg
--------------------------------------------------------------------------------
/docs/mkdocs/contents/references.bib:
--------------------------------------------------------------------------------
1 | % starred
2 | @ARTICLE{starred,
3 | author = {{Michalewicz}, Kevin and {Millon}, Martin and {Dux}, Fr{\'e}d{\'e}ric and {Courbin}, Fr{\'e}d{\'e}ric},
4 | title = "{STARRED: a two-channel deconvolution method with Starlet regularization}",
5 | journal = {The Journal of Open Source Software},
6 | keywords = {Python, PSF, Jupyter Notebook, astronomy, deconvolution},
7 | year = 2023,
8 | month = may,
9 | volume = {8},
10 | number = {85},
11 | eid = {5340},
12 | pages = {5340},
13 | doi = {10.21105/joss.05340},
14 | adsurl = {https://ui.adsabs.harvard.edu/abs/2023JOSS....8.5340M},
15 | adsnote = {Provided by the SAO/NASA Astrophysics Data System}
16 | }
--------------------------------------------------------------------------------
/docs/mkdocs/contents/single_frame_wide_field.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/docs/mkdocs/contents/single_frame_wide_field.jpg
--------------------------------------------------------------------------------
/docs/mkdocs/contents/starphotom_plot_example.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/docs/mkdocs/contents/starphotom_plot_example.jpg
--------------------------------------------------------------------------------
/docs/mkdocs/mkdocs.yml:
--------------------------------------------------------------------------------
1 | site_name: lightcurver
2 | docs_dir: 'contents'
3 | theme:
4 | name: material
5 | highlightjs: true
6 | hljs_languages:
7 | - yaml
8 | - rust
9 | - python
10 | - bash
11 | icon:
12 | annotation: material/plus-circle
13 | admonition:
14 | : material/alert-circle
15 | logo: ./lightcurver_logo.svg
16 | palette:
17 | # Palette toggle for light mode
18 | - scheme: default
19 | toggle:
20 | icon: material/brightness-7
21 | name: Switch to dark mode
22 |
23 | # Palette toggle for dark mode
24 | - scheme: slate
25 | toggle:
26 | icon: material/brightness-4
27 | name: Switch to light mode
28 | features:
29 | - navigation.instant
30 | markdown_extensions:
31 | - admonition
32 | - attr_list
33 | - md_in_html
34 | - pymdownx.superfences
35 | - pymdownx.tabbed:
36 | alternate_style: true
37 | plugins:
38 | - search
39 | - mkdocs-nav-weight
40 | copyright: Copyright © 2024, 2025 Frédéric Dux
41 | repo_url: https://github.com/duxfrederic/lightcurver/
42 |
--------------------------------------------------------------------------------
/lightcurver/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/lightcurver/__init__.py
--------------------------------------------------------------------------------
/lightcurver/pipeline/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/lightcurver/pipeline/__init__.py
--------------------------------------------------------------------------------
/lightcurver/pipeline/pipeline_dependency_graph.yaml:
--------------------------------------------------------------------------------
1 | # here we define the graph of dependencies for our pipeline.
2 | # each task has a description and dependencies.
3 | # the nature can either be:
4 | # - independent_each_frame -- can potentially be parallelized and executed in whatever order for all frames
5 | # that have not had this step executed yet.
6 | # - all_frames_at_once -- process that has to act on all the frames at once, nothing to parallelize here.
7 | # - all_missing_frames_at_once -- process that has to be run on all the frames that are missing at once.
8 | tasks:
9 | - name: initialize_database
10 | description: "Just making sure the database exists, with the right schema."
11 | nature: independent
12 | dependencies: []
13 | - name: read_convert_skysub_character_catalog
14 | description: "reads the images from a directory, converts them to electrons, subtracts the sky, finds sources, measures noise, seeing, calculates ephemeris, records all to database"
15 | dependencies: [initialize_database]
16 | nature: independent_each_frame
17 | - name: plate_solving
18 | description: "Uses astrometry.net's solve-field and initial guess of plate scale provided in user config to add a reliable WCS to each image fast. If user config states that images are already plate solved, skip."
19 | dependencies: [read_convert_skysub_character_catalog]
20 | nature: independent_each_frame
21 | - name: calculate_common_and_total_footprint
22 | description: "Checks the footprint of the images, see how big the common footprint is. Detects bad pointings and flags them if it improves the size of the common footprint."
23 | dependencies: [plate_solving]
24 | nature: all_frames_at_once
25 | - name: query_gaia_for_stars
26 | description: "Given the footprints above, finds stars in gaia for PSF modelling and normalization. Will first try the common footprint, but if too small (less than 10 good stars), will expand to total footprint for more. The PSF and normalization will then be based off different stars on a frame-by-frame basis."
27 | dependencies: [calculate_common_and_total_footprint]
28 | nature: independent
29 | - name: stamp_extraction
30 | description: "Extracts stamps (size defined in user config) of all good stars and all epochs. Also extract stamps of regions of interest (ROIs) (user config, size defined per ROI). Saves the stamps to an HDF5 file, and creates automatic masks for the stars (masking any object that is not the central star.) Also cleans the cosmics using L.A.Cosmic"
31 | dependencies: [query_gaia_for_stars]
32 | nature: independent_each_frame
33 | - name: psf_modeling
34 | description: "Creates a PSF model for each frame"
35 | dependencies: [stamp_extraction]
36 | nature: independent_each_frame
37 | - name: star_photometry
38 | description: "Uses the PSF model to do PSF photometry of each star, using STARRED (forward modelling). The fluxes (per frame and per star) are saved in the database."
39 | dependencies: [psf_modeling]
40 | nature: all_missing_frames_at_once
41 | - name: calculate_normalization_coefficient
42 | description: "Given the star photometry, calculates a representative relative flux for each image. This flux will be 1 for a reference image (must be user defined in config). Saves the normalization coefficient and its uncertainty in the database."
43 | dependencies: [star_photometry]
44 | nature: all_missing_frames_at_once
45 | - name: calculate_absolute_zeropoints
46 | description: "Uses Gaia magnitudes to estimate the zeropoint in our band."
47 | dependencies: [star_photometry]
48 | nature: all_frames_at_once
49 | - name: prepare_calibrated_cutouts
50 | description: "Prepares cutouts for each ROI and each frame, calibrated in flux by the normalization coefficient."
51 | dependencies: [calculate_absolute_zeropoints, calculate_normalization_coefficient]
52 | nature: all_frames_at_once
53 | - name: model_calibrated_cutouts
54 | description: "This is the final step, we jointly model all the pixels of the ROI using STARRED."
55 | dependencies: [prepare_calibrated_cutouts, psf_modeling, stamp_extraction]
56 | nature: all_frames_at_once
57 |
58 |
59 |
--------------------------------------------------------------------------------
/lightcurver/pipeline/state_checkers.py:
--------------------------------------------------------------------------------
1 | # here we'll define simple functions that check the health of our products.
2 | # can be beneficial to stop the pipeline if an early stage fails.
3 |
4 | from ..structure.database import get_count_based_on_conditions
5 | from ..structure.user_config import get_user_config
6 |
7 |
8 | def check_plate_solving():
9 | user_config = get_user_config()
10 | plate_solved = get_count_based_on_conditions(conditions='plate_solved = 1 and eliminated = 0', table='frames')
11 | total = get_count_based_on_conditions(conditions='eliminated = 0', table='frames')
12 | min_success_fraction = user_config['plate_solving_min_success_fraction']
13 | reasonable_loss = plate_solved / total >= min_success_fraction
14 | if not reasonable_loss:
15 | message = "The plate solving failed too often given your config's plate_solving_min_success_fraction"
16 | message += "There might be a problem, or there might just be a lot of difficult images"
17 | message += "Please investigate"
18 | else:
19 | message = "Plate solving succeeded often enough."
20 | return reasonable_loss, message
21 |
22 |
23 |
--------------------------------------------------------------------------------
/lightcurver/pipeline/task_wrappers.py:
--------------------------------------------------------------------------------
1 | # This file wraps around the processes defined in the processes subpackage.
2 | # The wrappers typically determine which frames / regions / psfs (depending on task) need processing
3 | # before proceeding, and add structure around multiprocessing when needed.
4 | # This is not needed for all processes.
5 | from multiprocessing import Pool, Manager
6 | import os
7 | import numpy as np
8 | import pandas as pd
9 | import logging
10 | import logging.handlers
11 | from pathlib import Path
12 | import functools
13 | import json
14 |
15 |
16 | from ..structure.user_config import get_user_config
17 | from ..structure.database import get_pandas, execute_sqlite_query
18 | from ..processes.frame_importation import process_new_frame
19 | from ..processes.plate_solving import solve_one_image_and_update_database, select_frames_needing_plate_solving
20 | from ..utilities.footprint import (calc_common_and_total_footprint, get_frames_hash,
21 | save_combined_footprints_to_db, identify_and_eliminate_bad_pointings)
22 | from ..plotting.footprint_plotting import plot_footprints
23 | from ..processes.star_extraction import extract_sources_from_sky_sub_image
24 |
25 |
26 | def worker_init(log_queue):
27 | logger = logging.getLogger(f"worker-{os.getpid()}")
28 | logger.setLevel(logging.INFO)
29 | q_handler = logging.handlers.QueueHandler(log_queue)
30 | logger.addHandler(q_handler)
31 | logger.propagate = False
32 |
33 |
34 | def log_process(func):
35 | @functools.wraps(func)
36 | def wrapper(args):
37 | frame_id_for_logger = args[-1]
38 | logger = logging.getLogger(f"Process-{os.getpid()}")
39 | logger.info(f"{func.__name__} .... Processing image with ID {frame_id_for_logger}")
40 | return func(*args[:-1]) # execute original function without the last arg (used for logging)
41 | return wrapper
42 |
43 |
44 | @log_process
45 | def process_new_frame_wrapper(*args):
46 | process_new_frame(*args)
47 |
48 |
49 | def read_convert_skysub_character_catalog():
50 | # boilerplate logger setup
51 | log_queue = Manager().Queue()
52 | base_logger = logging.getLogger("lightcurver")
53 | listener = logging.handlers.QueueListener(log_queue, *base_logger.handlers)
54 | listener.start()
55 | logger = logging.getLogger("lightcurver.importation")
56 |
57 | # find the new frames, we compare on file name!
58 | user_config = get_user_config()
59 | match_pattern = user_config.get('files_match_pattern', '*')
60 | available_frames = sorted(sum([list(raw_dir.glob(match_pattern)) for raw_dir in user_config['raw_dirs']], start=[]))
61 | df_available_frames = pd.DataFrame({'frame_name': [frame.name for frame in available_frames]})
62 | already_imported = get_pandas(columns=['original_image_path', 'id'])
63 | if not already_imported.empty:
64 | already_imported['name'] = already_imported.apply(lambda row: Path(row['original_image_path']).name, axis=1)
65 | else:
66 | # just a decoy
67 | already_imported['name'] = pd.Series(dtype='str')
68 | new_frames_df = df_available_frames[~df_available_frames['frame_name'].isin(already_imported['name'])]
69 | new_frames = [frame for frame in available_frames if frame.name in new_frames_df['frame_name'].tolist()]
70 | logger.info(f"Importing {len(new_frames)} new frames from directories {user_config['raw_dirs']}.")
71 | logger.info(f"Will write them to {user_config['workdir'] / 'frames'}")
72 | logger.info(f"Database will be at {user_config['workdir'] / 'database.sqlite3'}")
73 |
74 | with Pool(processes=user_config['multiprocessing_cpu_count'],
75 | initializer=worker_init, initargs=(log_queue,)) as pool:
76 | pool.map(process_new_frame_wrapper, [
77 | (new_frame,
78 | user_config,
79 | new_frame) # duplicating so to have an identifier for logger.
80 | for new_frame in new_frames
81 | ])
82 |
83 | listener.stop()
84 |
85 |
86 | @log_process
87 | def solve_one_image_and_update_database_wrapper(*args):
88 | solve_one_image_and_update_database(*args)
89 |
90 |
91 | def plate_solve_all_frames():
92 | # boilerplate logger setup
93 | log_queue = Manager().Queue()
94 | base_logger = logging.getLogger("lightcurver")
95 | listener = logging.handlers.QueueListener(log_queue, *base_logger.handlers)
96 | listener.start()
97 | logger = logging.getLogger("lightcurver.plate_solving")
98 |
99 | user_config = get_user_config()
100 | workdir = Path(user_config['workdir'])
101 | frames_to_process = select_frames_needing_plate_solving(user_config=user_config, logger=logger)
102 | logger.info(f"Ready to plate solve {len(frames_to_process)} frames.")
103 |
104 | with Pool(processes=user_config['multiprocessing_cpu_count'],
105 | initializer=worker_init, initargs=(log_queue,)) as pool:
106 | pool.map(solve_one_image_and_update_database_wrapper, [
107 | (workdir / row['image_relpath'],
108 | workdir / row['sources_relpath'],
109 | user_config,
110 | row['id'],
111 | row['id']) # duplicating row['id'] for logger naming
112 | for index, row in frames_to_process.iterrows()
113 | ])
114 |
115 | listener.stop()
116 |
117 |
118 | def calc_common_and_total_footprint_and_save():
119 | """
120 | verifies whether the footprint was already calculated for the set of frames at hand
121 | if not, calculates it and stores it.
122 |
123 | Returns: None
124 |
125 | """
126 | logger = logging.getLogger("lightcurver.combined_footprint_calculation")
127 | # so, before we do anything, let us eliminate the really obvious bad pointings.
128 | identify_and_eliminate_bad_pointings()
129 | # ok, keep going.
130 | query = """
131 | SELECT frames.id, footprints.polygon
132 | FROM footprints
133 | JOIN frames ON footprints.frame_id = frames.id
134 | WHERE frames.eliminated != 1;
135 | """
136 | results = execute_sqlite_query(query)
137 | frames_ids = [result[0] for result in results]
138 | frames_hash = get_frames_hash(frames_ids)
139 | # check if already exists
140 | count = execute_sqlite_query("SELECT COUNT(*) FROM combined_footprint WHERE hash = ?",
141 | params=(frames_hash,))[0][0]
142 | if count > 0:
143 | logger.info(f'This combined footprint (hash {frames_hash}) was already calculated.')
144 | return
145 | logger.info(f'Calculating combined footprint (hash {frames_hash}) (loaded polygons from database, now combining).')
146 | polygon_list = [np.array(json.loads(result[1])) for result in results]
147 | common_footprint, largest_footprint = calc_common_and_total_footprint(polygon_list)
148 |
149 | user_config = get_user_config()
150 | plots_dir = user_config['plots_dir']
151 | footprints_plot_path = plots_dir / 'footprints.jpg'
152 | plot_footprints(polygon_list, common_footprint, largest_footprint, save_path=footprints_plot_path)
153 | logger.info(f'Combined footprint plot (hash {frames_hash}) saved at {footprints_plot_path}.')
154 |
155 | # ok, save it
156 | save_combined_footprints_to_db(frames_hash, common_footprint, largest_footprint)
157 | logger.info(f'Combined footprint (hash {frames_hash}) saved to db.')
158 |
159 |
160 | @log_process
161 | def extract_sources_from_sky_sub_image_wrapper(*args):
162 | extract_sources_from_sky_sub_image(*args)
163 |
164 |
165 | def source_extract_all_images(conditions=None):
166 | """
167 | This is not called directly in the pipeline, but can be useful if you want to re-extract all the sources
168 | with different parameters.
169 | So, unlike a routine called by the pipeline, this one takes an argument.
170 |
171 | :param conditions: list of strings, e.g. ['eliminated = 0', 'plate_solved = 0']. Default: None.
172 | To filter what frames will be source-extracted again.
173 | :returns: Nothing
174 |
175 | """
176 | log_queue = Manager().Queue()
177 | base_logger = logging.getLogger("lightcurver")
178 | listener = logging.handlers.QueueListener(log_queue, *base_logger.handlers)
179 | listener.start()
180 | logger = logging.getLogger("lightcurver.source_extraction")
181 |
182 | user_config = get_user_config()
183 | workdir = Path(user_config['workdir'])
184 | frames_to_process = get_pandas(columns=['id', 'image_relpath', 'sources_relpath',
185 | 'exptime', 'background_rms_electron_per_second'],
186 | conditions=conditions)
187 | logger.info(f"Extracting sources from {len(frames_to_process)} frames.")
188 |
189 | with Pool(processes=user_config['multiprocessing_cpu_count'],
190 | initializer=worker_init, initargs=(log_queue,)) as pool:
191 | pool.map(extract_sources_from_sky_sub_image_wrapper, [
192 | (workdir / row['image_relpath'],
193 | workdir / row['sources_relpath'],
194 | user_config['source_extraction_threshold'],
195 | user_config['source_extraction_min_area'],
196 | row['exptime'],
197 | row['background_rms_electron_per_second'],
198 | user_config['plots_dir'] / 'source_extraction' / f"{Path(row['image_relpath']).stem}.jpg",
199 | row['id']) # for logger naming
200 | for index, row in frames_to_process.iterrows()
201 | ])
202 |
203 | listener.stop()
204 |
--------------------------------------------------------------------------------
/lightcurver/pipeline/workflow_manager.py:
--------------------------------------------------------------------------------
1 | import yaml
2 | from importlib import resources
3 | from collections import deque
4 | import logging
5 | from datetime import datetime
6 | import os
7 |
8 |
9 | from ..structure.user_config import get_user_config, compare_config_with_pipeline_delivered_one
10 | from ..structure.database import initialize_database
11 | from ..processes.cutout_making import extract_all_stamps
12 | from ..processes.star_querying import query_gaia_stars
13 | from ..processes.psf_modelling import model_all_psfs
14 | from ..processes.star_photometry import do_star_photometry
15 | from ..processes.normalization_calculation import calculate_coefficient
16 | from ..processes.roi_file_preparation import prepare_roi_file
17 | from ..processes.roi_modelling import do_modelling_of_roi
18 | from ..processes.alternate_plate_solving_with_gaia import alternate_plate_solve_gaia
19 | from ..processes.alternate_plate_solving_adapt_existing_wcs import alternate_plate_solve_adapt_ref
20 | from ..processes.absolute_zeropoint_calculation import calculate_zeropoints
21 | from ..structure.exceptions import TaskWasNotSuccessful
22 | from .task_wrappers import (read_convert_skysub_character_catalog,
23 | plate_solve_all_frames, calc_common_and_total_footprint_and_save)
24 | from .state_checkers import check_plate_solving
25 |
26 |
27 | def setup_base_logger():
28 | time_now = datetime.now().strftime("%Y-%m-%dT%H-%M-%S")
29 | user_config = get_user_config()
30 | log_dir = user_config['workdir'] / 'logs'
31 | log_file_path = str(log_dir / f"{time_now}.log")
32 |
33 | logging.basicConfig(level=logging.INFO,
34 | format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
35 |
36 | base_logger = logging.getLogger("lightcurver")
37 | file_handler = logging.FileHandler(log_file_path)
38 | file_handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
39 | base_logger.addHandler(file_handler)
40 | base_logger.setLevel(logging.INFO)
41 |
42 |
43 | class WorkflowManager:
44 | """
45 | A small class that will run our tasks. It serves the purpose of running the tasks
46 | in the right order given the dependencies in the pipeline_dependency_graph.yaml file.
47 | We can also use it to assign different versions of the tasks based on the user config.
48 |
49 | todo: implement post checks for each task
50 | todo: implement pre checks so we can skip tasks from the workflow manager? for now pre-checks are done within each
51 | task.
52 | """
53 | def __init__(self, logger=None):
54 | # initial check: make sure this version of the pipeline doesn't have keywords in its config that the
55 | # user config is missing.
56 | extra_keys = compare_config_with_pipeline_delivered_one()
57 | extra_values = extra_keys['pipeline_extra_keys_values']
58 | if extra := extra_keys['extra_keys_in_pipeline_config']:
59 | # ok, make an informative error message here
60 | message = "You are missing the following parameters in your config file:\n"
61 | message += f"{'Parameter':<50} {'(Default value)':<50}\n"
62 | message += f"{'-' * 50} {'-' * 50}\n"
63 |
64 | for key in extra:
65 | value = extra_values[key]
66 | formatted_value = "None (not set)" if value is None else str(value)
67 | message += f"{key:<50} {formatted_value:<50}\n"
68 |
69 | raise RuntimeError(message)
70 |
71 | if extra := extra_keys['extra_keys_in_user_config']:
72 | error_message = ("You have parameters in your config file that"
73 | f" are not in the latest config version: {extra}. \n"
74 | "You might want to remove them, or check against the latest config "
75 | "shipped with the pipeline.\n"
76 | "To ignore this error, set the `LIGHTCURVER_RELAX_CONFIG_CHECK` "
77 | "environment variable to 1.")
78 | if 'LIGHTCURVER_RELAX_CONFIG_CHECK' in os.environ:
79 | print('===== Skipped error due to LIGHTCURVER_RELAX_CONFIG_CHECK environment variable being set ======')
80 | print(error_message)
81 | print('===== Skipped error due to LIGHTCURVER_RELAX_CONFIG_CHECK environment variable being set ======')
82 | else:
83 | raise RuntimeError(error_message)
84 |
85 | # load the actual config ...
86 | self.user_config = get_user_config()
87 |
88 | # the plan: load the yaml defining the pipeline steps.
89 | with resources.open_text('lightcurver.pipeline', 'pipeline_dependency_graph.yaml') as file:
90 | self.pipe_config = yaml.safe_load(file)
91 | self.task_graph = {}
92 | self.build_dependency_graph()
93 | # some tasks can be done in multiple ways, let's define this here
94 | if self.user_config['plate_solving_strategy'] == 'plate_solve':
95 | plate_solve_function = plate_solve_all_frames
96 | elif self.user_config['plate_solving_strategy'] == 'alternate_gaia_solve':
97 | plate_solve_function = alternate_plate_solve_gaia
98 | elif self.user_config['plate_solving_strategy'] == 'adapt_wcs_from_reference':
99 | plate_solve_function = alternate_plate_solve_adapt_ref
100 | else:
101 | raise AssertionError("The config's plate_solving_strategy should be plate_solve, "
102 | "alternate_gaia_solve or adapt_wcs_from_reference.")
103 | self.task_attribution = {
104 | 'initialize_database': initialize_database,
105 | 'read_convert_skysub_character_catalog': read_convert_skysub_character_catalog,
106 | 'plate_solving': plate_solve_function,
107 | 'calculate_common_and_total_footprint': calc_common_and_total_footprint_and_save,
108 | 'query_gaia_for_stars': query_gaia_stars,
109 | 'stamp_extraction': extract_all_stamps,
110 | 'psf_modeling': model_all_psfs,
111 | 'star_photometry': do_star_photometry,
112 | 'calculate_normalization_coefficient': calculate_coefficient,
113 | 'calculate_absolute_zeropoints': calculate_zeropoints,
114 | 'prepare_calibrated_cutouts': prepare_roi_file,
115 | 'model_calibrated_cutouts': do_modelling_of_roi,
116 | }
117 |
118 | self.post_task_attribution = {
119 | 'plate_solving': check_plate_solving
120 | }
121 | assert set(self.task_attribution.keys()) == set([entry['name'] for entry in self.pipe_config['tasks']])
122 |
123 | if logger is None:
124 | logger = logging.getLogger(__name__)
125 | setup_base_logger()
126 | self.logger = logger
127 |
128 |
129 | def build_dependency_graph(self):
130 | for task in self.pipe_config['tasks']:
131 | task_name = task['name']
132 | self.task_graph[task_name] = {'dependencies': set(task['dependencies']), 'next': []}
133 | for dep in task['dependencies']:
134 | if dep in self.task_graph:
135 | self.task_graph[dep]['next'].append(task_name)
136 | else:
137 | self.task_graph[dep] = {'dependencies': set(), 'next': [task_name]}
138 |
139 | def topological_sort(self):
140 | """
141 | Returns the tasks in dependency order (topological sort), in case tasks gain multiple dependencies in the future.
142 |
143 | Returns: list of tasks in the right order.
144 | """
145 | #
146 | in_degree = {task: 0 for task in self.task_graph}
147 | for task in self.task_graph:
148 | for next_task in self.task_graph[task]['next']:
149 | in_degree[next_task] += 1
150 |
151 | queue = deque([task for task in in_degree if in_degree[task] == 0])
152 | sorted_tasks = []
153 |
154 | while queue:
155 | task = queue.popleft()
156 | sorted_tasks.append(task)
157 | for next_task in self.task_graph[task]['next']:
158 | in_degree[next_task] -= 1
159 | if in_degree[next_task] == 0:
160 | queue.append(next_task)
161 |
162 | if len(sorted_tasks) != len(self.task_graph):
163 | raise Exception("A cycle was detected in the task dependencies, or a task is missing.")
164 |
165 | return sorted_tasks
166 |
167 | def run(self, start_step=None, stop_step=None):
168 | """
169 | Runs the pipeline from the specified start step to the stop step.
170 |
171 | Args:
172 | start_step (str): Task name to start from. If None, starts from the beginning.
173 | stop_step (str): Task name to stop at. If None, runs to completion.
174 |
175 | Returns:
176 | None
177 | """
178 | self.logger.info(f"Workflow manager: Running tasks from {start_step or 'start'} to {stop_step or 'end'}. "
179 | f"Working directory: {self.user_config['workdir']}")
180 |
181 | sorted_tasks = self.topological_sort()
182 | start_index = sorted_tasks.index(start_step) if start_step else 0
183 | stop_index = sorted_tasks.index(stop_step) + 1 if stop_step else len(sorted_tasks)
184 |
185 | for task_name in sorted_tasks[start_index:stop_index]:
186 | task = next((item for item in self.pipe_config['tasks'] if item['name'] == task_name), None)
187 | if task:
188 | self.execute_task(task)
189 |
190 | post_check = self.post_task_attribution.get(task_name, None)
191 | if post_check:
192 | success, message = post_check()
193 | if not success:
194 | self.logger.error(
195 | f'Post-check failed for {task_name}. Stopping pipeline with message: {message}'
196 | )
197 | raise TaskWasNotSuccessful(message)
198 | else:
199 | self.logger.info(f'Post-check successful for task {task_name}, with message: {message}')
200 |
201 | def execute_task(self, task):
202 | # Assume task_func is a callable for simplicity
203 | task_func = self.task_attribution.get(task['name'])
204 | self.logger.info(
205 | f"Running task {task['name']}. Working directory: {self.user_config['workdir']}"
206 | )
207 | task_func()
208 |
209 | def get_tasks(self):
210 | return sorted(list(self.task_attribution.keys()))
211 |
212 |
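213 | # --- hedged usage sketch, not part of the original module ---
214 | # The run() method above accepts task names as bounds, so the pipeline can be stopped at or resumed
215 | # from any step. Assuming the enclosing class is exposed as `WorkflowManager` (an assumption here)
216 | # and that a valid working directory / user config is already in place, a driver could look like:
217 | #
218 | #     from lightcurver.pipeline.workflow_manager import WorkflowManager
219 | #
220 | #     wf_manager = WorkflowManager()
221 | #     print(wf_manager.get_tasks())                  # alphabetically sorted task names
222 | #     wf_manager.run(stop_step='psf_modeling')       # run everything up to and including PSF modelling
223 | #     wf_manager.run(start_step='star_photometry')   # later, resume from star photometry to the end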
--------------------------------------------------------------------------------
/lightcurver/plotting/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/lightcurver/plotting/__init__.py
--------------------------------------------------------------------------------
/lightcurver/plotting/footprint_plotting.py:
--------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 | from shapely.geometry import Polygon
3 |
4 |
5 | def plot_footprints(footprint_arrays, common_footprint=None, largest_footprint=None, save_path=None):
6 | """
7 | Shows the polygons representing the footprints of the frames on a single plot, optionally with
8 | the common and largest footprints overlaid. Saves to save_path if given, else returns (fig, ax).
9 | """
10 | fig = plt.figure(figsize=(10, 8))
11 | if (largest_footprint is not None) and (not largest_footprint.is_empty):
12 | plt.fill(*largest_footprint.exterior.xy, alpha=0.2, color='purple', label='Largest Footprint')
13 | # Plot each footprint array
14 | for footprint_array in footprint_arrays:
15 | polygon = Polygon(footprint_array)
16 | x, y = polygon.exterior.xy
17 | plt.plot(x, y, color='gray', lw=0.5)
18 | if (common_footprint is not None) and (not common_footprint.is_empty):
19 | plt.fill(*common_footprint.exterior.xy, alpha=0.5, color='blue', label='Common Footprint')
20 | plt.legend()
21 | plt.xlabel('R.A.')
22 | plt.ylabel('Dec.')
23 | if save_path is not None:
24 | plt.tight_layout()
25 | plt.savefig(save_path, bbox_inches='tight', pad_inches=0.)
26 | plt.close()
27 | else:
28 | return fig, plt.gca()
29 |
30 |
31 |
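32 | if __name__ == '__main__':
33 |     # hedged example, not part of the original module: plot two synthetic, partially overlapping
34 |     # footprints, with their intersection standing in for the common footprint.
35 |     import numpy as np
36 | 
37 |     footprint_a = np.array([[10.0, -5.0], [10.2, -5.0], [10.2, -4.8], [10.0, -4.8]])
38 |     footprint_b = footprint_a + 0.1  # same square, shifted by 0.1 degree in both directions
39 |     common = Polygon(footprint_a).intersection(Polygon(footprint_b))
40 |     fig, ax = plot_footprints([footprint_a, footprint_b], common_footprint=common)
41 |     fig.savefig('footprints_example.png')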
--------------------------------------------------------------------------------
/lightcurver/plotting/html_visualisation.py:
--------------------------------------------------------------------------------
1 | from importlib import resources
2 |
3 |
4 | def generate_lightcurve_html(df, output_file="lightcurves.html"):
5 | """
6 | Generates an interactive light curve visualization HTML file.
7 |
8 | Args:
9 | df: pandas.DataFrame containing light curve data
10 | output_file: Path for output HTML file
11 | """
12 |
13 | csv_data = df.to_csv(index=False, float_format="%.6f").strip()
14 |
15 | template = resources.read_text("lightcurver.plotting", "plot_curves_template.html")
16 | # inject light curves
17 | html = template.replace(
18 | '// CSV_DATA_PLACEHOLDER',
19 | f'const csvData = `{csv_data}`;'
20 | )
21 |
22 | with open(output_file, 'w') as f:
23 | f.write(html)
24 |
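25 | 
26 | if __name__ == '__main__':
27 |     # hedged example, not part of the original module: the column names below are purely
28 |     # illustrative; the function simply serializes whatever dataframe it receives to CSV and
29 |     # injects it into the packaged HTML template.
30 |     import pandas as pd
31 |     demo = pd.DataFrame({'mjd': [60000.0, 60001.0, 60002.0],
32 |                          'A_flux': [120.3, 118.9, 121.5],
33 |                          'A_flux_err': [1.1, 1.2, 1.1]})
34 |     generate_lightcurve_html(demo, output_file='lightcurves_demo.html')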
--------------------------------------------------------------------------------
/lightcurver/plotting/image_plotting.py:
--------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 | from astropy.visualization import ZScaleInterval, ImageNormalize
3 | from astropy.visualization.stretch import AsinhStretch
4 |
5 |
6 | def plot_image(image, wcs=None, save_path=None, colorbar=False, **imshow_kwargs):
7 | """
8 | Plot an image with a ZScale interval and asinh stretch (for debugging).
9 |
10 | Parameters:
11 | image (numpy.ndarray): Image data, 2D array.
12 | wcs (astropy.wcs.WCS object): the WCS corresponding to the data. default None.
13 | save_path: do we save the plot somewhere? default None.
14 | imshow_kwargs: additional keyword arguments to be passed to matplotlib imshow.
15 | """
16 | fig = plt.figure(figsize=(11, 11))
17 | if wcs is not None:
18 | ax = plt.subplot(projection=wcs)
19 | else:
20 | ax = plt.subplot()
21 | norm = ImageNormalize(image,
22 | interval=ZScaleInterval(contrast=0.4),
23 | stretch=AsinhStretch()
24 | )
25 | ax.imshow(image, cmap='gray', origin='lower', norm=norm, **imshow_kwargs)
26 | if colorbar:
27 | plt.colorbar()
28 |
29 | if save_path is not None:
30 | plt.tight_layout()
31 | plt.savefig(save_path, bbox_inches='tight', pad_inches=0.)
32 | plt.close()
33 | else:
34 | return fig, ax
35 |
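36 | if __name__ == '__main__':
37 |     # hedged example, not part of the original module: display a synthetic noise image with two
38 |     # bright patches, using the ZScale + asinh normalization defined above.
39 |     import numpy as np
40 |     rng = np.random.default_rng(0)
41 |     image = rng.normal(loc=100., scale=5., size=(200, 200))
42 |     image[50:53, 60:63] += 500.      # fake star
43 |     image[120:123, 140:143] += 800.  # another fake star
44 |     fig, ax = plot_image(image, colorbar=False)
45 |     fig.savefig('image_example.png')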
--------------------------------------------------------------------------------
/lightcurver/plotting/joint_modelling_plotting.py:
--------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 | import numpy as np
3 |
4 |
5 | def plot_joint_modelling_diagnostic(datas, noisemaps, residuals, loss_curve, chi2_per_frame, starlet_background=None,
6 | save_path=None):
7 | """
8 | A diagnostic of how well a joint modelling went: we plot a stack of the data, a stack of
9 | the residuals, and the evolution of the loss during optimization.
10 | Precise photometry is the core of this package, so we need to avoid systematics:
11 | convergence when modelling the pixels is essential.
12 | Args:
13 | datas: 3d array of shape (N, nx, ny): N frames, one slice per epoch.
14 | noisemaps: same as above but for the noisemaps
15 | residuals: same as above but for the residuals
16 | loss_curve: 1D array containing the evolution of the loss during optimization
17 | chi2_per_frame: 1D array, the chi2 value of the fit, one per slice.
18 | starlet_background: 2D array, in case we included a regularized pixelated background in the model. default None.
19 | save_path: optional, string or path, where to save the plot.
20 |
21 | Returns: None
22 |
23 | """
24 | plt.style.use('dark_background')
25 | cmap = 'viridis'
26 | cmap_residuals = 'coolwarm'
27 | text_color = 'red'
28 | text_size = 11
29 |
30 | data_stack = np.mean(datas, axis=0) # mean and not median, we wanna see them outliers
31 | residuals_stack = np.mean(residuals, axis=0)
32 | rel_residuals_stack = np.mean(residuals / noisemaps, axis=0)
33 |
34 | sub_size = 3
35 | ncols = 5 if starlet_background is None else 6
36 | fig_size_mult = 4.6 if starlet_background is None else 5.6
37 | fig, ax = plt.subplots(1, ncols, figsize=(fig_size_mult * sub_size, sub_size))
38 | ax = ax.flatten()
39 | # data stack
40 | ax[0].imshow(data_stack, cmap=cmap, aspect='auto', origin='lower')
41 | ax[0].axis('off')
42 | ax[0].text(0.5, 0.01, 'Data stack',
43 | horizontalalignment='center',
44 | verticalalignment='bottom',
45 | transform=ax[0].transAxes,
46 | color=text_color, fontsize=text_size,
47 | weight='bold')
48 | # residuals stack
49 | ax[1].imshow(residuals_stack, cmap=cmap_residuals, aspect='auto', origin='lower')
50 | ax[1].axis('off')
51 | ax[1].text(0.5, 0.01, 'residuals stack',
52 | horizontalalignment='center',
53 | verticalalignment='bottom',
54 | transform=ax[1].transAxes,
55 | color=text_color, fontsize=text_size,
56 | weight='bold')
57 |
58 | # rel residuals stack
59 | ax[2].imshow(rel_residuals_stack, cmap=cmap_residuals, aspect='auto', origin='lower')
60 | ax[2].axis('off')
61 | ax[2].text(0.5, 0.01, 'rel. residuals stack',
62 | horizontalalignment='center',
63 | verticalalignment='bottom',
64 | transform=ax[2].transAxes,
65 | color=text_color, fontsize=text_size,
66 | weight='bold')
67 |
68 | # loss plot
69 | ax[3].plot(loss_curve, color='white')
70 | ax[3].text(0.5, 0.99, 'loss',
71 | horizontalalignment='center',
72 | verticalalignment='top',
73 | transform=ax[3].transAxes,
74 | color='white', fontsize=text_size,
75 | weight='bold')
76 |
77 | ax[3].axis('off')
78 | # chi2 plot
79 | ax[4].hist(chi2_per_frame, color='white', bins=len(chi2_per_frame))
80 | ax[4].text(0.5, 0.99, 'chi2 per frame',
81 | horizontalalignment='center',
82 | verticalalignment='top',
83 | transform=ax[4].transAxes,
84 | color='white', fontsize=text_size,
85 | weight='bold')
86 | # and view of the background common to all epochs if we included one, just to make sure it isn't nonsense.
87 | if starlet_background is not None:
88 | ax[5].imshow(starlet_background, origin='lower')
89 | ax[5].axis('off')
90 | ax[5].text(0.5, 0.99, 'regularized background',
91 | horizontalalignment='center',
92 | verticalalignment='top',
93 | transform=ax[5].transAxes,
94 | color='white', fontsize=text_size,
95 | weight='bold')
96 | plt.tight_layout()
97 |
98 | if save_path is not None:
99 | plt.savefig(save_path, pad_inches=0, bbox_inches='tight', dpi=130)
100 | plt.close()
101 | else:
102 | plt.show()
103 |
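104 | 
105 | if __name__ == '__main__':
106 |     # hedged example, not part of the original module: exercise the diagnostic with purely
107 |     # synthetic cutouts (5 epochs of 32x32 pixels), flat noisemaps and a decreasing fake loss.
108 |     rng = np.random.default_rng(1)
109 |     datas = rng.normal(1.0, 0.1, size=(5, 32, 32))
110 |     noisemaps = 0.1 * np.ones_like(datas)
111 |     residuals = rng.normal(0.0, 0.1, size=(5, 32, 32))
112 |     loss_curve = 1000. / np.arange(1, 200)
113 |     chi2_per_frame = rng.normal(1.0, 0.05, size=5)
114 |     plot_joint_modelling_diagnostic(datas, noisemaps, residuals, loss_curve, chi2_per_frame,
115 |                                     save_path='joint_modelling_diagnostic_example.png')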
--------------------------------------------------------------------------------
/lightcurver/plotting/normalization_plotting.py:
--------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 | import matplotlib.colors as colors
3 | import matplotlib as mpl
4 | import pandas as pd
5 |
6 | from ..structure.database import execute_sqlite_query
7 | from ..utilities.chi2_selector import get_chi2_bounds
8 |
9 |
10 | def plot_normalized_star_curves(combined_footprint_hash, save_path=None):
11 | """
12 | Given our config params (in particular chi2 rejection criterion), plot the fluxes of the stars that were
13 | used to build the normalization coefficient of the frames, normalized by this very coefficient.
14 | Args:
15 | combined_footprint_hash: footprint hash that we are working with.
16 | save_path: path or str, where we save the plot.
17 |
18 | Returns:
19 | None
20 | """
21 | # doing this because we set the stylesheet to dark for other plots, and we don't want it here (hard to read)
22 | mpl.rcParams.update(mpl.rcParamsDefault)
23 |
24 | # query the coefficients and fluxes
25 | fluxes_fit_chi2_min, fluxes_fit_chi2_max = get_chi2_bounds(psf_or_fluxes='fluxes')
26 |
27 | query_norm = """
28 | SELECT
29 | nc.*, f.mjd
30 | FROM
31 | normalization_coefficients nc
32 | JOIN
33 | frames f ON nc.frame_id = f.id
34 | WHERE
35 | coefficient > coefficient_uncertainty
36 | AND
37 | combined_footprint_hash = ?
38 | """
39 | df_norm = execute_sqlite_query(query=query_norm,
40 | params=(combined_footprint_hash,),
41 | use_pandas=True, is_select=True)
42 | query_fluxes = """
43 | SELECT
44 | sff.*, s.name AS name, s.gmag as gmag
45 | FROM
46 | star_flux_in_frame sff
47 | JOIN
48 | stars s ON sff.star_gaia_id = s.gaia_id
49 | WHERE
50 | sff.chi2 > ?
51 | AND
52 | sff.chi2 < ?
53 | AND
54 | sff.combined_footprint_hash = ?
55 | """
56 | df_fluxes = execute_sqlite_query(query=query_fluxes,
57 | params=(fluxes_fit_chi2_min, fluxes_fit_chi2_max, combined_footprint_hash,),
58 | use_pandas=True, is_select=True)
59 | df = pd.merge(df_norm, df_fluxes, on=['frame_id'])
60 |
61 | # normalize the fluxes
62 | df['normalized_flux'] = df['flux'] / df['coefficient']
63 | df['normalized_uncertainty'] = df['flux_uncertainty'] / df['coefficient']
64 |
65 | # ok, prepare plotting
66 | star_names = df['name'].unique()
67 | plot_size = 3.5
68 | n_rows = len(star_names) + 1
69 |
70 | fig, axs = plt.subplots(n_rows, 1, figsize=(2 * plot_size, n_rows * plot_size), sharex=True)
71 | axs = axs.flatten()
72 |
73 | # norm color scale across all stars based on chi2 values
74 | norm = colors.Normalize(vmin=fluxes_fit_chi2_min, vmax=fluxes_fit_chi2_max)
75 | cmap = plt.get_cmap('coolwarm')
76 |
77 | # norm coefficient plot
78 | axs[0].errorbar(df_norm['mjd'], df_norm['coefficient'],
79 | yerr=df_norm['coefficient_uncertainty'],
80 | fmt='.', ms=0,
81 | ecolor='gray', alpha=0.8, zorder=-1000, elinewidth=0.8)
82 | sc = axs[0].scatter(df_norm['mjd'], df_norm['coefficient'], s=5, edgecolor='none', color='red')
83 | axs[0].set_ylabel('Normalization Coefficient')
84 | axs[0].grid(True)
85 | cl = fig.colorbar(sc, ax=axs[0], label='chi2')
86 |
87 | # norm star fluxes
88 | for i, name in enumerate(sorted(star_names)):
89 | ax = axs[i + 1]
90 | star_data = df[df['name'] == name]
91 | medflux = star_data['normalized_flux'].median()
92 | err = ((star_data['normalized_uncertainty'] / medflux)**2 + (star_data['coefficient_uncertainty'])**2)**0.5
93 | ax.errorbar(star_data['mjd'], star_data['normalized_flux'] / medflux,
94 | yerr=err, fmt='.', ms=0, ecolor='gray', alpha=0.5, zorder=-1000, elinewidth=0.5)
95 | sc = ax.scatter(star_data['mjd'], star_data['normalized_flux'] / medflux,
96 | c=star_data['chi2'], cmap=cmap, norm=norm, s=10,
97 | edgecolor='none', label=f"Star {name} (gmag: {star_data['gmag'].unique()[0]:.1f})")
98 | ax.set_ylabel('Normalized flux / global median')
99 | ax.set_ylim((0.9, 1.1))
100 | ax.grid(True)
101 | ax.legend()
102 |
103 | fig.colorbar(sc, ax=ax, label='chi2')
104 |
105 | # time label
106 | axs[-1].set_xlabel('MJD')
107 |
108 | plt.tight_layout()
109 | cl.set_label('')
110 | cl.set_ticks([])
111 |
112 | if save_path is not None:
113 | plt.savefig(save_path, pad_inches=0, bbox_inches='tight', dpi=150)
114 | plt.close()
115 | else:
116 | return fig, axs
117 |
--------------------------------------------------------------------------------
/lightcurver/plotting/psf_plotting.py:
--------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 | import numpy as np
3 |
4 |
5 | def plot_psf_diagnostic(datas, noisemaps, residuals, full_psf,
6 | loss_curve=None, masks=None, names=None,
7 | diagnostic_text=None,
8 | save_path=None):
9 | """
10 | a utility to plot a summary of a psf modelling: plots the stars, noisemaps, and residuals.
11 | optionally, one can pass the loss curve (to make sure it looks like the model converged),
12 | names (one name per star), masks (for visualization, will appear as holes in the noisemaps),
13 | and some diagnostic text to be displayed at the top right.
14 | Args:
15 | datas: 3d array of shape (N, nx, ny): N stars, one slice per star.
16 | noisemaps: same as above but for the noisemaps
17 | residuals: same as above but for the residuals
18 | full_psf: full pixelated psf model
19 | loss_curve: optional, 1D array containing the evolution of the loss during optimization
20 | masks: optional, 3d array of shape (N, nx, ny): the masks that were used during optimization.
21 | names: optional, list of strings: for identifying stars.
22 | diagnostic_text: optional, string, with line breaks (max ~ 20 chars per line)
23 | save_path: optional, string or path, where to save the plot.
24 |
25 | Returns: None
26 |
27 | """
28 | plt.style.use('dark_background')
29 | cmap = 'viridis'
30 | cmap_residuals = 'coolwarm'
31 | text_color = 'white'
32 | text_size = 11
33 | single_letter_text_size = 14
34 | info_box_text_size = 6
35 |
36 | N = len(datas)
37 | if names is not None:
38 | assert N == len(names)
39 |
40 | sub_size = 3
41 | fig, ax = plt.subplots(3, N+1, figsize=((N+1)*sub_size, 3*sub_size))
42 |
43 | for i in range(N):
44 | for j in range(3):
45 | ax[j, i].axis('off')
46 | ax[j, i].set_aspect('equal')
47 | if j == 0:
48 | ax[j, i].imshow(datas[i], cmap=cmap, origin='lower')
49 | if names is not None:
50 | ax[j, i].text(0.5, 0.02, names[i],
51 | horizontalalignment='center',
52 | verticalalignment='bottom',
53 | transform=ax[j, i].transAxes,
54 | color=text_color, fontsize=single_letter_text_size,
55 | weight='bold')
56 | elif j == 1:
57 | ax[j, i].imshow(noisemaps[i], cmap=cmap, origin='lower')
58 | ax[j, i].text(0.5, 0.02, 'noisemap, mask',
59 | horizontalalignment='center',
60 | verticalalignment='bottom',
61 | transform=ax[j, i].transAxes,
62 | color=text_color, fontsize=text_size,
63 | weight='bold')
64 | elif j == 2:
65 | res = np.array(residuals[i]) # explicit casting, jax stuff
66 | if masks is not None:
67 | mask = np.array(masks[i]).astype(bool)
68 | res[np.where(~mask)] = np.nan
69 | ax[j, i].imshow(res, cmap=cmap_residuals, origin='lower')
70 | ax[j, i].text(0.5, 0.02, 'residuals',
71 | horizontalalignment='center',
72 | verticalalignment='bottom',
73 | transform=ax[j, i].transAxes,
74 | color=text_color, fontsize=text_size,
75 | weight='bold')
76 |
77 | # info box
78 | ax[0, N].text(0.1, 0.99, diagnostic_text,
79 | horizontalalignment='left',
80 | verticalalignment='top',
81 | fontsize=info_box_text_size,
82 | color='white')
83 | ax[0, N].axis('off')
84 |
85 | # loss plot
86 | if loss_curve is not None:
87 | ax[1, N].plot(loss_curve, color='white')
88 | ax[1, N].text(0.5, 0.99, 'loss',
89 | horizontalalignment='center',
90 | verticalalignment='top',
91 | transform=ax[1, N].transAxes,
92 | color='white', fontsize=text_size,
93 | weight='bold')
94 |
95 | ax[1, N].axis('off')
96 | # psf model plot
97 | ax[2, N].imshow(full_psf, cmap=cmap, aspect='auto', origin='lower')
98 | ax[2, N].axis('off')
99 | ax[2, N].text(0.5, 0.01, 'Full PSF',
100 | horizontalalignment='center',
101 | verticalalignment='bottom',
102 | transform=ax[2, N].transAxes,
103 | color=text_color, fontsize=text_size,
104 | weight='bold')
105 | ax[2, N].set_aspect('equal')
106 |
107 | plt.subplots_adjust(wspace=0, hspace=0)
108 | if save_path is not None:
109 | plt.savefig(save_path, pad_inches=0, bbox_inches='tight')
110 | plt.close()
111 | else:
112 | return fig, ax
113 |
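114 | if __name__ == '__main__':
115 |     # hedged example, not part of the original module: synthetic star cutouts, flat noisemaps and
116 |     # a Gaussian blob standing in for the full pixelated PSF model.
117 |     rng = np.random.default_rng(2)
118 |     datas = rng.normal(1.0, 0.05, size=(3, 32, 32))
119 |     noisemaps = 0.05 * np.ones_like(datas)
120 |     residuals = rng.normal(0.0, 0.05, size=(3, 32, 32))
121 |     yy, xx = np.mgrid[-32:32, -32:32]
122 |     full_psf = np.exp(-(xx**2 + yy**2) / (2 * 5.0**2))
123 |     plot_psf_diagnostic(datas, noisemaps, residuals, full_psf, names=['a', 'b', 'c'],
124 |                         save_path='psf_diagnostic_example.png')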
--------------------------------------------------------------------------------
/lightcurver/plotting/sources_plotting.py:
--------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 |
3 | from .image_plotting import plot_image
4 | from .footprint_plotting import plot_footprints
5 |
6 |
7 | def plot_sources(sources, image, wcs=None, save_path=None, sources_label=None,
8 | kwargs_imshow=None, **kwargs_plot):
9 | """
10 | Plot the image with detected sources marked (for debugging).
11 |
12 | Parameters:
13 | sources (astropy.table.Table): Table of detected sources.
14 | image (numpy.ndarray): Image data, 2D array.
15 | wcs (astropy.wcs.WCS object): the WCS corresponding to the data. default None.
16 | save_path (pathlib.Path or str): path to potential save location for image.
17 | """
18 | kwargs_imshow = {} if kwargs_imshow is None else kwargs_imshow
19 | fig, ax = plot_image(image=image,
20 | wcs=wcs,
21 | save_path=None,
22 | **kwargs_imshow)
23 |
24 | base_plot_options = {'marker': 'o',
25 | 'ls': 'None',
26 | 'mfc': 'None',
27 | 'color': 'red',
28 | 'ms': 10,
29 | 'alpha': 0.7}
30 | base_plot_options.update(kwargs_plot)
31 |
32 | if wcs is not None:
33 | ra, dec = wcs.all_pix2world(sources['xcentroid'], sources['ycentroid'], 0)
34 | ax.plot(ra, dec, color='red', label=sources_label,
35 | transform=ax.get_transform('world'),
36 | **base_plot_options)
37 | else:
38 | ax.plot(sources['xcentroid'], sources['ycentroid'],
39 | label=sources_label,
40 | **base_plot_options)
41 | if save_path is not None:
42 | plt.tight_layout()
43 | plt.savefig(save_path, bbox_inches='tight', pad_inches=0.)
44 | plt.close()
45 | else:
46 | return fig, ax
47 |
48 |
49 | def plot_coordinates_and_sources_on_image(data, sources, gaia_coords, wcs, save_path, **kwargs_imshow):
50 | """
51 | This is similar to the above, but we want to focus on the quality of the WCS.
52 | Args:
53 | data: image
54 | sources: astropy Table
55 | gaia_coords: Skycoord, usually from gaia
56 | wcs: wcs object astropy
57 | save_path: where to save
58 | **kwargs_imshow:
59 |
60 | Returns:
61 |
62 | """
63 |
64 | kwargs_imshow = {} if kwargs_imshow is None else kwargs_imshow
65 | fig, ax = plot_image(image=data,
66 | wcs=wcs,
67 | save_path=None,
68 | **kwargs_imshow)
69 |
70 | if gaia_coords is not None:
71 | ax.scatter(gaia_coords.ra, gaia_coords.dec, transform=ax.get_transform('world'), s=10, edgecolor='r',
72 | facecolor='none', label='Gaia Stars')
73 | if sources is not None:
74 | ax.scatter(sources['x'], sources['y'], s=10, color='blue', label='Detections', alpha=0.7)
75 |
76 | ax.set_xlabel('RA')
77 | ax.set_ylabel('Dec')
78 |
79 | if save_path is not None:
80 | plt.savefig(save_path, bbox_inches='tight', pad_inches=0)
81 | plt.close()
82 | else:
83 | return fig, ax
84 |
85 |
86 | def plot_footprints_with_stars(footprint_arrays, stars, save_path=None):
87 | """
88 | Plots the footprints of the frames together with the positions and names of the given stars.
89 | Args:
90 | footprint_arrays: list of arrays where each array represents a footprint's corners.
91 | stars: pandas dataframe of stars, with columns 'name', 'ra', 'dec'
92 | save_path: str or path
93 |
94 | Returns:
95 |
96 | """
97 | fig, ax = plot_footprints(footprint_arrays, common_footprint=None, largest_footprint=None, save_path=None)
98 | for _, star in stars.iterrows():
99 | ax.plot(star['ra'], star['dec'], 'o', color='red', markersize=5, mfc='None')
100 | ax.text(star['ra'], star['dec'], star['name'], fontsize=8, ha='right')
101 |
102 | if save_path is not None:
103 | plt.savefig(save_path, bbox_inches='tight', pad_inches=0, dpi=300)
104 | plt.close()
105 | else:
106 | return fig, ax
107 |
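108 | if __name__ == '__main__':
109 |     # hedged example, not part of the original module: mark two fake detections on a noise image.
110 |     import numpy as np
111 |     from astropy.table import Table
112 |     rng = np.random.default_rng(3)
113 |     image = rng.normal(0., 1., size=(100, 100))
114 |     sources = Table({'xcentroid': [20.0, 70.0], 'ycentroid': [30.0, 80.0]})
115 |     fig, ax = plot_sources(sources, image, sources_label='fake detections')
116 |     fig.savefig('sources_example.png')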
--------------------------------------------------------------------------------
/lightcurver/processes/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/lightcurver/processes/__init__.py
--------------------------------------------------------------------------------
/lightcurver/processes/absolute_zeropoint_calculation.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pandas as pd
3 | import sqlite3
4 |
5 | from ..utilities.footprint import get_combined_footprint_hash
6 | from ..structure.user_config import get_user_config
7 | from ..structure.database import execute_sqlite_query, get_pandas
8 | from ..utilities.absolute_magnitudes_from_panstarrs import save_panstarrs_catalog_photometry_to_database
9 | from ..utilities.absolute_magnitudes_from_gaia import save_gaia_catalog_photometry_to_database
10 |
11 |
12 | magnitude_calculation_functions = {
13 | 'gaia': save_gaia_catalog_photometry_to_database,
14 | 'panstarrs': save_panstarrs_catalog_photometry_to_database
15 | }
16 |
17 |
18 | def get_gaia_ids_with_flux_in_frame(combined_footprint_hash):
19 | """
20 | Queries all gaia_id of stars for which there is at least one flux_in_frame
21 | entry in the given combined_footprint_hash.
22 |
23 | Args:
24 | combined_footprint_hash: The combined footprint hash to filter the stars.
25 |
26 | Returns:
27 | List of gaia_id of stars.
28 | """
29 | query = """
30 | SELECT DISTINCT star_gaia_id
31 | FROM star_flux_in_frame
32 | WHERE combined_footprint_hash = ?
33 | """
34 | params = (combined_footprint_hash,)
35 | result = execute_sqlite_query(query, params)
36 | gaia_ids = [row[0] for row in result]
37 | return gaia_ids
38 |
39 |
40 | def calculate_zeropoints():
41 | """
42 | Calculates zeropoints for each frame based on provided magnitudes and updates the database.
43 | Args:
44 | -
45 | Returns:
46 | None
47 | """
48 |
49 | # boiler plate
50 | user_config = get_user_config()
51 | frames_ini = get_pandas(columns=['id'],
52 | conditions=['plate_solved = 1', 'eliminated = 0', 'roi_in_footprint = 1'])
53 | combined_footprint_hash = get_combined_footprint_hash(user_config, frames_ini['id'].to_list())
54 |
55 | # first, trigger the calculation of the magnitudes of the reference stars in the band of the config
56 | gaia_ids = get_gaia_ids_with_flux_in_frame(combined_footprint_hash)
57 | source_catalog = user_config['reference_absolute_photometric_survey']
58 | absolute_mag_func = magnitude_calculation_functions[source_catalog]
59 | for gaia_id in pd.unique(gaia_ids):
60 | absolute_mag_func(gaia_id)
61 |
62 | # now, query the star fluxes and their reference magnitudes from our database.
63 | # we also join on the table of calibrated magnitudes obtained from gaia or panstarrs, etc.
64 | flux_query = """
65 | SELECT
66 | sff.frame_id,
67 | sff.flux,
68 | s.gaia_id,
69 | csp.mag as catalog_mag
70 | FROM
71 | star_flux_in_frame sff
72 | JOIN
73 | stars s ON sff.star_gaia_id = s.gaia_id
74 | AND
75 | s.combined_footprint_hash = sff.combined_footprint_hash
76 | JOIN
77 | frames f ON f.id = sff.frame_id
78 | JOIN
79 | catalog_star_photometry csp ON csp.star_gaia_id = s.gaia_id
80 | WHERE
81 | sff.combined_footprint_hash = ?
82 | AND
83 | csp.catalog = ?
84 | """
85 |
86 | # get the fluxes measured on the frames
87 | flux_data = execute_sqlite_query(flux_query,
88 | params=(combined_footprint_hash,
89 | user_config['reference_absolute_photometric_survey']),
90 | is_select=True, use_pandas=True)
91 | if flux_data.empty:
92 | return
93 |
94 | # continue with zeropoint calculation now
95 | flux_data['instrumental_mag'] = -2.5 * np.log10(flux_data['flux'])
96 | flux_data['mag_difference'] = flux_data['catalog_mag'] - flux_data['instrumental_mag']
97 |
98 | # zeropoint and uncertainty (std) for each frame
99 | zeropoint_results = flux_data.groupby('frame_id')['mag_difference'].agg(['median', 'std']).reset_index()
100 | zeropoint_results.rename(columns={'median': 'zeropoint', 'std': 'zeropoint_uncertainty'}, inplace=True)
101 |
102 | # Update database
103 | insert_query = """
104 | INSERT INTO absolute_zeropoints (frame_id, combined_footprint_hash, zeropoint,
105 | zeropoint_uncertainty, source_catalog)
106 | VALUES (?, ?, ?, ?, ?)
107 | ON CONFLICT(frame_id, combined_footprint_hash) DO UPDATE SET
108 | zeropoint = excluded.zeropoint,
109 | zeropoint_uncertainty = excluded.zeropoint_uncertainty;
110 | """
111 | data_to_insert = [(row['frame_id'],
112 | combined_footprint_hash,
113 | row['zeropoint'],
114 | row['zeropoint_uncertainty'],
115 | source_catalog) for _, row in zeropoint_results.iterrows()]
116 |
117 | db_path = get_user_config()['database_path']
118 | with sqlite3.connect(db_path, timeout=15.0) as conn:
119 | conn.executemany(insert_query, data_to_insert)
120 |
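121 | 
122 | if __name__ == '__main__':
123 |     # hedged example, not part of the original module: the zeropoint arithmetic above on a tiny
124 |     # synthetic table. A star of catalog magnitude 17.0 measured with flux 10**(-0.4 * (17.0 - 25.0))
125 |     # has instrumental magnitude -8.0, hence a zeropoint of 17.0 - (-8.0) = 25.0 for that frame.
126 |     demo = pd.DataFrame({'frame_id': [1, 1, 2],
127 |                          'catalog_mag': [17.0, 18.0, 17.0],
128 |                          'flux': [10 ** (-0.4 * (17.0 - 25.0)),
129 |                                   10 ** (-0.4 * (18.0 - 25.0)),
130 |                                   10 ** (-0.4 * (17.0 - 24.5))]})
131 |     demo['instrumental_mag'] = -2.5 * np.log10(demo['flux'])
132 |     demo['mag_difference'] = demo['catalog_mag'] - demo['instrumental_mag']
133 |     print(demo.groupby('frame_id')['mag_difference'].agg(['median', 'std']))
134 |     # expected zeropoints (the 'median' column): 25.0 for frame 1, 24.5 for frame 2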
--------------------------------------------------------------------------------
/lightcurver/processes/alternate_plate_solving_adapt_existing_wcs.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from astropy.io import fits
3 | import astroalign as aa
4 | from astropy.wcs import WCS
5 | from astropy.table import Table
6 | import logging
7 |
8 | from ..structure.user_config import get_user_config
9 | from ..structure.database import execute_sqlite_query
10 | from .plate_solving import select_frames_needing_plate_solving, post_plate_solve_steps
11 |
12 |
13 | def adapt_wcs(reference_wcs, reference_sources, target_sources):
14 |
15 | # first, extract the cd matrix from the reference wcs object
16 | if reference_wcs.wcs.has_cd():
17 | cdmatrix = reference_wcs.wcs.cd.copy()
18 | elif reference_wcs.wcs.has_pc():
19 | cdmatrix = reference_wcs.wcs.pc.copy()
20 | cdmatrix *= reference_wcs.wcs.cdelt
21 | else:
22 | raise AttributeError("No celestial WCS found in the provided WCS object.")
23 |
24 | transform, (match1, match2) = aa.find_transform(reference_sources, target_sources)
25 |
26 | # ok, now we transform the WCS.
27 | # if the pixels are transformed actively, the coordinates must be
28 | # transformed the other way to compensate:
29 | similarity = transform.params
30 | scaled_rotation = similarity[:2, :2]
31 | translation = similarity[:2, 2]
32 |
33 | # copy the ref wcs and inverse transform it:
34 | wcs_new = reference_wcs.deepcopy()
35 | # update the ref pixel
36 | refpixel = reference_wcs.wcs.crpix
37 | refpixel = np.dot(scaled_rotation, refpixel) + translation
38 | wcs_new.wcs.crpix = refpixel
39 | # rotation and scaling of the cd matrix.
40 | wcs_new.wcs.cd = np.dot(scaled_rotation, cdmatrix)
41 |
42 | return wcs_new, (match1, match2)
43 |
44 |
45 | def alternate_plate_solve_adapt_ref():
46 | user_config = get_user_config()
47 | workdir = user_config['workdir']
48 | logger = logging.getLogger("lightcurver.alternate_plate_solving_adapt_existing_wcs")
49 |
50 | # select the frame to use as reference
51 | reference_frame_for_wcs = user_config['reference_frame_for_wcs']
52 | if reference_frame_for_wcs is not None:
53 | query = "select image_relpath, sources_relpath, id from frames where id = ?"
54 | frame_path, sources_path, ref_id = execute_sqlite_query(query, params=(reference_frame_for_wcs,),
55 | is_select=True, use_pandas=False)[0]
56 | else:
57 | query = "select image_relpath, sources_relpath, id from frames where plate_solved = 1 limit 1"
58 | frame_path, sources_path, ref_id = execute_sqlite_query(query, is_select=True, use_pandas=False)[0]
59 |
60 | # just pointing the relative paths to the correct absolute ones:
61 | frame_path = workdir / frame_path
62 | sources_path = workdir / sources_path
63 |
64 | logger.info(f'Attempting to align the WCS of frame {frame_path} onto more images.')
65 | header = fits.getheader(frame_path)
66 | reference_sources = Table(fits.getdata(sources_path))
67 | # unpack to match the needed format
68 | reference_sources = [(row['x'], row['y']) for row in reference_sources]
69 | logger.info(f'The reference frame {frame_path} has {len(reference_sources)} sources.')
70 | reference_wcs = WCS(header)
71 | if not reference_wcs.is_celestial:
72 | message = f'The WCS of the frame {frame_path} is not celestial. Aborting'
73 | logger.info(message)
74 | raise RuntimeError(message)
75 |
76 | # select the frames that need plate solving.
77 | frames = select_frames_needing_plate_solving(user_config=user_config, logger=logger)
78 |
79 | for ii, frame in frames.iterrows():
80 |
81 | target_sources = Table(fits.getdata(workdir / frame['sources_relpath']))
82 | # same as above:
83 | target_sources = [(row['x'], row['y']) for row in target_sources]
84 |
85 | try:
86 | wcs_new, (match1, match2) = adapt_wcs(reference_wcs=reference_wcs, reference_sources=reference_sources,
87 | target_sources=target_sources)
88 | success = True
89 | except aa.MaxIterError:
90 | logger.warning(f"Could not align frame {frame['id']}: max iterations reached before solution.")
91 | success = False
92 | except Exception as e:
93 | logger.warning(f"I frame {frame['id']}: error, {e}")
94 | success = False
95 |
96 | if success:
97 | logger.info(f"Adapted WCS of frame {frame['id']}")
98 | with fits.open(workdir / frame['image_relpath'], mode="update") as hdul:
99 | hdul[0].header.update(wcs_new.to_header())
100 | hdul.flush()
101 | # post plate solve steps
102 | post_plate_solve_steps(frame_path=workdir / frame['image_relpath'],
103 | user_config=user_config, frame_id=frame['id'])
104 |
105 | # at the end, set the image to plate solved in db, and flag it as having had a plate solve attempt.
106 | execute_sqlite_query(query="UPDATE frames SET plate_solved = ?, attempted_plate_solve = 1 WHERE id = ?",
107 | params=(1 if success else 0, frame['id']), is_select=False)
108 |
109 |
110 |
111 |
--------------------------------------------------------------------------------
/lightcurver/processes/alternate_plate_solving_with_gaia.py:
--------------------------------------------------------------------------------
1 | # this is for the rare cases where standard plate solving fails
2 | # we use our gaia stars and detections to create a reasonable WCS for our frames.
3 | # to be attempted if all else fails.
4 | import numpy as np
5 | from astropy.coordinates import SkyCoord
6 | from astropy.time import Time
7 | from astropy.wcs import WCS
8 | from astropy.table import Table
9 | from astropy.io import fits
10 | import astroalign as aa
11 |
12 | from ..structure.database import execute_sqlite_query, get_pandas
13 | from ..utilities.gaia import find_gaia_stars
14 | from ..plotting.sources_plotting import plot_coordinates_and_sources_on_image
15 | from ..processes.plate_solving import post_plate_solve_steps
16 | from ..structure.user_config import get_user_config
17 |
18 |
19 | def create_initial_wcs(pixel_scale, image_shape, center_ra, center_dec, rotation_angle_deg):
20 | """
21 | This makes an astropy wcs object with cd matrix, starting from a pixel scale, image shape, rotation and
22 | coordinates.
23 |
24 | Args:
25 | pixel_scale: float, arcsecond / pixel
26 | image_shape: tuple
27 | center_ra: float
28 | center_dec: float
29 | rotation_angle_deg: float
30 |
31 | Returns:
32 | astropy.wcs.WCS object
33 | """
34 | w = WCS(naxis=2)
35 | w.wcs.crpix = [(image_shape[1] - 1) / 2, (image_shape[0] - 1) / 2] # center of the image
36 | w.wcs.crval = [center_ra, center_dec] # ra, dec at the center
37 | w.wcs.ctype = ["RA---TAN", "DEC--TAN"]
38 |
39 | rotation_angle_rad = np.deg2rad(rotation_angle_deg)
40 | pixel_scale_deg = pixel_scale / 3600.0
41 | w.wcs.cd = np.array([[-pixel_scale_deg * np.cos(rotation_angle_rad), pixel_scale_deg * np.sin(rotation_angle_rad)],
42 | [pixel_scale_deg * np.sin(rotation_angle_rad), pixel_scale_deg * np.cos(rotation_angle_rad)]])
43 |
44 | return w
45 |
46 |
47 | def refine_wcs_with_astroalign(sources, gaia_star_coords, wcs):
48 | """
49 | takes in a 'guess' wcs object (made by a helper function in this file), crossmatches the resulting
50 | pixel positions of the gaia stars to the extracted sources, and if possible calculates a WCS.
51 | Args:
52 | sources: astropy Table of sources, with 'xcentroid' and 'ycentroid' columns
53 | gaia_star_coords: astropy SkyCoord containing the coordinates of our stars (please correct for proper motion)
54 | wcs: an initial guess wcs object
55 |
56 | Returns:
57 |
58 | """
59 |
60 | gaia_pix_coords = wcs.world_to_pixel(gaia_star_coords)
61 | gaia_pix_positions = np.vstack(gaia_pix_coords).T
62 |
63 | image_positions = np.column_stack((sources['x'], sources['y']))
64 |
65 | try:
66 | transformation, (source_idx, gaia_idx) = aa.find_transform(image_positions, gaia_pix_positions)
67 | except Exception as e:
68 | print(f"Error finding transformation with astroalign: {e}")
69 | raise
70 |
71 | crpix = transformation.inverse.params[:2, :2] @ wcs.wcs.crpix + transformation.inverse.translation
72 |
73 | new_cd = transformation.inverse.params[:2, :2] @ wcs.wcs.cd
74 | wcs_updated = wcs.deepcopy()
75 | wcs_updated.wcs.crpix = crpix
76 | wcs_updated.wcs.cd = new_cd
77 |
78 | return wcs_updated
79 |
80 |
81 | def alternate_plate_solve_gaia():
82 | """
83 | This cross-matches Gaia detections with the sources in our images, given a good estimation of the pixel scale,
84 | rotation of the field and center of the field.
85 | Then, it creates a WCS that best matches the Gaia sources to our pixels.
86 | Returns:
87 | Nothing, but loops over the frames and updates database and fits headers.
88 |
89 | """
90 | user_config = get_user_config()
91 | ra, dec = user_config['ROI_ra_deg'], user_config['ROI_dec_deg']
92 | center_radius = {'center': (ra, dec), 'radius': user_config['alternate_plate_solve_gaia_radius']/3600.}
93 | gaia_stars = find_gaia_stars('circle', center_radius=center_radius,
94 | gaia_provider=user_config['gaia_provider'])
95 | gaia_stars['pmra'][np.isnan(gaia_stars['pmra'])] = 0
96 | gaia_stars['pmdec'][np.isnan(gaia_stars['pmdec'])] = 0
97 | gaia_coords = SkyCoord(ra=gaia_stars['ra'],
98 | dec=gaia_stars['dec'],
99 | pm_ra_cosdec=gaia_stars['pmra'],
100 | pm_dec=gaia_stars['pmdec'],
101 | frame='icrs',
102 | obstime=Time(gaia_stars['ref_epoch'].value, format='decimalyear'))
103 | pixel_scale = np.mean(user_config['plate_scale_interval'])
104 |
105 | frames_to_process = get_pandas(columns=['id', 'image_relpath', 'sources_relpath', 'mjd'],
106 | conditions=['plate_solved = 0', 'eliminated = 0'])
107 | plot_dir = user_config['plots_dir'] / 'gaia_plate_solve_diagnostic'
108 | plot_dir.mkdir(parents=True, exist_ok=True)
109 | for i, frame in frames_to_process.iterrows():
110 |
111 | gaia_coords_moved = gaia_coords.apply_space_motion(new_obstime=Time(frame['mjd'], format='mjd'))
112 | frame_path = user_config['workdir'] / frame['image_relpath']
113 | frame_id = frame['id']
114 | image = fits.getdata(frame_path).astype(float)
115 | sources_path = user_config['workdir'] / frame['sources_relpath']
116 | sources = Table(fits.getdata(sources_path))
117 | initial_wcs = create_initial_wcs(pixel_scale=pixel_scale, center_ra=ra, center_dec=dec, rotation_angle_deg=0,
118 | image_shape=image.shape)
119 | try:
120 | new_wcs = refine_wcs_with_astroalign(sources, gaia_coords_moved, initial_wcs)
121 | success = True
122 | except Exception as e:
123 | print(f"Could not solve frame {frame_id}: {e}.")
124 | success = False
125 | if success:
126 | with fits.open(frame_path, mode="update") as hdul:
127 | hdul[0].header.update(new_wcs.to_header())
128 | hdul.flush()
129 | plot_path = plot_dir / f"{frame_path.stem}.jpg"
130 | plot_coordinates_and_sources_on_image(image, sources=sources,
131 | gaia_coords=gaia_coords_moved, wcs=new_wcs, save_path=plot_path)
132 | post_plate_solve_steps(frame_path=frame_path, user_config=user_config, frame_id=frame_id)
133 |
134 | # at the end, set the image to plate solved in db
135 | execute_sqlite_query(query="UPDATE frames SET plate_solved = ? WHERE id = ?",
136 | params=(1 if success else 0, frame_id), is_select=False)
137 |
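138 | 
139 | if __name__ == '__main__':
140 |     # hedged example, not part of the original module: build a guess WCS for a 2000x2000 frame at
141 |     # 0.2"/pixel centered on (ra, dec) = (150.0, 2.0) deg, and check that the reference pixel maps
142 |     # back to (approximately) the requested center.
143 |     demo_wcs = create_initial_wcs(pixel_scale=0.2, image_shape=(2000, 2000),
144 |                                   center_ra=150.0, center_dec=2.0, rotation_angle_deg=0.0)
145 |     sky = demo_wcs.pixel_to_world((2000 - 1) / 2, (2000 - 1) / 2)
146 |     print(sky.ra.deg, sky.dec.deg)  # ~150.0, ~2.0 (up to the 1-based FITS pixel convention)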
--------------------------------------------------------------------------------
/lightcurver/processes/background_estimation.py:
--------------------------------------------------------------------------------
1 | import sep
2 | import numpy as np
3 |
4 |
5 | def subtract_background(image, mask_sources_first=False, n_boxes=10):
6 | """
7 | subtracts an estimated smooth background from the 2d array "image".
8 | basic, for complex cases do your own background subtraction.
9 | Background subtraction is particularly sensitive if we want to achieve
10 | high precision on the relative zeropoints of the images.
11 | (we will not include a background in our forward modelling later down the line, because it can always be
12 | degenerate with other sources or the object itself).
13 |
14 | Here we will do this in two steps if mask_sources_first is True.
15 | 1. roughly subtract the background, extract the sources.
16 | 2. mask the sources, estimate the background again.
17 |
18 | :param image: 2d numpy array
19 | :param mask_sources_first: bool, whether we identify sources and mask before doing background estimation.
20 | :param n_boxes: int, in how many boxes do we divide the side of the image for background estimation?
21 | :return: tuple (2d numpy array: the background-subtracted image, sep.Background object)
22 | """
23 | # so, first, estimate a background.
24 | box_size = np.min(image.shape) // n_boxes
25 | bkg = sep.Background(image, bw=box_size, bh=box_size, fw=3, fh=3)
26 | image_sub = image - bkg
27 | if not mask_sources_first:
28 | # that's it
29 | return image_sub, bkg
30 |
31 | # find a lot of sources.
32 | sources, seg_map = sep.extract(data=image_sub, var=bkg.globalrms**2, thresh=2,
33 | minarea=10, segmentation_map=True)
34 | # estimate again
35 | bkg = sep.Background(image, bw=box_size, bh=box_size, fw=3, fh=3, mask=(seg_map > 0))
36 | # sub again
37 | image_sub = image - bkg
38 | # and done
39 | return image_sub, bkg
40 |
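41 | if __name__ == '__main__':
42 |     # hedged example, not part of the original module: subtract a smooth synthetic gradient from a
43 |     # noisy image containing one bright blob, masking the detected sources before the second pass.
44 |     rng = np.random.default_rng(4)
45 |     ny, nx = 300, 300
46 |     yy, xx = np.mgrid[0:ny, 0:nx]
47 |     image = 50. + 0.05 * xx + 0.02 * yy + rng.normal(0., 1., size=(ny, nx))
48 |     image[150:154, 150:154] += 200.  # a fake source
49 |     image_sub, bkg = subtract_background(image.astype(np.float32), mask_sources_first=True)
50 |     print(f"median residual background: {np.median(image_sub):.2f} (should be close to 0)")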
--------------------------------------------------------------------------------
/lightcurver/processes/frame_characterization.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import ephem
3 | from astropy.coordinates import SkyCoord
4 | from astropy.coordinates import Angle
5 | from astropy import units as u
6 |
7 |
8 | def calculate_airmass(altitude_degrees: np.array) -> np.array:
9 | """
10 | Calculate the airmass using Rozenberg's empirical relation.
11 |
12 | Parameters
13 | ----------
14 | altitude_degrees : float or ndarray
15 | The apparent altitude of the object in degrees. Can be a single value or a NumPy array.
16 |
17 | Returns
18 | -------
19 | float or ndarray
20 | The airmass value(s). Returns -1.0 for altitudes below the horizon (altitude_radians < 0),
21 | -2.0 for altitudes above 90 degrees (altitude_radians > pi/2), and the airmass according
22 | to Rozenberg's relation for altitudes in between.
23 |
24 | Notes
25 | -----
26 | Rozenberg's empirical relation is defined as:
27 | X = 1 / (sin(ho) + 0.025 * exp(-11 * sin(ho)))
28 | where ho is the apparent altitude of the object. This formula is applicable down
29 | to the horizon (where it gives X = 40).
30 | """
31 | altitude_radians = np.radians(np.asarray(altitude_degrees))
32 | with np.errstate(divide='ignore', invalid='ignore'):
33 | airmass_values = np.where(
34 | (altitude_radians < 0),
35 | -1.0,
36 | np.where(
37 | (altitude_radians > np.pi / 2),
38 | -2.0,
39 | 1.0 / (np.sin(altitude_radians) + 0.025 * np.exp(-11.0 * np.sin(altitude_radians)))
40 | )
41 | )
42 | return airmass_values
43 |
44 |
45 | def ephemeris(mjd: float,
46 | ra_object: float, dec_object: float,
47 | telescope_longitude: float, telescope_latitude: float, telescope_elevation: float) -> dict:
48 | """
49 | This function calculates and returns the ephemeris for a given object at a specific time and location.
50 |
51 | Parameters:
52 | mjd (float): Modified Julian Date for the observation.
53 | ra_object (float): Right ascension of the observed object in degrees.
54 | dec_object (float): Declination of the observed object in degrees.
55 | telescope_longitude (float): Longitude of the telescope's location in degrees.
56 | telescope_latitude (float): Latitude of the telescope's location in degrees.
57 | telescope_elevation (float): Elevation of the telescope's location in meters.
58 |
59 | Returns:
60 | dict: A dictionary containing information about the astro conditions, comments, target, moon, and sun.
61 | - 'weird_astro_conditions' (bool): True when the observing conditions look unusual (e.g. sun up, odd airmass).
62 | - 'comments' (str): Additional comments about the astro conditions.
63 | - 'target_info' (dict): Information about the target object, including altitude, azimuth, and airmass.
64 | - 'moon_info' (dict): Information about the moon, including distance from the target, illumination, and altitude.
65 | - 'sun_info' (dict): Information about the sun, including altitude.
66 | """
67 |
68 | results = {
69 | 'weird_astro_conditions': False,
70 | 'comments': "",
71 | 'target_info': {},
72 | 'moon_info': {},
73 | 'sun_info': {}
74 | }
75 |
76 | telescope = ephem.Observer()
77 | # Convert longitude and latitude to strings
78 | long_str = Angle(telescope_longitude * u.deg).to_string(unit=u.degree, sep=':')
79 | lat_str = Angle(telescope_latitude * u.deg).to_string(unit=u.degree, sep=':')
80 | telescope.long = long_str
81 | telescope.lat = lat_str
82 | telescope.elevation = telescope_elevation
83 | telescope.epoch = ephem.J2000
84 |
85 | jd = mjd + 2400000.5
86 | djd = jd - 2415020.0
87 | telescope.date = djd
88 |
89 | target = ephem.FixedBody()
90 | # make them coordinate strings for pyephem:
91 | coord = SkyCoord(ra=ra_object*u.degree, dec=dec_object*u.degree)
92 | ra_str = coord.ra.to_string(unit=u.hour, sep=':')
93 | dec_str = coord.dec.to_string(unit=u.degree, sep=':')
94 | target._ra = ra_str
95 | target._dec = dec_str
96 | target.compute(telescope)
97 |
98 | moon = ephem.Moon()
99 | moon.compute(telescope)
100 | moondist = np.degrees(float(ephem.separation(moon, target)))
101 |
102 | sun = ephem.Sun()
103 | sun.compute(telescope)
104 |
105 | target_alt_deg = np.degrees(target.alt)
106 | target_az_deg = np.degrees(target.az)
107 |
108 | airmass = calculate_airmass(target_alt_deg)
109 | if airmass < 1.0 or airmass > 5.0:
110 | results['weird_astro_conditions'] = True
111 | results['comments'] += f"Target altitude: {target_alt_deg:.2f} degrees (airmass {airmass:.2f})."
112 |
113 | moon_distance_deg = np.degrees(ephem.separation(moon, target))
114 | moon_illumination = moon.phase
115 | moon_alt_deg = np.degrees(moon.alt)
116 |
117 | sun_alt_deg = np.degrees(sun.alt)
118 | if sun_alt_deg > 0.0:
119 | results['weird_astro_conditions'] = True
120 | results['comments'] += f" Sun altitude: {sun_alt_deg:.2f} degrees."
121 |
122 | # Fill target, moon, and sun info
123 | results['target_info'] = {'altitude_deg': target_alt_deg,
124 | 'azimuth_deg': target_az_deg,
125 | 'airmass': airmass,
126 | 'moon_dist': moondist}
127 | results['moon_info'] = {'distance_deg': moon_distance_deg,
128 | 'illumination': moon_illumination,
129 | 'altitude_deg': moon_alt_deg}
130 | results['sun_info'] = {'altitude_deg': sun_alt_deg}
131 |
132 | return results
133 |
134 |
135 | def estimate_seeing(sources_table):
136 | """
137 | logic written by Malte Tewes, https://github.com/mtewes in 2010, for the COSMOULINE pipe of COSMOGRAIL.
138 | it seems to have worked well for 14 years now, so I'm just keeping the main flow of it.
139 |
140 | this function estimates a seeing value (in pixels) based on a table of sources as extracted by sep.
141 | we need the FWHM column to be present in the table, hence the table of sources has to come from
142 | the extract_stars function of this repo.
143 |
144 | :param sources_table: astropy.table.Table containing detections
145 | :return: float, a seeing value (reasonable FWHM) in pixels.
146 | """
147 |
148 | fwhms = sources_table['FWHM']
149 |
150 | if len(fwhms) > 10: # loads of stars
151 | # We want a crude guess at what kind of range we have to look for stars
152 | # The goal here is to have a "nice-looking"
153 | # histogram with a well-defined peak somewhere inside the range.
154 | min_fwhm = 1.5
155 | med_fwhm = np.median(fwhms)
156 | if med_fwhm < min_fwhm:
157 | med_fwhm = min_fwhm
158 |
159 | wide_stars = 3.0 * med_fwhm
160 |
161 | max_fwhm = 30.0
162 | if wide_stars < max_fwhm:
163 | max_fwhm = wide_stars
164 |
165 | # At this point the true seeing should be between min_fwhm and max_fwhm.
166 | # We build a first histogram :
167 | (hist, edges) = np.histogram(fwhms, bins=10,
168 | range=(min_fwhm, max_fwhm))
169 | # Note that points outside the range are not taken into account at all,
170 | # they don't fill the side bins!
171 |
172 | # We find the peak, and build a narrower hist around it
173 | max_pos = np.argmax(hist)
174 | if max_pos == 0:
175 | seeing_pixels = np.median(fwhms)
176 | elif max_pos == len(hist) - 1:
177 | seeing_pixels = np.median(fwhms)
178 | else: # the normal situation :
179 | peak_pos = float(0.5 * (edges[max_pos] + edges[max_pos + 1]))
180 |
181 | # We build a second histogram around this position,
182 | # with a narrower range:
183 | hist, edges = np.histogram(fwhms,
184 | bins=10,
185 | range=(peak_pos - 2.0, peak_pos + 2.0))
186 | max_pos = np.argmax(hist)
187 | peak_pos = 0.5 * (edges[max_pos] + edges[max_pos + 1])
188 |
189 | # We take the median of values around this peak_pos :
190 | star_fwhms = fwhms[np.logical_and(fwhms > peak_pos - 1.0,
191 | fwhms < peak_pos + 1.0)]
192 | if len(star_fwhms) > 0:
193 | seeing_pixels = np.median(star_fwhms)
194 | else:
195 | seeing_pixels = peak_pos
196 |
197 | elif len(fwhms) > 0: # few stars, not ideal but whatever
198 | seeing_pixels = np.median(fwhms)
199 |
200 | else: # no stars
201 | seeing_pixels = -1.0
202 | return seeing_pixels
203 |
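204 | if __name__ == '__main__':
205 |     # hedged example, not part of the original module: a quick numerical check of Rozenberg's
206 |     # relation. At the zenith the airmass is ~1, at 30 degrees of altitude ~2, and at the horizon
207 |     # the relation saturates at 40.
208 |     print(calculate_airmass(np.array([90.0, 30.0, 0.0])))  # ~ [1.0, 2.0, 40.0]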
--------------------------------------------------------------------------------
/lightcurver/processes/frame_star_assignment.py:
--------------------------------------------------------------------------------
1 | from shapely.geometry import Point, Polygon
2 | from shapely import intersection
3 | import json
4 | import sqlite3
5 | import numpy as np
6 |
7 | from ..structure.user_config import get_user_config
8 |
9 |
10 | def populate_stars_in_frames():
11 | """
12 | Populates the stars_in_frames table by checking which stars fall within each frame's footprint.
13 | """
14 |
15 | user_config = get_user_config()
16 | # not using execute_sqlite_query, so we do not open and close the database a million times.
17 | conn = sqlite3.connect(user_config['database_path'])
18 | cursor = conn.cursor()
19 |
20 | # load the footprints and stars
21 | cursor.execute("""SELECT frame_id, polygon FROM footprints""")
22 | frame_footprints = cursor.fetchall()
23 | cursor.execute("""SELECT gaia_id, ra, dec, combined_footprint_hash FROM stars""")
24 | stars = cursor.fetchall()
25 |
26 | for frame_id, footprint_str in frame_footprints:
27 | # assume gnomonic projection, let's just treat our footprint as flat.
28 | # should be good enough.
29 | footprint_polygon = Polygon(json.loads(footprint_str))
30 | # we don't want to select stars too close to the edges, too many problems.
31 | # to address this, we'll shrink our polygon by a margin.
32 |
33 | # get the edges:
34 | x, y = footprint_polygon.exterior.xy
35 | # the mean declination:
36 | mean_dec = np.nanmean(y)
37 | margin_degrees = 4. / 3600 # enforce a margin of 4 arcseconds.
38 | # just de-projecting the margin for RA:
39 | ra_margin = margin_degrees / np.cos(np.radians(mean_dec))
40 |
41 | # I do not know how to shrink a polygon by different amounts in different directions ...
42 | # no worries, we'll just translate the polygon by the amount in a cross-like pattern,
43 | # then take the intersection to get our 'shrunk' polygon.
44 | translated_polygons = []
45 | for translation in ([1, 0], [-1, 0], [0, -1], [0, 1]):
46 | adjusted_vertices = []
47 | for ra, dec in zip(x, y):
48 | adjusted_ra = ra + translation[0] * ra_margin
49 | adjusted_dec = dec + translation[1] * margin_degrees
50 | adjusted_vertices.append((adjusted_ra, adjusted_dec))
51 | translated_polygons.append(Polygon(adjusted_vertices))
52 |
53 | # do the intersection of our translations to get the reduced footprint:
54 | shrunk_footprint_polygon = translated_polygons[0]
55 | for trans in translated_polygons[1:]:
56 | shrunk_footprint_polygon = intersection(shrunk_footprint_polygon, trans)
57 |
58 | # and now we check!
59 | for star_id, ra, dec, combined_footprint_hash in stars:
60 | star_point = Point(ra, dec)
61 | if star_point.within(shrunk_footprint_polygon):
62 | try:
63 | cursor.execute("""INSERT INTO stars_in_frames (frame_id, star_gaia_id, combined_footprint_hash)
64 | VALUES (?, ?, ?)""", (frame_id, star_id, combined_footprint_hash))
65 | except sqlite3.IntegrityError:
66 | # handles cases where the same star-frame relationship might already be in the table
67 | continue
68 |
69 | conn.commit()
70 | conn.close()
71 |
72 |
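73 | if __name__ == '__main__':
74 |     # hedged example, not part of the original module: the shrink-by-translations trick used above,
75 |     # applied to a unit square with a margin of 0.1 in both directions.
76 |     square = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
77 |     translated = []
78 |     for dx, dy in ([1, 0], [-1, 0], [0, -1], [0, 1]):
79 |         translated.append(Polygon([(x + 0.1 * dx, y + 0.1 * dy) for x, y in square.exterior.coords]))
80 |     shrunk = translated[0]
81 |     for poly in translated[1:]:
82 |         shrunk = intersection(shrunk, poly)
83 |     print(shrunk.bounds)  # ~ (0.1, 0.1, 0.9, 0.9)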
--------------------------------------------------------------------------------
/lightcurver/processes/plate_solving.py:
--------------------------------------------------------------------------------
1 | import os
2 | from astropy.io import fits
3 | from astropy.table import Table
4 | from astropy.wcs import WCS
5 | from astropy.wcs.utils import proj_plane_pixel_scales
6 | from widefield_plate_solver import plate_solve
7 | from widefield_plate_solver.exceptions import CouldNotSolveError
8 | import logging
9 |
10 | from ..structure.database import execute_sqlite_query, get_pandas
11 | from ..utilities.footprint import database_insert_single_footprint, get_angle_wcs
12 |
13 |
14 | def select_frames_needing_plate_solving(user_config, logger):
15 | """
16 | Given the user config and the state of the database, returns a pandas dataframe with the frames that need
17 | plate solving.
18 |
19 | Args:
20 | user_config: dictionary, read with structure.user_config.get_user_config
21 | logger: an instance of a logger, for printing messages
22 |
23 | Returns:
24 | pandas dataframe containing the frames to treat, columns id, image_relpath, sources_relpath
25 | """
26 | # so, we select our frames to plate solve depending on the user config.
27 | if user_config['plate_solve_frames'] == 'all_not_eliminated':
28 | conditions = ['eliminated = 0']
29 | logger.info(f"Processing all the frames (even the ones already solved) that are not flagged as eliminated.")
30 | elif user_config['plate_solve_frames'] == 'all_never_attempted':
31 | conditions = ['eliminated = 0', 'attempted_plate_solve = 0']
32 | logger.info(f"Processing all the frames that do not have a solve attempt yet.")
33 | elif user_config['plate_solve_frames'] == 'all_not_plate_solved':
34 | conditions = ['eliminated = 0', 'plate_solved = 0']
35 | logger.info(f"Processing all the frames that are not plate solved, even those that were already attempted.")
36 | else:
37 | raise ValueError(f"Not an expected selection strategy for frames to solve: {user_config['plate_solve_frames']}")
38 |
39 | frames_to_process = get_pandas(columns=['id', 'image_relpath', 'sources_relpath'],
40 | conditions=conditions)
41 | return frames_to_process
42 |
43 |
44 | def solve_one_image(image_path, sources_path, user_config):
45 |
46 | sources = Table(fits.getdata(sources_path))
47 |
48 | if user_config['astrometry_net_api_key'] is None:
49 | use_api = False
50 | else:
51 | use_api = True
52 | os.environ['astrometry_net_api_key'] = user_config['astrometry_net_api_key']
53 |
54 | ra, dec = user_config['ROI_ra_deg'], user_config['ROI_dec_deg']
55 | plate_scale_min, plate_scale_max = user_config['plate_scale_interval']
56 |
57 | wcs = plate_solve(fits_file_path=image_path, sources=sources,
58 | use_existing_wcs_as_guess=False,
59 | use_api=use_api,
60 | redo_if_done=True, # we check for this upstream in this package
61 | ra_approx=ra, dec_approx=dec,
62 | scale_min=plate_scale_min, scale_max=plate_scale_max,
63 | do_debug_plot=False,
64 | odds_to_solve=1e8)
65 |
66 | return WCS(wcs)
67 |
68 |
69 | def post_plate_solve_steps(frame_path, user_config, frame_id):
70 | """
71 | This is called after an astrometric solution has been found for an image (it is also called if the image
72 | is already plate solved, runs then on the existing solution)
73 |
74 | it
75 | - calculates a footprint for the image (represented by a polygon, inserted as json in the database)
76 | - checks if the ROI is contained by the image (if no, eliminates it)
77 | - has a bit of a check on the anisotropy of the pixels (bad solutions are not likely to have square pixels)
78 | - updates database with rotation of the field, pixel scale ...
79 |
80 | Args:
81 | frame_path: path to the fits file with WCS
82 | user_config: dictionary containing the user config
83 | frame_id: integer, database, frames column id.
84 |
85 | Returns:
86 |
87 | """
88 | logger = logging.getLogger("lightcurver.plate_solving")
89 | logger.info(f'Post plate solve steps for frame {frame_id} (path {frame_path})')
90 | # our object might be out of the footprint of the image!
91 | final_header = fits.getheader(frame_path)
92 | # replace the wcs above with the WCS we saved in the header of the image (contains naxis)
93 | wcs = WCS(final_header)
94 | # last check
95 | if not wcs.is_celestial:
96 | logger.info(f'Frame {frame_id} (path {frame_path}) does not contain a valid WCS.')
97 | return # do nothing more, this frame will not be selected later.
98 | in_footprint = user_config['ROI_SkyCoord'].contained_by(wcs)
99 | if in_footprint:
100 | execute_sqlite_query(query="UPDATE frames SET roi_in_footprint = ? WHERE id = ?",
101 | params=(1, frame_id), is_select=False)
102 | # also, let us save the actual footprint
103 | footprint_array = wcs.calc_footprint()
104 | database_insert_single_footprint(frame_id, footprint_array)
105 | # and let us compute the pixel scale!
106 | psx, psy = proj_plane_pixel_scales(wcs)
107 | # these are most likely in deg/pixel. astropy says that wcs should carry a cunit attribute,
108 | # but it does not. Anyway, let us assume deg/pixel -- never seen anything else when working with wcs
109 | # of wide field images.
110 | anisotropy = float(abs(psx - psy) / (psx + psy))
111 |         suspicious_astrometry = anisotropy > float(user_config['max_pixel_anisotropy'])
112 | if suspicious_astrometry:
113 |             message = "Your pixels are more rectangular than your config tolerance allows! Flagging (eliminating) this frame. "
114 |             message += f"Anisotropy: {anisotropy:.01%}, path: {frame_path}, db id: {frame_id}."
115 | logger.info(message)
116 | execute_sqlite_query(query="""UPDATE
117 | frames
118 | SET
119 | eliminated = 1,
120 | comment='suspicious_plate_solved'
121 | WHERE
122 | id = ?""",
123 | params=(frame_id,), is_select=False)
124 | angle_to_north = get_angle_wcs(wcs)
125 | pixel_scale = 0.5 * (psx + psy) * 3600 # to arcsecond / pixel
126 | # first, set the pixel scale
127 | execute_sqlite_query(query="UPDATE frames SET pixel_scale = ? WHERE id = ?",
128 | params=(pixel_scale, frame_id), is_select=False)
129 | # then, use it to compute the seeing in arcseconds, and insert the angle at the same time to combine queries
130 | execute_sqlite_query(query="""UPDATE
131 | frames
132 | SET
133 | seeing_arcseconds = pixel_scale * seeing_pixels,
134 | angle_to_north = ?
135 | WHERE
136 | id = ?""",
137 | params=(angle_to_north, frame_id), is_select=False)
138 | logger.info(f'Updated pixel scale: {pixel_scale:.03f}"/pixel for frame {frame_id} (path {frame_path}).')
139 |
140 |
141 | def solve_one_image_and_update_database(image_path, sources_path, user_config, frame_id):
142 | """
143 | solves image using the sources in sources_path, then adds useful things to the database.
144 | If already solved according to user_config, just does the database part.
145 | Args:
146 | image_path: path to fits file containing the image
147 | sources_path: path to fits file containing the sources extracted from the image
148 | user_config: dictionary read by the pipeline
149 | frame_id: the database frame id
150 | Returns:
151 | nothing
152 | """
153 | logger = logging.getLogger("lightcurver.plate_solving")
154 | if not user_config['already_plate_solved']:
155 | logger.info(f'Attempting astrometric solution for frame {frame_id} (path: {image_path}).')
156 | try:
157 | wcs = solve_one_image(image_path, sources_path, user_config)
158 | success = wcs.is_celestial
159 | except CouldNotSolveError:
160 | success = False
161 | else:
162 | logger.info(f'Frame {frame_id} (path: {image_path}) is already solved according to user config.')
163 | success = True
164 |
165 | if success:
166 | post_plate_solve_steps(frame_id=frame_id, frame_path=image_path, user_config=user_config)
167 | # at the end, set the image to plate solved in db, and flag it as having had a plate solve attempt.
168 | execute_sqlite_query(query="UPDATE frames SET plate_solved = ?, attempted_plate_solve = 1 WHERE id = ?",
169 | params=(1 if success else 0, frame_id), is_select=False)
170 |
--------------------------------------------------------------------------------
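A minimal usage sketch for the plate-solving helpers above (illustrative, not an excerpt from the repository): it selects frames the same way as the `all_not_plate_solved` strategy and assumes, as elsewhere in the package, that the stored relative paths are resolved against the working directory.

    from lightcurver.structure.database import get_pandas
    from lightcurver.structure.user_config import get_user_config
    from lightcurver.processes.plate_solving import solve_one_image_and_update_database

    user_config = get_user_config()
    frames = get_pandas(columns=['id', 'image_relpath', 'sources_relpath'],
                        conditions=['eliminated = 0', 'plate_solved = 0'])
    for _, frame in frames.iterrows():
        solve_one_image_and_update_database(
            image_path=user_config['workdir'] / frame['image_relpath'],
            sources_path=user_config['workdir'] / frame['sources_relpath'],
            user_config=user_config,
            frame_id=frame['id'])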
/lightcurver/processes/star_extraction.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from astropy.table import Table
3 | import sep
4 | from astropy.io import fits
5 | from ..plotting.sources_plotting import plot_sources
6 |
7 |
8 | def extract_stars(image_background_subtracted, variance_map, detection_threshold=3, min_area=10,
9 | debug_plot_path=None):
10 | """
11 | Extract star positions from an image using SEP (Source Extractor as a Python library).
12 |
13 | Parameters:
14 | image_background_subtracted: image contained in numpy 2d array, with background subtracted!
15 | variance_map: image, map of variance.
16 | detection_threshold: float, in units of sigma, default 3
17 |         min_area: int, min number of pixels in detection for it to be considered, default 10
18 |         debug_plot_path: optional path; if provided, a diagnostic plot of the detected sources is saved there.
19 | Returns:
20 | astropy.table.Table: Table of detected sources.
21 | """
22 |
23 | objects = sep.extract(data=image_background_subtracted,
24 | thresh=detection_threshold,
25 | var=variance_map,
26 | minarea=min_area)
27 |
28 | sources = Table()
29 | for col in objects.dtype.names:
30 | sources[col] = objects[col]
31 |
32 | # just to stick to the daostarfinder way
33 | sources['xcentroid'] = sources['x']
34 | sources['ycentroid'] = sources['y']
35 |
36 |     # remove the weirdly elongated ones -- probably not stars, and we care about stars the most
37 | elongation = sources['a'] / sources['b']
38 | med_el = np.median(elongation)
39 | std_el = np.std(elongation)
40 | sources['elongation'] = elongation
41 | sources = sources[sources['elongation'] < med_el + 3*std_el]
42 |
43 | # define some FWHM quantity
44 | sources['FWHM'] = 2 * (np.log(2) * (sources['a']**2 + sources['b']**2))**0.5
45 | # and ellipticity
46 | sources['ellipticity'] = 1 - sources['b'] / sources['a']
47 |
48 | # brightest first
49 | sources.sort('flux', reverse=True)
50 |
51 | if debug_plot_path is not None:
52 | debug_plot_path.parent.mkdir(exist_ok=True)
53 | plot_sources(sources=sources, image=image_background_subtracted, save_path=debug_plot_path)
54 |
55 | return sources
56 |
57 |
58 | def extract_sources_from_sky_sub_image(image_path, sources_path, detection_threshold, min_area,
59 | exptime, background_rms_electron_per_second, debug_plot_path):
60 | """
61 |     Not used in the main pipeline but can be useful. Given an image, extracts
62 |     sources and saves them at sources_path.
63 | Args:
64 | image_path: path, str: fits file containing an image.
65 | sources_path: path, str: where the sources are saved
66 | detection_threshold: float, significance for acceptance
67 | min_area: int, minimum number of pixels above threshold
68 | exptime: float, exposure time of the frame (used to calculate a variance map,
69 | because the pipeline stores the frames in e- / second)
70 | background_rms_electron_per_second: float, scatter in the background in e- / second.
71 | Used to calculate the variance map as well.
72 | debug_plot_path: where we (potentially) save a plot of our sources.
73 | Returns:
74 | Nothing
75 | """
76 | image_electrons = exptime * fits.getdata(image_path).astype(float)
77 | background_rms_electrons = exptime * background_rms_electron_per_second
78 |
79 | variance_map = background_rms_electrons**2 + np.abs(image_electrons)
80 |
81 | sources = extract_stars(image_background_subtracted=image_electrons,
82 | detection_threshold=detection_threshold,
83 | variance_map=variance_map,
84 | min_area=min_area,
85 | debug_plot_path=debug_plot_path)
86 |
87 | sources.write(sources_path, overwrite=True)
88 |
89 |
--------------------------------------------------------------------------------
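A sketch of calling extract_stars directly on a sky-subtracted frame (the file name, exposure time and background level are hypothetical; the variance map is built exactly as in extract_sources_from_sky_sub_image above):

    import numpy as np
    from astropy.io import fits
    from lightcurver.processes.star_extraction import extract_stars

    exptime = 30.0  # seconds
    background_rms_electron_per_second = 2.0
    image_electrons = exptime * fits.getdata('sky_subtracted_frame.fits').astype(float)
    variance_map = (exptime * background_rms_electron_per_second)**2 + np.abs(image_electrons)
    sources = extract_stars(image_background_subtracted=image_electrons,
                            variance_map=variance_map,
                            detection_threshold=3,
                            min_area=10)
    print(sources['xcentroid', 'ycentroid', 'flux', 'FWHM'][:5])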
/lightcurver/processes/star_querying.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from astropy.coordinates import SkyCoord
3 | import json
4 | import numpy as np
5 |
6 | from ..utilities.footprint import load_combined_footprint_from_db, get_frames_hash
7 | from ..structure.user_config import get_user_config
8 | from ..processes.frame_star_assignment import populate_stars_in_frames
9 | from ..utilities.gaia import find_gaia_stars
10 | from ..utilities.star_naming import generate_star_names
11 | from ..structure.database import get_pandas, execute_sqlite_query
12 | from ..plotting.sources_plotting import plot_footprints_with_stars
13 |
14 |
15 | def query_gaia_stars():
16 | """
17 | this is called by the workflow manager.
18 | Queries the frames to be used, and checks the star selection strategy from the user config.
19 | Then queries the stars with the additional criteria from the config (mag range, astrometric noise, photometric
20 | noise ...) and inserts them in the database for easy access.
21 | Also, runs the function that assigns stars to each frame depending on the respective footprint of the frame.
22 | Returns:
23 |
24 | """
25 | logger = logging.getLogger("lightcurver.querying_ref_stars_from_gaia")
26 | user_config = get_user_config()
27 | frames_info = get_pandas(columns=['id', 'pixel_scale'], conditions=['frames.eliminated != 1'])
28 | if user_config['star_selection_strategy'] != 'ROI_disk':
29 | # then it depends on the frames we're considering.
30 | frames_hash = get_frames_hash(frames_info['id'].to_list())
31 | else:
32 | # if ROI_disk, it does not depend on the frames: unique region defined by its radius.
33 | frames_hash = hash(user_config['ROI_disk_radius_arcseconds'])
34 |
35 | # before doing anything, check whether we are already done
36 | count = execute_sqlite_query("SELECT COUNT(*) FROM stars WHERE combined_footprint_hash = ?",
37 | params=(frames_hash,), is_select=True)[0][0]
38 | if count > 0 and not user_config['gaia_query_redo']:
39 | logger.info(f'Gaia stars already fetched for this footprint: {frames_hash}')
40 | logger.info('Still re-calculating which star is in which frame again.')
41 | # we still need to populate the new frames though
42 | populate_stars_in_frames()
43 | return
44 | elif count > 0 and user_config['gaia_query_redo']:
45 | logger.info(f'Gaia stars already fetched for this footprint: {frames_hash} but redo is True.')
46 | # then we need to purge the database from the stars queried with this footprint.
47 |         # TODO a given footprint hash corresponds to two footprint types (common and largest)
48 |         # TODO for now, the user has to set redo when changing the footprint type
49 |         execute_sqlite_query("DELETE FROM stars WHERE combined_footprint_hash = ?",
50 |                              params=(frames_hash,), is_select=False)
51 |         logger.info('Deleted previously queried stars.')
52 |
53 | if user_config['star_selection_strategy'] == 'common_footprint_stars':
54 |         logger.info('config star selection strategy: common footprint of the frames')
55 | _, common_footprint = load_combined_footprint_from_db(frames_hash)
56 | region_type = 'polygon'
57 | query_footprint = common_footprint['coordinates'][0]
58 | # then we want to make sure we use stars that are available in all frames.
59 | # this likely achieves the best precision, but is only possible typically in dedicated
60 | # monitoring programs with stable pointings.
61 | elif user_config['star_selection_strategy'] == 'stars_per_frame':
62 |         logger.info('config star selection strategy: combined largest footprint of the frames')
63 | largest_footprint, _ = load_combined_footprint_from_db(frames_hash)
64 | region_type = 'polygon'
65 | query_footprint = largest_footprint['coordinates'][0]
66 | # then, we must fall back to using stars selected in each individual frame.
67 | # here, we will query a larger footprint so that we have options in each
68 | # individual frame.
69 | elif user_config['star_selection_strategy'] == 'ROI_disk':
70 |         logger.info('config star selection strategy: in a disk around the ROI.')
71 | center = user_config['ROI_ra_deg'], user_config['ROI_dec_deg']
72 | radius = user_config['ROI_disk_radius_arcseconds'] / 3600.0
73 | region_type = 'circle'
74 | query_footprint = {'center': center, 'radius': radius}
75 | else:
76 |         raise RuntimeError(f"Not an agreed upon strategy for star selection: {user_config['star_selection_strategy']}")
77 |
78 | kwargs_query = {
79 | 'astrometric_excess_noise_max': user_config['star_max_astrometric_excess_noise'],
80 | 'gmag_range': (user_config['star_min_gmag'], user_config['star_max_gmag']),
81 | 'min_phot_g_mean_flux_over_error': user_config['min_phot_g_mean_flux_over_error'],
82 | 'gaia_provider': user_config['gaia_provider']
83 | }
84 |     logger.info(f'Querying stars with the following parameters: {kwargs_query}')
85 |
86 | stars_table = find_gaia_stars(region_type, query_footprint, **kwargs_query)
87 |
88 | message = f"Too few stars compared to the config criterion! Only {len(stars_table)} stars available."
89 | enough_stars = len(stars_table) >= user_config['min_number_stars']
90 | if not enough_stars:
91 |         logger.error(message + ' Force stopping.')
92 | assert enough_stars, message
93 |
94 | columns = ['combined_footprint_hash', 'name', 'ra', 'dec', 'gmag', 'rmag', 'bmag', 'pmra', 'pmdec', 'ref_epoch',
95 | 'gaia_id', 'distance_to_roi_arcsec']
96 | insert_query = f"INSERT INTO stars ({', '.join(columns)}) VALUES ({', '.join(len(columns)*['?'])})"
97 | stars_coord = SkyCoord(ra=stars_table['ra'], dec=stars_table['dec'])
98 | stars_table['distance_to_roi'] = stars_coord.separation(user_config['ROI_SkyCoord']).arcsecond
99 | # we do not want the ROI itself as a reference:
100 | stars_table = stars_table[stars_table['distance_to_roi'] > user_config['ROI_size']]
101 | stars_table.sort('distance_to_roi')
102 | # add a friendly name to each star (a, b, c, ....)
103 | stars_table['name'] = generate_star_names(len(stars_table))
104 | for star in stars_table:
105 |         # cast to Python floats: numpy float32 values can misbehave with sqlite, explicit casting avoids issues.
106 | star_data = (frames_hash, star['name'], float(star['ra']), float(star['dec']), float(star['phot_g_mean_mag']),
107 | float(star['phot_rp_mean_mag']), float(star['phot_bp_mean_mag']),
108 | float(star['pmra']), float(star['pmdec']), float(star['ref_epoch']), int(star['source_id']),
109 | star['distance_to_roi'])
110 | execute_sqlite_query(insert_query, params=star_data, is_select=False)
111 | logger.info('Calculating which star is in which frame.')
112 | populate_stars_in_frames()
113 | # let us also make a plot of how the gaia stars we queried are distributed within our footprint.
114 | query = """
115 | SELECT frames.id, footprints.polygon
116 | FROM footprints
117 | JOIN frames ON footprints.frame_id = frames.id
118 | WHERE frames.eliminated != 1;
119 | """
120 | results = execute_sqlite_query(query)
121 | polygon_list = [np.array(json.loads(result[1])) for result in results]
122 | save_path = user_config['plots_dir'] / 'footprints_with_gaia_stars.jpg'
123 | plot_footprints_with_stars(footprint_arrays=polygon_list, stars=stars_table.to_pandas(), save_path=save_path)
124 | logger.info(f'Plot of the queried reference Gaia stars saved at {save_path}.')
125 |
--------------------------------------------------------------------------------
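The stars inserted by query_gaia_stars can be read back by combined footprint hash; a small sketch (the frame ids are placeholders, the column names come from the INSERT statement above):

    from lightcurver.structure.database import execute_sqlite_query
    from lightcurver.utilities.footprint import get_frames_hash

    frames_hash = get_frames_hash([1, 2, 3])  # ids of the frames in use
    stars = execute_sqlite_query(
        "SELECT name, ra, dec, gmag, distance_to_roi_arcsec FROM stars WHERE combined_footprint_hash = ?",
        params=(frames_hash,), is_select=True, use_pandas=True)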
/lightcurver/scripts/initialize.py:
--------------------------------------------------------------------------------
1 | import os
2 | import shutil
3 | import argparse
4 | from ruamel.yaml import YAML
5 | from importlib import resources
6 | from pathlib import Path
7 |
8 |
9 | def copy_template(template_path, target_path):
10 | shutil.copy(template_path, target_path)
11 |
12 |
13 | def update_config(config_path):
14 | yaml = YAML()
15 | yaml.preserve_quotes = True
16 |
17 | with open(config_path, 'r') as file:
18 | config = yaml.load(file)
19 |
20 | # Prompt the user for new values
21 | new_value = input("Enter new value for 'example_key': ")
22 | config['example_key'] = new_value
23 |
24 | with open(config_path, 'w') as file:
25 | yaml.dump(config, file)
26 |
27 |
28 | def initialize():
29 | parser = argparse.ArgumentParser(description="Initialize the basic configuration for lightcurver.")
30 | parser.add_argument('--workdir', type=str, help="The path to the desired working directory.",
31 | default=".")
32 | parser.add_argument('--roi_name', type=str, help="Name of the region of interest.",
33 | default=None)
34 | parser.add_argument('--roi_ra', type=float, help="R.A. in degrees of the region of interest.",
35 | default=None)
36 | parser.add_argument('--roi_dec', type=float, help="Dec. in degrees of the region of interest.",
37 | default=None)
38 | parser.add_argument('--photom_band', type=str, help="Photometric filter of the observations.",
39 | default=None)
40 | args = parser.parse_args()
41 | workdir = Path(args.workdir).absolute()
42 | workdir.mkdir(exist_ok=True)
43 | print(f'Initializing working directory at {workdir}')
44 | # template config file in the installed package
45 | with resources.open_text('lightcurver.pipeline.example_config_file', 'config.yaml') as ff:
46 | config_path = workdir / 'config.yaml'
47 | with open(config_path, 'w') as new_file:
48 | new_file.write(ff.read())
49 |
50 | # header parser directory
51 | parser_dir = workdir / 'header_parser'
52 | parser_dir.mkdir(exist_ok=True)
53 | parser_file = parser_dir / 'parse_header.py'
54 | with open(parser_file, 'w') as ff:
55 | ff.write(f"""
56 | def parse_header(header):
57 | raise RuntimeError('Adjust the header parser function at {parser_file}')
58 | # example:
59 | from dateutil import parser
60 | from astropy.time import Time
61 | exptime = header['exptime']
62 | gain = header['gain']
63 | time = Time(parser.parse(header['obstart']))
64 | return {{'exptime': exptime, 'gain': gain, 'mjd': time.mjd}}
65 |
66 | """)
67 |
68 | # adjusting config file
69 | yaml = YAML()
70 | yaml.preserve_quotes = True
71 |
72 | with open(config_path, 'r') as file:
73 | config = yaml.load(file)
74 | config['workdir'] = str(workdir)
75 | if args.roi_name is None:
76 | args.roi_name = input("Name of the target? ").strip()
77 | if args.roi_ra is None:
78 | args.roi_ra = float(input("Right ascension of the target? "))
79 | if args.roi_dec is None:
80 | args.roi_dec = float(input("Declination of the target? "))
81 | config["ROI"] = {args.roi_name: {'coordinates': [args.roi_ra, args.roi_dec]}}
82 |
83 | if args.photom_band is None:
84 | config['photometric_band'] = input('Photometric band of the observations? ').strip()
85 |
86 | with open(config_path, 'w') as file:
87 | yaml.dump(config, file)
88 | print(f"Adapt the header parser at {parser_file}.")
89 | print(f"Prepared rough configuration at {config_path} -- go through it and refine it.")
90 |
91 |
--------------------------------------------------------------------------------
/lightcurver/scripts/run.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import os
3 | import yaml
4 | from importlib import resources
5 |
6 | from lightcurver.pipeline.workflow_manager import WorkflowManager
7 |
8 |
9 | def run():
10 | # loading the possible tasks from the yaml file defining the pipeline.
11 | with resources.open_text('lightcurver.pipeline', 'pipeline_dependency_graph.yaml') as file:
12 | pipe_config = yaml.safe_load(file)
13 | task_list = ' - ' + '\n - '.join([task['name'] for task in pipe_config['tasks']])
14 |
15 | docstring = f"""
16 | Run the lightcurver pipeline.
17 |
18 |     The pipeline can be run in its entirety or from a specific start point to a stop point. Most of the time,
19 | running it entirely is fine as it is incremental. For testing purposes, the arguments below can be added
20 | to run specific parts only.
21 |
22 | Arguments:
23 | - `--start`: The name of the step to begin execution from.
24 | - `--stop`: The name of the step to stop execution at.
25 |
26 | List of step names that can be passed to --start or --stop:
27 | {task_list}
28 |
29 | Examples:
30 | 1. Run the entire pipeline (fine in most cases):
31 | `python run.py config.yaml`
32 |
33 | 2. Run from a specific step:
34 | `python run.py config.yaml --start plate_solving`
35 |
36 | 3. Run up to a specific step:
37 | `python run.py config.yaml --stop star_photometry`
38 |
39 | 4. Run from a start step to a stop step:
40 | `python run.py config.yaml --start plate_solving --stop star_photometry`
41 |
42 |
43 | """
44 |
45 | parser = argparse.ArgumentParser(description=docstring, formatter_class=argparse.RawTextHelpFormatter)
46 |
47 | parser.add_argument('config_file', type=str,
48 | help="The path to the config.yaml configuration file.")
49 |
50 | parser.add_argument('--start', type=str,
51 | help="Name of the step to start the pipeline from. Default: start of pipeline.", default=None)
52 |
53 | parser.add_argument('--stop', type=str,
54 | help="Name of the step to stop the pipeline at. Default: end of pipeline.",
55 | default=None)
56 |
57 | args = parser.parse_args()
58 |
59 | os.environ['LIGHTCURVER_CONFIG'] = args.config_file
60 | wf_manager = WorkflowManager()
61 | wf_manager.run(start_step=args.start, stop_step=args.stop)
62 |
--------------------------------------------------------------------------------
/lightcurver/structure/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/lightcurver/structure/__init__.py
--------------------------------------------------------------------------------
/lightcurver/structure/exceptions.py:
--------------------------------------------------------------------------------
1 | class NoConfigFilePathInEnvironment(Exception):
2 | def __init__(self):
3 | message = """You need to define the path to your configuration file,
4 | e.g. export LIGHTCURVER_CONFIG="/path/to/user_config.yaml"
5 | Then re-run the pipeline.
6 | """
7 | super().__init__(message)
8 |
9 |
10 | class TaskWasNotSuccessful(Exception):
11 | def __init__(self, message):
12 | super().__init__(message)
13 |
--------------------------------------------------------------------------------
/lightcurver/structure/user_config.py:
--------------------------------------------------------------------------------
1 | import yaml
2 | import os
3 | from pathlib import Path
4 | from astropy.coordinates import SkyCoord
5 | from astropy import units as u
6 | from importlib import resources
7 |
8 | from .exceptions import NoConfigFilePathInEnvironment
9 |
10 |
11 | def get_user_config():
12 | """
13 | This reads the yaml file containing the user config.
14 | Then processes some of its parameters and builds others.
15 |
16 | Returns:
17 | dictionary of parameters.
18 | """
19 | if 'LIGHTCURVER_CONFIG' not in os.environ:
20 | raise NoConfigFilePathInEnvironment
21 | config_path = os.environ['LIGHTCURVER_CONFIG']
22 | with open(config_path, 'r') as file:
23 | config = yaml.safe_load(file)
24 |
25 | roi_keys = list(config['ROI'].keys())
26 |     config['roi_name'] = roi_keys[0]
27 | ra, dec = config['ROI'][config['roi_name']]['coordinates']
28 | config['ROI_ra_deg'] = ra
29 | config['ROI_dec_deg'] = dec
30 | config['ROI_SkyCoord'] = SkyCoord(ra*u.deg, dec*u.deg)
31 |
32 | assert 'raw_dirs' in config
33 | raw_dirs = config['raw_dirs']
34 | if type(raw_dirs) is list:
35 | raw_dirs = [Path(pp) for pp in raw_dirs]
36 | elif type(raw_dirs) is str:
37 | raw_dirs = [Path(raw_dirs)]
38 | config['raw_dirs'] = raw_dirs
39 |
40 | assert 'workdir' in config
41 | config['workdir'] = Path(config['workdir'])
42 | config['database_path'] = config['workdir'] / 'database.sqlite3'
43 | config['plots_dir'] = config['workdir'] / 'plots'
44 | config['logs_dir'] = config['workdir'] / 'logs'
45 | config['frames_dir'] = config['workdir'] / 'frames'
46 | config['regions_path'] = config['workdir'] / 'regions.h5'
47 | config['psfs_path'] = config['workdir'] / 'psfs.h5'
48 | for directory in [config['plots_dir'], config['logs_dir'], config['frames_dir']]:
49 | directory.mkdir(parents=True, exist_ok=True)
50 |
51 | # star names: make it a list if user defined a string.
52 | # e.g. stars_to_use = 'abcd' --> ['a', 'b', 'c', 'd']
53 | if type(config['stars_to_use_psf']) is str:
54 | config['stars_to_use_psf'] = [c for c in config['stars_to_use_psf']]
55 | if type(config['stars_to_use_norm']) is str:
56 | config['stars_to_use_norm'] = [c for c in config['stars_to_use_norm']]
57 | if type(config['stars_to_exclude_psf']) is str:
58 | config['stars_to_exclude_psf'] = [c for c in config['stars_to_exclude_psf']]
59 | if type(config['stars_to_exclude_norm']) is str:
60 | config['stars_to_exclude_norm'] = [c for c in config['stars_to_exclude_norm']]
61 |
62 | # photometric bands check
63 | photom_band = config['photometric_band']
64 | if photom_band in ['r_sdss', 'i_sdss', 'g_sdss', 'V', 'R', 'Ic', 'B_T', 'V_T']:
65 | config['reference_absolute_photometric_survey'] = 'gaia'
66 | elif 'panstarrs' in photom_band:
67 | # check declination
68 | if dec < -30.5:
69 | raise RuntimeError('With this declination, '
70 | 'it is unlikely you will find pan-starrs magnitudes for absolute calibration.')
71 | config['reference_absolute_photometric_survey'] = 'panstarrs'
72 | else:
73 | raise RuntimeError(f'Config check: not a photometric band we implemented: {photom_band}')
74 |
75 | # constraints on ROI cutout prep:
76 | if 'constraints_on_frame_columns_for_roi' not in config:
77 | config['constraints_on_frame_columns_for_roi'] = {}
78 | if 'constraints_on_normalization_coeff' not in config:
79 | config['constraints_on_normalization_coeff'] = {}
80 |
81 | # fixing the astrometry: default false
82 | if 'fix_point_source_astrometry' not in config:
83 | config['fix_point_source_astrometry'] = False
84 |
85 | return config
86 |
87 |
88 | def compare_config_with_pipeline_delivered_one():
89 | # rough loading of user config:
90 | if 'LIGHTCURVER_CONFIG' not in os.environ:
91 | raise NoConfigFilePathInEnvironment
92 | config_path = os.environ['LIGHTCURVER_CONFIG']
93 | with open(config_path, 'r') as file:
94 | user_config = yaml.safe_load(file)
95 |
96 | # rough loading of pipeline config:
97 | pipeline_config_path = resources.files('lightcurver.pipeline.example_config_file') / 'config.yaml'
98 | with open(pipeline_config_path, 'r') as file:
99 | pipeline_config = yaml.safe_load(file)
100 |
101 | user_config_keys = set(user_config.keys())
102 | pipeline_config_keys = set(pipeline_config.keys())
103 |
104 | user_extra_keys = user_config_keys.difference(pipeline_config_keys)
105 | pipeline_extra_keys = pipeline_config_keys.difference(user_config_keys)
106 | pipeline_extra_keys_values = {key: pipeline_config[key] for key in pipeline_extra_keys}
107 |
108 | return {
109 | 'extra_keys_in_user_config': user_extra_keys,
110 | 'extra_keys_in_pipeline_config': pipeline_extra_keys,
111 | 'pipeline_extra_keys_values': pipeline_extra_keys_values
112 | }
113 |
--------------------------------------------------------------------------------
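A minimal sketch of loading the configuration outside of the pipeline scripts; the path to config.yaml is a placeholder, and the file is assumed to have been prepared (for instance by the initialize script):

    import os
    from lightcurver.structure.user_config import get_user_config

    os.environ['LIGHTCURVER_CONFIG'] = '/path/to/workdir/config.yaml'
    config = get_user_config()
    print(config['roi_name'], config['ROI_SkyCoord'], config['photometric_band'])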
/lightcurver/structure/user_header_parser.py:
--------------------------------------------------------------------------------
1 | import importlib.util
2 | import os
3 |
4 | from .user_config import get_user_config
5 |
6 |
7 | def load_custom_header_parser():
8 | """
9 | dynamically load the 'parse_header' function from the user-defined file
10 |     located at '$workdir/header_parser/parse_header.py'.
11 | """
12 | workspace_dir = get_user_config()['workdir']
13 |
14 | file_path = os.path.join(workspace_dir, 'header_parser', 'parse_header.py')
15 | module_name = 'header_parser'
16 |
17 | spec = importlib.util.spec_from_file_location(module_name, file_path)
18 | module = importlib.util.module_from_spec(spec)
19 | spec.loader.exec_module(module)
20 |
21 | try:
22 | parse_header_function = getattr(module, 'parse_header')
23 | return parse_header_function
24 | except AttributeError:
25 |         raise ImportError("The function 'parse_header' was not found in 'parse_header.py'.")
26 |
--------------------------------------------------------------------------------
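For reference, a hypothetical parse_header implementation of the kind this loader expects, to be saved at $workdir/header_parser/parse_header.py; the FITS keywords are instrument dependent and purely illustrative, while the returned keys follow the template written by the initialize script:

    from dateutil import parser
    from astropy.time import Time

    def parse_header(header):
        exptime = header['EXPTIME']
        gain = header['GAIN']
        mjd = Time(parser.parse(header['DATE-OBS'])).mjd
        return {'exptime': exptime, 'gain': gain, 'mjd': mjd}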
/lightcurver/utilities/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/lightcurver/utilities/__init__.py
--------------------------------------------------------------------------------
/lightcurver/utilities/absolute_magnitudes_from_gaia.py:
--------------------------------------------------------------------------------
1 | from ..structure.database import execute_sqlite_query
2 | from ..structure.user_config import get_user_config
3 |
4 |
5 | def save_gaia_catalog_photometry_to_database(gaia_id):
6 | """
7 | Converts Gaia magnitudes to other photometric magnitudes using polynomial coefficients.
8 | For example, the sdss coefficients are from table 5.6 of
9 | https://gea.esac.esa.int/archive/documentation/GEDR3/Data_processing/chap_cu5pho/cu5pho_sec_photSystem/cu5pho_ssec_photRelations.html
10 |     (the table gives G - x as a polynomial in BP - RP; here we compute x = G - polynomial.)
11 | Args:
12 | gaia_id (int): star at hand for which we would like to compute photometry in the band of the config.
13 | Returns:
14 | None
15 | """
16 | # taken from tables 5.6 and 5.7, see link in docstring
17 | coefficients = {
18 | 'r_sdss': [-0.09837, 0.08592, 0.1907, -0.1701, 0.02263],
19 | 'i_sdss': [-0.293, 0.6404, -0.09609, -0.002104],
20 | 'g_sdss': [0.2199, -0.6365, -0.1548, 0.0064],
21 | 'V': [-0.02704, 0.01424, -0.2156, 0.01426],
22 | 'R': [-0.02275, 0.3961, -0.1243, -0.01396, 0.003775],
23 | 'Ic': [0.01753, 0.76, -0.0991],
24 | 'V_T': [-0.01077, -0.0682, -0.2387, 0.02342],
25 | 'B_T': [-0.004288, -0.8547, 0.1244, -0.9085, 0.4843, -0.06814]
26 | }
27 | user_config = get_user_config()
28 | band = user_config['photometric_band']
29 | if band not in coefficients:
30 | raise ValueError(f"Unsupported band. Choose among {list(coefficients.keys())}.")
31 |
32 | # get the photometry from our database
33 | flux_query = """
34 | SELECT
35 | gaia_id,
36 | gmag as phot_g_mean_mag,
37 | bmag as phot_bp_mean_mag,
38 | rmag as phot_rp_mean_mag
39 | FROM
40 | stars
41 | WHERE
42 | gaia_id = ?
43 | LIMIT
44 | 1
45 | """
46 |
47 | gaia_mags = execute_sqlite_query(flux_query, (gaia_id,), is_select=True, use_pandas=True)
48 | coef = coefficients[band]
49 | bp_rp = gaia_mags['phot_bp_mean_mag'] - gaia_mags['phot_rp_mean_mag']
50 | g = gaia_mags['phot_g_mean_mag']
51 | band_mag = (g - sum(coef[i] * bp_rp**i for i in range(len(coef))))[0] # pandas series, extract 0th element
52 |
53 | # now we insert in the database:
54 | query = """
55 | INSERT OR REPLACE INTO catalog_star_photometry (catalog, band, mag, mag_err, original_catalog_id,
56 | star_gaia_id)
57 | VALUES (?, ?, ?, ?, ?, ?)
58 | """
59 | params = ('gaia',
60 | band,
61 | band_mag,
62 | 0.03, # nominal mag scatter for relations above
63 | gaia_id, # not important here
64 | gaia_id)
65 | # insert and hopefully done.
66 | execute_sqlite_query(query, params, is_select=False)
67 |
--------------------------------------------------------------------------------
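A quick numerical check of the colour relation as implemented above; this only re-evaluates the polynomial and does not touch the database:

    coef = [-0.09837, 0.08592, 0.1907, -0.1701, 0.02263]  # r_sdss coefficients from the dictionary above
    bp_rp = 1.0  # example Gaia BP - RP colour
    correction = sum(c * bp_rp**i for i, c in enumerate(coef))
    # correction is about 0.031, so r_sdss ~ G - 0.031 for a star with BP - RP = 1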
/lightcurver/utilities/absolute_magnitudes_from_panstarrs.py:
--------------------------------------------------------------------------------
1 | from astroquery.mast import Catalogs
2 | from astropy.coordinates import SkyCoord
3 | from astropy import units as u
4 | import numpy as np
5 | import logging
6 |
7 | from ..structure.database import execute_sqlite_query
8 | from ..structure.user_config import get_user_config
9 |
10 | """
11 | This file is used for photometric calibration, optionally. We query pan-starrs and save magnitudes to our database.
12 | """
13 |
14 |
15 | def save_panstarrs_catalog_photometry_to_database(gaia_id):
16 | """
17 |     Queries Pan-STARRS around the star's position, keeps only matches with precise PSF photometry and astrometry, and saves the magnitude to the database.
18 | Warning: no correction implemented for proper motion. Gaia ref epoch and pan-starrs dr2 roughly match in time frame
19 | though.
20 | Args:
21 | gaia_id: gaia_id of the star as saved in the database
22 | returns:
23 | Nothing
24 | """
25 | logger = logging.getLogger('lightcurver.save_panstarrs_catalog_photometry_to_database')
26 | # 0. check whether we already have data for this star. If yes, we will not redo this step (slow).
27 | check_query = """
28 | SELECT COUNT(*) FROM catalog_star_photometry
29 | WHERE star_gaia_id = ? AND catalog = 'panstarrs'
30 | """
31 | check_params = (gaia_id, )
32 | result = execute_sqlite_query(check_query, check_params)
33 | if result[0][0] > 0:
34 | # magnitude already exists for this star, no need to proceed further
35 |         logger.info(f"Data already exists for Gaia ID {gaia_id} and catalog panstarrs")
36 | return
37 |
38 | # 1. query pan-starrs.
39 | mast_results = search_panstarrs_around_coordinates(gaia_id)
40 | # 2. check that we have the relevant data
41 | mag_dict = photometric_selection_heuristic(mast_results)
42 | if mag_dict is None:
43 | # no relevant information ended up being available.
44 | logger.warning(f"No relevant Pan-STARRS photometry found for star {gaia_id}.")
45 | return
46 | # 3. if pan-starrs had the right information, we insert.
47 | query = """
48 | INSERT OR REPLACE INTO catalog_star_photometry (catalog, band, mag, mag_err, original_catalog_id,
49 | star_gaia_id)
50 | VALUES (?, ?, ?, ?, ?, ?)
51 | """
52 | params = ('panstarrs',
53 | mag_dict['band'],
54 | mag_dict['mag'],
55 | mag_dict['mag_err'],
56 | mag_dict['catalog_ID'],
57 | gaia_id)
58 | # insert and done!
59 | execute_sqlite_query(query, params, is_select=False)
60 |
61 |
62 | def search_panstarrs_around_coordinates(gaia_id):
63 | """
64 |     Just using astroquery's MAST interface to find sources around the coordinates
65 | Args:
66 | gaia_id: int, gaia_id. ra and dec will be queried from our database.
67 | returns:
68 | MAST results in an astropy Table, with columns such as raMean, gMeanPSFMag, etc.
69 | """
70 | ra, dec = execute_sqlite_query('SELECT ra, dec FROM stars WHERE gaia_id = ?', (gaia_id, ))[0]
71 |
72 | coord = SkyCoord(ra=ra * u.deg, dec=dec * u.deg, frame='icrs')
73 | radius = 1.5 * u.arcsecond # this is generous given the magnitude of the proper motion of most stars.
74 | result = Catalogs.query_region(coord, radius=radius, catalog="PanSTARRS", data_release="dr2")
75 | return result
76 |
77 |
78 | def photometric_selection_heuristic(mast_results):
79 | """
80 | Just a helper function to eliminate bad photometry information: compares mast_result to actual band needed
81 | from user config
82 | Args:
83 | mast_results: astropy table output of search_panstarrs_around_coordinates above
84 | Return:
85 | dictionary with band, mag, mag_err, catalog ID, or None if mast_result does not contain the right information.
86 | """
87 | # ok, now we start cleaning up. Pan-Starrs has a tendency to not properly cluster identical detections.
88 | # Thus, we'll do some rough filtering on the nDetections column first.
89 | if len(mast_results) > 1:
90 | n_detections = mast_results['nDetections']
91 | max_index = np.argmax(n_detections)
92 |         # in such cases, the second source will typically have been detected one or two times at most,
93 |         # against many times for the main (correct) one.
94 | mast_results = mast_results[mast_results['nDetections'] > 0.2 * n_detections[max_index]]
95 | # so, eliminate such wrongly unmerged detections.
96 |
97 | # next, either we have nothing found in pan-starrs, or still multiple detections:
98 | if len(mast_results) != 1:
99 |         # can't do anything here: too much risk of selecting the wrong source
100 | return None
101 |
102 | # now check that the detection has photometry in the right bands depending on what we need.
103 | result = mast_results[0]
104 | config = get_user_config()
105 | band = config['photometric_band']
106 | # just a sanity check:
107 | if 'panstarrs' not in band:
108 | raise RuntimeError('Running a Pan-STARRS related function when the config file does not mention Pan-STARRS?')
109 | # see what coverage we have for this star:
110 | available_bands = [b for b in 'grizy' if result[f'{b}MeanPSFMag']]
111 | # now the band we need:
112 | band = band.replace('_panstarrs', '')
113 | if (band in 'grizy') and (band not in available_bands):
114 | # then, no can do.
115 | return None
116 | elif band in 'grizy':
117 | # then, simple.
118 | mag = result[f'{band}MeanPSFMag']
119 | mag_err = result[f'{band}MeanPSFMagErr']
120 | elif band == 'c':
121 | # composite, we need both g and r
122 | if ('g' not in available_bands) or ('r' not in available_bands):
123 | return None
124 | # we combine according to https://iopscience.iop.org/article/10.1088/1538-3873/aabadf/pdf (Eq 2)
125 | mag = 0.49 * result['gMeanPSFMag'] + 0.51 * result['rMeanPSFMag']
126 | # approx, same for uncertainty
127 | mag_err = 0.49 * result['gMeanPSFMagErr'] + 0.51 * result['rMeanPSFMagErr']
128 | elif band == 'o':
129 | # another composite, we need r and i
130 | if ('r' not in available_bands) or ('i' not in available_bands):
131 | return None
132 | # same as above, Eq (2) of link
133 | mag = 0.55 * result['rMeanPSFMag'] + 0.45 * result['iMeanPSFMag']
134 | # approx, same for uncertainty
135 | mag_err = 0.55 * result['rMeanPSFMagErr'] + 0.45 * result['iMeanPSFMagErr']
136 | else:
137 | # how did we end up here? For sanity, raise.
138 | raise RuntimeError(f'User config provided a band related to pan-starrs that we do not know about: {band}')
139 |
140 | # if we made it here, then we have a relatively safe catalog magnitude for the star at hand
141 | return {'band': band, 'mag': mag, 'mag_err': mag_err, 'catalog_ID': result['objID']}
142 |
--------------------------------------------------------------------------------
/lightcurver/utilities/chi2_selector.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from astropy.stats import sigma_clipped_stats
3 |
4 | from ..structure.user_config import get_user_config
5 | from ..structure.database import execute_sqlite_query
6 |
7 |
8 | def get_chi2_bounds(psf_or_fluxes):
9 | """
10 | This is a wrapper around the user config. We need chi2 bounds to only query the frames
11 | with a good enough PSF fit. There are several options to decide on the bounds:
12 | Params:
13 | psf_or_fluxes: string, either 'psf' or 'fluxes'.
14 | Returns:
15 | tuple (chi2_min, chi2_max)
16 | """
17 | user_config = get_user_config()
18 | assert psf_or_fluxes in ['psf', 'fluxes'], f'get_chi2_bounds: not something I know of: {psf_or_fluxes}'
19 | conf = user_config[f'{psf_or_fluxes}_fit_exclude_strategy']
20 | if conf is None:
21 | return -np.inf, np.inf
22 | elif type(conf) is dict:
23 | # then we're either dealing with bounds, or sigma clip
24 | assert len(conf.keys()) == 1
25 | key = list(conf.keys())[0]
26 | conf, val = key, conf[key]
27 | if conf == 'sigma_clip':
28 | # here we need to query the chi2. we'll just query them all to have
29 | # a feeling of what chi2 values we have, can be a systematic in the noisemaps.
30 | if psf_or_fluxes == 'psf':
31 | chi2val = execute_sqlite_query("select chi2 from PSFs", is_select=True, use_pandas=True)
32 | else:
33 | chi2val = execute_sqlite_query("select chi2 from star_flux_in_frame",
34 | is_select=True, use_pandas=True)
35 | mean, median, std = sigma_clipped_stats(chi2val['chi2'], sigma=val)
36 | chi2_min = median - val * std
37 | chi2_max = median + val * std
38 | return chi2_min, chi2_max
39 | elif conf == 'threshold':
40 | return val
41 | else:
42 |         raise RuntimeError(f"Unexpected {psf_or_fluxes}_fit_exclude_strategy: {conf}. Valid: None, 'sigma_clip' or 'threshold'")
43 |
--------------------------------------------------------------------------------
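Illustrative shapes for the corresponding config entry ('psf_fit_exclude_strategy' or 'fluxes_fit_exclude_strategy'), matching the three branches handled above; these are sketches of the accepted values, not excerpts from the example config file:

    strategy_none = None                           # no exclusion: returns (-inf, +inf)
    strategy_clip = {'sigma_clip': 3}              # bounds at median +/- 3 * std of the stored chi2 values
    strategy_bounds = {'threshold': (0.0, 3.0)}    # returned as-is as (chi2_min, chi2_max)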
/lightcurver/utilities/footprint.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from shapely.geometry import Polygon, mapping
3 | from functools import reduce
4 | import json
5 | from astropy.io import fits
6 | from astropy.wcs import WCS
7 |
8 | from ..structure.database import execute_sqlite_query, get_pandas
9 | from ..structure.user_config import get_user_config
10 |
11 |
12 | def get_combined_footprint_hash(user_config, frames_id_list):
13 | """
14 | Calculates the hash of the combined footprint of the frames whose id is in frames_id_list.
15 | Args:
16 | user_config: dictionary obtained from structure.config.get_user_config
17 | frames_id_list: list of integers, which frames are we using.
18 |
19 | Returns:
20 | frames_hash, integer
21 |
22 | """
23 |     if user_config['star_selection_strategy'] != 'ROI_disk':
24 |         # then it depends on the frames we're considering.
25 |         return get_frames_hash(frames_id_list)
26 |     else:
27 |         # if ROI_disk, it does not depend on the frames: unique region defined by its radius.
28 |         return hash(user_config['ROI_disk_radius_arcseconds'])
29 |
30 |
31 | def calc_common_and_total_footprint(list_of_footprints):
32 | """
33 | Calculate the common (intersection) and largest (union) footprints from a list of numpy arrays: products
34 | of astropy.wcs.WCS.calc_footprint.
35 | Then determines both the intersection and union of these
36 | footprints using Shapely Polygons. The intersection represents the area common to all frames,
37 | while the union covers the total area spanned by any of the frames.
38 |
39 | Parameters:
40 |     - list_of_footprints: a list of footprint arrays, as produced by astropy.wcs.WCS.calc_footprint.
41 |
42 |     Returns:
43 |     - a tuple (common_footprint, largest_footprint):
44 |       common_footprint: A Shapely Polygon representing the intersected footprint common to all WCS footprints.
45 |       largest_footprint: A Shapely Polygon representing the union of all WCS footprints.
46 | """
47 | wcs_footprints = list_of_footprints
48 |
49 | polygons = [Polygon(footprint) for footprint in wcs_footprints]
50 | try:
51 | common_footprint = reduce(lambda x, y: x.intersection(y), polygons)
52 | common_footprint = common_footprint.simplify(tolerance=0.001, preserve_topology=True)
53 | except TypeError:
54 | # we might have no common footprint?
55 | common_footprint = None
56 |
57 | largest_footprint = reduce(lambda x, y: x.union(y), polygons)
58 | largest_footprint = largest_footprint.simplify(tolerance=0.001, preserve_topology=True)
59 |
60 | return common_footprint, largest_footprint
61 |
62 |
63 | def database_insert_single_footprint(frame_id, footprint_array):
64 | polygon_list = footprint_array.tolist()
65 | polygon_str = json.dumps(polygon_list)
66 |
67 | execute_sqlite_query(query="INSERT OR REPLACE INTO footprints (frame_id, polygon) VALUES (?, ?)",
68 | params=(frame_id, polygon_str),
69 | is_select=False)
70 |
71 |
72 | def database_get_footprint(frame_id):
73 | result = execute_sqlite_query(query="SELECT polygon FROM footprints WHERE frame_id = ?",
74 | params=(frame_id,),
75 | is_select=True)[0]
76 |
77 |     polygon_list = json.loads(result[0])
78 | footprint_polygon = np.array(polygon_list)
79 |
80 | return footprint_polygon
81 |
82 |
83 | def get_frames_hash(frames_ids):
84 | """
85 | when calculating footprints, we need a way to identify which footprint was calculated from which frames.
86 | I don't want to deal with the relational many-to-many situation that will arise in a relational database,
87 | so let's calculate a hash of the integers of the frames that were used to calculate a footprint.
88 |     then, to check for a footprint, we can just query the hash.
89 | Args:
90 | frames_ids: list of integers, frames.id in the database
91 |
92 | Returns:
93 |         an integer hash
94 |
95 | """
96 | assert len(set(frames_ids)) == len(frames_ids), "Non-unique frame ids passed to this function"
97 | sorted_frame_ids = sorted(frames_ids)
98 | frame_ids_tuple = tuple(sorted_frame_ids)
99 | return hash(frame_ids_tuple)
100 |
101 |
102 | def save_combined_footprints_to_db(frames_hash, common_footprint, largest_footprint):
103 |
104 | common_str = json.dumps(mapping(common_footprint))
105 | largest_str = json.dumps(mapping(largest_footprint))
106 | save_query = "INSERT INTO combined_footprint (hash, largest, common) VALUES (?, ?, ?)"
107 | execute_sqlite_query(save_query,
108 | params=(frames_hash, largest_str, common_str),
109 | is_select=False)
110 |
111 |
112 | def load_combined_footprint_from_db(frames_hash):
113 |
114 | query = "SELECT largest, common FROM combined_footprint WHERE hash = ?"
115 |     results = execute_sqlite_query(query,
116 |                                    params=(frames_hash,),
117 |                                    is_select=True)
118 |     if not results:
119 |         # no combined footprint stored for this hash
120 |         return None
121 |     largest = json.loads(results[0][0])
122 |     common = json.loads(results[0][1])
123 |     return largest, common
124 |
125 |
126 | def check_in_footprint_for_all_images():
127 | """
128 | Just a wrapper for running the footprint check. Can be useful to manually execute in some cases.
129 | Will
130 |     - load the headers (and hence the WCS) of all the plate solved, non-eliminated images
131 |     - check if the ROI coordinates are in the footprint defined by the WCS
132 |     - update the frames table accordingly.
133 |     We skip the footprints table: since this is the ROI and not a simple star, we want a better check
134 |     than the simple gnomonic projection we are forced to rely on when using the footprints table.
135 |
136 | Returns:
137 | Nothing
138 | """
139 | frames_to_process = get_pandas(columns=['id', 'image_relpath'],
140 | conditions=['plate_solved = 1', 'eliminated = 0'])
141 | user_config = get_user_config()
142 |
143 | for i, frame in frames_to_process.iterrows():
144 | frame_id = frame['id']
145 | frame_path = user_config['workdir'] / frame['image_relpath']
146 | final_header = fits.getheader(frame_path)
147 | wcs = WCS(final_header)
148 | in_footprint = user_config['ROI_SkyCoord'].contained_by(wcs)
149 | execute_sqlite_query(query="UPDATE frames SET roi_in_footprint = ? WHERE id = ?",
150 | params=(int(in_footprint), frame_id), is_select=False)
151 |
152 |
153 | def identify_and_eliminate_bad_pointings():
154 | """
155 | Called after calculating the footprints. Will identify pointings that are ~really~ different, and
156 | flag them in the database ('eliminated = 1', 'comment = "bad_pointing"')
157 | Returns: nothing
158 |
159 | """
160 |
161 | select_query = """
162 | SELECT frames.id, footprints.polygon
163 | FROM footprints
164 | JOIN frames ON footprints.frame_id = frames.id
165 | WHERE frames.eliminated != 1;
166 | """
167 | update_query = """
168 | UPDATE frames
169 | SET comment = 'bad_pointing', eliminated = 1
170 | WHERE id = ?;
171 | """
172 |
173 | results = execute_sqlite_query(select_query, is_select=True, use_pandas=True)
174 | mean_positions = []
175 |
176 | for i, row in results.iterrows():
177 | frame_id = row['id']
178 | polygon = row['polygon']
179 | polygon = np.array(json.loads(polygon))
180 | mean_position = np.mean(polygon, axis=0)
181 | mean_positions.append((frame_id, mean_position))
182 |
183 | all_means = np.array([pos for _, pos in mean_positions])
184 | overall_mean = np.mean(all_means, axis=0)
185 |
186 | # distance of each frame's mean position from the overall mean
187 | deviations = [(frame_id, np.linalg.norm(mean_pos - overall_mean)) for frame_id, mean_pos in mean_positions]
188 |
189 | # threshold
190 | deviation_values = [dev for _, dev in deviations]
191 | mean_deviation = np.mean(deviation_values)
192 | std_deviation = np.std(deviation_values)
193 | threshold = mean_deviation + 5 * std_deviation # quite a generous threshold.
194 |
195 | # flag frames with significant deviation
196 | bad_frames = [frame_id for frame_id, dev in deviations if dev > threshold]
197 |
198 | for frame_id in bad_frames:
199 | execute_sqlite_query(update_query, params=(frame_id,))
200 |
201 |
202 | def get_angle_wcs(wcs_object):
203 | """
204 | Takes a WCS object, and returns the angle in degrees to the North (so, angle relative to "North up, East left")
205 | Args:
206 | wcs_object: astropy WCS object
207 |
208 | Returns:
209 | angle: float, angle in degrees.
210 | """
211 |
212 | if hasattr(wcs_object.wcs, 'cd'):
213 | matrix = wcs_object.wcs.cd
214 | elif hasattr(wcs_object.wcs, 'pc'):
215 | matrix = wcs_object.wcs.pc
216 | else:
217 | raise ValueError("Neither CD nor PC matrix found in WCS.")
218 |
219 | cd1_1, cd1_2 = matrix[0, 0], matrix[0, 1]
220 | cd2_1, cd2_2 = matrix[1, 0], matrix[1, 1]
221 |
222 | angle = np.arctan2(-cd1_2, cd2_2) * 180.0 / np.pi
223 |
224 | return angle
225 |
--------------------------------------------------------------------------------
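A sketch tying together the combined-footprint helpers above; the footprint arrays mimic what astropy.wcs.WCS.calc_footprint returns, and the frame ids are placeholders:

    import numpy as np
    from lightcurver.utilities.footprint import (calc_common_and_total_footprint, get_frames_hash,
                                                 save_combined_footprints_to_db)

    footprints = [np.array([[10.00, -5.00], [10.10, -5.00], [10.10, -4.90], [10.00, -4.90]]),
                  np.array([[10.02, -5.02], [10.12, -5.02], [10.12, -4.92], [10.02, -4.92]])]
    common, largest = calc_common_and_total_footprint(footprints)
    frames_hash = get_frames_hash([1, 2])
    save_combined_footprints_to_db(frames_hash, common, largest)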
/lightcurver/utilities/gaia.py:
--------------------------------------------------------------------------------
1 | from astroquery.utils.tap.core import TapPlus
2 | from astropy.table import Table, Column
3 | import numpy as np
4 |
5 | """
6 | This file implements queries to Gaia, the aim being finding appropriate calibration stars in the field of interest.
7 | We save the astrometry and photometry of the queried stars, as the photometry can be used later for absolute
8 | zeropoint calibration.
9 | """
10 |
11 | # 2024-04-19: Gaia archive down.
12 | # let us query our Gaia stars from Vizier instead.
13 | vizier_to_gaia_column_mapping = {
14 | 'RA_ICRS': 'ra',
15 | 'DE_ICRS': 'dec',
16 | 'Gmag': 'phot_g_mean_mag',
17 | 'RPmag': 'phot_rp_mean_mag',
18 | 'BPmag': 'phot_bp_mean_mag',
19 | 'pmRA': 'pmra',
20 | 'pmDE': 'pmdec',
21 | 'Source': 'source_id',
22 | 'sepsi': 'astrometric_excess_noise_sig',
23 | 'RFG': 'phot_g_mean_flux_over_error'
24 | }
25 | gaia_to_vizier_column_mapping = {value: key for key, value in vizier_to_gaia_column_mapping.items()}
26 |
27 |
28 | def construct_where_conditions(gaia_provider, astrometric_excess_noise_max=None, gmag_range=None,
29 | min_phot_g_mean_flux_over_error=None):
30 | """
31 | utility function for the find functions below, just checks on the conditions and returns a list of potential
32 |     statements to use after 'WHERE'. Also takes care of formatting the table from which we will query.
33 | Args:
34 | gaia_provider: string, 'gaia' or 'vizier'
35 | astrometric_excess_noise_max: float, default None
36 | gmag_range: tuple of floats (min, max), default None
37 | min_phot_g_mean_flux_over_error: float, default None
38 |
39 | Returns:
40 | list of strings containing the conditions, and string containing the name of the table to query.
41 |
42 | """
43 | gaia_provider = gaia_provider.lower()
44 | assert gaia_provider in ['gaia', 'vizier'], "gaia_provider must be either 'gaia' or 'vizier'"
45 | if gaia_provider == 'gaia':
46 | query_table = "gaiadr3.gaia_source as gdr3 "
47 | else:
48 |         query_table = '"I/355/gaiadr3" AS gdr3 '
49 |
50 | where_conditions = []
51 |
52 | if astrometric_excess_noise_max is not None:
53 | col_name = 'astrometric_excess_noise_sig'
54 | if gaia_provider == 'vizier':
55 | col_name = gaia_to_vizier_column_mapping[col_name]
56 | where_conditions.append(f"gdr3.{col_name} < {astrometric_excess_noise_max}")
57 |
58 | if gmag_range is not None:
59 | col_name = 'phot_g_mean_mag'
60 | if gaia_provider == 'vizier':
61 | col_name = gaia_to_vizier_column_mapping[col_name]
62 | where_conditions.append(f"gdr3.{col_name} BETWEEN {gmag_range[0]} AND {gmag_range[1]}")
63 |
64 | if min_phot_g_mean_flux_over_error is not None:
65 | col_name = 'phot_g_mean_flux_over_error'
66 | if gaia_provider == 'vizier':
67 | col_name = gaia_to_vizier_column_mapping[col_name]
68 | where_conditions.append(f"gdr3.{col_name} > {min_phot_g_mean_flux_over_error}")
69 |
70 | return where_conditions, query_table
71 |
72 |
73 | def find_gaia_stars(region_type, *args, **kwargs):
74 | """
75 | Main function to query Gaia stars based on region type (Circle or Polygon).
76 |
77 | :param region_type: str, 'circle' or 'polygon' to define the type of region for the query.
78 | :param args: Arguments passed to the specific region query function.
79 | :param kwargs: Keyword arguments for filtering options, passed to the specific region query function.
80 | """
81 | if region_type.lower() == 'circle':
82 | stars_table = find_gaia_stars_in_circle(*args, **kwargs)
83 | elif region_type.lower() == 'polygon':
84 | stars_table = find_gaia_stars_in_polygon(*args, **kwargs)
85 | else:
86 | raise ValueError("region_type must be either 'Circle' or 'Polygon'")
87 |
88 | # it seems that there is some variation when querying gaia? sometimes columns are capitalized, others not.
89 | # so, force lower:
90 | for name in stars_table.colnames:
91 | new_name = name.lower()
92 | stars_table.rename_column(name, new_name)
93 | return stars_table
94 |
95 |
96 | def run_query(gaia_provider, adql_query):
97 | """
98 | Utility function to run the adql query constructed by other functions, given a gaia provider (gaia or vizier)
99 | Args:
100 | gaia_provider: str, 'gaia' or 'vizier'
101 | adql_query: str, an adql query.
102 |
103 | Returns:
104 | astropy table of sources, with columns following the gaia archive labelling.
105 | """
106 |
107 | # import Gaia here: on servers without an internet connection, this line waits until timeout
108 | # before printing a warning. For daily runs without an internet connection, better not import
109 | # Gaia when not needed.
110 | from astroquery.gaia import Gaia
111 |
112 | if gaia_provider.lower() == 'gaia':
113 | Gaia.MAIN_GAIA_TABLE = 'gaiadr3.gaia_source'
114 | Gaia.ROW_LIMIT = 2000
115 | job = Gaia.launch_job_async(adql_query)
116 | result = job.get_results()
117 | elif gaia_provider.lower() == 'vizier':
118 | tap = TapPlus(url="https://TAPVizieR.cds.unistra.fr/TAPVizieR/tap")
119 | job = tap.launch_job(adql_query)
120 | result_vizier = job.get_results()
121 | # change column names to what we expect from Gaia:
122 | result = Table()
123 | for vizier_col, gaia_col in vizier_to_gaia_column_mapping.items():
124 | if vizier_col in result_vizier.colnames:
125 | result[gaia_col] = result_vizier[vizier_col]
126 | # vizier does not provide the reference epoch.
127 | result['ref_epoch'] = Column(name='ref_epoch', data=2016.0 * np.ones(len(result), dtype=float))
128 | # check that we're using dr3, for which the ref epoch is indeed 2016:
129 | if 'gaiadr3' not in adql_query:
130 | raise FutureWarning("Using Vizier and 2016 as ref epoch, but not using Gaia DR3. Ref epoch changed? check.")
131 | else:
132 | raise RuntimeError("gaia_provider must be 'gaia' or 'vizier'")
133 |
134 | return result
135 |
136 |
137 | def find_gaia_stars_in_circle(center_radius, gaia_provider='gaia', astrometric_excess_noise_max=None, gmag_range=None,
138 | min_phot_g_mean_flux_over_error=None):
139 | """
140 | Query Gaia stars within a circle defined by a central point and radius, with additional filtering options.
141 |
142 | :param center_radius: dictionary {'center': tuple(ra,dec), 'radius': radius_degrees}
143 | :param gaia_provider: str, default 'gaia'. Either 'gaia' or 'vizier'
144 | :param astrometric_excess_noise_max: float, maximum allowed astrometric excess noise (None for no filter)
145 | :param gmag_range: tuple, magnitude range in g-band as (min_gmag, max_gmag) (None for no filter)
146 | :param min_phot_g_mean_flux_over_error: float, flux error correlates to variability. (None for no filter)
147 |
148 | Returns: astropy table of gaia sources
149 | """
150 | where_conditions, query_table = construct_where_conditions(gaia_provider, astrometric_excess_noise_max,
151 | gmag_range, min_phot_g_mean_flux_over_error)
152 |
153 | # Constructing the circle condition
154 | c = center_radius['center']
155 | r = center_radius['radius']
156 | # while handling the vizier weird column naming:
157 | ra_col = 'ra'
158 | dec_col = 'dec'
159 | if gaia_provider == 'vizier':
160 | ra_col = gaia_to_vizier_column_mapping[ra_col]
161 | dec_col = gaia_to_vizier_column_mapping[dec_col]
162 | where_conditions.append(
163 | f"1=CONTAINS(POINT('ICRS', gdr3.{ra_col}, gdr3.{dec_col}), CIRCLE('ICRS', {c[0]}, {c[1]}, {r}))"
164 | )
165 |
166 | where_clause = " AND ".join(where_conditions)
167 |
168 | adql_query = f"""
169 | SELECT * FROM {query_table}
170 | """
171 | if where_clause:
172 | adql_query += f" WHERE {where_clause}"
173 |
174 | result = run_query(gaia_provider=gaia_provider, adql_query=adql_query)
175 | return result
176 |
177 |
178 | def find_gaia_stars_in_polygon(vertices, gaia_provider='gaia', astrometric_excess_noise_max=None, gmag_range=None,
179 | min_phot_g_mean_flux_over_error=None):
180 | """
181 | Query Gaia stars within a polygon defined by a list of vertices, with additional filtering options.
182 |
183 | :param vertices: list of tuples, each tuple contains RA and Dec in degrees [(ra1, dec1), (ra2, dec2), ..., ]
184 | :param gaia_provider: str, default 'gaia'. Either 'gaia' or 'vizier'
185 | :param astrometric_excess_noise_max: float, maximum allowed astrometric excess noise (None for no filter)
186 | :param gmag_range: tuple, magnitude range in g-band as (min_gmag, max_gmag) (None for no filter)
187 | :param min_phot_g_mean_flux_over_error: float, flux error correlates to variability. (None for no filter)
188 |
189 | Returns: astropy table of gaia sources
190 | """
191 |
192 |     # (query_table below already carries the 'gdr3' alias that the WHERE conditions refer to)
193 | where_conditions, query_table = construct_where_conditions(gaia_provider, astrometric_excess_noise_max,
194 | gmag_range, min_phot_g_mean_flux_over_error)
195 |
196 | polygon_string = ', '.join([f"{vertex[0]},{vertex[1]}" for vertex in vertices])
197 |
198 | # handle the vizier weird column naming:
199 | ra_col = 'ra'
200 | dec_col = 'dec'
201 | if gaia_provider == 'vizier':
202 | ra_col = gaia_to_vizier_column_mapping[ra_col]
203 | dec_col = gaia_to_vizier_column_mapping[dec_col]
204 | where_conditions.append(
205 | f"1=CONTAINS(POINT('ICRS', gdr3.{ra_col}, gdr3.{dec_col}), POLYGON('ICRS', {polygon_string}))"
206 | )
207 |
208 | where_clause = " AND ".join(where_conditions)
209 |
210 |     adql_query = f"""
211 |     SELECT * FROM {query_table}
212 |     WHERE {where_clause}
213 |     """
214 |
215 | result = run_query(gaia_provider=gaia_provider, adql_query=adql_query)
216 | return result
217 |
218 |
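A minimal usage sketch of the circle query above; the pointing and the filter values are made up for illustration, and running it requires network access to the Gaia (or VizieR) TAP service. Only the function signature defined in this file is assumed:

    from lightcurver.utilities.gaia import find_gaia_stars_in_circle

    # hypothetical pointing and conservative quality cuts
    center_radius = {'center': (141.23, 2.32), 'radius': 0.1}  # (ra, dec) and radius in degrees
    stars = find_gaia_stars_in_circle(
        center_radius,
        gaia_provider='gaia',                    # or 'vizier' to use the VizieR mirror
        astrometric_excess_noise_max=0.5,        # keep well-behaved point sources
        gmag_range=(14.0, 20.0),                 # avoid saturated and very faint stars
        min_phot_g_mean_flux_over_error=50.0,    # discard noisy or likely variable photometry
    )
    print(f"{len(stars)} Gaia sources returned")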
--------------------------------------------------------------------------------
/lightcurver/utilities/image_coordinates.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 |
4 | def rescale_image_coordinates(xy_coordinates_array, image_shape):
5 | """
6 |     Translates and rescales the coordinates in xy_coordinates_array (origin at the bottom left of the image),
7 |     such that the new origin is the center of the image and the coordinates are expressed as a fraction of the
8 |     image size: roughly -0.5 at the lower/left edges, 0 at the center, and roughly +0.5 at the upper/right edges.
9 |     This function is only factored out to keep the convention consistent with the distortion modelling.
10 |
11 | Args:
12 | xy_coordinates_array: an array of shape (N, 2), denoting a list of coordinate pairs (x,y) with origin
13 | at the bottom left of the image.
14 | image_shape: shape of the image the coordinates refer to, obtained with `image.shape`
15 |
16 | Returns:
17 |         rescaled_xy_coordinates: an array of shape (N, 2) with origin at the center of the image, values roughly in [-0.5, 0.5]
18 | """
19 | image_dims = np.array(image_shape)[::-1] # reversed because y~lines, x~columns
20 | center = (image_dims - 1) / 2.
21 |
22 | rescaled_xy_coordinates = xy_coordinates_array - center
23 | rescaled_xy_coordinates /= image_dims
24 |
25 | return rescaled_xy_coordinates
26 |
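A short sketch of the convention implemented above, using a hypothetical 100x200 image (rows x columns); the printed values are approximate:

    import numpy as np
    from lightcurver.utilities.image_coordinates import rescale_image_coordinates

    image = np.zeros((100, 200))  # 100 rows (y), 200 columns (x)
    # bottom-left corner, exact center, top-right corner, as (x, y) with origin at the bottom left
    xy = np.array([[0., 0.], [99.5, 49.5], [199., 99.]])
    print(rescale_image_coordinates(xy, image.shape))
    # approximately [[-0.5, -0.5], [0., 0.], [0.5, 0.5]]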
--------------------------------------------------------------------------------
/lightcurver/utilities/lightcurves_postprocessing.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pandas as pd
3 | from scipy.stats import sigmaclip
4 | from copy import deepcopy
5 | import warnings
6 |
7 |
8 | def group_observations(df, threshold=0.8):
9 | grouped_results = []
10 | df_sorted = df.sort_values(by='mjd')
11 | point_sources = {col.split('_')[0] for col in df.columns if col.endswith('_flux') and not col.endswith('_d_flux')}
12 |
13 | def process_group(df_group):
14 | avg_mjd = df_group['mjd'].mean()
15 | scatter_mjd = df_group['mjd'].std()
16 |         # the std is NaN for a single-element group; make it 0 in that case
17 | scatter_mjd = 0. if np.isnan(scatter_mjd) else scatter_mjd
18 | this_epoch_group = {"mjd": avg_mjd, "scatter_mjd": scatter_mjd}
19 | flux_columns = [f'{ps}_flux' for ps in point_sources] + [f'{ps}_d_flux' for ps in point_sources]
20 | optional_averages = {col: df_group[col].mean() for col in df_group.columns if col not in (['mjd'] + flux_columns)}
21 | this_epoch_group.update(optional_averages)
22 | for ps in sorted(point_sources):
23 | col = f'{ps}_flux'
24 | d_col = f'{ps}_d_flux'
25 | curve_data = df_group[col].to_numpy()
26 | curve_variances = df_group[d_col].to_numpy()**2
27 | filtered_data, low_limit, high_limit = sigmaclip(curve_data, low=2, high=2)
28 | filtered_indices = np.logical_and(curve_data >= low_limit, curve_data <= high_limit)
29 | filtered_variances = curve_variances[filtered_indices]
30 | if len(filtered_variances) > 0 and np.all(filtered_variances > 0):
31 | weights = 1. / filtered_variances
32 | weighted_mean = np.average(filtered_data, weights=weights)
33 | weighted_variance = np.average((filtered_data - weighted_mean)**2, weights=weights)
34 | weighted_std_deviation = np.sqrt(weighted_variance)
35 | d_flux = np.sqrt(1. / np.sum(weights))
36 | count_flux = len(filtered_variances)
37 | else:
38 | weighted_mean = float('nan')
39 | weighted_std_deviation = float('nan')
40 | d_flux = float('inf')
41 | count_flux = 0
42 | this_epoch_group[f'{ps}_flux'] = weighted_mean
43 | this_epoch_group[f'{ps}_d_flux'] = d_flux
44 | this_epoch_group[f'{ps}_scatter_flux'] = weighted_std_deviation
45 | this_epoch_group[f'{ps}_count_flux'] = count_flux
46 | return this_epoch_group
47 |
48 | start_idx = 0
49 | for i in range(1, len(df_sorted)):
50 | if df_sorted.iloc[i]['mjd'] - df_sorted.iloc[i - 1]['mjd'] > threshold:
51 | df_group = df_sorted.iloc[start_idx:i]
52 | grouped_results.append(process_group(df_group))
53 | start_idx = i
54 | if start_idx < len(df_sorted):
55 | df_group = df_sorted.iloc[start_idx:]
56 | grouped_results.append(process_group(df_group))
57 | return pd.DataFrame(grouped_results)
58 |
59 |
60 | def convert_flux_to_magnitude(df):
61 | """
62 | Converts fluxes and flux errors/scatters in a DataFrame to magnitudes,
63 | assuming a 'zeropoint' column (zero if absent).
64 | This is very tailored to the output of the group_observations function.
65 |
66 | Parameters:
67 | - df: pandas DataFrame containing fluxes and flux errors.
68 | The DataFrame should contain columns named "{ps}_flux", "{ps}_d_flux", and optionally "{ps}_scatter_flux"
69 | for each source {ps}, and optionally a "zeropoint" column.
70 |
71 | Returns:
72 | - A new pandas DataFrame with magnitudes and magnitude errors/scatters.
73 | For each source {ps}, the following columns are added:
74 | - "{ps}_mag": Nominal magnitude
75 | - "{ps}_d_mag_down": Lower magnitude uncertainty
76 | - "{ps}_d_mag_up": Upper magnitude uncertainty
77 | - "{ps}_scatter_mag_down": Lower scatter magnitude (if applicable)
78 | - "{ps}_scatter_mag_up": Upper scatter magnitude (if applicable)
79 | """
80 | df = deepcopy(df)
81 |
82 | # check zeropoint present
83 | if 'zeropoint' not in df.columns:
84 | warnings.warn('Zeropoint column missing. Using a zeropoint of 0.', RuntimeWarning)
85 | df['zeropoint_used_in_conversion'] = 0.
86 | df['zeropoint'] = 0.
87 |
88 | # group relevant columns
89 | aux_columns = [c for c in df.columns if '_scatter_flux' in c or '_d_flux' in c or '_count' in c]
90 | flux_columns = [c for c in df.columns if '_flux' in c and c not in aux_columns]
91 |
92 | zeropoint = df['zeropoint']
93 |
94 | # some utility function to compute magnitude uncertainties from flux uncertainties
95 | def compute_mags_asymmetric_errors(flux_values, flux_errors, zp, source, prefix):
96 | """
97 | Helper function to compute magnitudes and asymmetric errors.
98 |
99 | Parameters:
100 |         - flux_values: Flux values.
101 |         - flux_errors: Flux uncertainties.
102 | - zp: Zeropoint values.
103 | - source: name of the source at hand
104 | - prefix: either 'd' or 'scatter' depending on the uncertainty type
105 |
106 | Returns:
107 | - Tuple of Series: (mag, sigma_down, sigma_up)
108 | """
109 | # safety
110 | flux_values = np.array(flux_values)
111 | flux_errors = np.array(flux_errors)
112 | # zp never goes through the jax machinery, should be a numpy array.
113 |
114 | # nominal magnitude
115 | mag = -2.5 * np.log10(flux_values) + zp
116 | # upper and lower fluxes
117 | flux_up = flux_values + flux_errors
118 | flux_down = flux_values - flux_errors
119 | # start: NaNs
120 | mag_down = np.full_like(mag, np.nan, dtype=np.float64)
121 | mag_up = np.full_like(mag, np.nan, dtype=np.float64)
122 | # validity masks
123 | valid_plus = flux_up > 0
124 | valid_minus = flux_down > 0
125 | # calc magnitudes where valid
126 | mag_down[valid_plus] = -2.5 * np.log10(flux_up[valid_plus]) + zp[valid_plus]
127 | mag_up[valid_minus] = -2.5 * np.log10(flux_down[valid_minus]) + zp[valid_minus]
128 | # asymmetric errors
129 | sigma_down = mag - mag_down
130 | sigma_up = mag_up - mag
131 | # assign
132 | df[f'{source}_mag'] = mag
133 | df[f'{source}_{prefix}_mag_down'] = sigma_down
134 | df[f'{source}_{prefix}_mag_up'] = sigma_up
135 | # linearized mag uncertainty for comparison
136 | df[f'{source}_{prefix}_mag'] = 2.5 / np.log(10) * np.abs(flux_errors / flux_values)
137 |
138 | for error_type in ('d', 'scatter'):
139 | for flux_col in flux_columns:
140 | ps = flux_col.split('_')[0]
141 | error_col = f'{ps}_{error_type}_flux'
142 | if error_col in df.columns:
143 | compute_mags_asymmetric_errors(flux_values=df[flux_col],
144 | flux_errors=df[error_col],
145 | zp=zeropoint,
146 | source=ps,
147 | prefix=error_type)
148 |
149 | return df
150 |
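A hedged end-to-end sketch of the two helpers above, on made-up photometry of a single point source 'A'; the column names follow the conventions documented in the docstrings and the zeropoint value is purely illustrative:

    import pandas as pd
    from lightcurver.utilities.lightcurves_postprocessing import group_observations, convert_flux_to_magnitude

    df = pd.DataFrame({
        'mjd':       [59000.01, 59000.03, 59003.02],
        'A_flux':    [120.0, 118.0, 95.0],
        'A_d_flux':  [4.0, 4.0, 5.0],
        'zeropoint': [27.3, 27.3, 27.3],
    })
    nightly = group_observations(df, threshold=0.8)  # two epochs: the pair near mjd 59000, the lone point at 59003
    mags = convert_flux_to_magnitude(nightly)
    print(mags[['mjd', 'A_mag', 'A_d_mag_down', 'A_d_mag_up']])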
--------------------------------------------------------------------------------
/lightcurver/utilities/star_naming.py:
--------------------------------------------------------------------------------
1 | import string
2 | import itertools
3 |
4 |
5 | def generate_star_names(n):
6 | """
7 |     Provides a nicer way than the Gaia ID to refer to our stars within a given combined footprint / selection of
8 |     stars: each star is assigned a letter or a combination of letters.
9 | Args:
10 | n: int, number of labels to generate
11 |
12 | Returns:
13 | list of strings, e.g. ['a', 'b', 'c', 'd', 'e', 'f', ...]
14 | """
15 | def all_strings():
16 | size = 1
17 | while True:
18 | for s in itertools.product(string.ascii_lowercase, repeat=size):
19 | yield "".join(s)
20 | size += 1
21 |
22 | return [name for _, name in zip(range(n), all_strings())]
23 |
24 |
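For illustration, the naming continues with two-letter combinations once the alphabet is exhausted:

    from lightcurver.utilities.star_naming import generate_star_names

    print(generate_star_names(28))  # ['a', 'b', ..., 'z', 'aa', 'ab']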
--------------------------------------------------------------------------------
/lightcurver/utilities/starred_utilities.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from copy import deepcopy
3 |
4 | from starred.deconvolution.loss import Loss
5 | from starred.optim.optimization import Optimizer
6 | from starred.deconvolution.parameters import ParametersDeconv
7 | from starred.optim.inference_base import FisherCovariance
8 |
9 |
10 | def get_flux_uncertainties(kwargs, kwargs_up, kwargs_down, data, noisemap, model):
11 | """
12 |     Assuming the other parameters are well constrained, this estimates the uncertainty on the fluxes.
13 | Args:
14 | kwargs: optimized starred parameters
15 | kwargs_up: upper bounds
16 | kwargs_down: lower bounds
17 | data: 3d array, the data
18 | noisemap: 3d array, same shape as data
19 | model: the model instance
20 | Returns:
21 | an array, the same shape as kwargs['kwargs_analytic']['a']: one uncertainty per flux.
22 |
23 | """
24 | kwargs_fixed = deepcopy(kwargs)
25 | del kwargs_fixed['kwargs_analytic']['a']
26 |
27 | parameters = ParametersDeconv(kwargs_init=kwargs,
28 | kwargs_fixed=kwargs_fixed,
29 | kwargs_up=kwargs_up,
30 | kwargs_down=kwargs_down)
31 | loss = Loss(data, model, parameters, noisemap ** 2,
32 | regularization_terms='l1_starlet')
33 | optim = Optimizer(loss, parameters, method='l-bfgs-b')
34 | optim.minimize(maxiter=10)
35 |
36 | fish = FisherCovariance(parameters, optim, diagonal_only=True)
37 | fish.compute_fisher_information()
38 | k_errs = fish.get_kwargs_sigma()
39 | return np.array(k_errs['kwargs_analytic']['a'])
40 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools>=67.6.1", "wheel"]
3 | build-backend = "setuptools.build_meta"
4 |
5 | [project]
6 | name = "lightcurver"
7 | version = "1.2.3"
8 | authors = [{ name = "Frédéric Dux", email = "duxfrederic@gmail.com" }]
9 | description = "A thorough structure for precise photometry and forward modelling of time series of wide field images."
10 | readme = "README.md"
11 | keywords = ["photometry", "astronomy", "forward modelling", "PSF", "pipeline"]
12 | classifiers = []
13 | dependencies = [
14 | "pyyaml",
15 | "matplotlib",
16 | "astropy",
17 | "numpy < 2.0.0",
18 | "sep",
19 | "scipy",
20 | "ephem",
21 | "pandas",
22 | "shapely",
23 | "astroquery",
24 | "h5py",
25 | "astroscrappy",
26 | "starred-astro >= 1.4.7",
27 | "pytest",
28 |     "photutils",
29 |     "astroalign",
30 |     "ruamel.yaml",
31 |     "widefield_plate_solver",
32 |     "ccdproc"
33 | ]
34 | 
35 | [project.urls]
36 | homepage = "https://duxfrederic.github.io/lightcurver/"
37 | repository = "https://github.com/duxfrederic/lightcurver"
38 | 
39 | [project.optional-dependencies]
40 | test = [
41 |     "pytest"
42 | ]
43 | 
44 | [project.scripts]
45 | lc_init = "lightcurver.scripts.initialize:initialize"
46 | lc_run = "lightcurver.scripts.run:run"
47 | 
48 | [tool.setuptools]
49 | packages.find = {where = ["."], include = ["lightcurver*"]}
50 | package-dir = {"" = "."}
51 | package-data = {"lightcurver" = ["pipeline/*.yaml", "pipeline/example_config_file/config.yaml", "plotting/plot_curves_template.html"]}
52 | 
53 | 
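The two console scripts declared in [project.scripts] map to plain functions, so a programmatic equivalent is possible; this is only a sketch, and actually running these likely requires a prepared working directory and configuration (e.g. the LIGHTCURVER_CONFIG environment variable used in the pipeline test below):

    from lightcurver.scripts.initialize import initialize
    from lightcurver.scripts.run import run

    # initialize()  # equivalent to the `lc_init` console script
    # run()         # equivalent to the `lc_run` console script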
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/tests/__init__.py
--------------------------------------------------------------------------------
/tests/test_database_queries/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/tests/test_database_queries/__init__.py
--------------------------------------------------------------------------------
/tests/test_database_queries/test_queries.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from unittest.mock import patch
3 | import sqlite3
4 | import tempfile
5 | import shutil
6 | from lightcurver.structure.database import initialize_database
7 | from lightcurver.processes.normalization_calculation import update_normalization_coefficients
8 |
9 |
10 | class TestNormalizationCoefficients(unittest.TestCase):
11 | def setUp(self):
12 | self.temp_dir = tempfile.mkdtemp()
13 | self.db_path = f"{self.temp_dir}/test.db"
14 |
15 | # initialize the sqlite3 at the temporary path
16 | initialize_database(self.db_path)
17 |
18 | # patch get_user_config to return the temporary database path
19 | self.patcher = patch('lightcurver.processes.normalization_calculation.get_user_config')
20 | self.mock_get_user_config = self.patcher.start()
21 | self.mock_get_user_config.return_value = {'database_path': self.db_path}
22 |
23 | def tearDown(self):
24 | self.patcher.stop()
25 | shutil.rmtree(self.temp_dir)
26 |
27 | def test_insert_normalization_coefficients(self):
28 | norm_data = [(1, -1, 1.0, 0.05), (2, -2, 0.9, 0.07)]
29 | update_normalization_coefficients(norm_data)
30 |
31 | # check data inserted correctly
32 | with sqlite3.connect(self.db_path) as conn:
33 | cursor = conn.cursor()
34 | cursor.execute("SELECT * FROM normalization_coefficients")
35 | results = cursor.fetchall()
36 | self.assertEqual(len(results), 2)
37 | self.assertIn((1, -1, 1.0, 0.05), results)
38 | self.assertIn((2, -2, 0.9, 0.07), results)
39 |
40 | # now we're modifying an entry
41 | norm_data = [(1, -1, 1.1, 0.15)]
42 | update_normalization_coefficients(norm_data)
43 | # check new values
44 | with sqlite3.connect(self.db_path) as conn:
45 | cursor = conn.cursor()
46 | cursor.execute("SELECT * FROM normalization_coefficients")
47 | results = cursor.fetchall()
48 | self.assertEqual(len(results), 2)
49 | self.assertIn((1, -1, 1.1, 0.15), results)
50 | self.assertIn((2, -2, 0.9, 0.07), results)
51 |
52 |
53 | if __name__ == '__main__':
54 | unittest.main()
55 |
--------------------------------------------------------------------------------
/tests/test_entire_pipeline/raw_frames/OMEGA.2021-08-22T07:25:56.281_13OFCS.fits:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/tests/test_entire_pipeline/raw_frames/OMEGA.2021-08-22T07:25:56.281_13OFCS.fits
--------------------------------------------------------------------------------
/tests/test_entire_pipeline/raw_frames/OMEGA.2021-08-22T07:32:00.792_13OFCS.fits:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/tests/test_entire_pipeline/raw_frames/OMEGA.2021-08-22T07:32:00.792_13OFCS.fits
--------------------------------------------------------------------------------
/tests/test_entire_pipeline/test_run_pipeline_example_config.py:
--------------------------------------------------------------------------------
1 | import os
2 | import tempfile
3 | import shutil
4 | import yaml
5 | import sqlite3
6 | from lightcurver.pipeline.workflow_manager import WorkflowManager
7 | from lightcurver.pipeline.task_wrappers import source_extract_all_images
8 |
9 |
10 | def database_checks(db_path):
11 | """
12 | Performs various checks on the database, including chi2 values and data integrity.
13 | """
14 | conn = sqlite3.connect(db_path)
15 | cursor = conn.cursor()
16 |
17 | # chi2 checks
18 | cursor.execute("SELECT COUNT(*) FROM PSFs WHERE chi2 >= 2;")
19 | assert cursor.fetchone()[0] == 0, "There are PSF models with chi2 >= 2."
20 | cursor.execute("SELECT COUNT(*) FROM star_flux_in_frame WHERE chi2 >= 2;")
21 |     assert cursor.fetchone()[0] == 0, "There are star_flux_in_frame values with chi2 >= 2."
22 |
23 | # count check
24 | cursor.execute("SELECT COUNT(*) FROM PSFs;")
25 | n_psf = cursor.fetchone()[0]
26 |
27 | cursor.execute("SELECT COUNT(*) FROM frames;")
28 | n_frames = cursor.fetchone()[0]
29 |
30 | cursor.execute("SELECT COUNT(*) FROM normalization_coefficients;")
31 | n_coeffs = cursor.fetchone()[0]
32 |
33 | assert n_psf == n_frames == n_coeffs, f"not same number of coeffs, psfs, frames: {n_coeffs, n_psf, n_frames}"
34 |
35 | conn.close()
36 |
37 |
38 | def test_run_workflow():
39 | current_dir = os.path.dirname(os.path.abspath(__file__))
40 |
41 | # paths relative to the repository root
42 | config_path = os.path.join(current_dir, '..', '..', 'lightcurver', 'pipeline',
43 | 'example_config_file', 'config.yaml')
44 | header_function_path = os.path.join(current_dir, '..', '..', 'docs',
45 | 'example_header_parser_functions', 'parse_omegacam_header.py')
46 | data_path = os.path.join(current_dir, 'raw_frames')
47 |
48 | # temp dir setup
49 | temp_dir = tempfile.mkdtemp(prefix='lightcurver_test_')
50 |
51 | # modify the configuration: has to point to tempdir
52 | with open(config_path, 'r') as file:
53 | config = yaml.safe_load(file)
54 |
55 | config['workdir'] = temp_dir
56 | config['raw_dirs'] = [data_path]
57 | config['already_plate_solved'] = 1
58 | config['ROI_disk_radius_arcseconds'] = 100
59 | config['stars_to_use_psf'] = config['stars_to_use_norm'] = 2
60 | config['stamp_size_stars'] = config['stamp_size_ROI'] = 24
61 | config['multiprocessing_cpu_count'] = 2 # what GitHub gives us I think
62 | config['fix_point_source_astrometry'] = 2.0 # testing the gaussian priors.
63 |
64 | # save the modified configuration to a temporary file
65 | temp_config_path = os.path.join(temp_dir, 'config_temp.yaml')
66 | with open(temp_config_path, 'w') as file:
67 | yaml.safe_dump(config, file)
68 |
69 | # copy the header function
70 | header_parser_dir = os.path.join(temp_dir, 'header_parser')
71 | os.mkdir(header_parser_dir)
72 | shutil.copy2(header_function_path, os.path.join(header_parser_dir, 'parse_header.py'))
73 |
74 | os.environ['LIGHTCURVER_CONFIG'] = temp_config_path
75 |
76 | # first run the importation
77 | wf_manager = WorkflowManager()
78 | wf_manager.run(stop_step='query_gaia_for_stars')
79 |
80 | # for this test, we'll also pretend that one of the images does not have an astrometric solution.
81 | # so, set the mode we want to test in the config:
82 | with open(temp_config_path, 'r') as file:
83 | config = yaml.safe_load(file)
84 | # this is the mode we are testing:
85 | config['plate_solving_strategy'] = 'adapt_wcs_from_reference'
86 | # because we do not want to depend on astrometry.net for this test.
87 | config['already_plate_solved'] = 0
88 | with open(temp_config_path, 'w') as file:
89 | yaml.safe_dump(config, file)
90 | # ready to run:
91 | wf_manager = WorkflowManager()
92 | wf_manager.run(stop_step='plate_solving')
93 | # just before the plate solving, let us indicate that one of the images has an astrometric solution
94 | db_path = os.path.join(temp_dir, 'database.sqlite3')
95 | with sqlite3.connect(db_path) as conn:
96 | cursor = conn.cursor()
97 | cursor.execute("update frames set plate_solved = 0, attempted_plate_solve = 0 where id = 1")
98 | cursor.execute("update frames set plate_solved = 1, attempted_plate_solve = 1 where id = 2")
99 | # and now run the whole pipeline.
100 | wf_manager.run()
101 |
102 | # some basic database checks: did the psf fits go well? same for photometry.
103 | # do we indeed have 2 psfs?
104 | # do we have 2 normalization coefficients?
105 | database_checks(db_path)
106 |
107 | # now, the user might want to redo the source extraction should the initial one not have
108 | # given the expected result. Test that it works without error here:
109 | source_extract_all_images()
110 |
111 |     # now, redo everything but allow the PSF to be distorted across the field. Namely, we restart from the PSF
112 |     # step by removing all PSFs from the database.
113 | with sqlite3.connect(db_path) as conn:
114 | cursor = conn.cursor()
115 | cursor.execute("DELETE FROM PSFs")
116 |
117 | # set distortion to true, and while we're at it a different calibration (using pan-starrs)
118 | with open(temp_config_path, 'r') as file:
119 | config = yaml.safe_load(file)
120 | config['field_distortion'] = True
121 | config['photometric_band'] = 'r_panstarrs'
122 |     # not redoing the ROI modelling, to speed things up
123 | config['do_ROI_model'] = False
124 | with open(temp_config_path, 'w') as file:
125 | yaml.safe_dump(config, file)
126 |
127 | # and run again without error!
128 | wf_manager.run(start_step="psf_modeling")
129 |
130 |
131 |
132 |
133 |
--------------------------------------------------------------------------------
/tests/test_processes/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/tests/test_processes/__init__.py
--------------------------------------------------------------------------------
/tests/test_processes/test_background_estimation.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import numpy as np
3 | from lightcurver.processes.background_estimation import subtract_background
4 |
5 |
6 | class TestSubtractBackground(unittest.TestCase):
7 |
8 | def test_subtract_background(self):
9 | mean = 100
10 | sigma = 5
11 | test_image = np.random.normal(mean, sigma, size=(100, 100))
12 |
13 | image_subtracted, background = subtract_background(test_image)
14 |
15 | self.assertEqual(image_subtracted.shape, test_image.shape)
16 |
17 | self.assertTrue(np.isclose(background.globalback, mean, rtol=1e-1))
18 | self.assertTrue(np.isclose(background.globalrms, sigma, rtol=1e-1))
19 |
20 |
21 | if __name__ == '__main__':
22 | unittest.main()
23 |
--------------------------------------------------------------------------------
/tests/test_processes/test_cutout_making.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import numpy as np
3 | from astropy.io import fits
4 | from astropy.coordinates import SkyCoord
5 |
6 | from lightcurver.processes.cutout_making import extract_stamp
7 |
8 |
9 | class TestExtractStamp(unittest.TestCase):
10 |
11 | def setUp(self):
12 | self.data = np.random.rand(100, 100)
13 | self.header = fits.Header()
14 | self.header['CRVAL1'] = 10.0
15 | self.header['CRVAL2'] = 20.0
16 | self.header['CRPIX1'] = 50.0
17 | self.header['CRPIX2'] = 50.0
18 | self.header['CD1_1'] = 0.1
19 | self.header['CD1_2'] = 0.0
20 | self.header['CD2_1'] = 0.0
21 | self.header['CD2_2'] = 0.1
22 | self.header['CTYPE1'] = 'RA---TAN'
23 | self.header['CTYPE2'] = 'DEC--TAN'
24 |
25 | # Sample parameters
26 | self.exptime = 1.0
27 | self.sky_coord = SkyCoord(10.0, 20.0, unit='deg')
28 | self.cutout_size = 10
29 |
30 | def test_extract_stamp(self):
31 | # check the function can be called
32 | cutout, noisemap, wcs_header_string, position = extract_stamp(
33 | self.data, self.header, self.exptime, self.sky_coord, self.cutout_size,
34 | background_rms_electron_per_second=0.5)
35 |
36 |         # check the output shapes
37 | self.assertEqual(cutout.shape, (self.cutout_size, self.cutout_size))
38 | self.assertEqual(noisemap.shape, (self.cutout_size, self.cutout_size))
39 |
40 | # is the wcs propagated?
41 | self.assertTrue(wcs_header_string)
42 |
43 |
44 | if __name__ == '__main__':
45 | unittest.main()
46 |
--------------------------------------------------------------------------------
/tests/test_processes/test_frame_characterization.py:
--------------------------------------------------------------------------------
1 | # tests/test_processes/test_frame_characterization.py
2 | import unittest
3 | from lightcurver.processes.frame_characterization import ephemeris
4 |
5 |
6 | class EphemerisTestCase(unittest.TestCase):
7 | def test_ephemeris(self):
8 | # somehow realistic data
9 | mjd = 60365.13
10 | ra, dec = 141.23246, 2.32358
11 | altitude = 2400.0
12 | latitude = -29.256
13 | longitude = -70.738
14 |
15 | results = ephemeris(mjd, ra, dec, longitude, latitude, altitude)
16 |
17 | # Verify the structure and some key aspects of the results
18 | self.assertIsInstance(results, dict)
19 | self.assertIn('weird_astro_conditions', results)
20 | self.assertIn('comments', results)
21 | self.assertIn('target_info', results)
22 | self.assertIn('moon_info', results)
23 | self.assertIn('sun_info', results)
24 |
25 |
26 | if __name__ == '__main__':
27 | unittest.main()
28 |
--------------------------------------------------------------------------------
/tests/test_processes/test_star_extraction.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import numpy as np
3 | from astropy.table import Table
4 |
5 | from lightcurver.processes.star_extraction import extract_stars
6 |
7 |
8 | class TestExtractStars(unittest.TestCase):
9 | """
10 | This is more of a test to make sure the API of sep hasn't changed.
11 | """
12 |
13 | def setUp(self):
14 | # Create a sample background subtracted image
15 | self.image_background_subtracted = np.random.rand(100, 100)
16 | self.background_rms = 1.0
17 | self.detection_threshold = 3
18 | self.min_area = 10
19 |
20 | def test_extract_stars(self):
21 | sources = extract_stars(self.image_background_subtracted, self.background_rms,
22 | self.detection_threshold, self.min_area)
23 |
24 | # check if the output is an astropy Table
25 | self.assertIsInstance(sources, Table)
26 |
27 | # check if the required columns are present
28 | self.assertIn('xcentroid', sources.colnames)
29 | self.assertIn('ycentroid', sources.colnames)
30 | self.assertIn('flag', sources.colnames)
31 | self.assertIn('a', sources.colnames)
32 | self.assertIn('b', sources.colnames)
33 | self.assertIn('flux', sources.colnames)
34 | self.assertIn('npix', sources.colnames)
35 |
36 |         # check that no sources are detected in this pure-noise image
37 | self.assertEqual(len(sources), 0, "There should be zero sources detected here.")
38 |
39 |
40 | if __name__ == '__main__':
41 | unittest.main()
42 |
--------------------------------------------------------------------------------
/tests/test_products_handling/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/tests/test_products_handling/__init__.py
--------------------------------------------------------------------------------
/tests/test_products_handling/test_grouping.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pandas as pd
3 |
4 | from lightcurver.utilities.lightcurves_postprocessing import group_observations
5 |
6 |
7 | def test_grouping_multiple_observations():
8 | # two groups: first two observations close in time,
9 | # next two observations form the second group.
10 | df = pd.DataFrame({
11 | 'mjd': [1.0, 1.2, 2.5, 2.6],
12 | 'A_flux': [10.0, 12.0, 20.0, 22.0],
13 | 'A_d_flux': [1.0, 1.0, 2.0, 2.0],
14 | 'other': [100, 200, 300, 400]
15 | })
16 | result = group_observations(df, threshold=0.8)
17 | # expect 2 groups
18 | assert len(result) == 2, "expected 2 groups based on time differences."
19 |
20 | # for group 1: indices 0 and 1, weights are 1/1^2 = 1 so weighted average = (10+12)/2 = 11.
21 | np.testing.assert_almost_equal(result.loc[0, 'A_flux'], 11.0, decimal=3)
22 | # for group 2: indices 2 and 3, weighted average = (20+22)/2 = 21.
23 | np.testing.assert_almost_equal(result.loc[1, 'A_flux'], 21.0, decimal=3)
24 |
25 | # optional column "other" is averaged as well.
26 | np.testing.assert_almost_equal(result.loc[0, 'other'], 150.0, decimal=3)
27 | np.testing.assert_almost_equal(result.loc[1, 'other'], 350.0, decimal=3)
28 |
29 |
30 | def test_single_observation_group():
31 |     # a single observation forms a group of its own; sigma clipping keeps the lone point, so flux and uncertainty pass through.
32 | df = pd.DataFrame({
33 | 'mjd': [1.0],
34 | 'A_flux': [10.0],
35 | 'A_d_flux': [1.0]
36 | })
37 | result = group_observations(df, threshold=0.8)
38 | assert len(result) == 1, "expected a single group."
39 |     # with one observation, sigma clipping keeps the single value; hence, the weighted branch applies.
40 | np.testing.assert_almost_equal(result.loc[0, 'A_flux'], 10.0, decimal=3)
41 | np.testing.assert_almost_equal(result.loc[0, 'A_d_flux'], 1.0, decimal=3)
42 | assert result.loc[0, 'A_count_flux'] == 1, "expected count of 1 for a single observation group."
43 |
44 |
45 | def test_last_group_inclusion():
46 | # test that the last observation is correctly grouped even if it stands alone.
47 | df = pd.DataFrame({
48 | 'mjd': [1.0, 1.2, 3.0],
49 | 'A_flux': [10.0, 12.0, 20.0],
50 | 'A_d_flux': [1.0, 1.0, 2.0]
51 | })
52 | result = group_observations(df, threshold=0.8)
53 | # expected groups: group 1 from indices 0,1 and group 2 from index 2.
54 | assert len(result) == 2, "expected 2 groups when the last observation is isolated."
55 | # group 1 should aggregate correctly.
56 | np.testing.assert_almost_equal(result.loc[0, 'A_flux'], 11.0, decimal=5)
57 |     # group 2 is a single observation; sigma clipping keeps it, so its values pass through.
58 | np.testing.assert_almost_equal(result.loc[1, 'A_flux'], 20.0, decimal=3)
59 | np.testing.assert_almost_equal(result.loc[1, 'mjd'], 3.0, decimal=5)
60 |
61 |
--------------------------------------------------------------------------------
/tests/test_products_handling/test_magnitude_errors.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import pandas as pd
3 | import numpy as np
4 |
5 | from lightcurver.utilities.lightcurves_postprocessing import convert_flux_to_magnitude
6 |
7 |
8 | class TestConvertFluxToMagnitude(unittest.TestCase):
9 | def test_convert_flux_to_magnitude(self):
10 |         # sample data; the last point's error bar extends to negative flux
11 | data = {
12 | 'A_flux': [100, 50, 10, 5],
13 | 'A_d_flux': [10, 5, 2, 6],
14 | 'A_scatter_flux': [8, 4, 1.5, 3],
15 | 'zeropoint': [25, 25, 25, 25]
16 | }
17 | df = pd.DataFrame(data)
18 |
19 | # expected output:
20 | expected_data = {
21 | 'A_flux': [100, 50, 10, 5],
22 | 'A_d_flux': [10, 5, 2, 6],
23 | 'A_scatter_flux': [8, 4, 1.5, 3],
24 | 'zeropoint': [25, 25, 25, 25],
25 | 'A_mag': [20.0, 20.7526, 22.5, 23.253],
26 | 'A_d_mag_down': [0.1035, 0.1035, 0.1980, 0.856],
27 | 'A_d_mag_up': [0.1144, 0.1142, 0.2423, np.nan],
28 | 'A_scatter_mag_down': [0.0835, 0.0835, 0.152, 0.510],
29 | 'A_scatter_mag_up': [0.090, 0.090, 0.176, 0.995]
30 | }
31 | expected_df = pd.DataFrame(expected_data)
32 |
33 | result_df = convert_flux_to_magnitude(df)
34 |
35 | tol = 1e-2
36 |
37 | # Test 'A_mag'
38 | for i in range(len(expected_df)):
39 | expected_mag = expected_df.at[i, 'A_mag']
40 | result_mag = result_df.at[i, 'A_mag']
41 | if np.isnan(expected_mag):
42 | self.assertTrue(np.isnan(result_mag), f"Row {i} A_mag should be NaN")
43 | else:
44 | self.assertAlmostEqual(result_mag, expected_mag, delta=tol, msg=f"Row {i} A_mag mismatch")
45 |
46 | # Test 'A_d_mag_down'
47 | for i in range(len(expected_df)):
48 | expected_d_down = expected_df.at[i, 'A_d_mag_down']
49 | result_d_down = result_df.at[i, 'A_d_mag_down']
50 | if np.isnan(expected_d_down):
51 | self.assertTrue(np.isnan(result_d_down), f"Row {i} A_d_mag_down should be NaN")
52 | else:
53 | self.assertAlmostEqual(result_d_down, expected_d_down, delta=tol,
54 | msg=f"Row {i} A_d_mag_down mismatch")
55 |
56 | # Test 'A_d_mag_up'
57 | for i in range(len(expected_df)):
58 | expected_d_up = expected_df.at[i, 'A_d_mag_up']
59 | result_d_up = result_df.at[i, 'A_d_mag_up']
60 | if np.isnan(expected_d_up):
61 | self.assertTrue(np.isnan(result_d_up), f"Row {i} A_d_mag_up should be NaN")
62 | else:
63 | self.assertAlmostEqual(result_d_up, expected_d_up, delta=tol,
64 | msg=f"Row {i} A_d_mag_up mismatch")
65 |
66 | for i in range(len(expected_df)):
67 | expected_scatter_down = expected_df.at[i, 'A_scatter_mag_down']
68 | result_scatter_down = result_df.at[i, 'A_scatter_mag_down']
69 | if np.isnan(expected_scatter_down):
70 | self.assertTrue(np.isnan(result_scatter_down), f"Row {i} A_scatter_mag_down should be NaN")
71 | else:
72 | self.assertAlmostEqual(result_scatter_down, expected_scatter_down, delta=tol,
73 | msg=f"Row {i} A_scatter_mag_down mismatch")
74 |
75 | expected_scatter_up = expected_df.at[i, 'A_scatter_mag_up']
76 | result_scatter_up = result_df.at[i, 'A_scatter_mag_up']
77 | if np.isnan(expected_scatter_up):
78 | self.assertTrue(np.isnan(result_scatter_up), f"Row {i} A_scatter_mag_up should be NaN")
79 | else:
80 | self.assertAlmostEqual(result_scatter_up, expected_scatter_up, delta=tol,
81 | msg=f"Row {i} A_scatter_mag_up mismatch")
82 |
83 |
84 | if __name__ == '__main__':
85 | unittest.main(argv=[''], exit=False)
86 |
--------------------------------------------------------------------------------
/tests/test_starred_calls/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/duxfrederic/lightcurver/aef929e674d6d0744e01949fc6e743676f1e4b9d/tests/test_starred_calls/__init__.py
--------------------------------------------------------------------------------
/tests/test_starred_calls/test_starred_calls.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | import numpy as np
3 | from starred.procedures.psf_routines import build_psf
4 |
5 | from lightcurver.processes.star_photometry import do_one_star_forward_modelling
6 |
7 |
8 | class TestStarredCalls(unittest.TestCase):
9 |
10 | def setUp(self):
11 | x, y = np.meshgrid(np.arange(-8, 8), np.arange(-8, 8))
12 | gauss = np.exp(-0.1 * (x**2 + y**2))
13 | self.data = 0.1*np.random.rand(5, 16, 16) + np.repeat(gauss[None, :, :], repeats=5, axis=0)
14 | self.noisemap = 0.1*np.ones((5, 16, 16))
15 | self.psf = np.repeat(gauss[None, :, :], repeats=5, axis=0)
16 |
17 | self.subsampling_factor = 1
18 | self.n_iter = 50
19 |
20 | def test_do_one_star_forward_modelling(self):
21 | # call
22 | result = do_one_star_forward_modelling(self.data, self.noisemap, self.psf, self.subsampling_factor, self.n_iter)
23 |
24 | self.assertIsInstance(result, dict)
25 |
26 | # check if expected keys are present there
27 | self.assertIn('scale', result)
28 | self.assertIn('kwargs_final', result)
29 | self.assertIn('fluxes', result)
30 | self.assertIn('fluxes_uncertainties', result)
31 | self.assertIn('chi2', result)
32 | self.assertIn('chi2_per_frame', result)
33 | self.assertIn('loss_curve', result)
34 | self.assertIn('residuals', result)
35 |
36 | # check if 'scale' is a positive float
37 | self.assertIsInstance(result['scale'], float)
38 | self.assertGreater(result['scale'], 0)
39 |
40 | # check if 'kwargs_final' and 'kwargs_uncertainties' are dictionaries
41 | self.assertIsInstance(result['kwargs_final'], dict)
42 |
43 | # check if 'fluxes' and 'fluxes_uncertainties' are 1D numpy arrays
44 | self.assertIsInstance(result['fluxes'], np.ndarray)
45 | self.assertIsInstance(result['fluxes_uncertainties'], np.ndarray)
46 | self.assertEqual(result['fluxes'].ndim, 1)
47 | self.assertEqual(result['fluxes_uncertainties'].ndim, 1)
48 | self.assertEqual(result['fluxes'].size, result['fluxes_uncertainties'].size)
49 | self.assertEqual(result['fluxes'].size, self.data.shape[0])
50 |
51 | self.assertIsInstance(result['chi2'], float) # float and not jax array
52 | self.assertGreaterEqual(result['chi2'], 0)
53 |
54 | self.assertIsInstance(result['chi2_per_frame'], np.ndarray)
55 | self.assertEqual(result['chi2_per_frame'].ndim, 1)
56 | self.assertEqual(len(result['chi2_per_frame']), self.data.shape[0])
57 |
58 | self.assertEqual(len(result['loss_curve']), self.n_iter)
59 |         # if not, the starred API might have changed and now stops the optimization
60 |         # before completing all iterations. We typically do not want that, so
61 |         # go into the code and make it stick to the requested number of iterations.
62 |
63 | # check that 'residuals' is a 3D numpy array with same shape as the input data
64 | self.assertEqual(result['residuals'].shape, self.data.shape)
65 |
66 | def test_build_psf(self):
67 | result = build_psf(self.data,
68 | self.noisemap,
69 | subsampling_factor=self.subsampling_factor,
70 | n_iter_analytic=5,
71 | n_iter_adabelief=10,
72 | masks=np.ones_like(self.data),
73 | guess_method_star_position='center')
74 | self.assertIsInstance(result, dict)
75 | self.assertIn('full_psf', result)
76 | self.assertIn('adabelief_extra_fields', result)
77 | self.assertIn('loss_history', result['adabelief_extra_fields'])
78 | self.assertIn('narrow_psf', result)
79 | self.assertIn('chi2', result)
80 | self.assertIn('residuals', result)
81 |
82 |
83 | if __name__ == '__main__':
84 | unittest.main()
85 |
--------------------------------------------------------------------------------