├── .git_archival.txt
├── .gitattributes
├── .github
└── workflows
│ ├── build_container.yaml
│ ├── build_docs.yaml
│ ├── cache.yaml
│ ├── rebase_checker.yaml
│ ├── ruff.yaml
│ └── test_and_build.yaml
├── .gitignore
├── .pre-commit-config.yaml
├── Dockerfile
├── LICENSE
├── README.md
├── README_dev.md
├── batch
├── compile_prenight_metadata_cache.sh
└── run_prenight_sims.sh
├── container_environment.yaml
├── docs
├── .gitignore
├── Makefile
├── api.rst
├── archive.rst
├── conf.py
├── data-api.rst
├── data-download.rst
├── documenteer.toml
├── index.rst
├── installation.rst
├── introduction.rst
├── maf-api-batches.rst
├── maf-api-db.rst
├── maf-api-maf-contrib.rst
├── maf-api-maps.rst
├── maf-api-metricbundles.rst
├── maf-api-metrics.rst
├── maf-api-plots.rst
├── maf-api-run-comparison.rst
├── maf-api-slicers.rst
├── maf-api-stackers.rst
├── maf-api-utils.rst
├── maf-api.rst
├── maf.rst
├── metric_list.py
├── moving-objects-api.rst
├── moving-objects.rst
├── phot-utils-api.rst
├── phot-utils.rst
├── satellite-constellations-api.rst
├── satellite-constellations.rst
├── selfcal-api.rst
├── selfcal.rst
├── sim-archive-api.rst
├── skybrightness-api.rst
├── skybrightness.rst
└── user-guide.rst
├── example_deployment.yaml
├── optional-requirements.txt
├── pyproject.toml
├── requirements.txt
├── rubin_sim
├── __init__.py
├── data
│ ├── __init__.py
│ └── rs_download_data.py
├── maf
│ ├── __init__.py
│ ├── batches
│ │ ├── __init__.py
│ │ ├── altaz_batch.py
│ │ ├── col_map_dict.py
│ │ ├── common.py
│ │ ├── ddf_batch.py
│ │ ├── filterchange_batch.py
│ │ ├── glance_batch.py
│ │ ├── hourglass_batch.py
│ │ ├── info_batch.py
│ │ ├── metadata_batch.py
│ │ ├── moving_objects_batch.py
│ │ ├── openshutter_batch.py
│ │ ├── radar_limited.py
│ │ ├── science_radar_batch.py
│ │ ├── skycoverage.py
│ │ ├── slew_batch.py
│ │ ├── srd_batch.py
│ │ ├── time_batch.py
│ │ ├── time_sci_batch.py
│ │ └── visitdepth_batch.py
│ ├── db
│ │ ├── __init__.py
│ │ ├── add_run.py
│ │ ├── results_db.py
│ │ └── tracking_db.py
│ ├── ddf_dir.py
│ ├── generate_ss.py
│ ├── glance_dir.py
│ ├── maf_contrib
│ │ ├── __init__.py
│ │ ├── calculate_lsst_field_visibility_astropy.py
│ │ ├── depth_limited_num_gal_metric.py
│ │ ├── example_new_metrics.py
│ │ ├── filter_pair_t_gaps_metric.py
│ │ ├── grb_transient_metric.py
│ │ ├── gw170817_det_metric.py
│ │ ├── intervals_between_obs_metric.py
│ │ ├── kne_metrics.py
│ │ ├── lss_metrics.py
│ │ ├── lss_obs_strategy
│ │ │ ├── __init__.py
│ │ │ ├── constants_for_pipeline.py
│ │ │ ├── galaxy_counts_metric_extended.py
│ │ │ └── galaxy_counts_with_pixel_calibration.py
│ │ ├── lv_dwarfs
│ │ │ ├── __init__.py
│ │ │ └── lv_dwarfs_metrics.py
│ │ ├── microlensing_metric.py
│ │ ├── num_obs_in_survey_time_overlap_metric.py
│ │ ├── periodic_metric.py
│ │ ├── periodic_star_metric.py
│ │ ├── periodic_star_modulation_metric.py
│ │ ├── presto_color_kne_pop_metric.py
│ │ ├── selfcal_uniformity_metric.py
│ │ ├── star_count_mass_metric.py
│ │ ├── star_count_metric.py
│ │ ├── star_counts
│ │ │ ├── __init__.py
│ │ │ ├── abs_mag.py
│ │ │ ├── coords.py
│ │ │ ├── readme.txt
│ │ │ ├── spec_type.py
│ │ │ ├── starcount.py
│ │ │ ├── starcount_bymass.py
│ │ │ └── stellardensity.py
│ │ ├── static_probes_fom_summary_metric.py
│ │ ├── tdes_pop_metric.py
│ │ ├── transient_ascii_sed_metric.py
│ │ ├── triplet_metric.py
│ │ ├── var_depth_metric.py
│ │ ├── xrb_metrics.py
│ │ └── young_stellar_objects_metric.py
│ ├── maf_night_report.py
│ ├── make_fbs_tracking_db.py
│ ├── maps
│ │ ├── __init__.py
│ │ ├── base_map.py
│ │ ├── create_gaia_density_map.py
│ │ ├── dust_map.py
│ │ ├── dust_map_3d.py
│ │ ├── ebv_3d_hp.py
│ │ ├── ebv_hp.py
│ │ ├── gal_coords_map.py
│ │ ├── galactic_plane_priority_maps.py
│ │ ├── stellar_density_map.py
│ │ └── trilegal_map.py
│ ├── metadata_dir.py
│ ├── metric_bundles
│ │ ├── __init__.py
│ │ ├── metric_bundle.py
│ │ ├── metric_bundle_group.py
│ │ └── mo_metric_bundle.py
│ ├── metrics
│ │ ├── __init__.py
│ │ ├── agn_time_lag_metric.py
│ │ ├── agnstructure.py
│ │ ├── area_summary_metrics.py
│ │ ├── base_metric.py
│ │ ├── brown_dwarf_metric.py
│ │ ├── cadence_metrics.py
│ │ ├── calibration_metrics.py
│ │ ├── chip_vendor_metric.py
│ │ ├── color_slope_metrics.py
│ │ ├── coverage_metric.py
│ │ ├── crowding_metric.py
│ │ ├── cumulative_metric.py
│ │ ├── dcr_metric.py
│ │ ├── exgal_m5.py
│ │ ├── fft_metric.py
│ │ ├── galactic_plane_metrics.py
│ │ ├── galplane_time_sampling_metrics.py
│ │ ├── hourglass_metric.py
│ │ ├── incremental_template_metric.py
│ │ ├── kuiper_metrics.py
│ │ ├── mo_metrics.py
│ │ ├── mo_summary_metrics.py
│ │ ├── night_pointing_metric.py
│ │ ├── optimal_m5_metric.py
│ │ ├── pair_metric.py
│ │ ├── periodic_detect_metric.py
│ │ ├── phase_gap_metric.py
│ │ ├── qso_number_counts_metric.py
│ │ ├── scaling_metrics.py
│ │ ├── schedview_metrics.py
│ │ ├── season_metrics.py
│ │ ├── simple_metrics.py
│ │ ├── sky_sat_metric.py
│ │ ├── sn_cadence_metric.py
│ │ ├── sn_n_sn_metric.py
│ │ ├── sn_sl_metric.py
│ │ ├── sn_snr_metric.py
│ │ ├── snr_weight.py
│ │ ├── star_density.py
│ │ ├── string_count_metric.py
│ │ ├── summary_metrics.py
│ │ ├── surfb_metric.py
│ │ ├── technical_metrics.py
│ │ ├── tgaps.py
│ │ ├── transient_metrics.py
│ │ ├── use_metrics.py
│ │ ├── vector_metrics.py
│ │ ├── visit_groups_metric.py
│ │ └── weak_lensing_systematics_metric.py
│ ├── plots
│ │ ├── __init__.py
│ │ ├── hg_plotters.py
│ │ ├── hourglass_plotters.py
│ │ ├── mo_plotters.py
│ │ ├── nd_plotters.py
│ │ ├── neo_distance_plotter.py
│ │ ├── night_pointing_plotter.py
│ │ ├── oned_plotters.py
│ │ ├── perceptual_rainbow.py
│ │ ├── plot_handler.py
│ │ ├── skyproj_plotters.py
│ │ ├── spatial_plotters.py
│ │ ├── special_plotters.py
│ │ ├── two_d_plotters.py
│ │ └── xyplotter.py
│ ├── run_comparison
│ │ ├── __init__.py
│ │ ├── archive.py
│ │ ├── gather_summaries.py
│ │ ├── microlensing_compare.py
│ │ ├── radar_plot.py
│ │ └── summary_plots.py
│ ├── run_moving_calc.py
│ ├── run_moving_fractions.py
│ ├── run_moving_join.py
│ ├── run_selfcal_metric.py
│ ├── scimaf_dir.py
│ ├── show_maf.py
│ ├── slicers
│ │ ├── __init__.py
│ │ ├── base_slicer.py
│ │ ├── base_spatial_slicer.py
│ │ ├── healpix_sdss_slicer.py
│ │ ├── healpix_slicer.py
│ │ ├── healpix_subset_slicer.py
│ │ ├── hourglass_slicer.py
│ │ ├── mo_slicer.py
│ │ ├── movie_slicer.py
│ │ ├── nd_slicer.py
│ │ ├── one_d_slicer.py
│ │ ├── time_interval_slicers.py
│ │ ├── uni_slicer.py
│ │ └── user_points_slicer.py
│ ├── stackers
│ │ ├── __init__.py
│ │ ├── base_stacker.py
│ │ ├── coord_stackers.py
│ │ ├── date_stackers.py
│ │ ├── general_stackers.py
│ │ ├── get_col_info.py
│ │ ├── label_stackers.py
│ │ ├── m5_optimal_stacker.py
│ │ ├── mo_phase.py
│ │ ├── mo_stackers.py
│ │ ├── n_follow_stacker.py
│ │ ├── neo_dist_stacker.py
│ │ ├── sdss_stackers.py
│ │ ├── sn_stacker.py
│ │ └── teff_stacker.py
│ ├── utils
│ │ ├── __init__.py
│ │ ├── astrometry_utils.py
│ │ ├── generate_fov_map.py
│ │ ├── get_date_version.py
│ │ ├── maf_utils.py
│ │ ├── opsim_utils.py
│ │ ├── output_utils.py
│ │ ├── sn_n_sn_utils.py
│ │ ├── sn_utils.py
│ │ └── stellar_mags.py
│ └── web
│ │ ├── __init__.py
│ │ ├── favicon.ico
│ │ ├── maf_run_results.py
│ │ ├── maf_tracking.py
│ │ ├── sorttable.js
│ │ └── templates
│ │ ├── allmetricresults.html
│ │ ├── configs.html
│ │ ├── macros.html
│ │ ├── master.html
│ │ ├── metricselect.html
│ │ ├── multicolor.html
│ │ ├── results.html
│ │ ├── runselect.html
│ │ └── stats.html
├── moving_objects
│ ├── __init__.py
│ ├── base_obs.py
│ ├── cheby_fits.py
│ ├── cheby_values.py
│ ├── chebyshev_utils.py
│ ├── direct_obs.py
│ ├── make_lsst_obs.py
│ ├── ooephemerides.py
│ ├── orbits.py
│ ├── pre_generate.py
│ └── utils.py
├── phot_utils
│ ├── __init__.py
│ ├── bandpass.py
│ ├── photometric_parameters.py
│ ├── physical_parameters.py
│ ├── predicted_zeropoints.py
│ ├── sed.py
│ ├── sed_utils.py
│ ├── signaltonoise.py
│ └── spectral_resampling.py
├── satellite_constellations
│ ├── __init__.py
│ ├── basis_function.py
│ ├── model_observatory.py
│ └── sat_utils.py
├── selfcal
│ ├── __init__.py
│ ├── generate_catalog.py
│ ├── offsets.py
│ ├── solver.py
│ └── star_tools.py
├── sim_archive
│ ├── __init__.py
│ ├── make_snapshot.py
│ ├── prenight.py
│ └── sim_archive.py
└── skybrightness
│ ├── __init__.py
│ ├── allsky_db.py
│ ├── data
│ ├── ESO_Spectra
│ │ └── eso_tools.py
│ └── solarSpec
│ │ └── package.py
│ ├── generate_hdf5.py
│ ├── interp_components.py
│ ├── sky_model.py
│ ├── twilight_func.py
│ └── utils.py
├── setup.cfg
├── setup.py
├── showmaf-deploy.yaml
├── test-requirements.txt
└── tests
├── data
├── test_data.py
└── test_ddf_grid.py
├── maf
├── test_3x2fom.py
├── test_archive.py
├── test_basemetrics.py
├── test_batchcommon.py
├── test_batches.py
├── test_cadencemetrics.py
├── test_calibrationmetrics.py
├── test_color_slopes.py
├── test_gathersummaries.py
├── test_healpixslicer.py
├── test_healpixsubsetslicer.py
├── test_hgplotters.py
├── test_hourglassmetric.py
├── test_io.py
├── test_json.py
├── test_maps.py
├── test_metricbundle.py
├── test_mometrics.py
├── test_movieslicer.py
├── test_ndslicer.py
├── test_neodistanceplotter.py
├── test_onedslicer.py
├── test_opsimutils.py
├── test_plotters.py
├── test_resultsdb.py
├── test_schedviewmetrics.py
├── test_simplemetrics.py
├── test_snmetrics.py
├── test_snmetrics_nsn.py
├── test_stackers.py
├── test_stellarmags.py
├── test_stringcount.py
├── test_summary_plots.py
├── test_summarymetrics.py
├── test_technicalmetrics.py
├── test_templatemetrics.py
├── test_timeintervalslicers.py
├── test_trackingdb.py
├── test_unislicer.py
├── test_vectormetrics.py
└── test_visitgroupsmetric.py
├── moving_objects
├── test_camera.py
├── test_chebyfits.py
├── test_chebyshevutils.py
├── test_chebyvalues.py
├── test_ephemerides.py
└── test_orbits.py
├── phot_utils
├── test_approximatebandpasses.py
├── test_photometricparameters.py
├── test_photometry.py
├── test_predicted_zeropoints.py
├── test_read_bandpasses.py
├── test_sed.py
└── test_snr.py
├── satellite_constellations
└── test_satellites.py
├── sim_archive
├── test_make_snapshot.py
├── test_prenight.py
└── test_sim_archive.py
└── skybrightness
└── test_skymodel.py
/.git_archival.txt:
--------------------------------------------------------------------------------
1 | ref-names: HEAD -> main
2 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | .git_archival.txt export-subst
2 |
--------------------------------------------------------------------------------
/.github/workflows/build_container.yaml:
--------------------------------------------------------------------------------
1 | name: Docker Build
2 |
3 | "on":
4 |   # Modify workflow to run on tag, when the Dockerfile is updated
5 | workflow_dispatch:
6 |
7 |
8 | jobs:
9 | build:
10 | runs-on: ubuntu-latest
11 |
12 |
13 | steps:
14 | - uses: actions/checkout@v4
15 |
16 | - uses: lsst-sqre/build-and-push-to-ghcr@tickets/DM-41857
17 | id: build
18 | with:
19 | image: ${{ github.repository }}
20 | github_token: ${{ secrets.GITHUB_TOKEN }}
21 | dockerfile: Dockerfile
22 | cache-from: type=local,src=/tmp/.buildx-cache
23 | cache-to: type=local,dest=/tmp/.buildx-cache
24 |
25 | - run: echo Pushed ghcr.io/${{ github.repository }}:${{ steps.build.outputs.tag }}
--------------------------------------------------------------------------------
/.github/workflows/build_docs.yaml:
--------------------------------------------------------------------------------
1 | name: Build and Upload Docs
2 |
3 | "on":
4 | push:
5 | tags:
6 | - "*"
7 | branches:
8 | - "main"
9 | pull_request: {}
10 | workflow_dispatch:
11 |
12 | jobs:
13 | build_sphinx_docs:
14 | name: Build and upload documentation
15 | runs-on: ubuntu-latest
16 | steps:
17 | - uses: actions/checkout@v4
18 | - uses: conda-incubator/setup-miniconda@v3
19 | with:
20 | auto-update-conda: true
21 | python-version: "3.12"
22 | miniforge-version: latest
23 | channels: conda-forge,defaults
24 | show-channel-urls: true
25 |
26 | - name: configure conda and install requirements
27 | shell: bash -l {0}
28 | run: |
29 | conda install --yes pip
30 | conda install --yes --file=requirements.txt
31 | pip install "documenteer[guide]"
32 |
33 | - name: install rubin_sim
34 | shell: bash -l {0}
35 | run: |
36 | echo `pwd`
37 | ls ${{ github.workspace }}
38 | python -m pip install . --no-deps
39 |
40 | - name: Access rubin-sim-data cache
41 | id: cache-rs
42 | uses: actions/cache@v4
43 | env:
44 | cache-name: cached-rubin-sim-data
45 | with:
46 | path: ~/rubin_sim_data
47 | key: ${{ env.cache-name }}
48 | restore-keys: |
49 | ${{ env.cache-name }}
50 |
51 | - name: Update rubin-sim-data if needed
52 | shell: bash -l {0}
53 | run: |
54 | export RUBIN_SIM_DATA_DIR=~/rubin_sim_data
55 | # Download anything that is not current
56 | rs_download_data --tdqm_disable --update --dirs "maf,throughputs"
57 |
58 | - name: check conda and documenteer
59 | shell: bash -l {0}
60 | run: |
61 | conda list
62 |
63 | - name: build docs
64 | shell: bash -l {0}
65 | run: |
66 | export RUBIN_SIM_DATA_DIR=~/rubin_sim_data
67 | cd docs
68 | make html
69 |
70 | - name: upload documentation
71 | uses: lsst-sqre/ltd-upload@v1
72 | with:
73 | project: "rubin-sim"
74 | dir: "docs/_build/html"
75 | username: ${{ secrets.ltd_username }}
76 | password: ${{ secrets.ltd_password }}
77 |
78 |
--------------------------------------------------------------------------------
/.github/workflows/cache.yaml:
--------------------------------------------------------------------------------
1 | name: Cache rubin-sim-data
2 | on:
3 | # Run job at the end of each day
4 | schedule:
5 | - cron: "0 0 * * *"
6 | # and on manual workflow
7 | workflow_dispatch:
8 |
9 |
10 | jobs:
11 | make-cache:
12 | runs-on: ubuntu-latest
13 | steps:
14 | - uses: actions/checkout@v4
15 | - uses: conda-incubator/setup-miniconda@v3
16 | with:
17 | auto-update-conda: true
18 | python-version: "3.12"
19 | miniforge-version: latest
20 | channels: conda-forge,defaults
21 | show-channel-urls: true
22 |
23 | - name: Configure conda and install minimal requirements for cache
24 | shell: bash -l {0}
25 | run: |
26 | conda install --yes rubin-scheduler
27 |
28 | - name: Install rubin_sim from git
29 | shell: bash -l {0}
30 | run: |
31 | echo `pwd`
32 | ls ${{ github.workspace }}
33 | python -m pip install -e . --no-deps
34 |
35 | - name: Access rubin-sim-data cache
36 | id: cache-rs
37 | uses: actions/cache@v4
38 | env:
39 | cache-name: cached-rubin-sim-data
40 | with:
41 | path: ~/rubin_sim_data
42 | key: ${{ env.cache-name }}
43 | restore-keys: |
44 | ${{ env.cache-name }}
45 |
46 | - name: Download data.
47 | shell: bash -l {0}
48 | run: |
49 | export RUBIN_SIM_DATA_DIR=~/rubin_sim_data
50 | scheduler_download_data --tdqm_disable --update
51 | rs_download_testing
52 |
53 | - name: Check data
54 | shell: bash -l {0}
55 | run: |
56 | export RUBIN_SIM_DATA_DIR=~/rubin_sim_data
57 | echo $RUBIN_SIM_DATA_DIR contents
58 | ls $RUBIN_SIM_DATA_DIR
59 | echo "__contents of versions.txt__"
60 | cat $RUBIN_SIM_DATA_DIR/versions.txt
61 |
--------------------------------------------------------------------------------
/.github/workflows/rebase_checker.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | name: Check that 'main' is not merged into the development branch
3 |
4 | on: pull_request
5 |
6 | jobs:
7 | call-workflow:
8 | uses: lsst/rubin_workflows/.github/workflows/rebase_checker.yaml@main
9 |
--------------------------------------------------------------------------------
/.github/workflows/ruff.yaml:
--------------------------------------------------------------------------------
1 | name: Ruff and iSort
2 | on:
3 | # Trigger the workflow on push (to main) or pull request
4 | push:
5 | branches:
6 | - main
7 | pull_request:
8 | branches:
9 | - main
10 | workflow_dispatch:
11 |
12 | jobs:
13 | isort:
14 | runs-on: ubuntu-latest
15 | steps:
16 | - uses: actions/checkout@v4
17 | - uses: isort/isort-action@v1
18 | with:
19 | requirements-files: "requirements.txt test-requirements.txt"
20 | ruff:
21 | runs-on: ubuntu-latest
22 | steps:
23 | - uses: actions/checkout@v4
24 | - uses: astral-sh/ruff-action@v3
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 | de*.bsp
6 |
7 | # C extensions
8 | *.so
9 |
10 | # Distribution / packaging
11 | .Python
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | wheels/
24 | pip-wheel-metadata/
25 | share/python-wheels/
26 | *.egg-info/
27 | .installed.cfg
28 | *.egg
29 | MANIFEST
30 |
31 | # PyInstaller
32 | # Usually these files are written by a python script from a template
33 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
34 | *.manifest
35 | *.spec
36 |
37 | # Installer logs
38 | pip-log.txt
39 | pip-delete-this-directory.txt
40 |
41 | # Unit test / coverage reports
42 | htmlcov/
43 | .tox/
44 | .nox/
45 | .coverage
46 | .coverage.*
47 | .cache
48 | nosetests.xml
49 | coverage.xml
50 | *.cover
51 | *.py,cover
52 | .hypothesis/
53 | .pytest_cache/
54 |
55 | # Translations
56 | *.mo
57 | *.pot
58 |
59 | # Django stuff:
60 | *.log
61 | local_settings.py
62 | db.sqlite3
63 | db.sqlite3-journal
64 |
65 | # Flask stuff:
66 | instance/
67 | .webassets-cache
68 |
69 | # Scrapy stuff:
70 | .scrapy
71 |
72 | # Sphinx documentation
73 | docs/_build/
74 |
75 | # PyBuilder
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | .python-version
87 |
88 | # pipenv
89 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
90 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
91 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
92 | # install all needed dependencies.
93 | #Pipfile.lock
94 |
95 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
96 | __pypackages__/
97 |
98 | # Celery stuff
99 | celerybeat-schedule
100 | celerybeat.pid
101 |
102 | # SageMath parsed files
103 | *.sage.py
104 |
105 | # Environments
106 | .env
107 | .venv
108 | env/
109 | venv/
110 | ENV/
111 | env.bak/
112 | venv.bak/
113 |
114 | # Spyder project settings
115 | .spyderproject
116 | .spyproject
117 |
118 | # Rope project settings
119 | .ropeproject
120 |
121 | # mkdocs documentation
122 | /site
123 |
124 | # mypy
125 | .mypy_cache/
126 | .dmypy.json
127 | dmypy.json
128 |
129 | # Pyre type checker
130 | .pyre/
131 |
132 | # version file
133 | version.py
134 |
135 | # pycharm files
136 | .idea/
137 |
138 | # vscode files
139 | .vscode/
140 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v4.4.0
4 | hooks:
5 | - id: check-yaml
6 | - id: end-of-file-fixer
7 | - id: trailing-whitespace
8 | - id: check-toml
9 | - repo: https://github.com/psf/black
10 | rev: 23.7.0
11 | hooks:
12 | - id: black
13 | # It is recommended to specify the latest version of Python
14 | # supported by your project here, or alternatively use
15 | # pre-commit's default_language_version, see
16 | # https://pre-commit.com/#top_level-default_language_version
17 | language_version: python3.11
18 | - repo: https://github.com/pycqa/isort
19 | rev: 5.12.0
20 | hooks:
21 | - id: isort
22 | name: isort (python)
23 | - repo: https://github.com/astral-sh/ruff-pre-commit
24 | # Ruff version.
25 | rev: v0.3.4
26 | hooks:
27 | - id: ruff
28 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # Follow https://micromamba-docker.readthedocs.io/en/latest/
2 |
3 | # Base container
4 | FROM mambaorg/micromamba:1.5.9
5 |
6 | # Copy current directory
7 | COPY --chown=$MAMBA_USER:$MAMBA_USER . /home/${MAMBA_USER}/rubin_sim
8 |
9 | # Install container requirements from conda-forge
10 | # Note that open-orb dependencies are omitted
11 | RUN micromamba install -y -n base -f /home/${MAMBA_USER}/rubin_sim/container_environment.yaml
12 | RUN micromamba clean --all --yes
13 |
14 | ARG MAMBA_DOCKERFILE_ACTIVATE=1
15 |
16 | # Install current version of rubin-sim
17 | RUN python -m pip install /home/$MAMBA_USER/rubin_sim --no-deps
18 |
19 | # Container execution
20 | # Mount fbs simulation outputs expected at /data/fbs_sims
21 | # Mount rubin_sim_data (if needed) at /data/rubin_sim_data
22 |
23 | EXPOSE 8080
24 | ENV PORT=8080
25 |
26 | ENV RUBIN_SIM_DATA_DIR=/data/rubin_sim_data
27 |
28 | # Start up show_maf on port 8080
29 | CMD cd /data/fbs_sims && show_maf -p 8080 --no_browser
30 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # rubin_sim
2 | Scheduler, survey strategy analysis, and other simulation tools for Rubin Observatory.
3 |
4 |
5 | [![PyPI](https://img.shields.io/pypi/v/rubin-sim.svg)](https://pypi.org/project/rubin-sim/)
6 | [![Conda-Forge](https://anaconda.org/conda-forge/rubin-sim/badges/version.svg)](https://anaconda.org/conda-forge/rubin-sim)
7 | [![Run CI](https://github.com/lsst/rubin_sim/actions/workflows/test_and_build.yaml/badge.svg)](https://github.com/lsst/rubin_sim/actions/workflows/test_and_build.yaml)
8 | [![Build Docs](https://github.com/lsst/rubin_sim/actions/workflows/build_docs.yaml/badge.svg)](https://github.com/lsst/rubin_sim/actions/workflows/build_docs.yaml)
9 | [![codecov](https://codecov.io/gh/lsst/rubin_sim/branch/main/graph/badge.svg)](https://codecov.io/gh/lsst/rubin_sim)
10 |
11 |
12 | [![DOI](https://zenodo.org/badge/365031715.svg)](https://zenodo.org/badge/latestdoi/365031715)
13 |
14 |
15 | ## rubin_sim ##
16 |
17 | The [Legacy Survey of Space and Time](http://www.lsst.org) (LSST)
18 | is anticipated to encompass around 2 million observations spanning a decade,
19 | averaging 800 visits per night. The `rubin_sim` package was built to help
20 | understand the predicted performance of the LSST.
21 |
22 | The `rubin_sim` package contains the following main modules:
23 | * `phot_utils` - provides synthetic photometry
24 | using provided throughput curves based on current predicted performance.
25 | * `skybrightness` incorporates the ESO
26 | sky model, modified to match measured sky conditions at the LSST site,
27 | including an addition of a model for twilight skybrightness. This is used
28 | to generate the pre-calculated skybrightness data used in
29 | [`rubin_scheduler.skybrightness_pre`](https://rubin-scheduler.lsst.io/skybrightness-pre.html).
30 | * `moving_objects` provides a way to generate
31 | synthetic observations of moving objects, based on how they would appear in
32 | pointing databases ("opsims") created by
33 | [`rubin_scheduler`](https://rubin-scheduler.lsst.io).
34 | * `maf` the Metrics Analysis Framework, enabling efficient and
35 | scientifically varied evaluation of the LSST survey strategy and progress
36 | by providing a framework to enable these metrics to run in a
37 | standardized way on opsim outputs.
38 |
39 | More documentation for `rubin_sim` is available at
40 | [https://rubin-sim.lsst.io](https://rubin-sim.lsst.io), including installation instructions.
41 |
42 | ### Getting Help ###
43 |
44 | Questions about `rubin_sim` can be posted on the [sims slack channel](https://lsstc.slack.com/archives/C2LQ5JW9W), or on https://community.lsst.org/c/sci/survey_strategy/ (optionally, tag @yoachim and/or @ljones so we get notifications about it).
45 |
--------------------------------------------------------------------------------
/batch/compile_prenight_metadata_cache.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #SBATCH --account=rubin:developers # Account name
3 | #SBATCH --job-name=auxtel_prenight_daily # Job name
4 | #SBATCH --output=/sdf/data/rubin/shared/scheduler/prenight/sbatch/compile_prenight_metadata_cache.out # Output file (stdout)
5 | #SBATCH --error=/sdf/data/rubin/shared/scheduler/prenight/sbatch/compile_prenight_metadata_cache.err # Error file (stderr)
6 | #SBATCH --partition=milano # Partition (queue) names
7 | #SBATCH --nodes=1 # Number of nodes
8 | #SBATCH --ntasks=1 # Number of tasks run in parallel
9 | #SBATCH --cpus-per-task=1 # Number of CPUs per task
10 | #SBATCH --mem=4G # Requested memory
11 | #SBATCH --time=1:00:00 # Wall time (hh:mm:ss)
12 |
13 | echo "******** START of compile_prenight_metadata_cache.sh **********"
14 |
15 | # Source global definitions
16 | if [ -f /etc/bashrc ]; then
17 | . /etc/bashrc
18 | fi
19 |
20 | # SLAC S3DF - source all files under ~/.profile.d
21 | if [[ -e ~/.profile.d && -n "$(ls -A ~/.profile.d/)" ]]; then
22 | source <(cat $(find -L ~/.profile.d -name '*.conf'))
23 | fi
24 |
25 | source /sdf/group/rubin/sw/w_latest/loadLSST.sh
26 | conda activate /sdf/data/rubin/shared/scheduler/envs/prenight
27 | export AWS_PROFILE=prenight
28 | WORK_DIR=$(date '+/sdf/data/rubin/shared/scheduler/prenight/work/compile_prenight_metadata_cache/%Y-%m-%dT%H%M%S' --utc)
29 | echo "Working in $WORK_DIR"
30 | mkdir ${WORK_DIR}
31 | cd ${WORK_DIR}
32 | printenv > env.out
33 | compile_sim_archive_metadata_resource --append
34 | echo "******* END of compile_prenight_metadata_cache.sh *********"
35 |
--------------------------------------------------------------------------------
/container_environment.yaml:
--------------------------------------------------------------------------------
1 | name: base
2 | channels:
3 | - conda-forge
4 | dependencies:
5 | - numpy <2
6 | - matplotlib-base
7 | - healpy
8 | - pandas
9 | - pyarrow
10 | - numexpr
11 | - scipy
12 | - sqlalchemy
13 | - astropy
14 | - pytables
15 | - h5py
16 | - astroplan
17 | - git
18 | - colorcet
19 | - cycler
20 | - george
21 | - scikit-learn
22 | - shapely
23 | - skyproj
24 | - tqdm
25 | - jinja2
26 | - tornado
27 | - rubin-scheduler
28 |
29 |
--------------------------------------------------------------------------------
/docs/.gitignore:
--------------------------------------------------------------------------------
1 | _build/*
2 | source/*
3 | maf-metric-list.rst
4 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?= --keep-going -T -n
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | clean:
18 | rm -rf $(BUILDDIR)
19 |
20 | linkcheck:
21 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
22 | @echo
23 | @echo "Link check complete; look for any errors in the above output " \
24 | "or in $(BUILDDIR)/linkcheck/output.txt."
25 |
26 | # Catch-all target: route all unknown targets to Sphinx using the new
27 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
28 | %: Makefile
29 | python metric_list.py
30 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
31 |
32 |
33 |
--------------------------------------------------------------------------------
/docs/api.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim
2 |
3 | .. _api:
4 |
5 | API
6 | ===
7 |
8 | .. toctree::
9 | :maxdepth: 2
10 |
11 | Data <data-api>
12 |
13 | Metrics Analysis Framework (MAF) <maf-api>
14 |
15 | Moving Objects <moving-objects-api>
16 |
17 | Phot Utils <phot-utils-api>
18 |
19 | Satellite Constellations <satellite-constellations-api>
20 |
21 | Self Calibration <selfcal-api>
22 |
23 | Sim archive <sim-archive-api>
24 |
25 | Skybrightness <skybrightness-api>
26 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # For the full list of built-in configuration values, see the documentation:
4 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
5 |
6 | from documenteer.conf.guide import * # noqa: F403, import *
7 |
8 | linkcheck_retries = 2
9 |
--------------------------------------------------------------------------------
/docs/data-api.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.data
2 |
3 | .. _data-api:
4 |
5 | ========
6 | Data API
7 | ========
8 |
9 | .. automodule:: rubin_sim.data
10 | :imported-members:
11 | :members:
12 | :show-inheritance:
--------------------------------------------------------------------------------
/docs/data-download.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.data
2 |
3 | .. _data-download:
4 |
5 | =============
6 | Data Download
7 | =============
8 |
9 | The ``rubin_sim.data`` module provides a script to download the data
10 | required to run various modules in ``rubin_sim``, as well as to check the
11 | expected versions of the data. It also provides utilities to interpret
12 | the location of this $RUBIN_SIM_DATA_DIR on disk and to return the
13 | path to the current baseline simulation output (one of the datafiles
14 | downloaded by this module).
15 |
16 | With the split of ``rubin_sim`` into ``rubin_sim`` + ``rubin_scheduler``, the
17 | required data download utilities now live in the
18 | `rubin_scheduler.data <https://rubin-scheduler.lsst.io/data-download.html>`_
19 | package. ``rubin_scheduler`` is a necessary dependency of ``rubin_sim`` and
20 | should have
21 | been installed during the :ref:`installation ` process.
22 | The ``rubin_sim.data`` module simply provides additional information on the
23 | data files necessary for ``rubin_sim``, then calls the scripts from
24 | ``rubin_scheduler.data`` to execute the download.
25 |
26 |
27 | Downloading Necessary Data
28 | ^^^^^^^^^^^^^^^^^^^^^^^^^^
29 |
30 | Please see the information in the
31 | `rubin-scheduler "Downloading Necessary Data" documentation <https://rubin-scheduler.lsst.io/data-download.html>`_
32 | for more details on setting up $RUBIN_SIM_DATA_DIR (which is
33 | shared between ``rubin_scheduler``, ``rubin_sim`` and ``schedview``).
34 |
35 | Using either the default path to $RUBIN_SIM_DATA_DIR, or after setting it
36 | explicitly, first download the necessary data for ``rubin_scheduler`` and
37 | then add the (larger) data set for ``rubin_sim``:
38 |
39 | .. code-block:: bash
40 |
41 | scheduler_download_data
42 | rs_download_data
43 |
44 | This creates a series of directories at $RUBIN_SIM_DATA_DIR (in addition
45 | to the directories originating from `rubin_scheduler <https://rubin-scheduler.lsst.io>`_):
46 |
47 | * maf (containing data used for various metrics)
48 | * maps (containing various stellar density and 2-D and 3-D dust maps)
49 | * movingObjects (containing asteroid SEDs)
50 | * orbits (containing orbits for Solar System population samples)
51 | * orbits_precompute (precomputed daily orbits for the samples above)
52 | * sim_baseline (containing the current baseline simulation output)
53 | * skybrightness (containing information needed for the skybrightness module)
54 | * throughputs (current baseline throughput information)
55 | * test (containing data for unit tests)
56 |
57 |
58 | Note that the data will only be downloaded for the directories which do
59 | not already exist, regardless of whether the version on disk is up to date.
60 | To force an update to a version which matches the ``rubin_scheduler`` version:
61 |
62 | .. code-block:: bash
63 |
64 | rs_download_data --update
65 |
66 | This can also be applied only to certain directories, using the
67 | ``--dirs`` flag. It may be worth noting that some of the above directories
68 | are more sizeable than others -- the ``maps``, ``maf`` and
69 | ``orbits_precompute`` directories are the largest and if not needed, can
70 | be skipped in download by using ``--dirs``.
--------------------------------------------------------------------------------
/docs/documenteer.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | title = "rubin_sim"
3 | copyright = "2023 Association of Universities for Research in Astronomy, Inc. (AURA)"
4 | base_url = "https://rubin-sim.lsst.io"
5 | github_url = "https://github.com/lsst/rubin_sim"
6 |
7 | [project.python]
8 | package="rubin_sim"
9 |
10 | [sphinx]
11 | extensions = ["sphinx.ext.viewcode"]
12 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim
2 |
3 | .. _rubin-sim-index:
4 |
5 | #########
6 | rubin_sim
7 | #########
8 |
9 | Helping simulate and evaluate the performance of
10 | Rubin C. Observatory's Legacy Survey of Space and Time (the LSST),
11 | through evaluating prototypes of various analysis, simulating photometry
12 | and providing a framework for analyzing survey strategy progress and outcomes.
13 |
14 |
15 | .. toctree::
16 | :maxdepth: 2
17 |
18 | Introduction
19 | Installation
20 | User Guide
21 | API
--------------------------------------------------------------------------------
/docs/installation.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim
2 |
3 | .. _installation:
4 |
5 | ############
6 | Installation
7 | ############
8 |
9 | Quick Installation
10 | ------------------
11 |
12 | Installation from PyPI:
13 |
14 | ::
15 |
16 | pip install rubin-sim
17 | scheduler_download_data
18 | rs_download_data
19 |
20 | Note: pip installation of rubin-sim will lack the JPL data (DE405, etc.)
21 | that is needed to actually run ``pyoorb``, used in ``rubin_sim.moving_objects``, as this is not currently available from PyPI.
Please see the `oorb installation instructions <https://github.com/oorb/oorb#installation>`_ for more information.
23 |
24 | or from conda-forge:
25 |
26 | ::
27 |
28 | conda install -c conda-forge rubin-sim
29 | scheduler_download_data
30 | rs_download_data
31 |
32 | The `scheduler_download_data` and `rs_download_data` commands will
33 | download data files to the default location of `~/rubin_sim_data`.
34 | To store the data elsewhere, see instructions at
35 | :ref:`Data Download`.
36 |
37 | For Developer Use
38 | -----------------
39 |
First, clone the `rubin_sim <https://github.com/lsst/rubin_sim>`_ repository:
41 |
42 | ::
43 |
44 | git clone git@github.com:lsst/rubin_sim.git
45 | cd rubin_sim
46 | conda create --channel conda-forge --name rubin-sim --file requirements.txt python=3.12
47 | conda activate rubin-sim
48 | conda install -c conda-forge --file=test-requirements.txt # Optional test requirements
49 | pip install -e . --no-deps
50 | scheduler_download_data
51 | rs_download_data
52 |
53 | The `scheduler_download_data` and `rs_download_data` commands will
54 | download data files to the default location of `~/rubin_sim_data`.
55 | To store the data elsewhere, see instructions at
56 | :ref:`Data Download`.
57 |
58 | Note conda may override previous installs of
59 | `rubin_scheduler`, in which case one can uninstall the conda version
60 | and re-run `pip install -e . --no-deps` from the needed git repo directory.
61 |
62 | Building Documentation
63 | ----------------------
64 |
65 | An online copy of the documentation is available at https://rubin-sim.lsst.io,
66 | however building a local copy can be done as follows:
67 |
68 | ::
69 |
70 | pip install "documenteer[guide]"
71 | cd docs
72 | make html
73 |
74 |
75 | The root of the local documentation will then be ``docs/_build/html/index.html``.
76 |
77 |
--------------------------------------------------------------------------------
/docs/introduction.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim
2 |
3 | .. _introduction:
4 |
5 | ############
6 | Introduction
7 | ############
8 |
The `Legacy Survey of Space and Time <https://www.lsst.org>`_ (LSST)
10 | is anticipated to encompass around 2 million observations spanning a decade,
11 | averaging 800 visits per night. The ``rubin_sim`` package was built to help
12 | understand the predicted performance of the LSST.
13 |
14 | The :ref:`Phot Utils` module provides synthetic photometry
15 | using provided throughput curves based on current predicted performance.
16 |
17 | The :ref:`skybrightness` module incorporates the ESO
18 | sky model, modified to match measured sky conditions at the LSST site,
19 | including an addition of a model for twilight skybrightness. This is used
20 | to generate the pre-calculated skybrightness data used in
21 | `rubin_scheduler `_.
22 |
23 | The :ref:`Moving Objects` module provides a way to create
24 | synthetic observations of moving objects, based on how they would appear in
25 | pointing databases ("opsims") created by
26 | `rubin_scheduler `_.
27 |
28 | One of the major goals for ``rubin_sim`` is to enable efficient and
29 | scientifically varied evaluation of the LSST survey strategy and progress,
30 | by providing a framework to enable these metrics to run in a
31 | standardized way on opsim outputs.
32 | The :ref:`Metrics Analysis Framework` module provides these tools.
33 |
34 | .. toctree::
35 | :maxdepth: 2
36 |
37 | User Guide
38 |
--------------------------------------------------------------------------------
/docs/maf-api-batches.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.maf
2 |
3 | .. _maf-api-batches:
4 |
5 | =======
6 | Batches
7 | =======
8 |
9 | .. automodule:: rubin_sim.maf.batches
10 | :imported-members:
11 | :members:
12 | :show-inheritance:
13 |
--------------------------------------------------------------------------------
/docs/maf-api-db.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.maf
2 |
3 | .. _maf-api-db:
4 |
5 | ==============
6 | Databases (db)
7 | ==============
8 |
9 | .. automodule:: rubin_sim.maf.db
10 | :imported-members:
11 | :members:
12 | :show-inheritance:
13 |
--------------------------------------------------------------------------------
/docs/maf-api-maf-contrib.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.maf
2 |
3 | .. _maf-api-maf-contrib:
4 |
5 | ===========
6 | Maf Contrib
7 | ===========
8 |
9 | .. automodule:: rubin_sim.maf.maf_contrib
10 | :imported-members:
11 | :members:
12 | :show-inheritance:
13 |
--------------------------------------------------------------------------------
/docs/maf-api-maps.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.maf
2 |
3 | .. _maf-api-maps:
4 |
5 | =======
6 | Maps
7 | =======
8 |
9 | .. automodule:: rubin_sim.maf.maps
10 | :imported-members:
11 | :members:
12 | :show-inheritance:
13 |
--------------------------------------------------------------------------------
/docs/maf-api-metricbundles.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.maf
2 |
3 | .. _maf-api-metricbundles:
4 |
5 | ==============
6 | Metric Bundles
7 | ==============
8 |
9 | .. automodule:: rubin_sim.maf.metric_bundles
10 | :imported-members:
11 | :members:
12 | :show-inheritance:
13 |
--------------------------------------------------------------------------------
/docs/maf-api-metrics.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.maf
2 |
3 | .. _maf-api-metrics:
4 |
5 | =======
6 | Metrics
7 | =======
8 |
9 | .. automodule:: rubin_sim.maf.metrics
10 | :imported-members:
11 | :members:
12 | :show-inheritance:
13 |
--------------------------------------------------------------------------------
/docs/maf-api-plots.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.maf
2 |
3 | .. _maf-api-plots:
4 |
5 | =======
6 | Plots
7 | =======
8 |
9 | .. automodule:: rubin_sim.maf.plots
10 | :imported-members:
11 | :members:
12 | :show-inheritance:
13 |
--------------------------------------------------------------------------------
/docs/maf-api-run-comparison.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.maf
2 |
3 | .. _maf-api-run-comparison:
4 |
5 | ==============
6 | Run Comparison
7 | ==============
8 |
9 | .. automodule:: rubin_sim.maf.run_comparison
10 | :imported-members:
11 | :members:
12 | :show-inheritance:
13 |
--------------------------------------------------------------------------------
/docs/maf-api-slicers.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.maf
2 |
3 | .. _maf-api-slicers:
4 |
5 | =======
6 | Slicers
7 | =======
8 |
9 | .. automodule:: rubin_sim.maf.slicers
10 | :imported-members:
11 | :members:
12 | :show-inheritance:
13 |
--------------------------------------------------------------------------------
/docs/maf-api-stackers.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.maf
2 |
3 | .. _maf-api-stackers:
4 |
5 | ========
6 | Stackers
7 | ========
8 |
9 | .. automodule:: rubin_sim.maf.stackers
10 | :imported-members:
11 | :members:
12 | :show-inheritance:
13 |
--------------------------------------------------------------------------------
/docs/maf-api-utils.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.maf
2 |
3 | .. _maf-api-utils:
4 |
5 | =======
6 | Utils
7 | =======
8 |
9 | .. automodule:: rubin_sim.maf.utils
10 | :imported-members:
11 | :members:
12 | :show-inheritance:
13 |
--------------------------------------------------------------------------------
/docs/maf-api.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.maf
2 |
3 | .. _maf-api:
4 |
5 | =======
6 | MAF API
7 | =======
8 |
9 |
10 | .. toctree::
11 |
12 | Batches
13 | Databases (db)
14 | Maps
15 | Maf Contrib
16 | Metrics
17 | MetricBundles
18 | Plots
19 | Run Comparison
20 | Slicers
21 | Stackers
22 | Utils
--------------------------------------------------------------------------------
/docs/metric_list.py:
--------------------------------------------------------------------------------
1 | __all__ = ("make_metric_list",)
2 |
3 | import inspect
4 |
5 | import rubin_sim.maf.maf_contrib as maf_contrib
6 | import rubin_sim.maf.metrics as metrics
7 |
8 |
def make_metric_list(outfile):
    """Write an rst page listing all available MAF metric classes.

    Produces a "maf-metric-list" page with one section for the core
    ``rubin_sim.maf.metrics`` classes and one for the contributed
    ``rubin_sim.maf.maf_contrib`` metrics.

    Parameters
    ----------
    outfile : `str`
        Path of the rst file to write.
    """
    # Use a context manager so the file is closed even on error
    # (the original left the handle open).
    with open(outfile, "w") as f:
        # Page header.
        print(".. py:currentmodule:: rubin_sim.maf", file=f)
        print("", file=f)
        print(".. _maf-metric-list:", file=f)
        print("", file=f)
        print("################################", file=f)
        print("rubin_sim MAF: Available metrics", file=f)
        print("################################", file=f)
        print(" ", file=f)

        write_metric_section(f, metrics, "rubin_sim.maf.metrics", "Core LSST MAF metrics")
        write_metric_section(
            f,
            maf_contrib,
            "rubin_sim.maf.maf_contrib",
            "Contributed maf_contrib metrics",
            require_metric_suffix=True,
        )


def write_metric_section(f, module, module_prefix, title, require_metric_suffix=False):
    """Write one titled rst section listing the metric classes in `module`.

    Parameters
    ----------
    f : file-like
        Open text stream to write to.
    module : module
        Module whose members are scanned for classes.
    module_prefix : `str`
        Only classes defined in modules whose name starts with this
        prefix are listed (filters out re-exported third-party classes).
    title : `str`
        Section heading.
    require_metric_suffix : `bool`, optional
        If True, only list classes whose name ends with "Metric".
    """
    print(title, file=f)
    print("^" * len(title), file=f)
    print(" ", file=f)
    for name, obj in inspect.getmembers(module):
        if not inspect.isclass(obj):
            continue
        if require_metric_suffix and not name.endswith("Metric"):
            continue
        defining_module = inspect.getmodule(obj)
        # getmodule can return None (e.g. dynamically created classes);
        # skip those rather than crash.
        if defining_module is None or not defining_module.__name__.startswith(module_prefix):
            continue
        link = f":py:class:`~{module_prefix}.{name}` "
        doc = inspect.getdoc(obj)
        # Some classes lack docstrings; fall back to an empty summary
        # instead of raising AttributeError on None.
        simpledoc = doc.split("\n")[0] if doc else ""
        print(f"- {link} \n \t {simpledoc}", file=f)
    print(" ", file=f)


if __name__ == "__main__":
    make_metric_list("maf-metric-list.rst")
50 |
--------------------------------------------------------------------------------
/docs/moving-objects-api.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.moving_objects
2 |
3 | .. _moving-objects-api:
4 |
5 | ==================
6 | Moving Objects API
7 | ==================
8 |
9 | .. automodule:: rubin_sim.moving_objects
10 | :imported-members:
11 | :members:
12 | :show-inheritance:
--------------------------------------------------------------------------------
/docs/moving-objects.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim
2 |
3 | .. _moving-objects:
4 |
5 |
6 | ##############
7 | Moving Objects
8 | ##############
9 |
The ``rubin_sim.moving_objects`` module provides tools to
11 | generate simulated ephemerides of a population of
12 | small bodies throughout an LSST pointing history.
13 | These ephemerides are typically used for further
14 | analysis in :ref:`MAF ` to evaluate the effect of
15 | survey strategy on various populations
16 | of Solar System objects.
17 |
18 | There are several populations available in the "orbits" directory of
19 | $RUBIN_SIM_DATA_DIR. Many of these populations were contributed or
20 | enhanced by the LSST Solar System Science Collaboration (SSSC).
21 | Further documentation on these orbital populations is available in the
22 | `LSST-SSSC "SSSC_test_populations" `_ repo.
--------------------------------------------------------------------------------
/docs/phot-utils-api.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.phot_utils
2 |
3 | .. _phot-utils-api:
4 |
5 | ==============
6 | Phot Utils API
7 | ==============
8 |
9 | .. automodule:: rubin_sim.phot_utils
10 | :imported-members:
11 | :members:
12 | :show-inheritance:
--------------------------------------------------------------------------------
/docs/phot-utils.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim
2 |
3 | .. _phot-utils:
4 |
5 | ##########
6 | Phot Utils
7 | ##########
8 |
The ``rubin_sim.phot_utils`` module provides synthetic photometry
10 | and SNR calculation methods for Rubin. There are expected throughput
11 | curves available in the 'throughputs' directory of $RUBIN_SIM_DATA_DIR.
--------------------------------------------------------------------------------
/docs/satellite-constellations-api.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.satellite_constellations
2 |
3 | .. _satellite-constellations-api:
4 |
5 | ============================
6 | Satellite Constellations API
7 | ============================
8 |
9 | .. automodule:: rubin_sim.satellite_constellations
10 | :imported-members:
11 | :members:
12 | :show-inheritance:
--------------------------------------------------------------------------------
/docs/satellite-constellations.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim
2 |
3 | .. _satellite-constellations:
4 |
5 | ########################
6 | Satellite Constellations
7 | ########################
8 |
9 | The ``rubin_sim.satellite_constellations`` module contains
10 | tools for creating and propagating satellite mega constellations
11 | to evaluate their impact (in terms of streaks) in Rubin images.
12 | There is also an extension for ``rubin_scheduler`` that will add
13 | "satellite dodging" to the scheduler logic, at a cost of overall image depth.
14 |
--------------------------------------------------------------------------------
/docs/selfcal-api.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.selfcal
2 |
3 | .. _selfcal-api:
4 |
5 | ====================
6 | Self Calibration API
7 | ====================
8 |
9 | .. automodule:: rubin_sim.selfcal
10 | :imported-members:
11 | :members:
12 | :show-inheritance:
--------------------------------------------------------------------------------
/docs/selfcal.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim
2 |
3 | .. _selfcal:
4 |
5 | ################
6 | Self Calibration
7 | ################
8 |
9 | The ``rubin_sim.selfcal`` module contains tools for simulating a basic
10 | self-calibration effort. This was used to estimate photometric errors
11 | remaining after self-calibration in `LSE-180 `_.
12 | This module remains useful for first-pass investigations into the effects
13 | of survey strategy choices on the resulting photometric calibration
14 | possibilities, which in particular can be useful for investigating
15 | the effects of footprint or rolling cadence.
--------------------------------------------------------------------------------
/docs/sim-archive-api.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.sim_archive
2 |
3 | .. _sim-archive-api:
4 |
5 | ===============
6 | sim_archive API
7 | ===============
8 |
9 | .. automodule:: rubin_sim.sim_archive
10 | :imported-members:
11 | :members:
12 | :show-inheritance:
13 |
--------------------------------------------------------------------------------
/docs/skybrightness-api.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.skybrightness
2 |
3 | .. _skybrightness-api:
4 |
5 | =================
6 | Skybrightness API
7 | =================
8 |
9 | .. automodule:: rubin_sim.skybrightness
10 | :imported-members:
11 | :members:
12 | :show-inheritance:
--------------------------------------------------------------------------------
/docs/skybrightness.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim.skybrightness
2 |
3 | .. _skybrightness:
4 |
5 | #############
6 | Skybrightness
7 | #############
8 |
9 | The ``rubin_sim.skybrightness`` module generates
10 | predicted skybrightness values (in either magnitudes per
11 | square arcsecond for any LSST bandpass or
12 | as a SED over the relevant wavelengths).
13 | It uses the ESO skybrightness model components
14 | (includes upper and lower atmosphere emission lines, airglow continuum,
15 | zodiacal light and scattered lunar light) and has additional
16 | twilight components.
17 | The model predictions have been tested against skybrightness
18 | measurements at the LSST site.
19 |
20 | More details about the rubin_sim version of the model and
21 | its validation for Rubin are available in
22 | `An optical to IR sky brightness model for the LSST by Yoachim et. al.
23 | `_.
24 |
--------------------------------------------------------------------------------
/docs/user-guide.rst:
--------------------------------------------------------------------------------
1 | .. py:currentmodule:: rubin_sim
2 |
3 | .. _user-guide:
4 |
5 | ##########
6 | User Guide
7 | ##########
8 |
9 |
10 | .. toctree::
11 |
12 | Data Download Utilities
13 |
14 | Metrics Analysis Framework (MAF)
15 |
16 | Moving Objects
17 |
18 | Phot Utils
19 |
20 | Satellite Constellations
21 |
22 | Self Calibration
23 |
24 | Simulation archive
25 |
26 | Skybrightness
27 |
--------------------------------------------------------------------------------
/example_deployment.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | apiVersion: v1
3 | kind: Namespace
4 | metadata:
5 | name: maf
6 |
7 | ---
8 | apiVersion: apps/v1
9 | kind: Deployment
10 | metadata:
11 | namespace: maf
12 | name: maf-server
13 | labels:
14 | app: maf-server
15 | spec:
16 | replicas: 1
17 | selector:
    matchLabels:
      app: maf-server
19 | template:
20 | metadata:
21 | labels:
22 | app: maf-server
23 | spec:
24 | containers:
25 | - name: maf
26 | image: "ghcr.io/....:tag"
27 | imagePullPolicy: Always
28 | resources:
29 | limits:
30 | cpu: 1
31 | memory: "2Gi"
32 | requests:
33 | cpu: 500m
34 | memory: "1Gi"
35 | volumeMounts:
36 | - mountPath: /sdf/data/rubin
37 | name: sdf-data-rubin
38 | volumes:
39 | - name: sdf-data-rubin
40 | persistentVolumeClaim:
41 | claimName: sdf-data-rubin
42 | ---
43 | apiVersion: v1
44 | kind: PersistentVolumeClaim
45 | metadata:
46 | namespace: maf
47 | name: sdf-data-rubin
48 | spec:
49 | storageClassName: sdf-data-rubin
50 | accessModes:
51 | - ReadOnlyMany
52 | resources:
53 | requests:
54 | storage: 1Gi
55 | ---
56 | apiVersion: v1
57 | kind: Service
58 | metadata:
59 | namespace: maf
60 | name: usdf-maf
61 | labels:
62 | app: maf-server
63 | annotations:
64 | metallb.universe.tf/address-pool: sdf-services
65 | spec:
66 | type: LoadBalancer
67 | ports:
68 | - name: http
69 | port: 80
70 | protocol: TCP
71 | targetPort: 80
72 | selector:
73 | app: maf-server
--------------------------------------------------------------------------------
/optional-requirements.txt:
--------------------------------------------------------------------------------
1 | openorb
2 | openorb-data-de405
3 | george
4 | scikit-learn
5 | jinja2
6 | tornado
7 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | astropy-base
2 | astroplan
3 | colorcet
4 | conda
5 | cycler
6 | gitpython
7 | healpy
8 | h5py
9 | matplotlib-base
10 | numpy
11 | numexpr
12 | pandas
13 | pyarrow
14 | pytables
15 | rubin-scheduler >=3.0
16 | scipy
17 | setuptools_scm
18 | setuptools_scm_git_archive
19 | shapely
20 | skyfield >=1.52
21 | skyproj
22 | sqlalchemy
23 | tqdm
24 |
--------------------------------------------------------------------------------
/rubin_sim/__init__.py:
--------------------------------------------------------------------------------
1 | from importlib.metadata import PackageNotFoundError, version
2 |
# Expose the installed distribution's version as rubin_sim.__version__.
try:
    __version__ = version("rubin_sim")
except PackageNotFoundError:
    # Package is not installed (e.g. running from a source checkout
    # without `pip install`); leave __version__ undefined.
    pass
8 |
--------------------------------------------------------------------------------
/rubin_sim/data/__init__.py:
--------------------------------------------------------------------------------
1 | from .rs_download_data import * # noqa: F403
2 |
--------------------------------------------------------------------------------
/rubin_sim/maf/__init__.py:
--------------------------------------------------------------------------------
1 | #
2 | # LSST Data Management System
3 | # Copyright 2008, 2009, 2010 LSST Corporation.
4 | #
5 | # This product includes software developed by the
6 | # LSST Project (http://www.lsst.org/).
7 | #
8 | # This program is free software: you can redistribute it and/or modify
9 | # it under the terms of the GNU General Public License as published by
10 | # the Free Software Foundation, either version 3 of the License, or
11 | # (at your option) any later version.
12 | #
13 | # This program is distributed in the hope that it will be useful,
14 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 | # GNU General Public License for more details.
17 | #
18 | # You should have received a copy of the LSST License Statement and
19 | # the GNU General Public License along with this program. If not,
20 | # see .
21 | #
22 | """Python interface to the metrics analysis framework."""
23 | from .batches import *
24 | from .db import *
25 | from .maf_contrib import *
26 | from .maps import *
27 | from .metric_bundles import *
28 | from .metrics import *
29 | from .plots import *
30 | from .run_comparison import *
31 | from .slicers import *
32 | from .stackers import *
33 | from .utils import *
34 | from .web import *
35 |
--------------------------------------------------------------------------------
/rubin_sim/maf/batches/__init__.py:
--------------------------------------------------------------------------------
1 | from .altaz_batch import *
2 | from .col_map_dict import *
3 | from .common import *
4 | from .ddf_batch import *
5 | from .filterchange_batch import *
6 | from .glance_batch import *
7 | from .hourglass_batch import *
8 | from .info_batch import *
9 | from .metadata_batch import *
10 | from .moving_objects_batch import *
11 | from .openshutter_batch import *
12 | from .radar_limited import *
13 | from .science_radar_batch import *
14 | from .skycoverage import *
15 | from .slew_batch import *
16 | from .srd_batch import *
17 | from .time_batch import *
18 | from .visitdepth_batch import *
19 |
--------------------------------------------------------------------------------
/rubin_sim/maf/batches/hourglass_batch.py:
--------------------------------------------------------------------------------
1 | """Run the hourglass metric."""
2 |
3 | __all__ = ("hourglassPlots",)
4 |
5 | import rubin_sim.maf.metric_bundles as mb
6 | import rubin_sim.maf.metrics as metrics
7 | import rubin_sim.maf.slicers as slicers
8 |
9 | from .col_map_dict import col_map_dict
10 |
11 |
def hourglassPlots(colmap=None, runName="opsim", nyears=10, extraSql=None, extraInfoLabel=None):
    """Build hourglass metric bundles, one per survey year.

    Parameters
    ----------
    colmap : `dict`, optional
        Mapping of standard column names; defaults to `col_map_dict()`.
    runName : `str`, optional
        Name of the simulated survey, set on every bundle.
    nyears : `int`, optional
        Number of years to generate hourglass plots for.
    extraSql : `str`, optional
        Extra sql constraint, and'd with the per-year night window.
    extraInfoLabel : `str`, optional
        Extra info_label prefix; if omitted, derived from `extraSql`.

    Returns
    -------
    metric_bundleDict : `dict` of `maf.MetricBundle`
    """
    if colmap is None:
        colmap = col_map_dict()

    # Derive the base sql constraint and info_label prefix.
    base_sql = ""
    base_info = ""
    if extraSql:
        base_sql = extraSql
        if extraInfoLabel is None:
            base_info = extraSql.replace("filter =", "").replace("filter=", "")
            base_info = base_info.replace('"', "").replace("'", "")
    if extraInfoLabel is not None:
        base_info = extraInfoLabel

    bundles = []
    # NOTE: a single display dict is shared (and mutated) across all
    # bundles, matching the previous behavior.
    display = {"group": "Hourglass"}
    for year in range(1, nyears + 1):
        display["subgroup"] = f"Year {year}"
        display["caption"] = (
            "Visualization of the filter usage of the telescope. "
            "The black wavy line indicates lunar phase; the red and blue "
            "solid lines indicate nautical and civil twilight."
        )
        night_window = f"night > {int(365.25 * (year - 1))} and night <= {int(365.25 * year)}"
        constraint = night_window if not base_sql else f"({night_window}) and ({base_sql})"
        info = base_info + f" year {year - 1}-{year}"
        bundles.append(
            mb.MetricBundle(
                metrics.HourglassMetric(
                    night_col=colmap["night"], mjd_col=colmap["mjd"], metric_name="Hourglass"
                ),
                slicers.HourglassSlicer(),
                constraint=constraint,
                info_label=info,
                display_dict=display,
            )
        )

    # Stamp the run name on every bundle and return as a bundle dict.
    for bundle in bundles:
        bundle.set_run_name(runName)
    return mb.make_bundles_dict_from_list(bundles)
80 |
--------------------------------------------------------------------------------
/rubin_sim/maf/db/__init__.py:
--------------------------------------------------------------------------------
1 | from .results_db import * # noqa: F403
2 | from .tracking_db import * # noqa: F403
3 |
--------------------------------------------------------------------------------
/rubin_sim/maf/db/add_run.py:
--------------------------------------------------------------------------------
1 | __all__ = ("add_run",)
2 |
3 | import argparse
4 |
5 | from . import add_run_to_database
6 |
7 |
def add_run():
    """Command-line entry point: register a MAF output directory in the
    tracking database used by the MAF web interface.
    """
    tracking_default = "trackingDb_sqlite.db"
    parser = argparse.ArgumentParser(description="Add a MAF run to the tracking database.")
    parser.add_argument("maf_dir", type=str, help="Directory containing MAF outputs.")
    parser.add_argument("-c", "--maf_comment", type=str, default=None, help="Comment on MAF analysis.")
    parser.add_argument("--group", type=str, default=None, help="Opsim Group name.")
    parser.add_argument("--run_name", type=str, default=None, help="Run Name.")
    parser.add_argument("--run_comment", type=str, default=None, help="Comment on OpSim run.")
    # NOTE(review): the default here is the literal string "None", not the
    # None object — presumably handled downstream; confirm before changing.
    parser.add_argument("--db_file", type=str, default="None", help="Opsim Sqlite filename")
    parser.add_argument(
        "-t",
        "--tracking_db",
        type=str,
        default=tracking_default,
        help=f"Tracking database filename. Default is {tracking_default}, in the current directory.",
    )
    parsed = parser.parse_args()

    add_run_to_database(
        maf_dir=parsed.maf_dir,
        tracking_db_file=parsed.tracking_db,
        run_group=parsed.group,
        run_name=parsed.run_name,
        run_comment=parsed.run_comment,
        maf_comment=parsed.maf_comment,
        db_file=parsed.db_file,
    )
35 |
--------------------------------------------------------------------------------
/rubin_sim/maf/ddf_dir.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import glob
3 | import os
4 | import shutil
5 |
6 | import rubin_sim.maf.batches as batches
7 | import rubin_sim.maf.db as db
8 | import rubin_sim.maf.metric_bundles as mb
9 |
10 | __all__ = ("ddf_dir",)
11 |
import matplotlib

# Select the non-interactive Agg backend before any plotting, so this
# command-line tool can run without a display.
matplotlib.use("Agg")
15 |
16 |
def ddf_dir():
    """
    Run the DDF (deep drilling fields) batch on all .db files in a
    directory (or on a single database given via --db), writing MAF
    outputs to `<run_name>_ddf` and registering each run in the
    tracking database.
    """

    parser = argparse.ArgumentParser()
    parser.add_argument("--db", type=str, default=None)
    parser.add_argument("--nside", type=int, default=512)
    parser.add_argument("--old_coords", dest="old_coords", action="store_true")
    parser.set_defaults(verbose=False)

    args = parser.parse_args()

    # No explicit database given: process every .db file in the current
    # directory, skipping the MAF tracking database itself.
    if args.db is None:
        db_files = glob.glob("*.db")
        db_files = [filename for filename in db_files if "trackingDb" not in filename]
    else:
        db_files = [args.db]
    run_names = [os.path.basename(name).replace(".db", "") for name in db_files]

    for filename, name in zip(db_files, run_names):
        # Start from a clean output directory for each run.
        if os.path.isdir(name + "_ddf"):
            shutil.rmtree(name + "_ddf")

        bdict = {}
        bdict.update(batches.ddfBatch(run_name=name, nside=args.nside, old_coords=args.old_coords))
        results_db = db.ResultsDb(out_dir=name + "_ddf")
        group = mb.MetricBundleGroup(
            bdict,
            filename,
            out_dir=name + "_ddf",
            results_db=results_db,
            save_early=False,
        )
        group.run_all(clear_memory=True, plot_now=True)
        results_db.close()
        # Record this MAF run in the sqlite tracking database so it
        # appears in the MAF web interface.
        db.add_run_to_database(name + "_ddf", "trackingDb_sqlite.db", None, name, "", "", name + ".db")
54 |
--------------------------------------------------------------------------------
/rubin_sim/maf/glance_dir.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import glob
3 | import os
4 | import shutil
5 |
6 | import rubin_sim.maf.batches as batches
7 | import rubin_sim.maf.db as db
8 | import rubin_sim.maf.metric_bundles as mb
9 |
10 | __all__ = ("glance_dir",)
11 |
import matplotlib

# Non-interactive backend: this command-line tool only saves plots to disk.
matplotlib.use("Agg")
15 |
16 |
def glance_dir():
    """
    Run the glance batch on all .db files in a directory.

    With ``--db``, only the named opsim database is processed; otherwise
    every ``*.db`` file in the current directory (excluding tracking
    databases) is run. Any existing ``<run>_glance`` output directory is
    removed before being regenerated.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--db", type=str, default=None)
    args = parser.parse_args()

    if args.db is not None:
        db_files = [args.db]
    else:
        db_files = [f for f in glob.glob("*.db") if "trackingDb" not in f]
    run_names = [os.path.basename(db_file).replace(".db", "") for db_file in db_files]

    for db_file, run_name in zip(db_files, run_names):
        out_dir = run_name + "_glance"
        # Clear out any previous results for this run.
        if os.path.isdir(out_dir):
            shutil.rmtree(out_dir)
        colmap = batches.col_map_dict()

        bdict = dict(batches.glanceBatch(colmap, run_name))
        results_db = db.ResultsDb(out_dir=out_dir)
        group = mb.MetricBundleGroup(
            bdict,
            db_file,
            out_dir=out_dir,
            results_db=results_db,
            save_early=False,
        )
        group.run_all(clear_memory=True, plot_now=True)
        results_db.close()
        db.add_run_to_database(
            maf_dir=out_dir,
            tracking_db_file="trackingDb_sqlite.db",
            run_group=None,
            run_name=run_name,
            run_comment="",
            maf_comment="Glance",
            db_file=run_name + ".db",
        )
59 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maf_contrib/__init__.py:
--------------------------------------------------------------------------------
1 | # Add similar lines (from .filename import *) when you add new metrics,
2 | # stackers or slicers.
3 | from .calculate_lsst_field_visibility_astropy import *
4 | from .depth_limited_num_gal_metric import *
5 | from .filter_pair_t_gaps_metric import *
6 | from .grb_transient_metric import *
7 | from .gw170817_det_metric import *
8 | from .kne_metrics import *
9 | from .lss_metrics import *
10 | from .lss_obs_strategy import *
11 | from .lv_dwarfs import *
12 | from .microlensing_metric import *
13 | from .periodic_metric import *
14 | from .periodic_star_metric import *
15 | from .periodic_star_modulation_metric import *
16 | from .presto_color_kne_pop_metric import *
17 | from .selfcal_uniformity_metric import *
18 | from .star_count_mass_metric import *
19 | from .star_count_metric import *
20 | from .static_probes_fom_summary_metric import *
21 | from .tdes_pop_metric import *
22 | from .triplet_metric import *
23 | from .var_depth_metric import *
24 | from .xrb_metrics import *
25 | from .young_stellar_objects_metric import *
26 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maf_contrib/example_new_metrics.py:
--------------------------------------------------------------------------------
1 | # Example of a new metric added to the repo.
2 | # ljones@astro.washington.edu
3 |
4 | __all__ = ("NightsWithNFiltersMetric",)
5 |
6 | import numpy as np
7 |
8 | from rubin_sim.maf.metrics import BaseMetric
9 |
10 |
class NightsWithNFiltersMetric(BaseMetric):
    """Count the number of nights in which strictly more than `n_filters`
    distinct filters were used, for this set of visits.

    Parameters
    ----------
    night_col : `str`, optional
        Name of the column holding the night of each visit.
    filter_col : `str`, optional
        Name of the column holding the filter of each visit.
    n_filters : `int`, optional
        Threshold number of filters; a night is counted when it uses more
        than this many distinct filters.
    """

    def __init__(self, night_col="night", filter_col="filter", n_filters=3, **kwargs):
        self.night_col = night_col
        self.filter_col = filter_col
        self.n_filters = n_filters
        super().__init__(col=[self.night_col, self.filter_col], **kwargs)

    def run(self, data_slice, slice_point=None):
        nights = data_slice[self.night_col]
        filters = data_slice[self.filter_col]
        # Tally the nights whose visits span more than n_filters filters.
        return sum(
            1
            for night in np.unique(nights)
            if np.unique(filters[nights == night]).size > self.n_filters
        )
41 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maf_contrib/gw170817_det_metric.py:
--------------------------------------------------------------------------------
1 | # Metric for kilonova detectability based on GW170817 SED used in Scolnic et
2 | # al. 2018 and Setzer et al. 2019. The chosen detection criteria are related
3 | # to those used in the LSST DESC white paper detectability work and the two
4 | # references above.
5 | #
6 | # Contact for this code:
7 | # christian.setzer@fysik.su.se
8 | from pathlib import Path
9 |
10 | from .transient_ascii_sed_metric import TransientAsciiSEDMetric
11 |
__all__ = ("GW170817DetMetric",)
# Directory containing this module; used to locate the bundled SED data file.
base_path = Path(__file__).parent
14 |
15 |
class GW170817DetMetric(TransientAsciiSEDMetric):
    """
    Wrapper metric class for GW170817-like kilonovae, built on the
    TransientAsciiSEDMetric. The defaults correspond to detection criteria
    similar to those of Scolnic et al. 2018 and Setzer et al. 2019;
    because of the simplified transient distribution used for this metric,
    only criteria two and three are applied. The default redshift is the
    approximate mean of the detected cosmological redshift distribution
    shown in Setzer et al. 2019.

    Parameters
    ----------
    ascii_file : `str`, optional
        The ascii file containing the inputs for the SED. The file must
        contain three columns - ['phase', 'wave', 'flux'] -
        of phase/epoch (in days), wavelength (Angstroms), and
        flux (ergs/s/Angstrom). Default, data provided with sims_maf_contrib.
    metric_name : `str`, optional
        Name of the metric, can be overwritten by user or child metric.
    z : `float`, optional
        Cosmological redshift at which to consider observations of the
        transient SED. Default 0.08.
    num_filters : `int`, optional
        Number of filters that need to be observed for an object to be
        counted as detected. Default 2. (if num_per_lightcurve is 0, then
        this will be reset to 0).
    filter_time : `float`, optional
        The time within which observations in at least num_filters are
        required (in days). Default 25.0 days.
    num_phases_to_run : `int`, optional
        Sets the number of phases that should be checked.
        One can imagine pathological cadences where many objects pass the
        detection criteria, but would not if the observations were offset
        by a phase-shift. Default 5.
    """

    def __init__(
        self,
        ascii_file=(base_path / "../data/DECAMGemini_SED.txt").resolve(),
        metric_name="GW170817DetMetric",
        z=0.08,
        num_filters=2,
        filter_time=25.0,
        num_phases_to_run=5,
        **kwargs,
    ):
        """Forward the GW170817-specific defaults to the base class."""
        super().__init__(
            ascii_file=ascii_file,
            metric_name=metric_name,
            z=z,
            num_filters=num_filters,
            filter_time=filter_time,
            num_phases_to_run=num_phases_to_run,
            **kwargs,
        )
73 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maf_contrib/intervals_between_obs_metric.py:
--------------------------------------------------------------------------------
1 | # Example for IntervalsBetweenObsMetric
2 | # Somayeh Khakpash - Lehigh University
3 | # Last edited : 10/21/2020
4 | # Calculates statistics (mean or median or standard deviation) of intervals
5 | # between observations during simultaneous windows/Inter-seasonal gap of
6 | # another survey.
7 | # SurveyIntervals is the list of the survey observing window/Inter-seasonal
8 | # gap intervals. It should be in the format:
9 | # SurveyIntervals = [ [YYYY-MM-DD, YYYY-MM-DD] , [YYYY-MM-DD, YYYY-MM-DD] ,
10 | # ... , [YYYY-MM-DD, YYYY-MM-DD] ]
11 | # We are interested in calculating this metric in each of the LSST passbands.
12 | # The difference between this metric and the VisitGapMetric metric is that
13 | # VisitGapMetric calculates reduceFunc of gaps between observations of a
14 | # data_slice throughout the whole
15 | # baseline, but IntervalsBetweenObsMetric calculates the gaps between
16 | # observations during another survey observing window.
17 | # This metric combined with surveys footprint
18 | # overlap can determine how many often another survey footprint is
19 | # observed by LSST during specific time intervals.
20 | __all__ = ("IntervalsBetweenObsMetric",)
21 |
22 | import numpy as np
23 | from astropy.time import Time
24 |
25 | from rubin_sim.maf.metrics import BaseMetric
26 |
27 |
class IntervalsBetweenObsMetric(BaseMetric):
    """Calculate a statistic (mean, median, or standard deviation) of the
    intervals between observations that fall within another survey's
    observing windows.

    Parameters
    ----------
    survey_intervals : `list` of [`str`, `str`]
        The other survey's observing windows, as [start, end] date pairs
        in 'YYYY-MM-DD' format.
    stat : `str`
        Statistic to compute: 'mean', 'median' or 'std'.
    metric_name : `str`, optional
        Name of the metric.
    time_col : `str`, optional
        Column containing the observation time (MJD).
    """

    def __init__(
        self,
        survey_intervals,
        stat,
        metric_name="IntervalsBetweenObsMetric",
        time_col="observationStartMJD",
        **kwargs,
    ):
        self.time_col = time_col
        self.metric_name = metric_name
        self.survey_intervals = survey_intervals
        self.stat = stat
        super().__init__(col=time_col, metric_name=metric_name, **kwargs)

    def run(self, data_slice, slice_point=None):
        data_slice.sort(order=self.time_col)
        times = data_slice[self.time_col]
        obs_diff = []

        for interval in self.survey_intervals:
            # Interval bounds are date strings; convert to MJD to compare.
            start_mjd = Time(interval[0] + " 00:00:00").mjd
            end_mjd = Time(interval[1] + " 00:00:00").mjd
            in_window = times[np.where((times > start_mjd) & (times < end_mjd))[0]]
            obs_diff.extend(np.diff(in_window).tolist())

        # Previously an unrecognized stat left `result` undefined, raising
        # an opaque UnboundLocalError; fail with an explicit error instead.
        stat_funcs = {"mean": np.mean, "median": np.median, "std": np.std}
        if self.stat not in stat_funcs:
            raise ValueError("Unrecognized stat '%s'; expected 'mean', 'median' or 'std'." % self.stat)
        return stat_funcs[self.stat](obs_diff)
69 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maf_contrib/lss_metrics.py:
--------------------------------------------------------------------------------
1 | __all__ = ("GalaxyCountsMetric",)
2 |
3 | import healpy as hp
4 | import numpy as np
5 | import scipy
6 |
7 | from rubin_sim.maf.metrics import BaseMetric, ExgalM5
8 |
9 |
class GalaxyCountsMetric(BaseMetric):
    """Estimate the number of galaxies expected at a particular
    (extragalactic) coadded depth.

    Parameters
    ----------
    m5_col : `str`, optional
        Column with the five-sigma limiting depth of each visit.
    nside : `int`, optional
        Healpix nside, used to scale the per-pixel galaxy counts.
    """

    def __init__(self, m5_col="fiveSigmaDepth", nside=128, metric_name="GalaxyCounts", **kwargs):
        self.m5_col = m5_col
        super().__init__(col=self.m5_col, metric_name=metric_name, **kwargs)
        # Extinction-corrected coadded depth, evaluated per slice point.
        self.coaddmetric = ExgalM5(m5_col=self.m5_col)
        # Total of 41253.0 galaxies across the sky (at what magnitude?).
        # NOTE: this normalization did not seem entirely right to the
        # original author either - treat the absolute scale with caution.
        self.scale = 41253.0 / hp.nside2npix(nside) / 5000.0
        # Report galaxy counts rather than magnitudes.
        self.units = "Galaxy Counts"

    def _gal_count(self, apparent_mag, coaddm5):
        # Argument order (apparent_mag first) is required by
        # scipy.integrate.quad, which integrates over the first argument.
        dn_gal = np.power(10.0, -3.52) * np.power(10.0, 0.34 * apparent_mag)
        completeness = 0.5 * scipy.special.erfc(apparent_mag - coaddm5)
        return dn_gal * completeness

    def run(self, data_slice, slice_point=None):
        # Coadded (extragalactic) depth for this slice.
        coaddm5 = self.coaddmetric.run(data_slice)
        # Galaxy number counts from Carroll et al, 2014 SPIE
        # (http://arxiv.org/abs/1501.04733). Proportional to the expected
        # number of galaxies, though the absolute calibration is only good
        # to roughly an order of magnitude.
        num_gal, _ = scipy.integrate.quad(self._gal_count, -np.inf, 32, args=coaddm5)
        return num_gal * self.scale
45 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maf_contrib/lss_obs_strategy/__init__.py:
--------------------------------------------------------------------------------
1 | from .constants_for_pipeline import *
2 | from .galaxy_counts_metric_extended import *
3 | from .galaxy_counts_with_pixel_calibration import *
4 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maf_contrib/lss_obs_strategy/constants_for_pipeline.py:
--------------------------------------------------------------------------------
1 | # Various things declared here to be imported when running
2 | # artificialStructureCalculation.
3 | # Makes the updates easier, since the constants/objects defined here
4 | # are used by different
5 | # functions, e.g. power law constants are called by
6 | # GalaxyCountsMetric_extended as well as
7 | # GalaxyCounts_withPixelCalibration.
8 | #
9 | # Included here:
10 | # * power law constants from the i-band mock catalog; based on mocks
11 | # from Padilla et al.
12 | # * normalization constant for galaxy counts from the mock catalogs.
13 | #
14 | # Humna Awan
15 | # humna.awan@rutgers.edu
16 |
17 | # Power law constants for each z-bin based on N. D. Padilla et al.'s
18 | # mock catalogs
19 | # General power law form: 10**(a*m+b)
20 | # Declare the dictionary for the power law constants
21 | from collections import OrderedDict
22 |
23 | power_law_const_a = OrderedDict()
24 | power_law_const_b = OrderedDict()
25 | # 0. start_interval.mjd)
39 | & (data_slice[self.time_col] < end_interval.mjd)
40 | )[0]
41 | n__obs = n__obs + np.size(index)
42 |
43 | return n__obs
44 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maf_contrib/periodic_metric.py:
--------------------------------------------------------------------------------
1 | # Example for PeriodicMetric
2 | # Mike Lund - Vanderbilt University
3 | # mike.lund@gmail.com
4 | # Last edited 3/10/2015
5 | # Motivation: The detection of periodic signals can be examined by
6 | # using canonical signals and attempted to recover these.
7 | # However, a more general approach would be to examine the strength in
8 | # signal that is lost as a result of poor phase coverage.
9 | # This metric calculates the spectral window function for a set of
10 | # scheduled observations. The largest peak at a nonzero frequency is
11 | # used as a proxy to quantify how much power is
12 | # lost to other frequencies. Full phase coverage will result in a value of 1.
13 | # We refer to this as the Periodic Purity Function.
14 |
15 | __all__ = ("PeriodicMetric",)
16 |
17 | import numpy as np
18 |
19 | from rubin_sim.maf.metrics import BaseMetric
20 |
21 |
class PeriodicMetric(BaseMetric):
    """From a set of observation times, uses code provided by Robert Siverd
    (LCOGT) to calculate the spectral window function.

    Returns a two-element array: the height of the second-largest peak of
    the window function (see ``reduce_peak``, which reports 1 - peak, the
    'periodic purity'), and the mean power away from the largest peak
    (see ``reduce_sum``).
    """

    def __init__(self, time_col="expMJD", **kwargs):
        self.time_col = time_col
        super().__init__(col=[self.time_col], **kwargs)

    def run(self, data_slice, slice_point=None):
        frq_pts = 30000.0
        max_frq = 25.0
        times = data_slice[self.time_col]
        times = times - times[0]  # change times to smaller numbers
        use_jd = np.array(times)
        window_frq = np.arange(frq_pts) * max_frq / frq_pts
        window_val = np.zeros_like(window_frq, dtype="float")
        for x, frq in enumerate(window_frq):
            window_val[x] = np.sum(np.cos(-2.0 * np.pi * frq * use_jd))
        # np.float was removed in NumPy 1.24; use the builtin float instead.
        window_val /= float(use_jd.size)
        secondpeak = np.sort(window_val)[-2]
        totalsum = (np.sum(window_val) - np.sort(window_val)[-1]) / (frq_pts - 1)
        data = np.asarray([secondpeak, totalsum])
        return data

    def reduce_peak(self, data):
        """Periodic purity: 1 - (second-highest window function value)."""
        return 1.0 - data[0]

    def reduce_sum(self, data):
        """Mean window-function power away from the largest peak."""
        return data[1]
52 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maf_contrib/star_count_mass_metric.py:
--------------------------------------------------------------------------------
1 | __all__ = ("StarCountMassMetric",)
2 |
3 | import numpy as np
4 |
5 | from rubin_sim.maf.metrics import BaseMetric
6 |
7 | from .star_counts import starcount_bymass
8 |
9 | # Example for CountMassMetric
10 | # Mike Lund - Vanderbilt University
11 | # mike.lund@gmail.com
12 | # Last edited 8/15/2015
13 | # Motivation: The distances to stars in LSST will be significant enough that
14 | # the structure of the Galaxy will be readily apparent because of its
15 | # influence on the number of stars in a given field.
16 | # Any metric concerned with the number of potential objects to be detected
17 | # will need to feature not only the effects of the cadence but also the
18 | # number of objects per field.
19 | # This metric identifies the number of stars in a given field in a particular
20 | # mass range that will be fainter than the saturation limit of 16th magnitude
21 | # and still bright enough to have noise less than 0.03 mag.
22 | # M1 and M2 are the low and high limits of the mass range in solar masses.
23 | # 'band' is the band for the observations to be made in.
24 | # Requires StarCounts.StarCounts
25 |
26 | # NOTE
27 | # There are stellar luminosity function maps available within MAF
28 | # that may supersede these StarCount functions
29 |
30 |
class StarCountMassMetric(BaseMetric):
    """Find the number of stars in a given field, within the mass range
    [m1, m2], that are fainter than magnitude 16 and bright enough to have
    photometric noise below 0.03 mag in the chosen band.

    This metric uses the stellar distance and luminosity equations
    contributed by Mike Lund, which are based on the Galfast model.
    The expected magnitudes of the included stars are limited by assuming
    saturation at 16th magnitude and excluding stars with magnitude
    uncertainties above 0.03 (based on photometry/m5 alone).

    Parameters
    ----------
    m1 : `float`
        Lower limit of the mass range (solar masses).
    m2 : `float`
        Upper limit of the mass range (solar masses).
    band : `str`
        Bandpass to consider.
    """

    def __init__(self, m1=0.9, m2=1.0, band="i", **kwargs):
        self.m1 = m1
        self.m2 = m2
        self.band = band
        super().__init__(col=[], **kwargs)

    def run(self, data_slice, slice_point=None):
        # Field center comes from the slicer (radians); convert to degrees.
        self.ra_col = np.degrees(data_slice[0][2])
        self.dec_col = np.degrees(data_slice[0][3])
        return starcount_bymass.starcount_bymass(self.ra_col, self.dec_col, self.m1, self.m2, self.band)
65 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maf_contrib/star_count_metric.py:
--------------------------------------------------------------------------------
1 | __all__ = ("StarCountMetric",)
2 |
3 | import numpy as np
4 |
5 | from rubin_sim.maf.metrics import BaseMetric
6 |
7 | from .star_counts import starcount
8 |
9 | # Example for CountMetric
10 | # Mike Lund - Vanderbilt University
11 | # mike.lund@gmail.com
12 | # Last edited 8/15/2015
13 | # Motivation: The distances to stars in LSST will be significant enough
14 | # that the structure of the Galaxy will be readily apparent because of
15 | # its influence on the number of stars in a given field.
16 | # Any metric concerned with the number of potential objects to be
17 | # detected will need to feature not only the effects of the cadence
18 | # but also the number of objects per field.
19 | # This metric identifies the number of stars in a given field in a
20 | # particular distance range.
21 | # D1 and D2 are the close and far distances in parsecs.
22 | # Requires StarCounts.StarCounts
23 |
24 | # NOTE
25 | # There are stellar luminosity function maps available within MAF
26 | # that may supersede these StarCount functions
27 |
28 |
class StarCountMetric(BaseMetric):
    """Find the number of stars in a given field between d1 and d2 in parsecs.

    This metric uses the stellar distance and luminosity equations
    contributed by Mike Lund, which are based on the Galfast model.
    There are some imposed limitations on the expected magnitudes
    of the stars included for the metric, based on assuming saturation
    at 16th magnitude and not considering stars with magnitude
    uncertainties greater than 0.03 (based on photometry/m5 alone).


    Parameters
    ----------
    d1 : `float`
        d1 in parsecs
    d2 : `float`
        d2 in parsecs
    """

    def __init__(self, d1=100, d2=1000, **kwargs):
        self.d1 = d1
        self.d2 = d2
        super().__init__(col=[], **kwargs)

    def run(self, data_slice, slice_point=None):
        self.dec_col = np.degrees(data_slice[0][3])
        self.ra_col = np.degrees(data_slice[0][2])
        # `starcount` (from `from .star_counts import starcount`) is the
        # *submodule*, not the function - the star_counts package __init__
        # defines only __all__ and imports nothing, so calling the imported
        # name raised "TypeError: 'module' object is not callable".
        # Call the function inside the submodule, as StarCountMassMetric does.
        return starcount.starcount(self.ra_col, self.dec_col, self.d1, self.d2)
57 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maf_contrib/star_counts/__init__.py:
--------------------------------------------------------------------------------
# Names of the submodules in this package. Note the package imports
# nothing itself: `from .star_counts import starcount` therefore binds the
# *submodule*, whose functions are called as e.g. `starcount.starcount(...)`.
__all__ = (
    "abs_mag",
    "coords",
    "spec_type",
    "starcount",
    "starcount_bymass",
    "stellardensity",
)
9 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maf_contrib/star_counts/readme.txt:
--------------------------------------------------------------------------------
The code included in the star_counts package is required for use with the field star count metrics (StarCountMassMetric and StarCountMetric)
2 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maf_contrib/star_counts/spec_type.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | # Mike Lund - Vanderbilt University
4 | # mike.lund@gmail.com
5 | # Last edited 8/15/2015
6 | # Description: Calculates spectral types for stars on the main sequence as
7 | # a function of stellar mass. For use with Field Star Count metric
8 | import sys
9 |
10 | import numpy as np
11 | from scipy.interpolate import interp1d
12 |
13 |
def extrap1d(interpolator):
    """Wrap a scipy interp1d so that out-of-range inputs clamp to the
    endpoint values instead of raising.

    Parameters
    ----------
    interpolator : `scipy.interpolate.interp1d`
        The interpolator to wrap.

    Returns
    -------
    callable
        Function mapping an array-like of x values to an ndarray of
        (clamped) interpolated values.
    """
    x_nodes = interpolator.x
    y_nodes = interpolator.y

    def _eval_one(x):
        # Hold the first/last tabulated value outside the domain.
        if x < x_nodes[0]:
            return y_nodes[0]
        if x > x_nodes[-1]:
            return y_nodes[-1]
        return interpolator(x)

    def _eval_many(xs):
        return np.array([_eval_one(x) for x in np.array(xs)])

    return _eval_many
30 |
31 |
def spec_type(mass):
    """Return the numeric spectral type of a main-sequence star of the
    given mass (solar masses).

    Spectral types are encoded as numbers, linearly interpolated between
    tabulated mass nodes and clamped to the endpoint values outside the
    tabulated range.

    Calling with mass == 0 displays a diagnostic plot of the relation.

    Returns
    -------
    `numpy.ndarray`
        One-element array holding the spectral type.
    """
    mass_nodes = [
        0.06,
        0.21,
        0.40,
        0.51,
        0.67,
        0.79,
        0.92,
        1.05,
        1.4,
        1.6,
        2.0,
        2.9,
        3.8,
        5.9,
        7.6,
        17.5,
        23,
        37,
        60,
        120,
    ]
    spec_nodes = [
        68,
        65,
        62,
        60,
        55,
        50,
        45,
        40,
        35,
        30,
        25,
        20,
        18,
        15,
        13,
        10,
        8,
        6,
        5,
        3,
    ]
    base_interp = interp1d(mass_nodes, spec_nodes)
    clamped_interp = extrap1d(base_interp)
    if mass == 0:
        # Diagnostic plot of the mass -> spectral type relation.
        xnew = np.logspace(-2, 3, 100)
        import matplotlib.pyplot as plt

        plt.plot(mass_nodes, spec_nodes, "o", xnew, clamped_interp(xnew), "-")
        plt.xscale("log")
        plt.show()
    return clamped_interp([mass])
88 |
89 |
if __name__ == "__main__":
    # CLI usage: spec_type.py <mass in solar masses>
    print(spec_type(float(sys.argv[1])))  # mass
92 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maf_contrib/star_counts/starcount.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | # Mike Lund - Vanderbilt University
4 | # mike.lund@gmail.com
5 | # Last edited 8/15/2015
6 | # Description: Calculates the number of stars in a given direction and
7 | # between a given set of distances. For use with Field Star Count metric
8 |
9 | import numpy as np
10 |
11 | from . import coords, stellardensity
12 |
# Full sky area in square degrees, and the number of radial shell edges.
skyarea = 41253.0
distancebins = 51


def star_vols(d1, d2, area):
    """Split the cone between distances d1 and d2 (parsecs) into shells of
    equal volume, scaled to a field of view of `area` square degrees.

    Returns
    -------
    volumeshell : `numpy.ndarray`
        Volume of each shell, scaled to the field of view.
    distances : `numpy.ndarray`
        Volume-weighted characteristic distance of each shell.
    """
    # Linear spacing in d**3 gives shells of equal volume.
    edges = (np.linspace((d1**3.0), (d2**3.0), num=distancebins)) ** (1.0 / 3)
    shell_vols = (area / skyarea) * (4.0 * np.pi / 3) * (edges[1:] ** 3 - edges[:-1] ** 3)
    mid_distances = ((edges[1:] ** 3 + edges[:-1] ** 3) / 2.0) ** (1.0 / 3)
    return shell_vols, mid_distances
22 |
23 |
def starcount(eq_ra, eq_dec, d1, d2):
    """Estimate the number of stars between distances d1 and d2 (parsecs)
    toward the given equatorial coordinates (degrees).

    Integrates the Galactic stellar density model over equal-volume
    shells; the field-of-view area of 9.62 square degrees is hard-coded.
    """
    shell_vols, shell_dists = star_vols(d1, d2, 9.62)
    # Convert the line of sight to galactic coordinates, then to galactic
    # cylindrical positions at each shell distance.
    b_deg, l_deg = coords.eq_gal3(eq_ra, eq_dec)
    cyl_positions = [coords.gal_cyn(b_deg, l_deg, dist) for dist in shell_dists]
    shell_density = [stellardensity.stellardensity(pos[0], pos[2]) for pos in cyl_positions]
    return np.sum(np.asarray(shell_vols) * np.asarray(shell_density))
31 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maf_contrib/star_counts/starcount_bymass.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | # Mike Lund - Vanderbilt University
4 | # mike.lund@gmail.com
5 | # Last edited 8/15/2015
6 | # Description: Takes a given set of galactic coordinates and a stellar
7 | # mass range, then calculates the number of stars within that range
8 | # that will be fainter than mag 16, and have sufficiently low noise
9 | # in the given band. For use with Field Star Count metric
10 |
11 |
12 | import numpy as np
13 | from scipy.optimize import newton
14 |
15 | from . import abs_mag, spec_type
16 | from .starcount import starcount
17 |
# Salpeter-like initial mass function constants: dN/dm ~ xi * m**-alpha.
xi = 1.0
alpha = 2.35


def IMF(lower, upper):
    """Integrate the initial mass function between two masses (solar
    masses), giving the relative number of stars in that mass range.
    """
    power = alpha - 1.0
    return (xi / power) * (lower**-power - upper**-power)
28 |
29 |
def get_distance(apparent, absolute):
    """Distance in parsecs from the distance modulus
    (apparent - absolute magnitude)."""
    modulus = (apparent - absolute) / 5.0
    return 10 * 10**modulus
34 |
35 |
def noise_opt(m, band, sigma):
    """Difference between the squared magnitude error at magnitude `m` in
    `band` and the target variance sigma**2; zero where the photometric
    error equals sigma, so usable as a root-finding objective."""
    model_variance = mag_error(m, band) ** 2.0
    return model_variance - sigma**2.0
42 |
43 |
def mag_error(ap_mag, band, calcm5=0):  # apparent magnitude and band
    """Photometric magnitude uncertainty at apparent magnitude `ap_mag` in
    `band`, using the standard LSST error model (random error plus a
    0.005 mag systematic floor).

    `calcm5`, when nonzero, overrides the default single-visit m5 depth.
    """
    gamma = {"u": 0.037, "g": 0.038, "r": 0.039, "i": 0.039, "z": 0.040, "y": 0.040}
    m5 = {"u": 23.9, "g": 25.0, "r": 24.7, "i": 24.0, "z": 23.3, "y": 22.1}
    if calcm5 == 0:
        calcm5 = m5[band]
    flux_ratio = 10.0 ** (0.4 * (ap_mag - calcm5))
    random_err = np.sqrt((0.04 - gamma[band]) * flux_ratio + gamma[band] * flux_ratio * flux_ratio)
    total_var = random_err**2.0 + (0.005) ** 2.0
    return np.sqrt(total_var)
54 |
55 |
def noise_calc(band):
    """Solve for the apparent magnitude in `band` at which the total
    photometric error (random + systematic) reaches sigma = 0.03 mag.

    Returns
    -------
    `float`
        The apparent magnitude where the error model crosses 0.03 mag.
    """
    gamma = {"u": 0.037, "g": 0.038, "r": 0.039, "i": 0.039, "z": 0.040, "y": 0.040}
    m5 = {"u": 23.9, "g": 25.0, "r": 24.7, "i": 24.0, "z": 23.3, "y": 22.1}
    sigma = 0.03
    sigma_sys = 0.005

    def fun(x):
        # Total variance minus the target variance; zero at the solution.
        # BUG FIX: this body used to be four separate expression statements
        # with no `return`, so `fun` returned None and newton() failed.
        return (
            sigma_sys**2
            + (0.04 - gamma[band]) * 10 ** (0.4 * (x - m5[band]))
            + gamma[band] * 10 ** (0.8 * (x - m5[band]))
            - sigma**2
        )

    return newton(fun, 25)
69 |
70 |
def dist_calc(mass, band):
    """Distance range (parsecs) over which a star of the given mass is
    usable in `band`: the near limit is set by saturation at magnitude 16,
    the far limit by the magnitude where photometric noise reaches 0.03.
    """
    band_order = ["z", "y", "i", "r", "g", "u"]
    # mass -> spectral type -> absolute magnitude in the requested band
    abs_mags = abs_mag.abs_mag(spec_type.spec_type(mass))[0]
    absolutemag = abs_mags[band_order.index(band)][0]
    faint_limit = noise_calc(band)
    dist_min = get_distance(16, absolutemag)
    dist_max = get_distance(faint_limit, absolutemag)
    # abs mag to apparent mag ranges: > 16, noise-dependent upper limit
    return dist_min, dist_max
83 |
84 |
def starcount_bymass(eq_ra, eq_dec, m1, m2, band):
    """Estimate the number of stars with masses between m1 and m2 (solar
    masses) observable in `band` toward the given equatorial coordinates.

    The IMF is integrated over sub-bins of the mass range; each bin's
    number fraction is normalized against the full 0.2-1.04 solar-mass
    range, then multiplied by the star count over the distance range in
    which such stars are detectable.
    """
    masses = np.linspace(m1, m2, num=20)
    # Normalize against the full mass range considered by this model.
    # (A dead `totmass = IMF(m1, m2)` assignment, immediately overwritten
    # by this line, was removed.)
    totmass = IMF(0.2, 1.04)
    massbins = IMF(masses[:-1], masses[1:])
    massfractions = massbins / totmass
    distances = [dist_calc(x, band) for x in masses[:-1]]
    starcounts = [y * starcount(eq_ra, eq_dec, x[0], x[1]) for x, y in zip(distances, massfractions)]
    return sum(starcounts)
94 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maf_contrib/star_counts/stellardensity.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | # Mike Lund - Vanderbilt University
4 | # mike.lund@gmail.com
5 | # Last edited 8/15/2015
6 | # Description: Calculates the stellar density based off of
7 | # Juric et al 2008 and Jackson et al 2002. For use with Field Star Count metric
8 |
9 | import numpy as np
10 |
# Solar position and normalization constants (Juric et al 2008): height of
# the Sun above the plane (pc), galactocentric radius of the Sun (pc), local
# stellar density (stars/pc^3), and the thick-disk normalization fraction.
zsun = 25.0
rsun = 8000.0
density_rsun = 0.0364
f = 0.1


def diskprofile(R, Z, L, H):
    """Exponential disk density at galactocentric radius R and height Z
    (parsecs), with scale length L and scale height H, normalized so the
    solar-position density equals density_rsun."""
    exponent = (-R / L) - (abs(Z + zsun) / H)
    return density_rsun * np.exp(exponent) * np.exp(rsun / L)
23 |
24 |
def thindisk(R, Z):
    """Thin-disk density: scale length 2150 pc, scale height 245 pc."""
    return diskprofile(R, Z, L=2150.0, H=245.0)
27 |
28 |
def thickdisk(R, Z):
    """Thick-disk density: scale length 3261 pc, scale height 743 pc."""
    return diskprofile(R, Z, L=3261.0, H=743.0)
31 |
32 |
def bulge(R, Z):
    """Simple bulge density: twice the central (R=0, Z=0) disk density,
    falling off exponentially with spherical radius (scale 800 pc)."""
    central = 2 * (diskprofile(0, 0, 2150.0, 245.0) + diskprofile(0, 0, 3261.0, 743.0))
    radius = (R**2 + Z**2) ** 0.5
    return central * np.exp(-radius / 800)
38 |
39 |
def halo(R, Z):
    """Power-law halo density with flattening q_h, index n_h, and local
    normalization fraction f_h (Juric et al 2008)."""
    q_h = 0.64
    n_h = 2.77
    f_h = 0.001
    flattened_r2 = R**2.0 + (Z / q_h) ** 2.0
    ratio = rsun / np.power(flattened_r2, 0.5)
    return density_rsun * f_h * np.power(ratio, n_h)
49 |
50 |
def stellardensity(R, Z, rho=0):
    """Total stellar density (stars/pc^3) at galactocentric cylindrical
    coordinates (R, Z) in parsecs: thin plus thick disk, weighted by the
    thick-disk fraction f, plus the halo and bulge components.

    `rho` is unused; kept for interface compatibility.
    """
    thin = thindisk(R, Z)
    thick = thickdisk(R, Z)
    return thin / 1.1 + f / 1.1 * thick + halo(R, Z) + bulge(R, Z)
56 |
57 |
58 | # Juric et al 2008
59 | # Jackson et al 2002
60 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maps/__init__.py:
--------------------------------------------------------------------------------
1 | from .base_map import *
2 | from .dust_map import *
3 | from .dust_map_3d import *
4 | from .ebv_3d_hp import *
5 | from .gal_coords_map import *
6 | from .galactic_plane_priority_maps import *
7 | from .stellar_density_map import *
8 | from .trilegal_map import *
9 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maps/base_map.py:
--------------------------------------------------------------------------------
1 | __all__ = ("MapsRegistry", "BaseMap")
2 |
3 | import inspect
4 |
5 |
class MapsRegistry(type):
    """
    Meta class for Maps, to build a registry of maps classes.

    Every class created with this metaclass is recorded in a registry
    shared via the class hierarchy, keyed by its (possibly
    module-qualified) class name.
    """

    def __init__(cls, name, bases, class_dict):
        super().__init__(name, bases, class_dict)
        # The first class created installs the shared registry dict.
        if not hasattr(cls, "registry"):
            cls.registry = {}
        modname = inspect.getmodule(cls).__name__
        # Maps defined inside rubin_sim.maf.maps register under their bare
        # class name; maps from elsewhere keep a module prefix so names
        # cannot collide.
        if modname.startswith("rubin_sim.maf.maps"):
            modname = ""
        elif len(modname.split(".")) > 1:
            modname = ".".join(modname.split(".")[:-1]) + "."
        else:
            modname = modname + "."
        mapsname = modname + name
        if mapsname in cls.registry:
            raise Exception("Redefining maps %s! (there are >1 maps with the same name)" % (mapsname))
        if mapsname != "BaseMaps":
            cls.registry[mapsname] = cls

    def get_class(cls, mapsname):
        """Return the registered maps class for `mapsname`."""
        return cls.registry[mapsname]

    def help(cls, doc=False):
        """Print all registered maps names; with doc=True also print each
        docstring and the slice_point keys it adds."""
        for mapsname in sorted(cls.registry):
            if not doc:
                print(mapsname)
            if doc:
                print("---- ", mapsname, " ----")
                print(cls.registry[mapsname].__doc__)
                maps = cls.registry[mapsname]()
                print(" added to slice_point: ", ",".join(maps.keynames))
41 |
42 |
class BaseMap(metaclass=MapsRegistry):
    """Base for maps classes.

    Maps add metadata (listed in ``keynames``) to each slice_point.
    All rich comparisons delegate to the ``keynames`` list, so maps can
    be sorted and compared by the keys they provide.
    """

    def __init__(self, **kwargs):
        self.keynames = ["newkey"]

    def __eq__(self, othermap):
        return self.keynames == othermap.keynames

    def __ne__(self, othermap):
        return not self.__eq__(othermap)

    def __lt__(self, othermap):
        return self.keynames < othermap.keynames

    def __gt__(self, othermap):
        return othermap.keynames < self.keynames

    def __le__(self, othermap):
        return self.keynames <= othermap.keynames

    def __ge__(self, othermap):
        return othermap.keynames <= self.keynames

    def run(self, slice_points):
        """Add this map's metadata to each slice_point.

        Given slice_points (dict containing metadata about each
        slice_point, including ra/dec), adds additional metadata at each
        slice_point and returns the updated dict.
        """
        raise NotImplementedError("This must be defined in subclass")
73 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maps/dust_map.py:
--------------------------------------------------------------------------------
1 | __all__ = ("DustMap",)
2 |
3 | import warnings
4 |
5 | from rubin_sim.maf.maps import BaseMap
6 |
7 | from .ebv_hp import eb_vhp
8 |
9 |
class DustMap(BaseMap):
    """Add the E(B-V) values to the slice points.

    Calls eb_vhp to read a healpix map of E(B-V) values over the sky and
    attaches an `ebv` value to each slice_point.
    For a healpix slicer the lookup is a direct pixel index; otherwise
    either the nearest healpix gridpoint is used or the map is
    interpolated, depending on `interp`.

    The key added to the slice points is `ebv`.

    Parameters
    ----------
    interp : `bool`, opt
        Interpolate the dust map at each slice_point (True)
        or just use the nearest value (False).
        Default is False.
    nside : `int`, opt
        Default nside value to read the dust map from disk.
        Primarily useful if the slicer is not a healpix slicer.
        Default 128.
    map_path : `str`, opt
        Define a path to the directory holding the dust map files.
        Default None, which uses RUBIN_SIM_DATA_DIR.
    """

    def __init__(self, interp=False, nside=128, map_path=None):
        self.keynames = ["ebv"]
        self.interp = interp
        self.nside = nside
        self.map_path = map_path

    def run(self, slice_points):
        # A slicer that provides 'nside' is a healpix slicer, so the map
        # pixels can be indexed directly by slice_point id.
        if "nside" in slice_points:
            if slice_points["nside"] != self.nside:
                warnings.warn(
                    f"Slicer value of nside {slice_points['nside']} different "
                    f"from map value {self.nside}, using slicer value"
                )
            ebv = eb_vhp(
                slice_points["nside"],
                pixels=slice_points["sid"],
                map_path=self.map_path,
            )
        else:
            # Not a healpix slicer: look up by RA/Dec, with optional
            # interpolation between healpix gridpoints.
            ebv = eb_vhp(
                self.nside,
                ra=slice_points["ra"],
                dec=slice_points["dec"],
                interp=self.interp,
                map_path=self.map_path,
            )
        slice_points["ebv"] = ebv
        return slice_points
67 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maps/ebv_hp.py:
--------------------------------------------------------------------------------
1 | __all__ = ("eb_vhp",)
2 |
3 | import os
4 |
5 | import healpy as hp
6 | import numpy as np
7 | from rubin_scheduler.data import get_data_dir
8 |
9 | from rubin_sim.maf.utils import radec2pix
10 |
11 |
def eb_vhp(nside, ra=None, dec=None, pixels=None, interp=False, map_path=None):
    """Read in a healpix dust map and return values for given RA, Dec values.

    This is primarily a tool for the rubin_sim.maf.DustMap class.

    Parameters
    ----------
    nside : `int`
        Healpixel resolution (2^x).
    ra : `np.ndarray` or `float`, opt
        RA (can take numpy array).
        Default None sets up healpix array of nside. Radians.
    dec : `np.ndarray` or `float`, opt
        Dec (can take numpy array).
        Default None set up healpix array of nside. Radians.
    pixels : `np.ndarray`, opt
        Healpixel IDs, to sub-select particular healpix points.
        Default uses all points.
        NOTE - to use a healpix map, set pixels and not ra/dec.
    interp : `bool`, opt
        Should returned values be interpolated (True)
        or just nearest neighbor (False)
    map_path : `str`, opt
        Path to directory containing dust map files.

    Returns
    -------
    result : `np.ndarray`
        E(B-V) values at the requested pixels or positions.
    """
    if (ra is None) and (dec is None) and (pixels is None):
        raise RuntimeError("Need to set ra,dec or pixels.")

    # Locate the dust map files on disk.
    if map_path is not None:
        ebv_data_dir = map_path
    else:
        ebv_data_dir = os.path.join(get_data_dir(), "maps", "DustMaps")

    # The loaded map is cached as function attributes, so repeated calls
    # at the same nside don't re-read the file from disk.
    if not hasattr(eb_vhp, "nside"):
        eb_vhp.nside = nside

    # Bug fix: this previously tested `hasattr(eb_vhp, "dustmap")` (wrong
    # case) while the attribute is stored as `dustMap`, so the map was
    # re-loaded from disk on *every* call and the cache never took effect.
    # NOTE(review): the cache is keyed only on nside; changing map_path
    # alone does not trigger a re-load.
    if (not hasattr(eb_vhp, "dustMap")) or (eb_vhp.nside != nside):
        eb_vhp.nside = nside
        filename = "dust_nside_%i.npz" % eb_vhp.nside
        eb_vhp.dustMap = np.load(os.path.join(ebv_data_dir, filename))["ebvMap"]

    # If we are interpolating to arbitrary positions
    if interp:
        result = hp.get_interp_val(eb_vhp.dustMap, np.pi / 2.0 - dec, ra)
    else:
        # If we know the pixel indices we want
        if pixels is not None:
            result = eb_vhp.dustMap[pixels]
        # Nearest-neighbor look up from RA/Dec.
        else:
            pixels = radec2pix(eb_vhp.nside, ra, dec)
            result = eb_vhp.dustMap[pixels]

    return result
65 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maps/gal_coords_map.py:
--------------------------------------------------------------------------------
1 | __all__ = ("GalCoordsMap",)
2 |
3 | from astropy import units as u
4 | from astropy.coordinates import SkyCoord
5 |
6 | from rubin_sim.maf.maps import BaseMap
7 |
8 |
class GalCoordsMap(BaseMap):
    """Add `gall` and `galb` (in radians) to the slice point dictionaries."""

    def __init__(self):
        self.keynames = ["gall", "galb"]

    def run(self, slice_points):
        # Slice point ra/dec are in radians; convert to galactic l/b.
        icrs = SkyCoord(ra=slice_points["ra"] * u.rad, dec=slice_points["dec"] * u.rad)
        galactic = icrs.galactic
        slice_points["gall"] = galactic.l.rad
        slice_points["galb"] = galactic.b.rad
        return slice_points
23 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maps/stellar_density_map.py:
--------------------------------------------------------------------------------
1 | __all__ = ("StellarDensityMap",)
2 |
3 | import os
4 |
5 | import healpy as hp
6 | import numpy as np
7 | from rubin_scheduler.data import get_data_dir
8 |
9 | from rubin_sim.maf.utils import radec2pix
10 |
11 | from . import BaseMap
12 |
13 |
class StellarDensityMap(BaseMap):
    """Read and hold the cumulative stellar luminosity function for
    each slice point.

    The underlying stellar luminosity function map is nside = 64, and contains
    stars per sq degree at a series of magnitudes (the map contains
    `starLumFunc_` and `starMapBins_`).
    For slice points which do not match nside=64, the map uses the nearest
    healpix point on the nside=64 grid.

    The stellar luminosity function comes from the GalFast model.

    Parameters
    ----------
    startype : `str` ('allstars', 'wdstars')
        Load the luminosity function for all stars ('allstars'),
        which includes main-sequence stars,
        white dwarfs, blue horizontal branch, RR Lyrae, and Cepheids.
        The 'wdstars' option only includes white dwarf stars.
    filtername : `str`
        Filter to use. Options of u,g,r,i,z,y
    map_dir : `str`, opt
        Directory holding the star map files.
        Default None, which uses RUBIN_SIM_DATA_DIR.
    """

    def __init__(self, startype="allstars", filtername="r", map_dir=None):
        if map_dir is None:
            self.map_dir = os.path.join(get_data_dir(), "maps", "StarMaps")
        else:
            self.map_dir = map_dir
        self.filtername = filtername
        self.keynames = [
            f"starLumFunc_{self.filtername}",
            f"starMapBins_{self.filtername}",
        ]
        # The 'allstars' files carry no type tag in the filename.
        self.startype = "" if startype == "allstars" else startype + "_"

    def _read_map(self):
        # Load the starDensity npz file; cache the density map, the
        # magnitude bins, and the map's native nside.
        filename = "starDensity_%s_%snside_64.npz" % (self.filtername, self.startype)
        star_map = np.load(os.path.join(self.map_dir, filename))
        self.star_map = star_map["starDensity"].copy()
        self.star_map_bins = star_map["bins"].copy()
        self.starmap_nside = hp.npix2nside(np.size(self.star_map[:, 0]))

    def run(self, slice_points):
        self._read_map()

        lum_key = f"starLumFunc_{self.filtername}"
        if "nside" in slice_points and slice_points["nside"] == self.starmap_nside:
            # Healpix slicer on the map's native grid - use it directly.
            slice_points[lum_key] = self.star_map
        else:
            # Otherwise match each slice_point to the nearest healpix
            # point on the native grid.
            indx = radec2pix(self.starmap_nside, slice_points["ra"], slice_points["dec"])
            slice_points[lum_key] = self.star_map[indx, :]

        slice_points[f"starMapBins_{self.filtername}"] = self.star_map_bins
        return slice_points
74 |
--------------------------------------------------------------------------------
/rubin_sim/maf/maps/trilegal_map.py:
--------------------------------------------------------------------------------
1 | __all__ = ("TrilegalDensityMap",)
2 |
3 | import os
4 |
5 | import healpy as hp
6 | import numpy as np
7 | from astropy import units as u
8 | from astropy.coordinates import SkyCoord
9 | from rubin_scheduler.data import get_data_dir
10 | from rubin_scheduler.utils import _build_tree, _hpid2_ra_dec, _xyz_from_ra_dec
11 |
12 | from . import BaseMap
13 |
14 |
class TrilegalDensityMap(BaseMap):
    """Read and hold the cumulative stellar luminosity function for
    each slice point.

    The stellar luminosity function comes from the TRILEGAL model.

    Parameters
    ----------
    filtername : `str`, opt
        Filter to use. Options of u,g,r,i,z,y. Default r.
    nside : `int`, opt
        The HEALpix nside (can be 64 or 128). Default 64.
    ext : `bool`, opt
        Use the full sky maps. Default True.

    Notes
    -----
    The underlying stellar luminosity function map is available in a
    variety of nsides, and contains
    stars per sq degree at a series of magnitudes (the map contains
    `starLumFunc_` and `starMapBins_`).
    For slice points which do not match one of the native nside options,
    the map uses the nearest healpix point on the specified nside grid.
    """

    def __init__(self, filtername="r", nside=64, ext=True):
        # Directory holding the TRILEGAL maps, under RUBIN_SIM_DATA_DIR.
        self.map_dir = os.path.join(get_data_dir(), "maps", "TriMaps")
        self.filtername = filtername
        self.keynames = [
            f"starLumFunc_{self.filtername}",
            f"starMapBins_{self.filtername}",
        ]
        self.nside = nside
        self.ext = ext

    def _read_map(self):
        """Load the TRILEGAL density map and build a KD-tree of the map's
        pixel positions (in RA/Dec) for nearest-neighbor lookups.
        """
        if self.ext:
            filename = "TRIstarDensity_%s_nside_%i_ext.npz" % (
                self.filtername,
                self.nside,
            )
        else:
            filename = "TRIstarDensity_%s_nside_%i.npz" % (self.filtername, self.nside)
        star_map = np.load(os.path.join(self.map_dir, filename))
        self.star_map = star_map["starDensity"].copy()
        self.star_map_bins = star_map["bins"].copy()
        # Infer the map's native nside from its number of healpix rows.
        self.starmap_nside = hp.npix2nside(np.size(self.star_map[:, 0]))
        # note, the trilegal maps are in galactic coordinates
        # and use nested healpix.
        gal_l, gal_b = _hpid2_ra_dec(self.nside, np.arange(hp.nside2npix(self.nside)), nest=True)

        # Convert that to RA,dec. Then do nearest neighbor lookup.
        c = SkyCoord(l=gal_l * u.rad, b=gal_b * u.rad, frame="galactic").transform_to("icrs")
        ra = c.ra.rad
        dec = c.dec.rad

        self.tree = _build_tree(ra, dec)

    def run(self, slice_points):
        """Attach the stellar luminosity function to each slice_point,
        matching positions to map pixels via nearest-neighbor on the
        unit sphere.
        """
        self._read_map()

        x, y, z = _xyz_from_ra_dec(slice_points["ra"], slice_points["dec"])

        dist, indices = self.tree.query(list(zip(x, y, z)))

        slice_points["starLumFunc_%s" % self.filtername] = self.star_map[indices, :]
        slice_points["starMapBins_%s" % self.filtername] = self.star_map_bins
        return slice_points
83 |
--------------------------------------------------------------------------------
/rubin_sim/maf/metadata_dir.py:
--------------------------------------------------------------------------------
1 | from . import batches as batches
2 | from .db import ResultsDb
3 | from .metric_bundles import MetricBundleGroup
4 | from .slicers import HealpixSlicer, make_wfd_subset_slicer
5 |
6 | __all__ = ("metadata_dir",)
7 |
8 | import argparse
9 | import glob
10 | import os
11 | import shutil
12 |
13 | import matplotlib
14 |
15 | matplotlib.use("Agg")
16 |
17 |
def metadata_dir():
    """
    Run the metadata batch on all .db files in a directory.

    Command-line driver: for each opsim .db file (either the single file
    passed via --db, or every .db found in the current directory), runs
    the info metric batch and writes outputs to `<sim_name>_meta/`.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--db", type=str, default=None)
    parser.add_argument(
        "--nside",
        type=float,
        default=64,
        help="nside to use for the healpix slicer and subsetslicer for metrics.",
    )
    parser.add_argument(
        "--wfd_threshold",
        type=float,
        default=750,
        help="Threshold number of visits per pointing to use to define the WFD footprint."
        "Default value of 750 corresponds to the minimum median value per pointing from the SRD.",
    )
    parser.add_argument(
        "--no_clobber",
        dest="no_clobber",
        action="store_true",
        default=False,
        help="Do not remove existing directory outputs",
    )
    args = parser.parse_args()

    # Bug fix: healpix nside must be an integer for the slicers.
    # The argument is still parsed as float so existing command lines
    # (e.g. "--nside 64.0") keep working; cast once here.
    nside = int(args.nside)

    # If --db not given, scan for sim_name databases in current
    # directory and use those.
    # Note that db can be a full path to the database.
    if args.db is None:
        # Just look for any .db files in this directory
        db_files = glob.glob("*.db")
        # But remove trackingDb and results_db if they're there
        for reserved in ("trackingDb_sqlite.db", "resultsDb_sqlite.db"):
            try:
                db_files.remove(reserved)
            except ValueError:
                pass
    elif isinstance(args.db, str):
        db_files = [args.db]
    else:
        db_files = args.db

    # Run names are the database file names, without path or extension.
    sim_names = [os.path.basename(name).replace(".db", "") for name in db_files]

    for filename, sim_name in zip(db_files, sim_names):
        # Connect to the database
        colmap = batches.col_map_dict()

        # Set and create if needed the output directory
        out_dir = sim_name + "_meta"
        if not args.no_clobber:
            if os.path.isdir(out_dir):
                shutil.rmtree(out_dir)

        # Find the 'wfd' footprint - use the scheduler footprint.
        allsky_slicer = HealpixSlicer(nside=nside)
        wfd_slicer = make_wfd_subset_slicer(nside=nside)

        bdict = batches.info_bundle_dicts(allsky_slicer, wfd_slicer, sim_name, colmap)

        # Set up the resultsDB
        results_db = ResultsDb(out_dir=out_dir)
        # Go and run it
        group = MetricBundleGroup(bdict, filename, out_dir=out_dir, results_db=results_db, save_early=False)
        group.run_all(clear_memory=True, plot_now=True)
        results_db.close()
91 |
--------------------------------------------------------------------------------
/rubin_sim/maf/metric_bundles/__init__.py:
--------------------------------------------------------------------------------
1 | from .metric_bundle import *
2 | from .metric_bundle_group import *
3 | from .mo_metric_bundle import *
4 |
--------------------------------------------------------------------------------
/rubin_sim/maf/metrics/__init__.py:
--------------------------------------------------------------------------------
1 | from .agn_time_lag_metric import *
2 | from .agnstructure import *
3 | from .area_summary_metrics import *
4 | from .base_metric import *
5 | from .brown_dwarf_metric import *
6 | from .cadence_metrics import *
7 | from .calibration_metrics import *
8 | from .chip_vendor_metric import *
9 | from .color_slope_metrics import *
10 | from .coverage_metric import *
11 | from .crowding_metric import *
12 | from .cumulative_metric import *
13 | from .dcr_metric import *
14 | from .exgal_m5 import *
15 | from .fft_metric import *
16 | from .galactic_plane_metrics import *
17 | from .galplane_time_sampling_metrics import *
18 | from .hourglass_metric import *
19 | from .incremental_template_metric import *
20 | from .kuiper_metrics import *
21 | from .mo_metrics import *
22 | from .mo_summary_metrics import *
23 | from .night_pointing_metric import *
24 | from .optimal_m5_metric import *
25 | from .pair_metric import *
26 | from .periodic_detect_metric import *
27 | from .phase_gap_metric import *
28 | from .qso_number_counts_metric import *
29 | from .scaling_metrics import *
30 | from .schedview_metrics import *
31 | from .season_metrics import *
32 | from .simple_metrics import *
33 | from .sky_sat_metric import *
34 | from .sn_cadence_metric import *
35 | from .sn_n_sn_metric import *
36 | from .sn_sl_metric import *
37 | from .sn_snr_metric import *
38 | from .snr_weight import *
39 | from .star_density import *
40 | from .string_count_metric import *
41 | from .summary_metrics import *
42 | from .surfb_metric import *
43 | from .technical_metrics import *
44 | from .tgaps import *
45 | from .transient_metrics import *
46 | from .use_metrics import *
47 | from .vector_metrics import *
48 | from .visit_groups_metric import *
49 | from .weak_lensing_systematics_metric import *
50 |
--------------------------------------------------------------------------------
/rubin_sim/maf/metrics/chip_vendor_metric.py:
--------------------------------------------------------------------------------
1 | __all__ = ("ChipVendorMetric",)
2 |
3 | import numpy as np
4 |
5 | from .base_metric import BaseMetric
6 |
7 |
class ChipVendorMetric(BaseMetric):
    """
    See what happens if we have chips from different vendors.

    Returns 1 or 2 when every chip covering the slice_point comes from a
    single (fake) vendor, or 3 when both vendors contribute.
    """

    def __init__(self, cols=None, **kwargs):
        if cols is None:
            cols = []
        super(ChipVendorMetric, self).__init__(
            col=cols, metric_dtype=float, units="1,2,3:v1,v2,both", **kwargs
        )

    def _chip_names2vendor_id(self, chip_name):
        """
        given a list of chipnames, convert to 1 or 2, representing
        different vendors
        """
        vendors = []
        for chip in chip_name:
            # Parse the chip_name string; the digit at index 2 assigns
            # chips to a fake vendor in a checkerboard pattern.
            if int(chip[2]) % 2 == 0:
                vendors.append(1)
            else:
                vendors.append(2)
        return vendors

    def run(self, data_slice, slice_point=None):
        if "chipNames" not in list(slice_point.keys()):
            raise ValueError("No chipname info, need to set use_camera=True with a spatial slicer.")

        uvendor_i_ds = np.unique(self._chip_names2vendor_id(slice_point["chipNames"]))
        if np.size(uvendor_i_ds) == 1:
            # Bug fix: return the scalar vendor id rather than a
            # one-element ndarray, so both branches return a scalar
            # consistent with metric_dtype=float.
            result = uvendor_i_ds[0]
        else:
            result = 3
        return result
44 |
--------------------------------------------------------------------------------
/rubin_sim/maf/metrics/coverage_metric.py:
--------------------------------------------------------------------------------
1 | __all__ = ("YearCoverageMetric",)
2 |
3 | import numpy as np
4 |
5 | from .base_metric import BaseMetric
6 |
7 |
class YearCoverageMetric(BaseMetric):
    """Count the number of `bins` covered by night_col.

    The default `bins` cover years 0 to 10.
    Handy for checking that a point on the sky gets observed every year,
    as the default settings result in the metric returning the number years
    in the data_slice (when used with a HealpixSlicer).

    Parameters
    ----------
    night_col : `str`, opt
        Data column to histogram. Default 'night'.
    bins : `np.ndarray`, (N,), opt
        Bins to use in the histogram. Default corresponds to years 0-10
        (with 365.25 nights per year).
    units : `str`, opt
        Units to use for the metric result. Default 'N years'.

    Returns
    -------
    nbins : `int`
        Number of histogram bins where the histogram value is greater than 0.
        Typically this will be the number of years in the 'night_col'.
    """

    def __init__(self, night_col="night", bins=None, units=None, **kwargs):
        self.night_col = night_col
        if bins is None:
            # Yearly bin edges, offset by half a night so year boundaries
            # fall between integer nights.
            self.bins = np.arange(0, np.ceil(365.25 * 10.0), 365.25) - 0.5
        else:
            self.bins = bins

        if units is None:
            units = "N years"

        # Bug fix: forward **kwargs to the base class; previously keyword
        # arguments such as metric_name were accepted but silently dropped.
        super().__init__([night_col], units=units, **kwargs)

    def run(self, data_slice, slice_point):
        # Count how many bins contain at least one visit.
        hist, be = np.histogram(data_slice[self.night_col], bins=self.bins)
        result = np.where(hist > 0)[0].size
        return result
49 |
--------------------------------------------------------------------------------
/rubin_sim/maf/metrics/cumulative_metric.py:
--------------------------------------------------------------------------------
1 | __all__ = ("CumulativeMetric",)
2 |
3 | import numpy as np
4 |
5 | from .base_metric import BaseMetric
6 |
7 |
class CumulativeMetric(BaseMetric):
    """For plotting up the cumulative number of observations.
    Expected to be used with a UniSlicer or UserPointSlicer with one point.

    Parameters
    ----------
    interp_points : `np.array`, (N,) or None
        The points to interpolate the cumulative number of observations to.
        If None, then the range of the data is used with a stepsize of 1.
    """

    def __init__(
        self,
        metric_name="Cumulative",
        time_col="observationStartMJD",
        night_col="night",
        interp_points=None,
        **kwargs,
    ):
        super().__init__(col=[time_col, night_col], metric_name=metric_name, metric_dtype="object", **kwargs)
        self.time_col = time_col
        self.night_col = night_col
        self.interp_points = interp_points
        self.plot_dict = {"xlabel": "MJD (days)", "ylabel": "N obs"}

    def run(self, data_slice, slice_point=None):
        # Sort by time so the cumulative count is monotonic.
        data_slice.sort(order=self.time_col)
        if self.interp_points is not None:
            xresult = self.interp_points
        else:
            # Default: one interpolation point per night across the data.
            nights = data_slice[self.night_col]
            xresult = np.arange(nights.min(), nights.max() + 1, 1)
        # Running count of observations (1..N) interpolated onto the grid.
        counts = np.arange(data_slice.size) + 1
        yresult = np.interp(xresult, data_slice[self.night_col], counts)
        return {"x": xresult, "y": yresult, "plot_dict": self.plot_dict}
47 |
--------------------------------------------------------------------------------
/rubin_sim/maf/metrics/exgal_m5.py:
--------------------------------------------------------------------------------
1 | __all__ = ("ExgalM5",)
2 |
3 | from rubin_sim.phot_utils import DustValues
4 |
5 | from .base_metric import BaseMetric
6 | from .simple_metrics import Coaddm5Metric
7 |
8 |
class ExgalM5(BaseMetric):
    """
    Calculate co-added five-sigma limiting depth after dust extinction.

    Uses phot_utils to calculate dust extinction.

    Parameters
    ----------
    m5_col : `str`, optional
        Column name for five sigma depth. Default 'fiveSigmaDepth'.
    unit : `str`, optional
        Label for units. Default 'mag'.

    Returns
    -------
    coadd_m5 : `float`
        Coadded m5 value, corrected for galactic dust extinction.
    """

    def __init__(
        self, m5_col="fiveSigmaDepth", metric_name="ExgalM5", units="mag", filter_col="filter", **kwargs
    ):
        # Set the name for the dust map to use.
        # This is gathered into the MetricBundle.
        maps = ["DustMap"]
        self.m5_col = m5_col
        self.filter_col = filter_col
        super().__init__(
            col=[self.m5_col, self.filter_col], maps=maps, metric_name=metric_name, units=units, **kwargs
        )
        # Set the default wavelength limits for the lsst filters.
        # These are approximately correct.
        dust_properties = DustValues()
        self.ax1 = dust_properties.ax1
        # We will call Coaddm5Metric to calculate the coadded depth.
        # Set it up here.
        self.coaddm5_metric = Coaddm5Metric(m5_col=m5_col)

    def run(self, data_slice, slice_point):
        """Compute the co-added m5 depth and then apply
        dust extinction to that magnitude.
        """
        m5 = self.coaddm5_metric.run(data_slice)
        # Propagate the coadd metric's badval (e.g. no usable visits).
        if m5 == self.coaddm5_metric.badval:
            return self.badval
        # Total dust extinction along this line of sight.
        # Correct default A to this EBV value.
        # NOTE(review): uses the filter of the first visit only, so this
        # assumes the data_slice is restricted to a single filter.
        a_x = self.ax1[data_slice[self.filter_col][0]] * slice_point["ebv"]
        return m5 - a_x
58 |
--------------------------------------------------------------------------------
/rubin_sim/maf/metrics/fft_metric.py:
--------------------------------------------------------------------------------
1 | __all__ = ("FftMetric",)
2 |
3 | from scipy import fftpack
4 |
5 | from .base_metric import BaseMetric
6 |
7 |
class FftMetric(BaseMetric):
    """Calculate a truncated FFT of the exposure times.

    Parameters
    ----------
    times_col : `str`, opt
        Column with the time of the visit. Default 'expmjd'.
    n_coeffs : `int`, opt
        Number of coefficients of the (real) FFT to keep. Default 100.
    """

    def __init__(self, times_col="expmjd", metric_name="Fft", n_coeffs=100, **kwargs):
        self.times = times_col
        # Length of the returned coefficient vector.
        self.n_coeffs = n_coeffs
        super(FftMetric, self).__init__(col=[self.times], metric_name=metric_name, **kwargs)

    def run(self, data_slice, slice_point=None):
        # Keep only the first n_coeffs coefficients of the real FFT.
        return fftpack.rfft(data_slice[self.times])[: self.n_coeffs]

    def reduce_peak(self, fft_coeff):
        pass
28 |
--------------------------------------------------------------------------------
/rubin_sim/maf/metrics/kuiper_metrics.py:
--------------------------------------------------------------------------------
1 | __all__ = ("KuiperMetric",)
2 |
3 | import numpy as np
4 |
5 | from .base_metric import BaseMetric
6 |
7 |
class KuiperMetric(BaseMetric):
    """Find the Kuiper V statistic for a distribution, useful for angles.

    Value of 0 means perfectly uniform, 1 means delta function
    """

    def run(self, data_slice, slice_point=None):
        """"""
        # Values are assumed to be angles in degrees; wrap into [0, 360).
        angles = np.sort(data_slice[self.colname] % 360)
        n = angles.size

        # Empirical CDF at each sorted value, and the uniform model CDF.
        ecdf = (np.arange(n) + 1) / n
        model_cdf = angles / (360.0)

        # Kuiper V = D+ + D- (maximum deviations in both directions).
        result = np.max(model_cdf - ecdf) + np.max(ecdf - model_cdf)

        return result
27 |
--------------------------------------------------------------------------------
/rubin_sim/maf/metrics/night_pointing_metric.py:
--------------------------------------------------------------------------------
1 | __all__ = ("NightPointingMetric",)
2 |
3 | import numpy as np
4 | from astropy import units as u
5 | from astropy.coordinates import AltAz, EarthLocation, get_body, get_sun
6 | from astropy.time import Time
7 | from rubin_scheduler.utils import Site
8 |
9 | from .base_metric import BaseMetric
10 |
11 |
class NightPointingMetric(BaseMetric):
    """
    Gather relevant information for a night to plot.

    Returns the visit data_slice itself, plus moon and sun alt/az
    positions (in radians) sampled through the night, packaged in a dict
    for use by a night-pointing plotter.
    """

    def __init__(
        self,
        alt_col="altitude",
        az_col="azimuth",
        filter_col="filter",
        mjd_col="observationStartMJD",
        metric_name="NightPointing",
        telescope="LSST",
        **kwargs,
    ):
        cols = [alt_col, az_col, filter_col, mjd_col]
        super(NightPointingMetric, self).__init__(
            col=cols, metric_name=metric_name, metric_dtype="object", **kwargs
        )
        self.telescope = Site(name=telescope)
        self.alt_col = alt_col
        self.az_col = az_col
        self.filter_col = filter_col
        self.mjd_col = mjd_col

        # Observatory location, used for the alt/az transforms in run().
        self.location = EarthLocation(
            lat=self.telescope.latitude_rad * u.rad,
            lon=self.telescope.longitude_rad * u.rad,
            height=self.telescope.height * u.m,
        )

    def run(self, data_slice, slice_point=None):
        # Pad the sampled time range by 30 minutes (in days) on each side
        # of the first/last visit.
        pad = 30.0 / 60.0 / 24.0
        mjd_min = data_slice[self.mjd_col].min() - pad
        mjd_max = data_slice[self.mjd_col].max() + pad

        # How often to plot the moon and things (20 minutes, in days).
        step = 20.0 / 60.0 / 24.0
        mjds = Time(np.arange(mjd_min, mjd_max + step, step), format="mjd")

        # Alt/az frame at the observatory for each sampled time.
        aa = AltAz(location=self.location, obstime=mjds)

        moon_coords = get_body("moon", mjds).transform_to(aa)
        sun_coords = get_sun(mjds).transform_to(aa)

        moon_alts = np.array(moon_coords.alt.rad)
        moon_azs = np.array(moon_coords.az.rad)
        mjds = np.array(mjds)
        sun_alts = np.array(sun_coords.alt.rad)
        sun_azs = np.array(sun_coords.az.rad)

        return {
            "data_slice": data_slice,
            "moon_alts": moon_alts,
            "moon_azs": moon_azs,
            "mjds": mjds,
            "sun_alts": sun_alts,
            "sun_azs": sun_azs,
        }
71 |
--------------------------------------------------------------------------------
/rubin_sim/maf/metrics/pair_metric.py:
--------------------------------------------------------------------------------
1 | __all__ = ("PairMetric",)
2 |
3 | import numpy as np
4 |
5 | from .base_metric import BaseMetric
6 |
7 |
class PairMetric(BaseMetric):
    """Count the number of pairs of visits that could be used for
    Solar System object detection.

    Visits are histogrammed in time; a pair is counted whenever two
    populated bins are separated by a lag between match_min and match_max.

    Parameters
    ----------
    mjd_col : `str`, opt
        Name of the MJD column in the observations.
    metric_name : `str`, opt
        Name for the resulting metric. If None, one is constructed from
        the class name.
    match_min : `float`, opt
        Minutes after first observation to count something as a match.
    match_max : `float`, opt
        Minutes after first observation to count something as a match.
    bin_size : `float`, opt
        bin_size to use (minutes).
        Note that bin_size should be considerably smaller than the difference
        between match_min and match_max.

    Result
    ------
    num_pairs : `float`
        The number of pairs of visits within the min and max time range.
    """

    def __init__(
        self,
        mjd_col="observationStartMJD",
        metric_name="Pairs",
        match_min=20.0,
        match_max=40.0,
        bin_size=5.0,
        **kwargs,
    ):
        self.mjd_col = mjd_col
        # Convert all times from minutes to days (MJD units).
        self.bin_size = bin_size / 60.0 / 24.0
        self.match_min = match_min / 60.0 / 24.0
        self.match_max = match_max / 60.0 / 24.0
        super(PairMetric, self).__init__(col=mjd_col, metric_name=metric_name, units="N Pairs", **kwargs)

    def run(self, data_slice, slice_point=None):
        # Histogram the visit times into bin_size-wide bins.
        bins = np.arange(
            data_slice[self.mjd_col].min(),
            data_slice[self.mjd_col].max() + self.bin_size,
            self.bin_size,
        )

        hist, bin_edges = np.histogram(data_slice[self.mjd_col], bins=bins)
        # Range of bin offsets corresponding to lags in
        # [match_min, match_max].
        nbin_min = np.round(self.match_min / self.bin_size)
        nbin_max = np.round(self.match_max / self.bin_size)
        bins_to_check = np.arange(nbin_min, nbin_max + 1, 1)
        bins_w_obs = np.where(hist > 0)[0]
        # now, for each bin with an observation,
        # need to check if there is a bin
        # far enough ahead that is also populated.
        result = 0
        for binadd in bins_to_check:
            result += np.size(np.intersect1d(bins_w_obs, bins_w_obs + binadd))
        if result == 0:
            result = self.badval
        return result
70 |
--------------------------------------------------------------------------------
/rubin_sim/maf/metrics/schedview_metrics.py:
--------------------------------------------------------------------------------
1 | """Metrics for scheduler monitoring and progress."""
2 |
3 | __all__ = ["AgeMetric"]
4 |
5 | import numpy as np
6 |
7 | from .base_metric import BaseMetric
8 |
9 |
class AgeMetric(BaseMetric):
    def __init__(
        self, mjd, mjd_col="observationStartMJD", long_limit=30, metric_name="age", mask_val=np.nan, **kwargs
    ):
        """Metric reporting the time since the most recent visit in each
        slice, relative to a given reference time.

        Parameters
        ----------
        mjd : `float`
            Reference time for the age.
        mjd_col : `str`
            Column with the time of visit, by default "observationStartMJD"
        long_limit : `int`
            The age past which to mask values, by default 30
        metric_name : `str`
            The metric name, by default 'age'
        mask_val : `object`
            Name for masked values, by default np.nan
        """
        self.mjd = mjd
        self.mjd_col = mjd_col
        self.long_limit = long_limit
        super().__init__(col=[self.mjd_col], metric_name=metric_name, mask_val=mask_val, **kwargs)

    def run(self, data_slice, slice_point=None):
        # Age of the most recent visit; mask anything older than the limit.
        latest_visit = np.max(data_slice[self.mjd_col])
        age = self.mjd - latest_visit
        return self.mask_val if age > self.long_limit else age
40 |
--------------------------------------------------------------------------------
/rubin_sim/maf/metrics/sky_sat_metric.py:
--------------------------------------------------------------------------------
1 | __all__ = ("SkySaturationMetric",)
2 |
3 | import numpy as np
4 |
5 | from .base_metric import BaseMetric
6 |
7 |
class SkySaturationMetric(BaseMetric):
    """Check if the sky would saturate a visit in an exposure"""

    def __init__(self, metric_name="SkySaturation", units="#", **kwargs):
        super().__init__(col=["saturation_mag"], units=units, metric_name=metric_name, **kwargs)

    def run(self, data_slice, slice_point):
        # The saturation stacker writes NaN when the sky saturates, so the
        # number of saturated visits is the count of non-finite entries.
        sat_mags = data_slice["saturation_mag"]
        n_saturated = sat_mags.size - np.count_nonzero(np.isfinite(sat_mags))
        return n_saturated
19 |
--------------------------------------------------------------------------------
/rubin_sim/maf/metrics/snr_weight.py:
--------------------------------------------------------------------------------
1 | __all__ = ("SnrWeightedMetric",)
2 |
3 | import numpy as np
4 |
5 | from rubin_sim.maf.utils import m52snr
6 |
7 | from .base_metric import BaseMetric
8 |
9 |
class SnrWeightedMetric(BaseMetric):
    """Take the SNR weighted average of a column."""

    def __init__(self, col, m5_col="fiveSigmaDepth", metric_name=None, **kwargs):
        if metric_name is None:
            metric_name = "SNR Weighted %s" % col
        super().__init__(col=[m5_col, col], metric_name=metric_name, **kwargs)
        self.m5_col = m5_col
        self.col = col
        # Arbitrary reference magnitude used to turn m5 into SNR weights.
        self.star_mag = 20.0

    def run(self, data_slice, slice_point=None):
        # Weight each visit's value by the SNR a star_mag star would have
        # at that visit's five-sigma depth.
        weights = m52snr(self.star_mag, data_slice[self.m5_col])
        return np.average(data_slice[self.col], weights=weights)
25 |
--------------------------------------------------------------------------------
/rubin_sim/maf/metrics/star_density.py:
--------------------------------------------------------------------------------
1 | __all__ = ("StarDensityMetric",)
2 |
3 | import warnings
4 |
5 | from scipy.interpolate import interp1d
6 |
7 | from .base_metric import BaseMetric
8 |
9 |
class StarDensityMetric(BaseMetric):
    """Interpolate the stellar luminosity function to return the number of
    stars per square arcsecond brighter than the mag_limit.
    Note that the map is built from CatSim stars in the range 20 < r < 28.
    mag_limit values outside that the range of the map's starMapBins will
    return self.badval

    The stellar density maps are available in any bandpass, but bandpasses
    other than r band must use a pre-configured StellarDensityMap (not just the
    default). In other words, when setting up the metric bundle for an i-band
    stellar density using (as an example) a HealpixSlicer:
    ```
    map = maf.StellarDensityMap(filtername='i')
    metric = maf.StarDensityMetric(filtername='i', mag_limit=25.0)
    slicer = maf.HealpixSlicer()
    bundle = maf.MetricBundle(metric, slicer, "", mapsList=[map])
    ```

    Parameters
    ----------
    mag_limit : `float`, opt
        Magnitude limit at which to evaluate the stellar luminosity function.
        Returns number of stars per square arcsecond brighter than this limit.
        Default 25.
    filtername : `str`, opt
        Which filter to evaluate the luminosity function in; Note that using
        bands other than r will require setting up a custom (rather than
        default) version of the stellar density map.
        Default r.
    units : `str`, opt
        Units for the output values. Default "stars/sq arcsec".
    maps : `list` of `str`, opt
        Names for the maps required. Default "StellarDensityMap".

    Returns
    -------
    result : `float`
        Number of stars brighter than mag_limit in filtername, based on the
        stellar density map.
    """

    def __init__(
        self, mag_limit=25.0, filtername="r", units="stars/sq arcsec", maps=None, **kwargs
    ):
        # Use a None sentinel instead of a mutable default argument for
        # `maps` (a shared list default could be mutated across instances).
        if maps is None:
            maps = ["StellarDensityMap"]
        super().__init__(col=[], maps=maps, units=units, **kwargs)
        self.mag_limit = mag_limit
        if "rmagLimit" in kwargs:
            warnings.warn(
                "rmagLimit is deprecated; please use mag_limit instead "
                "(will use the provided rmagLimit for now)."
            )
            self.mag_limit = kwargs["rmagLimit"]
        self.filtername = filtername

    def run(self, data_slice, slice_point=None):
        # Interpolate the map's luminosity function (stars/sq degree brighter
        # than mag) at the requested magnitude limit.
        interp = interp1d(
            slice_point["starMapBins_%s" % self.filtername][1:],
            slice_point["starLumFunc_%s" % self.filtername],
        )
        # convert from stars/sq degree to stars/sq arcsec
        try:
            result = interp(self.mag_limit) / (3600.0**2)
        except ValueError:
            # This probably means the interpolation went out of range
            # (magLimit <15 or >28)
            return self.badval
        return result
78 |
--------------------------------------------------------------------------------
/rubin_sim/maf/metrics/use_metrics.py:
--------------------------------------------------------------------------------
1 | __all__ = ("UseMetric",)
2 |
3 | import numpy as np
4 |
5 | from rubin_sim.maf.metrics.base_metric import BaseMetric
6 |
7 |
class UseMetric(BaseMetric):  # pylint: disable=too-few-public-methods
    """Metric to classify visits by type of visits"""

    def __init__(self, note_col="scheduler_note", **kwargs):
        self.note_col = note_col
        super().__init__(col=[note_col], metric_dtype="object", **kwargs)

    def run(self, data_slice, slice_point=None):  # pylint: disable=invalid-name
        """Run the metric.

        Parameters
        ----------
        data_slice : `np.ndarray`, (N,)`
        slice_point : `dict`
            Dictionary of slice_point metadata passed to each metric.
            E.g. the ra/dec of the healpix pixel.

        Returns
        -------
        use_name : `str`
            use at each slice_point.
        """
        notes = data_slice[self.note_col]
        # A 0-d array holds a single note; otherwise all notes in the slice
        # are expected to be identical.
        if len(notes.shape) == 0:
            note = notes
        else:
            note = notes[0]
            assert np.all(notes == note)

        elems = note.replace(":", ", ").split(", ")
        # XXX--survey note strings should not be hard-coded here.
        use_name = None
        if elems[0] == "greedy":
            use_name = elems[0]
        elif elems[0] == "DD":
            use_name = elems[1]
        elif elems[0] in ("blob", "blob_twi"):
            use_name = "wide with only IR"
            if any(band in elems[1] for band in ("u", "g", "r")):
                use_name = "wide with u, g, or r"

        assert use_name is not None, f"Unrecognized scheduler_note: {note}"
        return use_name
53 |
54 |
55 | # internal functions & classes
56 |
--------------------------------------------------------------------------------
/rubin_sim/maf/plots/__init__.py:
--------------------------------------------------------------------------------
1 | from .hg_plotters import *
2 | from .hourglass_plotters import *
3 | from .mo_plotters import *
4 | from .nd_plotters import *
5 | from .neo_distance_plotter import *
6 | from .night_pointing_plotter import *
7 | from .oned_plotters import *
8 | from .perceptual_rainbow import *
9 | from .plot_handler import *
10 | from .skyproj_plotters import *
11 | from .spatial_plotters import *
12 | from .special_plotters import *
13 | from .two_d_plotters import *
14 | from .xyplotter import *
15 |
--------------------------------------------------------------------------------
/rubin_sim/maf/plots/perceptual_rainbow.py:
--------------------------------------------------------------------------------
1 | __all__ = ("make_pr_cmap",)
2 |
3 | from matplotlib.colors import LinearSegmentedColormap
4 |
5 |
def make_pr_cmap():
    """Build the 16-stop 'perceptual_rainbow' linear segmented colormap."""
    # Anchor colors as 8-bit RGB triples, dark magenta through pale yellow.
    rgb_255 = [
        [135, 59, 97],
        [143, 64, 127],
        [143, 72, 157],
        [135, 85, 185],
        [121, 102, 207],
        [103, 123, 220],
        [84, 146, 223],
        [69, 170, 215],
        [59, 192, 197],
        [60, 210, 172],
        [71, 223, 145],
        [93, 229, 120],
        [124, 231, 103],
        [161, 227, 95],
        [198, 220, 100],
        [233, 213, 117],
    ]
    # Matplotlib wants channels normalized to [0, 1].
    mpl_colors = [tuple(channel / 255.0 for channel in color) for color in rgb_255]
    return LinearSegmentedColormap.from_list("perceptual_rainbow", mpl_colors)
31 |
--------------------------------------------------------------------------------
/rubin_sim/maf/plots/xyplotter.py:
--------------------------------------------------------------------------------
1 | __all__ = ("XyPlotter",)
2 |
3 | import matplotlib.pyplot as plt
4 |
5 | from .plot_handler import BasePlotter
6 |
7 |
class XyPlotter(BasePlotter):
    """Bare-bones plotter for making scatter plots. Expects single metric value
    (e.g, from UniSlicer or UserPointSlicer with one point)"""

    def __init__(self):
        self.object_plotter = True
        self.plot_type = "simple"
        self.default_plot_dict = {
            "title": None,
            "xlabel": "",
            "ylabel": "",
            "figsize": None,
        }

    def __call__(self, metric_value_in, slicer, user_plot_dict, fig=None):
        # Merge plot settings; precedence: defaults < user < per-metric.
        metric_value = metric_value_in[0]
        plot_dict = dict(self.default_plot_dict)
        plot_dict.update(user_plot_dict)
        plot_dict.update(metric_value["plot_dict"])

        if fig is None:
            fig = plt.figure(figsize=plot_dict["figsize"])
        ax = fig.add_subplot(111)
        ax.plot(metric_value["x"], metric_value["y"])
        ax.set_title(plot_dict["title"])
        ax.set_xlabel(plot_dict["xlabel"])
        ax.set_ylabel(plot_dict["ylabel"])
        return fig
38 |
--------------------------------------------------------------------------------
/rubin_sim/maf/run_comparison/__init__.py:
--------------------------------------------------------------------------------
1 | from .archive import *
2 | from .gather_summaries import *
3 | from .microlensing_compare import *
4 | from .radar_plot import *
5 | from .summary_plots import *
6 |
--------------------------------------------------------------------------------
/rubin_sim/maf/scimaf_dir.py:
--------------------------------------------------------------------------------
1 | from . import batches as batches
2 | from . import db as db
3 | from . import metricBundles as mmB
4 |
5 | __all__ = ("scimaf_dir",)
6 |
7 | import argparse
8 | import glob
9 | import os
10 | import shutil
11 | import sqlite3
12 | import warnings
13 |
14 | import matplotlib
15 | import pandas as pd
16 |
17 | matplotlib.use("Agg")
18 |
19 |
def scimaf_dir():
    """Run the science batch on all .db files in a directory.

    Command-line entry point. For each opsim database found in the current
    directory (or the single database given with --db), runs either the full
    science radar batch or (with --limited) the limited radar batch, writes
    outputs to a "<run_name>_sci" directory, and registers the run in
    "trackingDb_sqlite.db".
    """

    parser = argparse.ArgumentParser()
    parser.add_argument("--db", type=str, default=None)
    parser.add_argument(
        "--no_clobber",
        dest="no_clobber",
        action="store_true",
        default=False,
        help="Do not remove existing directory outputs",
    )
    # NOTE(review): no_long_micro has no matching add_argument here —
    # presumably consumed downstream or vestigial; confirm before removing.
    parser.set_defaults(no_long_micro=False)
    parser.add_argument("--limited", dest="limited", action="store_true")
    parser.set_defaults(limited=False)
    args = parser.parse_args()

    # Without --db, process every .db file in the current directory,
    # skipping the tracking database itself.
    if args.db is None:
        db_files = glob.glob("*.db")
        db_files = [filename for filename in db_files if "trackingDb" not in filename]
    else:
        db_files = [args.db]
    # Run names are the database filenames minus the .db extension.
    run_names = [os.path.basename(name).replace(".db", "") for name in db_files]

    for filename, name in zip(db_files, run_names):
        out_dir = name + "_sci"

        # Grab the starting date for the Presto KNe metric
        try:
            con = sqlite3.connect(filename)
            mjd0_df = pd.read_sql("select min(observationStartMJD) from observations;", con)
            con.close()
            mjd0 = mjd0_df.values.min()
        # If this fails for any reason (aka schema change)
        except:  # noqa E722
            warnings.warn("Could not find survey start date for Presto KNe, setting mjd0=None.")
            mjd0 = None
        # Clobber output directory if it exists
        if not args.no_clobber:
            if os.path.isdir(out_dir):
                shutil.rmtree(out_dir)
        results_db = db.ResultsDb(out_dir=out_dir)
        # Set up the metricBundles
        if args.limited:
            bdict = batches.radar_limited(
                runName=name,
                mjd0=mjd0,
            )
        else:
            bdict = batches.science_radar_batch(
                runName=name,
                mjd0=mjd0,
            )
        # Run them, including generating plots
        group = mmB.MetricBundleGroup(
            bdict, filename, out_dir=out_dir, results_db=results_db, save_early=False
        )
        group.run_all(clear_memory=True, plot_now=True)
        results_db.close()
        # Register the finished run in the tracking database (for the web UI).
        db.add_run_to_database(
            out_dir,
            "trackingDb_sqlite.db",
            run_group=None,
            run_name=name,
            run_comment=None,
            maf_comment="ScienceRadar",
            db_file=name + ".db",
        )
88 |
--------------------------------------------------------------------------------
/rubin_sim/maf/slicers/__init__.py:
--------------------------------------------------------------------------------
1 | from .base_slicer import *
2 | from .base_spatial_slicer import *
3 | from .healpix_sdss_slicer import *
4 | from .healpix_slicer import *
5 | from .healpix_subset_slicer import *
6 | from .hourglass_slicer import *
7 | from .mo_slicer import *
8 | from .movie_slicer import *
9 | from .nd_slicer import *
10 | from .one_d_slicer import *
11 | from .time_interval_slicers import *
12 | from .uni_slicer import *
13 | from .user_points_slicer import *
14 |
--------------------------------------------------------------------------------
/rubin_sim/maf/slicers/hourglass_slicer.py:
--------------------------------------------------------------------------------
1 | __all__ = ("HourglassSlicer",)
2 |
3 |
4 | from rubin_sim.maf.plots import HourglassPlot
5 |
6 | from .uni_slicer import UniSlicer
7 |
8 |
class HourglassSlicer(UniSlicer):
    """Slicer to make the filter hourglass plots"""

    def __init__(self, verbose=True, badval=-666):
        # A UniSlicer under the hood: nslice=1, one 'slice' over all visits.
        super().__init__(verbose=verbose, badval=badval)
        self.columns_needed = []
        self.slicer_name = "HourglassSlicer"
        self.plot_funcs = [HourglassPlot]

    def write_data(self, outfilename, metric_values, metric_name="", **kwargs):
        """Override base write method to do nothing: hourglass metric data
        is not saved because the data volume is too large."""

    def read_metric_data(self, infilename):
        """Override base read method to do nothing: hourglass metric data
        is never saved (the data volume is too large)."""
38 |
--------------------------------------------------------------------------------
/rubin_sim/maf/slicers/uni_slicer.py:
--------------------------------------------------------------------------------
1 | """UniSlicer - no slicing at all, simply return all data points."""
2 |
3 | __all__ = ("UniSlicer",)
4 |
5 | from functools import wraps
6 |
7 | import numpy as np
8 |
9 | from .base_slicer import BaseSlicer
10 |
11 |
class UniSlicer(BaseSlicer):
    """UniSlicer: no slicing at all, one slice returning every data point."""

    def __init__(self, verbose=True, badval=-666):
        """Instantiate unislicer."""
        super().__init__(verbose=verbose, badval=badval)
        self.nslice = 1
        self.shape = self.nslice
        # A single slice id, 0.
        self.slice_points["sid"] = np.array([0], int)
        self.plot_funcs = []

    def setup_slicer(self, sim_data, maps=None):
        """Use sim_data to set indexes to return."""
        self._run_maps(maps)
        first_col = sim_data.dtype.names[0]
        self.indices = np.ones(len(sim_data[first_col]), dtype="bool")

        @wraps(self._slice_sim_data)
        def _slice_sim_data(islice):
            """Return all indexes in sim_data."""
            slice_point = {"sid": islice}
            # Per-slice metadata is indexed by slice; anything else is
            # passed through whole.
            for key in self.slice_points:
                value = self.slice_points[key]
                shape = np.shape(value)
                per_slice = len(shape) > 0 and shape[0] == self.nslice
                slice_point[key] = value[islice] if per_slice else value
            return {"idxs": self.indices, "slice_point": slice_point}

        setattr(self, "_slice_sim_data", _slice_sim_data)

    def __eq__(self, other_slicer):
        """Evaluate if slicers are equivalent."""
        return isinstance(other_slicer, UniSlicer)
58 |
--------------------------------------------------------------------------------
/rubin_sim/maf/stackers/__init__.py:
--------------------------------------------------------------------------------
1 | from .base_stacker import *
2 | from .coord_stackers import *
3 | from .date_stackers import *
4 | from .general_stackers import *
5 | from .get_col_info import *
6 | from .label_stackers import *
7 | from .m5_optimal_stacker import *
8 | from .mo_stackers import *
9 | from .n_follow_stacker import *
10 | from .neo_dist_stacker import *
11 | from .sdss_stackers import *
12 | from .sn_stacker import *
13 | from .teff_stacker import *
14 |
--------------------------------------------------------------------------------
/rubin_sim/maf/stackers/sdss_stackers.py:
--------------------------------------------------------------------------------
1 | __all__ = ("SdssRADecStacker",)
2 |
3 |
4 | import numpy as np
5 |
6 | from .base_stacker import BaseStacker
7 | from .coord_stackers import wrap_ra
8 |
9 |
class SdssRADecStacker(BaseStacker):
    """Convert the p1,p2,p3... columns to radians and wrap them.

    The p1..p8 columns represent the (RA, Dec) corners of chips, in degrees;
    this stacker adds RA1/Dec1 .. RA4/Dec4 columns in radians, with the RA
    values wrapped into range. Could generalize this a bit.

    Parameters
    ----------
    pcols : `list` of `str`, opt
        The input corner columns, alternating RA-like and Dec-like.
        Default ["p1", ..., "p8"].
    """

    cols_added = ["RA1", "Dec1", "RA2", "Dec2", "RA3", "Dec3", "RA4", "Dec4"]

    def __init__(self, pcols=None):
        # Use a None sentinel instead of a mutable (list) default argument.
        if pcols is None:
            pcols = ["p1", "p2", "p3", "p4", "p5", "p6", "p7", "p8"]
        self.units = ["rad"] * 8
        self.cols_req = pcols

    def _run(self, sim_data, cols_present=False):
        if cols_present:
            # Assume this is unusual enough to run that you really mean it.
            pass
        for pcol, newcol in zip(self.cols_req, self.cols_added):
            # RA-like outputs get wrapped; Dec-like are just converted.
            if newcol[0:2] == "RA":
                sim_data[newcol] = wrap_ra(np.radians(sim_data[pcol]))
            else:
                sim_data[newcol] = np.radians(sim_data[pcol])
        return sim_data
31 |
--------------------------------------------------------------------------------
/rubin_sim/maf/utils/__init__.py:
--------------------------------------------------------------------------------
1 | from .astrometry_utils import *
2 | from .get_date_version import *
3 | from .maf_utils import *
4 | from .opsim_utils import *
5 | from .output_utils import *
6 | from .stellar_mags import *
7 |
--------------------------------------------------------------------------------
/rubin_sim/maf/utils/astrometry_utils.py:
--------------------------------------------------------------------------------
1 | __all__ = ("sigma_slope", "m52snr", "astrom_precision")
2 |
3 | import numpy as np
4 |
5 | """Some simple functions that are useful for astrometry calculations. """
6 |
7 |
def sigma_slope(x, sigma_y):
    """
    Calculate the uncertainty in fitting a line, as
    given by the spread in x values and the uncertainties
    in the y values.

    Parameters
    ----------
    x : numpy.ndarray
        The x values of the data
    sigma_y : numpy.ndarray
        The uncertainty in the y values

    Returns
    -------
    float
        The uncertainty in the line fit
    """
    weights = 1.0 / sigma_y**2
    weight_sum = np.sum(weights)
    # Denominator of the weighted-least-squares slope variance.
    denom = weight_sum * np.sum(weights * x**2) - np.sum(weights * x) ** 2
    # A non-positive denominator means the fit is degenerate (e.g. one point).
    if denom <= 0:
        return np.nan
    return np.sqrt(weight_sum / denom)
33 |
34 |
def m52snr(m, m5, gamma=0.04):
    """
    Calculate the SNR for a star of magnitude m in an
    observation with 5-sigma limiting magnitude depth m5.
    Assumes gaussian distribution of photons and might not be
    strictly true in bluer filters. See table 2 and equation 5
    in astroph/0805.2366.

    Parameters
    ----------
    m : `float` or `np.ndarray` (N,)
        The magnitude of the star
    m5 : `float` or `np.ndarray` (N,)
        The m5 limiting magnitude of the observation
    gamma : `float` or None
        The 'gamma' value used when calculating photometric or
        astrometric errors and weighting SNR accordingly.
        See equation 5 of the LSST Overview paper.
        Use "None" to discount the gamma factor completely
        and use standard 5*10^(0.4 * (m5-m)).

    Returns
    -------
    snr : `float` or `np.ndarray` (N,)
        The SNR
    """
    # gamma varies per band, but is fairly close to 0.04
    if gamma is None:
        # Simple photon-statistics scaling from the 5-sigma depth.
        return 5.0 * 10.0 ** (-0.4 * (m - m5))
    # Equation 5 of the Overview paper: sigma_rand^2 = (0.04 - gamma)x + gamma x^2,
    # with x the flux ratio relative to the m5 source; SNR ~ 1/sigma_rand.
    flux_ratio = np.power(10, 0.4 * (m - m5))
    rand_var = (0.04 - gamma) * flux_ratio + gamma * flux_ratio**2
    return 1 / np.sqrt(rand_var)
69 |
70 |
def astrom_precision(fwhm, snr, systematic_floor=0.00):
    """
    Calculate the approximate precision of astrometric measurements,
    given a particular seeing and SNR value.

    Parameters
    ----------
    fwhm : `float` or `np.ndarray` (N,)
        The seeing (FWHMgeom) of the observation.
    snr : float` or `np.ndarray` (N,)
        The SNR of the object.
    systematic_floor : `float`
        Systematic noise floor for astrometric error, in arcseconds.
        Default here is 0, for backwards compatibility.
        General Rubin use should be 0.01.

    Returns
    -------
    astrom_err : `float` or `numpy.ndarray` (N,)
        The astrometric precision, in arcseconds.
    """
    # Random centroiding error scales as fwhm/snr; add the systematic
    # floor in quadrature.
    random_err = fwhm / snr
    return np.sqrt(random_err**2 + systematic_floor**2)
94 |
--------------------------------------------------------------------------------
/rubin_sim/maf/utils/generate_fov_map.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from rubin_scheduler.utils import gnomonic_project_tosky
3 |
4 | # Use the main stack to make a rough array.
5 | # This code needs an update to work without lsst.sims.
6 |
if __name__ == "__main__":
    import lsst.sims.utils as simsUtils
    from lsst.obs.lsst import LsstCamMapper
    from lsst.sims.coordUtils import _chipNameFromRaDec

    mapper = LsstCamMapper()
    camera = mapper.camera
    epoch = 2000.0

    # Arbitrary pointing; only the focal-plane geometry matters here.
    ra = 0.0
    dec = 0.0
    rot_sky_pos = 0.0
    mjd = 5300.0

    obs_metadata = simsUtils.ObservationMetaData(
        pointing_ra=np.degrees(ra),
        pointing_dec=np.degrees(dec),
        rot_sky_pos=np.degrees(rot_sky_pos),
        mjd=mjd,
    )

    nside = int(1000)
    # 60k pixels, from 0 to 3.5 degrees
    x_one = np.linspace(-1.75, 1.75, int(nside))

    # make 2-d x,y arrays
    x_two = np.broadcast_to(x_one, (nside, nside))
    y_two = np.broadcast_to(x_one, (nside, nside)).T

    result = np.ones((nside, nside), dtype=bool)
    ra_two, dec_two = gnomonic_project_tosky(np.radians(x_two), np.radians(y_two), ra, dec)
    chip_names = _chipNameFromRaDec(
        ra_two.ravel(),
        dec_two.ravel(),
        epoch=epoch,
        camera=camera,
        obs_metadata=obs_metadata,
    )

    chip_names = chip_names.reshape(nside, nside)
    # BUGFIX: `chip_names is not None` compares the whole array object to
    # None (always True here), so np.where(...) never selected anything.
    # Use an elementwise comparison to find pixels that landed on a chip.
    on_chip = np.not_equal(chip_names, None)
    wavefront_names = [
        name
        for name in np.unique(chip_names[on_chip])
        if ("SW" in name) | ("R44" in name) | ("R00" in name) | ("R04" in name) | ("R40" in name)
    ]
    # If it's on a wavefront sensor, that's false
    for name in wavefront_names:
        result[np.where(chip_names == name)] = False
    # No chipname, that's a false
    result[~on_chip] = False

    np.savez("fov_map.npz", x=x_one, image=result)
59 |
--------------------------------------------------------------------------------
/rubin_sim/maf/utils/get_date_version.py:
--------------------------------------------------------------------------------
1 | __all__ = ("get_date_version",)
2 |
3 | import time
4 | from importlib import metadata
5 |
6 |
def get_date_version():
    """
    Get today's date and a dictionary with the MAF version information.
    This is written into configuration output files, to help track MAF runs.

    Returns
    -------
    str, dict
        String with today's date, Dictionary with version information.
    """
    # ISO-style date, e.g. "2024-01-31".
    today_date = time.strftime("%Y-%m-%d")
    # Only the installed package version is populated; the other keys are
    # kept for backwards compatibility with older config-output consumers.
    version_info = {
        "__version__": metadata.version("rubin_sim"),
        "__repo_version__": None,
        "__fingerprint__": None,
        "__dependency_versions__": None,
    }
    return today_date, version_info
29 |
--------------------------------------------------------------------------------
/rubin_sim/maf/utils/output_utils.py:
--------------------------------------------------------------------------------
1 | __all__ = ("name_sanitize",)
2 |
3 |
def name_sanitize(in_string):
    """
    Convert a string to a more file name (and web) friendly format.

    Parameters
    ----------
    in_string : `str`
        The input string to be sanitized.
        Typically these are combinations of metric names and metadata.

    Returns
    -------
    out_string : `str`
        The string after removal/replacement of non-friendly characters.
    """
    # One translation pass handles every unfriendly character:
    # comparison signs become words, separators become underscores,
    # and purely decorative characters are dropped (mapped to None).
    friendly = str.maketrans(
        {
            ">": "gt",
            "<": "lt",
            "=": "eq",
            " ": "_",
            ".": "_",
            ",": None,
            "/": "_",
            "\\": "_",
            "(": None,
            ")": None,
            ":": "_",
            ";": "_",
            "%": "_",
            "#": "_",
        }
    )
    out_string = in_string.translate(friendly)
    # Collapse any runs of underscores left behind.
    while "__" in out_string:
        out_string = out_string.replace("__", "_")
    return out_string
35 |
--------------------------------------------------------------------------------
/rubin_sim/maf/web/__init__.py:
--------------------------------------------------------------------------------
1 | from .maf_run_results import *
2 | from .maf_tracking import *
3 |
--------------------------------------------------------------------------------
/rubin_sim/maf/web/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lsst/rubin_sim/5fd51c7f820329fb5644607851f46ef12d0efaee/rubin_sim/maf/web/favicon.ico
--------------------------------------------------------------------------------
/rubin_sim/maf/web/sorttable.js:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lsst/rubin_sim/5fd51c7f820329fb5644607851f46ef12d0efaee/rubin_sim/maf/web/sorttable.js
--------------------------------------------------------------------------------
/rubin_sim/maf/web/templates/allmetricresults.html:
--------------------------------------------------------------------------------
1 | {% extends "master.html" %}
2 | {% import 'macros.html' as mcr %}
3 |
4 | {% set active_page = "allMetricResults" %}
5 |
6 | {% set run = runlist.get_run(runId) %}
7 |
8 | {% set metrics = run.metrics %}
9 |
10 | {% block moresidebar %}
11 |
12 |
13 | {% for g in run.groups.keys() %}
14 |
15 | {{ g|escape }}
16 | {% for sg in run.groups[g] %}
17 |
18 | {% endfor %}
19 |
20 | {% endfor %}
21 |
22 | {% endblock %}
23 |
24 |
25 | {% block content %}
26 |
27 | {# Show all metric results, including summary stats in a table rather
28 | than per metric. #}
29 |
30 | {% set metricInfo = run.metric_info() %}
31 |
32 | {% for g in run.groups.keys() %}
33 | {% set groupstart = True %}
34 | {% for sg in run.groups[g] %}
35 | {% if groupstart == True %}
36 |
37 | {% set groupstart = False %}
38 | {% endif %}
39 |
40 | Group: {{g|escape}}; Subgroup: {{sg|escape}}
41 |
42 | {% set subsetMetrics = run.metrics_in_subgroup(g, sg) %}
43 |
44 | {% for metric in subsetMetrics %}
45 | {% set metricInfo = run.metric_info(metric) %}
46 | {% set metricPlots = run.plots_for_metric(metric) %}
47 | {# Only show anything here for this metric if it had plots #}
48 | {% if metricPlots|length > 0 %}
49 | {# Print the metric info #}
50 | {{ mcr.PrintMetricInfo(runId, metric, metricInfo) }}
51 | {# Add the plots for this metric #}
52 | {{ mcr.MakePlotTable(metricPlots, run) }}
53 |
54 | {% set caption = run.caption_for_metric(metric) %}
55 | {{ caption|escape }}
56 |
57 | {% endif %}
58 |
59 | {% endfor %}
60 |
61 | {# Add a table with the summary stats for all the metrics in this subgroup #}
62 |
63 | {% set statNames = run.all_stat_names(subsetMetrics) %}
64 |
65 | {{ mcr.MakeStatTable(subsetMetrics, g, sg, run) }}
66 |
67 |
68 |
69 | {% endfor %}
70 | {% endfor %}
71 |
72 | {% endblock %}
73 |
--------------------------------------------------------------------------------
/rubin_sim/maf/web/templates/configs.html:
--------------------------------------------------------------------------------
1 | {% extends "master.html" %}
2 |
3 | {% set active_page = "configs" %}
4 |
5 | {% set run = runlist.get_run(runId) %}
6 |
7 | {% block moresidebar %}
8 |
12 | {% endblock %}
13 |
14 | {% block content %}
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 | {% endblock %}
29 |
--------------------------------------------------------------------------------
/rubin_sim/maf/web/templates/metricselect.html:
--------------------------------------------------------------------------------
1 | {% extends "master.html" %}
2 |
3 | {% set active_page = "listMetrics" %}
4 | {% set run = runlist.get_run(runId) %}
5 |
6 |
7 | {% block moresidebar %}
8 |
9 |
90 |
91 | {% endblock %}
92 |
--------------------------------------------------------------------------------
/rubin_sim/maf/web/templates/results.html:
--------------------------------------------------------------------------------
1 | {% extends "master.html" %}
2 | {% import 'macros.html' as mcr %}
3 |
4 | {% set active_page = "listMetrics" %}
5 |
6 | {% set run = runlist.get_run(runId) %}
7 | {% set selectMetrics = run.convert_select_to_metrics(groupList,
8 | metricIdList) %}
9 | {% set selectGroups = run.metrics_to_subgroups(selectMetrics) %}
10 |
11 |
12 | {% block moresidebar %}
13 |
14 |
15 | {% for g in selectGroups.keys() %}
16 |
17 | {{ g|escape }}
18 | {% for sg in selectGroups[g] %}
19 |
20 | {% endfor %}
21 |
22 | {% endfor %}
23 |
24 | {% endblock %}
25 |
26 |
27 | {% block content %}
28 |
29 | {# show metric information for each metric selected from 'select' page #}
30 |
31 |
32 | {% for g in selectGroups.keys() %}
33 | {% set groupstart = True %}
34 | {% for sg in selectGroups[g] %}
35 | {% if groupstart == True %}
36 |
37 | {% set groupstart = False %}
38 | {% endif %}
39 |
40 | {% set subsetMetrics = run.metrics_in_subgroup(g, sg, metrics=selectMetrics) %}
41 | {% for metric in subsetMetrics %}
42 | {% set metricInfo = run.metric_info(metric) %}
43 |
44 |
45 | {# Print the metric info #}
46 | {{ mcr.PrintMetricInfo(runId, metric, metricInfo) }}
47 |
48 | {# Add the plots for this metric #}
49 | {% set metricPlots = run.plots_for_metric(metric) %}
50 | {{ mcr.MakePlotTable(metricPlots, run) }}
51 |
52 |
53 | {% set caption = run.caption_for_metric(metric) %}
54 | {{ caption|escape }}
55 |
56 |
57 | {# Add the summary stats for this metric #}
58 | {% set stats = run.stats_for_metric(metric) %}
59 | {% set statdict = run.stat_dict(stats) %}
60 |
61 | {% for name in statdict.keys() %}
62 |
63 |
64 | {{ name|escape }}:
65 |
66 |
67 | {% if name == 'Count' %}
68 | {{ '%d'|format(statdict[name]) }}
69 | {% else %}
70 | {{ '%.2f'|format(statdict[name])}}
71 | {% endif %}
72 |
73 |
74 | {% endfor %}
75 |
76 |
77 |
78 | {% endfor %}
79 | {% endfor %}
80 | {% endfor %}
81 |
82 | {% endblock %}
83 |
--------------------------------------------------------------------------------
/rubin_sim/maf/web/templates/runselect.html:
--------------------------------------------------------------------------------
1 | {% block javascript %}
2 |
3 | {% endblock %}
4 |
5 | {% extends "master.html" %}
6 |
7 | {% if runId < 0 %}
8 | {% set runId = runlist.runs['maf_run_id'][0] %}
9 | {% endif %}
10 |
11 | {% block pagetitle %}
12 | Opsim Run Select
13 | {% endblock %}
14 |
15 | {% block title %}
16 | List of all Opsim Runs
17 | {% endblock %}
18 |
19 | {% set active_page = "listRuns" %}
20 |
21 | {% block moresidebar %}
22 |
23 | {% endblock %}
24 |
25 | {% block content %}
26 |
27 |
28 |
29 | {# Get basic run info to include into table (for table header) #}
30 | {% set runInfo = runlist.run_info(runlist.runs[0]) %}
31 | {% for key in runInfo %}
32 | {{ key|escape }}
33 | {% endfor %}
34 |
35 |
36 |
37 | {# Show run info for each run #}
38 | {% for run in runlist.runs %}
39 | {% set runInfo = runlist.run_info(run) %}
40 |
41 | {% for key in runInfo %}
42 | {% if loop.index == 1 %}
43 | {{runInfo[key]|escape }}
44 | {% elif key == 'RunDb File' %}
45 | {{runInfo[key][1]|escape}}
46 | {% elif key == 'ResultsDb' %}
47 | ResultsDb
48 | {% else %}
49 | {{ runInfo[key]|escape }}
50 | {% endif %}
51 | {% endfor %}
52 |
53 | {% endfor %}
54 |
55 |
56 |
57 |
58 | {% endblock %}
59 |
--------------------------------------------------------------------------------
/rubin_sim/maf/web/templates/stats.html:
--------------------------------------------------------------------------------
1 | {% extends "master.html" %}
2 | {% import 'macros.html' as mcr %}
3 |
4 | {% set active_page = "stats" %}
5 |
6 | {% set run = runlist.get_run(runId) %}
7 |
8 | {% block moresidebar %}
9 |
10 | {% for group in run.groups.keys() %}
11 |
12 | {{group}}
13 | {% for subgroup in run.groups[group] %}
14 | {% set metrics = run.metrics_in_subgroup(group, subgroup) %}
15 | {% set statNames = run.all_stat_names(metrics) %}
16 | {% if statNames|length > 0 %}
17 |
18 | {% endif %}
19 | {% endfor %}
20 |
21 | {% endfor %}
22 |
23 | {% endblock %}
24 |
25 |
26 | {% block content %}
27 |
28 |
29 | {% set resultsDb = run.get_results_db() %}
30 | Download summary results sqlite file
31 |
32 |
33 | {% for group in run.groups.keys() %}
34 | {% set groupstart = True %}
35 | {% for subgroup in run.groups[group] %}
36 | {# Identify the metrics to put into table #}
37 | {% set metrics = run.metrics_in_subgroup(group, subgroup) %}
38 |
39 | {# add an anchor if it's the start of a group #}
40 | {% if groupstart %}
41 |
42 | {% set groupstart = False %}
43 | {% endif %}
44 |
45 | {{ mcr.MakeStatTable(metrics, group, subgroup, run) }}
46 |
47 | {% endfor %}
48 | {% endfor %} {# end of group/subgroup #}
49 |
50 | {% endblock %}
51 |
--------------------------------------------------------------------------------
/rubin_sim/moving_objects/__init__.py:
--------------------------------------------------------------------------------
1 | from .base_obs import * # noqa: F403
2 | from .cheby_fits import * # noqa: F403
3 | from .cheby_values import * # noqa: F403
4 | from .chebyshev_utils import * # noqa: F403
5 | from .direct_obs import * # noqa: F403
6 | from .ooephemerides import * # noqa: F403
7 | from .orbits import * # noqa: F403
8 | from .utils import * # noqa: F403
9 |
--------------------------------------------------------------------------------
/rubin_sim/moving_objects/pre_generate.py:
--------------------------------------------------------------------------------
1 | import glob
2 | import os
3 |
4 | import numpy as np
5 |
6 | from rubin_sim.data import get_data_dir
7 | from rubin_sim.moving_objects import DirectObs, Orbits
8 |
if __name__ == "__main__":
    # Pre-generate a series of nightly ephemerides with a 1-night timestep.
    mjd_start = 60676.0
    length = 365.25 * 12  # How long to pre-compute for (days)
    dtime = 1  # Timestep (days)
    mjds = np.arange(mjd_start, mjd_start + length, dtime)

    orbit_files = glob.glob(os.path.join(get_data_dir(), "orbits/") + "*.txt")
    # Single join is sufficient; the previous double os.path.join was redundant.
    output_dir = os.path.join(get_data_dir(), "orbits_precompute/")
    # Ensure the output directory exists before writing .npz files.
    os.makedirs(output_dir, exist_ok=True)

    names = ["ra", "dec"]
    types = [float] * 2
    dt = list(zip(names, types))

    for filename in orbit_files:
        print("working on %s" % filename)
        orbits = Orbits()
        orbits.read_orbits(filename)
        # Array to hold results: one (ra, dec) row per orbit, one column per night.
        results = np.zeros((len(orbits.orbits), np.size(mjds)), dt)
        do = DirectObs()
        _temp_positions = do.generate_ephemerides(orbits, mjds, eph_mode="nbody", eph_type="basic")
        results["ra"] += _temp_positions["ra"]
        results["dec"] += _temp_positions["dec"]
        np.savez(
            os.path.join(output_dir, os.path.basename(filename).replace(".txt", ".npz")),
            positions=results,
            mjds=mjds,
        )
38 |
--------------------------------------------------------------------------------
/rubin_sim/moving_objects/utils.py:
--------------------------------------------------------------------------------
1 | __all__ = ("read_observations",)
2 |
3 | import logging
4 |
5 | from rubin_sim.maf.utils import get_sim_data
6 |
7 |
def read_observations(simfile, colmap, constraint=None, dbcols=None):
    """Read the opsim database.

    Parameters
    ----------
    simfile : `str`
        Name (& path) of the opsim database file.
    colmap : `dict`
        colmap dictionary (from rubin_sim.maf.batches.ColMapDict).
        Not modified by this call; when it lacks a "rotSkyPos" entry,
        the literal column name "rotSkyPos" is used as the default.
    constraint : `str`, optional
        Optional SQL constraint (minus 'where') on the data to read from db.
        Default is None.
    dbcols : `list` of [`str`], optional
        List of additional columns to query from the db and add to the
        output observations.
        Default None.

    Returns
    -------
    simdata : `np.ndarray`, (N)
        The OpSim data read from the database.
    """
    # Look up rotSkyPos without mutating the caller's dictionary
    # (previously a default entry was written back into colmap).
    rot_sky_col = colmap.get("rotSkyPos", "rotSkyPos")

    # Set the minimum required columns.
    min_cols = [
        colmap["mjd"],
        colmap["night"],
        colmap["ra"],
        colmap["dec"],
        colmap["filter"],
        colmap["exptime"],
        colmap["seeingGeom"],
        colmap["fiveSigmaDepth"],
    ]
    if dbcols is not None:
        min_cols += dbcols

    more_cols = [
        rot_sky_col,
        colmap["seeingEff"],
        "solarElong",
        "observationId",
    ]

    # De-duplicate the requested columns before querying.
    cols = list(set(min_cols + more_cols))
    logging.info("Querying for columns:\n %s" % (cols))

    # Go ahead and query for all of the observations.
    simdata = get_sim_data(simfile, constraint, cols)
    logging.info("Queried data from opsim %s, fetched %d visits." % (simfile, len(simdata)))
    return simdata
62 |
--------------------------------------------------------------------------------
/rubin_sim/phot_utils/__init__.py:
--------------------------------------------------------------------------------
1 | from .bandpass import *
2 | from .photometric_parameters import *
3 | from .physical_parameters import *
4 | from .predicted_zeropoints import *
5 | from .sed import *
6 | from .signaltonoise import *
7 | from .spectral_resampling import *
8 |
--------------------------------------------------------------------------------
/rubin_sim/phot_utils/physical_parameters.py:
--------------------------------------------------------------------------------
1 | __all__ = ("PhysicalParameters",)
2 |
3 |
class PhysicalParameters:
    """
    Immutable physical constants used by the sims_phot_utils code.

    Each constant is exposed through a read-only property; assigning to
    any of them raises a ``RuntimeError``.
    """

    def __init__(self):
        # Private storage; the properties below provide read-only access.
        self._lightspeed = 299792458.0  # speed of light, m/s
        self._planck = 6.626068e-27  # planck's constant, ergs*seconds
        self._nm2m = 1.00e-9  # nanometers to meters conversion m/nm
        self._ergsetc2jansky = 1.00e23  # erg/cm2/s/Hz to Jansky units (fnu)

    @property
    def lightspeed(self):
        """Speed of light in meters per second."""
        return self._lightspeed

    @lightspeed.setter
    def lightspeed(self, value):
        # Constants are frozen after construction.
        raise RuntimeError("Cannot change the value of lightspeed (Einstein does not approve)")

    @property
    def planck(self):
        """Planck's constant in ergs*seconds."""
        return self._planck

    @planck.setter
    def planck(self, value):
        raise RuntimeError("Cannot change the value of planck")

    @property
    def nm2m(self):
        """Conversion factor to go from nm to m."""
        return self._nm2m

    @nm2m.setter
    def nm2m(self, value):
        raise RuntimeError("Cannot change the value of nm2m")

    @property
    def ergsetc2jansky(self):
        """Conversion factor to go from ergs/sec/cm^2 to Janskys."""
        return self._ergsetc2jansky

    @ergsetc2jansky.setter
    def ergsetc2jansky(self, value):
        raise RuntimeError("Cannot change the value of ergsetc2Jansky")
51 |
--------------------------------------------------------------------------------
/rubin_sim/phot_utils/sed_utils.py:
--------------------------------------------------------------------------------
1 | __all__ = ("get_imsim_flux_norm",)
2 |
3 | import numpy as np
4 |
5 | from .bandpass import Bandpass
6 |
7 |
def get_imsim_flux_norm(sed, magmatch):
    """
    Compute the multiplicative normalization that scales an SED to a
    requested magnitude in the imsim bandpass.

    Parameters
    -----------
    sed is the SED to be normalized

    magmatch is the desired magnitude in the imsim bandpass

    Returns
    --------
    The factor by which the flux of sed needs to be multiplied to achieve
    the desired magnitude.
    """

    # This shortcut relies on the imsim bandpass being a delta function.
    # If that ever ceases to be true, the unit test testSedUtils.py (which
    # compares this method against Sed.calcFluxNorm with the imsim
    # bandpass) will fail and we will know to modify this method.

    # Lazily determine, and cache on the function object, the single
    # wavelength at which the imsim bandpass is non-zero.
    if not hasattr(get_imsim_flux_norm, "imsim_wavelen"):
        imsim_bp = Bandpass()
        imsim_bp.imsim_bandpass()
        first_nonzero = np.where(imsim_bp.sb > 0.0)[0][0]
        get_imsim_flux_norm.imsim_wavelen = imsim_bp.wavelen[first_nonzero]

    target_wavelen = get_imsim_flux_norm.imsim_wavelen

    if sed.fnu is None:
        sed.flambda_tofnu()

    if not (sed.wavelen.min() <= target_wavelen <= sed.wavelen.max()):
        raise RuntimeError(
            "Cannot normalize sed at wavelength of %e nm\n"
            "The SED does not cover that wavelength\n"
            "(Covers %e < lambda %e)" % (target_wavelen, sed.wavelen.min(), sed.wavelen.max())
        )

    observed_mag = -2.5 * np.log10(np.interp(target_wavelen, sed.wavelen, sed.fnu)) - sed.zp
    return np.power(10, -0.4 * (magmatch - observed_mag))
53 |
--------------------------------------------------------------------------------
/rubin_sim/satellite_constellations/__init__.py:
--------------------------------------------------------------------------------
1 | from .basis_function import *
2 | from .model_observatory import *
3 | from .sat_utils import *
4 |
--------------------------------------------------------------------------------
/rubin_sim/satellite_constellations/basis_function.py:
--------------------------------------------------------------------------------
1 | __all__ = ("SatelliteAvoidBasisFunction",)
2 |
3 | import healpy as hp
4 | import numpy as np
5 | import rubin_scheduler.scheduler.basis_functions as bf
6 |
7 |
class SatelliteAvoidBasisFunction(bf.BaseBasisFunction):
    """Downweight sky regions forecast to contain satellite streaks.

    Satellite forecasts are read from the Conditions object, summed over
    the forecast window, smoothed, and returned as a negative-valued map
    so that positive basis-function weights steer away from streaks.

    Parameters
    ----------
    nside : `int`
        Healpix nside of the output map.
    forecast_time : `float`
        The time ahead to forecast satellite streaks (minutes).
    smooth_fwhm : `float`
        The smoothing full width half max to use (degrees)
    """

    def __init__(self, nside=32, forecast_time=90.0, smooth_fwhm=3.5):
        super().__init__(nside=nside)
        # Store the window in days and the smoothing kernel in radians.
        self.forecast_time = forecast_time / 60.0 / 24
        self.smooth_fwhm = np.radians(smooth_fwhm)

    def _calc_value(self, conditions, indx=None):
        # Locate forecast entries within [mjd, mjd + forecast window].
        window_start = np.min(np.searchsorted(conditions.satellite_mjds, conditions.mjd))
        window_end = np.max(
            np.searchsorted(conditions.satellite_mjds, conditions.mjd + self.forecast_time)
        )

        if window_end <= window_start:
            # No forecasts in the window: zero everywhere.
            return 0

        streaks = np.sum(conditions.satellite_maps[window_start:window_end], axis=0)
        streaks = hp.smoothing(streaks, fwhm=self.smooth_fwhm)
        streaks = hp.ud_grade(streaks, self.nside)
        streaks[np.where(streaks < 0)] = 0
        # Make it negative, so positive weights will result
        # in avoiding satellites
        return -1 * streaks
41 |
--------------------------------------------------------------------------------
/rubin_sim/selfcal/__init__.py:
--------------------------------------------------------------------------------
1 | from .generate_catalog import *
2 | from .offsets import *
3 | from .solver import *
4 | from .star_tools import *
5 |
--------------------------------------------------------------------------------
/rubin_sim/selfcal/star_tools.py:
--------------------------------------------------------------------------------
1 | __all__ = ("stars_project", "assign_patches")
2 |
3 | import numpy as np
4 | from rubin_scheduler.utils import gnomonic_project_toxy
5 |
6 |
def stars_project(stars, visit):
    """
    Project the stars onto the x,y tangent plane of a given visit and
    rotate by the visit rotSkyPos. Modifies `stars` in place.
    """
    x_proj, y_proj = gnomonic_project_toxy(
        np.radians(stars["ra"]),
        np.radians(stars["decl"]),
        np.radians(visit["ra"]),
        np.radians(visit["dec"]),
    )
    # Rotate the field using the visit rotSkyPos.
    # Hope I got that sign right...
    rot_angle = np.radians(visit["rotSkyPos"])
    sin_rot = np.sin(rot_angle)
    cos_rot = np.cos(rot_angle)
    stars["x"] = cos_rot * x_proj + sin_rot * y_proj
    stars["y"] = cos_rot * y_proj - sin_rot * x_proj

    stars["radius"] = (stars["x"] ** 2 + stars["y"] ** 2) ** 0.5
    return stars
26 |
27 |
def assign_patches(stars, visit, n_patches=16, radius_fov=1.8):
    """
    Assign patch IDs to each star, in place.

    Assumes the stars have already been projected to x,y (see
    `stars_project`).

    NOTE(review): assumes `n_patches` is a perfect square so the field of
    view divides into an nsides x nsides grid — confirm with callers.
    """
    # Only the y-extent of the projected field of view is needed to scale
    # the grid; the x-extent returned by the projection is unused.
    _, maxy = gnomonic_project_toxy(0.0, np.radians(radius_fov), 0.0, 0.0)
    nsides = n_patches**0.5

    # Shift and scale so all coords fall in 0 < x < nsides-1.
    px = np.floor((stars["x"] + maxy) / (2.0 * maxy) * nsides)
    py = np.floor((stars["y"] + maxy) / (2.0 * maxy) * nsides)

    stars["sub_patch"] = px + py * nsides
    # Offset by the visit id so patch ids are unique across visits.
    stars["patch_id"] = stars["sub_patch"] + visit["observationId"] * n_patches
    return stars
43 |
--------------------------------------------------------------------------------
/rubin_sim/sim_archive/__init__.py:
--------------------------------------------------------------------------------
import importlib.util
import logging

from .make_snapshot import *

# The prenight and sim_archive submodules need `lsst.resources`.
# find_spec returns None (falsy) when a package is absent, so this probes
# availability without importing the package.
HAVE_LSST_RESOURCES = importlib.util.find_spec("lsst") and importlib.util.find_spec("lsst.resources")
if HAVE_LSST_RESOURCES:
    from .prenight import *
    from .sim_archive import *
else:
    logging.error("rubin_sim.sim_archive requires lsst.resources.")
12 |
--------------------------------------------------------------------------------
/rubin_sim/skybrightness/__init__.py:
--------------------------------------------------------------------------------
1 | from .allsky_db import *
2 | from .interp_components import *
3 | from .sky_model import *
4 | from .twilight_func import *
5 | from .utils import *
6 |
--------------------------------------------------------------------------------
/rubin_sim/skybrightness/data/solarSpec/package.py:
--------------------------------------------------------------------------------
# One-off data-packaging script: resample the tabulated solar spectrum
# onto the ESO sky-spectra wavelength grid and save it as solarSpec.npz.
# NOTE(review): depends on the legacy `lsst.sims.photUtils` stack and the
# SIMS_SKYBRIGHTNESS_DATA_DIR environment variable, so it only runs in a
# legacy environment — confirm before relying on it.
import os

import lsst.sims.photUtils.Sed as Sed
import numpy as np

dataDir = os.getenv("SIMS_SKYBRIGHTNESS_DATA_DIR")

# Columns: wavelength (microns), irradiance.
data = np.genfromtxt(
    os.path.join(dataDir, "solarSpec/solarSpec.dat"),
    dtype=list(zip(["microns", "Irr"], [float] * 2)),
)
# #convert W/m2/micron to erg/s/cm2/nm (HA, it's the same!)
# data['Irr'] = data['Irr']*1

sun = Sed()
# Convert microns -> nm for the Sed wavelength grid.
sun.setSED(data["microns"] * 1e3, flambda=data["Irr"])

# Match the wavelength spacing and range to the ESO spectra
airglowSpec = np.load(os.path.join(dataDir, "ESO_Spectra/Airglow/airglowSpectra.npz"))
sun.resampleSED(wavelen_match=airglowSpec["wave"])

np.savez(os.path.join(dataDir, "solarSpec/solarSpec.npz"), wave=sun.wavelen, spec=sun.flambda)
23 |
--------------------------------------------------------------------------------
/rubin_sim/skybrightness/twilight_func.py:
--------------------------------------------------------------------------------
1 | __all__ = ("twilight_func", "zenith_twilight", "simple_twi")
2 |
3 | import numpy as np
4 |
5 |
def simple_twi(xdata, *args):
    """
    Evaluate a simple exponential twilight model: per-healpixel amplitude
    times a shared sun-altitude decay, plus a per-healpixel constant.

    xdata should have keys:
        sunAlt
        hpid

    args:
        0: slope
        1:hpid: magnitudes
        hpid+1:2*hpid: constant offsets
    """

    params = np.array(args)
    hpmax = np.max(xdata["hpid"])
    # Per-pixel amplitude and constant offset, indexed by healpixel id.
    amplitude = params[xdata["hpid"] + 1]
    offset = params[xdata["hpid"] + 2 + hpmax]
    return amplitude * np.exp(xdata["sunAlt"] * params[0]) + offset
24 |
25 |
def twilight_func(xdata, *args, amCut=1.0):
    """
    Evaluate the twilight sky-brightness model.

    xdata: numpy array with columns 'azRelSun', 'airmass', 'sunAlt'
    (all in radians). azRelSun is azimuth relative to the sun (i.e., the
    sun is at az zero). A 'hpid' column is additionally required when the
    optional per-healpixel terms (args[5:]) are supplied.

    based on what I've seen, here's my guess for how to fit the twilight:
    args[0] = ratio of (zenith twilight flux at sun_alt = -12) and dark sky
    zenith flux
    args[1] = decay slope for all pixels (mags/radian)
    args[2] = airmass term for hemisphere away from the sun.
    (factor to multiply max brightness at zenith by)
    args[3] = az term for hemisphere towards sun
    args[4] = zenith dark sky flux
    args[5:] = zenith dark sky times constant (optional)

    amCut : float (1.0)
        The airmass cut to apply to use only the away from sun fit.
        Was set to 1.1 previously for not very clear reasons.

    """

    args = np.array(args)
    az = xdata["azRelSun"]
    airmass = xdata["airmass"]
    sun_alt = xdata["sunAlt"]
    # Split the sky into the hemisphere toward the sun and everything else.
    away = np.where((airmass <= amCut) | ((az >= np.pi / 2) & (az <= 3.0 * np.pi / 2)))
    towards = np.where((airmass > amCut) & ((az < np.pi / 2) | (az > 3.0 * np.pi / 2)))

    # Base exponential decay with sun altitude, plus an airmass term.
    # (The previous `flux = np.zeros(...)` pre-allocation was a dead store,
    # immediately overwritten here, and has been removed.)
    flux = args[0] * args[4] * 10.0 ** (args[1] * (sun_alt + np.radians(12.0)) + args[2] * (airmass - 1.0))
    # Extra azimuth-dependent brightening for pixels toward the sun.
    flux[towards] *= 10.0 ** (args[3] * np.cos(az[towards]) * (airmass[towards] - 1.0))

    # This lets one fit the dark sky background simultaneously.
    # It assumes the dark sky is a function of airmass only.
    # Forced to be args[4] at zenith.
    if np.size(args) >= 6:
        flux[away] += args[4] * np.exp(args[5:][xdata["hpid"][away]] * (airmass[away] - 1.0))
        flux[towards] += args[4] * np.exp(args[5:][xdata["hpid"][towards]] * (airmass[towards] - 1.0))

    return flux
66 |
67 |
def zenith_twilight(alpha, *args):
    """
    Zenith flux as a twilight component plus a dark-sky constant.

    alpha = sun altitude (radians)
    args[0] = ratio of (zenith twilight flux at sunAlt = -12) and
    dark sky zenith flux
    args[1] = decay slope for all pixels (mags/radian)
    args[2] = airmass term for hemisphere away from the sun.
    (factor to multiply max brightness at zenith by)
    args[3] = az term for hemisphere towards sun
    args[4] = zenith dark sky flux
    """

    # Twilight term decays exponentially (in mags) with sun altitude,
    # pinned so the ratio args[0] applies at sunAlt = -12 degrees.
    twilight_part = args[0] * args[4] * 10.0 ** (args[1] * (alpha + np.radians(12.0)))
    return twilight_part + args[4]
84 |
--------------------------------------------------------------------------------
/rubin_sim/skybrightness/utils.py:
--------------------------------------------------------------------------------
1 | __all__ = ("wrap_ra", "robust_rms", "recalc_mags")
2 |
3 | import glob
4 | import os
5 |
6 | import numpy as np
7 | from rubin_scheduler.data import get_data_dir
8 |
9 | from rubin_sim.phot_utils import Bandpass, Sed
10 |
11 |
def wrap_ra(ra):
    """
    Wrap RA values into the range [0, 2pi) via the modulo operator.
    """
    two_pi = 2.0 * np.pi
    return ra % two_pi
18 |
19 |
def robust_rms(array, missing=0.0):
    """
    Estimate the RMS robustly from the interquartile range.

    Returns the `missing` value when given fewer than two elements.
    """
    if np.size(array) < 2:
        return missing
    q75, q25 = np.percentile(array, [75, 25])
    # The IQR of a Gaussian is 1.349 sigma (approximation).
    return (q75 - q25) / 1.349
31 |
32 |
def spec2mags(spectra_list, wave):
    """Convert a list of sky spectra into LSST ugrizy magnitudes.

    Returns a structured array with a 'mags' column (6 magnitudes per
    spectrum) and the array of filter effective wavelengths.
    """
    # Load the baseline LSST total throughput curves.
    throughput_dir = os.path.join(get_data_dir(), "throughputs/baseline")
    bands = ["u", "g", "r", "i", "z", "y"]

    result = np.zeros(len(spectra_list), dtype=[("mags", "float", (6))])

    filters = {}
    for band in bands:
        curve = np.loadtxt(
            os.path.join(throughput_dir, "total_" + band + ".dat"),
            dtype=list(zip(["wave", "trans"], [float] * 2)),
        )
        bandpass = Bandpass()
        bandpass.set_bandpass(curve["wave"], curve["trans"])
        filters[band] = bandpass

    filterwave = np.array([filters[band].calc_eff_wavelen()[0] for band in bands])

    for i, spectrum in enumerate(spectra_list):
        sky_sed = Sed()
        sky_sed.set_sed(wave, flambda=spectrum)
        for j, band in enumerate(bands):
            try:
                result["mags"][i][j] = sky_sed.calc_mag(filters[band])
            except ValueError:
                # Leave the magnitude at zero when calc_mag rejects the
                # spectrum for this bandpass (best-effort behavior).
                pass
    return result, filterwave
63 |
64 |
def recalc_mags(data_dir=None):
    """Recalculate the magnitudes for sky brightness components.

    DANGER: Overwrites data files in place. The rubin_sim_data/skybrightness
    folder will need to be packaged and updated after running this to propagate
    changes to other users.

    Parameters
    ----------
    data_dir : `str`, optional
        Root of the rubin_sim data directory. Defaults to `get_data_dir()`.
    """
    dirs = ["Airglow", "MergedSpec", "ScatteredStarLight", "Zodiacal", "LowerAtm", "Moon", "UpperAtm"]

    if data_dir is None:
        data_dir = get_data_dir()

    full_paths = [os.path.join(data_dir, "skybrightness/ESO_Spectra", dirname) for dirname in dirs]
    for path in full_paths:
        for filename in glob.glob(os.path.join(path, "*.npz")):
            # Copy the arrays out, closing the file handle promptly so the
            # same file can be overwritten below.
            with np.load(filename) as data:
                spec = data["spec"].copy()
                wave = data["wave"].copy()

            # Recompute magnitudes from the spectra and write back in place
            # (see the DANGER note above).
            new_mags, filterwave = spec2mags(spec["spectra"], wave)
            spec["mags"] = new_mags["mags"]

            np.savez(filename, wave=wave, spec=spec, filterWave=filterwave)
92 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
import setuptools_scm
from setuptools import setup

# The version is derived from git metadata via setuptools_scm; all other
# package configuration lives in pyproject.toml.
setup(version=setuptools_scm.get_version())
5 |
--------------------------------------------------------------------------------
/showmaf-deploy.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | apiVersion: v1
3 | kind: Namespace
4 | metadata:
5 | name: maf
6 |
7 | ---
8 | apiVersion: apps/v1
9 | kind: Deployment
10 | metadata:
11 | namespace: maf
12 | name: maf-server
13 | labels:
14 | app: maf-server
15 | spec:
16 | replicas: 1
17 | selector:
18 | matchLabels:
19 | app: maf-server
20 | template:
21 | metadata:
22 | labels:
23 | app: maf-server
24 | spec:
25 | containers:
26 | - name: maf
27 | image: "ghcr.io/lsst/rubin_sim:main"
28 | imagePullPolicy: Always
29 | resources:
30 | limits:
31 | cpu: 1
32 | memory: "10Gi"
33 | requests:
34 | cpu: 500m
35 | memory: "8Gi"
36 | volumeMounts:
37 | - mountPath: /data/fbs_sims
38 | name: sdf-data-rubin
39 | subPath: shared/fbs_sims
40 | volumes:
41 | - name: sdf-data-rubin
42 | persistentVolumeClaim:
43 | claimName: sdf-data-rubin
44 | ---
45 | apiVersion: v1
46 | kind: PersistentVolumeClaim
47 | metadata:
48 | namespace: maf
49 | name: sdf-data-rubin
50 | spec:
51 | storageClassName: sdf-data-rubin
52 | accessModes:
53 | - ReadOnlyMany
54 | resources:
55 | requests:
56 | storage: 8Gi
57 | ---
58 | apiVersion: v1
59 | kind: Service
60 | metadata:
61 | namespace: maf
62 | name: usdf-maf
63 | labels:
64 | app: maf-server
65 | annotations:
66 | metallb.universe.tf/address-pool: sdf-services
67 | spec:
68 | type: LoadBalancer
69 | ports:
70 | - name: http
71 | port: 80
72 | protocol: TCP
73 | targetPort: 8080
74 | selector:
75 | app: maf-server
76 | ---
77 | apiVersion: networking.k8s.io/v1
78 | kind: Ingress
79 | metadata:
80 | name: maf-ingress
81 | namespace: maf
82 | labels:
83 | app: maf-server
84 | spec:
85 | ingressClassName: nginx
86 | rules:
87 | - host: usdf-maf.slac.stanford.edu
88 | http:
89 | paths:
90 | - backend:
91 | service:
92 | name: usdf-maf
93 | port:
94 | number: 80
95 | path: /
96 | pathType: Prefix
--------------------------------------------------------------------------------
/test-requirements.txt:
--------------------------------------------------------------------------------
1 | pytest
2 | pytest-cov
3 | black>=25.0.0
4 | ruff
5 | isort
6 |
7 |
--------------------------------------------------------------------------------
/tests/data/test_data.py:
--------------------------------------------------------------------------------
1 | import os
2 | import unittest
3 |
4 | from rubin_scheduler.data import data_versions, get_data_dir
5 |
6 | from rubin_sim.data import get_baseline
7 | from rubin_sim.data import get_data_dir as gdd
8 |
9 |
class DataTest(unittest.TestCase):
    def testBaseline(self):
        """
        Check the data directory lookup and the baseline sim location.
        """
        data_dir = get_data_dir()
        # rubin_sim's get_data_dir must agree with rubin_scheduler's.
        assert gdd() == data_dir

        if "sim_baseline" in os.listdir(data_dir):
            _ = get_baseline()
            _ = data_versions()
23 |
24 |
25 | if __name__ == "__main__":
26 | unittest.main()
27 |
--------------------------------------------------------------------------------
/tests/data/test_ddf_grid.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | from rubin_scheduler.scheduler.surveys import generate_ddf_grid
4 |
5 |
class GenerateDDFTest(unittest.TestCase):
    def testGenDDF(self):
        """
        Exercise rubin_scheduler's DDF grid generator on a tiny survey.
        """
        # This triggers several RuntimeErrors (intentionally?).
        grid = generate_ddf_grid(survey_length=0.01, verbose=False)
        assert grid is not None
13 | assert result is not None
14 |
15 |
16 | if __name__ == "__main__":
17 | unittest.main()
18 |
--------------------------------------------------------------------------------
/tests/maf/test_3x2fom.py:
--------------------------------------------------------------------------------
1 | import os
2 | import shutil
3 | import tempfile
4 | import unittest
5 |
6 | from rubin_scheduler.data import get_data_dir
7 | from rubin_scheduler.utils.code_utilities import sims_clean_up
8 |
9 | import rubin_sim.maf as maf
10 |
11 | TEST_DB = "example_v3.4_0yrs.db"
12 |
13 |
class Test3x2(unittest.TestCase):
    """Smoke test: run the 3x2pt figure-of-merit summary metrics end to end."""

    @classmethod
    def tearDownClass(cls):
        sims_clean_up()

    def setUp(self):
        # Each test writes its MAF output into a fresh scratch directory.
        self.out_dir = tempfile.mkdtemp(prefix="TMB")

    @unittest.skipUnless(
        os.path.isdir(os.path.join(get_data_dir(), "maps")),
        # Fixed: the message previously said "3x3" although this is the 3x2 test.
        "Skipping 3x2 metric test because no dust maps.",
    )
    def test_3x2(self):
        # Only testing that the metric successfully runs, not checking that
        # the output values are valid.
        bundle_list = []
        nside = 64
        colmap = maf.batches.col_map_dict("fbs")
        nfilters_needed = 6
        lim_ebv = 0.2
        ptsrc_lim_mag_i_band = 25.9
        m = maf.metrics.ExgalM5WithCuts(
            m5_col=colmap["fiveSigmaDepth"],
            filter_col=colmap["filter"],
            lsst_filter="i",
            n_filters=nfilters_needed,
            extinction_cut=lim_ebv,
            depth_cut=ptsrc_lim_mag_i_band,
        )
        s = maf.slicers.HealpixSlicer(nside=nside, use_cache=False)
        # Exclude DDF visits and restrict to the first year of the survey.
        sql = "scheduler_note not like 'DD%' and night < 365"
        threeby_two_summary_simple = maf.metrics.StaticProbesFoMEmulatorMetricSimple(
            nside=nside, metric_name="3x2ptFoM_simple"
        )
        threeby_two_summary = maf.maf_contrib.StaticProbesFoMEmulatorMetric(
            nside=nside, metric_name="3x2ptFoM"
        )
        bundle_list.append(
            maf.metric_bundles.MetricBundle(
                m,
                s,
                sql,
                summary_metrics=[threeby_two_summary, threeby_two_summary_simple],
            )
        )

        database = os.path.join(get_data_dir(), "tests", TEST_DB)
        results_db = maf.db.ResultsDb(out_dir=self.out_dir)
        bd = maf.metric_bundles.make_bundles_dict_from_list(bundle_list)
        bg = maf.metric_bundles.MetricBundleGroup(bd, database, out_dir=self.out_dir, results_db=results_db)
        bg.run_all()

    def tearDown(self):
        # Remove the scratch output directory.
        if os.path.isdir(self.out_dir):
            shutil.rmtree(self.out_dir)
69 |
70 |
71 | if __name__ == "__main__":
72 | unittest.main()
73 |
--------------------------------------------------------------------------------
/tests/maf/test_batchcommon.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | import rubin_sim.maf.batches as batches
4 |
5 |
class TestCommon(unittest.TestCase):
    def test_col_map(self):
        """The v4 and fbs column maps expose the expected entries."""
        v4_map = batches.col_map_dict("opsimv4")
        self.assertEqual(v4_map["raDecDeg"], True)
        self.assertEqual(v4_map["ra"], "fieldRA")
        fbs_map = batches.col_map_dict("fbs")
        self.assertEqual(fbs_map["raDecDeg"], True)
        self.assertEqual(fbs_map["skyBrightness"], "skyBrightness")

    def test_filter_list(self):
        """filter_list handles per-filter sql, 'all', and extra constraints."""
        per_filter = batches.common.filter_list(all=False, extra_sql=None)
        filterlist, colors, orders, sqls, info_label = per_filter
        self.assertEqual(len(filterlist), 6)
        self.assertEqual(len(colors), 6)
        self.assertEqual(sqls["u"], "filter = 'u'")

        with_all = batches.common.filter_list(all=True, extra_sql=None)
        filterlist, colors, orders, sqls, info_label = with_all
        self.assertIn("all", filterlist)
        self.assertEqual(sqls["all"], "")

        constrained = batches.common.filter_list(all=True, extra_sql="night=3")
        filterlist, colors, orders, sqls, info_label = constrained
        self.assertEqual(sqls["all"], "night=3")
        self.assertEqual(sqls["u"], "(night=3) and (filter = 'u')")
        self.assertEqual(info_label["u"], "night=3 u band")

        labeled = batches.common.filter_list(
            all=True, extra_sql="night=3", extra_info_label="night 3"
        )
        filterlist, colors, orders, sqls, info_label = labeled
        self.assertEqual(info_label["u"], "night 3 u band")
35 | if __name__ == "__main__":
36 | unittest.main()
37 |
--------------------------------------------------------------------------------
/tests/maf/test_color_slopes.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | import numpy as np
4 |
5 | import rubin_sim.maf.metrics as metrics
6 |
7 |
8 | class TestSimpleMetrics(unittest.TestCase):
9 | def test_color_slope(self):
10 | names = ["night", "observationStartMJD", "filter", "fiveSigmaDepth"]
11 | types = [int, float, " 0]
52 | self.assertTrue(
53 | np.allclose(
54 | np.array(SIMDATA["observationStartMJD"]),
55 | slice_points["mjd"][nonempty_slices],
56 | atol=self.interval_seconds / (24 * 60 * 60.0),
57 | )
58 | )
59 | self.assertTrue(np.all(slice_points["duration"] == self.interval_seconds))
60 |
61 |
class TestBlockIntervalSlicer(unittest.TestCase):
    """Tests for BlockIntervalSlicer on the module-level SIMDATA visits."""

    def setUp(self):
        self.slicer = BlockIntervalSlicer()

    def test_setup_slicer(self):
        """Each slice should group visits that share one scheduler_note."""
        self.slicer.setup_slicer(SIMDATA)
        # SIMDATA is built with four distinct observing blocks —
        # presumably one per scheduler_note run; TODO confirm against SIMDATA.
        self.assertEqual(self.slicer.nslice, 4)
        slice_points = self.slicer.get_slice_points()
        visits = pd.DataFrame(SIMDATA)
        for sid in slice_points["sid"]:
            block_visits = visits.iloc[self.slicer.sim_idxs[sid]]
            notes = block_visits["scheduler_note"]
            # Every visit within one block carries the same note.
            self.assertTrue(np.all(notes == notes.values[0]))
77 |
78 |
class TestVisitIntervalSlicer(unittest.TestCase):
    """Tests for VisitIntervalSlicer: one slice per visit in SIMDATA."""

    def setUp(self):
        self.slicer = VisitIntervalSlicer()

    def test_setup_slicer(self):
        """The slicer yields one slice per visit with the expected keys."""
        self.slicer.setup_slicer(SIMDATA)
        self.assertEqual(self.slicer.nslice, len(SIMDATA["observationStartMJD"]))
        slice_points = self.slicer.get_slice_points()
        # Each slice point must expose an id, a start time, and a duration.
        for key in ("sid", "mjd", "duration"):
            self.assertIn(key, slice_points)
        # Durations come straight from the per-visit exposure time.
        self.assertTrue(np.all(slice_points["duration"] == SIMDATA["visitTime"]))
91 |
92 |
93 | # internal functions & classes
94 |
# Run this module's tests when executed directly. (Previously spelled via an
# intermediate run_tests_now variable; the standard guard is used everywhere
# else in the test suite, so keep it consistent.)
if __name__ == "__main__":
    unittest.main()
98 |
--------------------------------------------------------------------------------
/tests/moving_objects/test_camera.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | import numpy as np
4 |
5 | from rubin_sim.moving_objects import BaseObs
6 |
7 |
@unittest.skip("Temporary skip until ephemerides replaced")
class TestCamera(unittest.TestCase):
    """Tests for camera-footprint filtering of moving-object ephemerides."""

    def setUp(self):
        # Two ephemeris positions at the same epoch: one coincident with
        # the pointing center, one offset ~2 degrees in RA.
        eph_dtype = [("ra", float), ("dec", float), ("mjd", float)]
        self.ephems = np.array(
            [(10.0, -30.0, 59580.16), (12.1, -30.0, 59580.16)],
            dtype=eph_dtype,
        )
        # Matching observation pointings, both centered at (10, -30).
        obs_dtype = [("ra", float), ("dec", float), ("rotSkyPos", float), ("mjd", float)]
        self.obs = np.array(
            [(10.0, -30.0, 0.0, 59580.16), (10.0, -30.0, 0.0, 59580.16)],
            dtype=obs_dtype,
        )

    def test_camera_fov(self):
        """Only the on-axis object should fall inside the camera footprint."""
        obs = BaseObs(
            obs_ra="ra",
            obs_dec="dec",
            obs_time_col="mjd",
            footprint="camera",
        )
        idx_obs = obs.sso_in_camera_fov(self.ephems, self.obs)
        self.assertEqual(idx_obs, [0])
36 |
37 |
# Run this module's tests when executed directly.
if __name__ == "__main__":
    unittest.main()
40 |
--------------------------------------------------------------------------------
/tests/moving_objects/test_chebyshevutils.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | import numpy as np
4 |
5 | from rubin_sim.moving_objects import chebeval, chebfit, make_cheb_matrix
6 |
7 |
class TestChebgrid(unittest.TestCase):
    """Tests for the Chebyshev utilities chebfit/chebeval/make_cheb_matrix."""

    def test_raise_error(self):
        """chebeval raises if 'interval' does not have exactly two elements."""
        x = np.linspace(-1, 1, 9)
        y = np.sin(x)
        dy = np.cos(x)
        p, resid, rms, maxresid = chebfit(x, y, dy, n_poly=4)
        with self.assertRaises(RuntimeError):
            chebeval(np.linspace(-1, 1, 17), p, interval=[1, 2, 3])

    def test_eval(self):
        """Evaluation with and without velocity agrees; masking yields NaN."""
        x = np.linspace(-1, 1, 9)
        y = np.sin(x)
        dy = np.cos(x)
        p, resid, rms, maxresid = chebfit(x, y, dy, n_poly=4)
        yy_w_vel, vv = chebeval(np.linspace(-1, 1, 17), p)
        yy_wout_vel, vv = chebeval(np.linspace(-1, 1, 17), p, do_velocity=False)
        self.assertTrue(np.allclose(yy_wout_vel, yy_w_vel))
        # Test that we get a nan for a value outside the range of the
        # 'interval', if mask=True
        yy_w_vel, vv = chebeval(np.linspace(-2, 1, 17), p, mask=True)
        self.assertTrue(
            np.isnan(yy_w_vel[0]),
            msg="Expected NaN for masked/out of range value, but got %.2e" % (yy_w_vel[0]),
        )

    def test_ends_locked(self):
        """Position and velocity should match exactly at the interval ends,
        across a range of polynomial orders."""
        x = np.linspace(-1, 1, 9)
        y = np.sin(x)
        dy = np.cos(x)
        for n_poly in range(4, 10):
            # Bug fix: the loop variable was previously unused, so every
            # iteration re-fit with n_poly=4 instead of varying the order.
            p, resid, rms, maxresid = chebfit(x, y, dy, n_poly=n_poly)
            yy, vv = chebeval(np.linspace(-1, 1, 17), p)
            self.assertAlmostEqual(yy[0], y[0], places=13)
            self.assertAlmostEqual(yy[-1], y[-1], places=13)
            self.assertAlmostEqual(vv[0], dy[0], places=13)
            self.assertAlmostEqual(vv[-1], dy[-1], places=13)

    def test_accuracy(self):
        """If n_poly is greater than number of values being fit,
        then fit should be exact."""
        x = np.linspace(0, np.pi, 9)
        y = np.sin(x)
        dy = np.cos(x)
        p, resid, rms, maxresid = chebfit(x, y, dy, n_poly=16)
        yy, vv = chebeval(x, p, interval=np.array([0, np.pi]))
        self.assertTrue(np.allclose(yy, y, rtol=1e-13))
        self.assertTrue(np.allclose(vv, dy, rtol=1e-13))
        self.assertLess(np.sum(resid), 1e-13)

    def test_accuracy_prefit_c1c2(self):
        """If n_poly is greater than number of values being fit,
        then the fit should be exact — also when passing the precomputed
        x/dx multiplier matrices from make_cheb_matrix."""
        NPOINTS = 8
        NPOLY = 16
        x = np.linspace(0, np.pi, NPOINTS + 1)
        y = np.sin(x)
        dy = np.cos(x)
        xmatrix, dxmatrix = make_cheb_matrix(NPOINTS + 1, NPOLY)
        p, resid, rms, maxresid = chebfit(
            x, y, dy, x_multiplier=xmatrix, dx_multiplier=dxmatrix, n_poly=NPOLY
        )
        yy, vv = chebeval(x, p, interval=np.array([0, np.pi]))
        self.assertTrue(np.allclose(yy, y, rtol=1e-13))
        self.assertTrue(np.allclose(vv, dy, rtol=1e-13))
        self.assertLess(np.sum(resid), 1e-13)
73 |
74 |
# Run this module's tests when executed directly.
if __name__ == "__main__":
    unittest.main()
77 |
--------------------------------------------------------------------------------
/tests/phot_utils/test_photometry.py:
--------------------------------------------------------------------------------
1 | import os
2 | import unittest
3 |
4 | import numpy as np
5 | from rubin_scheduler.data import get_data_dir
6 | from rubin_scheduler.utils.code_utilities import sims_clean_up
7 |
8 | from rubin_sim.phot_utils.bandpass import Bandpass
9 | from rubin_sim.phot_utils.sed import Sed
10 |
11 |
class PhotometryUnitTest(unittest.TestCase):
    """Tests for Sed/Bandpass photometry calculations."""

    @classmethod
    def tearDownClass(cls):
        # Bug fix: this hook was named tearDown_class, which unittest never
        # calls; it must be tearDownClass for sims_clean_up() to run.
        sims_clean_up()

    def test_alternate_bandpasses_stars(self):
        """Test our ability to do photometry using non-LSST bandpasses.

        Calculate the photometry by built-in methods and 'by hand'.
        """
        bandpass_dir = os.path.join(get_data_dir(), "tests", "cartoonSedTestData")

        test_band_passes = {}
        keys = ["u", "g", "r", "i", "z"]

        bplist = []

        # Read the cartoon test bandpasses for each band.
        for kk in keys:
            test_band_passes[kk] = Bandpass()
            test_band_passes[kk].read_throughput(os.path.join(bandpass_dir, "test_bandpass_%s.dat" % kk))
            bplist.append(test_band_passes[kk])

        # Build the phi array used for the 'by hand' magnitude calculation.
        sed_obj = Sed()
        phi_array, wave_len_step = sed_obj.setup_phi_array(bplist)

        sed_file_name = os.path.join(get_data_dir(), "tests", "cartoonSedTestData/starSed/")
        sed_file_name = os.path.join(sed_file_name, "kurucz", "km20_5750.fits_g40_5790.gz")
        ss = Sed()
        ss.read_sed_flambda(sed_file_name)

        # Normalize the SED to magnitude 22 in the imsim control bandpass.
        control_bandpass = Bandpass()
        control_bandpass.imsim_bandpass()
        ff = ss.calc_flux_norm(22.0, control_bandpass)
        ss.multiply_flux_norm(ff)

        # Magnitudes via the built-in calc_mag.
        test_mags = []
        for kk in keys:
            test_mags.append(ss.calc_mag(test_band_passes[kk]))

        # Magnitudes 'by hand' from the phi array and fnu.
        ss.resample_sed(wavelen_match=bplist[0].wavelen)
        ss.flambda_tofnu()
        mags = -2.5 * np.log10(np.sum(phi_array * ss.fnu, axis=1) * wave_len_step) - ss.zp
        self.assertEqual(len(mags), len(test_mags))
        self.assertGreater(len(mags), 0)
        # The two methods should agree to ~millimag precision.
        for j in range(len(mags)):
            self.assertAlmostEqual(mags[j], test_mags[j], 3)
58 |
59 |
# Run this module's tests when executed directly.
if __name__ == "__main__":
    unittest.main()
62 |
--------------------------------------------------------------------------------
/tests/phot_utils/test_predicted_zeropoints.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | import numpy as np
4 | from rubin_scheduler.utils import SysEngVals
5 |
6 | from rubin_sim.phot_utils import (
7 | predicted_zeropoint,
8 | predicted_zeropoint_e2v,
9 | predicted_zeropoint_hardware,
10 | predicted_zeropoint_hardware_e2v,
11 | predicted_zeropoint_hardware_itl,
12 | predicted_zeropoint_itl,
13 | )
14 |
15 |
class PredictedZeropointsTst(unittest.TestCase):
    """Tests comparing predicted zeropoints to system engineering values."""

    def test_predicted_zeropoints(self):
        """Predicted zeropoints match SysEngVals and scale as expected."""
        sev = SysEngVals()
        for band in ["u", "g", "r", "i", "z", "y"]:
            zp = predicted_zeropoint(band=band, airmass=1.0, exptime=1)
            # Should closely match the SysEng throughput-based zeropoints.
            self.assertAlmostEqual(zp, sev.zp_t[band], delta=0.005)
            # Hardware-only excludes atmospheric losses, so it is larger.
            zp_hardware = predicted_zeropoint_hardware(band, exptime=1)
            self.assertLess(zp, zp_hardware)
            # Vendor-specific (ITL / E2V) values stay near the fiducial ones.
            self.assertAlmostEqual(
                zp, predicted_zeropoint_itl(band=band, airmass=1.0, exptime=1), delta=0.1
            )
            self.assertAlmostEqual(
                zp, predicted_zeropoint_e2v(band=band, airmass=1.0, exptime=1), delta=0.1
            )
            self.assertAlmostEqual(
                zp_hardware, predicted_zeropoint_hardware_itl(band=band, exptime=1), delta=0.1
            )
            self.assertAlmostEqual(
                zp_hardware, predicted_zeropoint_hardware_e2v(band=band, exptime=1), delta=0.1
            )
            # Higher airmass means more extinction, hence a smaller zeropoint.
            self.assertGreater(zp, predicted_zeropoint(band=band, airmass=1.5, exptime=1))
            # Longer exposure scales the zeropoint by 2.5 log10(exptime).
            zp_30s = predicted_zeropoint(band=band, airmass=1.0, exptime=30)
            self.assertAlmostEqual(zp, zp_30s - 2.5 * np.log10(30), places=7)

        # Sanity-check zeropoint ranges over airmass/exptime grids
        # (uses the last band from the loop above, as the original did).
        for zpfunc in (predicted_zeropoint, predicted_zeropoint_itl, predicted_zeropoint_e2v):
            values = np.array(
                [
                    zpfunc(band, airmass, exptime)
                    for airmass in np.arange(1.0, 2.5, 0.1)
                    for exptime in np.arange(1.0, 130, 30)
                ]
            )
            self.assertLess(values.max() - values.min(), 6)
            self.assertLess(values.max(), 35)
            self.assertGreater(values.min(), 25)
49 | self.assertTrue(zp.min() > 25)
50 |
51 |
# Run this module's tests when executed directly.
if __name__ == "__main__":
    unittest.main()
54 |
--------------------------------------------------------------------------------
/tests/phot_utils/test_read_bandpasses.py:
--------------------------------------------------------------------------------
1 | import os
2 | import unittest
3 |
4 | from rubin_scheduler.data import get_data_dir
5 |
6 | from rubin_sim.phot_utils import Bandpass
7 |
8 |
class ReadBandPassTest(unittest.TestCase):
    """
    Tests for reading in bandpasses
    """

    def test_read(self):
        """
        Check that we can read things stored in the throughputs directory.
        """
        throughputs_dir = os.path.join(get_data_dir(), "throughputs")
        # One representative throughput file per survey subdirectory.
        sample_files = (
            "2MASS/2MASS_Ks.dat",
            "WISE/WISE_w1.dat",
            "johnson/johnson_U.dat",
            "sdss/sdss_r.dat",
        )
        for relative_path in sample_files:
            bandpass = Bandpass()
            # Reading should succeed without raising for every file.
            bandpass.read_throughput(os.path.join(throughputs_dir, relative_path))
30 |
--------------------------------------------------------------------------------
/tests/satellite_constellations/test_satellites.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | import numpy as np
4 | from rubin_scheduler.utils import SURVEY_START_MJD
5 |
6 | from rubin_sim.satellite_constellations import Constellation, oneweb_tles, starlink_tles_v1, starlink_tles_v2
7 |
8 |
class TestSatellites(unittest.TestCase):
    """Tests for satellite constellation streak predictions."""

    def test_constellations(self):
        """Build TLE sets and check pointings against a Constellation."""
        mjd0 = SURVEY_START_MJD
        # All three TLE generators should return something usable.
        sv1 = starlink_tles_v1()
        assert sv1 is not None
        assert starlink_tles_v2() is not None
        assert oneweb_tles() is not None

        constellation = Constellation(sv1)
        pointing_ras = np.array([85.0, 82.0])
        pointing_decs = np.array([0.0, 0.0])
        pointing_mjds = np.arange(2) + mjd0 + 1.5
        lengths, n_streaks = constellation.check_pointings(
            pointing_ras,
            pointing_decs,
            pointing_mjds,
            30.0,
        )

        # One streak-length and one streak-count entry per pointing.
        assert np.size(lengths) == 2
        assert np.size(n_streaks) == 2
33 |
34 |
# Run this module's tests when executed directly.
if __name__ == "__main__":
    unittest.main()
37 |
--------------------------------------------------------------------------------
/tests/sim_archive/test_make_snapshot.py:
--------------------------------------------------------------------------------
1 | import importlib.util
2 | import unittest
3 |
4 | from rubin_scheduler.scheduler.schedulers.core_scheduler import CoreScheduler
5 |
# find_spec on a dotted submodule raises ModuleNotFoundError when the parent
# package is absent, so probe for the "lsst" namespace package first.
if importlib.util.find_spec("lsst"):
    HAVE_TS = importlib.util.find_spec("lsst.ts")
else:
    HAVE_TS = False

if HAVE_TS:
    # Only import the snapshot helper when lsst.ts is available.
    from rubin_sim.sim_archive import get_scheduler_instance_from_repo
13 |
14 |
class TestMakeSnapshot(unittest.TestCase):
    """Tests for building a scheduler instance from a config repository."""

    @unittest.skip("Skipping because test depends on external repo.")
    @unittest.skipIf(not HAVE_TS, "No lsst.ts")
    def test_get_scheduler_instance_photcal(self):
        # Clones ts_config_ocs and runs its auxtel photocal scheduler
        # config script; requires network access and lsst.ts.
        scheduler = get_scheduler_instance_from_repo(
            config_repo="https://github.com/lsst-ts/ts_config_ocs.git",
            config_script="Scheduler/feature_scheduler/auxtel/fbs_config_image_photocal_survey.py",
            config_branch="main",
        )
        self.assertIsInstance(scheduler, CoreScheduler)
25 |
--------------------------------------------------------------------------------
/tests/sim_archive/test_prenight.py:
--------------------------------------------------------------------------------
1 | import importlib.util
2 | import unittest
3 | from tempfile import TemporaryDirectory
4 |
# lsst.resources is optional; the archive tests are skipped without it.
try:
    from lsst.resources import ResourcePath

    HAVE_RESOURCES = True
except ModuleNotFoundError:
    HAVE_RESOURCES = False

if HAVE_RESOURCES:
    from rubin_sim.sim_archive import prenight_sim_cli

# We need rubin_sim to get the baseline sim
# Tooling prefers checking that it exists using importlib rather
# than importing it and not actually using it.
HAVE_RUBIN_SIM = importlib.util.find_spec("rubin_sim")
19 |
20 |
class TestPrenight(unittest.TestCase):
    """Tests for the prenight simulation command-line driver."""

    @unittest.skip("Too slow")
    @unittest.skipIf(not HAVE_RESOURCES, "No lsst.resources")
    @unittest.skipIf(not HAVE_RUBIN_SIM, "No rubin_sim, needed for rubin_sim.data.get_baseline")
    def test_prenight(self):
        """Run a prenight simulation into a temporary archive directory."""
        with TemporaryDirectory() as test_archive_dir:
            archive_uri = ResourcePath(test_archive_dir).geturl()  # type: ignore
            prenight_sim_cli(["--archive", archive_uri, "--telescope", "simonyi"])
30 |
--------------------------------------------------------------------------------