├── .gitignore ├── .readthedocs.yaml ├── LICENSE.BSD-2-Clause ├── LICENSE.GPL-2.0 ├── LICENSE.txt ├── MANIFEST.in ├── README.md ├── docs ├── Makefile ├── conf.py ├── examples.rst ├── figs │ └── comparison.png ├── index.rst ├── installation.rst ├── license.rst ├── maps.rst ├── modules.rst └── requirements.txt ├── dustmaps ├── __init__.py ├── bayestar.py ├── bh.py ├── chen2014.py ├── chen2018.py ├── config.py ├── csfd.py ├── data │ └── bh │ │ ├── bh.h5 │ │ ├── rednorth.ascii │ │ └── redsouth.ascii ├── decaps.py ├── dustexceptions.py ├── edenhofer2023.py ├── equirectangular_map.py ├── examples │ ├── __init__.py │ ├── plot_bayestar.py │ ├── plot_bh.py │ ├── plot_chen2014.py │ ├── plot_comparison.py │ ├── plot_iphas.py │ ├── plot_lenz2017.py │ ├── plot_marshall.py │ ├── plot_planck.py │ └── plot_sfd.py ├── fetch_utils.py ├── gaia_tge.py ├── healpix_map.py ├── iphas.py ├── json_serializers.py ├── leike2020.py ├── leike_ensslin_2019.py ├── lenz2017.py ├── map_base.py ├── marshall.py ├── output │ └── .gitignore ├── pg2010.py ├── planck.py ├── sfd.py ├── std_paths.py ├── tests │ ├── __init__.py │ ├── argonaut_output_v1.txt │ ├── ned_output.json │ ├── test_bayestar.py │ ├── test_config.json │ ├── test_config.py │ ├── test_config_with_envvar.json │ ├── test_edenhofer2023.py │ ├── test_iphas.py │ ├── test_marshall.py │ ├── test_planck.py │ ├── test_serializers.py │ └── test_sfd.py └── unstructured_map.py ├── paper ├── codemeta.json ├── figure.pdf ├── paper.bib └── paper.md └── setup.py /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.egg-info* 3 | *.png 4 | *.ipynb 5 | *.dat 6 | *.bak 7 | dist/ 8 | venv/ 9 | docs/_build/ 10 | build/ 11 | dustmaps/data/ 12 | frames/ 13 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # Read the Docs configuration file for Sphinx projects 2 | # See 
https://docs.readthedocs.io/en/stable/config-file/v2.html for details 3 | 4 | # Required 5 | version: 2 6 | 7 | # Set the OS, Python version and other tools you might need 8 | build: 9 | os: ubuntu-22.04 10 | tools: 11 | python: "3.12" 12 | # You can also specify other tool versions: 13 | # nodejs: "20" 14 | # rust: "1.70" 15 | # golang: "1.20" 16 | 17 | # Build documentation in the "docs/" directory with Sphinx 18 | sphinx: 19 | configuration: docs/conf.py 20 | # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs 21 | # builder: "dirhtml" 22 | # Fail on all warnings to avoid broken references 23 | # fail_on_warning: true 24 | 25 | # Optionally build your docs in additional formats such as PDF and ePub 26 | # formats: 27 | # - pdf 28 | # - epub 29 | 30 | # Optional but recommended, declare the Python requirements required 31 | # to build your documentation 32 | # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html 33 | python: 34 | install: 35 | - requirements: docs/requirements.txt 36 | -------------------------------------------------------------------------------- /LICENSE.BSD-2-Clause: -------------------------------------------------------------------------------- 1 | BSD 2-Clause License 2 | 3 | Copyright (c) 2024, Gregory M. Green 4 | 5 | Redistribution and use in source and binary forms, with or without 6 | modification, are permitted provided that the following conditions are met: 7 | 8 | 1. Redistributions of source code must retain the above copyright notice, this 9 | list of conditions and the following disclaimer. 10 | 11 | 2. Redistributions in binary form must reproduce the above copyright notice, 12 | this list of conditions and the following disclaimer in the documentation 13 | and/or other materials provided with the distribution. 
14 | 15 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 16 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 17 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 18 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 19 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 20 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 21 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 22 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 23 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 24 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 25 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2024, Gregory M. Green 2 | 3 | This project is dual-licensed under the following licenses: 4 | 5 | 1. The GNU General Public License, Version 2.0 (GPL-2.0) 6 | - Full text available in the file `LICENSE.GPL-2.0`. 7 | - More information: https://opensource.org/licenses/GPL-2.0 8 | 9 | 2. The 2-Clause BSD License (Simplified BSD License) 10 | - Full text available in the file `LICENSE.BSD-2-Clause`. 11 | - More information: https://opensource.org/licenses/BSD-2-Clause 12 | 13 | You may choose to use this software under the terms of either license. 14 | If you redistribute or modify this software, ensure your usage complies with the chosen license's terms. 
15 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include dustmaps/tests/ned_output.json 2 | include dustmaps/data/bh/bh.h5 3 | include LICENSE.txt 4 | include README.md 5 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![DOI](http://joss.theoj.org/papers/10.21105/joss.00695/status.svg)](https://doi.org/10.21105/joss.00695) [![DOI](https://zenodo.org/badge/59614814.svg)](https://zenodo.org/badge/latestdoi/59614814) 2 | 3 | dustmaps 4 | ======== 5 | 6 | The ``dustmaps`` package provides a uniform interface for dealing with a number 7 | of 2D and 3D maps of interstellar dust reddening/extinction. 8 | 9 | Supported Dust Maps 10 | ------------------- 11 | 12 | The currently supported dust maps are: 13 | 14 | 1. Burstein & Heiles (1982; BH'82) 15 | 2. Chen et al. (2014) 16 | 3. Green, Schlafly, Finkbeiner et al. (2015,2018,2019; Bayestar) 17 | 4. Marshall et al. (2006) 18 | 5. Planck Collaboration (2013) 19 | 6. Planck Collaboration (2016; GNILC) 20 | 7. Sale et al. (2014; IPHAS) 21 | 8. Schlegel, Finkbeiner & Davis (1998; SFD'98) 22 | 9. Lenz, Hensley & Doré (2017) 23 | 10. Peek & Graves (2010) 24 | 11. Leike & Enßlin (2019) 25 | 12. Leike, Glatzle & Enßlin (2020) 26 | 13. Edenhofer et al. (2023) 27 | 14. Chiang (2023; CSFD) 28 | 15. Zucker, Saydjari, & Speagle et al. (2025; DECaPS) 29 | 30 | To request addition of another dust map in this package, [file an issue on 31 | GitHub](https://github.com/gregreen/dustmaps/issues), or submit a pull request. 
32 | 33 | 34 | Installation 35 | ------------ 36 | 37 | Download the repository from [GitHub](https://github.com/gregreen/dustmaps) and 38 | then run: 39 | 40 | python setup.py install --large-data-dir=/path/where/you/want/large/data/files/stored 41 | 42 | Alternatively, you can use the Python package manager `pip`: 43 | 44 | pip install dustmaps 45 | 46 | 47 | Getting the Data 48 | ---------------- 49 | 50 | To fetch the data for the SFD dust map, run: 51 | 52 | python setup.py fetch --map-name=sfd 53 | 54 | You can download the other dust maps by changing "sfd" to "csfd", "planck", 55 | "planckGNILC", "bayestar", "iphas", "marshall", "chen2014", "lenz2017", 56 | "pg2010", "leikeensslin2019", "leike2020", "edenhofer2023", "bh", or "decaps". 57 | 58 | Alternatively, if you have used `pip` to install `dustmaps`, then you can 59 | configure the data directory and download the data by opening up a python 60 | interpreter and running: 61 | 62 | >>> from dustmaps.config import config 63 | >>> config['data_dir'] = '/path/where/you/want/large/data/files/stored' 64 | >>> 65 | >>> import dustmaps.sfd 66 | >>> dustmaps.sfd.fetch() 67 | >>> 68 | >>> import dustmaps.csfd 69 | >>> dustmaps.csfd.fetch() 70 | >>> 71 | >>> import dustmaps.planck 72 | >>> dustmaps.planck.fetch() 73 | >>> 74 | >>> import dustmaps.planck 75 | >>> dustmaps.planck.fetch(which='GNILC') 76 | >>> 77 | >>> import dustmaps.bayestar 78 | >>> dustmaps.bayestar.fetch() 79 | >>> 80 | >>> import dustmaps.iphas 81 | >>> dustmaps.iphas.fetch() 82 | >>> 83 | >>> import dustmaps.marshall 84 | >>> dustmaps.marshall.fetch() 85 | >>> 86 | >>> import dustmaps.chen2014 87 | >>> dustmaps.chen2014.fetch() 88 | >>> 89 | >>> import dustmaps.lenz2017 90 | >>> dustmaps.lenz2017.fetch() 91 | >>> 92 | >>> import dustmaps.pg2010 93 | >>> dustmaps.pg2010.fetch() 94 | >>> 95 | >>> import dustmaps.leike_ensslin_2019 96 | >>> dustmaps.leike_ensslin_2019.fetch() 97 | >>> 98 | >>> import dustmaps.leike2020 99 | >>> 
dustmaps.leike2020.fetch() 100 | >>> 101 | >>> import dustmaps.edenhofer2023 102 | >>> dustmaps.edenhofer2023.fetch() 103 | >>> 104 | >>> import dustmaps.decaps 105 | >>> dustmaps.decaps.fetch() 106 | 107 | 108 | 109 | Querying the Maps 110 | ----------------- 111 | 112 | Maps are queried using 113 | [`astropy.coordinates.SkyCoord`](http://docs.astropy.org/en/stable/api/astropy.coordinates.SkyCoord.html#astropy.coordinates.SkyCoord) 114 | objects. This means that any coordinate system supported by `astropy` can be 115 | used as input. For example, we can query SFD'98 as follows: 116 | 117 | >>> from dustmaps.sfd import SFDQuery 118 | >>> from astropy.coordinates import SkyCoord 119 | >>> 120 | >>> sfd = SFDQuery() 121 | >>> 122 | >>> c = SkyCoord( 123 | '05h00m00.00000s', 124 | '+30d00m00.0000s', 125 | frame='icrs') 126 | >>> print(sfd(c)) 127 | 0.483961 128 | 129 | Above, we have used the ICRS coordinate system (the inputs are RA and Dec). We 130 | can use other coordinate systems, such as Galactic coordinates, and we can 131 | provide coordinate arrays. The following example uses both features: 132 | 133 | >>> c = SkyCoord( 134 | [75.00000000, 130.00000000], 135 | [-89.00000000, 10.00000000], 136 | frame='galactic', 137 | unit='deg') 138 | >>> print(sfd(c)) 139 | [ 0.0146584 0.97695869] 140 | 141 | 142 | Documentation 143 | ------------- 144 | 145 | Read the full documentation at http://dustmaps.readthedocs.io/en/latest/.
146 | 147 | 148 | Citation 149 | -------- 150 | 151 | If you make use of this software in a publication, please cite 152 | [Green (2018) in The Journal of Open Source Software](https://doi.org/10.21105/joss.00695): 153 | 154 | @ARTICLE{2018JOSS....3..695M, 155 | author = {{Green}, {Gregory M.}}, 156 | title = "{dustmaps: A Python interface for maps of interstellar dust}", 157 | journal = {The Journal of Open Source Software}, 158 | year = "2018", 159 | month = "Jun", 160 | volume = {3}, 161 | number = {26}, 162 | pages = {695}, 163 | doi = {10.21105/joss.00695}, 164 | adsurl = {https://ui.adsabs.harvard.edu/abs/2018JOSS....3..695M}, 165 | adsnote = {Provided by the SAO/NASA Astrophysics Data System} 166 | } 167 | 168 | 169 | Development 170 | ----------- 171 | 172 | Development of `dustmaps` takes place on GitHub, at 173 | https://github.com/gregreen/dustmaps. Any bugs, feature requests, pull requests, 174 | or other issues can be filed there. Contributions to the software are welcome. 175 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 14 | # the i18n builder cannot share the environment and doctrees with the others 15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
16 | 17 | .PHONY: help 18 | help: 19 | @echo "Please use \`make <target>' where <target> is one of" 20 | @echo " html to make standalone HTML files" 21 | @echo " dirhtml to make HTML files named index.html in directories" 22 | @echo " singlehtml to make a single large HTML file" 23 | @echo " pickle to make pickle files" 24 | @echo " json to make JSON files" 25 | @echo " htmlhelp to make HTML files and a HTML help project" 26 | @echo " qthelp to make HTML files and a qthelp project" 27 | @echo " applehelp to make an Apple Help Book" 28 | @echo " devhelp to make HTML files and a Devhelp project" 29 | @echo " epub to make an epub" 30 | @echo " epub3 to make an epub3" 31 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 32 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 33 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 34 | @echo " text to make text files" 35 | @echo " man to make manual pages" 36 | @echo " texinfo to make Texinfo files" 37 | @echo " info to make Texinfo files and run them through makeinfo" 38 | @echo " gettext to make PO message catalogs" 39 | @echo " changes to make an overview of all changed/added/deprecated items" 40 | @echo " xml to make Docutils-native XML files" 41 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 42 | @echo " linkcheck to check all external links for integrity" 43 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 44 | @echo " coverage to run coverage check of the documentation (if enabled)" 45 | @echo " dummy to check syntax errors of document sources" 46 | 47 | .PHONY: clean 48 | clean: 49 | rm -rf $(BUILDDIR)/* 50 | 51 | .PHONY: html 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
56 | 57 | .PHONY: dirhtml 58 | dirhtml: 59 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 60 | @echo 61 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 62 | 63 | .PHONY: singlehtml 64 | singlehtml: 65 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 66 | @echo 67 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 68 | 69 | .PHONY: pickle 70 | pickle: 71 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 72 | @echo 73 | @echo "Build finished; now you can process the pickle files." 74 | 75 | .PHONY: json 76 | json: 77 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 78 | @echo 79 | @echo "Build finished; now you can process the JSON files." 80 | 81 | .PHONY: htmlhelp 82 | htmlhelp: 83 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 84 | @echo 85 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 86 | ".hhp project file in $(BUILDDIR)/htmlhelp." 87 | 88 | .PHONY: qthelp 89 | qthelp: 90 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 91 | @echo 92 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 93 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 94 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/dustmaps.qhcp" 95 | @echo "To view the help file:" 96 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/dustmaps.qhc" 97 | 98 | .PHONY: applehelp 99 | applehelp: 100 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp 101 | @echo 102 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." 103 | @echo "N.B. You won't be able to view it unless you put it in" \ 104 | "~/Library/Documentation/Help or install it in your application" \ 105 | "bundle." 106 | 107 | .PHONY: devhelp 108 | devhelp: 109 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 110 | @echo 111 | @echo "Build finished." 
112 | @echo "To view the help file:" 113 | @echo "# mkdir -p $$HOME/.local/share/devhelp/dustmaps" 114 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/dustmaps" 115 | @echo "# devhelp" 116 | 117 | .PHONY: epub 118 | epub: 119 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 120 | @echo 121 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 122 | 123 | .PHONY: epub3 124 | epub3: 125 | $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3 126 | @echo 127 | @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3." 128 | 129 | .PHONY: latex 130 | latex: 131 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 132 | @echo 133 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 134 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 135 | "(use \`make latexpdf' here to do that automatically)." 136 | 137 | .PHONY: latexpdf 138 | latexpdf: 139 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 140 | @echo "Running LaTeX files through pdflatex..." 141 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 142 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 143 | 144 | .PHONY: latexpdfja 145 | latexpdfja: 146 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 147 | @echo "Running LaTeX files through platex and dvipdfmx..." 148 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 149 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 150 | 151 | .PHONY: text 152 | text: 153 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 154 | @echo 155 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 156 | 157 | .PHONY: man 158 | man: 159 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 160 | @echo 161 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 162 | 163 | .PHONY: texinfo 164 | texinfo: 165 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 166 | @echo 167 | @echo "Build finished. 
The Texinfo files are in $(BUILDDIR)/texinfo." 168 | @echo "Run \`make' in that directory to run these through makeinfo" \ 169 | "(use \`make info' here to do that automatically)." 170 | 171 | .PHONY: info 172 | info: 173 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 174 | @echo "Running Texinfo files through makeinfo..." 175 | make -C $(BUILDDIR)/texinfo info 176 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 177 | 178 | .PHONY: gettext 179 | gettext: 180 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 181 | @echo 182 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 183 | 184 | .PHONY: changes 185 | changes: 186 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 187 | @echo 188 | @echo "The overview file is in $(BUILDDIR)/changes." 189 | 190 | .PHONY: linkcheck 191 | linkcheck: 192 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 193 | @echo 194 | @echo "Link check complete; look for any errors in the above output " \ 195 | "or in $(BUILDDIR)/linkcheck/output.txt." 196 | 197 | .PHONY: doctest 198 | doctest: 199 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 200 | @echo "Testing of doctests in the sources finished, look at the " \ 201 | "results in $(BUILDDIR)/doctest/output.txt." 202 | 203 | .PHONY: coverage 204 | coverage: 205 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage 206 | @echo "Testing of coverage in the sources finished, look at the " \ 207 | "results in $(BUILDDIR)/coverage/python.txt." 208 | 209 | .PHONY: xml 210 | xml: 211 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 212 | @echo 213 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 214 | 215 | .PHONY: pseudoxml 216 | pseudoxml: 217 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 218 | @echo 219 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
220 | 221 | .PHONY: dummy 222 | dummy: 223 | $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy 224 | @echo 225 | @echo "Build finished. Dummy builder generates no files." 226 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # dustmaps documentation build configuration file, created by 4 | # sphinx-quickstart on Fri Oct 14 17:20:58 2016. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | # If extensions (or modules to document with autodoc) are in another directory, 16 | # add these directories to sys.path here. If the directory is relative to the 17 | # documentation root, use os.path.abspath to make it absolute, like shown here. 18 | # 19 | import os 20 | import sys 21 | # sys.path.insert(0, os.path.abspath('.')) 22 | # sys.path.insert(0, os.path.join(os.path.dirname(__name__), '..')) 23 | sys.path.insert(0, os.path.abspath('..')) 24 | 25 | # -- General configuration ------------------------------------------------ 26 | 27 | # If your documentation needs a minimal Sphinx version, state it here. 28 | # 29 | # needs_sphinx = '1.0' 30 | 31 | # Add any Sphinx extension module names here, as strings. They can be 32 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 33 | # ones. 
34 | extensions = [ 35 | 'sphinx.ext.autodoc', 36 | 'sphinx.ext.doctest', 37 | 'sphinx.ext.coverage', 38 | 'sphinx.ext.mathjax', 39 | 'sphinx.ext.viewcode', 40 | 'sphinx.ext.napoleon', 41 | 'sphinx_rtd_theme' 42 | # 'sphinxcontrib.googleanalytics' 43 | # 'sphinxcontrib.programoutput' 44 | ] 45 | 46 | # Add any paths that contain templates here, relative to this directory. 47 | templates_path = ['_templates'] 48 | 49 | # The suffix(es) of source filenames. 50 | # You can specify multiple suffix as a list of string: 51 | # 52 | # source_suffix = ['.rst', '.md'] 53 | source_suffix = '.rst' 54 | 55 | # The encoding of source files. 56 | # 57 | # source_encoding = 'utf-8-sig' 58 | 59 | # The master toctree document. 60 | master_doc = 'index' 61 | 62 | # General information about the project. 63 | project = u'dustmaps' 64 | copyright = u'2016, Gregory M. Green' 65 | author = u'Gregory M. Green' 66 | 67 | # The version info for the project you're documenting, acts as replacement for 68 | # |version| and |release|, also used in various other places throughout the 69 | # built documents. 70 | # 71 | # The short X.Y version. 72 | version = u'v1.0' 73 | # The full version, including alpha/beta/rc tags. 74 | release = u'v1.0.4' 75 | 76 | # The language for content autogenerated by Sphinx. Refer to documentation 77 | # for a list of supported languages. 78 | # 79 | # This is also used if you do content translation via gettext catalogs. 80 | # Usually you set "language" from the command line for these cases. 81 | language = None 82 | 83 | # There are two options for replacing |today|: either, you set today to some 84 | # non-false value, then it is used: 85 | # 86 | # today = '' 87 | # 88 | # Else, today_fmt is used as the format for a strftime call. 89 | # 90 | # today_fmt = '%B %d, %Y' 91 | 92 | # List of patterns, relative to source directory, that match files and 93 | # directories to ignore when looking for source files. 
94 | # This patterns also effect to html_static_path and html_extra_path 95 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 96 | 97 | # The reST default role (used for this markup: `text`) to use for all 98 | # documents. 99 | # 100 | # default_role = None 101 | 102 | # If true, '()' will be appended to :func: etc. cross-reference text. 103 | # 104 | # add_function_parentheses = True 105 | 106 | # If true, the current module name will be prepended to all description 107 | # unit titles (such as .. function::). 108 | # 109 | # add_module_names = True 110 | 111 | # If true, sectionauthor and moduleauthor directives will be shown in the 112 | # output. They are ignored by default. 113 | # 114 | # show_authors = False 115 | 116 | # The name of the Pygments (syntax highlighting) style to use. 117 | pygments_style = 'sphinx' 118 | 119 | # A list of ignored prefixes for module index sorting. 120 | # modindex_common_prefix = [] 121 | 122 | # If true, keep warnings as "system message" paragraphs in the built documents. 123 | # keep_warnings = False 124 | 125 | # If true, `todo` and `todoList` produce output, else they produce nothing. 126 | todo_include_todos = False 127 | 128 | 129 | # -- Options for HTML output ---------------------------------------------- 130 | 131 | # The theme to use for HTML and HTML Help pages. See the documentation for 132 | # a list of builtin themes. 133 | # 134 | html_theme = 'sphinx_rtd_theme' 135 | # html_theme = 'alabaster' 136 | 137 | # Theme options are theme-specific and customize the look and feel of a theme 138 | # further. For a list of options available for each theme, see the 139 | # documentation. 140 | # 141 | # html_theme_options = {} 142 | 143 | # Add any paths that contain custom themes here, relative to this directory. 144 | # html_theme_path = [] 145 | 146 | # The name for this set of Sphinx documents. 147 | # " v documentation" by default. 
148 | # 149 | # html_title = u'dustmaps vv0.1a3' 150 | 151 | # A shorter title for the navigation bar. Default is the same as html_title. 152 | # 153 | # html_short_title = None 154 | 155 | # The name of an image file (relative to this directory) to place at the top 156 | # of the sidebar. 157 | # 158 | # html_logo = None 159 | 160 | # The name of an image file (relative to this directory) to use as a favicon of 161 | # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 162 | # pixels large. 163 | # 164 | # html_favicon = None 165 | 166 | # Add any paths that contain custom static files (such as style sheets) here, 167 | # relative to this directory. They are copied after the builtin static files, 168 | # so a file named "default.css" will overwrite the builtin "default.css". 169 | html_static_path = ['_static'] 170 | 171 | # Add any extra paths that contain custom files (such as robots.txt or 172 | # .htaccess) here, relative to this directory. These files are copied 173 | # directly to the root of the documentation. 174 | # 175 | # html_extra_path = [] 176 | 177 | # If not None, a 'Last updated on:' timestamp is inserted at every page 178 | # bottom, using the given strftime format. 179 | # The empty string is equivalent to '%b %d, %Y'. 180 | # 181 | # html_last_updated_fmt = None 182 | 183 | # If true, SmartyPants will be used to convert quotes and dashes to 184 | # typographically correct entities. 185 | # 186 | # html_use_smartypants = True 187 | 188 | # Custom sidebar templates, maps document names to template names. 189 | # 190 | # html_sidebars = {} 191 | 192 | # Additional templates that should be rendered to pages, maps page names to 193 | # template names. 194 | # 195 | # html_additional_pages = {} 196 | 197 | # If false, no module index is generated. 198 | # 199 | # html_domain_indices = True 200 | 201 | # If false, no index is generated. 
202 | # 203 | # html_use_index = True 204 | 205 | # If true, the index is split into individual pages for each letter. 206 | # 207 | # html_split_index = False 208 | 209 | # If true, links to the reST sources are added to the pages. 210 | # 211 | # html_show_sourcelink = True 212 | 213 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 214 | # 215 | # html_show_sphinx = True 216 | 217 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 218 | # 219 | # html_show_copyright = True 220 | 221 | # If true, an OpenSearch description file will be output, and all pages will 222 | # contain a tag referring to it. The value of this option must be the 223 | # base URL from which the finished HTML is served. 224 | # 225 | # html_use_opensearch = '' 226 | 227 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 228 | # html_file_suffix = None 229 | 230 | # Language to be used for generating the HTML full-text search index. 231 | # Sphinx supports the following languages: 232 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' 233 | # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh' 234 | # 235 | # html_search_language = 'en' 236 | 237 | # A dictionary with options for the search language support, empty by default. 238 | # 'ja' uses this config value. 239 | # 'zh' user can custom change `jieba` dictionary path. 240 | # 241 | # html_search_options = {'type': 'default'} 242 | 243 | # The name of a javascript file (relative to the configuration directory) that 244 | # implements a search results scorer. If empty, the default will be used. 245 | # 246 | # html_search_scorer = 'scorer.js' 247 | 248 | # Output file base name for HTML help builder. 249 | htmlhelp_basename = 'dustmapsdoc' 250 | 251 | # -- Options for LaTeX output --------------------------------------------- 252 | 253 | latex_elements = { 254 | # The paper size ('letterpaper' or 'a4paper'). 
255 | # 256 | # 'papersize': 'letterpaper', 257 | 258 | # The font size ('10pt', '11pt' or '12pt'). 259 | # 260 | # 'pointsize': '10pt', 261 | 262 | # Additional stuff for the LaTeX preamble. 263 | # 264 | # 'preamble': '', 265 | 266 | # Latex figure (float) alignment 267 | # 268 | # 'figure_align': 'htbp', 269 | } 270 | 271 | # Grouping the document tree into LaTeX files. List of tuples 272 | # (source start file, target name, title, 273 | # author, documentclass [howto, manual, or own class]). 274 | latex_documents = [ 275 | (master_doc, 'dustmaps.tex', u'dustmaps Documentation', 276 | u'Gregory M. Green', 'manual'), 277 | ] 278 | 279 | # The name of an image file (relative to this directory) to place at the top of 280 | # the title page. 281 | # 282 | # latex_logo = None 283 | 284 | # For "manual" documents, if this is true, then toplevel headings are parts, 285 | # not chapters. 286 | # 287 | # latex_use_parts = False 288 | 289 | # If true, show page references after internal links. 290 | # 291 | # latex_show_pagerefs = False 292 | 293 | # If true, show URL addresses after external links. 294 | # 295 | # latex_show_urls = False 296 | 297 | # Documents to append as an appendix to all manuals. 298 | # 299 | # latex_appendices = [] 300 | 301 | # It false, will not define \strong, \code, itleref, \crossref ... but only 302 | # \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added 303 | # packages. 304 | # 305 | # latex_keep_old_macro_names = True 306 | 307 | # If false, no module index is generated. 308 | # 309 | # latex_domain_indices = True 310 | 311 | 312 | # -- Options for manual page output --------------------------------------- 313 | 314 | # One entry per manual page. List of tuples 315 | # (source start file, name, description, authors, manual section). 316 | man_pages = [ 317 | (master_doc, 'dustmaps', u'dustmaps Documentation', 318 | [author], 1) 319 | ] 320 | 321 | # If true, show URL addresses after external links. 
322 | # 323 | # man_show_urls = False 324 | 325 | 326 | # -- Options for Texinfo output ------------------------------------------- 327 | 328 | # Grouping the document tree into Texinfo files. List of tuples 329 | # (source start file, target name, title, author, 330 | # dir menu entry, description, category) 331 | texinfo_documents = [ 332 | (master_doc, 'dustmaps', u'dustmaps Documentation', 333 | author, 'dustmaps', 'One line description of project.', 334 | 'Miscellaneous'), 335 | ] 336 | 337 | # Documents to append as an appendix to all manuals. 338 | # 339 | # texinfo_appendices = [] 340 | 341 | # If false, no module index is generated. 342 | # 343 | # texinfo_domain_indices = True 344 | 345 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 346 | # 347 | # texinfo_show_urls = 'footnote' 348 | 349 | # If true, do not generate a @detailmenu in the "Top" node's menu. 350 | # 351 | # texinfo_no_detailmenu = False 352 | 353 | # Google Analytics 354 | # googleanalytics_id = 'UA-57454625-3' 355 | 356 | # Mock modules, rather than importing them. 
357 | 358 | # import sys 359 | # from mock import Mock as MagicMock 360 | # 361 | # class Mock(MagicMock): 362 | # @classmethod 363 | # def __getattr__(cls, name): 364 | # return Mock() 365 | 366 | autodoc_mock_imports = [ 367 | 'astropy', 368 | 'astropy.coordinates', 369 | 'astropy.coordinates.SkyCoord', 370 | 'astropy.io', 371 | 'astropy.io.fits', 372 | 'astropy.units', 373 | 'astropy.wcs', 374 | 'contextlib', 375 | 'contextlib.closing', 376 | 'h5py', 377 | 'hashlib', 378 | 'healpy', 379 | 'numpy', 380 | 'PIL', 381 | 'PIL.Image', 382 | 'scipy', 383 | 'scipy.ndimage', 384 | 'scipy.ndimage.map_coordinates', 385 | 'scipy.spatial', 386 | 'scipy.spatial.cKDTree', 387 | 'shutil', 388 | 'tqdm' 389 | ] 390 | # 'progressbar', 391 | # 'progressbar.ProgressBar', 392 | # 'progressbar.widgets', 393 | # 'progressbar.widgets.DataSize', 394 | # 'progressbar.widgets.AdaptiveTransferSpeed', 395 | # 'progressbar.widgets.Bar', 396 | # 'progressbar.widgets.AdaptiveETA', 397 | # 'progressbar.widgets.Percentage', 398 | # 'progressbar.widgets.FormatCustomText', 399 | # 'progressbar.utils', 400 | # 'progressbar.utils.scale_1024'] 401 | 402 | # sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES) 403 | -------------------------------------------------------------------------------- /docs/figs/comparison.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gregreen/dustmaps/035a65a7bbf02431172113883993a7da1ba1cacb/docs/figs/comparison.png -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. dustmaps documentation master file, created by 2 | sphinx-quickstart on Fri Oct 14 17:20:58 2016. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 
5 | 6 | dustmaps documentation 7 | ==================================== 8 | 9 | :code:`dustmaps` provides a unified interface for several 2D and 3D 10 | maps of interstellar dust reddening and extinction. 11 | 12 | To get started, take a look at :doc:`installation` and 13 | :doc:`examples`. To see a list of all available maps, take a look 14 | at :doc:`maps`. For a complete reference to the API, see 15 | :doc:`modules`. 16 | 17 | If you make use of :code:`dustmaps` in your research, please cite 18 | `Green (2018) `_:: 19 | 20 | @ARTICLE{2018JOSS....3..695G, 21 | author = {{Green}, {Gregory M.}}, 22 | title = "{dustmaps: A Python interface for maps of interstellar dust}", 23 | journal = {The Journal of Open Source Software}, 24 | year = "2018", 25 | month = "Jun", 26 | volume = {3}, 27 | number = {26}, 28 | pages = {695}, 29 | doi = {10.21105/joss.00695}, 30 | adsurl = {https://ui.adsabs.harvard.edu/abs/2018JOSS....3..695G}, 31 | adsnote = {Provided by the SAO/NASA Astrophysics Data System} 32 | } 33 | 34 | 35 | Contents 36 | ======== 37 | 38 | .. toctree:: 39 | :maxdepth: 2 40 | 41 | installation 42 | examples 43 | maps 44 | modules 45 | license 46 | 47 | 48 | Indices and tables 49 | ================== 50 | 51 | * :ref:`genindex` 52 | * :ref:`modindex` 53 | * :ref:`search` 54 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | Installation 2 | ============ 3 | 4 | There are two ways to install :code:`dustmaps`. 5 | 6 | 7 | 1. Using :code:`pip` 8 | -------------------- 9 | 10 | From the commandline, run 11 | 12 | .. code-block :: bash 13 | 14 | pip install dustmaps 15 | 16 | You may have to use :code:`sudo`. 17 | 18 | Next, we'll configure the package and download the dust maps we'll want to use. 19 | Start up a python interpreter and type: 20 | 21 | ..
code-block :: python 22 | 23 | from dustmaps.config import config 24 | config['data_dir'] = '/path/to/store/maps/in' 25 | 26 | import dustmaps.sfd 27 | dustmaps.sfd.fetch() 28 | 29 | import dustmaps.csfd 30 | dustmaps.csfd.fetch() 31 | 32 | import dustmaps.planck 33 | dustmaps.planck.fetch() 34 | 35 | import dustmaps.planck 36 | dustmaps.planck.fetch(which='GNILC') 37 | 38 | import dustmaps.bayestar 39 | dustmaps.bayestar.fetch() 40 | 41 | import dustmaps.iphas 42 | dustmaps.iphas.fetch() 43 | 44 | import dustmaps.marshall 45 | dustmaps.marshall.fetch() 46 | 47 | import dustmaps.chen2014 48 | dustmaps.chen2014.fetch() 49 | 50 | import dustmaps.lenz2017 51 | dustmaps.lenz2017.fetch() 52 | 53 | import dustmaps.pg2010 54 | dustmaps.pg2010.fetch() 55 | 56 | import dustmaps.leike_ensslin_2019 57 | dustmaps.leike_ensslin_2019.fetch() 58 | 59 | import dustmaps.leike2020 60 | dustmaps.leike2020.fetch() 61 | 62 | import dustmaps.edenhofer2023 63 | dustmaps.edenhofer2023.fetch() 64 | 65 | import dustmaps.gaia_tge 66 | dustmaps.gaia_tge.fetch() 67 | 68 | import dustmaps.decaps 69 | dustmaps.decaps.fetch() 70 | 71 | All the dust maps should now be in the path you gave to 72 | :code:`config['data_dir']`. Note that these dust maps can be very large - some 73 | are several Gigabytes! Only download those you think you'll need. 74 | 75 | Note that there are three versions of the Bayestar dust map. By default, 76 | :code:`dustmaps.bayestar.fetch()` will download Bayestar19 (Green et al. 2019). 77 | In order to download earlier versions of the map (Green et al. 2015, 2018), you can 78 | provide the keyword argument :code:`version='bayestar2017'` (Green et al. 2018) or 79 | :code:`version='bayestar2015'` (Green et al. 2015). 80 | 81 | 82 | 2. Using :code:`setup.py` 83 | ------------------------- 84 | 85 | An alternative way to download :code:`dustmaps`, if you don't want to use 86 | :code:`pip`, is to download or clone the repository from 87 | https://github.com/gregreen/dustmaps.
88 | 89 | 90 | In this case, you will have to manually make sure that the dependencies are 91 | satisfied: 92 | 93 | * :code:`numpy` 94 | * :code:`scipy` 95 | * :code:`astropy` 96 | * :code:`h5py` 97 | * :code:`healpy` 98 | * :code:`requests` 99 | * :code:`six` 100 | * :code:`progressbar2` 101 | * :code:`tqdm` 102 | 103 | 104 | These packages can typically be installed using the Python package manager, 105 | :code:`pip`. 106 | 107 | Once these dependencies are installed, run the following command from the root 108 | directory of the :code:`dustmaps` package: 109 | 110 | .. code-block :: bash 111 | 112 | python setup.py install --large-data-dir=/path/to/store/maps/in 113 | 114 | Then, fetch the maps you'd like to use. Depending on which dust maps you choose 115 | to download, this step can take up several Gigabytes of disk space. Be careful 116 | to only download those you think you'll need: 117 | 118 | .. code-block :: bash 119 | 120 | python setup.py fetch --map-name=sfd 121 | python setup.py fetch --map-name=csfd 122 | python setup.py fetch --map-name=planck 123 | python setup.py fetch --map-name=planckGNILC 124 | python setup.py fetch --map-name=bayestar 125 | python setup.py fetch --map-name=iphas 126 | python setup.py fetch --map-name=marshall 127 | python setup.py fetch --map-name=chen2014 128 | python setup.py fetch --map-name=lenz2017 129 | python setup.py fetch --map-name=leikeensslin2019 130 | python setup.py fetch --map-name=leike2020 131 | python setup.py fetch --map-name=edenhofer2023 132 | python setup.py fetch --map-name=gaia_tge 133 | python setup.py fetch --map-name=decaps 134 | 135 | 136 | That's it! 137 | 138 | Note that the above code will download the latest version of the Bayestar dust 139 | map (the 2019 version). If you want to download the 2015 and 2017 versions, you 140 | can enter the commands 141 | 142 | .. 
code-block :: bash 143 | 144 | python setup.py fetch --map-name=bayestar2015 145 | python setup.py fetch --map-name=bayestar2017 146 | 147 | 3. Custom configuration file location (Optional) 148 | ------------------------------------------------ 149 | 150 | By default, a configuration file is stored in :code:`~/.dustmapsrc`. This 151 | file might look like the following:: 152 | 153 | {"data_dir": "/path/to/store/maps/in"} 154 | 155 | If you would like :code:`dustmaps` to use a different configuration file, 156 | then you can set the environmental variable :code:`DUSTMAPS_CONFIG_FNAME`. 157 | For example, in a :code:`bash` terminal, 158 | 159 | .. code-block :: bash 160 | 161 | export DUSTMAPS_CONFIG_FNAME=/path/to/custom/config/file.json 162 | python script_using_dustmaps.py 163 | 164 | The paths listed in the configuration file can also include environmental 165 | variables, which will be expanded when :code:`dustmaps` is loaded. For example, 166 | the configuration file could contain the following:: 167 | 168 | {"data_dir": "/path/with/${VARIABLE}/included"} 169 | 170 | If the environmental variable :code:`VARIABLE` is set to :code:`"foo"`, 171 | for example, then :code:`dustmaps` will expand :code:`data_dir` to 172 | :code:`"/path/with/foo/included"`. 173 | -------------------------------------------------------------------------------- /docs/license.rst: -------------------------------------------------------------------------------- 1 | License 2 | ======= 3 | 4 | The :code:`dustmaps` documentation is covered by the MIT License, as given below. 5 | 6 | 7 | The MIT License (MIT) 8 | --------------------- 9 | 10 | Copyright (c) 2016 `Gregory M. 
Green `_ 11 | 12 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 13 | 14 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 17 | -------------------------------------------------------------------------------- /docs/maps.rst: -------------------------------------------------------------------------------- 1 | Available Dust Maps 2 | =================== 3 | 4 | 5 | Two-Dimensional Dust Maps 6 | ------------------------- 7 | 8 | 9 | SFD 10 | ~~~ 11 | 12 | A two-dimensional map of dust reddening across the entire sky. The "SFD" dust 13 | map is based on far-infrared emission of dust. The authors model the temperature 14 | and optical depth of the dust, and then calibrate a relationship between the 15 | dust's far-infrared optical depth and optical reddening. This calibration was 16 | updated by 17 | `Schlafly & Finkbeiner (2011) `_. 18 | 19 | In order to convert SFD values of E(B-V) to extinction, one should use the 20 | conversions provided in 21 | `Table 6 of Schlafly & Finkbeiner (2011) `_. 
22 | 23 | * **Reference**: `Schlegel, Finkbeiner & Davis (1998) `_ 24 | * **Recalibration**: `Schlafly & Finkbeiner (2011) `_ 25 | 26 | 27 | CSFD (Chiang 2023) 28 | ~~~~~~~~~~~~~~~~~~ 29 | 30 | "Corrected SFD," a 2D dust map based on a reanalysis of SFD, using 31 | tomographically constrained templates from WISE galaxy density fields to remove 32 | extragalactic contamination from the cosmic infrared background (CIB). 33 | 34 | * **Reference**: `Chiang (2023) `_ 35 | * **Website**: `Project description `_ 36 | * **Data**: `Zenodo `_ 37 | 38 | 39 | Gaia Total Galactic Extinction (2022) 40 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 41 | 42 | A two-dimensional map of A0, the monochromatic extinction at 541.4 nm. The map 43 | is based on extinction estimates for giants beyond 300 pc. The individual 44 | extinction estimates were obtained by fitting Gaia BP/RP spectra, 45 | parallaxes and G-band apparent magnitudes. 46 | 47 | The map comes in multiple HEALPix levels (6 to 9). By default, an "optimum" 48 | map is loaded, with an adaptive HEALPix level, based on the local number 49 | of stars (at least 3 stars are required per pixel). 50 | 51 | * **Reference**: Delchambre et al. (2022). 52 | 53 | 54 | Lenz, Hensley & Doré (2017) 55 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ 56 | 57 | A two-dimensional map of dust reddening, covering 40% of the sky with a 16.1' 58 | resolution. This map is derived from emission from low-velocity 59 | (l.o.s. velocity < 90 km/s) HI, which is found to correlate much more strongly 60 | with E(B-V) than emission from high-velocity HI. The underlying data comes from 61 | the HI4PI Survey. This map reports E(B-V) in magnitudes. 62 | 63 | * **Reference**: `Lenz, Hensley & Doré (2017) `_. 64 | * **See also**: `GitHub page `_. 65 | 66 | 67 | Planck Collaboration (2013) 68 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ 69 | 70 | Two-dimensional maps of dust reddening across the entire sky.
The 71 | `Planck Collaboration (2013) `_ 72 | fits a modified blackbody dust emission model to the Planck and IRAS 73 | far-infrared maps, and provides three different conversions to dust reddening. 74 | 75 | The three maps provided by 76 | `Planck Collaboration (2013) `_ 77 | are based on: 78 | 79 | #. τ\ :sub:`353`\ : dust optical depth at 353 GHz. 80 | #. ℛ: thermal dust radiance. 81 | #. A recommended extragalactic reddening estimate, based on thermal dust 82 | radiance, but with point sources removed. 83 | 84 | * **Reference**: `Planck Collaboration (2013) `_ 85 | * **Website**: `Planck Explanatory Supplement `_ 86 | 87 | 88 | Planck Collaboration (2016; "GNILC") 89 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 90 | 91 | Two-dimensional maps of dust reddening across the entire sky, using the 92 | generalized needlet internal linear combination (GNILC) method to separate 93 | out Galactic dust emission from CIB anisotropies. 94 | 95 | This map contains both reddening estimates and estimated uncertainties. 96 | 97 | * **Reference**: `Planck Collaboration (2016) `_ 98 | * **Website**: `Planck Explanatory Supplement `_ 99 | 100 | 101 | Peek & Graves (2010) 102 | ~~~~~~~~~~~~~~~~~~~~ 103 | 104 | A correction to the SFD'98 dust map, based on color excess measurements of "standard crayons" -- spectroscopically selected passively evolving galaxies. The maps have an angular resolution of 4.5°, and have a 1σ uncertainty of 1.5 mmag in E(B-V). Subtract this map from SFD'98 to obtain the corrected E(B-V) reddening. 105 | 106 | * **Reference**: `Peek & Graves (2010) `_ 107 | 108 | 109 | Burstein & Heiles 110 | ~~~~~~~~~~~~~~~~~ 111 | 112 | Primarily of historical interest, the 113 | `Burstein & Heiles (1982) `_ 114 | dust reddening maps are derived from HI column density and galaxy counts. 
115 | 116 | * **Reference**: `Burstein & Heiles (1982) `_ 117 | 118 | 119 | Three-Dimensional Dust Maps 120 | --------------------------- 121 | 122 | 123 | Bayestar 124 | ~~~~~~~~ 125 | 126 | A three-dimensional map of Milky Way dust reddening, covering the three quarters 127 | of the sky north of a declination of -30°. The map is probabilistic, containing 128 | samples of the reddening along each line of sight. The "Bayestar" dust map is 129 | inferred from stellar photometry of 800 million stars observed by Pan-STARRS 1, 130 | and 2MASS photometry for a quarter of the stars. The latest version of Bayestar 131 | also makes use of *Gaia* DR2 parallaxes. 132 | 133 | There are three versions of Bayestar, called *Bayestar19*, *Bayestar17* and 134 | *Bayestar15* here. By default, :code:`dustmaps` will use the latest version, 135 | Bayestar19, although the earlier versions of the map can be selected by providing 136 | the keyword argument :code:`version='bayestar2017'` or :code:`version='bayestar2015'` 137 | in routines such as :code:`dustmaps.bayestar.fetch`, 138 | :code:`dustmaps.bayestar.BayestarQuery` and :code:`dustmaps.bayestar.BayestarWebQuery`. 139 | If you want to make sure that your code will always use the same version of the 140 | map, even as new versions of Bayestar are released, then set the :code:`version` 141 | keyword explicitly. 142 | 143 | The units of reddening used by each map are slightly different: 144 | 145 | #. Bayestar19 reports reddening in an arbitrary unit that can be converted to 146 | extinction in different bands using the coefficients given in Table 1 of 147 | Green, Schlafly, Finkbeiner et al. (2019). 148 | #. Bayestar17 reports reddening in an arbitrary unit that can be converted to 149 | extinction in different bands using the coefficients given in Table 1 of 150 | `Green, Schlafly, Finkbeiner et al. (2018) `_. 151 | #. Bayestar15 reports reddening in the same units as those used by SFD.
Therefore, 152 | in order to convert Bayestar15 reddenings to extinction in different bands, one 153 | should use the conversions provided in 154 | `Table 6 of Schlafly & Finkbeiner (2011) `_. 155 | 156 | * **References**: `Green, Schlafly, Finkbeiner et al. (2019) `_, 157 | `Green, Schlafly, Finkbeiner et al. (2018) `_ 158 | and `Green, Schlafly, Finkbeiner et al. (2015) `_. 159 | * **Website**: `argonaut.skymaps.info `_ 160 | 161 | DECaPS 162 | ~~~~~~~~ 163 | 164 | A three-dimensional map of Milky Way dust reddening, covering the southern Galactic plane (239° < l < 6°) in a 20°-thick strip. 165 | The map has an angular resolution of 1' (on par with Herschel), a typical maximum reliable distance of ~ 10 kpc, and is reliable up to A(V)~12 mag in extinction. 166 | The map is probabilistic, containing samples of the reddening along each line of sight. The "DECaPS" dust map is 167 | inferred from stellar photometry of 709 million stars observed by the DECaPS2 survey, in combination with 2MASS, VVV, and unWISE. 168 | The DECaPS map also makes use of *Gaia* DR3 parallaxes where available. When combined with Bayestar, DECaPS completes the coverage 169 | of the Galactic plane within a 20°-thick strip, enabling reddening estimates over the entire disk. 170 | 171 | Unlike the Bayestar map, whose reddening is reported in an arbitrary unit, the DECaPS reddening is reported in units of E(B-V) in mags. 172 | 173 | * **References**: Zucker, Saydjari, Speagle et al. (2025) 174 | * **Data**: `Dataverse `_ 175 | 176 | 177 | Chen et al. (2014) 178 | ~~~~~~~~~~~~~~~~~~ 179 | 180 | A three-dimensional map of dust extinction in the Galactic anticenter. The map 181 | covers about 6000 deg\ :sup:`2`\ , from 140° < ℓ < 240° and -60° < b < 40°, and 182 | is based on stellar photometry from the Xuyi Schmidt Telescope Photometric 183 | Survey of the Galactic Anticentre (XSTPS-GAC), 2MASS and *WISE*.
The map has an 184 | angular resolution of 3 to 9 arcminutes, and reports *r*-band extinction, along 185 | with Gaussian error estimates. 186 | 187 | * **Reference**: `Chen et al. (2014) `_ 188 | * **Website**: `http://lamost973.pku.edu.cn `_ 189 | 190 | 191 | Edenhofer et al. (2023) 192 | ~~~~~~~~~~~~~~~~~~~~~~~ 193 | 194 | A three-dimensional map of Milky Way dust extinction, with a Gaussian process 195 | prior on the logarithm of the dust extinction density. The prior is implemented 196 | on a spherical grid. The map starts at 69 pc and extends out to 1.25 kpc in 197 | distance from the Sun. It has an angular resolution of 14' and a maximum 198 | distance voxelization of 0.4 pc at 69 pc and a minimum distance voxelization of 199 | 7 pc at 1250 pc. The map is based on the stellar distance and extinction 200 | estimates of Zhang, Green & Rix (2023), and therefore reports extinctions in 201 | their units. Accompanying the main reconstruction is an additional map that uses 202 | less data but extends out to 2 kpc from the Sun. 203 | 204 | * **Reference**: `Edenhofer et al. (2023) `_ 205 | * **Data**: `Zenodo `_ 206 | 207 | 208 | IPHAS 209 | ~~~~~ 210 | 211 | A three-dimensional map of Milky Way dust extinction, covering a 10°-thick strip 212 | of the Galactic plane, between 30° < ℓ < 120°. The map is probabilistic, 213 | containing samples of the cumulative extinction along each line of sight. The 214 | map is based on IPHAS imaging of stars. The map returns A\ :sub:`0`\ , the 215 | monochromatic extinction. 216 | 217 | * **Reference**: `Sale et al. (2014) `_ 218 | * **Website**: `www.iphas.org/extinction `_ 219 | 220 | 221 | Leike & Enßlin (2019) 222 | ~~~~~~~~~~~~~~~~~~~~~~ 223 | 224 | A three-dimensional map of Milky Way dust extinction, incorporating a Gaussian 225 | process prior on the log of the dust extinction density. The map is based on 226 | the Gaia DR2 catalog parallaxes and G-band extinctions, and spans a (600 pc)³ 227 | box centered on the Sun.
228 | 229 | * **Reference**: `Leike & Enßlin (2019) `_ 230 | * **Data**: `Zenodo `_ 231 | 232 | 233 | Leike, Glatzle & Enßlin (2020) 234 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 235 | 236 | A three-dimensional map of Milky Way dust extinction, incorporating a Gaussian 237 | process prior on the log of the dust extinction density, similar to Leike & 238 | Enßlin (2019). The map is based on data from Gaia, 2MASS, Pan-STARRS 1 and 239 | ALLWISE, and is calculated on a Cartesian grid spanning a 240 | (740 pc)×(740 pc)×(540 pc) box (in Galactic *x*, *y* and *z*, respectively) 241 | centered on the Sun. 242 | 243 | * **References**: `Leike, Glatzle & Enßlin (2020) `_ 244 | * **Data**: `Zenodo `_ 245 | 246 | 247 | Marshall et al. (2006) 248 | ~~~~~~~~~~~~~~~~~~~~~~ 249 | 250 | A three-dimensional map of Milky Way dust extinction, covering a 20°-thick strip 251 | of the Galactic plane, between -100° < ℓ < 100°. The map contains 2MASS 252 | K\ :sub:`s`\ -band extinctions with Gaussian uncertainty estimates. The map is 253 | based on a comparison of 2MASS colors of stars with expectations from the 254 | Besançon model of the Galaxy. 255 | 256 | * **Reference**: `Marshall et al. (2006) `_ 257 | * **Website**: `http://cds.u-strasbg.fr/ `_ 258 | -------------------------------------------------------------------------------- /docs/modules.rst: -------------------------------------------------------------------------------- 1 | dustmap modules 2 | =============== 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | 8 | bayestar (Green et al. 2015, 2018) 9 | ---------------------------------- 10 | .. automodule:: dustmaps.bayestar 11 | :members: 12 | :special-members: 13 | :show-inheritance: 14 | 15 | decaps (Zucker, Saydjari, Speagle et al. 2025) 16 | ---------------------------------------------- 17 | .. automodule:: dustmaps.decaps 18 | :members: 19 | :special-members: 20 | :show-inheritance: 21 | 22 | 23 | bh (Burstein & Heiles 1982) 24 | --------------------------- 25 | ..
automodule:: dustmaps.bh 26 | :members: 27 | :special-members: 28 | :show-inheritance: 29 | 30 | chen2014 (Chen et al. 2014) 31 | --------------------------- 32 | .. automodule:: dustmaps.chen2014 33 | :members: 34 | :special-members: 35 | :show-inheritance: 36 | 37 | csfd (Chiang 2023) 38 | --------------------------- 39 | .. automodule:: dustmaps.csfd 40 | :members: 41 | :special-members: 42 | :show-inheritance: 43 | 44 | gaia_tge (Delchambre et al. 2022) 45 | --------------------------- 46 | .. automodule:: dustmaps.gaia_tge 47 | :members: 48 | :special-members: 49 | :show-inheritance: 50 | 51 | iphas (Sale et al. 2014) 52 | ------------------------ 53 | .. automodule:: dustmaps.iphas 54 | :members: 55 | :special-members: 56 | :show-inheritance: 57 | 58 | leike_ensslin_2019 (Leike & Enßlin 2019) 59 | ---------------------------------------- 60 | .. automodule:: dustmaps.leike_ensslin_2019 61 | :members: 62 | :special-members: 63 | :show-inheritance: 64 | 65 | leike2020 (Leike, Glatzle & Enßlin 2020) 66 | ---------------------------------------- 67 | .. automodule:: dustmaps.leike2020 68 | :members: 69 | :special-members: 70 | :show-inheritance: 71 | 72 | edenhofer2023 (Edenhofer et al. 2023) 73 | ------------------------------------- 74 | .. automodule:: dustmaps.edenhofer2023 75 | :members: 76 | :special-members: 77 | :show-inheritance: 78 | 79 | lenz2017 (Lenz, Hensley & Doré 2017) 80 | ------------------------------------ 81 | .. automodule:: dustmaps.lenz2017 82 | :members: 83 | :special-members: 84 | :show-inheritance: 85 | 86 | marshall (Marshall et al. 2006) 87 | ------------------------------- 88 | .. automodule:: dustmaps.marshall 89 | :members: 90 | :special-members: 91 | :show-inheritance: 92 | 93 | pg2010 (Peek & Graves 2010) 94 | --------------------------- 95 | .. 
automodule:: dustmaps.pg2010 96 | :members: 97 | :special-members: 98 | :show-inheritance: 99 | 100 | planck (Planck Collaboration 2013, 2016) 101 | ---------------------------------------- 102 | .. automodule:: dustmaps.planck 103 | :members: 104 | :special-members: 105 | :show-inheritance: 106 | 107 | 108 | sfd (Schlegel, Finkbeiner & Davis 1998) 109 | --------------------------------------- 110 | .. automodule:: dustmaps.sfd 111 | :members: 112 | :special-members: 113 | :show-inheritance: 114 | 115 | 116 | fetch_utils 117 | ----------- 118 | .. automodule:: dustmaps.fetch_utils 119 | :members: 120 | :special-members: 121 | :show-inheritance: 122 | 123 | 124 | map_base 125 | -------- 126 | .. automodule:: dustmaps.map_base 127 | :members: 128 | :special-members: __call__ 129 | :show-inheritance: 130 | 131 | 132 | healpix_map 133 | ----------- 134 | .. automodule:: dustmaps.healpix_map 135 | :members: 136 | :special-members: 137 | :show-inheritance: 138 | 139 | 140 | unstructured_map 141 | ---------------- 142 | .. automodule:: dustmaps.unstructured_map 143 | :members: 144 | :special-members: 145 | :show-inheritance: 146 | 147 | 148 | config 149 | ------ 150 | .. automodule:: dustmaps.config 151 | :members: 152 | :show-inheritance: 153 | 154 | 155 | std_paths 156 | --------- 157 | .. automodule:: dustmaps.std_paths 158 | :members: 159 | :special-members: 160 | :show-inheritance: 161 | 162 | 163 | json_serializers 164 | ---------------- 165 | .. 
automodule:: dustmaps.json_serializers 166 | :members: 167 | :special-members: 168 | :show-inheritance: 169 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | argparse>=1.2.1 2 | nose>=1.3.7 3 | Sphinx>=1.4.6 4 | sphinx-autobuild>=0.6.0 5 | sphinxcontrib-napoleon>=0.5.3 6 | sphinxcontrib-programoutput>=0.8 7 | progressbar2>=3.30.2 8 | six>=1.10.0 9 | sphinx_rtd_theme 10 | -------------------------------------------------------------------------------- /dustmaps/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # __init__.py 4 | # Makes the contents of the package "dustmaps" discoverable. 5 | # 6 | # Copyright (C) 2016 Gregory M. Green 7 | # 8 | # dustmaps is free software: you can redistribute it and/or modify 9 | # it under the terms of either: 10 | # 11 | # - The GNU General Public License as published by the Free Software Foundation, 12 | # either version 2 of the License, or (at your option) any later version, or 13 | # - The 2-Clause BSD License (also known as the Simplified BSD License). 14 | # 15 | # You should have received copies of the GNU General Public License 16 | # and the BSD License along with this program. 17 | # 18 | -------------------------------------------------------------------------------- /dustmaps/bh.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # bh.py 4 | # Reads the Burstein & Heiles (1982; BH) dust reddening map. 5 | # 6 | # Copyright (C) 2016 Gregory M. 
Green 7 | # 8 | # dustmaps is free software: you can redistribute it and/or modify 9 | # it under the terms of either: 10 | # 11 | # - The GNU General Public License as published by the Free Software Foundation, 12 | # either version 2 of the License, or (at your option) any later version, or 13 | # - The 2-Clause BSD License (also known as the Simplified BSD License). 14 | # 15 | # You should have received copies of the GNU General Public License 16 | # and the BSD License along with this program. 17 | # 18 | 19 | from __future__ import print_function, division 20 | 21 | import numpy as np 22 | import astropy.coordinates as coordinates 23 | import h5py 24 | import os 25 | 26 | from .std_paths import * 27 | from .map_base import DustMap, ensure_flat_galactic 28 | 29 | def ascii2h5(bh_dir=None): 30 | """ 31 | Convert the Burstein & Heiles (1982) dust map from ASCII to HDF5. 32 | """ 33 | 34 | if bh_dir is None: 35 | bh_dir = os.path.join(data_dir_default, 'bh') 36 | 37 | fname = os.path.join(bh_dir, '{}.ascii') 38 | 39 | f = h5py.File('bh.h5', 'w') 40 | 41 | for region in ('hinorth', 'hisouth'): 42 | data = np.loadtxt(fname.format(region), dtype='f4') 43 | 44 | # Reshape and clip 45 | data.shape = (210, 201) # (R, N) 46 | data = data[:201] # Last 9 records are empty 47 | 48 | # Use NaNs where no data 49 | data[data < -9000] = np.nan 50 | 51 | dset = f.create_dataset( 52 | region, 53 | data=data, 54 | chunks=True, 55 | compression='gzip', 56 | compression_opts=3 57 | ) 58 | 59 | dset.attrs['axes'] = ('R', 'N') 60 | dset.attrs['description'] = ( 61 | 'HI 21cm column densities, in units of 10*NHYD. ' 62 | 'R = 100 + [(90^o-|b|) sin(l)]/[0.3 degrees]. ' 63 | 'N = 100 + [(90^o-|b|) cos (l)]/[0.3 degrees].' 
64 | ) 65 | 66 | for region in ('rednorth', 'redsouth'): 67 | data = np.loadtxt(fname.format(region), dtype='f4') 68 | 69 | # Reshape and clip 70 | data.shape = (94, 1200) # (R, N) 71 | data = data[:93] # Last record is empty 72 | 73 | # Use NaNs where no data 74 | data[data < -9000] = np.nan 75 | 76 | dset = f.create_dataset( 77 | region, 78 | data=data, 79 | chunks=True, 80 | compression='gzip', 81 | compression_opts=3 82 | ) 83 | 84 | dset.attrs['axes'] = ('R', 'N') 85 | dset.attrs['description'] = ( 86 | 'E(B-V), in units of 0.001 mag. ' 87 | 'R = (|b| - 10) / (0.6 degrees). ' 88 | 'N = (l + 0.15) / 0.3 - 1.' 89 | ) 90 | 91 | f.attrs['description'] = ( 92 | 'The Burstein & Heiles (1982) dust map.' 93 | ) 94 | 95 | f.close() 96 | 97 | 98 | class BHQuery(DustMap): 99 | """ 100 | Queries the Burstein & Heiles (1982) reddening map. 101 | """ 102 | 103 | def __init__(self, bh_dir=None): 104 | """ 105 | Args: 106 | bh_dir (Optional[str]): The directory containing the Burstein & 107 | Heiles dust map. Defaults to `None`, meaning that the default 108 | directory is used. 109 | """ 110 | if bh_dir is None: 111 | bh_dir = os.path.join(data_dir_default, 'bh') 112 | 113 | f = h5py.File(os.path.join(bh_dir, 'bh.h5'), 'r') 114 | self._hinorth = f['hinorth'][:] 115 | self._hisouth = f['hisouth'][:] 116 | self._rednorth = f['rednorth'][:] 117 | self._redsouth = f['redsouth'][:] 118 | f.close() 119 | 120 | def _lb2RN_northcap(self, l, b): 121 | R = 100. + (90. - b) * np.sin(np.radians(l)) / 0.3 122 | N = 100. + (90. - b) * np.cos(np.radians(l)) / 0.3 123 | return np.round(R).astype('i4'), np.round(N).astype('i4') 124 | 125 | def _lb2RN_southcap(self, l, b): 126 | R = 100. + (90. + b) * np.sin(np.radians(l)) / 0.3 127 | N = 100. + (90. + b) * np.cos(np.radians(l)) / 0.3 128 | return np.round(R).astype('i4'), np.round(N).astype('i4') 129 | 130 | def _lb2RN_mid(self, l, b): 131 | R = (np.abs(b) - 10.) / 0.6 132 | N = (np.mod(l, 360.) 
+ 0.15) / 0.3 - 1 133 | return np.round(R).astype('i4'), np.round(N).astype('i4') 134 | 135 | def _lb2ebv_northcap(self, l, b): 136 | R, N = self._lb2RN_northcap(l, b) 137 | return -0.0372 + self._hinorth[R,N] * 0.0000357 138 | 139 | def _lb2ebv_southcap(self, l, b): 140 | R, N = self._lb2RN_southcap(l, b) 141 | return -0.0372 + self._hisouth[R,N] * 0.0000357 142 | 143 | def _lb2ebv_midnorth(self, l, b): 144 | R, N = self._lb2RN_mid(l, b) 145 | return self._rednorth[R,N] * 0.001 146 | 147 | def _lb2ebv_midsouth(self, l, b): 148 | R, N = self._lb2RN_mid(l, b) 149 | return self._redsouth[R,N] * 0.001 150 | 151 | @ensure_flat_galactic 152 | def query(self, coords): 153 | """ 154 | Returns E(B-V) at the specified location(s) on the sky. 155 | 156 | Args: 157 | coords (`astropy.coordinates.SkyCoord`): The coordinates to query. 158 | 159 | Returns: 160 | A float array of reddening, in units of E(B-V), at the given 161 | coordinates. The shape of the output is the same as the shape of the 162 | coordinates stored by `coords`. 163 | """ 164 | # gal = coords.transform_to('galactic') 165 | gal = coords 166 | l = gal.l.deg 167 | b = gal.b.deg 168 | 169 | # Detect scalar input 170 | scalar_input = not hasattr(l, '__len__') 171 | if scalar_input: 172 | l = np.array([l]) 173 | b = np.array([b]) 174 | 175 | # Fill return array with NaNs 176 | ebv = np.empty(l.shape, dtype='f8') 177 | ebv[:] = np.nan 178 | 179 | # Fill northern cap 180 | idx = (b >= 65.) & (b <= 90.) 181 | ebv[idx] = self._lb2ebv_northcap(l[idx], b[idx]) 182 | 183 | # Fill southern cap 184 | idx = (b <= -65.) & (b >= -90.) 185 | ebv[idx] = self._lb2ebv_southcap(l[idx], b[idx]) 186 | 187 | # Fill northern midplane 188 | idx = (b < 65.) & (b >= 10.) 189 | ebv[idx] = self._lb2ebv_midnorth(l[idx], b[idx]) 190 | 191 | # Fill southern midplane 192 | idx = (b > -65.) & (b <= -10.) 
def main():
    """Renders the Burstein & Heiles map to an SVG image in output_dir."""
    #ascii2h5()
    bh = BHQuery()

    # Calculate E(B-V) on a regular 0.1-degree grid in (l, b)
    l = np.arange(-180., 180., 0.1)
    b = np.arange(-90., 90.01, 0.1)
    l, b = np.meshgrid(l, b)

    c = coordinates.SkyCoord(l, b, frame='galactic', unit='deg')

    ebv = bh.query(c)

    # Apply gamma stretch, preserving the sign of E(B-V)
    gamma = 0.8
    img = np.power(np.abs(ebv), gamma) * np.sign(ebv)

    import matplotlib
    matplotlib.use('Agg')
    import matplotlib.pyplot as plt

    fig = plt.figure(figsize=(12,6), dpi=300)
    # 'axisbg' was deprecated in matplotlib 1.5 and removed in 2.0;
    # 'facecolor' is the replacement keyword.
    ax = fig.add_subplot(1,1,1, facecolor='blue')
    ax.imshow(
        img,
        origin='lower',
        interpolation='none',
        cmap='Greys',
        aspect='equal',
        extent=(-180., 180., -90., 90.),
        vmin=-np.power(0.1, gamma),
        vmax=np.power(0.5, gamma),
        rasterized=True
    )
    # Flip the x-axis so Galactic longitude increases to the left
    ax.set_xlim(ax.get_xlim()[::-1])
    ax.set_xlabel(r'$\ell$', fontsize=18)
    ax.set_ylabel(r'$b$', fontsize=18)
    ax.set_title(r'$\mathrm{Burstein - Heiles \ \left( 1982 \right)}$', fontsize=22)

    fig.savefig(os.path.join(output_dir, 'bh.svg'), dpi=300, bbox_inches='tight')
    #plt.show()

    return 0
class Chen2014Query(UnstructuredDustMap):
    """
    The 3D dust map of Chen et al. (2014), based on stellar photometry from the
    Xuyi Schmidt Telescope Photometric Survey of the Galactic Anticentre. The
    map covers 140 deg < l < 240 deg, -60 deg < b < 40 deg.
    """

    def __init__(self, map_fname=None):
        """
        Args:
            map_fname (Optional[:obj:`str`]): Filename at which the map is stored.
                Defaults to ``None``, meaning that the default filename is used.
        """
        if map_fname is None:
            map_fname = os.path.join(data_dir(), 'chen2014', 'chen2014.h5')

        with h5py.File(map_fname, 'r') as f:
            self._dists = f['dists'][:]
            self._lb = f['pix_lb'][:]
            self._A = f['A_r'][:]
            self._sigma_A = f['A_r_err'][:]

        self._n_dists = self._dists.size

        # Don't query more than this angular distance from any point
        max_pix_scale = 0.5 * units.deg

        # Tesselate the sphere
        coords = coordinates.SkyCoord(
            self._lb[:,0],
            self._lb[:,1],
            unit='deg',
            frame='galactic')

        super(Chen2014Query, self).__init__(coords, max_pix_scale, metric_p=2)

    @ensure_flat_galactic
    def query(self, coords, return_sigma=False):
        """
        Returns r-band extinction, A_r, at the given coordinates. Can also
        return uncertainties.

        Args:
            coords (:obj:`astropy.coordinates.SkyCoord`): The coordinates to query.
            return_sigma (Optional[:obj:`bool`]): If ``True``, returns the uncertainty in
                extinction as well. Defaults to ``False``.

        Returns:
            Extinction in the r-band at the specified coordinates, in mags.
            The shape of the output depends on whether :obj:`coords` contains
            distances.

            If :obj:`coords` does not specify distance(s), then the shape of the
            output begins with :obj:`coords.shape`. If :obj:`coords` does specify
            distance(s), then the shape of the output begins with
            ``coords.shape + ([number of distance bins],)``.
        """
        n_coords_ret = coords.shape[0]

        # Determine if distance has been requested
        has_dist = hasattr(coords.distance, 'kpc')

        # Convert coordinates to pixel indices
        pix_idx = self._coords2idx(coords)

        # Determine which coordinates are out of bounds. They are temporarily
        # remapped to pixel 0, and overwritten with NaN at the end.
        mask_idx = (pix_idx == self._n_pix)
        if np.any(mask_idx):
            pix_idx[mask_idx] = 0

        if has_dist:
            d = coords.distance.kpc
            dist_idx_ceil = np.searchsorted(self._dists, d)

            ret = np.empty((n_coords_ret,), dtype='f8')
            if return_sigma:
                sigma_ret = np.empty((n_coords_ret,), dtype='f8')

            # d < d(nearest distance slice): scale the first slice linearly
            # down toward zero at zero distance.
            idx_near = (dist_idx_ceil == 0) & ~mask_idx
            if np.any(idx_near):
                a = d[idx_near] / self._dists[0]
                ret[idx_near] = a[:] * self._A[pix_idx[idx_near], 0]
                if return_sigma:
                    sigma_ret[idx_near] = a[:] * self._sigma_A[pix_idx[idx_near], 0]

            # d > d(farthest distance slice): clamp to the last slice.
            idx_far = (dist_idx_ceil == self._n_dists) & ~mask_idx
            if np.any(idx_far):
                ret[idx_far] = self._A[pix_idx[idx_far], -1]
                if return_sigma:
                    sigma_ret[idx_far] = self._sigma_A[pix_idx[idx_far], -1]

            # Otherwise: linearly interpolate between the two bracketing
            # distance slices.
            idx_btw = ~idx_near & ~idx_far & ~mask_idx
            if np.any(idx_btw):
                d_ceil = self._dists[dist_idx_ceil[idx_btw]]
                d_floor = self._dists[dist_idx_ceil[idx_btw]-1]
                a = (d_ceil - d[idx_btw]) / (d_ceil - d_floor)
                ret[idx_btw] = (
                    (1.-a[:]) * self._A[pix_idx[idx_btw], dist_idx_ceil[idx_btw]]
                    + a[:] * self._A[pix_idx[idx_btw], dist_idx_ceil[idx_btw]-1])
                if return_sigma:
                    # Normalized squared interpolation weights for the
                    # Gaussian uncertainties.
                    w0 = (1.-a)**2
                    w1 = a**2
                    norm = 1. / (w0 + w1)
                    w0 *= norm
                    w1 *= norm
                    sigma_ret[idx_btw] = np.sqrt(
                        w0 * self._sigma_A[pix_idx[idx_btw], dist_idx_ceil[idx_btw]]**2
                        + w1 * self._sigma_A[pix_idx[idx_btw], dist_idx_ceil[idx_btw]-1]**2
                    )
        else:
            # TODO: Harmonize order of distances & samples with Bayestar.
            ret = self._A[pix_idx, :]
            if return_sigma:
                sigma_ret = self._sigma_A[pix_idx, :]

        if np.any(mask_idx):
            ret[mask_idx] = np.nan
            if return_sigma:
                sigma_ret[mask_idx] = np.nan

        if return_sigma:
            return ret, sigma_ret

        return ret

    @property
    def distances(self):
        """
        Returns the distance bins that the map uses. The return type is
        :obj:`astropy.units.Quantity`, which stores unit-full quantities.
        """
        return self._dists * units.kpc
def ascii2h5(dat_fname, h5_fname):
    """
    Converts the original ASCII version of the Chen+ (2014) 3D dust map into
    the HDF5 format used by this package.

    Args:
        dat_fname (:obj:`str`): Filename of the original ASCII .dat file.
        h5_fname (:obj:`str`): Output filename to write the resulting HDF5 file to.
    """
    raw = np.loadtxt(dat_fname, skiprows=1, dtype='f4')

    # Keep only pixels whose extinction is not identically zero in every
    # distance bin
    keep = ~np.all(raw[:, 2:32] < 1.e-5, axis=1)

    # Storage options shared by all datasets
    dset_kwargs = {
        'chunks': True,
        'compression': 'gzip',
        'compression_opts': 3
    }

    with h5py.File(h5_fname, 'w') as f:
        # 30 distance bins, from 0 to 4.35 kpc in steps of 0.15 kpc
        dists = np.arange(0., 4.351, 0.15).astype('f4')

        dset = f.create_dataset('dists', data=dists, **dset_kwargs)
        dset.attrs['description'] = 'Distances at which extinction is measured'
        dset.attrs['units'] = 'kpc'

        dset = f.create_dataset('pix_lb', data=raw[keep, 0:2], **dset_kwargs)
        dset.attrs['description'] = 'Galactic (l, b) of each pixel'
        dset.attrs['units'] = 'deg'

        dset = f.create_dataset('A_r', data=raw[keep, 2:32], **dset_kwargs)
        dset.attrs['description'] = 'Extinction'
        dset.attrs['shape'] = '(pixel, distance)'
        dset.attrs['band'] = 'r'
        dset.attrs['units'] = 'mag'

        dset = f.create_dataset('A_r_err', data=raw[keep, 32:], **dset_kwargs)
        dset.attrs['description'] = 'Gaussian uncertainty in extinction'
        dset.attrs['shape'] = '(pixel, distance)'
        dset.attrs['band'] = 'r'
        dset.attrs['units'] = 'mag'
def fetch(clobber=False):
    """
    Downloads the Chen et al. (2014) dust map.

    Args:
        clobber (Optional[:obj:`bool`]): If ``True``, any existing file will be
            overwritten, even if it appears to match. If ``False`` (the
            default), :obj:`fetch()` will attempt to determine if the dataset
            already exists. This determination is not 100% robust against data
            corruption.
    """
    dest_dir = os.path.join(data_dir(), 'chen2014')
    url = 'https://paperdata.china-vo.org/diskec/extinmaps/chen2014map.dat'
    dat_fname = os.path.join(dest_dir, 'chen2014.dat')
    h5_fname = os.path.join(dest_dir, 'chen2014.h5')
    md5 = 'f8a2bc46d411c57ca4c76dc344e291f1'

    # Check if the repacked HDF5 file already exists
    if not clobber:
        h5_size = 52768768  # Guess, in Bytes
        h5_dsets = {
            'dists': (30,),
            'pix_lb': (557398, 2),
            'A_r': (557398, 30),
            'A_r_err': (557398, 30)
        }
        if fetch_utils.h5_file_exists(h5_fname, h5_size, dsets=h5_dsets):
            print('File appears to exist already. Call `fetch(clobber=True)` '
                  'to force overwriting of existing file.')
            return

    # Download the table
    print('Downloading {}'.format(url))
    fetch_utils.download_and_verify(url, md5, fname=dat_fname)

    # Convert from ASCII to HDF5 format
    print('Repacking files...')
    ascii2h5(dat_fname, h5_fname)

    # Cleanup: the original ASCII table is no longer needed
    print('Removing original file...')
    os.remove(dat_fname)
class Chen2018Query(EquirectangularDustMap):
    """
    The 3D dust map of Chen et al. (2018), based on parallaxes from Gaia and
    photometry from Gaia and 2MASS imaging in the Galactic plane. The map
    covers |b| < 10 deg.
    """

    def __init__(self, map_fname=None, color='BR'):
        """
        Args:
            map_fname (Optional[:obj:`str`]): Filename at which the map is stored.
                Defaults to ``None``, meaning that the default filename is used.
            color (Optional[:obj:`str`]): Which reddening color to load from
                the FITS table. The column ``'E' + color`` is read, so the
                default, ``'BR'``, selects the ``'EBR'`` column.
        """
        if map_fname is None:
            map_fname = os.path.join(data_dir(), 'chen2018', 'chen2018.fits')

        with fits.open(map_fname) as f:
            d = f[1].data[:]

        # Equirectangular grid covering the full Galactic plane strip, with
        # 30 distance bins spanning 0.2 - 6 kpc
        lon0, lon1 = 0., 360.
        lat0, lat1 = -10., 10.
        dist0, dist1 = (0.2, 6.0) * units.kpc
        shape = (3600, 200, 30)

        # Reshape the flat table column into (lon, lat, dist)
        pix_val = d['E'+color]
        pix_val.shape = shape

        super(Chen2018Query, self).__init__(
            pix_val,
            lon0, lon1,
            lat0, lat1,
            dist0=dist0, dist1=dist1,
            axis_order=('lon', 'lat', 'dist'),
            dist_interp='linear',
            frame='galactic'
        )
def fetch(clobber=False):
    """
    Downloads the 3D dust map of Chen et al. (2018).

    Args:
        clobber (Optional[bool]): If ``True``, any existing file will be
            overwritten, even if it appears to match. If ``False`` (the
            default), ``fetch()`` will attempt to determine if the dataset
            already exists. This determination is not 100% robust against data
            corruption.
    """
    import zipfile

    dest_dir = os.path.join(data_dir(), 'chen2018')
    table_fname = os.path.join(dest_dir, 'chen2018.fits')

    # Check if the FITS table already exists
    table_md5sum = '043f2aa2064af607b56c040971f8d786'

    if (not clobber) and fetch_utils.check_md5sum(table_fname, table_md5sum):
        print('File appears to exist already. Call `fetch(clobber=True)` '
              'to force overwriting of existing file.')
        return

    # Download the Zip archive from the server
    url = 'http://paperdata.china-vo.org/diskec/cestar/table1.zip'
    archive_fname = os.path.join(dest_dir, 'table1.zip')
    archive_md5sum = '4acedf3f11ee8045e102a78b5b72036b'

    fetch_utils.download_and_verify(url, archive_md5sum, archive_fname)

    # Extract the FITS table and README
    print('Extracting FITS table from Zip archive ...')

    with zipfile.ZipFile(archive_fname, 'r') as f:
        f.extract('table1.fits', path=dest_dir)
        f.extract('readme.txt', path=dest_dir)

    os.rename(os.path.join(dest_dir, 'table1.fits'), table_fname)

    # Delete the Zip archive
    print('Removing Zip archive ...')
    os.remove(archive_fname)
class Configuration(object):
    """
    Stores the package configuration as a JSON-backed key/value store.

    By default the configuration lives at ``~/.dustmapsrc``; the location can
    be overridden through the :obj:`DUSTMAPS_CONFIG_FNAME` environment
    variable. Paths stored in the configuration (such as the data directory,
    :obj:`data_dir`) can include environment variables, which will be
    expanded.
    """

    def __init__(self, fname):
        # ``_success`` tracks whether the config file was read cleanly;
        # ``save()`` refuses to overwrite a corrupted file unless forced.
        self._success = False
        self.fname = fname
        self.load()

    def load(self):
        """Reads the configuration from ``self.fname``, if it exists."""
        if not os.path.isfile(self.fname):
            warn(('Configuration file not found:\n\n'
                  '    {fname}\n\n'
                  'To create a new configuration file in the default '
                  'location, run the following python code:\n\n'
                  '    from dustmaps.config import config\n'
                  '    config.reset()\n\n'
                  'Note that this will delete your configuration! For '
                  'example, if you have specified a data directory, '
                  'then dustmaps will forget about its location.'
                  ).format(fname=self.fname), ConfigWarning)
            self._options = {}
            self._success = True
            return

        with open(self.fname, 'r') as f:
            try:
                self._options = json.load(f)
            except ValueError:
                # Unparseable JSON: keep running with empty options, but
                # leave _success False so save() will not clobber the file.
                warn(('The config file appears to be corrupted:\n\n'
                      '    {fname}\n\n'
                      'Either fix the config file manually, or overwrite '
                      'it with a blank configuration as follows:\n\n'
                      '    from dustmaps.config import config\n'
                      '    config.reset()\n\n'
                      'Note that this will delete your configuration! For '
                      'example, if you have specified a data directory, '
                      'then dustmaps will forget about its location.'
                      ).format(fname=self.fname), ConfigWarning)
                self._options = {}
            else:
                self._success = True

    def save(self, force=False):
        """
        Saves the configuration to a JSON, in the standard config location.

        Args:
            force (Optional[:obj:`bool`]): Continue writing, even if the original
                config file was not loaded properly. This is dangerous, because
                it could cause the previous configuration options to be lost.
                Defaults to :obj:`False`.

        Raises:
            :obj:`ConfigError`: if the configuration file was not successfully
                loaded on initialization of the class, and
                :obj:`force` is :obj:`False`.
        """
        if not (self._success or force):
            raise ConfigError((
                'The config file appears to be corrupted:\n\n'
                '    {fname}\n\n'
                'Before attempting to save the configuration, please either '
                'fix the config file manually, or overwrite it with a blank '
                'configuration as follows:\n\n'
                '    from dustmaps.config import config\n'
                '    config.reset()\n\n'
                ).format(fname=self.fname))

        with open(self.fname, 'w') as f:
            json.dump(self._options, f, indent=2)

    def __setitem__(self, key, value):
        # Every mutation is written straight through to disk.
        self._options[key] = value
        self.save()

    def __getitem__(self, key):
        # Missing keys read as ``None`` rather than raising KeyError.
        return self._options.get(key)

    def get(self, key, default=None):
        """
        Gets a configuration option, returning a default value if the specified
        key isn't set.
        """
        return self._options.get(key, default)

    def remove(self, key):
        """
        Deletes a key from the configuration.
        """
        self._options.pop(key, None)
        self.save()

    def reset(self):
        """
        Resets the configuration, and overwrites the existing configuration
        file.
        """
        self._options = {}
        self.save(force=True)
        self._success = True
class CSFDQuery(HEALPixQuery):
    """
    Queries the Corrected SFD dust map of Chiang (2023). This map is based
    on SFD, but contains a correction to remove contamination from
    large-scale structure (i.e., external galaxies).
    """

    def __init__(self, map_fname=None, mask_fname=None):
        """
        Args:
            map_fname (Optional[:obj:`str`]): Filename of the CSFD EBV map.
                Defaults to ``None``, meaning that the default location is
                used.
            mask_fname (Optional[:obj:`str`]): Filename of the CSFD mask map.
                Defaults to ``None``, meaning that the default location is
                used.
        """

        if map_fname is None:
            map_fname = os.path.join(data_dir(), 'csfd', 'csfd_ebv.fits')
        if mask_fname is None:
            mask_fname = os.path.join(data_dir(), 'csfd', 'mask.fits')

        try:
            # Flatten the binary-table column into a 1D HEALPix pixel array
            with fits.open(map_fname) as hdulist:
                ebv_data = hdulist['xtension'].data[:]['T'].flatten()
            with fits.open(mask_fname) as hdulist:
                mask_data = hdulist['xtension'].data[:]['T'].flatten()
        except IOError as error:
            # Point the user at the fetch() instructions before re-raising
            print(dustexceptions.data_missing_message('csfd',
                                                      'CSFD (Chiang 2023)'))
            raise error

        super(CSFDQuery, self).__init__(ebv_data, False, 'galactic',
                                        flags=mask_data)

    def query(self, coords, **kwargs):
        """
        Returns CSFD reddening on the same scale as SFD (similar to E(B-V)) at
        the specified location(s) on the sky. Also optionally returns a
        bit mask, where the bits (ordered from least to most significant) have
        the following meanings::

            Bit 0: 'LSS_corr' - This bit is set in the footprint within which
                   the LSS is reconstructed, and CSFD = SFD - LSS (otherwise
                   CSFD = SFD).
            Bit 1: 'no_IRAS' - Set in the area with no IRAS data (DIRBE data
                   filled in SFD); LSS removal in CSFD is done using a 1 deg
                   smoothed LSS.
            Bit 2: 'cosmology' - Set in the area where both the LSS and CSFD
                   are most reliable for precision cosmology analyses.

        Args:
            coords (:obj:`astropy.coordinates.SkyCoord`): The coordinates to
                query.
            return_flags (Optional[:obj:`bool`]): If ``True``, then a
                bit mask is returned as well, indicating where CSFD
                has been corrected for large-scale structure, where IRAS data
                was used, and where the map is suitable for cosmology. See
                above description of bits. Defaults to ``False``.

        Returns:
            A float array of the reddening, at the given coordinates. The
            shape of the output is the same as the shape of the input
            coordinate array, ``coords``. If ``return_flags`` is ``True``,
            a second array (a bit mask) of the same shape is returned. See
            above description of the meaning of each bit.
        """
        return super(CSFDQuery, self).query(coords, **kwargs)
def fetch(clobber=False):
    """
    Downloads the Corrected SFD dust map of Chiang (2023).

    Args:
        clobber (Optional[bool]): If ``True``, any existing file will be
            overwritten, even if it appears to match. If ``False`` (the
            default), ``fetch()`` will attempt to determine if the dataset
            already exists. This determination is not 100% robust against data
            corruption.
    """
    # (filename, expected MD5 checksum) pairs for the map and its mask
    file_spec = [
        ('csfd_ebv.fits', '31cd2eec51bcb5f106af84a610ced53c'),
        ('mask.fits', '9142f5a5d184125836a68b6f48d1113f')
    ]
    for fn, md5sum in file_spec:
        fname = os.path.join(data_dir(), 'csfd', fn)
        # Download from Zenodo
        url = 'https://zenodo.org/record/8207175/files/{}'.format(fn)
        fetch_utils.download_and_verify(url, md5sum, fname, clobber=clobber)
class Error(Exception):
    """Base class for all exceptions raised by the dustmaps package."""
    pass

class CoordFrameError(Error):
    """Raised when a coordinate frame is invalid or unsupported."""
    pass


def data_missing_message(package, name):
    """
    Builds a user-facing message explaining that the named dust map has not
    been downloaded yet, and how to configure the data directory and fetch it.
    """
    template = ("The {name} dust map is not in the data directory:\n\n"
                "    {data_dir}\n\n"
                "To change the data directory, call:\n\n"
                "    from dustmaps.config import config\n"
                "    config['data_dir'] = '/path/to/data/directory'\n\n"
                "To download the {name} map to the data directory, call:\n\n"
                "    import dustmaps.{package}\n"
                "    dustmaps.{package}.fetch()\n")
    return template.format(
        data_dir=std_paths.data_dir(),
        package=package,
        name=name)
def numpy2pil(a, vmin, vmax):
    """Converts a float array to an 8-bit PIL image, mapping [vmin, vmax] to [0, 255)."""
    a = np.clip((a - vmin) / (vmax - vmin), 0., 1.)
    a = (254.99 * a).astype('u1')
    return PIL.Image.fromarray(a)


def main():
    """Renders three Bayestar distance slices into the RGB channels of bayestar.png."""
    w,h = (2056,1024)
    l_0 = 130.

    # Set up Bayestar query object
    print('Loading bayestar map...')
    bayestar = BayestarQuery(max_samples=1)

    # Create a grid of coordinates, oversampled 2x relative to the output image
    print('Creating grid of coordinates...')
    l = np.linspace(-180.+l_0, 180.+l_0, 2*w)
    b = np.linspace(-90., 90., 2*h+2)
    b = b[1:-1]
    l,b = np.meshgrid(l, b)

    # Dither the sample positions to reduce aliasing in the downsampled image
    l += (np.random.random(l.shape) - 0.5) * 360./(2.*w)
    b += (np.random.random(l.shape) - 0.5) * 180./(2.*h)

    ebv = np.empty(l.shape+(3,), dtype='f8')

    # One color channel per distance slice
    for k,d in enumerate([0.5, 1.5, 5.]):
        coords = SkyCoord(l*u.deg, b*u.deg, d*u.kpc, frame='galactic')

        # Get the dust median reddening at each coordinate
        print('Querying map...')
        ebv[:,:,k] = bayestar.query(coords, mode='median')

    # Difference the channels, so each one holds the reddening accumulated
    # within one distance interval rather than the cumulative reddening
    ebv[:,:,2] -= ebv[:,:,1]
    ebv[:,:,1] -= ebv[:,:,0]

    # Convert the output array to a PIL image and save
    print('Saving image...')
    img = numpy2pil(ebv[::-1,::-1,:], 0., 1.5)
    # PIL.Image.LANCZOS moved to PIL.Image.Resampling in Pillow 9.1, and the
    # old alias was removed in Pillow 10; support both.
    try:
        lanczos = PIL.Image.Resampling.LANCZOS
    except AttributeError:
        lanczos = PIL.Image.LANCZOS
    img = img.resize((w,h), resample=lanczos)
    fname = 'bayestar.png'
    img.save(fname)

    return 0


if __name__ == '__main__':
    main()
def numpy2pil(a, vmin, vmax):
    """Converts a float array to an 8-bit PIL image, mapping [vmin, vmax] to [0, 255)."""
    a = np.clip((a - vmin) / (vmax - vmin), 0., 1.)
    a = (254.99 * a).astype('u1')
    return PIL.Image.fromarray(a)


def main():
    """Renders the Burstein & Heiles (1982) E(B-V) map to bh.png."""
    w,h = (2056,1024)
    l_0 = 0.

    # Create a grid of coordinates, oversampled 2x relative to the output image
    print('Creating grid of coordinates...')
    l = np.linspace(-180.+l_0, 180.+l_0, 2*w)
    b = np.linspace(-90., 90., 2*h+2)
    b = b[1:-1]
    l,b = np.meshgrid(l, b)

    # Dither the sample positions to reduce aliasing in the downsampled image
    l += (np.random.random(l.shape) - 0.5) * 360./(2.*w)
    b += (np.random.random(l.shape) - 0.5) * 180./(2.*h)

    coords = SkyCoord(l*u.deg, b*u.deg, frame='galactic')

    # Set up BH query object
    print('Loading BH map...')
    bh = BHQuery()

    print('Querying map...')
    ebv = bh.query(coords)

    # Convert the output array to a PIL image and save
    print('Saving image...')
    img = numpy2pil(ebv[::-1,::-1], 0., 1.5)
    # PIL.Image.LANCZOS moved to PIL.Image.Resampling in Pillow 9.1, and the
    # old alias was removed in Pillow 10; support both.
    try:
        lanczos = PIL.Image.Resampling.LANCZOS
    except AttributeError:
        lanczos = PIL.Image.LANCZOS
    img = img.resize((w,h), resample=lanczos)
    fname = 'bh.png'
    img.save(fname)

    return 0


if __name__ == '__main__':
    main()
def numpy2pil(a, vmin, vmax, fill=0):
    """Convert a float array to an 8-bit grayscale/RGB PIL image.

    Args:
        a (np.ndarray): Input array. NaN entries mark points outside the map.
        vmin (float): Value mapped to 0.
        vmax (float): Value mapped to 254.
        fill (int): Byte value substituted for NaN pixels.

    Returns:
        A `PIL.Image` wrapping the rescaled 8-bit array.
    """
    mask = np.isnan(a)
    a = np.clip((a - vmin) / (vmax - vmin), 0., 1.)
    a = (254.99 * a).astype('u1')
    a[mask] = fill
    return PIL.Image.fromarray(a)


def main():
    # Use integer division: under Python 3, 2056/2 is a float, which breaks
    # both np.linspace (integer sample count required) and
    # PIL.Image.resize (integer size tuple required).
    w, h = (2056 // 2, 2056 // 2)

    # Set up the Chen et al. (2014) query object.
    print('Loading Chen+(2014) map...')
    query = Chen2014Query()

    # Create a grid of coordinates covering the Orion region.
    print('Creating grid of coordinates...')
    l = np.linspace(186., 202., 2*w)
    b = np.linspace(-24., -8., 2*h)
    l, b = np.meshgrid(l, b)

    A = np.empty(l.shape + (3,), dtype='f8')

    # Query cumulative extinction out to three distances; each becomes one
    # channel of the final composite image.
    for k, d in enumerate([0.5, 1.0, 4.]):
        coords = SkyCoord(l*u.deg, b*u.deg, d*u.kpc, frame='galactic')

        print('Querying map to {:.1f} kpc...'.format(d))
        A[:, :, k] = query.query(coords)

        # Also save a grayscale image of this individual distance slice.
        print('Saving image...')
        img = numpy2pil(A[::-1, ::-1, k], 0., 2., fill=255)
        img = img.resize((w, h), resample=PIL.Image.LANCZOS)
        fname = 'chen2014_{:03.1f}kpc.png'.format(d)
        img.save(fname)

    # Convert cumulative extinction to extinction per distance interval.
    A[:, :, 2] -= A[:, :, 1]
    A[:, :, 1] -= A[:, :, 0]

    # Save the three distance intervals as an RGB composite.
    print('Saving image...')
    img = numpy2pil(A[::-1, ::-1, :], 0., 2., fill=255)
    img = img.resize((w, h), resample=PIL.Image.LANCZOS)
    fname = 'chen2014.png'
    img.save(fname)

    return 0
def main():
    # NOTE: SkyCoord.from_name resolves the name online (requires network).
    c0 = SkyCoord.from_name('orion a', frame='galactic')
    print(c0)

    # Grid of Galactic coordinates around (l0, b0), at a fixed 1 kpc distance.
    l0, b0 = (37., -16.)
    l = np.arange(l0 - 5., l0 + 5., 0.05)
    b = np.arange(b0 - 5., b0 + 5., 0.05)
    l, b = np.meshgrid(l, b)
    coords = SkyCoord(l*units.deg, b*units.deg,
                      distance=1.*units.kpc, frame='galactic')

    # Query each map and convert its reddening to A_V.
    Av_sfd = 2.742 * SFDQuery()(coords)
    Av_planck = 3.1 * PlanckQuery()(coords)
    Av_bayestar = 2.742 * BayestarQuery(max_samples=1)(coords)

    fig = plt.figure(figsize=(12, 4), dpi=150)

    panels = [(Av_sfd, 'SFD'),
              (Av_planck, 'Planck'),
              (Av_bayestar, 'Bayestar')]

    for k, (Av, title) in enumerate(panels):
        ax = fig.add_subplot(1, 3, k+1)
        # sqrt stretch brings out low-extinction structure.
        ax.imshow(
            np.sqrt(Av)[::, ::-1],
            vmin=0.,
            vmax=2.,
            origin='lower',
            interpolation='nearest',
            cmap='binary',
            aspect='equal'
        )
        ax.axis('off')
        ax.set_title(title)

    fig.subplots_adjust(wspace=0., hspace=0.)
    plt.savefig('comparison.png', dpi=150)

    return 0
def numpy2pil(a, vmin, vmax, fill=0):
    """Rescale `a` from [vmin, vmax] onto 8-bit gray; NaNs become `fill`."""
    nan_mask = np.isnan(a)
    out = (254.99 * np.clip((a - vmin) / (vmax - vmin), 0., 1.)).astype('u1')
    out[nan_mask] = fill
    return PIL.Image.fromarray(out)


def main():
    w, h = (2*2056, 2*int(2056*(30./200.)))
    l_0 = 122.5

    # Set up the Sale et al. (2014) "IPHAS" query object.
    print('Loading IPHAS map...')
    iphas = IPHASQuery()

    # Oversampled grid covering the IPHAS footprint around l_0.
    print('Creating grid of coordinates...')
    l = np.linspace(-100. + l_0, 100. + l_0, 2*w)
    b = np.linspace(-15., 15., 2*h)
    dl = l[1] - l[0]
    db = b[1] - b[0]
    l, b = np.meshgrid(l, b)

    # Random sub-pixel dither to reduce aliasing after downsampling.
    l += (np.random.random(l.shape) - 0.5) * dl
    b += (np.random.random(l.shape) - 0.5) * db

    A = np.empty(l.shape + (3,), dtype='f8')

    # One channel per distance slice.
    for k, d in enumerate([0.5, 1.5, 5.]):
        coords = SkyCoord(l*u.deg, b*u.deg, d*u.kpc, frame='galactic')

        # Draw a random sample of the cumulative reddening at each coordinate.
        print('Querying map...')
        A[:, :, k] = iphas.query(coords, mode='random_sample')

    # Difference the slices so each channel shows one distance interval.
    A[:, :, 2] -= A[:, :, 1]
    A[:, :, 1] -= A[:, :, 0]

    print('Saving image...')
    img = numpy2pil(A[::-1, ::-1, :], 0., 4.5, fill=255)
    img = img.resize((w, h), resample=PIL.Image.LANCZOS)
    img.save('iphas.png')

    return 0
def numpy2pil(a, vmin, vmax):
    """Linearly rescale `a` from [vmin, vmax] to 8-bit gray, wrapped in PIL."""
    frac = np.clip((a - vmin) / (vmax - vmin), 0., 1.)
    return PIL.Image.fromarray((254.99 * frac).astype('u1'))


def main():
    w, h = (2056, 1024)
    l_0 = 0.

    # All-sky grid of Galactic coordinates, oversampled 2x in each dimension.
    print('Creating grid of coordinates...')
    l = np.linspace(-180. + l_0, 180. + l_0, 2*w)
    b = np.linspace(-90., 90., 2*h + 2)[1:-1]  # exclude the poles
    l, b = np.meshgrid(l, b)

    # Sub-pixel random dither against aliasing in the downsampled image.
    l += (np.random.random(l.shape) - 0.5) * 360. / (2.*w)
    b += (np.random.random(l.shape) - 0.5) * 180. / (2.*h)

    coords = SkyCoord(l*u.deg, b*u.deg, frame='galactic')

    # Set up the Lenz, Hensley & Dore (2017) query object.
    print('Loading Lenz+(2017) map...')
    q = Lenz2017Query()

    print('Querying map...')
    ebv = q.query(coords)

    print('Saving image...')
    img = numpy2pil(ebv[::-1, ::-1], 0., 0.05)
    img = img.resize((w, h), resample=PIL.Image.LANCZOS)
    img.save('lenz2017.png')

    return 0
def numpy2pil(a, vmin, vmax, fill=0):
    """Rescale to 8-bit gray; NaN pixels (outside the map) become `fill`."""
    nan_mask = np.isnan(a)
    out = (254.99 * np.clip((a - vmin) / (vmax - vmin), 0., 1.)).astype('u1')
    out[nan_mask] = fill
    return PIL.Image.fromarray(out)


def main():
    w, h = (2*2056, 2*int(2056*(20./200.)))
    l_0 = 0.

    # Set up the Marshall et al. (2006) query object.
    print('Loading Marshall map...')
    query = MarshallQuery()

    # Grid along the Galactic plane: |b| <= 10 deg, l within 100 deg of l_0.
    print('Creating grid of coordinates...')
    l = np.linspace(-100. + l_0, 100. + l_0, 2*w)
    b = np.linspace(-10., 10., 2*h)
    dl = l[1] - l[0]
    db = b[1] - b[0]
    l, b = np.meshgrid(l, b)

    # Random sub-pixel dither to suppress aliasing.
    l += (np.random.random(l.shape) - 0.5) * dl
    b += (np.random.random(l.shape) - 0.5) * db

    A = np.empty(l.shape + (3,), dtype='f8')

    # One channel per distance slice; query mean extinction at each.
    for k, d in enumerate([1., 2.5, 5.]):
        coords = SkyCoord(l*u.deg, b*u.deg, d*u.kpc, frame='galactic')

        print('Querying map...')
        A[:, :, k] = query(coords, return_sigma=False)

    # Convert cumulative extinction to per-distance-interval extinction.
    A[:, :, 2] -= A[:, :, 1]
    A[:, :, 1] -= A[:, :, 0]

    print('Saving image...')
    img = numpy2pil(A[::-1, ::-1, :], 0., 1., fill=255)
    img = img.resize((w, h), resample=PIL.Image.LANCZOS)
    img.save('marshall.png')

    return 0
def numpy2pil(a, vmin, vmax):
    """Map `a` linearly from [vmin, vmax] onto 8-bit gray, clipping outside."""
    frac = np.clip((a - vmin) / (vmax - vmin), 0., 1.)
    return PIL.Image.fromarray((254.99 * frac).astype('u1'))


def main():
    w, h = (2056, 1024)
    l_0 = 0.

    # Oversampled all-sky grid of Galactic coordinates.
    print('Creating grid of coordinates...')
    l = np.linspace(-180. + l_0, 180. + l_0, 2*w)
    b = np.linspace(-90., 90., 2*h + 2)[1:-1]  # drop the poles
    l, b = np.meshgrid(l, b)

    # Sub-pixel random dither to reduce aliasing after downsampling.
    l += (np.random.random(l.shape) - 0.5) * 360. / (2.*w)
    b += (np.random.random(l.shape) - 0.5) * 180. / (2.*h)

    coords = SkyCoord(l*u.deg, b*u.deg, frame='galactic')

    # (component name, display vmin, display vmax) for each Planck product.
    planck_components = [
        ('ebv', 0., 1.5),
        ('radiance', 0., 1.5),
        ('tau', 0., 1.5),
        ('temp', 15.*u.K, 25.*u.K),
        ('err_temp', 0.*u.K, 4.*u.K),
        ('beta', 1., 3.),
        ('err_beta', 0., 0.2)]

    for component, vmin, vmax in planck_components:
        # Set up a query object for this component.
        print('Loading Planck map...')
        planck = PlanckQuery(component=component)

        print('Querying map...')
        res = planck.query(coords)

        print('Saving image...')
        img = numpy2pil(res[::-1, ::-1], vmin, vmax)
        img = img.resize((w, h), resample=PIL.Image.LANCZOS)
        img.save('planck_{}.png'.format(component))

    return 0
def numpy2pil(a, vmin, vmax):
    """Rescale `a` from [vmin, vmax] to 8-bit grayscale and wrap in PIL."""
    frac = np.clip((a - vmin) / (vmax - vmin), 0., 1.)
    return PIL.Image.fromarray((254.99 * frac).astype('u1'))


def main():
    w, h = (2056, 1024)
    l_0 = 0.

    # Oversampled all-sky grid of Galactic coordinates.
    print('Creating grid of coordinates...')
    l = np.linspace(-180. + l_0, 180. + l_0, 2*w)
    b = np.linspace(-90., 90., 2*h + 2)[1:-1]  # exclude the poles
    l, b = np.meshgrid(l, b)

    # Random sub-pixel offsets reduce aliasing in the downsampled image.
    l += (np.random.random(l.shape) - 0.5) * 360. / (2.*w)
    b += (np.random.random(l.shape) - 0.5) * 180. / (2.*h)

    coords = SkyCoord(l*u.deg, b*u.deg, frame='galactic')

    # Set up the Schlegel, Finkbeiner & Davis (1998) query object.
    print('Loading SFD map...')
    sfd = SFDQuery()

    print('Querying map...')
    ebv = sfd.query(coords)

    print('Saving image...')
    img = numpy2pil(ebv[::-1, ::-1], 0., 1.5)
    img = img.resize((w, h), resample=PIL.Image.LANCZOS)
    img.save('sfd.png')

    return 0
class GaiaTGEQuery(HEALPixQuery):
    """
    Queries the Gaia Total Galactic Extinction (Delchambre 2022) dust map,
    which contains estimates of monochromatic extinction, A0, in mags.
    """

    def __init__(self, map_fname=None, healpix_level='optimum'):
        """
        Args:
            map_fname (Optional[`str`]): Filename of the Gaia TGE map.
                Defaults to ``None``, meaning that the default location is
                used.
            healpix_level (Optional[`int` or `str`]): Which HEALPix
                level to load into the map. If "optimum" (the default), loads
                the optimum HEALPix level available at each location. If an
                `int`, instead loads the specified HEALPix level.
        """

        if map_fname is None:
            map_fname = os.path.join(
                data_dir(),
                'gaia_tge',
                'TotalGalacticExtinctionMap_001.csv.gz'
            )

        try:
            # Cannot use astropy ECSV reader, due to bug in processing
            # null values
            dtype = [
                ('solution_id', 'i8'),
                ('healpix_id', 'i8'),
                ('healpix_level', 'i1'),
                ('a0', 'f4'),
                ('a0_uncertainty', 'f4'),
                ('a0_min', 'f4'),
                ('a0_max', 'f4'),
                ('num_tracers_used', 'i4'),
                ('optimum_hpx_flag', '?'),
                ('status', 'i2')
            ]
            # Column 8 holds quoted strings '"True"'/'"False"'; convert to
            # bool by direct string comparison.
            converters = {8: lambda x: x == '"True"'}
            # The [1:] drops the column-name row, which genfromtxt would
            # otherwise parse as a (garbage) data row.
            d = np.genfromtxt(
                map_fname, comments='#', delimiter=',',
                encoding='utf-8', converters=converters,
                dtype=dtype
            )[1:]
        except IOError as error:
            print(dustexceptions.data_missing_message('gaia_tge',
                                                      'Gaia TGE'))
            raise error

        if isinstance(healpix_level, int):
            # Load only the rows at a single, fixed HEALPix level.
            idx = (d['healpix_level'] == healpix_level)
            n_pix = np.count_nonzero(idx)
            if n_pix == 0:
                levels_avail = np.unique(d['healpix_level']).tolist()
                raise ValueError(
                    'Requested HEALPix level not stored in map. Available '
                    'levels: {}'.format(levels_avail)
                )
            # Order the selected rows by HEALPix pixel id, so the resulting
            # array can be indexed as a HEALPix map.
            hpx_sort_idx = np.argsort(d['healpix_id'][idx])
            idx = np.where(idx)[0]
            idx = idx[hpx_sort_idx]
        elif healpix_level == 'optimum':
            idx_opt = d['optimum_hpx_flag']
            # Upscale to highest HEALPix level
            hpx_level = d['healpix_level'][idx_opt]
            hpx_level_max = np.max(hpx_level)
            n_pix = 12 * 4**hpx_level_max
            # Index from original array to use in each pixel of final map
            idx = np.full(n_pix, -1, dtype='i8') # Empty pixel -> index=-1
            # Row numbers (in `d`) of the optimum pixels.
            # NOTE(review): the ids appear to be NESTED-ordered -- the
            # parent->child replication below relies on nested indexing,
            # where a parent pixel's children occupy a contiguous id range.
            idx_opt = np.where(idx_opt)[0]
            hpx_idx = d['healpix_id'][idx_opt]
            # Add pixels of each level to the map
            for level in np.unique(hpx_level):
                nside = 2**level
                idx_lvl = (hpx_level == level)
                # Get the nest-ordered index of optimal pixels at this level
                hpx_idx_nest = hpx_idx[idx_lvl]
                # Fill in index (in orig arr) of these pixels.
                # In nested ordering, a pixel at `level` covers children
                # [id*4**dl, (id+1)*4**dl) at hpx_level_max, with
                # dl = hpx_level_max - level.
                mult_factor = 4**(hpx_level_max-level)
                hpx_idx_base = hpx_idx_nest*mult_factor
                for offset in range(mult_factor):
                    idx[hpx_idx_base+offset] = idx_opt[idx_lvl]
        else:
            raise ValueError(
                '`healpix_level` must be either an integer or "optimum"'
            )

        # Pixels never filled (no estimate at the requested level) get NaN
        # values and sentinel flags below.
        bad_mask = (idx == -1)

        pix_val = d['a0'][idx]
        pix_val[bad_mask] = np.nan

        # Per-pixel ancillary data, exposed via `query(..., return_flags=True)`.
        dtype = [
            ('a0_uncertainty', 'f4'),
            ('num_tracers_used', 'i4'),
            ('optimum_hpx_flag', 'bool')
        ]
        flags = np.empty(n_pix, dtype=dtype)
        for key,dt in dtype:
            flags[key] = d[key][idx]
            flags[key][bad_mask] = {'f4':np.nan, 'i4':-1, 'bool':False}[dt]

        # Nested ordering; ICRS is the frame the Gaia TGE HEALPix ids use.
        super(GaiaTGEQuery, self).__init__(
            pix_val, True, 'icrs', flags=flags
        )

    def query(self, coords, **kwargs):
        """
        Returns a numpy array containing A0 at the specified
        location(s) on the sky. Optionally, returns a 2nd array containing
        flags at the same location(s).

        Args:
            coords (`astropy.coordinates.SkyCoord`): The coordinates to
                query.
            return_flags (Optional[`bool`]): If `True`, returns a 2nd array
                containing flags at each coordinate. Defaults to `False`.

        Returns:
            A numpy array containing A0 at the specified
            coordinates. The shape of the output is the same as the shape of
            the input coordinate array, ``coords``. If `return_flags` is
            `True`, a 2nd record array containing flags at each coordinate
            is also returned.
        """
        return super(GaiaTGEQuery, self).query(coords, **kwargs)


def fetch():
    """
    Downloads the Gaia Total Galactic Extinction (TGE) dust maps, placing
    it in the default ``dustmaps`` directory.
    """
    props = {
        'url': (
            'http://cdn.gea.esac.esa.int/Gaia/gdr3/Astrophysical_parameters/'
            'total_galactic_extinction_map/TotalGalacticExtinctionMap_001.csv.gz'
        ),
        'md5': '5f6271869b7e60960a955f08ca11dc37',
        'fname': 'TotalGalacticExtinctionMap_001.csv.gz'
    }
    fname = os.path.join(data_dir(), 'gaia_tge', props['fname'])
    fetch_utils.download_and_verify(props['url'], props['md5'], fname=fname)


def main():
    # Smoke test: query the Galactic center, anticenter and north pole.
    from astropy.coordinates import SkyCoord
    q = GaiaTGEQuery()
    c = SkyCoord([0., 180., 0.], [0., 0., 90.], frame='galactic', unit='deg')
    print(q(c))


if __name__ == '__main__':
    main()
class HEALPixQuery(DustMap):
    """
    A class for querying HEALPix maps.
    """

    def __init__(self, pix_val, nest, coord_frame, flags=None):
        """
        Args:
            pix_val (array): Value of the map in every pixel. The length of the
                array must be of the form `12 * nside**2`, where `nside` is a
                power of two.
            nest (bool): `True` if the map uses nested ordering. `False` if
                ring ordering is used.
            coord_frame (str): The coordinate system that the HEALPix map is in.
                Should be one of the frames supported by `astropy.coordinates`.
            flags (Optional[array]): Additional per-pixel data (e.g., a record
                array of quality flags). Must match ``pix_val`` in length
                along the first axis. Defaults to ``None``.
        """
        # Validate before storing any state, so a bad `flags` argument never
        # leaves a partially initialized object behind.
        if (flags is not None) and (flags.shape[0] != pix_val.shape[0]):
            raise ValueError((
                'The shape of `flags` ({}) must match the shape '
                'of `pix_val` ({}) along the first axis.'
            ).format(flags.shape, pix_val.shape))
        self._nside = hp.pixelfunc.npix2nside(len(pix_val))
        self._pix_val = pix_val
        self._nest = nest
        self._frame = coord_frame
        self._flags = flags
        super(HEALPixQuery, self).__init__()

    def query(self, coords, return_flags=False):
        """
        Args:
            coords (`astropy.coordinates.SkyCoord`): The coordinates to query.
            return_flags ([Optional[:obj:`bool`]): If `True`, return flags at
                each pixel. Only possible if flags were provided during
                initialization.

        Returns:
            A float array of the value of the map at the given coordinates. The
            shape of the output is the same as the shape of the coordinates
            stored by `coords`. If `return_flags` is `True`, then a second
            array, containing flags at each pixel, is also returned.
        """
        pix_idx = coord2healpix(coords, self._frame,
                                self._nside, nest=self._nest)
        sel_pix = self._pix_val[pix_idx]

        if return_flags:
            if self._flags is None:
                raise ValueError(
                    '`return_flags` is True, but the class was initialized '
                    'without flags.'
                )
            return sel_pix, self._flags[pix_idx]

        # Reuse the values selected above (previously this re-indexed
        # `self._pix_val` a second time).
        return sel_pix


class HEALPixFITSQuery(HEALPixQuery):
    """
    A HEALPix map class that is initialized from a FITS file.
    """

    def __init__(self, fname, coord_frame, hdu=0, field=None,
                 dtype='f8', scale=None):
        """
        Args:
            fname (str, HDUList, TableHDU or BinTableHDU): The filename, HDUList
                or HDU from which the map should be loaded.
            coord_frame (str): The coordinate system in which the HEALPix map is
                defined. Must be a coordinate frame which ``astropy``
                understands.
            hdu (Optional[int or str]): Specifies which HDU to load the map
                from. Defaults to ``0``.
            field (Optional[int or str]): Specifies which field (column) to load
                the map from. Defaults to ``None``, meaning that ``hdu.data[:]``
                is used.
            dtype (Optional[str or type]): The data will be coerced to this
                datatype. Can be any type specification that numpy understands,
                including a structured datatype, if multiple fields are to be
                loaded. Defaults to ``'f8'``, for IEEE754 double precision.
            scale (Optional[:obj:`float`]): Scale factor to be multiplied into
                the data.
        """
        close_file = False

        if isinstance(fname, six.string_types):
            close_file = True
            hdulist = fits.open(fname)
            # (removed a leftover debug `print(hdulist.info())` here; it
            # dumped the HDU listing -- followed by "None" -- to stdout on
            # every load from a filename)
            hdu = hdulist[hdu]
        elif isinstance(fname, fits.HDUList):
            hdu = fname[hdu]
        elif (isinstance(fname, fits.TableHDU)
                or isinstance(fname, fits.BinTableHDU)):
            hdu = fname
        else:
            raise TypeError('`fname` must be a `str`, `HDUList`, `TableHDU` or '
                            '`BinTableHDU`.')

        if field is None:
            pix_val = np.array(hdu.data[:].ravel().astype(dtype))
        else:
            pix_val = np.array(hdu.data[field][:].ravel().astype(dtype))

        if scale is not None:
            names = pix_val.dtype.names
            if names is None:
                pix_val *= scale
            else:
                # Structured dtype: scale each field individually.
                for n in names:
                    pix_val[n] *= scale

        # Default to nested ordering when the header lacks an ORDERING card.
        nest = hdu.header.get('ORDERING', 'NESTED').strip() == 'NESTED'

        if close_file:
            hdulist.close()

        super(HEALPixFITSQuery, self).__init__(pix_val, nest, coord_frame)
class Leike2020Query(DustMap):
    """
    A class for querying the Leike, Glatzle & Ensslin (2020) dust map.

    For details on how to use this map, see the original paper:
    https://ui.adsabs.harvard.edu/abs/2020A%26A...639A.138L/abstract.

    The data is deposited at Zenodo: https://doi.org/10.5281/zenodo.3993082.
    """

    def __init__(self, map_fname=None):
        """
        Args:
            map_fname (Optional[str]): Filename of the map. Defaults
                to :obj:`None`, meaning that the default location
                is used.
        """

        if map_fname is None:
            map_fname = os.path.join(
                data_dir(),
                'leike_2020',
                'mean_std.h5'
            )

        self._data = {}

        # Open read-only: an explicit mode avoids h5py's deprecation warning
        # for the implicit default and makes the intent clear.
        with h5py.File(map_fname, 'r') as f:
            self._data['mean'] = f['mean'][:]
            self._data['std'] = f['std'][:]

        self._xyz0 = (-370., -370., -270.)  # Lower edge of map, in pc
        self._shape = self._data['mean'].shape

    def _coords2idx(self, coords):
        """
        Converts sky coordinates to (3, ...) integer voxel indices.

        Returns:
            idx: Integer indices into the map arrays. Out-of-bounds
                coordinates are assigned index -1 on every axis.
            mask: Boolean array, `True` where the coordinate falls outside
                the map volume.
        """
        c = coords.transform_to('galactic').represent_as('cartesian')

        idx = np.empty((3,) + c.shape, dtype='i4')
        mask = np.zeros(c.shape, dtype=bool)

        # Voxels are 1 pc on a side, so the index is just the (floored)
        # offset from the lower map edge.
        for i, x in enumerate((c.x, c.y, c.z)):
            idx[i, ...] = np.floor(x.to('pc').value - self._xyz0[i])
            mask |= (idx[i] < 0) | (idx[i] >= self._shape[i])

        for i in range(3):
            idx[i, mask] = -1

        return idx, mask

    @ensure_flat_coords
    def query(self, coords, component='mean'):
        """
        Returns the extinction density (in e-foldings / kpc, in Gaia G-band)
        at the given coordinates.

        Args:
            coords (:obj:`astropy.coordinates.SkyCoord`): Coordinates at which
                to query the extinction. Must be 3D (i.e., include distance
                information).
            component (str): Which component to return. Allowable values are
                'mean' (for the mean extinction density) and 'std' (for the
                standard deviation of extinction density). Defaults to 'mean'.

        Returns:
            The extinction density, in units of e-foldings / kpc, as either a
            numpy array or float, with the same shape as the input
            :obj:`coords`.
        """
        idx, mask = self._coords2idx(coords)

        v = self._data[component][idx[0], idx[1], idx[2]]

        if np.any(mask):
            # Set extinction to NaN for out-of-bounds (x, y, z)
            v[mask] = np.nan

        return v


def fetch(clobber=False, fetch_samples=False):
    """
    Downloads the 3D dust map of Leike & Ensslin (2020).

    Args:
        clobber (Optional[bool]): If ``True``, any existing file will be
            overwritten, even if it appears to match. If ``False`` (the
            default), ``fetch()`` will attempt to determine if the dataset
            already exists. This determination is not 100% robust against
            data corruption.
        fetch_samples (Optional[bool]): If ``True``, the samples will also be
            downloaded. If ``False`` (the default), only the mean and standard
            deviation will be downloaded. The samples take up 14 GB, which is
            why the default is not to download them.
    """
    # (removed an unused `fname_pattern` alias that was bound here)
    dest_dir = os.path.join(data_dir(), 'leike_2020')

    file_spec = [
        ('mean_std.h5', '1ea998fdaef58f53da639356362223ba')
    ]
    if fetch_samples:
        file_spec += [
            ('samples.h5', '581f9ebc4775d37fd431fc6c0984dcf6')
        ]

    for fn, md5sum in file_spec:
        fname = os.path.join(dest_dir, fn)

        # Skip files that already exist with the expected checksum.
        if (not clobber) and fetch_utils.check_md5sum(fname, md5sum):
            print('File "{}" appears to exist already. Call '.format(fn)+
                  '`fetch(clobber=True)` to force overwriting of existing '+
                  'file.')
            continue

        # Download from the server
        url = 'https://zenodo.org/record/3993082/files/{}?download=1'.format(fn)
        fetch_utils.download_and_verify(url, md5sum, fname)
18 | # 19 | 20 | from __future__ import print_function, division 21 | 22 | import numpy as np 23 | import h5py 24 | import astropy.coordinates as coordinates 25 | import astropy.units as units 26 | from astropy.coordinates import Longitude 27 | 28 | from .map_base import DustMap, ensure_flat_coords 29 | from .std_paths import * 30 | from . import fetch_utils 31 | 32 | 33 | class LeikeEnsslin2019Query(DustMap): 34 | """ 35 | A class for querying the Leike & Ensslin (2019) dust map. 36 | """ 37 | 38 | def __init__(self, map_fname=None): 39 | """ 40 | Args: 41 | map_fname (Optional[str]): Filename of the map. Defaults 42 | to :obj:`None`, meaning that the default location 43 | is used. 44 | """ 45 | 46 | if map_fname is None: 47 | map_fname = os.path.join( 48 | data_dir(), 49 | 'leike_ensslin_2019', 50 | 'simple_cube.h5' 51 | ) 52 | 53 | self._data = {} 54 | 55 | with h5py.File(map_fname) as f: 56 | self._data['mean'] = f['mean'][:] 57 | self._data['std'] = f['std'][:] 58 | 59 | self._shape = self._data['mean'].shape 60 | 61 | def _coords2idx(self, coords): 62 | c = coords.transform_to('galactic').represent_as('cartesian') 63 | 64 | idx = np.empty((3,) + c.shape, dtype='i4') 65 | mask = np.zeros(c.shape, dtype=bool) 66 | 67 | for i,x in enumerate((c.x, c.y, c.z)): 68 | idx[i,...] = np.floor(x.to('pc').value + 300) * 256/600. 69 | mask |= (idx[i] < 0) | (idx[i] >= self._shape[i]) 70 | 71 | for i in range(3): 72 | idx[i, mask] = -1 73 | 74 | return idx, mask 75 | 76 | @ensure_flat_coords 77 | def query(self, coords, component='mean'): 78 | """ 79 | Returns the extinction density (in e-foldings / kpc, in Gaia G-band) 80 | at the given coordinates. 81 | 82 | Args: 83 | coords (:obj:`astropy.coordinates.SkyCoord`): Coordinates at which 84 | to query the extinction. Must be 3D (i.e., include distance 85 | information). 86 | component (str): Which component to return. 
Allowable values are 87 | 'mean' (for the mean extinction density) and 'std' (for the 88 | standard deviation of extinction density). Defaults to 'mean'. 89 | 90 | Returns: 91 | The extinction density, in units of e-foldings / kpc, as either a 92 | numpy array or float, with the same shape as the input 93 | :obj:`coords`. 94 | """ 95 | idx,mask = self._coords2idx(coords) 96 | 97 | v = self._data[component][idx[0], idx[1], idx[2]] 98 | 99 | if np.any(mask): 100 | # Set extinction to NaN for out-of-bounds (x, y, z) 101 | v[mask] = np.nan 102 | 103 | return v 104 | 105 | 106 | def fetch(clobber=False): 107 | """ 108 | Downloads the 3D dust map of Leike & Ensslin (2019). 109 | 110 | Args: 111 | clobber (Optional[bool]): If ``True``, any existing file will be 112 | overwritten, even if it appears to match. If ``False`` (the 113 | default), ``fetch()`` will attempt to determine if the dataset 114 | already exists. This determination is not 100\% robust against data 115 | corruption. 116 | """ 117 | dest_dir = fname_pattern = os.path.join(data_dir(), 'leike_ensslin_2019') 118 | fname = os.path.join(dest_dir, 'simple_cube.h5') 119 | 120 | # Check if the FITS table already exists 121 | md5sum = 'f54e01c253453117e3770575bed35078' 122 | 123 | if (not clobber) and fetch_utils.check_md5sum(fname, md5sum): 124 | print('File appears to exist already. Call `fetch(clobber=True)` ' 125 | 'to force overwriting of existing file.') 126 | return 127 | 128 | # Download from the server 129 | url = 'https://zenodo.org/record/2577337/files/simple_cube.h5?download=1' 130 | fetch_utils.download_and_verify(url, md5sum, fname) 131 | -------------------------------------------------------------------------------- /dustmaps/lenz2017.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # 4 | # lenz2017.py 5 | # Reads the Lenz, Hensley & Doré (2017) dust reddening map. 
6 | # http://arxiv.org/abs/1706.00011 7 | # 8 | # Copyright (C) 2018 Gregory M. Green 9 | # 10 | # dustmaps is free software: you can redistribute it and/or modify 11 | # it under the terms of either: 12 | # 13 | # - The GNU General Public License as published by the Free Software Foundation, 14 | # either version 2 of the License, or (at your option) any later version, or 15 | # - The 2-Clause BSD License (also known as the Simplified BSD License). 16 | # 17 | # You should have received copies of the GNU General Public License 18 | # and the BSD License along with this program. 19 | # 20 | 21 | from __future__ import print_function, division 22 | 23 | import os 24 | import numpy as np 25 | import healpy as hp 26 | import astropy.io.fits as fits 27 | import astropy.units as units 28 | 29 | from .std_paths import * 30 | from .healpix_map import HEALPixFITSQuery 31 | from . import fetch_utils 32 | from . import dustexceptions 33 | 34 | 35 | class Lenz2017Query(HEALPixFITSQuery): 36 | """ 37 | Queries the Lenz, Hensley & Doré (2017) dust map: 38 | http://arxiv.org/abs/1706.00011 39 | """ 40 | 41 | def __init__(self, map_fname=None): 42 | """ 43 | Args: 44 | map_fname (Optional[:obj:`str`]): Filename for the Lenz map. Defaults to 45 | ``None``, meaning that the default location is used. 46 | """ 47 | 48 | if map_fname is None: 49 | map_fname = os.path.join( 50 | data_dir(), 51 | 'lenz2017', 52 | 'ebv_lhd.hpx.fits') 53 | 54 | try: 55 | super(Lenz2017Query, self).__init__( 56 | map_fname, 'galactic', 57 | hdu=1, 58 | field='EBV') 59 | except IOError as error: 60 | print(dustexceptions.data_missing_message('lenz2017', 61 | 'Lenz et al. (2017)')) 62 | raise error 63 | 64 | def query(self, coords, **kwargs): 65 | """ 66 | Returns E(B-V), in mags, at the specified location(s) on the sky. 67 | 68 | Args: 69 | coords (:obj:`astropy.coordinates.SkyCoord`): The coordinates to query. 
70 | 71 | Returns: 72 | A float array of the reddening, in magnitudes of E(B-V), at the 73 | selected coordinates. 74 | """ 75 | return super(Lenz2017Query, self).query(coords, **kwargs) 76 | 77 | 78 | def fetch(): 79 | """ 80 | Downloads the Lenz, Hensley & Doré (2017) dust map, placing it in the 81 | default :obj:`dustmaps` data directory. 82 | """ 83 | doi = '10.7910/DVN/AFJNWJ' 84 | fname = os.path.join( 85 | data_dir(), 86 | 'lenz2017', 87 | 'ebv_lhd.hpx.fits') 88 | fetch_utils.dataverse_download_doi( 89 | doi, fname, 90 | file_requirements={'filename': 'ebv_lhd.hpx.fits'}) 91 | 92 | 93 | def main(): 94 | from astropy.coordinates import SkyCoord 95 | q = Lenz2017Query() 96 | c = SkyCoord([0., 180., 0.], [0., 0., 90.], frame='galactic', unit='deg') 97 | print(q(c)) 98 | 99 | 100 | if __name__ == '__main__': 101 | main() 102 | -------------------------------------------------------------------------------- /dustmaps/output/.gitignore: -------------------------------------------------------------------------------- 1 | # Ignore everything in this directory, except .gitignore 2 | * 3 | !.gitignore 4 | -------------------------------------------------------------------------------- /dustmaps/pg2010.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # pg2010.py 4 | # Reads the Peek & Graves (2010) correction to the SFD'98 dust reddening map. 5 | # 6 | # Copyright (C) 2018 Gregory M. Green 7 | # 8 | # dustmaps is free software: you can redistribute it and/or modify 9 | # it under the terms of either: 10 | # 11 | # - The GNU General Public License as published by the Free Software Foundation, 12 | # either version 2 of the License, or (at your option) any later version, or 13 | # - The 2-Clause BSD License (also known as the Simplified BSD License). 14 | # 15 | # You should have received copies of the GNU General Public License 16 | # and the BSD License along with this program. 
17 | # 18 | 19 | from __future__ import print_function, division 20 | 21 | import os 22 | import numpy as np 23 | 24 | from .std_paths import * 25 | from .map_base import DustMap, WebDustMap, ensure_flat_galactic 26 | from . import fetch_utils 27 | from . import dustexceptions 28 | from .sfd import SFDBase 29 | 30 | 31 | class PG2010Query(SFDBase): 32 | """ 33 | Queries the Peek & Graves (2010) correction to the SFD'98 dust reddening map. 34 | """ 35 | 36 | map_name = 'pg2010' 37 | map_name_long = "Peek & Graves (2010)" 38 | poles = ['ngp'] 39 | 40 | def __init__(self, map_dir=None, component='dust'): 41 | """ 42 | Args: 43 | map_dir (Optional[:obj:`str`]): The directory containing the SFD map. 44 | Defaults to :obj:`None`, which means that :obj:`dustmaps` will look in its 45 | default data directory. 46 | component (Optional[:obj:`str`]): :obj:`'dust'` (the default) to load the correction 47 | to E(B-V), or :obj:`'err'` to load the uncertainty in the correction. 48 | """ 49 | 50 | if map_dir is None: 51 | map_dir = os.path.join(data_dir(), 'pg2010') 52 | 53 | if component not in ['dust', 'err']: 54 | raise ValueError('`component` must be either "dust" or "err"') 55 | 56 | base_fname = os.path.join(map_dir, 'PG_{}_4096'.format(component)) 57 | 58 | super(PG2010Query, self).__init__(base_fname) 59 | 60 | def query(self, coords, order=1): 61 | """ 62 | Returns the P&G (2010) correction to the SFD'98 E(B-V) at the specified 63 | location(s) on the sky. If component is 'err', then return the 64 | uncertainty in the correction. 65 | 66 | Args: 67 | coords (:obj:`astropy.coordinates.SkyCoord`): The coordinates to query. 68 | order (Optional[:obj:`int`]): Interpolation order to use. Defaults to ``1``, 69 | for linear interpolation. 70 | 71 | Returns: 72 | A float array containing the P&G (2010) correction (or its 73 | uncertainty) to SFD'98 at every input coordinate. 
The shape 74 | of the output will be the same as the shape of the coordinates 75 | stored by :obj:`coords`. 76 | """ 77 | return super(PG2010Query, self).query(coords, order=order) 78 | 79 | 80 | def fetch(): 81 | """ 82 | Downloads the Peek & Graves (2010) dust map, placing it in 83 | the data directory for :obj:`dustmap`. 84 | """ 85 | doi = '10.7910/DVN/VBSI4A' 86 | 87 | for component in ['dust', 'err']: 88 | requirements = {'filename': 'PG_{}_4096_ngp.fits'.format(component)} 89 | local_fname = os.path.join( 90 | data_dir(), 91 | 'pg2010', 'PG_{}_4096_ngp.fits'.format(component)) 92 | print('Downloading P&G (2010) {} data file to {}'.format( 93 | component, local_fname)) 94 | fetch_utils.dataverse_download_doi( 95 | doi, 96 | local_fname, 97 | file_requirements=requirements) 98 | -------------------------------------------------------------------------------- /dustmaps/planck.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # planck.py 4 | # Reads the Planck Collaboration dust reddening maps. 5 | # 6 | # Copyright (C) 2016 Gregory M. Green 7 | # 8 | # dustmaps is free software: you can redistribute it and/or modify 9 | # it under the terms of either: 10 | # 11 | # - The GNU General Public License as published by the Free Software Foundation, 12 | # either version 2 of the License, or (at your option) any later version, or 13 | # - The 2-Clause BSD License (also known as the Simplified BSD License). 14 | # 15 | # You should have received copies of the GNU General Public License 16 | # and the BSD License along with this program. 17 | # 18 | 19 | from __future__ import print_function, division 20 | 21 | import os 22 | import numpy as np 23 | import healpy as hp 24 | import astropy.io.fits as fits 25 | import astropy.units as units 26 | 27 | from .std_paths import * 28 | from .healpix_map import HEALPixFITSQuery 29 | from . import fetch_utils 30 | from . 
class PlanckQuery(HEALPixFITSQuery):
    """
    Queries the Planck Collaboration (2013) dust map.
    """

    def __init__(self, map_fname=None, component='extragalactic'):
        """
        Args:
            map_fname (Optional[:obj:`str`]): Filename of the Planck map.
                Defaults to ``None``, meaning that the default location is
                used.
            component (Optional[:obj:`str`]): Which measure of reddening to use. There
                are seven valid components. Three denote reddening measures:
                ``'extragalactic'``, ``'tau'`` and ``'radiance'``. Four refer
                to dust properties: ``'temperature'``, ``'beta'``,
                ``'err_temp'`` and ``'err_beta'``. Defaults to
                ``'extragalactic'``.

        Raises:
            ValueError: If ``component`` is not one of the recognized names.
        """
        if map_fname is None:
            map_fname = os.path.join(
                data_dir(),
                'planck',
                'HFI_CompMap_ThermalDustModel_2048_R1.20.fits'
            )

        # Map each accepted (lowercase) alias to the FITS column to read and
        # the scale factor (or astropy unit) applied to the raw values.
        alias_map = {
            'ebv': ('EBV', 1.),
            'extragalactic': ('EBV', 1.),
            'tau': ('TAU353', 1.49e4),
            'tau353': ('TAU353', 1.49e4),
            'tau_353': ('TAU353', 1.49e4),
            'optical depth': ('TAU353', 1.49e4),
            'radiance': ('RADIANCE', 5.4e5),
            'r': ('RADIANCE', 5.4e5),
            'temperature': ('TEMP', units.Kelvin),
            'temp': ('TEMP', units.Kelvin),
            't': ('TEMP', units.Kelvin),
            'sigma_temp': ('ERR_TEMP', units.Kelvin),
            'sigma_t': ('ERR_TEMP', units.Kelvin),
            'err_temp': ('ERR_TEMP', units.Kelvin),
            'err_t': ('ERR_TEMP', units.Kelvin),
            'beta': ('BETA', 1.),
            'b': ('BETA', 1.),
            'sigma_beta': ('ERR_BETA', 1.),
            'sigma_b': ('ERR_BETA', 1.),
            'err_beta': ('ERR_BETA', 1.),
            'err_b': ('ERR_BETA', 1.),
        }

        key = component.lower()
        if key not in alias_map:
            raise ValueError((
                "Invalid `component`: '{}'\n"
                "Valid components for reddening are 'extragalactic', 'tau', "
                "and 'radiance'. Valid components for dust properties are "
                "'temperature', 'err_temp', 'beta' and 'err_beta'."
            ).format(component))
        field, self._scale = alias_map[key]

        try:
            with fits.open(map_fname) as hdulist:
                super(PlanckQuery, self).__init__(
                    hdulist, 'galactic',
                    hdu='COMP-MAP',
                    field=field,
                    dtype='f4',
                    scale=self._scale
                )
        except IOError as error:
            # Point the user at the fetch() instructions before re-raising
            print(dustexceptions.data_missing_message('planck',
                                                      'Planck Collaboration'))
            raise error

    def query(self, coords, **kwargs):
        """
        Returns E(B-V) (or a different Planck dust inference, depending on how
        the class was initialized) at the specified location(s) on the sky.

        Args:
            coords (:obj:`astropy.coordinates.SkyCoord`): The coordinates to
                query.

        Returns:
            A float array of the selected Planck component, at the given
            coordinates. The shape of the output is the same as the shape of the
            input coordinate array, ``coords``. If extragalactic E(B-V), tau_353
            or radiance was chosen, then the output has units of magnitudes of
            E(B-V). If the selected Planck component is temperature (or
            temperature error), then an :obj:`astropy.Quantity` is returned,
            with units of Kelvin. If beta (or beta error) was chosen, then the
            output is unitless.
        """
        return super(PlanckQuery, self).query(coords, **kwargs)


class PlanckGNILCQuery(HEALPixFITSQuery):
    """
    Queries the Planck Collaboration (2016) GNILC dust map.
    """

    def __init__(self, map_fname=None, load_errors=False):
        """
        Args:
            map_fname (Optional[:obj:`str`]): Filename of the Planck map.
                Defaults to ``None``, meaning that the default location is
                used.
            load_errors (Optional[:obj:`str`]): If ``True``, then the error
                estimates will be loaded as well, and returned with any query.
                If ``False`` (the default), then queries will only return the
                reddening estimate, without any error estimate.
        """
        if load_errors:
            self._has_errors = True
            # NOTE(review): with field=None the full table row is read and
            # cast to this two-field dtype; this relies on the FITS columns
            # being ordered (value, error) -- confirm against the GNILC file.
            field = None
            dtype = [('EBV', 'f4'), ('EBV_err', 'f4')]
        else:
            self._has_errors = False
            field = 'TAU353'
            dtype = 'f4'

        if map_fname is None:
            map_fname = os.path.join(
                data_dir(),
                'planck',
                'COM_CompMap_Dust-GNILC-Model-Opacity_2048_R2.01.fits'
            )

        try:
            with fits.open(map_fname) as hdulist:
                super(PlanckGNILCQuery, self).__init__(
                    hdulist, 'galactic',
                    hdu=1,
                    field=field,
                    dtype=dtype,
                    scale=1.49e4  # converts tau_353 to E(B-V), as for the 2013 map
                )
        except IOError as error:
            # Point the user at the fetch() instructions before re-raising
            print(dustexceptions.data_missing_message('planck',
                                                      'Planck GNILC'))
            raise error

    def has_errors(self):
        """
        Returns ``True`` if the error estimates have been loaded.
        """
        return self._has_errors

    def query(self, coords, **kwargs):
        """
        Returns E(B-V) at the specified location(s) on the sky.

        Args:
            coords (:obj:`astropy.coordinates.SkyCoord`): The coordinates to
                query.

        Returns:
            If the error estimates have been loaded, then a structured array
            containing ``'EBV'`` and ``'EBV_err'`` is returned. Otherwise,
            returns a float array of E(B-V), at the given coordinates. The
            shape of the output is the same as the shape of the input
            coordinate array, ``coords``.
        """
        return super(PlanckGNILCQuery, self).query(coords, **kwargs)


def fetch(which='2013'):
    """
    Downloads the Planck dust maps, placing them in the default ``dustmaps``
    directory. There are two different Planck dust maps that can be
    downloaded: the Planck Collaboration (2013) map and the "GNILC" (Planck
    Collaboration 2016) map.

    Args:
        which (Optional[:obj:`str`]): The name of the dust map to download.
            Should be either ``2013`` (the default) or ``GNILC``.

    Raises:
        ValueError: If ``which`` is not a recognized map name.
    """
    planck_maps = {
        '2013': {
            'url': 'http://pla.esac.esa.int/pla/aio/product-action?MAP.MAP_ID=HFI_CompMap_ThermalDustModel_2048_R1.20.fits',
            'md5': '8d804f4e64e709f476a63f0dfed1fd11',
            'fname': 'HFI_CompMap_ThermalDustModel_2048_R1.20.fits'
        },
        'GNILC': {
            'url': 'http://pla.esac.esa.int/pla/aio/product-action?MAP.MAP_ID=COM_CompMap_Dust-GNILC-Model-Opacity_2048_R2.01.fits',
            'md5': 'fc385c2ee5e82edf039cbca6e82d6872',
            'fname': 'COM_CompMap_Dust-GNILC-Model-Opacity_2048_R2.01.fits'
        }
    }
    if which not in planck_maps:
        raise ValueError(
            'Unknown map: "{}". Must be one of {}.'.format(
                which, tuple(planck_maps.keys())
            )
        )
    props = planck_maps[which]
    fname = os.path.join(data_dir(), 'planck', props['fname'])
    fetch_utils.download_and_verify(props['url'], props['md5'], fname=fname)


def main():
    # Simple smoke test: query three points on the sky
    from astropy.coordinates import SkyCoord
    q = PlanckQuery()
    c = SkyCoord([0., 180., 0.], [0., 0., 90.], frame='galactic', unit='deg')
    print(q(c))


if __name__ == '__main__':
    main()
14 | # 15 | # You should have received copies of the GNU General Public License 16 | # and the BSD License along with this program. 17 | # 18 | 19 | from __future__ import print_function, division 20 | 21 | import os 22 | import numpy as np 23 | 24 | import astropy.wcs as wcs 25 | import astropy.io.fits as fits 26 | from scipy.ndimage import map_coordinates 27 | 28 | from .std_paths import * 29 | from .map_base import DustMap, WebDustMap, ensure_flat_galactic 30 | from . import fetch_utils 31 | from . import dustexceptions 32 | 33 | 34 | class SFDBase(DustMap): 35 | """ 36 | Queries maps stored in the same format as Schlegel, Finkbeiner & Davis (1998). 37 | """ 38 | 39 | map_name = '' 40 | map_name_long = '' 41 | poles = ['ngp', 'sgp'] 42 | 43 | def __init__(self, base_fname): 44 | """ 45 | Args: 46 | base_fname (str): The map should be stored in two FITS files, named 47 | ``base_fname + '_' + X + '.fits'``, where ``X`` is ``'ngp'`` and 48 | ``'sgp'``. 49 | """ 50 | self._data = {} 51 | 52 | for pole in self.poles: 53 | fname = '{}_{}.fits'.format(base_fname, pole) 54 | try: 55 | with fits.open(fname) as hdulist: 56 | self._data[pole] = [hdulist[0].data, wcs.WCS(hdulist[0].header)] 57 | except IOError as error: 58 | print(dustexceptions.data_missing_message(self.map_name, 59 | self.map_name_long)) 60 | raise error 61 | 62 | @ensure_flat_galactic 63 | def query(self, coords, order=1): 64 | """ 65 | Returns the map value at the specified location(s) on the sky. 66 | 67 | Args: 68 | coords (`astropy.coordinates.SkyCoord`): The coordinates to query. 69 | order (Optional[int]): Interpolation order to use. Defaults to `1`, 70 | for linear interpolation. 71 | 72 | Returns: 73 | A float array containing the map value at every input coordinate. 74 | The shape of the output will be the same as the shape of the 75 | coordinates stored by `coords`. 
76 | """ 77 | out = np.full(len(coords.l.deg), np.nan, dtype='f4') 78 | 79 | for pole in self.poles: 80 | m = (coords.b.deg >= 0) if pole == 'ngp' else (coords.b.deg < 0) 81 | 82 | if np.any(m): 83 | data, w = self._data[pole] 84 | x, y = w.wcs_world2pix(coords.l.deg[m], coords.b.deg[m], 0) 85 | out[m] = map_coordinates(data, [y, x], order=order, mode='nearest') 86 | 87 | return out 88 | 89 | 90 | class SFDQuery(SFDBase): 91 | """ 92 | Queries the Schlegel, Finkbeiner & Davis (1998) dust reddening map. 93 | """ 94 | 95 | map_name = 'sfd' 96 | map_name_long = "SFD'98" 97 | 98 | def __init__(self, map_dir=None): 99 | """ 100 | Args: 101 | map_dir (Optional[str]): The directory containing the SFD map. 102 | Defaults to `None`, which means that `dustmaps` will look in its 103 | default data directory. 104 | """ 105 | 106 | if map_dir is None: 107 | map_dir = os.path.join(data_dir(), 'sfd') 108 | 109 | base_fname = os.path.join(map_dir, 'SFD_dust_4096') 110 | 111 | super(SFDQuery, self).__init__(base_fname) 112 | 113 | def query(self, coords, order=1): 114 | """ 115 | Returns E(B-V) at the specified location(s) on the sky. See Table 6 of 116 | Schlafly & Finkbeiner (2011) for instructions on how to convert this 117 | quantity to extinction in various passbands. 118 | 119 | Args: 120 | coords (`astropy.coordinates.SkyCoord`): The coordinates to query. 121 | order (Optional[int]): Interpolation order to use. Defaults to `1`, 122 | for linear interpolation. 123 | 124 | Returns: 125 | A float array containing the SFD E(B-V) at every input coordinate. 126 | The shape of the output will be the same as the shape of the 127 | coordinates stored by `coords`. 128 | """ 129 | return super(SFDQuery, self).query(coords, order=order) 130 | 131 | 132 | class SFDWebQuery(WebDustMap): 133 | """ 134 | Remote query over the web for the Schlegel, Finkbeiner & Davis (1998) dust 135 | map. 
136 | 137 | This query object does not require a local version of the data, but rather 138 | an internet connection to contact the web API. The query functions have the 139 | same inputs and outputs as their counterparts in ``SFDQuery``. 140 | """ 141 | 142 | def __init__(self, api_url=None): 143 | super(SFDWebQuery, self).__init__( 144 | api_url=api_url, 145 | map_name='sfd') 146 | 147 | 148 | def fetch(): 149 | """ 150 | Downloads the Schlegel, Finkbeiner & Davis (1998) dust map, placing it in 151 | the data directory for `dustmap`. 152 | """ 153 | doi = '10.7910/DVN/EWCNL5' 154 | 155 | for pole in ['ngp', 'sgp']: 156 | requirements = {'filename': 'SFD_dust_4096_{}.fits'.format(pole)} 157 | local_fname = os.path.join( 158 | data_dir(), 159 | 'sfd', 'SFD_dust_4096_{}.fits'.format(pole)) 160 | print('Downloading SFD data file to {}'.format(local_fname)) 161 | fetch_utils.dataverse_download_doi( 162 | doi, 163 | local_fname, 164 | file_requirements=requirements) 165 | -------------------------------------------------------------------------------- /dustmaps/std_paths.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # std_paths.py 4 | # Defines a set of paths used by scripts in the dustmaps module. 5 | # 6 | # Copyright (C) 2016 Gregory M. Green 7 | # 8 | # dustmaps is free software: you can redistribute it and/or modify 9 | # it under the terms of either: 10 | # 11 | # - The GNU General Public License as published by the Free Software Foundation, 12 | # either version 2 of the License, or (at your option) any later version, or 13 | # - The 2-Clause BSD License (also known as the Simplified BSD License). 14 | # 15 | # You should have received copies of the GNU General Public License 16 | # and the BSD License along with this program. 
17 | # 18 | 19 | from __future__ import print_function, division 20 | 21 | import os 22 | from .config import config 23 | 24 | 25 | script_dir = os.path.dirname(os.path.realpath(__file__)) 26 | data_dir_default = os.path.abspath(os.path.join(script_dir, 'data')) 27 | test_dir = os.path.abspath(os.path.join(script_dir, 'tests')) 28 | output_dir_default = os.path.abspath(os.path.join(script_dir, 'output')) 29 | 30 | 31 | def fix_path(path): 32 | """ 33 | Returns an absolute path, expanding both '~' (to the user's home 34 | directory) and other environmental variables in the path. 35 | """ 36 | return os.path.abspath(os.path.expandvars(os.path.expanduser(path))) 37 | 38 | 39 | def data_dir(): 40 | """ 41 | Returns the directory used to store large data files (e.g., dust maps). 42 | """ 43 | dirname = config.get('data_dir', data_dir_default) 44 | return fix_path(dirname) 45 | 46 | 47 | def output_dir(): 48 | """ 49 | Returns a directory that can be used to store temporary output. 50 | """ 51 | dirname = config.get('output_dir', output_dir_default) 52 | return fix_path(dirname) 53 | -------------------------------------------------------------------------------- /dustmaps/tests/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # __init__.py 4 | # Makes the tests in the package "dustmaps" discoverable. 5 | # 6 | # Copyright (C) 2016 Gregory M. Green 7 | # 8 | # dustmaps is free software: you can redistribute it and/or modify 9 | # it under the terms of either: 10 | # 11 | # - The GNU General Public License as published by the Free Software Foundation, 12 | # either version 2 of the License, or (at your option) any later version, or 13 | # - The 2-Clause BSD License (also known as the Simplified BSD License). 14 | # 15 | # You should have received copies of the GNU General Public License 16 | # and the BSD License along with this program. 
17 | # 18 | -------------------------------------------------------------------------------- /dustmaps/tests/ned_output.json: -------------------------------------------------------------------------------- 1 | [{ 2 | "gal": [127.61792509, -62.79381769], 3 | "equ": ["01h00m00.00000s", "+00d00m00.0000s"], 4 | "ecl": [13.81161814, -5.90920279], 5 | "sf11_Av": 0.078 6 | }, { 7 | "gal": [173.50436382, -7.59575416], 8 | "equ": ["05h00m00.00000s", "+30d00m00.0000s"], 9 | "ecl": [76.94158051, 7.23815903], 10 | "sf11_Av": 1.327 11 | }, { 12 | "gal": [284.88378965, 46.06141592], 13 | "equ": ["12h00m00.00000s", "-15d00m00.0000s"], 14 | "ecl": [186.08384878, -13.73678386], 15 | "sf11_Av": 0.161 16 | }, { 17 | "gal": [327.87931013, 6.01653677], 18 | "equ": ["15h30m25.00000s", "-49d00m00.0000s"], 19 | "ecl": [242.89389815, -29.01943911], 20 | "sf11_Av": 1.247 21 | }, { 22 | "gal": [121.02405449, 23.19009091], 23 | "equ": ["23h15m05.00000s", "+85d42m00.0000s"], 24 | "ecl": [79.13765242, 67.02971473], 25 | "sf11_Av": 0.826 26 | }, { 27 | "gal": [0.0, 0.0], 28 | "equ": ["17h45m37.22141s", "-28d56m10.2289s"], 29 | "ecl": [266.83960714, -5.53631336], 30 | "sf11_Av": 273.371 31 | }, { 32 | "gal": [75.00000000, -89.00000000], 33 | "equ": ["00h48m07.27113s", "-26d27m20.7446s"], 34 | "ecl": [359.60684221, -28.87958340], 35 | "sf11_Av": 0.040 36 | }, { 37 | "gal": [130.00000000, 10.00000000], 38 | "equ": ["02h21m50.50134s", "+71d37m12.4236s"], 39 | "ecl": [64.78083766, 52.93136114], 40 | "sf11_Av": 2.679 41 | }, { 42 | "gal": [305.00000000, 37.00000000], 43 | "equ": ["12h58m46.69252s", "-25d50m32.3042s"], 44 | "ecl": [203.73862702, -18.00587784], 45 | "sf11_Av": 0.221 46 | }] 47 | -------------------------------------------------------------------------------- /dustmaps/tests/test_config.json: -------------------------------------------------------------------------------- 1 | {"data_dir": "/my/very/special/path"} 
-------------------------------------------------------------------------------- /dustmaps/tests/test_config.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # test_config.py 4 | # Test code related to the configuration submodule. 5 | # 6 | # Copyright (C) 2016 Gregory M. Green 7 | # 8 | # dustmaps is free software: you can redistribute it and/or modify 9 | # it under the terms of either: 10 | # 11 | # - The GNU General Public License as published by the Free Software Foundation, 12 | # either version 2 of the License, or (at your option) any later version, or 13 | # - The 2-Clause BSD License (also known as the Simplified BSD License). 14 | # 15 | # You should have received copies of the GNU General Public License 16 | # and the BSD License along with this program. 17 | # 18 | 19 | from __future__ import print_function, division 20 | 21 | import unittest 22 | 23 | import os 24 | import sys 25 | 26 | 27 | class TestConfig(unittest.TestCase): 28 | def test_config_override(self): 29 | """ 30 | Test overriding default config location. 31 | """ 32 | # Set the environment variable DUSTMAPS_CONFIG_FNAME 33 | test_dir = os.path.dirname(os.path.realpath(__file__)) 34 | os.environ['DUSTMAPS_CONFIG_FNAME'] = os.path.join(test_dir, 'test_config.json') 35 | 36 | # Reset the dustmaps.config module 37 | if 'dustmaps.config' in sys.modules: 38 | print('Unloading config module ...') 39 | del sys.modules['dustmaps.config'] 40 | from ..config import config 41 | 42 | # Check that the data directory has been loaded from the test config file 43 | self.assertEqual(config['data_dir'], '/my/very/special/path') 44 | 45 | # Reset the dustmaps.config module, in case other tests need it 46 | del os.environ['DUSTMAPS_CONFIG_FNAME'] 47 | del sys.modules['dustmaps.config'] 48 | from ..config import config 49 | 50 | def test_config_with_envvar(self): 51 | """ 52 | Test expansion of environmental variables in directory paths.' 
53 | """ 54 | # Set the environment variable DUSTMAPS_CONFIG_FNAME 55 | test_dir = os.path.dirname(os.path.realpath(__file__)) 56 | os.environ['DUSTMAPS_CONFIG_FNAME'] = os.path.join(test_dir, 'test_config_with_envvar.json') 57 | 58 | # Set an environmental variable in the config path 59 | os.environ['VARIABLE_TO_BE_EXPANDED'] = 'expanded_variable' 60 | 61 | # Reset the dustmaps.config module 62 | if 'dustmaps.config' in sys.modules: 63 | print('Unloading config module ...') 64 | del sys.modules['dustmaps.config'] 65 | print('Unloading std_paths module ...') 66 | del sys.modules['dustmaps.std_paths'] 67 | from ..config import config 68 | from ..std_paths import data_dir 69 | 70 | # Check that the data directory has been loaded from the test config file 71 | self.assertEqual(data_dir(), '/path/with/expanded_variable') 72 | 73 | # Reset the dustmaps.config module, in case other tests need it 74 | del os.environ['DUSTMAPS_CONFIG_FNAME'] 75 | del sys.modules['dustmaps.config'] 76 | del sys.modules['dustmaps.std_paths'] 77 | from ..config import config 78 | from ..std_paths import data_dir 79 | 80 | 81 | if __name__ == '__main__': 82 | unittest.main() 83 | -------------------------------------------------------------------------------- /dustmaps/tests/test_config_with_envvar.json: -------------------------------------------------------------------------------- 1 | {"data_dir": "/path/with/${VARIABLE_TO_BE_EXPANDED}"} 2 | -------------------------------------------------------------------------------- /dustmaps/tests/test_edenhofer2023.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # test_edenhofer2023.py 4 | # Tests the query code for the Edenhofer2023 dust map (Edenhofer et al. 2023). 5 | # 6 | # Copyright (C) 2023 Gordian Edenhofer and Gregory M. 
from __future__ import division, print_function

import sys
import time
import unittest
from functools import partial

import numpy as np
from astropy.coordinates import SkyCoord

from .. import edenhofer2023

# Log to stderr, so that messages are visible even when a test runner
# captures stdout.
log = partial(print, file=sys.stderr)


def random_coords(rng, n_dim, min_r=40., max_r=4e+3, n_max_elements=7):
    """
    Draw a random array of Galactic sky coordinates with distances.

    Args:
        rng (:obj:`numpy.random.Generator`): Random number generator.
        n_dim (int): Number of axes of the returned coordinate array.
        min_r (float): Minimum distance, in pc.
        max_r (float): Maximum distance, in pc.
        n_max_elements (int): Exclusive upper bound on the size of each axis.

    Returns:
        :obj:`astropy.coordinates.SkyCoord` of shape ``shp`` (randomly drawn
        with ``n_dim`` axes), in the Galactic frame, with distances in pc.
    """
    shp = rng.integers(1, n_max_elements, size=(n_dim,))
    l = rng.uniform(-180., +180., size=shp)
    b = rng.uniform(-90., +90., size=shp)
    dist = rng.uniform(min_r, max_r, size=shp)
    return SkyCoord(
        l=l, b=b, distance=dist, unit=('deg','deg','pc'), frame='galactic'
    )


class TestEdenhofer2023(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        # Instantiate the query objects once per test class, since loading
        # the map data is expensive.
        msg = 'Loading all possible combinations of the data for {}...'
        log(msg.format(cls.__name__))

        msg_timed = 'Loaded {} {} in {:.5f} s'
        fmt_timed = partial(msg_timed.format, cls.__name__)

        t0 = time.time()
        cls._query_wo_smpls = edenhofer2023.Edenhofer2023Query(
            load_samples=False
        )
        log(fmt_timed("density w/o samples", time.time() - t0))

        t0 = time.time()
        cls._query_w_smpls = edenhofer2023.Edenhofer2023Query(
            load_samples=True
        )
        log(fmt_timed("density w/ samples", time.time() - t0))

        t0 = time.time()
        cls._query_wo_smpls_int = edenhofer2023.Edenhofer2023Query(
            load_samples=False, integrated=True
        )
        log(fmt_timed("integrated w/o samples", time.time() - t0))

        # Do not test integrated samples b/c it is too memory intensive :(
        # t0 = time.time()
        # cls._query_w_smpls_int = edenhofer2023.Edenhofer2023Query(
        #     load_samples=True, integrated=True
        # )
        # log(fmt_timed("integrated w/ samples", time.time() - t0))

    def test_samples_no_samples_consistency(self):
        """
        Test that the mean/std queried from the sample-based map agrees
        (within tolerance) with the map loaded without samples, and that the
        integrated map exceeds the differential one.
        """
        for seed in (42, 314, 271828):
            rng = np.random.default_rng(seed)
            for mode in ("mean", "std"):
                n_dim = rng.integers(1, 5)
                coords = random_coords(rng, n_dim, min_r=59.0, max_r=1.3e+3)
                r1 = self._query_wo_smpls(coords, mode=mode)
                assert r1.shape == coords.shape
                r2 = self._query_w_smpls(coords, mode=mode)
                assert r2.shape == coords.shape
                # It makes a difference whether we interpolate the mean or the
                # samples. Hence, allow for some "significant" tolerance.
                np.testing.assert_allclose(r1, r2, atol=2e-4, rtol=0.1)

                if mode == "mean":
                    r1i = self._query_wo_smpls_int(coords, mode=mode)
                    assert r1i.shape == coords.shape
                    np.testing.assert_array_equal(
                        (r1i > r1)[~np.isnan(r1)], True
                    )
                    # The following would be too expensive memory-wise :(
                    # r2i = self._query_w_smpls_int(coords, mode=mode)
                    # assert r2i.shape == coords.shape
                    # np.testing.assert_allclose(r1i, r2i, atol=1e-3, rtol=1e-3)

    def test_samples_shape(self):
        """
        Test that 'samples' mode appends one trailing axis (the sample axis)
        to the input coordinate shape.
        """
        mode = "samples"
        for seed in (42, 314, 271828):
            rng = np.random.default_rng(seed)
            n_dim = rng.integers(1, 5)
            coords = random_coords(rng, n_dim)
            r_density = self._query_w_smpls(coords, mode=mode)
            assert r_density.shape[:-1] == coords.shape
            # The following would be too expensive memory-wise :(
            # r_int = self._query_w_smpls_int(coords, mode=mode)
            # assert r_int.shape[:-1] == coords.shape
            # np.testing.assert_equal(r_int > r_density, True)

    def test_random_sample_shape(self):
        """
        Test that 'random_sample' mode preserves the input coordinate shape.
        """
        mode = "random_sample"
        for seed in (42, 314, 271828):
            rng = np.random.default_rng(seed)
            n_dim = rng.integers(1, 5)
            coords = random_coords(rng, n_dim)
            r_density = self._query_w_smpls(coords, mode=mode)
            assert r_density.shape == coords.shape

    def test_monotonicty_of_integrated(self):
        """
        Test that integrated extinction strictly increases with distance
        (queried at successive doublings of the distance).
        """
        mode = "mean"
        for seed in (42, 314, 271828):
            rng = np.random.default_rng(seed)
            n_dim = rng.integers(1, 4)
            coords = random_coords(rng, n_dim, min_r=65.0, max_r=75.0)
            for query in (self._query_wo_smpls_int,):
                r1 = query(coords, mode=mode)
                for _ in range(4):
                    coords = SkyCoord(
                        l=coords.l,
                        b=coords.b,
                        distance=2. * coords.distance,
                        frame="galactic"
                    )
                    r2 = query(coords, mode=mode)
                    np.testing.assert_equal((r2 > r1)[~np.isnan(r1)], True)
                    r1 = r2


if __name__ == '__main__':
    unittest.main()
class TestIPHAS(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        # PEP 8: the first argument of a classmethod is "cls", not "self".
        t0 = time.time()

        # Set up IPHAS query object
        cls._iphas = iphas.IPHASQuery()

        t1 = time.time()
        print('Loaded IPHAS test data in {:.5f} s.'.format(t1-t0))

    def test_bounds(self):
        """
        Test that out-of-bounds coordinates return NaN reddening, and that
        in-bounds coordinates do not return NaN reddening.
        """

        for mode in (['random_sample', 'random_sample_per_pix',
                      'median', 'samples', 'mean']):
            # Draw random coordinates on the sphere
            n_pix = 10000
            u, v = np.random.random((2,n_pix))
            l = 360. * u
            b = 90. - np.degrees(np.arccos(2.*v - 1.))
            c = coords.SkyCoord(l, b, frame='galactic', unit='deg')

            A_calc = self._iphas(c, mode=mode)

            # Leave a margin around the map edges, where the behavior of the
            # map is not tested.
            in_bounds = (l > 32.) & (l < 213.) & (b < 4.5) & (b > -4.5)
            out_of_bounds = (l < 28.) | (l > 217.) | (b > 7.) | (b < -7.)

            n_nan_in_bounds = np.sum(np.isnan(A_calc[in_bounds]))
            n_finite_out_of_bounds = np.sum(np.isfinite(A_calc[out_of_bounds]))

            self.assertTrue(n_nan_in_bounds == 0)
            self.assertTrue(n_finite_out_of_bounds == 0)

    def test_shape(self):
        """
        Test that the output shapes are as expected with input coordinate arrays
        of different shapes.
        """

        for mode in (['random_sample', 'random_sample_per_pix',
                      'median', 'mean', 'samples']):
            for include_dist in [False, True]:
                for reps in range(5):
                    # Draw random coordinates, with different shapes
                    n_dim = np.random.randint(1,4)
                    shape = np.random.randint(1,7, size=(n_dim,))

                    ra = (-180. + 360.*np.random.random(shape)) * units.deg
                    dec = (-90. + 180. * np.random.random(shape)) * units.deg
                    if include_dist:
                        dist = 5. * np.random.random(shape) * units.kpc
                    else:
                        dist = None
                    c = coords.SkyCoord(ra, dec, distance=dist, frame='icrs')

                    A_calc = self._iphas(c, mode=mode)

                    np.testing.assert_equal(A_calc.shape[:n_dim], shape)

                    # 'samples' adds a sample axis; a missing distance adds a
                    # distance-bin axis.
                    extra_dims = 0
                    if mode == 'samples':
                        extra_dims += 1
                    if not include_dist:
                        extra_dims += 1

                    self.assertEqual(len(A_calc.shape), n_dim+extra_dims)


if __name__ == '__main__':
    unittest.main()
class TestMarshall(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        # PEP 8: the first argument of a classmethod is "cls", not "self".
        t0 = time.time()

        # Set up Marshall query object
        cls._marshall = marshall.MarshallQuery()

        t1 = time.time()
        print('Loaded Marshall+(2006) test data in {:.5f} s.'.format(t1-t0))

    def test_bounds(self):
        """
        Test that out-of-bounds coordinates return NaN reddening, and that
        in-bounds coordinates do not return NaN reddening.
        """

        for return_sigma in [False, True]:
            # Draw random coordinates on the sphere
            n_pix = 10000
            u, v = np.random.random((2,n_pix))
            l = 360. * u - 180.
            b = 90. - np.degrees(np.arccos(2.*v - 1.))
            d = 5. * np.random.random(l.shape)
            c = coords.SkyCoord(l*units.deg, b*units.deg,
                                distance=d*units.kpc, frame='galactic')

            res = self._marshall(c, return_sigma=return_sigma)

            if return_sigma:
                self.assertTrue(len(res) == 2)
                A, sigma = res
                np.testing.assert_equal(A.shape, sigma.shape)
            else:
                self.assertFalse(isinstance(res, tuple))
                A = res

            # Leave a margin around the map edges, where the behavior of the
            # map is not tested.
            in_bounds = (l > -99.) & (l < 99.) & (b < 9.5) & (b > -9.5)
            out_of_bounds = (l < -101.) | (l > 101.) | (b > 10.5) | (b < -10.5)

            n_nan_in_bounds = np.sum(np.isnan(A[in_bounds]))
            n_finite_out_of_bounds = np.sum(np.isfinite(A[out_of_bounds]))

            self.assertTrue(n_nan_in_bounds == 0)
            self.assertTrue(n_finite_out_of_bounds == 0)

    def test_shape(self):
        """
        Test that the output shapes are as expected with input coordinate arrays
        of different shapes.
        """

        for return_sigma in [False, True]:
            for include_dist in [False, True]:
                for reps in range(5):
                    # Draw random coordinates, with different shapes
                    n_dim = np.random.randint(1,4)
                    shape = np.random.randint(1,7, size=(n_dim,))

                    ra = (-180. + 360.*np.random.random(shape)) * units.deg
                    dec = (-90. + 180. * np.random.random(shape)) * units.deg
                    if include_dist:
                        dist = 5. * np.random.random(shape) * units.kpc
                    else:
                        dist = None
                    c = coords.SkyCoord(ra, dec, distance=dist, frame='icrs')

                    # The Marshall map is 3D, so querying without a distance
                    # must raise.
                    if not include_dist:
                        self.assertRaises(ValueError, self._marshall,
                                          c, return_sigma=return_sigma)
                        continue

                    res = self._marshall(c, return_sigma=return_sigma)

                    if return_sigma:
                        self.assertTrue(len(res) == 2)
                        A, sigma = res
                        np.testing.assert_equal(A.shape, sigma.shape)
                    else:
                        self.assertFalse(isinstance(res, tuple))
                        A = res

                    np.testing.assert_equal(A.shape, shape)


if __name__ == '__main__':
    unittest.main()
class TestPlanck(unittest.TestCase):
    """
    Tests for the Planck Collaboration dust maps.

    Subclasses override ``component`` (and, for GNILC, ``load_errors``) so
    that every map component is exercised with the same battery of tests.
    """

    component = 'extragalactic'
    load_errors = False

    @classmethod
    def setUpClass(cls):
        # PEP 8: the first argument of a classmethod is "cls", not "self".
        print('Loading Planck {} dust map ...'.format(cls.component))
        t0 = time.time()

        # Set up Planck query object
        if cls.component == 'GNILC':
            cls._planck = planck.PlanckGNILCQuery(load_errors=cls.load_errors)
        else:
            cls._planck = planck.PlanckQuery(component=cls.component)

        t1 = time.time()
        print('Loaded Planck test data in {:.5f} s.'.format(t1-t0))

    def test_shape(self):
        """
        Test that the output shapes are as expected with input coordinate arrays
        of different shapes.
        """

        for reps in range(5):
            # Draw random coordinates, with different shapes
            n_dim = np.random.randint(1,4)
            shape = np.random.randint(1,7, size=(n_dim,))

            ra = np.random.uniform(-180., 180., size=shape) * units.deg
            dec = np.random.uniform(-90., 90., size=shape) * units.deg
            c = coords.SkyCoord(ra, dec, frame='icrs')

            E = self._planck(c)

            np.testing.assert_equal(E.shape, shape)

    def test_frame(self):
        """
        Test that the results are independent of the coordinate frame.
        """
        frames = ('icrs', 'galactic', 'fk5', 'fk4', 'barycentrictrueecliptic')
        shape = (100,)

        ra = np.random.uniform(-180., 180., size=shape) * units.deg
        dec = np.random.uniform(-90., 90., size=shape) * units.deg
        c = coords.SkyCoord(ra, dec, frame='icrs')
        E0 = self._planck(c)

        for fr in frames:
            cc = c.transform_to(fr)
            E = self._planck(cc)
            np.testing.assert_equal(E, E0)

        # Repeat the test with Cartesian input coordinates
        u,v,w = np.random.uniform(0., 5., size=(3,100))
        try:
            c = coords.SkyCoord(
                u=u, v=v, w=w,
                unit='kpc',
                representation_type='cartesian',
                frame='galactic'
            )
        except ValueError as err:
            # Astropy version < 3.0 used the keyword "representation"
            c = coords.SkyCoord(
                u=u, v=v, w=w,
                unit='kpc',
                representation='cartesian',
                frame='galactic'
            )
        E0 = self._planck(c)

        for fr in frames:
            cc = c.transform_to(fr)
            E = self._planck(cc)
            np.testing.assert_equal(E, E0)


class TestPlanckTau(TestPlanck):
    component = 'tau'


class TestPlanckRadiance(TestPlanck):
    component = 'radiance'


class TestPlanckTemperature(TestPlanck):
    component = 'temperature'


class TestPlanckBeta(TestPlanck):
    component = 'beta'


class TestPlanckTemperatureErr(TestPlanck):
    component = 'err_temp'


class TestPlanckBetaErr(TestPlanck):
    component = 'err_beta'


# NOTE: The two GNILC classes below were previously both (mis)named
# "TestPlanckBetaErr", which shadowed the err_beta tests above and each
# other, so that only the last definition ever ran. Unique names restore
# all four test cases (err_beta, GNILC, GNILC with errors).
class TestPlanckGNILC(TestPlanck):
    component = 'GNILC'


class TestPlanckGNILCErr(TestPlanck):
    component = 'GNILC'
    load_errors = True


if __name__ == '__main__':
    unittest.main()
/dustmaps/tests/test_serializers.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # test_serializers.py 4 | # Test code that serializes and deserializes numpy and Astropy objects. 5 | # 6 | # Copyright (C) 2020 Gregory M. Green 7 | # 8 | # dustmaps is free software: you can redistribute it and/or modify 9 | # it under the terms of either: 10 | # 11 | # - The GNU General Public License as published by the Free Software Foundation, 12 | # either version 2 of the License, or (at your option) any later version, or 13 | # - The 2-Clause BSD License (also known as the Simplified BSD License). 14 | # 15 | # You should have received copies of the GNU General Public License 16 | # and the BSD License along with this program. 17 | # 18 | 19 | from __future__ import print_function, division 20 | 21 | import unittest 22 | 23 | import numpy as np 24 | import json 25 | import astropy.units as units 26 | from astropy.coordinates import SkyCoord 27 | 28 | from .. import json_serializers 29 | 30 | 31 | class TestSerializers(unittest.TestCase): 32 | def test_numpy_readable(self): 33 | """ 34 | Test serializing/deserializing an array using human-readable serializer. 35 | """ 36 | x = np.random.random(size=(15,7)) 37 | o = json_serializers.serialize_ndarray_readable(x) 38 | y = json_serializers.deserialize_ndarray(o) 39 | np.testing.assert_allclose(x, y, atol=1.e-5, rtol=1.e-5) 40 | 41 | def test_numpy_b64(self): 42 | """ 43 | Test serializing/deserializing an array using b64 serializer. 44 | """ 45 | x = np.random.random(size=(15,7)) 46 | o = json_serializers.serialize_ndarray_b64(x) 47 | y = json_serializers.deserialize_ndarray(o) 48 | np.testing.assert_allclose(x, y, atol=1.e-5, rtol=1.e-5) 49 | 50 | def test_numpy_npy(self): 51 | """ 52 | Test serializing/deserializing an array using npy serializer. 
53 | """ 54 | x = np.random.random(size=(15,7)) 55 | d = json_serializers.serialize_ndarray_npy(x) 56 | y = json_serializers.deserialize_ndarray(d) 57 | np.testing.assert_allclose(x, y, atol=1.e-5, rtol=1.e-5) 58 | 59 | def test_skycoord(self): 60 | """ 61 | Test serializing/deserializing SkyCoord objects. 62 | """ 63 | lon = np.random.uniform(0., 360., 23) * units.deg 64 | lat = np.random.uniform(-90., 90., lon.size) * units.deg 65 | d = np.random.uniform(0.1, 10., lon.size) * units.kpc 66 | 67 | decoder = json_serializers.MultiJSONDecoder 68 | 69 | for mode in ('b64', 'readable', 'npy'): 70 | for frame in ('galactic', 'icrs'): 71 | encoder = json_serializers.get_encoder(ndarray_mode=mode) 72 | 73 | # Without distance 74 | c = SkyCoord(lon, lat, frame=frame) 75 | s = json.dumps(c, cls=encoder) 76 | c_dec = json.loads(s, cls=decoder) 77 | sep = c.separation(c_dec).to('rad').value 78 | np.testing.assert_allclose(sep, np.zeros_like(sep), atol=1.e-7, rtol=0.) 79 | 80 | # With distance 81 | c = SkyCoord(lon, lat, distance=d, frame=frame) 82 | s = json.dumps(c, cls=encoder) 83 | c_dec = json.loads(s, cls=decoder) 84 | sep = c.separation_3d(c_dec).to('kpc').value 85 | np.testing.assert_allclose(sep, np.zeros_like(sep), atol=1.e-7, rtol=0.) 86 | 87 | 88 | if __name__ == '__main__': 89 | unittest.main() 90 | 91 | -------------------------------------------------------------------------------- /dustmaps/tests/test_sfd.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # test_sfd.py 4 | # Test query code for Schlegel, Finkbeiner & Davis (1998) dust reddening map. 5 | # 6 | # Copyright (C) 2016 Gregory M. 
Green 7 | # 8 | # dustmaps is free software: you can redistribute it and/or modify 9 | # it under the terms of either: 10 | # 11 | # - The GNU General Public License as published by the Free Software Foundation, 12 | # either version 2 of the License, or (at your option) any later version, or 13 | # - The 2-Clause BSD License (also known as the Simplified BSD License). 14 | # 15 | # You should have received copies of the GNU General Public License 16 | # and the BSD License along with this program. 17 | # 18 | 19 | from __future__ import print_function, division 20 | 21 | import unittest 22 | 23 | import numpy as np 24 | import astropy.coordinates as coords 25 | 26 | try: 27 | import ujson as json 28 | except ImportError as error: 29 | import json 30 | 31 | import os 32 | import time 33 | 34 | from .. import sfd 35 | from ..std_paths import * 36 | 37 | class TestSFD(unittest.TestCase): 38 | @classmethod 39 | def setUpClass(self): 40 | t0 = time.time() 41 | 42 | # Test data comes from NED 43 | with open(os.path.join(test_dir, 'ned_output.json'), 'r') as f: 44 | self._test_data = json.load(f) 45 | 46 | # Set up SFD query object 47 | self._sfd = sfd.SFDQuery() 48 | 49 | t1 = time.time() 50 | print('Loaded SFD test data in {:.5f} s.'.format(t1-t0)) 51 | 52 | def _get_equ(self, d): 53 | """ 54 | Get Equatorial (ICRS) coordinates of test data point. 55 | """ 56 | return coords.SkyCoord(d['equ'][0], d['equ'][1], frame='icrs') 57 | 58 | def _get_gal(self, d): 59 | """ 60 | Get Galactic coordinates of test data point. 61 | """ 62 | return coords.SkyCoord( 63 | d['gal'][0], d['gal'][1], 64 | frame='galactic', unit='deg' 65 | ) 66 | 67 | def test_sfd_equ_scalar(self): 68 | """ 69 | Test SFD query of individual ICRS coordinates. 
70 | """ 71 | # print 'Equatorial' 72 | # print '==========' 73 | 74 | for d in self._test_data: 75 | c = self._get_equ(d) 76 | Av = 2.742 * self._sfd(c) 77 | # c_gal = c.transform_to('galactic') 78 | # print '* (l, b) = ({: >16.8f} {: >16.8f})'.format(c_gal.l.deg, c_gal.b.deg) 79 | # print (d['sf11_Av'] - Av) / (0.001 + 0.001 * d['sf11_Av']) 80 | np.testing.assert_allclose(d['sf11_Av'], Av, atol=0.001, rtol=0.001) 81 | 82 | def test_sfd_gal_scalar(self): 83 | """ 84 | Test SFD query of individual Galactic coordinates. 85 | """ 86 | # print 'Galactic' 87 | # print '========' 88 | 89 | for d in self._test_data: 90 | c = self._get_gal(d) 91 | Av = 2.742 * self._sfd(c) 92 | # print '* (l, b) = ({: >16.8f} {: >16.8f})'.format(c.l.deg, c.b.deg) 93 | # print (d['sf11_Av'] - Av) / (0.001 + 0.001 * d['sf11_Av']) 94 | np.testing.assert_allclose(d['sf11_Av'], Av, atol=0.001, rtol=0.001) 95 | 96 | def test_sfd_equ_vector(self): 97 | """ 98 | Test SFD query of multiple ICRS coordinates at once. 99 | """ 100 | ra = [d['equ'][0] for d in self._test_data] 101 | dec = [d['equ'][1] for d in self._test_data] 102 | sf11_Av = np.array([d['sf11_Av'] for d in self._test_data]) 103 | c = coords.SkyCoord(ra, dec, frame='icrs') 104 | 105 | Av = 2.742 * self._sfd(c) 106 | 107 | np.testing.assert_allclose(sf11_Av, Av, atol=0.001, rtol=0.001) 108 | 109 | def test_sfd_gal_vector(self): 110 | """ 111 | Test SFD query of multiple Galactic coordinates at once. 112 | """ 113 | l = [d['gal'][0] for d in self._test_data] 114 | b = [d['gal'][1] for d in self._test_data] 115 | sf11_Av = np.array([d['sf11_Av'] for d in self._test_data]) 116 | c = coords.SkyCoord(l, b, frame='galactic', unit='deg') 117 | 118 | Av = 2.742 * self._sfd(c) 119 | 120 | np.testing.assert_allclose(sf11_Av, Av, atol=0.001, rtol=0.001) 121 | 122 | def test_shape(self): 123 | """ 124 | Test that the output shapes are as expected with input coordinate arrays 125 | of different shapes. 
126 | """ 127 | 128 | for reps in range(10): 129 | # Draw random coordinates, with different shapes 130 | n_dim = np.random.randint(1,4) 131 | shape = np.random.randint(1,7, size=(n_dim,)) 132 | 133 | ra = -180. + 360.*np.random.random(shape) 134 | dec = -90. + 180. * np.random.random(shape) 135 | c = coords.SkyCoord(ra, dec, frame='icrs', unit='deg') 136 | 137 | ebv_calc = self._sfd(c) 138 | 139 | np.testing.assert_equal(ebv_calc.shape, shape) 140 | 141 | def test_malformed_coords(self): 142 | """ 143 | Test that SFD query errors with malformed input. 144 | """ 145 | c = np.array([ 146 | [d['equ'][0] for d in self._test_data], 147 | [d['equ'][1] for d in self._test_data] 148 | ]) 149 | 150 | with self.assertRaises(TypeError): 151 | self._sfd(c) 152 | 153 | if __name__ == '__main__': 154 | unittest.main() 155 | -------------------------------------------------------------------------------- /dustmaps/unstructured_map.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # unstructured_map.py 4 | # Implements a class for querying dust maps with unstructured pixels. Sky 5 | # coordinates are assigned to the nearest pixel. 6 | # 7 | # Copyright (C) 2016 Gregory M. Green 8 | # 9 | # dustmaps is free software: you can redistribute it and/or modify 10 | # it under the terms of either: 11 | # 12 | # - The GNU General Public License as published by the Free Software Foundation, 13 | # either version 2 of the License, or (at your option) any later version, or 14 | # - The 2-Clause BSD License (also known as the Simplified BSD License). 15 | # 16 | # You should have received copies of the GNU General Public License 17 | # and the BSD License along with this program. 
from __future__ import print_function, division

import numpy as np
import astropy.coordinates as coordinates
import astropy.units as units
from scipy.spatial import cKDTree as KDTree

from .map_base import DustMap


class UnstructuredDustMap(DustMap):
    """
    A class for querying dust maps with unstructured pixels. Sky coordinates are
    assigned to the nearest pixel.
    """

    def __init__(self, pix_coords, max_pix_scale, metric_p=2, frame=None):
        """
        Args:
            pix_coords (array-like :obj:`astropy.coordinates.SkyCoord`): The sky
                coordinates of the pixels.
            max_pix_scale (scalar :obj:`astropy.units.Quantity`): Maximum angular
                extent of a pixel. If no pixel is within this distance of a
                query point, NaN will be returned for that query point.
            metric_p (Optional[:obj:`float`]): The metric to use. Defaults to 2, which
                is the Euclidean metric. A value of 1 corresponds to the
                Manhattan metric, while a value approaching infinity yields the
                maximum component metric.
            frame (Optional[:obj:`str`]): The coordinate frame to use internally. Must
                be a frame understood by :obj:`astropy.coordinates.SkyCoord`.
                Defaults to :obj:`None`, meaning that the frame will be inferred
                from :obj:`pix_coords`.
        """
        self._n_pix = pix_coords.shape[0]
        self._metric_p = metric_p

        if frame is None:
            self._frame = pix_coords.frame
        else:
            self._frame = frame

        # Tessellate the space: build a k-d tree over the pixel unit vectors,
        # so that nearest-pixel lookups are fast.
        self._pix_vec = self._coords2vec(pix_coords)
        self._kd = KDTree(self._pix_vec)

        # Don't query more than this distance from any point.
        # NOTE(review): the angular scale (in rad) is used directly as the
        # KDTree distance bound on 3D unit vectors; for small angles the
        # chord length approximately equals the angle.
        self._max_pix_scale = max_pix_scale.to('rad').value

    def _coords2vec(self, coords):
        """
        Converts from sky coordinates to unit vectors. Before conversion to unit
        vectors, the coordinates are transformed to the coordinate system used
        internally by the :obj:`UnstructuredDustMap`, which can be set during
        initialization of the class.

        Args:
            coords (:obj:`astropy.coordinates.SkyCoord`): Input coordinates to
                convert to unit vectors.

        Returns:
            Cartesian unit vectors corresponding to the input coordinates, after
            transforming to the coordinate system used internally by the
            :obj:`UnstructuredDustMap`.
        """

        # Transform to the internal frame, then normalize the Cartesian
        # representation to unit length.
        c = coords.transform_to(self._frame).represent_as('cartesian')
        vec_norm = np.sqrt(c.x**2 + c.y**2 + c.z**2)

        vec = np.empty((c.shape[0], 3), dtype=c.x.dtype)
        vec[:,0] = (c.x / vec_norm).value[:]
        vec[:,1] = (c.y / vec_norm).value[:]
        vec[:,2] = (c.z / vec_norm).value[:]

        return vec

    def _coords2idx(self, coords):
        """
        Converts from sky coordinates to pixel indices.

        Args:
            coords (:obj:`astropy.coordinates.SkyCoord`): Sky coordinates.

        Returns:
            Pixel indices of the coordinates, with the same shape as the input
            coordinates. Pixels which are outside the map are given an index
            equal to the number of pixels in the map.
        """

        x = self._coords2vec(coords)
        # KDTree.query returns (distances, indices); out-of-range queries get
        # index n_pix, which callers treat as "outside the map".
        idx = self._kd.query(x, p=self._metric_p,
                             distance_upper_bound=self._max_pix_scale)
        return idx[1]
archivePrefix = "arXiv", 14 | eprint = {1307.6212}, 15 | primaryClass = "astro-ph.IM", 16 | keywords = {methods: data analysis, methods: miscellaneous, virtual 17 | observatory tools}, 18 | year = 2013, 19 | month = oct, 20 | volume = 558, 21 | doi = {10.1051/0004-6361/201322068}, 22 | adsurl = {http://adsabs.harvard.edu/abs/2013A%26A...558A..33A} 23 | } 24 | 25 | @article{Schlegel:1998, 26 | author = {{Schlegel}, D.~J. and {Finkbeiner}, D.~P. and {Davis}, M.}, 27 | title = "{Maps of Dust Infrared Emission for Use in Estimation of Reddening and Cosmic Microwave Background Radiation Foregrounds}", 28 | journal = {The Astrophysical Journal}, 29 | year = 1998, 30 | month = jun, 31 | volume = 500, 32 | pages = {525-553}, 33 | doi = {10.1086/305772}, 34 | adsurl = {http://adsabs.harvard.edu/abs/1998ApJ...500..525S}, 35 | adsnote = {Provided by the SAO/NASA Astrophysics Data System} 36 | } 37 | 38 | @article{Marshall:2006, 39 | author = {{Marshall}, D.~J. and {Robin}, A.~C. and {Reyl{\'e}}, C. and 40 | {Schultheis}, M. and {Picaud}, S.}, 41 | title = "{Modelling the Galactic interstellar extinction distribution in three dimensions}", 42 | journal = {Astronomy \& Astrophysics}, 43 | year = 2006, 44 | month = jul, 45 | volume = 453, 46 | pages = {635-651}, 47 | doi = {10.1051/0004-6361:20053842}, 48 | adsurl = {http://adsabs.harvard.edu/abs/2006A%26A...453..635M}, 49 | adsnote = {Provided by the SAO/NASA Astrophysics Data System} 50 | } 51 | 52 | @article{Planck:2013, 53 | author = {{Planck Collaboration} and {Abergel}, A. and {Ade}, P.~A.~R. and 54 | {Aghanim}, N. and {Alves}, M.~I.~R. and {Aniano}, G. and {Armitage-Caplan}, C. and 55 | {Arnaud}, M. and {Ashdown}, M. and {Atrio-Barandela}, F. and et al.}, 56 | title = "{Planck 2013 results. XI. 
All-sky model of thermal dust emission}", 57 | journal = {Astronomy \& Astrophysics}, 58 | archivePrefix = "arXiv", 59 | eprint = {1312.1300}, 60 | year = 2014, 61 | month = nov, 62 | volume = 571, 63 | pages = {A11}, 64 | doi = {10.1051/0004-6361/201323195}, 65 | adsurl = {http://adsabs.harvard.edu/abs/2014A%26A...571A..11P}, 66 | adsnote = {Provided by the SAO/NASA Astrophysics Data System} 67 | } 68 | 69 | @article{Green:2015, 70 | author = {{Green}, G.~M. and {Schlafly}, E.~F. and {Finkbeiner}, D.~P. and 71 | {Rix}, H.-W. and {Martin}, N. and {Burgett}, W. and {Draper}, P.~W. and 72 | {Flewelling}, H. and {Hodapp}, K. and {Kaiser}, N. and {Kudritzki}, R.~P. and 73 | {Magnier}, E. and {Metcalfe}, N. and {Price}, P. and {Tonry}, J. and 74 | {Wainscoat}, R.}, 75 | title = "{A Three-dimensional Map of Milky Way Dust}", 76 | journal = {The Astrophysical Journal}, 77 | year = 2015, 78 | month = sep, 79 | volume = 810, 80 | pages = {25}, 81 | doi = {10.1088/0004-637X/810/1/25}, 82 | adsurl = {http://adsabs.harvard.edu/abs/2015ApJ...810...25G}, 83 | adsnote = {Provided by the SAO/NASA Astrophysics Data System} 84 | } 85 | 86 | @article{Lenz:2017, 87 | author = {{Lenz}, D. and {Hensley}, B.~S. and {Dor{\'e}}, O.}, 88 | title = "{A New, Large-scale Map of Interstellar Reddening Derived from H I Emission}", 89 | journal = {The Astrophysical Journal}, 90 | year = 2017, 91 | month = sep, 92 | volume = 846, 93 | pages = {38}, 94 | doi = {10.3847/1538-4357/aa84af}, 95 | adsurl = {http://adsabs.harvard.edu/abs/2017ApJ...846...38L}, 96 | adsnote = {Provided by the SAO/NASA Astrophysics Data System} 97 | } 98 | 99 | @article{Green:2018, 100 | author = {{Green}, G.~M. and {Schlafly}, E.~F. and {Finkbeiner}, D. and 101 | {Rix}, H.-W. and {Martin}, N. and {Burgett}, W. and {Draper}, P.~W. and 102 | {Flewelling}, H. and {Hodapp}, K. and {Kaiser}, N. and {Kudritzki}, R.-P. and 103 | {Magnier}, E.~A. and {Metcalfe}, N. and {Tonry}, J.~L. and {Wainscoat}, R. 
and 104 | {Waters}, C.}, 105 | title = "{Galactic Reddening in 3D from Stellar Photometry - An Improved Map}", 106 | journal = {ArXiv e-prints}, 107 | archivePrefix = "arXiv", 108 | eprint = {1801.03555}, 109 | year = 2018, 110 | month = jan, 111 | adsurl = {http://adsabs.harvard.edu/abs/2018arXiv180103555G}, 112 | adsnote = {Provided by the SAO/NASA Astrophysics Data System} 113 | } 114 | 115 | -------------------------------------------------------------------------------- /paper/paper.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: 'dustmaps: A Python interface for maps of interstellar dust' 3 | tags: 4 | - Python 5 | - astronomy 6 | - interstellar medium 7 | - interstellar reddening 8 | - interstellar extinction 9 | authors: 10 | - name: Gregory M. Green 11 | orcid: 0000-0001-5417-2260 12 | affiliation: 1 13 | affiliations: 14 | - name: Porat Fellow, Stanford/KIPAC 15 | index: 1 16 | date: 19 April 2018 17 | bibliography: paper.bib 18 | --- 19 | 20 | # Summary 21 | 22 | Correcting for interstellar dust extinction is a critical step in many analyses of astrophysical data. Indeed, a standard dust reddening map, @Schlegel:1998, is one of the highest cited papers in astrophysics. 23 | 24 | The ``dustmaps`` package provides a uniform Python interface for several commonly used maps of interstellar dust, including two-dimensional maps such as @Schlegel:1998, @Planck:2013 and @Lenz:2017, and three-dimensional maps such as @Marshall:2006 and @Green:2015. ``dustmaps`` makes use of ``Astropy``'s coordinate-system package [``astropy.coordinates.SkyCoord``, @astropy], making it easy to query dust maps in a wide variety of coordinate systems (Equatorial, Galactic, Ecliptic, etc.). Additionally, ``dustmaps`` handles the downloading of the supported dust maps for users, and allows users to query some dust maps from a remote server, avoiding the need to download large data files. 
25 | 26 | Development of ``dustmaps`` takes place on GitHub [@github_dustmaps], and any issues with the software or feature suggestions (e.g., the addition of new dust maps) should be raised there. 27 | 28 | An example of the type of analysis which can be carried out with ``dustmaps`` is given below. The left panel is a plot of dust reddening in @Green:2018 to a distance of 800 pc, while the right panel shows the correlation between @Green:2018 and @Planck:2013. 29 | 30 | ![Example of the type of analysis made easy by ``dustmaps``.](figure.pdf) 31 | 32 | # References 33 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # setup.py 4 | # Package "dustmaps" for pip. 5 | # 6 | # Copyright (C) 2016 Gregory M. Green 7 | # 8 | # dustmaps is free software: you can redistribute it and/or modify 9 | # it under the terms of either: 10 | # 11 | # - The GNU General Public License as published by the Free Software Foundation, 12 | # either version 2 of the License, or (at your option) any later version, or 13 | # - The 2-Clause BSD License (also known as the Simplified BSD License). 14 | # 15 | # You should have received copies of the GNU General Public License 16 | # and the BSD License along with this program. 
17 | # 18 | 19 | 20 | from __future__ import print_function, division 21 | 22 | from setuptools import setup, Extension 23 | from setuptools.command.install import install 24 | import distutils.cmd 25 | 26 | import os 27 | import json 28 | import io 29 | 30 | 31 | class InstallCommand(install): 32 | description = install.description 33 | user_options = install.user_options + [ 34 | ('large-data-dir=', None, 'Directory to store large data files in.') 35 | ] 36 | 37 | def initialize_options(self): 38 | install.initialize_options(self) 39 | self.large_data_dir = None 40 | 41 | def finalize_options(self): 42 | if not self.large_data_dir is None: 43 | self.large_data_dir = os.path.abspath(os.path.expanduser(self.large_data_dir)) 44 | 45 | install.finalize_options(self) 46 | 47 | def run(self): 48 | if not self.large_data_dir is None: 49 | print('Large data directory is set to: {}'.format(self.large_data_dir)) 50 | with open(os.path.expanduser('~/.dustmapsrc'), 'w') as f: 51 | json.dump({'data_dir': self.large_data_dir}, f, indent=2) 52 | 53 | # install.do_egg_install(self) # Due to bug in setuptools that causes old-style install 54 | install.run(self) 55 | 56 | 57 | def fetch_sfd(): 58 | import dustmaps.sfd 59 | dustmaps.sfd.fetch() 60 | 61 | def fetch_csfd(): 62 | import dustmaps.csfd 63 | dustmaps.csfd.fetch() 64 | 65 | def fetch_planck(): 66 | import dustmaps.planck 67 | dustmaps.planck.fetch() 68 | 69 | def fetch_planck_GNILC(): 70 | import dustmaps.planck 71 | dustmaps.planck.fetch(which='GNILC') 72 | 73 | def fetch_bayestar(**kwargs): 74 | import dustmaps.bayestar 75 | dustmaps.bayestar.fetch(**kwargs) 76 | 77 | def fetch_iphas(): 78 | import dustmaps.iphas 79 | dustmaps.iphas.fetch() 80 | 81 | def fetch_marshall(): 82 | import dustmaps.marshall 83 | dustmaps.marshall.fetch() 84 | 85 | def fetch_chen2014(): 86 | import dustmaps.chen2014 87 | dustmaps.chen2014.fetch() 88 | 89 | def fetch_leikeensslin2019(): 90 | import dustmaps.leike_ensslin_2019 91 | 
dustmaps.leike_ensslin_2019.fetch() 92 | 93 | def fetch_leike2020(): 94 | import dustmaps.leike2020 95 | dustmaps.leike2020.fetch() 96 | 97 | def fetch_edenhofer2023(): 98 | import dustmaps.edenhofer2023 99 | dustmaps.edenhofer2023.fetch() 100 | 101 | def fetch_lenz2017(): 102 | import dustmaps.lenz2017 103 | dustmaps.lenz2017.fetch() 104 | 105 | def fetch_pg2010(): 106 | import dustmaps.pg2010 107 | dustmaps.pg2010.fetch() 108 | 109 | def fetch_gaia_tge(): 110 | import dustmaps.gaia_tge 111 | dustmaps.gaia_tge.fetch() 112 | 113 | def fetch_bh(): 114 | print('Burstein & Heiles (1982) is already installed by default.') 115 | 116 | def fetch_decaps(): 117 | import dustmaps.decaps 118 | dustmaps.decaps.fetch() 119 | 120 | 121 | class FetchCommand(distutils.cmd.Command): 122 | description = ('Fetch dust maps from the web, and store them in the data ' 123 | 'directory.') 124 | user_options = [ 125 | ('map-name=', None, 'Which map to load.')] 126 | 127 | map_funcs = { 128 | 'sfd': fetch_sfd, 129 | 'csfd': fetch_csfd, 130 | 'planck': fetch_planck, 131 | 'planckGNILC': fetch_planck_GNILC, 132 | 'bayestar': fetch_bayestar, 133 | 'bayestar2015': lambda: fetch_bayestar(version='bayestar2015'), 134 | 'bayestar2017': lambda: fetch_bayestar(version='bayestar2017'), 135 | 'bayestar2019': lambda: fetch_bayestar(version='bayestar2019'), 136 | 'bh': fetch_bh, 137 | 'iphas': fetch_iphas, 138 | 'marshall': fetch_marshall, 139 | 'chen2014': fetch_chen2014, 140 | 'lenz2017': fetch_lenz2017, 141 | 'pg2010': fetch_pg2010, 142 | 'leikeensslin2019': fetch_leikeensslin2019, 143 | 'leike2020': fetch_leike2020, 144 | 'edenhofer2023': fetch_edenhofer2023, 145 | 'gaia_tge': fetch_gaia_tge, 146 | 'decaps':fetch_decaps 147 | } 148 | 149 | def initialize_options(self): 150 | self.map_name = None 151 | 152 | def finalize_options(self): 153 | try: 154 | import dustmaps 155 | except ImportError: 156 | print('You must install the package dustmaps before running the ' 157 | 'fetch command.') 158 | if 
not self.map_name in self.map_funcs: 159 | print('Valid map names are: {}'.format(self.map_funcs.keys())) 160 | 161 | def run(self): 162 | print('Fetching map: {}'.format(self.map_name)) 163 | self.map_funcs[self.map_name]() 164 | 165 | 166 | def readme(): 167 | with io.open('README.md', mode='r', encoding='utf-8') as f: 168 | return f.read() 169 | 170 | 171 | setup( 172 | name='dustmaps', 173 | version='1.0.14', 174 | description='Uniform interface for multiple dust reddening maps.', 175 | long_description=readme(), 176 | long_description_content_type='text/markdown', 177 | url='https://github.com/gregreen/dustmaps', 178 | download_url='https://github.com/gregreen/dustmaps/archive/v1.0.14.tar.gz', 179 | author='Gregory M. Green', 180 | author_email='gregorymgreen@gmail.com', 181 | license='GPL-2.0 OR BSD-2-Clause', 182 | packages=['dustmaps'], 183 | install_requires=[ 184 | 'numpy', 185 | 'scipy', 186 | 'astropy', 187 | 'h5py', 188 | 'healpy', 189 | 'requests', 190 | 'progressbar2', 191 | 'six', 192 | 'tqdm' 193 | ], 194 | include_package_data=True, 195 | test_suite='nose.collector', 196 | tests_require=['nose'], 197 | zip_safe=False, 198 | cmdclass = { 199 | 'install': InstallCommand, 200 | 'fetch': FetchCommand, 201 | }, 202 | ) 203 | --------------------------------------------------------------------------------