├── .gitignore ├── .gitlab-ci.yml ├── CHANGELOG.md ├── CITATION.cff ├── LICENSE ├── NOTICE ├── README.md ├── doc ├── Makefile └── source │ ├── _static │ ├── logo-wide.png │ └── logo.png │ ├── conf.py │ ├── index.rst │ ├── install.rst │ ├── reference.rst │ ├── reference │ ├── data.rst │ ├── io.rst │ ├── ogip.rst │ └── user.rst │ ├── tutorials.rst │ └── tutorials │ ├── apec.rst │ ├── bkgfitting.rst │ ├── datasets.rst │ ├── myusermodel.rst │ ├── ogip2spex.rst │ ├── ogipgenrsp.rst │ ├── simres.rst │ └── tg2spex.rst ├── examples ├── apec.py └── myusermodel.py ├── pyproject.toml ├── pyspextools ├── __init__.py ├── color.py ├── data │ ├── __init__.py │ ├── badchannels.py │ └── response.py ├── io │ ├── __init__.py │ ├── arf.py │ ├── convert.py │ ├── dataset.py │ ├── ogip.py │ ├── pha.py │ ├── pha2.py │ ├── region.py │ ├── res.py │ ├── rmf.py │ ├── spo.py │ └── tg.py ├── messages.py ├── model │ ├── __init__.py │ └── user.py └── scripts │ ├── ogip2spex.py │ ├── ogipgenrsp.py │ ├── simres.py │ └── tg2spex.py ├── requirements.txt └── setup.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Python compile output 2 | build 3 | dist 4 | *.pyc 5 | pyspextools.egg-info 6 | **/__pycache__ 7 | doc/build 8 | 9 | # Other ignored files 10 | .idea 11 | -------------------------------------------------------------------------------- /.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | image: python:3.8-slim 2 | 3 | stages: 4 | - deploy 5 | 6 | before_script: 7 | - pip install build 8 | - pip install twine 9 | - pip install numpy 10 | - pip install astropy 11 | - pip install sphinx sphinx-argparse 12 | - python -m build --sdist --wheel 13 | 14 | deploy_staging: 15 | stage: deploy 16 | variables: 17 | TWINE_USERNAME: $STAGING_USERNAME 18 | TWINE_PASSWORD: $STAGING_PASSWORD 19 | script: 20 | - echo $STAGING_USERNAME 21 | - echo $TWINE_USERNAME 22 | - twine upload --verbose --repository-url $PYPITEST_REPOSITORY_URL dist/* 23 | only: 24 | - master 25 | except: 26 | - tags 27 | 28 | deploy_production: 29 | stage: deploy 30 | variables: 31 | TWINE_USERNAME: $PRODUCTION_USERNAME 32 | TWINE_PASSWORD: $PRODUCTION_PASSWORD 33 | script: 34 | - twine upload dist/* 35 | only: 36 | - tags 37 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Pyspextools change log 2 | 3 | All notable changes to this project will be documented in this file. 4 | 5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), 6 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 7 | 8 | ## [0.7.0] - 2025-02-18 9 | 10 | ### Changed 11 | 12 | - Move to pyproject.toml file to build and install pyspextools 13 | - Determine PHA type in cases when HDUCLAS3 keyword is not defined 14 | 15 | ## [0.6.1] 16 | 17 | ### Fixed 18 | 19 | - Fixed an issue with large XRISM response matrices where the second matrix 20 | extension was not properly converted. 21 | 22 | ## [0.6.0] 23 | 24 | ### Added 25 | 26 | - Added a force_poisson option to PHA reading routine and ogip2spex for cases where other 27 | types of statistics are used and Poisson is desired. 28 | - Added code to fix bins with zero width in ROSAT response matrices. 29 | - Added tutorial about how to create .spo and .res files with more complicated sectors and regions. 
30 | 31 | ### Fixed 32 | 33 | - Fixed ogipgenrsp which now uses the rmf structure with multiple MATRIX extensions. 34 | 35 | ## [0.5.0] 36 | 37 | ### Changed 38 | 39 | - SPEX spectra and responses are now written in double precision. 40 | 41 | ### Fixed 42 | 43 | - Fix for converting spectra from LEM and HEX-P responses where channel numbers are shifted by 1. 44 | - Fix for converting spectra from rgscombine. 45 | 46 | ### Removed 47 | 48 | - Discontinued Python 2 support. 49 | 50 | 51 | ## [0.4.0] 52 | 53 | ### Added 54 | 55 | - Added capability to read and convert OGIP RMF files with multiple MATRIX extensions. 56 | 57 | ### Changed 58 | 59 | - Updated apec.py to work with pyatomdb 0.8.0 and above. 60 | 61 | ## [0.3.4] 62 | 63 | ### Changed 64 | 65 | - Consistently changed pyspex to pyspextools in anticipation of the pyspex release with SPEX 3.06.00. 66 | (Thanks to Anna Ogorzałek for reporting these issues). 67 | 68 | ### Fixed 69 | 70 | - Fixed issue with reading multiple res and spo files. 71 | 72 | ## [0.3.2] 73 | 74 | ### Added 75 | 76 | - Added no-area option to the ogipgenrsp script. 77 | 78 | ### Changed 79 | 80 | - Updated apec.py example script for pyATOMDB >= 0.6.0 (only supports Python 3 now). 81 | (Thanks to Francois Mernier for reporting.) 82 | 83 | ## [0.3.1] 84 | 85 | ### Added 86 | 87 | - Produce an error message in rare cases where all detector channels are bad. 88 | 89 | ### Fixed 90 | 91 | - Fixed issue reading PHA files without HDUCLAS2 and HDUCLAS3 keywords. 92 | - Fixed issue with using setup.py on Python 2. 93 | 94 | ## [0.3.0] 95 | 96 | ### Added 97 | 98 | - Initial release of pyspextools 99 | -------------------------------------------------------------------------------- /CITATION.cff: -------------------------------------------------------------------------------- 1 | # This CITATION.cff file was generated with cffinit. 2 | # Visit https://bit.ly/cffinit to generate yours today! 3 | 4 | cff-version: 1.2.0 5 | title: pyspextools 6 | message: >- 7 | If you use this software, please cite it using the 8 | metadata from this file. 9 | type: software 10 | authors: 11 | - given-names: Jelle 12 | name-particle: de 13 | family-names: Plaa 14 | email: j.de.plaa@sron.nl 15 | affiliation: SRON Netherlands Institute for Space Research 16 | orcid: 'https://orcid.org/0000-0002-2697-7106' 17 | identifiers: 18 | - type: doi 19 | value: 10.5281/zenodo.3245804 20 | description: >- 21 | The Zenodo identifier for all versions of this 22 | software. 23 | - type: doi 24 | value: 10.5281/zenodo.5128614 25 | description: >- 26 | Pyspextools is a collection of python tools for 27 | the SPEX X-ray spectral fitting package. The 28 | package contains tools to create and manipulate 29 | spectra in SPEX format and tools to create user 30 | defined models. This release contains a few 31 | bugfixes that are described in the CHANGELOG of 32 | this package. 33 | repository-code: 'https://github.com/spex-xray/pyspextools' 34 | url: 'https://spex-xray.github.io/pyspextools/' 35 | abstract: >- 36 | Pyspextools is a collection of python tools for the 37 | SPEX X-ray spectral fitting package. The package 38 | contains tools to create and manipulate spectra in 39 | SPEX format and tools to create user defined 40 | models. 
41 | keywords: 42 | - X-ray 43 | - spectroscopy 44 | - astrophysics 45 | license: Apache-2.0 46 | doi: 10.5281/zenodo.5128614 47 | commit: >- 48 | https://github.com/spex-xray/pyspextools/commit/bad3f54eaf287265f1c3a7d169c22032731cf9e6 49 | version: 0.7.0 50 | date-released: '2025-02-18' 51 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | Copyright 2017-2024 Jelle de Plaa, SRON Netherlands Institute for Space Research 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Pyspextools 2 | 3 | Pyspextools is a collection of python tools for the SPEX X-ray spectral 4 | fitting package (see the [SPEX website](http://www.sron.nl/spex)). 5 | 6 | ## Install 7 | 8 | Installing pyspextools can be done directly using pip. Before you continue, 9 | please think about where you want to install pyspextools. It may be wise to 10 | create an Anaconda environment where pyspextools can be compiled with 11 | the most compatible versions of the modules that pyspextools depends on. 12 | 13 | Install pyspextools as follows: 14 | 15 | ``` 16 | (conda) linux:~/> pip install pyspextools 17 | ``` 18 | 19 | Other install options can be found on the [install page of the 20 | documentation](https://spex-xray.github.io/pyspextools/install.html). 
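After the installation finishes, a quick import check can confirm that the module is available in your
environment (this is just an optional sanity check, not part of the official install procedure):

```
(conda) linux:~/> python -c "import pyspextools; print(pyspextools.__version__)"
```
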
21 | 22 | ## Usage 23 | 24 | Please see the documentation pages for tutorials and reference documentation: 25 | 26 | [Full documentation of pyspextools](https://spex-xray.github.io/pyspextools/) 27 | 28 | ## Scientific citations 29 | 30 | If pyspextools and SPEX were useful for your scientific paper, please cite 31 | these packages with their DOIs on Zenodo: 32 | 33 | - [Pyspextools publication on Zenodo](https://doi.org/10.5281/zenodo.3245804) 34 | - [SPEX publication on Zenodo](https://doi.org/10.5281/zenodo.1924563) 35 | 36 | ## Under development 37 | 38 | Please note that pyspextools is still under development. Feel free to submit 39 | bug reports or feature requests by raising an issue on our 40 | [Github page](https://github.com/spex-xray/pyspextools/issues). 41 | -------------------------------------------------------------------------------- /doc/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = pyspextools 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /doc/source/_static/logo-wide.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/spex-xray/pyspextools/f52e417288f6903798f7c2a664db481f6441eb34/doc/source/_static/logo-wide.png -------------------------------------------------------------------------------- /doc/source/_static/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/spex-xray/pyspextools/f52e417288f6903798f7c2a664db481f6441eb34/doc/source/_static/logo.png -------------------------------------------------------------------------------- /doc/source/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # 4 | # pyspextools documentation build configuration file, created by 5 | # sphinx-quickstart on Fri Nov 3 10:18:23 2017. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | # If extensions (or modules to document with autodoc) are in another directory, 17 | # add these directories to sys.path here. If the directory is relative to the 18 | # documentation root, use os.path.abspath to make it absolute, like shown here. 19 | # 20 | # import os 21 | # import sys 22 | # sys.path.insert(0, os.path.abspath('.')) 23 | import pyspextools 24 | 25 | # -- General configuration ------------------------------------------------ 26 | 27 | # If your documentation needs a minimal Sphinx version, state it here. 
28 | # 29 | # needs_sphinx = '1.0' 30 | 31 | # Add any Sphinx extension module names here, as strings. They can be 32 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 33 | # ones. 34 | extensions = ['sphinx.ext.autodoc', 35 | 'sphinx.ext.doctest', 36 | 'sphinx.ext.coverage', 37 | 'sphinx.ext.imgmath', 38 | 'sphinx.ext.viewcode', 39 | 'sphinx.ext.githubpages', 40 | 'sphinxarg.ext'] 41 | 42 | # Add any paths that contain templates here, relative to this directory. 43 | templates_path = ['_templates'] 44 | 45 | # The suffix(es) of source filenames. 46 | # You can specify multiple suffix as a list of string: 47 | # 48 | # source_suffix = ['.rst', '.md'] 49 | source_suffix = '.rst' 50 | 51 | # The master toctree document. 52 | master_doc = 'index' 53 | 54 | # General information about the project. 55 | project = 'pyspextools' 56 | copyright = '2018-2024, SRON Netherlands Institute for Space Research' 57 | author = 'Jelle de Plaa' 58 | 59 | # The version info for the project you're documenting, acts as replacement for 60 | # |version| and |release|, also used in various other places throughout the 61 | # built documents. 62 | # 63 | # The short X.Y version. 64 | version = pyspextools.__version__ 65 | # The full version, including alpha/beta/rc tags. 66 | release = pyspextools.__version__ 67 | 68 | # The language for content autogenerated by Sphinx. Refer to documentation 69 | # for a list of supported languages. 70 | # 71 | # This is also used if you do content translation via gettext catalogs. 72 | # Usually you set "language" from the command line for these cases. 73 | language = 'en' 74 | 75 | # List of patterns, relative to source directory, that match files and 76 | # directories to ignore when looking for source files. 77 | # This patterns also effect to html_static_path and html_extra_path 78 | exclude_patterns = [] 79 | 80 | # The name of the Pygments (syntax highlighting) style to use. 81 | pygments_style = 'sphinx' 82 | 83 | # If true, `todo` and `todoList` produce output, else they produce nothing. 84 | todo_include_todos = False 85 | 86 | 87 | # -- Options for HTML output ---------------------------------------------- 88 | 89 | # The theme to use for HTML and HTML Help pages. See the documentation for 90 | # a list of builtin themes. 91 | # 92 | html_theme = 'alabaster' 93 | 94 | # Theme options are theme-specific and customize the look and feel of a theme 95 | # further. For a list of options available for each theme, see the 96 | # documentation. 97 | # 98 | # html_theme_options = {} 99 | html_theme_options = { 100 | 'logo': 'logo-wide.png', 101 | 'logo_name': 'SPEX', 102 | 'font_family': 'Times, serif', 103 | } 104 | # Add any paths that contain custom static files (such as style sheets) here, 105 | # relative to this directory. They are copied after the builtin static files, 106 | # so a file named "default.css" will overwrite the builtin "default.css". 107 | html_static_path = ['_static'] 108 | 109 | 110 | # -- Options for HTMLHelp output ------------------------------------------ 111 | 112 | # Output file base name for HTML help builder. 113 | htmlhelp_basename = 'pyspextoolsdoc' 114 | 115 | 116 | # -- Options for LaTeX output --------------------------------------------- 117 | 118 | latex_elements = { 119 | # The paper size ('letterpaper' or 'a4paper'). 120 | # 121 | # 'papersize': 'letterpaper', 122 | 123 | # The font size ('10pt', '11pt' or '12pt'). 124 | # 125 | # 'pointsize': '10pt', 126 | 127 | # Additional stuff for the LaTeX preamble. 
128 | # 129 | # 'preamble': '', 130 | 131 | # Latex figure (float) alignment 132 | # 133 | # 'figure_align': 'htbp', 134 | } 135 | 136 | # Grouping the document tree into LaTeX files. List of tuples 137 | # (source start file, target name, title, 138 | # author, documentclass [howto, manual, or own class]). 139 | latex_documents = [ 140 | (master_doc, 'pyspextools.tex', 'pyspextools Documentation', 141 | 'Jelle de Plaa', 'manual'), 142 | ] 143 | 144 | 145 | # -- Options for manual page output --------------------------------------- 146 | 147 | # One entry per manual page. List of tuples 148 | # (source start file, name, description, authors, manual section). 149 | man_pages = [ 150 | (master_doc, 'pyspextools', 'pyspextools Documentation', 151 | [author], 1) 152 | ] 153 | 154 | 155 | # -- Options for Texinfo output ------------------------------------------- 156 | 157 | # Grouping the document tree into Texinfo files. List of tuples 158 | # (source start file, target name, title, author, 159 | # dir menu entry, description, category) 160 | texinfo_documents = [ 161 | (master_doc, 'pyspextools', 'pyspextools Documentation', 162 | author, 'pyspextools', 'One line description of project.', 163 | 'Miscellaneous'), 164 | ] 165 | 166 | autodoc_member_order = 'bysource' 167 | 168 | -------------------------------------------------------------------------------- /doc/source/index.rst: -------------------------------------------------------------------------------- 1 | .. pyspextools documentation master file, created by 2 | sphinx-quickstart on Fri Nov 3 10:18:23 2017. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to pyspextools documentation! 7 | ===================================== 8 | 9 | Welcome to pyspextools, the python helper module for the `SPEX spectral fitting package `_. 10 | This module offers a growing number of utilities and scripts that can help with analysing X-ray spectra using 11 | SPEX. 12 | 13 | This page offers the following help and documentation: 14 | 15 | .. toctree:: 16 | :maxdepth: 2 17 | 18 | install 19 | tutorials 20 | reference 21 | 22 | 23 | 24 | Indices and tables 25 | ================== 26 | 27 | * :ref:`genindex` 28 | * :ref:`modindex` 29 | * :ref:`search` 30 | -------------------------------------------------------------------------------- /doc/source/install.rst: -------------------------------------------------------------------------------- 1 | Install pyspextools 2 | =================== 3 | 4 | Before you install 5 | ------------------ 6 | 7 | From pyspextools version 0.5.0, you need a Python 3 environment to install pyspextools. 8 | We recommend to install the pyspextools module in a virtual environment like conda to manage the dependencies. 9 | To do so, create your own conda environment first (See this 10 | `tutorial `_). 11 | In the example below, we create the conda environment `spex` assuming you already have installed conda successfully. 12 | The commands below create a new conda environment and activate it in your shell:: 13 | 14 | linux:~> conda create -n spex python=3.9 numpy astropy sphinx sphinx-argparse 15 | linux:~> conda activate spex 16 | (spex) linux:~> 17 | 18 | To use pyspextools later, each time you open a terminal, you need to activate the conda spex environment 19 | using the `conda activate spex` command. 
20 | 21 | Install pyspextools through Anaconda 22 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 23 | 24 | Pyspextools can be installed directly in conda for python 3.8, 3.9 and 3.10. Of course, you can use the conda 25 | environment created in the previous step, but it should also work in other conda environments. 26 | 27 | The command below installs pyspextools in your conda environment:: 28 | 29 | (spex) user@linux:~> conda install -c spexxray pyspextools 30 | 31 | It downloads pyspextools from the `spexxray channel at Anaconda <https://anaconda.org/spexxray>`_. 32 | 33 | Non-conda environments 34 | ~~~~~~~~~~~~~~~~~~~~~~ 35 | 36 | It is also possible to install pyspextools natively on your operating system (Linux and Mac OS). To avoid dependency 37 | issues, make sure you have installed the python modules that pyspextools depends on. On Linux, that can be done using 38 | `apt-get install` commands for Debian-like systems and `yum install` for RedHat-like systems. For Debian-like systems, 39 | the following command should install all dependencies for python 3:: 40 | 41 | linux:~> sudo apt-get install python3-future python3-numpy python3-astropy python3-sphinx-argparse 42 | 43 | If you have installed `pip`, then you can try to just run the `pip install` commands below and hope there are no 44 | dependency conflicts. 45 | 46 | Install using pip 47 | ----------------- 48 | 49 | Pyspextools can be easily installed with pip using the following command (with or without conda environment):: 50 | 51 | (spex) linux:~> pip install pyspextools 52 | 53 | 54 | Install from GitHub 55 | ~~~~~~~~~~~~~~~~~~~ 56 | 57 | Pyspextools can also be installed with pip using the Github git link to get the latest (bleeding edge) version:: 58 | 59 | (spex) linux:~> pip install git+https://github.com/spex-xray/pyspextools.git 60 | 61 | 62 | Install using python build 63 | ~~~~~~~~~~~~~~~~~~~~~~~~~~ 64 | 65 | Download the pyspextools source code from Github and, if necessary, extract it in a convenient directory. 66 | 67 | Before you continue, please think about where you want to install pyspextools. If you have a 68 | dedicated conda environment, conda will take care of this. Otherwise, you need to specify a 69 | suitable environment to install pyspextools in. 70 | 71 | Within an environment, install pyspextools as follows:: 72 | 73 | (spex) linux:~/pyspextools> python -m build && pip install dist/pyspextools-0.7.0.tar.gz 74 | 75 | 76 | Dependency issues 77 | ----------------- 78 | 79 | The install instructions above should take care of all the mandatory dependencies of the module. If not, 80 | then please consider creating a fresh conda environment to install pyspextools in. If that does not help, create 81 | an issue report on our `Github issues page <https://github.com/spex-xray/pyspextools/issues>`_ and describe the problem. 82 | 83 | Notes on the pyspextools dependencies 84 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 85 | 86 | Pyspextools works with Python 3. We try to keep the dependencies limited to ensure the stability of the module. 87 | Currently, the dependencies are: 88 | 89 | - numpy 90 | - astropy 91 | 92 | For the generation of documentation, the following packages are needed: 93 | 94 | - sphinx (version >= 1.4) 95 | - sphinx-argparse 96 | 97 | For some of the examples, other packages are used, like pyatomdb, but they are not required for the pyspextools 98 | module to function.
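To verify that pyspextools and its mandatory dependencies are available in the active environment, a simple
import test can be used (only a sanity check)::

    (spex) linux:~> python -c "import numpy, astropy, pyspextools; print(pyspextools.__version__)"
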
99 | -------------------------------------------------------------------------------- /doc/source/reference.rst: -------------------------------------------------------------------------------- 1 | Module reference 2 | ================ 3 | 4 | The pyspextools module contains classes and methods to read and write SPEX and OGIP type files. Also, 5 | a helper class for the SPEX user model is included. 6 | 7 | .. toctree:: 8 | :maxdepth: 2 9 | 10 | reference/io 11 | reference/ogip 12 | reference/data 13 | reference/user 14 | -------------------------------------------------------------------------------- /doc/source/reference/data.rst: -------------------------------------------------------------------------------- 1 | Data filtering and optimization 2 | =============================== 3 | 4 | Spectra and response matrices are not always delivered in their most optimal form. They, for example, contain 5 | bad channels or channels with zero response. In addition, spectra and responses can be optimized for storage, 6 | calculation speed and statistical accuracy. The pyspextools.data module contains functions to filter or optimize 7 | spectra and response matrices with respect to their originals. 8 | 9 | .. currentmodule:: pyspextools.data 10 | 11 | Filtering for bad channels 12 | -------------------------- 13 | 14 | The clean_region function returns a region without bad channels and zero response bins. The function reads the 15 | original region object from the function input and returns a cleaned region. The function call is quite simple: 16 | 17 | .. autofunction:: clean_region 18 | 19 | The function first checks whether the input object is indeed an instance of the Region class and that the spectrum 20 | and response are loaded. Then masks are created for several arrays in the spectrum and response files: 21 | 22 | * Chanmask is a boolean array, based on the 'used' array from the spo file, which identifies which spectral channels 23 | can be used or not. This logical value can be based on the Quality flag in the original OGIP spectrum. The channel 24 | should be usable if the channel is good in both the source and background spectrum. 25 | 26 | * Respmask identifies zero response values in the response array. They are found by looping through the response groups 27 | and through the channels (from ic1 to ic2, with a total of nc channels per group). If a channel has a zero response 28 | every time, it is also marked bad in the chanmask array (previous bullet). Then the zero response element is masked 29 | and the array indices (ic1, ic2 and nc) are modified to point to the correct response elements. 30 | 31 | * Groupmask masks response groups that have a zero response in total, so with no useful channels within the group. 32 | 33 | When the masks are finished, they are applied to the input region, both to the spo and res component. The new 34 | filtered region object is returned to the user. Such a call could look like this:: 35 | 36 | filtered_region = clean_region(region) 37 | 38 | 39 | 40 | -------------------------------------------------------------------------------- /doc/source/reference/io.rst: -------------------------------------------------------------------------------- 1 | .. _iomodule: 2 | 3 | SPEX data format support 4 | ======================== 5 | 6 | ``pyspextools.io`` is a Python submodule to read, write and manipulate 7 | SPEX spectrum and response file (.spo and .res). 
The 8 | format of these FITS files is described in Chapter 5 9 | of the SPEX manual: http://var.sron.nl/SPEX-doc/ 10 | 11 | .. automodule:: pyspextools.io 12 | 13 | 14 | .. _dataset_class: 15 | 16 | Dataset class description 17 | ------------------------- 18 | 19 | SPEX res and spo files can contain a set of spectra and responses from, for example, 20 | different instruments used in the same observation or spectra from different spatial 21 | regions. Each combination of a spectrum and response is called a region in SPEX 22 | (see the region class below). The dataset class is basically a list of regions and 23 | allows the user to add and remove regions from a dataset. 24 | 25 | .. autoclass:: pyspextools.io.Dataset 26 | :members: 27 | 28 | The region class 29 | ---------------- 30 | 31 | A combination of a spectrum and its response matrix is called a region in SPEX. This 32 | name originates from the idea that a dataset (see above) can contain spectra extracted 33 | from multiple regions in a spatial image. The region class combines a spo and res 34 | object into one region and provides tests to see if the spo and res files actually 35 | belong to each other. 36 | 37 | .. autoclass:: pyspextools.io.Region 38 | :members: 39 | 40 | The spo class 41 | ------------- 42 | 43 | The SPEX .spo file contains the source and background spectra, including information 44 | about systematic uncertainties and grouping. This class manages the reading and writing 45 | of (regions) in spo files. 46 | 47 | .. autoclass:: pyspextools.io.Spo 48 | :members: 49 | 50 | The res class 51 | ------------- 52 | 53 | The SPEX .res file contains the response matrix and effective area information for a 54 | spectrum. This class manages the reading and writing of (regions) in res files. 55 | 56 | .. autoclass:: pyspextools.io.Res 57 | :members: 58 | -------------------------------------------------------------------------------- /doc/source/reference/ogip.rst: -------------------------------------------------------------------------------- 1 | .. _ogipmodule: 2 | 3 | OGIP data format support 4 | ======================== 5 | 6 | The pyspextools module can import OGIP spectral files and convert them to 7 | SPEX format. There are basically two types of OGIP files: regular type I files, 8 | which are distributed with most X-ray missions, and type II files distributed 9 | with the Chandra transmission grating (TG) spectra. Pyspextools can handle both 10 | types. 11 | 12 | pyspextools.io.ogip: Importing OGIP files 13 | ----------------------------------------- 14 | 15 | The pyspextools OGIPRegion class contains methods to read a source spectrum, background 16 | spectrum, response file and effective area file, and save them as a SPEX region, which 17 | can be used by the other methods in the pyspextools module. Please note that the 18 | module does a direct translation of the OGIP files. No filtering of bad channels or 19 | other optimizations are performed. Bad channel filtering and optimizations can be 20 | found in pyspextools.data. 21 | 22 | The main method of the OGIPRegion class is read_region. We recommend using this 23 | method to read a combination of a spectrum and response matrix. This method will 24 | do the necessary checks to make sure that the final region object is consistent.
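As an illustration, a complete conversion of a single spectrum could look like the sketch below. The file
names are hypothetical, and the optional ``arffile`` argument can be left out if there is no separate
effective area file (see also the background fitting tutorial)::

    from pyspextools.io.ogip import OGIPRegion
    from pyspextools.io.dataset import Dataset

    # Read the OGIP spectrum, response matrix and effective area into a SPEX region
    oregion = OGIPRegion()
    oregion.read_region("source.pi", "source.rmf", arffile="source.arf")

    # Store the converted region in a dataset and write it to SPEX format
    spex_data = Dataset()
    spex_data.append_region(oregion, 1, 1)
    spex_data.write_all_regions("source.spo", "source.res")
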
25 | The OGIP class can be imported separately if needed:: 26 | 27 | import pyspextools.io.ogip as ogip 28 | oregion = ogip.OGIPRegion() 29 | 30 | The oregion instance above will be an extended version of the parent region class of 31 | pyspextools and will have the same functionality. 32 | 33 | The 'read_region' method can be called to read the OGIP spectra and responses and return 34 | a SPEX region object, which can be written to spo and res files. 35 | 36 | For some missions, the provided PHA file contains counts, but also a ``STAT_ERR`` column with non-Poisson errors. 37 | It is usually a good idea to use Poisson statistics in these cases. The ``read_region`` routine 38 | and other methods have an optional ``force_poisson`` flag to make sure that the error on the counts 39 | is the square root of the number of observed counts. 40 | 41 | A convenient script called :ref:`ogip2spex` is available to convert OGIP spectra 42 | and responses to SPEX format. 43 | 44 | pyspextools.io.tg: Importing Chandra grating files 45 | -------------------------------------------------- 46 | 47 | Spectra from the Chandra gratings are stored in a special format, called PHA2 format, 48 | and contain the spectra of the specific orders of the spectrum. The TGRegion class allows 49 | a user to import a Chandra grating spectrum into a SPEX region. 50 | 51 | Similar to the OGIP files, there is also a region class for grating spectra called TGRegion:: 52 | 53 | import pyspextools.io.tg as tg 54 | tgregion = tg.TGRegion() 55 | 56 | The 'read_region' method can be called to read the grating spectra and responses and return 57 | a SPEX region object, which can be written to spo and res files. 58 | 59 | A convenient script called :ref:`tg2spex` is available to convert one Chandra grating 60 | observation to SPEX format. 61 | 62 | Notes about converting PHA to SPEX format 63 | ----------------------------------------- 64 | 65 | The methods in this class perform an almost direct copy of the PHA spectrum files into 66 | SPEX format. The conversion of source and background spectra into (background subtracted) 67 | source and background rates with errors is relatively straightforward. The areascal and 68 | backscal values from the PHA file are used to scale the spectral rates and errors as they 69 | are stored in SPEX format. 70 | 71 | .. NOTE:: 72 | The method of subtracting the background from a spectrum provides wrong 73 | results in case of Poisson distributed data and C-statistics. It would be better to 74 | use a model for the background instead. Such alternatives will be implemented from 75 | SPEX version >=3.05.00. 76 | 77 | This method does not yet ignore bad channels. In pyspextools this is regarded as additional 78 | filtering of the data and is implemented in a different method (ref). Therefore, this 79 | method alone could give different results from the SPEX trafo program. 80 | 81 | 82 | Notes about converting RMF to SPEX format 83 | ----------------------------------------- 84 | 85 | While the conversion of PHA format spectra to SPEX format is relatively trivial, the 86 | conversion of response matrices is more interesting. The RMF format chooses to store 87 | response groups, which describe a particular redistribution function, per model energy 88 | bin. Since there can be multiple physical effects causing their own characteristic 89 | response function, there can be multiple groups associated with a particular model energy, 90 | each describing the redistribution function for a particular physical effect. 
91 | 92 | In SPEX, the groups are not organised per model energy bin, but per physical effect, which 93 | is called a response component. This allows more freedom in the binning that is needed for 94 | the group and thus the storage space required. The optimal range and binning needed for a sharp 95 | Gaussian response feature is obviously different from a broad-band power-law redistribution 96 | function. The latter can be binned in a much coarser way than the Gaussian case. 97 | 98 | However, the RMF files do not contain information about the physical response components. 99 | There are methods to detect similar groups in an RMF file and organise them in a component, 100 | but we did not add this method to this class. This class does a direct copy of all the groups 101 | of the RMF to one SPEX response component. If a more optimal re-arrangement of groups in 102 | components is desired, that should be done with other methods in the pyspextools module (ref). 103 | 104 | In some rare cases (e.g. ROSAT responses), the energy grid in the response matrix contains 105 | bins with a zero width. The Rmf class has a method called ``fix_energy_grid``, which just moves 106 | the bin boundary by a tiny amount. If this case is recognized by the ``check`` method, it will 107 | return ``2`` with a warning. The user can decide what to do with the warning. In ``ogip2spex``, 108 | the bins are fixed automatically while issuing a warning at the same time. 109 | 110 | **Update (version 0.4.0)**: There is a new type of RMF file that contains two MATRIX extensions 111 | in the FITS file that both model different components of the instrument response. Pyspextools can 112 | now convert these spectra, and in the resulting SPEX format the separate MATRIX extensions are 113 | translated into SPEX response components. 114 | 115 | .. _ogipregion_class: 116 | 117 | The OGIPRegion class description 118 | -------------------------------- 119 | 120 | .. autoclass:: pyspextools.io.ogip.OGIPRegion 121 | :members: 122 | 123 | The TGRegion class description 124 | ------------------------------ 125 | 126 | .. autoclass:: pyspextools.io.tg.TGRegion 127 | :members: 128 | 129 | The OGIP file classes 130 | --------------------- 131 | 132 | Pha spectra 133 | ''''''''''' 134 | 135 | .. autoclass:: pyspextools.io.pha.Pha 136 | :members: 137 | 138 | Pha2 grating spectra 139 | '''''''''''''''''''' 140 | 141 | .. autoclass:: pyspextools.io.pha2.Pha2 142 | :members: 143 | 144 | Rmf instrument response matrix 145 | '''''''''''''''''''''''''''''' 146 | 147 | .. autoclass:: pyspextools.io.rmf.Rmf 148 | :members: 149 | 150 | The Rmf consists of two parts. The first is an EBOUNDS extension, which is defined in the class below: 151 | 152 | .. autoclass:: pyspextools.io.rmf.RmfEbounds 153 | :members: 154 | 155 | And one or more response matrix extensions. The matrices are of type RmfMatrix and are kept in the list 156 | called ``matrix``. 157 | 158 | .. autoclass:: pyspextools.io.rmf.RmfMatrix 159 | :members: 160 | 161 | Arf mirror effective area 162 | ''''''''''''''''''''''''' 163 | 164 | .. autoclass:: pyspextools.io.arf.Arf 165 | :members: 166 | -------------------------------------------------------------------------------- /doc/source/reference/user.rst: -------------------------------------------------------------------------------- 1 | SPEX user model support 2 | ======================= 3 | 4 | .. automodule:: pyspextools.model 5 | 6 | The pyspextools module provides interface routines to create SPEX user models.
There is 7 | a helper class for the SPEX 'user' and 'musr' models, which provides the opportunity for 8 | users to import their own models into SPEX. 9 | 10 | .. _usermodel: 11 | 12 | SPEX user model interface module 13 | -------------------------------- 14 | 15 | This is a Python helper class for the development of user defined models in SPEX. 16 | The 'user' and 'musr' components in SPEX can calculate an additive and multiplicative 17 | model, respectively, based on a user program. See the SPEX manual for details: 18 | http://var.sron.nl/SPEX-doc/ 19 | 20 | The 'user' component can run an executable provided by the user. This module provides 21 | the class that helps to exchange the relevant information between SPEX and the user 22 | provided executable. Documentation for an example program is here: :ref:`user-example`. 23 | 24 | **SPEX output and executable input:** 25 | 26 | The SPEX user component will provide an energy grid and a list of model parameters to 27 | the executable through a text file, for example input-01-01.prm. Using this information, 28 | the user provided program should calculate the spectrum on the provided grid using the 29 | input parameters. 30 | 31 | **Executable output and SPEX input:** 32 | 33 | The spectrum (and the weights) can be written to the provided sener (and wener) arrays 34 | in this module. The write_spc function will write the calculated spectrum to an output 35 | file named, for example, output-01-01.spc. This file will be read again by the SPEX 36 | user model and the resulting values will be used in the SPEX fit. 37 | 38 | .. autoclass:: pyspextools.model.User 39 | :members: 40 | -------------------------------------------------------------------------------- /doc/source/tutorials.rst: -------------------------------------------------------------------------------- 1 | Pyspextools tutorials 2 | ===================== 3 | 4 | Using the pyspextools scripts 5 | ----------------------------- 6 | 7 | The pyspextools package contains a number of ready-to-use scripts to perform commonly used actions: 8 | 9 | .. toctree:: 10 | :maxdepth: 2 11 | 12 | tutorials/ogip2spex 13 | tutorials/tg2spex 14 | tutorials/ogipgenrsp 15 | tutorials/simres 16 | 17 | Using the pyspextools API 18 | ------------------------- 19 | 20 | The pyspextools classes can be used in python to do custom file format operations. Below we show examples 21 | of the use of the pyspextools API: 22 | 23 | .. toctree:: 24 | :maxdepth: 2 25 | 26 | tutorials/datasets 27 | tutorials/bkgfitting 28 | 29 | Using the SPEX user model 30 | ------------------------- 31 | 32 | The ``user`` and ``musr`` models in SPEX are designed to support custom models from users. Pyspextools contains 33 | a helper class to create these user models. Examples are shown below: 34 | 35 | .. toctree:: 36 | :maxdepth: 2 37 | 38 | tutorials/myusermodel 39 | tutorials/apec 40 | -------------------------------------------------------------------------------- /doc/source/tutorials/apec.rst: -------------------------------------------------------------------------------- 1 | .. _apec-example: 2 | 3 | .. highlight:: none 4 | 5 | APEC interface for SPEX 6 | ========================= 7 | 8 | This example program creates an interface between ATOMDB/APEC and 9 | the SPEX user model. Using this program, the APEC model can be used 10 | within SPEX. 
11 | 12 | Dependencies 13 | ------------ 14 | 15 | Before you start, make sure the following things are installed or set: 16 | 17 | - Install numpy 18 | - Install pyspextools (pip install pyspextools) 19 | - Install pyatomdb (pip install pyatomdb) and import this into python once before use to install ATOMDB. 20 | - Set the ATOMDB environment variable to your local ATOMDB installation (see also the ATOMDB installation instructions):: 21 | 22 | linux:~> export ATOMDB=/path/to/my/atomdb (bash shell) 23 | linux:~> setenv ATOMDB /path/to/my/atomdb (c shell) 24 | 25 | For more information about installing ATOMDB and pyatomdb see `atomdb.org `_ 26 | 27 | .. note:: pyATOMDB only supports Python 3 from version 0.6.0 onwards. If you are using Python 2, 28 | you need to install an older pyATOMDB version. 29 | 30 | .. note:: This example apec.py script has been updated to work with pyATOMDB 0.8.0 and above. 31 | 32 | Usage 33 | ----- 34 | 35 | Use this program directly in the SPEX user model. Make the apec.py file executable before you start SPEX:: 36 | 37 | linux:~> chmod u+x apec.py 38 | 39 | If the apec.py is located in the working directory, it can be added to the user model easily:: 40 | 41 | SPEX> par 1 1 exec av ./apec.py 42 | 43 | If apec.py is located somewhere else, provide the full path:: 44 | 45 | SPEX> par 1 1 exec av /path/to/apec.py 46 | 47 | The ATOMDB environment variable needs to be set to the ATOMDB installation at all times. 48 | Please note that by default, ATOMDB uses solar abundances by Anders & Grevesse (1989). 49 | 50 | The SPEX user model also needs the number of parameters in the model. For APEC this needs to be set to 30:: 51 | 52 | SPEX> par 1 1 npar v 30 53 | 54 | The APEC model is now ready for use. See the table below for the parameter information. 
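Inside the apec.py script itself, these SPEX parameters are exchanged through the ``pyspextools.model.User``
helper class and appear as elements of the ``usr.par`` array listed in the table. The fragment below is only
an illustrative sketch of that mapping; it assumes that the ``User`` object provides the parameters and energy
grid on initialisation and that the spectrum is returned through the ``sener`` array, as described in the
user model reference::

    import pyspextools.model

    usr = pyspextools.model.User()   # obtain the energy grid and parameters provided by SPEX

    norm = usr.par[0]                # p01: normalisation (photons m^-3 s^-1 keV^-1)
    temperature = usr.par[1]         # p02: temperature (keV)
    iron = usr.par[25]               # p26: Fe abundance

    # ... calculate the APEC spectrum on the provided energy grid and
    # store it in the usr.sener (and usr.wener) arrays ...

    usr.write_spc()                  # write the spectrum to the output file read by SPEX
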
55 | 56 | Parameters 57 | ---------- 58 | 59 | User model parameter translation table: 60 | 61 | +------+---------------+-----------------------------------------+ 62 | |Param |Variable |Corresponding value and unit | 63 | |in |in | | 64 | |SPEX |script | | 65 | +======+===============+=========================================+ 66 | |p01 | usr.par[0] |Normalisation (Photons m^-3 s^-1 keV^-1) | 67 | +------+---------------+-----------------------------------------+ 68 | |p02 | usr.par[1] |Temperature (keV) | 69 | +------+---------------+-----------------------------------------+ 70 | |p03 | usr.par[2] | | 71 | +------+---------------+-----------------------------------------+ 72 | |p04 | usr.par[3] | | 73 | +------+---------------+-----------------------------------------+ 74 | |p05 | usr.par[4] | | 75 | +------+---------------+-----------------------------------------+ 76 | |p06 | usr.par[5] |06 C | 77 | +------+---------------+-----------------------------------------+ 78 | |p07 | usr.par[6] |07 N | 79 | +------+---------------+-----------------------------------------+ 80 | |p08 | usr.par[7] |08 O | 81 | +------+---------------+-----------------------------------------+ 82 | |p09 | usr.par[8] |09 F | 83 | +------+---------------+-----------------------------------------+ 84 | |p10 | usr.par[9] |10 Ne | 85 | +------+---------------+-----------------------------------------+ 86 | |p11 | usr.par[10] |11 Na | 87 | +------+---------------+-----------------------------------------+ 88 | |p12 | usr.par[11] |12 Mg | 89 | +------+---------------+-----------------------------------------+ 90 | |p13 | usr.par[12] |13 Al | 91 | +------+---------------+-----------------------------------------+ 92 | |p14 | usr.par[13] |14 Si | 93 | +------+---------------+-----------------------------------------+ 94 | |p15 | usr.par[14] |15 P | 95 | +------+---------------+-----------------------------------------+ 96 | |p16 | usr.par[15] |16 S | 97 | +------+---------------+-----------------------------------------+ 98 | |p17 | usr.par[16] |17 Cl | 99 | +------+---------------+-----------------------------------------+ 100 | |p18 | usr.par[17] |18 Ar | 101 | +------+---------------+-----------------------------------------+ 102 | |p19 | usr.par[18] |19 K | 103 | +------+---------------+-----------------------------------------+ 104 | |p20 | usr.par[19] |20 Ca | 105 | +------+---------------+-----------------------------------------+ 106 | |p21 | usr.par[20] |21 Sc | 107 | +------+---------------+-----------------------------------------+ 108 | |p22 | usr.par[21] |22 Ti | 109 | +------+---------------+-----------------------------------------+ 110 | |p23 | usr.par[22] |23 V | 111 | +------+---------------+-----------------------------------------+ 112 | |p24 | usr.par[23] |24 Cr | 113 | +------+---------------+-----------------------------------------+ 114 | |p25 | usr.par[24] |25 Mn | 115 | +------+---------------+-----------------------------------------+ 116 | |p26 | usr.par[25] |26 Fe | 117 | +------+---------------+-----------------------------------------+ 118 | |p27 | usr.par[26] |27 Co | 119 | +------+---------------+-----------------------------------------+ 120 | |p28 | usr.par[27] |28 Ni | 121 | +------+---------------+-----------------------------------------+ 122 | |p29 | usr.par[28] |29 Cu | 123 | +------+---------------+-----------------------------------------+ 124 | |p30 | usr.par[29] |30 Zn | 125 | +------+---------------+-----------------------------------------+ 126 | 
-------------------------------------------------------------------------------- /doc/source/tutorials/bkgfitting.rst: -------------------------------------------------------------------------------- 1 | .. _bkgfitting: 2 | 3 | How to create SPEX files for background fitting 4 | =============================================== 5 | 6 | In this example, we are using pyspextools to build a .spo and .res file suitable for 7 | fitting instrumental and particle background simultaneously with fitting the source 8 | spectrum. We do this by reading in the source spectrum twice. During the first read, 9 | we read in both the RMF and ARF file. This is to model the photons from the sky. 10 | During the second read, we just read in the source spectrum and RMF. This way, 11 | the effective area of the mirror is not applied, so only the redistribution 12 | function is used. 13 | 14 | The files used in this example are: 15 | * M1_1.pi : The source spectrum from XMM-Newton MOS1. 16 | * M1_1.rmf : The response matrix from XMM-Newton MOS1. 17 | * M1_1.arf : The effective area from XMM-Newton MOS1. 18 | 19 | First import the pyspextools classes :ref:`ogipregion_class` and :ref:`dataset_class` 20 | needed for this example:: 21 | 22 | from pyspextools.io.ogip import OGIPRegion 23 | from pyspextools.io.dataset import Dataset 24 | 25 | Reading the spectrum the first time (for source modeling). The ``read_region`` 26 | function automatically converts the region to SPEX format in the background:: 27 | 28 | m1_source = OGIPRegion() 29 | m1_source.read_region("M1_1.pi", "M1_1.rmf", arffile="M1_1.arf") 30 | 31 | Reading the spectrum for the second time (for instrumental and particle background modeling):: 32 | 33 | m1_back = OGIPRegion() 34 | m1_back.read_region("M1_1.pi", "M1_1.rmf") 35 | 36 | Now we have two region objects. The first one for the source and the second one for the background. 37 | To model the background separately and add the background model to the source model when fitting, 38 | we need to set up two sectors (model sets). The first sector will contain the model components for the 39 | X-ray photons from the sky and the second sector the background models. The models themselves will 40 | be defined later in SPEX. But, we need to configure the response file such that the spectrum from 41 | sector 2 is folded with the RMF only and then applied to the spectrum in region 1. We can set this 42 | when combining our regions into a SPEX dataset:: 43 | 44 | spex_data = Dataset() 45 | spex_data.append_region(m1_source, 1, 1) 46 | spex_data.append_region(m1_back, 2, 1) 47 | 48 | The second and third arguments of ``append_region`` contain the sector number and region number respectively. 49 | Once we have appended the regions to the SPEX dataset, we can write it to file:: 50 | 51 | spex_data.write_all_regions("M1_example.spo", "M1_example.res", overwrite=True) 52 | 53 | You should now have a .spo and .res file that is ready for particle background fitting. 54 | -------------------------------------------------------------------------------- /doc/source/tutorials/datasets.rst: -------------------------------------------------------------------------------- 1 | Managing datasets in pyspextools 2 | ================================ 3 | 4 | The best starting point to manipulate SPEX files is to use the Dataset class in pyspextools. A dataset can be a 5 | collection of several spectrum-response combinations, which are called regions. This way, multiple related spectra can 6 | be stored in a spo and res file. 
7 | 8 | The different spectra and responses inside a dataset can be related to each other in multiple ways. To distinguish 9 | between different cases, each spectrum-response combination can have a sector and region number. These numbers are 10 | defined as follows: 11 | 12 | **Sector** 13 | A sector number refers to the model that needs to be associated to the dataset. Suppose one has spectra from two 14 | areas of the sky that share a common absorption component, but have different emission components. One would like 15 | to fit these spectra simultaneously with coupled absorption components. The SPEX solution is to assign different 16 | sector numbers to the different spectra, such that SPEX applies the right models to the right dataset. 17 | 18 | **Region** 19 | A region number identifies the dataset that a model applies to. Usually the region number just iterates over the 20 | number of spectra, so four spectra are just counted as 1, 2, 3 and 4. 21 | 22 | However, in some cases it can be helpful to point different sectors to one region. For example, a particle background 23 | model is not supposed to be multiplied by the effective area, but just the response. Then this model needs to be added 24 | to the model spectrum of the previous region. In that case, two spectrum-response combinations are required (one with 25 | and one without ARF), where each combination obtains its own sector number (1,2), but only a single region number(1). 26 | See the `SPEX Cookbook `_ for more information. 27 | 28 | Example use of the dataset class 29 | -------------------------------- 30 | 31 | Let's perform a simple operation to read a few existing spo and res files and combine them into one dataset. 32 | First, start python and then import the pyspextools.io module:: 33 | 34 | >>> import pyspextools.io 35 | 36 | The pyspextools.io module contains a Dataset class that needs to be initiated first:: 37 | 38 | >>> data = pyspextools.io.Dataset() 39 | 40 | The Dataset class contains methods to read spo and res files into a region. Suppose we have two spectra from the 41 | RGS instrument aboard XMM-Newton. We have two sets of spectra and responses: RGS1.spo, RGS1.res, RGS2.spo and RGS2.res. 42 | We can read them into the dataset easily using the read_all_regions method:: 43 | 44 | >>> data.read_all_regions("RGS1.spo","RGS1.res") 45 | >>> data.read_all_regions("RGS2.spo","RGS2.res") 46 | 47 | This method reads all regions within a file and adds it to the dataset. 
In this case, the spectral files only contain 48 | one region, so by reading them in both, we have now two parts of the dataset:: 49 | 50 | >>> data.show() 51 | =========================================================== 52 | Part 1 53 | =========================================================== 54 | Sector: 1 => Region: 1 55 | Label: 56 | -------------------- Spectrum ------------------------- 57 | Original spo file : RGS1.spo 58 | Number of data channels : 2831 59 | Data energy range : 0.32 - 2.41 keV 60 | Exposure time mean : 104813.48 s 61 | -------------------- Response ------------------------- 62 | Original response file name : RGS1.res 63 | Number of data channels in response : 2831 64 | Number of response components : 8 65 | 66 | =========================================================== 67 | Part 2 68 | =========================================================== 69 | Sector: 1 => Region: 2 70 | Label: 71 | -------------------- Spectrum ------------------------- 72 | Original spo file : RGS2.spo 73 | Number of data channels : 2728 74 | Data energy range : 0.33 - 2.64 keV 75 | Exposure time mean : 104409.24 s 76 | -------------------- Response ------------------------- 77 | Original response file name : RGS2.res 78 | Number of data channels in response : 2728 79 | Number of response components : 8 80 | 81 | 82 | 83 | Suppose one wants to apply a different model for RGS2 than for RGS1 and put RGS2 in a different sector, then we can do 84 | that as follows:: 85 | 86 | >>> data.assign_sector(2,2) 87 | >>> data.show() 88 | =========================================================== 89 | Part 1 90 | =========================================================== 91 | Sector: 1 => Region: 1 92 | Label: 93 | -------------------- Spectrum ------------------------- 94 | Original spo file : RGS1.spo 95 | Number of data channels : 2831 96 | Data energy range : 0.32 - 2.41 keV 97 | Exposure time mean : 104813.48 s 98 | -------------------- Response ------------------------- 99 | Original response file name : RGS1.res 100 | Number of data channels in response : 2831 101 | Number of response components : 8 102 | 103 | =========================================================== 104 | Part 2 105 | =========================================================== 106 | Sector: 2 => Region: 2 107 | Label: 108 | -------------------- Spectrum ------------------------- 109 | Original spo file : RGS2.spo 110 | Number of data channels : 2728 111 | Data energy range : 0.33 - 2.64 keV 112 | Exposure time mean : 104409.24 s 113 | -------------------- Response ------------------------- 114 | Original response file name : RGS2.res 115 | Number of data channels in response : 2728 116 | Number of response components : 8 117 | 118 | In the data.show() command, the part with the RGS2 spectrum (Part 2) has now sector 2 assigned to it. We can now save 119 | the created structure to one spo and res file:: 120 | 121 | >>> data.write_all_regions("RGS.spo","RGS.res") 122 | 123 | The dataset has been successfully written to RGS.spo and RGS.res. 124 | -------------------------------------------------------------------------------- /doc/source/tutorials/myusermodel.rst: -------------------------------------------------------------------------------- 1 | .. _user-example: 2 | 3 | User model example 4 | ================== 5 | 6 | The myusermodel.py file in the examples directory contains a basic program 7 | that interacts with the user or musr model in SPEX. 
It reads the input energy 8 | grid and model parameters from SPEX, calculates a spectrum or transmission, and 9 | returns the calculated values by writing a text file. The user can adapt this 10 | example to his/her own needs. 11 | 12 | 13 | **Imports** 14 | 15 | The following imports are mandatory:: 16 | 17 | import math # Import math module 18 | import numpy # Import numpy module 19 | from pyspextools.model import User # Import the user module from pyspextools.model 20 | 21 | 22 | **Initialisation** 23 | 24 | The user module from pyspextools.model needs to be initialized first. This will read in 25 | the input file from SPEX automatically and initialize the arrays:: 26 | 27 | usr=User() 28 | 29 | 30 | **Calculation** 31 | 32 | This step needs the creativity of the user. The module reference page :ref:`usermodel` provides an overview of the 33 | parameters of the user module that can be used in the program. At least, the output spectrum or transmission 34 | needs to be calculated and written to the 'usr.sener' output array. This array will be passed back to SPEX after 35 | the calculation:: 36 | 37 | # For each energy bin in the provided energy grid 38 | # calculate the spectrum in photons/s/bin: 39 | for i in numpy.arange(usr.neg): 40 | usr.sener[i]=1.- usr.par[0]*math.exp(-usr.eg[i]) 41 | usr.wener[i]=0. 42 | 43 | In this case, a simple exponential function is returned as an example, but this 44 | function can be as simple or complicated as needed. The unit for the 'usr.sener' array is 45 | 'photons/s/bin' and this example function also uses the first model input parameter 'user.par[0]' 46 | from SPEX and the bin energy `usr.eg` (see :ref:`usermodel` for a full description). 47 | 48 | **NOTE:** There is a difference between the index of the array and the parameter number in SPEX. 49 | The parameter number in SPEX is equal to the array index + 1, since Python starts counting 50 | at 0 and SPEX at 1. 51 | 52 | The 'usr.wener' array can be used to optimize the calculations in SPEX. 53 | If Delta E = average photon energy within the bin (keV) minus the bin 54 | centroid then wener = sener * Delta E. 55 | 56 | 57 | **Write output** 58 | 59 | The result needs to be handed back to SPEX through a text file, which can be 60 | written by calling the 'usr.write_spc()' function:: 61 | 62 | # Write the calculated spectrum to the output file: 63 | usr.write_spc() 64 | 65 | Usage 66 | ----- 67 | 68 | When the program is ready, copy it to a convenient location and make it 69 | executable using the shell command:: 70 | 71 | linux:~> chmod u+x myusermodel.py 72 | 73 | In SPEX, this program can be loaded into the 'user' or 'musr' model using 74 | the following commands:: 75 | 76 | SPEX> com user 77 | SPEX> par 1 1 exec av ./myusermodel.py 78 | 79 | The './' is used if the program is in the current working directory. Otherwise, 80 | please use the full path to the program. 81 | 82 | The 'user' and 'musr' model also needs the number of parameters that the 83 | model needs. Set it using:: 84 | 85 | SPEX> par 1 1 npar v 1 86 | 87 | You can set your model parameters in a similar way. Have fun! 88 | 89 | 90 | 91 | 92 | 93 | -------------------------------------------------------------------------------- /doc/source/tutorials/ogip2spex.rst: -------------------------------------------------------------------------------- 1 | .. 
_ogip2spex: 2 | 3 | OGIP2spex - Convert OGIP files to SPEX format 4 | ============================================= 5 | 6 | OGIP2spex is a script that converts OGIP spectra and responses to SPEX format, similar to the trafo program in SPEX. 7 | 8 | The ogip2spex works with command-line arguments to gather all the input, contrary to trafo, which asks questions 9 | interactively. When pyspextools is installed, the ogip2spex program can show an overview of the available command-line 10 | arguments directly from the command line:: 11 | 12 | ogip2spex -h 13 | 14 | A full overview of the arguments is given below in the section `Command-line arguments `_. 15 | 16 | .. NOTE:: 17 | By default ogip2spex creates spo files with the 'Exp_Rate' column. This column was added in SPEX version 3.05.00. 18 | The Exp_Rate column contains the ratio between the backscale values of the source and background spectrum and is 19 | used to properly simulate spectra. If you need to simulate spectra including background, then make sure the Exp_Rate 20 | column is created. 21 | 22 | For some missions, the provided PHA file contains counts, but also a ``STAT_ERR`` column with non-Poisson errors. 23 | It is usually a good idea to use Poisson statistics in these cases. Ogip2spex has the ``--force-poisson`` 24 | flag to make sure that the error on the counts is the square root of the number of observed counts. 25 | 26 | Example 27 | ------- 28 | 29 | .. highlight:: none 30 | 31 | Suppose one would like to convert a set of OGIP spectra from the MOS instrument aboard XMM-Newton. The XMM-Newton SAS 32 | software has provided a *M1.pi* file with the source spectrum, *M1_bkg.pi* with the source background, *M1.rmf* 33 | containing the response matrix and *M1.arf* with the effective area. These files can be converted into SPEX format 34 | within one go:: 35 | 36 | linux:~> ogip2spex --phafile M1.pi --bkgfile M1_bkg.pi --rmffile M1.rmf --arffile M1.arf --spofile M1.spo --resfile M1.res 37 | 38 | The output starts by listing and checking the input OGIP files:: 39 | 40 | ================================== 41 | This is ogip2spex version 0.2 42 | ================================== 43 | 44 | Input PHA file: M1.pi 45 | Input Background file: M1_bkg.pi 46 | Input Response file: M1.rmf 47 | Input Effective area file: M1.arf 48 | Check OGIP source spectrum... OK 49 | Check OGIP background spectrum... OK 50 | Check OGIP response matrix... OK 51 | Check OGIP effective area file... OK 52 | 53 | Then the program converts the OGIP files to SPEX format:: 54 | 55 | Convert OGIP spectra to spo format... WARNING Lowest channel boundary energy is 0. Set to 1E-5 to avoid problems. 56 | OK 57 | Convert OGIP response to res format... OK 58 | 59 | In this case, the lowest energy boundary of the first channel of the MOS spectrum is 0, which SPEX does not like. Since 60 | this bin is not used, it does not hurt to change this zero into a small number (smaller than the upper boundary), in 61 | this case 1E-5. 62 | 63 | The program continues with detecting bad channels in the spectrum and channels with zero response in the response 64 | matrix:: 65 | 66 | Identify bad channels in spectrum and response matrix and re-index matrix... OK 67 | Number of good channels: 795 68 | Number of bad channels: 5 69 | Removing bad channels from spectral region... 
OK 70 | Number of original groups: 2394 71 | Number of zero-response groups: 0 72 | Number of original response elements: 1000056 73 | Number of bad response elements: 0 74 | Removing bad channels from response matrix... OK 75 | 76 | The removal of bad channels is default behavior. In cases where the bad channels should be kept, one can add the 77 | '--keep-badchannels' argument to the call to ogip2spex. 78 | 79 | At the end of the program, the spo and res files are saved:: 80 | 81 | Writing SPO to file: M1.spo 82 | Writing RES to file: M1.res 83 | 84 | One can overwrite existing files by adding the '--overwrite' option to the ogip2spex call. 85 | 86 | This spo file has an 'Exp_Rate' column. To generate a spo file without such a column, add '--no-exprate' to the 87 | ogip2spex call. 88 | 89 | By default, ogip2spex shows colored output for warnings, errors and OKs. If the colors are hard for you to see, use the 90 | '--no-color' argument to show the output without colors. 91 | 92 | .. highlight:: python 93 | 94 | .. _ogip2spex_commandline: 95 | 96 | Command-line arguments 97 | ---------------------- 98 | 99 | .. argparse:: 100 | :filename: scripts/ogip2spex 101 | :func: ogip2spex_arguments 102 | :prog: ogip2spex 103 | -------------------------------------------------------------------------------- /doc/source/tutorials/ogipgenrsp.rst: -------------------------------------------------------------------------------- 1 | .. _ogipgenrsp: 2 | 3 | OGIPGENRSP - Create dummy response files 4 | ======================================== 5 | 6 | Ogipgenrsp is a script to generate a dummy (Gaussian) response file with a user-provided effective area, energy scale 7 | and energy resolution. This is particularly helpful when creating responses for possible future missions. The resulting 8 | response file is in OGIP RSP format. If you need a spectrum and response in SPEX format based on this 9 | new matrix, you can run the :ref:`simres` script in this package with the RSP file from this script as input. 10 | 11 | Since the bin size of the response depends on the input resolution, the original binning of the ARF file may not be 12 | appropriate. Therefore, this script linearly interpolates the energies from the input ARF array to the new output grid. 13 | 14 | - Please note that this script creates a simple diagonal matrix with a Gaussian distribution function. Although this 15 | is usually a good approximation, X-ray detectors will have a much more complicated response in reality. 16 | 17 | - Please also note that this script creates a square matrix, which is usually not optimal. Optimal binning, as 18 | explained in `Kaastra & Bleeker 2016 `_, is still to 19 | be implemented. 20 | 21 | The parameters of ogipgenrsp can be shown on the command line by the '-h' flag:: 22 | 23 | ogipgenrsp -h 24 | 25 | Energy dependent resolution 26 | --------------------------- 27 | 28 | Instead of a constant spectral resolution (FWHM) for the entire band, it is also possible to make the resolution 29 | linearly depend on the energy. The linear dependence is implemented in data/response.py and integrated in the 30 | gaussrsp function. The resulting FWHM(E) for energy E in keV is calculated as follows:: 31 | 32 | FWHM(E) = FWHM(@1 keV) + DELTA_FWHM * (E - 1.0) 33 | 34 | The FWHM(@1 keV) is set by the --resolution flag (in eV) and DELTA_FWHM is set by the --resgradient flag (in eV per keV). 35 | 36 | 37 | .. 
highlight:: none 38 | 39 | Example 40 | ------- 41 | 42 | In the example below, we create a new matrix for new mission with a spectral resolution of 1 eV and an energy range 43 | of 0.1 to 10 keV:: 44 | 45 | linux:~> ogipgenrsp --arffile athena.arf --resolution 1.0 --range 0.1:10 --rspfile newmission.rsp --overwrite 46 | ================================== 47 | This is ogipgenrsp version 0.2.8 48 | ================================== 49 | 50 | Reading ARF file... OK 51 | Reading ARF file... OK 52 | Number of energy bins: 49500 53 | Number of channels per group: 50 54 | Calculate response matrix... OK 55 | Check the created RSP matrix... OK 56 | Write RSP to file... OK 57 | 58 | The new matrix will contain 49500 bins and each response group will have 50 response elements for which the Gaussian 59 | response is calculated. 60 | 61 | The resulting response file is in OGIP RSP format. If you need a spectrum and response in SPEX format based on this 62 | new matrix, you can run the :ref:`simres` script in this package with the RSP file from this script as input. 63 | 64 | Command-line arguments 65 | ---------------------- 66 | 67 | .. argparse:: 68 | :filename: scripts/ogipgenrsp 69 | :func: genrsp_arguments 70 | :prog: ogipgenrsp 71 | -------------------------------------------------------------------------------- /doc/source/tutorials/simres.rst: -------------------------------------------------------------------------------- 1 | .. _simres: 2 | 3 | SIMRES - Create simulation files based on existing response 4 | =========================================================== 5 | 6 | Simres is a script to create a SPEX format spo and res file from just an input response file, optionally with an 7 | effective area file (ARF) and background spectrum. This is very useful to create spo and res files 8 | for future missions, where only responses and effective area files are provided. The spo and res files can be used 9 | to simulate spectra in SPEX. 10 | 11 | The parameters of simres can be shown on the command line by the '-h' flag:: 12 | 13 | simres -h 14 | 15 | Example 16 | ------- 17 | 18 | .. highlight:: none 19 | 20 | Suppose we have a response matrix from a future instrument like X-IFU aboard Athena called athena_xifu_A.rsp, then 21 | we can create a spo and res file with the simres command, and call them xifu.spo and xifu.res:: 22 | 23 | linux:~> simres --rmffile athena_xifu_A.rsp --spofile xifu.spo --resfile xifu.res 24 | ================================== 25 | This is simres version 0.2.8 26 | ================================== 27 | 28 | Read RMF response matrix... WARNING This is an RSP file with the effective area included. 29 | Do not read an ARF file, unless you know what you are doing. 30 | OK 31 | Check OGIP source spectrum... OK 32 | Check OGIP response matrix... OK 33 | Convert OGIP spectra to spo format... OK 34 | Convert OGIP response to res format... OK 35 | Identify bad channels in spectrum and response matrix and re-index matrix... OK 36 | Number of good channels: 29600 37 | Number of bad channels: 0 38 | Removing bad channels from spectral region... OK 39 | Number of original groups: 29600 40 | Number of zero-response groups: 0 41 | Number of original response elements: 8898275 42 | Number of bad response elements: 0 43 | Removing bad channels from response matrix... OK 44 | Writing SPO to file: xifu.spo 45 | Writing RES to file: xifu.res 46 | 47 | In this case, an RSP file is provided, which should contain the effective area already. 
The program issues a warning, 48 | because usually also separate arf files are provided. This warning cautions the user to not apply the effective area 49 | correction twice. 50 | 51 | The output spectrum is obviously wrong with a constant count rate across the entire band. The idea is to set up a new 52 | model in SPEX and simulate a new spectrum with this response matrix to replace the dummy spectrum with something more 53 | realistic. 54 | 55 | .. highlight:: python 56 | 57 | Command-line arguments 58 | ---------------------- 59 | 60 | .. argparse:: 61 | :filename: scripts/simres 62 | :func: simres_arguments 63 | :prog: simres 64 | -------------------------------------------------------------------------------- /doc/source/tutorials/tg2spex.rst: -------------------------------------------------------------------------------- 1 | .. _tg2spex: 2 | 3 | TG2Spex - Convert Chandra grating data 4 | ====================================== 5 | 6 | Tg2spex is a script to convert spectra from Chandra grating observations to SPEX format. Tg2spex works 7 | with input flags from the command line. The '-h' flag shows the help of the tg2spex command:: 8 | 9 | linux:~> tg2spex -h 10 | 11 | A full overview of the arguments is given below in the section `Command-line arguments `_. 12 | 13 | Example 14 | ------- 15 | 16 | .. highlight:: none 17 | 18 | Tg2spex detects the standard file names in a directory, so providing the path to the directory 19 | should be enough. In addition, flags can be provided to, for example, overwrite existing spo and res 20 | files:: 21 | 22 | linux:~> tg2spex --overwrite /data/user/tgcat/obs_11387_tgid_3191 23 | ================================== 24 | This is tg2spex version 0.2.1 25 | ================================== 26 | 27 | Read source spectrum... OK 28 | Detected grating: Chandra LEG. 29 | Combining orders of the spectra... OK 30 | Convert to spo and write spo file... OK 31 | Reading response for order... -1 OK 32 | Reading response for order... -2 OK 33 | Reading response for order... -3 OK 34 | Reading response for order... -4 OK 35 | Reading response for order... -5 OK 36 | Reading response for order... -6 OK 37 | Reading response for order... -7 OK 38 | Reading response for order... -8 OK 39 | Reading response for order... 1 OK 40 | Reading response for order... 2 OK 41 | Reading response for order... 3 OK 42 | Reading response for order... 4 OK 43 | Reading response for order... 5 OK 44 | Reading response for order... 6 OK 45 | Reading response for order... 7 OK 46 | Reading response for order... 8 OK 47 | Reading effective area for order... -1 OK 48 | Reading effective area for order... -2 OK 49 | Reading effective area for order... -3 OK 50 | Reading effective area for order... -4 OK 51 | Reading effective area for order... -5 OK 52 | Reading effective area for order... -6 OK 53 | Reading effective area for order... -7 OK 54 | Reading effective area for order... -8 OK 55 | Reading effective area for order... 1 OK 56 | Reading effective area for order... 2 OK 57 | Reading effective area for order... 3 OK 58 | Reading effective area for order... 4 OK 59 | Reading effective area for order... 5 OK 60 | Reading effective area for order... 6 OK 61 | Reading effective area for order... 7 OK 62 | Reading effective area for order... 8 OK 63 | Write combined res file... OK 64 | 65 | In this example, the directory contains a Chandra LETG spectrum with 16 orders. In the first step 66 | the spectra from the pha2 file are combined and saved. 
After that, the responses and effective areas 67 | are read in and combined into a single leg.spo and leg.res. 68 | 69 | .. highlight:: python 70 | 71 | 72 | .. _tg2spex_commandline: 73 | 74 | Command-line arguments 75 | ---------------------- 76 | 77 | .. argparse:: 78 | :filename: scripts/tg2spex 79 | :func: tg2spex_arguments 80 | :prog: tg2spex -------------------------------------------------------------------------------- /examples/apec.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """ 4 | This example program creates an interface between ATOMDB and 5 | the SPEX user model. Using this program, the APEC model can be used 6 | within SPEX. 7 | 8 | This model needs the pyatomdb module from atomdb.org and numpy. 9 | Also set the ATOMDB environment variable to a local ATOMDB installation. 10 | This file is designed to work with Pyatomdb 0.8.0 and above. 11 | """ 12 | 13 | # Stuff to import for compatibility between python 2 and 3 14 | from __future__ import print_function 15 | from __future__ import unicode_literals 16 | from __future__ import division 17 | from __future__ import absolute_import 18 | from future import standard_library 19 | 20 | import sys 21 | import numpy 22 | from pyspextools.model import User 23 | 24 | standard_library.install_aliases() 25 | 26 | try: 27 | import pyatomdb 28 | except ImportError: 29 | print("This SPEX user model depends on the pyATOMDB module. Please install" 30 | "pyATOMDB through the command 'pip install pyatomdb'. ") 31 | print("Note that from version 0.6.0 onwards pyATOMDB only supports Python 3.") 32 | 33 | """ 34 | User model parameter translation table: 35 | p01 usr.par[0] Normalisation Photons m^-3 s^-1 keV^-1 36 | p02 usr.par[1] Temperature in keV 37 | p03 usr.par[2] - 38 | p04 usr.par[3] - 39 | p05 usr.par[4] - 40 | p06 usr.par[5] 06 C 41 | p07 usr.par[6] 07 N 42 | p08 usr.par[7] 08 O 43 | p09 usr.par[8] 09 F 44 | p10 usr.par[9] 10 Ne 45 | p11 usr.par[10] 11 Na 46 | p12 usr.par[11] 12 Mg 47 | p13 usr.par[12] 13 Al 48 | p14 usr.par[13] 14 Si 49 | p15 usr.par[14] 15 P 50 | p16 usr.par[15] 16 S 51 | p17 usr.par[16] 17 Cl 52 | p18 usr.par[17] 18 Ar 53 | p19 usr.par[18] 19 K 54 | p20 usr.par[19] 20 Ca 55 | p21 usr.par[20] 21 Sc 56 | p22 usr.par[21] 22 Ti 57 | p23 usr.par[22] 23 V 58 | p24 usr.par[23] 24 Cr 59 | p25 usr.par[24] 25 Mn 60 | p26 usr.par[25] 26 Fe 61 | p27 usr.par[26] 27 Co 62 | p28 usr.par[27] 28 Ni 63 | p29 usr.par[28] 29 Cu 64 | p30 usr.par[29] 30 Zn 65 | """ 66 | 67 | 68 | def main(): 69 | # Initialize the IO class. The input file from SPEX will be read automatically. 
70 | usr = User() 71 | 72 | if usr.npar != 30: 73 | print("Please set 'npar' parameter to 30 for this model") 74 | sys.exit() 75 | 76 | # Create a pyatomdb spectrum session 77 | data = pyatomdb.spectrum.CIESession() 78 | 79 | # Set the energy grid for the calculation 80 | # Pyatomdb expects bin boundaries (n+1) 81 | # SPEX has an array of upper boundary, 82 | # so we need to calculate the lower boundary 83 | # for the first bin: 84 | ebin = numpy.array([usr.egb[0] - usr.deg[0]]) 85 | ebin = numpy.append(ebin, usr.egb) 86 | 87 | # And set the energy bins 88 | data.set_response(ebin, raw=True) 89 | 90 | # Set abundance table 91 | data.set_abundset("AG89") # Anders & Grevesse (1989) 92 | 93 | # Set atomic number array 94 | atom = numpy.arange(30) + 1 95 | 96 | # Set model parameters 97 | norm = 1E+14 * usr.par[0] # Photons cm^-3 s^-1 keV^-1 98 | # APEC norm is 1E-14 times emission measure 99 | temp = usr.par[1] # Temperature in keV 100 | 101 | # Parameters with index 2,3,4 are not used 102 | 103 | # Set abundances for C to Zn 104 | for a in numpy.arange(25) + 5: 105 | data.set_abund(atom[a], usr.par[a]) 106 | 107 | # Calculate the APEC spectrum 108 | aspec = data.return_spectrum(temp, teunit='keV') 109 | 110 | # Write the calculated spectrum to the sener array: 111 | for i in numpy.arange(usr.neg): 112 | usr.sener[i] = norm * aspec[i] 113 | 114 | # Write the calculated spectrum to the output file: 115 | usr.write_spc() 116 | 117 | 118 | main() 119 | -------------------------------------------------------------------------------- /examples/myusermodel.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import math # Import math module 4 | import numpy # Import numpy module 5 | from pyspextools.model import User # Import user from pyspextools.model 6 | 7 | 8 | def main(): 9 | # Initialize the user class. The input file from SPEX will be read automatically. 10 | usr = User() 11 | 12 | # For each energy bin in the provided energy grid 13 | # calculate the spectrum in photons/s/bin: 14 | for i in numpy.arange(usr.neg): 15 | usr.sener[i] = 1. - usr.par[0] * math.exp(-usr.eg[i]) 16 | usr.wener[i] = 0. 
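        # usr.wener is simply set to zero here; optionally it can hold
        # usr.sener[i] * (average photon energy in the bin - bin centroid)
        # so that SPEX can optimize its convolution (see the user model tutorial).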
17 | 18 | # Write the calculated spectrum to the output file: 19 | usr.write_spc() 20 | 21 | 22 | main() 23 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "pyspextools" 7 | dependencies = [ 8 | "astropy", "numpy", "matplotlib", "sphinx", "setuptools>=40.8.0" 9 | ] 10 | authors = [ 11 | { name="Jelle de Plaa", email="j.de.plaa@sron.nl" }, 12 | ] 13 | description = "SPEX Python tools" 14 | readme = "README.md" 15 | requires-python = ">=3.8" 16 | classifiers = [ 17 | "Programming Language :: Python :: 3", 18 | "Operating System :: OS Independent", 19 | ] 20 | license = {text = "Apache-2.0"} 21 | dynamic = ["version"] 22 | 23 | [project.urls] 24 | Homepage = "https://github.com/spex-xray/pyspextools" 25 | Documentation = "https://spex-xray.github.io/pyspextools/" 26 | Repository = "https://github.com/spex-xray/pyspextools.git" 27 | Issues = "https://github.com/spex-xray/pyspextools/issues" 28 | 29 | [tool.setuptools] 30 | include-package-data = true 31 | 32 | [tool.setuptools.dynamic] 33 | version = {attr = "pyspextools.__version__"} 34 | 35 | [project.scripts] 36 | ogip2spex = "pyspextools.scripts.ogip2spex:main" 37 | ogipgenrsp = "pyspextools.scripts.ogipgenrsp:main" 38 | simres = "pyspextools.scripts.simres:main" 39 | tg2spex = "pyspextools.scripts.tg2spex:main" 40 | 41 | -------------------------------------------------------------------------------- /pyspextools/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | __version__ = "0.7.0" 4 | -------------------------------------------------------------------------------- /pyspextools/color.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # ========================================================= 4 | """ 5 | Set the color theme of the module 6 | """ 7 | # ========================================================= 8 | 9 | 10 | class Colors: 11 | """This class contains the color codes needed to output colored text to the 12 | terminal. By default, colors are shown, but the user can request to output 13 | the text without color, either using a flag or manually setting the color 14 | scheme with the method below. 15 | 16 | :ivar HEADER: Color for headers. 17 | :vartype HEADER: str 18 | :ivar OKBLUE: Color blue. 19 | :vartype OKBLUE: str 20 | :ivar OKGREEN: Color green for OK. 21 | :vartype OKGREEN: str 22 | :ivar WARNING: Color yellow for Warnings. 23 | :vartype WARNING: str 24 | :ivar FAIL: Color red for Errors. 25 | :vartype FAIL: str 26 | :ivar ENDC: End character for color. 27 | :vartype ENDC: str 28 | :ivar BOLD: Bold font. 29 | :vartype BOLD: str 30 | :ivar UNDERLINE: Underline font. 31 | :vartype UNDERLINE: str 32 | """ 33 | 34 | def __init__(self): 35 | self.HEADER = '' 36 | self.OKBLUE = '' 37 | self.OKGREEN = '' 38 | self.WARNING = '' 39 | self.FAIL = '' 40 | self.ENDC = '' 41 | self.BOLD = '' 42 | self.UNDERLINE = '' 43 | 44 | self.set_color(True) 45 | 46 | def set_color(self, setcol): 47 | """Set color output on (True) or off (False). 48 | 49 | :param setcol: Put color output on? 
(True/False) 50 | :type setcol: bool 51 | """ 52 | 53 | if setcol: 54 | self.HEADER = '\033[95m' 55 | self.OKBLUE = '\033[94m' 56 | self.OKGREEN = '\033[92m' 57 | self.WARNING = '\033[93m' 58 | self.FAIL = '\033[91m' 59 | self.ENDC = '\033[0m' 60 | self.BOLD = '\033[1m' 61 | self.UNDERLINE = '\033[4m' 62 | else: 63 | self.HEADER = '' 64 | self.OKBLUE = '' 65 | self.OKGREEN = '' 66 | self.WARNING = '' 67 | self.FAIL = '' 68 | self.ENDC = '' 69 | self.BOLD = '' 70 | self.UNDERLINE = '' 71 | -------------------------------------------------------------------------------- /pyspextools/data/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from .badchannels import * -------------------------------------------------------------------------------- /pyspextools/data/badchannels.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from pyspextools.io.region import Region 4 | import pyspextools.messages as message 5 | 6 | import numpy as np 7 | 8 | 9 | def clean_region(reg): 10 | """Remove bad channels and channels with zero response from the region. 11 | 12 | :param reg: Input Region object. 13 | :type reg: pyspextools.io.Region 14 | """ 15 | 16 | if not isinstance(reg, Region): 17 | message.error("The input object is not of type Region.") 18 | return -1 19 | 20 | if reg.spo.empty: 21 | message.error("The input spo object is empty.") 22 | return -1 23 | 24 | if reg.res.empty: 25 | message.error("The input spo object is empty.") 26 | return -1 27 | 28 | message.proc_start("Identify bad channels in spectrum and response matrix and re-index matrix") 29 | 30 | (chanmask, groupmask, respmask) = __get_bad_channel_masks(reg) 31 | 32 | if not isinstance(chanmask, np.ndarray): 33 | return -1 34 | 35 | message.proc_end(0) 36 | 37 | # Print number of good and bad channels 38 | goodchan = np.sum(chanmask) 39 | badchan = chanmask.size - goodchan 40 | 41 | print("Number of good channels: {0}".format(goodchan)) 42 | print("Number of bad channels: {0}".format(badchan)) 43 | 44 | if goodchan == 0: 45 | message.error("All channels appear to be bad. Please check your input files.") 46 | return -1 47 | 48 | message.proc_start("Removing bad channels from spectral region") 49 | 50 | # Fix binning issues first. Make sure bin ends before bad channel and starts after bad channel. 
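    # chanmask is True for good channels; ending the previous group and starting a new
    # group around each bad channel prevents groups from spanning a removed channel.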
51 | for i in np.arange(reg.spo.nchan): 52 | if not chanmask[i]: 53 | if i != 0: 54 | reg.spo.last[i-1] = True 55 | if i != reg.spo.nchan - 1: 56 | reg.spo.first[i+1] = True 57 | 58 | spo = reg.spo 59 | 60 | spo.echan1 = reg.spo.echan1[chanmask] 61 | spo.echan2 = reg.spo.echan2[chanmask] 62 | spo.tints = reg.spo.tints[chanmask] 63 | spo.ochan = reg.spo.ochan[chanmask] 64 | spo.dochan = reg.spo.dochan[chanmask] 65 | spo.mbchan = reg.spo.mbchan[chanmask] 66 | spo.dbchan = reg.spo.dbchan[chanmask] 67 | spo.brat = reg.spo.brat[chanmask] 68 | spo.ssys = reg.spo.ssys[chanmask] 69 | spo.bsys = reg.spo.bsys[chanmask] 70 | spo.used = reg.spo.used[chanmask] 71 | spo.first = reg.spo.first[chanmask] 72 | spo.last = reg.spo.last[chanmask] 73 | 74 | # Count the number of good channels 75 | for i in np.arange(spo.nregion): 76 | spo.nchan[i] = np.sum(chanmask) 77 | 78 | # Check the consistency of the new object 79 | stat = spo.check() 80 | 81 | # Show result to user 82 | message.proc_end(stat) 83 | 84 | # Copy the filtered object to the original region 85 | reg.spo = spo 86 | 87 | # Print number of good and bad groups 88 | badgroup = groupmask.size - np.sum(groupmask) 89 | 90 | print("Number of original groups: {0}".format(groupmask.size)) 91 | print("Number of zero-response groups: {0}".format(badgroup)) 92 | 93 | # Print number of removed response elements 94 | badelements = respmask.size - np.sum(respmask) 95 | 96 | print("Number of original response elements: {0}".format(respmask.size)) 97 | print("Number of bad response elements: {0}".format(badelements)) 98 | 99 | message.proc_start("Removing bad channels from response matrix") 100 | 101 | # Mask response array 102 | reg.res.resp = reg.res.resp[respmask] 103 | if reg.res.resp_der: 104 | reg.res.dresp = reg.res.dresp[respmask] 105 | 106 | # Mask group arrays 107 | reg.res.eg1 = reg.res.eg1[groupmask] 108 | reg.res.eg2 = reg.res.eg2[groupmask] 109 | reg.res.ic1 = reg.res.ic1[groupmask] 110 | reg.res.ic2 = reg.res.ic2[groupmask] 111 | reg.res.nc = reg.res.nc[groupmask] 112 | if reg.res.area_scal: 113 | reg.res.relarea = reg.res.relarea[groupmask] 114 | 115 | eg_start = 0 116 | 117 | for icomp in np.arange(reg.res.ncomp): 118 | reg.res.nchan[icomp] = np.sum(chanmask) 119 | eg_end = reg.res.neg[icomp] + eg_start 120 | reg.res.neg[icomp] = np.sum(groupmask[eg_start:eg_end]) 121 | eg_start = eg_end + 1 122 | 123 | stat = reg.res.check() 124 | 125 | message.proc_end(stat) 126 | 127 | return reg 128 | 129 | 130 | def __get_bad_channel_masks(reg): 131 | """Identify channels with zero response. 132 | 133 | :param reg: Input Region object. 
134 | :type reg: pyspextools.io.Region 135 | """ 136 | 137 | # Get the amount of channels and create a channel chanmask with that size 138 | chanmask = np.zeros(reg.spo.used.size, dtype=bool) 139 | 140 | if reg.spo.nchan != reg.spo.used.size: 141 | message.error("Mismatch in number of channels in spo object.") 142 | return -1 143 | 144 | if chanmask.size != reg.res.nchan[0]: 145 | message.error("Mismatch in number of channels between res and spo object.") 146 | return -1 147 | 148 | # Create a mask array for the number of groups (all true) 149 | groupmask = np.ones(reg.res.nc.size, dtype=bool) 150 | 151 | if groupmask.size != np.sum(reg.res.neg): 152 | message.error("Mismatch between the number of groups in the ICOMP and GROUP extensions.") 153 | return -1 154 | 155 | # Create a mask array for the number of response elements (all true) 156 | respmask = np.ones(reg.res.resp.size, dtype=bool) 157 | 158 | if respmask.size != np.sum(reg.res.nc): 159 | message.error("Mismatch between the number of response elements in the GROUP and RESP extensions.") 160 | return -1 161 | 162 | ir = 0 163 | 164 | # Loop over groups to find zero response elements and finalize channel mask 165 | for ie in np.arange(reg.res.eg1.size): 166 | ic1 = reg.res.ic1[ie] # Original first channel of group 167 | ic2 = reg.res.ic2[ie] # Original last channel of group 168 | # Masking entire group if response is zero 169 | #grsum = np.sum(reg.res.resp[ir:ir+reg.res.nc[ie]+1]) 170 | #if grsum <= 0.0: 171 | # groupmask[ie] = False 172 | 173 | for j in np.arange(reg.res.nc[ie]): 174 | ic = ic1 + j - 1 # -1 because Python array starts at 0, not 1 175 | if ic > ic2 - 1: 176 | message.error("Error: Mismatch in number of channels.") 177 | if reg.res.resp[ir] <= 0.0: 178 | chanmask[ic] = False or chanmask[ic] 179 | else: 180 | chanmask[ic] = True 181 | 182 | ir = ir + 1 183 | 184 | chanmask = np.logical_and(chanmask, reg.spo.used) 185 | 186 | # Loop over groups to set the new channel boundaries and fill respmask 187 | newie = 0 # Index of energy bins 188 | ir = 0 # Index of response elements in original array (at maximum reg.res.resp.size) 189 | 190 | for ie in np.arange(reg.res.eg1.size): 191 | 192 | ic1 = reg.res.ic1[ie] # Original first channel of group 193 | first = True # Is this the first channel of the group? 194 | newnc = 0 195 | 196 | for j in np.arange(reg.res.nc[ie]): 197 | ic = ic1 + j - 1 198 | if chanmask[ic] and groupmask[ie]: # If channel is good and group is good 199 | newnc = newnc + 1 # Count number of good channels in group 200 | if first: # If this is the first good bin of the group, set ic1 201 | first = False 202 | reg.res.ic1[ie] = np.sum(chanmask[0:ic]) + 1 203 | else: 204 | respmask[ir] = False 205 | 206 | ir = ir + 1 207 | 208 | # Set new ic1, ic2 and nc 209 | reg.res.ic2[ie] = reg.res.ic1[ie] + newnc - 1 210 | reg.res.nc[ie] = reg.res.ic2[ie] - reg.res.ic1[ie] + 1 211 | 212 | if newnc == 0: 213 | groupmask[ie] = False 214 | else: 215 | newie = newie + 1 216 | 217 | return chanmask, groupmask, respmask 218 | -------------------------------------------------------------------------------- /pyspextools/data/response.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import math 4 | import pyspextools.messages as message 5 | 6 | 7 | def gaussrsp(x, mu, fwhm, dfwhm): 8 | """Gaussian response function, optionally as a linear function of energy. 
The inputs are the energy value to 9 | calculate the response for (x), the center of the Gauss function (mu), the resolution of the detector 10 | (FWHM in eV at 1 keV), and the gradient of the FWHM as a function of energy. 11 | 12 | :param x: X value to calculate response for. 13 | :type x: float 14 | :param mu: Center of the Gauss function. 15 | :type mu: float 16 | :param fwhm: The full-width at half maximum of the detector resolution (FWHM in eV at 1 keV). 17 | :type fwhm: float 18 | :param dfwhm: Gradient of the detector resolution as a function of energy. 19 | :type dfwhm: float 20 | """ 21 | 22 | # FWHM at the center energy of the response 23 | fwhm_mu = fwhm + dfwhm * (mu - 1.0) 24 | if fwhm_mu <= 0.: 25 | message.error('The FWHM has become (less than) 0 in the provided energy range. ' 26 | 'Please check your input gradient.') 27 | return -1 28 | 29 | # Convert the FWHM to sigma 30 | sigma = fwhm_mu / (2.0 * math.sqrt(2.0*math.log(2.0))) 31 | 32 | # Convert sigma to keV 33 | sigma = sigma * 1E-3 34 | 35 | # Calculate the response value for this response element x 36 | resp = 1./(sigma * math.sqrt(2.*math.pi)) * math.exp(-(x-mu)**2/(2. * sigma**2)) 37 | 38 | return resp -------------------------------------------------------------------------------- /pyspextools/io/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from .spo import Spo 4 | from .res import Res 5 | 6 | from .pha import Pha 7 | from .pha2 import Pha2 8 | from .rmf import Rmf 9 | from .arf import Arf 10 | 11 | from .region import Region 12 | from .dataset import Dataset 13 | from .ogip import OGIPRegion 14 | from .tg import TGRegion 15 | 16 | from .convert import * 17 | 18 | 19 | -------------------------------------------------------------------------------- /pyspextools/io/arf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import pyspextools.messages as message 4 | import numpy as np 5 | import astropy.io.fits as fits 6 | 7 | 8 | class Arf: 9 | """Class to read OGIP ARF files. The variable naming is made consistent with the HEASOFT HEASP module by 10 | Keith Arnaud. 11 | 12 | :ivar LowEnergy: Low Energy boundary of bin 13 | :vartype LowEnergy: numpy.ndarray 14 | :ivar HighEnergy: High Energy boundary of bin 15 | :vartype HighEnergy: numpy.ndarray 16 | :ivar EffArea: Effective area of bin 17 | :vartype EffArea: numpy.ndarray 18 | 19 | :ivar EnergyUnits: Energy units 20 | :vartype EnergyUnits: str 21 | :ivar ARFUnits: Unit of effective area 22 | :vartype ARFUnits: str 23 | :ivar Order: Grating order (for grating arrays, else 0) 24 | :vartype Order: int 25 | :ivar Grating: Grating instrument (if available, 1 = HEG, 2 = MEG, 3 = LEG) 26 | :vartype Grating: int 27 | """ 28 | 29 | def __init__(self): 30 | """Initialize an ARF object.""" 31 | 32 | self.LowEnergy = np.array([], dtype=float) # Low Energy of bin 33 | self.HighEnergy = np.array([], dtype=float) # High Energy of bin 34 | self.CentEnergy = np.array([], dtype=float) # Center Energy of bin 35 | self.EffArea = np.array([], dtype=float) # Effective Area of bin 36 | 37 | self.EnergyUnits = 'keV' # Energy units 38 | self.ARFUnits = 'cm2' 39 | self.Order = 0 # Grating order (for grating arrays, else 0) 40 | self.Grating = 0 # Grating instrument (if available, 1 = HEG, 2 = MEG, 3 = LEG) 41 | 42 | def read(self, arffile): 43 | """Read the effective area from an OGIP ARF file. 
44 | 45 | :param arffile: Effective area file name (FITS/OGIP format) 46 | :type arffile: str 47 | """ 48 | 49 | (data, header) = fits.getdata(arffile, 'SPECRESP', header=True) 50 | 51 | self.LowEnergy = data['ENERG_LO'] 52 | self.HighEnergy = data['ENERG_HI'] 53 | self.CentEnergy = (self.LowEnergy + self.HighEnergy) / 2.0 54 | self.EffArea = data['SPECRESP'] 55 | 56 | self.EnergyUnits = header['TUNIT1'] 57 | 58 | if header['TUNIT3'] == 'cm**2': 59 | self.ARFUnits = 'cm2' 60 | elif header['TUNIT3'] == 'cm2': 61 | self.ARFUnits = 'cm2' 62 | else: 63 | message.warning("ARF units are not recognized.") 64 | 65 | try: 66 | self.Order = header['TG_M'] 67 | self.Grating = header['TG_PART'] 68 | except KeyError: 69 | self.Order = 0 70 | self.Grating = 0 71 | 72 | # Check for NULL values 73 | nans = np.isnan(self.EffArea) 74 | if np.any(nans): 75 | for i in np.arange(self.EffArea.size): 76 | if nans[i]: 77 | self.EffArea[i] = 0.0 78 | 79 | return 0 80 | 81 | def write(self, arffile, telescop=None, instrume=None, filter=None, overwrite=False): 82 | """Write an OGIP compatible ARF file (Non-grating format). 83 | 84 | :param arffile: Effective area file name to write (FITS/OGIP format) 85 | :type arffile: str 86 | :param telescop: Telescope name (optional) 87 | :type telescop: str 88 | :param instrume: Instrument name (optional) 89 | :type instrume: str 90 | :param filter: Filter setting (optional) 91 | :type filter: str 92 | :param overwrite: Overwrite existing file? (True/False) 93 | :type overwrite: bool 94 | """ 95 | 96 | # Write the ARF arrays into FITS column format 97 | col1 = fits.Column(name='ENERG_LO', format='D', unit=self.EnergyUnits, array=self.LowEnergy) 98 | col2 = fits.Column(name='ENERG_HI', format='D', unit=self.EnergyUnits, array=self.HighEnergy) 99 | col3 = fits.Column(name='SPECRESP', format='D', unit=self.ARFUnits, array=self.EffArea) 100 | 101 | hdu = fits.BinTableHDU.from_columns([col1, col2, col3]) 102 | 103 | hdr = hdu.header 104 | hdr.set('EXTNAME', 'SPECRESP') 105 | 106 | # Set the TELESCOP keyword (optional) 107 | if telescop == None: 108 | hdr.set('TELESCOP', 'None', 'Telescope name') 109 | else: 110 | hdr.set('TELESCOP', telescop, 'Telescope name') 111 | 112 | # Set the INSTRUME keyword (optional) 113 | if instrume == None: 114 | hdr.set('INSTRUME', 'None', 'Instrument name') 115 | else: 116 | hdr.set('INSTRUME', instrume, 'Instrument name') 117 | 118 | # Set the FILTER keyword (optional) 119 | if filter is None: 120 | hdr.set('FILTER', 'None', 'Filter setting') 121 | else: 122 | hdr.set('FILTER', filter, 'Filter setting') 123 | 124 | hdr.set('DETNAM', 'None') 125 | hdr.set('HDUCLASS', 'OGIP') 126 | hdr.set('HDUCLAS1', 'RESPONSE') 127 | hdr.set('HDUCLAS2', 'SPECRESP') 128 | hdr.set('HDUVERS', '1.1.0') 129 | hdr.set('ORIGIN', 'SRON') 130 | 131 | hdu.header['HISTORY'] = 'Created by pyspextools:' 132 | hdu.header['HISTORY'] = 'https://github.com/spex-xray/pyspextools' 133 | 134 | try: 135 | hdu.writeto(arffile, overwrite=overwrite) 136 | except IOError: 137 | message.error("File {0} already exists. 
I will not overwrite it!".format(arffile)) 138 | return 1 139 | 140 | return 0 141 | 142 | def check(self): 143 | """Check if the basic information is read in.""" 144 | if self.LowEnergy.size <= 0: 145 | message.error("Energy array has zero length.") 146 | return 1 147 | if self.EffArea.size <= 0: 148 | message.error("Effective area array has zero length.") 149 | return 1 150 | 151 | return 0 152 | 153 | def disp(self): 154 | """Display a summary of the ARF object.""" 155 | print("ARF effective area:") 156 | print("LowEnergy array: {0} Low Energy of bin".format(self.LowEnergy.size)) 157 | print("HighEnergy array: {0} High Energy of bin".format(self.HighEnergy.size)) 158 | print("EffArea array: {0} Effective Area of bin".format(self.EffArea.size)) 159 | print("Energy units: {0} Energy units".format(self.EnergyUnits)) 160 | print("Area units: {0} Area units".format(self.ARFUnits)) 161 | 162 | return 163 | 164 | 165 | -------------------------------------------------------------------------------- /pyspextools/io/convert.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import pyspextools.messages as message 4 | import numpy as np 5 | import math 6 | 7 | from .region import Region 8 | from .res import Res 9 | from .spo import Spo 10 | from .pha import Pha 11 | from .rmf import Rmf 12 | from .arf import Arf 13 | 14 | 15 | # ----------------------------------------------------- 16 | # Return a spo object derived from the OGIP data 17 | # ----------------------------------------------------- 18 | 19 | 20 | def pha_to_spo(src, rmf, back=None, corr=None, save_grouping=False): 21 | """Convert the source (src) and optional background (back) and correction spectra (corr) from OGIP to SPEX format. 22 | Please also provide an OGIP rmf object from the Rmf class to this function to read the channel energies. 23 | When the save_grouping flag is true, the grouping information in the PHA file will be copied to the spo file. 24 | The default behaviour is to ignore the grouping. 25 | This method returns a pyspextools Spo object containing the source and background rates. 26 | 27 | :param src: Input PHA source spectrum object. 28 | :type src: pyspextools.io.Pha 29 | :param rmf: Input RMF response matrix object. 30 | :type rmf: pyspextools.io.Rmf 31 | :param back: Input PHA background spectrum object (optional). 32 | :type back: pyspextools.io.Pha 33 | :param corr: Input PHA correction spectrum object (optional). 
34 | :type corr: pyspextools.io.Pha 35 | :param save_grouping: Save the group information (True/False) 36 | :type save_grouping: bool 37 | """ 38 | 39 | if not isinstance(src, Pha): 40 | message.error("Input source spectrum is not a PHA object.") 41 | return 1 42 | 43 | if not isinstance(rmf, Rmf): 44 | message.error("Input response matrix is not an RMF object.") 45 | return 1 46 | 47 | if back is not None: 48 | if not isinstance(back, Pha): 49 | message.error("Input background spectrum is not a PHA object.") 50 | return 1 51 | input_back = True 52 | else: 53 | input_back = False 54 | 55 | if corr is not None: 56 | if not isinstance(corr, Pha): 57 | message.error("Input correction spectrum is not a PHA object.") 58 | return 1 59 | input_corr = True 60 | else: 61 | input_corr = False 62 | 63 | spo = Spo() 64 | 65 | # Determine number of channels and add to spo 66 | spo.nchan = np.append(spo.nchan, src.DetChans) 67 | spo.sponame = None 68 | spo.nregion = 1 69 | 70 | # Create zero arrays of length nchan to fill in the loop later 71 | spo.zero_spo(src.DetChans) 72 | 73 | # Loop through all the bins for the relevant spectral arrays 74 | for i in np.arange(src.DetChans): 75 | spo.tints[i] = src.Exposure * src.AreaScaling[i] 76 | 77 | # Calculate the source rates and errors 78 | if spo.tints[i] > 0: 79 | spo.ochan[i] = src.Rate[i] / src.AreaScaling[i] 80 | spo.dochan[i] = (src.StatError[i])**2 / src.AreaScaling[i] # Add the errors in square later 81 | else: 82 | spo.ochan[i] = 0. 83 | spo.dochan[i] = 0. 84 | 85 | # Subtract background if available 86 | if input_back: 87 | btints = back.Exposure * back.AreaScaling[i] 88 | # Calculate backscale ratio 89 | if back.BackScaling[i] > 0: 90 | fb = src.BackScaling[i] / back.BackScaling[i] 91 | else: 92 | fb = 0. 93 | 94 | # Subtract background and calculate errors 95 | spo.ochan[i] = spo.ochan[i] - back.Rate[i] * fb / back.AreaScaling[i] 96 | spo.dochan[i] = spo.dochan[i] + (back.StatError[i] * fb / back.AreaScaling[i]) ** 2 97 | spo.mbchan[i] = back.Rate[i] * fb / back.AreaScaling[i] 98 | spo.dbchan[i] = (back.StatError[i] * fb / back.AreaScaling[i]) ** 2 99 | 100 | # Calculate the Exp_Rate backscale ratio 101 | if fb > 0 and src.Exposure > 0: 102 | spo.brat[i] = btints / spo.tints[i] / fb 103 | else: 104 | spo.brat[i] = 0. 105 | 106 | # Subtract correction spectrum, if available 107 | if input_corr: 108 | ctints = corr.Exposure * corr.AreaScaling[i] 109 | # Note: The influence of brat on the corr spectrum is not taken into account! 110 | if corr.BackScaling[i] > 0: 111 | fc = src.BackScaling[i] / corr.BackScaling[i] 112 | else: 113 | fc = 0. 114 | 115 | # Subtract correction spectrum and calculate errors 116 | spo.ochan[i] = spo.ochan[i] - corr.Rate[i] * fc / ctints 117 | spo.dochan[i] = spo.dochan[i] + corr.Rate[i] * (fc / ctints) ** 2 118 | spo.mbchan[i] = spo.mbchan[i] + corr.Rate[i] * fc / ctints 119 | spo.dbchan[i] = spo.dbchan[i] + corr.Rate[i] * (fc / ctints) ** 2 120 | 121 | # Set background to zero for zero exposure bins 122 | if spo.tints[i] <= 0.: 123 | spo.mbchan[i] = 0. 124 | spo.dbchan[i] = 0. 125 | spo.brat[i] = 0. 
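        # The source and background errors were accumulated as squared values (in quadrature)
        # above; the square roots below turn them into one-sigma errors per channel.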
126 | 127 | spo.dochan[i] = math.sqrt(spo.dochan[i]) 128 | spo.dbchan[i] = math.sqrt(spo.dbchan[i]) 129 | 130 | # Set first, last and used variables 131 | if src.Quality[i] != 0: 132 | spo.used[i] = False 133 | 134 | if input_back: 135 | if back.Quality[i] != 0: 136 | spo.used[i] = False 137 | 138 | if input_corr: 139 | if corr.Quality[i] != 0: 140 | spo.used[i] = False 141 | 142 | if save_grouping: 143 | if src.Grouping[i] == 1: 144 | spo.first[i] = True 145 | spo.last[i] = False 146 | if src.Grouping[i] <= 0: 147 | spo.first[i] = False 148 | if i < src.DetChans - 1: 149 | if src.Grouping[i + 1] == 1: 150 | spo.last[i] = True 151 | else: 152 | spo.last[i] = False 153 | elif i == src.DetChans - 1: 154 | spo.last[i] = True 155 | else: 156 | spo.last[i] = False 157 | 158 | # Get channel boundaries from response 159 | # Channel boundary cannot be 0. 160 | if rmf.ebounds.EnergyUnits != "keV": 161 | message.warning("Energy units of keV are expected in the response file!") 162 | 163 | if rmf.ebounds.ChannelLowEnergy[i] <= 0.: 164 | spo.echan1[i] = 1e-5 165 | message.warning("Lowest channel boundary energy is 0. Set to 1E-5 to avoid problems.") 166 | else: 167 | spo.echan1[i] = rmf.ebounds.ChannelLowEnergy[i] 168 | spo.echan2[i] = rmf.ebounds.ChannelHighEnergy[i] 169 | 170 | # Check if channel order needs to be swapped 171 | if src.DetChans > 1: 172 | if spo.echan1[0] > spo.echan1[1]: 173 | spo.swap = True 174 | spo.swap_order() 175 | 176 | spo.empty = False 177 | 178 | return spo 179 | 180 | 181 | # ----------------------------------------------------- 182 | # Return a res object derived from the OGIP data 183 | # ----------------------------------------------------- 184 | 185 | 186 | def rmf_to_res(rmf, matext=0, arf=None): 187 | """Convert an response matrix object from OGIP to SPEX format. The response matrix is translated one-to-one 188 | without optimizations. Providing an ARF object is optional. All groups in the OGIP matrix are put into one 189 | SPEX response component. This method returns a pyspextools Res object containing the response matrix. 190 | 191 | :param rmf: Input RMF response object. 192 | :type rmf: pyspextools.io.Rmf 193 | :param arf: Input ARF effective area object. 
:type arf: pyspextools.io.Arf 195 | :param matext: RMF matrix number to convert (start counting at 0) 196 | :type matext: int 197 | """ 198 | 199 | if not isinstance(rmf, Rmf): 200 | message.error("The input RMF object is not of type Rmf.") 201 | return 1 202 | 203 | if matext >= rmf.NumberMatrixExt or matext < 0: 204 | message.error("The supplied matrix extension number is not available.") 205 | return 1 206 | 207 | # Check the optional ARF object; abort if an invalid object was passed 208 | input_area = isinstance(arf, Arf) 209 | if arf is not None and not input_area: 210 | message.error("The input ARF object is not of type Arf.") 211 | return 1 212 | 213 | 214 | try: 215 | rmf.ebounds.NumberChannels 216 | except NameError: 217 | message.error("The OGIP response matrix has not been initialised yet.") 218 | return 0 219 | 220 | res = Res() 221 | 222 | # Read the number of energy bins and groups 223 | res.nchan = np.append(res.nchan, rmf.ebounds.NumberChannels) 224 | res.nsector = 1 225 | res.nregion = 1 226 | res.sector = np.append(res.sector, 1) 227 | res.region = np.append(res.region, 1) 228 | res.resname = None 229 | 230 | # Read the total number of groups (which is neg in SPEX format) 231 | res.neg = np.append(res.neg, rmf.matrix[matext].NumberTotalGroups) 232 | 233 | res.eg1 = np.zeros(res.neg, dtype=float) 234 | res.eg2 = np.zeros(res.neg, dtype=float) 235 | res.nc = np.zeros(res.neg, dtype=int) 236 | res.ic1 = np.zeros(res.neg, dtype=int) 237 | res.ic2 = np.zeros(res.neg, dtype=int) 238 | 239 | # Read the total number of matrix elements 240 | nm = rmf.matrix[matext].NumberTotalElements 241 | res.resp = np.zeros(nm, dtype=float) 242 | 243 | # Set the number of components to 1 (no optimization or re-ordering) 244 | res.ncomp = 1 245 | 246 | # Read the energy bin boundaries and group information 247 | g = 0 # Index for the group number 248 | m = 0 # Index for the matrix element number 249 | for i in np.arange(rmf.matrix[matext].NumberEnergyBins): 250 | # Number of response groups for this energy bin 251 | ngrp = rmf.matrix[matext].NumberGroups[i] 252 | for j in np.arange(ngrp): 253 | # Energy bin boundaries 254 | if rmf.matrix[matext].LowEnergy[i] <= 0.: 255 | res.eg1[g] = 1e-7 256 | message.warning("Lowest energy boundary is 0. Set to 1E-7 to avoid problems.") 257 | else: 258 | res.eg1[g] = rmf.matrix[matext].LowEnergy[i] 259 | 260 | res.eg2[g] = rmf.matrix[matext].HighEnergy[i] 261 | if res.eg2[g] <= res.eg1[g]: 262 | message.error("Discontinuous bins in energy array in channel {0}. 
Please check the numbers.".format( 263 | i + 1)) 264 | return 265 | 266 | res.nc[g] = rmf.matrix[matext].NumberChannelsGroup[g] 267 | # Add the start channel to the IC to correct for cases where we start at channel 0/1 268 | res.ic1[g] = rmf.matrix[matext].FirstChannelGroup[g] 269 | ic2 = res.ic1[g] + res.nc[g] - 1 270 | res.ic2[g] = ic2 271 | 272 | if input_area: 273 | # Interpolate the effective area in case the response energy bins do not match the arf bins 274 | # Center energy of response bin 275 | renergy = (res.eg1[g] + res.eg2[g]) / 2.0 276 | area = np.interp(renergy, arf.CentEnergy, arf.EffArea) 277 | else: 278 | area = 1.0 279 | 280 | for k in np.arange(res.nc[g]): 281 | res.resp[m] = rmf.matrix[matext].Matrix[m] * area 282 | if res.resp[m] < 0.0: 283 | res.resp[m] = 0.0 284 | m = m + 1 285 | g = g + 1 286 | 287 | if g > res.neg: 288 | message.error("Mismatch between number of groups.") 289 | return 0 290 | 291 | if m > nm: 292 | message.error("Mismatch between number of matrix elements.") 293 | return 0 294 | 295 | # Convert matrix to m**2 units for SPEX 296 | if input_area: 297 | if arf.ARFUnits == "cm2": 298 | res.resp *= 1.E-4 299 | else: 300 | res.resp *= 1.E-4 301 | 302 | # Check if channel order needs to be swapped 303 | if res.nchan > 1: 304 | if rmf.ebounds.ChannelLowEnergy[0] > rmf.ebounds.ChannelLowEnergy[1]: 305 | res.swap = True 306 | res.swap_order() 307 | 308 | res.empty = False 309 | 310 | return res 311 | -------------------------------------------------------------------------------- /pyspextools/io/dataset.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # ========================================================= 4 | """ 5 | Python module to read and write SPEX res and spo files. 6 | See this page for the format specification: 7 | 8 | https://spex-xray.github.io/spex-help/theory/response.html 9 | 10 | This module contains the data class: 11 | 12 | DATA: Contains the collection of spectra and 13 | responses organized in SPEX regions 14 | 15 | Dependencies: 16 | - numpy: Array operations 17 | - spo: The spo class from this pyspextools data module 18 | - res: The res class from this pyspextools data module 19 | """ 20 | # ========================================================= 21 | 22 | import numpy as np 23 | import pyspextools.messages as message 24 | 25 | from .region import Region 26 | from .spo import Spo 27 | from .res import Res 28 | 29 | 30 | # ========================================================= 31 | # Data class 32 | # ========================================================= 33 | 34 | class Dataset: 35 | """The dataset class is the most general class containing a 36 | dataset with multiple regions. Using this class, users 37 | can read, write and manipulate spectral datasets. 38 | 39 | :ivar regions: List of regions. 40 | :vartype regions: list 41 | :ivar config: Response configuration (combinations of sector and region values). 42 | :vartype config: numpy.ndarray 43 | """ 44 | 45 | def __init__(self): 46 | """Initialise a SPEX dataset.""" 47 | self.regions = [] 48 | self.config = np.empty(shape=[0, 2], dtype=int) 49 | 50 | # ----------------------------------------------------- 51 | # Read one region from a spo and res file. 52 | # ----------------------------------------------------- 53 | 54 | def read_region(self, isector, iregion, spofile, resfile, label=""): 55 | """Read one region with number iregion from the two SPEX files and add it to the dataset. 
56 | 57 | :param isector: Sector number associated with the region to select. 58 | :type isector: int 59 | :param iregion: Region number to select. 60 | :type iregion: int 61 | :param spofile: File name of the .spo file. 62 | :type spofile: str 63 | :param resfile: File name of the .res file. 64 | :type resfile: str 65 | :param label: Text string to identify the region (optional). 66 | :type label: str 67 | """ 68 | 69 | # Read the spo and res files in a temporary object 70 | tspo = Spo() 71 | tspo.read_file(spofile) 72 | 73 | tres = Res() 74 | tres.read_file(resfile) 75 | 76 | # Create new region 77 | reg = Region() 78 | 79 | # Return desired region and save into local region object 80 | reg.spo = tspo.return_region(iregion) 81 | reg.res = tres.return_region(isector, iregion) 82 | 83 | # Adapt region number to local set 84 | reg.res.region = reg.res.region + len(self.regions) 85 | 86 | # Run consistency checks 87 | reg.check() 88 | 89 | # Add label to the region 90 | self.label = label 91 | 92 | # Add region to list of regions 93 | self.regions.append(reg) 94 | 95 | # Add the sector and region to the config variable of this dataset 96 | self.config = np.append(self.config, [[isector, iregion]], axis=0) 97 | 98 | # ----------------------------------------------------- 99 | # Read all the regions from a spo and res file. 100 | # ----------------------------------------------------- 101 | 102 | def read_all_regions(self, spofile, resfile): 103 | """Read all the regions from a spo and res file and add them to the dataset. 104 | 105 | :param spofile: File name of the input .spo file. 106 | :type spofile: str 107 | :param resfile: File name of the input .res file. 108 | :type resfile: str 109 | """ 110 | 111 | # Read the spo and res files in a temporary object 112 | tspo = Spo() 113 | tspo.read_file(spofile) 114 | 115 | tres = Res() 116 | tres.read_file(resfile) 117 | 118 | # Check if the number of regions in both files are the same 119 | if tspo.nregion != tres.nregion: 120 | print("Error: the spo and res files do not have the same number of regions!") 121 | return 122 | 123 | # Read the response configuration 124 | config = self.read_config(tres) 125 | 126 | # Update the response configuration 127 | if self.config.size == 0: 128 | self.config = config 129 | regmax = 0 130 | secmax = 0 131 | else: 132 | secmax, regmax = np.amax(self.config, axis=0) 133 | 134 | for i in np.arange(tspo.nregion): 135 | # Initialize a new region 136 | reg = Region() 137 | 138 | reg.spo = tspo.return_region(config[i, 1]) 139 | reg.res = tres.return_region(config[i, 0], config[i, 1]) 140 | 141 | # Run consistency checks 142 | reg.check() 143 | 144 | # Add region to list of regions 145 | self.regions.append(reg) 146 | 147 | reg.increase_region(regmax) 148 | 149 | self.update_config() 150 | 151 | # ----------------------------------------------------- 152 | # Append a region object to the dataset 153 | # ----------------------------------------------------- 154 | def append_region(self, region, isector, iregion): 155 | """Append a region object to the dataset. 156 | 157 | :param region: Input region object. 158 | :type region: pyspextools.io.Region 159 | :param isector: Sector number to be selected from the region object. 160 | :type isector: int 161 | :param iregion: Region number to be selected from the region object. 
162 | :type iregion: int 163 | """ 164 | 165 | # Reset sector and region for incoming region 166 | for i in np.arange(len(region.res.region)): 167 | region.res.region[i] = iregion 168 | region.res.sector[i] = isector 169 | 170 | # Append the region 171 | self.regions.append(region) 172 | 173 | # Add the sector and region to the config variable of this dataset 174 | self.config = np.append(self.config, np.array([[isector, iregion]]), axis=0) 175 | 176 | # ----------------------------------------------------- 177 | # Write one region to a spo and res file. 178 | # ----------------------------------------------------- 179 | 180 | def write_region(self, spofile, resfile, iregion, exp_rate=True, overwrite=False, history=None): 181 | """Write one region to a spo and res file. 182 | 183 | :param spofile: File name of the input .spo file. 184 | :type spofile: str 185 | :param resfile: File name of the input .res file. 186 | :type resfile: str 187 | :param iregion: Region number to be selected from the region object. 188 | :type iregion: int 189 | :param exp_rate: Write an EXP_RATE column or not. 190 | :type exp_rate: bool 191 | :param overwrite: Should we overwrite existing files? 192 | :type overwrite: bool 193 | :param history: History information. 194 | :type history: List/Array of strings 195 | """ 196 | 197 | if len(self.regions) >= iregion > 0: 198 | self.regions[iregion - 1].spo.write_file(spofile, exp_rate=exp_rate, overwrite=overwrite, history=history) 199 | self.regions[iregion - 1].res.write_file(resfile, overwrite=overwrite, history=history) 200 | else: 201 | print("Error: region number not found!") 202 | return 1 203 | 204 | return 0 205 | 206 | # ----------------------------------------------------- 207 | # Write all the regions to a spo and res file. 208 | # ----------------------------------------------------- 209 | 210 | def write_all_regions(self, spofile, resfile, exp_rate=True, overwrite=False, history=None): 211 | """Write all regions in the data object to spo and res. 212 | 213 | :param spofile: File name of the input .spo file. 214 | :type spofile: str 215 | :param resfile: File name of the input .res file. 216 | :type resfile: str 217 | :param exp_rate: Write an EXP_RATE column or not. 218 | :type exp_rate: bool 219 | :param overwrite: Should we overwrite existing files? 220 | :type overwrite: bool 221 | :param history: History information. 222 | :type history: List/Array of strings 223 | """ 224 | tspo = Spo() 225 | tres = Res() 226 | 227 | i = 0 228 | for ireg in self.regions: 229 | tspo.add_spo_region(ireg.spo) 230 | tres.add_res_region(ireg.res, isector=self.config[i, 0], iregion=self.config[i, 1]) 231 | i = i + 1 232 | 233 | stat = tspo.write_file(spofile, exp_rate=exp_rate, overwrite=overwrite, history=history) 234 | if stat != 0: 235 | message.error("Writing SPO file failed.") 236 | return 1 237 | 238 | stat = tres.write_file(resfile, overwrite=overwrite, history=history) 239 | if stat != 0: 240 | message.error("Writing RES file failed.") 241 | return 1 242 | 243 | return 0 244 | 245 | # ----------------------------------------------------- 246 | # Function to read the response configuration 247 | # ----------------------------------------------------- 248 | 249 | def read_config(self, res): 250 | """Read the response configuration. 251 | 252 | :param res: SPEX response object. 
253 | :type res: pyspextools.io.Res 254 | """ 255 | config = np.empty(shape=[0, 2], dtype=int) 256 | psector = 0 257 | pregion = 0 258 | for i in np.arange(res.ncomp): 259 | # Loop through components to find sector-region combinations 260 | if (res.region[i] != pregion) or (res.sector[i] != psector): 261 | config = np.append(config, np.array([[res.sector[i], res.region[i]]]), axis=0) 262 | psector = res.sector[i] 263 | pregion = res.region[i] 264 | 265 | return config 266 | 267 | # ----------------------------------------------------- 268 | # Function to update the response configuration 269 | # ----------------------------------------------------- 270 | 271 | def update_config(self): 272 | """Update the response configuration.""" 273 | self.config = np.empty(shape=[0, 2], dtype=int) 274 | pregion = 0 # Set previous region 275 | psector = 0 # Set previous sector 276 | for reg in self.regions: 277 | # Loop through components to find sector-region combinations 278 | if (reg.res.region[0] != pregion) or (reg.res.sector[0] != psector): 279 | self.config = np.append(self.config, np.array([[reg.res.sector[0], reg.res.region[0]]]), axis=0) 280 | pregion = reg.res.region[0] 281 | psector = reg.res.sector[0] 282 | else: 283 | message.error("Double sector and region identification.") 284 | 285 | # ----------------------------------------------------- 286 | # Function to assign a new sector number to a region 287 | # ----------------------------------------------------- 288 | 289 | def assign_sector(self, iregion, newsector): 290 | """Assign a new sector number to a specific region. 291 | 292 | :param iregion: Region number to assign new sector number for. 293 | :type iregion: int 294 | :param newsector: New sector number. 295 | :type newsector: int 296 | """ 297 | if len(self.regions) >= iregion > 0: 298 | self.regions[iregion-1].set_sector(newsector) 299 | self.update_config() 300 | else: 301 | print("Error: region number not found!") 302 | return 1 303 | 304 | # ----------------------------------------------------- 305 | # Function to assign a new region number to a region 306 | # ----------------------------------------------------- 307 | 308 | def assign_region(self, iregion, newregion): 309 | """Assign a new region number to a specific region. 310 | 311 | :param iregion: Region number to assign new number for. 312 | :type iregion: int 313 | :param newregion: New region number. 314 | :type newregion: int 315 | """ 316 | if len(self.regions) >= iregion > 0: 317 | self.regions[iregion-1].set_region(newregion) 318 | self.update_config() 319 | else: 320 | print("Error: region number not found!") 321 | return 1 322 | 323 | # ----------------------------------------------------- 324 | # Function to assign a new region number to a region 325 | # ----------------------------------------------------- 326 | 327 | def assign_sector_region(self, iregion, newsector, newregion): 328 | """Assign a new sector and region number to a specific region. 329 | 330 | :param iregion: Region number to assign new sector number for. 331 | :type iregion: int 332 | :param newsector: New sector number. 333 | :type newsector: int 334 | :param newregion: New region number. 
335 | :type newregion: int 336 | """ 337 | if len(self.regions) >= iregion > 0: 338 | self.regions[iregion-1].set_sector(newsector) 339 | self.regions[iregion-1].set_region(newregion) 340 | self.update_config() 341 | else: 342 | print("Error: region number not found!") 343 | return 1 344 | 345 | # ----------------------------------------------------- 346 | # Show a summary of the dataset, similar to data show in SPEX 347 | # ----------------------------------------------------- 348 | 349 | def show(self): 350 | """Show a summary for the entire dataset""" 351 | for ireg in np.arange(len(self.regions)): 352 | print("===========================================================") 353 | print(" Part {0}".format(ireg+1)) 354 | self.regions[ireg].show(isector=self.config[ireg, 0], iregion=self.config[ireg, 1]) 355 | print("") 356 | -------------------------------------------------------------------------------- /pyspextools/io/pha.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Import stuff for compatibility between python 2 and 3 4 | 5 | import pyspextools.messages as message 6 | import numpy as np 7 | import math 8 | import astropy.io.fits as fits 9 | from .rmf import Rmf 10 | 11 | 12 | class Pha: 13 | """Class to read OGIP PHA files. The variable naming is made consistent with the HEASOFT HEASP module by 14 | Keith Arnaud. 15 | 16 | :ivar FirstChannel: First valid spectral channel. 17 | :vartype FirstChannel: int 18 | :ivar DetChans: Total number of legal channels. 19 | :vartype DetChans: int 20 | 21 | :ivar Channel: Spectrum channels. 22 | :vartype Channel: numpy.ndarray 23 | :ivar Rate: Spectrum spectrum count rate. 24 | :vartype Rate: numpy.ndarray 25 | :ivar StatError: Spectrum error rate (if exists). 26 | :vartype StatError: numpy.ndarray 27 | :ivar SysError: Spectrum systematic error. 28 | :vartype SysError: numpy.ndarray 29 | 30 | :ivar Quality: Quality flag. 31 | :vartype Quality: numpy.ndarray 32 | :ivar Grouping: Grouping information. 33 | :vartype Grouping: numpy.ndarray 34 | 35 | :ivar AreaScaling: Areascal keyword/array. 36 | :vartype AreaScaling: numpy.ndarray 37 | :ivar BackScaling: Backscal keyword/array. 38 | :vartype BackScaling: numpy.ndarray 39 | :ivar CorrScal: Correction spectrum scaling. 40 | :vartype CorrScal: float 41 | 42 | :ivar Exposure: Exposure time of the spectrum. 43 | :vartype Exposure: float 44 | :ivar Poisserr: Are the errors Poissonian. 45 | :vartype Poisserr: bool 46 | :ivar Spectrumtype: Spectrumtype (TOTAL, NET or BKG). 47 | :vartype SpectrumType: str 48 | :ivar PhaType: Whether the spectrum is in COUNTS or RATE. 49 | :vartype PhaType: str 50 | 51 | :ivar rmffile: Associated Response matrix file. 52 | :vartype rmffile: str 53 | :ivar arffile: Associated Effective area file. 54 | :vartype arffile: str 55 | :ivar bkgfile: Associated Background file. 56 | :vartype bkgfile: str 57 | :ivar corfile: Associated Correction spectrum file. 58 | :vartype corfile: str 59 | 60 | :ivar Pha2Back: Is there a PHA2 background available? 61 | :vartype Pha2Back: bool 62 | :ivar BackRate: PHA2 Background Rate. 63 | :vartype BackRate: numpy.ndarray 64 | :ivar BackStatError: PHA2 Background Error. 65 | :vartype BackStatError: numpy.ndarray 66 | :ivar Pha2BackScal: Backscale value for background. 
67 | :vartype Pha2BackScal: float 68 | """ 69 | 70 | def __init__(self): 71 | # Spectrum arrays 72 | self.FirstChannel = 0 # First valid spectral channel 73 | self.DetChans = 0 # Total number of legal channels 74 | 75 | self.Channel = np.array([], dtype=int) # Spectrum channels 76 | self.Rate = np.array([], dtype=float) # Spectrum spectrum count rate 77 | self.StatError = np.array([], dtype=float) # Spectrum error rate (if exists) 78 | self.SysError = np.array([], dtype=float) # Spectrum systematic error 79 | 80 | self.Quality = np.array([], dtype=int) # Quality flag 81 | self.Grouping = np.array([], dtype=int) # Grouping information 82 | 83 | self.AreaScaling = np.array([], dtype=float) # Areascal keyword/array 84 | self.BackScaling = np.array([], dtype=float) # Backscal keyword/array 85 | self.CorrScal = 1.0 # Correction spectrum scaling 86 | 87 | self.Exposure = 0.0 # Exposure time of the spectrum 88 | self.Poisserr = True # Are the errors Poissonian 89 | self.Spectrumtype = 'TOTAL' # Spectrumtype (TOTAL, NET or BKG) 90 | self.PhaType = 'COUNTS' # Whether the spectrum is in COUNTS or RATE 91 | 92 | self.rmffile = None # Associated Response matrix file 93 | self.arffile = None # Associated Effective area file 94 | self.bkgfile = None # Associated Background file 95 | self.corfile = None # Associated Correction spectrum file 96 | 97 | # Only applicable for PHA2 files: 98 | self.Pha2Back = False # Is there a PHA2 background available 99 | self.BackRate = np.array([], dtype=float) # PHA2 Background Rate 100 | self.BackStatError = np.array([], dtype=float) # PHA2 Background Error 101 | self.Pha2BackScal = 1.0 # Backscale value for background 102 | 103 | def read(self, filename, force_poisson=False): 104 | """Read a spectrum from a PHA file. 105 | 106 | :param filename: PHA file name to be read. 107 | :type filename: str 108 | :param force_poisson: Force the calculation of Poisson errors (default: False) 109 | :type force_poisson: bool 110 | """ 111 | 112 | # Read the data and header from the SPECTRUM extension 113 | (data, header) = fits.getdata(filename, 'SPECTRUM', header=True) 114 | 115 | # Read the number of channels (outside the header call because of PHAII files). 116 | self.DetChans = header['DETCHANS'] 117 | 118 | # Read the header 119 | self.read_header(header) 120 | 121 | # Read Channel information 122 | self.Channel = data['CHANNEL'] 123 | self.FirstChannel = self.Channel[0] 124 | 125 | # Read the spectrum and convert to rate if necessary 126 | if self.PhaType == 'RATE': 127 | self.Rate = data['RATE'] 128 | else: 129 | self.Rate = np.zeros(self.DetChans, dtype=float) 130 | for i in np.arange(self.DetChans): 131 | self.Rate[i] = float(data['COUNTS'][i]) / self.Exposure 132 | # Only force Poisson errors for COUNTS spectra when flag is present 133 | if force_poisson: 134 | self.Poisserr = True 135 | 136 | # See if there are Statistical Errors present 137 | if not self.Poisserr: 138 | try: 139 | self.StatError = data['STAT_ERR'] 140 | except KeyError: 141 | self.StatError = None 142 | message.warning("No Poisson errors, but no STAT_ERR keyword found.") 143 | else: 144 | self.StatError = np.zeros(self.DetChans, dtype=float) 145 | for i in np.arange(self.DetChans): 146 | self.StatError[i] = math.sqrt(self.Rate[i] / self.Exposure) 147 | 148 | # Are there systematic errors? 
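# If no SYS_ERR column is present in the file, the systematic errors default to zero for all channels below.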
149 | try: 150 | self.SysError = data['SYS_ERR'] 151 | except KeyError: 152 | self.SysError = np.zeros(self.DetChans, dtype=float) 153 | 154 | if self.PhaType == 'RATE': 155 | self.SysError = self.SysError / self.Exposure 156 | 157 | # Are there quality flags? 158 | try: 159 | self.Quality = data['QUALITY'] 160 | except KeyError: 161 | self.Quality = np.zeros(self.DetChans, dtype=int) 162 | 163 | # Are there grouping flags? 164 | try: 165 | self.Grouping = data['GROUPING'] 166 | except KeyError: 167 | self.Grouping = np.zeros(self.DetChans, dtype=int) 168 | 169 | # Is there a backscale column? 170 | try: 171 | self.BackScaling = data['BACKSCAL'] 172 | except KeyError: 173 | self.BackScaling = np.ones(self.DetChans, dtype=float) * header['BACKSCAL'] 174 | 175 | # Is there an areascale column? 176 | try: 177 | self.AreaScaling = data['AREASCAL'] 178 | except KeyError: 179 | self.AreaScaling = np.ones(self.DetChans, dtype=float) * header['AREASCAL'] 180 | 181 | return 0 182 | 183 | def read_header(self, header): 184 | """Utility function to read the header from a SPECTRUM extension for both PHA and PHAII files. 185 | 186 | :param header: Header of the SPECTRUM extension. 187 | :type header: astropy.io.fits.Header 188 | """ 189 | 190 | # Read Exposure information 191 | self.Exposure = header['EXPOSURE'] 192 | 193 | # Read how the spectrum is stored (COUNTS or RATE) 194 | try: 195 | self.Spectrumtype = header['HDUCLAS2'] 196 | except KeyError: 197 | self.Spectrumtype = 'TOTAL' 198 | message.warning("HDUCLAS2 keyword not found. Assuming spectrumtype is TOTAL.") 199 | 200 | try: 201 | self.PhaType = header['HDUCLAS3'] 202 | except KeyError: 203 | message.warning("HDUCLAS3 keyword not found.") 204 | # When the HDUCLAS3 keyword does not exist, try to find either a COUNTS or RATE column in the table 205 | message.proc_start("Figuring out the type of PHA file") 206 | ncolumns = int(header['TFIELDS']) 207 | self.PhaType = None 208 | for i in np.arange(ncolumns): 209 | keyword = 'TTYPE{0}'.format(i+1) 210 | if header[keyword] == "RATE": 211 | self.PhaType = 'RATE' 212 | print(self.PhaType) 213 | break 214 | elif header[keyword] == "COUNTS": 215 | self.PhaType = 'COUNTS' 216 | print(self.PhaType) 217 | break 218 | 219 | if self.PhaType is None: 220 | message.error("The provided PHA file does not have a COUNTS or RATE column. 
Is this a spectrum?") 221 | return 1 222 | 223 | # Read the POISERR keyword 224 | try: 225 | self.Poisserr = header['POISSERR'] 226 | except KeyError: 227 | self.Poisserr = False 228 | 229 | # Read Correction scaling factor 230 | self.CorrScal = header['CORRSCAL'] 231 | 232 | # Read a background file, if available 233 | try: 234 | self.bkgfile = header['BACKFILE'] 235 | except KeyError: 236 | self.bkgfile = None 237 | 238 | # Read an respoonse file, if available 239 | try: 240 | self.rmffile = header['RESPFILE'] 241 | except KeyError: 242 | self.rmffile = None 243 | 244 | # Read an effective area file, if available 245 | try: 246 | self.arffile = header['ANCRFILE'] 247 | except KeyError: 248 | self.arffile = None 249 | 250 | # Read a correction file, if available 251 | try: 252 | self.corfile = header['CORRFILE'] 253 | except KeyError: 254 | self.corfile = None 255 | 256 | def check(self): 257 | """Check if the object contains the minimum required data.""" 258 | # Check exposure value 259 | if self.Exposure <= 0.0: 260 | message.error("Exposure time of spectrum is zero or smaller.") 261 | return 1 262 | if self.DetChans <= 0: 263 | message.error("Number of channels is zero.") 264 | return 1 265 | if self.Rate.size <= 0: 266 | message.error("Size of rate array is zero.") 267 | return 1 268 | 269 | return 0 270 | 271 | def create_dummy(self, resp): 272 | """Generate dummy spectrum based on rmf channel information. 273 | 274 | :param resp: Input RMF response object. 275 | :type resp: pyspextools.io.rmf.Rmf 276 | """ 277 | 278 | if not isinstance(resp, Rmf): 279 | message.error("Input response object is not the required Rmf object.") 280 | return 281 | 282 | # First copy the channel information to the PHA object 283 | self.Channel = resp.ebounds.Channel 284 | self.FirstChannel = resp.ebounds.Channel[0] 285 | self.DetChans = resp.ebounds.NumberChannels 286 | 287 | # Generate a dummy spectrum (obviously not realistic, should be simulated in SPEX later) 288 | # Set exposure, statistic and type of spectrum 289 | self.Exposure = 1000.0 290 | self.Poisserr = True 291 | self.Spectrumtype = 'TOTAL' 292 | 293 | # Generate spectrum values and quality flags 294 | self.Rate = np.ones(self.DetChans, dtype=float) / self.Exposure 295 | self.StatError = np.ones(self.DetChans, dtype=float) / self.Exposure 296 | self.SysError = np.zeros(self.DetChans, dtype=float) 297 | self.Quality = np.zeros(self.DetChans, dtype=float) 298 | self.Grouping = np.zeros(self.DetChans, dtype=float) 299 | self.AreaScaling = np.ones(self.DetChans, dtype=float) 300 | self.BackScaling = np.ones(self.DetChans, dtype=float) 301 | 302 | def disp(self): 303 | """Display a summary of the PHA file.""" 304 | print("") 305 | print("FirstChannel {0} First valid spectral channel".format(self.FirstChannel)) 306 | print("DetChans {0} Total number of legal channels".format(self.DetChans)) 307 | print("Exposure {0} Exposure time of the spectrum".format(self.Exposure)) 308 | print("Poisserr {0} Are the errors Poissonian".format(self.Poisserr)) 309 | print("Spectrumtype {0} Spectrumtype (TOTAL, NET or BKG)".format(self.Spectrumtype)) 310 | print("PhaType {0} Whether the spectrum is in COUNTS or RATE".format(self.PhaType)) 311 | print("AreaScaling {0} Areascal keyword/array".format(self.AreaScaling)) 312 | print("BackScaling {0} Backscal keyword/array".format(self.BackScaling)) 313 | print("CorrScal {0} Correction spectrum scaling".format(self.CorrScal)) 314 | print("") 315 | print("Arrays:") 316 | print("Channel {0} Spectrum 
channels".format(self.Channel.size)) 317 | print("Rate {0} Spectrum spectrum count rate".format(self.Rate.size)) 318 | print("StatError {0} Spectrum error rate (if exists)".format(self.StatError.size)) 319 | print("SysError {0} Spectrum systematic error".format(self.SysError.size)) 320 | print("Quality {0} Quality flag".format(self.Quality.size)) 321 | print("Grouping {0} Grouping information".format(self.Grouping.size)) 322 | print("") 323 | print("Associated files:") 324 | print("rmffile {0} Associated Response matrix file".format(self.rmffile)) 325 | print("arffile {0} Associated Effective area file".format(self.arffile)) 326 | print("bkgfile {0} Associated Background file".format(self.bkgfile)) 327 | print("corfile {0} Associated Correction spectrum file".format(self.corfile)) 328 | print("") 329 | 330 | def check_compatibility(self, pha): 331 | """Check if another PHA object is compatible with the current one in terms of number of channels. 332 | 333 | :param pha: PHA object to check compatibility for. 334 | :type pha: pyspextools.io.pha.Pha 335 | """ 336 | 337 | # Check equal number of channels 338 | if self.DetChans != pha.DetChans: 339 | message.error("Number of channels not equal for both PHA files.") 340 | return 1 341 | 342 | return 0 343 | 344 | def number_channels(self): 345 | return self.DetChans 346 | -------------------------------------------------------------------------------- /pyspextools/io/pha2.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import pyspextools.messages as message 4 | import numpy as np 5 | import math 6 | import astropy.io.fits as fits 7 | 8 | from .pha import Pha 9 | 10 | 11 | class Pha2: 12 | """ Class to read PHA2 type OGIP spectra. 13 | 14 | :ivar NumberSpectra: Number of spectra in PHAII file. 15 | :vartype NumberSpectra: int 16 | :ivar phalist: List of PHA spectra. 17 | :vartype phalist: list 18 | :ivar tg_m: Array of order numbers. 19 | :vartype tg_m: numpy.ndarray 20 | :ivar tg_part: Array of grating numbers. 21 | :vartype tg_part: numpy.ndarray 22 | :ivar instrument: Instrument name. 23 | :vartype instrument: str 24 | :ivar telescope: Telescope name. 25 | :vartype telescope: str 26 | :ivar grating: Grating name 27 | :vartype grating: str 28 | 29 | :ivar gratings: Dictionary of grating names. 30 | :vartype gratings: dict 31 | """ 32 | 33 | def __init__(self): 34 | self.NumberSpectra = 0 # Number of spectra in PHAII file 35 | self.phalist = [] # List of PHA spectra 36 | self.tg_m = np.array([]) # Array of order numbers 37 | self.tg_part = np.array([]) # Array of grating numbers 38 | self.instrument = '' # Instrument name 39 | self.telescope = '' # Telescope name 40 | self.grating = '' # Grating name 41 | 42 | self.gratings = {'1': 'heg', '2': 'meg', '3': 'leg'} 43 | 44 | def read(self, phafile, force_poisson=True, background=False): 45 | """Read a type II pha file. Many time Gehrels errors are provided, but we prefer Poisson. Therefore, the 46 | optional 'force_poisson' flag is True by default. Set force_poisson to false to obtain the errors from 47 | the file. If the user wants to subtract the background, the flag 'background' should be set to True. 48 | 49 | :param phafile: Name of the type II PHA file. 50 | :type phafile: str 51 | :param force_poisson: Flag to set the enforcement of Poisson errors. 52 | :type force_poisson: bool 53 | :param background: Subtract the background (True/False)? 
54 | :type background: bool 55 | """ 56 | 57 | file = fits.open(phafile) 58 | header = file['SPECTRUM'].header 59 | data = file['SPECTRUM'].data 60 | 61 | self.NumberSpectra = header['NAXIS2'] 62 | self.tg_m = data['TG_M'] 63 | self.tg_part = data['TG_PART'] 64 | self.instrument = header['INSTRUME'] 65 | self.telescope = header['TELESCOP'] 66 | self.grating = header['GRATING'] 67 | 68 | for i in np.arange(self.NumberSpectra): 69 | pha = Pha() 70 | 71 | # Read Channel information 72 | pha.read_header(header) 73 | 74 | # Read Channel information 75 | pha.Channel = data['CHANNEL'][i] 76 | pha.FirstChannel = pha.Channel[0] 77 | pha.DetChans = pha.Channel.size 78 | 79 | # Read the spectrum and convert to rate if necessary 80 | if pha.PhaType == 'RATE': 81 | pha.Rate = data['RATE'][i] 82 | else: 83 | pha.Rate = np.zeros(pha.DetChans, dtype=float) 84 | for j in np.arange(pha.DetChans): 85 | pha.Rate[j] = float(data['COUNTS'][i][j]) / pha.Exposure 86 | 87 | if force_poisson: 88 | poisson = True 89 | else: 90 | poisson = pha.Poisserr 91 | 92 | # See if there are Statistical Errors present 93 | if not poisson: 94 | try: 95 | pha.StatError = data['STAT_ERR'][i] 96 | except KeyError: 97 | pha.StatError = None 98 | message.error("No Poisson errors, but no STAT_ERR keyword found.") 99 | return 1 100 | else: 101 | pha.StatError = np.zeros(pha.DetChans, dtype=float) 102 | for j in np.arange(pha.DetChans): 103 | pha.StatError[j] = math.sqrt(pha.Rate[j] / pha.Exposure) 104 | 105 | # Are there systematic errors? 106 | try: 107 | pha.SysError = data['SYS_ERR'][i] 108 | except KeyError: 109 | pha.SysError = np.zeros(pha.DetChans, dtype=float) 110 | 111 | if pha.PhaType == 'RATE': 112 | pha.SysError = pha.SysError / pha.Exposure 113 | 114 | # Are there quality flags? 115 | try: 116 | pha.Quality = data['QUALITY'][i] 117 | except KeyError: 118 | pha.Quality = np.zeros(pha.DetChans, dtype=int) 119 | 120 | # Are there grouping flags? 121 | try: 122 | pha.Grouping = data['GROUPING'][i] 123 | except KeyError: 124 | pha.Grouping = np.zeros(pha.DetChans, dtype=int) 125 | 126 | # Is there a backscale column? 127 | try: 128 | pha.BackScaling = data['BACKSCAL'][i] 129 | except KeyError: 130 | pha.BackScaling = np.ones(pha.DetChans, dtype=float) * header['BACKSCAL'] 131 | 132 | # Is there an areascale column? 133 | try: 134 | pha.AreaScaling = data['AREASCAL'][i] 135 | except KeyError: 136 | pha.AreaScaling = np.ones(pha.DetChans, dtype=float) * header['AREASCAL'] 137 | 138 | if background: 139 | pha.Pha2Back = True 140 | pha.BackRate = (data['BACKGROUND_UP'][i] + data['BACKGROUND_DOWN'][i]) / pha.Exposure 141 | pha.BackStatError = np.zeros(data['BACKGROUND_UP'].size, dtype=float) 142 | for j in np.arange(pha.DetChans): 143 | pha.BackStatError[j] = math.sqrt(pha.BackRate[j] / pha.Exposure) 144 | pha.Pha2BackScal = header['BACKSCUP'] + header['BACKSCDN'] 145 | else: 146 | pha.Pha2Back = False 147 | pha.BackRate = np.zeros(pha.DetChans, dtype=float) 148 | pha.BackStatError = np.zeros(pha.DetChans, dtype=float) 149 | pha.Pha2BackScal = 1.0 150 | 151 | self.phalist.append(pha) 152 | 153 | file.close() 154 | return 0 155 | 156 | def combine_orders(self, grating): 157 | """Combine the orders for spectra from the same grating (1 = HETG, 2 = METG, 3 = LETG). 158 | 159 | :param grating: Grating number to combine the orders for. 
160 | :type grating: int 161 | """ 162 | 163 | # Select rows to combine 164 | tocombine = np.where(self.tg_part == grating)[0] 165 | 166 | if tocombine.size == 0: 167 | message.error("Grating number not found in dataset.") 168 | return 1 169 | 170 | if tocombine.size == 1: 171 | message.error("Only a single order found. No combining will be done.") 172 | return 1 173 | 174 | # Create new PHA file to output (set first row as default). 175 | srcpha = self.phalist[tocombine[0]] 176 | bkgpha = Pha() 177 | bkgpha.StatError = np.zeros(srcpha.DetChans, dtype=float) 178 | 179 | for i in np.arange(tocombine.size): 180 | if i == 0: 181 | continue 182 | 183 | ipha = self.phalist[tocombine[i]] 184 | 185 | srcpha.Rate = srcpha.Rate + ipha.Rate 186 | bkgpha.Rate = srcpha.BackRate + ipha.BackRate 187 | 188 | for j in np.arange(srcpha.DetChans): 189 | if srcpha.StatError is not None: 190 | srcpha.StatError[j] = math.sqrt(srcpha.StatError[j]**2 + ipha.StatError[j]**2) 191 | bkgpha.StatError[j] = math.sqrt(srcpha.BackStatError[j]**2 + ipha.BackStatError[j]**2) 192 | srcpha.SysError[j] = math.sqrt(srcpha.SysError[j]**2 + ipha.SysError[j]**2) 193 | if ipha.Quality[j] != 0: 194 | srcpha.Quality = 1 195 | 196 | # Remove grouping for now (maybe implemented later) 197 | srcpha.Grouping = 0 * srcpha.Grouping 198 | 199 | srcpha.AreaScaling = srcpha.AreaScaling + ipha.AreaScaling 200 | srcpha.BackScaling = srcpha.BackScaling + ipha.BackScaling 201 | 202 | # Calculate the average AreaScaling and BackScaling (Probably wrong!) 203 | srcpha.AreaScaling = srcpha.AreaScaling / tocombine.size 204 | srcpha.BackScaling = srcpha.BackScaling / tocombine.size 205 | 206 | bkgpha.AreaScaling = np.ones(srcpha.DetChans, dtype=float) 207 | bkgpha.BackScaling = srcpha.Pha2BackScal * np.ones(srcpha.DetChans, dtype=float) 208 | bkgpha.Quality = np.zeros(srcpha.DetChans, dtype=int) 209 | bkgpha.Exposure = srcpha.Exposure 210 | 211 | return srcpha, bkgpha 212 | -------------------------------------------------------------------------------- /pyspextools/io/region.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # ========================================================= 4 | """ 5 | Python module to organise SPEX res and spo files into regions. 6 | See this page for the format specification: 7 | 8 | https://spex-xray.github.io/spex-help/theory/response.html 9 | 10 | This module contains the Region class: 11 | 12 | Region: Contains the the combination of a spectrum and a 13 | response organized in a SPEX region 14 | 15 | Dependencies: 16 | - numpy: Array operations 17 | - spo: The spo class from this pyspextools data module 18 | - res: The res class from this pyspextools data module 19 | """ 20 | # ========================================================= 21 | 22 | import numpy as np 23 | 24 | from pyspextools.io.spo import Spo 25 | from pyspextools.io.res import Res 26 | import pyspextools.messages as message 27 | 28 | 29 | # ========================================================= 30 | # Region class 31 | # ========================================================= 32 | 33 | class Region: 34 | """A SPEX region is a spectrum/response combination for a 35 | specific observation, instrument or region on the sky. 36 | It combines the spectrum and response file in one object. 
37 | 38 | :ivar spo: Spo object 39 | :vartype spo: pyspextools.io.Spo 40 | :ivar res: Res object 41 | :vartype res: pyspextools.io.Res 42 | """ 43 | 44 | def __init__(self): 45 | self.spo = Spo() # Spo object 46 | self.res = Res() # Res object 47 | self.label = "" # Optional region label (will not be written to file). For example: MOS1, annulus2, etc. 48 | 49 | def change_label(self, label): 50 | """Attach a label to this region to easily identify it. For example: MOS1, annulus 2, etc. 51 | 52 | :param label: Text string to identify region. 53 | :type label: str 54 | """ 55 | self.label = str(label) 56 | 57 | def set_sector(self, sector): 58 | """Set the sector number for this region. 59 | 60 | :param sector: Sector number to set for this region. 61 | :type sector: int 62 | """ 63 | 64 | for i in np.arange(self.res.sector.size): 65 | self.res.sector[i] = sector 66 | 67 | def set_region(self, region): 68 | """Set the region number for this region. 69 | 70 | :param region: Region number to set for this region. 71 | :type region: int 72 | """ 73 | 74 | for i in np.arange(self.res.region.size): 75 | self.res.region[i] = region 76 | 77 | def increase_region(self, amount): 78 | """Increase the region numbers by an integer amount. 79 | 80 | :param amount: Integer amount to add to region numbers. 81 | :type amount: int 82 | """ 83 | 84 | for i in np.arange(self.res.region.size): 85 | self.res.region[i] = self.res.region[i] + amount 86 | 87 | def check(self, nregion=False): 88 | """Check whether spectrum and response are compatible 89 | and whether the arrays really consist of one region (if nregion flag is set). 90 | 91 | :param nregion: Flag to check whether the arrays just contain one region. 92 | :type nregion: bool 93 | """ 94 | 95 | if self.res.nchan[0] != self.spo.nchan[0]: 96 | message.error("Number of channels in spectrum is not equal to number of channels in response.") 97 | return -1 98 | 99 | if nregion: 100 | if self.spo.nchan.size != 1: 101 | message.error("SPO object consists of more than one region according to nchan array size.") 102 | return -1 103 | 104 | if self.spo.nregion != 1: 105 | message.error("SPO object consists of more than one region according to nregion parameter.") 106 | return -1 107 | 108 | return 0 109 | 110 | def show(self, isector=1, iregion=1): 111 | """Show a summary of the region metadata. 112 | 113 | :param isector: Sector number to show. 114 | :type isector: int 115 | :param iregion: Region number to show. 116 | :type iregion: int 117 | """ 118 | 119 | print("===========================================================") 120 | print(" Sector: {0} => Region: {1}".format(str(self.res.sector[0]), 121 | str(self.res.region[0]))) 122 | print(" Label: {0}".format(self.label)) 123 | 124 | print(" -------------------- Spectrum -------------------------") 125 | self.spo.show() 126 | 127 | print(" -------------------- Response -------------------------") 128 | self.res.show(isector=isector, iregion=iregion) 129 | -------------------------------------------------------------------------------- /pyspextools/io/rmf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import pyspextools.messages as message 4 | import numpy as np 5 | import astropy.io.fits as fits 6 | from pyspextools.io.arf import Arf 7 | 8 | 9 | class RmfEbounds: 10 | """Class to read the EBOUNDS extension from an RMF or RSP file. 11 | 12 | :ivar FirstChannel: First channel number. 
13 | :vartype FirstChannel: int 14 | :ivar Channel: Channel numbers. 15 | :vartype Channel: numpy.ndarray 16 | :ivar ChannelLowEnergy: Start energy of channel. 17 | :vartype ChannelLowEnergy: numpy.ndarray 18 | :ivar ChannelHighEnergy: End energy of channel. 19 | :vartype ChannelHighEnergy: numpy.ndarray 20 | :ivar NumberChannels: Number of data channels. 21 | :vartype NumberChannels: int 22 | :ivar EnergyUnits: Unit of the energy scale 23 | :vartype EnergyUnits: string 24 | """ 25 | 26 | def __init__(self): 27 | self.FirstChannel = 0 # First channel number 28 | self.Channel = np.array([], dtype=int) # Channel numbers 29 | self.ChannelLowEnergy = np.array([], dtype=float) # Start energy of channel 30 | self.ChannelHighEnergy = np.array([], dtype=float) # End energy of channel 31 | self.NumberChannels = 0 # Number of data channels 32 | self.EnergyUnits = '' # Unit of the energy scale 33 | 34 | def read(self, rmffile): 35 | # Read the Ebounds table 36 | (data, header) = fits.getdata(rmffile, 'EBOUNDS', header=True) 37 | 38 | self.Channel = data['CHANNEL'] 39 | self.ChannelLowEnergy = data['E_MIN'] 40 | self.ChannelHighEnergy = data['E_MAX'] 41 | self.NumberChannels = self.Channel.size 42 | self.FirstChannel = self.Channel[0] 43 | 44 | try: 45 | self.EnergyUnits = header['TUNIT2'] 46 | except KeyError: 47 | message.warning("Could not find energy units in the Energy column. Assuming unit keV") 48 | self.EnergyUnits = 'keV' 49 | 50 | 51 | class RmfMatrix: 52 | """Class to read a MATRIX extension from an OGIP RMF or RSP file. 53 | 54 | :ivar NumberGroups: Number of response groups. 55 | :vartype NumberGroups: numpy.ndarray 56 | :ivar FirstGroup: First response group for this energy bin. 57 | :vartype FirstGroup: numpy.ndarray 58 | :ivar FirstChannelGroup: First channel number in this group. 59 | :vartype FirstChannelGroup: numpy.ndarray 60 | :ivar NumberChannelsGroup: Number of channels in this group. 61 | :vartype NumberChannelsGroup: numpy.ndarray 62 | :ivar FirstElement: First response element for this group. 63 | :vartype FirstElement: numpy.ndarray 64 | :ivar LowEnergy: Start energy of bin. 65 | :vartype LowEnergy: numpy.ndarray 66 | :ivar HighEnergy: End energy of bin. 67 | :vartype HighEnergy: numpy.ndarray 68 | :ivar Matrix: Response matrix elements. 69 | :vartype Matrix: numpy.ndarray 70 | 71 | :ivar NumberEnergyBins: Number of energy bins. 72 | :vartype NumberEnergyBins: int 73 | :ivar NumberTotalGroups: Total number of groups. 74 | :vartype NumberTotalGroups: int 75 | :ivar NumberTotalElements: Total number of response elements. 76 | :vartype NumberTotalElements: int 77 | 78 | :ivar AreaScaling: Value of EFFAREA keyword. 79 | :vartype AreaScaling: float 80 | :ivar ResponseThreshold: Minimum value in response. 81 | :vartype ResponseThreshold: float 82 | :ivar EnergyUnits: Units of the energy scale. 83 | :vartype EnergyUnits: str 84 | :ivar RMFUnits: Units for RMF values. 85 | :vartype RMFUnits: str 86 | :ivar AreaIncluded: Is the effective area included in the response? 87 | :vartype AreaIncluded: bool 88 | 89 | :ivar Order: Order of the matrix. 
90 | :vartype Order: int 91 | """ 92 | 93 | def __init__(self): 94 | self.NumberGroups = np.array([], dtype=int) # Number of response groups 95 | self.FirstGroup = np.array([], dtype=int) # First response group for this energy bin 96 | self.FirstChannelGroup = np.array([], dtype=int) # First channel number in this group 97 | self.NumberChannelsGroup = np.array([], dtype=int) # Number of channels in this group 98 | self.FirstElement = np.array([], dtype=int) # First response element for this group 99 | self.LowEnergy = np.array([], dtype=float) # Start energy of bin 100 | self.HighEnergy = np.array([], dtype=float) # End energy of bin 101 | self.Matrix = np.array([], dtype=float) # Matrix elements 102 | 103 | self.NumberEnergyBins = 0 # Number of energy bins 104 | self.NumberTotalGroups = 0 # Total number of groups 105 | self.NumberTotalElements = 0 # Total number of response elements 106 | 107 | self.AreaScaling = 1.0 # Value of EFFAREA keyword 108 | self.ResponseThreshold = 1E-7 # Minimum value in response 109 | self.EnergyUnits = '' # Units of the energy scale 110 | self.RMFUnits = '' # Units for RMF values 111 | self.AreaIncluded = False # Is the effective area included in the response? 112 | 113 | self.Order = 0 # Order of the matrix 114 | 115 | def read(self, rmfhdu): 116 | 117 | # Read the Matrix table 118 | data = rmfhdu.data 119 | header = rmfhdu.header 120 | 121 | if rmfhdu.name == 'MATRIX': 122 | pass 123 | elif rmfhdu.name == 'SPECRESP MATRIX': 124 | message.warning("This is an RSP file with the effective area included.") 125 | print("Do not read an ARF file, unless you know what you are doing.") 126 | self.AreaIncluded = True 127 | else: 128 | message.error("MATRIX extension not successfully found in RMF file.") 129 | return 130 | 131 | self.LowEnergy = data['ENERG_LO'] 132 | self.HighEnergy = data['ENERG_HI'] 133 | self.NumberEnergyBins = self.LowEnergy.size 134 | 135 | try: 136 | self.EnergyUnits = header['TUNIT1'] 137 | except KeyError: 138 | message.warning("Could not find units in the file for the Energy grid. 
Assuming keV.") 139 | self.EnergyUnits = 'keV' 140 | 141 | self.NumberGroups = data['N_GRP'] 142 | self.NumberTotalGroups = np.sum(self.NumberGroups) 143 | 144 | self.FirstGroup = np.zeros(self.NumberEnergyBins, dtype=int) 145 | 146 | self.FirstChannelGroup = np.zeros(self.NumberTotalGroups, dtype=int) 147 | self.NumberChannelsGroup = np.zeros(self.NumberTotalGroups, dtype=int) 148 | self.FirstElement = np.zeros(self.NumberTotalGroups, dtype=int) 149 | 150 | self.Matrix = np.array([], dtype=float) 151 | 152 | try: 153 | self.Order = header['ORDER'] 154 | except KeyError: 155 | pass 156 | 157 | fgroup = 0 # Count total number of groups 158 | felem = 0 # Count total number of response elements 159 | nelem = np.zeros(self.NumberEnergyBins, dtype=int) # Count number of response elements per energy bin 160 | k = 0 161 | 162 | fchan_local = data['F_CHAN'] 163 | nchan_local = data['N_CHAN'] 164 | matrix_local = data['MATRIX'] 165 | 166 | for i in np.arange(self.NumberEnergyBins): 167 | self.FirstGroup[i] = fgroup 168 | fgroup = fgroup + self.NumberGroups[i] 169 | 170 | if self.NumberGroups[i] != 0: 171 | for j in np.arange(self.NumberGroups[i]): 172 | try: 173 | self.FirstChannelGroup[k] = fchan_local[i][j] 174 | self.NumberChannelsGroup[k] = nchan_local[i][j] 175 | except IndexError: 176 | self.FirstChannelGroup[k] = fchan_local[i] 177 | self.NumberChannelsGroup[k] = nchan_local[i] 178 | self.FirstElement[k] = felem 179 | felem = felem + self.NumberChannelsGroup[k] 180 | nelem[i] = nelem[i] + self.NumberChannelsGroup[k] 181 | 182 | k = k + 1 183 | 184 | self.Matrix = np.zeros(felem, dtype=float) 185 | 186 | r = 0 187 | for i in np.arange(self.NumberEnergyBins): 188 | if nelem[i] != 0: 189 | for j in np.arange(nelem[i]): 190 | self.Matrix[r] = matrix_local[i][j] 191 | r = r + 1 192 | 193 | self.NumberTotalElements = self.Matrix.size 194 | self.ResponseThreshold = np.amin(self.Matrix) 195 | 196 | 197 | class Rmf: 198 | """Class to read OGIP RMF files. The response is given in two parts: an EBOUNDS extension, containing 199 | the energy boundries of the instrument channels, and one or more MATRIX extensions, which contain components 200 | of the response matrix. 201 | 202 | :ivar ebounds: Represents the EBOUNDS extension in the RMF file, which contains the channel energy scale. 203 | :vartype ebounds: pyspextools.io.rmf.RmfEbounds 204 | :ivar matrix: List containing the matrix extensions (type pyspextools.io.rmf.RmfMatrix) 205 | :vartype matrix: list 206 | :ivar NumberMatrixExt: The number of matrix extensions. 207 | :vartype NumberMatrixExt: int 208 | :ivar MatrixExt: Array containing the FITS extension numbers that contain a response matrix. 209 | :vartype MatrixExt: numpy.ndarray 210 | """ 211 | 212 | def __init__(self): 213 | self.ebounds = RmfEbounds() 214 | self.matrix = [] 215 | self.NumberMatrixExt = 0 216 | self.MatrixExt = np.array([], dtype=int) 217 | 218 | def read(self, rmffile): 219 | """Method to read OGIP RMF files. The variable naming is made consistent with the HEASOFT HEASP module by 220 | Keith Arnaud. 221 | 222 | :param rmffile: RMF file name to read. 
223 | :type rmffile: str 224 | """ 225 | 226 | # Read the Ebounds table 227 | self.ebounds.read(rmffile) 228 | 229 | # Empty lists for safety 230 | self.NumberMatrixExt = 0 231 | self.MatrixExt = np.array([], dtype=int) 232 | self.matrix = [] 233 | 234 | # Read the number of MATRIX extensions 235 | rmf = fits.open(rmffile) 236 | for i in range(len(rmf)): 237 | if rmf[i].name == 'MATRIX' or rmf[i].name == 'SPECRESP MATRIX': 238 | self.NumberMatrixExt += 1 239 | self.MatrixExt = np.append(self.MatrixExt, i) 240 | 241 | # Read the individual matrix extensions 242 | for i in self.MatrixExt: 243 | mat = RmfMatrix() 244 | mat.read(rmf[i]) 245 | self.matrix.append(mat) 246 | 247 | rmf.close() 248 | 249 | return 0 250 | 251 | def write(self, rmffile, telescop=None, instrume=None, filterkey=None, overwrite=False): 252 | """Method to write an OGIP format RMF file. 253 | 254 | :param rmffile: RMF file name to write. 255 | :type rmffile: str 256 | :param telescop: Name of the telescope to be put in the TELESCOP keyword. 257 | :type telescop: str 258 | :param instrume: Name of the instrument to be put in the INSTRUME keyword. 259 | :type instrume: str 260 | :param filterkey: Name of the filter to be put in the FILTER keyword. 261 | :type filterkey: str 262 | :param overwrite: Overwrite existing file names? (True/False) 263 | :type overwrite: bool 264 | """ 265 | 266 | # 267 | # Generate warning if there are multiple groups per energy 268 | # 269 | if np.amax(self.matrix[0].NumberGroups) != 1: 270 | message.warning("This method has not been tested for responses with multiple response groups per energy.") 271 | 272 | # 273 | # Create Primary HDU 274 | # 275 | primary = fits.PrimaryHDU() 276 | 277 | # 278 | # Create the EBOUNDS extension 279 | # 280 | ecol1 = fits.Column(name='CHANNEL', format='J', array=self.ebounds.Channel) 281 | ecol2 = fits.Column(name='E_MIN', format='D', unit=self.ebounds.EnergyUnits, array=self.ebounds.ChannelLowEnergy) 282 | ecol3 = fits.Column(name='E_MAX', format='D', unit=self.ebounds.EnergyUnits, array=self.ebounds.ChannelHighEnergy) 283 | 284 | ebnds = fits.BinTableHDU.from_columns([ecol1, ecol2, ecol3]) 285 | 286 | ehdr = ebnds.header 287 | ehdr.set('EXTNAME', 'EBOUNDS') 288 | ehdr.set('DETCHANS', self.ebounds.NumberChannels) 289 | 290 | # Set the TELESCOP keyword (optional) 291 | if telescop is None: 292 | ehdr.set('TELESCOP', 'None', 'Telescope name') 293 | else: 294 | ehdr.set('TELESCOP', telescop, 'Telescope name') 295 | 296 | # Set the INSTRUME keyword (optional) 297 | if instrume is None: 298 | ehdr.set('INSTRUME', 'None', 'Instrument name') 299 | else: 300 | ehdr.set('INSTRUME', instrume, 'Instrument name') 301 | 302 | # Set the FILTER keyword (optional) 303 | if filterkey is None: 304 | ehdr.set('FILTER', 'None', 'Filter setting') 305 | else: 306 | ehdr.set('FILTER', filterkey, 'Filter setting') 307 | 308 | ehdr.set('DETNAM ', 'None') 309 | ehdr.set('CHANTYPE', 'PI') 310 | ehdr.set('HDUCLASS', 'OGIP') 311 | ehdr.set('HDUCLAS1', 'RESPONSE') 312 | ehdr.set('HDUCLAS2', 'EBOUNDS ') 313 | ehdr.set('HDUVERS1', '1.2.0') 314 | ehdr.set('ORIGIN ', 'SRON') 315 | 316 | hdu = fits.HDUList([primary, ebnds]) 317 | 318 | # 319 | # Create SPECRESP MATRIX extension 320 | # 321 | for e in range(self.NumberMatrixExt): 322 | print("Writing matrix for matrix number: {0}".format(e)) 323 | 324 | mcol1 = fits.Column(name='ENERG_LO', format='D', unit=self.matrix[e].EnergyUnits, array=self.matrix[e].LowEnergy) 325 | mcol2 = fits.Column(name='ENERG_HI', format='D', 
unit=self.matrix[e].EnergyUnits, array=self.matrix[e].HighEnergy) 326 | mcol3 = fits.Column(name='N_GRP', format='J', array=self.matrix[e].NumberGroups) 327 | mcol4 = fits.Column(name='F_CHAN', format='J', array=self.matrix[e].FirstChannelGroup) 328 | mcol5 = fits.Column(name='N_CHAN', format='J', array=self.matrix[e].NumberChannelsGroup) 329 | 330 | # Determine the width of the matrix 331 | width = np.amax(self.matrix[e].NumberChannelsGroup) 332 | formatstr = str(width)+'D' 333 | 334 | # 335 | # THIS PART COULD BE UPDATED TO OPTIMIZE THE SIZE USING VARIABLE SIZE ARRAYS IN FITS. 336 | # 337 | 338 | # Building the MATRIX column 339 | newmatrix = np.zeros(self.matrix[e].NumberEnergyBins * width, dtype=float).reshape(self.matrix[e].NumberEnergyBins, width) 340 | 341 | re = 0 342 | for i in np.arange(self.matrix[e].NumberEnergyBins): 343 | for j in np.arange(self.matrix[e].NumberGroups[i]): 344 | for k in np.arange(self.matrix[e].NumberChannelsGroup[i]): 345 | newmatrix[i, k] = self.matrix[e].Matrix[re] 346 | re = re + 1 347 | 348 | mcol6 = fits.Column(name='MATRIX', format=formatstr, array=newmatrix) 349 | 350 | matrix = fits.BinTableHDU.from_columns([mcol1, mcol2, mcol3, mcol4, mcol5, mcol6]) 351 | 352 | mhdr = matrix.header 353 | 354 | if self.matrix[e].AreaIncluded: 355 | mhdr.set('EXTNAME', 'SPECRESP MATRIX') 356 | else: 357 | mhdr.set('EXTNAME', 'MATRIX') 358 | 359 | # Set the TELESCOP keyword (optional) 360 | if telescop is None: 361 | mhdr.set('TELESCOP', 'None', 'Telescope name') 362 | else: 363 | mhdr.set('TELESCOP', telescop, 'Telescope name') 364 | 365 | # Set the INSTRUME keyword (optional) 366 | if instrume is None: 367 | mhdr.set('INSTRUME', 'None', 'Instrument name') 368 | else: 369 | mhdr.set('INSTRUME', instrume, 'Instrument name') 370 | 371 | # Set the FILTER keyword (optional) 372 | if filterkey is None: 373 | mhdr.set('FILTER', 'None', 'Filter setting') 374 | else: 375 | mhdr.set('FILTER', filterkey, 'Filter setting') 376 | 377 | mhdr.set('DETCHANS', self.ebounds.NumberChannels) 378 | mhdr.set('LO_THRES', self.matrix[e].ResponseThreshold) 379 | 380 | mhdr.set('CHANTYPE', 'PI') 381 | mhdr.set('HDUCLASS', 'OGIP') 382 | mhdr.set('HDUCLAS1', 'RESPONSE') 383 | mhdr.set('HDUCLAS2', 'RSP_MATRIX') 384 | 385 | if self.matrix[e].AreaIncluded: 386 | mhdr.set('HDUCLAS3', 'FULL') 387 | else: 388 | mhdr.set('HDUCLAS3', 'REDIST') 389 | mhdr.set('HDUVERS1', '1.3.0') 390 | mhdr.set('ORIGIN ', 'SRON') 391 | 392 | matrix.header['HISTORY'] = 'Created by pyspextools:' 393 | matrix.header['HISTORY'] = 'https://github.com/spex-xray/pyspextools' 394 | 395 | hdu.append(matrix) 396 | 397 | try: 398 | hdu.writeto(rmffile, overwrite=overwrite) 399 | except IOError: 400 | message.error("File {0} already exists. 
I will not overwrite it!".format(rmffile)) 401 | return 1 402 | 403 | return 0 404 | 405 | def check(self): 406 | """Check the RMF for internal consistency.""" 407 | if self.ebounds.NumberChannels <= 0: 408 | message.error("Number of Channels in response is zero.") 409 | return 1 410 | 411 | for e in range(self.NumberMatrixExt): 412 | if self.matrix[e].NumberEnergyBins <= 0: 413 | message.error("Number of Energy bins in response is zero.") 414 | return 1 415 | 416 | c = 0 417 | r = 0 418 | # Check if matrix array is consistent with the indexing 419 | for i in np.arange(self.matrix[e].NumberEnergyBins): 420 | for j in np.arange(self.matrix[e].NumberGroups[i]): 421 | for k in np.arange(self.matrix[e].NumberChannelsGroup[c]): 422 | r = r + 1 423 | c = c + 1 424 | 425 | if r != self.matrix[e].Matrix.size: 426 | message.error("Matrix size does not correspond to index arrays. Response inconsistent.") 427 | return 1 428 | 429 | # Check for energy bins with zero width 430 | wzero = np.where(self.matrix[e].LowEnergy == self.matrix[e].HighEnergy)[0] 431 | if wzero.size != 0: 432 | for i in np.arange(wzero.size): 433 | message.warning("Energy bin with zero width found in row {0}!".format(wzero[i]+1)) 434 | return 2 435 | 436 | return 0 437 | 438 | def disp(self): 439 | """Display a summary of the RMF object.""" 440 | print("RMF Response matrix:") 441 | print("") 442 | print("Channel energy bounds:") 443 | print("FirstChannel: {0:>20} First channel number".format(self.ebounds.FirstChannel)) 444 | print("NumberChannels: {0:>20} Number of data channels".format(self.ebounds.NumberChannels)) 445 | print("Channel {0:>20} Channel numbers".format(self.ebounds.Channel.size)) 446 | print("ChannelLowEnergy {0:>20} Start energy of channel".format(self.ebounds.ChannelLowEnergy.size)) 447 | print("ChannelHighEnergy {0:>20} End energy of channel".format(self.ebounds.ChannelHighEnergy.size)) 448 | print("") 449 | print("NumberMatrixExt {0:>10} Number of MATRIX extensions".format(self.NumberMatrixExt)) 450 | for i in range(self.NumberMatrixExt): 451 | print("") 452 | print("Information for MATRIX number {0}:".format(i+1)) 453 | print("NumberEnergyBins: {0:>20} Number of energy bins".format(self.matrix[i].NumberEnergyBins)) 454 | print("NumberTotalGroups: {0:>20} Total number of groups".format(self.matrix[i].NumberTotalGroups)) 455 | print("NumberTotalElements: {0:>20} Total number of response elements".format(self.matrix[i].NumberTotalElements)) 456 | print("AreaScaling {0:>20} Value of EFFAREA keyword".format(self.matrix[i].AreaScaling)) 457 | print("ResponseThreshold {0:>20g} Minimum value in response".format(self.matrix[i].ResponseThreshold)) 458 | print("EnergyUnits {0:>20} Units of the energy scale".format(self.matrix[i].EnergyUnits)) 459 | print("RMFUnits {0:>20} Units for RMF values".format(self.matrix[i].RMFUnits)) 460 | print("") 461 | print("NumberGroups {0:>20} Number of response groups".format(self.matrix[i].NumberGroups.size)) 462 | print("FirstGroup {0:>20} First response group for this energy bin".format(self.matrix[i].FirstGroup.size)) 463 | print("FirstChannelGroup {0:>20} First channel number in this group".format(self.matrix[i].FirstChannelGroup.size)) 464 | print("NumberChannelsGroup {0:>20} Number of channels in this group".format(self.matrix[i].NumberChannelsGroup.size)) 465 | print("FirstElement {0:>20} First response element for this group".format(self.matrix[i].FirstElement.size)) 466 | print("LowEnergy {0:>20} Start energy of bin".format(self.matrix[i].LowEnergy.size)) 467 | 
print("HighEnergy {0:>20} End energy of bin".format(self.matrix[i].HighEnergy.size)) 468 | print("Matrix {0:>20} Matrix elements".format(self.matrix[i].Matrix.size)) 469 | print("") 470 | 471 | return 472 | 473 | def check_compatibility(self, arf): 474 | """Check whether the input arf object is really an ARF object with data and consistent with this RMF file. 475 | 476 | :param arf: Input arf object to check. 477 | :type arf: pyspextools.io.Arf 478 | """ 479 | 480 | # Check if arf is really an Arf object 481 | if not isinstance(arf, Arf): 482 | message.error("Input arf is not an Arf class instance.") 483 | return 1 484 | 485 | # Check if the size of the energy arrays is the same. 486 | if arf.LowEnergy.size != self.matrix[0].LowEnergy.size: 487 | message.error("Size of ARF and RMF are not the same.") 488 | return 1 489 | 490 | # Check if the numbers of the high energy column are the same 491 | # Here we check the high values, because sometimes the first low value is different... 492 | if arf.HighEnergy[0] != self.matrix[0].HighEnergy[0]: 493 | message.error("First high-energy boundaries of arrays are not the same.") 494 | return 1 495 | 496 | # Check if the last values of the array are similar. 497 | size = arf.HighEnergy.size - 1 498 | 499 | if arf.HighEnergy[size] != self.matrix[0].HighEnergy[size]: 500 | message.error("Last high-energy boundaries of arrays are not the same.") 501 | return 1 502 | 503 | return 0 504 | 505 | def fix_energy_grid(self): 506 | """In some exceptional cases, like with ROSAT response matrices, the matrix contains 507 | energy bins with zero width, which is not physical. This method changes them to a small 508 | finite width to allow them to be processed with ogip2spex and trafo.""" 509 | 510 | for e in range(self.NumberMatrixExt): 511 | wzero = np.where(self.matrix[e].LowEnergy == self.matrix[e].HighEnergy)[0] 512 | 513 | if wzero.size != 0: 514 | for i in np.arange(wzero.size): 515 | self.matrix[e].HighEnergy[wzero[i]] = self.matrix[e].HighEnergy[wzero[i]] + 1.0E-6 516 | if i < self.matrix[e].NumberEnergyBins: 517 | self.matrix[e].LowEnergy[wzero[i]+1] = self.matrix[e].LowEnergy[wzero[i]+1] + 1.0E-6 518 | -------------------------------------------------------------------------------- /pyspextools/io/tg.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import numpy as np 5 | import pyspextools.messages as message 6 | 7 | from .region import Region 8 | from .res import Res 9 | from .spo import Spo 10 | from .pha2 import Pha2 11 | from .pha import Pha 12 | from .rmf import Rmf 13 | from .arf import Arf 14 | from .convert import rmf_to_res 15 | from .convert import pha_to_spo 16 | 17 | 18 | class TGRegion(Region): 19 | """The TGRegion class contains methods to read Chandra grating data into the pyspextools module and convert 20 | these to spo and res format objects. 21 | 22 | :ivar grating: Grating name. 23 | :vartype grating: str 24 | """ 25 | 26 | def __init__(self): 27 | 28 | Region.__init__(self) 29 | 30 | self.grating = '' # Grating name 31 | 32 | # ----------------------------------------------------- 33 | # Read a set of Chandra grating files into a region 34 | # ----------------------------------------------------- 35 | 36 | def read_region(self, pha2file, rmflist, arflist, grating, bkgsubtract=True): 37 | """Add a Chandra spectrum and response to a SPEX region. The pha2 file and the rmf and arf file lists 38 | are mandatory. 
The grating option can be either HETG, METG or LETG. 39 | 40 | :param pha2file: PHA2 file name to read. 41 | :type pha2file: str 42 | :param rmflist: List of RMF response files. 43 | :type rmflist: list 44 | :param arflist: List of ARF effective area files. 45 | :type arflist: list 46 | :param grating: Grating name. 47 | :type grating: str 48 | :param bkgsubtract: Subtract the background? 49 | :type bkgsubtract: bool 50 | """ 51 | 52 | self.grating = grating 53 | 54 | # Read the PHA2 file for a particular grating 55 | (src, bkg) = self.__read_pha2(pha2file, grating, bkgsubtract=bkgsubtract) 56 | if not isinstance(src, Pha): 57 | message.error("Failed to read spectrum file.") 58 | return 1 59 | 60 | # Convert the PHA2 file to spo 61 | 62 | rmf = Rmf() 63 | rmf.read(rmflist[0]) 64 | 65 | self.spo = pha_to_spo(src, rmf, back=bkg) 66 | if not isinstance(self.spo, Spo): 67 | message.error("Failed to convert spectrum file.") 68 | return 1 69 | 70 | # Convert the responses to res 71 | self.res = self.__rmflist_to_res(rmflist, arflist) 72 | if not isinstance(self.res, Res): 73 | message.error("Failed to combine and convert response files.") 74 | return 1 75 | 76 | self.label = grating 77 | 78 | return 0 79 | 80 | def __read_pha2(self, pha2file, grating, bkgsubtract=True): 81 | """Method to read a PHA type II file. 82 | 83 | :param pha2file: PHA type II file name to read. 84 | :type pha2file: str 85 | :param grating: Name of the grating to read (HETG, METG or LETG). 86 | :type grating: str 87 | :param bkgsubtract: Subtract the background? 88 | :type bkgsubtract: bool 89 | """ 90 | 91 | # Initialize PHA2 file type 92 | 93 | spec = Pha2() 94 | 95 | # Is the source spectrum there? 96 | message.proc_start("Read source spectrum") 97 | if os.path.isfile(pha2file): 98 | stat = spec.read(pha2file, background=bkgsubtract) 99 | if stat != 0: 100 | message.proc_end(stat) 101 | message.error("Failed to read source spectrum.") 102 | return 1 103 | else: 104 | message.proc_end(stat) 105 | else: 106 | message.proc_end(1) 107 | message.error("Spectrum file {0} not found in path.".format(pha2file)) 108 | return 1 109 | 110 | # Convert grating name to number 111 | if grating == 'HETG': 112 | ngrating = 1 113 | elif grating == 'METG': 114 | ngrating = 2 115 | elif grating == 'LETG': 116 | ngrating = 3 117 | else: 118 | message.error("Unsupported grating: '{0}'.".format(grating)) 119 | return 1 120 | 121 | # Combine spectra from a single grating 122 | message.proc_start("Combining orders of the spectrum") 123 | (src, bkg) = spec.combine_orders(ngrating) 124 | if isinstance(src, Pha) and isinstance(bkg, Pha): 125 | message.proc_end(0) 126 | else: 127 | message.proc_end(1) 128 | return 1 129 | 130 | return src, bkg 131 | 132 | # ----------------------------------------------------- 133 | # Return a res object derived from Chandra grating data 134 | # ----------------------------------------------------- 135 | 136 | def __rmflist_to_res(self, rmflist, arflist): 137 | """Convert a list of compatible rmf and arf file into one res file. This is convenient for combining responses 138 | that are provided separately, like the Transmission Grating spectra from Chandra. 139 | 140 | :param rmflist: List of RMF file names. 141 | :type rmflist: list 142 | :param arflist: List of ARF file names. 
143 | :type arflist: list 144 | """ 145 | 146 | if len(rmflist) != len(arflist): 147 | message.error("ARF list and RMF list do not have the same length.") 148 | return 0 149 | 150 | rmfobjs = np.zeros(len(rmflist), dtype=object) 151 | arfobjs = np.zeros(len(arflist), dtype=object) 152 | rmf_orders = np.zeros(len(rmflist), dtype=int) 153 | arf_orders = np.zeros(len(arflist), dtype=int) 154 | 155 | i = 0 156 | for file in rmflist: 157 | message.proc_start("Reading response for order") 158 | rmf = Rmf() 159 | rmf.read(file) 160 | rmf_orders[i] = rmf.matrix[0].Order 161 | print(str(rmf_orders[i])+" ", end='') 162 | if len(np.where(rmf_orders == rmf.matrix[0].Order)[0]) != 1: 163 | message.error("There are two response files with the same order.") 164 | message.proc_end(1) 165 | return 1 166 | else: 167 | rmfobjs[i] = rmf 168 | message.proc_end(0) 169 | i = i + 1 170 | 171 | i = 0 172 | for file in arflist: 173 | message.proc_start("Reading effective area for order") 174 | arf = Arf() 175 | arf.read(file) 176 | arf_orders[i] = arf.Order 177 | print(str(arf_orders[i])+" ", end='') 178 | if len(np.where(arf_orders == arf.Order)[0]) != 1: 179 | message.error("There are two effective area files for the same order.") 180 | message.proc_end(1) 181 | return 1 182 | else: 183 | arfobjs[i] = arf 184 | message.proc_end(0) 185 | i = i + 1 186 | 187 | arfsort = np.argsort(arf_orders) 188 | rmfsort = np.argsort(rmf_orders) 189 | 190 | # Calculate first response: 191 | res = rmf_to_res(rmfobjs[rmfsort[0]], arf=arfobjs[arfsort[0]]) 192 | 193 | # Append the components from the other responses 194 | for i in np.arange(len(rmfsort)-1)+1: 195 | restmp = rmf_to_res(rmfobjs[rmfsort[i]], arf=arfobjs[arfsort[i]]) 196 | res.append_component(restmp) 197 | 198 | return res 199 | -------------------------------------------------------------------------------- /pyspextools/messages.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # ========================================================= 4 | """ 5 | Methods to send messages to the user. 6 | """ 7 | # ========================================================= 8 | 9 | import sys 10 | import pyspextools 11 | from pyspextools.color import Colors 12 | 13 | # Set general messages for argument parsing 14 | docs = 'See full documentation at: https://spex-xray.github.io/pyspextools' 15 | version = '%(prog)s {:s} (C) 2018-2025, Jelle de Plaa, SRON Netherlands Institute for Space Research, ' \ 16 | 'Apache 2.0 License'.format(pyspextools.__version__) 17 | 18 | # Initialize colors 19 | color = Colors() 20 | 21 | 22 | def set_color(setcol): 23 | """Set whether text output can be colored or not. 24 | 25 | :param setcol: Set colored text output? (True/False) 26 | :type setcol: bool 27 | """ 28 | 29 | if setcol: 30 | color.set_color(True) 31 | else: 32 | color.set_color(False) 33 | 34 | 35 | # Create methods to do show processes and their result 36 | def proc_start(text): 37 | """Print text to terminal at the start of the procedure. 38 | 39 | :param text: Text to print in front of result. 40 | :type text: str 41 | """ 42 | 43 | print(text+'... ', end='') 44 | sys.stdout.flush() 45 | 46 | 47 | def proc_end(result): 48 | """Report the result of a procedure to terminal. 49 | 50 | :param result: Returned error code by program. Successful execution returns 0.
51 | :type result: int 52 | """ 53 | 54 | if result == 0: 55 | print(color.OKGREEN+"OK"+color.ENDC) 56 | else: 57 | print(color.FAIL+"FAILED"+color.ENDC) 58 | 59 | 60 | # Print a warning to screen 61 | def warning(text): 62 | """Print a warning text. 63 | 64 | :param text: Warning text to print. 65 | :type text: str 66 | """ 67 | 68 | print(color.WARNING+"WARNING "+color.ENDC+text) 69 | 70 | 71 | def error(text): 72 | """Print an error text. 73 | 74 | :param text: Error text to print. 75 | :type text: str 76 | """ 77 | 78 | print(color.FAIL+"ERROR "+color.ENDC+text) 79 | 80 | 81 | def print_header(scriptname): 82 | """Print the header when executing a script. 83 | 84 | :param scriptname: Name of the script being executed. 85 | :type scriptname: str 86 | """ 87 | print("==================================") 88 | print(" This is {:s} version {:s}".format(scriptname, pyspextools.__version__)) 89 | print("==================================") 90 | print("(C) 2018-2024 Jelle de Plaa") 91 | print("SRON Netherlands Institute for Space Research") 92 | print("Github: https://github.com/spex-xray/pyspextools") 93 | print("") 94 | -------------------------------------------------------------------------------- /pyspextools/model/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from .user import User 4 | -------------------------------------------------------------------------------- /pyspextools/model/user.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """This is a module containing the necessary methods to 4 | develop an executable for use with the SPEX user model. 5 | """ 6 | 7 | # Stuff to import for compatibility between python 2 and 3 8 | from __future__ import print_function 9 | from __future__ import unicode_literals 10 | from __future__ import division 11 | from __future__ import absolute_import 12 | from builtins import int 13 | from builtins import open 14 | from builtins import str 15 | from future import standard_library 16 | 17 | from builtins import object 18 | 19 | import sys 20 | import math 21 | import numpy 22 | 23 | standard_library.install_aliases() 24 | 25 | 26 | class User: 27 | """Class structure to contain the input and output parameters of the user function. 28 | 29 | :ivar npar: Numper of model input parameters from SPEX. 30 | :vartype npar: int 31 | :ivar par: Array containing the parameter values from SPEX (length npar). 32 | :vartype par: numpy.ndarray 33 | :ivar neg: Number of bins in the input energy grid from SPEX. 34 | :vartype neg: int 35 | :ivar egb: Upper boundaries of the energy bins (length neg). 36 | :vartype egb: numpy.ndarray 37 | :ivar eg: Bin centroids of the energy bins (length neg). 38 | :vartype eg: numpy.ndarray 39 | :ivar deg: Bin widths of the energy bins (length deg). 40 | :vartype deg: numpy.ndarray 41 | 42 | :ivar sener: Spectrum or transmission (e.g. in ph/s/bin) 43 | :vartype sener: numpy.ndarray 44 | :ivar wener: If Delta E = average photon energy within the bin (keV) minus the bin centroid then wener = sener * Delta E 45 | :vartype wener: numpy.ndarray 46 | 47 | :ivar fprm: Input file name from command line. 48 | :vartype fprm: str 49 | :ivar fspc: Output file name from command line. 50 | :vartype fspc: str 51 | """ 52 | 53 | def __init__(self): 54 | """Read the input file names from command line. 
Then read in the input parameters 55 | and energy grid from the SPEX provided input file.""" 56 | 57 | # Input parameters: 58 | self.npar = 0 # Number of input parameters from SPEX 59 | self.par = numpy.array([], dtype=float) # Array containing the parameter values from SPEX (length npar) 60 | self.neg = 0 # Number of bins in the input energy grid from SPEX 61 | self.egb = numpy.array([], dtype=float) # Upper boundaries of the energy bins (length neg) 62 | self.eg = numpy.array([], dtype=float) # Bin centroids of the energy bins (length neg) 63 | self.deg = numpy.array([], dtype=float) # Bin widths of the energy bins (length deg) 64 | 65 | # Output parameters: 66 | self.sener = numpy.array([], dtype=float) # Spectrum or transmission (e.g. in ph/s/bin) 67 | self.wener = numpy.array([], dtype=float) # If Delta E = average photon energy within the bin (keV) minus 68 | # the bin centroid then wener = sener * Delta E 69 | 70 | # Filenames: 71 | self.fprm = '' # Input file name 72 | self.fspc = '' # Output file name 73 | 74 | self.read_prm() 75 | 76 | def read_prm(self): 77 | """Read the parameter file that SPEX creates at the beginning of the model evaluation. This is done 78 | automatically when the User class is initialised.""" 79 | 80 | try: 81 | self.fprm = sys.argv[1] 82 | except IndexError: 83 | print( 84 | "Error: Cannot read input filename from command line.\n Please only use this module in an executable.") 85 | sys.exit(1) 86 | 87 | try: 88 | self.fspc = sys.argv[2] 89 | except IndexError: 90 | print( 91 | "Error: Cannot read output filename from command line.\n Please only use this module in an executable.") 92 | sys.exit(1) 93 | 94 | # Open input file 95 | try: 96 | f = open(self.fprm, 'r') 97 | except IOError: 98 | print("Error: Input file does not exist...") 99 | sys.exit(1) 100 | 101 | # Read the number of parameters 102 | self.npar = int(f.readline()) 103 | 104 | # Allocate an array containing the parameters 105 | self.par = numpy.zeros(self.npar, dtype=float) 106 | 107 | # Read parameters into the array for the parameters 108 | spar = [] 109 | for _ in numpy.arange(math.ceil(self.npar / 5)): 110 | spar.append(str(f.readline()).split()) 111 | 112 | # Flatten list 113 | spar = sum(spar, []) 114 | 115 | for i in numpy.arange(self.npar): 116 | self.par[i] = float(spar[i]) 117 | 118 | # Read the number of model grid bins 119 | self.neg = int(f.readline()) 120 | 121 | # Allocate an array for the energy bin boundary egb, bin center eg, and delta e, deg 122 | self.egb = numpy.zeros(self.neg) 123 | self.eg = numpy.zeros(self.neg) 124 | self.deg = numpy.zeros(self.neg) 125 | 126 | # Read the energy grid from the input file 127 | for i in numpy.arange(self.neg): 128 | row = str(f.readline()).split() 129 | self.egb[i] = float(row[0]) 130 | self.eg[i] = float(row[1]) 131 | self.deg[i] = float(row[2]) 132 | 133 | # Close the file 134 | f.close() 135 | 136 | # Set size of sener and wener arrays 137 | self.sener = numpy.zeros(self.neg, dtype=float) 138 | self.wener = numpy.zeros(self.neg, dtype=float) 139 | 140 | def write_spc(self): 141 | """Write the calculated spectrum to the output file for SPEX. 142 | Make sure that the sener and wener arrays are initialized and 143 | filled with a spectrum before calling this function.""" 144 | 145 | # Do some checks on the sener and wener arrays that should be set. 
146 | if len(self.sener) == 0: 147 | print("Error: sener array not initialized yet.") 148 | sys.exit(1) 149 | 150 | if len(self.sener) != self.neg: 151 | print("Error: sener array has an incorrect size.") 152 | sys.exit(1) 153 | 154 | if len(self.wener) == 0: 155 | print("Error: wener array not initialized yet.") 156 | sys.exit(1) 157 | 158 | if len(self.wener) != self.neg: 159 | print("Error: wener array has an incorrect size.") 160 | sys.exit(1) 161 | 162 | # Open the output file 163 | try: 164 | f = open(self.fspc, 'w') 165 | except IOError: 166 | print("Error: unable to open output file.") 167 | sys.exit(1) 168 | 169 | # Write the number of model bins to the output file 170 | f.write(str(self.neg) + '\n') 171 | 172 | # Write the sener and wener columns to the output file 173 | for i in numpy.arange(self.neg): 174 | f.write(str(self.sener[i]) + ' ' + str(self.wener[i]) + '\n') 175 | 176 | # Close the file 177 | f.close() 178 | return 179 | -------------------------------------------------------------------------------- /pyspextools/scripts/ogip2spex.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import sys 5 | import argparse 6 | import pyspextools 7 | from pyspextools.io.ogip import OGIPRegion 8 | from pyspextools.data.badchannels import clean_region 9 | import pyspextools.messages as message 10 | 11 | 12 | def main(): 13 | """The OGIP2spex script offers a quick way to convert OGIP type spectra to SPEX format. 14 | It reads OGIP PHA type I spectra and responses. After the conversion to SPEX format, the 15 | files are filtered for bad channels (optional).""" 16 | 17 | # Obtain command line arguments 18 | parser = ogip2spex_arguments() 19 | args = parser.parse_args() 20 | 21 | # Print message header 22 | message.print_header(os.path.basename(__file__)) 23 | 24 | # Set color in the terminal 25 | message.set_color(args.color) 26 | 27 | # Load OGIP spectra and response files 28 | ogip = OGIPRegion() 29 | 30 | print("Input PHA file: {0}".format(args.phafile)) 31 | print("Input Background file: {0}".format(args.bkgfile)) 32 | print("Input Response file: {0}".format(args.rmffile)) 33 | print("Input Effective area file: {0}".format(args.arffile)) 34 | 35 | ogip.read_region(args.phafile, args.rmffile, bkgfile=args.bkgfile, arffile=args.arffile, grouping=args.group, 36 | force_poisson=args.force_poisson) 37 | 38 | # Filter for bad channels (if not blocked by command line argument) 39 | if args.badchan: 40 | ogip = clean_region(ogip) 41 | if not isinstance(ogip, OGIPRegion): 42 | sys.exit(1) 43 | 44 | # Add the ogip2spex command to the file history 45 | history = [] 46 | history.append("OGIP2SPEX version: {0}".format(pyspextools.__version__)) 47 | command = '' 48 | for arg in sys.argv: 49 | command = command + ' ' + arg 50 | history.append("Command used: {0}".format(command)) 51 | history.append("Variables derived from commandline:") 52 | for arg in vars(args): 53 | line = "{0} : {1}".format(arg, getattr(args, arg)) 54 | history.append(line) 55 | 56 | # Check output file names 57 | spofile = ogip.spo.check_filename(args.spofile) 58 | resfile = ogip.res.check_filename(args.resfile) 59 | 60 | # Write output spo and res file 61 | print("Writing SPO to file: {0}".format(spofile)) 62 | ogip.spo.write_file(spofile, exp_rate=args.exprate, overwrite=args.overwrite, history=history) 63 | 64 | print("Writing RES to file: {0}".format(resfile)) 65 | ogip.res.write_file(resfile, overwrite=args.overwrite, history=history) 66 | 
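# --- Example usage (illustrative addition, not part of the original script) ---
# Assuming the package is installed and this script is exposed as the ogip2spex
# console command, a typical invocation could look like the line below. All file
# names are hypothetical placeholders for your own OGIP spectrum and response files:
#
#   ogip2spex --phafile source.pha --bkgfile background.pha --rmffile response.rmf --arffile effarea.arf --spofile source.spo --resfile source.res
#
# This reads the OGIP files, removes bad channels unless --keep-badchannels is given,
# and writes the SPEX .spo and .res files named on the command line.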
67 | 68 | # Get command line arguments 69 | def ogip2spex_arguments(): 70 | """Obtain command line arguments.""" 71 | parser = argparse.ArgumentParser(description=message.docs) 72 | parser.add_argument('--phafile', help='Input PHA source spectrum (required)', type=str, required=True) 73 | parser.add_argument('--bkgfile', help='Input Background spectrum', type=str) 74 | parser.add_argument('--rmffile', help='Input Response matrix (required)', type=str, required=True) 75 | parser.add_argument('--arffile', help='Input Effective area file', type=str) 76 | parser.add_argument('--spofile', help='Output SPEX spectrum file (.spo, required)', type=str, required=True) 77 | parser.add_argument('--resfile', help='Output SPEX response file (.res, required)', type=str, required=True) 78 | parser.add_argument('--keep-badchannels', help='Do not remove bad channels.', dest="badchan", action="store_false", 79 | default=True) 80 | parser.add_argument('--keep-grouping', help='Retain the grouping information from the PHA file.', dest="group", 81 | action="store_true", default=False) 82 | parser.add_argument('--overwrite', help="Overwrite existing spo and res files with same name.", action="store_true", 83 | default=False) 84 | parser.add_argument('--no-exprate', help="Do not write additional Exp_Rate column (SPEX <=3.04.00).", 85 | dest="exprate", action="store_false", default=True) 86 | parser.add_argument('--force-poisson', help="Force the use of Poisson statistics for the input spectra.", 87 | dest="force_poisson", action="store_true", default=False) 88 | parser.add_argument('--no-color', help="Suppress color output.", dest="color", action="store_false", default=True) 89 | parser.add_argument('--version', action='version', version=message.version) 90 | 91 | return parser 92 | 93 | 94 | if __name__ == "__main__": 95 | main() 96 | -------------------------------------------------------------------------------- /pyspextools/scripts/ogipgenrsp.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import sys 5 | import math 6 | import numpy as np 7 | import argparse 8 | from pyspextools.io.arf import Arf 9 | from pyspextools.io.rmf import Rmf, RmfMatrix 10 | from pyspextools.data.response import gaussrsp 11 | 12 | import pyspextools.messages as message 13 | 14 | 15 | def main(): 16 | """This program can generate response files for new detectors with an arbitrary (Gaussian) redistribution 17 | function. The size of the response file and the respective energy ranges can be varied within the limits 18 | of the supplied effective area (ARF) file. 
The output format for this tool is OGIP.""" 19 | 20 | # Obtain command line arguments 21 | parser = genrsp_arguments() 22 | args = parser.parse_args() 23 | 24 | # Print header 25 | message.print_header(os.path.basename(__file__)) 26 | 27 | # Set color in the terminal 28 | message.set_color(args.color) 29 | 30 | # Read the input ARF file 31 | message.proc_start('Reading ARF file') 32 | arf_in = Arf() 33 | stat = arf_in.read(args.arffile) 34 | message.proc_end(stat) 35 | 36 | # Create input array with bin centers 37 | x = (arf_in.LowEnergy + arf_in.HighEnergy) / 2.0 38 | 39 | # Define the new grid 40 | low = args.range[0] # Lowest energy 41 | high = args.range[1] # Highest energy 42 | step = 1E-3 * args.resolution / args.sampling # Energy stepsize (in keV) 43 | 44 | # Create a new RSP output object 45 | rsp_out = Rmf() 46 | matx = RmfMatrix() 47 | rsp_out.matrix.append(matx) 48 | rsp_out.NumberMatrixExt += 1 49 | rsp_out.MatrixExt = np.array([1]) 50 | 51 | rsp_out.matrix[0].NumberEnergyBins = int((high - low) / step) 52 | rsp_out.ebounds.EnergyUnits = arf_in.EnergyUnits 53 | 54 | if args.noarea: 55 | rsp_out.matrix[0].AreaIncluded = False 56 | rsp_out.matrix[0].RMFUnits = '' 57 | else: 58 | rsp_out.matrix[0].AreaIncluded = True 59 | rsp_out.matrix[0].RMFUnits = arf_in.ARFUnits 60 | 61 | # Calculate new arrays 62 | rsp_out.matrix[0].LowEnergy = low + step * np.arange(rsp_out.matrix[0].NumberEnergyBins,dtype=float) 63 | rsp_out.matrix[0].HighEnergy = low + step * (np.arange(rsp_out.matrix[0].NumberEnergyBins,dtype=float)+1.0) 64 | EffArea = np.zeros(rsp_out.matrix[0].NumberEnergyBins, dtype=float) 65 | 66 | # Linear interpolation of Effective area 67 | for i in np.arange(rsp_out.matrix[0].NumberEnergyBins): 68 | e = (rsp_out.matrix[0].LowEnergy[i] + rsp_out.matrix[0].HighEnergy[i]) / 2.0 69 | EffArea[i] = np.interp(e, x, arf_in.EffArea) 70 | 71 | # Assume same binning for energy channels (Not optimal, but ok...) 
72 | rsp_out.ebounds.NumberChannels = rsp_out.matrix[0].NumberEnergyBins 73 | 74 | rsp_out.ebounds.Channel = np.arange(rsp_out.ebounds.NumberChannels) + 1 75 | rsp_out.ebounds.FirstChannel = rsp_out.ebounds.Channel[0] 76 | rsp_out.ebounds.ChannelLowEnergy = rsp_out.matrix[0].LowEnergy 77 | rsp_out.ebounds.ChannelHighEnergy = rsp_out.matrix[0].HighEnergy 78 | 79 | # We choose only one response group per energy (simple Gaussian response) 80 | rsp_out.matrix[0].NumberGroups = np.ones(rsp_out.ebounds.NumberChannels) 81 | 82 | # Set response thresholds 83 | rsp_out.matrix[0].ResponseThreshold = 1.E-7 84 | 85 | # Determine maximum width in channels for group 86 | # We assume that 10 times the FWHM at 1 keV is enough 87 | 88 | nbin_group = math.ceil(10 * args.resolution * 1E-3 / step) 89 | 90 | try: 91 | rsp_out.matrix[0].Matrix = np.zeros(nbin_group * rsp_out.matrix[0].NumberEnergyBins, dtype=float) 92 | except MemoryError: 93 | message.error('Not enough memory to create matrix...') 94 | 95 | rsp_out.matrix[0].NumberChannelsGroup = np.zeros(rsp_out.matrix[0].NumberEnergyBins, dtype=int) 96 | rsp_out.matrix[0].FirstChannelGroup = np.zeros(rsp_out.matrix[0].NumberEnergyBins, dtype=int) 97 | 98 | # Generate response matrix 99 | print("Number of energy bins: {0}".format(rsp_out.matrix[0].NumberEnergyBins)) 100 | print("Number of channels per group: {0}".format(nbin_group)) 101 | 102 | message.proc_start('Calculate response matrix') 103 | 104 | r = 0 105 | for i in np.arange(rsp_out.matrix[0].NumberEnergyBins): 106 | # The number of channels per group is nbin_group 107 | rsp_out.matrix[0].NumberChannelsGroup[i] = nbin_group 108 | 109 | # Calculate the start channel of the group 110 | rsp_out.matrix[0].FirstChannelGroup[i] = int(1 + i - math.ceil(nbin_group / 2)) 111 | if rsp_out.matrix[0].FirstChannelGroup[i] < 1: 112 | rsp_out.matrix[0].FirstChannelGroup[i] = 1 113 | if rsp_out.matrix[0].FirstChannelGroup[i] > rsp_out.ebounds.NumberChannels - nbin_group: 114 | rsp_out.matrix[0].FirstChannelGroup[i] = int(rsp_out.ebounds.NumberChannels - nbin_group) 115 | 116 | # Fill the Matrix 117 | mu = (rsp_out.ebounds.ChannelLowEnergy[i] + rsp_out.ebounds.ChannelHighEnergy[i]) / 2.0 118 | sum = 0 119 | for j in np.arange(nbin_group): 120 | k = rsp_out.matrix[0].FirstChannelGroup[i] + j 121 | e = (rsp_out.ebounds.ChannelLowEnergy[k - 1] + rsp_out.ebounds.ChannelHighEnergy[k - 1]) / 2.0 122 | resp = gaussrsp(e, mu, args.resolution, args.resgradient) 123 | if resp < 0.: 124 | message.error('Negative response value detected. Quitting program...') 125 | sys.exit(0) 126 | rsp_out.matrix[0].Matrix[r] = resp * step 127 | if (rsp_out.matrix[0].Matrix[r] < rsp_out.matrix[0].ResponseThreshold): 128 | rsp_out.matrix[0].Matrix[r] = 0. 129 | if not args.noarea: 130 | rsp_out.matrix[0].Matrix[r] = rsp_out.matrix[0].Matrix[r] * EffArea[i] 131 | sum = sum + rsp_out.matrix[0].Matrix[r] 132 | r = r + 1 133 | 134 | rsp_out.matrix[0].NumberTotalGroups = rsp_out.matrix[0].NumberEnergyBins 135 | rsp_out.matrix[0].NumberTotalElements = r 136 | message.proc_end(0) 137 | 138 | if args.noarea: 139 | if os.path.splitext(args.rspfile)[1] == '.rsp': 140 | message.warning("You are creating a .rsp file without area. 
Rename output file extension to .rmf.") 141 | 142 | # Check the created matrix 143 | message.proc_start('Check the created RSP/RMF matrix') 144 | stat = rsp_out.check() 145 | message.proc_end(stat) 146 | 147 | # Write the new matrix to file 148 | message.proc_start('Write RSP/RMF to file') 149 | stat = rsp_out.write(args.rspfile, overwrite=args.overwrite) 150 | message.proc_end(stat) 151 | 152 | 153 | def parserange(string): 154 | """Split the input range string into two float values.""" 155 | values = string.split(':') 156 | try: 157 | range = np.array([float(values[0]), float(values[1])]) 158 | except ValueError: 159 | message.error('Invalid range format. Please input a range separated by a colon. For example: --range 0.1:10') 160 | sys.exit() 161 | return range 162 | 163 | 164 | def genrsp_arguments(): 165 | """Obtain command line arguments.""" 166 | parser = argparse.ArgumentParser(description=message.docs) 167 | parser.add_argument('--arffile', help='Input Effective area file (.arf, required).', type=str, required=True) 168 | parser.add_argument('--resolution', help='Spectral resolution at 1 keV (FWHM, eV, required).', type=float, 169 | required=True) 170 | parser.add_argument('--resgradient', help='Energy dependence of spectral resolution (in eV per keV, see documentation).', 171 | type=float, default=0.0) 172 | parser.add_argument('--range', help='Energy range of the response files (in keV), for example --range 0.1:10.', 173 | type=parserange, default='0.1:10') 174 | parser.add_argument('--sampling', help='The number of channels per resolution element (FWHM) at 1 keV.', 175 | type=int, default=5) 176 | parser.add_argument('--rspfile', help='Output filename for the OGIP RSP file (.rsp required).', type=str, 177 | required=True) 178 | parser.add_argument('--no-area', help="Do NOT include effective area in response file (create .rmf).", dest="noarea", 179 | action="store_true", default=False) 180 | parser.add_argument('--overwrite', help="Overwrite existing rsp files with same name.", action="store_true", 181 | default=False) 182 | parser.add_argument('--no-color', help="Suppress color output.", dest="color", action="store_false", default=True) 183 | parser.add_argument('--version', action='version', version=message.version) 184 | 185 | return parser 186 | 187 | 188 | if __name__ == "__main__": 189 | main() 190 | -------------------------------------------------------------------------------- /pyspextools/scripts/simres.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import sys 5 | import argparse 6 | import pyspextools 7 | from pyspextools.io.ogip import OGIPRegion 8 | from pyspextools.data.badchannels import clean_region 9 | 10 | import pyspextools.messages as message 11 | 12 | 13 | def main(): 14 | """The simres program generates a spo and res file from an input arf and rmf file, and optionally a background 15 | file. 
This is particularly useful for simulating spectra for future missions, where the source spectrum file is 16 | not supplied.""" 17 | 18 | # Obtain command line arguments 19 | parser = simres_arguments() 20 | args = parser.parse_args() 21 | 22 | # Print message header 23 | message.print_header(os.path.basename(__file__)) 24 | 25 | # Set color in the terminal 26 | message.set_color(args.color) 27 | 28 | # Load OGIP response files and background spectrum if provided 29 | ogipreg = OGIPRegion() 30 | 31 | # Read the background PHA file if specified: 32 | if args.bkgfile is not None: 33 | ogipreg.input_bkg = True 34 | ogipreg.read_background_pha(args.bkgfile) 35 | ogipreg.back.BackScaling = ogipreg.back.BackScaling / args.backscale 36 | else: 37 | ogipreg.input_bkg = False 38 | ogipreg.back = None 39 | 40 | # Corr spectrum is not used here 41 | ogipreg.corr = None 42 | 43 | # Read the response matrix 44 | ogipreg.read_rmf(args.rmffile) 45 | 46 | # Read the effective area 47 | if args.arffile is not None: 48 | ogipreg.input_area = True 49 | ogipreg.read_arf(args.arffile) 50 | else: 51 | ogipreg.input_area = False 52 | ogipreg.area = None 53 | 54 | # Generate dummy spectrum based on rmf channels 55 | ogipreg.spec.create_dummy(ogipreg.resp) 56 | 57 | # Convert spectra and responses to SPEX format 58 | stat = ogipreg.add_region(ogipreg.spec, ogipreg.resp, back=ogipreg.back, corr=ogipreg.corr, area=ogipreg.area) 59 | if stat != 0: 60 | sys.exit() 61 | 62 | # Filter for bad channels (if not blocked by command line argument) 63 | if args.badchan: 64 | ogipreg = clean_region(ogipreg) 65 | 66 | # Add the simres command to the file history 67 | history = [] 68 | history.append("SIMRES version: {0}".format(pyspextools.__version__)) 69 | command = '' 70 | for arg in sys.argv: 71 | command = command + ' ' + arg 72 | history.append("Command used: {0}".format(command)) 73 | history.append("Variables derived from commandline:") 74 | for arg in vars(args): 75 | line = "{0} : {1}".format(arg, getattr(args, arg)) 76 | history.append(line) 77 | 78 | # Check output file names 79 | spofile = ogipreg.spo.check_filename(args.spofile) 80 | resfile = ogipreg.res.check_filename(args.resfile) 81 | 82 | # Write output spo and res file 83 | print("Writing SPO to file: {0}".format(spofile)) 84 | ogipreg.spo.write_file(spofile, exp_rate=args.exprate, overwrite=args.overwrite, history=history) 85 | 86 | print("Writing RES to file: {0}".format(resfile)) 87 | ogipreg.res.write_file(resfile, overwrite=args.overwrite, history=history) 88 | 89 | 90 | # Get command line arguments 91 | def simres_arguments(): 92 | """Obtain command line arguments.""" 93 | parser = argparse.ArgumentParser(description=message.docs) 94 | parser.add_argument('--rmffile', help='Input Response matrix (required)', type=str, required=True) 95 | parser.add_argument('--bkgfile', help='Input Background spectrum', type=str) 96 | parser.add_argument('--arffile', help='Input Effective area file', type=str) 97 | parser.add_argument('--spofile', help='Output SPEX spectrum file (.spo, required)', type=str, required=True) 98 | parser.add_argument('--resfile', help='Output SPEX response file (.res, required)', type=str, required=True) 99 | parser.add_argument('--keep-badchannels', help='Do not remove bad channels.', dest="badchan", action="store_false", 100 | default=True) 101 | parser.add_argument('--overwrite', help="Overwrite existing spo and res files with same name.", action="store_true", 102 | default=False) 103 | parser.add_argument('--no-exprate', help="Do not 
write additional Exp_Rate column (SPEX <=3.04.00).", 104 | dest='exprate', action="store_false", default=True) 105 | parser.add_argument('--backscale', help="Set a background scaling factor.", type=float, default=1.0) 106 | parser.add_argument('--no-color', help="Suppress color output.", dest="color", action="store_false", default=True) 107 | parser.add_argument('--version', action='version', version=message.version) 108 | 109 | return parser 110 | 111 | 112 | if __name__ == "__main__": 113 | main() 114 | -------------------------------------------------------------------------------- /pyspextools/scripts/tg2spex.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import sys 5 | import glob 6 | import argparse 7 | import astropy.io.fits as fits 8 | import pyspextools 9 | import pyspextools.io 10 | from pyspextools.io.tg import TGRegion 11 | from pyspextools.io.dataset import Dataset 12 | from pyspextools.data.badchannels import clean_region 13 | 14 | import pyspextools.messages as message 15 | 16 | 17 | def main(): 18 | """Program to convert Chandra grating spectra to SPEX format.""" 19 | 20 | # Obtain command line arguments 21 | parser = tg2spex_arguments() 22 | args = parser.parse_args() 23 | 24 | # Print message header 25 | message.print_header(os.path.basename(__file__)) 26 | 27 | # Set color in the terminal 28 | message.set_color(args.color) 29 | 30 | # Set the path from the command line 31 | path = args.path 32 | 33 | # Add the tg2spex command to the file history 34 | history = [] 35 | history.append("TG2SPEX version: {0}".format(pyspextools.__version__)) 36 | command = '' 37 | for arg in sys.argv: 38 | command = command + ' ' + arg 39 | history.append("Command used: {0}".format(command)) 40 | history.append("Variables derived from commandline:") 41 | for arg in vars(args): 42 | line = "{0} : {1}".format(arg, getattr(args, arg)) 43 | history.append(line) 44 | 45 | # Find out the origin of the observation (CIAO or TGCAT) and set filename mode. 46 | print("Origin of the observation is: ", end='') 47 | input_prefix = args.input_prefix 48 | 49 | # Check if default TGCAT filenames are there 50 | if os.path.isfile(path+"/pha2.gz"): 51 | tgcat = True 52 | phafile = path+"/pha2.gz" 53 | print("TGCAT") 54 | elif os.path.isfile(path+"/pha2"): 55 | tgcat = True 56 | phafile = path+"/pha2" 57 | print("TGCAT") 58 | # If not, start detecting the CIAO type file names ('prefix'_pha2.fits) 59 | else: 60 | tgcat = False 61 | print("CIAO") 62 | # Check if the input prefix is specified. 63 | if input_prefix != '': 64 | # If input prefix is specified 65 | if os.path.isfile(path+input_prefix+'pha2.fits'): 66 | # Is the suggested file there? If yes, save the name and path, and cheer. 67 | phafile = path+input_prefix+'pha2.fits' 68 | print("Found PHA2 file at: {0}".format(phafile)) 69 | else: 70 | # If the file is not there, suggest to change the flag. 71 | message.error("Could not find file with name: {0}".format(path+input_prefix+'pha2.fits')) 72 | print("Please change the '--input-prefix flag to a correct name.") 73 | sys.exit() 74 | else: 75 | # If there is no prefix specified, do a detection 76 | filelist = glob.glob(path+"/*pha2.fits") 77 | if len(filelist) > 1: 78 | # If there are more pha2 files, ask for a more detailed specification. 79 | message.error("More than one pha2 files found. 
Please provide the input prefix through the " 80 | "'--input-prefix' flag.") 81 | sys.exit() 82 | elif len(filelist) == 1: 83 | # If there is one pha2 file, this is probably the right one: 84 | phafile = filelist[0] 85 | print("Found PHA2 file at: {0}".format(phafile)) 86 | # Deduce input_prefix from 87 | input_prefix = phafile.replace(path+"/", '') 88 | input_prefix = input_prefix.replace('pha2.fits', '') 89 | print("Autodetected input file prefix to be: {0}".format(input_prefix)) 90 | else: 91 | # If there is no pha2 file, we are obviously looking in the wrong place. 92 | message.error("No PHA2 file found at this path. Check your input.") 93 | sys.exit() 94 | 95 | # Read the data and header of the PHA2 file to get the origin right 96 | (phadata, phaheader) = fits.getdata(phafile, 'SPECTRUM', header=True) 97 | 98 | # Detected instrument and gratings 99 | print("Autodetected mission and instruments:") 100 | print("Mission: {0}".format(phaheader['TELESCOP'])) 101 | print("Grating: {0}".format(phaheader['GRATING'])) 102 | print("Detector: {0}".format(phaheader['INSTRUME'])) 103 | 104 | # Do the conversion from PHA2 to SPEX format for each grating 105 | dataset = Dataset() 106 | 107 | if phaheader['GRATING'] == 'HETG': 108 | hetg = TGRegion() 109 | metg = TGRegion() 110 | 111 | # Obtain rmf and arf file list for hetg and metg 112 | print("Start HETG file conversion:") 113 | (heg_rmf_list, heg_arf_list) = search_response_files(path, tgcat, input_prefix, 'heg') 114 | if len(heg_rmf_list) == 0: 115 | message.error("HETG response list generation failed.") 116 | sys.exit() 117 | stat = hetg.read_region(phafile, heg_rmf_list, heg_arf_list, 'HETG', bkgsubtract=args.bkgsubtract) 118 | if stat != 0: 119 | message.error("HETG conversion failed") 120 | sys.exit() 121 | 122 | # Clean bad channels 123 | if args.badchan: 124 | print("Clean bad channels for HETG:") 125 | hetg = clean_region(hetg) 126 | 127 | dataset.append_region(hetg, 1, 1) 128 | 129 | print("Start METG file conversion:") 130 | (meg_rmf_list, meg_arf_list) = search_response_files(path, tgcat, input_prefix, 'meg') 131 | if len(meg_rmf_list) == 0: 132 | message.error("METG response list generation failed.") 133 | sys.exit() 134 | stat = metg.read_region(phafile, meg_rmf_list, meg_arf_list, 'METG', bkgsubtract=args.bkgsubtract) 135 | if stat != 0: 136 | message.error("METG conversion failed") 137 | sys.exit() 138 | 139 | # Clean bad channels 140 | 141 | if args.badchan: 142 | print("Clean bad channels for METG:") 143 | metg = clean_region(metg) 144 | 145 | dataset.append_region(metg, 1, 2) 146 | 147 | elif phaheader['GRATING'] == 'LETG': 148 | 149 | letg = TGRegion() 150 | 151 | # Obtain rmf and arf file list for letg 152 | print("Start LETG file conversion:") 153 | (leg_rmf_list, leg_arf_list) = search_response_files(path, tgcat, input_prefix, 'leg') 154 | if len(leg_rmf_list) == 0: 155 | message.error("LETG response list generation failed.") 156 | sys.exit() 157 | stat = letg.read_region(phafile, leg_rmf_list, leg_arf_list, 'LETG', bkgsubtract=args.bkgsubtract) 158 | if stat != 0: 159 | message.error("LETG conversion failed") 160 | sys.exit() 161 | 162 | # Clean bad channels 163 | if args.badchan: 164 | print("Clean bad channels for LETG:") 165 | letg = clean_region(letg) 166 | 167 | dataset.append_region(letg, 1, 1) 168 | 169 | else: 170 | message.error("Grating name not recognized.") 171 | sys.exit() 172 | 173 | # Set the file names for the res and spo file. 
174 | if args.output_prefix == '': 175 | args.output_prefix = phaheader['GRATING'] 176 | spofile = args.output_prefix+'.spo' 177 | resfile = args.output_prefix+'.res' 178 | 179 | # Write regions to file 180 | message.proc_start("Write spectra and response to SPEX format") 181 | stat = dataset.write_all_regions(spofile, resfile, exp_rate=args.exprate, overwrite=args.overwrite, history=history) 182 | message.proc_end(stat) 183 | 184 | 185 | # Search RMF and ARF files for certain grating 186 | def search_response_files(path, tgcat, prefix, gname): 187 | """Search the accompanying response file for a certain grating. gname can be 'heg', 'meg' or 'leg'.""" 188 | 189 | if tgcat: 190 | rmflist = glob.glob(path+'/'+gname+'*.rmf.gz') 191 | arflist = glob.glob(path+'/'+gname+'*.arf.gz') 192 | else: 193 | rmflist = glob.glob(path+'/'+prefix+gname+'*.rmf') 194 | arflist = glob.glob(path+'/'+prefix+gname+'*.arf') 195 | if len(rmflist) <= 0 or len(arflist) <= 0: 196 | rmflist = glob.glob(path+'/tg/'+prefix+gname+'*.rmf') 197 | arflist = glob.glob(path+'/tg/'+prefix+gname+'*.arf') 198 | 199 | if len(rmflist) <= 0 or len(arflist) <= 0: 200 | message.error("Could not find suitable rmf or arf files in path.") 201 | print("We suggest to run the 'mktgresp' task from CIAO to obtain all the responses.") 202 | 203 | rmflist.sort() 204 | arflist.sort() 205 | 206 | return rmflist, arflist 207 | 208 | 209 | # Get command line arguments 210 | def tg2spex_arguments(): 211 | """Obtain command line arguments.""" 212 | parser = argparse.ArgumentParser(description=message.docs) 213 | parser.add_argument('path', help="Path to the observation directory where the pha2, arfs and rmfs are.") 214 | parser.add_argument('--input-prefix', help="Input filename prefix (example: 'hrcf04149_repro_').", 215 | dest="input_prefix", default='') 216 | parser.add_argument('--output-prefix', help="Output filename prefix (the output files will be named 'prefix'.spo and 'prefix'.res).", 217 | dest="output_prefix", default='') 218 | parser.add_argument('--no-bkgsubtract', help="Do not subtract the background spectrum.", dest="bkgsubtract", 219 | action="store_false", default=True) 220 | parser.add_argument('--keep-badchannels', help='Do not remove bad channels.', dest="badchan", action="store_false", 221 | default=True) 222 | parser.add_argument('--overwrite', help="Overwrite existing spo and res files with same name.", action="store_true", 223 | default=True) 224 | parser.add_argument('--no-exprate', help="Do not write additional Exp_Rate column (SPEX <=3.04.00).", 225 | dest="exprate", action="store_false", default=True) 226 | parser.add_argument('--no-color', help="Suppress color output.", dest="color", action="store_false", default=True) 227 | parser.add_argument('--version', action='version', version=message.version) 228 | 229 | return parser 230 | 231 | 232 | if __name__ == "__main__": 233 | main() 234 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | numpy >= 1.22.0 2 | astropy 3 | matplotlib 4 | sphinx 5 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import setuptools 4 | 5 | setuptools.setup() 6 | --------------------------------------------------------------------------------
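Example (illustrative addition, not part of the repository): a minimal sketch of a SPEX user-model executable built on pyspextools.model.User, following the read/compute/write protocol of pyspextools/model/user.py shown above. The flat-spectrum model and the meaning of the first parameter are hypothetical placeholders; a real model would replace the line that fills sener.

#!/usr/bin/env python

import numpy
from pyspextools.model import User

def main():
    usr = User()                       # reads the SPEX parameter file and energy grid (sys.argv[1] and sys.argv[2])
    norm = usr.par[0]                  # first model parameter from SPEX (assumed here to be a normalisation)
    usr.sener = norm * usr.deg         # spectrum in ph/s/bin: constant photon density times bin width
    usr.wener = numpy.zeros(usr.neg)   # no intra-bin energy correction for this simple model
    usr.write_spc()                    # write sener and wener to the output file read by SPEX

if __name__ == "__main__":
    main()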