├── .codecov.yaml ├── .codespellrc ├── .coveragerc ├── .cruft.json ├── .flake8 ├── .github └── workflows │ ├── ci.yml │ ├── label_sync.yml │ └── sub_package_update.yml ├── .gitignore ├── .isort.cfg ├── .pre-commit-config.yaml ├── .readthedocs.yaml ├── .rtd-environment.yaml ├── .ruff.toml ├── CHANGELOG.rst ├── LICENSE.rst ├── MANIFEST.in ├── README.rst ├── changelog └── README.rst ├── docs ├── Makefile ├── api.rst ├── conf.py ├── data_types │ ├── index.rst │ ├── raster.rst │ └── spectrogram.rst ├── index.rst ├── installation.rst ├── make.bat ├── nitpick-exceptions └── whatsnew │ ├── changelog.rst │ └── index.rst ├── licenses ├── LICENSE.rst ├── README.rst └── TEMPLATE_LICENSE.rst ├── pyproject.toml ├── pytest.ini ├── setup.py ├── sunraster ├── __init__.py ├── _dev │ ├── __init__.py │ └── scm_version.py ├── data │ └── README.rst ├── instr │ ├── __init__.py │ ├── spice.py │ └── tests │ │ ├── __init__.py │ │ └── test_spice.py ├── meta.py ├── spectrogram.py ├── spectrogram_sequence.py ├── tests │ ├── __init__.py │ ├── data │ │ ├── solo_L2_spice-n-ras-db_20200602T081733_V01_12583760-000.fits │ │ └── solo_L2_spice-n-sit_20200620T235901_V01_16777431-000.fits │ ├── test_spectrogram.py │ └── test_spectrogramsequence.py └── version.py └── tox.ini /.codecov.yaml: -------------------------------------------------------------------------------- 1 | comment: off 2 | coverage: 3 | status: 4 | project: 5 | default: 6 | threshold: 0.2% 7 | 8 | codecov: 9 | require_ci_to_pass: false 10 | notify: 11 | wait_for_ci: true 12 | -------------------------------------------------------------------------------- /.codespellrc: -------------------------------------------------------------------------------- 1 | [codespell] 2 | skip = *.asdf,*.fits,*.fts,*.header,*.json,*.xsh,*cache*,*egg*,*extern*,.git,.idea,.tox,_build,*truncated,*.svg,.asv_env,.history 3 | ignore-words-list = 4 | alog, 5 | nd, 6 | nin, 7 | observ, 8 | ot, 9 | te, 10 | upto, 11 | afile, 12 | precessed, 13 | precess 14 | 
-------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = 3 | sunraster/conftest.py 4 | sunraster/*setup_package* 5 | sunraster/extern/* 6 | sunraster/version* 7 | */sunraster/conftest.py 8 | */sunraster/*setup_package* 9 | */sunraster/extern/* 10 | */sunraster/version* 11 | 12 | [report] 13 | exclude_lines = 14 | # Have to re-enable the standard pragma 15 | pragma: no cover 16 | # Don't complain about packages we have installed 17 | except ImportError 18 | # Don't complain if tests don't hit assertions 19 | raise AssertionError 20 | raise NotImplementedError 21 | # Don't complain about script hooks 22 | def main(.*): 23 | # Ignore branches that don't pertain to this version of Python 24 | pragma: py{ignore_python_version} 25 | # Don't complain about IPython completion helper 26 | def _ipython_key_completions_ 27 | # typing.TYPE_CHECKING is False at runtime 28 | if TYPE_CHECKING: 29 | # Ignore typing overloads 30 | @overload 31 | -------------------------------------------------------------------------------- /.cruft.json: -------------------------------------------------------------------------------- 1 | { 2 | "template": "https://github.com/sunpy/package-template", 3 | "commit": "bea8060eef60df35f7df61d9db1d56b838bda01a", 4 | "checkout": null, 5 | "context": { 6 | "cookiecutter": { 7 | "package_name": "sunraster", 8 | "module_name": "sunraster", 9 | "short_description": "sunraster is an open-source Python library that provides the tools to read in and analyze spectrogram data.", 10 | "author_name": "The SunPy Community", 11 | "author_email": "sunpy@googlegroups.com", 12 | "project_url": "https://sunpy.org", 13 | "github_repo": "sunpy/sunraster", 14 | "sourcecode_url": "https://github.com/sunpy/sunraster", 15 | "download_url": "https://pypi.org/project/sunraster", 16 | "documentation_url": 
"https://docs.sunpy.org/projects/sunraster", 17 | "changelog_url": "https://docs.sunpy.org/projects/sunraster/en/stable/whatsnew/changelog.html", 18 | "issue_tracker_url": "https://github.com/sunpy/sunraster/issues", 19 | "license": "BSD 2-Clause", 20 | "minimum_python_version": "3.12", 21 | "use_compiled_extensions": "n", 22 | "enable_dynamic_dev_versions": "y", 23 | "include_example_code": "n", 24 | "include_cruft_update_github_workflow": "y", 25 | "use_extended_ruff_linting": "n", 26 | "_sphinx_theme": "sunpy", 27 | "_parent_project": "", 28 | "_install_requires": "", 29 | "_copy_without_render": [ 30 | "docs/_templates", 31 | "docs/_static", 32 | ".github/workflows/sub_package_update.yml" 33 | ], 34 | "_template": "https://github.com/sunpy/package-template", 35 | "_commit": "bea8060eef60df35f7df61d9db1d56b838bda01a" 36 | } 37 | }, 38 | "directory": null 39 | } 40 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = 3 | # missing-whitespace-around-operator 4 | E225 5 | # missing-whitespace-around-arithmetic-operator 6 | E226 7 | # line-too-long 8 | E501 9 | # unused-import 10 | F401 11 | # undefined-local-with-import-star 12 | F403 13 | # redefined-while-unused 14 | F811 15 | # Line break occurred before a binary operator 16 | W503, 17 | # Line break occurred after a binary operator 18 | W504 19 | max-line-length = 110 20 | exclude = 21 | .git 22 | __pycache__ 23 | docs/conf.py 24 | build 25 | sunraster/__init__.py 26 | rst-directives = 27 | plot 28 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | # Main CI Workflow 2 | name: CI 3 | 4 | on: 5 | push: 6 | branches: 7 | - 'main' 8 | - '*.*' 9 | - '!*backport*' 10 | tags: 11 | - 'v*' 12 | - '!*dev*' 13 | - '!*pre*' 14 | - 
'!*post*' 15 | pull_request: 16 | # Allow manual runs through the web UI 17 | workflow_dispatch: 18 | schedule: 19 | # ┌───────── minute (0 - 59) 20 | # │ ┌───────── hour (0 - 23) 21 | # │ │ ┌───────── day of the month (1 - 31) 22 | # │ │ │ ┌───────── month (1 - 12 or JAN-DEC) 23 | # │ │ │ │ ┌───────── day of the week (0 - 6 or SUN-SAT) 24 | - cron: '0 7 * * 3' # Every Wed at 07:00 UTC 25 | 26 | concurrency: 27 | group: ${{ github.workflow }}-${{ github.ref }} 28 | cancel-in-progress: true 29 | 30 | jobs: 31 | core: 32 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v2 33 | with: 34 | submodules: false 35 | coverage: codecov 36 | toxdeps: tox-pypi-filter 37 | posargs: -n auto 38 | envs: | 39 | - linux: py313 40 | secrets: 41 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 42 | 43 | sdist_verify: 44 | runs-on: ubuntu-latest 45 | steps: 46 | - uses: actions/checkout@v5 47 | - uses: actions/setup-python@v6 48 | with: 49 | python-version: '3.13' 50 | - run: python -m pip install -U --user build 51 | - run: python -m build . 
--sdist 52 | - run: python -m pip install -U --user twine 53 | - run: python -m twine check dist/* 54 | 55 | test: 56 | needs: [core, sdist_verify] 57 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v2 58 | with: 59 | submodules: false 60 | coverage: codecov 61 | toxdeps: tox-pypi-filter 62 | posargs: -n auto 63 | envs: | 64 | - linux: py314 65 | - windows: py312 66 | - macos: py312 67 | - linux: py312-oldestdeps 68 | - linux: py314-devdeps 69 | secrets: 70 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 71 | 72 | docs: 73 | needs: [core] 74 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@v2 75 | with: 76 | default_python: '3.13' 77 | submodules: false 78 | pytest: false 79 | toxdeps: tox-pypi-filter 80 | cache-key: docs-${{ github.run_id }} 81 | libraries: | 82 | apt: 83 | - graphviz 84 | envs: | 85 | - linux: build_docs 86 | 87 | online: 88 | if: "!startsWith(github.event.ref, 'refs/tags/v')" 89 | needs: [docs] 90 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@main 91 | with: 92 | submodules: false 93 | coverage: codecov 94 | toxdeps: tox-pypi-filter 95 | posargs: -n 1 --dist loadgroup 96 | envs: | 97 | - linux: py313-online 98 | 99 | publish: 100 | # Build wheels on PRs only when labelled. 
Releases will only be published if tagged ^v.* 101 | # see https://github-actions-workflows.openastronomy.org/en/latest/publish.html#upload-to-pypi 102 | if: | 103 | github.event_name != 'pull_request' || 104 | ( 105 | github.event_name != 'pull_request' && ( 106 | github.ref_name != 'main' || 107 | github.event_name == 'workflow_dispatch' 108 | ) 109 | ) || ( 110 | github.event_name == 'pull_request' && 111 | contains(github.event.pull_request.labels.*.name, 'Run publish') 112 | ) 113 | needs: [test, docs] 114 | uses: OpenAstronomy/github-actions-workflows/.github/workflows/publish_pure_python.yml@v2 115 | with: 116 | python-version: "3.13" 117 | test_extras: 'all,tests' 118 | test_command: 'pytest -p no:warnings --doctest-rst -m "not mpl_image_compare" --pyargs sunraster' 119 | submodules: false 120 | secrets: 121 | pypi_token: ${{ secrets.pypi_token }} 122 | -------------------------------------------------------------------------------- /.github/workflows/label_sync.yml: -------------------------------------------------------------------------------- 1 | name: Label Sync 2 | on: 3 | workflow_dispatch: 4 | schedule: 5 | # ┌───────── minute (0 - 59) 6 | # │ ┌───────── hour (0 - 23) 7 | # │ │ ┌───────── day of the month (1 - 31) 8 | # │ │ │ ┌───────── month (1 - 12 or JAN-DEC) 9 | # │ │ │ │ ┌───────── day of the week (0 - 6 or SUN-SAT) 10 | - cron: '0 0 * * *' # run every day at midnight UTC 11 | 12 | # Give permissions to write issue labels 13 | permissions: 14 | issues: write 15 | 16 | jobs: 17 | label_sync: 18 | runs-on: ubuntu-latest 19 | name: Label Sync 20 | steps: 21 | - uses: srealmoreno/label-sync-action@850ba5cef2b25e56c6c420c4feed0319294682fd 22 | with: 23 | config-file: https://raw.githubusercontent.com/sunpy/.github/main/labels.yml 24 | -------------------------------------------------------------------------------- /.github/workflows/sub_package_update.yml: -------------------------------------------------------------------------------- 1 | # This 
template is taken from the cruft example code, for further information please see: 2 | # https://cruft.github.io/cruft/#automating-updates-with-github-actions 3 | name: Automatic Update from package template 4 | permissions: 5 | contents: write 6 | pull-requests: write 7 | 8 | on: 9 | # Allow manual runs through the web UI 10 | workflow_dispatch: 11 | schedule: 12 | # ┌───────── minute (0 - 59) 13 | # │ ┌───────── hour (0 - 23) 14 | # │ │ ┌───────── day of the month (1 - 31) 15 | # │ │ │ ┌───────── month (1 - 12 or JAN-DEC) 16 | # │ │ │ │ ┌───────── day of the week (0 - 6 or SUN-SAT) 17 | - cron: '0 7 * * 1' # Every Monday at 7am UTC 18 | 19 | jobs: 20 | update: 21 | runs-on: ubuntu-latest 22 | strategy: 23 | fail-fast: true 24 | steps: 25 | - uses: actions/checkout@v5 26 | 27 | - uses: actions/setup-python@v6 28 | with: 29 | python-version: "3.11" 30 | 31 | - name: Install Cruft 32 | run: python -m pip install git+https://github.com/Cadair/cruft@patch-p1 33 | 34 | - name: Check if update is available 35 | continue-on-error: false 36 | id: check 37 | run: | 38 | CHANGES=0 39 | if [ -f .cruft.json ]; then 40 | if ! cruft check; then 41 | CHANGES=1 42 | fi 43 | else 44 | echo "No .cruft.json file" 45 | fi 46 | 47 | echo "has_changes=$CHANGES" >> "$GITHUB_OUTPUT" 48 | 49 | - name: Run update if available 50 | id: cruft_update 51 | if: steps.check.outputs.has_changes == '1' 52 | run: | 53 | git config --global user.email "${{ github.actor }}@users.noreply.github.com" 54 | git config --global user.name "${{ github.actor }}" 55 | 56 | cruft_output=$(cruft update --skip-apply-ask --refresh-private-variables) 57 | echo $cruft_output 58 | git restore --staged . 
59 | 60 | if [[ "$cruft_output" == *"Failed to cleanly apply the update, there may be merge conflicts."* ]]; then 61 | echo merge_conflicts=1 >> $GITHUB_OUTPUT 62 | else 63 | echo merge_conflicts=0 >> $GITHUB_OUTPUT 64 | fi 65 | 66 | - name: Check if only .cruft.json is modified 67 | id: cruft_json 68 | if: steps.check.outputs.has_changes == '1' 69 | run: | 70 | git status --porcelain=1 71 | if [[ "$(git status --porcelain=1)" == " M .cruft.json" ]]; then 72 | echo "Only .cruft.json is modified. Exiting workflow early." 73 | echo "has_changes=0" >> "$GITHUB_OUTPUT" 74 | else 75 | echo "has_changes=1" >> "$GITHUB_OUTPUT" 76 | fi 77 | 78 | - name: Create pull request 79 | if: steps.cruft_json.outputs.has_changes == '1' 80 | uses: peter-evans/create-pull-request@v7 81 | with: 82 | token: ${{ secrets.GITHUB_TOKEN }} 83 | add-paths: "." 84 | commit-message: "Automatic package template update" 85 | branch: "cruft/update" 86 | delete-branch: true 87 | draft: ${{ steps.cruft_update.outputs.merge_conflicts == '1' }} 88 | title: "Updates from the package template" 89 | labels: | 90 | No Changelog Entry Needed 91 | body: | 92 | This is an autogenerated PR, which applies the latest changes from the [SunPy Package Template](https://github.com/sunpy/package-template). 93 | If this pull request has been opened as a draft there are conflicts which need fixing. 
94 | 95 | **To run the CI on this pull request you will need to close it and reopen it.** 96 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ### Python: https://raw.githubusercontent.com/github/gitignore/master/Python.gitignore 2 | 3 | # Byte-compiled / optimized / DLL files 4 | __pycache__/ 5 | *.py[cod] 6 | *$py.class 7 | tmp/ 8 | 9 | # C extensions 10 | *.so 11 | 12 | # Distribution / packaging 13 | .Python 14 | pip-wheel-metadata/ 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | wheels/ 27 | share/python-wheels/ 28 | *.egg-info/ 29 | .installed.cfg 30 | *.egg 31 | MANIFEST 32 | sunraster/_version.py 33 | # PyInstaller 34 | # Usually these files are written by a python script from a template 35 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
36 | *.manifest 37 | *.spec 38 | 39 | # Installer logs 40 | pip-log.txt 41 | pip-delete-this-directory.txt 42 | 43 | # Unit test / coverage reports 44 | htmlcov/ 45 | .tox/ 46 | .nox/ 47 | .coverage 48 | .coverage.* 49 | .cache 50 | nosetests.xml 51 | coverage.xml 52 | *.cover 53 | *.py,cover 54 | .hypothesis/ 55 | .pytest_cache/ 56 | cover/ 57 | 58 | # Translations 59 | *.mo 60 | *.pot 61 | 62 | # Django stuff: 63 | *.log 64 | local_settings.py 65 | db.sqlite3 66 | db.sqlite3-journal 67 | 68 | # Flask stuff: 69 | instance/ 70 | .webassets-cache 71 | 72 | # Scrapy stuff: 73 | .scrapy 74 | 75 | # Sphinx documentation 76 | docs/_build/ 77 | # automodapi 78 | docs/api 79 | docs/sg_execution_times.rst 80 | 81 | # PyBuilder 82 | .pybuilder/ 83 | target/ 84 | 85 | # Jupyter Notebook 86 | .ipynb_checkpoints 87 | 88 | # IPython 89 | profile_default/ 90 | ipython_config.py 91 | 92 | # pyenv 93 | # For a library or package, you might want to ignore these files since the code is 94 | # intended to run in multiple environments; otherwise, check them in: 95 | # .python-version 96 | 97 | # pipenv 98 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 99 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 100 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 101 | # install all needed dependencies. 102 | #Pipfile.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Rope project settings 132 | .ropeproject 133 | 134 | # mkdocs documentation 135 | /site 136 | 137 | # mypy 138 | .mypy_cache/ 139 | 140 | # Pyre type checker 141 | .pyre/ 142 | 143 | # IDE 144 | # PyCharm 145 | .idea 146 | 147 | # Spyder project settings 148 | .spyderproject 149 | .spyproject 150 | 151 | ### VScode: https://raw.githubusercontent.com/github/gitignore/master/Global/VisualStudioCode.gitignore 152 | .vscode/* 153 | .vs/* 154 | 155 | ### https://raw.github.com/github/gitignore/master/Global/OSX.gitignore 156 | .DS_Store 157 | .AppleDouble 158 | .LSOverride 159 | 160 | # Icon must ends with two \r. 161 | Icon 162 | 163 | # Thumbnails 164 | ._* 165 | 166 | # Files that might appear on external disk 167 | .Spotlight-V100 168 | .Trashes 169 | 170 | ### Linux: https://raw.githubusercontent.com/github/gitignore/master/Global/Linux.gitignore 171 | *~ 172 | 173 | # temporary files which can be created if a process still has a handle open of a deleted file 174 | .fuse_hidden* 175 | 176 | # KDE directory preferences 177 | .directory 178 | 179 | # Linux trash folder which might appear on any partition or disk 180 | .Trash-* 181 | 182 | # .nfs files are created when an open file is removed but is still being accessed 183 | .nfs* 184 | 185 | # pytype static type analyzer 186 | .pytype/ 187 | 188 | # General 189 | .DS_Store 190 | .AppleDouble 191 | .LSOverride 192 | 193 | # Icon must end with two \r 194 | Icon 195 | 196 | 197 | # Thumbnails 198 | ._* 199 | 200 | # Files that might appear in the root of a volume 201 | .DocumentRevisions-V100 202 | .fseventsd 203 | .Spotlight-V100 204 | .TemporaryItems 205 | .Trashes 206 | 
.VolumeIcon.icns 207 | .com.apple.timemachine.donotpresent 208 | 209 | # Directories potentially created on remote AFP share 210 | .AppleDB 211 | .AppleDesktop 212 | Network Trash Folder 213 | Temporary Items 214 | .apdisk 215 | 216 | ### Windows: https://raw.githubusercontent.com/github/gitignore/master/Global/Windows.gitignore 217 | 218 | # Windows thumbnail cache files 219 | Thumbs.db 220 | ehthumbs.db 221 | ehthumbs_vista.db 222 | 223 | # Dump file 224 | *.stackdump 225 | 226 | # Folder config file 227 | [Dd]esktop.ini 228 | 229 | # Recycle Bin used on file shares 230 | $RECYCLE.BIN/ 231 | 232 | # Windows Installer files 233 | *.cab 234 | *.msi 235 | *.msix 236 | *.msm 237 | *.msp 238 | 239 | # Windows shortcuts 240 | *.lnk 241 | 242 | ### Extra Python Items and sunraster Specific 243 | .hypothesis 244 | .pytest_cache 245 | sunraster/_compiler.c 246 | sunraster/cython_version.py 247 | docs/_build 248 | docs/generated 249 | docs/api/ 250 | docs/whatsnew/latest_changelog.txt 251 | examples/**/*.csv 252 | examples/**/*.asdf 253 | figure_test_images* 254 | tags 255 | baseline 256 | 257 | # Release script 258 | .github_cache 259 | 260 | # Misc Stuff 261 | .history 262 | *.orig 263 | .tmp 264 | node_modules/ 265 | package-lock.json 266 | package.json 267 | .prettierrc 268 | 269 | # Log files generated by 'vagrant up' 270 | *.log 271 | -------------------------------------------------------------------------------- /.isort.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | balanced_wrapping = true 3 | skip = 4 | docs/conf.py 5 | sunraster/__init__.py 6 | default_section = THIRDPARTY 7 | include_trailing_comma = true 8 | known_astropy = astropy, asdf 9 | known_sunpy = sunpy, ndcube 10 | known_first_party = sunraster 11 | length_sort = false 12 | length_sort_sections = stdlib 13 | line_length = 110 14 | multi_line_output = 3 15 | no_lines_before = LOCALFOLDER 16 | sections = STDLIB, THIRDPARTY, ASTROPY, SUNPY, 
FIRSTPARTY, LOCALFOLDER 17 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | # This should be before any formatting hooks like isort 3 | - repo: https://github.com/astral-sh/ruff-pre-commit 4 | rev: "v0.13.2" 5 | hooks: 6 | - id: ruff 7 | args: ["--fix"] 8 | - repo: https://github.com/PyCQA/isort 9 | rev: 6.1.0 10 | hooks: 11 | - id: isort 12 | exclude: ".*(.fits|.fts|.fit|.header|.txt|tca.*|extern.*|{{ cookiecutter.module_name }}/extern)$" 13 | - repo: https://github.com/pre-commit/pre-commit-hooks 14 | rev: v6.0.0 15 | hooks: 16 | - id: check-ast 17 | - id: check-case-conflict 18 | - id: trailing-whitespace 19 | exclude: ".*(.fits|.fts|.fit|.header|.txt)$" 20 | - id: check-yaml 21 | - id: debug-statements 22 | - id: check-added-large-files 23 | args: ["--enforce-all", "--maxkb=1054"] 24 | - id: end-of-file-fixer 25 | exclude: ".*(.fits|.fts|.fit|.header|.txt|tca.*|.json)$|^CITATION.rst$" 26 | - id: mixed-line-ending 27 | exclude: ".*(.fits|.fts|.fit|.header|.txt|tca.*)$" 28 | - repo: https://github.com/codespell-project/codespell 29 | rev: v2.4.1 30 | hooks: 31 | - id: codespell 32 | args: [ "--write-changes" ] 33 | ci: 34 | autofix_prs: false 35 | autoupdate_schedule: "quarterly" 36 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: ubuntu-lts-latest 5 | tools: 6 | python: "mambaforge-latest" 7 | jobs: 8 | post_checkout: 9 | - git fetch --unshallow || true 10 | pre_install: 11 | - git update-index --assume-unchanged .rtd-environment.yaml docs/conf.py 12 | 13 | conda: 14 | environment: .rtd-environment.yaml 15 | 16 | sphinx: 17 | builder: html 18 | configuration: docs/conf.py 19 | fail_on_warning: false 20 | 21 | formats: 22 | - htmlzip 23 | 
24 | python: 25 | install: 26 | - method: pip 27 | extra_requirements: 28 | - docs 29 | path: . 30 | -------------------------------------------------------------------------------- /.rtd-environment.yaml: -------------------------------------------------------------------------------- 1 | name: sunraster 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python=3.13 6 | - pip 7 | - graphviz!=2.42.*,!=2.43.* 8 | -------------------------------------------------------------------------------- /.ruff.toml: -------------------------------------------------------------------------------- 1 | target-version = "py310" 2 | line-length = 120 3 | exclude = [ 4 | ".git", 5 | "__pycache__", 6 | "build", 7 | "sunraster/version.py", 8 | ] 9 | 10 | [lint] 11 | select = [ 12 | "E", 13 | "F", 14 | "W", 15 | "UP", 16 | "PT", 17 | ] 18 | extend-ignore = [ 19 | # pycodestyle (E, W) 20 | "E501", # ignore line length will use a formatter instead 21 | # pytest (PT) 22 | "PT001", # Always use pytest.fixture() 23 | "PT023", # Always use () on pytest decorators 24 | # flake8-pie (PIE) 25 | "PIE808", # Disallow passing 0 as the first argument to range 26 | # flake8-use-pathlib (PTH) 27 | "PTH123", # open() should be replaced by Path.open() 28 | # Ruff (RUF) 29 | "RUF003", # Ignore ambiguous quote marks, doesn't allow ' in comments 30 | "RUF012", # Mutable class attributes should be annotated with `typing.ClassVar` 31 | "RUF013", # PEP 484 prohibits implicit `Optional` 32 | "RUF015", # Prefer `next(iter(...))` over single element slice 33 | ] 34 | 35 | [lint.per-file-ignores] 36 | "setup.py" = [ 37 | "INP001", # File is part of an implicit namespace package. 38 | ] 39 | "conftest.py" = [ 40 | "INP001", # File is part of an implicit namespace package. 41 | ] 42 | "docs/conf.py" = [ 43 | "E402" # Module imports not at top of file 44 | ] 45 | "docs/*.py" = [ 46 | "INP001", # File is part of an implicit namespace package. 
47 | ] 48 | "examples/**.py" = [ 49 | "T201", # allow use of print in examples 50 | "INP001", # File is part of an implicit namespace package. 51 | ] 52 | "__init__.py" = [ 53 | "E402", # Module level import not at top of cell 54 | "F401", # Unused import 55 | "F403", # from {name} import * used; unable to detect undefined names 56 | "F405", # {name} may be undefined, or defined from star imports 57 | ] 58 | "test_*.py" = [ 59 | "E402", # Module level import not at top of cell 60 | ] 61 | 62 | [lint.pydocstyle] 63 | convention = "numpy" 64 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | 0.7.0 (2025-10-16) 2 | ================== 3 | 4 | Breaking Changes 5 | ---------------- 6 | 7 | - Increased the minimum version of Python to 3.10.0 (`#257 `__) 8 | - Increased minimum required version of ``ndcube`` to 2.3.0. 9 | This comes with the removal of older metadata handling methods which are now using upstreamed methods from ndcube. (`#273 `__) 10 | - Increased minimum version of Python to 3.12. 11 | Increased minimum version of NumPy to 1.26.0. 12 | Increased minimum version of Astropy to 6.1.0. 13 | Increased minimum version of sunpy to 7.0.0. (`#293 `__) 14 | 15 | 16 | 0.6.0 (2025-06-12) 17 | ================== 18 | 19 | Breaking Changes 20 | ---------------- 21 | 22 | - Increased the minimum version of ``sunpy`` to 6.0.0 23 | - Increased the minimum version of ``ndcube`` to 2.3.2 24 | - Increased the minimum version of Python to 3.10 25 | - Removed internal metadata handling and replaced it with the new ndcube version. 26 | 27 | 0.5.1 (2024-01-17) 28 | ================== 29 | 30 | Bug Fixes 31 | --------- 32 | 33 | - Fixed SPICE reader from hardcoding ``u.adu`` and instead using the "BUNIT" keyword in the FITS header. 
(`#254 `__) 34 | 35 | 0.5.0 (2023-11-16) 36 | ================== 37 | 38 | Breaking Changes 39 | ---------------- 40 | 41 | - Increased the minimum version of ``sunpy`` to 5.0.0 42 | - Increased the minimum version of ``ndcube`` to 2.1.2 43 | - Increased the minimum version of Python to 3.9 44 | 45 | 0.4.3 (2022-10-06) 46 | ================== 47 | 48 | Bug Fixes 49 | --------- 50 | 51 | - Fixed SPICE reader failing on FITS files containing ``WCSDVARR`` HDUs (with additional distortion information), by ignoring these HDUs. (`#215 `__) 52 | 53 | 54 | 0.4.2 (2022-06-08) 55 | ================== 56 | 57 | Bug Fixes 58 | --------- 59 | 60 | - Allow SPICE FITS reader to handle wide-slit files. (`#204 `__) 61 | - Allow SPICE FITS reader to handle files with missing telemetry. (`#205 `__) 62 | 63 | 64 | 0.4.1 (2022-05-24) 65 | ================== 66 | 67 | Breaking Changes 68 | ---------------- 69 | 70 | - Increased the minimum version of ``sunpy`` to 4.0.0 71 | 72 | 0.4.0 (2022-03-08) 73 | ================== 74 | 75 | Breaking Changes 76 | ---------------- 77 | 78 | - Removed IRIS reader, you will want to install and use ``irispy-lmsal`` instead. 79 | - Removed support for Python 3.7. (`#198 `__) 80 | 81 | 82 | 0.3.0 (2021-11-19) 83 | ================== 84 | 85 | Breaking Changes 86 | ---------------- 87 | 88 | - In IRIS spectrograph read, move all metadata to the meta objects of the raster cubes. (`#182 `__) 89 | - Remove extra_coords keyword from `~sunraster.spectrogram.SpectrogramCube` in accordance with new ndcube 2.0 API. 90 | Extra coords can be added through the ndcube ExtraCoords.add API which is new in ndcube 2.0. (`#182 `__) 91 | - In IRIS spectrograph reader, all extra coords except time have been moved to the meta object. (`#182 `__) 92 | - Removed ``lon`` and ``lat`` properties from all objects in sunraster. 
(`#184 `__) 93 | 94 | 95 | New Features 96 | ------------ 97 | 98 | - Create new property ``sunraster.spectrogram.SpectrogramSequence.celestial``, on ``sunraster.spectrogram.SpectrogramSequence`` to return a `~astropy.coordinates.SkyCoord` holding the celestial world coords of the pixels. (`#182 `__) 99 | - Create new property `~sunraster.spectrogram.SpectrogramCube.celestial`, on `~sunraster.spectrogram.SpectrogramCube` to return a `~astropy.coordinates.SkyCoord` holding the celestial world coords of the pixels. (`#182 `__) 100 | - Create a new ``~sunraster.instr.iris.IRISSGMeta`` metadata object. (`#182 `__) 101 | - Added a sliceable ``Meta`` class for axis-associated metadata. (`#184 `__) 102 | 103 | 104 | 0.2.0 (2021-01-28) 105 | ================== 106 | 107 | Features 108 | -------- 109 | 110 | - Include a base time to output of `sunraster.SpectrogramCube.time` when time is derived from WCS and a recognized base time can be found in meta. (`#168 `__) 111 | - Add optional instrument_axes attribute to SpectrogramCube to enable users to keep track of axes (including through slicing) when axes may have a significance not fully described by the world axis physical types. (`#169 `__) 112 | - Create new Metadata classes for defining mapping of metadata from instrument-specific files to a general metadata API. Includes a specific mapping for SolO/SPICE. (`#171 `__) 113 | - Replace RasterSequence world_axis_physical_type properties with versions using NDCubeSequence.array_axis_physical_types. (`#173 `__) 114 | - Provide functions to read SPICE file. Also refactor Meta class to be dict-like. (`#173 `__) 115 | - Enable SPICE FITS reader to handle multiple files. (`#178 `__) 116 | 117 | Bug Fixes 118 | --------- 119 | 120 | - Bump min ndcube version to fix bug caused when OS is not 64-bit. (`#162 `__) 121 | - Stop `~sunraster.spectrogram_sequence.SpectrogramSequence` crashing when time coord not 1-D. 
(`#178 `__) 122 | - Allow SPICE FITS reader to handle dumbbell windows. (`#178 `__) 123 | - Ensure args are passed correctly to NDCube constructor by SpectrogramCube by entering them as kwargs instead of ordered args. (`#179 `__) 124 | 125 | Trivial/Internal Changes 126 | ------------------------ 127 | 128 | - Altered names of some SPICEMeta properties. (`#178 `__) 129 | -------------------------------------------------------------------------------- /LICENSE.rst: -------------------------------------------------------------------------------- 1 | Copyright (c) 2013-2025 The SunPy Developers 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are 6 | met: 7 | 8 | * Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | * Redistributions in binary form must reproduce the above copyright 12 | notice, this list of conditions and the following disclaimer in the 13 | documentation and/or other materials provided with the distribution. 14 | 15 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 16 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 17 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 18 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 19 | HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 20 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 21 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 22 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 23 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
26 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | # Exclude specific files 2 | # All files which are tracked by git and not explicitly excluded here are included by setuptools_scm 3 | # Prune folders 4 | prune sunraster/_dev 5 | prune build 6 | prune docs/_build 7 | prune docs/api 8 | global-exclude *.pyc *.o 9 | 10 | # This subpackage is only used in development checkouts 11 | # and should not be included in built tarballs 12 | prune sunraster/_dev 13 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ========= 2 | sunraster 3 | ========= 4 | 5 | |Latest Version| |codecov| |matrix| |DOI| |Powered by NumFOCUS| |Powered by SunPy| 6 | 7 | .. |Latest Version| image:: https://img.shields.io/pypi/v/sunraster.svg 8 | :target: https://pypi.python.org/pypi/sunraster/ 9 | .. |matrix| image:: https://img.shields.io/matrix/sunpy:openastronomy.org.svg?colorB=%23FE7900&label=Chat&logo=matrix&server_fqdn=openastronomy.modular.im 10 | :target: https://openastronomy.element.io/#/room/#sunpy:openastronomy.org 11 | .. |codecov| image:: https://codecov.io/gh/sunpy/sunraster/branch/main/graph/badge.svg 12 | :target: https://codecov.io/gh/sunpy/sunraster 13 | .. |DOI| image:: https://zenodo.org/badge/2165383.svg 14 | :target: https://zenodo.org/badge/latestdoi/2165383 15 | .. |Powered by NumFOCUS| image:: https://img.shields.io/badge/powered%20by-NumFOCUS-orange.svg?style=flat&colorA=E1523D&colorB=007D8A 16 | :target: https://numfocus.org 17 | .. |Powered by SunPy| image:: https://img.shields.io/badge/powered%20by-SunPy-orange.svg?style=flat 18 | :target: https://www.sunpy.org 19 | 20 | ``sunraster`` is an open-source Python library that provides the tools to read in and analyze spectrogram data. 
21 | 22 | Installation 23 | ============ 24 | 25 | An easy way to install ``sunraster`` is to do so with the anaconda distribution using the conda-forge channel, with the following command at the terminal: 26 | 27 | .. code-block:: console 28 | 29 | conda install --channel conda-forge sunraster 30 | 31 | Another equally easy way to install ``sunraster`` is with pip: 32 | 33 | .. code-block:: console 34 | 35 | pip install sunraster 36 | 37 | Developing 38 | ========== 39 | 40 | If you want to develop ``sunraster`` you will need to install from GitHub. 41 | We suggest you fork ``sunraster`` so you can work on it. 42 | The best way to do this is to create a new python virtual environment (conda/pipenv or others) and then install the git version of ``sunraster``: 43 | 44 | .. code:: bash 45 | 46 | $ git clone https://github.com//sunraster.git 47 | $ cd sunraster 48 | $ pip install -e .\[dev\] 49 | 50 | 51 | For detailed installation instructions (aimed at installing ``sunpy``), see the `Newcomers' guide`_ in the sunpy docs. 52 | 53 | Getting help 54 | ============ 55 | 56 | For more information or to ask questions about ``sunraster``, check out: 57 | 58 | - `sunraster Documentation`_ 59 | - `sunpy Matrix Channel`_ 60 | - `sunpy Mailing List`_ 61 | 62 | .. _sunraster Documentation: https://docs.sunpy.org/projects/sunraster/en/latest/ 63 | .. _sunpy Matrix Channel: https://chat.openastronomy.org/#/room/#sunpy:openastronomy.org 64 | .. _sunpy Mailing List: https://groups.google.com/forum/#!forum/sunpy 65 | 66 | 67 | License 68 | ======= 69 | 70 | This project is Copyright (c) The SunPy Community and licensed under 71 | the terms of the BSD 2-Clause license. This package is based upon 72 | the `Openastronomy packaging guide `_ 73 | which is licensed under the BSD 3-clause licence. See the licenses folder for 74 | more information. 75 | 76 | Usage of Generative AI 77 | ====================== 78 | 79 | We expect authentic engagement in our community. 
80 | Be wary of posting output from Large Language Models or similar generative AI as comments on GitHub or any other platform, as such comments tend to be formulaic and low quality content. 81 | If you use generative AI tools as an aid in developing code or documentation changes, ensure that you fully understand the proposed changes and can explain why they are the correct approach and an improvement to the current state. 82 | 83 | Contributing 84 | ============ 85 | 86 | We love contributions! sunraster is open source, 87 | built on open source, and we'd love to have you hang out in our community. 88 | 89 | **Imposter syndrome disclaimer**: We want your help. No, really. 90 | 91 | There may be a little voice inside your head that is telling you that you're not 92 | ready to be an open source contributor; that your skills aren't nearly good 93 | enough to contribute. What could you possibly offer a project like this one? 94 | 95 | We assure you - the little voice in your head is wrong. If you can write code at 96 | all, you can contribute code to open source. Contributing to open source 97 | projects is a fantastic way to advance one's coding skills. Writing perfect code 98 | isn't the measure of a good developer (that would disqualify all of us!); it's 99 | trying to create something, making mistakes, and learning from those 100 | mistakes. That's how we all improve, and we are happy to help others learn. 101 | 102 | Being an open source contributor doesn't just mean writing code, either. You can 103 | help out by writing documentation, tests, or even giving feedback about the 104 | project (and yes - that includes giving feedback about the contribution 105 | process). Some of these contributions may be the most valuable to the project as 106 | a whole, because you're coming to the project with fresh eyes, so you can see 107 | the errors and assumptions that seasoned contributors have glossed over. 
108 | 109 | For more information on contributing to sunraster, please read SunPy's `Newcomers' guide`_. 110 | 111 | .. _SunPy mailing list: https://groups.google.com/forum/#!forum/sunpy 112 | .. _Developers Guide: https://docs.sunpy.org/en/latest/dev_guide/index.html 113 | .. _`#sunpy:openastronomy.org`: https://chat.openastronomy.org/#/room/#sunpy:openastronomy.org 114 | .. _issues page: https://github.com/sunpy/sunraster/issues 115 | .. _Newcomers' guide: https://docs.sunpy.org/en/latest/dev_guide/contents/newcomers.html 116 | 117 | 118 | Note: This disclaimer was originally written by 119 | `Adrienne Lowe `_ for a 120 | `PyCon talk `_, and was adapted by 121 | sunraster based on its use in the README file for the 122 | `MetPy project `_. 123 | -------------------------------------------------------------------------------- /changelog/README.rst: -------------------------------------------------------------------------------- 1 | ========= 2 | Changelog 3 | ========= 4 | 5 | .. note:: 6 | 7 | This README was adapted from the pytest changelog readme under the terms of the MIT licence. 8 | 9 | This directory contains "news fragments" which are short files that contain a small **ReST**-formatted text that will be added to the next ``CHANGELOG``. 10 | 11 | The ``CHANGELOG`` will be read by users, so this description should be aimed at sunraster users instead of describing internal changes which are only relevant to the developers. 12 | 13 | Make sure to use full sentences with correct case and punctuation, for example:: 14 | 15 | Add support for Helioprojective coordinates in `sunpy.coordinates.frames`. 16 | 17 | Please try to use Sphinx intersphinx using backticks. 18 | 19 | Each file should be named like ``.[.].rst``, where ```` is a pull request number, ``COUNTER`` is an optional number if a PR needs multiple entries with the same type and ```` is one of: 20 | 21 | * ``breaking``: A change which requires users to change code and is not backwards compatible. 
(Not to be used for removal of deprecated features.) 22 | * ``feature``: New user facing features and any new behavior. 23 | * ``bugfix``: Fixes a reported bug. 24 | * ``doc``: Documentation addition or improvement, like rewording an entire section or adding missing docs. 25 | * ``removal``: Feature deprecation and/or feature removal. 26 | * ``trivial``: A change which has no user facing effect or is a tiny change. 27 | 28 | So for example: ``123.feature.rst``, ``456.bugfix.rst``. 29 | 30 | If you are unsure what pull request type to use, don't hesitate to ask in your PR. 31 | 32 | Note that the ``towncrier`` tool will automatically reflow your text, so it will work best if you stick to a single paragraph, but multiple sentences and links are OK and encouraged. 33 | You can install ``towncrier`` and then run ``towncrier --draft`` if you want to get a preview of how your change will look in the final release notes. 34 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/api.rst: -------------------------------------------------------------------------------- 1 | .. _api: 2 | 3 | API Reference 4 | ============= 5 | 6 | .. automodapi:: sunraster 7 | 8 | .. automodapi:: sunraster.spectrogram 9 | 10 | .. automodapi:: sunraster.meta 11 | 12 | .. automodapi:: sunraster.instr.spice 13 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file does only contain a selection of the most common options. For a 4 | # full list see the documentation: 5 | # http://www.sphinx-doc.org/en/master/config 6 | 7 | import datetime 8 | from pathlib import Path 9 | from packaging.version import Version 10 | 11 | # -- Project information ----------------------------------------------------- 12 | 13 | # The full version, including alpha/beta/rc tags 14 | from sunraster import __version__ 15 | 16 | _version = Version(__version__) 17 | version = release = str(_version) 18 | # Avoid "post" appearing in version string in rendered docs 19 | if _version.is_postrelease: 20 | version = release = _version.base_version 21 | # Avoid long githashes in rendered Sphinx docs 22 | elif _version.is_devrelease: 23 | version = release = f"{_version.base_version}.dev{_version.dev}" 24 | is_development = _version.is_devrelease 25 | is_release = not (_version.is_prerelease or _version.is_devrelease) 26 | 27 | project = "sunraster" 28 | author = "The SunPy Community" 29 | copyright = f"{datetime.datetime.now().year}, {author}" # noqa: A001 30 | 31 | 32 | # -- General configuration --------------------------------------------------- 33 | 34 | # Wrap large function/method signatures 35 | 
maximum_signature_line_length = 80 36 | 37 | # Add any Sphinx extension module names here, as strings. They can be 38 | # extensions coming with Sphinx (named "sphinx.ext.*") or your custom 39 | # ones. 40 | extensions = [ 41 | "sphinx.ext.autodoc", 42 | "sphinx.ext.intersphinx", 43 | "sphinx.ext.todo", 44 | "sphinx.ext.coverage", 45 | "sphinx.ext.inheritance_diagram", 46 | "sphinx.ext.viewcode", 47 | "sphinx.ext.napoleon", 48 | "sphinx.ext.doctest", 49 | "sphinx.ext.mathjax", 50 | "sphinx_automodapi.automodapi", 51 | "sphinx_automodapi.smart_resolver", 52 | "sphinx_changelog", 53 | ] 54 | 55 | # Set automodapi to generate files inside the generated directory 56 | automodapi_toctreedirnm = "generated/api" 57 | 58 | # Add any paths that contain templates here, relative to this directory. 59 | # templates_path = ["_templates"] 60 | 61 | # List of patterns, relative to source directory, that match files and 62 | # directories to ignore when looking for source files. 63 | # This pattern also affects html_static_path and html_extra_path. 64 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 65 | 66 | # The suffix(es) of source filenames. 67 | source_suffix = {".rst": "restructuredtext"} 68 | 69 | # The master toctree document. 70 | master_doc = "index" 71 | 72 | # Treat everything in single ` as a Python reference. 73 | default_role = "py:obj" 74 | 75 | # Enable and configure nitpicky mode 76 | nitpicky = True 77 | # This is not used. See docs/nitpick-exceptions file for the actual listing. 
78 | nitpick_ignore = [] 79 | with Path("nitpick-exceptions").open() as nitpick_exceptions: 80 | for line in nitpick_exceptions: 81 | if line.strip() == "" or line.startswith("#"): 82 | continue 83 | dtype, target = line.split(None, 1) 84 | target = target.strip() 85 | nitpick_ignore.append((dtype, target)) 86 | 87 | # -- Options for intersphinx extension --------------------------------------- 88 | 89 | # Example configuration for intersphinx: refer to the Python standard library. 90 | intersphinx_mapping = { 91 | "python": ("https://docs.python.org/3/", None), 92 | "numpy": ("https://docs.scipy.org/doc/numpy/", None), 93 | "scipy": ("https://docs.scipy.org/doc/scipy/reference/", None), 94 | "matplotlib": ("https://matplotlib.org/", None), 95 | "astropy": ("http://docs.astropy.org/en/latest/", None), 96 | "sunpy": ("https://docs.sunpy.org/en/latest/", None), 97 | "ndcube": ("https://docs.sunpy.org/projects/ndcube/en/latest/", None), 98 | } 99 | 100 | # -- Options for HTML output ------------------------------------------------- 101 | 102 | # The theme to use for HTML and HTML Help pages. See the documentation for 103 | # a list of builtin themes. 104 | html_theme = "sunpy" 105 | 106 | # Render inheritance diagrams in SVG 107 | graphviz_output_format = "svg" 108 | 109 | graphviz_dot_args = [ 110 | "-Nfontsize=10", 111 | "-Nfontname=Helvetica Neue, Helvetica, Arial, sans-serif", 112 | "-Efontsize=10", 113 | "-Efontname=Helvetica Neue, Helvetica, Arial, sans-serif", 114 | "-Gfontsize=10", 115 | "-Gfontname=Helvetica Neue, Helvetica, Arial, sans-serif", 116 | ] 117 | 118 | # Add any paths that contain custom static files (such as style sheets) here, 119 | # relative to this directory. They are copied after the builtin static files, 120 | # so a file named "default.css" will overwrite the builtin "default.css". 
121 | # html_static_path = ["_static"] 122 | 123 | # By default, when rendering docstrings for classes, sphinx.ext.autodoc will 124 | # make docs with the class-level docstring and the class-method docstrings, 125 | # but not the __init__ docstring, which often contains the parameters to 126 | # class constructors across the scientific Python ecosystem. The option below 127 | # will append the __init__ docstring to the class-level docstring when rendering 128 | # the docs. For more options, see: 129 | # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autoclass_content 130 | autoclass_content = "both" 131 | 132 | # -- Other options ---------------------------------------------------------- 133 | -------------------------------------------------------------------------------- /docs/data_types/index.rst: -------------------------------------------------------------------------------- 1 | .. _data_classes: 2 | 3 | ============ 4 | Data Classes 5 | ============ 6 | 7 | Contents 8 | ======== 9 | 10 | .. toctree:: 11 | 12 | spectrogram 13 | raster 14 | -------------------------------------------------------------------------------- /docs/data_types/raster.rst: -------------------------------------------------------------------------------- 1 | .. _raster_sequence: 2 | 3 | RasterSequence 4 | -------------- 5 | 6 | Slit spectrographs are often used to produce rasters. 7 | In fact, it is from this data product that ``sunraster`` derives its name. 8 | 9 | A raster is produced by scanning the slit in discrete steps perpendicular to its long axis, recording an exposure at each position. 10 | Thus a spectral image over a region is built up over time despite the slit spectrograph's necessarily narrow horizontal field of view. 11 | Another motivation can be to perform fast repeat raster scans in order to improve the chances of catching an event with the slit, e.g., a solar flare. 12 | In a raster, the slit-step axis is convolved with time. 
13 | 14 | Depending on the type of analysis being performed, users may want to think of their data as if it were in raster mode/4D (``scan number``, ``slit step``, ``position along slit``, ``wavelength``) or sit-and-stare mode/3D (``time``, ``position along slit``, ``spectral``). 15 | 16 | 17 | In order to access the data in the way they want, scientists may often have two copies, a 3D version and a 4D version. 18 | However, this means scientists have to keep track of two data structures which is memory intensive both for the scientist and the computer and increases the chances of mistakes in analysis. 19 | 20 | Solving this problem is the purpose of the `~sunraster.RasterSequence` class. 21 | It inherits from `~sunraster.SpectrogramSequence` but enables users to label one of the axes as the slit-step axis. 22 | This in turn facilitates a new set of APIs which allows users to interact with their data in sit-and-stare (sns) or rastering mode seamlessly and interchangeably without having to reformat their data. 23 | 24 | Initialization 25 | ^^^^^^^^^^^^^^ 26 | 27 | A `~sunraster.RasterSequence` is instantiated just like a `~sunraster.SpectrogramCube`. 28 | Let's first create some `~sunraster.SpectrogramCube` instances where each represents a single raster scan. 29 | As before, we will add the timestamps and exposure times as extra coordinates. 30 | 31 | .. code-block:: python 32 | 33 | >>> import numpy as np 34 | >>> import astropy.wcs 35 | >>> import astropy.units as u 36 | >>> from astropy.nddata import StdDevUncertainty 37 | >>> from datetime import datetime, timedelta 38 | >>> from astropy.time import Time 39 | >>> from sunraster import SpectrogramCube 40 | >>> from ndcube.meta import NDMeta 41 | 42 | >>> # Define primary data array and WCS object. 43 | >>> data = np.ones((3, 4, 5)) 44 | >>> wcs_input_dict = { 45 | ... 'CTYPE1': 'WAVE ', 'CUNIT1': 'Angstrom', 'CDELT1': 0.2, 'CRPIX1': 0, 'CRVAL1': 10, 'NAXIS1': 5, 46 | ... 
'CTYPE2': 'HPLT-TAN', 'CUNIT2': 'deg', 'CDELT2': 0.5, 'CRPIX2': 2, 'CRVAL2': 0.5, 'NAXIS2': 4, 47 | ... 'CTYPE3': 'HPLN-TAN', 'CUNIT3': 'deg', 'CDELT3': 0.4, 'CRPIX3': 2, 'CRVAL3': 1, 'NAXIS3': 3} 48 | >>> input_wcs = astropy.wcs.WCS(wcs_input_dict) 49 | >>> # Define a mask with all pixel unmasked, i.e. mask values = False 50 | >>> mask = np.zeros(data.shape, dtype=bool) 51 | >>> # Define some RasterSequence metadata. 52 | >>> exposure_times = np.ones(data.shape[0])/2 * u.s 53 | >>> scan_meta = NDMeta({"exposure time": exposure_times}, axes={"exposure time": 0}, 54 | ... data_shape=data.shape) 55 | >>> seq_meta = {"description": "This is a RasterSequence.", "exposure time" : exposure_times} 56 | 57 | >>> # Define uncertainties for data, 2*data and data/2. 58 | >>> uncertainties = StdDevUncertainty(np.sqrt(data)) 59 | >>> uncertainties2 = StdDevUncertainty(np.sqrt(data * 2)) 60 | >>> uncertainties05 = StdDevUncertainty(np.sqrt(data * 0.5)) 61 | 62 | >>> # Create 1st raster 63 | >>> axis_length = int(data.shape[0]) 64 | >>> timestamps0 = Time([datetime(2000, 1, 1) + timedelta(minutes=i) 65 | ... for i in range(axis_length)], format='datetime', scale='utc') 66 | >>> extra_coords_input0 = [("time", 0, timestamps0)] 67 | >>> raster0 = SpectrogramCube(data, input_wcs, uncertainty=uncertainties, mask=mask, 68 | ... meta=scan_meta, unit=u.ct) 69 | >>> for extra in extra_coords_input0: 70 | ... raster0.extra_coords.add(*extra) 71 | >>> # Create 2nd raster 72 | >>> timestamps1 = Time([timestamps0[-1].to_datetime() + timedelta(minutes=i) 73 | ... for i in range(1, axis_length+1)], format='datetime', scale='utc') 74 | >>> extra_coords_input1 = [("time", 0, timestamps1)] 75 | >>> raster1 = SpectrogramCube(data*2, input_wcs, uncertainty=uncertainties, mask=mask, 76 | ... meta=scan_meta, unit=u.ct) 77 | >>> for extra in extra_coords_input1: 78 | ... 
raster1.extra_coords.add(*extra) 79 | >>> # Create 3rd raster 80 | >>> timestamps2 = Time([timestamps1[-1].to_datetime() + timedelta(minutes=i) 81 | ... for i in range(1, axis_length+1)], format='datetime', scale='utc') 82 | >>> extra_coords_input2 = [("time", 0, timestamps2)] 83 | >>> raster2 = SpectrogramCube(data*0.5, input_wcs, uncertainty=uncertainties, mask=mask, 84 | ... meta=scan_meta, unit=u.ct) 85 | >>> for extra in extra_coords_input2: 86 | ... raster2.extra_coords.add(*extra) 87 | 88 | The last thing we need to do before creating our `~sunraster.RasterSequence` is to identify the slit-step of the `~sunraster.SpectrogramCube`. 89 | In the above ``raster`` instances both the 0th and 1st axes correspond to spatial dimensions. 90 | Therefore let's define the 0th axis as the slit-step. 91 | We will do this by setting the ``common_axis`` argument to 0. 92 | 93 | .. code-block:: python 94 | 95 | >>> from sunraster import RasterSequence 96 | >>> my_rasters = RasterSequence([raster0, raster1, raster2], common_axis=0, meta=seq_meta) 97 | 98 | Dimensions 99 | ^^^^^^^^^^ 100 | 101 | `~sunraster.RasterSequence` provides a version of the `~sunraster.SpectrogramSequence.array_axis_physical_axis_types` property for both raster and sns representations. 102 | 103 | .. code-block:: python 104 | 105 | >>> my_rasters.raster_array_axis_physical_types 106 | [('meta.obs.sequence',), ('custom:pos.helioprojective.lat', 'custom:pos.helioprojective.lon', 'time'), ('custom:pos.helioprojective.lat', 'custom:pos.helioprojective.lon'), ('em.wl',)] 107 | 108 | >>> my_rasters.sns_array_axis_physical_types 109 | [('custom:pos.helioprojective.lat', 'custom:pos.helioprojective.lon', 'time'), ('custom:pos.helioprojective.lat', 'custom:pos.helioprojective.lon'), ('em.wl',)] 110 | 111 | In the raster case, ``'meta.obs.sequence'`` represents the raster scan number axis. 
112 | For those familiar with `~ndcube.NDCubeSequence`, these are simply aliases for the `~ndcube.NDCubeSequence.array_axis_physical_axis_types` and `~ndcube.NDCubeSequence.cube_like_world_axis_physical_axis_types`, respectively. 113 | 114 | The length of each axis can also be displayed in either the raster or sns representation. 115 | 116 | .. code-block:: python 117 | 118 | >>> my_rasters.raster_dimensions 119 | (3, 3, 4, 5) 120 | 121 | `~sunraster.RasterSequence.raster_dimensions` always represents the length of the scan number axis in the 0th position. 122 | We can therefore see that we have 3 raster scans in our `~sunraster.RasterSequence`. 123 | This means that the slit-step axis is shifted by one. 124 | Since we defined ``common_axis=0`` during instantiation, this means that the length of the slit-step can be found in the 1st element. 125 | From this we can see that we have 3 slit positions per raster scan. 126 | 127 | To see the length of the axes as though the data is in sit-and-stare mode, simply do: 128 | 129 | .. code-block:: python 130 | 131 | >>> my_rasters.sns_dimensions 132 | [9, 4, 5] 133 | 134 | Note that scan number and slit-step axes have been combined into the 0th position. 135 | From this we can see that we have 9 (3x3) spectrograms or times in our `~sunraster.RasterSequence`. 136 | 137 | Coordinates 138 | ^^^^^^^^^^^ 139 | 140 | Coordinate properties 141 | ********************* 142 | 143 | `~sunraster.RasterSequence` provides the same convenience properties as `~sunraster.SpectrogramSequence` to retrieve the real world coordinate values for each pixel along each axis. 144 | `sunraster.RasterSequence.celestial`, and `sunraster.RasterSequence.spectral` return their values in the raster representation while `sunraster.RasterSequence.time` and `sunraster.RasterSequence.exposure_time` return their values in the sns representation. 
145 | 146 | sns axis extra coordinates 147 | ************************** 148 | 149 | As well as `~sunraster.RasterSequence.time` and `~sunraster.RasterSequence.exposure_time`, some `sunraster.SpectrogramCube.extra_coords` may contain other coordinates that are aligned with the slit step axis. 150 | The `sunraster.RasterSequence.sns_axis_coords` property enables users to access these coordinates at the `~sunraster.RasterSequence` level in the form of an abbreviated ``extra_coords`` dictionary. 151 | Just like `~sunraster.RasterSequence.time` and `sunraster.RasterSequence.exposure_time`, the coordinates are concatenated so they mimic the sit-and-stare-like dimensionality returned in the 0th element of `sunraster.RasterSequence.sns_dimensions`. 152 | `sunraster.RasterSequence.sns_axis_coords` is equivalent to `ndcube.NDCubeSequence.common_axis_extra_coords`. 153 | To see examples of how to use this property, see the `NDCubeSequence Common Axis Extra Coordinates documentation `__. 154 | 155 | Raster axis extra coordinates 156 | ***************************** 157 | 158 | Analogous to `~sunraster.RasterSequence.sns_axis_coords`, it is also possible to access the coordinates that are not assigned to any `~sunraster.SpectrogramCube` data axis via the `~sunraster.RasterSequence.raster_axis_coords` property. 159 | This property is equivalent to `ndcube.NDCubeSequence.sequence_axis_coords` and can be used to return coordinates along the repeat raster scan axis. 160 | 161 | Slicing 162 | ^^^^^^^ 163 | 164 | `~sunraster.RasterSequence` not only enables users to inspect their data in the raster and sit-and-stare representations. 165 | It also enables them to slice the data in either representation as well. 166 | This is done via the `~sunraster.RasterSequence.slice_as_raster` and `~sunraster.RasterSequence.slice_as_sns` properties. 
167 | As with `~sunraster.SpectrogramCube` and `~sunraster.SpectrogramSequence`, these slicing properties ensure that not only the data is sliced, but also all relevant supporting metadata including uncertainties, mask, WCS object, extra_coords, etc. 168 | 169 | To slice a `~sunraster.RasterSequence` using the raster representation, do: 170 | 171 | .. code-block:: python 172 | 173 | >>> my_rasters_roi = my_rasters.slice_as_raster[1:3, 0:2, 1:3, 1:4] 174 | 175 | We can see the result of slicing using the ``dimensions`` properties. 176 | 177 | .. code-block:: python 178 | 179 | >>> print(my_rasters.raster_dimensions) # Check dimensionality before slicing. 180 | (3, 3, 4, 5) 181 | >>> print(my_rasters_roi.raster_dimensions) # See how slicing has changed dimensionality. 182 | (2, 2, 2, 3) 183 | >>> my_rasters_roi.sns_dimensions # Dimensionality can still be represented in sns form. 184 | [4, 2, 3] 185 | 186 | To slice in the sit-and-stare representation, do the following: 187 | 188 | .. code-block:: python 189 | 190 | >>> my_rasters_roi = my_rasters.slice_as_sns[1:7, 1:3, 1:4] 191 | 192 | Let's check the effect of the slicing once again. 193 | 194 | .. code-block:: python 195 | 196 | >>> print(my_rasters.sns_dimensions) # Check dimensionality before slicing. 197 | [9, 4, 5] 198 | >>> print(my_rasters_roi.sns_dimensions) # See how slicing has changed dimensionality. 199 | [6, 2, 3] 200 | >>> print(my_rasters_roi.raster_dimensions) # Dimensionality can still be represented in raster form. 201 | (3, (2, 3, 1), 2, 3) 202 | 203 | Notice that after slicing the data can still be inspected and interpreted in the raster or sit-and-stare format, irrespective of which slicing representation was used. 204 | Also notice that the ``my_sequence.slice_as_sns[1:7, 1:3, 1:4]`` command led to different `~sunraster.SpectrogramCube` objects to have different lengths along the slit step axis. 
205 | This can be seen from the fact that the slit step axis entry in the output of ``my_rasters_roi.raster_dimensions`` has a length greater than 1. 206 | Each element represents the length of each `~sunraster.SpectrogramCube` in the `~sunraster.SpectrogramSequence` along that axis. 207 | 208 | As with `~sunraster.SpectrogramSequence`, slicing can reduce a `~sunraster.RasterSequence` dimensionality. 209 | As in the :ref:`sequence_slicing` section, let's slice out the 2nd pixel along the slit. 210 | This reduces the number of dimensions in the raster representation to 3 (``raster scan``, ``slit step``, ``spectral``) and to 2 in the sit-and-stare representation (``time``, ``spectral``). 211 | However, the raster and sit-and-stare representations are still valid. 212 | 213 | .. code-block:: python 214 | 215 | >>> slit_pixel_rasters = my_rasters.slice_as_raster[:, :, 2] 216 | >>> print(slit_pixel_rasters.raster_dimensions) 217 | (3, 3, 5) 218 | >>> print(slit_pixel_rasters.sns_dimensions) 219 | [9, 5] 220 | 221 | This demonstrates that the difference between the raster and sit-and-stare representations is more subtle than simply a 4-D or 3-D dimensionality. 222 | The difference is whether the raster scan and slit step axes are convolved into a time axis or whether they are represented separately. 223 | And because of this definition, the raster and sit-and-stare representations are valid and accessible for any dimensionality in which the raster scan and slit step axes are maintained. 224 | 225 | Plotting 226 | ^^^^^^^^ 227 | 228 | To quickly and easily visualize slit spectrograph data, `~sunraster.RasterSequence` supplies simple-to-use, yet powerful plotting APIs. 229 | They are intended to be a useful quicklook tool and not a replacement for high quality plots or animations, e.g. for publications. 230 | As with slicing, there are two plot methods for plotting in each of the raster and sit-and-stare representations. 
231 | 232 | To visualize in the raster representation, simply call the following: 233 | 234 | .. code-block:: python 235 | 236 | >>> my_rasters.plot_as_raster() # doctest: +SKIP 237 | 238 | To visualize in the sit-and-stare representation, do: 239 | 240 | .. code-block:: python 241 | 242 | >>> my_rasters.plot_as_sns() # doctest: +SKIP 243 | 244 | These methods produce different types of visualizations including line plots, 2-D images and 1- and 2-D animations. 245 | Which is displayed depends on the dimensionality of the `~sunraster.RasterSequence` and the inputs of the user. 246 | `~sunraster.RasterSequence.plot_as_raster` and `~sunraster.RasterSequence.plot_as_sns` are in fact simply aliases for the ``ndcube.NDCubeSequence.plot`` and ``ndcube.NDCubeSequence.plot_as_cube`` methods, respectively. 247 | To learn more about how these routines work and the optional inputs that enable users to customize their output, see the `NDCubeSequence plotting documentation `__. 248 | 249 | Extracting Data Arrays 250 | ^^^^^^^^^^^^^^^^^^^^^^ 251 | 252 | It is possible that you may have some procedures that are designed to operate on arrays instead of `~sunraster.SpectrogramSequence` or `~sunraster.RasterSequence` objects. 253 | Therefore it may be useful to extract the data (or other array-like information such as ``uncertainty`` or ``mask``) into a single `~numpy.ndarray`. 254 | A succinct way of doing this operation is using Python's list comprehension. 255 | 256 | To make a 4-D array from the data arrays in ``my_rasters``, use `numpy.stack`. 257 | 258 | .. code-block:: python 259 | 260 | >>> print(my_rasters.shape) # Print sequence dimensions as a reminder. 261 | (3, 3, 4, 5) 262 | >>> data = np.stack([cube.data for cube in my_rasters.data]) 263 | >>> print(data.shape) 264 | (3, 3, 4, 5) 265 | 266 | To define a 3D array where the data arrays of each `~sunraster.SpectrogramCube` 267 | in the sequence is concatenated along an axis, use `numpy.vstack`. 268 | 269 | .. 
code-block:: python 270 | 271 | >>> data = np.vstack([cube.data for cube in my_rasters.data]) 272 | >>> print(data.shape) 273 | (9, 4, 5) 274 | 275 | To create 3D arrays by slicing sequences, do: 276 | 277 | .. code-block:: python 278 | 279 | >>> data = np.stack([cube[2].data for cube in my_rasters.data]) 280 | >>> print(data.shape) 281 | (3, 4, 5) 282 | -------------------------------------------------------------------------------- /docs/data_types/spectrogram.rst: -------------------------------------------------------------------------------- 1 | 2 | .. _spectrogramcube: 3 | 4 | SpectrogramCube 5 | --------------- 6 | 7 | The fundamental data class of the ``sunraster`` package is `~sunraster.SpectrogramCube`. 8 | It is designed to handle data representing one or more spectrograms of solar regions. 9 | `~sunraster.SpectrogramCube` stores its data as an array whose transformations between pixel and real world coordinates are described by a single ``astropy`` WCS (World Coordinate System) object. 10 | (For data that is described by multiple WCS objects, see the :ref:`sequence` and :ref:`raster_sequence` sections.) 11 | 12 | `~sunraster.SpectrogramCube` is subclassed from `ndcube.NDCube` and so inherits the same attributes and methods. 13 | It also inherits much of the same slicing, coordinate transformation and visualization API and provides some additional convenience properties relevant to spectrogram data. 14 | 15 | Initialization 16 | ^^^^^^^^^^^^^^ 17 | 18 | To initialize a basic `~sunraster.SpectrogramCube` object, all you need is an array containing the data and an `astropy.wcs.WCS` object describing the transformation from array-element (or pixel) space to real world coordinates. 19 | 20 | Let's create a 3D `numpy.ndarray` representing a series of spectrograms; it will have a shape of (3, 4, 5) and let every value be 1. 21 | The first axis will represent time (and/or space if the spectrogram slit is rastering across a solar region). 
22 | Let the second represent the position along a dispersing slit, and the third represent the spectral axis. 23 | Although a WCS object can often be easily created by feeding a FITS header into the `astropy.wcs.WCS` class, we will create one manually here to be explicit. 24 | Note that due to (confusing) convention, the order of the axes in the WCS object is reversed relative to the data array. 25 | 26 | .. code-block:: python 27 | 28 | >>> import numpy as np 29 | >>> data = np.ones((3, 4, 5)) 30 | >>> import astropy.wcs 31 | >>> wcs_input_dict = { 32 | ... 'CTYPE1': 'WAVE ', 'CUNIT1': 'Angstrom', 'CDELT1': 0.2, 'CRPIX1': 0, 'CRVAL1': 10, 'NAXIS1': 5, 33 | ... 'CTYPE2': 'HPLT-TAN', 'CUNIT2': 'deg', 'CDELT2': 0.5, 'CRPIX2': 2, 'CRVAL2': 0.5, 'NAXIS2': 4, 34 | ... 'CTYPE3': 'HPLN-TAN', 'CUNIT3': 'deg', 'CDELT3': 0.4, 'CRPIX3': 2, 'CRVAL3': 1, 'NAXIS3': 3} 35 | >>> input_wcs = astropy.wcs.WCS(wcs_input_dict) 36 | 37 | We have defined the first axis to be spatial (helioprojective longitude and latitude) which implies that this series of spectrograms represents a raster scan across a solar region. 38 | The second axis (position along slit) also has coordinates of helioprojective longitude and latitude. 39 | Although we often think of the x-dimension as longitude and the y-dimension as latitude, latitude and longitude are in fact coupled dimensions. 40 | This means that -- except in a small number of edge cases -- moving along the slit in y-direction will cause both the latitude AND longitude to change, even if only slightly. 41 | This is important to understand when interacting with the WCS object, and hence the `~sunraster.SpectrogramCube` class. 42 | The 3rd axis (spectral) has coordinates of wavelength. 43 | 44 | Now that we have a data array and a corresponding WCS object, we can create a `~sunraster.SpectrogramCube` instance simply by doing: 45 | 46 | .. 
code-block:: python 47 | 48 | >>> from sunraster import SpectrogramCube 49 | >>> my_spectrograms = SpectrogramCube(data, input_wcs) 50 | 51 | The data array is stored in the ``my_spectrograms.data`` attribute while the WCS object is stored in the ``my_spectrograms.wcs`` attribute. 52 | However, when manipulating/slicing the data it is better to slice the object as a whole as all relevant data and metadata is sliced simultaneously. 53 | See section on :ref:`spectrogram_slicing`. 54 | 55 | Thanks to the fact that `~sunraster.SpectrogramCube` is subclassed from `~ndcube.NDCube`, you can also supply additional data to the instance. 56 | These include: metadata (`dict` or dict-like) located in `sunraster.SpectrogramCube.meta`; a data mask (boolean `numpy.ndarray`) located in ``sunraster.SpectrogramCube.mask`` for marking reliable and unreliable pixels; a unit (``astropy.units.Unit`` or unit `str`) located at ``sunraster.SpectrogramCube.unit``; and an uncertainty array (`numpy.ndarray`) located in `~sunraster.SpectrogramCube.uncertainty` describing the uncertainty of each data array value. 57 | It is advised that you use one of astropy's uncertainty classes to describe your uncertainty. 58 | However, this is not required by `~sunraster.SpectrogramCube`. 59 | A simple array will still work but will cause a warning to be raised. 60 | Here is an example of how to instantiate these attributes. 61 | 62 | .. code-block:: python 63 | 64 | >>> import astropy.units as u 65 | >>> from astropy.nddata import StdDevUncertainty 66 | >>> 67 | >>> uncertainties = StdDevUncertainty(np.sqrt(data)) 68 | >>> # Create a mask where all pixels are unmasked, i.e. all mask values are False. 
69 | >>> mask = np.zeros_like(data, dtype=bool) 70 | >>> my_spectrograms = SpectrogramCube(data, input_wcs, uncertainty=uncertainties, mask=mask) 71 | 72 | Coordinates 73 | ^^^^^^^^^^^ 74 | 75 | WCS Coordinates 76 | *************** 77 | 78 | The primary location for coordinate information in a `~sunraster.SpectrogramCube` instance is its WCS. 79 | The coordinate values for each axis and pixel can be accessed via the `~sunraster.SpectrogramCube.axis_world_coords`, `~sunraster.SpectrogramCube.pixel_to_world` and `~sunraster.SpectrogramCube.world_to_pixel` methods inherited from `ndcube.NDCube`. 80 | To learn how to use these coordinate transformation methods, see the `NDCube coordinate transformations documentation `__. 81 | 82 | Extra Coordinates 83 | ***************** 84 | 85 | `~sunraster.SpectrogramCube` can also store array-based real world coordinates that aren't described by the WCS object. 86 | These can be accessed via the ``sunraster.SpectrogramCube.extra_coords`` property, also inherited from `~ndcube.NDCube`. 87 | `~sunraster.SpectrogramCube.extra_coords` is particularly useful if the temporal axis is convolved with space, as is the case for raster scans. 88 | Therefore, if the WCS object only supplies (lat, lon) for the x-axis, the timestamp of each exposure can be attached separately, e.g. as an ``astropy.time.Time`` object. `~sunraster.SpectrogramCube.extra_coords` is not restricted to timestamps. 89 | To learn how to attach extra coordinates to a `~sunraster.SpectrogramCube` instance and how to access them once attached, see the `NDCube extra coordinates documentation `__. 90 | 91 | Coordinate Properties 92 | ********************* 93 | 94 | For convenience, `~sunraster.SpectrogramCube` provides shortcuts to the three primary coordinate types that define spectrogram data. 95 | These are `sunraster.SpectrogramCube.celestial`, `sunraster.SpectrogramCube.spectral`, and `sunraster.SpectrogramCube.time` which return the relevant coordinates of each pixel. 
96 | Note that `sunraster.SpectrogramCube.celestial` returns a `~astropy.coordinates.SkyCoord` object which contains the values of the two spatial dimensions, i.e. longitude and latitude. 97 | These properties inspect the WCS and extra coords objects and locate where and how the relevant coordinate information is stored. 98 | This is possible only if the coordinate name is supported by ``sunraster``. 99 | To see these supported names, see ``sunraster.SpectrogramCube.SUPPORTED_LONGITUDE_NAMES``, ``sunraster.spectrogram.SUPPORTED_LATITUDE_NAMES``, ``sunraster.spectrogram.SUPPORTED_SPECTRAL_NAMES``, and ``sunraster.spectrogram.SUPPORTED_TIME_NAMES``. 100 | If the coordinate name cannot be found, these properties will raise an error. 101 | If you think additional coordinate names should be supported, please let us know by `raising an issue on our GitHub repo. `__. 102 | 103 | In addition to the three primary coordinate types, there is also a convenience for the exposure time, ``sunraster.SpectrogramCube.exposure_time``. 104 | The supported exposure time coordinate names can be found under ``sunraster.spectrogram.SUPPORTED_EXPOSURE_NAMES``. 105 | 106 | Dimensions 107 | ^^^^^^^^^^ 108 | 109 | The `~sunraster.SpectrogramCube.dimensions` and `~sunraster.SpectrogramCube.array_axis_physical_types` methods enable users to inspect the shape and WCS axis types of the `~sunraster.SpectrogramCube` instance. 110 | 111 | .. 
code-block:: python 112 | 113 | >>> my_spectrograms.shape 114 | (3, 4, 5) 115 | >>> my_spectrograms.array_axis_physical_types 116 | [('custom:pos.helioprojective.lat', 'custom:pos.helioprojective.lon'), 117 | ('custom:pos.helioprojective.lat', 'custom:pos.helioprojective.lon'), 118 | ('em.wl',)] 119 | 120 | `~sunraster.SpectrogramCube.dimensions` returns a `~astropy.units.Quantity` giving the length of each dimension in pixel units while `~sunraster.SpectrogramCube.array_axis_physical_types` returns a list of tuples where each tuple contains the types of physical properties associated with each array axis. 121 | Since more than one physical type can be associated with an array axis because they are dependent, e.g. latitude/longitude, or because of the rastering nature of the instrument, e.g. latitude/longitude and time, the length of each tuple can be greater than one. 122 | The axis names are in accordance with the International Virtual Observatory Alliance (IVOA) `UCD1+ controlled vocabulary `__. 123 | 124 | .. _spectrogram_slicing: 125 | 126 | Slicing 127 | ^^^^^^^ 128 | 129 | `~sunraster.SpectrogramCube` inherits a powerful and simple slicing API from `~ndcube.NDCube`. 130 | It enables users to access sub-regions of their data while simultaneously slicing all relevant attributes including uncertainty, mask, wcs, extra_coords, etc. 131 | Slicing in pixel space is achieved via the standard Python slicing API while a separate API is provided for cropping a `~sunraster.SpectrogramCube` instance by real world coordinates. 132 | See the `NDCube slicing documentation `__ to learn more. 133 | 134 | .. _spectrogram_plotting: 135 | 136 | Plotting 137 | ^^^^^^^^ 138 | 139 | To quickly and easily visualize spectrograms, `~sunraster.SpectrogramCube` inherits a simple-to-use, yet powerful plotting method from `~ndcube.NDCube`. 140 | It is intended to be a useful quicklook tool and not a replacement for high quality plots or animations, e.g. for publications. 
141 | The plot method can be called very simply. 142 | 143 | .. code-block:: python 144 | 145 | >>> my_spectrograms.plot() # doctest: +SKIP 146 | 147 | This method produces different types of visualizations including line plots, 2-D images and 1- and 2-D animations. 148 | Which is displayed depends on the dimensionality of the `~sunraster.SpectrogramCube` and the inputs of the user. 149 | For learn more about how to customize plots and animations through the `~sunraster.SpectrogramCube.plot` method, see the `NDCubeSequence plotting documentation `__. 150 | 151 | .. _cube_exposure_time_correction: 152 | 153 | Exposure Time Correction 154 | ^^^^^^^^^^^^^^^^^^^^^^^^ 155 | 156 | An important step in analyzing any form of photon-based observations is normalizing the data to the exposure time. 157 | This is important both for converting between instrumental and physical units, e.g. DN to energy, and comparing spectral features between exposure, e.g. line intensity. 158 | 159 | `~sunraster.SpectrogramCube` provides a simple API for performing this correction: `~sunraster.SpectrogramCube.apply_exposure_time_correction`. 160 | It requires that the exposure time is stored in the ``.meta`` attribute of the `~sunraster.SpectrogramCube` as a `~astropy.units.Quantity`. 161 | The ``.meta`` attribute must be an instance of `ndcube.meta.NDMeta`. 162 | Let's recreate our spectrogram object again, but this time with exposure times of 0.5 seconds stored as an extra coordinate and a data unit of counts. 163 | 164 | .. code-block:: python 165 | 166 | >>> import astropy.units as u 167 | >>> from ndcube.meta import NDMeta 168 | >>> exposure_times = np.ones(data.shape[0])/2 * u.s 169 | >>> # Create a metadata instance to hold the exposure times. 170 | >>> # We must also assign the exposure time to the time axis, in this case, the 0th array axis. 171 | >>> metadata = NDMeta({"exposure time": exposure_times}, axes={"exposure time": 0}, 172 | ... 
data_shape=data.shape) 173 | >>> my_spectrograms = SpectrogramCube(data, input_wcs, uncertainty=uncertainties, 174 | ... mask=mask, meta=metadata, unit=u.ct) 175 | 176 | Note that the API for supplying metadata allows us to supply an additional `dict` designating which axes the metadata corresponds. 177 | We must also supply the shape of the data array with which the metadata is associated to enable it to be preserved through slicing operations. 178 | Also note that the metadata array must be the same shape as its corresponding data axes. 179 | 180 | Applying the exposure time correction is now simple. 181 | 182 | .. code-block:: python 183 | 184 | >>> # First check the data unit and average data value before applying correction. 185 | >>> print(my_spectrograms.unit, my_spectrograms.data.mean()) 186 | ct 1.0 187 | >>> my_spectrograms = my_spectrograms.apply_exposure_time_correction() # Apply exposure time correction. 188 | >>> # Confirm effect by checking data unit and average data value again. 189 | >>> print(my_spectrograms.unit, my_spectrograms.data.mean()) 190 | ct / s 2.0 191 | 192 | Notice that the average data value has been doubled and the data unit is now counts per second. 193 | This method alters not only the data, but also the uncertainty if any is supplied. 194 | `~sunraster.SpectrogramCube.apply_exposure_time_correction` does not apply the scaling blindly, but first checks whether there is a per second (1/s) component in the data unit. 195 | If there is, it assumes that the correction has already been performed and raises an error. 196 | This helps users more easily keep track of whether they have applied the correction. 197 | However, if for some reason there is a per second component that doesn't refer to the exposure time and the user still wants to apply the correction, they can set the ``force`` keyword argument to override the check. 198 | 199 | .. 
code-block:: python 200 | 201 | >>> print(my_spectrograms.unit, my_spectrograms.data.mean()) 202 | ct / s 2.0 203 | >>> my_spectrograms = my_spectrograms.apply_exposure_time_correction(force=True) 204 | >>> print(my_spectrograms.unit, my_spectrograms.data.mean()) 205 | ct / s2 4.0 206 | 207 | Should users like to undo the correction, they can set the ``undo`` keyword argument. 208 | 209 | .. code-block:: python 210 | 211 | >>> print(my_spectrograms.unit, my_spectrograms.data.mean()) 212 | ct / s2 4.0 213 | >>> my_spectrograms = my_spectrograms.apply_exposure_time_correction(undo=True, force=True) 214 | >>> my_spectrograms = my_spectrograms.apply_exposure_time_correction(undo=True) # Undo correction twice. 215 | >>> print(my_spectrograms.unit, my_spectrograms.data.mean()) 216 | ct 1.0 217 | 218 | As before, `~sunraster.SpectrogramCube.apply_exposure_time_correction` only undoes the correction if there is a time component in the unit. 219 | And again as before, users can override this check by setting the ``force`` keyword argument. 220 | 221 | .. code-block:: python 222 | 223 | >>> print(my_spectrograms.unit, my_spectrograms.data.mean()) 224 | ct 1.0 225 | >>> my_spectrograms = my_spectrograms.apply_exposure_time_correction(undo=True, force=True) 226 | >>> print(my_spectrograms.unit, my_spectrograms.data.mean()) 227 | ct s 0.5 228 | 229 | .. _sequence: 230 | 231 | SpectrogramSequence 232 | ------------------- 233 | 234 | In some cases, a series of spectrograms may not be describable by a single set of WCS transformations. 235 | However, it still may make sense to combine them in order along a dimension. 236 | This is the purpose of the `~sunraster.SpectrogramSequence` class. 237 | It stores a sequence of `~sunraster.SpectrogramCube` instances and provides equivalent or analogous APIs so users can interact with the data as if it were a single data cube. 238 | `~sunraster.SpectrogramSequence` inherits from `~ndcube.NDCubeSequence` and so inherits much of the same API. 
239 | 240 | Initialization 241 | ^^^^^^^^^^^^^^ 242 | 243 | To initialize a `~sunraster.SpectrogramSequence`, we first need spectrograms stored in multiple `~sunraster.SpectrogramCube` instances. 244 | Let's create some using what we learned in the :ref:`spectrogramcube` section and include timestamps and exposure times as extra coordinates. 245 | 246 | .. code-block:: python 247 | 248 | >>> from datetime import datetime, timedelta 249 | >>> import numpy as np 250 | >>> import astropy.wcs 251 | >>> import astropy.units as u 252 | >>> from astropy.nddata import StdDevUncertainty 253 | >>> from astropy.time import Time 254 | >>> from sunraster import SpectrogramCube 255 | 256 | >>> # Define primary data array and WCS object. 257 | >>> data = np.ones((3, 4, 5)) 258 | >>> wcs_input_dict = { 259 | ... 'CTYPE1': 'WAVE ', 'CUNIT1': 'Angstrom', 'CDELT1': 0.2, 'CRPIX1': 0, 'CRVAL1': 10, 'NAXIS1': 5, 260 | ... 'CTYPE2': 'HPLT-TAN', 'CUNIT2': 'deg', 'CDELT2': 0.5, 'CRPIX2': 2, 'CRVAL2': 0.5, 'NAXIS2': 4, 261 | ... 'CTYPE3': 'HPLN-TAN', 'CUNIT3': 'deg', 'CDELT3': 0.4, 'CRPIX3': 2, 'CRVAL3': 1, 'NAXIS3': 3} 262 | >>> input_wcs = astropy.wcs.WCS(wcs_input_dict) 263 | >>> # Define a mask with all pixels unmasked, i.e. mask values = False 264 | >>> mask = np.zeros(data.shape, dtype=bool) 265 | >>> # Define uncertainties for data, 2*data and data/2. 266 | >>> uncertainties = StdDevUncertainty(np.sqrt(data)) 267 | >>> uncertainties2 = StdDevUncertainty(np.sqrt(data * 2)) 268 | >>> uncertainties05 = StdDevUncertainty(np.sqrt(data * 0.5)) 269 | 270 | >>> # Define exposure times. 271 | >>> exposure_times = np.ones(data.shape[0])/2 * u.s 272 | >>> axis_length = int(data.shape[0]) 273 | >>> meta = NDMeta({"exposure time": exposure_times}, axes={"exposure time": 0}, 274 | ... data_shape=data.shape) 275 | 276 | >>> # Create 1st cube of spectrograms. 277 | >>> timestamps0 = Time([datetime(2000, 1, 1) + timedelta(minutes=i) 278 | ... 
for i in range(axis_length)], format='datetime', scale='utc') 279 | >>> extra_coords_input0 = [("time", 0, timestamps0), ("exposure time", 0, exposure_times)] 280 | >>> spectrograms0 = SpectrogramCube(data, input_wcs, uncertainty=uncertainties, mask=mask, 281 | ... meta=meta, unit=u.ct) 282 | >>> for extra in extra_coords_input0: 283 | ... spectrograms0.extra_coords.add(*extra) 284 | >>> # Create 2nd cube of spectrograms. 285 | >>> timestamps1 = Time([timestamps0[-1].to_datetime() + timedelta(minutes=i) 286 | ... for i in range(1, axis_length+1)], format='datetime', scale='utc') 287 | >>> extra_coords_input1 = [("time", 0, timestamps1), ("exposure time", 0, exposure_times)] 288 | >>> spectrograms1 = SpectrogramCube(data*2, input_wcs, uncertainty=uncertainties2, mask=mask, 289 | ... meta=meta, unit=u.ct) 290 | >>> for extra in extra_coords_input1: 291 | ... spectrograms1.extra_coords.add(*extra) 292 | >>> # Create 3rd cube of spectrograms. 293 | >>> timestamps2 = Time([timestamps1[-1].to_datetime() + timedelta(minutes=i) 294 | ... for i in range(1, axis_length+1)], format='datetime', scale='utc') 295 | >>> extra_coords_input2 = [("time", 0, timestamps2), ("exposure time", 0, exposure_times)] 296 | >>> spectrograms2 = SpectrogramCube(data*0.5, input_wcs, uncertainty=uncertainties05, mask=mask, 297 | ... meta=meta, unit=u.ct) 298 | >>> for extra in extra_coords_input2: 299 | ... spectrograms2.extra_coords.add(*extra) 300 | 301 | If we choose, we can define some sequence-level metadata in addition to any metadata attached to the individual raster scans: 302 | 303 | .. code-block:: python 304 | 305 | >>> seq_meta = {"description": "This is a SpectrogramSequence."} 306 | 307 | To create a `~sunraster.SpectrogramSequence`, simply supply the class with a list of `~sunraster.SpectrogramCube` instances. 308 | 309 | .. 
code-block:: python 310 | 311 | >>> from sunraster import SpectrogramSequence 312 | >>> my_sequence = SpectrogramSequence([spectrograms0, spectrograms1, spectrograms2], 313 | ... meta=seq_meta) 314 | 315 | Dimensions 316 | ^^^^^^^^^^ 317 | 318 | In order to inspect the dimensionality of our sequence and the physical properties to which the axes correspond, we can use the 319 | `~sunraster.SpectrogramSequence.dimensions` and `~sunraster.SpectrogramSequence.array_axis_physical_types` properties. 320 | 321 | .. code-block:: python 322 | 323 | >>> my_sequence.shape 324 | (3, 3, 4, 5) 325 | >>> my_sequence.array_axis_physical_types 326 | [('meta.obs.sequence',), ('custom:pos.helioprojective.lat', 'custom:pos.helioprojective.lon', 'time', 'custom:CUSTOM'), ('custom:pos.helioprojective.lat', 'custom:pos.helioprojective.lon'), ('em.wl',)] 327 | 328 | Note that this is the same API as `~sunraster.SpectrogramCube` except that `sunraster.SpectrogramSequence.dimensions` returns an iterable of `~astropy.units.Quantity` objects, one for each axis. 329 | This is because of its inheritance from `~ndcube.NDCubeSequence` rather than `~ndcube.NDCube`. 330 | Also note that there are now four dimensions, as the sequence is treated as though it were an additional data axis. 331 | This can be very helpful if you have a series of 2D spectrograms and want to use the sequence axis to represent time. 332 | `sunraster.SpectrogramSequence.array_axis_physical_types` returns a list of tuples of the same `IVOA UCD1+ controlled words `__ used by `sunraster.SpectrogramCube.array_axis_physical_types`. 333 | The sequence axis is given the label ``'meta.obs.sequence'``. 334 | 335 | .. 
_sequence_coords: 336 | 337 | Coordinates 338 | ^^^^^^^^^^^ 339 | 340 | Coordinate Properties 341 | ********************* 342 | 343 | Just like `~sunraster.SpectrogramCube`, `~sunraster.SpectrogramSequence` provides convenience properties to retrieve the real world coordinate values for each pixel along each axis, namely `sunraster.SpectrogramSequence.celestial`, `sunraster.SpectrogramSequence.spectral`, `sunraster.SpectrogramSequence.time` and `sunraster.SpectrogramSequence.exposure_time`. 344 | Since there is no guarantee that `~sunraster.SpectrogramCube`'s WCS transformations are consistent between `~sunraster.SpectrogramCube` s, `sunraster.SpectrogramCube.celestial` returns 3-D `~astropy.coordinates.SkyCoord` instances and `sunraster.SpectrogramCube.spectral` returns a 2-D `~astropy.units.Quantity` where the additional dimension represents the coordinates for different `~sunraster.SpectrogramCube` instances. 345 | 346 | .. _sequence_slicing: 347 | 348 | Exposure Time Correction 349 | ^^^^^^^^^^^^^^^^^^^^^^^^ 350 | 351 | Analogous to `~sunraster.SpectrogramCube`, `~sunraster.SpectrogramSequence` also provides a `~sunraster.SpectrogramSequence.apply_exposure_time_correction` method. This is simply a wrapper around the `~sunraster.SpectrogramCube` version that saves users from applying or removing the exposure time correction to each `~sunraster.SpectrogramCube` manually. To remind yourself how that method works, see the `~sunraster.SpectrogramCube` :ref:`cube_exposure_time_correction` section. 352 | Note that for this method to work, the exposure time values must be stored in the ``.meta`` attribute of the relevant constituent `~sunraster.SpectrogramCube` objects. 353 | 354 | Slicing 355 | ^^^^^^^ 356 | 357 | `~sunraster.SpectrogramSequence` provides an identical slicing API to `~sunraster.SpectrogramCube`. 358 | Although recall that a `~sunraster.SpectrogramSequence` has an additional dimension. 
359 | As with `~sunraster.SpectrogramCube`, the slicing API manipulates not only the data, but also all relevant supporting metadata including uncertainties, mask, WCS object, extra_coords, etc. 360 | 361 | To slice a `~sunraster.SpectrogramSequence`, simply do: 362 | 363 | .. code-block:: python 364 | 365 | >>> my_sequence_roi = my_sequence[1:3, 0:2, 1:3, 1:4] 366 | 367 | We can check the effect of the slicing via the `~sunraster.SpectrogramSequence.dimensions` property. 368 | 369 | .. code-block:: python 370 | 371 | >>> print(my_sequence.shape) # Check dimensionality before slicing. 372 | (3, 3, 4, 5) 373 | >>> print(my_sequence_roi.shape) # See how slicing has changed dimensionality. 374 | (2, 2, 2, 3) 375 | 376 | Slicing can reduce the dimensionality of `~sunraster.SpectrogramSequence` instances. 377 | For example, let's slice out the 2nd pixel along the slit. 378 | 379 | .. code-block:: python 380 | 381 | >>> my_3d_sequence = my_sequence[:, :, 2] 382 | >>> print(my_3d_sequence.shape) 383 | (3, 3, 5) 384 | 385 | Plotting 386 | ^^^^^^^^ 387 | 388 | To quickly and easily visualize slit spectrograph data, `~sunraster.SpectrogramSequence` supplies a simple, yet powerful plotting API. 389 | It is intended as a useful quicklook tool and not a replacement for high quality plots or animations, e.g. for publications or presentations. 390 | 391 | .. code-block:: python 392 | 393 | >>> my_sequence.plot() # doctest: +SKIP 394 | 395 | As with `~sunraster.SpectrogramCube`, this method produces different types of visualizations including line plots, 2-D images and 1- and 2-D animations. 396 | Which is displayed depends on the dimensionality of the `~sunraster.SpectrogramSequence` and the inputs of the user. 397 | To learn more about how to customize plots and animations through the `~sunraster.SpectrogramSequence.plot` method, see the `NDCubeSequence plotting documentation `__. 
398 | 399 | Spectrogram Collections 400 | ----------------------- 401 | 402 | During analysis of slit spectrograph data, it is often desirable to group different data sets together. 403 | For example, you may have several `~sunraster.SpectrogramCube` or `~sunraster.RasterSequence` objects representing observations in different spectral windows. 404 | Or we may have fit a spectral line in each pixel and extracted a property such as linewidth, thus collapsing the spectral axis. 405 | In both these cases, the `~sunraster.RasterSequence` objects share a common origin and set of coordinate transformations with the original observations (except in the spectral axis in the latter example). 406 | However, they do not have a sequential relationship in their common coordinate spaces and in the latter case the data represents a different physical property to the original observations. 407 | Therefore, combining them in a `~sunraster.RasterSequence` is not appropriate. 408 | 409 | ``sunraster`` does not provide a suitable object for this purpose. 410 | However, because `~sunraster.SpectrogramCube` `~sunraster.SpectrogramSequence` and `~sunraster.RasterSequence` are instances of ``ndcube`` classes underneath, users can employ the `ndcube.NDCollection` class for this purpose. 411 | `~ndcube.NDCollection` is a ``dict``-like class that provides additional slicing capabilities of its constituent data cubes along aligned axes. 412 | To see whether `~ndcube.NDCollection` could be helpful for your research, see the `NDCollection documentation `__. 413 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | *********************** 2 | sunraster Documentation 3 | *********************** 4 | 5 | Welcome to the ``sunraster`` User Guide. 
6 | 7 | ``sunraster`` is a free, open-source, community-developed, SunPy-affiliated package that provides tools to manipulate and visualize slit spectrograph data. 8 | We are always glad to welcome new contributors and users. 9 | 10 | The ``sunraster`` classes link observations with various forms of supporting data including: measurement uncertainties; units; a data mask to mark pixels with unreliable or un-physical data values; WCS (World Coordinate System) transformations that describe the position, wavelengths and times represented by the pixels; and general metadata. 11 | These classes also provide methods for applying and removing exposure time corrections to/from the observations. 12 | Moreover, because the data unit is linked to the object, it is always obvious what unit(s) the data is in. 13 | This saves scientists the hassle of performing important, but laborious and repetitive data conversions and avoid confusion by always tracking the unit(s) of the data through those conversions. 14 | 15 | The ``sunraster`` classes inherit more fundamental functionalities from the `ndcube`_ package. 16 | These include a powerful, generic slicing API (application programmable interface) allowing users to manipulate the same data object as though it were 3D (time, position along slit, wavelength) or 4D (raster scan number, slit step, position along slit, wavelength), which is very useful when dealing with scanning slit-spectrograph data. 17 | The API simultaneously slices not only the data, but the uncertainties, data mask, and WCS transformations leading to faster and less error-prone data analysis. 18 | The ``sunraster`` classes also inherit the ability to crop by real world coordinates --- useful when locating a region of interest using information from other observatories --- and a visualization suite which allows users to easily and intuitively visually inspect their data. 
19 | 20 | This guide explains the capabilities offered by ``sunraster`` and how to utilize them. 21 | It will describe the different data classes, as well as how to install ``sunraster``, contact the development team, and contribute to the package. 22 | 23 | .. _ndcube: https://docs.sunpy.org/projects/ndcube/en/stable/ 24 | 25 | Contents 26 | ======== 27 | 28 | .. toctree:: 29 | :maxdepth: 2 30 | :caption: Contents: 31 | 32 | installation 33 | data_types/index 34 | api 35 | whatsnew/index 36 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Installation 3 | ============ 4 | 5 | Below we will outline how to install ``sunraster``. 6 | The stable version of ``sunraster`` is what most people will want to install. 7 | If you do find a bug or a behavior you think is incorrect please let us know. 8 | 9 | However, if users would like to get new features as soon as possible or help to develop ``sunraster``, they will have to install the development version. 10 | 11 | .. _stable_install: 12 | 13 | Installing the stable version 14 | ----------------------------- 15 | 16 | There are two options for installing the stable version of ``sunraster``. 17 | The first is via the anaconda distribution using the conda-forge channel. 18 | For more information on installing the anaconda distribution, see the `anaconda website`_. 19 | 20 | .. code-block:: console 21 | 22 | conda install --channel conda-forge sunraster 23 | 24 | To update ``sunraster`` do: 25 | 26 | .. code-block:: console 27 | 28 | conda update sunraster 29 | 30 | The second option for installing the stable version of ``sunraster`` is via pip. 31 | 32 | .. code-block:: console 33 | 34 | pip install sunraster 35 | 36 | Then to update ``sunraster`` do: 37 | 38 | .. code-block:: console 39 | 40 | pip install sunraster --upgrade 41 | 42 | .. 
_dev_install: 43 | 44 | Installing the development version 45 | ---------------------------------- 46 | 47 | This section outlines how to install the development version of ``sunraster``. 48 | The two primary packages on which ``sunraster`` relies are `ndcube`_ and `sunpy`_. 49 | Both of these have stable released versions that work with ``sunraster``. 50 | However, some developers may want to use the latest updates of these packages in their work with ``sunraster``. 51 | 52 | To install these packages we will use a combination of conda, conda environments, pip and git. 53 | We will assume these are all installed on your current system. 54 | 55 | Stable dependencies install 56 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 57 | 58 | Create conda environment 59 | """""""""""""""""""""""" 60 | The first step is to create a conda environment (let's call it ``sunraster-dev``) in which to install the development version of ``sunraster``. 61 | This will allow you to keep your root environment clean of development packages. 62 | From the command line, type: 63 | 64 | .. code-block:: console 65 | 66 | conda config --append channels conda-forge 67 | conda create -n sunraster-dev pip 68 | 69 | The first line opens a conda channel so that ``sunraster`` and its dependencies can be installed. 70 | The second line creates the ``sunraster-dev`` conda environment with a list of dependencies. 71 | Next, you must activate that environment, i.e. switch into it. 72 | Windows users should type: 73 | 74 | .. code-block:: console 75 | 76 | activate sunraster-dev 77 | 78 | whereas Linux and MacOS users should type: 79 | 80 | .. code-block:: console 81 | 82 | conda activate sunraster-dev 83 | 84 | Clone ``sunraster`` repository 85 | """""""""""""""""""""""""""""" 86 | 87 | The second step is to clone the `sunraster repository`_ from `GitHub`_ into a directory. 88 | Let's call it ``sunraster-git``. From the directory in which you want ``sunraster-git`` to reside, type: 89 | 90 | .. 
code-block:: console 91 | 92 | git clone https://github.com/sunpy/sunraster.git sunraster-git 93 | 94 | If you want to develop ``sunraster``, you will need to fork the repository and clone your fork instead. 95 | 96 | Install ``sunraster`` 97 | """"""""""""""""""""" 98 | Finally, we can install the ``sunraster`` development version: 99 | 100 | .. code-block:: console 101 | 102 | cd sunraster-git 103 | pip install -e .\[dev\] 104 | 105 | You should now be ready to use ``sunraster``. 106 | To check it's installed, open an Python/IPython/Jupyter Notebook session from any directory and try: 107 | 108 | .. code-block:: python 109 | 110 | >>> import sunraster 111 | 112 | To make sure you have the latest updates, regularly do 113 | 114 | .. code-block:: console 115 | 116 | git pull origin main 117 | 118 | .. _ndcube: https://docs.sunpy.org/projects/ndcube/en/stable/ 119 | .. _SunPy: https://sunpy.org 120 | .. _anaconda website: https://docs.anaconda.com/anaconda/install.html 121 | .. _sunraster repository: https://github.com/sunpy/sunraster 122 | .. _GitHub: https://github.com/ 123 | .. _SunPy,: https://github.com/sunpy/sunpy 124 | .. _ndcube,: https://github.com/sunpy/ndcube 125 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 
23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/nitpick-exceptions: -------------------------------------------------------------------------------- 1 | # Prevents sphinx nitpicky mode picking up on optional 2 | # (see https://github.com/sphinx-doc/sphinx/issues/6861) 3 | # Even if it was "fixed", still broken 4 | py:class optional 5 | # See https://github.com/numpy/numpy/issues/10039 6 | py:obj numpy.datetime64 7 | # There's no specific file or function classes to link to 8 | py:class (Unit('deg'), Unit('pix')) 9 | py:class (Unit('Mm'), None) 10 | py:class any type 11 | py:class array-like 12 | py:class file object 13 | py:class function 14 | py:class path-like 15 | py:class str-like 16 | py:class time-like 17 | py:class Unit 18 | py:class Unit('%') 19 | py:class Unit('Angstrom') 20 | py:class Unit('arcsec / pix') 21 | py:class Unit('arcsec') 22 | py:class Unit('deg') 23 | py:class Unit('pix') 24 | py:class Unit('s') 25 | py:class Unit('W / m2') 26 | py:class Unit('ct / pix') 27 | py:obj function 28 | py:obj iterable 29 | py:obj parfive 30 | py:mod parfive 31 | py:obj astropy.io.fits.hdu.base.ExtensionHDU 32 | 33 | # This comes from Map.wcs 34 | py:class prop 35 | py:class Maxwell 36 | 37 | # These come from astropy.coordinates.baseframe.represent_as 38 | py:class data 39 | py:class keyword only 40 | py:class keyword-only 41 | py:class string 42 | py:class subclass of BaseRepresentation 43 | 44 | # These come from astropy QTable 45 | py:class list of lists 46 | py:class list of list 47 | py:class numpy ndarray 48 | py:class numpy ndarray 49 | py:class Table 50 | py:class table-like object 51 | 52 | # numpy inherited 
docstrings 53 | py:obj a 54 | py:obj a.size == 1 55 | py:obj args 56 | py:obj dtype 57 | py:obj n 58 | py:obj ndarray 59 | 60 | # other classes and functions that cannot be linked to 61 | py:class astropy.table.column.BaseColumn 62 | py:class docutils.parsers.rst.Directive 63 | py:class numpy.core.records.recarray 64 | py:class numpy.ma.core.MaskedArray 65 | py:class numpy.ma.mvoid 66 | py:class numpy.void 67 | py:class pandas.DataFrame 68 | py:class xmlrpc.client.Error 69 | py:class xmlrpc.client.Fault 70 | py:class xmlrpclib.Error 71 | py:class xmlrpclib.Fault 72 | py:obj aiohttp.ClientResponse 73 | py:obj astropy.visualization.wcsaxes.coordinates_map.CoordinatesMap.grid 74 | py:obj data 75 | py:obj numpy.ma.core.MaskedArray 76 | py:obj pkg_resources.parse_version 77 | py:obj sunpy.extern.parse.parse 78 | 79 | # Pending on python docs links issue #11975 80 | py:class classmethod 81 | py:class list 82 | py:meth list.pop 83 | py:obj Artist 84 | py:obj AttributeError 85 | py:obj BboxBase 86 | py:obj int 87 | py:obj list.append 88 | py:obj list.append 89 | py:obj list.count 90 | py:obj list.extend 91 | py:obj list.index 92 | py:obj list.insert 93 | py:obj list.remove 94 | py:obj NotImplementedError 95 | py:obj NotImplementedError 96 | py:obj RendererBase 97 | py:obj RuntimeError 98 | py:obj text 99 | py:obj Text 100 | 101 | # Add these to ndcube one day 102 | py:obj ndcube.NDCubeSequence.common_axis_extra_coords 103 | py:obj ndcube.NDCubeSequence.array_axis_physical_axis_types 104 | py:obj ndcube.NDCubeSequence.cube_like_world_axis_physical_axis_types 105 | -------------------------------------------------------------------------------- /docs/whatsnew/changelog.rst: -------------------------------------------------------------------------------- 1 | .. _changelog: 2 | 3 | ************** 4 | Full Changelog 5 | ************** 6 | 7 | .. 
changelog:: 8 | :towncrier: ../../ 9 | :towncrier-skip-if-empty: 10 | :changelog_file: ../../CHANGELOG.rst 11 | -------------------------------------------------------------------------------- /docs/whatsnew/index.rst: -------------------------------------------------------------------------------- 1 | .. _whatsnew: 2 | 3 | *************** 4 | Release History 5 | *************** 6 | 7 | This page documents the releases for sunraster 8 | 9 | .. toctree:: 10 | :maxdepth: 1 11 | 12 | changelog 13 | -------------------------------------------------------------------------------- /licenses/LICENSE.rst: -------------------------------------------------------------------------------- 1 | Copyright (c) 2024, The SunPy Community 2 | 3 | Redistribution and use in source and binary forms, with or without modification, 4 | are permitted provided that the following conditions are met: 5 | 6 | 1. Redistributions of source code must retain the above copyright notice, this 7 | list of conditions and the following disclaimer. 8 | 9 | 2. Redistributions in binary form must reproduce the above copyright notice, 10 | this list of conditions and the following disclaimer in the documentation 11 | and/or other materials provided with the distribution. 12 | 13 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 14 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 15 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 16 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR 17 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 18 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 19 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 20 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 21 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 22 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 23 | -------------------------------------------------------------------------------- /licenses/README.rst: -------------------------------------------------------------------------------- 1 | Licenses 2 | ======== 3 | 4 | This directory holds license and credit information for the package, 5 | works the package is derived from, and/or datasets. 6 | 7 | Ensure that you pick a package licence which is in this folder and it matches 8 | the one mentioned in the top level README.rst file. If you are using the 9 | pre-rendered version of this template check for the word 'Other' in the README. 10 | -------------------------------------------------------------------------------- /licenses/TEMPLATE_LICENSE.rst: -------------------------------------------------------------------------------- 1 | This project is based upon the OpenAstronomy package template 2 | (https://github.com/OpenAstronomy/package-template/) which is licensed under the terms 3 | of the following licence. 4 | 5 | --- 6 | 7 | Copyright (c) 2018, OpenAstronomy Developers 8 | All rights reserved. 9 | 10 | Redistribution and use in source and binary forms, with or without modification, 11 | are permitted provided that the following conditions are met: 12 | 13 | * Redistributions of source code must retain the above copyright notice, this 14 | list of conditions and the following disclaimer. 
15 | * Redistributions in binary form must reproduce the above copyright notice, this 16 | list of conditions and the following disclaimer in the documentation and/or 17 | other materials provided with the distribution. 18 | * Neither the name of the Astropy Team nor the names of its contributors may be 19 | used to endorse or promote products derived from this software without 20 | specific prior written permission. 21 | 22 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 23 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 24 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 25 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR 26 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 27 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 28 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 29 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 30 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 31 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 32 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ 3 | "setuptools>=62.1", 4 | "setuptools_scm[toml]>=8.0.0", 5 | "wheel", 6 | ] 7 | build-backend = "setuptools.build_meta" 8 | 9 | [project] 10 | name = "sunraster" 11 | description = "sunraster is an open-source Python library that provides the tools to read in and analyze spectrogram data." 
12 | requires-python = ">=3.12" 13 | readme = { file = "README.rst", content-type = "text/x-rst" } 14 | license = { file = "licenses/LICENSE.rst" } 15 | authors = [ 16 | { name = "The SunPy Community", email = "sunpy@googlegroups.com" }, 17 | ] 18 | dependencies = [ 19 | "numpy>=1.26.0", 20 | "astropy>=6.1.0", 21 | "ndcube[all]>=2.3.2" 22 | ] 23 | dynamic = ["version"] 24 | 25 | [project.optional-dependencies] 26 | all = ["sunraster[instr]"] 27 | instr = [ 28 | "sunpy>=7.0", 29 | ] 30 | tests = [ 31 | "pytest", 32 | "pytest-doctestplus", 33 | "pytest-cov", 34 | "pytest-astropy", 35 | "pytest-xdist", 36 | ] 37 | docs = [ 38 | "sphinx", 39 | "sphinx-automodapi", 40 | "sphinx-changelog", 41 | "sunpy-sphinx-theme", 42 | "packaging", 43 | "sphinx-changelog", 44 | "sphinx-gallery", 45 | ] 46 | 47 | [project.urls] 48 | Homepage = "https://sunpy.org" 49 | "Source Code" = "https://github.com/sunpy/sunraster" 50 | Download = "https://pypi.org/project/sunraster" 51 | Documentation = "https://docs.sunpy.org/projects/sunraster" 52 | Changelog = "https://docs.sunpy.org/projects/sunraster/en/stable/whatsnew/changelog.html" 53 | "Issue Tracker" = "https://github.com/sunpy/sunraster/issues" 54 | 55 | [tool.setuptools] 56 | zip-safe = false 57 | include-package-data = true 58 | 59 | [tool.setuptools.packages.find] 60 | include = ["sunraster*"] 61 | exclude = ["sunraster._dev*"] 62 | 63 | [tool.setuptools_scm] 64 | version_file = "sunraster/_version.py" 65 | 66 | [tool.gilesbot] 67 | [tool.gilesbot.pull_requests] 68 | enabled = true 69 | 70 | [tool.gilesbot.towncrier_changelog] 71 | enabled = true 72 | verify_pr_number = true 73 | changelog_skip_label = "No Changelog Entry Needed" 74 | help_url = "https://github.com/sunpy/sunraster/blob/main/changelog/README.rst" 75 | 76 | changelog_missing_long = "There isn't a changelog file in this pull request. 
Please add a changelog file to the `changelog/` directory following the instructions in the changelog [README](https://github.com/sunpy/sunraster/blob/main/changelog/README.rst)." 77 | 78 | type_incorrect_long = "The changelog file you added is not one of the allowed types. Please use one of the types described in the changelog [README](https://github.com/sunpy/sunraster/blob/main/changelog/README.rst)" 79 | 80 | number_incorrect_long = "The number in the changelog file you added does not match the number of this pull request. Please rename the file." 81 | 82 | # TODO: This should be in towncrier.toml but Giles currently only works looks in 83 | # pyproject.toml we should move this back when it's fixed. 84 | [tool.towncrier] 85 | package = "sunraster" 86 | filename = "CHANGELOG.rst" 87 | directory = "changelog/" 88 | issue_format = "`#{issue} `__" 89 | title_format = "{version} ({project_date})" 90 | 91 | [[tool.towncrier.type]] 92 | directory = "breaking" 93 | name = "Breaking Changes" 94 | showcontent = true 95 | 96 | [[tool.towncrier.type]] 97 | directory = "deprecation" 98 | name = "Deprecations" 99 | showcontent = true 100 | 101 | [[tool.towncrier.type]] 102 | directory = "removal" 103 | name = "Removals" 104 | showcontent = true 105 | 106 | [[tool.towncrier.type]] 107 | directory = "feature" 108 | name = "New Features" 109 | showcontent = true 110 | 111 | [[tool.towncrier.type]] 112 | directory = "bugfix" 113 | name = "Bug Fixes" 114 | showcontent = true 115 | 116 | [[tool.towncrier.type]] 117 | directory = "doc" 118 | name = "Documentation" 119 | showcontent = true 120 | 121 | [[tool.towncrier.type]] 122 | directory = "trivial" 123 | name = "Internal Changes" 124 | showcontent = true 125 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | minversion = 7.0 3 | testpaths = 4 | sunraster 5 | docs 6 | norecursedirs = 7 | 
.tox 8 | build 9 | docs/_build 10 | docs/generated 11 | *.egg-info 12 | examples 13 | sunraster/_dev 14 | .history 15 | sunraster/extern 16 | doctest_plus = enabled 17 | doctest_optionflags = 18 | NORMALIZE_WHITESPACE 19 | FLOAT_CMP 20 | ELLIPSIS 21 | text_file_format = rst 22 | addopts = 23 | --doctest-rst 24 | -p no:unraisableexception 25 | -p no:threadexception 26 | filterwarnings = 27 | error 28 | # Do not fail on pytest config issues (i.e. missing plugins) but do show them 29 | always::pytest.PytestConfigWarning 30 | # 31 | # A list of warnings to ignore follows. If you add to this list, you MUST 32 | # add a comment or ideally a link to an issue that explains why the warning 33 | # is being ignored 34 | # 35 | # 36 | # This is due to dependencies building with a numpy version different from 37 | # the local installed numpy version, but should be fine 38 | # See https://github.com/numpy/numpy/issues/15748#issuecomment-598584838 39 | ignore:numpy.ufunc size changed:RuntimeWarning 40 | ignore:numpy.ndarray size changed:RuntimeWarning 41 | ignore:invalid value encountered in sqrt:RuntimeWarning 42 | # FITS header issues 43 | ignore::astropy.wcs.wcs.FITSFixedWarning 44 | ignore::astropy.io.fits.verify.VerifyWarning 45 | # https://github.com/astropy/astropy/issues/11309 46 | ignore:target cannot be converted to ICRS, so will not be set on SpectralCoord 47 | # test_ndcube_components_after_slicing raises this and it is unclear if its a problem. 
48 | ignore: invalid value encountered in true_divide 49 | # https://github.com/pytest-dev/pytest-cov/issues/557 50 | ignore:The --rsyncdir command line argument and rsyncdirs config variable are deprecated.:DeprecationWarning 51 | ignore:Please use astropy.wcs.wcsapi.high_level_api.values_to_high_level_objects:DeprecationWarning 52 | # oldest deps 53 | ignore:pkg_resources is deprecated as an API 54 | # devdeps from gwcs 55 | ignore:The isiterable function is deprecated 56 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from setuptools import setup 3 | 4 | setup() 5 | -------------------------------------------------------------------------------- /sunraster/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | sunraster 3 | ========= 4 | """ 5 | 6 | from .spectrogram import SpectrogramCube 7 | from .spectrogram_sequence import RasterSequence, SpectrogramSequence 8 | from .version import version as __version__ 9 | 10 | __all__ = ["SpectrogramCube", "SpectrogramSequence", "RasterSequence"] 11 | -------------------------------------------------------------------------------- /sunraster/_dev/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This package contains utilities that are only used when developing in a 3 | copy of the source repository. 4 | These files are not installed, and should not be assumed to exist at 5 | runtime. 6 | """ 7 | -------------------------------------------------------------------------------- /sunraster/_dev/scm_version.py: -------------------------------------------------------------------------------- 1 | # Try to use setuptools_scm to get the current version; this is only used 2 | # in development installations from the git repository. 
3 | from pathlib import Path 4 | 5 | try: 6 | from setuptools_scm import get_version 7 | 8 | version = get_version(root=Path('../..'), relative_to=__file__) 9 | except ImportError: 10 | raise 11 | except Exception as e: 12 | raise ValueError("setuptools_scm can not determine version.") from e 13 | -------------------------------------------------------------------------------- /sunraster/data/README.rst: -------------------------------------------------------------------------------- 1 | Data directory 2 | ============== 3 | 4 | This directory contains data files included with the package source 5 | code distribution. Note that this is intended only for relatively small files 6 | - large files should be externally hosted and downloaded as needed. 7 | -------------------------------------------------------------------------------- /sunraster/instr/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunpy/sunraster/b0f8fcd5673283a4424b5693f3473c87ba429d1d/sunraster/instr/__init__.py -------------------------------------------------------------------------------- /sunraster/instr/spice.py: -------------------------------------------------------------------------------- 1 | import copy 2 | import numbers 3 | import textwrap 4 | 5 | import numpy as np 6 | 7 | import astropy.units as u 8 | from astropy.coordinates import SkyCoord 9 | from astropy.io import fits 10 | from astropy.time import Time 11 | from astropy.wcs import WCS 12 | 13 | from ndcube import NDCollection 14 | from ndcube.meta import NDMeta 15 | 16 | from sunraster import RasterSequence, SpectrogramCube, SpectrogramSequence 17 | from sunraster.meta import SlitSpectrographMetaABC 18 | 19 | __all__ = ["read_spice_l2_fits", "SPICEMeta"] 20 | 21 | 22 | INCORRECT_OBSID_MESSAGE = "File has incorrect SPIOBSID." 
def read_spice_l2_fits(filenames, windows=None, memmap=True, read_dumbbells=False):
    """
    Read SPICE level 2 FITS file.

    Parameters
    ----------
    filenames: iterable of `str`
        The name(s), including path, of the SPICE FITS file(s) to read.
    windows: iterable of `str`
        The names of the windows to read.
        All windows must be of the same type: dumbbell or regular.
        Default=None implies all narrow-slit or dumbbell windows read out
        depending on value of read_dumbbells kwarg. See below.
    memmap: `bool`
        If True, FITS file is read with memory mapping.
    read_dumbbells: `bool`
        Defines whether dumbbell or regular windows are returned.
        If True, returns the dumbbell windows.
        If False, returns regular windows.
        Default=False
        Ignored if windows kwarg is set.

    Returns
    -------
    output: `ndcube.NDCollection` or `sunraster.SpectrogramCube`, `sunraster.RasterSequence`,
            `sunraster.SpectrogramSequence`
        A collection of spectrogram/raster cubes/sequences, one for each window.
        If only one window present or requested, a single spectrogram cube
        or sequence is returned.
    """
    # Sanitize inputs.
    if isinstance(filenames, str):
        filenames = [filenames]
    # Read first file.
    first_cubes = _read_single_spice_l2_fits(
        filenames[0], windows=windows, memmap=memmap, read_dumbbells=read_dumbbells
    )
    # Derive information for consistency checks between files and read subsequent files.
    if len(filenames) > 1:
        # Wrap windows from first file in lists
        # so windows from other files can be appended.
        cube_lists = {key: [value] for key, value in first_cubes.items()}
        # Get info from first file for consistency checks between files.
        first_meta = _get_meta_from_last_added(cube_lists)
        first_obs_id = _get_obsid(first_meta)
        if windows is None:
            windows = list(cube_lists.keys())
        # Read subsequent files and append output to relevant window in cube_lists.
        for i, filename in enumerate(filenames[1:]):
            try:
                cube_lists = _read_single_spice_l2_fits(
                    filename,
                    windows=windows,
                    memmap=memmap,
                    read_dumbbells=read_dumbbells,
                    output=cube_lists,
                    spice_id=first_obs_id,
                )
            except ValueError as err:  # NOQA: PERF203
                # Re-raise OBS-ID mismatches with a friendlier message that
                # names both the first file's and the offending file's OBS ID.
                # NOTE(review): a ValueError whose message does NOT contain
                # INCORRECT_OBSID_MESSAGE is silently swallowed here and the
                # loop continues — confirm this is intentional.
                err_message = err.args[0]
                if INCORRECT_OBSID_MESSAGE in err_message:
                    this_obs_id = err_message.split()[-1]
                    raise ValueError(
                        "All files must correspond to same observing campaign/SPICE OBS ID. "
                        f"First file SPICE OBS ID: {first_obs_id}; "
                        f"{i+1}th file SPICE OBS ID: {this_obs_id}"
                    ) from err
        # Depending on type of file, combine data from different files into
        # SpectrogramSequences and RasterSequences.
        # NOTE(review): `window[1]` indexes the cube read from the SECOND file
        # of each window's list — presumably any cube's meta would do; confirm
        # `window[0]` was not intended.
        is_raster = "ras" in first_meta.get("FILENAME") and not any(
            window[1].meta.contains_dumbbell for window in cube_lists.values()
        )
        sequence_class = RasterSequence if is_raster else SpectrogramSequence
        # `v[0]` slices off the leading ("raster scan") array axis of each cube
        # before concatenation — assumes that axis has length 1 per file; TODO confirm.
        window_sequences = [
            (key, sequence_class([v[0] for v in value], common_axis=-1)) for key, value in cube_lists.items()
        ]
    else:
        # If only one file being read, leave data in SpectrogramCube objects.
        window_sequences = list(first_cubes.items())
    if len(window_sequences) > 1:
        # Data should be aligned along all axes except the spectral axis.
        # But they should be aligned along all axes if they come from the
        # same spectral window, e.g. because they are dumbbell windows.
        first_sequence = window_sequences[0][1]
        first_spectral_window = first_sequence[0].meta.spectral_window
        if all(window[1][0].meta.spectral_window == first_spectral_window for window in window_sequences):
            # `.shape` is the modern ndcube attribute; fall back to the
            # deprecated `.dimensions` for older ndcube objects.
            aligned_axes = tuple(
                range(len(first_sequence.shape if hasattr(first_sequence, "shape") else first_sequence.dimensions))
            )
        else:
            # Align on every axis whose physical type is not spectral ("em.wl").
            aligned_axes = tuple(
                i for i, phys_type in enumerate(first_sequence.array_axis_physical_types) if "em.wl" not in phys_type
            )
    else:
        aligned_axes = None
    return NDCollection(window_sequences, aligned_axes=aligned_axes)


def _get_meta_from_last_added(obj):
    """Return the metadata of the most recently appended cube in a dict of cube lists."""
    return list(obj.values())[0][-1].meta


def _get_obsid(spice_meta):
    """Return the SPICE observation ID (SPIOBSID) from a `SPICEMeta` object."""
    return spice_meta.spice_observation_id


def _read_single_spice_l2_fits(
    filename,
    windows=None,
    memmap=True,
    read_dumbbells=False,
    output=None,
    spice_id=None,
):
    """
    Read SPICE level 2 FITS file(s).

    Parameters
    ----------
    filename: `str`
        The name, including path, of the SPICE FITS file to read.
    windows: iterable of `str`
        The names of the windows to read.
        All windows must be of the same type: dumbbell or regular.
        Default=None implies all narrow-slit or dumbbell windows read out
        depending on value of read_dumbbells kwarg. See below.
    memmap: `bool`
        If True, FITS file is read with memory mapping.
    read_dumbbells: `bool`
        Defines whether dumbbell or regular windows are returned.
        If True, returns the dumbbell windows.
        If False, returns regular windows.
        Default=False
        Ignored if windows kwarg is set.
    output: `dict` of `list`s (optional)
        A dictionary of lists with the same keys are the windows kwarg.
        The output for each window will be appended to the list corresponding
        the window's name.
    spice_id: `int` (optional)
        If not None, file must have a SPIOBSID equal to this value.
        Otherwise an error is raised

    Returns
    -------
    output: `dict` of `sunraster.SpectrogramCube`
        A collection of spectrogram cubes, one for each window.
    """
    window_cubes = []
    dumbbell_label = "DUMBBELL"
    excluded_labels = ["WCSDVARR"]
    with fits.open(filename, memmap=memmap) as hdulist:
        # Enforce that all files in a multi-file read belong to the same campaign.
        if isinstance(spice_id, numbers.Integral) and hdulist[0].header["SPIOBSID"] != spice_id:
            raise ValueError(f"{INCORRECT_OBSID_MESSAGE} Expected {spice_id}. Got {hdulist[0].header['SPIOBSID']}.")
        # Derive names of windows to be read.
        if windows is None:
            if read_dumbbells:
                # Select only image HDUs whose EXTNAME marks them as dumbbells.
                windows = [
                    hdu.header["EXTNAME"]
                    for hdu in hdulist
                    if (
                        isinstance(
                            hdu,
                            (
                                fits.hdu.image.PrimaryHDU,
                                fits.hdu.image.ImageHDU,
                            ),
                        )
                    )
                    and dumbbell_label in hdu.header["EXTNAME"]
                ]
            else:
                # Select image HDUs that are neither dumbbells nor excluded
                # auxiliary extensions (e.g. WCS distortion arrays).
                windows = [
                    hdu.header["EXTNAME"]
                    for hdu in hdulist
                    if (
                        isinstance(
                            hdu,
                            (
                                fits.hdu.image.PrimaryHDU,
                                fits.hdu.image.ImageHDU,
                            ),
                        )
                    )
                    and dumbbell_label not in hdu.header["EXTNAME"]
                    and hdu.header["EXTNAME"] not in excluded_labels
                ]
        # Mixing dumbbell and regular windows in one read is not supported.
        dumbbells_requested = [dumbbell_label in window for window in windows]
        if any(dumbbells_requested) and not all(dumbbells_requested):
            raise ValueError("Cannot read dumbbell and other window types simultaneously.")
        # Retrieve window names from FITS file.
        for hdu in hdulist:
            if hdu.header["EXTNAME"] in windows:
                # Define metadata object.
                meta = SPICEMeta(
                    hdu.header,
                    key_comments=_convert_fits_comments_to_key_value_pairs(hdu.header),
                    data_shape=hdu.data.shape,
                )
                # Rename WCS time axis to time.
                meta.update([("CTYPE4", "TIME")])
                new_header = copy.deepcopy(hdu.header)
                new_header["CTYPE4"] = "TIME"
                # Define WCS from new header
                wcs = WCS(new_header)
                # Define exposure times from metadata.
                # XPOSURE is broadcast to one value per slit step (last array axis).
                exp_times = u.Quantity(np.zeros(hdu.data.shape[-1]) + meta.get("XPOSURE"), unit=u.s)
                # Define data cube.
                data = hdu.data
                # NaN pixels are flagged via the cube's mask.
                spectrogram = SpectrogramCube(
                    data=data,
                    wcs=wcs,
                    mask=np.isnan(data),
                    unit=u.Unit(meta.get("BUNIT"), format="fits"),
                    meta=meta,
                    instrument_axes=("raster scan", "spectral", "slit", "slit step"),
                )
                # Attach exposure times to metadata along array axis 3 (slit step).
                spectrogram.meta.add("exposure time", exp_times, None, 3)
                window_name = meta.get("EXTNAME")
                if output is None:
                    window_cubes.append((window_name, spectrogram))
                else:
                    output[window_name].append(spectrogram)
    return dict(window_cubes) if output is None else output


def _convert_fits_comments_to_key_value_pairs(fits_header):
    """Return a dict mapping each unique FITS header keyword to its comment string."""
    keys = np.unique(np.array(list(fits_header.keys())))
    # Drop the empty-string keyword produced by blank/COMMENT-style cards.
    keys = keys[keys != ""]
    return dict([(key, fits_header.comments[key]) for key in keys])


class SPICEMeta(SlitSpectrographMetaABC, NDMeta):
    """
    Metadata class for SPICE level 2 FITS headers.

    Wraps a FITS header (plus its keyword comments) and exposes the
    `SlitSpectrographMetaABC` interface as well as SPICE-specific
    properties. Units and time scales are parsed out of the
    ``[unit]``-style FITS keyword comments.
    """

    # ---------- SPICE-specific convenience methods ----------
    def _get_unit(self, key):
        """Parse the unit string from a keyword comment of the form ``[unit] description``."""
        if comment := self.key_comments.get(key):
            try:
                return [s.split("]") for s in comment.split("[")[1:]][0][:-1][0]
            except IndexError:
                pass
        return None

    def _construct_quantity(self, key):
        """Return keyword value as a `~astropy.units.Quantity` using the unit from its comment."""
        val = self.get(key)
        # NOTE(review): falsy values (0, 0.0) are returned without a unit — confirm acceptable.
        if val:
            val *= u.Unit(self._get_unit(key))
        return val

    def _construct_time(self, key):
        """Return keyword value as a `~astropy.time.Time`; the time scale comes from the comment."""
        # NOTE(review): raises AttributeError if the keyword comment carries no
        # [unit] part (``_get_unit`` returns None) — confirm all time keywords do.
        val = self.get(key)
        scale = self._get_unit(key).lower()
        if val:
            val = Time(val, format="fits", scale=scale)
        return val

    def __str__(self):
        return textwrap.dedent(
            f"""\
            SPICEMeta
            ---------
            Observatory:\t\t\t\t{self.observatory}
            Instrument:\t\t\t\t{self.instrument}
            Detector:\t\t\t\t{self.detector}
            Spectral Window:\t\t\t{self.spectral_window}
            Date:\t\t\t\t\t{self.date_reference}
            OBS_ID (SOC Observation ID):\t\t{self.observing_mode_id_solar_orbiter}
            SPIOBSID (SPICE Observation ID):\t{self.spice_observation_id}
            """
        )

    def __repr__(self):
        return f"{object.__repr__(self)}\n{self!s}"

    # ---------- Inherited ABC properties ----------
    @property
    def spectral_window(self):
        """Spectral window name derived from EXTNAME with dumbbell/WINDOW boilerplate removed."""
        spectral_window = self.get("EXTNAME")
        # Remove redundant text associated with dumbbells.
        joiner = "_"
        if self.contains_dumbbell:
            dummy_txt = ""
            spectral_window = spectral_window.replace("DUMBBELL", dummy_txt)
            spectral_window = spectral_window.replace("UPPER", dummy_txt)
            spectral_window = spectral_window.replace("LOWER", dummy_txt)
            spectral_window = joiner.join(list(filter((dummy_txt).__ne__, spectral_window.split(joiner))))
        # Remove other redundant text from window name.
        redundant_txt = "WINDOW"
        if redundant_txt in spectral_window:
            spectral_window = joiner.join([comp for comp in spectral_window.split(joiner) if "WINDOW" not in comp])
        return spectral_window

    @property
    def detector(self):
        return self.get("DETECTOR")

    @property
    def instrument(self):
        return self.get("INSTRUME")

    @property
    def observatory(self):
        return self.get("OBSRVTRY")

    @property
    def processing_level(self):
        return self.get("LEVEL")

    @property
    def rsun_meters(self):
        return self._construct_quantity("RSUN_REF")

    @property
    def rsun_angular(self):
        return self._construct_quantity("RSUN_ARC")

    @property
    def spice_observation_id(self):
        return self.get("SPIOBSID")

    @property
    def observer_radial_velocity(self):
        return self._construct_quantity("OBS_VR")

    @property
    def distance_to_sun(self):
        return self._construct_quantity("DSUN_OBS")

    @property
    def date_reference(self):
        return self._construct_time("DATE-OBS")

    @property
    def date_start(self):
        return self._construct_time("DATE-BEG")

    @property
    def date_end(self):
        return self._construct_time("DATE-END")

    @property
    def observer_location(self):
        """Observer location as a `~astropy.coordinates.SkyCoord` in Heliographic Stonyhurst."""
        # Local import: sunpy is an optional dependency (pyproject "instr" extra).
        from sunpy.coordinates import HeliographicStonyhurst

        lon_unit = u.deg
        lat_unit = u.deg
        radius_unit = u.m
        lon_key = "HGLN_OBS"
        lat_key = "HGLT_OBS"
        kwargs = {
            "lon": u.Quantity(self.get(lon_key), unit=self._get_unit(lon_key)).to_value(lon_unit),
            "lat": u.Quantity(self.get(lat_key), unit=self._get_unit(lat_key)).to_value(lat_unit),
            "radius": self.distance_to_sun.to_value(radius_unit),
            "unit": (lon_unit, lat_unit, radius_unit),
            "frame": HeliographicStonyhurst,
        }
        return SkyCoord(obstime=self.date_reference, **kwargs)

    @property
    def version(self):
        return self.get("VERSION")

    # ---------- SPICE-specific metadata properties ----------
    @property
    def observing_mode_id_solar_orbiter(self):
        return self.get("OBS_ID")

    @property
    def darkmap_subtracted_onboard(self):
        return bool(self.get("DARKMAP"))

    @property
    def bias_frame_subtracted_onboard(self):
        return bool(self.get("BLACKLEV"))

    @property
    def window_type(self):
        return self.get("WIN_TYPE")

    @property
    def slit_id(self):
        return self.get("SLIT_ID")

    @property
    def slit_width(self):
        return self._construct_quantity("SLIT_WID")

    @property
    def contains_dumbbell(self):
        """True if DUMBBELL keyword is 1 (lower) or 2 (upper)."""
        return self.get("DUMBBELL") in [1, 2]

    @property
    def dumbbell_type(self):
        # NOTE(review): raises TypeError if the DUMBBELL keyword is absent
        # (``get`` returns None, which cannot index a list) — confirm keyword
        # is always present in SPICE L2 headers.
        dumbbell_types = [None, "lower", "upper"]
        dumbbell_idx = self.get("DUMBBELL")
        return dumbbell_types[dumbbell_idx]

    @property
    def solar_B0(self):
        """
        Tilt angle of solar north toward spacecraft.
        """
        return self._construct_quantity("SOLAR_B0")

    @property
    def solar_P0(self):
        """
        Angle from spacecraft celestial north to solar north.
        """
        return self._construct_quantity("SOLAR_P0")

    @property
    def solar_ep(self):
        """
        Angle from spacecraft ecliptic north to solar north angle.
        """
        return self._construct_quantity("SOLAR_EP")

    @property
    def carrington_rotation(self):
        """
        Carrington Rotation number of observation.
        """
        return self.get("CAR_ROT")

    @property
    def date_start_earth(self):
        """
        Time at which photons reaching SPICE at start time would have reached
        Earth.
        """
        return self._construct_time("DATE_EAR")

    @property
    def date_start_sun(self):
        """
        Time at which photons reaching SPICE at start time would have left Sun.

        The Sun is defined as the center of the Sun assuming photon was
        not impeded.
        """
        return self._construct_time("DATE_SUN")
DISTANCE_TO_SUN = (81342963151.0, "[m] S/C distance from Sun")
DATE_REFERENCE = ("2020-06-02T07:47:58.017", "[UTC] Equals DATE-BEG")
DATE_START = ("2020-06-02T07:47:58.017", "[UTC] Beginning of data acquisition")
DATE_END = ("2020-06-02T07:47:58.117", "[UTC] End of data acquisition")
HGLN_OBS = (35.8382263864, "[deg] S/C Heliographic longitude")
HGLT_OBS = (4.83881036748, "[deg] S/C Heliographic latitude (B0 angle)")
SPICE_OBSERVING_MODE_ID = (12583744, "SPICE Observation ID")
DARKMAP = (0, "If set, a dark map was subtracted on-board")
BLACKLEV = (0, "If set, a bias frame was subtracted on-board")
WINDOW_TYPE = ("Full Detector Narrow-slit", "Description of window type")
WINDOW_TABLE_ID = (255, "Index in on-board window data table (0-255)")
SLIT_ID = (2, "Slit ID (0-3)")
SLIT_WIDTH = (4, "[arcsec] Slit width")
DUMBBELL = (0, "0/1/2: not a dumbbell/lower dumbbel/upper dumbb")
SOLAR_B0 = (4.83881036748, "[deg] Tilt angle of Solar North toward S/C")
SOLAR_P0 = (1.49702480927, "[deg] S/C Celestial North to Solar North angle")
SOLAR_EP = (-6.14143491727, "[deg] S/C Ecliptic North to Solar North angle")
CARRINGTON_ROTATION_NUMBER = (2231, "Carrington rotation number")
DATE_START_EARTH = ("2020-06-02T07:51:52.799", "[UTC] DATE-BEG + EAR_TDEL")
DATE_START_SUN = ("2020-06-02T07:43:26.686", "[UTC] DATE-BEG - SUN_TIME")


@pytest.fixture
def spice_fits_header():
    """Build a minimal SPICE L2 FITS header from the module-level constants."""
    # (keyword, (value, comment)) pairs, appended in the original order.
    cards = [
        ("EXTNAME", SPECTRAL_WINDOW),
        ("DETECTOR", DETECTOR),
        ("INSTRUME", INSTRUMENT),
        ("OBSRVTRY", OBSERVATORY),
        ("LEVEL", PROCESSING_LEVEL),
        ("RSUN_REF", RSUN_METERS),
        ("RSUN_ARC", RSUN_ANGULAR),
        ("OBS_ID", OBSERVING_MODE_ID),
        ("OBS_VR", OBSERVATORY_RADIAL_VELOCITY),
        ("DSUN_OBS", DISTANCE_TO_SUN),
        ("DATE-OBS", DATE_REFERENCE),
        ("DATE-BEG", DATE_START),
        ("DATE-END", DATE_END),
        ("HGLN_OBS", HGLN_OBS),
        ("HGLT_OBS", HGLT_OBS),
        ("SPIOBSID", SPICE_OBSERVING_MODE_ID),
        ("DARKMAP", DARKMAP),
        ("BLACKLEV", BLACKLEV),
        ("WIN_TYPE", WINDOW_TYPE),
        ("WINTABID", WINDOW_TABLE_ID),
        ("SLIT_ID", SLIT_ID),
        ("SLIT_WID", SLIT_WIDTH),
        ("DUMBBELL", DUMBBELL),
        ("SOLAR_B0", SOLAR_B0),
        ("SOLAR_P0", SOLAR_P0),
        ("SOLAR_EP", SOLAR_EP),
        ("CAR_ROT", CARRINGTON_ROTATION_NUMBER),
        ("DATE_EAR", DATE_START_EARTH),
        ("DATE_SUN", DATE_START_SUN),
    ]
    hdr = fits.Header()
    for keyword, (value, comment) in cards:
        hdr.append((keyword, value, comment))
    return hdr


@pytest.fixture
def spice_meta(spice_fits_header):
    """Return a SPICEMeta built from the synthetic header and its comments."""
    return SPICEMeta(
        spice_fits_header,
        key_comments=dict(zip(spice_fits_header.keys(), spice_fits_header.comments)),
    )


@pytest.fixture
def spice_rasdb_filename(tmp_path):
    """
    Inserts data into a raster SPICE FITS file with dumbbells and returns new
    filename.

    A new FITS file is saved in a tmp file path.
    """
    rng_gen = np.random.default_rng()
    filename = "solo_L2_spice-n-ras-db_20200602T081733_V01_12583760-000.fits"
    with fits.open(TEST_DATA_PATH / filename) as hdulist:
        new_hdulist = fits.HDUList()
        # Replace the real data with small random arrays of the right shape.
        new_hdulist.append(fits.PrimaryHDU(rng_gen.random((1, 48, 832, 30)), header=hdulist[0].header))
        new_hdulist.append(fits.ImageHDU(rng_gen.random((1, 48, 832, 30)), header=hdulist[1].header))
        new_hdulist.append(fits.ImageHDU(rng_gen.random((1, 56, 64, 30)), header=hdulist[2].header))
        new_hdulist.append(fits.ImageHDU(rng_gen.random((1, 56, 64, 30)), header=hdulist[3].header))
        new_hdulist.append(hdulist[-1])
        tmp_spice_path = tmp_path / "spice"
        tmp_spice_path.mkdir(exist_ok=True)
        new_filename = tmp_spice_path / filename
        new_hdulist.writeto(new_filename, overwrite=True)
    return str(new_filename)


@pytest.fixture
def spice_sns_filename(tmp_path):
    """
    Inserts data into a sit-and-stare SPICE FITS file and returns new filename.

    A new FITS file is saved in a tmp file path.
    """
    rng_gen = np.random.default_rng()
    filename = "solo_L2_spice-n-sit_20200620T235901_V01_16777431-000.fits"
    with fits.open(TEST_DATA_PATH / filename) as hdulist:
        new_hdulist = fits.HDUList()
        new_hdulist.append(fits.PrimaryHDU(rng_gen.random((32, 48, 1024, 1)), header=hdulist[0].header))
        new_hdulist.append(fits.ImageHDU(rng_gen.random((32, 48, 1024, 1)), header=hdulist[1].header))
        new_hdulist.append(hdulist[-1])
        tmp_spice_path = tmp_path / "spice"
        tmp_spice_path.mkdir(exist_ok=True)
        new_filename = tmp_spice_path / filename
        new_hdulist.writeto(new_filename, output_verify="fix+ignore", overwrite=True)
    # Return a str for consistency with spice_rasdb_filename.
    return str(new_filename)


def _construct_expected_time(date_info):
    """Build a Time from a (value, comment) pair; the scale (e.g. "utc") is
    parsed from the leading "[UTC]" tag of the FITS comment."""
    return Time(date_info[0], format="fits", scale=date_info[1][1:4].lower())


def test_meta_spectral_window(spice_meta):
    assert spice_meta.spectral_window == SPECTRAL_WINDOW[0][8:]


def test_meta_detector(spice_meta):
    assert spice_meta.detector == DETECTOR[0]


def test_meta_instrument(spice_meta):
    assert spice_meta.instrument == INSTRUMENT[0]


def test_meta_observatory(spice_meta):
    assert spice_meta.observatory == OBSERVATORY[0]


def test_meta_processing_level(spice_meta):
    assert spice_meta.processing_level == PROCESSING_LEVEL[0]


def test_meta_rsun_meters(spice_meta):
    assert spice_meta.rsun_meters == RSUN_METERS[0] * u.m


def test_meta_rsun_angular(spice_meta):
    assert spice_meta.rsun_angular == RSUN_ANGULAR[0] * u.arcsec


def test_meta_spice_observation_id(spice_meta):
    assert spice_meta.spice_observation_id == SPICE_OBSERVING_MODE_ID[0]


def test_meta_observer_radial_velocity(spice_meta):
    assert spice_meta.observer_radial_velocity == OBSERVATORY_RADIAL_VELOCITY[0] * u.m / u.s
180 | 181 | def test_meta_distance_to_sun(spice_meta): 182 | assert spice_meta.distance_to_sun == DISTANCE_TO_SUN[0] * u.m 183 | 184 | 185 | def test_meta_date_reference(spice_meta): 186 | assert spice_meta.date_reference == _construct_expected_time(DATE_REFERENCE) 187 | 188 | 189 | def test_meta_date_start(spice_meta): 190 | assert spice_meta.date_start == _construct_expected_time(DATE_START) 191 | 192 | 193 | def test_meta_date_end(spice_meta): 194 | assert spice_meta.date_end == _construct_expected_time(DATE_END) 195 | 196 | 197 | def test_meta_observer_location(spice_meta): 198 | obstime = _construct_expected_time(DATE_REFERENCE) 199 | observer_location = SkyCoord( 200 | lon=HGLN_OBS[0], 201 | lat=HGLT_OBS[0], 202 | radius=DISTANCE_TO_SUN[0], 203 | unit=(u.deg, u.deg, u.m), 204 | obstime=obstime, 205 | frame=HeliographicStonyhurst, 206 | ) 207 | assert spice_meta.observer_location.lon == observer_location.lon 208 | assert spice_meta.observer_location.lat == observer_location.lat 209 | assert spice_meta.observer_location.radius == observer_location.radius 210 | assert spice_meta.observer_location.obstime == observer_location.obstime 211 | assert spice_meta.observer_location.frame.name == observer_location.frame.name 212 | 213 | 214 | def test_meta_observing_mode_id_solar_orbiter(spice_meta): 215 | assert spice_meta.observing_mode_id_solar_orbiter == OBSERVING_MODE_ID[0] 216 | 217 | 218 | def test_meta_darkmap_subtracted_onboard(spice_meta): 219 | assert spice_meta.darkmap_subtracted_onboard is False 220 | 221 | 222 | def test_meta_bias_frame_subtracted_onboard(spice_meta): 223 | assert spice_meta.bias_frame_subtracted_onboard is False 224 | 225 | 226 | def test_meta_window_type(spice_meta): 227 | assert spice_meta.window_type == WINDOW_TYPE[0] 228 | 229 | 230 | def test_meta_slit_id(spice_meta): 231 | assert spice_meta.slit_id == SLIT_ID[0] 232 | 233 | 234 | def test_meta_slit_width(spice_meta): 235 | assert spice_meta.slit_width == SLIT_WIDTH[0] * u.arcsec 236 
| 237 | 238 | def test_meta_contains_dumbbell(spice_meta): 239 | assert spice_meta.contains_dumbbell is False 240 | 241 | 242 | def test_meta_dumbbell_type(spice_meta): 243 | assert spice_meta.dumbbell_type is None 244 | 245 | 246 | def test_meta_solar_B0(spice_meta): 247 | assert spice_meta.solar_B0 == SOLAR_B0[0] * u.deg 248 | 249 | 250 | def test_meta_solar_P0(spice_meta): 251 | assert spice_meta.solar_P0 == SOLAR_P0[0] * u.deg 252 | 253 | 254 | def test_meta_solar_ep(spice_meta): 255 | assert spice_meta.solar_ep == SOLAR_EP[0] * u.deg 256 | 257 | 258 | def test_meta_carrington_rotation(spice_meta): 259 | assert spice_meta.carrington_rotation == CARRINGTON_ROTATION_NUMBER[0] 260 | 261 | 262 | def test_meta_date_start_earth(spice_meta): 263 | date_start_earth = _construct_expected_time(DATE_START_EARTH) 264 | assert spice_meta.date_start_earth == date_start_earth 265 | 266 | 267 | def test_meta_date_start_sun(spice_meta): 268 | date_start_sun = _construct_expected_time(DATE_START_SUN) 269 | assert spice_meta.date_start_sun == date_start_sun 270 | 271 | 272 | def test_read_spice_l2_fits_single_file_multiple_windows(spice_rasdb_filename): 273 | filename = spice_rasdb_filename 274 | result = read_spice_l2_fits(filename) 275 | assert isinstance(result, READ_SPICE_L2_FITS_RETURN_TYPE) 276 | assert set(result.aligned_axes.values()) == {(0, 2, 3)} 277 | assert len(result) == 2 278 | assert all(isinstance(window, SpectrogramCube) for window in result.values()) 279 | 280 | 281 | def test_read_spice_l2_fits_single_file_window(spice_rasdb_filename): 282 | filename = spice_rasdb_filename 283 | result = read_spice_l2_fits(filename, windows=["WINDOW0_70.51"]) 284 | assert isinstance(result, READ_SPICE_L2_FITS_RETURN_TYPE) 285 | assert result.aligned_axes is None 286 | assert len(result) == 1 287 | assert all(isinstance(window, SpectrogramCube) for window in result.values()) 288 | 289 | 290 | def test_read_spice_l2_fits_single_file_dumbbells(spice_rasdb_filename): 291 | 
filename = spice_rasdb_filename 292 | result = read_spice_l2_fits(filename, read_dumbbells=True) 293 | assert isinstance(result, READ_SPICE_L2_FITS_RETURN_TYPE) 294 | assert all(window.meta.contains_dumbbell for window in result.values()) 295 | assert set(result.aligned_axes.values()) == {tuple(range(4))} 296 | assert all(isinstance(window, SpectrogramCube) for window in result.values()) 297 | 298 | 299 | def test_read_spice_l2_fits_multiple_rasters_multiple_windows(spice_rasdb_filename): 300 | filenames = [spice_rasdb_filename] * 2 301 | result = read_spice_l2_fits(filenames) 302 | assert isinstance(result, READ_SPICE_L2_FITS_RETURN_TYPE) 303 | assert set(result.aligned_axes.values()) == {(0, 2, 3)} 304 | assert len(result) == 2 305 | for window in result.values(): 306 | assert isinstance(window, RasterSequence) 307 | data_length = window.shape[0] if hasattr(window, "shape") else window.dimensions[0].value 308 | assert data_length == len(filenames) 309 | 310 | 311 | def test_read_spice_l2_fits_multiple_rasters_single_window(spice_rasdb_filename): 312 | filenames = [spice_rasdb_filename] * 2 313 | result = read_spice_l2_fits(filenames, windows=["WINDOW0_70.51"]) 314 | assert isinstance(result, READ_SPICE_L2_FITS_RETURN_TYPE) 315 | assert result.aligned_axes is None 316 | assert len(result) == 1 317 | for window in result.values(): 318 | assert isinstance(window, RasterSequence) 319 | data_length = window.shape[0] if hasattr(window, "shape") else window.dimensions[0].value 320 | assert data_length == len(filenames) 321 | 322 | 323 | def test_read_spice_l2_fits_multiple_sns_multiple_windows(spice_sns_filename): 324 | filenames = [spice_sns_filename] * 2 325 | result = read_spice_l2_fits(filenames) 326 | assert isinstance(result, READ_SPICE_L2_FITS_RETURN_TYPE) 327 | assert set(result.aligned_axes.values()) == {(0, 2, 3)} 328 | assert len(result) == 2 329 | for window in result.values(): 330 | assert isinstance(window, SpectrogramSequence) 331 | data_length = 
window.shape[0] if hasattr(window, "shape") else window.dimensions[0].value 332 | assert data_length == len(filenames) 333 | 334 | 335 | def test_read_spice_l2_fits_multiple_files_dumbbells(spice_rasdb_filename): 336 | filenames = [spice_rasdb_filename] * 2 337 | result = read_spice_l2_fits(filenames, read_dumbbells=True) 338 | assert isinstance(result, READ_SPICE_L2_FITS_RETURN_TYPE) 339 | assert all(window[0].meta.contains_dumbbell for window in result.values()) 340 | assert set(result.aligned_axes.values()) == {tuple(range(4))} 341 | for window in result.values(): 342 | assert isinstance(window, SpectrogramSequence) 343 | data_length = window.shape[0] if hasattr(window, "shape") else window.dimensions[0].value 344 | assert data_length == len(filenames) 345 | 346 | 347 | def test_read_spice_l2_fits_incompatible_files(spice_rasdb_filename, spice_sns_filename): 348 | filenames = [spice_rasdb_filename, spice_sns_filename] 349 | with pytest.raises(ValueError, match="A"): 350 | read_spice_l2_fits(filenames) 351 | -------------------------------------------------------------------------------- /sunraster/meta.py: -------------------------------------------------------------------------------- 1 | import abc 2 | 3 | from ndcube.meta import NDMetaABC 4 | 5 | __all__ = ["MetaABC", "RemoteSensorMetaABC", "SlitSpectrographMetaABC"] 6 | 7 | 8 | class MetaABC(NDMetaABC): 9 | @property 10 | @abc.abstractmethod 11 | def detector(self): 12 | pass 13 | 14 | @property 15 | @abc.abstractmethod 16 | def instrument(self): 17 | pass 18 | 19 | @property 20 | @abc.abstractmethod 21 | def observatory(self): 22 | pass 23 | 24 | @property 25 | @abc.abstractmethod 26 | def processing_level(self): 27 | """ 28 | The level to which the data has been processed. 29 | """ 30 | 31 | @property 32 | @abc.abstractmethod 33 | def observer_location(self): 34 | """ 35 | Coordinate of observatory location based on header info. 
36 | """ 37 | 38 | @property 39 | @abc.abstractmethod 40 | def date_reference(self): 41 | """ 42 | The base time from which time axis values are measured. 43 | 44 | Often the same or very similar to date_start. 45 | """ 46 | 47 | @property 48 | @abc.abstractmethod 49 | def date_start(self): 50 | pass 51 | 52 | @property 53 | @abc.abstractmethod 54 | def date_end(self): 55 | pass 56 | 57 | @property 58 | @abc.abstractmethod 59 | def version(self): 60 | """ 61 | The data version. 62 | """ 63 | 64 | 65 | class RemoteSensorMetaABC(MetaABC): 66 | @property 67 | @abc.abstractmethod 68 | def rsun_meters(self): 69 | """ 70 | Solar radius in units of length. 71 | """ 72 | 73 | @property 74 | @abc.abstractmethod 75 | def rsun_angular(self): 76 | """ 77 | Solar radius in angular units as seen from observatory. 78 | """ 79 | 80 | @property 81 | @abc.abstractmethod 82 | def distance_to_sun(self): 83 | """ 84 | Distance to Sun center from observatory. 85 | """ 86 | 87 | 88 | class SlitSpectrographMetaABC(RemoteSensorMetaABC): 89 | @property 90 | @abc.abstractmethod 91 | def spectral_window(self): 92 | pass 93 | 94 | @property 95 | @abc.abstractmethod 96 | def observing_mode_id(self): 97 | """ 98 | Unique identifier for the observing mode. 99 | 100 | Often referred to as OBS ID. 101 | """ 102 | 103 | @property 104 | @abc.abstractmethod 105 | def observer_radial_velocity(self): 106 | """ 107 | Velocity of observatory in direction of source. 
108 | """ 109 | -------------------------------------------------------------------------------- /sunraster/spectrogram.py: -------------------------------------------------------------------------------- 1 | import abc 2 | import textwrap 3 | from copy import deepcopy 4 | 5 | import numpy as np 6 | 7 | import astropy.units as u 8 | from astropy.time import Time 9 | 10 | import ndcube.utils.wcs as nuw 11 | from ndcube import NDMeta 12 | from ndcube.ndcube import NDCube 13 | 14 | __all__ = ["SpectrogramABC","SpectrogramCube"] 15 | 16 | 17 | # Define some custom error messages. 18 | APPLY_EXPOSURE_TIME_ERROR = ( 19 | "Exposure time correction has probably already " 20 | "been applied since the unit already includes " 21 | "inverse time. To apply exposure time correction " 22 | "anyway, set 'force' kwarg to True." 23 | ) 24 | UNDO_EXPOSURE_TIME_ERROR = ( 25 | "Exposure time correction has probably already " 26 | "been undone since the unit does not include " 27 | "inverse time. To undo exposure time correction " 28 | "anyway, set 'force' kwarg to True." 29 | ) 30 | AXIS_NOT_FOUND_ERROR = "axis not found. If in extra_coords, axis name must be supported:" 31 | 32 | # Define supported coordinate names for coordinate properties. 
33 | SUPPORTED_LONGITUDE_NAMES = [ 34 | "custom:pos.helioprojective.lon", 35 | "pos.helioprojective.lon", 36 | "longitude", 37 | "lon", 38 | ] 39 | SUPPORTED_LONGITUDE_NAMES += [name.upper() for name in SUPPORTED_LONGITUDE_NAMES] + [ 40 | name.capitalize() for name in SUPPORTED_LONGITUDE_NAMES 41 | ] 42 | SUPPORTED_LONGITUDE_NAMES = np.array(SUPPORTED_LONGITUDE_NAMES) 43 | 44 | SUPPORTED_LATITUDE_NAMES = [ 45 | "custom:pos.helioprojective.lat", 46 | "pos.helioprojective.lat", 47 | "latitude", 48 | "lat", 49 | ] 50 | SUPPORTED_LATITUDE_NAMES += [name.upper() for name in SUPPORTED_LATITUDE_NAMES] + [ 51 | name.capitalize() for name in SUPPORTED_LATITUDE_NAMES 52 | ] 53 | SUPPORTED_LATITUDE_NAMES = np.array(SUPPORTED_LATITUDE_NAMES) 54 | 55 | SUPPORTED_SPECTRAL_NAMES = [ 56 | "em.wl", 57 | "em.energy", 58 | "em.freq", 59 | "wavelength", 60 | "energy", 61 | "frequency", 62 | "freq", 63 | "lambda", 64 | "spectral", 65 | ] 66 | SUPPORTED_SPECTRAL_NAMES += [name.upper() for name in SUPPORTED_SPECTRAL_NAMES] + [ 67 | name.capitalize() for name in SUPPORTED_SPECTRAL_NAMES 68 | ] 69 | SUPPORTED_SPECTRAL_NAMES = np.array(SUPPORTED_SPECTRAL_NAMES) 70 | 71 | SUPPORTED_TIME_NAMES = ["time"] 72 | SUPPORTED_TIME_NAMES += [name.upper() for name in SUPPORTED_TIME_NAMES] + [ 73 | name.capitalize() for name in SUPPORTED_TIME_NAMES 74 | ] 75 | SUPPORTED_TIME_NAMES = np.array(SUPPORTED_TIME_NAMES) 76 | 77 | SUPPORTED_EXPOSURE_NAMES = [ 78 | "exposure time", 79 | "exposure_time", 80 | "exposure times", 81 | "exposure_times", 82 | "exp time", 83 | "exp_time", 84 | "exp times", 85 | "exp_times", 86 | ] 87 | SUPPORTED_EXPOSURE_NAMES += [name.upper() for name in SUPPORTED_EXPOSURE_NAMES] + [ 88 | name.capitalize() for name in SUPPORTED_EXPOSURE_NAMES 89 | ] 90 | SUPPORTED_EXPOSURE_NAMES = np.array(SUPPORTED_EXPOSURE_NAMES) 91 | 92 | 93 | class SpectrogramABC(abc.ABC): 94 | # Abstract Base Class to define the basic API of Spectrogram classes. 
95 | 96 | @property 97 | @abc.abstractmethod 98 | def spectral_axis(self): 99 | """ 100 | Return the spectral coordinates for each pixel. 101 | """ 102 | 103 | @property 104 | @abc.abstractmethod 105 | def time(self): 106 | """ 107 | Return the time coordinates for each pixel. 108 | """ 109 | 110 | @property 111 | @abc.abstractmethod 112 | def exposure_time(self): 113 | """ 114 | Return the exposure time for each exposure. 115 | """ 116 | 117 | @property 118 | @abc.abstractmethod 119 | def celestial(self): 120 | """ 121 | Return the celestial coordinates for each pixel. 122 | """ 123 | 124 | @abc.abstractmethod 125 | def apply_exposure_time_correction(self, undo=False, force=False): 126 | """ 127 | Applies or undoes exposure time correction to data and uncertainty and 128 | adjusts unit. 129 | 130 | Correction is only applied (undone) if the object's unit doesn't (does) 131 | already include inverse time. This can be overridden so that correction 132 | is applied (undone) regardless of unit by setting force=True. 133 | 134 | Parameters 135 | ---------- 136 | undo: `bool` 137 | If False, exposure time correction is applied. 138 | If True, exposure time correction is undone. 139 | Default=False 140 | force: `bool` 141 | If not True, applies (undoes) exposure time correction only if unit 142 | doesn't (does) already include inverse time. 143 | If True, correction is applied (undone) regardless of unit. Unit is still 144 | adjusted accordingly. 145 | 146 | Returns 147 | ------- 148 | result: `sunraster.SpectrogramCube` 149 | New SpectrogramCube in new units. 150 | """ 151 | 152 | 153 | class SpectrogramCube(NDCube, SpectrogramABC): 154 | """ 155 | Class representing a sit-and-stare or single raster of slit spectrogram 156 | data. 157 | 158 | Must be described by a single WCS. 159 | 160 | Parameters 161 | ---------- 162 | data: `numpy.ndarray` 163 | The array holding the actual data in this object. 
    wcs: `astropy.wcs.WCS`
        The WCS object containing the axes' information
    unit : `astropy.units.Unit` or `str`, optional
        Unit for the dataset. Strings that can be converted to a Unit are allowed.
    uncertainty : any type, optional
        Uncertainty in the dataset. Should have an attribute uncertainty_type
        that defines what kind of uncertainty is stored, for example "std"
        for standard deviation or "var" for variance. A metaclass defining
        such an interface is NDUncertainty - but isn't mandatory. If the uncertainty
        has no such attribute the uncertainty is stored as UnknownUncertainty.
        Defaults to None.
    meta : `dict` object, optional
        Additional meta information about the dataset.
    mask : any type, optional
        Mask for the dataset. Masks should follow the numpy convention
        that valid data points are marked by False and invalid ones with True.
        Defaults to None.
    instrument_axes : list, optional
        This is the relationship between the array axes and the instrument,
        i.e. repeat raster axis, slit position, position along slit, and spectral.
        These are needed because they cannot be inferred simply from the physical types.
    copy : `bool`, optional
        Indicates whether to save the arguments as copy. True copies every attribute
        before saving it while False tries to save every parameter as reference.
        Note however that it is not always possible to save the input as reference.
        Default is False.

    Attributes
    ----------
    array_axis_physical_types
    axis_world_coords
    dimensions
    extra_coords
    meta
    pixel_to_world
    plot
    spectral
    uncertainty
    world_to_pixel
    """

    def __init__(
        self,
        data,
        wcs,
        unit=None,
        uncertainty=None,
        meta=None,
        mask=None,
        instrument_axes=None,
        copy=False,
        **kwargs,
    ):
        # Store instrument_axes in the metadata as an axis-aware entry so the
        # labels are sliced along with the data (NDMeta tracks the axes).
        if instrument_axes is not None:
            if len(instrument_axes) != data.ndim:
                raise ValueError("Length of instrument_axes must match number of data axes.")
            if meta is None:
                meta = NDMeta()
            if not isinstance(meta, NDMeta):
                meta = NDMeta(meta)
            meta.add("instrument_axes", np.asarray(instrument_axes, dtype=str), axes=np.arange(data.ndim, dtype=int), overwrite=True)

        super().__init__(
            data,
            wcs=wcs,
            uncertainty=uncertainty,
            mask=mask,
            meta=meta,
            unit=unit,
            copy=copy,
            **kwargs,
        )
        # Determine labels and location of each key real world coordinate.
        # Each _find_axis_name call returns (name, location) where location is
        # "wcs", "extra_coords" or "meta" (or (None, None) if absent); these
        # are cached and re-resolved lazily by the coordinate properties.
        self_extra_coords = self.extra_coords
        world_axis_physical_types = np.array(self.wcs.world_axis_physical_types)
        self._longitude_name, self._longitude_loc = _find_axis_name(
            SUPPORTED_LONGITUDE_NAMES,
            world_axis_physical_types,
            self_extra_coords,
            self.meta,
        )
        self._latitude_name, self._latitude_loc = _find_axis_name(
            SUPPORTED_LATITUDE_NAMES,
            world_axis_physical_types,
            self_extra_coords,
            self.meta,
        )
        self._spectral_name, self._spectral_loc = _find_axis_name(
            SUPPORTED_SPECTRAL_NAMES,
            world_axis_physical_types,
            self_extra_coords,
            self.meta,
        )
        self._time_name, self._time_loc = _find_axis_name(
            SUPPORTED_TIME_NAMES,
            world_axis_physical_types,
            self_extra_coords,
            self.meta,
        )
        self._exposure_time_name, self._exposure_time_loc = _find_axis_name(
            SUPPORTED_EXPOSURE_NAMES,
            world_axis_physical_types,
            self_extra_coords,
            self.meta,
        )

    def __str__(self):
        """Return a human-readable summary of the cube."""
        # Summarize the time range; a missing time coordinate raises a
        # ValueError containing AXIS_NOT_FOUND_ERROR, which we map to None.
        try:
            if self.time.isscalar:
                time_period = self.time
            else:
                times = self.time
                time_period = Time([times.min(), times.max()]).iso
        except ValueError as err:
            if AXIS_NOT_FOUND_ERROR in err.args[0]:
                time_period = None
            else:
                raise err
        # Summarize the celestial (lon/lat) extent; again a missing coordinate
        # is tolerated and reported as None.
        try:
            sc = self.celestial
            component_names = {item: key for key, item in sc.representation_component_names.items()}
            lon = getattr(sc, component_names["lon"])
            lat = getattr(sc, component_names["lat"])
            if sc.isscalar:
                lon_range = lon
                lat_range = lat
            elif sc.size == 0:
                lon_range = None
                lat_range = None
            else:
                lon_range = u.Quantity([lon.min(), lon.max()])
                lat_range = u.Quantity([lat.min(), lat.max()])
        except ValueError as err:
            if AXIS_NOT_FOUND_ERROR not in err.args[0]:
                raise err
            lon_range = None
            lat_range = None
        # Summarize the spectral extent.
        try:
            if self.spectral_axis.isscalar:
                spectral_range = self.spectral_axis
            else:
                spectral_range = u.Quantity([self.spectral_axis.min(), self.spectral_axis.max()])
        except ValueError as err:
            if AXIS_NOT_FOUND_ERROR in err.args[0]:
                spectral_range = None
            else:
                raise err
        return textwrap.dedent(
            f"""\
            {self.__class__.__name__}
            {"".join(["-"] * len(self.__class__.__name__))}
            Time Period: {time_period}
            Instrument axes: {self.instrument_axes}
            Pixel dimensions: {self.shape if hasattr(self, "shape") else self.dimensions.astype(int)}
            Longitude range: {lon_range}
            Latitude range: {lat_range}
            Spectral range: {spectral_range}
            Data unit: {self.unit}"""
        )

    def __repr__(self):
        # Combine the default object repr with the readable summary.
        return f"{object.__repr__(self)}\n{self!s}"

    @property
    def instrument_axes(self):
        """
        The relationship between the array axes and the instrument,
        i.e. repeat raster axis, slit position, position along slit, and spectral.
332 | """ 333 | return self.meta.get("instrument_axes") 334 | 335 | @property 336 | def spectral_axis(self): 337 | if not self._spectral_name: 338 | self._spectral_name, self._spectral_loc = _find_axis_name( 339 | SUPPORTED_SPECTRAL_NAMES, 340 | self.wcs.world_axis_physical_types, 341 | self.extra_coords, 342 | self.meta, 343 | ) 344 | if not self._spectral_name: 345 | raise ValueError(f"Spectral{AXIS_NOT_FOUND_ERROR}" + f"{SUPPORTED_SPECTRAL_NAMES}") 346 | return self._get_axis_coord(self._spectral_name, self._spectral_loc) 347 | 348 | @property 349 | def time(self): 350 | if not self._time_name: 351 | self._time_name, self._time_loc = _find_axis_name( 352 | SUPPORTED_TIME_NAMES, 353 | self.wcs.world_axis_physical_types, 354 | self.extra_coords, 355 | self.meta, 356 | ) 357 | if not self._time_name: 358 | raise ValueError(f"Time {AXIS_NOT_FOUND_ERROR} {SUPPORTED_TIME_NAMES}") 359 | return Time(self._get_axis_coord(self._time_name, self._time_loc)) 360 | 361 | @property 362 | def exposure_time(self): 363 | if not self._exposure_time_name or not hasattr(self, "_exposure_time_loc"): 364 | self._exposure_time_name, self._exposure_time_loc = _find_axis_name( 365 | SUPPORTED_EXPOSURE_NAMES, 366 | self.wcs.world_axis_physical_types, 367 | self.extra_coords, 368 | self.meta, 369 | ) 370 | if not self._exposure_time_name: 371 | raise ValueError(f"Exposure time {AXIS_NOT_FOUND_ERROR} {SUPPORTED_EXPOSURE_NAMES}") 372 | return self._get_axis_coord(self._exposure_time_name, self._exposure_time_loc) 373 | 374 | @property 375 | def celestial(self): 376 | if not self._longitude_name: 377 | self._longitude_name, self._longitude_loc = _find_axis_name( 378 | SUPPORTED_LONGITUDE_NAMES, 379 | self.wcs.world_axis_physical_types, 380 | self.extra_coords, 381 | self.meta, 382 | ) 383 | if not self._latitude_name: 384 | self._latitude_name, self._latitude_loc = _find_axis_name( 385 | SUPPORTED_LATITUDE_NAMES, 386 | self.wcs.world_axis_physical_types, 387 | self.extra_coords, 388 | 
self.meta, 389 | ) 390 | if self._longitude_name: 391 | celestial_name = self._longitude_name 392 | celestial_loc = self._longitude_loc 393 | elif self._latitude_name: 394 | celestial_name = self._latitude_name 395 | celestial_loc = self._latitude_loc 396 | else: 397 | raise ValueError( 398 | f"Celestial {AXIS_NOT_FOUND_ERROR} " 399 | f"{np.concatenate([SUPPORTED_LONGITUDE_NAMES, SUPPORTED_LATITUDE_NAMES])}" 400 | ) 401 | return self._get_axis_coord(celestial_name, celestial_loc) 402 | 403 | def apply_exposure_time_correction(self, undo=False, force=False): 404 | # Get exposure time in seconds. 405 | exposure_time_s = self.exposure_time.to(u.s).value 406 | # If exposure time is not scalar, change array's shape so that 407 | # it can be broadcast with data and uncertainty arrays. 408 | if not np.isscalar(exposure_time_s): 409 | (exposure_axis,) = self._get_axis_coord_index(self._exposure_time_name, self._exposure_time_loc) 410 | # Change array shape for broadcasting 411 | item = [np.newaxis] * self.data.ndim 412 | item[exposure_axis] = slice(None) 413 | exposure_time_s = exposure_time_s[tuple(item)] 414 | # Based on value on undo kwarg, apply or remove exposure time correction. 415 | if undo is True: 416 | new_data, new_uncertainty, new_unit = _uncalculate_exposure_time_correction( 417 | self.data, self.uncertainty, self.unit, exposure_time_s, force=force 418 | ) 419 | else: 420 | new_data, new_uncertainty, new_unit = _calculate_exposure_time_correction( 421 | self.data, self.uncertainty, self.unit, exposure_time_s, force=force 422 | ) 423 | # Return new instance of SpectrogramCube with correction applied/undone. 
424 | new_cube = deepcopy(self) 425 | new_cube._data = new_data 426 | new_cube._uncertainty = new_uncertainty 427 | new_cube._extra_coords = self.extra_coords 428 | new_cube._unit = new_unit 429 | return new_cube 430 | 431 | def _get_axis_coord(self, axis_name, coord_loc): 432 | if coord_loc == "wcs": 433 | return self.axis_world_coords(axis_name)[0] 434 | if coord_loc == "extra_coords": 435 | return self.axis_world_coords(wcs=self.extra_coords[axis_name])[0] 436 | if coord_loc == "global_coords": 437 | return self.global_coords[axis_name] 438 | if coord_loc == "meta": 439 | return self.meta[axis_name] 440 | raise ValueError(f"{coord_loc} is not a valid coordinate location.") 441 | 442 | def _get_axis_coord_index(self, axis_name, coord_loc): 443 | if coord_loc == "wcs": 444 | coord_pix_axes = nuw.physical_type_to_pixel_axes(axis_name, self.wcs) 445 | coord_array_axes = nuw.convert_between_array_and_pixel_axes(coord_pix_axes, len(self.dimensions)) 446 | return coord_array_axes.tolist()[0] 447 | if coord_loc == "extra_coords": 448 | return self.extra_coords[axis_name].mapping[0] 449 | if coord_loc == "meta": 450 | return self.meta.axes[axis_name] 451 | raise ValueError(f"{coord_loc} is not a valid coordinate location.") 452 | 453 | def _get_axis_coord_values(self, axis_name, coord_loc): 454 | if coord_loc == "wcs": 455 | return self.axis_world_coords_values(axis_name)[0] 456 | if coord_loc == "extra_coords": 457 | return self.axis_world_coords_values(wcs=self.extra_coords[axis_name])[0] 458 | if coord_loc == "global_coords": 459 | return self.global_coords[axis_name] 460 | raise ValueError(f"{coord_loc} is not a valid coordinate location.") 461 | 462 | 463 | def _find_axis_name(supported_names, world_axis_physical_types, extra_coords, meta): 464 | """ 465 | Finds name of a SpectrogramCube axis type from WCS and extra coords. 
466 | 467 | Parameters 468 | ---------- 469 | supported_names: 1D `numpy.ndarray` 470 | The names for the axis supported by `sunraster.SpectrogramCube`. 471 | world_axis_physical_types: 1D `numpy.ndarray` 472 | Output of SpectrogramCube.world_axis_physical_types converted to an array. 473 | extra_coords: `ndcube.ExtraCoords` or `None` 474 | Output of SpectrogramCube.extra_coords 475 | meta: Meta or `None` 476 | Output of SpectrogramCube.meta 477 | 478 | Returns 479 | ------- 480 | axis_name: `str` 481 | The coordinate name of the axis. 482 | loc: `str` 483 | The location where the coordinate is stored: "wcs" or "extra_coords". 484 | """ 485 | axis_name = None 486 | loc = None 487 | # Check WCS for axis name. 488 | axis_name = _find_name_in_array(supported_names, world_axis_physical_types) 489 | if axis_name: 490 | loc = "wcs" 491 | elif extra_coords: # If axis name not in WCS, check extra_coords. 492 | axis_name = _find_name_in_array(supported_names, np.array(list(extra_coords.keys()))) 493 | if axis_name: 494 | loc = "extra_coords" 495 | if axis_name is None and meta: # If axis name not in WCS, check meta. 496 | axis_name = _find_name_in_array(supported_names, np.array(list(meta.keys()))) 497 | if axis_name: 498 | loc = "meta" 499 | return axis_name, loc 500 | 501 | 502 | def _find_name_in_array(supported_names, names_array): 503 | name_index = np.isin(names_array, supported_names) 504 | if name_index.sum() > 0: 505 | name_index = np.arange(len(names_array))[name_index][0] 506 | return names_array[name_index] 507 | return None 508 | 509 | 510 | def _calculate_exposure_time_correction(data, uncertainty, unit, exposure_time, force=False): 511 | """ 512 | Applies exposure time correction to data and uncertainty arrays. 513 | 514 | Parameters 515 | ---------- 516 | data: `numpy.ndarray` 517 | Data array to be converted. 518 | uncertainty: `astropy.nddata.nduncertainty.NDUncertainty` 519 | The uncertainty of each element in data. 
520 | old_unit: `astropy.units.Unit` 521 | Unit of data arrays. 522 | exposure_time: `numpy.ndarray` 523 | Exposure time in seconds for each exposure in data arrays. 524 | 525 | Returns 526 | ------- 527 | new_data: `numpy.ndarray` 528 | Data array with exposure time corrected for. 529 | new_uncertainty: `astropy.nddata.nduncertainty.NDUncertainty` 530 | The uncertainty of each element in new_data. 531 | new_unit: `astropy.units.Unit` 532 | Unit of new_data array after exposure time correction. 533 | """ 534 | if force is not True and u.s in unit.decompose().bases: 535 | raise ValueError(APPLY_EXPOSURE_TIME_ERROR) 536 | # Else, either unit does not include inverse time and so 537 | # exposure does need to be applied, or 538 | # user has set force=True and wants the correction applied 539 | # regardless of the unit. 540 | new_data = data / exposure_time 541 | if uncertainty: 542 | uncertainty_unit = uncertainty.unit / u.s if uncertainty.unit else uncertainty.unit 543 | new_uncertainty = uncertainty.__class__(uncertainty.array / exposure_time, unit=uncertainty_unit) 544 | else: 545 | new_uncertainty = uncertainty 546 | new_unit = unit / u.s 547 | return new_data, new_uncertainty, new_unit 548 | 549 | 550 | def _uncalculate_exposure_time_correction(data, uncertainty, unit, exposure_time, force=False): 551 | """ 552 | Removes exposure time correction from data and uncertainty arrays. 553 | 554 | Parameters 555 | ---------- 556 | data: `numpy.ndarray` 557 | Data array to be converted. 558 | uncertainty: `astropy.nddata.nduncertainty.NDUncertainty` 559 | The uncertainty of each element in data. 560 | old_unit: `astropy.units.Unit` 561 | Unit of data arrays. 562 | exposure_time: `numpy.ndarray` 563 | Exposure time in seconds for each exposure in data arrays. 564 | 565 | Returns 566 | ------- 567 | new_data: `numpy.ndarray` 568 | Data array with exposure time corrected for. 
569 | new_uncertainty: `astropy.nddata.nduncertainty.NDUncertainty` 570 | The uncertainty of each element in new_data. 571 | new_unit: `astropy.units.Unit` 572 | Unit of new_data array after exposure time correction. 573 | """ 574 | if force is not True and u.s in (unit * u.s).decompose().bases: 575 | raise ValueError(UNDO_EXPOSURE_TIME_ERROR) 576 | # Else, either unit does include inverse time and so 577 | # exposure does need to be removed, or 578 | # user has set force=True and wants the correction removed 579 | # regardless of the unit. 580 | new_data = data * exposure_time 581 | if uncertainty: 582 | uncertainty_unit = uncertainty.unit * u.s if uncertainty.unit else uncertainty.unit 583 | new_uncertainty = uncertainty.__class__(uncertainty.array * exposure_time, unit=uncertainty_unit) 584 | else: 585 | new_uncertainty = uncertainty 586 | new_unit = unit * u.s 587 | return new_data, new_uncertainty, new_unit 588 | -------------------------------------------------------------------------------- /sunraster/spectrogram_sequence.py: -------------------------------------------------------------------------------- 1 | import numbers 2 | import textwrap 3 | 4 | import numpy as np 5 | 6 | import astropy.units as u 7 | from astropy.coordinates import SkyCoord 8 | from astropy.time import Time 9 | 10 | from ndcube import NDCubeSequence 11 | 12 | from sunraster.spectrogram import ( 13 | SUPPORTED_LATITUDE_NAMES, 14 | SUPPORTED_LONGITUDE_NAMES, 15 | SUPPORTED_SPECTRAL_NAMES, 16 | SUPPORTED_TIME_NAMES, 17 | SpectrogramABC, 18 | _find_axis_name, 19 | ) 20 | 21 | __all__ = ["SpectrogramSequence", "RasterSequence"] 22 | 23 | RASTER_AXIS_NAME = "raster scan" 24 | SNS_AXIS_NAME = "temporal" 25 | SLIT_STEP_AXIS_NAME = "slit step" 26 | SLIT_AXIS_NAME = "position along slit" 27 | SPECTRAL_AXIS_NAME = "spectral" 28 | 29 | 30 | class SpectrogramSequence(NDCubeSequence, SpectrogramABC): 31 | """ 32 | Class for holding, slicing and plotting a sequence of spectrogram cubes. 
33 | 34 | Spectrogram cubes can be 2D or higher. 35 | 36 | Parameters 37 | ---------- 38 | data_list: `list` 39 | List of `sunraster.SpectrogramCube` objects from the same spectral window and OBS ID. 40 | Must also contain the 'detector type' in its meta attribute. 41 | common_axis: `int` or `None` (optional) 42 | If the sequence axis is aligned with an axis of the component SpectrogramCube 43 | instances, e.g. Spectrogram cubes have a time dimension and are arranged within 44 | the sequence in chronological order, set this input to the axis number of the 45 | time axis within the cubes. 46 | Default=None implies there is no common axis. 47 | meta: `dict` or header object (optional) 48 | Metadata associated with the sequence. 49 | 50 | Attributes 51 | ---------- 52 | array_axis_physical_axis_types 53 | array_axis_physical_types 54 | axis_world_coords 55 | dimensions 56 | extra_coords 57 | meta 58 | pixel_to_world 59 | plot 60 | spectral 61 | uncertainty 62 | world_to_pixel 63 | """ 64 | 65 | def __init__(self, data_list, common_axis=None, meta=None): 66 | # Initialize Sequence. 
67 | super().__init__(data_list, common_axis=common_axis, meta=meta) 68 | 69 | @property 70 | def spectral_axis(self): 71 | return u.Quantity([raster.spectral_axis for raster in self.data]) 72 | 73 | @property 74 | def time(self): 75 | return Time(np.concatenate([raster.time for raster in self.data])) 76 | 77 | @property 78 | def exposure_time(self): 79 | exposure_type = type(self.data[0].exposure_time) 80 | exposure_time = np.concatenate([raster.exposure_time for raster in self.data]) 81 | try: 82 | return exposure_type(exposure_time) 83 | except Exception: # NOQA: BLE001 84 | return exposure_time 85 | 86 | @property 87 | def celestial(self): 88 | sc = SkyCoord([raster.celestial for raster in self.data]) 89 | sc_shape = list(sc.shape) 90 | sc_shape.insert(0, len(self.data)) 91 | sc_shape[1] = int(sc_shape[1] / sc_shape[0]) 92 | return sc.reshape(sc_shape) 93 | 94 | def apply_exposure_time_correction(self, undo=False, copy=False, force=False): 95 | """ 96 | Applies or undoes exposure time correction to data and uncertainty and 97 | adjusts unit. 98 | 99 | Correction is only applied (undone) if the object's unit doesn't (does) 100 | already include inverse time. This can be overridden so that correction 101 | is applied (undone) regardless of unit by setting force=True. 102 | 103 | Parameters 104 | ---------- 105 | undo: `bool` 106 | If False, exposure time correction is applied. 107 | If True, exposure time correction is removed. 108 | Default=False 109 | copy: `bool` 110 | If True a new instance with the converted data values is returned. 111 | If False, the current instance is overwritten. 112 | Default=False 113 | force: `bool` 114 | If not True, applies (undoes) exposure time correction only if unit 115 | doesn't (does) already include inverse time. 116 | If True, correction is applied (undone) regardless of unit. Unit is still 117 | adjusted accordingly. 
118 | 119 | Returns 120 | ------- 121 | `None` or `sunraster.SpectrogramSequence` 122 | If copy=False, the original input is modified with the 123 | exposure time correction applied (undone). 124 | If copy=True, a new `sunraster.SpectrogramSequence` is returned with the correction 125 | applied (undone). 126 | """ 127 | converted_data_list = [cube.apply_exposure_time_correction(undo=undo, force=force) for cube in self.data] 128 | if copy is True: 129 | return self.__class__(converted_data_list, meta=self.meta, common_axis=self._common_axis) 130 | self.data = converted_data_list 131 | return None 132 | 133 | def __str__(self): 134 | data0 = self.data[0] 135 | if not (data0._time_name and data0._longitude_name and data0._latitude_name and data0._spectral_name): 136 | for i, cube in enumerate(self): 137 | self.data[i]._time_name, self.data[i]._time_loc = _find_axis_name( 138 | SUPPORTED_TIME_NAMES, 139 | cube.wcs.world_axis_physical_types, 140 | cube.extra_coords, 141 | cube.meta, 142 | ) 143 | ( 144 | self.data[i]._longitude_name, 145 | self.data[i]._longitude_loc, 146 | ) = _find_axis_name( 147 | SUPPORTED_LONGITUDE_NAMES, 148 | cube.wcs.world_axis_physical_types, 149 | cube.extra_coords, 150 | cube.meta, 151 | ) 152 | ( 153 | self.data[i]._latitude_name, 154 | self.data[i]._latitude_loc, 155 | ) = _find_axis_name( 156 | SUPPORTED_LATITUDE_NAMES, 157 | cube.wcs.world_axis_physical_types, 158 | cube.extra_coords, 159 | cube.meta, 160 | ) 161 | ( 162 | self.data[i]._spectral_name, 163 | self.data[i]._spectral_loc, 164 | ) = _find_axis_name( 165 | SUPPORTED_SPECTRAL_NAMES, 166 | cube.wcs.world_axis_physical_types, 167 | cube.extra_coords, 168 | cube.meta, 169 | ) 170 | data0 = self.data[0] 171 | if data0._time_name: 172 | start_time = data0.time if data0.time.isscalar else data0.time.squeeze()[0] 173 | data_1 = self.data[-1] 174 | stop_time = data_1.time if data_1.time.isscalar else data_1.time.squeeze()[-1] 175 | time_period = start_time if start_time == stop_time 
else Time([start_time.iso, stop_time.iso]) 176 | else: 177 | time_period = None 178 | if data0._longitude_name or data0._latitude_name: 179 | sc = self.celestial 180 | component_names = {item: key for key, item in sc.representation_component_names.items()} 181 | lon = getattr(sc, component_names["lon"]) 182 | lat = getattr(sc, component_names["lat"]) 183 | if sc.isscalar: 184 | lon_range = lon 185 | lat_range = lat 186 | else: 187 | lon_range = u.Quantity([lon.min(), lon.max()]) 188 | lat_range = u.Quantity([lat.min(), lat.max()]) 189 | if lon_range[0] == lon_range[1]: 190 | lon_range = lon_range[0] 191 | if lat_range[0] == lat_range[1]: 192 | lat_range = lat_range[0] 193 | else: 194 | lon_range = None 195 | lat_range = None 196 | if data0._spectral_name: 197 | spectral_vals = self.spectral_axis 198 | spectral_min = spectral_vals.min() 199 | spectral_max = spectral_vals.max() 200 | spectral_range = spectral_min if spectral_min == spectral_max else u.Quantity([spectral_min, spectral_max]) 201 | else: 202 | spectral_range = None 203 | return textwrap.dedent( 204 | f"""\ 205 | {self.__class__.__name__} 206 | {"".join(["-"] * len(self.__class__.__name__))} 207 | Time Range: {time_period} 208 | Pixel Dimensions: {self.shape if hasattr(self, "shape") else self.dimensions.astype(int)} 209 | Longitude range: {lon_range} 210 | Latitude range: {lat_range} 211 | Spectral range: {spectral_range} 212 | Data unit: {self.data[0].unit}""" 213 | ) 214 | 215 | def __repr__(self): 216 | return f"{object.__repr__(self)}\n{self!s}" 217 | 218 | 219 | class RasterSequence(SpectrogramSequence): 220 | """ 221 | Class for holding, slicing and plotting series of spectrograph raster 222 | scans. 223 | 224 | Parameters 225 | ---------- 226 | data_list: `list` 227 | List of `sunraster.SpectrogramCube` objects from the same spectral window and OBS ID. 228 | Must also contain the 'detector type' in its meta attribute. 
229 | common_axis: `int` 230 | The axis of the SpectrogramCube instances corresponding to the slit step axis. 231 | meta: `dict` or header object (optional) 232 | Metadata associated with the sequence. 233 | 234 | Attributes 235 | ---------- 236 | celestial 237 | exposure_time 238 | spectral 239 | time 240 | """ 241 | 242 | def __init__(self, data_list, common_axis, meta=None): 243 | # Initialize Sequence. 244 | super().__init__(data_list, common_axis=common_axis, meta=meta) 245 | 246 | # Determine axis indices of instrument axis types. 247 | self._raster_axis_name = RASTER_AXIS_NAME 248 | self._sns_axis_name = SNS_AXIS_NAME 249 | self._slit_step_axis_name = SLIT_STEP_AXIS_NAME 250 | self._slit_axis_name = SLIT_AXIS_NAME 251 | self._spectral_axis_name = SPECTRAL_AXIS_NAME 252 | self._set_single_scan_instrument_axes_types() 253 | 254 | raster_dimensions = SpectrogramSequence.shape 255 | sns_dimensions = SpectrogramSequence.cube_like_shape 256 | raster_array_axis_physical_types = SpectrogramSequence.array_axis_physical_types 257 | sns_array_axis_physical_types = SpectrogramSequence.cube_like_array_axis_physical_types 258 | raster_axis_coords = SpectrogramSequence.sequence_axis_coords 259 | sns_axis_coords = SpectrogramSequence.common_axis_coords 260 | plot_as_raster = SpectrogramSequence.plot 261 | plot_as_sns = SpectrogramSequence.plot_as_cube 262 | 263 | def _set_single_scan_instrument_axes_types(self): 264 | if len(self.data) < 1: 265 | self._single_scan_instrument_axes_types = np.empty((0,), dtype=object) 266 | else: 267 | self._single_scan_instrument_axes_types = np.empty(self.data[0].data.ndim, dtype=object) 268 | # Slit step axis name. 269 | if self._common_axis is not None: 270 | self._single_scan_instrument_axes_types[self._common_axis] = self._slit_step_axis_name 271 | # Spectral axis name. 272 | # If spectral name not present in raster cube, try finding it. 
273 | if not self.data[0]._spectral_name: 274 | for i, cube in enumerate(self): 275 | ( 276 | self.data[i]._spectral_name, 277 | self.data[i]._spectral_name, 278 | ) = _find_axis_name( 279 | SUPPORTED_SPECTRAL_NAMES, 280 | cube.wcs.world_axis_physical_types, 281 | cube.extra_coords, 282 | cube.meta, 283 | ) 284 | spectral_name = self.data[0]._spectral_name 285 | array_axis_physical_types = self.data[0].array_axis_physical_types 286 | spectral_raster_index = [physical_type == (spectral_name,) for physical_type in array_axis_physical_types] 287 | spectral_raster_index = np.arange(self.data[0].data.ndim)[spectral_raster_index] 288 | if len(spectral_raster_index) == 1: 289 | self._single_scan_instrument_axes_types[spectral_raster_index] = self._spectral_axis_name 290 | # Slit axis name. 291 | w = self._single_scan_instrument_axes_types == None # NOQA: E711 292 | if w.sum() > 1: 293 | raise ValueError( 294 | "Unable to parse the WCS or common_axis to work out either or both the slit-step axis nor the spectral (aka the slit) axis." 295 | ) 296 | self._single_scan_instrument_axes_types[w] = self._slit_axis_name 297 | # Remove any instrument axes types whose axes are missing. 298 | self._single_scan_instrument_axes_types.astype(str) 299 | 300 | @property 301 | def slice_as_sns(self): 302 | """ 303 | Method to slice instance as though data were taken as a sit-and-stare, 304 | i.e. slit position and raster number are combined into a single axis. 305 | """ 306 | return _snsSlicer(self) 307 | 308 | @property 309 | def slice_as_raster(self): 310 | """ 311 | Method to slice instance as though data were 4D, i.e. raster number, 312 | slit step position, position along slit, wavelength. 313 | """ 314 | return _SequenceSlicer(self) 315 | 316 | def __getitem__(self, item): 317 | result = super().__getitem__(item) 318 | if isinstance(result, self.__class__): 319 | # If slit step axis sliced out, return SpectrogramSequence 320 | # as the spectrogram cubes no longer represent a raster. 
321 | if len(item) > self._common_axis and isinstance(item[1:][self._common_axis], numbers.Integral): 322 | result = SpectrogramSequence(result.data, common_axis=None, meta=result.meta) 323 | else: 324 | # Else, slice the instrument axis types accordingly. 325 | result._set_single_scan_instrument_axes_types() 326 | # result._single_scan_instrument_axes_types = _slice_scan_axis_types( 327 | # self._single_scan_instrument_axes_types, item[1:]) 328 | 329 | return result 330 | 331 | @property 332 | def raster_instrument_axes_types(self): 333 | return (self._raster_axis_name, *list(self._single_scan_instrument_axes_types)) 334 | 335 | @property 336 | def sns_instrument_axes_types(self): 337 | return ( 338 | self._sns_axis_name, 339 | *list( 340 | self._single_scan_instrument_axes_types[ 341 | self._single_scan_instrument_axes_types != self._slit_step_axis_name 342 | ] 343 | ), 344 | ) 345 | 346 | 347 | class _snsSlicer: 348 | """ 349 | Helper class to make slicing in index_as_cube sliceable/indexable like a 350 | numpy array. 351 | 352 | Parameters 353 | ---------- 354 | seq : `ndcube.NDCubeSequence` 355 | Object of NDCubeSequence. 356 | """ 357 | 358 | def __init__(self, seq): 359 | self.seq = seq 360 | 361 | def __getitem__(self, item): 362 | result = self.seq.index_as_cube[item] 363 | if isinstance(item, tuple) and not isinstance(item[0], numbers.Integral): 364 | result._set_single_scan_instrument_axes_types() 365 | # result._single_scan_instrument_axes_types = _slice_scan_axis_types( 366 | # self.seq._single_scan_instrument_axes_types, item) 367 | return result 368 | 369 | 370 | class _SequenceSlicer: 371 | def __init__(self, seq): 372 | self.seq = seq 373 | 374 | def __getitem__(self, item): 375 | return self.seq[item] 376 | 377 | 378 | def _slice_scan_axis_types(single_scan_axes_types, item): 379 | """ 380 | Updates RasterSequence._single_scan_axes_types according to slicing. 
381 | 382 | Parameters 383 | ---------- 384 | single_scan_axes_types: `numpy.ndarray` 385 | Value of RasterSequence._single_scan_axes_types, 386 | i.e. array of strings giving type of each axis. 387 | item: `int`, `slice` or `tuple` of `slice`s. 388 | The slicing item that get applied to the Raster instances within the RasterSequences. 389 | 390 | Returns 391 | ------- 392 | new_single_scan_axes_types: `numpy.ndarray` 393 | Update value of axis types with which to replace RasterSequence._single_scan_axes_types. 394 | """ 395 | # Get boolean axes indices of axis items that aren't int, 396 | # i.e. axes that are not sliced away. 397 | not_int_axis_items = [not isinstance(axis_item, numbers.Integral) for axis_item in item] 398 | # Add boolean indices for axes not included in item. 399 | not_int_axis_items += [True] * (len(single_scan_axes_types) - len(not_int_axis_items)) 400 | return single_scan_axes_types[np.array(not_int_axis_items)] 401 | -------------------------------------------------------------------------------- /sunraster/tests/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module contains package tests. 
3 | """ 4 | 5 | from pathlib import Path 6 | 7 | import sunraster 8 | 9 | __all__ = ["TEST_DATA_PATH"] 10 | 11 | 12 | TEST_DATA_PATH = Path(sunraster.__file__).parent / "tests" / "data" 13 | -------------------------------------------------------------------------------- /sunraster/tests/data/solo_L2_spice-n-ras-db_20200602T081733_V01_12583760-000.fits: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunpy/sunraster/b0f8fcd5673283a4424b5693f3473c87ba429d1d/sunraster/tests/data/solo_L2_spice-n-ras-db_20200602T081733_V01_12583760-000.fits -------------------------------------------------------------------------------- /sunraster/tests/data/solo_L2_spice-n-sit_20200620T235901_V01_16777431-000.fits: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sunpy/sunraster/b0f8fcd5673283a4424b5693f3473c87ba429d1d/sunraster/tests/data/solo_L2_spice-n-sit_20200620T235901_V01_16777431-000.fits -------------------------------------------------------------------------------- /sunraster/tests/test_spectrogram.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pytest 3 | 4 | import astropy.units as u 5 | from astropy.time import Time, TimeDelta 6 | from astropy.wcs import WCS 7 | 8 | from ndcube.meta import NDMeta 9 | from ndcube.tests.helpers import assert_cubes_equal 10 | 11 | import sunraster.spectrogram 12 | from sunraster import SpectrogramCube 13 | 14 | # Define a sample wcs object 15 | H0 = { 16 | "CTYPE1": "WAVE ", 17 | "CUNIT1": "Angstrom", 18 | "CDELT1": 0.2, 19 | "CRPIX1": 0, 20 | "CRVAL1": 10, 21 | "NAXIS1": 3, 22 | "CTYPE2": "HPLT-TAN", 23 | "CUNIT2": "deg", 24 | "CDELT2": 0.5, 25 | "CRPIX2": 2, 26 | "CRVAL2": 0.5, 27 | "NAXIS2": 2, 28 | "CTYPE3": "HPLN-TAN", 29 | "CUNIT3": "deg", 30 | "CDELT3": 0.4, 31 | "CRPIX3": 2, 32 | "CRVAL3": 1, 33 | "NAXIS3": 2, 34 | } 35 | WCS0 = 
WCS(header=H0, naxis=3) 36 | 37 | H_NO_COORDS = { 38 | "CTYPE1": "PIX ", 39 | "CUNIT1": "", 40 | "CDELT1": 1, 41 | "CRPIX1": 0, 42 | "CRVAL1": 0, 43 | "NAXIS1": 3, 44 | "CTYPE2": "PIX ", 45 | "CUNIT2": "", 46 | "CDELT2": 1, 47 | "CRPIX2": 0, 48 | "CRVAL2": 0, 49 | "NAXIS2": 3, 50 | "CTYPE3": "PIX ", 51 | "CUNIT3": "", 52 | "CDELT3": 1, 53 | "CRPIX3": 0, 54 | "CRVAL3": 0, 55 | "NAXIS3": 3, 56 | } 57 | WCS_NO_COORDS = WCS(header=H_NO_COORDS, naxis=3) 58 | 59 | SOURCE_DATA_DN = np.array( 60 | [ 61 | [[0.563, 1.132, -1.343], [-0.719, 1.441, 1.566]], 62 | [[0.563, 1.132, -1.343], [-0.719, 1.441, 1.566]], 63 | ] 64 | ) 65 | SOURCE_UNCERTAINTY_DN = np.sqrt(SOURCE_DATA_DN) 66 | MASK = SOURCE_DATA_DN > 1 67 | TIME_DIM_LEN = SOURCE_DATA_DN.shape[0] 68 | SINGLES_EXPOSURE_TIME = 2.0 69 | EXPOSURE_TIME = u.Quantity(np.zeros(TIME_DIM_LEN) + SINGLES_EXPOSURE_TIME, unit=u.s) 70 | EXTRA_COORDS0 = [ 71 | ("time", 0, Time("2017-01-01") + TimeDelta(np.arange(TIME_DIM_LEN), format="sec")), 72 | ] 73 | EXTRA_COORDS1 = [ 74 | ( 75 | "time", 76 | 0, 77 | (Time("2017-01-01") + TimeDelta(np.arange(TIME_DIM_LEN, TIME_DIM_LEN * 2), format="sec")), 78 | ), 79 | ] 80 | meta_exposure0 = NDMeta({"exposure time": EXPOSURE_TIME}, axes={"exposure time": 0}, data_shape=SOURCE_DATA_DN.shape) 81 | 82 | spectrogram_DN0 = SpectrogramCube( 83 | SOURCE_DATA_DN, wcs=WCS0, unit=u.ct, uncertainty=SOURCE_UNCERTAINTY_DN, meta=meta_exposure0 84 | ) 85 | spectrogram_DN0.extra_coords.add(*EXTRA_COORDS0[0]) 86 | spectrogram_DN_per_s0 = SpectrogramCube( 87 | SOURCE_DATA_DN / SINGLES_EXPOSURE_TIME, 88 | wcs=WCS0, 89 | unit=u.ct / u.s, 90 | uncertainty=SOURCE_UNCERTAINTY_DN / SINGLES_EXPOSURE_TIME, 91 | meta=meta_exposure0, 92 | ) 93 | spectrogram_DN_per_s0.extra_coords.add(*EXTRA_COORDS0[0]) 94 | spectrogram_DN_per_s_per_s0 = SpectrogramCube( 95 | SOURCE_DATA_DN / SINGLES_EXPOSURE_TIME / SINGLES_EXPOSURE_TIME, 96 | wcs=WCS0, 97 | unit=u.ct / u.s / u.s, 98 | uncertainty=SOURCE_UNCERTAINTY_DN / SINGLES_EXPOSURE_TIME / 
SINGLES_EXPOSURE_TIME, 99 | meta=meta_exposure0, 100 | ) 101 | spectrogram_DN_per_s_per_s0.extra_coords.add(*EXTRA_COORDS0[0]) 102 | spectrogram_DN_s0 = SpectrogramCube( 103 | SOURCE_DATA_DN * SINGLES_EXPOSURE_TIME, 104 | wcs=WCS0, 105 | unit=u.ct * u.s, 106 | uncertainty=SOURCE_UNCERTAINTY_DN * SINGLES_EXPOSURE_TIME, 107 | meta=meta_exposure0, 108 | ) 109 | spectrogram_DN_s0.extra_coords.add(*EXTRA_COORDS0[0]) 110 | spectrogram_DN1 = SpectrogramCube( 111 | SOURCE_DATA_DN, wcs=WCS0, unit=u.ct, uncertainty=SOURCE_UNCERTAINTY_DN, meta=meta_exposure0 112 | ) 113 | spectrogram_DN1.extra_coords.add(*EXTRA_COORDS1[0]) 114 | spectrogram_DN_per_s1 = SpectrogramCube( 115 | SOURCE_DATA_DN / SINGLES_EXPOSURE_TIME, 116 | wcs=WCS0, 117 | unit=u.ct / u.s, 118 | uncertainty=SOURCE_UNCERTAINTY_DN / SINGLES_EXPOSURE_TIME, 119 | meta=meta_exposure0, 120 | ) 121 | spectrogram_DN_per_s1.extra_coords.add(*EXTRA_COORDS1[0]) 122 | spectrogram_DN_per_s_per_s1 = SpectrogramCube( 123 | SOURCE_DATA_DN / SINGLES_EXPOSURE_TIME / SINGLES_EXPOSURE_TIME, 124 | wcs=WCS0, 125 | unit=u.ct / u.s / u.s, 126 | uncertainty=SOURCE_UNCERTAINTY_DN / SINGLES_EXPOSURE_TIME / SINGLES_EXPOSURE_TIME, 127 | meta=meta_exposure0, 128 | ) 129 | spectrogram_DN_per_s_per_s1.extra_coords.add(*EXTRA_COORDS1[0]) 130 | spectrogram_DN_s1 = SpectrogramCube( 131 | SOURCE_DATA_DN * SINGLES_EXPOSURE_TIME, 132 | wcs=WCS0, 133 | unit=u.ct * u.s, 134 | uncertainty=SOURCE_UNCERTAINTY_DN * SINGLES_EXPOSURE_TIME, 135 | meta=meta_exposure0, 136 | ) 137 | spectrogram_DN_s1.extra_coords.add(*EXTRA_COORDS1[0]) 138 | spectrogram_NO_COORDS = SpectrogramCube(SOURCE_DATA_DN, WCS_NO_COORDS) 139 | spectrogram_instrument_axes = SpectrogramCube( 140 | SOURCE_DATA_DN, 141 | wcs=WCS0, 142 | unit=u.ct, 143 | uncertainty=SOURCE_UNCERTAINTY_DN, 144 | mask=MASK, 145 | instrument_axes=np.asanyarray(("a", "b", "c")), 146 | meta=meta_exposure0, 147 | ) 148 | spectrogram_instrument_axes.extra_coords.add(*EXTRA_COORDS0[0]) 149 | 150 | 151 | def 
test_spectral_axis(): 152 | assert all(spectrogram_DN0.spectral_axis == spectrogram_DN0.axis_world_coords("em.wl")[0]) 153 | 154 | 155 | def test_spectral_axis_error(): 156 | with pytest.raises(ValueError, match="A"): 157 | spectrogram_NO_COORDS.spectral_axis 158 | 159 | 160 | def test_time(): 161 | assert all(spectrogram_DN0.time == EXTRA_COORDS0[0][2]) 162 | 163 | 164 | def test_time_error(): 165 | with pytest.raises(ValueError, match="Time axis not found. If in extra_coords, axis"): 166 | spectrogram_NO_COORDS.time 167 | 168 | 169 | def test_exposure_time(): 170 | assert all(spectrogram_DN0.exposure_time == EXPOSURE_TIME) 171 | 172 | 173 | def test_exposure_time_error(): 174 | with pytest.raises(ValueError, match="Exposure time axis not found."): 175 | spectrogram_NO_COORDS.exposure_time 176 | 177 | 178 | @pytest.mark.parametrize( 179 | ("input_cube", "undo", "force", "expected_cube"), 180 | [ 181 | (spectrogram_DN0, False, False, spectrogram_DN_per_s0), 182 | (spectrogram_DN_per_s0, True, False, spectrogram_DN0), 183 | (spectrogram_DN_per_s0, False, True, spectrogram_DN_per_s_per_s0), 184 | (spectrogram_DN0, True, True, spectrogram_DN_s0), 185 | ], 186 | ) 187 | def test_apply_exposure_time_correction(input_cube, undo, force, expected_cube): 188 | output_cube = input_cube.apply_exposure_time_correction(undo=undo, force=force) 189 | assert_cubes_equal(output_cube, expected_cube) 190 | 191 | 192 | def test_calculate_exposure_time_correction_error(): 193 | with pytest.raises(ValueError, match="Exposure time correction has probably already been "): 194 | sunraster.spectrogram._calculate_exposure_time_correction(SOURCE_DATA_DN, None, u.s, EXPOSURE_TIME) 195 | 196 | 197 | def test_uncalculate_exposure_time_correction_error(): 198 | with pytest.raises(ValueError, match="Exposure time correction has probably already been undone since"): 199 | sunraster.spectrogram._uncalculate_exposure_time_correction(SOURCE_DATA_DN, None, u.ct, EXPOSURE_TIME) 200 | 201 | 202 | 
@pytest.mark.parametrize(
    ("item", "expected"),
    [
        (0, ["b", "c"]),
        (slice(0, 1), ["a", "b", "c"]),
        ((slice(None), 0), ["a", "c"]),
        ((slice(None), slice(None), slice(0, 1)), ["a", "b", "c"]),
    ],
)
def test_instrument_axes_slicing(item, expected):
    # Integer indexing should drop the corresponding instrument axis label;
    # slice indexing should keep all labels.
    sliced_cube = spectrogram_instrument_axes[item]
    assert all(sliced_cube.instrument_axes == expected)


def test_components_after_slicing():
    """
    Tests all cube components are correctly propagated by slicing.
    """
    item = tuple([slice(0, 1)] * 3)
    sliced_cube = spectrogram_instrument_axes[item]

    # Build the expected cube by slicing each component independently.
    data = spectrogram_instrument_axes.data[item]
    uncertainty = spectrogram_instrument_axes.uncertainty[item]
    mask = spectrogram_instrument_axes.mask[item]
    extra_coords = list(EXTRA_COORDS0)
    ec_axis = 0
    ec0 = list(extra_coords[0])
    ec0[-1] = ec0[-1][item[ec_axis]]  # slice the time coord along its axis
    wcs = spectrogram_instrument_axes.wcs[item]
    expected_cube = SpectrogramCube(
        data=data,
        wcs=wcs,
        uncertainty=uncertainty,
        mask=mask,
        meta=sliced_cube.meta,
        unit=spectrogram_instrument_axes.unit,
        instrument_axes=spectrogram_instrument_axes.instrument_axes,
    )
    expected_cube.extra_coords.add(*ec0)
    # Smoke-test the string representations before the full comparison.
    assert str(sliced_cube)
    assert str(expected_cube)

    assert_cubes_equal(sliced_cube, expected_cube)
--------------------------------------------------------------------------------
/sunraster/tests/test_spectrogramsequence.py:
--------------------------------------------------------------------------------
import numpy as np
import pytest

import astropy.units as u
from astropy.time import Time, TimeDelta
from astropy.wcs import WCS

from ndcube.meta import NDMeta
from ndcube.tests.helpers import assert_cubesequences_equal

from sunraster import RasterSequence, SpectrogramCube, SpectrogramSequence

# Define sample WCS objects.
H0 = {
    "CTYPE1": "WAVE ",
    "CUNIT1": "Angstrom",
    "CDELT1": 0.2,
    "CRPIX1": 0,
    "CRVAL1": 10,
    "NAXIS1": 3,
    "CTYPE2": "HPLT-TAN",
    "CUNIT2": "deg",
    "CDELT2": 0.5,
    "CRPIX2": 2,
    "CRVAL2": 0.5,
    "NAXIS2": 2,
    "CTYPE3": "HPLN-TAN",
    "CUNIT3": "deg",
    "CDELT3": 0.4,
    "CRPIX3": 2,
    "CRVAL3": 1,
    "NAXIS3": 2,
}
WCS0 = WCS(header=H0, naxis=3)

# WCS with the spectral axis last instead of first (axes order swapped vs H0).
h2 = {
    "CTYPE1": "HPLN-TAN",
    "CUNIT1": "deg",
    "CDELT1": 0.2,
    "CRPIX1": 0,
    "CRVAL1": 10,
    "NAXIS1": 3,
    "CTYPE2": "HPLT-TAN",
    "CUNIT2": "deg",
    "CDELT2": 0.5,
    "CRPIX2": 2,
    "CRVAL2": 0.5,
    "NAXIS2": 2,
    "CTYPE3": "WAVE ",
    "CUNIT3": "Angstrom",
    "CDELT3": 0.4,
    "CRPIX3": 2,
    "CRVAL3": 1,
    "NAXIS3": 2,
}
wcs2 = WCS(header=h2, naxis=3)

# WCS with no spectral axis.
h_no_wave = {
    "CTYPE1": "HPLT-TAN",
    "CUNIT1": "deg",
    "CDELT1": 0.5,
    "CRPIX1": 2,
    "CRVAL1": 0.5,
    "NAXIS1": 2,
    "CTYPE2": "HPLN-TAN",
    "CUNIT2": "deg",
    "CDELT2": 0.4,
    "CRPIX2": 2,
    "CRVAL2": 1,
    "NAXIS2": 2,
}
wcs_no_wave = WCS(header=h_no_wave, naxis=2)

# Two identical rasters of shape (2, 2, 3).
SOURCE_DATA_DN = np.array(
    [
        [[0.563, 1.132, -1.343], [-0.719, 1.441, 1.566]],
        [[0.563, 1.132, -1.343], [-0.719, 1.441, 1.566]],
    ]
)
SOURCE_UNCERTAINTY_DN = np.sqrt(SOURCE_DATA_DN)

TIME_DIM_LEN = SOURCE_DATA_DN.shape[0]
SINGLES_EXPOSURE_TIME = 2.0
EXPOSURE_TIME = u.Quantity(np.zeros(TIME_DIM_LEN) + SINGLES_EXPOSURE_TIME, unit=u.s)

# Define sample extra coords: two consecutive runs of per-second timestamps
# attached to axis 0.
EXTRA_COORDS0 = [
    ("time", 0, Time("2017-01-01") + TimeDelta(np.arange(TIME_DIM_LEN), format="sec")),
]
EXTRA_COORDS1 = [
    (
        "time",
        0,
        (Time("2017-01-01") + TimeDelta(np.arange(TIME_DIM_LEN, TIME_DIM_LEN * 2), format="sec")),
    ),
]

# Same idea, but with the time coordinate attached to axis 2.
extra_coords20 = [
    (
        "time",
        2,
        Time("2017-01-01") + TimeDelta(np.arange(SOURCE_DATA_DN.shape[2]), format="sec"),
    ),
]
extra_coords21 = [
    (
        "time",
        2,
        (
            Time("2017-01-01")
            + TimeDelta(
                np.arange(SOURCE_DATA_DN.shape[2], SOURCE_DATA_DN.shape[2] * 2),
                format="sec",
            )
        ),
    ),
]

# Define meta data
meta_seq = {
    "a": 0,
}
# Exposure-time meta attached to axis 0 and axis 2 respectively.
meta_exposure0 = NDMeta({"exposure time": EXPOSURE_TIME}, axes={"exposure time": 0}, data_shape=SOURCE_DATA_DN.shape)
meta_exposure2 = NDMeta(
    {"exposure time": u.Quantity(np.zeros(SOURCE_DATA_DN.shape[2]) + SINGLES_EXPOSURE_TIME, unit=u.s)},
    axes={"exposure time": 2},
    data_shape=SOURCE_DATA_DN.shape,
)

# Define RasterSequences in various units.
spectrogram_DN0 = SpectrogramCube(
    SOURCE_DATA_DN,
    WCS0,
    u.ct,
    SOURCE_UNCERTAINTY_DN,
    meta=meta_exposure0,
)
spectrogram_DN0.extra_coords.add(*EXTRA_COORDS0[0])
spectrogram_DN1 = SpectrogramCube(
    SOURCE_DATA_DN,
    WCS0,
    u.ct,
    SOURCE_UNCERTAINTY_DN,
    meta=meta_exposure0,
)
spectrogram_DN1.extra_coords.add(*EXTRA_COORDS1[0])
sequence_DN = RasterSequence([spectrogram_DN0, spectrogram_DN1], meta=meta_seq, common_axis=0)
sequence_DN0 = RasterSequence([spectrogram_DN0, spectrogram_DN1], meta=meta_seq, common_axis=0)
sequence_DN1 = RasterSequence([spectrogram_DN0, spectrogram_DN1], meta=meta_seq, common_axis=1)


# Sequence whose cubes use the swapped-axis WCS and axis-2 time coords.
spectrogram_DN20 = SpectrogramCube(SOURCE_DATA_DN, wcs2, u.ct, SOURCE_UNCERTAINTY_DN, meta=meta_exposure2)
spectrogram_DN20.extra_coords.add(*extra_coords20[0])
spectrogram_DN21 = SpectrogramCube(SOURCE_DATA_DN, wcs2, u.ct, SOURCE_UNCERTAINTY_DN, meta=meta_exposure2)
spectrogram_DN21.extra_coords.add(*extra_coords21[0])
sequence_DN2 = RasterSequence([spectrogram_DN20, spectrogram_DN21],
meta=meta_seq, common_axis=2)

# Cubes/sequences with the exposure-time correction applied (DN / s).
spectrogram_DN_per_s0 = SpectrogramCube(
    SOURCE_DATA_DN / SINGLES_EXPOSURE_TIME,
    WCS0,
    u.ct / u.s,
    SOURCE_UNCERTAINTY_DN / SINGLES_EXPOSURE_TIME,
    meta=meta_exposure0,
)
spectrogram_DN_per_s0.extra_coords.add(*EXTRA_COORDS0[0])
spectrogram_DN_per_s1 = SpectrogramCube(
    SOURCE_DATA_DN / SINGLES_EXPOSURE_TIME,
    WCS0,
    u.ct / u.s,
    SOURCE_UNCERTAINTY_DN / SINGLES_EXPOSURE_TIME,
    meta=meta_exposure0,
)
spectrogram_DN_per_s1.extra_coords.add(*EXTRA_COORDS1[0])
sequence_DN_per_s = RasterSequence([spectrogram_DN_per_s0, spectrogram_DN_per_s1], meta=meta_seq, common_axis=0)
# Cubes/sequences with the correction applied twice (DN / s / s), the
# expected result of force=True on an already-corrected sequence.
spectrogram_DN_per_s_per_s0 = SpectrogramCube(
    SOURCE_DATA_DN / SINGLES_EXPOSURE_TIME / SINGLES_EXPOSURE_TIME,
    WCS0,
    u.ct / u.s / u.s,
    SOURCE_UNCERTAINTY_DN / SINGLES_EXPOSURE_TIME / SINGLES_EXPOSURE_TIME,
    meta=meta_exposure0,
)
spectrogram_DN_per_s_per_s0.extra_coords.add(*EXTRA_COORDS0[0])
spectrogram_DN_per_s_per_s1 = SpectrogramCube(
    SOURCE_DATA_DN / SINGLES_EXPOSURE_TIME / SINGLES_EXPOSURE_TIME,
    WCS0,
    u.ct / u.s / u.s,
    SOURCE_UNCERTAINTY_DN / SINGLES_EXPOSURE_TIME / SINGLES_EXPOSURE_TIME,
    meta=meta_exposure0,
)
spectrogram_DN_per_s_per_s1.extra_coords.add(*EXTRA_COORDS1[0])
sequence_DN_per_s_per_s = RasterSequence(
    [spectrogram_DN_per_s_per_s0, spectrogram_DN_per_s_per_s1],
    meta=meta_seq,
    common_axis=0,
)
# Cubes with the correction undone with force=True (DN * s).
spectrogram_DN_s0 = SpectrogramCube(
    SOURCE_DATA_DN * SINGLES_EXPOSURE_TIME,
    WCS0,
    u.ct * u.s,
    SOURCE_UNCERTAINTY_DN * SINGLES_EXPOSURE_TIME,
    meta=meta_exposure0,
)
spectrogram_DN_s0.extra_coords.add(*EXTRA_COORDS0[0])
spectrogram_DN_s1 = SpectrogramCube(
    SOURCE_DATA_DN * SINGLES_EXPOSURE_TIME,
    WCS0,
    u.ct * u.s,
    SOURCE_UNCERTAINTY_DN * SINGLES_EXPOSURE_TIME,
    meta=meta_exposure0,
)
spectrogram_DN_s1.extra_coords.add(*EXTRA_COORDS1[0])
sequence_DN_s = RasterSequence([spectrogram_DN_s0, spectrogram_DN_s1], meta=meta_seq, common_axis=0)

# Define raster sequence with no spectral axes.
raster_no_wave0 = SpectrogramCube(SOURCE_DATA_DN[:, :, 0], wcs_no_wave, u.ct, meta=meta_exposure0)
raster_no_wave0.extra_coords.add(*EXTRA_COORDS0[0])
raster_no_wave1 = SpectrogramCube(SOURCE_DATA_DN[:, :, 0], wcs_no_wave, u.ct, meta=meta_exposure0)
raster_no_wave1.extra_coords.add(*EXTRA_COORDS0[0])
sequence_DN_no_wave = RasterSequence([raster_no_wave0, raster_no_wave1], 0)

# Define raster sequence with missing slit axes.
raster_no_slit0 = raster_no_slit1 = spectrogram_DN0[:, 0]
sequence_DN_no_slit = RasterSequence([raster_no_slit0, raster_no_slit1], 0)

# Define raster sequence with missing slit step axes.
raster_no_step0 = raster_no_step1 = spectrogram_DN0[0]
sequence_DN_no_step = RasterSequence([raster_no_step0, raster_no_step1], None)


def test_spectral_axis():
    # Sequence spectral axis should stack each member cube's spectral axis.
    spectral_axis = u.Quantity([d.spectral_axis for d in sequence_DN.data])
    assert (sequence_DN.spectral_axis == spectral_axis).all()


def test_time():
    # Sequence time should concatenate each member cube's time coordinate.
    times = np.concatenate([d.time for d in sequence_DN.data])
    assert (sequence_DN.time == times).all()


def test_exposure_time():
    exposure_time = np.concatenate([d.exposure_time for d in sequence_DN.data])
    assert (sequence_DN.exposure_time == exposure_time).all()


@pytest.mark.parametrize(
    ("input_sequence", "undo", "force", "expected_sequence"),
    [
        (sequence_DN, False, False, sequence_DN_per_s),
        (sequence_DN_per_s, True, False, sequence_DN),
        (sequence_DN_per_s, False, True, sequence_DN_per_s_per_s),
        (sequence_DN, True, True, sequence_DN_s),
    ],
)
def test_apply_exposure_time_correction(input_sequence, undo, force, expected_sequence):
    """Applying/undoing the exposure time correction yields the expected sequence."""
    output_sequence = input_sequence.apply_exposure_time_correction(undo, copy=True, force=force)
    assert_cubesequences_equal(output_sequence, expected_sequence)


@pytest.mark.parametrize(
    ("input_sequence", "expected_raster_axes_types"),
    [
        (
            sequence_DN0,
            (
                sequence_DN0._raster_axis_name,
                sequence_DN0._slit_step_axis_name,
                sequence_DN0._slit_axis_name,
                sequence_DN0._spectral_axis_name,
            ),
        ),
        (
            sequence_DN0.slice_as_raster[:, :, 0, 0],
            (sequence_DN0._raster_axis_name, sequence_DN0._slit_step_axis_name),
        ),
        (
            sequence_DN_no_wave,
            (
                sequence_DN_no_wave._raster_axis_name,
                sequence_DN_no_wave._slit_step_axis_name,
                sequence_DN_no_wave._slit_axis_name,
            ),
        ),
        (
            sequence_DN_no_slit,
            (
                sequence_DN_no_slit._raster_axis_name,
                sequence_DN_no_slit._slit_step_axis_name,
                sequence_DN_no_slit._spectral_axis_name,
            ),
        ),
        (
            sequence_DN_no_step,
            (
                sequence_DN_no_step._raster_axis_name,
                sequence_DN_no_step._slit_axis_name,
                sequence_DN_no_step._spectral_axis_name,
            ),
        ),
    ],
)
def test_raster_instrument_axes_types(input_sequence, expected_raster_axes_types):
    # Missing physical axes (wave/slit/step) should drop the matching label.
    assert input_sequence.raster_instrument_axes_types == expected_raster_axes_types


@pytest.mark.parametrize(
    ("input_sequence", "expected_sns_axes_types"),
    [
        (
            sequence_DN0,
            (
                sequence_DN0._sns_axis_name,
                sequence_DN0._slit_axis_name,
                sequence_DN0._spectral_axis_name,
            ),
        ),
        (sequence_DN0.slice_as_raster[:, :, 0, 0], (sequence_DN0._sns_axis_name,)),
    ],
)
def test_sns_instrument_axes_types(input_sequence, expected_sns_axes_types):
    assert input_sequence.sns_instrument_axes_types == expected_sns_axes_types


def test_slice_as_raster():
    # Slicing a RasterSequence should return a SpectrogramSequence.
    assert isinstance(sequence_DN[:, 0], SpectrogramSequence)
--------------------------------------------------------------------------------
/sunraster/version.py:
--------------------------------------------------------------------------------
# NOTE: First try _dev.scm_version if it exists and setuptools_scm is installed
# This file is not included in wheels/tarballs, so otherwise it will
# fall back on the generated _version module.
try:
    try:
        from ._dev.scm_version import version
    except ImportError:
        from ._version import version
except Exception:  # NOQA: BLE001
    import warnings

    warnings.warn(f'could not determine {__name__.split(".")[0]} package version; this indicates a broken installation')
    del warnings

    version = "0.0.0"
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
[tox]
min_version = 4.0
requires =
    tox-pypi-filter>=0.14
envlist =
    py{312,313,314}{,-online}
    py314-devdeps
    py312-oldestdeps
    codestyle
    build_docs

[testenv]
pypi_filter = https://raw.githubusercontent.com/sunpy/sunpy/main/.test_package_pins.txt
# Run the tests in a temporary directory to make sure that we don't import
# the package from the source tree
change_dir = .tmp/{envname}
description =
    run tests
    oldestdeps: with the oldest supported version of key dependencies
    devdeps: with the latest developer version of key dependencies
pass_env =
    # A variable to tell tests we are on a CI system
    CI
    # Custom compiler locations (such as ccache)
    CC
    # Location of locales (needed by sphinx on some systems)
    LOCALE_ARCHIVE
    # If the user has set a LC override we should follow it
    LC_ALL
set_env =
    MPLBACKEND = agg
    COLUMNS = 180
    build_docs,online: HOME = {envtmpdir}
    devdeps: PIP_EXTRA_INDEX_URL = https://pypi.anaconda.org/astropy/simple https://pypi.anaconda.org/scientific-python-nightly-wheels/simple
deps =
    # For packages which publish nightly wheels this will pull the latest nightly
    devdeps: astropy>=0.0.dev0
    devdeps: sunpy>=0.0.dev0
    # Packages without nightly wheels will be built from source like this
    devdeps: git+https://github.com/sunpy/ndcube
    oldestdeps: minimum_dependencies
    online: pytest-rerunfailures
    online: pytest-timeout
    pytest-cov
    pytest-xdist
# The following indicates which extras_require will be installed
extras =
    all
    tests
commands_pre =
    oldestdeps: minimum_dependencies sunraster --filename requirements-min.txt
    oldestdeps: pip install -r requirements-min.txt
    pip freeze --all --no-input
commands =
    # To amend the pytest command for different factors you can add a line
    # which starts with a factor like `online: --remote-data=any \`
    # If you have no factors which require different commands this is all you need:
    pytest \
    -vvv \
    -r fEs \
    --pyargs sunraster \
    --cov-report=xml \
    --cov=sunraster \
    --cov-config={toxinidir}/.coveragerc \
    {toxinidir}/docs \
    {posargs}

[testenv:codestyle]
pypi_filter =
skip_install = true
description = Run all style and file checks with pre-commit
deps =
    pre-commit
commands =
    pre-commit install-hooks
    pre-commit run --color always --all-files --show-diff-on-failure

[testenv:build_docs]
description = invoke sphinx-build to build the HTML docs
change_dir =
    docs
extras =
    docs
commands =
    sphinx-build -j auto --color -W --keep-going -b html -d _build/.doctrees . _build/html {posargs}
--------------------------------------------------------------------------------