├── .coveragerc
├── .coveralls.yml
├── .dockerignore
├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   └── feature_request.md
│   └── workflows
│       ├── docker_build.yml
│       ├── push_docker.yml
│       ├── python_tests.yml
│       └── pythonpublish.yml
├── .gitignore
├── .readthedocs.yml
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── Dockerfile
├── GITHUB_ACTIONS.md
├── LICENSE
├── MAINTENANCE.md
├── MANIFEST.in
├── README.md
├── VERSION-HISTORY.md
├── blimpy
│   ├── __init__.py
│   ├── bl_scrunch.py
│   ├── calcload.py
│   ├── calib_utils
│   │   ├── .sil.swp
│   │   ├── Full_Stokes_Calibration_with_Breakthrough_Listen_Data.pdf
│   │   ├── __init__.py
│   │   ├── calib_plots.py
│   │   ├── calibrators.txt
│   │   ├── explore_calibrators.py
│   │   ├── fluxcal.py
│   │   └── stokescal.py
│   ├── dice.py
│   ├── dsamp.py
│   ├── ephemeris
│   │   ├── __init__.py
│   │   ├── compute_lsrk.py
│   │   ├── compute_lst.py
│   │   ├── config.py
│   │   ├── observatory.py
│   │   └── observatory_info.csv
│   ├── fil2h5.py
│   ├── guppi.py
│   ├── h52fil.py
│   ├── h5diag.py
│   ├── io
│   │   ├── __init__.py
│   │   ├── base_reader.py
│   │   ├── fil_reader.py
│   │   ├── fil_writer.py
│   │   ├── file_wrapper.py
│   │   ├── hdf_reader.py
│   │   ├── hdf_writer.py
│   │   └── sigproc.py
│   ├── peek.py
│   ├── plotting
│   │   ├── __init__.py
│   │   ├── config.py
│   │   ├── plot_all.py
│   │   ├── plot_kurtosis.py
│   │   ├── plot_spectrum.py
│   │   ├── plot_spectrum_min_max.py
│   │   ├── plot_time_series.py
│   │   ├── plot_utils.py
│   │   └── plot_waterfall.py
│   ├── rawhdr.py
│   ├── signal_processing
│   │   ├── __init__.py
│   │   └── dedoppler.py
│   ├── srcname.py
│   ├── stax.py
│   ├── stix.py
│   ├── utils.py
│   └── waterfall.py
├── dependencies.txt
├── docker_guide.md
├── docs
│   ├── Makefile
│   ├── blimpy.calib_utils.rst
│   ├── blimpy.rst
│   ├── conf.py
│   ├── contents.rst
│   ├── index.rst
│   ├── license.rst
│   ├── make.bat
│   ├── modules.rst
│   ├── overview.md
│   └── writing_docs.rst
├── examples
│   └── voyager.ipynb
├── paper.bib
├── paper.md
├── pyproject.toml
├── requirements.txt
├── requirements_test.txt
├── setup.cfg
├── setup.py
├── tests
│   ├── __init__.py
│   ├── data.py
│   ├── download_data.sh
│   ├── run_tests.sh
│   ├── test_bl_scrunch.py
│   ├── test_calc_n_coarse_chan.py
│   ├── test_calcload.py
│   ├── test_compare_voyager.py
│   ├── test_dedoppler.py
│   ├── test_dice.py
│   ├── test_dsamp.py
│   ├── test_ephemeris.py
│   ├── test_fil2h5.py
│   ├── test_file_wrapper.py
│   ├── test_filterbank_voyager.py
│   ├── test_guppi.py
│   ├── test_h52fil.py
│   ├── test_h5diag.py
│   ├── test_h5py.py
│   ├── test_heavy.py
│   ├── test_observatory.py
│   ├── test_plotting.py
│   ├── test_setup.py
│   ├── test_sigproc.py
│   ├── test_stax.py
│   ├── test_stix.py
│   ├── test_unpack.py
│   ├── test_utils.py
│   ├── test_voyager_data_load.py
│   ├── test_waterfall.py
│   ├── test_waterfall2.py
│   └── test_write_to_fil.py
└── tutorial
    └── blimpy_voyager_tour.ipynb
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit = *calib_utils*, *deprecated*
--------------------------------------------------------------------------------
/.coveralls.yml:
--------------------------------------------------------------------------------
1 | service_name: travis
2 | parallel: true
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | .idea
2 | .ipynb_checkpoints
3 | *.pyc
4 | *.fil
5 | *.egg-info/
6 | .virtualenv*/
7 | .lib/
8 | .venv*/
9 | .eggs/
10 | .pytest_cache/
11 | tests/test.h5
12 | tests/test_large.h5
13 | test.h5
14 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 |
5 | ---
6 |
7 | **Describe the bug**
8 | A clear and concise description of what the bug is.
9 |
10 | **To Reproduce**
11 | Steps to reproduce the behavior:
12 | 1. Go to '...'
13 | 2. Click on '....'
14 | 3. Scroll down to '....'
15 | 4. See error
16 |
17 | **Expected behavior**
18 | A clear and concise description of what you expected to happen.
19 |
20 | **Screenshots**
21 | If applicable, add screenshots to help explain your problem.
22 |
23 | **Setup**
24 | - Python version: 2.X or 3.X?
25 | - Blimpy version?
26 |
27 | **Additional context**
28 | Add any other context about the problem here.
29 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 |
5 | ---
6 |
7 | **Is your feature request related to a problem? Please describe.**
8 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
9 |
10 | **Describe the solution you'd like**
11 | A clear and concise description of what you want to happen.
12 |
13 | **Describe alternatives you've considered**
14 | A clear and concise description of any alternative solutions or features you've considered.
15 |
16 | **Additional context**
17 | Add any other context or screenshots about the feature request here.
18 |
--------------------------------------------------------------------------------
/.github/workflows/docker_build.yml:
--------------------------------------------------------------------------------
1 | name: Test Dockerfile
2 |
3 | on:
4 | push:
5 | paths-ignore:
6 | - '**.ipynb'
7 | - '**.png'
8 | - '**.rst'
9 | - '**.md'
10 | pull_request:
11 | paths-ignore:
12 | - '**.ipynb'
13 | - '**.png'
14 | - '**.rst'
15 | - '**.md'
16 |
17 | jobs:
18 | build:
19 |
20 | runs-on: ubuntu-latest
21 |
22 | steps:
23 | - uses: actions/checkout@v2
24 | - name: Build the Docker image
25 | run: docker build . --file Dockerfile --tag blimpy-docker:$(date +%s)
26 |
27 |
--------------------------------------------------------------------------------
/.github/workflows/push_docker.yml:
--------------------------------------------------------------------------------
1 | name: Push to Docker Hub
2 |
3 | on:
4 | push:
5 | paths-ignore:
6 | - '**.rst'
7 | branches:
8 | - master
9 |
10 | jobs:
11 | build:
12 |
13 | runs-on: ubuntu-latest
14 |
15 | steps:
16 | - uses: actions/checkout@v2
17 | - name: Push to Docker Hub
18 | uses: docker/build-push-action@v1
19 | with:
20 | username: ${{ secrets.DOCKER_USER }}
21 | password: ${{ secrets.DOCKER_PASS }}
22 | repository: ucberkeleyseti/blimpy
23 | tags: latest
24 |
25 |
--------------------------------------------------------------------------------
/.github/workflows/python_tests.yml:
--------------------------------------------------------------------------------
1 | name: Test Blimpy
2 |
3 | on:
4 | push:
5 | paths-ignore:
6 | - '**.ipynb'
7 | - '**.png'
8 | - '**.rst'
9 | - '**.md'
10 | pull_request:
11 | paths-ignore:
12 | - '**.ipynb'
13 | - '**.png'
14 | - '**.rst'
15 | - '**.md'
16 |
17 | jobs:
18 | build:
19 |
20 | runs-on: ubuntu-latest
21 | strategy:
22 | matrix:
23 |         python-version: [3.7, 3.8, 3.9]
24 |
25 | steps:
26 | - uses: actions/checkout@v2
27 | - name: Set up Python ${{ matrix.python-version }}
28 | uses: actions/setup-python@v2
29 | with:
30 | python-version: ${{ matrix.python-version }}
31 | - name: Install system dependencies
32 | run: |
33 | sudo apt update
34 | cat dependencies.txt | sudo xargs -n 1 apt install -y
35 | - name: Install dependencies
36 | run: |
37 | python3 -m pip install --user --upgrade pip
38 | python3 -m pip install --user -r requirements.txt
39 | python3 setup.py install --user
40 | python3 -m pip install --user -r requirements_test.txt
41 | - name: Download test files
42 | run: |
43 | cd tests
44 | bash download_data.sh
45 | cd ..
46 | - name: Run coverage test
47 | run: |
48 | export PATH=/home/runner/.local/bin:$PATH
49 | pytest --cov=./ --cov-report=xml
50 | - name: Upload coverage to Codecov
51 | uses: codecov/codecov-action@v1
52 | with:
53 | token: ${{ secrets.CODECOV_TOKEN }}
54 | name: blimpy-codecov-p${{ matrix.python-version }}
55 |
56 |
--------------------------------------------------------------------------------
/.github/workflows/pythonpublish.yml:
--------------------------------------------------------------------------------
1 | # This workflow will upload a Python Package using Twine when a release is created.
2 |
3 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
4 |
5 | name: Upload Python Package
6 |
7 | on:
8 | release:
9 | types: [published]
10 |
11 | jobs:
12 | deploy:
13 |
14 | runs-on: ubuntu-latest
15 |
16 | steps:
17 | - uses: actions/checkout@v2
18 | - name: Set up Python
19 | uses: actions/setup-python@v1
20 | with:
21 | python-version: '3.x'
22 | - name: Install dependencies
23 | run: |
24 | python -m pip install --upgrade pip
25 | pip install setuptools wheel twine
26 | - name: Build and publish
27 | env:
28 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
29 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
30 | run: |
31 | python setup.py sdist bdist_wheel
32 | twine upload dist/*
33 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.png
2 | *.dat
3 | *.fil
4 | *.h5
5 | *.log
6 | *.pyc
7 | *.egg-info/
8 |
9 | .eggs/
10 | .idea/
11 | .ipynb_checkpoints/
12 | .coverage
13 | .virtualenv*/
14 | .venv*/
15 | coverage.xml
16 |
17 | tests/test_data
18 | build/
19 | dist/
20 | docs/_build/
21 | exec
22 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | # .readthedocs.yml
2 | # Read the Docs configuration file
3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4 |
5 | sphinx:
6 | configuration: docs/conf.py
7 |
8 | python:
9 | setup_py_install: true
10 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | In the interest of fostering an open and welcoming environment, we as
6 | contributors and maintainers pledge to making participation in our project and
7 | our community a harassment-free experience for everyone, regardless of age, body
8 | size, disability, ethnicity, sex characteristics, gender identity and expression,
9 | level of experience, education, socio-economic status, nationality, personal
10 | appearance, race, religion, or sexual identity and orientation.
11 |
12 | ## Our Standards
13 |
14 | Examples of behavior that contributes to creating a positive environment
15 | include:
16 |
17 | * Using welcoming and inclusive language
18 | * Being respectful of differing viewpoints and experiences
19 | * Gracefully accepting constructive criticism
20 | * Focusing on what is best for the community
21 | * Showing empathy towards other community members
22 |
23 | Examples of unacceptable behavior by participants include:
24 |
25 | * The use of sexualized language or imagery and unwelcome sexual attention or
26 | advances
27 | * Trolling, insulting/derogatory comments, and personal or political attacks
28 | * Public or private harassment
29 | * Publishing others' private information, such as a physical or electronic
30 | address, without explicit permission
31 | * Other conduct which could reasonably be considered inappropriate in a
32 | professional setting
33 |
34 | ## Our Responsibilities
35 |
36 | Project maintainers are responsible for clarifying the standards of acceptable
37 | behavior and are expected to take appropriate and fair corrective action in
38 | response to any instances of unacceptable behavior.
39 |
40 | Project maintainers have the right and responsibility to remove, edit, or
41 | reject comments, commits, code, wiki edits, issues, and other contributions
42 | that are not aligned to this Code of Conduct, or to ban temporarily or
43 | permanently any contributor for other behaviors that they deem inappropriate,
44 | threatening, offensive, or harmful.
45 |
46 | ## Scope
47 |
48 | This Code of Conduct applies both within project spaces and in public spaces
49 | when an individual is representing the project or its community. Examples of
50 | representing a project or community include using an official project e-mail
51 | address, posting via an official social media account, or acting as an appointed
52 | representative at an online or offline event. Representation of a project may be
53 | further defined and clarified by project maintainers.
54 |
55 | ## Enforcement
56 |
57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
58 | reported by contacting the project team at dancpr [at] berkeley [dot] edu. All
59 | complaints will be reviewed and investigated and will result in a response that
60 | is deemed necessary and appropriate to the circumstances. The project team is
61 | obligated to maintain confidentiality with regard to the reporter of an incident.
62 | Further details of specific enforcement policies may be posted separately.
63 |
64 | Project maintainers who do not follow or enforce the Code of Conduct in good
65 | faith may face temporary or permanent repercussions as determined by other
66 | members of the project's leadership.
67 |
68 | ## Attribution
69 |
70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
72 |
73 | [homepage]: https://www.contributor-covenant.org
74 |
75 | For answers to common questions about this code of conduct, see
76 | https://www.contributor-covenant.org/faq
77 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | ### Contributing to blimpy
2 |
3 | Thanks for thinking about contributing code to `blimpy`! Here's a few quick guidelines:
4 |
5 | * You'll need to follow our [code of conduct](https://github.com/UCBerkeleySETI/blimpy/blob/master/CODE_OF_CONDUCT.md). Basically, be nice.
6 | * If you have a small improvement, such as fixing a typo in the documentation, just open an issue and explain it in as much detail as you can.
7 | * If you have a moderate change that you've already coded up, issue a pull request with as much detail as possible about why it's awesome.
8 | * We run continuous integration to check whether new code breaks anything, and to track code coverage. We probably won't merge code that breaks things or drastically lowers coverage, which means you'll likely need some unit tests. If you need help writing tests, please open a pull request anyway and we'll work with you to bring it to a mergeable state.
9 | * If you open a pull request with new functionality, please write up some examples and unit tests so we can figure out what it does.
10 | * If you're planning major changes, please open up a discussion on the issue tracker first! We use `blimpy` in our telescope processing pipelines, so need to make sure our observation scripts won't break.
11 | * `blimpy` is used in installations where Python 2.7 is still in use, so code should, for the time being, be Py2 and Py3 compatible.
12 |
13 | ### Style: PEP8 and docstrings
14 |
15 | We favour the more concise [google docstrings](http://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings),
16 | and generally try to follow the [PEP8 style guide](https://www.python.org/dev/peps/pep-0008/), though we do like to line up equals signs over multiple lines.
17 | That is: try to write PEP8-compliant code, but we don't mind well-motivated deviations. For the preferred docstring layout, see the sketch below.
18 |
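19 | A minimal sketch of a docstring in that style (the function itself is illustrative, not part of blimpy):
20 |
21 | ```python
22 | def scrunch(data, factor=2):
23 |     """Average adjacent frequency channels together.
24 |
25 |     Args:
26 |         data (np.ndarray): Input spectra, shape (time, nifs, nchans).
27 |         factor (int): Number of adjacent channels to average.
28 |
29 |     Returns:
30 |         np.ndarray: Frequency-averaged spectra.
31 |     """
32 | ```
33 |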
19 | ### Reporting issues or problems with the software
20 |
21 | * Please use the [github issues](https://github.com/UCBerkeleySETI/blimpy/issues) page to open an issue. Try to explain in as much detail as possible, so we can reproduce and fix it.
22 |
23 | ### Getting support
24 |
25 | * [Open an issue](https://github.com/UCBerkeleySETI/blimpy/issues). If it's a bug report, or a feature request, please use the templates; otherwise try to be as descriptive as you can about what you're trying to do. Include as much relevant information as possible.
26 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:20.04
2 | ARG DEBIAN_FRONTEND=noninteractive
3 |
4 | RUN apt-get update
5 |
6 | COPY . /blimpy
7 | WORKDIR /blimpy
8 |
9 | RUN cat dependencies.txt | xargs -n 1 apt install --no-install-recommends -y
10 |
11 | RUN cd tests && bash download_data.sh && cd ..
12 |
13 | RUN python3 -m pip install --user -r requirements.txt
14 | RUN python3 setup.py install --user
15 | RUN python3 -m pip install --user -r requirements_test.txt
16 | RUN python3 setup.py test
17 |
18 | RUN rm -fr tests/test_data
19 | RUN find . -path '*/__pycache__*' -delete
20 |
21 | WORKDIR /home
22 |
--------------------------------------------------------------------------------
/GITHUB_ACTIONS.md:
--------------------------------------------------------------------------------
1 | This repository uses GitHub Actions workflows to test, validate, and publish code. The configuration files are located in `.github/workflows`.
2 |
3 | ### Workflow
4 |
5 | #### On Commit or Pull-Request
6 | Test and validate the integrity of each commit to any branch.
7 |
8 | 1. `python_tests.yml`: Run Python tests with coverage report.
9 | 2. `docker_build.yml`: Run build test with Docker.
10 |
11 | #### On Master Commit
12 | Publish the image to Docker Hub after a commit to `master` branch.
13 |
14 | 1. `push_docker.yml`: Build & publish the image on Docker Hub.
15 |
16 | ### Required Secrets
17 | - **DOCKER_USER**: Docker Hub Username.
18 | - **DOCKER_PASS**: Docker Hub Password.
19 | - **CODECOV_TOKEN**: Codecov Blimpy Token.
20 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2018, Berkeley SETI Research Center
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | * Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | * Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | * Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/MAINTENANCE.md:
--------------------------------------------------------------------------------
1 |
2 | blimpy Maintenance & Regression Testing
3 | =======================================
4 |
5 |
6 | ### Introduction
7 |
8 | The purpose of the regression testing suite is to exercise and validate results from blimpy functional modules. This is important in order to minimize potential inadvertent breakage when new development has occurred. It is always best to catch bugs as soon as possible after they are introduced.
9 |
10 | The primary method of launching regression testing is through the use of the `pytest` executable. This is invoked in the following ways:
11 | * Manually by a developer, on the command line in a terminal window. This would follow downloading blimpy and setting up the development/testing environment (discussed later).
12 | * Automatically as part of a Github Pull Request (PR) after finalizing a fork of blimpy.
13 | * Automatically as part of a Github Merge after a PR is approved.
14 |
15 |
16 | ### Development/Test Preparation
17 |
18 | * The development of an amendment to `blimpy` begins with forking the github repository, normally `https://github.com/UCBerkeleySETI/blimpy`.
19 | * Also, from the same site, `blimpy` is downloaded to a local computer. The download can be performed in a few different ways, but the simplest might be to download the zip file by clicking on the `Code` button and selecting `Download ZIP`. Once the zip file is in a local directory, unzip it and move the blimpy directory tree to wherever is appropriate for testing. The zip file can then be discarded.
20 | * Change directory into the `tests` directory and execute `bash download_data.sh`, which performs all required regression-testing initialization.
21 | * When the previous step has completed, change directory up one level to the top of the `blimpy` directory tree.
22 | * Execute: ```python3 setup.py install```
23 | * Then, install `pytest` and `pyslalib` from pypi.org: `python3 -m pip install pytest pyslalib`. The whole sequence is condensed in the sketch below.
24 |
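25 | A condensed version of the preparation and test steps, assuming the unzipped directory is named `blimpy-master` (the name is illustrative):
26 |
27 | ```
28 | cd blimpy-master
29 | cd tests && bash download_data.sh && cd ..
30 | python3 setup.py install
31 | python3 -m pip install pytest pyslalib
32 | pytest
33 | ```
34 |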
25 | ### Regression Test Operations
26 |
27 | * Run the full suite of regression tests by executing `pytest` with no parameters. A single regression test file can be run by specifying it as an argument to `pytest`; for example, to run only the plotting tests: `pytest tests/test_plotting.py`.
28 | * It is **highly encouraged** for developers to perform regression testing frequently in order to avoid surprises later on.
29 | * Once development activity on the local machine is complete and the last regression test run has verified the absence of negative side effects, the new and/or modified blimpy files can be uploaded to the developer's github fork.
30 | * At the fork's github site, the developer can open a pull request by clicking on the `Pull request` button. This automatically starts the PR process mentioned in the introduction section.
31 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE
2 | include *.md
3 | include MANIFEST.in
4 | include requirements*.txt
5 | recursive-include blimpy/ephemeris *.csv
6 | include tests
7 | recursive-include tests *
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [Build Status](https://github.com/UCBerkeleySETI/blimpy/actions)
2 | [Documentation Status](https://blimpy.readthedocs.io/en/latest/?badge=latest)
3 | [Code Coverage](https://codecov.io/gh/UCBerkeleySETI/blimpy)
4 | [JOSS Paper](http://joss.theoj.org/papers/e58ef21f0a924041bf9438fd75f8aed0)
5 |
6 | ## Breakthrough Listen I/O Methods for Python.
7 |
8 | ### Filterbank + Raw file readers
9 |
10 | This repository contains Python 2/3 readers for interacting with [Sigproc filterbank](http://sigproc.sourceforge.net/sigproc.pdf) (.fil), HDF5 (.h5) and [guppi raw](https://baseband.readthedocs.io/en/stable/guppi/) (.raw) files,
11 | as used in the [Breakthrough Listen](https://seti.berkeley.edu) search for intelligent life.
12 |
13 |
14 | ### Installation
15 |
16 | #### System Dependencies
17 | Sometimes the `pip` installation can fail if a system dependency is not installed. To fix this, make sure you have `curl` and install the required system dependencies with the command below:
18 |
19 | ##### Debian/Ubuntu
20 | ```
21 | curl https://raw.githubusercontent.com/UCBerkeleySETI/blimpy/master/dependencies.txt | xargs -n 1 sudo apt install --no-install-recommends -y
22 | ```
23 |
24 | #### Manual Installation
25 |
26 | The latest release can be installed via pip directly from this repository:
27 |
28 | ```
29 | python3 -m pip install -U git+https://github.com/UCBerkeleySETI/blimpy
30 | ```
31 |
32 | Or, clone the latest version of the development code from the github [repo](https://github.com/UCBerkeleySETI/blimpy) and run `python setup.py install` or `pip install .` (with sudo if required), or install directly with the following terminal command:
33 |
34 | ```
35 | python3 -m pip install -U https://github.com/UCBerkeleySETI/blimpy/tarball/master
36 | ```
37 |
38 | To install everything required to run the unit tests, run:
39 |
40 | ```
41 | python3 -m pip install -e .[full]
42 | ```
43 |
44 | You will need `numpy`, `h5py`, `astropy`, `scipy`, and `matplotlib` as dependencies. A `pip install` should pull in numpy, h5py, and astropy, but you may still need to install scipy and matplotlib separately.
45 | To interact with compressed files, you'll need the `hdf5plugin` package too.
46 |
47 | Note that h5py generally needs to be installed in this way:
48 |
49 | ```
50 | $ python3 -m pip install --no-binary=h5py h5py
51 | ```
52 |
53 | ### Command line utilities
54 |
55 | After installation, the following command will display the metadata (header) values and some information about the data matrix:
56 |
57 | `watutil -i <filename>`
58 |
59 | Other command line utilities available post-installation:
60 | * `bl_scrunch`, Scrunch an HDF5 file (.h5) or a Sigproc Filterbank file (.fil) to an output HDF5 file. This has the effect of down-sampling adjacent fine frequency cells.
61 | * `bldice`, Extract a smaller frequency region from a Filterbank file (.fil or .h5 file).
62 | * `calcload`, Calculate the Waterfall max_load value needed to load the entire data array for a given Filterbank file.
63 | * `dsamp`, Down-sample (time-dimension only) from one Filterbank file to another.
64 | * `fil2h5`, Convert a .fil file into .h5 format.
65 | * `h52fil`, Convert an .h5 file into .fil format.
66 | * `peek`, Display a selected portion of values from the data matrix of a Filterbank file.
67 | * `rawhdr`, Display the header fields of a raw guppi file.
68 | * `rawutil`, Plot data in a guppi raw file.
69 | * `srcname`, Patch the header source_name field in a .h5 file.
70 | * `stax`, For a collection of .h5 or .fil files sharing the same frequency range, create a vertical stack of waterfall plots as a single PNG file.
71 | * `stix`, For a single very large Filterbank file, create a horizontal or vertical stack of waterfall plots as a single PNG file.
72 | * `watutil`, Information/read/write/plot utility for Filterbank files.
73 |
74 | Use the `-h` flag with any of the above command line utilities to display their available arguments; a brief example follows.
75 |
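76 | For example, converting a Filterbank file to HDF5 and then computing the `max_load` value needed to read it fully might look like this (the file names are illustrative):
77 |
78 | ```
79 | fil2h5 voyager.fil
80 | calcload voyager.h5
81 | ```
82 |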
76 | ### Reading blimpy filterbank files in .fil or .h5 format
77 |
78 | The `blimpy.Waterfall` class provides a Python API for interacting with filterbank data. It supports all BL filterbank data products; see this [example Jupyter notebook](https://github.com/UCBerkeleySETI/blimpy/blob/master/examples/voyager.ipynb) for an overview.
79 |
80 | From the python, ipython, or jupyter notebook environments:
81 |
82 | ```python
83 | from blimpy import Waterfall
84 | fb = Waterfall('/path/to/filterbank.fil')
85 | #fb = Waterfall('/path/to/filterbank.h5') #works the same way
86 | fb.info()
87 | data = fb.data
88 | ```
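89 |
90 | You can also extract and plot a slice of the data. A minimal sketch (the file path and frequency range are illustrative):
91 |
92 | ```python
93 | from blimpy import Waterfall
94 |
95 | fb = Waterfall('/path/to/filterbank.h5')
96 | # Select a narrow frequency range (values in MHz).
97 | freqs, data = fb.grab_data(f_start=8419.29, f_stop=8419.32)
98 | # Plot the integrated spectrum over the same range.
99 | fb.plot_spectrum(f_start=8419.29, f_stop=8419.32)
100 | ```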
89 |
90 | ### Reading guppi raw files
91 | The [Guppi Raw format](https://github.com/UCBerkeleySETI/breakthrough/blob/master/doc/RAW-File-Format.md) can be read using the `GuppiRaw` class from `guppi.py`:
92 |
93 | ```python
94 | from blimpy import GuppiRaw
95 | gr = GuppiRaw('/path/to/guppirawfile.raw')
96 |
97 | header, data = gr.read_next_data_block()
98 | ```
99 |
100 | or
101 |
102 | ```python
103 | from blimpy import GuppiRaw
104 | gr = GuppiRaw('/path/to/guppirawfile.raw')
105 |
106 | for header, data_x, data_y in gr.get_data():
107 |     pass  # process data here
108 | ```
109 |
110 | Note: most users should start analysis with filterbank files, which are smaller in size and have been generated from the guppi raw files.
111 |
112 | ### Using blimpy inside Docker
113 | The blimpy images are pushed to a public repository after each successful build of the master branch.
114 | If you have Docker installed, you can run the following commands to pull our images, which have the environment and dependencies set up for you.
115 |
116 | `docker pull fx196/blimpy:py3_kern_stable`
117 |
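118 | Once pulled, the image can be started in the usual way; an illustrative invocation (other options are possible):
119 |
120 | ```
121 | docker run --rm -it fx196/blimpy:py3_kern_stable bash
122 | ```
123 |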
118 | Here is a [more complete guide](./docker_guide.md) on using blimpy in Docker.
119 |
120 | ### Further reading
121 |
122 | A detailed overview of the data formats used in Breakthrough Listen can be found in our [data format paper](https://ui.adsabs.harvard.edu/abs/2019arXiv190607391L/abstract). An archive of data files from the Breakthrough Listen program is provided at [seti.berkeley.edu/opendata](http://seti.berkeley.edu/opendata).
123 |
124 | ### If you have any requests or questions, please let us know!
125 |
--------------------------------------------------------------------------------
/VERSION-HISTORY.md:
--------------------------------------------------------------------------------
1 | This file is a version history of blimpy amendments, beginning with version 2.0.2. Entries appear in version descending order (newest first, oldest last).
2 |
3 |
4 | | Date | Version | Contents |
5 | | :--: | :--: | :-- |
6 | | 2022-11-16 | 2.1.4 | Make bl_scrunch actually work (issue #276). |
7 | | 2022-08-02 | 2.1.3 | Write .fil files as well as .h5 files (issue #272). |
8 | | 2022-07-22 | 2.1.2 | More container fields needed (issue #270). |
9 | | 2022-07-21 | 2.1.1 | New Waterfall class option, an alternative to file loading (issue #264). |
10 | | 2022-07-08 | 2.1.0 | New utility: dsamp (issue #267). |
11 | | 2022-04-19 | 2.0.40 | Fixed blimpy to show plots when the display supports it (issue #263). |
12 | | 2022-03-30 | 2.0.39 | examine_h5 in hdf_reader.py is loading too much data (issue #261). |
13 | | | | Version/Release 2.0.38 cannot be used. |
14 | | 2022-02-03 | 2.0.37 | Having trouble with publishing on pypi. Seems to be stuck on blimpy v2.0.35. |
15 | | 2022-02-02 | 2.0.36 | Enhance h5diag to show frequency channel information. |
16 | | 2022-01-28 | 2.0.35 | Revamp rawhdr (issue #253). |
17 | | | | Fix misinterpretation of numbers in guppi.py (issue #254). |
18 | | 2022-01-20 | 2.0.34 | Stop mangled output file names from fil2h5 & h52fil (issue #251). |
19 | | 2022-01-18 | 2.0.33 | Support rawspec 3.0 FBH5 format reporting (issue #249). |
20 | | 2021-11-17 | 2.0.32 | New utility: srcname (issue #246). |
21 | | 2021-11-10 | 2.0.31 | New utility: peek. |
22 | | 2021-10-03 | 2.0.30 | Fix issue #243. |
23 | | 2021-08-18 | 2.0.29 | Clean up messages when writing files (Issue #241). |
24 | | 2021-08-18 | 2.0.28 | Fix utility stax difficulties with the time-axis (Issue #238). |
25 | | | | More fixes to the regression tests. |
26 | | 2021-08-18 | 2.0.27.1 | Fix problems with stix and stax. |
27 | | 2021-08-17 | 2.0.27 | Fix problems in various regression tests. |
28 | | 2021-08-17 | 2.0.26 | Implement requirements_test.txt (Issue #234). |
29 | | 2021-08-16 | 2.0.25 | Try to remove HDF5 files that cannot be fully written due to an exception (Issue #232). |
30 | | 2021-08-13 | 2.0.24 | Check the data B-tree in issue #226. |
31 | | 2021-08-13 | 2.0.23 | A better fix to issue #226. |
32 | | 2021-08-10 | 2.0.22 | Non/broken HDF5 input files need better diagnosis (Issue #226). |
33 | | 2021-08-07 | 2.0.21 | New signal_processing source file, "dedoppler.py" (discussion in PR #220). |
34 | | 2021-08-06 | 2.0.20 | New utility, "stix" (Issue #221). |
35 | | 2021-07-28 | 2.0.19 | Update fil2h5 to handle YUGE data matrixes. |
36 | | 2021-07-17 | 2.0.18 | Get rid of numpy "RuntimeWarning: Mean of empty slice" messages (Issue #212). |
37 | | 2021-07-13 | 2.0.17 | New utility: stax. |
38 | | 2021-07-08 | 2.0.16 | Increase test coverage of calc_n_coarse_chan(). |
39 | | | | Improve messaging when calc_n_coarse_chan() emits warnings. |
40 | | 2021-06-14 | 2.0.15 | Fix issue #210 - Guard against unusual Filterbank headers created by setigen apps. |
41 | | 2021-06-12 | 2.0.14 | Fix issue #208 - Miscalculated max_data_array_size when available RAM < 1 GB. |
42 | | 2021-06-10 | 2.0.13 | Fix issue #205 - Define MeerKAT in the list of observatories. |
43 | | | | Fix issue #207 guppi.py generate_filterbank_header(). |
44 | | 2021-05-29 | 2.0.12 | Fix issue #203 - calc_n_coarse_chan default to 64. |
45 | | 2021-04-14 | 2.0.11 | Fix issue #196 - automate memory requirements. |
46 | | 2021-03-11 | 2.0.10 | Reopened enhancement #178 - calcload utility - added a verbose parameter. |
47 | | 2021-03-08 | 2.0.9 | Implemented enhancement #182 - rawhdr utility (get header from raw files). |
48 | | | | Amended setup.cfg to enable hdf5plugin to be installed optimized by installation. |
49 | | 2021-03-05 | 2.0.8 | Yanked NUMBA from requirements.txt and waterfall.py due to observed instability in a large data array. |
50 | | 2021-03-04 | 2.0.7 | Fix issue #177 - Amend waterfall.py by adding a \_\_del\_\_ function to ensure that HDF5 files are closed. |
51 | | | | Fix issue #178 - Introduce a new utility (calcload) to calculate max_load for Waterfall. |
52 | | 2021-03-01 | 2.0.6 | Fix issue #171 - grab_data() needed a clear error message when "heavy" data had not been loaded. |
53 | | 2020-12-18 | 2.0.5 | Ignore documentation files in CI (PR #166). |
54 | | | | Slice and dice by time as well as frequencies in the `dice` command (PR #167). |
55 | | 2020-12-18 | 2.0.4 | Deprecate Travis CI in favor of Github Actions (PR #164). |
56 | | 2020-12-16 | 2.0.3 | Numba acceleration in Waterfall.block_dc() (PR #162). |
57 | | | | Removed references to nonexistent `filutil` command. |
58 | | | | Removed generated pop-up plot windows while running `pytest`; the figures are still saved. |
59 | | | | Removed outdated Docker files. |
60 | | | | Updated setuptools build requirements. |
61 | | | | Updated documentation. |
62 | | 2020-12-15 | 2.0.2 | Current as of 2020-12-15. |
63 |
--------------------------------------------------------------------------------
/blimpy/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | try:
4 | from . import waterfall
5 | from .waterfall import Waterfall
6 | from .guppi import GuppiRaw
7 | from . import utils
8 | from . import fil2h5
9 | from . import h52fil
10 | from . import h5diag
11 | from . import bl_scrunch
12 | from . import calcload
13 | from . import rawhdr
14 | from . import stax
15 | from . import stix
16 | from . import dsamp
17 | from blimpy.io import file_wrapper
18 | except ImportError:
19 | print("Warning: At least one utility could not be imported!")
20 |
21 | from pkg_resources import get_distribution, DistributionNotFound
22 |
23 | try:
24 | __version__ = get_distribution('blimpy').version
25 | except DistributionNotFound:
26 | __version__ = '0.0.0 - please install via pip/setup.py'
27 |
--------------------------------------------------------------------------------
/blimpy/bl_scrunch.py:
--------------------------------------------------------------------------------
1 | r"""
2 | From an input HDF5 file or Sigproc Filterbank file, perform frequency-channel averaging,
3 | producing a new HDF5 file.
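4 |
5 | Example of calling the underlying function (the file path is hypothetical):
6 |
7 |     from blimpy.bl_scrunch import bl_scrunch
8 |     bl_scrunch('/path/to/input.h5', out_dir='/tmp', f_scrunch=8)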
4 | """
5 |
6 | import os, sys
7 | from argparse import ArgumentParser
8 | from blimpy.waterfall import Waterfall
9 | from .utils import change_the_ext
10 |
11 |
12 | def bl_scrunch(in_path, out_dir='./', new_filename='', max_load=None, f_scrunch=None):
13 | r""" Frequency scrunch (lower resolution by averaging)
14 |
15 | Args:
16 | in_path : str
17 | Path of input file to open.
18 | out_dir : str
19 | Output directory.
20 | new_filename : str
21 | Output file name.
22 | max_load : int
23 | Waterfall object instantiation max_load parameter value.
24 | f_scrunch : int
25 | Number of frequency channels to average together at one time.
26 | """
27 |
28 | print("bl_scrunch: Input path: {}".format(in_path))
29 | in_ext = os.path.splitext(in_path)[1]
30 | if in_ext not in ('.fil', '.h5'):
31 | raise ValueError('Oops, input file extension must be .fil or .h5; saw: {} !'.format(in_ext))
32 | print("bl_scrunch: Averaging {} frequency channels at a time.".format(f_scrunch))
33 |
34 | wf = Waterfall(in_path, max_load=max_load)
35 | if new_filename == '':
36 | if in_ext == '.h5':
37 | out_path = out_dir + '/' + change_the_ext(in_path, 'h5', 'scrunched.h5').split('/')[-1]
38 | else: # .fil
39 | out_path = out_dir + '/' + change_the_ext(in_path, 'fil', 'scrunched.h5').split('/')[-1]
40 | else:
41 | out_path = out_dir + new_filename
42 |
43 | if f_scrunch < 2 or f_scrunch >= wf.header["nchans"] :
44 | print("\n*** Number of frequency channels to average together must be > 1 and < the input file header nchans value!!\n")
45 | sys.exit(1)
46 |
47 | print("bl_scrunch: Output path: {}".format(out_path))
48 | wf.write_to_hdf5(out_path, f_scrunch=f_scrunch)
49 | print("bl_scrunch: End")
50 |
51 |
52 | def cmd_tool(args=None):
53 | r""" Command line utility for scrunching an input HDF5 file or Filterbank file.
54 | """
55 |
56 | p = ArgumentParser(description='Command line utility for scrunching an HDF5 file (.h5) or a Sigproc Filterbank file (.fil) to an output HDF5 file.')
57 | p.add_argument('filepath', type=str, help='Input file path to open (.h5 or .fil).')
58 | p.add_argument('-f', '--fscrunch', dest='f_scrunch', type=int, required=True,
59 | help='Number of frequency channels to average (scrunch) together.')
60 | p.add_argument('-o', '--out_dir', dest='out_dir', type=str, default='./',
61 | help='Location for output files. Default: current directory.')
62 | p.add_argument('-n', '--new_filename', dest='new_filename', type=str, default='',
63 |                help='New filename. Default: replaces the input file extension with .scrunched.h5.')
64 | p.add_argument('-l', '--max_load', action='store', default=None, dest='max_load', type=float,
65 | help='Maximum data limit to load. Default: 1.0 GB.')
66 | p.add_argument('-d', '--delete_input', dest='delete_input', action='store_true', default=False,
67 | help='This option deletes the input file after conversion.')
68 |
69 | if args is None:
70 | args = p.parse_args()
71 | else:
72 | args = p.parse_args(args)
73 |
74 | bl_scrunch(args.filepath, out_dir=args.out_dir, new_filename=args.new_filename,
75 | max_load=args.max_load, f_scrunch=args.f_scrunch)
76 |
77 | if args.delete_input:
78 | print("'Deleting input file: %s"%(args.filepath))
79 | os.remove(args.filepath)
80 |
81 |
82 | if __name__ == "__main__":
83 |
84 | cmd_tool()
85 |
86 |
--------------------------------------------------------------------------------
/blimpy/calcload.py:
--------------------------------------------------------------------------------
1 | ''' calcload.py - Calculate the Waterfall max_load value needed to load the data array for a given file.'''
2 |
3 | import sys
4 | from argparse import ArgumentParser
5 | import numpy as np
6 | import blimpy as bl
7 |
8 |
9 | def calc_max_load(arg_path, verbose=False):
10 | r''' Calculate the max_load parameter value for a subsequent Waterfall instantiation.
11 |
12 | Algorithm:
13 | * A = minimum Waterfall object size.
14 | * B = data array size within one polarisation.
15 |       * Return ceil((A + B) expressed in GB).
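16 |
17 |     Example (the file path is hypothetical):
18 |         max_load = calc_max_load('/path/to/data.h5')
19 |         wf = bl.Waterfall('/path/to/data.h5', max_load=max_load)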
16 | '''
17 | wf = bl.Waterfall(arg_path, load_data=False)
18 | min_size = float(sys.getsizeof(wf.header)) + float(sys.getsizeof(wf))
19 | data_size = float(wf.header['nchans'] * wf.n_ints_in_file * wf.header['nbits']) / 8.0
20 | ngbytes = (min_size + data_size) / 1e9
21 | max_load = np.ceil(ngbytes)
22 | if verbose:
23 | print('calc_max_load: Waterfall object size excluding data = {}, data array size = {}, total GBs = {:.1f}'
24 | .format(min_size, data_size, ngbytes))
25 | return max_load
26 |
27 |
28 | def cmd_tool(args=None):
29 | r'''Command line entrypoint for "calcload"'''
30 | p = ArgumentParser(description='Calculate the Waterfall max_load value needed to load the data array for a given file.')
31 | p.add_argument('filepath', type=str, help='Name of filepath to open (h5 or fil)')
32 | p.add_argument('-v', action='store_true', default=False, dest='verbose',
33 | help='verbose output if True.')
34 |
35 | if args is None:
36 | args = p.parse_args()
37 | else:
38 | args = p.parse_args(args)
39 |
40 | gb = calc_max_load(args.filepath, args.verbose)
41 | if gb > 1.0:
42 | print('Use Waterfall instantiation with a max_load={}'.format(gb))
43 | else:
44 | print('Use Waterfall without a max_load= specification')
45 |
46 |
47 | if __name__ == "__main__":
48 | cmd_tool()
49 |
--------------------------------------------------------------------------------
/blimpy/calib_utils/.sil.swp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UCBerkeleySETI/blimpy/3ebf04342227a95405aa32e5bc75832d1dd17f28/blimpy/calib_utils/.sil.swp
--------------------------------------------------------------------------------
/blimpy/calib_utils/Full_Stokes_Calibration_with_Breakthrough_Listen_Data.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/UCBerkeleySETI/blimpy/3ebf04342227a95405aa32e5bc75832d1dd17f28/blimpy/calib_utils/Full_Stokes_Calibration_with_Breakthrough_Listen_Data.pdf
--------------------------------------------------------------------------------
/blimpy/calib_utils/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 |
4 | try:
5 | from . import fluxcal
6 | from . import stokescal
7 | from . import calib_plots
8 | except ImportError:
9 | print("Warning: Cannot import calibration utilities")
10 |
11 |
12 |
--------------------------------------------------------------------------------
/blimpy/calib_utils/calibrators.txt:
--------------------------------------------------------------------------------
1 | Full Freq J0133 3C48 ForA 3C123 J0444 3C138 PicA 3C144 3C147 3C196 3C218 3C274 3C286 3C295 3C348 3C353 3C380 3C405 3C444 3C461 Err
2 | 1 0.074 0.00 72.800 0 409.00 0.00 0.000 0.0 1750 57.40 131.000 601.00 2020 29.90 125.000 0.00 422.0 127.00 17100 0.000 18500 0.5
3 | 2 0.232 25.20 53.500 416 189.00 30.70 18.100 243.0 1240 61.60 60.600 216.00 870 27.80 72.800 281.00 190.0 52.90 7940 52.600 6960 1.6
4 | 3 0.247 19.50 52.100 433 180.00 30.10 18.200 234.0 1220 60.90 57.900 209.00 822 28.20 70.500 263.00 181.0 52.40 7570 50.700 6620 1.0
5 | 4 0.275 24.20 47.800 395 164.00 27.20 17.600 216.0 1170 57.70 52.600 185.00 769 27.20 65.900 238.00 171.0 47.20 6970 45.500 6040 0.3
6 | 5 0.296 19.70 46.400 366 157.00 25.80 17.000 207.0 1160 56.40 50.300 177.00 729 27.00 64.100 222.00 164.0 45.40 6580 42.500 5720 0.2
7 | 6 0.312 22.50 45.000 346 150.00 24.80 16.700 197.0 1130 55.00 48.300 169.00 702 26.20 61.900 211.00 158.0 43.30 6310 39.700 5580 0.7
8 | 7 0.328 21.60 43.700 345 145.00 23.70 16.500 190.0 1130 53.70 46.500 161.00 672 26.30 60.400 201.00 152.0 41.70 6060 38.400 5270 0.2
9 | 8 0.344 19.60 42.700 335 141.00 22.80 16.200 186.0 1130 53.00 45.100 155.00 647 26.20 59.200 193.00 149.0 40.40 5830 37.500 5110 0.3
10 | 9 0.357 19.60 41.500 318 137.00 22.20 15.800 180.0 1110 51.70 43.600 150.00 628 25.80 57.700 185.00 144.0 39.00 5650 35.000 4950 0.3
11 | 10 0.382 18.10 40.100 313 130.00 21.10 15.500 171.0 1090 50.10 41.700 141.00 596 25.30 55.900 173.00 138.0 37.30 5340 33.400 4700 0.4
12 | 11 0.392 18.90 39.400 309 128.00 20.70 15.500 169.0 1100 49.50 40.800 138.00 589 25.30 55.000 169.00 136.0 36.20 5230 32.200 4590 0.5
13 | 12 0.403 19.60 38.800 309 126.00 20.60 15.300 169.0 1090 49.10 40.300 136.00 583 25.00 54.200 166.00 133.0 35.50 5110 31.200 4480 0.5
14 | 13 0.422 19.10 37.500 294 121.00 19.40 14.900 159.0 1070 47.50 38.300 129.00 556 24.50 52.400 158.00 129.0 34.10 4910 29.600 4330 0.4
15 | 14 0.437 17.70 36.400 283 117.00 18.60 14.600 154.0 1060 46.20 37.000 125.00 538 24.20 51.100 151.00 125.0 33.00 4770 28.700 4200 0.7
16 | 15 0.457 17.80 35.800 274 114.00 18.60 14.700 157.0 1060 45.50 36.100 121.00 525 24.00 50.200 147.00 122.0 32.10 4590 27.700 4090 0.6
17 | 16 0.471 17.90 35.000 275 112.00 18.00 14.200 152.0 1050 44.70 35.100 118.00 520 23.60 49.100 142.00 121.0 31.00 4470 26.500 3990 0.6
18 | 17 1.040 10.30 20.300 0 60.60 8.96 9.980 83.1 880 27.40 18.600 57.40 270 17.10 28.400 64.00 70.4 16.50 2140 12.200 2210 0.5
19 | 18 1.490 8.18 15.500 0 45.70 6.50 8.260 64.1 817 21.20 13.600 41.60 202 14.50 21.300 45.40 54.9 12.60 1490 8.500 1700 0.5
20 | 19 1.810 7.15 13.300 0 39.20 5.43 7.430 55.7 785 18.40 11.500 34.90 172 13.20 18.000 36.50 47.8 10.90 1210 6.990 1480 0.5
21 | 20 2.050 6.58 12.000 0 35.50 0.00 6.910 50.5 752 16.70 10.200 30.80 154 12.40 16.100 31.90 43.2 9.91 1040 6.110 1300 0.4
22 | 21 2.950 4.93 8.650 0 25.60 0.00 5.500 37.3 681 12.20 7.130 21.20 0 10.10 11.200 21.30 32.4 7.49 682 4.090 973 0.5
23 | 22 3.670 0.00 7.070 0 21.00 0.00 4.780 30.6 632 10.10 5.700 16.90 0 8.78 8.820 16.50 26.4 6.43 521 3.150 814 0.5
24 | 23 4.760 0.00 5.520 0 16.40 0.00 4.000 0.0 0 7.90 4.310 13.19 0 7.44 6.580 12.40 0.0 5.33 377 2.310 0 0.5
25 | 24 6.560 0.00 4.040 0 11.90 0.00 3.170 0.0 0 5.90 3.040 9.60 0 6.03 4.520 8.49 0.0 4.38 259 1.560 0 0.6
26 | 25 8.590 0.00 3.100 0 9.11 0.00 2.600 0.0 0 4.60 2.260 7.31 0 5.00 3.250 6.16 0.0 3.77 187 1.100 0 0.4
27 | 26 11.100 0.00 2.410 0 7.01 0.00 2.150 0.0 0 3.60 1.700 5.55 0 4.19 2.370 4.52 0.0 3.36 136 0.774 0 0.5
28 | 27 14.200 0.00 1.880 0 5.40 0.00 1.790 0.0 0 2.90 1.280 4.37 0 3.50 1.730 0.00 0.0 3.04 0 0.000 0 0.5
29 | 28 16.600 0.00 1.620 0 4.61 0.00 1.600 0.0 0 2.51 1.070 3.77 0 3.15 1.400 0.00 0.0 2.87 0 0.000 0 0.5
30 | 29 19.100 0.00 1.410 0 3.95 0.00 1.430 0.0 0 2.21 0.904 0.00 0 2.82 1.180 0.00 0.0 2.67 0 0.000 0 0.6
31 | 30 25.600 0.00 1.050 0 2.88 0.00 1.150 0.0 0 1.71 0.638 0.00 0 2.27 0.805 0.00 0.0 2.32 0 0.000 0 0.7
32 | 31 32.100 0.00 0.839 0 2.18 0.00 0.968 0.0 0 1.42 0.488 0.00 0 1.92 0.599 0.00 0.0 2.06 0 0.000 0 0.8
33 | 32 37.100 0.00 0.731 0 1.87 0.00 0.871 0.0 0 1.27 0.413 0.00 0 1.71 0.493 0.00 0.0 1.86 0 0.000 0 0.7
34 | 33 42.100 0.00 0.651 0 1.58 0.00 0.795 0.0 0 1.16 0.357 0.00 0 1.58 0.410 0.00 0.0 1.74 0 0.000 0 0.8
35 | 34 48.100 0.00 0.587 0 1.36 0.00 0.724 0.0 0 1.07 0.298 0.00 0 1.42 0.350 0.00 0.0 1.64 0 0.000 0 4.7
36 |
--------------------------------------------------------------------------------
/blimpy/calib_utils/explore_calibrators.py:
--------------------------------------------------------------------------------
1 | from astropy.io import ascii
2 | import numpy as np
3 | import matplotlib.pyplot as plt
4 | from scipy.optimize import curve_fit
5 |
6 | data = ascii.read('calibrators.txt')
7 |
8 |
9 | def func_powerlaw(x,alph,c):
10 | """
11 | Power-law function
12 | """
13 | return c*x**alph
14 |
15 | def get_freqs(MHz=False):
16 | """
17 | Return frequency array in calibrators.txt
18 | If MHz is true, then units of values are in MHz;
19 | Else the units are in GHz (default).
20 | """
21 | if MHz:
22 | return np.array(data['Freq']) * 1000
23 | return np.array(data['Freq'])
24 |
25 | def cal_fluxes(source):
26 | """
27 | Return spectrum of a particular source (e.g. '3C295') in calibrators.txt
28 | """
29 | return np.array(data[source])
30 |
31 | def errors():
32 | """
33 | Get errors in the data matrix.
34 | """
35 | return np.array(data['Err'])
36 |
37 | def source_power_law_fit(source,minfreq,maxfreq):
38 | """
39 | Calculates optimized power-law parameters. Considers data for a particular source
40 | from calibrators.txt between minfreq and maxfreq (in MHz)
41 | """
42 | freqs = get_freqs(MHz=True) #In MHz
43 | fluxes = cal_fluxes(source)
44 | freqs_cut = freqs[np.where(np.logical_and(freqs>=minfreq, freqs<=maxfreq))]
45 | fluxes_cut = fluxes[np.where(np.logical_and(freqs>=minfreq, freqs<=maxfreq))]
46 | popt, dummy = curve_fit(func_powerlaw,freqs_cut,fluxes_cut)
47 | return popt
48 |
49 | def plot_flux_comp(source,name=None,custom_minfreq=None,custom_maxfreq=None):
50 | """
51 | Plots the result of source_power_law_fit() along with the data from calibrators.txt
52 | for the source in question. Use 'name' to save the resulting plot.
53 | """
54 | freqs = get_freqs(MHz=True) #In MHz
55 | fluxes = cal_fluxes(source)
56 | errs = errors()
57 | if custom_minfreq is not None:
58 | minfreq = custom_minfreq
59 | else: minfreq = freqs.min()
60 |
61 | if custom_maxfreq is not None:
62 | maxfreq = custom_maxfreq
63 | else: maxfreq = freqs.max()
64 |
65 | alph,c = source_power_law_fit(source,minfreq,maxfreq)
66 | print(alph)
67 |
68 | freqvec = np.linspace(freqs.min(),freqs.max(),5000)
69 | model = func_powerlaw(freqvec,alph,c)
70 |
71 | plt.plot(freqvec,model,'r-',label='Power-law model')
72 | plt.errorbar(freqs,fluxes,yerr=fluxes*errs/100.0,marker='o',mfc='blue',markersize=2.5,capsize=2,capthick=0.5,fmt='go',label='Actual values')
73 |
74 | plt.xlabel('Frequency (MHz)')
75 | plt.ylabel('Flux (Jy)')
76 | plt.yscale('log')
77 | plt.title('Predicted and Actual Spectrum of '+source)
78 | plt.legend()
79 | if name is not None:
80 | plt.savefig(name,dpi=250)
81 | plt.show()
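82 |
83 | # Example usage (source and file names are illustrative; any source column
84 | # from calibrators.txt, e.g. '3C286', works as the first argument):
85 | #
86 | #     plot_flux_comp('3C286', name='3C286_fit.png',
87 | #                    custom_minfreq=300.0, custom_maxfreq=2000.0)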
82 |
--------------------------------------------------------------------------------
/blimpy/dice.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """
3 | Script to dice data to the coarse-channel level. Reads BL FIL or HDF5 files and outputs a new file with '_diced' appended to the file name.
4 |
5 | ..author: Greg Hellbourg (gregory.hellbourg@berkeley.edu)
6 |
7 | March 2018
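8 |
9 | Example invocation (file name and frequencies are illustrative):
10 |
11 |     bldice -f input.h5 -b 8418.0 -e 8421.0 -x h5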
8 | """
9 |
10 |
11 | from .waterfall import Waterfall
12 | import argparse
13 | import math
14 | import sys
15 | from .utils import change_the_ext
16 |
17 | #------
18 | # Logging set up
19 | import logging
20 | logger = logging.getLogger(__name__)
21 |
22 | level_log = logging.INFO
23 |
24 | if level_log == logging.INFO:
25 | stream = sys.stdout
26 | format = '%(name)-15s %(levelname)-8s %(message)s'
27 | else:
28 | stream = sys.stderr
29 |     format = '%(relativeCreated)5d %(name)-15s %(levelname)-8s %(message)s'
30 |
31 | logging.basicConfig(format=format,stream=stream,level = level_log)
32 | #------
33 |
34 | def cmd_tool(args=None):
35 | """ Dices (extracts frequency range) hdf5 or fil files to new file.
36 |
37 | optional arguments:
38 | -h, --help show this help message and exit
39 | -f IN_FNAME, --input_filename IN_FNAME
40 | Name of file to write from (HDF5 or FIL)
41 | -b F_START Start frequency in MHz
42 | -e F_STOP Stop frequency in MHz
43 | -x OUT_FORMAT, --output_file OUT_FORMAT
44 | Output file format [.h5 or .fil].
45 | -o OUT_FNAME, --output_filename OUT_FNAME
46 | Ouput file name to write (to HDF5 or FIL).
47 | -l MAX_LOAD Maximum data limit to load.
48 | """
49 |
50 | parser = argparse.ArgumentParser(description='Dices (extracts frequency range) hdf5 or fil files and writes to hdf5 or fil.')
51 | parser.add_argument('-f', '--input_filename', action='store', default=None, dest='in_fname', type=str, help='Name of file to write from (HDF5 or FIL)')
52 | parser.add_argument('-b', action='store', default=None, dest='f_start', type=float, help='Start frequency in MHz')
53 | parser.add_argument('-e', action='store', default=None, dest='f_stop', type=float, help='Stop frequency in MHz')
54 | parser.add_argument('-x', '--output_file', action='store', default=None, dest='out_format', type=str, help='Output file format [.h5 or .fil].')
55 |     parser.add_argument('-o', '--output_filename', action='store', default=None, dest='out_fname', type=str, help='Output file name to write (to HDF5 or FIL).')
56 | parser.add_argument('-l', action='store', default=None, dest='max_load', type=float,help='Maximum data limit to load.')
57 |
58 | if args is None:
59 | args = sys.argv[1:]
60 |
61 | if len(sys.argv) == 1:
62 | logger.error('Indicate file name and start and stop frequencies')
63 | sys.exit()
64 |
65 | args = parser.parse_args(args)
66 |
67 | if args.in_fname is None:
68 | logger.error('Need to indicate input file name')
69 | sys.exit()
70 |
71 | if args.out_fname is None:
72 | if (args.out_format is None) or (args.out_format == 'h5'):
73 | if args.in_fname[len(args.in_fname)-4:] == '.fil':
74 | args.out_fname = args.in_fname
75 | args.out_fname = change_the_ext(args.out_fname, 'fil','_diced.h5')
76 | elif args.in_fname[len(args.in_fname)-3:] == '.h5':
77 | args.out_fname = args.in_fname
78 | args.out_fname = change_the_ext(args.out_fname,'.h5','_diced.h5')
79 | else:
80 | logger.error('Input file not recognized')
81 | sys.exit()
82 | elif args.out_format == 'fil':
83 | if args.in_fname[len(args.in_fname)-4:] == '.fil':
84 | args.out_fname = args.in_fname
85 | args.out_fname = change_the_ext(args.out_fname, 'fil','_diced.fil')
86 | elif args.in_fname[len(args.in_fname)-3:] == '.h5':
87 | args.out_fname = args.in_fname
88 | args.out_fname = change_the_ext(args.out_fname,'.h5','_diced.fil')
89 | else:
90 | logger.error('input file not recognized.')
91 | sys.exit()
92 |
93 | else:
94 | logger.error('Must indicate either output file name or valid output file extension.')
95 | sys.exit()
96 |
97 | elif (args.out_fname[len(args.out_fname)-4:] == '.fil') and (args.out_format == 'h5'):
98 | logger.error('Output file extension does not match output file name')
99 | sys.exit()
100 |
101 | elif (args.out_fname[len(args.out_fname)-3:] == '.h5') and (args.out_format == 'fil'):
102 | logger.error('Output file extension does not match output file name.')
103 | sys.exit()
104 |
105 | if (args.out_fname[len(args.out_fname)-3:] != '.h5') and (args.out_fname[len(args.out_fname)-4:] != '.fil'):
106 | logger.error('Indicate output file name with extension, or simply output file extension.')
107 | sys.exit()
108 |
109 | if args.f_start == None and args.f_stop == None:
110 | logger.error('Please give either start and/or end frequencies. Otherwise use fil2h5 or h52fil functions.')
111 | sys.exit()
112 |
113 | #Read start frequency and bandwidth from data set
114 | file_big = Waterfall(args.in_fname, max_load = args.max_load)
115 | f_min_file = file_big.header['fch1']
116 | f_max_file = file_big.header['fch1'] + file_big.header['nchans'] * file_big.header['foff']
117 |
118 |     if args.f_start is None:
119 |         args.f_start = f_min_file
120 |         logger.warning('Lower frequency not given, setting to ' + str(f_min_file) + ' MHz to match file.')
121 |
122 |     if args.f_stop is None:
123 |         args.f_stop = f_max_file
124 |         logger.warning('Higher frequency not given, setting to ' + str(f_max_file) + ' MHz to match file.')
125 | if f_max_file < f_min_file:
126 | f_max_file,f_min_file = f_min_file,f_max_file
127 |
128 | FreqBWFile = f_max_file-f_min_file
129 | stdDF = FreqBWFile / float(file_big.calc_n_coarse_chan()) #stdDF = 2.9296875
130 |
131 | if args.f_stop < args.f_start:
132 | args.f_stop,args.f_start = args.f_start,args.f_stop
133 |
134 | if args.f_start < f_max_file and args.f_start > f_min_file and args.f_stop > f_max_file:
135 | args.f_stop = f_max_file
136 | logger.warning('Higher frequency set to ' + str(f_max_file) + ' MHz to match file.')
137 |
138 | if args.f_stop < f_max_file and args.f_stop > f_min_file and args.f_start < f_min_file:
139 | args.f_start = f_min_file
140 | logger.warning('Lower frequency set to ' + str(f_min_file) + ' MHz to match file.')
141 |
142 | if args.f_start < f_min_file and args.f_stop > f_max_file:
143 | args.f_start = f_min_file
144 | args.f_stop = f_max_file
145 | logger.warning('Lower frequency set to ' + str(f_min_file) + ' MHz and higher frequency set to ' + str(f_max_file) + ' MHz to match file.')
146 | # print '\nindicated frequencies include file frequency span - no need to dice\n'
147 | # sys.exit()
148 |
149 | if min(args.f_start,args.f_stop) < f_min_file or max(args.f_start,args.f_stop) > f_max_file:
150 | logger.error('Bandwidth to extract must be within ' + str(f_min_file) + ' MHz and ' + str(f_max_file) + ' MHz.')
151 | sys.exit()
152 |
153 | # calculate real coarse channel begin and end freqs
154 | f_start_real = math.floor((min(args.f_start,args.f_stop) - f_min_file)/stdDF)*stdDF + f_min_file
155 | f_stop_real = f_max_file - math.floor((f_max_file - max(args.f_start,args.f_stop))/stdDF)*stdDF
156 |
157 | # print "true start frequency is " + str(f_start_real)
158 | # print "true stop frequency is " + str(f_stop_real)
159 |
160 | logger.info('Writing to ' + args.out_fname)
161 |     logger.info('Extracting from ' + str(f_start_real) + ' MHz to ' + str(f_stop_real) + ' MHz.')
162 |
163 | # create waterfall object
164 | file_small = Waterfall(args.in_fname, f_start = f_start_real, f_stop = f_stop_real, max_load = args.max_load)
165 |
166 | # write waterfall object
167 | if args.out_fname[len(args.out_fname)-4:] == '.fil':
168 | file_small.write_to_fil(args.out_fname)
169 | elif args.out_fname[len(args.out_fname)-3:] == '.h5':
170 | file_small.write_to_hdf5(args.out_fname)
171 | else:
172 |         logger.error('Error in output file creation: verify output file name and extension.')
173 | sys.exit()
174 |
175 |
176 | if __name__ == "__main__":
177 | cmd_tool()
178 |
--------------------------------------------------------------------------------
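
A quick usage sketch for the dicing tool above. The option flags are defined earlier in dice.py, outside this excerpt, so the flag names below are assumptions inferred from the argument fields used in cmd_tool (in_fname, out_fname, f_start, f_stop), and the file names are hypothetical; check `dice -h` for the real interface.

```python
# Hypothetical invocation of the dicing tool; verify flags with `dice -h`.
from blimpy import dice

dice.cmd_tool([
    "-f", "voyager.fil",        # input file (.fil or .h5)
    "-b", "8419.24",            # lower frequency bound [MHz]
    "-e", "8419.35",            # upper frequency bound [MHz]
    "-o", "voyager_diced.fil",  # output file name
])
```
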
/blimpy/dsamp.py:
--------------------------------------------------------------------------------
1 | """
2 | Downsample an input Filterbank file (.fil or .h5)
3 | to an output .h5 Filterbank file.
4 | """
5 |
6 |
7 | # External dependencies:
8 | import sys
9 | import pathlib
10 | import time
11 | from argparse import ArgumentParser
12 | import numpy as np
13 |
14 |
15 | # Logging set up:
16 | import logging
17 | LOGGER = logging.getLogger(__name__)
18 | FMT = "%(name)-15s %(levelname)-8s %(message)s"
19 | logging.basicConfig(format=FMT, stream=sys.stdout, level = logging.INFO)
20 |
21 |
22 | # Blimpy functions required:
23 | from blimpy import Waterfall
24 | from blimpy.io.hdf_writer import __write_to_hdf5_heavy as write_to_h5
25 | from blimpy.io.fil_writer import write_to_fil
26 |
27 |
28 | def downer(in_np_array, in_tsamp, group_size, out_dtype="float32"):
29 | """
30 |     Downsample along the time axis by summing groups of samples.
31 |
32 |     For every group_size time samples of the input array,
33 | sum the element values into one total.
34 | The number of output samples = input array time dimension
35 | integer-divided by group_size.
36 | If the remainder of that division > 0,
37 | then the excess samples from the input array are dropped.
38 |
39 | Parameters
40 | ----------
41 | in_np_array : Numpy array
42 | Input data before summing.
43 | in_tsamp : float
44 | Input time sample size.
45 | group_size : int
46 | Group size for the purpose of summing 2 or more time samples.
47 | out_dtype : str
48 | Output data type. Default is "float32".
49 |
50 | Returns
51 | -------
52 | Success:
53 | Downsampled data
54 | Output time sample size
55 | Output number of time integrations
56 | Failure: None, None, None.
57 |
58 | """
59 | # Get input array shape
60 | in_shape = in_np_array.shape
61 | if len(in_shape) != 3:
62 | LOGGER.error(f"Input array has {len(in_shape)} dimensions but 3 are required (time, nifs, fine-freqs) !!")
63 | return None, None, None
64 | if group_size < 2:
65 | LOGGER.error(f"Input group size ({group_size}) but it must be at least 2 !!")
66 | return None, None, None
67 |
68 | # Compute the number of sums.
69 | out_nints = np.floor_divide(in_shape[0], group_size)
70 | if out_nints < 1:
71 | LOGGER.error(f"Input group size ({group_size}) is larger than the time dimension of the input data ({in_shape[0]}) !!")
72 | return None, None, None
73 | LOGGER.info(f"Total input time samples to be dropped just before EOF = {in_shape[0] % group_size}")
74 |
75 | # Compute output time sample size.
76 | out_tsamp = in_tsamp * group_size
77 |
78 | # Initialise output array.
79 | out_np_array = np.zeros((out_nints, in_shape[1], in_shape[2]), dtype=out_dtype)
80 |
81 | # ii1 : time index that is bumped by group_size
82 | ii1 = 0
83 |
84 | # For each output row .....
85 | for mm in range(0, out_nints):
86 |
87 | # For each time row of the input array to sum for current output row .....
88 | for ii2 in range(ii1, ii1 + group_size):
89 |
90 | # For each polarisation in the row .....
91 | for jj in range(0, in_shape[1]):
92 |
93 |                 # For each fine channel column in the polarisation .....
94 | for kk in range(0, in_shape[2]):
95 |
96 | # Increment output element by an input element.
97 | out_np_array[mm, jj, kk] += in_np_array[ii2, jj, kk]
98 |
99 | # Log progress.
100 | LOGGER.info(f"Completed {mm + 1} of {out_nints} output time samples.")
101 |
102 | # Point to the next group.
103 | ii1 += group_size
104 |
105 | # Done. Return output array.
106 | return out_np_array, out_tsamp, out_nints
107 |
108 |
109 | def make_output_file(in_path, out_path, group_size, flag_h5):
110 | """
111 | 1. Load input filterbank .fil or .h5 file.
112 | 2. Call downer to perform down-sampling.
113 | 3. Save result to the specified file.
114 |
115 | Args:
116 | in_path (str): Name of filterbank file to load
117 |         out_path (str): Path of the output Filterbank file; its .fil/.h5
118 |                 extension should agree with flag_h5.
119 |         group_size (int): Group size for the purpose of summing 2 or more time samples.
120 |         flag_h5 (bool): If True, write HDF5 (.h5) output; else write SIGPROC (.fil).
121 |     """
122 | # Load input filterbank .fil or .h5 file.
123 | wf = Waterfall(in_path, max_load=None)
124 |
125 | # Down-sample input.
126 | t0 = time.time()
127 | out_data, out_tsamp, out_ntints = downer(wf.data, wf.header["tsamp"], group_size)
128 | if out_data is None:
129 | return 1
130 | LOGGER.info(f"Down-sampling time: {time.time() - t0 :f}s")
131 | LOGGER.info(f"Input data shape: {wf.data.shape}")
132 |
133 | # Write output file.
134 | wf.header["tsamp"] = out_tsamp
135 | wf.n_ints_in_file = out_ntints
136 | wf.selection_shape = (out_ntints, wf.header["nifs"], wf.n_channels_in_file)
137 | wf.file_shape = wf.selection_shape
138 | wf.data = out_data
139 | LOGGER.info(f"Output data shape: {wf.data.shape}")
140 | t0 = time.time()
141 | if flag_h5:
142 | write_to_h5(wf, out_path)
143 | else:
144 | write_to_fil(wf, out_path)
145 | LOGGER.info(f"Write-output time: {time.time() - t0 :f}s")
146 | return 0
147 |
148 |
149 | def cmd_tool(args=None):
150 | """ Command line utility for downsampling a Filterbank file.
151 | """
152 |
153 | parser = ArgumentParser(description="Downsample an input Filterbank file (.fil or .h5) to an output .h5 Filterbank file.")
154 | parser.add_argument("in_path", type=str, help="Path of input Filterbank file (.fil or .h5)")
155 | parser.add_argument("out_path", type=str, help="Path of output Filterbank file (.fil or .h5)")
156 | parser.add_argument("-s", "--group_size", dest="group_size", type=int, required=True,
157 | help="Group size for the purpose of summing 2 or more time samples. Required.")
158 |
159 | if args is None:
160 | args = parser.parse_args()
161 | else:
162 | args = parser.parse_args(args)
163 |
164 | if args.group_size < 2:
165 | LOGGER.error(f"Input group size = {args.group_size} but it must be at least 2 !!")
166 | sys.exit(1)
167 |
168 | in_ext = pathlib.Path(args.in_path).suffix
169 | out_ext = pathlib.Path(args.out_path).suffix
170 | if in_ext not in [".fil", ".h5"]:
171 | LOGGER.error("Input file extension must be .fil or .h5 !!")
172 | sys.exit(1)
173 | if out_ext not in [".fil", ".h5"]:
174 | LOGGER.error("Output file extension must be .fil or .h5 !!")
175 | sys.exit(1)
176 | flag_h5 = bool(out_ext == ".h5")
177 |
178 | rc = make_output_file(args.in_path,
179 | args.out_path,
180 | args.group_size,
181 | flag_h5)
182 |
183 | if rc != 0:
184 | sys.exit(rc)
185 |
186 |
187 | if __name__ == "__main__":
188 | cmd_tool()
189 |
--------------------------------------------------------------------------------
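
The quadruple loop in downer() above is easy to follow but slow for large arrays. Below is a vectorized sketch of the same reduction (sum each group of group_size time rows); it is not part of dsamp.py, just an illustration of the technique:

```python
import numpy as np

def downer_vectorized(in_np_array, in_tsamp, group_size, out_dtype="float32"):
    # Trim the time axis to a multiple of group_size, then fold each group
    # of rows into its own axis and sum over it.
    n_time, n_ifs, n_chans = in_np_array.shape
    out_nints = n_time // group_size
    trimmed = in_np_array[: out_nints * group_size]   # drop the remainder samples
    summed = trimmed.reshape(out_nints, group_size, n_ifs, n_chans).sum(axis=1)
    return summed.astype(out_dtype), in_tsamp * group_size, out_nints

# 10 time samples summed in groups of 4 -> 2 output samples, tsamp x4.
data = np.ones((10, 1, 8), dtype="float32")
out, tsamp, nints = downer_vectorized(data, 1.0, 4)
assert out.shape == (2, 1, 8) and tsamp == 4.0 and nints == 2
```
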
/blimpy/ephemeris/__init__.py:
--------------------------------------------------------------------------------
1 | from .compute_lst import compute_lst
2 | from .compute_lsrk import compute_lsrk
3 | from .observatory import Observatory
--------------------------------------------------------------------------------
/blimpy/ephemeris/compute_lsrk.py:
--------------------------------------------------------------------------------
1 | from .config import *
2 | from .compute_lst import compute_lst
3 |
4 | def compute_lsrk(wf):
5 | """ Computes the LSR in km/s
6 |
7 | Computes the Local standard of rest kinematic using the time (MJD),
8 | RA and DEC of the observation to compute along with the telescope location.
9 | Requires pyslalib
10 |
11 | Args:
12 | wf (bl.Waterfall): Waterfall object for which to compute LSR
13 | """
14 | ra = Angle(wf.header['src_raj'], unit='hourangle')
15 | dec = Angle(wf.header['src_dej'], unit='degree')
16 | mjdd = wf.header['tstart']
17 | rarad = ra.to('radian').value
18 | dcrad = dec.to('radian').value
19 | last = compute_lst(wf)
20 | tellat = np.deg2rad(wf.coords[0])
21 | tellong = np.deg2rad(wf.coords[1])
22 |
23 | # convert star position to vector
24 | starvect = s.sla_dcs2c(rarad, dcrad)
25 |
26 | # velocity component in ra,dec due to Earth rotation
27 | Rgeo = s.sla_rverot(tellat, rarad, dcrad, last)
28 |
29 | # get Barycentric and heliocentric velocity and position of the Earth.
30 | evp = s.sla_evp(mjdd, 2000.0)
31 | dvb = evp[0] # barycentric velocity vector, in AU/sec
32 | dpb = evp[1] # barycentric position vector, in AU
33 | dvh = evp[2] # heliocentric velocity vector, in AU/sec
34 | dph = evp[3] # heliocentric position vector, in AU
35 |
36 | # dot product of vector to object and heliocentric velocity
37 | # convert AU/sec to km/sec
38 | vcorhelio = -s.sla_dvdv(starvect, dvh) * 149.597870e6
39 | vcorbary = -s.sla_dvdv(starvect, dvb) * 149.597870e6
40 |
41 | # rvlsrd is velocity component in ra,dec direction due to the Sun's
42 | # motion with respect to the "dynamical" local standard of rest
43 | rvlsrd = s.sla_rvlsrd(rarad, dcrad)
44 |
45 |     # rvlsrk is velocity component in ra,dec direction due to
46 |     # the Sun's motion w.r.t. the "kinematic" local standard of rest
47 | rvlsrk = s.sla_rvlsrk(rarad, dcrad)
48 |
49 | # rvgalc is velocity component in ra,dec direction due to
50 | # the rotation of the Galaxy.
51 | rvgalc = s.sla_rvgalc(rarad, dcrad)
52 | totalhelio = Rgeo + vcorhelio
53 | totalbary = Rgeo + vcorbary
54 | totallsrk = totalhelio + rvlsrk
55 | totalgal = totalbary + rvlsrd + rvgalc
56 |
57 | return totallsrk
58 |
--------------------------------------------------------------------------------
/blimpy/ephemeris/compute_lst.py:
--------------------------------------------------------------------------------
1 | from .config import *
2 |
3 | def compute_lst(wf):
4 | """ Compute LST for observation
5 |
6 | Computes local sidereal time (LST) for the observation, using SLALIB.
7 |
8 | Args:
9 | wf (bl.Waterfall): blimpy Waterfall object.
10 | """
11 | if wf.header['telescope_id'] == 6:
12 | wf.coords = gbt_coords
13 | elif wf.header['telescope_id'] == 4:
14 | wf.coords = parkes_coords
15 | else:
16 | raise RuntimeError("Currently only Parkes and GBT supported")
17 | if HAS_SLALIB:
18 | # dut1 = (0.2 /3600.0) * np.pi/12.0
19 | dut1 = 0.0
20 | mjd = wf.header['tstart']
21 | tellong = np.deg2rad(wf.coords[1])
22 | last = s.sla_gmst(mjd) - tellong + s.sla_eqeqx(mjd) + dut1
23 | # lmst = s.sla_gmst(mjd) - tellong
24 | if last < 0.0: last = last + 2.0 * np.pi
25 | return last
26 | else:
27 | raise RuntimeError("This method requires pySLALIB")
28 |
--------------------------------------------------------------------------------
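
A usage sketch for the two ephemeris helpers above. The file name is hypothetical, pyslalib must be installed, and the file's telescope_id header must be 4 (Parkes) or 6 (GBT), per compute_lst:

```python
from blimpy import Waterfall
from blimpy.ephemeris import compute_lst, compute_lsrk

wf = Waterfall("voyager.h5")             # hypothetical GBT observation
print("LST [rad]:  ", compute_lst(wf))   # local apparent sidereal time
print("LSRK [km/s]:", compute_lsrk(wf))  # kinematic LSR velocity correction
```
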
/blimpy/ephemeris/config.py:
--------------------------------------------------------------------------------
1 | try:
2 | from pyslalib import slalib as s
3 | HAS_SLALIB = True
4 | except ImportError:
5 | HAS_SLALIB = False
6 |
7 | import numpy as np
8 | from astropy.coordinates import Angle
9 |
10 | # Telescope coordinates (needed for LSR calc)
11 | parkes_coords = (-32.998370, 148.263659, 324.00)
12 | gbt_coords = (38.4331294, 79.8398397, 824.36)
13 |
--------------------------------------------------------------------------------
/blimpy/ephemeris/observatory.py:
--------------------------------------------------------------------------------
1 | r"""
2 | observatory.py
3 | blimpy
4 | """
5 |
6 | import os
7 | import pandas as pd
8 |
9 | DEBUGGING = False
10 |
11 |
12 | class Observatory:
13 | r""" Class for handling observatory data.
14 | The Pandas dataframe is defined in ./observatory_info.csv
15 | """
16 |
17 |
18 | def __init__(self, telescope_id=None, telescope_name=None):
19 | """ init method for Observatory class
20 | Parameters (one or the other or neither):
21 | telescope_id (int): sigproc telescope_id
22 | telescope_name (str): telescope name
23 | If neither parameter is specified, then the fake telescope is used.
24 |         Raises:
25 |             RuntimeError: if the requested telescope cannot be found.
26 | """
27 | abs_path = os.path.dirname(os.path.realpath(__file__))
28 | path_csv = os.path.join(abs_path, 'observatory_info.csv')
29 | df_full = pd.read_csv(path_csv, sep=',', engine='python', comment='#')
30 | if DEBUGGING:
31 | print("\nObservatory __init__ path_csv:", path_csv)
32 | print("Observatory __init__ df_full:\n", df_full)
33 |         if telescope_id is not None:
34 |             fields_dict = df_full.loc[df_full['TELESCOPE_ID'] == telescope_id].to_dict('list')
35 |             # .loc returns an empty frame (not an exception) for an unknown id, so test explicitly.
36 |             if not fields_dict['TELESCOPE_ID']:
37 |                 raise RuntimeError("Observatory __init__ telescope_id={} is invalid!"
38 |                                    .format(telescope_id))
39 |         elif telescope_name is not None:
40 |             fields_dict = df_full.loc[df_full['TELESCOPE_NAME'] == telescope_name].to_dict('list')
41 |             # The same empty-result check applies to an unknown name.
42 |             if not fields_dict['TELESCOPE_NAME']:
43 |                 raise RuntimeError("Observatory __init__ telescope_name={} is invalid!"
44 |                                    .format(telescope_name))
45 |         else:
46 |             fields_dict = df_full.loc[df_full['TELESCOPE_ID'] == 0].to_dict('list')
47 |             # The fake telescope (id 0) should always be present in the CSV.
48 |             if not fields_dict['TELESCOPE_ID']:
49 |                 raise RuntimeError("Observatory __init__ Cannot find the fake telescope!")
50 | if DEBUGGING:
51 | print("\nObservatory __init__ fields_dict:\n", fields_dict)
52 | self.telescope_name = fields_dict.get('TELESCOPE_NAME')[0]
53 | self.telescope_name_short = fields_dict.get('TELESCOPE_NAME_SHORT')[0]
54 | self.telescope_id = fields_dict.get('TELESCOPE_ID')[0]
55 | self.dish_diameter = fields_dict.get('DISH_DIAMETER')[0]
56 | self.xyz_coords = [
57 | fields_dict.get('X')[0],
58 | fields_dict.get('Y')[0],
59 | fields_dict.get('Z')[0] ]
60 |
61 |
62 | def get_telescope_name(self):
63 | r""" Return the telescope name to caller. """
64 | return self.telescope_name
65 |
66 |
67 | def get_telescope_name_short(self):
68 | r""" Return the short telescope name to caller. """
69 | return self.telescope_name_short
70 |
71 |
72 | def get_telescope_id(self):
73 | r""" Return the SIGPROC ID to caller. """
74 | return self.telescope_id
75 |
76 |
77 | def get_xyz_coords(self):
78 | r""" Return the X Y Z coordinates to caller. """
79 | return self.xyz_coords
80 |
81 |
82 | def get_dish_diameter(self):
83 | r""" Return the dish diameter to caller. """
84 | return self.dish_diameter
85 |
86 |
87 | def calc_beam_halfwidth(self, freq):
88 | """ Calculates beam halfwidth
89 | Code adapted from PRESTO
90 | Note: returns -1 if dish diameter = -1.
91 | Parameters:
92 | freq (int or float): frequency in MHz
93 | Returns:
94 | float: beam halfwidth in arcsec
95 | """
96 | # constants from PRESTO
97 | rad_to_deg = 57.29577951308232087679815481410517033240547246656
98 | sol = 299792458.0
99 | if self.dish_diameter == -1:
100 | return -1
101 | return 0.5 * 1.2 * sol / (freq * 1e6) / \
102 | self.dish_diameter * rad_to_deg * 3600.0
103 |
104 |
105 | def get_string(self):
106 | """ str method overload """
107 | fmt = 'Observatory: {}, short name: {}, telescope_id: {}, dish diameter: {}' \
108 | + ', (X, Y, Z): ({}, {}, {})'
109 | return fmt.format(self.telescope_name, self.telescope_name_short,
110 | self.telescope_id, self.dish_diameter,
111 | self.xyz_coords[0], self.xyz_coords[1], self.xyz_coords[2])
112 |
113 |
114 | if __name__ == "__main__":
115 | obs = Observatory(telescope_id=6)
116 | print(obs.get_string())
117 |
--------------------------------------------------------------------------------
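
A short example of the Observatory class above, using the GBT row from the CSV that follows:

```python
from blimpy.ephemeris import Observatory

gbt = Observatory(telescope_id=6)   # sigproc id 6 = GBT
print(gbt.get_string())
# Beam halfwidth at 1420 MHz in arcsec (-1 if the dish diameter is unknown):
print(gbt.calc_beam_halfwidth(1420.0))
```
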
/blimpy/ephemeris/observatory_info.csv:
--------------------------------------------------------------------------------
1 | #### observatory_info.csv ###
2 | # Geodetic locations of Earth-based observatories
3 | #
4 | ### Table fields ###
5 | # TELESCOPE_NAME: Name of telescope
6 | # TELESCOPE_ID: Observatory identifier as assigned by SIGPROC standard
7 | # X Y Z: Coordinates of telescope
8 | # DISH_DIAMETER: Dish diameter if applicable; else -1
9 | # DATE_ADDED: date record was added or updated
10 | # ADDED_BY: author of update
11 | TELESCOPE_NAME,TELESCOPE_NAME_SHORT,TELESCOPE_ID,X,Y,Z,DISH_DIAMETER,DATE_ADDED,ADDED_BY
12 | "Fake","F",0,0.000,0.000,0.000,-1,"2020-01-01","Daniel R"
13 | "ARECIBO","AO",1,2390490.0,-5564764.0,1994727.0,200.0,"2020-01-01","Daniel R"
14 | "Nancay","NC",3,4324165.81,165927.11,4670132.83,100.0,"2020-01-01","Daniel R"
15 | "PARKES","PK",4,-4554231.5,2816759.1,-3454036.3,64.0,"2020-01-01","Daniel R"
16 | "JODRELL","JB",5,3822252.643,-153995.683,5086051.443,76.0,"2020-01-01","Daniel R"
17 | "GBT","GB",6,882589.65,-4924872.32,3943729.348,100.0,"2020-01-01","Daniel R"
18 | "GMRT","GM",7,1656342.30,5797947.77,2073243.16,-1,"2020-01-01","Daniel R"
19 | "Effelsberg","EF",8,4033949.5,486989.4,4900430.8,100.0,"2020-01-01","Daniel R"
20 | "ATA","AT",9,404904.0,1212824.0,1280.0,-1,"2020-01-01","Daniel R"
21 | "SRT","SR",10,4865182.7660,791922.6890,4035137.1740,-1,"2020-01-01","Daniel R"
22 | "LOFAR","LF",11,3826577.462,461022.624,5064892.526,-1,"2020-01-01","Daniel R"
23 | "VLA","VL",12,-1601192.0,-5041981.4,3554871.4,-1,"2020-01-01","Daniel R"
24 | "CHIME","CH",20,-2059164.942,-3621108.403,4814432.276,20.0,"2020-01-01","Daniel R"
25 | "MeerKAT","MK",64,0.000,0.000,0.000,-1,"2021-06-09","Richard Elkins"
26 |
27 |
--------------------------------------------------------------------------------
/blimpy/fil2h5.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """
3 | Simple script for making an h5 file from a .fil.
4 |
5 | ..author: Emilio Enriquez (jeenriquez@gmail.com)
6 |
7 | July 28th 2017
8 | """
9 |
10 | import sys
11 | import os
12 | import time
13 | from argparse import ArgumentParser
14 | from .utils import change_the_ext
15 |
16 | # Logging set up
17 | import logging
18 | logger = logging.getLogger(__name__)
19 |
20 | level_log = logging.INFO
21 |
22 | if level_log == logging.INFO:
23 | stream = sys.stdout
24 | fmt = '%(name)-15s %(levelname)-8s %(message)s'
25 | else:
26 | stream = sys.stderr
27 |     fmt = '%(relativeCreated)5d %(name)-15s %(levelname)-8s %(message)s'
28 | logging.basicConfig(format=fmt,stream=stream,level = level_log)
29 |
30 |
31 | from blimpy import Waterfall
32 | from blimpy.io.hdf_writer import __write_to_hdf5_heavy as write_to_h5
33 |
34 |
35 | def make_h5_file(filename, out_dir='./', new_filename=None, t_start=None, t_stop=None):
36 | """ Converts file to HDF5 (.h5) format. Default saves output in current dir.
37 |
38 | Args:
39 | filename (str): Name of filterbank file to read
40 | out_dir (str): Output directory path. Defaults to cwd
41 | new_filename (None or str): Name of output filename. If not set, will default
42 | to same as input, but with .h5 instead of .fil
43 | t_start (int): Start integration ID to be extracted from file
44 | t_stop (int): Stop integration ID to be extracted from file
45 | """
46 |
47 | wf = Waterfall(filename, load_data=False, t_start=t_start, t_stop=t_stop)
48 |
49 | if not new_filename:
50 | new_filename = out_dir + change_the_ext(filename, 'fil', 'h5').split('/')[-1]
51 |
52 | t0 = time.time()
53 | write_to_h5(wf, new_filename)
54 | wf.logger.info('Conversion time: %2.2fsec' % (time.time()- t0))
55 |
56 |
57 | def cmd_tool(args=None):
58 | """ Command line utility for converting Sigproc filterbank (.fil) to HDF5 (.h5) format
59 |
60 | Usage:
61 | fil2h5 [options]
62 |
63 | Options:
64 | -h, --help show this help message and exit
65 | -o OUT_DIR, --out_dir=OUT_DIR
66 | Location for output files. Default: local dir.
67 |       -n NEW_FILENAME, --new_filename NEW_FILENAME
68 |                             New name. Default: replaces extension with .h5
69 |       -d, --delete_input    Delete the input file after conversion.
70 |       -s START_ID, -t STOP_ID  Start/stop integration IDs to extract.
71 | """
72 |
73 | parser = ArgumentParser(description="Command line utility for converting Sigproc filterbank (.fil) to HDF5 (.h5) format \n >>fil2h5 [options]")
74 | parser.add_argument("filepath_in", type=str, help="Path of input Filterbank file")
75 | parser.add_argument('-o', '--out_dir', dest='out_dir', type=str, default='./', help='Location for output files. Default: local dir. ')
76 |     parser.add_argument('-n', '--new_filename', dest='new_filename', type=str, default='', help='New name. Default: replaces extension with .h5')
77 | parser.add_argument('-d', '--delete_input', dest='delete_input', action='store_true', default=False, help='This option deletes the input file after conversion.')
78 | parser.add_argument('-s', '--start_id', dest='t_start', type=int, default=None, help='start integration ID')
79 | parser.add_argument('-t', '--stop_id', dest='t_stop', type=int, default=None, help='stop integration ID')
80 |
81 | if args is None:
82 | args = parser.parse_args()
83 | else:
84 | args = parser.parse_args(args)
85 |
86 | make_h5_file(args.filepath_in,
87 | out_dir = args.out_dir,
88 | new_filename = args.new_filename,
89 | t_start=args.t_start,
90 | t_stop=args.t_stop)
91 |
92 | if args.delete_input:
93 |         logger.info("Deleting input file: {}".format(args.filepath_in))
94 |         os.remove(args.filepath_in)
95 |
96 |
97 | if __name__ == "__main__":
98 | cmd_tool()
99 |
--------------------------------------------------------------------------------
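
A conversion sketch for the tool above; the file names are hypothetical:

```python
from blimpy import fil2h5

# Programmatic call: convert and write ./converted/voyager.h5.
fil2h5.make_h5_file("voyager.fil", out_dir="./converted/")

# CLI-style call: restrict the conversion to the given start/stop integration IDs.
fil2h5.cmd_tool(["voyager.fil", "-s", "0", "-t", "10"])
```
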
/blimpy/h52fil.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """
3 | Simple script for making a .fil file from a .h5.
4 |
5 | ..author: Emilio Enriquez (jeenriquez@gmail.com)
6 |
7 | July 28th 2017
8 | """
9 |
10 |
11 | from argparse import ArgumentParser
12 | import sys
13 | import os
14 | from blimpy import Waterfall
15 | from .utils import change_the_ext
16 |
17 | import logging
18 | logger = logging.getLogger(__name__)
19 |
20 | level_log = logging.INFO
21 |
22 | if level_log == logging.INFO:
23 | stream = sys.stdout
24 | fmt = '%(name)-15s %(levelname)-8s %(message)s'
25 | else:
26 | stream = sys.stderr
27 |     fmt = '%(relativeCreated)5d %(name)-15s %(levelname)-8s %(message)s'
28 |
29 | logging.basicConfig(format=fmt, stream=stream, level = level_log)
30 |
31 |
32 | def make_fil_file(filename,out_dir='./', new_filename=None, max_load = None):
33 | """ Converts file to Sigproc filterbank (.fil) format. Default saves output in current dir.
34 | """
35 |
36 | wf = Waterfall(filename, max_load = max_load)
37 |
38 | if not new_filename:
39 | new_filename = out_dir + change_the_ext(filename, 'h5', 'fil').split('/')[-1]
40 |
41 | wf.write_to_fil(new_filename)
42 |
43 |
44 | def cmd_tool(args=None):
45 | """ Command line utility for converting HDF5 (.h5) to Sigproc filterbank (.fil) format
46 |
47 | Usage:
48 | h52fil [options]
49 |
50 | Options:
51 | -h, --help show this help message and exit
52 | -o OUT_DIR, --out_dir=OUT_DIR
53 | Location for output files. Default: local dir.
54 | -n NEW_FILENAME, --new_filename=NEW_FILENAME
55 | New filename. Default: replaces extension to .fil
56 | -d, --delete_input This option deletes the input file after conversion.
57 | -l MAX_LOAD Maximum data limit to load. Default:1GB
58 | """
59 |
60 |     parser = ArgumentParser(description='Command line utility for converting HDF5 (.h5) to Sigproc filterbank (.fil) format \n >>h52fil [options]')
61 | parser.add_argument("filepath_in", type=str, help="Path of input HDF5 Filterbank file")
62 | parser.add_argument('-o', '--out_dir', dest='out_dir', type=str, default='./',
63 | help='Location for output files. Default: local dir. ')
64 | parser.add_argument('-n', '--new_filename', dest='new_filename', type=str, default='',
65 | help='New filename. Default: replaces extension to .fil')
66 | parser.add_argument('-d', '--delete_input', dest='delete_input', action='store_true', default=False,
67 | help='This option deletes the input file after conversion.')
68 | parser.add_argument('-l', action='store', default=None, dest='max_load', type=float,
69 | help='Maximum data limit to load. Default:1GB')
70 | if args is None:
71 | args = parser.parse_args()
72 | else:
73 | args = parser.parse_args(args)
74 |
75 | make_fil_file(args.filepath_in, out_dir = args.out_dir, new_filename=args.new_filename, max_load = args.max_load)
76 |
77 | if args.delete_input:
78 | logger.info("Deleting input file: {}".format(args.filepath_in))
79 | os.remove(args.filepath_in)
80 |
81 | if __name__ == "__main__":
82 |
83 | cmd_tool()
84 |
--------------------------------------------------------------------------------
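
A round-trip sketch pairing the two converters (hypothetical file names):

```python
from blimpy import fil2h5, h52fil

fil2h5.cmd_tool(["voyager.fil"])                       # -> ./voyager.h5
h52fil.cmd_tool(["voyager.h5", "-o", "./roundtrip/"])  # -> ./roundtrip/voyager.fil
```
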
/blimpy/h5diag.py:
--------------------------------------------------------------------------------
1 | r""" h5diag """
2 |
3 | import os
4 | import sys
5 | from argparse import ArgumentParser
6 | import h5py
7 | from astropy.coordinates import Angle
8 | from blimpy.io.hdf_reader import examine_h5
9 |
10 |
11 | def oops(msg):
12 | print(F"\n*** h5diag: {msg}\n")
13 | sys.exit(86)
14 |
15 |
16 | def read_header(h5):
17 | """ Read header and return a Python dictionary of key:value pairs
18 | """
19 |
20 | header = {} # Initialise as a nil dictionary.
21 |
22 | for key, val in h5["data"].attrs.items():
23 | #if six.PY3:
24 | # key = bytes(key, "ascii")
25 | if isinstance(val, bytes):
26 | val = val.decode("ascii")
27 | if key == "src_raj":
28 | header[key] = Angle(val, unit="hr")
29 | elif key == "src_dej":
30 | header[key] = Angle(val, unit="deg")
31 | else:
32 | header[key] = val
33 |
34 | return header
35 |
36 |
37 | def examine(filename):
38 | r""" Diagnose the given HDF5 file"""
39 | h5file = h5py.File(filename, mode="r")
40 | version = examine_h5(h5file)
41 | print("VERSION attribute:", version)
42 | header = read_header(h5file)
43 | print("header:", header)
44 | print("data shape:", h5file["data"].shape)
45 | if version >= 1.999:
46 | print("Number of fine channels:", header["nchans"])
47 | print("NFPC:", header["nfpc"])
48 | print("Number of coarse channels:", int(header["nchans"] / header["nfpc"]))
49 | print("Rawspec version:", h5file.attrs["VERSION_RAWSPEC"].decode('utf-8'))
50 | print("Librawspec version:", h5file.attrs["VERSION_LIBRAWSPEC"].decode('utf-8'))
51 | print("cuFFT version:", h5file.attrs["VERSION_CUFFT"].decode('utf-8'))
52 | print("HDF5 library version:", h5file.attrs["VERSION_HDF5"].decode('utf-8'))
53 | print("Bitshuffle:", h5file.attrs["BITSHUFFLE"].decode('utf-8'))
54 |
55 |
56 | def cmd_tool(args=None):
57 | """ Command line tool h5diag """
58 | parser = ArgumentParser(description="Command line utility for diagnosing HDF5 files.")
59 | parser.add_argument("filename", type=str, help="Path of file to read")
60 | if args is None:
61 | args = sys.argv[1:]
62 | parse_args = parser.parse_args(args)
63 |
64 | if not os.path.isfile(parse_args.filename):
65 | oops("Not a file: {}".format(parse_args.filename))
66 | if not h5py.is_hdf5(parse_args.filename):
67 | oops("Not an HDF5 file: {}".format(parse_args.filename))
68 |
69 | examine(parse_args.filename)
70 | print("\nNo errors detected")
71 |
72 |
73 | if __name__ == "__main__":
74 | cmd_tool()
75 |
--------------------------------------------------------------------------------
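
An invocation sketch for h5diag (hypothetical path):

```python
from blimpy import h5diag

# Prints the VERSION attribute, the header dictionary and the data shape,
# then "No errors detected" on success; oops() exits with code 86 on failure.
h5diag.cmd_tool(["voyager.h5"])
```
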
/blimpy/io/__init__.py:
--------------------------------------------------------------------------------
1 | from .fil_writer import write_to_fil
2 | from .hdf_writer import write_to_hdf5
--------------------------------------------------------------------------------
/blimpy/io/fil_writer.py:
--------------------------------------------------------------------------------
1 | """
2 | Procedures for writing to a Filterbank File
3 | """
4 | import time
5 | import numpy as np
6 | from .sigproc import generate_sigproc_header
7 |
8 |
9 | def write_to_fil(wf, filename_out):
10 | """ Write data to .fil file.
11 | It checks the file size then decides how to write the file.
12 |
13 | Args:
14 | wf : Waterfall object
15 | filename_out : str
16 | Name of output file
17 | """
18 |
19 | # For timing how long it takes to write a file.
20 | t0 = time.time()
21 |
22 | # Update header
23 | wf._update_header()
24 |
25 | if wf.container.isheavy():
26 | __write_to_fil_heavy(wf, filename_out)
27 | else:
28 | __write_to_fil_light(wf, filename_out)
29 |
30 | t1 = time.time()
31 | wf.logger.info('Conversion time: %2.2fsec' % (t1 - t0))
32 |
33 |
34 | def __write_to_fil_heavy(wf, filename_out):
35 | """ Write data to .fil file.
36 |
37 | Args:
38 | wf : Waterfall object
39 | filename_out : str
40 | Name of output file
41 | """
42 |
43 | # Note that a chunk is not a blob!!
44 | chunk_dim = wf._get_chunk_dimensions()
45 | blob_dim = wf._get_blob_dimensions(chunk_dim)
46 | n_blobs = wf.container.calc_n_blobs(blob_dim)
47 |
48 | # Calculate number of bytes per data element
49 |     n_bytes = wf.header['nbits'] // 8
50 |
51 | wf.logger.info("__write_to_fil_heavy: For {}, chunk_dim={}, blob_dim={}, n_blobs={}"
52 | .format(filename_out, chunk_dim, blob_dim, n_blobs))
53 |
54 | with open(filename_out, "wb") as fileh:
55 |
56 | # Write header of .fil file
57 | fileh.write(generate_sigproc_header(wf))
58 |
59 | # For each blob
60 | for ii in range(0, n_blobs):
61 |
62 | wf.logger.info('__write_to_fil_heavy: Processing %i of %i' % (ii + 1, n_blobs))
63 | bob = wf.container.read_blob(blob_dim, n_blob=ii)
64 |
65 | # Write data of .fil file.
66 | if n_bytes == 4:
67 | np.float32(bob.ravel()).tofile(fileh)
68 | elif n_bytes == 2:
69 | np.int16(bob.ravel()).tofile(fileh)
70 | elif n_bytes == 1:
71 | np.int8(bob.ravel()).tofile(fileh)
72 |
73 |
74 | def __write_to_fil_light(wf, filename_out):
75 | """ Write data to .fil file.
76 |
77 | Args:
78 | wf : Waterfall object
79 | filename_out : str
80 | Name of output file
81 | """
82 |
83 | wf.logger.info("__write_to_fil_light: Writing the spectra matrix for {} in one go."
84 | .format(filename_out))
85 |     n_bytes = wf.header['nbits'] // 8
86 | with open(filename_out, "wb") as fileh:
87 | fileh.write(generate_sigproc_header(wf))
88 | if n_bytes == 4:
89 | np.float32(wf.data.ravel()).tofile(fileh)
90 | elif n_bytes == 2:
91 | np.int16(wf.data.ravel()).tofile(fileh)
92 | elif n_bytes == 1:
93 | np.int8(wf.data.ravel()).tofile(fileh)
94 |
--------------------------------------------------------------------------------
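
A usage sketch for the public writer above (hypothetical files). Whether the heavy (blob-by-blob) or light (whole-matrix) path runs is decided internally by wf.container.isheavy():

```python
from blimpy import Waterfall
from blimpy.io.fil_writer import write_to_fil

wf = Waterfall("voyager.h5")          # hypothetical input
write_to_fil(wf, "voyager_copy.fil")  # header + data in SIGPROC layout
```
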
/blimpy/io/file_wrapper.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """ This model handles file types.
3 | """
4 |
5 | import os
6 | import h5py
7 | import six
8 |
9 | # This import relates to a failing project, so we will
10 | # probably want to remove the import at some point in the future.
11 | import blimpy.io.sigproc
12 |
13 | # import pdb;# pdb.set_trace()
14 |
15 | from blimpy.io.fil_reader import FilReader
16 | from blimpy.io.hdf_reader import H5Reader
17 |
18 | def open_file(filename, f_start=None, f_stop=None,t_start=None, t_stop=None,load_data=True,max_load=None):
19 | """Open a HDF5 or filterbank file
20 |
21 | Returns instance of a Reader to read data from file.
22 |
23 | ================== ==================================================
24 | Filename extension File type
25 | ================== ==================================================
26 | h5, hdf5 HDF5 format
27 | fil fil format
28 | *other* Will raise NotImplementedError
29 | ================== ==================================================
30 |
31 | """
32 |     # Expand ~ and environment variables before testing the path.
33 |     filename = os.path.expandvars(os.path.expanduser(filename))
34 |
35 |     if not os.path.isfile(filename):
36 |         raise IOError("No such file or directory: " + filename)
37 |
38 |     # Get file extension to determine type
39 |     ext = filename.split(".")[-1].strip().lower()
40 |
41 | if six.PY3:
42 | ext = bytes(ext, 'ascii')
43 |
44 | if h5py.is_hdf5(filename):
45 | # Open HDF5 file
46 | return H5Reader(filename, f_start=f_start, f_stop=f_stop, t_start=t_start, t_stop=t_stop,
47 | load_data=load_data, max_load=max_load)
48 | if blimpy.io.sigproc.is_filterbank(filename):
49 | # Open FIL file
50 | return FilReader(filename, f_start=f_start, f_stop=f_stop, t_start=t_start, t_stop=t_stop, load_data=load_data, max_load=max_load)
51 | raise NotImplementedError('Cannot open this type of file with Waterfall: {}'.format(filename))
52 |
--------------------------------------------------------------------------------
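
A dispatch sketch for open_file above (hypothetical path). Note that the reader type is chosen by probing the file contents (h5py.is_hdf5, then the sigproc header check), not by the extension alone:

```python
from blimpy.io.file_wrapper import open_file

reader = open_file("voyager.h5", f_start=8419.2, f_stop=8419.4)
print(type(reader).__name__)   # H5Reader for HDF5, FilReader for SIGPROC .fil
```
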
/blimpy/peek.py:
--------------------------------------------------------------------------------
1 | """
2 | Simple script for quickly peeking at the data matrix.
3 | """
4 |
5 | import sys
6 | from argparse import ArgumentParser
7 |
8 | from blimpy import Waterfall
9 |
10 |
11 | def oops(msg):
12 | print("\n*** OOPS, {} !!!".format(msg))
13 | sys.exit(86)
14 |
15 |
16 | def cmd_tool(args=None):
17 | """ Command line utility for peeking at the data matrix.
18 | """
19 |
20 | parser = ArgumentParser(description="Command line utility for peeking at a .fil or .h5 file")
21 | parser.add_argument("filepath_in", type=str, help="Path of input .fil or .h5 file")
22 | parser.add_argument("-i", "--start_tint", dest="start_tint", type=int, default=0,
23 | help="Starting integration index relative to 0. Default: 0")
24 | parser.add_argument("-Z", "--if", dest="the_IF", type=int, default=0,
25 | help="Starting IF index relative to 0. Default: 0")
26 | parser.add_argument("-c", "--start_fchan", dest="start_fchan", type=int, default=0,
27 | help="Starting fine channel index relative to 0. Default: 0")
28 |
29 |
30 | if args is None:
31 | args = parser.parse_args()
32 | else:
33 | args = parser.parse_args(args)
34 |
35 | wf = Waterfall(args.filepath_in)
36 |
37 | if args.start_tint < 0 or args.start_tint > (wf.n_ints_in_file - 2):
38 | oops("--start_tint is not a valid time integration index")
39 | if args.the_IF < 0:
40 | oops("--if is not a valid IF index")
41 | if args.start_fchan < 0 or args.start_fchan > (wf.header["nchans"] - 3):
42 | oops("--start_fchan is not a valid fine channel index")
43 |
44 | print("Fine channel frequency columns go this way ------->")
45 | print("Integration_{}: {} {} {}"
46 | .format(args.start_tint,
47 | wf.data[args.start_tint, args.the_IF, args.start_fchan],
48 | wf.data[args.start_tint, args.the_IF, args.start_fchan + 1],
49 | wf.data[args.start_tint, args.the_IF, args.start_fchan + 2]))
50 | print("Integration_{}: {} {} {}"
51 | .format(args.start_tint + 1,
52 | wf.data[args.start_tint + 1, args.the_IF, args.start_fchan],
53 | wf.data[args.start_tint + 1, args.the_IF, args.start_fchan + 1],
54 | wf.data[args.start_tint + 1, args.the_IF, args.start_fchan + 2]))
55 |
56 |
57 | if __name__ == "__main__":
58 | cmd_tool()
59 |
--------------------------------------------------------------------------------
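
An invocation sketch for peek (hypothetical file): print a 2x3 corner of the data matrix starting at integration 0, fine channel 100.

```python
from blimpy import peek

peek.cmd_tool(["voyager.h5", "-i", "0", "-c", "100"])
```
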
/blimpy/plotting/__init__.py:
--------------------------------------------------------------------------------
1 | from .plot_waterfall import plot_waterfall
2 | from .plot_spectrum import plot_spectrum
3 | from .plot_spectrum_min_max import plot_spectrum_min_max
4 | from .plot_kurtosis import plot_kurtosis
5 | from .plot_time_series import plot_time_series
6 | from .plot_all import plot_all
7 | from . import config as plot_config
8 |
--------------------------------------------------------------------------------
/blimpy/plotting/config.py:
--------------------------------------------------------------------------------
1 | """
2 | Blimpy Plotting Configuration
3 | This file is imported by the other plotting source files and blimpy/waterfall.py.
4 | matplotlib backends info:
 5 | https://matplotlib.org/3.5.0/users/explain/backends.html
6 | """
7 | import os
8 | import numpy as np
9 | import matplotlib
10 |
11 | # Define plt for caller.
12 | import matplotlib.pyplot as plt
13 | plt.rcParams['axes.formatter.useoffset'] = False
14 |
15 | # Define NullFormatter for caller.
16 | from matplotlib.ticker import NullFormatter
17 |
18 | #Define some constants for caller.
19 | MAX_PLT_POINTS = 65536 # Max number of points in matplotlib plot
20 | MAX_IMSHOW_POINTS = (8192, 4096) # Max number of points in imshow plot
21 |
22 |
23 | def ok_to_show():
24 | """
25 | Tell caller if the DISPLAY environment variable is set
26 | and therefore if plt.show() can be executed.
27 | Parameters
28 | ----------
29 | None.
30 | Returns
31 | -------
32 | bool
33 | Can plt.show() be executed (True/False)?
34 | """
35 | display = os.environ.get("DISPLAY", "empty")
36 | if display == "empty":
37 | print("blimpy plotting config.py setup_plotting_backend: DISPLAY is *empty*")
38 | return False
39 | print(f"blimpy plotting config.py setup_plotting_backend: DISPLAY is {display}")
40 | return True
41 |
42 |
43 | def print_plotting_backend(arg_context):
44 | """ Show which matplotlib backend is in use."""
45 | ok_to_show()
46 | print(f"blimpy plotting config.py ({arg_context}): matplotlib backend is {matplotlib.get_backend()}")
47 |
48 |
49 | def get_mpl_backend():
50 | return matplotlib.get_backend()
51 |
52 |
53 | def set_mpl_backend(backend):
54 | matplotlib.use(backend)
55 |
56 |
--------------------------------------------------------------------------------
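
A headless-plotting sketch built on the helpers above: if there is no X display, switch to the non-interactive Agg backend before drawing.

```python
from blimpy.plotting import plot_config

if not plot_config.ok_to_show():
    plot_config.set_mpl_backend("Agg")  # render to files only; plt.show() unavailable
print(plot_config.get_mpl_backend())
```
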
/blimpy/plotting/plot_all.py:
--------------------------------------------------------------------------------
1 | from .config import *
2 | from . import plot_time_series, plot_kurtosis, plot_spectrum_min_max, plot_waterfall, plot_spectrum
3 | from astropy import units as u
4 |
5 |
6 | def plot_all(wf, t=0, f_start=None, f_stop=None, logged=False, if_id=0, kurtosis=True, **kwargs):
7 | """ Plot waterfall of data as well as spectrum; also, placeholder to make even more complicated plots in the future.
8 |
9 | Args:
10 | f_start (float): start frequency, in MHz
11 | f_stop (float): stop frequency, in MHz
12 | logged (bool): Plot in linear (False) or dB units (True),
13 | t (int): integration number to plot (0 -> len(data))
14 | logged (bool): Plot in linear (False) or dB units (True)
15 | if_id (int): IF identification (if multiple IF signals in file)
16 | kwargs: keyword args to be passed to matplotlib plot() and imshow()
17 | """
18 | if wf.header['nbits'] <= 2:
19 | logged = False
20 |
21 | nullfmt = NullFormatter() # no labels
22 |
23 | # definitions for the axes
24 | left, width = 0.35, 0.5
25 | bottom, height = 0.45, 0.5
26 | width2, height2 = 0.1125, 0.15
27 | bottom2, left2 = bottom - height2 - .025, left - width2 - .02
28 | bottom3, left3 = bottom2 - height2 - .025, 0.075
29 |
30 | rect_waterfall = [left, bottom, width, height]
31 | rect_colorbar = [left + width, bottom, .025, height]
32 | rect_spectrum = [left, bottom2, width, height2]
33 | rect_min_max = [left, bottom3, width, height2]
34 | rect_timeseries = [left + width, bottom, width2, height]
35 | rect_kurtosis = [left3, bottom3, 0.25, height2]
36 | rect_header = [left3 - .05, bottom, 0.2, height]
37 |
38 | # --------
39 | # axColorbar = plt.axes(rect_colorbar)
40 | # print 'Ploting Colorbar'
41 | # print plot_data.max()
42 | # print plot_data.min()
43 | #
44 | # plot_colorbar = range(plot_data.min(),plot_data.max(),int((plot_data.max()-plot_data.min())/plot_data.shape[0]))
45 | # plot_colorbar = np.array([[plot_colorbar],[plot_colorbar]])
46 | #
47 | # plt.imshow(plot_colorbar,aspect='auto', rasterized=True, interpolation='nearest',)
48 |
49 | # axColorbar.xaxis.set_major_formatter(nullfmt)
50 | # axColorbar.yaxis.set_major_formatter(nullfmt)
51 |
52 | # heatmap = axColorbar.pcolor(plot_data, edgecolors = 'none', picker=True)
53 | # plt.colorbar(heatmap, cax = axColorbar)
54 | # --------
55 |
56 | axMinMax = plt.axes(rect_min_max)
57 | print('Plotting Min Max')
58 | plot_spectrum_min_max(wf, logged=logged, f_start=f_start, f_stop=f_stop, t=t)
59 | plt.title('')
60 | axMinMax.yaxis.tick_right()
61 | axMinMax.yaxis.set_label_position("right")
62 |
63 | # --------
64 | axSpectrum = plt.axes(rect_spectrum,sharex=axMinMax)
65 | print('Plotting Spectrum')
66 | plot_spectrum(wf, logged=logged, f_start=f_start, f_stop=f_stop, t=t)
67 | plt.title('')
68 | axSpectrum.yaxis.tick_right()
69 | axSpectrum.yaxis.set_label_position("right")
70 | plt.xlabel('')
71 | # axSpectrum.xaxis.set_major_formatter(nullfmt)
72 | plt.setp(axSpectrum.get_xticklabels(), visible=False)
73 |
74 | # --------
75 | axWaterfall = plt.axes(rect_waterfall,sharex=axMinMax)
76 | print('Plotting Waterfall')
77 | plot_waterfall(wf, f_start=f_start, f_stop=f_stop, logged=logged, cb=False)
78 | plt.xlabel('')
79 |
80 | # no labels
81 | # axWaterfall.xaxis.set_major_formatter(nullfmt)
82 | plt.setp(axWaterfall.get_xticklabels(), visible=False)
83 |
84 | # --------
85 | axTimeseries = plt.axes(rect_timeseries)
86 | print('Plotting Timeseries')
87 | plot_time_series(wf, f_start=f_start, f_stop=f_stop, orientation='v')
88 | axTimeseries.yaxis.set_major_formatter(nullfmt)
89 | # axTimeseries.xaxis.set_major_formatter(nullfmt)
90 |
91 | # --------
92 | # Could exclude since it takes much longer to run than the other plots.
93 | if kurtosis:
94 | axKurtosis = plt.axes(rect_kurtosis)
95 | print('Plotting Kurtosis')
96 | plot_kurtosis(wf, f_start=f_start, f_stop=f_stop)
97 |
98 |
99 | # --------
100 | axHeader = plt.axes(rect_header)
101 | print('Plotting Header')
102 | # Generate nicer header
103 | telescopes = {0: 'Fake data',
104 | 1: 'Arecibo',
105 | 2: 'Ooty',
106 | 3: 'Nancay',
107 | 4: 'Parkes',
108 | 5: 'Jodrell',
109 | 6: 'GBT',
110 | 8: 'Effelsberg',
111 | 10: 'SRT',
112 | 64: 'MeerKAT',
113 | 65: 'KAT7'
114 | }
115 |
116 | telescope = telescopes.get(wf.header['telescope_id'], wf.header['telescope_id'])
117 |
118 | plot_header = "%14s: %s\n" % ("TELESCOPE_ID", telescope)
119 | for key in ('SRC_RAJ', 'SRC_DEJ', 'TSTART', 'NCHANS', 'NBEAMS', 'NIFS', 'NBITS'):
120 | try:
121 | plot_header += "%14s: %s\n" % (key, wf.header[key.lower()])
122 | except KeyError:
123 | pass
124 | fch1 = "%6.6f MHz" % wf.header['fch1']
125 |
126 | foff = (wf.header['foff'] * 1e6 * u.Hz)
127 | if np.abs(foff) > 1e6 * u.Hz:
128 | foff = str(foff.to('MHz'))
129 | elif np.abs(foff) > 1e3 * u.Hz:
130 | foff = str(foff.to('kHz'))
131 | else:
132 | foff = str(foff.to('Hz'))
133 |
134 | plot_header += "%14s: %s\n" % ("FCH1", fch1)
135 | plot_header += "%14s: %s\n" % ("FOFF", foff)
136 |
137 | plt.text(0.05, .95, plot_header, ha='left', va='top', wrap=True)
138 |
139 | axHeader.set_facecolor('white')
140 | axHeader.xaxis.set_major_formatter(nullfmt)
141 | axHeader.yaxis.set_major_formatter(nullfmt)
142 |
--------------------------------------------------------------------------------
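
A figure sketch for plot_all above (hypothetical file). Saving the figure avoids the DISPLAY requirement, and skipping the kurtosis panel keeps it fast:

```python
import matplotlib
matplotlib.use("Agg")                      # safe on headless machines
import matplotlib.pyplot as plt
from blimpy import Waterfall
from blimpy.plotting import plot_all

wf = Waterfall("voyager.h5")
plt.figure(figsize=(10, 8))
plot_all(wf, logged=True, kurtosis=False)  # the kurtosis panel is the slow one
plt.savefig("voyager_all.png")
```
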
/blimpy/plotting/plot_kurtosis.py:
--------------------------------------------------------------------------------
1 | from .config import *
2 | import scipy.stats
3 |
4 |
5 | def plot_kurtosis(wf, f_start=None, f_stop=None, if_id=0, **kwargs):
6 | """ Plot kurtosis
7 |
8 | Args:
9 | f_start (float): start frequency, in MHz
10 | f_stop (float): stop frequency, in MHz
11 | kwargs: keyword args to be passed to matplotlib imshow()
12 | """
13 | ax = plt.gca()
14 |
15 | plot_f, plot_data = wf.grab_data(f_start, f_stop, if_id)
16 |
17 |     # Using ascending frequency for all plots.
18 | if wf.header['foff'] < 0:
19 | plot_data = plot_data[..., ::-1] # Reverse data
20 | plot_f = plot_f[::-1]
21 |
22 | try:
23 | pltdata = scipy.stats.kurtosis(plot_data, axis=0, nan_policy='omit')
24 |     except Exception:
25 | pltdata = plot_data * 0.0
26 |
27 | plt.plot(plot_f, pltdata, **kwargs)
28 | plt.ylabel("Kurtosis")
29 | plt.xlabel("Frequency [MHz]")
30 |
31 | plt.xlim(plot_f[0], plot_f[-1])
32 |
--------------------------------------------------------------------------------
/blimpy/plotting/plot_spectrum.py:
--------------------------------------------------------------------------------
1 | from .config import *
2 | from ..utils import rebin, db
3 |
4 |
5 | def plot_spectrum(wf, t=0, f_start=None, f_stop=None, logged=False, if_id=0, c=None, **kwargs):
6 | """ Plot frequency spectrum of a given file
7 |
8 | Args:
9 | t (int): integration number to plot (0 -> len(data))
10 | logged (bool): Plot in linear (False) or dB units (True)
11 | if_id (int): IF identification (if multiple IF signals in file)
12 | c: color for line
13 | kwargs: keyword args to be passed to matplotlib plot()
14 | """
15 | if wf.header['nbits'] <= 2:
16 | logged = False
17 | t = 'all'
18 | ax = plt.gca()
19 |
20 | plot_f, plot_data = wf.grab_data(f_start, f_stop, if_id)
21 |
22 |     # Using ascending frequency for all plots.
23 | if wf.header['foff'] < 0:
24 | plot_data = plot_data[..., ::-1] # Reverse data
25 | plot_f = plot_f[::-1]
26 |
27 | if isinstance(t, int):
28 | print("extracting integration %i..." % t)
29 | plot_data = plot_data[t]
30 | elif t == 'all':
31 | print("averaging along time axis...")
32 | # Since the data has been squeezed, the axis for time goes away if only one bin, causing a bug with axis=1
33 | if len(plot_data.shape) > 1:
34 | plot_data = plot_data.mean(axis=0)
35 | else:
36 | plot_data = plot_data.mean()
37 | else:
38 | raise RuntimeError("Unknown integration %s" % t)
39 |
40 | # Rebin to max number of points
41 | dec_fac_x = 1
42 | if plot_data.shape[0] > MAX_PLT_POINTS:
43 | dec_fac_x = int(plot_data.shape[0] / MAX_PLT_POINTS)
44 |
45 | plot_data = rebin(plot_data, dec_fac_x, 1)
46 | plot_f = rebin(plot_f, dec_fac_x, 1)
47 |
48 | if not c:
49 | kwargs['c'] = '#333333'
50 |
51 | if logged:
52 | plt.plot(plot_f, db(plot_data), label='Stokes I', **kwargs)
53 | plt.ylabel("Power [dB]")
54 | else:
55 |
56 | plt.plot(plot_f, plot_data, label='Stokes I', **kwargs)
57 | plt.ylabel("Power [counts]")
58 | plt.xlabel("Frequency [MHz]")
59 | plt.legend()
60 |
61 | try:
62 | plt.title(wf.header['source_name'])
63 | except KeyError:
64 | plt.title(wf.filename)
65 |
66 | plt.xlim(plot_f[0], plot_f[-1])
67 |
--------------------------------------------------------------------------------
/blimpy/plotting/plot_spectrum_min_max.py:
--------------------------------------------------------------------------------
1 | from .config import *
2 | from ..utils import rebin, db
3 |
4 | def plot_spectrum_min_max(wf, t=0, f_start=None, f_stop=None, logged=False, if_id=0, c=None, **kwargs):
5 | """ Plot frequency spectrum of a given file
6 |
7 | Args:
8 | logged (bool): Plot in linear (False) or dB units (True)
9 | if_id (int): IF identification (if multiple IF signals in file)
10 | c: color for line
11 | kwargs: keyword args to be passed to matplotlib plot()
12 | """
13 | ax = plt.gca()
14 |
15 | plot_f, plot_data = wf.grab_data(f_start, f_stop, if_id)
16 |
17 |     # Using ascending frequency for all plots.
18 | if wf.header['foff'] < 0:
19 | plot_data = plot_data[..., ::-1] # Reverse data
20 | plot_f = plot_f[::-1]
21 |
22 | if logged:
23 | db_plot_data = db(plot_data[0])
24 | fig_max = np.nanmax(db_plot_data[db_plot_data != np.inf])
25 | fig_min = np.nanmin(db_plot_data[db_plot_data != -np.inf])
26 | else:
27 | fig_max = plot_data[0].max()
28 | fig_min = plot_data[0].min()
29 |
30 | print("averaging along time axis...")
31 |
32 | # Since the data has been squeezed, the axis for time goes away if only one bin, causing a bug with axis=1
33 | if len(plot_data.shape) > 1:
34 | plot_max = plot_data.max(axis=0)
35 | plot_min = plot_data.min(axis=0)
36 | plot_data = plot_data.mean(axis=0)
37 | else:
38 | plot_max = plot_data.max()
39 | plot_min = plot_data.min()
40 | plot_data = plot_data.mean()
41 |
42 | # Rebin to max number of points
43 | dec_fac_x = 1
44 |     MAX_PLT_POINTS = 8 * 64 # Low resolution to see the difference.
45 | if plot_data.shape[0] > MAX_PLT_POINTS:
46 | dec_fac_x = int(plot_data.shape[0] / MAX_PLT_POINTS)
47 |
48 | plot_data = rebin(plot_data, dec_fac_x, 1)
49 | plot_min = rebin(plot_min, dec_fac_x, 1)
50 | plot_max = rebin(plot_max, dec_fac_x, 1)
51 | plot_f = rebin(plot_f, dec_fac_x, 1)
52 |
53 | if logged:
54 | plt.plot(plot_f, db(plot_data), "#333333", label='mean', **kwargs)
55 | plt.plot(plot_f, db(plot_max), "#e74c3c", label='max', **kwargs)
56 | plt.plot(plot_f, db(plot_min), '#3b5b92', label='min', **kwargs)
57 | plt.ylabel("Power [dB]")
58 | else:
59 | plt.plot(plot_f, plot_data, "#333333", label='mean', **kwargs)
60 | plt.plot(plot_f, plot_max, "#e74c3c", label='max', **kwargs)
61 | plt.plot(plot_f, plot_min, '#3b5b92', label='min', **kwargs)
62 | plt.ylabel("Power [counts]")
63 | plt.xlabel("Frequency [MHz]")
64 | plt.legend()
65 |
66 | try:
67 | plt.title(wf.header['source_name'])
68 | except KeyError:
69 | plt.title(wf.filename)
70 |
71 | plt.xlim(plot_f[0], plot_f[-1])
72 | if logged:
73 | try:
74 | plt.ylim(fig_min - 1, fig_max + 1)
75 | except ValueError:
76 | plt.ylim(-10, 20)
77 |
--------------------------------------------------------------------------------
/blimpy/plotting/plot_time_series.py:
--------------------------------------------------------------------------------
1 | from .config import *
2 | from ..utils import rebin, db
3 | from .plot_utils import calc_extent
4 |
5 | def plot_time_series(wf, f_start=None, f_stop=None, if_id=0, logged=True, orientation='h', MJD_time=False, **kwargs):
6 | """ Plot the time series.
7 |
8 | Args:
9 | f_start (float): start frequency, in MHz
10 | f_stop (float): stop frequency, in MHz
11 | logged (bool): Plot in linear (False) or dB units (True),
12 | kwargs: keyword args to be passed to matplotlib imshow()
13 | """
14 |
15 | ax = plt.gca()
16 | plot_f, plot_data = wf.grab_data(f_start, f_stop, if_id)
17 |
18 | # Since the data has been squeezed, the axis for time goes away if only one bin, causing a bug with axis=1
19 | if len(plot_data.shape) > 1:
20 | plot_data = np.nanmean(plot_data, axis=1)
21 | else:
22 | plot_data = np.nanmean(plot_data)
23 |
24 | if logged and wf.header['nbits'] >= 8:
25 | plot_data = db(plot_data)
26 |
27 | # Make proper time axis for plotting (but only for plotting!). Note that this makes the values inclusive.
28 | extent = calc_extent(wf, plot_f=plot_f, plot_t=wf.timestamps, MJD_time=MJD_time)
29 | plot_t = np.linspace(extent[2], extent[3], len(wf.timestamps))
30 |
31 | if MJD_time:
32 | tlabel = "Time [MJD]"
33 | else:
34 | tlabel = "Time [s]"
35 |
36 | if logged:
37 | plabel = "Power [dB]"
38 | else:
39 | plabel = "Power [counts]"
40 |
41 |     # Reverse order if vertical orientation.
42 | if 'v' in orientation:
43 | plt.plot(plot_data, plot_t, **kwargs)
44 | plt.xlabel(plabel)
45 |
46 | else:
47 | plt.plot(plot_t, plot_data, **kwargs)
48 | plt.xlabel(tlabel)
49 | plt.ylabel(plabel)
50 |
51 | ax.autoscale(axis='both', tight=True)
52 |
--------------------------------------------------------------------------------
/blimpy/plotting/plot_utils.py:
--------------------------------------------------------------------------------
 1 | def calc_extent(wf, plot_f=None, plot_t=None, MJD_time=False):
 2 |     """ Setup plotting edges.
 3 |     """
 4 |
 5 |     plot_f_begin = plot_f[0]
 6 |     plot_f_end = plot_f[-1] + (plot_f[1] - plot_f[0])
 7 |
 8 |     plot_t_begin = wf.timestamps[0]
 9 |     plot_t_end = wf.timestamps[-1] + (wf.timestamps[1] - wf.timestamps[0])
10 |
11 | if MJD_time:
12 | extent = (plot_f_begin, plot_f_end, plot_t_begin, plot_t_end)
13 | else:
14 | extent = (plot_f_begin, plot_f_end, 0.0, (plot_t_end - plot_t_begin) * 24. * 60. * 60)
15 |
16 | return extent
--------------------------------------------------------------------------------
/blimpy/plotting/plot_waterfall.py:
--------------------------------------------------------------------------------
1 | from .config import *
2 | from ..utils import rebin, db
3 | from .plot_utils import calc_extent
4 |
5 |
6 | def plot_waterfall(wf, f_start=None, f_stop=None, if_id=0, logged=True, cb=True, MJD_time=False, **kwargs):
7 | """ Plot waterfall of data
8 |
9 | Args:
10 | f_start (float): start frequency, in MHz
11 | f_stop (float): stop frequency, in MHz
12 | logged (bool): Plot in linear (False) or dB units (True),
13 | cb (bool): for plotting the colorbar
14 | kwargs: keyword args to be passed to matplotlib imshow()
15 | """
16 | plot_f, plot_data = wf.grab_data(f_start, f_stop, if_id)
17 |
18 | # imshow does not support int8, so convert to floating point
19 | plot_data = plot_data.astype('float32')
20 |
21 |     # Using ascending frequency for all plots.
22 | if wf.header['foff'] < 0:
23 | plot_data = plot_data[..., ::-1] # Reverse data
24 | plot_f = plot_f[::-1]
25 |
26 | if logged:
27 | plot_data = db(plot_data)
28 |
29 | # Make sure waterfall plot is under 4k*4k
30 | dec_fac_x, dec_fac_y = 1, 1
31 | if plot_data.shape[0] > MAX_IMSHOW_POINTS[0]:
32 | dec_fac_x = int(plot_data.shape[0] / MAX_IMSHOW_POINTS[0])
33 |
34 | if plot_data.shape[1] > MAX_IMSHOW_POINTS[1]:
35 | dec_fac_y = int(plot_data.shape[1] / MAX_IMSHOW_POINTS[1])
36 |
37 | plot_data = rebin(plot_data, dec_fac_x, dec_fac_y)
38 |
39 | try:
40 | plt.title(wf.header['source_name'])
41 | except KeyError:
42 | plt.title(wf.filename)
43 |
44 | extent = calc_extent(wf, plot_f=plot_f, plot_t=wf.timestamps, MJD_time=MJD_time)
45 |
46 | plt.imshow(plot_data,
47 | aspect='auto',
48 | origin='lower',
49 | rasterized=True,
50 | interpolation='nearest',
51 | extent=extent,
52 | cmap='viridis',
53 | **kwargs
54 | )
55 | if cb:
56 | plt.colorbar()
57 | plt.xlabel("Frequency [MHz]")
58 | if MJD_time:
59 | plt.ylabel("Time [MJD]")
60 | else:
61 | plt.ylabel("Time [s]")
62 |
--------------------------------------------------------------------------------
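
A waterfall sketch for the function above (hypothetical file; the band shown is illustrative, around the Voyager 1 carrier):

```python
import matplotlib.pyplot as plt
from blimpy import Waterfall
from blimpy.plotting import plot_waterfall

wf = Waterfall("voyager.h5", f_start=8419.2, f_stop=8419.4)
plt.figure(figsize=(10, 4))
plot_waterfall(wf, f_start=8419.24, f_stop=8419.35, logged=True)
plt.savefig("voyager_waterfall.png")
```
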
/blimpy/rawhdr.py:
--------------------------------------------------------------------------------
1 | r"""
2 | Read the specified raw file.
3 | Examine & print the required fields.
4 | If verbose, print every header field value.
5 | """
6 |
7 | import sys
8 | from argparse import ArgumentParser
9 | import blimpy
10 |
11 |
12 | DEBUGGING = False
13 |
14 |
15 | def check_int_field(header, key, valid_values, required=True):
16 | """
17 | Check an integer header field for validity.
18 |
19 | Parameters
20 | ----------
21 | header : dict
22 | Header of the .raw file.
23 | key : str
24 | Field's key value.
25 | valid_values : tuple
26 | The list of valid values or None.
27 | required : boolean, optional
28 | Required? The default is True.
29 |
30 | Returns
31 | -------
32 | int
33 | 0 : valid value; 1 : invalid or missing (and required).
34 |
35 | """
36 | if key in header.keys():
37 | try:
38 | field = int(header[key])
39 | if valid_values is None:
40 | print(F"\t{key} = {field}")
41 | return 0
42 | if field in valid_values:
43 | print(F"\t{key} = {field}")
44 | return 0
45 | print(F"\t*** ERROR VALUE *** {key} = {field}")
46 | return 1
47 |         except (TypeError, ValueError):
48 | print(F"\t*** NOT int *** {key} = {header[key]}")
49 | return 1
50 |
51 | if required:
52 | print(F"\t*** MISSING *** {key}")
53 | return 1
54 | print(F"\t{key} is not present but not required")
55 | return 0
56 |
57 |
58 | def check_float_field(header, key):
59 | """
60 | Check a float header field for validity.
61 |
62 | Parameters
63 | ----------
64 | header : dict
65 | Header of the .raw file.
66 | key : str
67 | Field's key value.
68 |
69 | Returns
70 | -------
71 | int
72 | 0 : valid value; 1 : invalid.
73 |
74 | """
75 | if key in header.keys():
76 | try:
77 | field = float(header[key])
78 | print(F"\t{key} = {field}")
79 | return 0
80 |         except (TypeError, ValueError):
81 | print(F"\t*** NOT float *** {key} = {header[key]}")
82 | return 1
83 |
84 | print(F"\t*** MISSING *** {key}")
85 | return 1
86 |
87 |
88 | def examine_header(filepath):
89 | """
90 | Examine the critical .raw file header fields.
91 |
92 | Parameters
93 | ----------
94 | filepath : str
95 | Input .raw file path.
96 |
97 | Returns
98 | -------
99 | rc : int
100 | 0 : no errors; n>0 : at least one error.
101 |
102 | """
103 | if DEBUGGING:
104 | print("DEBUG calling GuppiRaw")
105 | gr = blimpy.GuppiRaw(filepath)
106 | if DEBUGGING:
107 | print("DEBUG called GuppiRaw")
108 | header, _ = gr.read_header()
109 | if DEBUGGING:
110 | print("DEBUG header =", header)
111 | rc = 0
112 | rc += check_int_field(header, "OBSNCHAN", None)
113 | rc += check_int_field(header, "NPOL", [1, 2, 4])
114 | rc += check_int_field(header, "NBITS", [2, 4, 8, 16])
115 | rc += check_int_field(header, "BLOCSIZE", None)
116 | rc += check_int_field(header, "PKTIDX", None)
117 | rc += check_int_field(header, "DIRECTIO", None, required=False)
118 | rc += check_int_field(header, "BEAM_ID", None, required=False)
119 | rc += check_int_field(header, "NBEAM", None, required=False)
120 | rc += check_int_field(header, "NANTS", None, required=False)
121 | rc += check_float_field(header, "TBIN")
122 | rc += check_float_field(header, "OBSFREQ")
123 | rc += check_float_field(header, "OBSBW")
124 |
125 | return rc
126 |
127 |
128 | def cmd_tool(args=None):
129 | """
130 | rawhdr command line entry point
131 |
132 | Parameters
133 | ----------
134 | args : ArgParse, optional
135 | Command line arguments. The default is None.
136 |
137 | Returns
138 | -------
139 | rc : int
140 | 0 : no errors; n>0 : at least one error.
141 |
142 | """
143 | p = ArgumentParser(description="Show the individual fields of the first header for a given raw file.")
144 | p.add_argument("filepath", type=str, help="Name of raw guppi file path to access")
145 | p.add_argument("--verbose", "-v", dest="verbose", action="store_true",
146 | help="Show all of the first header fields.")
147 | if args is None:
148 | args = p.parse_args()
149 | else:
150 | args = p.parse_args(args)
151 |
152 | if args.verbose:
153 | print("rawhdr: All fields of the first header .....")
154 | with open(args.filepath, "rb") as fh:
155 | while True:
156 | buffer = fh.read(80).decode("utf-8").strip()
157 | print("\t", buffer)
158 |                 if buffer[0:3] == "END" or buffer == "":  # stop at END or EOF
159 | break
160 |
161 | print("rawhdr: Critical rawspec fields .....")
162 | rc = examine_header(args.filepath)
163 | if rc != 0:
164 | print("*** At least one required raw header field is missing or invalid!")
165 | return rc
166 |
167 | print("rawhdr: No errors found.")
168 | return rc
169 |
170 | if __name__ == "__main__":
171 | sys.exit(cmd_tool())
172 |
--------------------------------------------------------------------------------
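For reference, a minimal usage sketch for the module above, driving the field checks from Python instead of the `rawhdr` console script; the `.raw` path below is an illustrative assumption.

```python
# Hedged sketch: validate the critical header fields of a local GUPPI raw file
# by calling examine_header() (defined in rawhdr.py above) directly.
from blimpy.rawhdr import examine_header

rc = examine_header("/datax/example_observation.0000.raw")  # hypothetical path
if rc == 0:
    print("All critical header fields passed validation.")
else:
    print(f"{rc} header field check(s) failed.")
```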
/blimpy/signal_processing/__init__.py:
--------------------------------------------------------------------------------
1 | from .dedoppler import dedoppler_1
2 |
3 |
--------------------------------------------------------------------------------
/blimpy/signal_processing/dedoppler.py:
--------------------------------------------------------------------------------
1 | r""" De-doppler signal processing functions """
2 |
3 |
4 | import numpy as np
5 |
6 |
7 | def dedoppler_1(wf, drift_rate):
8 | """
9 | Simple de-doppler code for a Filterbank or HDF5 file.
 10 |     Parameters
 11 |     ----------
12 | wf : object
13 | Blimpy Waterfall object, previously instantiated with a loaded data matrix.
14 | drift_rate : float
15 | Signal drift rate over time [Hz/s]
16 | """
17 |
 18 |     # Get the time sampling interval in seconds.
19 | tsamp = wf.header['tsamp']
20 |
21 | # Get the fine channel bandwidth in Hz.
22 | chan_bw = wf.header['foff'] * 1e6
23 |
24 | # Compute the number of numpy rolls to perform.
25 | n_roll = (drift_rate * tsamp) / chan_bw
26 |
27 | # For each time-row,
28 | # roll all of the data power values in each fine channel frequency column
29 | # given by -(n_roll * row number).
30 | for ii in range(wf.data.shape[0]):
31 | wf.data[ii][0][:] = np.roll(wf.data[ii][0][:], -int(n_roll * ii))
32 |
--------------------------------------------------------------------------------
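To make the roll computation above concrete, a hedged usage sketch; the file path and drift rate are assumptions, and the Waterfall data matrix must fit in memory since `dedoppler_1` modifies it in place.

```python
# Sketch: undo a known Doppler drift using dedoppler_1() from above.
from blimpy import Waterfall
from blimpy.signal_processing import dedoppler_1

wf = Waterfall("/datax/voyager_example.fil")  # hypothetical small file
dedoppler_1(wf, drift_rate=2.0)  # Hz/s; time row ii is rolled by -int(n_roll * ii) channels
```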
/blimpy/srcname.py:
--------------------------------------------------------------------------------
1 | """ srcname
2 |
3 | Change the HDF5 Filterbank header source_name field value.
4 | """
5 |
6 | import os
7 | import sys
8 | from argparse import ArgumentParser
9 | import h5py
10 | from astropy.coordinates import Angle
11 |
12 |
13 | def oops(msg):
14 | print("\n*** srcname: {}\n".format(msg))
15 | sys.exit(86)
16 |
17 |
18 | def read_header(h5):
19 | """
20 | Read the Filterbank header and return a Python dictionary of key:value pairs
21 |
22 | Parameters
23 | ----------
24 | h5 : HDF5 file handle
25 | This represents an open HDF5 file.
26 |
27 | Returns
28 | -------
29 | header : dict
30 | The Filterbank header.
31 |
32 | """
 33 |     header = {}  # Initialise an empty dictionary.
34 |
35 | for key, val in h5["data"].attrs.items():
36 | if isinstance(val, bytes):
37 | val = val.decode("ascii")
38 | if key == "src_raj":
39 | header[key] = Angle(val, unit="hr")
40 | elif key == "src_dej":
41 | header[key] = Angle(val, unit="deg")
42 | else:
43 | header[key] = val
44 |
45 | return header
46 |
47 |
48 | def examine(filepath):
49 | """
50 | Diagnose the specified HDF5 file.
51 |
52 | Parameters
53 | ----------
54 | filepath : path
55 | An O/S path to an HDF5 file.
56 |
57 | Returns
58 | -------
59 | header : dict
60 | The Filterbank header.
61 |
62 | """
63 | h5 = h5py.File(filepath, mode="r")
64 | if "CLASS" in h5.attrs:
65 | classstr = h5.attrs["CLASS"]
66 | else:
67 | oops("CLASS attribute missing")
68 | if classstr != "FILTERBANK":
69 | oops("Expected CLASS attribute to be 'FILTERBANK' but saw '{}'".format(classstr))
70 | if "VERSION" in h5.attrs:
71 | versionstr = h5.attrs["VERSION"]
72 | else:
73 | oops("VERSION attribute missing")
74 | header = read_header(h5)
75 | print("Header:", header)
 76 |     if "data" not in h5:
77 | oops("data attribute missing")
78 | if h5["data"].ndim != 3:
79 | oops("Expected data.ndim to be 3 but saw '{}'".format(h5["data"].ndim))
80 | print("srcname: data shape:", h5["data"].shape)
81 | h5.close()
82 |
83 | return header
84 |
85 |
86 | def cmd_tool(args=None):
87 | """ Command line tool srcname """
88 | parser = ArgumentParser(description="Patch the header source field in an HDF5 file.")
89 | parser.add_argument("filepath", default=None, type=str, help="Path of file to read")
90 | parser.add_argument("new_source_name", default=None, type=str,
91 | help="New header source name field value")
92 |
93 | if args is None:
94 | args = parser.parse_args()
95 | else:
96 | args = parser.parse_args(args)
97 |
98 | if not os.path.isfile(args.filepath):
99 | oops("Not a file: {}".format(args.filepath))
100 | if not h5py.is_hdf5(args.filepath):
101 | oops("Not an HDF5 file: {}".format(args.filepath))
102 |
103 | header = examine(args.filepath)
104 | print("srcname: No errors detected in {}".format(args.filepath))
105 | print("\nThe current source_name field is [{}]".format(header["source_name"]))
106 | input("Are you sure you want to replace it with [{}]? Press Enter to continue. Ctrl-C to cancel: "
107 | .format(args.new_source_name))
108 | h5 = h5py.File(args.filepath, mode="r+")
109 | h5["data"].attrs["source_name"] = args.new_source_name
110 | h5.close()
111 | print("All done, best wishes!")
112 |
113 |
114 | if __name__ == "__main__":
115 | cmd_tool()
116 |
--------------------------------------------------------------------------------
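A hedged example of invoking the tool above programmatically; the file path and new source name are illustrative. Note that `cmd_tool` pauses for interactive confirmation before rewriting the attribute.

```python
# Sketch: patch the source_name attribute of an HDF5 Filterbank file.
from blimpy.srcname import cmd_tool

cmd_tool(["/datax/example_voyager.h5", "VOYAGER-1"])  # hypothetical path and name
```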
/blimpy/utils.py:
--------------------------------------------------------------------------------
1 | """
2 | # utils.py
3 | useful helper functions for common data manipulation tasks
4 | """
5 | import os
6 | import math
7 | import numpy as np
8 |
9 |
10 | def db(x, offset=0):
11 | """ Convert linear to dB """
12 | return 10 * np.log10(x + offset)
13 |
14 |
15 | def lin(x):
16 | """ Convert dB to linear """
17 | return 10.0 ** (x / 10.0)
18 |
19 |
20 | def closest(xarr, val):
 21 |     """ Return the index of the value in xarr closest to val """
22 | idx_closest = np.argmin(np.abs(np.array(xarr) - val))
23 | return idx_closest
24 |
25 |
26 | def rebin(d, n_x=None, n_y=None, n_z=None):
27 | """ Rebin data by averaging bins together
28 |
29 | Args:
 30 |         d (np.array): data array (1D, 2D, or 3D)
 31 |         n_x (int): number of bins along axis 0 to average into one
 32 |         n_y (int): number of bins along axis 1 to average into one (2D/3D)
 33 |         n_z (int): number of bins along axis 2 to average into one (3D)
 34 |     Returns:
 35 |         d: rebinned data, with each axis length divided by its rebin factor
 36 |     """
37 | if n_x is None:
38 | n_x = 1
39 | else:
40 | n_x = math.ceil(n_x)
41 | if n_y is None:
42 | n_y = 1
43 | else:
44 | n_y = math.ceil(n_y)
45 | if n_z is None:
46 | n_z = 1
47 | else:
48 | n_z = math.ceil(n_z)
49 |
50 | if d.ndim == 3:
51 | d = d[:int(d.shape[0] // n_x) * n_x, :int(d.shape[1] // n_y) * n_y, :int(d.shape[2] // n_z) * n_z]
52 | d = d.reshape((d.shape[0] // n_x, n_x, d.shape[1] // n_y, n_y, d.shape[2] // n_z, n_z))
53 | d = d.mean(axis=5)
54 | d = d.mean(axis=3)
55 | d = d.mean(axis=1)
56 | elif d.ndim == 2:
57 | d = d[:int(d.shape[0] // n_x) * n_x, :int(d.shape[1] // n_y) * n_y]
58 | d = d.reshape((d.shape[0] // n_x, n_x, d.shape[1] // n_y, n_y))
59 | d = d.mean(axis=3)
60 | d = d.mean(axis=1)
61 | elif d.ndim == 1:
62 | d = d[:int(d.shape[0] // n_x) * n_x]
63 | d = d.reshape((d.shape[0] // n_x, n_x))
64 | d = d.mean(axis=1)
65 | else:
66 | raise RuntimeError("Only NDIM <= 3 supported")
67 | return d
68 |
69 |
70 | def unpack(data, nbit):
 71 |     """Upgrade data from nbit packing to 8 bits
72 |
73 | Notes: Pretty sure this function is a little broken!
74 | """
75 | if nbit > 8:
76 | raise ValueError("unpack: nbit must be <= 8")
77 | if 8 % nbit != 0:
78 | raise ValueError("unpack: nbit must divide into 8")
79 | if data.dtype not in (np.uint8, np.int8):
80 | raise TypeError("unpack: dtype must be 8-bit")
81 | if nbit == 8:
82 | return data
83 | if nbit == 4:
84 | data = unpack_4to8(data)
85 | return data
86 | if nbit == 2:
87 | data = unpack_2to8(data)
88 | return data
89 | if nbit == 1:
90 | data = unpack_1to8(data)
91 | return data
92 |
93 | return None ## SHOULD NEVER HAPPEN
94 |
95 |
96 | def unpack_1to8(data):
 97 |     """ Promote 1-bit unsigned data into 8-bit unsigned data.
98 |
99 | Args:
100 | data: Numpy array with dtype == uint8
101 | """
102 | return np.unpackbits(data)
103 |
104 |
105 | def unpack_2to8(data):
106 |     """ Promote 2-bit unsigned data into 8-bit unsigned data.
107 |
108 | Args:
109 | data: Numpy array with dtype == uint8
110 |
111 | Notes:
112 | DATA MUST BE LOADED as np.array() with dtype='uint8'.
113 |
114 | This works with some clever shifting and AND / OR operations.
115 | Data is LOADED as 8-bit, then promoted to 32-bits:
116 | /ABCD EFGH/ (8 bits of data)
117 | /0000 0000/0000 0000/0000 0000/ABCD EFGH/ (8 bits of data as a 32-bit word)
118 |
119 | Once promoted, we can do some shifting, AND and OR operations:
120 | /0000 0000/0000 ABCD/EFGH 0000/0000 0000/ (shifted << 12)
121 | /0000 0000/0000 ABCD/EFGH 0000/ABCD EFGH/ (bitwise OR of previous two lines)
122 | /0000 0000/0000 ABCD/0000 0000/0000 EFGH/ (bitwise AND with mask 0xF000F)
123 | /0000 00AB/CD00 0000/0000 00EF/GH00 0000/ (prev. line shifted << 6)
124 | /0000 00AB/CD00 ABCD/0000 00EF/GH00 EFGH/ (bitwise OR of previous two lines)
125 | /0000 00AB/0000 00CD/0000 00EF/0000 00GH/ (bitwise AND with 0x3030303)
126 |
127 | Then we change the view of the data to interpret it as 4x8 bit:
128 | [000000AB, 000000CD, 000000EF, 000000GH] (change view from 32-bit to 4x8-bit)
129 |
130 | The converted bits are then mapped to values in the range [-40, 40] according to a lookup chart.
131 |         The mapping is based on specifications in the breakthrough docs:
132 | https://github.com/UCBerkeleySETI/breakthrough/blob/master/doc/RAW-File-Format.md
133 |
134 | """
135 | two_eight_lookup = {0: 40,
136 | 1: 12,
137 | 2: -12,
138 | 3: -40}
139 |
140 | tmp = data.astype(np.uint32)
141 | tmp = (tmp | (tmp << 12)) & 0xF000F
142 | tmp = (tmp | (tmp << 6)) & 0x3030303
143 | tmp = tmp.byteswap()
144 | tmp = tmp.view('uint8')
145 | mapped = np.array(tmp, dtype=np.int8)
146 | for k, v in two_eight_lookup.items():
147 | mapped[tmp == k] = v
148 | return mapped
149 |
150 |
151 | def unpack_4to8(data):
152 |     """ Promote 4-bit unsigned data into 8-bit unsigned data.
153 |
154 | Args:
155 | data: Numpy array with dtype == uint8
156 |
157 | Notes:
158 | # The process is this:
159 | # ABCDEFGH [Bits of one 4+4-bit value]
160 | # 00000000ABCDEFGH [astype(uint16)]
161 | # 0000ABCDEFGH0000 [<< 4]
162 | # 0000ABCDXXXXEFGH [bitwise 'or' of previous two lines]
163 | # 0000111100001111 [0x0F0F]
164 | # 0000ABCD0000EFGH [bitwise 'and' of previous two lines]
165 | # ABCD0000EFGH0000 [<< 4]
166 | # which effectively pads the two 4-bit values with zeros on the right
167 | # Note: This technique assumes LSB-first ordering
168 | """
169 |
170 | tmpdata = data.astype(np.int16) # np.empty(upshape, dtype=np.int16)
171 | tmpdata = (tmpdata | (tmpdata << 4)) & 0x0F0F
172 | # tmpdata = tmpdata << 4 # Shift into high bits to avoid needing to sign extend
173 | updata = tmpdata.byteswap()
174 | return updata.view(data.dtype)
175 |
176 |
177 | def change_the_ext(path, old_ext, new_ext):
178 | """
179 | Change the file extension of the given path to new_ext.
180 |
181 | If the file path's current extension matches the old_ext,
182 | then the new_ext will replace the old_ext.
183 | Else, the new_ext will be appended to the argument path.
184 |
185 | In either case, the resulting string is returned to caller.
186 |
187 | E.g. /a/b/fil/d/foo.fil.bar.fil --> /a/b/fil/d/foo.fil.bar.h5
188 | E.g. /a/fil/b/foo.bar --> /a/fil/b/foo.bar.h5
189 | E.g. /a/fil/b/foo --> /a/fil/b/foo.h5
190 |
191 | Parameters
192 | ----------
193 | path : str
194 |         Path of the file whose extension is to be changed.
195 | old_ext : str
196 | Old file extension (E.g. h5, fil, dat, log).
197 | new_ext : str
198 | New file extension (E.g. h5, fil, dat, log).
199 |
200 | Returns
201 | -------
202 | New file path, amended as described.
203 |
204 | """
205 | split_tuple = os.path.splitext(path)
206 | if split_tuple[1] == "." + old_ext:
207 | return split_tuple[0] + "." + new_ext
208 | return path + "." + new_ext
209 |
--------------------------------------------------------------------------------
/dependencies.txt:
--------------------------------------------------------------------------------
1 | python3-pip
2 | python3-dev
3 | libhdf5-dev
4 | gfortran
5 | curl
6 | git
7 | g++
8 | build-essential
9 |
--------------------------------------------------------------------------------
/docker_guide.md:
--------------------------------------------------------------------------------
1 | # Using `blimpy` with `docker`
2 |
3 | ## Quick Start
4 |
5 | Docker is a "containerized" service that is similar to a virtual machine. It's lighter than a virtual machine since the docker containers run directly on the host OS instead of on a guest OS.
6 |
7 | Docker helps prevent any installation errors or problems with environment settings by building containers which are identical to the ones tested on Travis CI.
8 |
9 | ### Some Terminology
10 |
11 | A **container** is like a remote machine with everything installed and ready to use. You can have bash sessions inside a container.
12 |
13 | An **image** is like a blueprint or a frozen state of a system. It tells docker exactly what a container should have.
14 |
15 |
16 | ### Pulling an image
17 | ```bash
 18 | docker pull <image_name>:<tag>
19 | ```
20 |
21 | Our blimpy images are stored on docker hub, which is basically Github for docker images. *Pulling* an image downloads it to your machine.
22 |
23 | Currently, our repo on docker hub is `fx196/blimpy`. You can specify which version of Python you want by using different tags.
24 |
25 | For python3, use:
26 |
27 | `docker pull fx196/blimpy:py3_kern_stable`
28 |
29 | For python2, use:
30 |
31 | `docker pull fx196/blimpy:py2_kern_stable`
32 |
33 | 
34 |
35 | ### Run container from image
36 |
37 | ```bash
 38 | docker run --name <container_name> -it <image_name>:<tag> bash
39 | ```
40 |
 41 | This command takes the image `<image_name>:<tag>`, builds a container from it, and opens an interactive bash session inside it.
42 |
43 | For example:
44 |
45 | `docker run --name dratini -it fx196/blimpy:py3_kern_stable bash`
46 |
 47 | This builds a container named `dratini` from the python3 blimpy image and connects to it. Think of this as starting up your remote machine and then `ssh`-ing into it.
48 |
 49 | After the above command, your shell prompt should change to the container's prompt.
 50 | 
 51 | 
 52 | From that point on, every command you type runs inside the container.
53 |
54 | Exit the container after running it with ctrl+P then ctrl+Q.
55 |
56 | ### TLDR
57 |
58 | - `docker pull fx196/blimpy:py3_kern_stable` to pull python3 version image
59 | - `docker run --name blimpy_py3 -it fx196/blimpy:py3_kern_stable bash` to start container
60 | - ctrl+P then ctrl+Q to exit container
61 |
62 | ### After running the container
63 |
 64 | Continuing with the analogy of using a remote machine:
65 |
66 | | Remote Machine Command | Docker equivalent | Use | Example |
67 | | ----- | ----- | ----- | ----- |
68 | | exit | ctrl+P then ctrl+Q | disconnect from machine | |
69 | | ssh | docker attach \ | connect to machine | `docker attach dratini`|
70 | | `scp local_file remote:remote_path` | `docker cp local_file name:container_path`| copy from local to remote | `docker cp test.fil dratini:/data`|
 71 | | `scp remote:remote_file local_path` | `docker cp name:container_path local_path` | copy from remote to local | `docker cp dratini:/data/test.fil .` |
72 |
73 | ## Advanced
74 |
75 | ### Bind Mounts
76 |
77 | Bind mounts are a faster alternative to copying files into the container. This "binds" a directory on the host machine to a directory in the container. Bind mounting a directory allows the container to read and write data inside that directory.
78 |
79 | Let's say we have a directory `/root/data` that contains some `h5` files we want to use. We can bind mount it to a container called `dratini` using:
80 |
81 | ```bash
82 | docker run --name dratini --mount type=bind,source=/root/data,target=/mounted_data -it fx196/blimpy:py3_kern_stable bash
83 | ```
84 |
85 | This will create a path `/mounted_data` inside the container, allowing the container to access the contents of `/root/data` by accessing `/mounted_data`.
86 |
87 | 
88 |
 89 | Changing the contents of `/mounted_data` in the container will also change the contents of `/root/data` on the host. If we use `h52fil` inside the mounted directory, we can access the result directly on the host machine without needing to use `docker cp`.
90 |
91 |
92 | 
93 |
94 | ### Running commands outside the container
95 |
96 | You can run commands in the container while on the host machine using `docker exec`.
97 |
 98 | For example, after starting `dratini` and bind mounting `/root/data` to `/mounted_data`, we can look at the files in `/mounted_data` and use `h52fil` on the `.h5` file inside.
99 |
100 | 
101 |
102 | Note that `blimpy` writes the resulting `.fil` file to the working directory, which happens to be `/home` inside the container. We can set the working directory for our command to `/mounted_data` by adding `-w /mounted_data` to our command. The `-w` flag lets us select the working directory.
103 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SOURCEDIR = .
8 | BUILDDIR = _build
9 |
10 | # Put it first so that "make" without argument is like "make help".
11 | help:
12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
13 |
14 | .PHONY: help Makefile
15 |
16 | # Catch-all target: route all unknown targets to Sphinx using the new
17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
18 | %: Makefile
19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--------------------------------------------------------------------------------
/docs/blimpy.calib_utils.rst:
--------------------------------------------------------------------------------
1 | blimpy.calib\_utils package
2 | ===========================
3 |
4 | Submodules
5 | ----------
6 |
7 | blimpy.calib\_utils.calib\_plots module
8 | ---------------------------------------
9 |
10 | .. automodule:: blimpy.calib_utils.calib_plots
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | blimpy.calib\_utils.fluxcal module
16 | ----------------------------------
17 |
18 | .. automodule:: blimpy.calib_utils.fluxcal
19 | :members:
20 | :undoc-members:
21 | :show-inheritance:
22 |
23 | blimpy.calib\_utils.stokescal module
24 | ------------------------------------
25 |
26 | .. automodule:: blimpy.calib_utils.stokescal
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
31 |
32 | Module contents
33 | ---------------
34 |
35 | .. automodule:: blimpy.calib_utils
36 | :members:
37 | :undoc-members:
38 | :show-inheritance:
39 |
--------------------------------------------------------------------------------
/docs/blimpy.rst:
--------------------------------------------------------------------------------
1 | blimpy package
2 | ==============
3 |
4 | Subpackages
5 | -----------
6 |
7 | .. toctree::
8 |
9 | blimpy.calib_utils
10 |
11 | Submodules
12 | ----------
13 |
14 | blimpy.calcload module
15 | ----------------------
16 |
17 | .. automodule:: blimpy.calcload
18 | :members:
19 | :undoc-members:
20 | :show-inheritance:
21 |
22 | blimpy.dice module
23 | ------------------
24 |
25 | .. automodule:: blimpy.dice
26 | :members:
27 | :undoc-members:
28 | :show-inheritance:
29 |
30 | blimpy.fil2h5 module
31 | --------------------
32 |
33 | .. automodule:: blimpy.fil2h5
34 | :members:
35 | :undoc-members:
36 | :show-inheritance:
37 |
38 | blimpy.guppi module
39 | -------------------
40 |
41 | .. automodule:: blimpy.guppi
42 | :members:
43 | :undoc-members:
44 | :show-inheritance:
45 |
46 | blimpy.h5diag module
47 | --------------------
48 |
49 | .. automodule:: blimpy.h5diag
50 | :members:
51 | :undoc-members:
52 | :show-inheritance:
53 |
54 | blimpy.h52fil module
55 | --------------------
56 |
57 | .. automodule:: blimpy.h52fil
58 | :members:
59 | :undoc-members:
60 | :show-inheritance:
61 |
62 | blimpy.match\_fils module
63 | -------------------------
64 |
65 | .. automodule:: blimpy.match_fils
66 | :members:
67 | :undoc-members:
68 | :show-inheritance:
69 |
70 | blimpy.rawhdr module
71 | --------------------
72 |
73 | .. automodule:: blimpy.rawhdr
74 | :members:
75 | :undoc-members:
76 | :show-inheritance:
77 |
78 | blimpy.stax module
79 | --------------------
80 |
81 | .. automodule:: blimpy.stax
82 | :members:
83 | :undoc-members:
84 | :show-inheritance:
85 |
86 | blimpy.stix module
87 | --------------------
88 |
89 | .. automodule:: blimpy.stix
90 | :members:
91 | :undoc-members:
92 | :show-inheritance:
93 |
94 | blimpy.utils module
95 | -------------------
96 |
97 | .. automodule:: blimpy.utils
98 | :members:
99 | :undoc-members:
100 | :show-inheritance:
101 |
102 | blimpy.waterfall module
103 | -----------------------
104 |
105 | .. automodule:: blimpy.waterfall
106 | :members:
107 | :undoc-members:
108 | :show-inheritance:
109 |
110 | blimpy.io.sigproc module
111 | ------------------------
112 |
113 | .. automodule:: blimpy.io.sigproc
114 | :members:
115 | :undoc-members:
116 | :show-inheritance:
117 |
118 |
119 | Module contents
120 | ---------------
121 |
122 | .. automodule:: blimpy
123 | :members:
124 | :undoc-members:
125 | :show-inheritance:
126 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # http://www.sphinx-doc.org/en/master/config
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | import os
14 | import sys
15 |
16 | sys.path.insert(0, os.path.abspath('../'))
17 |
18 | # -- Project information -----------------------------------------------------
19 |
20 | project = 'blimpy'
21 | copyright = 'Berkeley SETI Research Center'
22 | author = 'Breakthrough Listen'
23 |
24 | # -- General configuration ---------------------------------------------------
25 |
26 | # Add any Sphinx extension module names here, as strings. They can be
27 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
28 | # ones.
29 | extensions = ['recommonmark', 'sphinx.ext.autodoc', 'sphinx.ext.coverage', 'sphinx.ext.napoleon']
30 |
31 | # Add any paths that contain templates here, relative to this directory.
32 | templates_path = ['_templates']
33 |
34 | # List of patterns, relative to source directory, that match files and
35 | # directories to ignore when looking for source files.
36 | # This pattern also affects html_static_path and html_extra_path.
37 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
38 |
39 | # -- Options for HTML output -------------------------------------------------
40 |
41 | # The theme to use for HTML and HTML Help pages. See the documentation for
42 | # a list of builtin themes.
43 | #
44 | html_theme = "sphinx_rtd_theme"
45 | html_theme_options = {
46 | 'logo_only': False,
47 | 'display_version': False,
48 | 'prev_next_buttons_location': 'bottom',
49 | 'style_external_links': False,
50 | # Toc options
51 | 'collapse_navigation': True,
52 | 'sticky_navigation': True,
53 | 'navigation_depth': 4,
54 | 'includehidden': True,
55 | 'titles_only': False
56 | }
57 |
58 | # Add any paths that contain custom static files (such as style sheets) here,
59 | # relative to this directory. They are copied after the builtin static files,
60 | # so a file named "default.css" will overwrite the builtin "default.css".
61 | #DELETED# html_static_path = ['_static']
62 |
63 | # If true, the current module name will be prepended to all description
64 | # unit titles (such as .. function::).
65 | # add_module_names = False
66 |
67 |
--------------------------------------------------------------------------------
/docs/contents.rst:
--------------------------------------------------------------------------------
1 | .. include:: index.rst
2 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | Welcome to blimpy's documentation!
2 | ==================================
3 |
4 | .. toctree::
5 | :maxdepth: 2
6 | :caption: Contents
7 |
8 | license.rst
9 | overview.md
10 | writing_docs.rst
11 | modules.rst
12 |
13 |
14 | .. Remember to add the name of your file here!
15 | Make sure to line up the filename with the left colon in ":caption:"
16 |
--------------------------------------------------------------------------------
/docs/license.rst:
--------------------------------------------------------------------------------
1 | License
2 | ========
3 |
4 | BSD 3-Clause License
5 |
6 | Copyright (c) 2018, Berkeley SETI Research Center
7 | All rights reserved.
8 |
9 | Redistribution and use in source and binary forms, with or without
10 | modification, are permitted provided that the following conditions are met:
11 |
12 | * Redistributions of source code must retain the above copyright notice, this
13 | list of conditions and the following disclaimer.
14 |
15 | * Redistributions in binary form must reproduce the above copyright notice,
16 | this list of conditions and the following disclaimer in the documentation
17 | and/or other materials provided with the distribution.
18 |
19 | * Neither the name of the copyright holder nor the names of its
20 | contributors may be used to endorse or promote products derived from
21 | this software without specific prior written permission.
22 |
23 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
24 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
25 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
26 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
27 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
28 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
29 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
30 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
31 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
32 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/modules.rst:
--------------------------------------------------------------------------------
1 | blimpy
2 | ======
3 |
4 | .. toctree::
5 | :maxdepth: 4
6 |
7 | blimpy
8 |
--------------------------------------------------------------------------------
/docs/overview.md:
--------------------------------------------------------------------------------
1 | [](https://blimpy.readthedocs.io/en/latest/?badge=latest)
2 | [](https://codecov.io/gh/UCBerkeleySETI/blimpy)
3 | [](http://joss.theoj.org/papers/e58ef21f0a924041bf9438fd75f8aed0)
4 |
5 | ## Breakthrough Listen I/O Methods for Python.
6 |
7 | ### Filterbank + Raw file readers
8 |
9 | This repository contains Python readers for interacting with [Sigproc filterbank](http://sigproc.sourceforge.net/sigproc.pdf) (.fil), HDF5 (.h5) and [guppi raw](https://baseband.readthedocs.io/en/stable/guppi/) (.raw) files,
10 | as used in the [Breakthrough Listen](https://seti.berkeley.edu) search for intelligent life.
11 |
12 |
13 | ### Installation
14 |
15 | The latest release can be installed via pip:
16 |
17 | ```
18 | pip install blimpy
19 | ```
20 |
 21 | Or, the latest development version can be installed by cloning the github [repo](https://github.com/UCBerkeleySETI/blimpy) and running `python setup.py install` or `pip install .` (with sudo if required), or directly with the following terminal command:
22 |
23 | ```
24 | pip install https://github.com/UCBerkeleySETI/blimpy/tarball/master
25 | ```
26 |
27 | To install everything required to run the unit tests, run:
28 |
29 | ```
30 | pip install -e .[full]
31 | ```
32 |
33 | You will need `numpy`, `h5py`, `astropy`, `scipy`, and `matplotlib` as dependencies. A `pip install` should pull in numpy, h5py, and astropy, but you may still need to install scipy and matplotlib separately. To interact with files compressed with [bitshuffle](https://github.com/kiyo-masui/bitshuffle), you'll need the `bitshuffle` package too.
34 |
35 | Note that h5py generally needs to be installed in this way:
36 |
37 | ```
38 | $ pip install --no-binary=h5py h5py
39 | ```
40 |
41 |
42 |
43 | ### Command line utilities
44 |
45 | After installation, some command line utilities will be installed:
46 | * `watutil`, Read/write/plot an .h5 file or a .fil file.
47 | * `rawutil`, Plot data in a guppi raw file.
48 | * `fil2h5`, Convert a .fil file into .h5 format.
49 | * `h52fil`, Convert an .h5 file into .fil format.
 50 | * `bldice`, Dice out a smaller frequency region (from/to either .h5 or .fil format).
51 | * `matchfils`, Check if two .fil files are the same.
52 | * `calcload`, Calculate the Waterfall max_load value needed to load the data array for a given file.
53 | * `rawhdr`, Display the header fields of a raw guppi file.
54 | * `stax`, For a collection of .h5 or .fil files sharing the same frequency range, create a vertical stack of waterfall plots as a single PNG file.
55 | * `stix`, For a single very large .h5 or .fil file, create a horizontal or vertical stack of waterfall plots as a single PNG file.
56 |
57 | Use the `-h` flag to any of the above command line utilities to display their available arguments.
58 |
59 | ### Reading blimpy filterbank files in .fil or .h5 format
60 |
 61 | The `blimpy.Waterfall` class provides a Python API for interacting with filterbank data. It supports all BL filterbank data products; see this [example Jupyter notebook](https://github.com/UCBerkeleySETI/blimpy/blob/master/examples/voyager.ipynb) for an overview.
62 |
 63 | From a Python, IPython, or Jupyter notebook environment:
64 |
65 | ```python
66 | from blimpy import Waterfall
67 | fb = Waterfall('/path/to/filterbank.fil')
68 | #fb = Waterfall('/path/to/filterbank.h5') #works the same way
69 | fb.info()
70 | data = fb.data
71 | ```
72 |
73 | ### Reading guppi raw files
74 | The [Guppi Raw format](https://github.com/UCBerkeleySETI/breakthrough/blob/master/doc/RAW-File-Format.md) can be read using the `GuppiRaw` class from `guppi.py`:
75 |
76 | ```python
77 | from blimpy import GuppiRaw
78 | gr = GuppiRaw('/path/to/guppirawfile.raw')
79 |
80 | header, data = gr.read_next_data_block()
81 | ```
82 |
83 | or
84 |
85 | ```python
86 | from blimpy import GuppiRaw
87 | gr = GuppiRaw('/path/to/guppirawfile.raw')
88 |
89 | for header, data_x, data_y in gr.get_data():
 90 |     pass  # process data
91 | ```
92 |
93 | Note: Most users should start analysis with filterbank files, which are smaller in size and have been generated from the guppi raw files.
94 |
95 | ### Further reading
96 |
97 | A detailed overview of the data formats used in Breakthrough Listen can be found in our [data format paper](https://ui.adsabs.harvard.edu/abs/2019arXiv190607391L/abstract). An archive of data files from the Breakthrough Listen program is provided at [seti.berkeley.edu/opendata](http://seti.berkeley.edu/opendata).
98 |
 99 | ### If you have any requests or questions, please let us know!
100 |
101 |
--------------------------------------------------------------------------------
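Building on the `Waterfall` examples in the overview above, a hedged sketch of loading only a frequency slice instead of the whole file; the path and frequency bounds (in MHz) are illustrative and must lie within the file's band.

```python
from blimpy import Waterfall

# Load a narrow slice of the band, then extract it as numpy arrays.
fb = Waterfall('/path/to/filterbank.h5', f_start=8419.29, f_stop=8419.32)
freqs, data = fb.grab_data(f_start=8419.29, f_stop=8419.32)
print(freqs.shape, data.shape)
```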
/docs/writing_docs.rst:
--------------------------------------------------------------------------------
1 | Writing Docs
2 | ============
3 |
4 | This is a rough guide to contributing to blimpy documentation.
5 |
6 | TLDR
7 | ----
8 | * Write docs in ``.rst`` or ``.md``
9 | * Add the name of the new docs file to ``index.rst``
10 | * Preview docs by installing ``sphinx`` via ``pip`` and run ``make html`` in the ``docs/`` directory
11 | * Site will update automatically after pushing to the blimpy repo
12 |
13 | Creating a Page
14 | --------------------
15 | Currently, ``readthedocs`` is able to process two kinds of files: ``reStructuredText (.rst)`` and ``Markdown (.md)``.
16 | You can find a brief guide for reStructuredText `here `_
17 | and a guide for Markdown `here `_.
18 |
19 | The two file types are rendered equally by sphinx, so feel free to use whichever one you're more comfortable with.
20 |
21 | To create a new page, you can create a new file in the same directory as ``index.rst``. After creating the file,
22 | add the filename of the new file to ``index.rst`` to add a link it on the index page. The new file will also show up in the sidebar
23 | after this.
24 |
25 | Previewing Docs
26 | ---------------
27 | The docs are rendered using ``sphinx``, a python package. Use ``pip install sphinx`` to install the package.
28 |
29 | After ``sphinx`` is installed, you can preview your changes by running ``make html`` in the ``docs/`` directory.
30 | The rendered html files will be stored in ``docs/_build/html``. The actual site will look exactly like the rendered
31 | files when built.
32 |
33 | Automatic Documentation
34 | -----------------------
35 | You can run ``sphinx-apidoc -o . ../blimpy/ -f`` in ``blimpy/docs`` to generate autodoc pages from all the python modules in blimpy.
36 | Make sure to run this command every time a new file is added to blimpy.
37 |
38 | Updating the Site
39 | -----------------
40 | The blimpy Github repo is connected to `readthedocs.org` with a webhook. `readthedocs` will automatically update the site
41 | whenever a new commit is added to the repo.
42 |
--------------------------------------------------------------------------------
/paper.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: 'Blimpy: Breakthrough Listen I/O Methods for Python'
3 | tags:
4 | - Python
5 | - astronomy
6 | - radio astronomy
7 | - technosignatures
8 | - SETI
9 | authors:
10 | - name: Danny C. Price
11 | orcid: 0000-0003-2783-1608
12 | affiliation: "+, 1, 2" # (Multiple affiliations must be quoted)
13 | - name: J. Emilio Enriquez
14 | orcid: 0000-0003-2516-3546
15 | affiliation: "+, 1, 3"
16 | - name: Yuhong Chen
17 | affiliation: 1
18 | - name: Mark Siebert
19 | affiliation: 1
20 | affiliations:
21 | - name: Equal contribution from both authors
22 | index: +
23 | - name: Department of Astronomy, University of California Berkeley, Berkeley CA 94720, United States of America
24 | index: 1
25 | - name: Centre for Astrophysics & Supercomputing, Swinburne University of Technology, Hawthorn, VIC 3122, Australia
26 | index: 2
27 | - name: Department of Astrophysics/IMAPP, Radboud University, Nijmegen, Netherlands
28 | index: 3
29 | date: 27 June 2019
30 | bibliography: paper.bib
31 | ---
32 |
33 | # Summary
34 |
35 | The search for extraterrestrial intelligence (SETI) has historically used radio astronomy data as
36 | the main venue to search for artificial signals of extraterrestrial origin. The Breakthrough Listen program
37 | is the latest large-scale project for the search of technosignatures, and thanks to modern telescopes
38 | and instrumentation, as well as significant amounts of dedicated observing time, the program
39 | has become the largest SETI endeavour in history. This has also resulted in an unprecedented amount of
40 | publicly-available data [@Lebofsky:2019]. Over 1 PB of data from the Breakthrough Listen program may be downloaded from
41 | [seti.berkeley.edu/opendata](http://seti.berkeley.edu/opendata).
42 |
43 | The ``Blimpy``---Breakthrough Listen I/O Methods for Python---package provides Python 2.7+/3.6+ utilities
44 | for viewing and interacting with the data formats used within the Breakthrough Listen program.
45 | This includes Sigproc filterbank (.fil) and HDF5 (.h5) files that contain dynamic spectra (aka 'waterfalls'),
46 | and GUPPI raw (.raw) files that contain voltage-level data. Python methods for data extraction,
47 | calibration, and visualization are provided. A suite of command-line utilities are also available.
48 |
49 | The waterfall data product stores an array of detected power across frequency channel (i.e. spectra) over time.
50 | These files can be several GB in size, with up to billions of channels and/or hundreds of thousands of time steps. ``Blimpy`` provides
 51 | convenient methods to extract frequencies and time slices of interest---without loading the full file into memory---which
52 | are presented as ``Numpy`` arrays [@Numpy:2011]. Methods for manipulating lower-level voltage data products
53 | stored in the GUPPI raw format, as generated by the Green Bank Telescope, are also provided.
54 | ``Blimpy`` uses the Matplotlib library [@Pylab:2007] to provide plotting of spectra, time series, and dynamic spectra;
55 | the ``Astropy`` package for handling of astronomical coordinates [@Astropy:2013; @Astropy:2018]; and,
56 | the ``H5py`` package to interact with data stored in HDF5 files [@H5py:2013]. The [turboSETI](https://github.com/UCBerkeleySETI/turbo_seti) package, which conducts doppler acceleration searches for narrowband signals that would indicate the presence of technologically-capable life beyond Earth, uses ``Blimpy`` for file handling and diagnostic plotting.
57 |
58 | ``Blimpy`` was designed to be used by radio astronomers, students and anyone else interested in accessing
59 | Breakthrough Listen data, whether searching for SETI signals, spectral lines, pulsars, fast radio bursts, or other astrophysical phenomena. It has already been used in a number of scientific publications
60 | [@Croft:2016; @Enriquez:2017; @Enriquez:2018; @Enriquez:2019; @Gajjar:2018; @Price:2019a; @Price:2019b].
61 |
62 | # Acknowledgements
63 |
64 | We thank G. Molenaar and B. Brzycki for their code contributions, along with G. Zhang, G. Hellbourg, N. Richard, M. Lebofsky, G. Foster, C. Gilbertson, and the wider _Breakthrough Listen_ collaboration. Breakthrough Listen is managed by the Breakthrough
65 | Initiatives, sponsored by the Breakthrough Prize Foundation.
66 |
67 | # References
68 |
69 |
70 |
71 |
72 |
73 |
74 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | # pep 518
2 | [build-system]
3 | requires = ["setuptools", "wheel", "numpy"]
4 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | astropy
2 | numpy>=1.19
3 | six
4 | setuptools
5 | matplotlib
6 | h5py
7 | scipy
8 | hdf5plugin
9 | pandas
10 | Pillow
11 | psutil
12 | pyparsing==2.4.7
13 |
--------------------------------------------------------------------------------
/requirements_test.txt:
--------------------------------------------------------------------------------
1 | codecov
2 | coverage
3 | pyslalib
4 | pytest
5 | pytest-cov
6 | pytest-order
7 | pytest-runner
8 | setigen
9 |
10 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [aliases]
2 | test=pytest
3 |
4 | [metadata]
5 | description-file=README.md
6 |
7 | [global]
8 | no-binary = h5py
9 |
10 | [tool:pytest]
11 | addopts=--verbose
12 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | """
2 | setup.py -- setup script for use of packages.
3 | """
4 | from setuptools import setup, find_packages
5 |
6 | __version__ = '2.1.4'
7 |
8 | with open("README.md", "r") as fh:
9 | long_description = fh.read()
10 |
11 | # create entry points
12 | # see http://astropy.readthedocs.org/en/latest/development/scripts.html
13 | entry_points = {
14 | 'console_scripts' : [
15 | 'bldice = blimpy.dice:cmd_tool',
16 | 'bl_scrunch = blimpy.bl_scrunch:cmd_tool',
17 | 'calcload = blimpy.calcload:cmd_tool',
18 | 'dsamp = blimpy.dsamp:cmd_tool',
19 | 'fil2h5 = blimpy.fil2h5:cmd_tool',
20 | 'h52fil = blimpy.h52fil:cmd_tool',
21 | 'h5diag = blimpy.h5diag:cmd_tool',
22 | 'peek = blimpy.peek:cmd_tool',
23 | 'rawhdr = blimpy.rawhdr:cmd_tool',
24 | 'rawutil = blimpy.guppi:cmd_tool',
25 | 'srcname = blimpy.srcname:cmd_tool',
26 | 'stax = blimpy.stax:cmd_tool',
27 | 'stix = blimpy.stix:cmd_tool',
28 | 'watutil = blimpy.waterfall:cmd_tool',
29 | ]
30 | }
31 |
32 | with open("requirements.txt", "r") as fh:
33 | install_requires = fh.readlines()
34 |
35 | extras_require = {
36 | 'full': [
37 | 'pyslalib',
38 | ]
39 | }
40 |
41 | setup(name='blimpy',
42 | version=__version__,
43 | description='Python utilities for Breakthrough Listen SETI observations',
44 | long_description=long_description,
45 | long_description_content_type='text/markdown',
46 | license='BSD',
47 | install_requires=install_requires,
48 | url='https://github.com/ucberkeleyseti/blimpy',
49 | author='Danny Price, Emilio Enriquez, Yuhong Chen, Mark Siebert, and BL contributors',
50 | author_email='dancpr@berkeley.edu',
51 | entry_points=entry_points,
52 | packages=find_packages(),
53 | include_package_data=True,
54 | zip_safe=False,
55 | classifiers=[
56 | 'Development Status :: 5 - Production/Stable',
57 | 'Environment :: Console',
58 | 'Natural Language :: English',
59 | 'Operating System :: POSIX :: Linux',
60 | 'Programming Language :: Python :: 3.7',
61 | 'Intended Audience :: Science/Research',
62 | 'License :: OSI Approved :: BSD License',
63 | 'Topic :: Scientific/Engineering :: Astronomy',
64 | ],
65 | setup_requires=['pytest-runner'],
66 | tests_require=['pytest', 'pyslalib'],
67 | test_suite="blimpytests",
68 | )
69 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | import subprocess
2 | import sys
3 | from os import path, listdir
4 |
5 | print("Reached blimpy.tests init!")
6 | here = path.dirname(path.abspath(__file__))
7 | print("Running tests from {}".format(here))
8 | if "test_data" not in listdir(here):
9 | print("Test data has not yet been downloaded. Downloading Data...")
10 | if sys.version_info >= (3, 0):
11 | subprocess.run(["sh", "download_data.sh"])
12 | else:
13 | subprocess.call(["sh", "download_data.sh"])
14 |
--------------------------------------------------------------------------------
/tests/data.py:
--------------------------------------------------------------------------------
1 | from os import path
2 |
3 | here = path.dirname(path.abspath(__file__))
4 |
5 | voyager_fil = path.join(here, 'test_data/Voyager1.single_coarse.fine_res.fil')
6 | voyager_h5 = path.join(here, 'test_data/Voyager1.single_coarse.fine_res.h5')
7 | voyager_raw = path.join(here, 'test_data/blc3_2bit_guppi_57396_VOYAGER1_0006.0013.raw')
8 | voyager_block1 = path.join(here, 'test_data/Voyager1_block1.npy')
9 | test_ifs_fil = path.join(here, 'test_data/test_ifs.fil')
10 | test_h5 = path.join(here, 'test_data/test.h5')
11 | test_fil = path.join(here, 'test_data/test.fil')
12 |
--------------------------------------------------------------------------------
/tests/download_data.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | mkdir test_data
3 | curl --url "http://blpd0.ssl.berkeley.edu/Voyager_data/Voyager1_block1.npy" -o ./test_data/Voyager1_block1.npy
4 | curl --url "http://blpd0.ssl.berkeley.edu/Voyager_data/blc3_2bit_guppi_57396_VOYAGER1_0006.0013.raw" -o ./test_data/blc3_2bit_guppi_57396_VOYAGER1_0006.0013.raw
5 | curl --url "http://blpd0.ssl.berkeley.edu/Voyager_data/Voyager1.single_coarse.fine_res.fil" -o ./test_data/Voyager1.single_coarse.fine_res.fil
6 | curl --url "http://blpd0.ssl.berkeley.edu/Voyager_data/Voyager1.single_coarse.fine_res.h5" -o ./test_data/Voyager1.single_coarse.fine_res.h5
7 | curl --url "http://blpd0.ssl.berkeley.edu/Voyager_data/test_ifs.fil" -o ./test_data/test_ifs.fil
8 | ls ./test_data
9 |
--------------------------------------------------------------------------------
/tests/run_tests.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | export MPLBACKEND=agg
3 | coverage run --source=blimpy -m pytest
4 | EXITCODE=$?
5 | if [ $EXITCODE -ne 0 ]; then
6 | echo
7 | echo '*** Oops, coverage pytest failed, exit code = '$EXITCODE' ***'
8 | echo
9 | exit $EXITCODE
10 | fi
11 | coverage html -d coverage_html
12 | EXITCODE=$?
13 | if [ $EXITCODE -ne 0 ]; then
14 | echo
15 | echo '*** Oops, coverage report failed, exit code = '$EXITCODE' ***'
16 | echo
17 | exit $EXITCODE
18 | fi
19 |
20 |
--------------------------------------------------------------------------------
/tests/test_bl_scrunch.py:
--------------------------------------------------------------------------------
1 | r"""
2 | test_bl_scrunch
3 | """
4 |
5 | import os
6 | import pytest
7 |
8 | import blimpy as bl
9 | from tests.data import voyager_h5
10 |
11 | OUTDIR = os.path.dirname(voyager_h5) + "/"
12 | print("test_bl_scrunch: output directory:", OUTDIR)
13 |
14 |
15 | def test_scrunch():
16 | r"""
 17 |     Tests scrunching the frequency axis of an .h5 file with bl_scrunch.
 18 |     Note that it does not verify the numerical accuracy of the scrunched output.
19 | """
20 | print("\n===== test_scrunch BEGIN")
21 | # Creating test file.
22 | bl.bl_scrunch.bl_scrunch(voyager_h5, new_filename='test.scrunched.h5', f_scrunch=8)
23 |
24 | # Deleting test file
25 | os.remove('test.scrunched.h5')
26 | print("\n===== test_scrunch END")
27 |
28 |
29 | def test_nameless():
30 | r"""
31 | The script should trigger a system
32 | exit if the user fails to provide a file name.
33 | """
34 |
35 | print("\n===== test_nameless BEGIN")
36 | bl.bl_scrunch.bl_scrunch(voyager_h5, out_dir=OUTDIR, f_scrunch=8)
37 |
38 | with pytest.raises(SystemExit):
39 | args = ["-o", OUTDIR, "-n", "test.scrunched.h5", "-f", "8", "-l", "0.1"]
40 | bl.bl_scrunch.cmd_tool(args)
41 | print("\n===== test_nameless END")
42 |
43 |
44 | def test_cmd():
45 | r""" Pass in some example sets of arguments """
 46 |     # exercise several argument sets; the final one contains an invalid option
47 | print("\n===== test_cmd BEGIN")
48 | args = [voyager_h5, "-o", OUTDIR, "-n", "test.scrunched.h5", "-f", "8", "-l", "0.1"]
49 | bl.bl_scrunch.cmd_tool(args)
50 | args = [voyager_h5, "-f", "8", "-l", "0.1", "--out_dir", OUTDIR]
51 | bl.bl_scrunch.cmd_tool(args)
52 | args = [voyager_h5, "--mickey_mouse", "-f", "8", "-l", "0.1", "-o", OUTDIR]
53 | with pytest.raises(SystemExit):
54 | bl.bl_scrunch.cmd_tool(args)
55 | print("\n===== test_cmd END")
56 |
57 |
58 | if __name__ == "__main__":
59 | test_scrunch()
60 | test_nameless()
61 | test_cmd()
62 |
--------------------------------------------------------------------------------
/tests/test_calc_n_coarse_chan.py:
--------------------------------------------------------------------------------
1 | from blimpy import Waterfall
2 | from tests.data import voyager_h5, test_ifs_fil
3 |
4 |
5 | HIRES_THRESHOLD = 2**20
6 |
7 |
8 | def test_ncc_chan_bw():
9 | wf = Waterfall(voyager_h5)
 10 |     print("test_ncc_chan_bw: telescope_id:", wf.header['telescope_id'])
11 | print("test_ncc_chan_bw: nchans:", wf.header['nchans'])
12 |
13 | n_coarse_chan = wf.calc_n_coarse_chan(16)
14 | print("test_ncc_chan_bw: n_coarse_chan [chan_bw=16]:", n_coarse_chan)
15 | assert n_coarse_chan == 64
16 |
17 | n_coarse_chan = wf.calc_n_coarse_chan(1)
18 | print("test_ncc_chan_bw: n_coarse_chan [chan_bw=1]:", n_coarse_chan)
 19 |     assert 2.9 < n_coarse_chan < 3.0
20 |
21 |
22 | def test_ncc_gbt():
23 | wf = Waterfall(test_ifs_fil)
24 | wf.header['telescope_id'] = 6
25 | print("test_ncc_gbt: telescope_id:", wf.header['telescope_id'])
26 | print("test_ncc_gbt: starting nchans:", wf.header['nchans'])
27 |
28 | n_coarse_chan = wf.calc_n_coarse_chan()
29 | print("test_ncc_gbt: n_coarse_chan [chan_bw=None]:", n_coarse_chan)
30 | assert n_coarse_chan == 64
31 |
32 | wf.header['nchans'] = 3 * HIRES_THRESHOLD
33 | print("\ntest_ncc_gbt: nchans:", wf.header['nchans'])
34 | n_coarse_chan = wf.calc_n_coarse_chan()
35 | print("test_ncc_gbt: n_coarse_chan [chan_bw=None]:", n_coarse_chan)
36 | assert n_coarse_chan == 3
37 |
38 | wf.header['nchans'] = HIRES_THRESHOLD
39 | print("\ntest_ncc_gbt: nchans:", wf.header['nchans'])
40 | n_coarse_chan = wf.calc_n_coarse_chan()
41 | print("test_ncc_gbt: n_coarse_chan [chan_bw=None]:", n_coarse_chan)
42 | assert n_coarse_chan == 1
43 |
44 | wf.header['nchans'] = HIRES_THRESHOLD - 1
45 | print("\ntest_ncc_gbt: nchans:", wf.header['nchans'])
46 | n_coarse_chan = wf.calc_n_coarse_chan()
47 | print("test_ncc_gbt: n_coarse_chan [chan_bw=None]:", n_coarse_chan)
48 | assert n_coarse_chan == 64
49 |
50 | wf.header['nchans'] = HIRES_THRESHOLD + 1
51 | print("\ntest_ncc_gbt: nchans:", wf.header['nchans'])
52 | n_coarse_chan = wf.calc_n_coarse_chan()
53 | print("test_ncc_gbt: n_coarse_chan [chan_bw=None]:", n_coarse_chan)
54 | assert n_coarse_chan == 64
55 |
56 |
57 | def test_ncc_42():
58 | wf = Waterfall(test_ifs_fil)
59 | wf.header['telescope_id'] = 42
60 | print("test_ncc_42: telescope_id:", wf.header['telescope_id'])
61 | print("test_ncc_42: starting nchans:", wf.header['nchans'])
62 |
63 | n_coarse_chan = wf.calc_n_coarse_chan()
64 | print("test_ncc_42: n_coarse_chan [chan_bw=None]:", n_coarse_chan)
65 | assert n_coarse_chan == 64
66 |
67 | wf.header['nchans'] = 3 * HIRES_THRESHOLD
68 | print("\ntest_ncc_42: nchans:", wf.header['nchans'])
69 | n_coarse_chan = wf.calc_n_coarse_chan()
70 | print("test_ncc_42: n_coarse_chan [chan_bw=None]:", n_coarse_chan)
71 | assert n_coarse_chan == 3
72 |
73 | wf.header['nchans'] = HIRES_THRESHOLD
74 | print("\ntest_ncc_42: nchans:", wf.header['nchans'])
75 | n_coarse_chan = wf.calc_n_coarse_chan()
76 | print("test_ncc_42: n_coarse_chan [chan_bw=None]:", n_coarse_chan)
77 | assert n_coarse_chan == 1
78 |
79 | wf.header['nchans'] = HIRES_THRESHOLD - 1
80 | print("\ntest_ncc_42: nchans:", wf.header['nchans'])
81 | n_coarse_chan = wf.calc_n_coarse_chan()
82 | print("test_ncc_42: n_coarse_chan [chan_bw=None]:", n_coarse_chan)
83 | assert n_coarse_chan == 64
84 |
85 | wf.header['nchans'] = HIRES_THRESHOLD + 1
86 | print("\ntest_ncc_42: nchans:", wf.header['nchans'])
87 | n_coarse_chan = wf.calc_n_coarse_chan()
88 | print("test_ncc_42: n_coarse_chan [chan_bw=None]:", n_coarse_chan)
89 | assert n_coarse_chan == 64
90 |
91 |
92 | if __name__ == "__main__":
93 | wf = Waterfall(test_ifs_fil)
94 | wf.info()
95 | test_ncc_chan_bw()
96 | test_ncc_gbt()
97 | test_ncc_42()
98 |
--------------------------------------------------------------------------------
/tests/test_calcload.py:
--------------------------------------------------------------------------------
1 | """
2 | Test the calcload.py utility and function calc_max_load()
3 | """
4 | from tests.data import voyager_h5, voyager_fil
5 | from blimpy.calcload import cmd_tool, calc_max_load
6 |
7 | def test_calcload():
8 | r""" Test the calcload command line tool """
9 | args = [voyager_h5]
10 | cmd_tool(args)
11 | args = ['-v', voyager_fil]
12 | cmd_tool(args)
13 |
14 | def test_calc_max_load():
15 | gb1 = calc_max_load(voyager_h5)
16 | gb2 = calc_max_load(voyager_fil)
17 | assert(gb1 == gb2 == 1.0)
18 |
19 | if __name__ == "__main__":
20 | test_calcload()
21 | test_calc_max_load()
22 |
--------------------------------------------------------------------------------
/tests/test_compare_voyager.py:
--------------------------------------------------------------------------------
1 | import blimpy as bl
2 | import numpy as np
3 | from pprint import pprint
4 | from tests.data import voyager_fil, voyager_h5
5 | from blimpy.plotting.config import plt
6 |
7 |
8 | def test_compare_waterfall_fil_to_h5():
9 | """ Load Voyager dataset and test that both fil and hdf5 readers return same headers and data """
10 |
11 | print("Loading FIL and HDF5 data with Waterfall()..."),
12 | a = bl.Waterfall(voyager_h5)
13 | b = bl.Waterfall(voyager_fil)
14 |
 15 |     print("Reading headers...\nHDF5 file header:")
16 | pprint(a.header)
17 | print("\nFIL file header:")
18 | pprint(b.header)
19 | print("Headers are loading OK")
20 |
21 | print("\nChecking header values match..."),
22 | for key in b.header.keys():
23 | assert b.header[key] == a.header[key]
24 |
25 | print("Checking datatype matches..."),
26 | assert a.data.dtype == b.data.dtype
27 |
28 | print("Checking data matches..."),
29 | assert np.allclose(a.data, b.data)
30 | assert a.data.dtype == b.data.dtype
31 |
32 |
33 | def test_waterfall_fil_to_h5_methods_and_attributes():
34 | """ Compare attributes and check methods """
35 | a = bl.Waterfall(voyager_h5)
36 | b = bl.Waterfall(voyager_fil)
37 |
38 | print("Comparing attributes of classes match where expected")
39 | assert a.beam_axis == b.beam_axis
40 | assert a.freq_axis == b.freq_axis
41 | assert a.time_axis == b.time_axis
42 |
43 | assert a.calc_n_coarse_chan() == b.calc_n_coarse_chan()
44 | assert a.file_shape == b.file_shape
45 |
46 | assert a.n_channels_in_file == b.n_channels_in_file
47 | assert a.n_ints_in_file == b.n_ints_in_file
48 | assert a.selection_shape == b.selection_shape
49 |
50 | print("Checking if basic methods run without raising Exceptions")
51 | # Check they can be run
52 | a.container.populate_freqs()
53 | a.container.populate_timestamps()
54 | a.info()
55 | a.blank_dc(1)
56 | a.calibrate_band_pass_N1()
57 |
58 | b.container.populate_freqs()
59 | b.container.populate_timestamps()
60 | b.info()
61 | b.blank_dc(1)
62 | b.calibrate_band_pass_N1()
63 |
64 | dir_a = dir(a)
65 | dir_b = dir(b)
66 |
67 | print("Attr/methods in HDF5 but not in FIL:")
68 | for item in dir_a:
69 | if item not in dir_b:
70 | raise ValueError("HDF5 item is not in FIL:" + str(item))
71 |
72 | print("Attr/methods in FIL but not in HDF5:")
73 | for item in dir_b:
74 | if item not in dir_a:
75 | raise ValueError("FIL item is not in HDF5:" + str(item))
76 |
77 |
78 | def test_plotting_doesnt_cause_exceptions():
 79 |     """ Try running the plotting routines. They should not raise exceptions even without X windows """
80 | a = bl.Waterfall(voyager_h5)
81 | b = bl.Waterfall(voyager_fil)
82 |
83 | a.plot_all()
84 | plt.clf()
85 | a.plot_kurtosis()
86 | plt.clf()
87 | a.plot_spectrum()
88 | plt.clf()
89 | a.plot_spectrum_min_max()
90 | plt.clf()
91 | a.plot_waterfall()
92 | plt.clf()
93 | a.plot_time_series()
94 | plt.clf()
95 |
96 | b.plot_all()
97 | plt.clf()
98 | b.plot_kurtosis()
99 | plt.clf()
100 | b.plot_spectrum()
101 | plt.clf()
102 | b.plot_spectrum_min_max()
103 | plt.clf()
104 | b.plot_waterfall()
105 | plt.clf()
106 | b.plot_time_series()
107 | plt.clf()
108 |
109 |
--------------------------------------------------------------------------------
/tests/test_dedoppler.py:
--------------------------------------------------------------------------------
1 | from os.path import dirname
2 | import numpy as np
3 | from astropy import units as u
4 | import setigen as stg
5 | import matplotlib.pyplot as plt
6 | from blimpy.signal_processing.dedoppler import dedoppler_1
7 | from blimpy import Waterfall
8 | from blimpy.plotting import plot_waterfall
9 | from tests.data import voyager_fil
10 |
11 |
12 | PLOT_DIR = dirname(voyager_fil)
13 | FIL_FILE = PLOT_DIR + "/test_dedoppler.fil"
14 | PNG_FILE = PLOT_DIR + "/test_dedoppler.png"
15 |
16 |
17 | # Plotting constants
18 | fontsize = 16
19 | font_dict = {"family" : "DejaVu Sans", "size" : fontsize}
20 | N_PLOTS = 6
21 |
22 |
23 | def sort2(x, y):
24 | r""" Return lowest value, highest value"""
25 | if y < x:
26 | return y, x
27 | return x, y
28 |
29 |
30 | def plotter(counter, drift_rate):
31 | wf = Waterfall(FIL_FILE)
32 | dedoppler_1(wf, drift_rate)
33 | wf.header["source_name"] = "Dedoppler at D.R. {} Hz".format(drift_rate)
34 | plt.subplot(N_PLOTS, 1, counter)
35 | return plot_waterfall(wf)
36 |
37 |
38 | def test_dedoppler_1():
39 |
40 | # Generate the Filterbank file.
41 | print("test_dedoppler_1: Creating Filterbank file {}".format(FIL_FILE))
42 | frame = stg.Frame(fchans=1024*u.pixel,
43 | tchans=32*u.pixel,
44 | df=2.7939677238464355*u.Hz,
45 | dt=18.253611008*u.s,
46 | fch1=6095.214842353016*u.MHz,
47 | ascending=True)
48 | frame.add_noise(x_mean=10, noise_type='chi2')
49 | frame.add_signal(stg.constant_path(f_start=frame.get_frequency(index=200),
50 | drift_rate=2*u.Hz/u.s),
51 | stg.constant_t_profile(level=frame.get_intensity(snr=30)),
52 | stg.gaussian_f_profile(width=40*u.Hz),
53 | stg.constant_bp_profile(level=1))
54 | frame.save_fil(FIL_FILE)
55 |
56 |     # Load the Filterbank file.
57 | print("test_dedoppler_1: Loading Filterbank file {}".format(FIL_FILE))
58 | wf = Waterfall(FIL_FILE)
59 | freqs = wf.get_freqs()
60 | the_lowest, the_highest = sort2(freqs[0], freqs[-1])
61 | the_midpoint = np.abs(the_lowest + the_highest) / 2.
62 |
63 | # Initialise plotting.
64 | print("test_dedoppler_1: Plotting to file {}".format(PNG_FILE))
65 | plt.subplots(N_PLOTS, sharex=True, sharey=True, figsize=(10, 2 * N_PLOTS))
66 | wf.header["source_name"] = "Initial Data"
67 |
68 | # Plot 1.
69 | plt.subplot(N_PLOTS, 1, 1)
70 | plot_waterfall(wf)
71 |
72 | # Plot #2.
73 | plotter(2, 1.0)
74 |
75 | # Plot #3.
76 | plotter(3, 1.5)
77 |
78 | # Plot #4.
79 | plotter(4, 2.2)
80 |
81 | # Plot #5.
82 | plotter(5, 2.7)
83 |
84 | # Plot #6.
85 | plotter(6, 3.6)
86 |
87 | # Finish up plots.
88 | plt.xticks(np.linspace(the_lowest, the_highest, num=4), ["","","",""])
89 |     factor = 1e6        # convert MHz tick offsets to Hz
90 |     units = "Hz"
91 | xloc = np.linspace(the_lowest, the_highest, 5)
92 | xticks = [round(loc_freq) for loc_freq in (xloc - the_midpoint) * factor]
93 | if np.max(xticks) > 1000:
94 | xticks = [xt / 1000 for xt in xticks]
95 | units = "kHz"
96 | plt.xticks(xloc, xticks)
97 | plt.xlabel("Relative Frequency [%s] from %f MHz" % (units, the_midpoint), fontdict=font_dict)
98 | plt.subplots_adjust(hspace=0, wspace=0)
99 |
100 | plt.savefig(PNG_FILE, dpi=200, bbox_inches="tight")
101 | print("test_dedoppler_1: End")
102 |
103 |
104 | if __name__ == "__main__":
105 | test_dedoppler_1()
106 |
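107 | # A minimal illustrative sketch of the underlying idea -- NOT blimpy's actual
108 | # dedoppler_1 implementation: a drifting tone is straightened by rolling each
109 | # integration by the number of channels the drift has accumulated by then.
110 | # _example_dedrift is a hypothetical helper for illustration only.
111 | def _example_dedrift(data, drift_rate_hz_per_s, dt_s, df_hz):
112 |     out = np.empty_like(data)
113 |     for ii in range(data.shape[0]):
114 |         shift = int(round(drift_rate_hz_per_s * ii * dt_s / df_hz))
115 |         out[ii] = np.roll(data[ii], -shift)   # undo the accumulated drift
116 |     return out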
--------------------------------------------------------------------------------
/tests/test_dice.py:
--------------------------------------------------------------------------------
1 | import os
2 | from blimpy import dice
3 | import pytest
4 |
5 | from tests.data import voyager_h5
6 | from tests.data import voyager_fil
7 | TEST_DATA_DIR = os.path.dirname(voyager_h5)
8 | HERE = os.getcwd()
9 |
10 | # This section makes sure that the tool exits correctly
11 | # when certain issues are encountered.
12 |
13 | def test_no_args():
14 | """
15 | Make sure the tool closes if no arguments are passed in
16 | """
17 | with pytest.raises(SystemExit):
18 | dice.cmd_tool()
19 |
20 | def test_no_input_file():
21 | with pytest.raises(SystemExit):
22 | args = ['-b', '8419.24', '-e', '8419.35', '-x', 'h5', '-o', 'test_dice.h5']
23 | dice.cmd_tool(args)
24 |
25 | def test_missing_format_type():
26 | with pytest.raises(SystemExit):
27 | args = ['-f', voyager_h5, '-b', '8419.24', '-e', '8419.35', '-x']
28 | dice.cmd_tool(args)
29 |
30 | # This section makes sure that the tool can handle various
31 | # file formats without exception.
32 |
33 | def test_h5():
34 | os.chdir(TEST_DATA_DIR)
35 | args = ['-f', voyager_h5, '-b', '8419.24', '-e', '8419.35', '-x', 'h5', '-o', 'test_dice.h5']
36 | dice.cmd_tool(args)
37 | os.chdir(HERE)
38 |
39 | def test_h5_no_out_file():
40 | os.chdir(TEST_DATA_DIR)
41 | args = ['-f', voyager_h5, '-b', '8419.24', '-e', '8419.35', '-x', 'h5']
42 | dice.cmd_tool(args)
43 | os.chdir(HERE)
44 |
45 | def test_fil():
46 | os.chdir(TEST_DATA_DIR)
47 | args = ['-f', voyager_fil, '-b', '8419.24', '-e', '8419.35', '-x', 'fil', '-o', 'test_dice.fil']
48 | dice.cmd_tool(args)
49 | os.chdir(HERE)
50 |
51 | def test_fil_no_out_file():
52 | os.chdir(TEST_DATA_DIR)
53 | args = ['-f', voyager_fil, '-b', '8419.24', '-e', '8419.35', '-x', 'fil']
54 | dice.cmd_tool(args)
55 | os.chdir(HERE)
56 |
57 |
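58 | # A minimal illustrative sketch: the same frequency cut-out can be made
59 | # in-library with Waterfall's f_start/f_stop selection and write_to_hdf5()
60 | # (both exercised elsewhere in this suite).  _example_dice is a hypothetical
61 | # helper for illustration only.
62 | def _example_dice(in_path, out_path, f_start=8419.24, f_stop=8419.35):
63 |     import blimpy as bl
64 |     wf = bl.Waterfall(in_path, f_start=f_start, f_stop=f_stop)
65 |     wf.write_to_hdf5(out_path)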
--------------------------------------------------------------------------------
/tests/test_dsamp.py:
--------------------------------------------------------------------------------
1 | """
2 | # test_dsamp
3 | """
4 |
5 | import pytest
6 | import blimpy as bl
7 | from tests.data import voyager_fil, voyager_h5, test_h5, test_fil
8 |
9 |
10 | GROUP_SIZE = 3
11 |
12 |
13 | def test_dsamp_fil_to_h5():
14 | """ fil to h5 test.
15 | """
16 | bl.dsamp.make_output_file(voyager_fil, test_h5, GROUP_SIZE, True)
17 |
18 |
19 | def test_dsamp_h5_to_h5():
20 | """ h5 to h5 test.
21 | """
22 | bl.dsamp.make_output_file(voyager_h5, test_h5, GROUP_SIZE, True)
23 |
24 |
25 | def test_dsamp_h5_to_fil():
26 | """ h5 to fil test.
27 | """
28 | bl.dsamp.make_output_file(voyager_h5, test_fil, GROUP_SIZE, False)
29 |
30 |
31 | def test_dsamp_fil_to_fil():
32 | """ fil to fil test.
33 | """
34 | bl.dsamp.make_output_file(voyager_fil, test_fil, GROUP_SIZE, False)
35 |
36 |
37 | def test_cmd_tool():
38 | """
39 | Exercise cmd_tool.
40 | """
41 | args = [voyager_fil, test_h5, "-s", str(GROUP_SIZE)]
42 | bl.dsamp.cmd_tool(args=args)
43 |
44 |
45 | def test_no_args():
46 | """
47 |     The cmd tool must exit when no file name is supplied.
48 | """
49 | with pytest.raises(SystemExit):
50 | bl.dsamp.cmd_tool("")
51 | with pytest.raises(SystemExit):
52 | bl.dsamp.cmd_tool("-h")
53 |
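54 | # A minimal illustrative sketch of what "group size" means -- NOT necessarily
55 | # how blimpy.dsamp reduces each group internally: every GROUP_SIZE consecutive
56 | # time integrations collapse into one output integration.
57 | # _example_downsample is a hypothetical helper for illustration only.
58 | def _example_downsample(data, group_size):
59 |     import numpy as np
60 |     n_out = data.shape[0] // group_size            # keep whole groups only
61 |     trimmed = data[:n_out * group_size]
62 |     return trimmed.reshape(n_out, group_size, *data.shape[1:]).sum(axis=1)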
--------------------------------------------------------------------------------
/tests/test_ephemeris.py:
--------------------------------------------------------------------------------
1 | import blimpy as bl
2 | import numpy as np
3 | from pprint import pprint
4 | import pylab as plt
5 |
6 | from tests.data import voyager_fil, voyager_h5
7 |
8 | from blimpy.ephemeris import compute_lst, compute_lsrk
9 |
10 | def test_compute_lst():
11 |     """ Load Voyager dataset and test LST computation """
12 |     print("Loading HDF5 data with Waterfall()...")
13 | a = bl.Waterfall(voyager_h5)
14 | print(compute_lst(a))
15 |
16 |
17 | def test_compute_lsrk():
18 | a = bl.Waterfall(voyager_h5)
19 | print(compute_lsrk(a))
20 |
21 |
22 | if __name__ == "__main__":
23 | test_compute_lst()
24 | test_compute_lsrk()
--------------------------------------------------------------------------------
/tests/test_fil2h5.py:
--------------------------------------------------------------------------------
1 | """
2 | # test_fil2h5
3 | """
4 |
5 | import pytest
6 |
7 | import os
8 | import blimpy as bl
9 | from tests.data import voyager_fil
10 |
11 |
12 | VOYA_DIR = os.path.dirname(voyager_fil) + "/"
13 |
14 | def name_case(in_string, out_string):
15 | infile = in_string
16 | outfile = out_string
17 | os.system("cp " + voyager_fil + " " + infile)
18 | bl.fil2h5.make_h5_file(infile)
19 | if not os.path.exists(outfile):
20 | print("\n*** name_case: file {} does not exist. Input file {}\n".format(outfile, infile))
21 | assert False
22 | os.remove(infile)
23 | os.remove(outfile)
24 |
25 | def test_fil2h5_conversion():
26 |     """ Tests the conversion of fil files into h5, with and without the .h5 suffix supplied.
27 | """
28 |
29 | # Creating test file.
30 | bl.fil2h5.make_h5_file(voyager_fil, new_filename='test.h5')
31 |
32 | # Testing filename
33 | bl.fil2h5.make_h5_file(voyager_fil, new_filename='test')
34 |
35 | # Deleting test file
36 | os.remove('test.h5')
37 |
38 | def test_cmd_tool():
39 | """
40 |     Convert the same Voyager file, but now through the cmd tool.
41 |     """
42 |     # A valid file argument is supplied, so no SystemExit is expected.
43 | args = [voyager_fil, '-n', VOYA_DIR + 'cmd.h5']
44 | bl.fil2h5.cmd_tool(args=args)
45 |
46 | def test_fil2h5_input_names():
47 | """ Make sure that the output name does not get mangled.
48 | """
49 | name_case("abcd.filter.def.fil", "abcd.filter.def.h5")
50 | name_case("abcd.efgh", "abcd.efgh.h5")
51 | name_case("abcd", "abcd.h5")
52 |
53 | def test_no_args():
54 | """
55 |     The cmd tool must exit when no file name is supplied.
56 | """
57 | with pytest.raises(SystemExit):
58 | bl.fil2h5.cmd_tool("")
59 |
60 |
61 | if __name__ == "__main__":
62 | test_fil2h5_conversion()
63 | test_cmd_tool()
64 | test_fil2h5_input_names()
65 | test_no_args()
66 |
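67 | # A standalone restatement of the naming rule the cases above imply: a
68 | # trailing '.fil' is replaced with '.h5', anything else just gains '.h5'.
69 | # _example_h5_name is a hypothetical helper for illustration only.
70 | def _example_h5_name(in_name):
71 |     if in_name.endswith(".fil"):
72 |         return in_name[:-4] + ".h5"
73 |     return in_name + ".h5"
74 |
75 | assert _example_h5_name("abcd.filter.def.fil") == "abcd.filter.def.h5"
76 | assert _example_h5_name("abcd.efgh") == "abcd.efgh.h5"
77 | assert _example_h5_name("abcd") == "abcd.h5"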
--------------------------------------------------------------------------------
/tests/test_file_wrapper.py:
--------------------------------------------------------------------------------
1 | # the lack of some implementations causes a maximum of
2 | # 18 guaranteed misses in codecov
3 |
4 | import blimpy as bl
5 | import pytest
6 | from tests.data import voyager_fil, voyager_h5, here
7 |
8 | def test_read_fns():
9 |     """ Exercise the container read functions.  Only the fil reader's
10 |     read_all() is expected to raise NotImplementedError; the remaining
11 |     calls below run without raising and are covered here. """
12 | a = bl.Waterfall(voyager_fil)
13 | b = bl.Waterfall(voyager_h5)
14 | with pytest.raises(NotImplementedError):
15 | a.container.read_all()
16 | a.container.read_row(0)
17 | a.container.read_rows(0, 2)
18 |
19 | b.container.read_all()
20 | b.container.read_row(0)
21 | b.container.read_rows(0, 2)
22 |
23 | def test_file_wrapper_open_h5_file():
24 | from blimpy.io.file_wrapper import open_file
25 | h5r = open_file(voyager_h5)
26 |
27 | # check all if branches
28 | h5r.populate_timestamps(update_header=False)
29 | h5r.populate_timestamps(update_header=True)
30 |
31 | with pytest.raises(ValueError):
32 | h5r.read_blob((300, 300, 300, 300), -1)
33 |
34 |     # Exercise the internal selection helpers directly.
35 | h5r._setup_selection_range(
36 | f_start=3, f_stop=4, t_stop=3, t_start=4
37 | )
38 | h5r._setup_selection_range()
39 |
40 | h5r._init_empty_selection()
41 |
42 |     # Read again to make sure that
43 |     # the user-facing functions still work.
44 |
45 | h5r.populate_freqs()
46 |
47 | # just arbitrary values
48 | h5r.calc_n_blobs(300)
49 | h5r.read_blob((300, 300, 300, 300))
50 |
51 | h5r.calc_n_coarse_chan()
52 | h5r.calc_n_coarse_chan(1)
53 |     # at least 2 if branches remain unexplored here
54 |
55 | def test_file_wrapper_open_fil_file():
56 | from blimpy.io.file_wrapper import open_file
57 | filr = open_file(voyager_fil)
58 |
59 | filr.calc_n_blobs(300)
60 | filr.read_blob((300, 300, 300, 300))
61 |
62 | def test_invalid_files():
63 | from blimpy.io.file_wrapper import open_file
64 |
65 | # script should error-out if file does not exist
66 | with pytest.raises(IOError):
67 | open_file(here + 'file_does_not_exist.h5')
68 |
69 | # script should error-out if file is not a
70 | # valid blimpy data file
71 | with pytest.raises(NotImplementedError):
72 | open_file(here + '/run_tests.sh')
73 |
74 | if __name__ == "__main__":
75 | test_read_fns()
76 |     test_file_wrapper_open_h5_file()
77 |     test_file_wrapper_open_fil_file()
78 |     test_invalid_files()
79 |
--------------------------------------------------------------------------------
/tests/test_filterbank_voyager.py:
--------------------------------------------------------------------------------
1 | import blimpy as bl
2 | import numpy as np
3 | from pprint import pprint
4 | import pytest
5 | from tests.data import voyager_fil, voyager_h5
6 | from blimpy.plotting.config import plt
7 |
8 |
9 | def test_comparison_filterbank_fil_to_h5():
10 | """ Load Voyager dataset and test that both fil and hdf5 readers return same headers and data """
11 |
12 |     print("Loading FIL and HDF5 data with Waterfall()...")
13 | a = bl.Waterfall(voyager_h5)
14 | b = bl.Waterfall(voyager_fil)
15 |
16 | print("Reading headers..")
17 | print("\nHDF5 file header:")
18 | pprint(a.header)
19 | print("\nFIL file header:")
20 | pprint(b.header)
21 | print("Headers are loading OK")
22 |
23 |     print("\nChecking header values match...")
24 |     for key in b.header.keys():
25 |         assert b.header[key] == a.header[key]
26 |
27 |     print("Checking datatype matches...")
28 |     assert a.data.dtype == b.data.dtype
29 |
30 |     print("Checking data matches...")
31 | assert np.allclose(a.data, b.data)
32 | assert a.data.dtype == b.data.dtype
33 |
34 |
35 | def test_plotting_doesnt_cause_exceptions():
36 |     """ Try running the plotting routines. They should not raise exceptions even without X windows """
37 | a = bl.Waterfall(voyager_h5)
38 | b = bl.Waterfall(voyager_fil)
39 |
40 | a.plot_all()
41 | a.plot_kurtosis()
42 | a.plot_spectrum()
43 | a.plot_spectrum_min_max()
44 | a.plot_waterfall()
45 | a.plot_time_series()
46 | plt.clf() # Fix issue #140
47 |
48 | b.plot_all()
49 | b.plot_kurtosis()
50 | b.plot_spectrum()
51 | b.plot_spectrum_min_max()
52 | b.plot_waterfall()
53 | b.plot_time_series()
54 | plt.clf()
55 |
56 |
57 | def test_cmdtool():
58 | with pytest.raises(SystemExit):
59 | bl.waterfall.cmd_tool(args=[])
60 |
61 |
--------------------------------------------------------------------------------
/tests/test_guppi.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | import blimpy as bl
4 | from tests.data import voyager_raw, voyager_block1
5 |
6 | def test_guppi():
7 |
8 | gr = bl.guppi.GuppiRaw(voyager_raw)
9 | header, _ = gr.read_header()
10 | nchans = header['OBSNCHAN']
11 | assert (nchans == 64), 'test_guppi: OBSNCHAN should be 64 but observed to be {}'.format(nchans)
12 |
13 | _, data_block_x1, data_block_y1 = gr.read_next_data_block_int8()
14 | _, data_block_x2, data_block_y2 = gr.read_next_data_block_int8()
15 |
16 | assert not np.array_equal(data_block_x1, data_block_x2) \
17 | and not np.array_equal(data_block_y1, data_block_y2) \
18 | , "test_guppi: Data read from two blocks should not be equal"
19 |
20 | gr = bl.guppi.GuppiRaw(voyager_raw)
21 | _, data_block_1 = gr.read_next_data_block()
22 |
23 | data_block_reference_1 = np.load(voyager_block1)
24 |
25 | assert np.array_equal(data_block_1[:, :1000, :], data_block_reference_1) \
26 | , "test_guppi: Data read should be consistent with previous versions"
27 |
28 | data_block_casted_1 = np.append(data_block_x1,
29 | data_block_y1, axis=2).astype('float32').view('complex64')
30 |
31 | assert np.array_equal(data_block_1, data_block_casted_1) \
32 | , "test_guppi: Reading as int8 then casting should be equal to reading directly as complex64"
33 |
34 | # We have to keep instantiating objects because
35 | # the plotting routines read data in a manner
36 | # destructive to the object.
37 |
38 | def test_spectrum():
39 | gr = bl.guppi.GuppiRaw(voyager_raw)
40 | gr.plot_spectrum(flag_show=False)
41 |
42 | def test_histogram():
43 | gr = bl.guppi.GuppiRaw(voyager_raw)
44 | gr.plot_histogram(flag_show=False)
45 |
46 | def test_statistics():
47 | gr = bl.guppi.GuppiRaw(voyager_raw)
48 | gr.print_stats()
49 |
50 | def test_fil_header():
51 | gr = bl.guppi.GuppiRaw(voyager_raw)
52 | header = gr.generate_filterbank_header()
53 | print("Generated header:\n", header)
54 |
55 | def test_rawhdr():
56 | from blimpy.rawhdr import cmd_tool
57 | args = [voyager_raw]
58 | cmd_tool(args)
59 |
60 | if __name__ == "__main__":
61 | test_guppi()
62 | test_spectrum()
63 | test_histogram()
64 | test_statistics()
65 | test_fil_header()
66 | test_rawhdr()
67 |
68 |
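69 | # A minimal standalone demonstration of the cast asserted in test_guppi:
70 | # complex64 is two float32s (re, im) back to back, so interleaved int8
71 | # voltages can be viewed as complex samples after widening to float32.
72 | def _example_int8_to_complex64():
73 |     re_im = np.array([[[1, 2], [3, 4]]], dtype=np.int8)   # (re, im) pairs
74 |     z = re_im.astype('float32').view('complex64')
75 |     assert z[0, 0, 0] == 1 + 2j and z[0, 1, 0] == 3 + 4j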
--------------------------------------------------------------------------------
/tests/test_h52fil.py:
--------------------------------------------------------------------------------
1 | """
2 | # test_h52fil
3 | """
4 |
5 | import pytest
6 |
7 | import os
8 | import blimpy as bl
9 | from tests.data import voyager_h5
10 |
11 |
12 | VOYA_DIR = os.path.dirname(voyager_h5) + "/"
13 |
14 |
15 | def name_case(in_string, out_string):
16 | infile = in_string
17 | outfile = out_string
18 | os.system("cp " + voyager_h5 + " " + infile)
19 | bl.h52fil.make_fil_file(infile)
20 | if not os.path.exists(outfile):
21 | print("\n*** name_case: file {} does not exist. Input file {}\n".format(outfile, infile))
22 | assert False
23 | os.remove(infile)
24 | os.remove(outfile)
25 |
26 | def test_h52fil_conversion():
27 |     """ Tests the conversion of h5 files into fil in both light and heavy modes.
28 | """
29 |
30 | # Creating test file.
31 | bl.h52fil.make_fil_file(voyager_h5, new_filename='test.fil')
32 |
33 | # Creating a "large" test file.
34 | bl.h52fil.make_fil_file(voyager_h5, new_filename='test_large.fil', max_load=0.001)
35 |
36 | # Testing filename
37 | bl.h52fil.make_fil_file(voyager_h5, new_filename='test')
38 |
39 | # Deleting test file
40 | os.remove('test.fil')
41 | os.remove('test_large.fil')
42 |
43 | def test_help():
44 | """
45 | The user of these tests should verify that the help statement
46 | was printed as expected; this test merely verifies that the
47 | system exited.
48 | """
49 | with pytest.raises(SystemExit):
50 | bl.h52fil.cmd_tool(['-h'])
51 |
52 | def test_cmd_tool():
53 | """
54 | This is the same Voyager test file, but now through the cmd tool.
55 | """
56 | bl.h52fil.cmd_tool([voyager_h5, '-n', VOYA_DIR + 'cmd.fil', '-l', '.001'])
57 |
58 | def test_no_args():
59 | """
60 |     The cmd tool must exit when no file name is supplied.
61 | """
62 | with pytest.raises(SystemExit):
63 | bl.h52fil.cmd_tool(['-n', VOYA_DIR + 'cmd.fil', '-l', '.001'])
64 |
65 | def test_h52fil_input_names():
66 | """ Make sure that the output name does not get mangled.
67 | """
68 | name_case("abcd.filter.def.h5", "abcd.filter.def.fil")
69 | name_case("abcd.efgh", "abcd.efgh.fil")
70 | name_case("abcd", "abcd.fil")
71 |
72 | if __name__ == "__main__":
73 | test_h52fil_conversion()
74 | test_help()
75 | test_cmd_tool()
76 | test_no_args()
77 |     test_h52fil_input_names()
78 |
79 |
--------------------------------------------------------------------------------
/tests/test_h5diag.py:
--------------------------------------------------------------------------------
1 | from os.path import dirname
2 | import numpy as np
3 | import hdf5plugin
4 | import h5py
5 | from blimpy.h5diag import cmd_tool
6 | from tests.data import voyager_h5, voyager_fil
7 | import pytest
8 |
9 |
10 | header = [
11 | ["fruit", "apple"],
12 | ["color", "red"],
13 | ["plant", "tree"]
14 | ]
15 | DIR = dirname(voyager_fil)
16 | TEST_H5 = DIR + "/test.h5"
17 | TIME_INSTANCES = 8
18 | FREQ_INSTANCES = 16
19 | DATA_BYTESIZE = TIME_INSTANCES * FREQ_INSTANCES * 4
20 |
21 |
22 | def my_writer(my_class):
23 | data_out = np.ndarray(shape=(TIME_INSTANCES, 1, FREQ_INSTANCES), dtype=float)
24 | for ii in range(TIME_INSTANCES):
25 | for jj in range(FREQ_INSTANCES):
26 | data_out[ii, 0, jj] = 42.0
27 | print("data_out shape:", data_out.shape)
28 |
29 | with h5py.File(TEST_H5, "w") as h5:
30 | h5.attrs["CLASS"] = my_class
31 | h5.attrs["VERSION"] = "1.0"
32 |
33 | bs_compression = hdf5plugin.Bitshuffle(nelems=0, lz4=True)["compression"]
34 | bs_compression_opts = hdf5plugin.Bitshuffle(nelems=0, lz4=True)["compression_opts"]
35 |
36 | dset = h5.create_dataset("data",
37 | data=data_out,
38 | compression=bs_compression,
39 | compression_opts=bs_compression_opts)
40 |
41 | dset_mask = h5.create_dataset("mask",
42 | shape=data_out.shape,
43 | compression=bs_compression,
44 | compression_opts=bs_compression_opts,
45 | dtype="uint8")
46 |
47 | dset.dims[2].label = b"frequency"
48 | dset.dims[1].label = b"feed_id"
49 | dset.dims[0].label = b"time"
50 |
51 | dset_mask.dims[2].label = b"frequency"
52 | dset_mask.dims[1].label = b"feed_id"
53 | dset_mask.dims[0].label = b"time"
54 |
55 | # Copy over header information as attributes
56 | for key, value in header:
57 | dset.attrs[key] = value
58 |
59 |
60 | def execute_command(args):
61 | print("\ntest_h5diag: args:", args)
62 | cmd_tool(args)
63 |
64 |
65 | def test_h5diag():
66 |
67 | args = [voyager_h5]
68 | execute_command(args)
69 |
70 | with pytest.raises(SystemExit):
71 | args = [voyager_fil]
72 | execute_command(args)
73 |
74 | my_writer("FRUITY")
75 | with pytest.raises(SystemExit):
76 | args = [TEST_H5]
77 | execute_command(args)
78 |
79 | with h5py.File(TEST_H5, "w") as h5:
80 | h5.attrs["VERSION"] = "42.0"
81 | with pytest.raises(SystemExit):
82 | args = [TEST_H5]
83 | execute_command(args)
84 |
85 | with h5py.File(TEST_H5, "w") as h5:
86 | h5.attrs["CLASS"] = "FILTERBANK"
87 | with pytest.raises(SystemExit):
88 | args = [TEST_H5]
89 | execute_command(args)
90 |
91 | with h5py.File(TEST_H5, "w") as h5:
92 | h5.attrs["CLASS"] = "FILTERBANK"
93 | h5.attrs["VERSION"] = "42.0"
94 | with pytest.raises(SystemExit):
95 | args = [TEST_H5]
96 | execute_command(args)
97 |
98 | my_writer("FILTERBANK")
99 | args = [TEST_H5]
100 | execute_command(args)
101 |
102 |
103 | if __name__ == "__main__":
104 | test_h5diag()
105 |
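106 | # A minimal sketch of what the cases above imply h5diag requires of a valid
107 | # file: a CLASS attribute of "FILTERBANK", a VERSION attribute, and
108 | # (apparently) a "data" dataset.  NOT h5diag's actual code;
109 | # _example_looks_like_filterbank is a hypothetical helper.
110 | def _example_looks_like_filterbank(path):
111 |     with h5py.File(path, "r") as h5:
112 |         return (h5.attrs.get("CLASS") == "FILTERBANK"
113 |                 and "VERSION" in h5.attrs
114 |                 and "data" in h5)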
--------------------------------------------------------------------------------
/tests/test_h5py.py:
--------------------------------------------------------------------------------
1 | import h5py
2 | import hdf5plugin
3 | import numpy
4 | import tempfile
5 |
6 |
7 | def test_is_h5py_correctly_installed():
8 | """
9 | If this test fails you probably need to install h5py from source manually:
10 |
11 | $ pip install --no-binary=h5py h5py
12 | """
13 | f = h5py.File(tempfile.gettempdir() + '/h5testfile', "w")
14 | block_size = 0
15 | dataset = f.create_dataset(
16 | "data",
17 | (100, 100, 100),
18 | dtype='float32',
19 | **hdf5plugin.Bitshuffle(nelems=0, lz4=True)
20 | )
21 |
22 | array = numpy.random.rand(100, 100, 100)
23 | array = array.astype('float32')
24 | dataset[:] = array
25 | f.close()
26 |
27 |
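28 | # A natural follow-up check (illustrative, assuming only the h5py and
29 | # hdf5plugin calls already used above): confirm that Bitshuffle-compressed
30 | # data round-trips intact.  _example_roundtrip is a hypothetical helper.
31 | def _example_roundtrip():
32 |     path = tempfile.gettempdir() + '/h5roundtrip'
33 |     array = numpy.random.rand(10, 10).astype('float32')
34 |     with h5py.File(path, "w") as hf:
35 |         hf.create_dataset("data", data=array,
36 |                           **hdf5plugin.Bitshuffle(nelems=0, lz4=True))
37 |     with h5py.File(path, "r") as hf:
38 |         assert numpy.array_equal(hf["data"][:], array)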
--------------------------------------------------------------------------------
/tests/test_heavy.py:
--------------------------------------------------------------------------------
1 | """
2 | # test_heavy.py
3 |
4 | """
5 |
6 | import blimpy as bl
7 | from tests.data import voyager_fil, voyager_h5
8 |
9 |
10 | def test_max_data_array_size():
11 | fw = bl.Waterfall(voyager_fil, max_load=0.001)
12 | fw = bl.Waterfall(voyager_h5, max_load=0.001)
13 |
14 | if __name__ == "__main__":
15 | test_max_data_array_size()
16 |
--------------------------------------------------------------------------------
/tests/test_observatory.py:
--------------------------------------------------------------------------------
1 | r"""
2 | test_observatory.py
3 | """
4 |
5 | from blimpy.ephemeris import Observatory
6 |
7 |
8 | def error_msg(arg_string):
9 | r""" Just making clearer error messages """
10 | return "test_observatory.py: " + arg_string
11 |
12 |
13 | def test_observatory_construction():
14 | r""" Constructor test """
15 | print("\n===== Begin test_observatory_construction")
16 | obs = Observatory()
17 | assert obs.get_telescope_name() == "Fake", error_msg("Wrong name for the fake observatory")
18 | obs = Observatory(telescope_id=4)
19 | assert obs.get_telescope_name() == "PARKES", error_msg("Wrong name for the Parkes observatory")
20 | assert obs.get_telescope_name_short() == "PK", \
21 | error_msg("Wrong short name for the Parkes observatory")
22 | obs = Observatory(telescope_name="GBT")
23 | assert obs.get_telescope_id() == 6, error_msg("Wrong Sigproc ID for the GBT observatory")
24 | print("===== End test_observatory_construction")
25 |
26 |
27 | def test_observatory_values():
28 |     r""" Observatory values test, including the beam halfwidth calculation """
29 | print("\n===== Begin test_observatory_values")
30 | obs = Observatory(telescope_id=0)
31 | print(obs.get_string())
32 | assert obs.get_telescope_name() == 'Fake', error_msg("Incorrect telescope name")
33 | assert obs.get_xyz_coords() == [0.0, 0.0, 0.0], error_msg("Incorrect XYZ coords")
34 |
35 | gbt = Observatory(telescope_id=6)
36 | beam_halfwidth = gbt.calc_beam_halfwidth(100)
37 |     assert abs(beam_halfwidth - 3710.19799582) < .0000001, \
38 |         error_msg("Incorrect beam halfwidth calculation")
39 | print("===== End test_observatory_values")
40 |
41 |
42 | def test_observatory_procs():
43 | r""" Try the member functions with Parkes """
44 | print("\n===== Begin test_observatory_procs")
45 | obs = Observatory(telescope_id=4)
46 | print(obs.get_string())
47 | print(obs.get_telescope_name())
48 | print(obs.get_telescope_name_short())
49 | print(obs.get_telescope_id())
50 | print(obs.get_xyz_coords())
51 | print(obs.get_dish_diameter())
52 | print(obs.get_string())
53 | print("===== End test_observatory_procs")
54 |
55 |
56 | if __name__ == "__main__":
57 | test_observatory_construction()
58 | test_observatory_values()
59 | test_observatory_procs()
60 |
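61 | # Back-of-envelope cross-check only, assuming the usual diffraction-limited
62 | # beam, halfwidth ~ 0.5 * 1.2 * (lambda / D), with the GBT's 100 m dish:
63 | # at 100 MHz this gives ~3710 arcsec, matching the value asserted above.
64 | # _example_beam_halfwidth_arcsec is a hypothetical helper, not blimpy's code.
65 | def _example_beam_halfwidth_arcsec(freq_mhz, dish_m=100.0):
66 |     c = 2.99792458e8                       # speed of light, m/s
67 |     lam = c / (freq_mhz * 1e6)             # wavelength, m
68 |     return 0.5 * 1.2 * lam / dish_m * 206264.806   # radians -> arcsec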
--------------------------------------------------------------------------------
/tests/test_plotting.py:
--------------------------------------------------------------------------------
1 | import os
2 | import blimpy as bl
3 | import numpy as np
4 | from pprint import pprint
5 | import pylab as plt
6 |
7 | from tests.data import voyager_fil, voyager_h5
8 | from blimpy.plotting import plot_waterfall, plot_spectrum, plot_spectrum_min_max, \
9 | plot_kurtosis, plot_time_series, plot_all
10 |
11 | TEST_DATA_DIR = os.path.dirname(voyager_h5)
12 |
13 | def test_plot_waterfall():
14 | """ Load Voyager dataset and test plotting """
15 |
16 | a = bl.Waterfall(voyager_h5)
17 |
18 | plt.figure("TEST PLOTTING", figsize=(10, 8))
19 | plt.subplot(3, 2, 1)
20 | plot_waterfall(a)
21 |
22 | plt.subplot(3, 2, 2)
23 | plot_spectrum(a)
24 |
25 | plt.subplot(3, 2, 3)
26 | plot_spectrum_min_max(a)
27 |
28 | plt.subplot(3, 2, 4)
29 | plot_kurtosis(a)
30 |
31 | plt.subplot(3, 2, 5)
32 | plot_time_series(a)
33 |
34 | plt.tight_layout()
35 | plt.savefig(TEST_DATA_DIR + "/test_plotting.png")
36 |
37 | plt.figure("TEST PLOT_ALL", figsize=(10, 8))
38 | plot_all(a)
39 | plt.savefig(TEST_DATA_DIR + "/test_plotting_plot_all.png")
40 |
41 |
42 | def test_plot_waterfall_classmethod():
43 | """ Load Voyager dataset and test plotting """
44 |
45 | a = bl.Waterfall(voyager_h5)
46 |
47 | plt.figure("TEST PLOTTING CLASS", figsize=(10, 8))
48 | plt.subplot(3, 2, 1)
49 | a.plot_waterfall()
50 |
51 | plt.subplot(3, 2, 2)
52 | a.plot_spectrum()
53 |
54 | plt.subplot(3, 2, 3)
55 | a.plot_spectrum_min_max()
56 |
57 | plt.subplot(3, 2, 4)
58 | a.plot_kurtosis()
59 |
60 | plt.subplot(3, 2, 5)
61 | a.plot_time_series()
62 | plt.tight_layout()
63 |
64 | plt.savefig(TEST_DATA_DIR + "/test_plotting_classmethod.png")
65 |
66 | plt.figure("TEST PLOT_ALL CLASS", figsize=(10, 8))
67 | a.plot_all()
68 | plt.savefig(TEST_DATA_DIR + "/test_plotting_plot_all_classmethod.png")
69 |
70 |
71 | if __name__ == "__main__":
72 | test_plot_waterfall()
73 | test_plot_waterfall_classmethod()
74 |
75 |
--------------------------------------------------------------------------------
/tests/test_setup.py:
--------------------------------------------------------------------------------
1 | r""" Exercise the setup.py check command """
2 |
3 |
4 | def test_setup():
5 | import os
6 | cmd = "python3 setup.py check"
7 |     assert os.system(cmd) == 0
--------------------------------------------------------------------------------
/tests/test_sigproc.py:
--------------------------------------------------------------------------------
1 | from blimpy.io import sigproc
2 | import blimpy as bl
3 | from tests.data import voyager_fil, voyager_h5
4 | import numpy as np
5 | import os
6 |
7 |
8 | def test_sigproc_is_fil():
9 | """ Check that the is_fil function works """
10 |
11 | assert sigproc.is_filterbank(voyager_h5) is False
12 | assert sigproc.is_filterbank(voyager_fil) is True
13 |
14 |
15 | def test_sigproc_generate_headers():
16 | """ Test if you can generate headers OK from files """
17 | a = bl.Waterfall(voyager_h5)
18 | b = bl.Waterfall(voyager_fil)
19 | sigproc.generate_sigproc_header(a)
20 | sigproc.generate_sigproc_header(b)
21 |
22 | def test_fil_write():
23 | try:
24 | a = bl.Waterfall(voyager_h5)
25 | b = bl.Waterfall(voyager_fil)
26 |
27 | a.write_to_fil('test.fil')
28 | b.write_to_fil('test2.fil')
29 |
30 | c = bl.Waterfall('test.fil')
31 | d = bl.Waterfall('test2.fil')
32 |
33 | for key in a.header.keys():
34 | if key != 'DIMENSION_LABELS':
35 | assert a.header[key] == c.header[key]
36 | assert key in c.header.keys()
37 | assert a.header[key] == d.header[key]
38 | assert key in d.header.keys()
39 |
40 | assert np.allclose(a.data, c.data)
41 | assert np.allclose(a.data, d.data)
42 | except AssertionError:
43 | print(key, a.header[key], b.header[key], c.header[key], d.header[key])
44 | raise
45 |
46 | finally:
47 | os.remove('test.fil')
48 | os.remove('test2.fil')
49 |
50 | if __name__ == "__main__":
51 | test_sigproc_is_fil()
52 | test_sigproc_generate_headers()
53 | test_fil_write()
54 |
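55 | # A rough standalone sniff of the SIGPROC format (illustrative only; blimpy's
56 | # is_filterbank is more thorough): filterbank files open with a
57 | # length-prefixed "HEADER_START" keyword.  _example_sniff_fil is a
58 | # hypothetical helper, not part of blimpy.
59 | def _example_sniff_fil(path):
60 |     import struct
61 |     with open(path, "rb") as fh:
62 |         (keylen,) = struct.unpack("<i", fh.read(4))
63 |         return 0 < keylen < 64 and fh.read(keylen) == b"HEADER_START"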
--------------------------------------------------------------------------------
/tests/test_stax.py:
--------------------------------------------------------------------------------
1 | from os.path import dirname
2 | import pytest
3 | from blimpy.stax import cmd_tool
4 | from tests.data import voyager_fil, voyager_h5
5 |
6 |
7 | def test_stax():
8 |     plot_dir = dirname(voyager_h5)
9 |     args = [voyager_fil, voyager_h5, "--plot_dir", plot_dir]
10 |     cmd_tool(args)
11 |     args = [voyager_fil, voyager_h5, "--plot_dir", plot_dir, "--f_start", "8419", "--f_stop", "8420"]
12 | cmd_tool(args)
13 |
14 |
15 | if __name__ == "__main__":
16 | test_stax()
17 |
--------------------------------------------------------------------------------
/tests/test_stix.py:
--------------------------------------------------------------------------------
1 | from os.path import dirname
2 | import pytest
3 | from blimpy.stix import cmd_tool
4 | from tests.data import voyager_fil
5 |
6 |
7 | PLOT_DIR = dirname(voyager_fil)
8 |
9 |
10 | def execute_command(args):
11 | print("\ntest_stix: args:", args)
12 | cmd_tool(args)
13 |
14 |
15 | def test_stix():
16 |
17 | args = [voyager_fil, "16", "--plot_dir", PLOT_DIR]
18 | execute_command(args)
19 |
20 | args = [voyager_fil, "4", "--plot_dir", PLOT_DIR, "-s", "v"]
21 | execute_command(args)
22 |
23 | args = [voyager_fil, "4", "-p", PLOT_DIR, "--stitch", "h"]
24 | execute_command(args)
25 |
26 | args = [voyager_fil, "4", "--plot_dir", PLOT_DIR, "--stitch", "n",
27 | "--dpi", "100", "--width", "8", "--height", "6"]
28 | execute_command(args)
29 |
30 | args = [voyager_fil, "4", "-p", PLOT_DIR, "-s", "n", "-d", "100", "-w", "8", "-t", "6"]
31 | execute_command(args)
32 |
33 | with pytest.raises(SystemExit):
34 | args = []
35 | execute_command(args)
36 |
37 | with pytest.raises(SystemExit):
38 | args = [voyager_fil, "0", "-p", PLOT_DIR, "-s", "n", "-d", "50", "-w", "8", "-t", "6"]
39 | execute_command(args)
40 |
41 | with pytest.raises(SystemExit):
42 | args = [voyager_fil, "4", "-p", PLOT_DIR, "-s", "n", "-d", "42", "-w", "8", "-t", "6"]
43 | execute_command(args)
44 |
45 | with pytest.raises(SystemExit):
46 | args = [voyager_fil, "4", "-p", PLOT_DIR, "-s", "n", "-d", "50", "-w", "5.9", "-t", "6"]
47 | execute_command(args)
48 |
49 | with pytest.raises(SystemExit):
50 | args = [voyager_fil, "4", "-p", PLOT_DIR, "-s", "n", "-d", "50", "-w", "8", "-t", "4.9"]
51 | execute_command(args)
52 |
53 |
54 | if __name__ == "__main__":
55 | test_stix()
56 |
--------------------------------------------------------------------------------
/tests/test_unpack.py:
--------------------------------------------------------------------------------
1 | from blimpy.utils import unpack_1to8, unpack_2to8, unpack_4to8, unpack
2 | import numpy as np
3 | import pytest
4 |
5 | def test_1to8():
6 | a = np.array([0b01010101, 0b10101010], dtype=np.uint8)
7 | b = np.array([0,1,0,1,0,1,0,1,1,0,1,0,1,0,1,0])
8 | c = unpack_1to8(a)
9 | assert np.allclose(b, c)
10 | print(b)
11 | print(c)
12 |
13 | def test_2to8():
14 |     # Pack the 2-bit values [00, 01, 10, 11, 11, 10, 01, 00] into two bytes:
15 |     # [0b00011011, 0b11100100].  unpack_2to8 maps each 2-bit value onto the
16 |     # symmetric int8 levels 40, 12, -12, -40 (a 2-bit voltage lookup).
17 |
18 | a = np.array([0b00011011, 0b11100100], dtype=np.uint8)
19 | b = np.array([40, 12, -12, -40, -40, -12, 12, 40], dtype=np.int8)
20 |
21 | c = unpack_2to8(a)
22 |
23 | assert np.allclose(b, c)
24 |
25 | def test_4to8():
26 |     # Pack the 4-bit values [0, 1, 2, 3] into two bytes:
27 |     # nibbles [0000, 0001] -> 0b00000001 and [0010, 0011] -> 0b00100011.
28 |     # unpack_4to8 should recover [0, 1, 2, 3] unchanged.
29 |
30 | # Test 4-bit unpack
31 | a = np.array([0b00000001, 0b00100011], dtype=np.uint8)
32 | b = np.array([0, 1, 2, 3], dtype=np.uint8)
33 | c = unpack_4to8(a)
34 | assert np.allclose(b, c)
35 |
36 | def test_unpack():
37 |
38 | # Test 2-bit unpack
39 | a = np.array([0b00011011, 0b11100100], dtype=np.uint8)
40 | b = np.array([40, 12, -12, -40, -40, -12, 12, 40], dtype=np.int8)
41 | c = unpack(a, 2)
42 | assert np.allclose(b, c)
43 |
44 | # Catch exceptions
45 | with pytest.raises(ValueError):
46 | unpack(a, 16) # nbit <= 8 is reqd
47 | with pytest.raises(ValueError):
48 | unpack(a, 3) # nbit must divide 8 (1,2,4 or 8)
49 | z = np.array([1,2,3], dtype='float32')
50 | with pytest.raises(TypeError):
51 | unpack(z, 2) # input data must be 8-bit
52 |
53 | # Test 4-bit unpack
54 | a = np.array([0b00000001, 0b00100011], dtype=np.uint8)
55 | b = np.array([0, 1, 2, 3], dtype=np.uint8)
56 | c = unpack(a, 4)
57 | print(b)
58 | print(c)
59 | assert np.allclose(b, c)
60 |
61 | # Test 1-bit unpack
62 | a = np.array([0b01010101, 0b10101010], dtype=np.uint8)
63 | b = np.array([0,1,0,1,0,1,0,1,1,0,1,0,1,0,1,0])
64 | c = unpack(a, 1)
65 | print(b)
66 | print(c)
67 | assert np.allclose(b, c)
68 |
69 | # Test 8-bit!
70 | c = unpack(a, 8)
71 | assert np.allclose(a, c)
72 |
73 | if __name__ == "__main__":
74 | test_1to8()
75 | test_2to8()
76 | test_4to8()
77 | test_unpack()
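78 |
79 | # For the 1-bit case, numpy's own unpackbits produces the same MSB-first
80 | # expansion, which makes a handy independent cross-check.
81 | # _example_crosscheck_1bit is a hypothetical helper for illustration only.
82 | def _example_crosscheck_1bit():
83 |     a = np.array([0b01010101, 0b10101010], dtype=np.uint8)
84 |     assert np.array_equal(np.unpackbits(a), unpack_1to8(a))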
--------------------------------------------------------------------------------
/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | from blimpy import utils
2 | import numpy as np
3 | import pytest
4 |
5 |
6 | def test_utils():
7 | assert utils.db(100) == 20.0
8 | assert utils.lin(20) == 100.0
9 | assert utils.closest(np.array([0,1,2,3,4,5]), 2.2) == 2
10 |
11 | def test_rebin():
12 | # 1D
13 | a = np.array([1, 1, 1, 1])
14 | aR = utils.rebin(a, 2)
15 | assert np.allclose(aR, np.array([1, 1]))
16 |
17 | # 2D
18 | b = np.array([[1,1,1,1], [2,2,2,2]])
19 | bR = utils.rebin(b, 1, 2)
20 | assert np.allclose(bR, [[1,1], [2,2]])
21 | bR = utils.rebin(b, None, 2)
22 | assert np.allclose(bR, [[1,1], [2,2]])
23 | bR = utils.rebin(b, 2, 1)
24 | assert np.allclose(bR, [1.5, 1.5, 1.5, 1.5])
25 | bR = utils.rebin(b, 2, None)
26 | assert np.allclose(bR, [1.5, 1.5, 1.5, 1.5])
27 |
28 | c = np.zeros([10, 10, 10])
29 | cR = utils.rebin(c, n_z=2)
30 | assert cR.shape == (10, 10, 5)
31 | cR = utils.rebin(c, n_y=2)
32 | assert cR.shape == (10, 5, 10)
33 | cR = utils.rebin(c, n_x=2)
34 | assert cR.shape == (5, 10, 10)
35 |
36 | c = np.zeros([10, 10, 10, 10])
37 | with pytest.raises(RuntimeError):
38 | utils.rebin(c, 2, 2)
39 |
40 |
41 | if __name__ == "__main__":
42 | test_utils()
43 | test_rebin()
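44 |
45 | # The db/lin pair asserted above follows the usual power-decibel relation,
46 | # db(x) = 10 * log10(x) and lin(x) = 10 ** (x / 10), so the two functions
47 | # invert each other.  _example_db_roundtrip is a hypothetical helper for
48 | # illustration only.
49 | def _example_db_roundtrip(x=100.0):
50 |     return np.isclose(utils.lin(utils.db(x)), x)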
--------------------------------------------------------------------------------
/tests/test_voyager_data_load.py:
--------------------------------------------------------------------------------
1 | """
2 | # test_voyager_data_load.py
3 |
4 | The hard-coded numbers in these tests can be found in the voyager_test_setup.ipynb
5 |
6 | """
7 |
8 | import blimpy as bl
9 | import numpy as np
10 | import pylab as plt
11 | from tests.data import voyager_fil, voyager_h5
12 |
13 |
14 | def test_waterfall_data_load_range_freq():
15 | fw = bl.Waterfall(voyager_fil, f_start=8419.24, f_stop=8419.35)
16 | hw = bl.Waterfall(voyager_h5, f_start=8419.24, f_stop=8419.35)
17 |
18 | print(fw.data.shape)
19 | print(hw.data.shape)
20 | print(hw.data[0].max(), hw.data[0].argmax())
21 | print(fw.data[0].max(), fw.data[0].argmax())
22 | print(hw.data[-1].max(), hw.data[-1].argmax())
23 | print(fw.data[-1].max(), fw.data[-1].argmax())
24 |
25 | # Assert data is loaded to the same shape and has same values
26 | assert hw.data.shape == fw.data.shape == (16, 1, 39370)
27 | assert np.allclose(hw.data, fw.data)
28 |
29 | # Check the Voyager carrier has the known amplitudes at first and last integration
30 |     assert np.isclose(hw.data[0].max(), 3.09333e+11, rtol=1e-05) and np.isclose(fw.data[0].max(), 3.09333e+11, rtol=1e-05)
31 |     assert np.isclose(hw.data[-1].max(), 2.74257e+11, rtol=1e-05) and np.isclose(fw.data[-1].max(), 2.74257e+11, rtol=1e-05)
32 |
33 | # Check the tone is in the same bin for both
34 | assert hw.data[0].argmax() == fw.data[0].argmax() == 18959
35 | assert hw.data[-1].argmax() == fw.data[-1].argmax() == 18996
36 |
37 | # And plot
38 | plt.figure("VOYAGER DATA LOAD")
39 | plt.subplot(2,1,1)
40 | fw.plot_spectrum()
41 |
42 | plt.subplot(2,1,2)
43 | hw.plot_spectrum()
44 | plt.tight_layout()
45 | #plt.clf()
46 |
47 | def test_grab_data_works_across_all_fil_h5():
48 |
49 | fw = bl.Waterfall(voyager_fil)
50 | hw = bl.Waterfall(voyager_h5)
51 | all_readers = [fw, hw]
52 |
53 | for ii, rr in enumerate(all_readers):
54 | f, d = rr.grab_data(f_start=8419.29, f_stop=8419.30)
55 | print(f.shape, d.shape)
56 | assert f.shape == (3580,)
57 | assert d.shape == (16, 3580)
58 |
59 | for ii, rr in enumerate(all_readers):
60 | f, d = rr.grab_data(f_start=8419.29685, f_stop=8419.2971)
61 | print(f.shape, d.shape)
62 | assert f.shape == (91,)
63 | assert d.shape == (16, 91)
64 |
65 | if __name__ == "__main__":
66 | test_waterfall_data_load_range_freq()
67 | test_grab_data_works_across_all_fil_h5()
68 |
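69 | # The channel counts asserted above follow from the file's fine-channel
70 | # width: at the ~2.7939677e-6 MHz resolution typical of these Voyager test
71 | # files (cf. the df used in test_dedoppler), a 0.01 MHz window spans about
72 | # 0.01 / 2.794e-6 ~ 3579 channels -- matching the asserted 3580 to within
73 | # bin-edge inclusion.  _example_n_channels is an illustrative estimate only.
74 | def _example_n_channels(f_start_mhz, f_stop_mhz, foff_mhz=2.7939677238464355e-6):
75 |     return abs(f_stop_mhz - f_start_mhz) / foff_mhz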
--------------------------------------------------------------------------------
/tests/test_waterfall.py:
--------------------------------------------------------------------------------
1 | import os
2 | import numpy as np
3 | from tests.data import voyager_h5, voyager_fil
4 | import blimpy as bl
5 | from blimpy.waterfall import cmd_tool
6 |
7 | import pytest
8 |
9 | OUTDIR = os.path.dirname(voyager_h5) + "/"
10 |
11 | def test_info():
12 | print("\n===== test_info")
13 | a = bl.Waterfall(voyager_h5)
14 | print(a)
15 | a.info()
16 | a.blank_dc(n_coarse_chan=1)
17 | a.calibrate_band_pass_N1()
18 |     # Below: functions not called anywhere else in the suite
19 |     # (somewhat surprising); exercised here for coverage.
20 | a.grab_data()
21 | a.read_data()
22 |     # Note: even these internal helpers are not called anywhere else.
23 | a._get_chunk_dimensions()
24 |     # plenty of if branches remain unexercised
25 | a._get_blob_dimensions((300, 300, 300, 300))
26 | a._update_header()
27 | del a
28 |
29 | def test_get_freqs():
30 | print("\n===== test_get_freqs")
31 | wf = bl.Waterfall(voyager_h5)
32 | freqs = wf.container.populate_freqs()
33 | sum1 = np.sum(freqs)
34 | freqs = wf.get_freqs()
35 | sum2 = np.sum(freqs)
36 | assert sum1 == sum2
37 | wf = bl.Waterfall(voyager_fil)
38 | freqs = wf.container.populate_freqs()
39 | sum1 = np.sum(freqs)
40 | freqs = wf.get_freqs()
41 | sum2 = np.sum(freqs)
42 | assert sum1 == sum2
43 | len_f = len(freqs)
44 | first = wf.header["fch1"]
45 | last_1 = freqs[-1]
46 | last_2 = first + (len_f - 1 ) * wf.header["foff"]
47 | assert np.isclose(last_1, last_2, rtol=0.0001)
48 |
49 | def test_cmdline():
50 | print("\n===== test_cmdline")
51 |
52 | args = [voyager_h5, '-S', '-p', 'w', '-s', OUTDIR + 'test.png']
53 | cmd_tool(args)
54 |
55 | args = [voyager_h5, '-S', '-p', 's', '-s', OUTDIR + 'test.png']
56 | cmd_tool(args)
57 |
58 | args = [voyager_h5, '-S', '-p', 'mm', '-s', OUTDIR + 'test.png']
59 | cmd_tool(args)
60 |
61 | args = [voyager_h5, '-S', '-p', 'k', '-s', OUTDIR + 'test.png']
62 | cmd_tool(args)
63 |
64 | args = [voyager_h5, '-S', '-p', 't', '-s', OUTDIR + 'test.png']
65 | cmd_tool(args)
66 |
67 | args = [voyager_h5, '-S', '-p', 'a', '-s', OUTDIR + 'test.png']
68 | cmd_tool(args)
69 |
70 | args = [voyager_h5, '-S', '-p', 'ank', '-s', OUTDIR + 'test.png']
71 | cmd_tool(args)
72 |
73 | args = [voyager_h5, '-S', '-p', 'ank', '-s', OUTDIR + 'test.png']
74 | cmd_tool(args)
75 |
76 | # Blank DC to .h5
77 |
78 | args = [voyager_h5, '-D', '-H', '-o', OUTDIR + 'test.h5']
79 | cmd_tool(args)
80 |
81 | # Blank DC to .fil
82 |
83 | args = [voyager_h5, '-D', '-F', '-o', OUTDIR + 'test.fil']
84 | cmd_tool(args)
85 |
86 | # info with foff negative
87 |
88 | args = [voyager_h5, '-i']
89 | cmd_tool(args)
90 |
91 | if os.path.exists(OUTDIR + 'test.h5'):
92 | os.remove(OUTDIR + 'test.h5')
93 | args = [voyager_h5, '-H', '-o', OUTDIR + 'test.h5']
94 | cmd_tool(args)
95 | assert os.path.exists(OUTDIR + 'test.h5')
96 | os.remove(OUTDIR + 'test.h5')
97 |
98 | if os.path.exists(OUTDIR + 'test.fil'):
99 | os.remove(OUTDIR + 'test.fil')
100 | args = [voyager_h5, '-F', '-o', OUTDIR + 'test.fil']
101 | cmd_tool(args)
102 | assert os.path.exists(OUTDIR + 'test.fil')
103 | os.remove(OUTDIR + 'test.fil')
104 |
105 | def test_cmd_arguments():
106 | print("\n===== test_cmd_arguments")
107 | args = [voyager_h5, '-H', '-F', '-o', OUTDIR + 'test.fil']
108 | with pytest.raises(ValueError):
109 | cmd_tool(args)
110 |
111 | def test_neg_blank_dc():
112 | print("\n===== test_neg_blank_dc")
113 | wf = bl.Waterfall(voyager_h5)
114 | wf.blank_dc(0)
115 | wf.blank_dc(1.1)
116 | del wf
117 |
118 | def test_get_chunk_dimensions():
119 | print("\n===== test_get_chunk_dimensions")
120 | wf = bl.Waterfall(voyager_h5)
121 |
122 | wf.header['foff'] = 0.99e-5
123 | assert wf._get_chunk_dimensions() == (1, 1, 1048576)
124 | wf.header['foff'] = 1.1e-5
125 |
126 | wf.header['tsamp'] = 0.99e-3
127 | assert wf._get_chunk_dimensions() == (2048, 1, 512)
128 | wf.header['tsamp'] = 1.1e-3
129 |
130 | wf.header['foff'] = 0.99e-2
131 | assert wf._get_chunk_dimensions() == (10, 1, 65536)
132 |
133 | wf.header['foff'] = 1e-1
134 | assert wf._get_chunk_dimensions() == (1, 1, 512)
135 |
136 | del wf
137 |
138 | def test_neg_info_foff():
139 | print("\n===== test_neg_info_foff")
140 | wf = bl.Waterfall(voyager_h5)
141 | wf.header['foff'] = -1
142 | wf.info()
143 | del wf
144 |
145 | def test_bug_no_filename():
146 | print("\n===== test_bug_no_filename")
147 | with pytest.raises(ValueError):
148 | bl.Waterfall()
149 |
150 |
--------------------------------------------------------------------------------
/tests/test_waterfall2.py:
--------------------------------------------------------------------------------
1 | import os
2 | import numpy as np
3 | from astropy.coordinates import Angle
4 | import matplotlib.pyplot as plt
5 | import blimpy as bl
6 | from blimpy.plotting import plot_waterfall, plot_spectrum_min_max
7 | from tests.data import voyager_h5, test_h5, test_fil
8 |
9 |
10 | TEST_DATA_DIR = os.path.dirname(voyager_h5)
11 | RTOL = 1e-05
12 |
13 |
14 | def compare_hdr_fields(hdr1, hdr2, fieldname):
15 | if isinstance(hdr1[fieldname], float):
16 | if np.isclose(hdr1[fieldname], hdr2[fieldname], rtol=RTOL):
17 | return 0
18 | else: # not a float: int or str
19 | if hdr1[fieldname] == hdr2[fieldname]:
20 | return 0
21 | print(f"*** compare_hdr_fields: {hdr1[fieldname]} != {hdr2[fieldname]}")
22 | return 1
23 |
24 |
25 | def compare_data_vectors(label, vec1, vec2):
26 | result = np.isclose(vec1, vec2, rtol=RTOL)
27 |     if not np.all(result):
28 | print(f"*** compare_data_vectors: {label}: {vec1} != {vec2}")
29 | return 1
30 | return 0
31 |
32 |
33 | def spot_check_data(data1, data2, n_ints_in_file, n_channels_in_file):
34 | nw1 = data1[0, 0, 0:3]
35 | ne1 = data1[0, 0, -4:-1]
36 | sw1 = data1[n_ints_in_file - 1, 0, 0:3]
37 | se1 = data1[n_ints_in_file - 1, 0, -4:-1]
38 | centre_row = n_ints_in_file // 2
39 | centre_col = n_channels_in_file // 2
40 | bullseye1 = data1[centre_row, 0, centre_col - 1 : centre_col + 2]
41 |
42 | nw2 = data2[0, 0, 0:3]
43 | ne2 = data2[0, 0, -4:-1]
44 | sw2 = data2[n_ints_in_file - 1, 0, 0:3]
45 | se2 = data2[n_ints_in_file - 1, 0, -4:-1]
46 | centre_row = n_ints_in_file // 2
47 | centre_col = n_channels_in_file // 2
48 | bullseye2 = data2[centre_row, 0, centre_col - 1 : centre_col + 2]
49 |
50 | n_errors = 0
51 | n_errors += compare_data_vectors("nw", nw1, nw2)
52 | n_errors += compare_data_vectors("ne", ne1, ne2)
53 | n_errors += compare_data_vectors("sw", sw1, sw2)
54 | n_errors += compare_data_vectors("se", se1, se2)
55 | n_errors += compare_data_vectors("bullseye", bullseye1, bullseye2)
56 | return n_errors
57 |
58 |
59 | def compare_headers(hdr1, hdr2):
60 | n_errors = 0
61 | n_errors += compare_hdr_fields(hdr1, hdr2, "fch1")
62 | n_errors += compare_hdr_fields(hdr1, hdr2, "nchans")
63 | n_errors += compare_hdr_fields(hdr1, hdr2, "nifs")
64 | n_errors += compare_hdr_fields(hdr1, hdr2, "nbits")
65 | n_errors += compare_hdr_fields(hdr1, hdr2, "source_name")
66 | n_errors += compare_hdr_fields(hdr1, hdr2, "telescope_id")
67 | n_errors += compare_hdr_fields(hdr1, hdr2, "tsamp")
68 | n_errors += compare_hdr_fields(hdr1, hdr2, "tstart")
69 | n_errors += compare_hdr_fields(hdr1, hdr2, "src_raj")
70 | n_errors += compare_hdr_fields(hdr1, hdr2, "src_dej")
71 | return n_errors
72 |
73 |
74 | def test_waterfall_stream_1():
75 |
76 | print("\n===== test_waterfall_stream_1")
77 |
78 | source_name = "Not_Voyager_1"
79 | src_raj = Angle("17:10:03.984 hours")
80 | src_dej = Angle("12:10:58.8 degrees")
81 | tstart = 57650.78209490741
82 | tsamp = 18.253611008
83 | f_start = 8418.457032646984
84 | f_stop = 8421.386717353016
85 | n_fine_chans = 20
86 | n_tints = 8
87 |
88 | foff = (f_stop - f_start) / float(n_fine_chans)
89 |
90 | header = {"az_start": 0.0, "data_type": 1,
91 | "fch1": f_start, "foff": foff,
92 | "ibeam": 1, "machine_id": 42, "nbeams": 1, "nbits": 32,
93 | "nchans": n_fine_chans, "nifs": 1, "rawdatafile": "nil",
94 | "source_name": source_name, "src_raj": src_raj, "src_dej": src_dej,
95 | "telescope_id": 42,
96 |               "tstart": tstart, "tsamp": tsamp, "za_start": 0.0}
97 |
98 | data_matrix = np.zeros((n_tints, 1, n_fine_chans), dtype=np.float32)
99 |
100 | wf = bl.Waterfall(header_dict=header, data_array=data_matrix)
101 | print("\nwf:", wf)
102 | wf.info()
103 |
104 |
105 | def test_waterfall_stream_2():
106 |
107 | print("\n===== test_waterfall_stream_2")
108 | wf_storage = bl.Waterfall(voyager_h5)
109 | wf_stream = bl.Waterfall(header_dict=wf_storage.header, data_array=wf_storage.data)
110 | assert compare_headers(wf_storage.header, wf_stream.header) == 0
111 | assert spot_check_data(wf_storage.data,
112 | wf_stream.data,
113 | wf_storage.n_ints_in_file,
114 | wf_storage.n_channels_in_file) == 0
115 | wf_stream.info()
116 | plt.figure("Voyager 1", figsize=(10, 3))
117 |     plt.subplot(1, 2, 1) # nrows=1, ncols=2, index=1
118 | plot_waterfall(wf_stream)
119 |     plt.subplot(1, 2, 2) # nrows=1, ncols=2, index=2
120 | plot_spectrum_min_max(wf_stream)
121 | plt.tight_layout()
122 | plt.savefig(TEST_DATA_DIR + "/test_waterfall_stream_2.png")
123 | wf_stream.write_to_hdf5(test_h5)
124 | wf_stream.write_to_fil(test_fil)
125 |
126 |
127 | if __name__ == "__main__":
128 | test_waterfall_stream_1()
129 | test_waterfall_stream_2()
130 |
--------------------------------------------------------------------------------
/tests/test_write_to_fil.py:
--------------------------------------------------------------------------------
1 | """
2 | Very small module with one test: test_write_to_fil()
3 | """
4 | import os
5 | import blimpy as bl
6 |
7 | from tests.data import voyager_h5
8 |
9 | OUTDIR = os.path.dirname(voyager_h5) + "/"
10 |
11 | def test_write_to_fil():
12 |     """ Load Voyager dataset and test writing it out as a .fil file """
13 |
14 | a = bl.Waterfall(voyager_h5)
15 | a.write_to_fil(OUTDIR + 'test_out.fil')
16 |
17 | if __name__ == "__main__":
18 | test_write_to_fil()
19 |
--------------------------------------------------------------------------------