├── .github
│   ├── ISSUE_TEMPLATE
│   └── workflows
│       ├── package_and_publish.yml
│       └── tests.yml
├── .gitignore
├── AUTHORS
├── LICENSE
├── MANIFEST.in
├── Makefile
├── README.md
├── doc
│   ├── Makefile
│   ├── conf.py
│   ├── index.rst
│   ├── install.rst
│   ├── make.bat
│   └── modules
│       ├── HDF.rst
│       ├── SD.rst
│       ├── V.rst
│       └── VS.rst
├── examples
│   ├── compress
│   │   ├── README_compress.txt
│   │   └── test-compress.py
│   ├── hdfstruct
│   │   ├── README_hdfstruct.txt
│   │   └── hdfstruct.py
│   ├── inventory
│   │   ├── README_inventory.txt
│   │   ├── inventory_1-1.py
│   │   ├── inventory_1-2.py
│   │   ├── inventory_1-3.py
│   │   ├── inventory_1-4.py
│   │   └── inventory_1-5.py
│   ├── runall.py
│   ├── txttohdf
│   │   ├── README_txttohdf.txt
│   │   ├── depth.txt
│   │   ├── temp.txt
│   │   └── txttohdf.py
│   └── vgroup
│       ├── README_vgroup.txt
│       ├── vgread.py
│       └── vgwrite.py
├── pyhdf
│   ├── HC.py
│   ├── HDF.py
│   ├── Makefile
│   ├── SD.py
│   ├── V.py
│   ├── VS.py
│   ├── __init__.py
│   ├── error.py
│   ├── hdfext.i
│   ├── hdfext.py
│   ├── hdfext_wrap.c
│   ├── six.py
│   └── test_SD.py
├── pyproject.toml
├── setup.cfg
└── setup.py
/.github/ISSUE_TEMPLATE: -------------------------------------------------------------------------------- 1 | 7 | 8 | ### What version of pyhdf, HDF4, and Python are you using? 9 | 10 | pyhdf version: 11 | HDF4 C library version: 12 | Python version: 13 | 14 | ### What operating system are you using? 15 | 16 | 20 | 21 | 22 | ### What did you do? 23 | 24 | 28 | 29 | 30 | ### What did you expect to see? 31 | 32 | 33 | 34 | ### What did you see instead? 35 | -------------------------------------------------------------------------------- /.github/workflows/package_and_publish.yml: -------------------------------------------------------------------------------- 1 | name: PyPI publish 2 | 3 | on: 4 | push: 5 | 6 | jobs: 7 | packages: 8 | name: Wheels on ${{ matrix.os }} (${{ matrix.cibw_archs }}) 9 | runs-on: ${{ matrix.os }} 10 | strategy: 11 | fail-fast: true 12 | matrix: 13 | os: [windows-latest, ubuntu-latest, macos-latest] 14 | cibw_archs: ["auto"] 15 | env: 16 | CIBW_SKIP: "*-musllinux_* cp36-* pp3*-win_*" 17 | steps: 18 | - uses: actions/checkout@v3 19 | 20 | - uses: actions/setup-python@v4 21 | name: Install Python 22 | with: 23 | python-version: '3.13' 24 | 25 | - name: Setup conda (windows-latest) 26 | if: matrix.os == 'windows-latest' 27 | uses: s-weigand/setup-conda@v1 28 | 29 | - name: Setup conda paths (windows-latest) 30 | if: matrix.os == 'windows-latest' 31 | run: | 32 | echo "LIBRARY_DIRS=C:\Miniconda\Library\lib;C:\Miniconda\Library\bin" >> $env:GITHUB_ENV 33 | echo "INCLUDE_DIRS=C:\Miniconda\Library\include" >> $env:GITHUB_ENV 34 | 35 | - name: Setup libjpeg paths (macos-latest) 36 | if: matrix.os == 'macos-latest' 37 | run: | 38 | echo 'LIBRARY_DIRS=/opt/homebrew/opt/jpeg/lib' >> $GITHUB_ENV 39 | echo 'INCLUDE_DIRS=/opt/homebrew/opt/jpeg/include' >> $GITHUB_ENV 40 | 41 | # See https://github.com/pypa/cibuildwheel/issues/563#issuecomment-2257729524 42 | - name: Set macOS deployment target 43 | if: matrix.os == 'macos-latest' 44 | run: echo "MACOSX_DEPLOYMENT_TARGET=$(sw_vers -productVersion | cut -d '.' 
-f 1-2)" >> $GITHUB_ENV 45 | 46 | - name: Install cibuildwheel 47 | run: | 48 | python -m pip install cibuildwheel==2.22.0 49 | - name: Build wheels 50 | run: | 51 | python -m cibuildwheel --output-dir dist 52 | env: 53 | CIBW_BUILD: '{cp,pp}3*' 54 | CIBW_MANYLINUX_X86_64_IMAGE: manylinux2014 55 | CIBW_BEFORE_ALL_LINUX: yum -y install epel-release hdf hdf-devel && ln -s /usr/lib64/hdf/lib* /usr/lib64/ 56 | CIBW_ARCHS_LINUX: 'x86_64' # restrict to 64bit builds 57 | CIBW_ARCHS_WINDOWS: 'AMD64' # restrict to 64bit builds 58 | # (mac-os) Install hdf4 from sources 59 | CIBW_BEFORE_ALL_MACOS: > 60 | brew install ninja jpeg && 61 | export PATH="/opt/homebrew/opt/jpeg/bin:$PATH" && 62 | export LDFLAGS="-L/opt/homebrew/opt/jpeg/lib" && 63 | export CPPFLAGS="-I/opt/homebrew/opt/jpeg/include" && 64 | export PKG_CONFIG_PATH="/opt/homebrew/opt/jpeg/lib/pkgconfig" && 65 | cd /tmp && 66 | git clone --depth 1 --branch hdf4.3.0 https://github.com/HDFGroup/hdf4.git && 67 | mkdir build && cd build && 68 | ../hdf4/configure --enable-hdf4-xdr --enable-shared --disable-static --disable-fortran --disable-netcdf --enable-production --with-zlib --prefix=/usr/local && 69 | sudo make install 70 | CIBW_BEFORE_ALL_WINDOWS: > 71 | conda config --set always_yes yes --set changeps1 no --set auto_update_conda no --set safety_checks disabled && 72 | conda install -q hdf4 73 | - name: Copy wheels into wheelhouse 74 | run: | 75 | mkdir wheelhouse 76 | cp dist/*.whl wheelhouse 77 | - uses: actions/upload-artifact@v4 78 | with: 79 | name: wheelhouse-${{ matrix.os }} 80 | path: wheelhouse 81 | 82 | publish: 83 | if: ${{ startsWith(github.ref, 'refs/tags/v') }} 84 | name: Publish to PyPI 85 | needs: [packages] 86 | runs-on: ubuntu-latest 87 | steps: 88 | - uses: actions/checkout@v3 89 | 90 | - name: Switch to using Python 3.x 91 | uses: actions/setup-python@v4 92 | with: 93 | python-version: 3.x 94 | 95 | - name: Create source distribution archive 96 | run: | 97 | python -m pip install build 98 | python -m build --sdist -o wheelhouse 99 | 100 | - uses: actions/upload-artifact@v4 101 | with: 102 | name: wheelhouse-sdist 103 | path: wheelhouse 104 | 105 | - name: Publish SDIST to PyPI # there are some problems if sdist is not pushed first 106 | if: github.event.base_ref == 'refs/heads/master' 107 | uses: pypa/gh-action-pypi-publish@release/v1 108 | with: 109 | user: __token__ 110 | password: ${{ secrets.PYPI_API_TOKEN }} 111 | packages_dir: wheelhouse/ 112 | 113 | - name: Download all the wheels 114 | uses: actions/download-artifact@v4 115 | with: 116 | path: ./wheelhouse/ 117 | pattern: wheelhouse-* 118 | merge-multiple: true 119 | 120 | - name: Publish a Python distribution to Test PyPI 121 | uses: pypa/gh-action-pypi-publish@release/v1 122 | with: 123 | user: __token__ 124 | password: ${{ secrets.PYPI_TEST_TOKEN }} 125 | repository_url: https://test.pypi.org/legacy/ 126 | packages_dir: wheelhouse/ 127 | verbose: true 128 | 129 | - name: Publish a Python distribution to PyPI 130 | if: github.event.base_ref == 'refs/heads/master' 131 | uses: pypa/gh-action-pypi-publish@release/v1 132 | with: 133 | user: __token__ 134 | password: ${{ secrets.PYPI_API_TOKEN }} 135 | packages_dir: wheelhouse/ 136 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | concurrency: 4 | group: ${{ github.workflow }}-${{ github.event.number }}-${{ github.event.type }} 5 | cancel-in-progress: 
true 6 | 7 | on: 8 | push: 9 | 10 | jobs: 11 | packages: 12 | name: Test on ${{ matrix.os }} (${{ matrix.python }}) 13 | runs-on: ${{ matrix.os }} 14 | strategy: 15 | fail-fast: true 16 | matrix: 17 | os: [ubuntu-latest, macos-latest, windows-latest] 18 | python: ["3.9", "3.10", "3.11", "3.12", "3.13"] 19 | 20 | steps: 21 | - uses: actions/checkout@v3 22 | 23 | - name: Install Python 24 | uses: actions/setup-python@v4 25 | with: 26 | python-version: ${{ matrix.python }} 27 | 28 | - name: Install libhdf4-dev (macos-latest) 29 | if: matrix.os == 'macos-latest' 30 | run: | 31 | brew install ninja jpeg 32 | export PATH="/opt/homebrew/opt/jpeg/bin:$PATH" 33 | export LDFLAGS="-L/opt/homebrew/opt/jpeg/lib" 34 | export CPPFLAGS="-I/opt/homebrew/opt/jpeg/include" 35 | export PKG_CONFIG_PATH="/opt/homebrew/opt/jpeg/lib/pkgconfig" 36 | echo 'LIBRARY_DIRS=/opt/homebrew/opt/jpeg/lib' >> $GITHUB_ENV 37 | echo 'INCLUDE_DIRS=/opt/homebrew/opt/jpeg/include' >> $GITHUB_ENV 38 | 39 | cd /tmp && 40 | git clone --depth 1 --branch hdf4.3.0 https://github.com/HDFGroup/hdf4.git && 41 | mkdir build && cd build && 42 | ../hdf4/configure --enable-hdf4-xdr --enable-shared --disable-static --disable-fortran --disable-netcdf --enable-java --enable-production --with-zlib --prefix=/usr/local && 43 | sudo make install 44 | 45 | - name: Install libhdf4-dev (ubuntu-latest) 46 | if: matrix.os == 'ubuntu-latest' 47 | run: sudo apt-get install libhdf4-dev 48 | 49 | - name: Setup conda (windows-latest) 50 | if: matrix.os == 'windows-latest' 51 | uses: s-weigand/setup-conda@v1 52 | 53 | - name: Install libhdf4-dev (windows-latest) 54 | if: matrix.os == 'windows-latest' 55 | run: | 56 | conda config --set always_yes yes --set changeps1 no --set auto_update_conda no --set safety_checks disabled 57 | conda install -q hdf4 58 | echo "LIBRARY_DIRS=C:\Miniconda\Library\lib;C:\Miniconda\Library\bin" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append 59 | echo "INCLUDE_DIRS=C:\Miniconda\Library\include" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append 60 | 61 | - name: Install requirements 62 | run: | 63 | echo LIBRARY_DIRS is $LIBRARY_DIRS 64 | echo INCLUDE_DIRS is $INCLUDE_DIRS 65 | python -m pip install -U pip 66 | python -m pip install numpy pytest 67 | 68 | - name: Run tests 69 | run: | 70 | pip install -e . 71 | pytest 72 | python examples/runall.py 73 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | venv/ 3 | .venv/ 4 | build/ 5 | dist/ 6 | pyhdf.egg-info/ 7 | doc/_build/ 8 | pyhdf/_hdfext*.so 9 | examples/*/*.hdf 10 | -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | Andre Gosselin 2 | @bmagill1250 3 | @dmarth 4 | Fazlul Shahriar 5 | HDF-EOS Tools Information Center 6 | H. Joe Lee 7 | Travis E. 
Oliphant 8 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2019 The pyhdf Authors 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 22 | 23 | 24 | Built distributions of pyhdf also include: 25 | Library | License 26 | - hdf | BSD-3 27 | - jpeg | Custom BSD-like 28 | - zlib | zlib 29 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md 2 | include LICENSE 3 | include AUTHORS 4 | include pyproject.toml 5 | include pyhdf/hdfext.i 6 | recursive-include examples * 7 | recursive-include doc * 8 | prune doc/_build 9 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | PYTHON = python 2 | 3 | .PHONY: all 4 | all: build 5 | 6 | .PHONY: build 7 | build: 8 | make -C pyhdf build 9 | $(PYTHON) -m build 10 | 11 | .PHONY: install 12 | install: build 13 | $(PYTHON) -m pip install . 14 | 15 | .PHONY: builddoc 16 | .ONESHELL: 17 | builddoc: 18 | export PYTHONPATH=$(shell pwd) 19 | $(PYTHON) -m pip install -e . 20 | make -C doc clean 21 | make -C doc html 22 | @echo 23 | @echo doc index is doc/_build/html/index.html 24 | 25 | .PHONY: test 26 | test: 27 | $(PYTHON) -m pip install -e . 
28 | pytest 29 | $(PYTHON) examples/runall.py 30 | 31 | .PHONY: clean 32 | clean: 33 | rm -rf build/ dist/ pyhdf.egg-info examples/*/*.hdf 34 | make -C pyhdf clean 35 | make -C doc clean 36 | 37 | .PHONY: dist 38 | dist: 39 | $(PYTHON) -m build 40 | @echo Upload to test site: 41 | @echo $(PYTHON) -m twine upload --repository-url https://test.pypi.org/legacy/ dist/* 42 | @echo Upload to PyPI: 43 | @echo $(PYTHON) -m twine upload dist/* 44 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Tests](https://github.com/fhs/pyhdf/actions/workflows/tests.yml/badge.svg)](https://github.com/fhs/pyhdf/actions/workflows/tests.yml) 2 | [![Pypi build](https://github.com/fhs/pyhdf/actions/workflows/package_and_publish.yml/badge.svg)](https://github.com/fhs/pyhdf/actions/workflows/package_and_publish.yml) 3 | [![Anaconda-Server Badge](https://anaconda.org/conda-forge/pyhdf/badges/version.svg)](https://anaconda.org/conda-forge/pyhdf) 4 | 5 | # pyhdf 6 | 7 | pyhdf is a Python wrapper around the NCSA HDF version 4 library. 8 | The SD (Scientific Dataset), VS (Vdata) and V (Vgroup) APIs 9 | are currently implemented. NetCDF files can also be 10 | read and modified. It supports both Python 2 and Python 3. 11 | 12 | *Note:* The SourceForge pyhdf 13 | [website](http://pysclint.sourceforge.net/pyhdf/) and 14 | [project](https://sourceforge.net/projects/pysclint/) are out of date. 15 | The original author of pyhdf has abandoned the project, and it is 16 | currently maintained on [GitHub](https://github.com/fhs/pyhdf). 17 | 18 | Version 0.9.x was called 19 | [python-hdf4](https://pypi.org/project/python-hdf4/) 20 | on PyPI because at that time we didn't have 21 | [access](https://github.com/pypa/warehouse/issues/5157) to the 22 | [pyhdf package](https://pypi.org/project/pyhdf/) on PyPI. For version 23 | 0.10.0 and onward, please install `pyhdf` instead of `python-hdf4`. 24 | 25 | ## Installation 26 | 27 | See [pyhdf installation instructions](http://fhs.github.io/pyhdf/install.html) 28 | or [doc/install.rst](doc/install.rst). 29 | 30 | ## Documentation 31 | 32 | See [pyhdf documentation](http://fhs.github.io/pyhdf/). 33 | 34 | Additional documentation on the HDF4 format can be found in the 35 | [HDF4 Support Page](https://portal.hdfgroup.org/display/HDF4/HDF4). 36 | 37 | ## Examples 38 | 39 | Example Python programs using the pyhdf package 40 | can be found inside the [examples/](examples/) subdirectory. 41 | -------------------------------------------------------------------------------- /doc/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 
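# (Descriptive note, inferred from the definitions just below: PAPEROPT_a4 /
# PAPEROPT_letter select the LaTeX paper size, and ALLSPHINXOPTS combines the
# doctree cache location, the selected paper option and any user-supplied
# SPHINXOPTS for the builder targets that follow.)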
16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/pyhdf.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/pyhdf.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 
96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/pyhdf" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/pyhdf" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /doc/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # pyhdf documentation build configuration file, created by 4 | # sphinx-quickstart2 on Sun Jul 27 02:07:57 2014. 
5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | import sys 16 | import os 17 | 18 | # If extensions (or modules to document with autodoc) are in another directory, 19 | # add these directories to sys.path here. If the directory is relative to the 20 | # documentation root, use os.path.abspath to make it absolute, like shown here. 21 | #sys.path.insert(0, os.path.abspath('.')) 22 | 23 | # -- General configuration ------------------------------------------------ 24 | 25 | # If your documentation needs a minimal Sphinx version, state it here. 26 | #needs_sphinx = '1.0' 27 | 28 | # Add any Sphinx extension module names here, as strings. They can be 29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 30 | # ones. 31 | extensions = [ 32 | 'sphinx.ext.autodoc', 33 | 'sphinx.ext.viewcode', 34 | ] 35 | 36 | # Add any paths that contain templates here, relative to this directory. 37 | templates_path = ['_templates'] 38 | 39 | # The suffix of source filenames. 40 | source_suffix = '.rst' 41 | 42 | # The encoding of source files. 43 | #source_encoding = 'utf-8-sig' 44 | 45 | # The master toctree document. 46 | master_doc = 'index' 47 | 48 | # General information about the project. 49 | project = u'pyhdf' 50 | copyright = u'2019, pyhdf authors' 51 | 52 | # The version info for the project you're documenting, acts as replacement for 53 | # |version| and |release|, also used in various other places throughout the 54 | # built documents. 55 | # 56 | # The short X.Y version. 57 | version = '0.11' 58 | # The full version, including alpha/beta/rc tags. 59 | release = '0.11.4' 60 | 61 | # The language for content autogenerated by Sphinx. Refer to documentation 62 | # for a list of supported languages. 63 | #language = None 64 | 65 | # There are two options for replacing |today|: either, you set today to some 66 | # non-false value, then it is used: 67 | #today = '' 68 | # Else, today_fmt is used as the format for a strftime call. 69 | #today_fmt = '%B %d, %Y' 70 | 71 | # List of patterns, relative to source directory, that match files and 72 | # directories to ignore when looking for source files. 73 | exclude_patterns = ['_build'] 74 | 75 | # The reST default role (used for this markup: `text`) to use for all 76 | # documents. 77 | #default_role = None 78 | 79 | # If true, '()' will be appended to :func: etc. cross-reference text. 80 | #add_function_parentheses = True 81 | 82 | # If true, the current module name will be prepended to all description 83 | # unit titles (such as .. function::). 84 | #add_module_names = True 85 | 86 | # If true, sectionauthor and moduleauthor directives will be shown in the 87 | # output. They are ignored by default. 88 | #show_authors = False 89 | 90 | # The name of the Pygments (syntax highlighting) style to use. 91 | pygments_style = 'sphinx' 92 | 93 | # A list of ignored prefixes for module index sorting. 94 | #modindex_common_prefix = [] 95 | 96 | # If true, keep warnings as "system message" paragraphs in the built documents. 97 | #keep_warnings = False 98 | 99 | 100 | # -- Options for HTML output ---------------------------------------------- 101 | 102 | # The theme to use for HTML and HTML Help pages. See the documentation for 103 | # a list of builtin themes. 
104 | html_theme = 'default' 105 | 106 | # Theme options are theme-specific and customize the look and feel of a theme 107 | # further. For a list of options available for each theme, see the 108 | # documentation. 109 | #html_theme_options = {} 110 | 111 | # Add any paths that contain custom themes here, relative to this directory. 112 | #html_theme_path = [] 113 | 114 | # The name for this set of Sphinx documents. If None, it defaults to 115 | # " v documentation". 116 | #html_title = None 117 | 118 | # A shorter title for the navigation bar. Default is the same as html_title. 119 | #html_short_title = None 120 | 121 | # The name of an image file (relative to this directory) to place at the top 122 | # of the sidebar. 123 | #html_logo = None 124 | 125 | # The name of an image file (within the static path) to use as favicon of the 126 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 127 | # pixels large. 128 | #html_favicon = None 129 | 130 | # Add any paths that contain custom static files (such as style sheets) here, 131 | # relative to this directory. They are copied after the builtin static files, 132 | # so a file named "default.css" will overwrite the builtin "default.css". 133 | html_static_path = ['_static'] 134 | 135 | # Add any extra paths that contain custom files (such as robots.txt or 136 | # .htaccess) here, relative to this directory. These files are copied 137 | # directly to the root of the documentation. 138 | #html_extra_path = [] 139 | 140 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 141 | # using the given strftime format. 142 | #html_last_updated_fmt = '%b %d, %Y' 143 | 144 | # If true, SmartyPants will be used to convert quotes and dashes to 145 | # typographically correct entities. 146 | #html_use_smartypants = True 147 | 148 | # Custom sidebar templates, maps document names to template names. 149 | #html_sidebars = {} 150 | 151 | # Additional templates that should be rendered to pages, maps page names to 152 | # template names. 153 | #html_additional_pages = {} 154 | 155 | # If false, no module index is generated. 156 | #html_domain_indices = True 157 | 158 | # If false, no index is generated. 159 | #html_use_index = True 160 | 161 | # If true, the index is split into individual pages for each letter. 162 | #html_split_index = False 163 | 164 | # If true, links to the reST sources are added to the pages. 165 | #html_show_sourcelink = True 166 | 167 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 168 | #html_show_sphinx = True 169 | 170 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 171 | #html_show_copyright = True 172 | 173 | # If true, an OpenSearch description file will be output, and all pages will 174 | # contain a tag referring to it. The value of this option must be the 175 | # base URL from which the finished HTML is served. 176 | #html_use_opensearch = '' 177 | 178 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 179 | #html_file_suffix = None 180 | 181 | # Output file base name for HTML help builder. 182 | htmlhelp_basename = 'pyhdfdoc' 183 | 184 | 185 | # -- Options for LaTeX output --------------------------------------------- 186 | 187 | latex_elements = { 188 | # The paper size ('letterpaper' or 'a4paper'). 189 | #'papersize': 'letterpaper', 190 | 191 | # The font size ('10pt', '11pt' or '12pt'). 192 | #'pointsize': '10pt', 193 | 194 | # Additional stuff for the LaTeX preamble. 
195 | #'preamble': '', 196 | } 197 | 198 | # Grouping the document tree into LaTeX files. List of tuples 199 | # (source start file, target name, title, 200 | # author, documentclass [howto, manual, or own class]). 201 | latex_documents = [ 202 | ('index', 'pyhdf.tex', u'pyhdf Documentation', 203 | u'pyhdf authors', 'manual'), 204 | ] 205 | 206 | # The name of an image file (relative to this directory) to place at the top of 207 | # the title page. 208 | #latex_logo = None 209 | 210 | # For "manual" documents, if this is true, then toplevel headings are parts, 211 | # not chapters. 212 | #latex_use_parts = False 213 | 214 | # If true, show page references after internal links. 215 | #latex_show_pagerefs = False 216 | 217 | # If true, show URL addresses after external links. 218 | #latex_show_urls = False 219 | 220 | # Documents to append as an appendix to all manuals. 221 | #latex_appendices = [] 222 | 223 | # If false, no module index is generated. 224 | #latex_domain_indices = True 225 | 226 | 227 | # -- Options for manual page output --------------------------------------- 228 | 229 | # One entry per manual page. List of tuples 230 | # (source start file, name, description, authors, manual section). 231 | man_pages = [ 232 | ('index', 'pyhdf', u'pyhdf Documentation', 233 | [u'pyhdf authors'], 1) 234 | ] 235 | 236 | # If true, show URL addresses after external links. 237 | #man_show_urls = False 238 | 239 | 240 | # -- Options for Texinfo output ------------------------------------------- 241 | 242 | # Grouping the document tree into Texinfo files. List of tuples 243 | # (source start file, target name, title, author, 244 | # dir menu entry, description, category) 245 | texinfo_documents = [ 246 | ('index', 'pyhdf', u'pyhdf Documentation', 247 | u'pyhdf authors', 'pyhdf', 'One line description of project.', 248 | 'Miscellaneous'), 249 | ] 250 | 251 | # Documents to append as an appendix to all manuals. 252 | #texinfo_appendices = [] 253 | 254 | # If false, no module index is generated. 255 | #texinfo_domain_indices = True 256 | 257 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 258 | #texinfo_show_urls = 'footnote' 259 | 260 | # If true, do not generate a @detailmenu in the "Top" node's menu. 261 | #texinfo_no_detailmenu = False 262 | -------------------------------------------------------------------------------- /doc/index.rst: -------------------------------------------------------------------------------- 1 | .. pyhdf documentation master file, created by 2 | sphinx-quickstart2 on Sun Jul 27 02:07:57 2014. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to pyhdf's documentation! 7 | ======================================= 8 | 9 | pyhdf is a python wrapper around the NCSA HDF version 4 library. The SD 10 | (Scientific Dataset), VS (Vdata) and V (Vgroup) API's are currently 11 | implemented. NetCDF files can also be read and modified. 12 | 13 | Development for this library happens in github: 14 | https://github.com/fhs/pyhdf/ 15 | 16 | 17 | Contents: 18 | 19 | .. 
toctree:: 20 | :maxdepth: 2 21 | 22 | install.rst 23 | modules/HDF.rst 24 | modules/SD.rst 25 | modules/VS.rst 26 | modules/V.rst 27 | 28 | 29 | 30 | Indices and tables 31 | ================== 32 | 33 | * :ref:`genindex` 34 | * :ref:`modindex` 35 | * :ref:`search` 36 | 37 | -------------------------------------------------------------------------------- /doc/install.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Installation 3 | ============ 4 | 5 | pyhdf supports installation on Python 2 and Python 3. Please open an 6 | issue here if you encounter any problems during installation: 7 | https://github.com/fhs/pyhdf/issues 8 | 9 | The recommended method of installing 10 | pyhdf is to use conda. See the `Conda user guide 11 | `_ 12 | on how to install conda and activate your conda environment. 13 | Once you're in the conda environment, install `pyhdf from conda-forge 14 | `_:: 15 | 16 | conda install -c conda-forge pyhdf 17 | 18 | If you don't want to use conda, the instructions below describe how you 19 | can compile pyhdf from source. Version 0.10.3 also includes statically linked wheels for 20 | Linux with CPython 3.6-3.9. If compatible, `pip install pyhdf` will include the necessary 21 | libraries for you. If you don't want to use the prebuilt manylinux distribution, follow the instructions 22 | below to build from source, after downloading it from PyPI with `pip install pyhdf --no-binary :all:`. 23 | 24 | 25 | Download the source 26 | ------------------- 27 | 28 | The source code of the latest release of pyhdf can be obtained from 29 | either of these two locations: 30 | 31 | * PyPI / the cheeseshop: https://pypi.python.org/pypi/pyhdf 32 | * GitHub: https://github.com/fhs/pyhdf/releases 33 | 34 | Requirements 35 | ------------ 36 | 37 | The following packages are required to build and install pyhdf: 38 | 39 | - `Python `_: 40 | Python 2.6 or newer for Python 2, or Python 3.2 or newer for Python 3. 41 | - `NumPy `_ 42 | - `HDF4 libraries `_ (to use 43 | their HDF4 binaries, you will also need szip, available from the same page) 44 | - Compiler suite, e.g. `GCC `_. 45 | On Windows, you need to use a `compatible Visual C++ compiler 46 | `_. 47 | - `zlib `_ 48 | - `libjpeg `_ 49 | 50 | On Debian and Debian-based Linux distributions (e.g. Ubuntu), you can install 51 | all the requirements for Python 3 using this command:: 52 | 53 | apt-get install build-essential python3-dev python3-numpy libhdf4-dev -y 54 | 55 | Installing from the source archive 56 | ---------------------------------- 57 | 58 | 1. Go to the pyhdf source directory. 59 | 60 | 2. If your HDF4 libraries or include files reside in directories 61 | that are not searched by default on your system, the installation script 62 | will complain about missing files. 63 | 64 | Add them to the search path by exporting ``INCLUDE_DIRS`` and 65 | ``LIBRARY_DIRS``, e.g.:: 66 | 67 | export INCLUDE_DIRS=/usr/local/hdf-4.2r3/include 68 | export LIBRARY_DIRS=/usr/local/hdf-4.2r3/lib 69 | 70 | or on Windows something like (replace with actual location):: 71 | 72 | set INCLUDE_DIRS=C:\hdf4\include 73 | set LIBRARY_DIRS=C:\hdf4\lib;C:\hdf4\dll;C:\hdf4\jpeg6\lib;C:\hdf4\szip21\lib;C:\hdf4\zlib123\lib 74 | 75 | Note that jpeg, zlib, and (optionally) szip libraries must be found 76 | as well. If they are not in a standard place for the compiler, 77 | their location must be specified. On Mac OS X, ``/usr/local/lib`` 78 | and ``/usr/local/include`` may need to be specified if the 79 | libraries were installed there. 
You may need to install the devel 80 | versions of these packages to get the statically-linked libraries 81 | if your HDF binary is statically linked. 82 | 83 | If you are using the binary HDF4 library available from the HDF4 site, you 84 | must also have szlib installed. Then, you will also need to set ``SZIP``:: 85 | 86 | export SZIP=1 87 | 88 | (or on Windows: set SZIP=1) 89 | 90 | If you do not wish to use szlib, you will need to compile HDF4 from source. 91 | 92 | If anything goes wrong, read the detailed notes below. 93 | Warning messages about implicit declarations of some functions 94 | may be produced. Those are due to SWIG, and may be safely 95 | ignored. 96 | 97 | 3. Install system-wide or locally:: 98 | 99 | # sudo pip install . 100 | $ pip install -e . 101 | 102 | Or, you might prefer to make a python wheel and install it:: 103 | 104 | $ python -m build 105 | 106 | To make sure everything works as expected, run the ``hdfstruct.py`` 107 | script (under ``examples/hdfstruct``) on one of your HDF4 files. The 108 | script should display the file structure. This is a handy tool to have 109 | around when you want to explore the contents of any HDF4 file. 110 | 111 | 112 | Further notes 113 | ------------- 114 | 115 | External libraries 116 | ~~~~~~~~~~~~~~~~~~ 117 | 118 | HDF4.2 no longer provides its own copies of the jpeg and z libraries. 119 | Those must be installed separately (on Linux, they should be part of 120 | any standard distribution). 121 | 122 | The sz library (versions 2.0 or higher) must be installed if the SZIP 123 | compression method is to be used with SDsetcompress(). HDF v4.2 must 124 | also then be compiled with SZIP support. The binaries available from 125 | NCSA are (at the time of this writing) compiled with SZIP support 126 | (including encoding). To use these binaries, you *must have SZIP installed*. 127 | The binaries Enthought has produced and which are available in EPD and for 128 | download from Sourceforge are compiled with SZIP support without encoding 129 | capability. 130 | 131 | Getting an SZIP enabled HDF library may require compiling the library 132 | from source with the "--with-szlib" configuration option. Note that 133 | you *must* install SZIP in a separate step. For more details, see the 134 | `HDF Group site 135 | `_. 136 | 137 | In case your HDF library was compiled with SZIP support and you abide by the 138 | szip licensing terms, set the environment variable ``SZIP`` to ``1``. 139 | 140 | If you get error messages related to the ``SDgetcompress()`` / 141 | ``SDsetcompress()`` functions, e.g. ``"undefined symbol: 142 | SDgetcompress"``, set the environment variable ``NO_COMPRESS`` to "1". 143 | This will transform ``SDgetcompress()`` and ``SDsetcompress()`` into 144 | no-ops, which will immediately raise an exception, and will not be 145 | resolved against the HDF library symbols. This may make it possible to 146 | work with an HDF library earlier than v4.2. 147 | 148 | Swig-generated interface files 149 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 150 | Interface files ``hdfext.py`` and ``hdfext_wrap.c`` (located under the 151 | ``pyhdf`` subdirectory) have been generated using the SWIG tool. 152 | Those two files should be usable as is on most environments. It could 153 | happen however that, for reasons related to your environment, your C 154 | compiler does not accept the '.c' file and raises a compilation 155 | error. If so, the interface needs to be regenerated. 
To do so, 156 | install `SWIG `_, then run:: 157 | 158 | $ cd pyhdf 159 | $ swig -python hdfext.i 160 | 161 | SWIG should silently regenerate the two interface files, after which 162 | installation should proceed correctly. 163 | 164 | TRU64 note 165 | ~~~~~~~~~~ 166 | The HDF installation creates its libraries as archive (.a) files, 167 | not shareable (.so) ones. On TRU64, the linker by default first looks 168 | for shareable libraries in every directory, then in a second round 169 | for archive files. This means that if there is a libjpeg.so somewhere 170 | on the standard linker search paths, it will be found first, even if 171 | the HDF libjpeg.a file exists in the directory pointed by "library_dirs". 172 | To solve the problem, set the environment variable ``LINK_ARGS``:: 173 | 174 | export LINK_ARGS="-oldstyle_liblookup" 175 | 176 | This will tell the linker to look for .so then for .a files in each visited 177 | directory. 178 | -------------------------------------------------------------------------------- /doc/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. doctest to run all doctests embedded in the documentation if enabled 40 | goto end 41 | ) 42 | 43 | if "%1" == "clean" ( 44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 45 | del /q /s %BUILDDIR%\* 46 | goto end 47 | ) 48 | 49 | 50 | %SPHINXBUILD% 2> nul 51 | if errorlevel 9009 ( 52 | echo. 53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 54 | echo.installed, then set the SPHINXBUILD environment variable to point 55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 56 | echo.may add the Sphinx directory to PATH. 57 | echo. 58 | echo.If you don't have Sphinx installed, grab it from 59 | echo.http://sphinx-doc.org/ 60 | exit /b 1 61 | ) 62 | 63 | if "%1" == "html" ( 64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 
68 | goto end 69 | ) 70 | 71 | if "%1" == "dirhtml" ( 72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 76 | goto end 77 | ) 78 | 79 | if "%1" == "singlehtml" ( 80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 84 | goto end 85 | ) 86 | 87 | if "%1" == "pickle" ( 88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can process the pickle files. 92 | goto end 93 | ) 94 | 95 | if "%1" == "json" ( 96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 97 | if errorlevel 1 exit /b 1 98 | echo. 99 | echo.Build finished; now you can process the JSON files. 100 | goto end 101 | ) 102 | 103 | if "%1" == "htmlhelp" ( 104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 105 | if errorlevel 1 exit /b 1 106 | echo. 107 | echo.Build finished; now you can run HTML Help Workshop with the ^ 108 | .hhp project file in %BUILDDIR%/htmlhelp. 109 | goto end 110 | ) 111 | 112 | if "%1" == "qthelp" ( 113 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 117 | .qhcp project file in %BUILDDIR%/qthelp, like this: 118 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\pyhdf.qhcp 119 | echo.To view the help file: 120 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\pyhdf.qhc 121 | goto end 122 | ) 123 | 124 | if "%1" == "devhelp" ( 125 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished. 129 | goto end 130 | ) 131 | 132 | if "%1" == "epub" ( 133 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 137 | goto end 138 | ) 139 | 140 | if "%1" == "latex" ( 141 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 145 | goto end 146 | ) 147 | 148 | if "%1" == "latexpdf" ( 149 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 150 | cd %BUILDDIR%/latex 151 | make all-pdf 152 | cd %BUILDDIR%/.. 153 | echo. 154 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 155 | goto end 156 | ) 157 | 158 | if "%1" == "latexpdfja" ( 159 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 160 | cd %BUILDDIR%/latex 161 | make all-pdf-ja 162 | cd %BUILDDIR%/.. 163 | echo. 164 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 165 | goto end 166 | ) 167 | 168 | if "%1" == "text" ( 169 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 170 | if errorlevel 1 exit /b 1 171 | echo. 172 | echo.Build finished. The text files are in %BUILDDIR%/text. 173 | goto end 174 | ) 175 | 176 | if "%1" == "man" ( 177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 178 | if errorlevel 1 exit /b 1 179 | echo. 180 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 181 | goto end 182 | ) 183 | 184 | if "%1" == "texinfo" ( 185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 186 | if errorlevel 1 exit /b 1 187 | echo. 188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 
189 | goto end 190 | ) 191 | 192 | if "%1" == "gettext" ( 193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 194 | if errorlevel 1 exit /b 1 195 | echo. 196 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 197 | goto end 198 | ) 199 | 200 | if "%1" == "changes" ( 201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 202 | if errorlevel 1 exit /b 1 203 | echo. 204 | echo.The overview file is in %BUILDDIR%/changes. 205 | goto end 206 | ) 207 | 208 | if "%1" == "linkcheck" ( 209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 210 | if errorlevel 1 exit /b 1 211 | echo. 212 | echo.Link check complete; look for any errors in the above output ^ 213 | or in %BUILDDIR%/linkcheck/output.txt. 214 | goto end 215 | ) 216 | 217 | if "%1" == "doctest" ( 218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 219 | if errorlevel 1 exit /b 1 220 | echo. 221 | echo.Testing of doctests in the sources finished, look at the ^ 222 | results in %BUILDDIR%/doctest/output.txt. 223 | goto end 224 | ) 225 | 226 | if "%1" == "xml" ( 227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 228 | if errorlevel 1 exit /b 1 229 | echo. 230 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 231 | goto end 232 | ) 233 | 234 | if "%1" == "pseudoxml" ( 235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 236 | if errorlevel 1 exit /b 1 237 | echo. 238 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 239 | goto end 240 | ) 241 | 242 | :end 243 | -------------------------------------------------------------------------------- /doc/modules/HDF.rst: -------------------------------------------------------------------------------- 1 | .. automodule:: pyhdf.HDF 2 | :members: 3 | :undoc-members: 4 | -------------------------------------------------------------------------------- /doc/modules/SD.rst: -------------------------------------------------------------------------------- 1 | .. automodule:: pyhdf.SD 2 | :members: 3 | :undoc-members: 4 | -------------------------------------------------------------------------------- /doc/modules/V.rst: -------------------------------------------------------------------------------- 1 | .. automodule:: pyhdf.V 2 | :members: 3 | :undoc-members: 4 | -------------------------------------------------------------------------------- /doc/modules/VS.rst: -------------------------------------------------------------------------------- 1 | .. automodule:: pyhdf.VS 2 | :members: 3 | :undoc-members: 4 | -------------------------------------------------------------------------------- /examples/compress/README_compress.txt: -------------------------------------------------------------------------------- 1 | This example script creates a data array and stores it as an HDF file under several different compression formats. 2 | 3 | TODO: More outputs from the code displayed at the command line. 4 | -------------------------------------------------------------------------------- /examples/compress/test-compress.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from __future__ import print_function 4 | 5 | # Generate test HDF files using different compression configurations, 6 | # and validate each resulting file to make sure its contents is OK. 7 | # Adapted from example: 8 | # "https://support.hdfgroup.org/doc_resource/SZIP/h4_examples/szip32.c". 
9 | # A bigger dataset is defined to better show the size reduction achieved 10 | # by the SZIP compression. 11 | # Note that when applied to HDF4 SDS, the word "pixels" as used inside 12 | # the SZIP documentation should really be understood as a "data element", 13 | # eg a cell value inside a multidimensional array. 14 | # 15 | # On our systems, the program produced the following file sizes : 16 | # 17 | # $ ls -l *.hdf 18 | # -rw-r--r-- 1 root root 53389 Jun 29 14:20 SDS.COMP_DEFLATE.1.hdf 19 | # -rw-r--r-- 1 root root 56524 Jun 29 14:24 SDS.COMP_DEFLATE.2.hdf 20 | # -rw-r--r-- 1 root root 60069 Jun 29 14:24 SDS.COMP_DEFLATE.3.hdf 21 | # -rw-r--r-- 1 root root 59725 Jun 29 14:24 SDS.COMP_DEFLATE.4.hdf 22 | # -rw-r--r-- 1 root root 59884 Jun 29 14:24 SDS.COMP_DEFLATE.5.hdf 23 | # -rw-r--r-- 1 root root 58596 Jun 29 14:24 SDS.COMP_DEFLATE.6.hdf 24 | # -rw-r--r-- 1 root root 58450 Jun 29 14:24 SDS.COMP_DEFLATE.7.hdf 25 | # -rw-r--r-- 1 root root 58437 Jun 29 14:24 SDS.COMP_DEFLATE.8.hdf 26 | # -rw-r--r-- 1 root root 58446 Jun 29 14:24 SDS.COMP_DEFLATE.9.hdf 27 | # -rw-r--r-- 1 root root 102920 Jun 29 14:20 SDS.COMP_NONE.hdf 28 | # -rw-r--r-- 1 root root 103162 Jun 29 14:20 SDS.COMP_RLE.hdf 29 | # -rw-r--r-- 1 root root 60277 Jun 29 14:20 SDS.COMP_SKPHUFF.2.hdf 30 | # -rw-r--r-- 1 root root 52085 Jun 29 14:20 SDS.COMP_SKPHUFF.4.hdf 31 | # -rw-r--r-- 1 root root 52085 Jun 29 14:20 SDS.COMP_SKPHUFF.8.hdf 32 | # -rw-r--r-- 1 root root 71039 Jun 29 14:20 SDS.COMP_SZIP.EC.16.hdf 33 | # -rw-r--r-- 1 root root 79053 Jun 29 14:20 SDS.COMP_SZIP.EC.32.hdf 34 | # -rw-r--r-- 1 root root 66636 Jun 29 14:20 SDS.COMP_SZIP.EC.4.hdf 35 | # -rw-r--r-- 1 root root 66984 Jun 29 14:20 SDS.COMP_SZIP.EC.8.hdf 36 | # -rw-r--r-- 1 root root 39835 Jun 29 14:20 SDS.COMP_SZIP.NN.16.hdf 37 | # -rw-r--r-- 1 root root 44554 Jun 29 14:20 SDS.COMP_SZIP.NN.32.hdf 38 | # -rw-r--r-- 1 root root 38371 Jun 29 14:20 SDS.COMP_SZIP.NN.4.hdf 39 | # -rw-r--r-- 1 root root 38092 Jun 29 14:20 SDS.COMP_SZIP.NN.8.hdf 40 | # 41 | # For the chosen data set, the best results were attained using 42 | # SZIP compression with NN compression scheme and 8 pixels per block. 43 | # Mileage will vary with the data set used. 44 | 45 | import sys 46 | import os.path 47 | 48 | from pyhdf.SD import * 49 | import numpy 50 | 51 | # Array shape and data type. 
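# A quick sanity check on the sizes listed above: a 250 x 100 array of
# 32-bit integers holds 250 * 100 * 4 = 100,000 bytes of raw data, which
# is consistent with the ~103 KB uncompressed SDS.COMP_NONE.hdf file once
# HDF metadata overhead is added.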
52 | LENGTH = 250 53 | WIDTH = 100 54 | NUMPY_DATATYPE = numpy.int32 55 | HDF_DATATYPE = SDC.INT32 56 | 57 | def doCompress(compType, value=0, v2=0): 58 | """Create and validate an HDF file using a compression scheme 59 | specified by the parameters""" 60 | 61 | # Build a descriptive file name 62 | if compType == SDC.COMP_NONE: 63 | fileName = "SDS.COMP_NONE" 64 | elif compType == SDC.COMP_RLE: 65 | fileName = "SDS.COMP_RLE" 66 | elif compType == SDC.COMP_SKPHUFF: 67 | fileName = "SDS.COMP_SKPHUFF.%d" % value 68 | elif compType == SDC.COMP_DEFLATE: 69 | fileName = "SDS.COMP_DEFLATE.%d" % value 70 | elif compType == SDC.COMP_SZIP: 71 | fileName = "SDS.COMP_SZIP" 72 | if value == SDC.COMP_SZIP_NN: 73 | fileName += ".NN" 74 | elif value == SDC.COMP_SZIP_EC: 75 | fileName += ".EC" 76 | else: 77 | print("illegal value") 78 | sys.exit(1) 79 | fileName += ".%s" % v2 80 | else: 81 | print("illegal compType") 82 | sys.exit(1) 83 | fileName += ".hdf" 84 | 85 | SDS_NAME = "Data" 86 | 87 | fill_value = 0 88 | 89 | #LENGTH = 9 90 | #WIDTH = 6 91 | # 92 | #data = numpy.array( ((100,100,200,200,300,400), 93 | # (100,100,200,200,300,400), 94 | # (100,100,200,200,300,400), 95 | # (300,300, 0,400,300,400), 96 | # (300,300, 0,400,300,400), 97 | # (300,300, 0,400,300,400), 98 | # (0, 0,600,600,300,400), 99 | # (500,500,600,600,300,400), 100 | # (0, 0,600,600,300,400)), NUMPY_DATATYPE) 101 | 102 | # The above dataset is used in the original NCSA example. 103 | # It is too small to show a significant size reduction after 104 | # compression. The following is used for a more realistic example. 105 | data = numpy.zeros((LENGTH, WIDTH), NUMPY_DATATYPE) 106 | for i in range(LENGTH): 107 | for j in range(WIDTH): 108 | data[i,j] = (i+j)*(i-j) 109 | 110 | # Create HDF file, wiping it out if it already exists. 111 | sd_id = SD(fileName, SDC.WRITE | SDC.CREATE | SDC.TRUNC) 112 | 113 | # Create dataset. 114 | sds_id = sd_id.create(SDS_NAME, HDF_DATATYPE, (LENGTH, WIDTH)) 115 | 116 | # Set the dataset fill value. 117 | sds_id.setfillvalue(0) 118 | 119 | # Apply compression. 120 | try: 121 | sds_id.setcompress(compType, # compression type 122 | value, v2) # args depend on compression type 123 | except HDF4Error as msg: 124 | print(("Error compressing the dataset with params: " 125 | "(%d,%d,%d) : %s" % (compType, value, v2, msg))) 126 | sds_id.endaccess() 127 | sd_id.end() 128 | os.remove(fileName) 129 | return 130 | 131 | # Load data in the dataset. 132 | sds_id[:] = data 133 | 134 | # Close dataset. 135 | sds_id.endaccess() 136 | 137 | # Close hdf file to flush compressed data. 138 | sd_id.end() 139 | 140 | # Verify compressed data. 141 | # ###################### 142 | 143 | # Reopen file and select first dataset. 144 | sd_id = SD(fileName, SDC.READ) 145 | sds_id = sd_id.select(0) 146 | 147 | # Obtain compression info. 
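# As the unpacking below shows, getcompress() returns a tuple whose layout
# depends on the compression scheme: element 0 is always the compression
# type, and any remaining elements are scheme-specific parameters.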
148 | compInfo = sds_id.getcompress() 149 | compType = compInfo[0] 150 | print("file : %s" % fileName) 151 | print(" size = %d" % os.path.getsize(fileName)) 152 | if compType == SDC.COMP_NONE: 153 | print(" compType = COMP_NONE") 154 | elif compType == SDC.COMP_RLE: 155 | print(" compType = COMP_RLE") 156 | elif compType == SDC.COMP_SKPHUFF: 157 | print(" compType = COMP_SKPHUFF") 158 | print(" dataSize = %d" % compInfo[1]) 159 | elif compType == SDC.COMP_DEFLATE: 160 | print(" compType = COMP_DEFLATE (GZIP)") 161 | print(" level = %d" % compInfo[1]) 162 | elif compType == SDC.COMP_SZIP: 163 | print(" compType = COMP_SZIP") 164 | optionMask = compInfo[1] 165 | if optionMask & SDC.COMP_SZIP_NN: 166 | print(" encoding scheme = NN") 167 | elif optionMask & SDC.COMP_SZIP_EC: 168 | print(" encoding scheme = EC") 169 | else: 170 | print(" unknown encoding scheme") 171 | sys.exit(1) 172 | pixelsPerBlock, pixelsPerScanline, bitsPerPixel, pixels = compInfo[2:] 173 | print(" pixelsPerBlock = %d" % pixelsPerBlock) 174 | print(" pixelsPerScanline = %d" % pixelsPerScanline) 175 | print(" bitsPerPixel = %d" % bitsPerPixel) 176 | print(" pixels = %d" % pixels) 177 | else: 178 | print(" unknown compression type") 179 | sys.exit(1) 180 | 181 | # Read dataset contents. 182 | out_data = sds_id[:] 183 | 184 | # Compare with original data. 185 | num_errs = 0 186 | for i in range(LENGTH): 187 | for j in range(WIDTH): 188 | if data[i,j] != out_data[i,j]: 189 | print("bad value at %d,%d expected: %d got: %d" \ 190 | % (i,j,data[i,j],out_data[i,j])) 191 | num_errs += 1 192 | 193 | # Close dataset and hdf file. 194 | sds_id.endaccess() 195 | sd_id.end() 196 | 197 | if num_errs == 0: 198 | print(" file validated") 199 | else: 200 | print(" file invalid : %d errors" % num_errs) 201 | print("") 202 | 203 | # Try different compression configurations in turn. 204 | 205 | # All the following calls will fail with a "Cannot execute" exception if pyhdf 206 | # was installed with the NOCOMPRESS macro set. 207 | 208 | # No compression 209 | print("no compression") 210 | doCompress(SDC.COMP_NONE) 211 | 212 | # RLE compression 213 | print("run-length encoding") 214 | doCompress(SDC.COMP_RLE) 215 | 216 | # Skipping-Huffman compression. 217 | print("Skipping-Huffman encoding") 218 | for size in 2,4,8: 219 | doCompress(SDC.COMP_SKPHUFF, size) # size in bytes of the data elements 220 | 221 | # Gzip compression 222 | print("GZIP compression") 223 | for level in 1,2,3,4,5,6,7,8,9: 224 | doCompress(SDC.COMP_DEFLATE, level) # compression level, from 1 to 9 225 | 226 | # SZIP compression 227 | # Those calls will fail with an "Encoder not available" exception if 228 | # pyhdf was installed with the NOSZIP macro set. 229 | print("SZIP compression") 230 | for scheme in SDC.COMP_SZIP_NN, SDC.COMP_SZIP_EC: 231 | for ppb in 4,8,16,32: 232 | doCompress(SDC.COMP_SZIP, scheme, ppb) # scheme, pixels per block 233 | -------------------------------------------------------------------------------- /examples/hdfstruct/README_hdfstruct.txt: -------------------------------------------------------------------------------- 1 | HDFSTRUCT 2 | 3 | This script accesses an HDF file and displays information about its contents. 4 | The file is specified at the command line. 
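For example, to dump the structure of an HDF4 file (here a hypothetical
file named weather.hdf):

    python hdfstruct.py weather.hdf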
5 | 6 | TODO: Better documentation 7 | -------------------------------------------------------------------------------- /examples/hdfstruct/hdfstruct.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from __future__ import print_function 4 | 5 | import sys 6 | from pyhdf.SD import * 7 | 8 | # Dictionary used to convert from a numeric data type to its symbolic 9 | # representation 10 | typeTab = { 11 | SDC.CHAR: 'CHAR', 12 | SDC.CHAR8: 'CHAR8', 13 | SDC.UCHAR8: 'UCHAR8', 14 | SDC.INT8: 'INT8', 15 | SDC.UINT8: 'UINT8', 16 | SDC.INT16: 'INT16', 17 | SDC.UINT16: 'UINT16', 18 | SDC.INT32: 'INT32', 19 | SDC.UINT32: 'UINT32', 20 | SDC.FLOAT32: 'FLOAT32', 21 | SDC.FLOAT64: 'FLOAT64' 22 | } 23 | 24 | printf = sys.stdout.write 25 | 26 | def eol(n=1): 27 | printf("%s" % chr(10) * n) 28 | 29 | hdfFile = sys.argv[1] # Get first command line argument 30 | 31 | try: # Catch pyhdf.SD errors 32 | # Open HDF file named on the command line 33 | f = SD(hdfFile) 34 | # Get global attribute dictionary 35 | attr = f.attributes(full=1) 36 | # Get dataset dictionary 37 | dsets = f.datasets() 38 | 39 | # File name, number of attributes and number of variables. 40 | printf("FILE INFO"); eol() 41 | printf("-------------"); eol() 42 | printf("%-25s%s" % ("File:", hdfFile)); eol() 43 | printf("%-25s%d" % (" file attributes:", len(attr))); eol() 44 | printf("%-25s%d" % (" datasets:", len(dsets))); eol() 45 | eol(); 46 | 47 | # Global attribute table. 48 | if len(attr) > 0: 49 | printf("File attributes"); eol(2) 50 | printf(" name idx type len value"); eol() 51 | printf(" -------------------- --- ------- --- -----"); eol() 52 | # Get list of attribute names and sort them lexically 53 | attNames = sorted(attr.keys()) 54 | for name in attNames: 55 | t = attr[name] 56 | # t[0] is the attribute value 57 | # t[1] is the attribute index number 58 | # t[2] is the attribute type 59 | # t[3] is the attribute length 60 | printf(" %-20s %3d %-7s %3d %s" % 61 | (name, t[1], typeTab[t[2]], t[3], t[0])); eol() 62 | eol() 63 | 64 | 65 | # Dataset table 66 | if len(dsets) > 0: 67 | printf("Datasets (idx:index #, na:# attributes, cv:coord var)"); eol(2) 68 | printf(" name idx type na cv dimension(s)"); eol() 69 | printf(" -------------------- --- ------- -- -- ------------"); eol() 70 | # Get list of dataset names and sort them lexically 71 | dsNames = sorted(dsets.keys()) 72 | for name in dsNames: 73 | # Get dataset instance 74 | ds = f.select(name) 75 | # Retrieve the dictionary of dataset attributes so as 76 | # to display their number 77 | vAttr = ds.attributes() 78 | t = dsets[name] 79 | # t[0] is a tuple of dimension names 80 | # t[1] is a tuple of dimension lengths 81 | # t[2] is the dataset type 82 | # t[3] is the dataset index number 83 | printf(" %-20s %3d %-7s %2d %-2s " % 84 | (name, t[3], typeTab[t[2]], len(vAttr), 85 | ds.iscoordvar() and 'X' or '')) 86 | # Display dimension info. 87 | n = 0 88 | for d in t[0]: 89 | printf("%s%s(%d)" % (n > 0 and ', ' or '', d, t[1][n])) 90 | n += 1 91 | eol() 92 | eol() 93 | 94 | # Dataset info. 
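    # Both attributes(full=1) and dimensions(full=1) used below return
    # dictionaries keyed by name; each value is an info tuple whose
    # elements are unpacked one by one in the loops that follow.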
95 |     printf("DATASET INFO"); eol()
96 |     printf("-------------"); eol(2)
97 |     for name in dsNames:
98 |         # Access the dataset
99 |         dsObj = f.select(name)
100 |         # Get dataset attribute dictionary
101 |         dsAttr = dsObj.attributes(full=1)
102 |         if len(dsAttr) > 0:
103 |             printf("%s attributes" % name); eol(2)
104 |             printf(" name                 idx type    len value"); eol()
105 |             printf(" -------------------- --- ------- --- -----"); eol()
106 |             # Get the list of attribute names and sort them alphabetically.
107 |             attNames = sorted(dsAttr.keys())
108 |             for nm in attNames:
109 |                 t = dsAttr[nm]
110 |                 # t[0] is the attribute value
111 |                 # t[1] is the attribute index number
112 |                 # t[2] is the attribute type
113 |                 # t[3] is the attribute length
114 |                 printf(" %-20s %3d %-7s %3d %s" %
115 |                        (nm, t[1], typeTab[t[2]], t[3], t[0])); eol()
116 |             eol()
117 |         # Get dataset dimension dictionary
118 |         dsDim = dsObj.dimensions(full=1)
119 |         if len(dsDim) > 0:
120 |             printf("%s dimensions" % name); eol(2)
121 |             printf(" name                 idx   len unl type    natt"); eol()
122 |             printf(" -------------------- --- ----- --- ------- ----"); eol()
123 |             # Get the list of dimension names and sort them alphabetically.
124 |             dimNames = sorted(dsDim.keys())
125 |             for nm in dimNames:
126 |                 t = dsDim[nm]
127 |                 # t[0] is the dimension length
128 |                 # t[1] is the dimension index number
129 |                 # t[2] is 1 if the dimension is unlimited, 0 if not
130 |                 # t[3] is the dimension scale type, 0 if no scale
131 |                 # t[4] is the number of attributes
132 |                 printf(" %-20s %3d %5d %s %-7s %4d" %
133 |                        (nm, t[1], t[0], t[2] and "X" or " ",
134 |                         t[3] and typeTab[t[3]] or "", t[4])); eol()
135 |             eol()
136 |
137 |
138 | except HDF4Error as msg:
139 |     print("HDF4Error", msg)
140 |
--------------------------------------------------------------------------------
/examples/inventory/README_inventory.txt:
--------------------------------------------------------------------------------
1 | INVENTORY
2 |
3 | These examples operate on a file called inventory.hdf. They do the following:
4 |
5 |
6 | inventory_1-1.py:
7 | Opens the file inventory.hdf (creating it if it doesn't exist), and adds a VData set of inventory items to it.
8 |
9 | inventory_1-2.py:
10 | Opens the file inventory.hdf (creating it if it doesn't exist) storing VData, updates the "status" attribute, and appends two records to the data.
11 |
12 | inventory_1-3.py:
13 | Opens the file inventory.hdf (creating it if it doesn't exist) storing VData, updates the "status" attribute, and updates existing records (one in place, and all records from index 4 on).
14 |
15 | inventory_1-4.py:
16 | Opens the file inventory.hdf (which must already exist) and displays some of its data.
17 |
18 | inventory_1-5.py:
19 | Similar functionality to inventory_1-4.py, but reads all the records at once instead of looping over them one at a time.
20 |
21 | TODO: Better documentation
22 |
--------------------------------------------------------------------------------
/examples/inventory/inventory_1-1.py:
--------------------------------------------------------------------------------
1 | from pyhdf.HDF import *
2 | from pyhdf.VS import *
3 |
4 | # Open HDF file and initialize the VS interface
5 | f = HDF('inventory.hdf',    # Open file 'inventory.hdf' in write mode
6 |         HC.WRITE|HC.CREATE) # creating it if it does not exist
7 | vs = f.vstart()             # init vdata interface
8 |
9 | # Create vdata and define its structure
10 | vd = vs.create(             # create a new vdata
11 |     'INVENTORY',            # name of the vdata
12 |                             # fields of the vdata follow
13 |     (('partid', HC.CHAR8, 5),         # 5 char string
14 |      ('description', HC.CHAR8, 10),   # 10 char string field
15 |      ('qty', HC.INT16, 1),            # 1 16 bit int field
16 |      ('wght', HC.FLOAT32, 1),         # 1 32 bit float
17 |      ('price', HC.FLOAT32, 1)         # 1 32 bit float
18 |      ))                     # 5 fields allocated in the vdata
19 |
20 | # Set attributes on the vdata and its fields
21 | vd.field('wght').unit = 'lb'
22 | vd.field('price').unit = '$'
23 | # In order to be able to update a string attribute, it must
24 | # always be set to the same length. This sets 'status' to a 20
25 | # char long, left-justified string, padded with spaces on the right.
26 |
27 | vd.status = "%-20s" % 'phase 1 done'
28 |
29 | # Store records
30 | vd.write((                  # write 3 records
31 |     ('Q1234', 'bolt', 12, 0.01, 0.05),    # record 1
32 |     ('B5432', 'brush', 10, 0.4, 4.25),    # record 2
33 |     ('S7613', 'scissor', 2, 0.2, 3.75)    # record 3
34 |     ))
35 | vd.detach()                 # "close" the vdata
36 |
37 | vs.end()                    # terminate the vdata interface
38 | f.close()                   # close the HDF file
39 |
--------------------------------------------------------------------------------
/examples/inventory/inventory_1-2.py:
--------------------------------------------------------------------------------
1 | from pyhdf.HDF import *
2 | from pyhdf.VS import *
3 |
4 | f = HDF('inventory.hdf',    # Open file 'inventory.hdf'
5 |         HC.WRITE|HC.CREATE) # creating it if it does not exist
6 | vs = f.vstart()             # init vdata interface
7 | vd = vs.attach('INVENTORY', 1) # attach vdata 'INVENTORY' in write mode
8 |
9 | # Update the `status' vdata attribute. The attribute length must not
10 | # change. We call the attribute info() method, which returns a list where
11 | # number of values (eg string length) is stored at index 2.
12 | # We then assign a left justified string of exactly that length.
13 | len = vd.attr('status').info()[2]
14 | vd.status = '%-*s' % (len, 'phase 2 done')
15 |
16 | vd[vd._nrecs:] = (          # append 2 records
17 |     ('A4321', 'axe', 5, 1.5, 25),        # first record
18 |     ('C3214', 'cup', 100, 0.1, 3.25)     # second record
19 |     )
20 | vd.detach()                 # "close" the vdata
21 |
22 | vs.end()                    # terminate the vdata interface
23 | f.close()                   # close the HDF file
24 |
--------------------------------------------------------------------------------
/examples/inventory/inventory_1-3.py:
--------------------------------------------------------------------------------
1 | from pyhdf.HDF import *
2 | from pyhdf.VS import *
3 |
4 | f = HDF('inventory.hdf',    # Open file 'inventory.hdf' in write mode
5 |         HC.WRITE|HC.CREATE) # creating it if it does not exist
6 | vs = f.vstart()             # init vdata interface
7 | vd = vs.attach('INVENTORY', 1) # attach vdata 'INVENTORY' in write mode
8 |
9 | # Update the `status' vdata attribute. The attribute length must not
10 | # change.
We call the attribute info() method, which returns a list where 11 | # number of values (eg string length) is stored at index 2. 12 | # We then assign a left justified string of exactly that length. 13 | len = vd.attr('status').info()[2] 14 | vd.status = '%-*s' % (len, 'phase 3 done') 15 | 16 | # Update record at index 1 (second record) 17 | vd[1] = ('Z4367', 'surprise', 10, 3.1, 44.5) 18 | # Update record at index 4, and those after 19 | vd[4:] = ( 20 | ('QR231', 'toy', 12, 2.5, 45), 21 | ('R3389', 'robot', 3, 45, 2000), 22 | ('R3390', 'robot2', 8, 55, 2050) 23 | ) 24 | vd.detach() # "close" the vdata 25 | vs.end() # terminate the vdata interface 26 | f.close() # close the HDF file 27 | -------------------------------------------------------------------------------- /examples/inventory/inventory_1-4.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | 3 | from pyhdf.HDF import * 4 | from pyhdf.VS import * 5 | 6 | f = HDF('inventory.hdf') # open 'inventory.hdf' in read mode 7 | vs = f.vstart() # init vdata interface 8 | vd = vs.attach('INVENTORY') # attach vdata 'INVENTORY' in read mode 9 | 10 | # Display some vdata attributes 11 | print("status:", vd.status) 12 | print("vdata: ", vd._name) # predefined attribute: vdata name 13 | print("nrecs: ", vd._nrecs) # predefined attribute: num records 14 | 15 | # Display value of attribute 'unit' for all fields on which 16 | # this attribute is set 17 | print("units: ", end=' ') 18 | for fieldName in vd._fields: # loop over all field names 19 | try: 20 | # instantiate field and obtain value of attribute 'unit' 21 | v = vd.field(fieldName).unit 22 | print("%s: %s" % (fieldName, v), end=' ') 23 | except: # no 'unit' attribute: ignore 24 | pass 25 | print("") 26 | print("") 27 | 28 | # Display table header. 29 | header = "%-7s %-12s %3s %4s %8s" % tuple(vd._fields) 30 | print("-" * len(header)) 31 | print(header) 32 | print("-" * len(header)) 33 | 34 | # Loop over the vdata records, displaying each record as a table row. 35 | # Current record position is 0 after attaching the vdata. 
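# (vd.read() returns a list of records; with no argument it reads a
# single record and advances the current position. Reading past the
# last record raises HDF4Error, which is what ends the loop below.)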
36 | while True: 37 | try: 38 | rec = vd.read() # read next record 39 | # equivalent to: 40 | # rec = vd[vd.tell()] 41 | print("%-7s %-12s %3d %4.1f %8.2f" % tuple(rec[0])) 42 | except HDF4Error: # end of vdata reached 43 | break 44 | 45 | vd.detach() # "close" the vdata 46 | vs.end() # terminate the vdata interface 47 | f.close() # close the HDF file 48 | -------------------------------------------------------------------------------- /examples/inventory/inventory_1-5.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | 3 | from pyhdf.HDF import * 4 | from pyhdf.VS import * 5 | 6 | f = HDF('inventory.hdf') # open 'inventory.hdf' in read mode 7 | vs = f.vstart() # init vdata interface 8 | vd = vs.attach('INVENTORY') # attach vdata 'INVENTORY' in read mode 9 | 10 | # Display some vdata attributes 11 | print("status:", vd.status) 12 | print("vdata: ", vd._name) # predefined attribute: vdata name 13 | print("nrecs: ", vd._nrecs) # predefined attribute: num records 14 | 15 | # Display value of attribute 'unit' for all fields on which 16 | # this attribute is set 17 | print("units: ", end=' ') 18 | for fieldName in vd._fields: # loop over all field names 19 | try: 20 | # instantiate field and obtain value of attribute 'unit' 21 | v = vd.field(fieldName).unit 22 | print("%s: %s" % (fieldName, v), end=' ') 23 | except: # no 'unit' attribute: ignore 24 | pass 25 | print("") 26 | print("") 27 | 28 | # Display table header. 29 | header = "%-7s %-12s %3s %4s %8s" % tuple(vd._fields) 30 | print("-" * len(header)) 31 | print(header) 32 | print("-" * len(header)) 33 | 34 | # Read all records at once, and loop over the sequence. 35 | for rec in vd[:]: 36 | print("%-7s %-12s %3d %4.1f %8.2f" % tuple(rec)) 37 | 38 | vd.detach() # "close" the vdata 39 | vs.end() # terminate the vdata interface 40 | f.close() # close the HDF file 41 | -------------------------------------------------------------------------------- /examples/runall.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | This script runs all the examples in examples directory. 4 | It should be run from the top-level directory (containing pyproject.toml). 5 | """ 6 | 7 | import glob 8 | import os 9 | import sys 10 | import subprocess 11 | 12 | try: 13 | from subprocess import DEVNULL # py3k 14 | except ImportError: 15 | DEVNULL = open(os.devnull, 'wb') 16 | 17 | class ExampleTester: 18 | failed = [] 19 | passed = 0 20 | rootdir = os.getcwd() 21 | 22 | def __init__(self): 23 | pass 24 | 25 | def run(self, d, cmd, **kwargs): 26 | """ 27 | Run command cmd in directory d. 28 | """ 29 | print("running %s in %s ..." 
              % (cmd, d))
30 |         os.chdir(os.path.join(self.rootdir, d))
31 |         r = subprocess.call([sys.executable] + cmd, **kwargs)
32 |         if r != 0:
33 |             self.failed.append((d, cmd, r))
34 |         else:
35 |             self.passed += 1
36 |         os.chdir(self.rootdir)
37 |         return r
38 |
39 |     def report(self):
40 |         print(self.passed, "example(s) ran successfully")
41 |         if len(self.failed) > 0:
42 |             print("failed examples:")
43 |             for ex in self.failed:
44 |                 print("  %s in %s failed with exit code %s" % (ex[1], ex[0], ex[2]))
45 |             sys.exit(2)
46 |         sys.exit(0)
47 |
48 | def main():
49 |     t = ExampleTester()
50 |     t.run("examples/compress", ["test-compress.py"], stdout=DEVNULL)
51 |     t.run("examples/vgroup", ["vgwrite.py"])
52 |     t.run("examples/vgroup", ["vgread.py", "inventory.hdf"], stdout=DEVNULL)
53 |     t.run("examples/inventory", ["inventory_1-1.py"])
54 |     t.run("examples/inventory", ["inventory_1-2.py"])
55 |     t.run("examples/inventory", ["inventory_1-3.py"])
56 |     t.run("examples/inventory", ["inventory_1-4.py"], stdout=DEVNULL)
57 |     t.run("examples/inventory", ["inventory_1-5.py"], stdout=DEVNULL)
58 |     t.run("examples/txttohdf", ["txttohdf.py"])
59 |
60 |     # These HDF files were generated by the above examples
61 |     for g in sorted(glob.glob("examples/*/*.hdf")):
62 |         hdffile = os.path.join("../..", g)
63 |         t.run("examples/hdfstruct", ["hdfstruct.py", hdffile], stdout=DEVNULL)
64 |
65 |     t.report()
66 |
67 | if __name__ == '__main__':
68 |     main()
69 |
--------------------------------------------------------------------------------
/examples/txttohdf/README_txttohdf.txt:
--------------------------------------------------------------------------------
1 | This example demonstrates creation of an HDF file from data in text files. Specifically, the files temp.txt and depth.txt each store a table of scientific data (the first line of each text file gives the table's dimensions). The HDF file table.hdf is created (being deleted first if it already existed), filled with the information in temp.txt in the SD (Scientific Dataset) format, and closed. It is then reopened and the information in depth.txt is added (without overwriting the temperature data).
2 |
3 | TODO: Include more code which accesses, in table.hdf, the information in temp.txt and depth.txt.
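A minimal sketch of that reopening step (assuming table.hdf was produced by txttohdf.py below; the dataset and attribute names are the ones it creates):

    from pyhdf.SD import SD, SDC

    d = SD('table.hdf', SDC.READ)        # reopen in read-only mode
    for name in ('temperature', 'depth'):
        v = d.select(name)               # access dataset by name
        print(name, v.title, v.units, v.valid_range)
        print(v[:])                      # whole matrix as a numpy array
        v.endaccess()
    d.end()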
4 | -------------------------------------------------------------------------------- /examples/txttohdf/depth.txt: -------------------------------------------------------------------------------- 1 | 9 4 2 | 100 101 102 103 3 | 200 201 202 203 4 | 300 301 302 303 5 | 400 401 402 403 6 | 500 501 502 503 7 | 600 601 602 603 8 | 700 701 702 703 9 | 800 801 802 803 10 | 900 901 902 903 11 | -------------------------------------------------------------------------------- /examples/txttohdf/temp.txt: -------------------------------------------------------------------------------- 1 | 9 4 2 | 100 101 102 103 3 | 200 201 202 203 4 | 300 301 302 303 5 | 400 401 402 403 6 | 500 501 502 503 7 | 600 601 602 603 8 | 700 701 702 703 9 | 800 801 802 803 10 | 900 901 902 903 11 | -------------------------------------------------------------------------------- /examples/txttohdf/txttohdf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from __future__ import print_function 4 | from pyhdf.six.moves import map 5 | 6 | from pyhdf.SD import SD, SDC, HDF4Error 7 | 8 | import os 9 | 10 | def txtToHDF(txtFile, hdfFile, varName, attr): 11 | """Inputs: 12 | txtFile = name of .txt file (passed as string) 13 | hdfFile = name of .hdf file (passed as string) 14 | varName = name of dataset to be added (passed as string) 15 | attr = dataset attributes (passed as dictionary) 16 | txtFile indicates a dataset, and varName and attr give information 17 | about it. txtToHDF puts this information into an SD (Scientific 18 | Dataset) object and stores that object as in hdfFile, creating 19 | hdfFile if need be, otherwise updating it. 20 | """ 21 | 22 | try: # Catch pyhdf errors 23 | # Open HDF file in update mode, creating it if non existent. 24 | d = SD(hdfFile, SDC.WRITE|SDC.CREATE) 25 | # Open text file and get matrix dimensions on first line. 26 | txt = open(txtFile) 27 | ni, nj = list(map(int, txt.readline().split())) 28 | # Define HDF dataset of type SDC.FLOAT32 with those dimensions. 29 | v = d.create(varName, SDC.FLOAT32, (ni, nj)) 30 | # Assign attributes passed as argument inside dict `attr'. 31 | for attrName in list(attr.keys()): 32 | setattr(v, attrName, attr[attrName]) 33 | # Load variable with lines of data. Compute min and max 34 | # over the whole matrix. 35 | i = 0 36 | while i < ni: 37 | elems = list(map(float, txt.readline().split())) 38 | v[i] = elems 39 | minE = min(elems) 40 | maxE = max(elems) 41 | if i: 42 | minVal = min(minVal, minE) 43 | maxVal = max(maxVal, maxE) 44 | else: 45 | minVal = minE 46 | maxVal = maxE 47 | i += 1 48 | # Set variable min and max attributes. 49 | v.minVal = minVal 50 | v.maxVal = maxVal 51 | # Close dataset and file objects (not really necessary, 52 | # since closing is automatic when objects go out of scope. 53 | v.endaccess() 54 | d.end() 55 | txt.close() 56 | except HDF4Error as msg: 57 | print("HDF4Error:", msg) 58 | 59 | if __name__ == '__main__': 60 | hdfFile = 'table.hdf' 61 | try: # Delete if exists. 62 | os.remove(hdfFile) 63 | except: 64 | pass 65 | 66 | # Transfer contents of file 'temp.txt' to dataset 'temperature' 67 | # and assign the attributes 'title', 'units' and 'valid_range'. 
68 | txtToHDF('temp.txt', hdfFile, 'temperature', 69 | {'title' : 'temperature matrix', 70 | 'units' : 'celsius', 71 | 'valid_range': (-2.8,27.0)}) 72 | print("Temperature data successfully written to HDF file") 73 | 74 | # Transfer contents of file 'depth.txt' to dataset 'depth' 75 | # and assign the same attributes as above. 76 | txtToHDF('depth.txt', hdfFile, 'depth', 77 | {'title' : 'depth matrix', 78 | 'units' : 'meters', 79 | 'valid_range': (0, 500.0)}) 80 | print("Depth data successfully written to HDF file") 81 | 82 | # TODO: open up hdfFile and access the information that 83 | # was in temp.txt and depth.txt 84 | -------------------------------------------------------------------------------- /examples/vgroup/README_vgroup.txt: -------------------------------------------------------------------------------- 1 | These two samples demonstrate reading and writing Vgroup-type hdf files. 2 | 3 | TODO: Better documentation 4 | -------------------------------------------------------------------------------- /examples/vgroup/vgread.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | 3 | from pyhdf.HDF import * 4 | from pyhdf.V import * 5 | from pyhdf.VS import * 6 | from pyhdf.SD import * 7 | 8 | import sys 9 | 10 | def describevg(refnum): 11 | 12 | # Describe the vgroup with the given refnum. 13 | 14 | # Open vgroup in read mode. 15 | vg = v.attach(refnum) 16 | print("----------------") 17 | print("name:", vg._name, "class:",vg._class, "tag,ref:",vg._tag, vg._refnum) 18 | 19 | # Show the number of members of each main object type. 20 | print("# members: ", vg._nmembers,\ 21 | "# datasets:", vg.nrefs(HC.DFTAG_NDG),\ 22 | "# vdatas: ", vg.nrefs(HC.DFTAG_VH),\ 23 | "# vgroups: ", vg.nrefs(HC.DFTAG_VG)) 24 | 25 | # Read the contents of the vgroup. 26 | members = vg.tagrefs() 27 | 28 | # Display info about each member. 29 | index = -1 30 | for tag, ref in members: 31 | index += 1 32 | print("member index", index) 33 | # Vdata tag 34 | if tag == HC.DFTAG_VH: 35 | vd = vs.attach(ref) 36 | nrecs, intmode, fields, size, name = vd.inquire() 37 | print(" vdata:",name, "tag,ref:",tag, ref) 38 | print(" fields:",fields) 39 | print(" nrecs:",nrecs) 40 | vd.detach() 41 | 42 | # SDS tag 43 | elif tag == HC.DFTAG_NDG: 44 | sds = sd.select(sd.reftoindex(ref)) 45 | name, rank, dims, type, nattrs = sds.info() 46 | print(" dataset:",name, "tag,ref:", tag, ref) 47 | print(" dims:",dims) 48 | print(" type:",type) 49 | sds.endaccess() 50 | 51 | # VS tag 52 | elif tag == HC.DFTAG_VG: 53 | vg0 = v.attach(ref) 54 | print(" vgroup:", vg0._name, "tag,ref:", tag, ref) 55 | vg0.detach() 56 | 57 | # Unhandled tag 58 | else: 59 | print("unhandled tag,ref",tag,ref) 60 | 61 | # Close vgroup 62 | vg.detach() 63 | 64 | # Open HDF file in readonly mode. 65 | filename = sys.argv[1] 66 | hdf = HDF(filename) 67 | 68 | # Initialize the SD, V and VS interfaces on the file. 69 | sd = SD(filename) 70 | vs = hdf.vstart() 71 | v = hdf.vgstart() 72 | 73 | # Scan all vgroups in the file. 74 | ref = -1 75 | while True: 76 | try: 77 | ref = v.getid(ref) 78 | except HDF4Error as msg: # no more vgroup 79 | break 80 | describevg(ref) 81 | 82 | # Terminate V, VS and SD interfaces. 83 | v.end() 84 | vs.end() 85 | sd.end() 86 | 87 | # Close HDF file. 
88 | hdf.close() 89 | -------------------------------------------------------------------------------- /examples/vgroup/vgwrite.py: -------------------------------------------------------------------------------- 1 | from pyhdf.HDF import * 2 | from pyhdf.V import * 3 | from pyhdf.VS import * 4 | from pyhdf.SD import * 5 | 6 | def vdatacreate(vs, name): 7 | 8 | # Create vdata and define its structure 9 | vd = vs.create(name, 10 | (('partid',HC.CHAR8, 5), # 5 char string 11 | ('description',HC.CHAR8, 10), # 10 char string field 12 | ('qty',HC.INT16, 1), # 1 16 bit int field 13 | ('wght',HC.FLOAT32, 1), # 1 32 bit float 14 | ('price',HC.FLOAT32,1) # 1 32 bit float 15 | )) 16 | 17 | # Store records 18 | vd.write((('Q1234', 'bolt',12, 0.01, 0.05), # record 1 19 | ('B5432', 'brush', 10, 0.4, 4.25), # record 2 20 | ('S7613', 'scissor', 2, 0.2, 3.75) # record 3 21 | )) 22 | # "close" vdata 23 | vd.detach() 24 | 25 | def sdscreate(sd, name): 26 | 27 | # Create a simple 3x3 float array. 28 | sds = sd.create(name, SDC.FLOAT32, (3,3)) 29 | # Initialize array 30 | sds[:] = ((0,1,2),(3,4,5),(6,7,8)) 31 | # "close" dataset. 32 | sds.endaccess() 33 | 34 | # Create HDF file 35 | filename = 'inventory.hdf' 36 | hdf = HDF(filename, HC.WRITE|HC.CREATE) 37 | 38 | # Initialize the SD, V and VS interfaces on the file. 39 | sd = SD(filename, SDC.WRITE) # SD interface 40 | vs = hdf.vstart() # vdata interface 41 | v = hdf.vgstart() # vgroup interface 42 | 43 | # Create vdata named 'INVENTORY'. 44 | vdatacreate(vs, 'INVENTORY') 45 | # Create dataset named "ARR_3x3" 46 | sdscreate(sd, 'ARR_3x3') 47 | 48 | # Attach the vdata and the dataset. 49 | vd = vs.attach('INVENTORY') 50 | sds = sd.select('ARR_3x3') 51 | 52 | # Create vgroup named 'TOTAL'. 53 | vg = v.create('TOTAL') 54 | 55 | # Add vdata to the vgroup 56 | vg.insert(vd) 57 | # We could also have written this: 58 | # vgroup.add(vd._tag, vd._refnum) 59 | # or this: 60 | # vgroup.add(HC.DFTAG_VH, vd._refnum) 61 | 62 | # Add dataset to the vgroup 63 | vg.add(HC.DFTAG_NDG, sds.ref()) 64 | 65 | # Close vgroup, vdata and dataset. 66 | vg.detach() # vgroup 67 | vd.detach() # vdata 68 | sds.endaccess() # dataset 69 | 70 | # Terminate V, VS and SD interfaces. 71 | v.end() # V interface 72 | vs.end() # VS interface 73 | sd.end() # SD interface 74 | 75 | # Close HDF file. 76 | hdf.close() 77 | -------------------------------------------------------------------------------- /pyhdf/HC.py: -------------------------------------------------------------------------------- 1 | # $Id: HC.py,v 1.2 2005-07-14 01:36:41 gosselin_a Exp $ 2 | # $Log: not supported by cvs2svn $ 3 | # Revision 1.1 2004/08/02 15:36:04 gosselin 4 | # Initial revision 5 | # 6 | 7 | from . import hdfext as _C 8 | 9 | class HC(object): 10 | """The HC class holds constants defining opening modes and data types. 
11 |
12 |     File opening modes (flags ORed together)
13 |
14 |         CREATE     4   create file if it does not exist
15 |         READ       1   read-only mode
16 |         TRUNC    256   truncate if it exists
17 |         WRITE      2   read-write mode
18 |
19 |     Data types
20 |
21 |         CHAR       4   8-bit char
22 |         CHAR8      4   8-bit char
23 |         UCHAR      3   unsigned 8-bit integer (0 to 255)
24 |         UCHAR8     3   unsigned 8-bit integer (0 to 255)
25 |         INT8      20   signed 8-bit integer (-128 to 127)
26 |         UINT8     21   unsigned 8-bit integer (0 to 255)
27 |         INT16     22   signed 16-bit integer
28 |         UINT16    23   unsigned 16-bit integer
29 |         INT32     24   signed 32-bit integer
30 |         UINT32    25   unsigned 32-bit integer
31 |         FLOAT32    5   32-bit floating point
32 |         FLOAT64    6   64-bit floating point
33 |
34 |     Tags
35 |
36 |         DFTAG_NDG   720   dataset
37 |         DFTAG_VH   1962   vdata
38 |         DFTAG_VG   1965   vgroup
39 |
40 |
41 |
42 |     """
43 |
44 |     CREATE = _C.DFACC_CREATE
45 |     READ = _C.DFACC_READ
46 |     TRUNC = 0x100   # specific to pyhdf
47 |     WRITE = _C.DFACC_WRITE
48 |
49 |     CHAR = _C.DFNT_CHAR8
50 |     CHAR8 = _C.DFNT_CHAR8
51 |     UCHAR = _C.DFNT_UCHAR8
52 |     UCHAR8 = _C.DFNT_UCHAR8
53 |     INT8 = _C.DFNT_INT8
54 |     UINT8 = _C.DFNT_UINT8
55 |     INT16 = _C.DFNT_INT16
56 |     UINT16 = _C.DFNT_UINT16
57 |     INT32 = _C.DFNT_INT32
58 |     UINT32 = _C.DFNT_UINT32
59 |     FLOAT32 = _C.DFNT_FLOAT32
60 |     FLOAT64 = _C.DFNT_FLOAT64
61 |
62 |     FULL_INTERLACE = 0
63 |     NO_INTERLACE = 1
64 |
65 |
66 |     # NOTE:
67 |     # INT64 and UINT64 are not yet supported by pyhdf
68 |
69 |     DFTAG_NDG = _C.DFTAG_NDG
70 |     DFTAG_VH = _C.DFTAG_VH
71 |     DFTAG_VG = _C.DFTAG_VG
72 |
--------------------------------------------------------------------------------
/pyhdf/HDF.py:
--------------------------------------------------------------------------------
1 | # $Id: HDF.py,v 1.3 2005-07-14 01:36:41 gosselin_a Exp $
2 | # $Log: not supported by cvs2svn $
3 | # Revision 1.2  2004/08/02 15:36:04  gosselin
4 | # pyhdf-0.7-1
5 | #
6 | # Revision 1.1  2004/08/02 15:22:59  gosselin
7 | # Initial revision
8 | #
9 | # Author: Andre Gosselin
10 | #         Maurice-Lamontagne Institute
11 | #         gosselina@dfo-mpo.gc.ca
12 |
13 | """
14 | Basic API (:mod:`pyhdf.HDF`)
15 | ============================
16 |
17 | A module of the pyhdf package implementing the basic API of the
18 | NCSA HDF4 library.
19 |
20 | Introduction
21 | ------------
22 | The role of the HDF module is to provide support to other modules of the
23 | pyhdf package. It defines constants specifying file opening modes and
24 | various data types, methods for accessing files, plus a few utility
25 | functions to query library version and check if a file is an HDF one.
26 |
27 | It should be noted that, among the modules of the pyhdf package, SD is
28 | special in the sense that it is self-contained and does not need support
29 | from the HDF module. For example, SD provides its own file opening and
30 | closing methods, whereas VS uses methods of the HDF.HDF class for that.
31 |
32 | Functions and classes summary
33 | -----------------------------
34 | The HDF module provides the following classes.
35 |
36 | HC
37 |     The HC class holds constants defining opening modes and
38 |     various data types.
39 |
40 | HDF
41 |     The HDF class provides methods to open and close an HDF file,
42 |     and return instances of the major HDF APIs (except SD).
43 |
44 |     To instantiate an HDF class, call the HDF() constructor.
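    For example (a sketch only; 'inventory.hdf' stands in for any HDF file):

        from pyhdf.HDF import HDF, HC
        f = HDF('inventory.hdf', HC.WRITE|HC.CREATE)  # open, creating if needed
        vs = f.vstart()     # get a VS (vdata) interface instance
        # ... work with vdatas ...
        vs.end()            # terminate the VS interface
        f.close()           # close the file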
45 | 46 | methods: 47 | constructors: 48 | HDF() open an HDF file, creating the file if necessary, 49 | and return an HDF instance 50 | vstart() initialize the VS (Vdata) API over the HDF file and 51 | return a VS instance 52 | vgstart() initialize the V (Vgroup) interface over the HDF file 53 | and return a V instance. 54 | 55 | 56 | closing file 57 | close() close the HDF file 58 | 59 | inquiry 60 | getfileversion() return info about the version of the HDF file 61 | 62 | The HDF module also offers the following functions. 63 | 64 | inquiry 65 | getlibversion() return info about the version of the library 66 | ishdf() determine whether a file is an HDF file 67 | 68 | 69 | """ 70 | 71 | import os, sys, types 72 | 73 | from . import hdfext as _C 74 | from .six.moves import xrange 75 | from .HC import HC 76 | 77 | # NOTE: The vstart() and vgstart() modules need to access the 78 | # VS and V modules, resp. We could simply import those 79 | # two modules, but then they would always be loaded and this 80 | # may not be what the user wants. Instead of forcing the 81 | # systematic import, we import the package `pyhdf', 82 | # and access the needed constructors by writing 83 | # 'pyhdf.VS.VS()' and 'pyhdf.V.V()'. Until the VS or 84 | # V modules are imported, those statements will give an 85 | # error (undefined attribute). Once the user has imported 86 | # the modules, the error will disappear. 87 | 88 | import pyhdf 89 | 90 | from .error import HDF4Error, _checkErr 91 | 92 | # List of names we want to be imported by an "from pyhdf.HDF import *" 93 | # statement 94 | 95 | __all__ = ['HDF', 'HDF4Error', 96 | 'HC', 97 | 'getlibversion', 'ishdf'] 98 | 99 | def getlibversion(): 100 | """Get the library version info. 101 | 102 | Args: 103 | no argument 104 | Returns: 105 | 4-element tuple with the following components: 106 | -major version number (int) 107 | -minor version number (int) 108 | -complete library version number (int) 109 | -additional information (string) 110 | 111 | C library equivalent : Hgetlibversion 112 | """ 113 | 114 | status, major_v, minor_v, release, info = _C.Hgetlibversion() 115 | _checkErr('getlibversion', status, "cannot get lib version") 116 | return major_v, minor_v, release, info 117 | 118 | def ishdf(filename): 119 | """Determine whether a file is an HDF file. 120 | 121 | Args: 122 | filename name of the file to check 123 | Returns: 124 | 1 if the file is an HDF file, 0 otherwise 125 | 126 | C library equivalent : Hishdf 127 | """ 128 | 129 | return _C.Hishdf(filename) 130 | 131 | 132 | class HDF(object): 133 | """The HDF class encapsulates the basic HDF functions. 134 | Its main use is to open and close an HDF file, and return 135 | instances of the major HDF APIs (except for SD). 136 | To instantiate an HDF class, call the HDF() constructor. """ 137 | 138 | def __init__(self, path, mode=HC.READ, nblocks=0): 139 | """HDF constructor: open an HDF file, creating the file if 140 | necessary. 
141 | 142 | Args: 143 | path name of the HDF file to open 144 | mode file opening mode; this mode is a set of binary flags 145 | which can be ored together 146 | 147 | HC.CREATE combined with HC.WRITE to create file 148 | if it does not exist 149 | HC.READ open file in read-only access (default) 150 | HC.TRUNC if combined with HC.WRITE, overwrite 151 | file if it already exists 152 | HC.WRITE open file in read-write mode; if file 153 | exists it is updated, unless HC.TRUNC is 154 | set, in which case it is erased and 155 | recreated; if file does not exist, an 156 | error is raised unless HC.CREATE is set, 157 | in which case the file is created 158 | 159 | Note an important difference in the way CREATE is 160 | handled by the HDF C library and the pyhdf package. 161 | In the C library, CREATE indicates that a new file should 162 | always be created, overwriting an existing one if 163 | any. For pyhdf, CREATE indicates a new file should be 164 | created only if it does not exist, and the overwriting 165 | of an already existing file must be explicitly asked 166 | for by setting the TRUNC flag. 167 | 168 | Those differences were introduced so as to harmonize 169 | the way files are opened in the pycdf and pyhdf 170 | packages. Also, this solves a limitation in the 171 | hdf (and netCDF) library, where there is no easy way 172 | to implement the frequent requirement that an existent 173 | file be opened in read-write mode, or created 174 | if it does not exist. 175 | 176 | nblocks number of data descriptor blocks in a block with which 177 | to create the file; the parameter is ignored if the file 178 | is not created; 0 asks to use the default 179 | 180 | Returns: 181 | an HDF instance 182 | 183 | C library equivalent : Hopen 184 | """ 185 | # Private attributes: 186 | # _id: file id (NOTE: not compatile with the SD file id) 187 | 188 | # See if file exists. 189 | exists = os.path.exists(path) 190 | 191 | if HC.WRITE & mode: 192 | if exists: 193 | if HC.TRUNC & mode: 194 | try: 195 | os.remove(path) 196 | except Exception as msg: 197 | raise HDF4Error(msg) 198 | mode = HC.CREATE 199 | else: 200 | mode = HC.WRITE 201 | else: 202 | if HC.CREATE & mode: 203 | mode = HC.CREATE 204 | else: 205 | raise HDF4Error("HDF: no such file") 206 | else: 207 | if exists: 208 | if mode & HC.READ: 209 | mode = HC.READ # clean mode 210 | else: 211 | raise HDF4Error("HDF: invalid mode") 212 | else: 213 | raise HDF4Error("HDF: no such file") 214 | 215 | id = _C.Hopen(path, mode, nblocks) 216 | _checkErr('HDF', id, "cannot open %s" % path) 217 | self._id = id 218 | 219 | 220 | def __del__(self): 221 | """Delete the instance, first calling the end() method 222 | if not already done. """ 223 | 224 | try: 225 | if self._id: 226 | self.close() 227 | except: 228 | pass 229 | 230 | def close(self): 231 | """Close the HDF file. 232 | 233 | Args: 234 | no argument 235 | Returns: 236 | None 237 | 238 | C library equivalent : Hclose 239 | """ 240 | 241 | _checkErr('close', _C.Hclose(self._id), "cannot close file") 242 | self._id = None 243 | 244 | def getfileversion(self): 245 | """Get file version info. 
246 | 247 | Args: 248 | no argument 249 | Returns: 250 | 4-element tuple with the following components: 251 | -major version number (int) 252 | -minor version number (int) 253 | -complete library version number (int) 254 | -additional information (string) 255 | 256 | C library equivalent : Hgetlibversion 257 | """ 258 | 259 | status, major_v, minor_v, release, info = _C.Hgetfileversion(self._id) 260 | _checkErr('getfileversion', status, "cannot get file version") 261 | return major_v, minor_v, release, info 262 | 263 | def vstart(self): 264 | """Initialize the VS API over the file and return a VS instance. 265 | 266 | Args: 267 | no argument 268 | Returns: 269 | VS instance 270 | 271 | C library equivalent : Vstart (in fact: Vinitialize) 272 | """ 273 | # See note at top of file. 274 | return pyhdf.VS.VS(self) 275 | 276 | def vgstart(self): 277 | """Initialize the V API over the file and return a V instance. 278 | 279 | Args: 280 | no argument 281 | Returns: 282 | V instance 283 | 284 | C library equivalent : Vstart (in fact: Vinitialize) 285 | """ 286 | # See note at top of file. 287 | return pyhdf.V.V(self) 288 | 289 | 290 | 291 | ########################### 292 | # Support functions 293 | ########################### 294 | 295 | 296 | def _array_to_ret(buf, nValues): 297 | 298 | # Convert array 'buf' to a scalar or a list. 299 | 300 | if nValues == 1: 301 | ret = buf[0] 302 | else: 303 | ret = [] 304 | for i in xrange(nValues): 305 | ret.append(buf[i]) 306 | return ret 307 | 308 | def _array_to_str(buf, nValues): 309 | 310 | # Convert array of bytes 'buf' to a string. 311 | 312 | # Return empty string if there is no value. 313 | if nValues == 0: 314 | return "" 315 | # When there is just one value, _array_to_ret returns a scalar 316 | # over which we cannot iterate. 317 | if nValues == 1: 318 | chrs = [chr(buf[0])] 319 | else: 320 | chrs = [chr(b) for b in _array_to_ret(buf, nValues)] 321 | # Strip NULL at end 322 | if chrs[-1] == '\0': 323 | del chrs[-1] 324 | return ''.join(chrs) 325 | -------------------------------------------------------------------------------- /pyhdf/Makefile: -------------------------------------------------------------------------------- 1 | 2 | .PHONY: build 3 | build: hdfext.py hdfext_wrap.c 4 | 5 | hdfext.py: hdfext.i 6 | swig -python hdfext.i 7 | 8 | hdfext_wrap.c: hdfext.py 9 | # Already generated by swig. Do nothing. 10 | 11 | .PHONY: clean 12 | clean: 13 | rm -rf __pycache__ *.so *.pyc 14 | -------------------------------------------------------------------------------- /pyhdf/__init__.py: -------------------------------------------------------------------------------- 1 | # $Id: __init__.py,v 1.3 2004-08-02 15:22:59 gosselin Exp $ 2 | # $Log : $ 3 | -------------------------------------------------------------------------------- /pyhdf/error.py: -------------------------------------------------------------------------------- 1 | # $Id: error.py,v 1.1 2004-08-02 15:00:34 gosselin Exp $ 2 | # $Log: not supported by cvs2svn $ 3 | 4 | from . import hdfext as _C 5 | 6 | # ################# 7 | # Error processing 8 | # ################# 9 | 10 | class HDF4Error(Exception): 11 | """ An error from inside the HDF4 library. 
12 | """ 13 | 14 | def _checkErr(procName, val, msg=""): 15 | 16 | if val is None or (not isinstance(val, str) and val < 0): 17 | #_C._HEprint(); 18 | errCode = _C.HEvalue(1) 19 | if errCode != 0: 20 | err = "%s (%d): %s" % (procName, errCode, _C.HEstring(errCode)) 21 | else: 22 | err = "%s : %s" % (procName, msg) 23 | raise HDF4Error(err) 24 | -------------------------------------------------------------------------------- /pyhdf/hdfext.i: -------------------------------------------------------------------------------- 1 | /* 2 | * $Id: hdfext.i,v 1.7 2008-06-30 02:41:44 gosselin_a Exp $ 3 | * $Log: not supported by cvs2svn $ 4 | * Revision 1.6 2005/07/14 01:36:41 gosselin_a 5 | * pyhdf-0.7-3 6 | * Ported to HDF4.2r1. 7 | * Support for SZIP compression on SDS datasets. 8 | * All classes are now 'new-style' classes, deriving from 'object'. 9 | * Update documentation. 10 | * 11 | * Revision 1.5 2004/11/02 21:33:39 gosselin 12 | * *** empty log message *** 13 | * 14 | * Revision 1.3 2004/08/02 15:36:04 gosselin 15 | * pyhdf-0.7-1 16 | * 17 | * Revision 1.2 2004/08/02 15:22:59 gosselin 18 | * pyhdf-0.6-1 19 | * 20 | * Revision 1.1 2004/08/02 15:00:34 gosselin 21 | * Initial revision 22 | * 23 | */ 24 | 25 | %module hdfext 26 | 27 | 28 | %include "typemaps.i" 29 | %include "cstring.i" 30 | %include "carrays.i" 31 | 32 | /* ********************************************************************* */ 33 | /* HDF type info codes */ 34 | /* ******************* */ 35 | 36 | #define DFNT_NONE 0 /* indicates that number type not set */ 37 | #define DFNT_QUERY 0 /* use this code to find the current type */ 38 | #define DFNT_VERSION 1 /* current version of NT info */ 39 | 40 | #define DFNT_FLOAT32 5 41 | #define DFNT_FLOAT 5 /* For backward compat; don't use */ 42 | #define DFNT_FLOAT64 6 43 | #define DFNT_DOUBLE 6 /* For backward compat; don't use */ 44 | #define DFNT_FLOAT128 7 /* No current plans for support */ 45 | 46 | #define DFNT_INT8 20 47 | #define DFNT_UINT8 21 48 | 49 | #define DFNT_INT16 22 50 | #define DFNT_UINT16 23 51 | #define DFNT_INT32 24 52 | #define DFNT_UINT32 25 53 | #define DFNT_INT64 26 54 | #define DFNT_UINT64 27 55 | #define DFNT_INT128 28 /* No current plans for support */ 56 | #define DFNT_UINT128 30 /* No current plans for support */ 57 | 58 | #define DFNT_UCHAR8 3 /* 3 chosen for backward compatibility */ 59 | #define DFNT_UCHAR 3 /* uchar=uchar8 for backward compatibility */ 60 | #define DFNT_CHAR8 4 /* 4 chosen for backward compatibility */ 61 | #define DFNT_CHAR 4 /* uchar=uchar8 for backward compatibility */ 62 | #define DFNT_CHAR16 42 /* No current plans for support */ 63 | #define DFNT_UCHAR16 43 /* No current plans for support */ 64 | 65 | #define SD_UNLIMITED 0 66 | #define SD_FILL 0 67 | #define SD_NOFILL 256 68 | 69 | #define CHAR_BUFFER_SIZE 4096 70 | #define ATTRIB_BUFFER_SIZE 128 71 | 72 | /* ********************************************************************* */ 73 | /* internal file access codes */ 74 | 75 | #define DFACC_READ 1 76 | #define DFACC_WRITE 2 77 | #define DFACC_CREATE 4 78 | #define DFACC_ALL 7 79 | 80 | #define DFACC_RDONLY 1 81 | #define DFACC_RDWR 3 82 | #define DFACC_CLOBBER 4 83 | 84 | /* New file access codes (for Hstartaccess only, currently) */ 85 | #define DFACC_BUFFER 8 /* buffer the access to this AID */ 86 | #define DFACC_APPENDABLE 0x10 /* make this AID appendable */ 87 | #define DFACC_CURRENT 0x20 /* start looking for a tag/ref from the current */ 88 | /* location in the DD list (useful for continued */ 89 | /* searching ala 
findfirst/findnext) */ 90 | 91 | /* External Element File access mode */ 92 | /* #define DFACC_CREATE 4 is for creating new external element file */ 93 | #define DFACC_OLD 1 /* for accessing existing ext. element file */ 94 | 95 | /* Compression codes */ 96 | #define COMP_CODE_NONE 0 97 | #define COMP_CODE_RLE 1 98 | #define COMP_CODE_NBIT 2 99 | #define COMP_CODE_SKPHUFF 3 100 | #define COMP_CODE_DEFLATE 4 101 | #define COMP_CODE_SZIP 5 102 | 103 | /* Tags */ 104 | #define DFTAG_NDG 720 105 | #define DFTAG_VH 1962 106 | #define DFTAG_VG 1965 107 | 108 | /* limits */ 109 | %constant int H4_MAX_VAR_DIMS = H4_MAX_VAR_DIMS; 110 | 111 | %array_class(unsigned char, array_byte); 112 | %array_class(signed char, array_int8); 113 | %array_class(short, array_int16); 114 | %array_class(unsigned short, array_uint16); 115 | %array_class(int, array_int32); 116 | %array_class(unsigned int, array_uint32); 117 | %array_class(float, array_float32); 118 | %array_class(double, array_float64); 119 | %array_functions(void *, array_voidp); 120 | 121 | typedef int int32; 122 | typedef int intn; 123 | typedef int uint32; 124 | typedef short int16; 125 | typedef unsigned char uint8; 126 | 127 | %{ 128 | #include "hdf.h" 129 | #include "mfhdf.h" 130 | %} 131 | 132 | /* 133 | *************** 134 | * Basic HDF API 135 | *************** 136 | */ 137 | 138 | /* 139 | * Opening and closing HDF file. 140 | */ 141 | 142 | extern int32 Hopen(const char *filename, 143 | intn access_mode, 144 | int num_dds_blocks); 145 | extern intn Hclose(int32 file_id); 146 | 147 | /* 148 | * Library version. 149 | */ 150 | 151 | %cstring_bounded_output(char *string, CHAR_BUFFER_SIZE); 152 | extern intn Hgetlibversion(uint32 *OUTPUT, /* major_v */ 153 | uint32 *OUTPUT, /* minor_v */ 154 | uint32 *OUTPUT, /* release */ 155 | char *string); 156 | extern intn Hgetfileversion(int32 file_id, 157 | uint32 *OUTPUT, /* major_v */ 158 | uint32 *OUTPUT, /* minor_v */ 159 | uint32 *OUTPUT, /* release */ 160 | char *string); 161 | %clear char *string; 162 | 163 | /* 164 | * Inquiry. 165 | */ 166 | 167 | extern intn Hishdf(const char *filename); 168 | 169 | /* 170 | *********** 171 | * Error API 172 | *********** 173 | */ 174 | 175 | %{ 176 | #include 177 | void _HEprint(void) { 178 | 179 | HEprint(stderr,0); 180 | } 181 | %} 182 | 183 | extern int32 HEvalue(int32 error_stack_offset); 184 | extern const char *HEstring(int32 error_code); 185 | extern void _HEprint(void); 186 | 187 | 188 | /* 189 | ******** 190 | * SD API 191 | ******** 192 | */ 193 | 194 | /* 195 | * Interface to numpy, which is used to read and write 196 | * SD array data. 197 | */ 198 | 199 | %init %{ 200 | /* Init numpy. Mandatory, otherwise the extension will bomb. 
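 * (import_array() initializes the numpy C API; every numpy call made
 * by the wrapper code below would crash without this step.)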
*/ 201 | import_array(); 202 | %} 203 | 204 | 205 | %{ 206 | #include "hdf.h" /* declares int32, float32, etc */ 207 | 208 | #define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION 209 | #include "numpy/ndarraytypes.h" 210 | #include "numpy/ndarrayobject.h" 211 | 212 | #define DFNT_FLOAT32 5 213 | #define DFNT_FLOAT 5 /* For backward compat; don't use */ 214 | #define DFNT_FLOAT64 6 215 | #define DFNT_DOUBLE 6 /* For backward compat; don't use */ 216 | #define DFNT_FLOAT128 7 /* No current plans for support */ 217 | 218 | #define DFNT_INT8 20 219 | #define DFNT_UINT8 21 220 | 221 | #define DFNT_INT16 22 222 | #define DFNT_UINT16 23 223 | #define DFNT_INT32 24 224 | #define DFNT_UINT32 25 225 | #define DFNT_INT64 26 226 | #define DFNT_UINT64 27 227 | 228 | #define DFNT_UCHAR8 3 /* 3 chosen for backward compatibility */ 229 | #define DFNT_UCHAR 3 /* uchar=uchar8 for backward compatibility */ 230 | #define DFNT_CHAR8 4 /* 4 chosen for backward compatibility */ 231 | #define DFNT_CHAR 4 /* uchar=uchar8 for backward compatibility */ 232 | 233 | static int HDFtoNumericType(int hdf) { 234 | 235 | int num; 236 | 237 | switch (hdf) { 238 | case DFNT_FLOAT32: num = NPY_FLOAT; break; 239 | case DFNT_FLOAT64: num = NPY_DOUBLE; break; 240 | case DFNT_INT8 : num = NPY_BYTE; break; 241 | case DFNT_UINT8 : num = NPY_UBYTE; break; 242 | case DFNT_INT16 : num = NPY_SHORT; break; 243 | #ifndef NOUINT 244 | case DFNT_UINT16 : num = NPY_USHORT; break; 245 | #endif 246 | case DFNT_INT32 : num = NPY_INT; break; 247 | #ifndef NOUINT 248 | case DFNT_UINT32 : num = NPY_UINT; break; 249 | #endif 250 | case DFNT_CHAR8 : num = NPY_STRING; break; 251 | case DFNT_UCHAR8 : num = NPY_UBYTE; break; 252 | default: 253 | num = -1; 254 | break; 255 | } 256 | return num; 257 | } 258 | 259 | static PyObject * _SDreaddata_0(int32 sds_id, int32 data_type, 260 | PyObject *start, 261 | PyObject *edges, 262 | PyObject *stride) { 263 | 264 | /* 265 | * A value of -1 in 'edges' indicates that the dimension 266 | * is indexed, not sliced. This dimension should be removed from 267 | * the output array. 268 | */ 269 | 270 | PyArrayObject *array; 271 | PyObject *o; 272 | int n, rank, outRank, num_type, status; 273 | /* 274 | * Allocate those arrays on the stack for simplicity. 275 | * 80 dimensions should be more than enough! 276 | */ 277 | int startArr[80], strideArr[80], edgesArr[80]; 278 | npy_intp dims[80]; 279 | float f32; 280 | double f64; 281 | int i32; 282 | 283 | /* 284 | * Load arrays. Caller has guaranteeded that all 3 arrays have the 285 | * same dimensions. 286 | */ 287 | rank = PyObject_Length(start); 288 | outRank = 0; 289 | dims[0] = 0; 290 | for (n = 0; n < rank; n++) { 291 | o = PySequence_GetItem(start, n); 292 | if (!PyInt_Check(o)) { 293 | PyErr_SetString(PyExc_ValueError, "arg start contains a non-integer"); 294 | return NULL; 295 | } 296 | startArr[n] = PyInt_AsLong(o); 297 | 298 | o = PySequence_GetItem(edges, n); 299 | if (!PyInt_Check(o)) { 300 | PyErr_SetString(PyExc_ValueError, "arg edges contains a non-integer"); 301 | return NULL; 302 | } 303 | /* 304 | * Do as numpy when a dimension is indexed (indicated by 305 | * a count of -1). 306 | * This dimension is then dropped from the output array, 307 | * producing a subarray. For ex., if m is a 3x3 array, m[0] 308 | * is a 3 element vector holding the first row of `m'. 309 | * Variables `outRank' and `dims' store the resulting array 310 | * rank and dimension lengths, resp. 
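       * For example, with a 3x3 SDS `m', reading m[0] reaches this point
       * with edges == (-1, 3) and produces a rank-1 output of length 3.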
311 | */ 312 | edgesArr[n] = PyInt_AsLong(o); 313 | if (edgesArr[n] != -1) { 314 | dims[outRank++] = abs(edgesArr[n]); 315 | } 316 | else 317 | edgesArr[n] = 1; 318 | 319 | o = PySequence_GetItem(stride, n); 320 | if (!PyInt_Check(o)) { 321 | PyErr_SetString(PyExc_ValueError, "arg stride contains a non-integer"); 322 | return NULL; 323 | } 324 | strideArr[n] = PyInt_AsLong(o); 325 | } 326 | 327 | /* 328 | * Create output numpy array. We provide 1 for the itemsize argument to 329 | * PyArray_New to handle to case when num_type is NPY_STRING. All other 330 | * num_type possibilities are fixed-size types, so itemsize is ignored. 331 | */ 332 | if ((num_type = HDFtoNumericType(data_type)) < 0) { 333 | PyErr_SetString(PyExc_ValueError, "data_type not compatible with numpy"); 334 | return NULL; 335 | } 336 | array = (PyArrayObject *)PyArray_New(&PyArray_Type, outRank, dims, num_type, 337 | NULL, NULL, 1, 0, NULL); 338 | if (array == NULL) 339 | return NULL; 340 | /* 341 | * Load it from the SDS. 342 | */ 343 | status = SDreaddata(sds_id, startArr, strideArr, edgesArr, 344 | PyArray_DATA(array)); 345 | if (status < 0) { 346 | PyErr_SetString(PyExc_ValueError, "SDreaddata failure"); 347 | Py_DECREF(array); /* Free array */ 348 | return NULL; 349 | } 350 | 351 | /* 352 | * Return array. 353 | * PyArray_Return() does not seem to work ok. 354 | * Deal ourselves with the 0 rank case. 355 | */ 356 | /* return PyArray_Return(array); */ 357 | if (outRank > 0) 358 | return (PyObject *) array; 359 | switch (num_type) { 360 | case NPY_FLOAT: 361 | f32 = *(float *) PyArray_DATA(array); 362 | o = PyFloat_FromDouble((double) f32); 363 | break; 364 | case NPY_DOUBLE: 365 | f64 = *(double *) PyArray_DATA(array); 366 | o = PyFloat_FromDouble(f64); 367 | break; 368 | case NPY_STRING: 369 | case NPY_BYTE: 370 | i32 = *(char *) PyArray_DATA(array); 371 | o = PyInt_FromLong((long) i32); 372 | break; 373 | case NPY_UBYTE: 374 | i32 = *(unsigned char *) PyArray_DATA(array); 375 | o = PyInt_FromLong((long) i32); 376 | break; 377 | case NPY_SHORT: 378 | i32 = *(short *) PyArray_DATA(array); 379 | o = PyInt_FromLong((long) i32); 380 | break; 381 | case NPY_INT: 382 | i32 = *(int *) PyArray_DATA(array); 383 | o = PyInt_FromLong((long) i32); 384 | break; 385 | } 386 | Py_DECREF(array); /* Free array */ 387 | return o; 388 | } 389 | 390 | static PyObject * _SDwritedata_0(int32 sds_id, int32 data_type, 391 | PyObject *start, 392 | PyObject *edges, 393 | PyObject *data, 394 | PyObject *stride) { 395 | 396 | PyArrayObject *array; 397 | PyObject *o; 398 | int n, rank, num_type, status; 399 | /* 400 | * Allocate those arrays on the stack for simplicity. 401 | * 80 dimensions should be more than enough! 402 | */ 403 | int startArr[80], strideArr[80], edgesArr[80]; 404 | 405 | /* 406 | * Load arrays. Caller has guaranteeded that all 3 arrays have the 407 | * same dimensions. 408 | */ 409 | rank = PyObject_Length(start); 410 | for (n = 0; n < rank; n++) { 411 | o = PySequence_GetItem(start, n); 412 | if (!PyInt_Check(o)) { 413 | PyErr_SetString(PyExc_ValueError, "arg start contains a non-integer"); 414 | return NULL; 415 | } 416 | startArr[n] = PyInt_AsLong(o); 417 | 418 | o = PySequence_GetItem(edges, n); 419 | if (!PyInt_Check(o)) { 420 | PyErr_SetString(PyExc_ValueError, "arg edges contains a non-integer"); 421 | return NULL; 422 | } 423 | /* 424 | * A value of -1 indicates that an index, not a slice, was applied 425 | * to the dimension. This difference is significant only for a 426 | * `get' operation. So ignore it here. 
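       * (The abs() call below also folds the -1 marker back to a count of 1.)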
427 | */ 428 | edgesArr[n] = abs(PyInt_AsLong(o)); 429 | 430 | o = PySequence_GetItem(stride, n); 431 | if (!PyInt_Check(o)) { 432 | PyErr_SetString(PyExc_ValueError, "arg stride contains a non-integer"); 433 | return NULL; 434 | } 435 | strideArr[n] = PyInt_AsLong(o); 436 | } 437 | 438 | /* 439 | * Convert input to a contiguous numpy array (no penalty if 440 | * input already in this format). 441 | */ 442 | if ((num_type = HDFtoNumericType(data_type)) < 0) { 443 | PyErr_SetString(PyExc_ValueError, "data_type not compatible with numpy"); 444 | return NULL; 445 | } 446 | if ((array = (PyArrayObject *) 447 | PyArray_ContiguousFromObject(data, num_type, rank - 1, rank)) == NULL) 448 | return NULL; 449 | /* 450 | * Store in the SDS. 451 | */ 452 | status = SDwritedata(sds_id, startArr, strideArr, edgesArr, 453 | PyArray_DATA(array)); 454 | Py_DECREF(array); /* Free array */ 455 | if (status < 0) { 456 | PyErr_SetString(PyExc_ValueError, "SDwritedata failure"); 457 | return NULL; 458 | } 459 | /* 460 | * Return None. 461 | */ 462 | Py_INCREF(Py_None); 463 | return Py_None; 464 | } 465 | 466 | %} 467 | 468 | /* 469 | * Following two routines are defined above, and interface to the 470 | * `SDreaddata()' and `SDwritedata()' hdf functions. 471 | */ 472 | 473 | extern PyObject * _SDreaddata_0(int32 sds_id, int32 data_type, 474 | PyObject *start, 475 | PyObject *edges, 476 | PyObject *stride); 477 | 478 | extern PyObject * _SDwritedata_0(int32 sds_id, int32 data_type, 479 | PyObject *start, 480 | PyObject *edges, 481 | PyObject *data, 482 | PyObject *stride); 483 | 484 | /* 485 | * Access 486 | */ 487 | 488 | extern int32 SDstart(const char *filename, int32 access_mode); 489 | 490 | extern int32 SDcreate(int32 sd_id, const char *sds_name, int32 data_type, 491 | int32 rank, const int32 *dim_sizes); 492 | 493 | extern int32 SDselect(int32 sd_id, int32 sds_index); 494 | 495 | extern int32 SDendaccess(int32 sds_id); 496 | 497 | extern int32 SDend(int32 sd_id); 498 | 499 | /* 500 | * General inquiry. 
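 * (These calls return metadata only: dataset names, ranks, dimension
 * sizes, types and attribute counts; no array data is transferred.)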
501 | */ 502 | 503 | extern int32 SDfileinfo(int32 sd_id, int32 *OUTPUT, int32 *OUTPUT); 504 | 505 | %cstring_bounded_output(char *sds_name, H4_MAX_NC_NAME); 506 | extern int32 SDgetinfo(int32 sds_id, char *sds_name, int32 *OUTPUT, void *buf, 507 | int32 *OUTPUT, int32 *OUTPUT); 508 | %clear char *sds_name; 509 | 510 | extern int32 SDcheckempty(int32 sds_id, int32 *OUTPUT); 511 | 512 | extern int32 SDidtoref(int32 sds_id); 513 | 514 | extern int32 SDiscoordvar(int32 sds_id); 515 | 516 | extern int32 SDisrecord(int32 sds_id); 517 | 518 | extern int32 SDnametoindex(int32 sd_id, const char *sds_name); 519 | 520 | extern int32 SDreftoindex(int32 sd_id, int32 sds_ref); 521 | 522 | /* 523 | * Dimensions 524 | */ 525 | 526 | %cstring_bounded_output(char *dim_name, CHAR_BUFFER_SIZE); 527 | extern int32 SDdiminfo(int32 dim_id, char *dim_name, 528 | int32 *OUTPUT, int32 *OUTPUT, int32 *OUTPUT); 529 | %clear char *dim_name; 530 | 531 | extern int32 SDgetdimid(int32 sds_id, int32 dim_index); 532 | 533 | extern int32 SDsetdimname(int32 dim_id, const char *dim_name); 534 | 535 | /* 536 | * Dimension scales 537 | */ 538 | 539 | extern int32 SDgetdimscale(int32 dim_id, void *buf); 540 | 541 | extern int32 SDsetdimscale(int32 dim_id, int32 n_values, int32 data_type, 542 | const void *buf); 543 | 544 | /* 545 | * User-defined attributes 546 | */ 547 | 548 | %cstring_bounded_output(char *attr_name, CHAR_BUFFER_SIZE); 549 | extern int32 SDattrinfo(int32 obj_id, int32 attr_index, 550 | char *attr_name, int32 *OUTPUT, int32 *OUTPUT); 551 | %clear char *attr_name; 552 | 553 | extern int32 SDfindattr(int32 obj_id, char *attr_name); 554 | 555 | extern int32 SDreadattr(int32 obj_id, int32 attr_index, void *buf); 556 | 557 | extern int32 SDsetattr(int32 obj_id, const char *attr_name, int32 data_type, 558 | int32 n_values, const void *values); 559 | 560 | 561 | /* 562 | * Predefined attributes 563 | */ 564 | 565 | extern int32 SDgetcal(int32 sds_id, double *OUTPUT, double *OUTPUT, 566 | double *OUTPUT, double *OUTPUT, int32 *OUTPUT); 567 | 568 | %cstring_bounded_output(char *label, ATTRIB_BUFFER_SIZE); 569 | %cstring_bounded_output(char *unit, ATTRIB_BUFFER_SIZE); 570 | %cstring_bounded_output(char *format, ATTRIB_BUFFER_SIZE); 571 | %cstring_bounded_output(char *coord_system, ATTRIB_BUFFER_SIZE); 572 | extern int32 SDgetdatastrs(int32 sds_id, char *label, char *unit, char *format, 573 | char *coord_system, int32 len); 574 | %clear char *label; 575 | %clear char *unit; 576 | %clear char *format; 577 | %clear char *coord_system; 578 | 579 | %cstring_bounded_output(char *label, ATTRIB_BUFFER_SIZE); 580 | %cstring_bounded_output(char *unit, ATTRIB_BUFFER_SIZE); 581 | %cstring_bounded_output(char *format, ATTRIB_BUFFER_SIZE); 582 | extern int32 SDgetdimstrs(int32 sds_id, char *label, char *unit, char *format, 583 | int32 len); 584 | %clear char *label; 585 | %clear char *unit; 586 | %clear char *format; 587 | 588 | extern int32 SDgetfillvalue(int32 sds_id, void *buf); 589 | 590 | extern int32 SDgetrange(int32 sds_id, void *buf1, void *buf2); 591 | 592 | extern int32 SDsetcal(int32 sds_id, double cal, double cal_error, 593 | double offset, double offset_err, int32 data_type); 594 | 595 | extern int32 SDsetdatastrs(int32 sds_id, const char *label, const char *unit, 596 | const char *format, const char *coord_system); 597 | 598 | extern int32 SDsetdimstrs(int32 sds_id, const char *label, const char *unit, 599 | const char *format); 600 | 601 | extern int32 SDsetfillmode(int32 sd_id, int32 fill_mode); 602 | 603 | extern 
int32 SDsetfillvalue(int32 sds_id, const void *fill_val); 604 | 605 | extern int32 SDsetrange(int32 sds_id, const void *max, const void *min); 606 | 607 | /* 608 | * Compression 609 | */ 610 | 611 | %{ 612 | 613 | #include "hcomp.h" 614 | 615 | static int32 _SDgetcompress(int32 sds_id, int32 *comp_type, int32 *value, 616 | int32 *v2, int32 *v3, int32 *v4, int32 *v5) { 617 | 618 | comp_info c_info; 619 | int32 status; 620 | 621 | #ifdef NOCOMPRESS 622 | status = -1; 623 | #else 624 | status = SDgetcompress(sds_id, comp_type, &c_info); 625 | switch (*comp_type) { 626 | case COMP_CODE_NONE: 627 | case COMP_CODE_RLE : 628 | break; 629 | case COMP_CODE_SKPHUFF: 630 | *value = c_info.skphuff.skp_size; 631 | break; 632 | case COMP_CODE_DEFLATE: 633 | *value = c_info.deflate.level; 634 | break; 635 | #ifndef NOSZIP 636 | case COMP_CODE_SZIP: 637 | *value = c_info.szip.options_mask; 638 | *v2 = c_info.szip.pixels_per_block; 639 | *v3 = c_info.szip.pixels_per_scanline; 640 | *v4 = c_info.szip.bits_per_pixel; 641 | *v5 = c_info.szip.pixels; 642 | break; 643 | #endif 644 | } 645 | #endif 646 | 647 | return status; 648 | } 649 | 650 | static int32 _SDsetcompress(int32 sds_id, int32 comp_type, int32 value, 651 | int32 v2) { 652 | 653 | comp_info c_info; 654 | int32 status; 655 | 656 | #ifdef NOCOMPRESS 657 | status = -1; 658 | #else 659 | switch (comp_type) { 660 | case COMP_CODE_NONE: 661 | case COMP_CODE_RLE : 662 | break; 663 | case COMP_CODE_SKPHUFF: 664 | c_info.skphuff.skp_size = value; 665 | break; 666 | case COMP_CODE_DEFLATE: 667 | c_info.deflate.level = value; 668 | break; 669 | #ifndef NOSZIP 670 | case COMP_CODE_SZIP: 671 | c_info.szip.options_mask = value; 672 | c_info.szip.pixels_per_block = v2; 673 | break; 674 | #endif 675 | } 676 | status = SDsetcompress(sds_id, comp_type, &c_info); 677 | #endif 678 | return status; 679 | } 680 | %} 681 | 682 | extern int32 _SDgetcompress(int32 sds_id, int32 *OUTPUT, int32 *OUTPUT, 683 | int32 *OUTPUT, int32 *OUTPUT, int32 *OUTPUT, 684 | int32 *OUTPUT); 685 | extern int32 _SDsetcompress(int32 sds_id, int32 comp_type, int32 value, 686 | int32 v2); 687 | 688 | /* 689 | * Misc 690 | */ 691 | 692 | extern int32 SDsetexternalfile(int32 sds_id, const char *filename, 693 | int32 offset); 694 | 695 | /* 696 | ******** 697 | * VS API 698 | ******** 699 | */ 700 | 701 | 702 | /* 703 | * Access / Create 704 | ***************** 705 | */ 706 | 707 | extern intn Vinitialize(int32 file_id); /* Vstart is a macro */ 708 | 709 | extern int32 VSattach(int32 file_id, 710 | int32 vdata_ref, 711 | const char * vdata_access_mode); 712 | 713 | extern int32 VSdetach(int32 vdata_id); 714 | 715 | extern intn Vfinish(int32 file_id); /* Vend is a macro */ 716 | 717 | /* 718 | * Creating one-field vdata. 719 | */ 720 | 721 | extern int32 VHstoredata(int32 file_id, 722 | const char *fieldname, 723 | void *buf, 724 | int32 n_records, 725 | int32 data_type, 726 | const char *vdata_name, 727 | const char *vdata_class); 728 | 729 | extern int32 VHstoredatam(int32 file_id, 730 | const char *fieldname, 731 | void *buf, 732 | int32 n_records, 733 | int32 data_type, 734 | const char *vdata_name, 735 | const char *vdata_class, 736 | int32 order); 737 | 738 | /* 739 | * Defining vdata structure. 740 | */ 741 | 742 | extern intn VSfdefine(int32 vdata_id, 743 | const char *fieldname, 744 | int32 data_type, 745 | int32 order); 746 | 747 | extern intn VSsetfields(int32 vdata_id, 748 | const char *fieldname_list); 749 | 750 | /* 751 | * Reading / writing vdata. 
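 *
 * A minimal usage sketch: VSread/VSwrite are normally driven through
 * the high-level pyhdf.VS wrappers, which handle the record packing
 * done here by VSfpack; the file name "file.hdf" and the one-field
 * vdata "INVENTORY" are placeholders.
 *
 *   from pyhdf.HDF import HDF, HC
 *   f = HDF("file.hdf", HC.WRITE | HC.CREATE)
 *   vs = f.vstart()                                # Vinitialize
 *   vd = vs.create("INVENTORY", (("partid", HC.INT32, 1),))
 *   vd.write([[1], [2], [3]])                      # VSwrite
 *   vd.detach()                                    # VSdetach
 *   vd = vs.attach("INVENTORY")                    # VSattach
 *   recs = vd.read(3)                              # VSread
 *   vd.detach()
 *   vs.end()                                       # Vfinish
 *   f.close()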
752 | */ 753 | 754 | int32 VSseek(int32 vdata_id, 755 | int32 record_index); 756 | 757 | int32 VSread(int32 vdata_id, 758 | void *databuf, 759 | int32 n_records, 760 | int32 interlace_mode); 761 | 762 | int32 VSwrite(int32 vdata_id, 763 | void *databuf, 764 | int32 n_records, 765 | int32 interlace_mode); 766 | 767 | intn VSfpack(int32 vdata_id, 768 | intn action, /* 0: PACK, 1: UNPACK */ 769 | const char *fields_in_buf, 770 | void *buf, 771 | intn buf_size, 772 | intn n_records, 773 | const char *fieldname_list, 774 | void **bufptrs); 775 | 776 | /* 777 | * Inquiry. 778 | */ 779 | 780 | extern int32 VSelts(int32 vdata_id); 781 | 782 | %cstring_bounded_output(char *vdata_class, CHAR_BUFFER_SIZE); 783 | extern intn VSgetclass(int32 vdata_id, 784 | char *vdata_class); 785 | %clear char *vdata_class; 786 | 787 | %cstring_bounded_output(char *fieldname_list, CHAR_BUFFER_SIZE); 788 | extern int32 VSgetfields(int32 vdata_id, 789 | char *fieldname_list); 790 | %clear char *fieldname_list; 791 | 792 | extern intn VSgetinterlace(int32 vdata_id); 793 | 794 | %cstring_bounded_output(char *vdata_name, CHAR_BUFFER_SIZE); 795 | extern intn VSgetname(int32 vdata_id, 796 | char *vdata_name); 797 | %clear char *vdata_name; 798 | 799 | extern intn VSsizeof(int32 vdata_id, 800 | const char *fieldname_list); 801 | 802 | %cstring_bounded_output(char *fieldname_list, CHAR_BUFFER_SIZE); 803 | %cstring_bounded_output(char *vdata_name, CHAR_BUFFER_SIZE); 804 | extern intn VSinquire(int32 vdata_id, 805 | int32 *OUTPUT, /* n_records */ 806 | int32 *OUTPUT, /* interlace_mode */ 807 | char *fieldname_list, 808 | int32 *OUTPUT, /* vdata_size */ 809 | char *vdata_name); 810 | %clear char *fieldname_list; 811 | %clear char *vdata_name; 812 | 813 | extern int32 VSQuerytag(int32 vdata_id); 814 | 815 | extern int32 VSQueryref(int32 vdata_id); 816 | 817 | extern intn VSfindex(int32 vdata_id, 818 | const char *field_name, 819 | int32 *OUTPUT); /* field_index */ 820 | 821 | extern intn VSisattr(int32 vdta_id); 822 | 823 | extern int32 VFnfields(int32 vdata_id); 824 | 825 | extern int32 VFfieldtype(int32 vdata_id, 826 | int32 field_index); 827 | 828 | extern const char *VFfieldname(int32 vdata_id, 829 | int32 field_index); 830 | 831 | extern int32 VFfieldesize(int32 vdata_id, 832 | int32 field_index); 833 | 834 | extern int32 VFfieldisize(int32 vdata_id, 835 | int32 field_index); 836 | 837 | extern int32 VFfieldorder(int32 vdata_id, 838 | int32 field_index); 839 | 840 | 841 | /* 842 | * Searching 843 | */ 844 | 845 | extern int32 VSfind(int32 file_id, 846 | const char *vdata_name); 847 | 848 | extern int32 VSgetid(int32 file_id, 849 | int32 vdata_ref); 850 | 851 | extern intn VSfexist(int32 vdata_id, 852 | const char *fieldname_list); 853 | 854 | /* 855 | * Attributes. 
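 *
 * A minimal usage sketch: in pyhdf.VS these calls surface as VDAttr
 * objects; "vd" is an attached vdata as in the sketch above, and the
 * attribute name "units" and its value are placeholders.
 *
 *   attr = vd.attr("units")     # vdata-level attribute
 *   attr.set(HC.CHAR8, "km")    # VSsetattr
 *   print(attr.get())           # VSgetattr
 *   print(vd.attrinfo())        # dict of attribute info (VSattrinfo)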
856 | */ 857 | 858 | extern int32 VSsetclass(int32 vdata_id, 859 | const char *vdata_class); 860 | 861 | extern int32 VSsetname(int32 vdata_id, 862 | const char *vdata_name); 863 | 864 | extern intn VSsetinterlace(int32 vdata_id, 865 | int32 interlace_mode); 866 | 867 | extern intn VSsetattr(int32 vdata_id, 868 | int32 field_index, 869 | const char *attr_name, 870 | int32 data_type, 871 | int32 n_values, 872 | const void *values); 873 | 874 | extern intn VSgetattr(int32 vdata_id, 875 | int32 field_index, 876 | intn attr_index, 877 | void *buf); 878 | 879 | extern int32 VSfnattrs(int32 vdata_id, 880 | int32 field_index); 881 | 882 | extern int32 VSnattrs(int32 vdata_id); 883 | 884 | %cstring_bounded_output(char *attr_name, CHAR_BUFFER_SIZE); 885 | extern intn VSattrinfo(int32 vdata_id, 886 | int32 field_index, 887 | intn attr_index, 888 | char *attr_name, 889 | int32 *OUTPUT, /* data_type */ 890 | int32 *OUTPUT, /* n_values */ 891 | int32 *OUTPUT); /* size */ 892 | %clear char *attr_name; 893 | 894 | extern intn VSfindattr(int32 vdata_id, 895 | int32 field_index, 896 | const char *attr_name); 897 | 898 | /********* 899 | * V API * 900 | *********/ 901 | 902 | /* 903 | * Access vgroup 904 | */ 905 | 906 | extern int32 Vattach(int32 file_id, 907 | int32 vgroup_ref, 908 | const char *vg_access_mode); 909 | 910 | extern int32 Vdetach(int32 vgroup_id); 911 | 912 | %cstring_bounded_output(char *name, CHAR_BUFFER_SIZE); 913 | extern int32 Vgetname(int32 vgroup_id, 914 | char *name); 915 | %clear char *name; 916 | 917 | extern int32 Vsetname(int32 vgroup_id, 918 | const char *vgroup_name); 919 | 920 | %cstring_bounded_output(char *name, CHAR_BUFFER_SIZE); 921 | extern int32 Vgetclass(int32 vgroup_id, 922 | char *name); 923 | %clear char *name; 924 | 925 | extern int32 Vsetclass(int32 vgroup_id, 926 | const char *vgroup_class); 927 | 928 | extern int32 Vfind(int32 file_id, 929 | const char *vgroup_name); 930 | 931 | extern int32 Vfindclass(int32 file_id, 932 | const char *vgroup_class); 933 | 934 | extern int32 Vinsert(int32 vgroup_id, 935 | int32 v_id); 936 | 937 | extern int32 Vaddtagref(int32 vgroup_id, 938 | int32 obj_tag, 939 | int32 obj_ref); 940 | 941 | extern int32 Vdeletetagref(int32 vgroup_id, 942 | int32 obj_tag, 943 | int32 obj_ref); 944 | 945 | extern int32 Vdelete(int32 file_id, 946 | int32 vgroup_id); 947 | 948 | extern int32 VQueryref(int32 vgroup_id); 949 | 950 | extern int32 VQuerytag(int32 vgroup_id); 951 | 952 | extern int32 Vntagrefs(int32 vgroup_id); 953 | 954 | extern int32 Vgettagref(int32 vgroup_id, 955 | int32 index, 956 | int32 *OUTPUT, /* obj_tag */ 957 | int32 *OUTPUT); /* obj_ref */ 958 | 959 | extern int32 Vgetversion(int32 vgroup_id); 960 | 961 | extern int32 Vgettagrefs(int32 vgroup_id, 962 | void *tag_attay, 963 | void *ref_array, 964 | int32 maxsize); 965 | 966 | extern int32 Vgetid(int32 file_id, 967 | int32 vgroup_ref); 968 | 969 | extern intn Vinqtagref(int32 vgroup_id, 970 | int32 tag, 971 | int32 ref); 972 | 973 | extern intn Visvg(int32 vgroup_id, 974 | int32 obj_ref); 975 | 976 | extern intn Visvs(int32 vgroup_id, 977 | int32 obj_ref); 978 | 979 | extern int32 Vnrefs(int32 vgroup_id, 980 | int32 tag_type); 981 | 982 | /* 983 | * Attributes 984 | */ 985 | 986 | extern intn Vfindattr(int32 vgroup_id, 987 | const char *attr_name); 988 | 989 | extern intn Vgetattr(int32 vdata_id, 990 | intn attr_index, 991 | void *buf); 992 | 993 | extern intn Vsetattr(int32 vgroup_id, 994 | const char *attr_name, 995 | int32 data_type, 996 | int32 n_values, 997 | const void 
*values); 998 | 999 | %cstring_bounded_output(char *attr_name, CHAR_BUFFER_SIZE); 1000 | extern intn Vattrinfo(int32 vgroup_id, 1001 | intn attr_index, 1002 | char *attr_name, 1003 | int32 *OUTPUT, /* data_type */ 1004 | int32 *OUTPUT, /* n_values */ 1005 | int32 *OUTPUT); /* size */ 1006 | %clear char *attr_name; 1007 | 1008 | extern intn Vnattrs(int32 vgroup_id); 1009 | -------------------------------------------------------------------------------- /pyhdf/hdfext.py: -------------------------------------------------------------------------------- 1 | # This file was automatically generated by SWIG (https://www.swig.org). 2 | # Version 4.1.1 3 | # 4 | # Do not make changes to this file unless you know what you are doing - modify 5 | # the SWIG interface file instead. 6 | 7 | from sys import version_info as _swig_python_version_info 8 | # Import the low-level C/C++ module 9 | if __package__ or "." in __name__: 10 | from . import _hdfext 11 | else: 12 | import _hdfext 13 | 14 | try: 15 | import builtins as __builtin__ 16 | except ImportError: 17 | import __builtin__ 18 | 19 | def _swig_repr(self): 20 | try: 21 | strthis = "proxy of " + self.this.__repr__() 22 | except __builtin__.Exception: 23 | strthis = "" 24 | return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,) 25 | 26 | 27 | def _swig_setattr_nondynamic_instance_variable(set): 28 | def set_instance_attr(self, name, value): 29 | if name == "this": 30 | set(self, name, value) 31 | elif name == "thisown": 32 | self.this.own(value) 33 | elif hasattr(self, name) and isinstance(getattr(type(self), name), property): 34 | set(self, name, value) 35 | else: 36 | raise AttributeError("You cannot add instance attributes to %s" % self) 37 | return set_instance_attr 38 | 39 | 40 | def _swig_setattr_nondynamic_class_variable(set): 41 | def set_class_attr(cls, name, value): 42 | if hasattr(cls, name) and not isinstance(getattr(cls, name), property): 43 | set(cls, name, value) 44 | else: 45 | raise AttributeError("You cannot add class attributes to %s" % cls) 46 | return set_class_attr 47 | 48 | 49 | def _swig_add_metaclass(metaclass): 50 | """Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass""" 51 | def wrapper(cls): 52 | return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy()) 53 | return wrapper 54 | 55 | 56 | class _SwigNonDynamicMeta(type): 57 | """Meta class to enforce nondynamic attributes (no new attributes) for a class""" 58 | __setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__) 59 | 60 | 61 | DFNT_NONE = _hdfext.DFNT_NONE 62 | DFNT_QUERY = _hdfext.DFNT_QUERY 63 | DFNT_VERSION = _hdfext.DFNT_VERSION 64 | DFNT_FLOAT32 = _hdfext.DFNT_FLOAT32 65 | DFNT_FLOAT = _hdfext.DFNT_FLOAT 66 | DFNT_FLOAT64 = _hdfext.DFNT_FLOAT64 67 | DFNT_DOUBLE = _hdfext.DFNT_DOUBLE 68 | DFNT_FLOAT128 = _hdfext.DFNT_FLOAT128 69 | DFNT_INT8 = _hdfext.DFNT_INT8 70 | DFNT_UINT8 = _hdfext.DFNT_UINT8 71 | DFNT_INT16 = _hdfext.DFNT_INT16 72 | DFNT_UINT16 = _hdfext.DFNT_UINT16 73 | DFNT_INT32 = _hdfext.DFNT_INT32 74 | DFNT_UINT32 = _hdfext.DFNT_UINT32 75 | DFNT_INT64 = _hdfext.DFNT_INT64 76 | DFNT_UINT64 = _hdfext.DFNT_UINT64 77 | DFNT_INT128 = _hdfext.DFNT_INT128 78 | DFNT_UINT128 = _hdfext.DFNT_UINT128 79 | DFNT_UCHAR8 = _hdfext.DFNT_UCHAR8 80 | DFNT_UCHAR = _hdfext.DFNT_UCHAR 81 | DFNT_CHAR8 = _hdfext.DFNT_CHAR8 82 | DFNT_CHAR = _hdfext.DFNT_CHAR 83 | DFNT_CHAR16 = _hdfext.DFNT_CHAR16 84 | DFNT_UCHAR16 = _hdfext.DFNT_UCHAR16 85 | SD_UNLIMITED = 
_hdfext.SD_UNLIMITED 86 | SD_FILL = _hdfext.SD_FILL 87 | SD_NOFILL = _hdfext.SD_NOFILL 88 | CHAR_BUFFER_SIZE = _hdfext.CHAR_BUFFER_SIZE 89 | ATTRIB_BUFFER_SIZE = _hdfext.ATTRIB_BUFFER_SIZE 90 | DFACC_READ = _hdfext.DFACC_READ 91 | DFACC_WRITE = _hdfext.DFACC_WRITE 92 | DFACC_CREATE = _hdfext.DFACC_CREATE 93 | DFACC_ALL = _hdfext.DFACC_ALL 94 | DFACC_RDONLY = _hdfext.DFACC_RDONLY 95 | DFACC_RDWR = _hdfext.DFACC_RDWR 96 | DFACC_CLOBBER = _hdfext.DFACC_CLOBBER 97 | DFACC_BUFFER = _hdfext.DFACC_BUFFER 98 | DFACC_APPENDABLE = _hdfext.DFACC_APPENDABLE 99 | DFACC_CURRENT = _hdfext.DFACC_CURRENT 100 | DFACC_OLD = _hdfext.DFACC_OLD 101 | COMP_CODE_NONE = _hdfext.COMP_CODE_NONE 102 | COMP_CODE_RLE = _hdfext.COMP_CODE_RLE 103 | COMP_CODE_NBIT = _hdfext.COMP_CODE_NBIT 104 | COMP_CODE_SKPHUFF = _hdfext.COMP_CODE_SKPHUFF 105 | COMP_CODE_DEFLATE = _hdfext.COMP_CODE_DEFLATE 106 | COMP_CODE_SZIP = _hdfext.COMP_CODE_SZIP 107 | DFTAG_NDG = _hdfext.DFTAG_NDG 108 | DFTAG_VH = _hdfext.DFTAG_VH 109 | DFTAG_VG = _hdfext.DFTAG_VG 110 | H4_MAX_VAR_DIMS = _hdfext.H4_MAX_VAR_DIMS 111 | class array_byte(object): 112 | thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") 113 | __repr__ = _swig_repr 114 | 115 | def __init__(self, nelements): 116 | _hdfext.array_byte_swiginit(self, _hdfext.new_array_byte(nelements)) 117 | __swig_destroy__ = _hdfext.delete_array_byte 118 | 119 | def __getitem__(self, index): 120 | return _hdfext.array_byte___getitem__(self, index) 121 | 122 | def __setitem__(self, index, value): 123 | return _hdfext.array_byte___setitem__(self, index, value) 124 | 125 | def cast(self): 126 | return _hdfext.array_byte_cast(self) 127 | 128 | @staticmethod 129 | def frompointer(t): 130 | return _hdfext.array_byte_frompointer(t) 131 | 132 | # Register array_byte in _hdfext: 133 | _hdfext.array_byte_swigregister(array_byte) 134 | class array_int8(object): 135 | thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") 136 | __repr__ = _swig_repr 137 | 138 | def __init__(self, nelements): 139 | _hdfext.array_int8_swiginit(self, _hdfext.new_array_int8(nelements)) 140 | __swig_destroy__ = _hdfext.delete_array_int8 141 | 142 | def __getitem__(self, index): 143 | return _hdfext.array_int8___getitem__(self, index) 144 | 145 | def __setitem__(self, index, value): 146 | return _hdfext.array_int8___setitem__(self, index, value) 147 | 148 | def cast(self): 149 | return _hdfext.array_int8_cast(self) 150 | 151 | @staticmethod 152 | def frompointer(t): 153 | return _hdfext.array_int8_frompointer(t) 154 | 155 | # Register array_int8 in _hdfext: 156 | _hdfext.array_int8_swigregister(array_int8) 157 | class array_int16(object): 158 | thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") 159 | __repr__ = _swig_repr 160 | 161 | def __init__(self, nelements): 162 | _hdfext.array_int16_swiginit(self, _hdfext.new_array_int16(nelements)) 163 | __swig_destroy__ = _hdfext.delete_array_int16 164 | 165 | def __getitem__(self, index): 166 | return _hdfext.array_int16___getitem__(self, index) 167 | 168 | def __setitem__(self, index, value): 169 | return _hdfext.array_int16___setitem__(self, index, value) 170 | 171 | def cast(self): 172 | return _hdfext.array_int16_cast(self) 173 | 174 | @staticmethod 175 | def frompointer(t): 176 | return _hdfext.array_int16_frompointer(t) 177 | 178 | # Register array_int16 in _hdfext: 179 | _hdfext.array_int16_swigregister(array_int16) 180 | class array_uint16(object): 181 | 
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") 182 | __repr__ = _swig_repr 183 | 184 | def __init__(self, nelements): 185 | _hdfext.array_uint16_swiginit(self, _hdfext.new_array_uint16(nelements)) 186 | __swig_destroy__ = _hdfext.delete_array_uint16 187 | 188 | def __getitem__(self, index): 189 | return _hdfext.array_uint16___getitem__(self, index) 190 | 191 | def __setitem__(self, index, value): 192 | return _hdfext.array_uint16___setitem__(self, index, value) 193 | 194 | def cast(self): 195 | return _hdfext.array_uint16_cast(self) 196 | 197 | @staticmethod 198 | def frompointer(t): 199 | return _hdfext.array_uint16_frompointer(t) 200 | 201 | # Register array_uint16 in _hdfext: 202 | _hdfext.array_uint16_swigregister(array_uint16) 203 | class array_int32(object): 204 | thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") 205 | __repr__ = _swig_repr 206 | 207 | def __init__(self, nelements): 208 | _hdfext.array_int32_swiginit(self, _hdfext.new_array_int32(nelements)) 209 | __swig_destroy__ = _hdfext.delete_array_int32 210 | 211 | def __getitem__(self, index): 212 | return _hdfext.array_int32___getitem__(self, index) 213 | 214 | def __setitem__(self, index, value): 215 | return _hdfext.array_int32___setitem__(self, index, value) 216 | 217 | def cast(self): 218 | return _hdfext.array_int32_cast(self) 219 | 220 | @staticmethod 221 | def frompointer(t): 222 | return _hdfext.array_int32_frompointer(t) 223 | 224 | # Register array_int32 in _hdfext: 225 | _hdfext.array_int32_swigregister(array_int32) 226 | class array_uint32(object): 227 | thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") 228 | __repr__ = _swig_repr 229 | 230 | def __init__(self, nelements): 231 | _hdfext.array_uint32_swiginit(self, _hdfext.new_array_uint32(nelements)) 232 | __swig_destroy__ = _hdfext.delete_array_uint32 233 | 234 | def __getitem__(self, index): 235 | return _hdfext.array_uint32___getitem__(self, index) 236 | 237 | def __setitem__(self, index, value): 238 | return _hdfext.array_uint32___setitem__(self, index, value) 239 | 240 | def cast(self): 241 | return _hdfext.array_uint32_cast(self) 242 | 243 | @staticmethod 244 | def frompointer(t): 245 | return _hdfext.array_uint32_frompointer(t) 246 | 247 | # Register array_uint32 in _hdfext: 248 | _hdfext.array_uint32_swigregister(array_uint32) 249 | class array_float32(object): 250 | thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") 251 | __repr__ = _swig_repr 252 | 253 | def __init__(self, nelements): 254 | _hdfext.array_float32_swiginit(self, _hdfext.new_array_float32(nelements)) 255 | __swig_destroy__ = _hdfext.delete_array_float32 256 | 257 | def __getitem__(self, index): 258 | return _hdfext.array_float32___getitem__(self, index) 259 | 260 | def __setitem__(self, index, value): 261 | return _hdfext.array_float32___setitem__(self, index, value) 262 | 263 | def cast(self): 264 | return _hdfext.array_float32_cast(self) 265 | 266 | @staticmethod 267 | def frompointer(t): 268 | return _hdfext.array_float32_frompointer(t) 269 | 270 | # Register array_float32 in _hdfext: 271 | _hdfext.array_float32_swigregister(array_float32) 272 | class array_float64(object): 273 | thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") 274 | __repr__ = _swig_repr 275 | 276 | def __init__(self, nelements): 277 | _hdfext.array_float64_swiginit(self, 
_hdfext.new_array_float64(nelements)) 278 | __swig_destroy__ = _hdfext.delete_array_float64 279 | 280 | def __getitem__(self, index): 281 | return _hdfext.array_float64___getitem__(self, index) 282 | 283 | def __setitem__(self, index, value): 284 | return _hdfext.array_float64___setitem__(self, index, value) 285 | 286 | def cast(self): 287 | return _hdfext.array_float64_cast(self) 288 | 289 | @staticmethod 290 | def frompointer(t): 291 | return _hdfext.array_float64_frompointer(t) 292 | 293 | # Register array_float64 in _hdfext: 294 | _hdfext.array_float64_swigregister(array_float64) 295 | 296 | def new_array_voidp(nelements): 297 | return _hdfext.new_array_voidp(nelements) 298 | 299 | def delete_array_voidp(ary): 300 | return _hdfext.delete_array_voidp(ary) 301 | 302 | def array_voidp_getitem(ary, index): 303 | return _hdfext.array_voidp_getitem(ary, index) 304 | 305 | def array_voidp_setitem(ary, index, value): 306 | return _hdfext.array_voidp_setitem(ary, index, value) 307 | 308 | def Hopen(filename, access_mode, num_dds_blocks): 309 | return _hdfext.Hopen(filename, access_mode, num_dds_blocks) 310 | 311 | def Hclose(file_id): 312 | return _hdfext.Hclose(file_id) 313 | 314 | def Hgetlibversion(): 315 | return _hdfext.Hgetlibversion() 316 | 317 | def Hgetfileversion(file_id): 318 | return _hdfext.Hgetfileversion(file_id) 319 | 320 | def Hishdf(filename): 321 | return _hdfext.Hishdf(filename) 322 | 323 | def HEvalue(error_stack_offset): 324 | return _hdfext.HEvalue(error_stack_offset) 325 | 326 | def HEstring(error_code): 327 | return _hdfext.HEstring(error_code) 328 | 329 | def _HEprint(): 330 | return _hdfext._HEprint() 331 | 332 | def _SDreaddata_0(sds_id, data_type, start, edges, stride): 333 | return _hdfext._SDreaddata_0(sds_id, data_type, start, edges, stride) 334 | 335 | def _SDwritedata_0(sds_id, data_type, start, edges, data, stride): 336 | return _hdfext._SDwritedata_0(sds_id, data_type, start, edges, data, stride) 337 | 338 | def SDstart(filename, access_mode): 339 | return _hdfext.SDstart(filename, access_mode) 340 | 341 | def SDcreate(sd_id, sds_name, data_type, rank, dim_sizes): 342 | return _hdfext.SDcreate(sd_id, sds_name, data_type, rank, dim_sizes) 343 | 344 | def SDselect(sd_id, sds_index): 345 | return _hdfext.SDselect(sd_id, sds_index) 346 | 347 | def SDendaccess(sds_id): 348 | return _hdfext.SDendaccess(sds_id) 349 | 350 | def SDend(sd_id): 351 | return _hdfext.SDend(sd_id) 352 | 353 | def SDfileinfo(sd_id): 354 | return _hdfext.SDfileinfo(sd_id) 355 | 356 | def SDgetinfo(sds_id, buf): 357 | return _hdfext.SDgetinfo(sds_id, buf) 358 | 359 | def SDcheckempty(sds_id): 360 | return _hdfext.SDcheckempty(sds_id) 361 | 362 | def SDidtoref(sds_id): 363 | return _hdfext.SDidtoref(sds_id) 364 | 365 | def SDiscoordvar(sds_id): 366 | return _hdfext.SDiscoordvar(sds_id) 367 | 368 | def SDisrecord(sds_id): 369 | return _hdfext.SDisrecord(sds_id) 370 | 371 | def SDnametoindex(sd_id, sds_name): 372 | return _hdfext.SDnametoindex(sd_id, sds_name) 373 | 374 | def SDreftoindex(sd_id, sds_ref): 375 | return _hdfext.SDreftoindex(sd_id, sds_ref) 376 | 377 | def SDdiminfo(dim_id): 378 | return _hdfext.SDdiminfo(dim_id) 379 | 380 | def SDgetdimid(sds_id, dim_index): 381 | return _hdfext.SDgetdimid(sds_id, dim_index) 382 | 383 | def SDsetdimname(dim_id, dim_name): 384 | return _hdfext.SDsetdimname(dim_id, dim_name) 385 | 386 | def SDgetdimscale(dim_id, buf): 387 | return _hdfext.SDgetdimscale(dim_id, buf) 388 | 389 | def SDsetdimscale(dim_id, n_values, data_type, buf): 390 | return 
_hdfext.SDsetdimscale(dim_id, n_values, data_type, buf) 391 | 392 | def SDattrinfo(obj_id, attr_index): 393 | return _hdfext.SDattrinfo(obj_id, attr_index) 394 | 395 | def SDfindattr(obj_id, attr_name): 396 | return _hdfext.SDfindattr(obj_id, attr_name) 397 | 398 | def SDreadattr(obj_id, attr_index, buf): 399 | return _hdfext.SDreadattr(obj_id, attr_index, buf) 400 | 401 | def SDsetattr(obj_id, attr_name, data_type, n_values, values): 402 | return _hdfext.SDsetattr(obj_id, attr_name, data_type, n_values, values) 403 | 404 | def SDgetcal(sds_id): 405 | return _hdfext.SDgetcal(sds_id) 406 | 407 | def SDgetdatastrs(sds_id, len): 408 | return _hdfext.SDgetdatastrs(sds_id, len) 409 | 410 | def SDgetdimstrs(sds_id, len): 411 | return _hdfext.SDgetdimstrs(sds_id, len) 412 | 413 | def SDgetfillvalue(sds_id, buf): 414 | return _hdfext.SDgetfillvalue(sds_id, buf) 415 | 416 | def SDgetrange(sds_id, buf1, buf2): 417 | return _hdfext.SDgetrange(sds_id, buf1, buf2) 418 | 419 | def SDsetcal(sds_id, cal, cal_error, offset, offset_err, data_type): 420 | return _hdfext.SDsetcal(sds_id, cal, cal_error, offset, offset_err, data_type) 421 | 422 | def SDsetdatastrs(sds_id, label, unit, format, coord_system): 423 | return _hdfext.SDsetdatastrs(sds_id, label, unit, format, coord_system) 424 | 425 | def SDsetdimstrs(sds_id, label, unit, format): 426 | return _hdfext.SDsetdimstrs(sds_id, label, unit, format) 427 | 428 | def SDsetfillmode(sd_id, fill_mode): 429 | return _hdfext.SDsetfillmode(sd_id, fill_mode) 430 | 431 | def SDsetfillvalue(sds_id, fill_val): 432 | return _hdfext.SDsetfillvalue(sds_id, fill_val) 433 | 434 | def SDsetrange(sds_id, max, min): 435 | return _hdfext.SDsetrange(sds_id, max, min) 436 | 437 | def _SDgetcompress(sds_id): 438 | return _hdfext._SDgetcompress(sds_id) 439 | 440 | def _SDsetcompress(sds_id, comp_type, value, v2): 441 | return _hdfext._SDsetcompress(sds_id, comp_type, value, v2) 442 | 443 | def SDsetexternalfile(sds_id, filename, offset): 444 | return _hdfext.SDsetexternalfile(sds_id, filename, offset) 445 | 446 | def Vinitialize(file_id): 447 | return _hdfext.Vinitialize(file_id) 448 | 449 | def VSattach(file_id, vdata_ref, vdata_access_mode): 450 | return _hdfext.VSattach(file_id, vdata_ref, vdata_access_mode) 451 | 452 | def VSdetach(vdata_id): 453 | return _hdfext.VSdetach(vdata_id) 454 | 455 | def Vfinish(file_id): 456 | return _hdfext.Vfinish(file_id) 457 | 458 | def VHstoredata(file_id, fieldname, buf, n_records, data_type, vdata_name, vdata_class): 459 | return _hdfext.VHstoredata(file_id, fieldname, buf, n_records, data_type, vdata_name, vdata_class) 460 | 461 | def VHstoredatam(file_id, fieldname, buf, n_records, data_type, vdata_name, vdata_class, order): 462 | return _hdfext.VHstoredatam(file_id, fieldname, buf, n_records, data_type, vdata_name, vdata_class, order) 463 | 464 | def VSfdefine(vdata_id, fieldname, data_type, order): 465 | return _hdfext.VSfdefine(vdata_id, fieldname, data_type, order) 466 | 467 | def VSsetfields(vdata_id, fieldname_list): 468 | return _hdfext.VSsetfields(vdata_id, fieldname_list) 469 | 470 | def VSseek(vdata_id, record_index): 471 | return _hdfext.VSseek(vdata_id, record_index) 472 | 473 | def VSread(vdata_id, databuf, n_records, interlace_mode): 474 | return _hdfext.VSread(vdata_id, databuf, n_records, interlace_mode) 475 | 476 | def VSwrite(vdata_id, databuf, n_records, interlace_mode): 477 | return _hdfext.VSwrite(vdata_id, databuf, n_records, interlace_mode) 478 | 479 | def VSfpack(vdata_id, action, fields_in_buf, buf, buf_size, 
n_records, fieldname_list, bufptrs): 480 | return _hdfext.VSfpack(vdata_id, action, fields_in_buf, buf, buf_size, n_records, fieldname_list, bufptrs) 481 | 482 | def VSelts(vdata_id): 483 | return _hdfext.VSelts(vdata_id) 484 | 485 | def VSgetclass(vdata_id): 486 | return _hdfext.VSgetclass(vdata_id) 487 | 488 | def VSgetfields(vdata_id): 489 | return _hdfext.VSgetfields(vdata_id) 490 | 491 | def VSgetinterlace(vdata_id): 492 | return _hdfext.VSgetinterlace(vdata_id) 493 | 494 | def VSgetname(vdata_id): 495 | return _hdfext.VSgetname(vdata_id) 496 | 497 | def VSsizeof(vdata_id, fieldname_list): 498 | return _hdfext.VSsizeof(vdata_id, fieldname_list) 499 | 500 | def VSinquire(vdata_id): 501 | return _hdfext.VSinquire(vdata_id) 502 | 503 | def VSQuerytag(vdata_id): 504 | return _hdfext.VSQuerytag(vdata_id) 505 | 506 | def VSQueryref(vdata_id): 507 | return _hdfext.VSQueryref(vdata_id) 508 | 509 | def VSfindex(vdata_id, field_name): 510 | return _hdfext.VSfindex(vdata_id, field_name) 511 | 512 | def VSisattr(vdta_id): 513 | return _hdfext.VSisattr(vdta_id) 514 | 515 | def VFnfields(vdata_id): 516 | return _hdfext.VFnfields(vdata_id) 517 | 518 | def VFfieldtype(vdata_id, field_index): 519 | return _hdfext.VFfieldtype(vdata_id, field_index) 520 | 521 | def VFfieldname(vdata_id, field_index): 522 | return _hdfext.VFfieldname(vdata_id, field_index) 523 | 524 | def VFfieldesize(vdata_id, field_index): 525 | return _hdfext.VFfieldesize(vdata_id, field_index) 526 | 527 | def VFfieldisize(vdata_id, field_index): 528 | return _hdfext.VFfieldisize(vdata_id, field_index) 529 | 530 | def VFfieldorder(vdata_id, field_index): 531 | return _hdfext.VFfieldorder(vdata_id, field_index) 532 | 533 | def VSfind(file_id, vdata_name): 534 | return _hdfext.VSfind(file_id, vdata_name) 535 | 536 | def VSgetid(file_id, vdata_ref): 537 | return _hdfext.VSgetid(file_id, vdata_ref) 538 | 539 | def VSfexist(vdata_id, fieldname_list): 540 | return _hdfext.VSfexist(vdata_id, fieldname_list) 541 | 542 | def VSsetclass(vdata_id, vdata_class): 543 | return _hdfext.VSsetclass(vdata_id, vdata_class) 544 | 545 | def VSsetname(vdata_id, vdata_name): 546 | return _hdfext.VSsetname(vdata_id, vdata_name) 547 | 548 | def VSsetinterlace(vdata_id, interlace_mode): 549 | return _hdfext.VSsetinterlace(vdata_id, interlace_mode) 550 | 551 | def VSsetattr(vdata_id, field_index, attr_name, data_type, n_values, values): 552 | return _hdfext.VSsetattr(vdata_id, field_index, attr_name, data_type, n_values, values) 553 | 554 | def VSgetattr(vdata_id, field_index, attr_index, buf): 555 | return _hdfext.VSgetattr(vdata_id, field_index, attr_index, buf) 556 | 557 | def VSfnattrs(vdata_id, field_index): 558 | return _hdfext.VSfnattrs(vdata_id, field_index) 559 | 560 | def VSnattrs(vdata_id): 561 | return _hdfext.VSnattrs(vdata_id) 562 | 563 | def VSattrinfo(vdata_id, field_index, attr_index): 564 | return _hdfext.VSattrinfo(vdata_id, field_index, attr_index) 565 | 566 | def VSfindattr(vdata_id, field_index, attr_name): 567 | return _hdfext.VSfindattr(vdata_id, field_index, attr_name) 568 | 569 | def Vattach(file_id, vgroup_ref, vg_access_mode): 570 | return _hdfext.Vattach(file_id, vgroup_ref, vg_access_mode) 571 | 572 | def Vdetach(vgroup_id): 573 | return _hdfext.Vdetach(vgroup_id) 574 | 575 | def Vgetname(vgroup_id): 576 | return _hdfext.Vgetname(vgroup_id) 577 | 578 | def Vsetname(vgroup_id, vgroup_name): 579 | return _hdfext.Vsetname(vgroup_id, vgroup_name) 580 | 581 | def Vgetclass(vgroup_id): 582 | return _hdfext.Vgetclass(vgroup_id) 583 | 584 
| def Vsetclass(vgroup_id, vgroup_class): 585 | return _hdfext.Vsetclass(vgroup_id, vgroup_class) 586 | 587 | def Vfind(file_id, vgroup_name): 588 | return _hdfext.Vfind(file_id, vgroup_name) 589 | 590 | def Vfindclass(file_id, vgroup_class): 591 | return _hdfext.Vfindclass(file_id, vgroup_class) 592 | 593 | def Vinsert(vgroup_id, v_id): 594 | return _hdfext.Vinsert(vgroup_id, v_id) 595 | 596 | def Vaddtagref(vgroup_id, obj_tag, obj_ref): 597 | return _hdfext.Vaddtagref(vgroup_id, obj_tag, obj_ref) 598 | 599 | def Vdeletetagref(vgroup_id, obj_tag, obj_ref): 600 | return _hdfext.Vdeletetagref(vgroup_id, obj_tag, obj_ref) 601 | 602 | def Vdelete(file_id, vgroup_id): 603 | return _hdfext.Vdelete(file_id, vgroup_id) 604 | 605 | def VQueryref(vgroup_id): 606 | return _hdfext.VQueryref(vgroup_id) 607 | 608 | def VQuerytag(vgroup_id): 609 | return _hdfext.VQuerytag(vgroup_id) 610 | 611 | def Vntagrefs(vgroup_id): 612 | return _hdfext.Vntagrefs(vgroup_id) 613 | 614 | def Vgettagref(vgroup_id, index): 615 | return _hdfext.Vgettagref(vgroup_id, index) 616 | 617 | def Vgetversion(vgroup_id): 618 | return _hdfext.Vgetversion(vgroup_id) 619 | 620 | def Vgettagrefs(vgroup_id, tag_attay, ref_array, maxsize): 621 | return _hdfext.Vgettagrefs(vgroup_id, tag_attay, ref_array, maxsize) 622 | 623 | def Vgetid(file_id, vgroup_ref): 624 | return _hdfext.Vgetid(file_id, vgroup_ref) 625 | 626 | def Vinqtagref(vgroup_id, tag, ref): 627 | return _hdfext.Vinqtagref(vgroup_id, tag, ref) 628 | 629 | def Visvg(vgroup_id, obj_ref): 630 | return _hdfext.Visvg(vgroup_id, obj_ref) 631 | 632 | def Visvs(vgroup_id, obj_ref): 633 | return _hdfext.Visvs(vgroup_id, obj_ref) 634 | 635 | def Vnrefs(vgroup_id, tag_type): 636 | return _hdfext.Vnrefs(vgroup_id, tag_type) 637 | 638 | def Vfindattr(vgroup_id, attr_name): 639 | return _hdfext.Vfindattr(vgroup_id, attr_name) 640 | 641 | def Vgetattr(vdata_id, attr_index, buf): 642 | return _hdfext.Vgetattr(vdata_id, attr_index, buf) 643 | 644 | def Vsetattr(vgroup_id, attr_name, data_type, n_values, values): 645 | return _hdfext.Vsetattr(vgroup_id, attr_name, data_type, n_values, values) 646 | 647 | def Vattrinfo(vgroup_id, attr_index): 648 | return _hdfext.Vattrinfo(vgroup_id, attr_index) 649 | 650 | def Vnattrs(vgroup_id): 651 | return _hdfext.Vnattrs(vgroup_id) 652 | 653 | -------------------------------------------------------------------------------- /pyhdf/six.py: -------------------------------------------------------------------------------- 1 | """Utilities for writing code that runs on Python 2 and 3""" 2 | 3 | # Copyright (c) 2010-2013 Benjamin Peterson 4 | # 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | # this software and associated documentation files (the "Software"), to deal in 7 | # the Software without restriction, including without limitation the rights to 8 | # use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | # the Software, and to permit persons to whom the Software is furnished to do so, 10 | # subject to the following conditions: 11 | # 12 | # The above copyright notice and this permission notice shall be included in all 13 | # copies or substantial portions of the Software. 14 | # 15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 17 | # FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR 18 | # COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 19 | # IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 20 | # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | 22 | import operator 23 | import sys 24 | import types 25 | 26 | __author__ = "Benjamin Peterson <benjamin@python.org>" 27 | __version__ = "1.3.0" 28 | 29 | 30 | # True if we are running on Python 3. 31 | PY3 = sys.version_info[0] == 3 32 | 33 | if PY3: 34 | string_types = str, 35 | integer_types = int, 36 | class_types = type, 37 | text_type = str 38 | binary_type = bytes 39 | 40 | MAXSIZE = sys.maxsize 41 | else: 42 | string_types = basestring, 43 | integer_types = (int, long) 44 | class_types = (type, types.ClassType) 45 | text_type = unicode 46 | binary_type = str 47 | 48 | if sys.platform.startswith("java"): 49 | # Jython always uses 32 bits. 50 | MAXSIZE = int((1 << 31) - 1) 51 | else: 52 | # It's possible to have sizeof(long) != sizeof(Py_ssize_t). 53 | class X(object): 54 | def __len__(self): 55 | return 1 << 31 56 | try: 57 | len(X()) 58 | except OverflowError: 59 | # 32-bit 60 | MAXSIZE = int((1 << 31) - 1) 61 | else: 62 | # 64-bit 63 | MAXSIZE = int((1 << 63) - 1) 64 | del X 65 | 66 | 67 | def _add_doc(func, doc): 68 | """Add documentation to a function.""" 69 | func.__doc__ = doc 70 | 71 | 72 | def _import_module(name): 73 | """Import module, returning the module after the last dot.""" 74 | __import__(name) 75 | return sys.modules[name] 76 | 77 | 78 | class _LazyDescr(object): 79 | 80 | def __init__(self, name): 81 | self.name = name 82 | 83 | def __get__(self, obj, tp): 84 | result = self._resolve() 85 | setattr(obj, self.name, result) 86 | # This is a bit ugly, but it avoids running this again.
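        # (The resolved object was cached on the instance by the setattr()
        # above; deleting the class-level descriptor lets later lookups
        # find the cached attribute directly instead of re-entering
        # __get__.)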
87 | delattr(tp, self.name) 88 | return result 89 | 90 | 91 | class MovedModule(_LazyDescr): 92 | 93 | def __init__(self, name, old, new=None): 94 | super(MovedModule, self).__init__(name) 95 | if PY3: 96 | if new is None: 97 | new = name 98 | self.mod = new 99 | else: 100 | self.mod = old 101 | 102 | def _resolve(self): 103 | return _import_module(self.mod) 104 | 105 | 106 | class MovedAttribute(_LazyDescr): 107 | 108 | def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): 109 | super(MovedAttribute, self).__init__(name) 110 | if PY3: 111 | if new_mod is None: 112 | new_mod = name 113 | self.mod = new_mod 114 | if new_attr is None: 115 | if old_attr is None: 116 | new_attr = name 117 | else: 118 | new_attr = old_attr 119 | self.attr = new_attr 120 | else: 121 | self.mod = old_mod 122 | if old_attr is None: 123 | old_attr = name 124 | self.attr = old_attr 125 | 126 | def _resolve(self): 127 | module = _import_module(self.mod) 128 | return getattr(module, self.attr) 129 | 130 | 131 | 132 | class _MovedItems(types.ModuleType): 133 | """Lazy loading of moved objects""" 134 | 135 | 136 | _moved_attributes = [ 137 | MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), 138 | MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), 139 | MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), 140 | MovedAttribute("map", "itertools", "builtins", "imap", "map"), 141 | MovedAttribute("reload_module", "__builtin__", "imp", "reload"), 142 | MovedAttribute("reduce", "__builtin__", "functools"), 143 | MovedAttribute("StringIO", "StringIO", "io"), 144 | MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), 145 | MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), 146 | 147 | MovedModule("builtins", "__builtin__"), 148 | MovedModule("configparser", "ConfigParser"), 149 | MovedModule("copyreg", "copy_reg"), 150 | MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), 151 | MovedModule("http_cookies", "Cookie", "http.cookies"), 152 | MovedModule("html_entities", "htmlentitydefs", "html.entities"), 153 | MovedModule("html_parser", "HTMLParser", "html.parser"), 154 | MovedModule("http_client", "httplib", "http.client"), 155 | MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), 156 | MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), 157 | MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), 158 | MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), 159 | MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), 160 | MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), 161 | MovedModule("cPickle", "cPickle", "pickle"), 162 | MovedModule("queue", "Queue"), 163 | MovedModule("reprlib", "repr"), 164 | MovedModule("socketserver", "SocketServer"), 165 | MovedModule("tkinter", "Tkinter"), 166 | MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), 167 | MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), 168 | MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), 169 | MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), 170 | MovedModule("tkinter_tix", "Tix", "tkinter.tix"), 171 | MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), 172 | MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), 173 | MovedModule("tkinter_colorchooser", "tkColorChooser", 174 | "tkinter.colorchooser"), 175 | MovedModule("tkinter_commondialog", "tkCommonDialog", 
176 | "tkinter.commondialog"), 177 | MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), 178 | MovedModule("tkinter_font", "tkFont", "tkinter.font"), 179 | MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), 180 | MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", 181 | "tkinter.simpledialog"), 182 | MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), 183 | MovedModule("winreg", "_winreg"), 184 | ] 185 | for attr in _moved_attributes: 186 | setattr(_MovedItems, attr.name, attr) 187 | del attr 188 | 189 | moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves") 190 | 191 | 192 | def add_move(move): 193 | """Add an item to six.moves.""" 194 | setattr(_MovedItems, move.name, move) 195 | 196 | 197 | def remove_move(name): 198 | """Remove item from six.moves.""" 199 | try: 200 | delattr(_MovedItems, name) 201 | except AttributeError: 202 | try: 203 | del moves.__dict__[name] 204 | except KeyError: 205 | raise AttributeError("no such move, %r" % (name,)) 206 | 207 | 208 | if PY3: 209 | _meth_func = "__func__" 210 | _meth_self = "__self__" 211 | 212 | _func_closure = "__closure__" 213 | _func_code = "__code__" 214 | _func_defaults = "__defaults__" 215 | _func_globals = "__globals__" 216 | 217 | _iterkeys = "keys" 218 | _itervalues = "values" 219 | _iteritems = "items" 220 | _iterlists = "lists" 221 | else: 222 | _meth_func = "im_func" 223 | _meth_self = "im_self" 224 | 225 | _func_closure = "func_closure" 226 | _func_code = "func_code" 227 | _func_defaults = "func_defaults" 228 | _func_globals = "func_globals" 229 | 230 | _iterkeys = "iterkeys" 231 | _itervalues = "itervalues" 232 | _iteritems = "iteritems" 233 | _iterlists = "iterlists" 234 | 235 | 236 | try: 237 | advance_iterator = next 238 | except NameError: 239 | def advance_iterator(it): 240 | return it.next() 241 | next = advance_iterator 242 | 243 | 244 | try: 245 | callable = callable 246 | except NameError: 247 | def callable(obj): 248 | return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) 249 | 250 | 251 | if PY3: 252 | def get_unbound_function(unbound): 253 | return unbound 254 | 255 | Iterator = object 256 | else: 257 | def get_unbound_function(unbound): 258 | return unbound.im_func 259 | 260 | class Iterator(object): 261 | 262 | def next(self): 263 | return type(self).__next__(self) 264 | 265 | callable = callable 266 | _add_doc(get_unbound_function, 267 | """Get the function out of a possibly unbound function""") 268 | 269 | 270 | get_method_function = operator.attrgetter(_meth_func) 271 | get_method_self = operator.attrgetter(_meth_self) 272 | get_function_closure = operator.attrgetter(_func_closure) 273 | get_function_code = operator.attrgetter(_func_code) 274 | get_function_defaults = operator.attrgetter(_func_defaults) 275 | get_function_globals = operator.attrgetter(_func_globals) 276 | 277 | 278 | def iterkeys(d, **kw): 279 | """Return an iterator over the keys of a dictionary.""" 280 | return iter(getattr(d, _iterkeys)(**kw)) 281 | 282 | def itervalues(d, **kw): 283 | """Return an iterator over the values of a dictionary.""" 284 | return iter(getattr(d, _itervalues)(**kw)) 285 | 286 | def iteritems(d, **kw): 287 | """Return an iterator over the (key, value) pairs of a dictionary.""" 288 | return iter(getattr(d, _iteritems)(**kw)) 289 | 290 | def iterlists(d, **kw): 291 | """Return an iterator over the (key, [values]) pairs of a dictionary.""" 292 | return iter(getattr(d, _iterlists)(**kw)) 293 | 294 | 295 | if PY3: 296 | def b(s): 
297 | return s.encode("latin-1") 298 | def u(s): 299 | return s 300 | if sys.version_info[1] <= 1: 301 | def int2byte(i): 302 | return bytes((i,)) 303 | else: 304 | # This is about 2x faster than the implementation above on 3.2+ 305 | int2byte = operator.methodcaller("to_bytes", 1, "big") 306 | import io 307 | StringIO = io.StringIO 308 | BytesIO = io.BytesIO 309 | else: 310 | def b(s): 311 | return s 312 | def u(s): 313 | return unicode(s, "unicode_escape") 314 | int2byte = chr 315 | import StringIO 316 | StringIO = BytesIO = StringIO.StringIO 317 | _add_doc(b, """Byte literal""") 318 | _add_doc(u, """Text literal""") 319 | 320 | 321 | if PY3: 322 | import builtins 323 | exec_ = getattr(builtins, "exec") 324 | 325 | 326 | def reraise(tp, value, tb=None): 327 | if value.__traceback__ is not tb: 328 | raise value.with_traceback(tb) 329 | raise value 330 | 331 | 332 | print_ = getattr(builtins, "print") 333 | del builtins 334 | 335 | else: 336 | def exec_(_code_, _globs_=None, _locs_=None): 337 | """Execute code in a namespace.""" 338 | if _globs_ is None: 339 | frame = sys._getframe(1) 340 | _globs_ = frame.f_globals 341 | if _locs_ is None: 342 | _locs_ = frame.f_locals 343 | del frame 344 | elif _locs_ is None: 345 | _locs_ = _globs_ 346 | exec("""exec _code_ in _globs_, _locs_""") 347 | 348 | 349 | exec_("""def reraise(tp, value, tb=None): 350 | raise tp, value, tb 351 | """) 352 | 353 | 354 | def print_(*args, **kwargs): 355 | """The new-style print function.""" 356 | fp = kwargs.pop("file", sys.stdout) 357 | if fp is None: 358 | return 359 | def write(data): 360 | if not isinstance(data, basestring): 361 | data = str(data) 362 | fp.write(data) 363 | want_unicode = False 364 | sep = kwargs.pop("sep", None) 365 | if sep is not None: 366 | if isinstance(sep, unicode): 367 | want_unicode = True 368 | elif not isinstance(sep, str): 369 | raise TypeError("sep must be None or a string") 370 | end = kwargs.pop("end", None) 371 | if end is not None: 372 | if isinstance(end, unicode): 373 | want_unicode = True 374 | elif not isinstance(end, str): 375 | raise TypeError("end must be None or a string") 376 | if kwargs: 377 | raise TypeError("invalid keyword arguments to print()") 378 | if not want_unicode: 379 | for arg in args: 380 | if isinstance(arg, unicode): 381 | want_unicode = True 382 | break 383 | if want_unicode: 384 | newline = unicode("\n") 385 | space = unicode(" ") 386 | else: 387 | newline = "\n" 388 | space = " " 389 | if sep is None: 390 | sep = space 391 | if end is None: 392 | end = newline 393 | for i, arg in enumerate(args): 394 | if i: 395 | write(sep) 396 | write(arg) 397 | write(end) 398 | 399 | _add_doc(reraise, """Reraise an exception.""") 400 | 401 | 402 | def with_metaclass(meta, base=object): 403 | """Create a base class with a metaclass.""" 404 | return meta("NewBase", (base,), {}) 405 | -------------------------------------------------------------------------------- /pyhdf/test_SD.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import numpy as np 4 | import os 5 | import pyhdf.SD 6 | import shutil 7 | import tempfile 8 | from numpy.testing import assert_array_equal 9 | from pathlib import Path 10 | from pyhdf.SD import SDC 11 | 12 | def test_long_varname(): 13 | sds_name = 'a'*255 14 | 15 | temp = tempfile.mkdtemp(prefix='pyhdf_') 16 | try: 17 | path = os.path.join(temp, "test.hdf") 18 | 19 | # create a file with a long variable name 20 | sd = pyhdf.SD.SD(path, SDC.WRITE|SDC.CREATE|SDC.TRUNC) 21 | 
sds = sd.create(sds_name, SDC.FLOAT32, (3,)) 22 | sds[:] = range(10, 13) 23 | sds.endaccess() 24 | sd.end() 25 | 26 | # check we can read the variable name 27 | sd = pyhdf.SD.SD(path) 28 | sds = sd.select(sds_name) 29 | name, _, _, _, _ = sds.info() 30 | sds.endaccess() 31 | sd.end() 32 | assert sds_name == name 33 | finally: 34 | shutil.rmtree(temp) 35 | 36 | def test_negative_int8(): 37 | temp = tempfile.mkdtemp(prefix='pyhdf_') 38 | try: 39 | path = os.path.join(temp, "test.hdf") 40 | 41 | sd = pyhdf.SD.SD(path, SDC.WRITE|SDC.CREATE|SDC.TRUNC) 42 | data = np.zeros(shape=(20,20), dtype=np.int8) 43 | sds = sd.create("testsds", SDC.INT8, data.shape) 44 | sds.setfillvalue(-1) 45 | assert sds.getfillvalue() == -1 46 | 47 | sds.setrange(-50, -30) 48 | min, max = sds.getrange() 49 | assert min == -50 50 | assert max == -30 51 | 52 | attr = sds.attr("testattr") 53 | attr.set(SDC.INT8, -1) 54 | assert attr.get() == -1 55 | 56 | dim = sds.dim(0) 57 | scale = [-1]*20 58 | dim.setscale(SDC.INT8, scale) 59 | assert_array_equal(dim.getscale(), scale) 60 | 61 | sds[:,:] = -40 62 | sd.end() 63 | finally: 64 | shutil.rmtree(temp) 65 | 66 | def test_char(): 67 | with tempfile.TemporaryDirectory() as temp_dir: 68 | hdf_file = str(Path(temp_dir) / "test.hdf") 69 | sd = pyhdf.SD.SD(hdf_file, SDC.WRITE | SDC.CREATE) 70 | sds = sd.create("test_sds", SDC.CHAR, [5]) 71 | sds[:] = "ABCDE" 72 | assert_array_equal(sds[:], np.array(list("ABCDE"), "S2")) 73 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ 3 | "setuptools", 4 | "setuptools-scm", 5 | "numpy", 6 | ] 7 | build-backend = "setuptools.build_meta" 8 | 9 | [project] 10 | name = "pyhdf" 11 | dynamic = ["version"] 12 | description = "Python interface to the NCSA HDF4 library" 13 | readme = "README.md" 14 | keywords = ['hdf4', 'netcdf', 'numpy', 'python', 'pyhdf'] 15 | classifiers = [ 16 | "Development Status :: 5 - Production/Stable", 17 | "Intended Audience :: Science/Research", 18 | "Intended Audience :: Developers", 19 | "License :: OSI Approved", 20 | "Programming Language :: C", 21 | "Programming Language :: Python", 22 | "Programming Language :: Python :: 3", 23 | "Topic :: Software Development", 24 | "Topic :: Scientific/Engineering", 25 | "Operating System :: Microsoft :: Windows", 26 | "Operating System :: POSIX", 27 | "Operating System :: Unix", 28 | "Operating System :: MacOS", 29 | ] 30 | license = {text = "MIT License"} 31 | authors = [ 32 | {name = "Andre Gosselin", email = "Andre.Gosselin@dfo-mpo.gc.ca"}, 33 | {name = "Travis E. 
Oliphant", email = "teoliphant@gmail.com"}, 34 | ] 35 | maintainers = [ 36 | {name = "Fazlul Shahriar", email = "fshahriar@gmail.com"}, 37 | ] 38 | dependencies = [ 39 | "numpy" 40 | ] 41 | 42 | [project.urls] 43 | Homepage = 'https://github.com/fhs/pyhdf' 44 | 45 | [tool.setuptools_scm] 46 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [options] 2 | packages = pyhdf 3 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function 2 | 3 | import sys 4 | import os 5 | import os.path as path 6 | import shlex 7 | import sysconfig 8 | 9 | from setuptools import Extension, setup 10 | import numpy as np 11 | 12 | 13 | def _find_args(pat, env): 14 | try: 15 | val = os.environ[env].split(os.pathsep) 16 | except KeyError: 17 | val = [] 18 | try: 19 | k = sys.argv.index(pat) 20 | val.extend(sys.argv[k + 1].split(os.pathsep)) 21 | del sys.argv[k] 22 | del sys.argv[k] 23 | except ValueError: 24 | pass 25 | return val 26 | 27 | 28 | # A Debian based linux distribution might be using libhdf4 (contains netcdf 29 | # routines) or libhdf4-alt (does not contain netcdf routines). This function 30 | # tries to detect if the alt version should be used. 31 | def _use_hdf4alt(libdirs): 32 | if not sys.platform.startswith("linux"): 33 | return False 34 | libdirs.extend(os.environ.get("LD_LIBRARY_PATH", "").split(os.pathsep)) 35 | libdirs.append("/usr/lib/%s" % sysconfig.get_config_var('MULTIARCH')) 36 | libdirs.append("/usr/lib") 37 | libdirs.append("/usr/local/lib") 38 | libdirs.append("/lib") 39 | for d in libdirs: 40 | if os.path.exists(os.path.join(d, "libdfalt.so")) and os.path.exists( 41 | os.path.join(d, "libmfhdfalt.so") 42 | ): 43 | return True 44 | return False 45 | 46 | 47 | include_dirs = _find_args("-i", "INCLUDE_DIRS") 48 | library_dirs = _find_args("-l", "LIBRARY_DIRS") 49 | szip_installed = "SZIP" in os.environ 50 | compress = "NO_COMPRESS" not in os.environ 51 | extra_link_args = None 52 | if "LINK_ARGS" in os.environ: 53 | extra_link_args = shlex.split(os.environ["LINK_ARGS"]) 54 | 55 | 56 | msg = ( 57 | "Cannot proceed without the HDF4 library. Please " 58 | "export INCLUDE_DIRS and LIBRARY_DIRS as explained" 59 | "in the INSTALL file." 60 | ) 61 | 62 | if sys.platform.startswith("linux"): 63 | # libhdf4 header files on most linux distributations 64 | # (e.g. 
Debian/Ubuntu, CentOS) are stored in /usr/include/hdf 65 | d = "/usr/include/hdf/" 66 | if not include_dirs and os.path.exists(d): 67 | include_dirs.append(d) 68 | 69 | for p in include_dirs + library_dirs: 70 | if not path.exists(p): 71 | print("\n******\n%s not found\n******\n\n" % p) 72 | raise RuntimeError(msg) 73 | 74 | if sys.platform == "win32": 75 | libraries = ["mfhdf", "hdf", "xdr"] 76 | elif _use_hdf4alt(library_dirs): 77 | libraries = ["mfhdfalt", "dfalt"] 78 | else: 79 | libraries = ["mfhdf", "df"] 80 | 81 | if szip_installed: 82 | extra_compile_args = [] 83 | if sys.platform == "win32": 84 | libraries += ["szlib"] 85 | else: 86 | libraries += ["sz"] 87 | else: 88 | extra_compile_args = ["-DNOSZIP"] 89 | if sys.platform == "win32": 90 | libraries += ["libjpeg", "zlib", "ws2_32"] 91 | else: 92 | libraries += ["jpeg", "z"] 93 | 94 | if not compress: 95 | extra_compile_args += ["-DNOCOMPRESS"] 96 | 97 | 98 | setup( 99 | ext_modules=[ 100 | Extension( 101 | name="pyhdf._hdfext", 102 | sources=["pyhdf/hdfext_wrap.c"], 103 | include_dirs=[np.get_include()] + include_dirs, 104 | extra_compile_args=extra_compile_args, 105 | library_dirs=library_dirs, 106 | extra_link_args=extra_link_args, 107 | libraries=libraries, 108 | ), 109 | ], 110 | ) 111 | --------------------------------------------------------------------------------