├── .coveragerc ├── .gitattributes ├── .github └── workflows │ ├── deploy.yaml │ ├── docs.yaml │ └── gh-ci.yaml ├── .gitignore ├── .readthedocs.yml ├── AUTHORS ├── CHANGELOG ├── COPYING ├── COPYING.LESSER ├── INSTALL ├── MAINTAINER.md ├── MANIFEST.in ├── README.rst ├── ci ├── environment.yaml └── requirements.txt ├── doc ├── Makefile └── source │ ├── _static │ └── logos │ │ ├── AUTHOR │ │ ├── mdanalysis-griddataformats-logo.pdf │ │ └── mdanalysis-griddataformats-logo.png │ ├── conf.py │ ├── gridData │ ├── basic.rst │ ├── core.rst │ ├── formats.rst │ ├── formats │ │ ├── OpenDX.rst │ │ ├── gOpenMol.rst │ │ └── mrc.rst │ └── overview.rst │ ├── index.rst │ └── installation.rst ├── examples └── extractISOPdb │ ├── extractISOPdb.py │ ├── extractISOPdb_gdf.py │ ├── how │ └── mdpout_freq_grid.dx ├── gridData ├── OpenDX.py ├── __init__.py ├── core.py ├── gOpenMol.py ├── mrc.py └── tests │ ├── __init__.py │ ├── datafiles │ ├── 1jzv.ccp4 │ ├── EMD-3001.map.bz2 │ ├── __init__.py │ ├── nAChR_M2_water.plt │ ├── test.ccp4 │ ├── test.dx │ └── test.dx.gz │ ├── test_dx.py │ ├── test_gOpenMol.py │ ├── test_grid.py │ └── test_mrc.py └── pyproject.toml /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | omit = 4 | # omit all tests 5 | gridData/tests/* 6 | gridData/testing/* 7 | # omit the versioneer-installed _version.py 8 | gridData/_version.py 9 | 10 | [report] 11 | exclude_lines = 12 | # omit lines marked with: # pragma: no cover 13 | pragma: no cover 14 | 15 | # Don't complain about missing debug-only code: 16 | def __unicode__ 17 | def __repr__ 18 | 19 | # Don't complain if tests don't hit defensive assertion code: 20 | raise AssertionError 21 | raise NotImplementedError 22 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | gridData/_version.py export-subst 2 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yaml: -------------------------------------------------------------------------------- 1 | name: Build and upload to PyPi 2 | 3 | on: 4 | push: 5 | tags: 6 | - "*" 7 | release: 8 | types: 9 | - published 10 | 11 | jobs: 12 | testpypi_push: 13 | environment: 14 | name: deploy 15 | url: https://test.pypi.org/p/GridDataFormats 16 | permissions: 17 | id-token: write 18 | if: | 19 | github.repository == 'MDAnalysis/GridDataFormats' && 20 | (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')) 21 | name: Build, upload and test pure Python wheels to TestPypi 22 | runs-on: ubuntu-latest 23 | 24 | steps: 25 | - uses: actions/checkout@v4 26 | 27 | - name: testpypi_deploy 28 | uses: MDAnalysis/pypi-deployment@main 29 | if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') 30 | with: 31 | test_submission: true 32 | package_name: GridDataFormats 33 | module_name: 'gridData' 34 | 35 | pypi_push: 36 | environment: 37 | name: deploy 38 | url: https://pypi.org/p/GridDataFormats 39 | permissions: 40 | id-token: write 41 | if: | 42 | github.repository == 'MDAnalysis/GridDataFormats' && 43 | (github.event_name == 'release' && github.event.action == 'published') 44 | name: Build, upload and test pure Python wheels to PyPi 45 | runs-on: ubuntu-latest 46 | 47 | steps: 48 | - uses: actions/checkout@v4 49 | 50 | - name: pypi_deploy 51 | uses: MDAnalysis/pypi-deployment@main 52 | if: github.event_name == 'release' && github.event.action == 'published' 53 | with: 54 | package_name: GridDataFormats 55 | module_name: 'gridData' 56 | -------------------------------------------------------------------------------- /.github/workflows/docs.yaml: -------------------------------------------------------------------------------- 1 | name: docs deployment 2 | on: 3 | push: 4 | branches: 5 | - master 6 | pull_request: 7 | branches: 8 | - master 9 | 10 | concurrency: 11 | group: "${{ github.ref }}-${{ github.head_ref }}" 12 | cancel-in-progress: true 13 | 14 | defaults: 15 | run: 16 | shell: bash -l {0} 17 | 18 | jobs: 19 | docs: 20 | if: "github.repository == 'MDAnalysis/GridDataFormats'" 21 | runs-on: ubuntu-latest 22 | 23 | steps: 24 | - uses: actions/checkout@v3 25 | with: 26 | fetch-depth: 0 27 | 28 | - name: setup_miniconda 29 | uses: conda-incubator/setup-miniconda@v2 30 | with: 31 | python-version: 3.11 32 | environment-file: ci/environment.yaml 33 | auto-update-conda: true 34 | channel-priority: flexible 35 | channels: conda-forge 36 | miniforge-variant: Mambaforge 37 | use-mamba: true 38 | 39 | - name: install package 40 | run: | 41 | pip install -v . 42 | 43 | - name: build docs 44 | run: | 45 | cd doc && sphinx-build -b html source build 46 | 47 | - name: deploy docs 48 | uses: peaceiris/actions-gh-pages@v3 49 | if: github.event_name != 'pull_request' 50 | with: 51 | github_token: ${{ secrets.GITHUB_TOKEN }} 52 | publish_dir: ./doc/build 53 | user_name: 'github-actions' 54 | user_email: 'github-action@users.noreply.github.com' 55 | -------------------------------------------------------------------------------- /.github/workflows/gh-ci.yaml: -------------------------------------------------------------------------------- 1 | name: GH Actions CI 2 | on: 3 | push: 4 | branches: 5 | - master 6 | pull_request: 7 | branches: 8 | - master 9 | schedule: 10 | # 3 am Tuesdays and Fridays 11 | - cron: "0 3 * * 2,5" 12 | 13 | concurrency: 14 | group: "${{ github.ref }}-${{ github.head_ref }}-${{ github.workflow }}" 15 | cancel-in-progress: true 16 | 17 | defaults: 18 | run: 19 | shell: bash -leo pipefail {0} 20 | 21 | jobs: 22 | unittests: 23 | if: "github.repository == 'MDAnalysis/GridDataFormats'" 24 | runs-on: ${{ matrix.os }} 25 | strategy: 26 | fail-fast: false 27 | matrix: 28 | os: [ubuntu-latest, ] 29 | python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] 30 | include: 31 | - os: windows-latest 32 | python-version: "3.13" 33 | - os: macos-latest 34 | python-version: "3.13" 35 | 36 | steps: 37 | - uses: actions/checkout@v4 38 | 39 | - name: setup_micromamba 40 | uses: mamba-org/setup-micromamba@v2 41 | with: 42 | environment-name: mda 43 | create-args: >- 44 | python=${{ matrix.python-version }} 45 | pip 46 | condarc: | 47 | channels: 48 | - conda-forge 49 | 50 | - name: install package deps 51 | run: | 52 | micromamba install numpy scipy mrcfile pytest pytest-cov codecov 53 | 54 | - name: check install 55 | run: | 56 | which python 57 | which pip 58 | micromamba info 59 | micromamba list 60 | 61 | - name: install package 62 | run: | 63 | pip install -v . 64 | 65 | - name: run unit tests 66 | run: | 67 | pytest -v --cov=gridData --cov-report=xml --color=yes ./gridData/tests 68 | 69 | - name: codecov 70 | uses: codecov/codecov-action@v4 71 | if: github.event_name != 'schedule' 72 | with: 73 | file: coverage.xml 74 | fail_ci_if_error: True 75 | verbose: True 76 | token: ${{ secrets.CODECOV_TOKEN }} 77 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | GridDataFormats.egg-info 3 | build 4 | dist 5 | doc/html 6 | doc/doctrees 7 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Build documentation in with Sphinx 9 | sphinx: 10 | configuration: doc/source/conf.py 11 | 12 | # Set the version of Python and other tools you might need 13 | build: 14 | os: ubuntu-22.04 15 | tools: 16 | python: "mambaforge-4.10" 17 | 18 | python: 19 | install: 20 | - method: pip 21 | path: . 22 | 23 | # Optionally set the version of Python and requirements required to build your docs 24 | conda: 25 | environment: ci/environment.yaml 26 | -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | ================= 2 | gridDataFormats 3 | ================= 4 | 5 | Copyright (c) 2007-2020 Oliver Beckstein and Contributors 6 | 7 | Published under the GNU Lesser General Public Licence, version 3 8 | 9 | * releases: https://pypi.python.org/pypi/GridDataFormats 10 | * source code repository: https://github.com/MDAnalysis/GridDataFormats 11 | 12 | Contributors: 13 | 14 | * Jesse Johnson (CCP4 format support) 15 | * Max Linke 16 | * Jan Domanski 17 | * Dominik Mierzejewski 18 | * Tyler Luchko 19 | * Giacomo Fiorin 20 | * Eloy Félix 21 | * René Hafner (Hamburger) 22 | * Lily Wang 23 | * Josh Vermaas 24 | * Irfan Alibay 25 | * Zhiyi Wu -------------------------------------------------------------------------------- /CHANGELOG: -------------------------------------------------------------------------------- 1 | # -*- tab-width: 2; indent-tabs-mode: nil; coding: utf-8 -*- 2 | =========================== 3 | GridDataFormats CHANGELOG 4 | =========================== 5 | 6 | The rules for this file: 7 | * release numbering uses semantic versioning 2.0 https://semver.org/ 8 | * entries are sorted newest-first. 9 | * summarize sets of changes - don't reproduce every git log comment here. 10 | * don't ever delete anything. 11 | * keep the format consistent (79 char width, M/D/Y date format) and do not 12 | use tabs but use spaces for formatting 13 | * accompany each entry with github issue/PR number (Issue #xyz) 14 | 15 | ------------------------------------------------------------------------------ 16 | 10/21/2023 IAlibay, orbeckst, lilyminium 17 | 18 | * 1.0.2 19 | 20 | Changes 21 | 22 | * Python 3.12 is now supported (PR #128) 23 | * Use new MDAnalysis docs theme (PR #127) 24 | 25 | Fixes 26 | 27 | * updated versioneer to be able to run under Python 3.12 (#124, PR #128) 28 | * replaced deprecated pkg_resources in tests with importlib (#130) 29 | 30 | 31 | 05/24/2022 IAlibay 32 | 33 | * 1.0.1 34 | 35 | Changes 36 | 37 | * Deployment is now automatically done using github actions. 38 | 39 | Fixes 40 | 41 | * Version 1.0.0 would allow for installations to be made with python versions 42 | < 3.8, resulting in incorrect builds. The setup configuration has now been 43 | altered to avoid this (Issue #112) 44 | 45 | 46 | 05/21/2022 orbeckst, IAlibay 47 | 48 | * 1.0.0 49 | 50 | Changes 51 | 52 | * API-stable release 53 | * deprecated and buggy CCP4 module was removed (#107, #50) 54 | * gridDataFormats now follows NEP29 (#102) 55 | * removed support for Python 2.7 and Python <3.8 (#102) 56 | 57 | 58 | 02/20/2022 orbeckst, tluchko, IAlibay 59 | 60 | * 0.7.0 61 | 62 | Changes 63 | 64 | * Python 3.10 is supported (issue #101) 65 | 66 | Enhancements 67 | 68 | * use mrcfile library to parse MRC files (including CCP4) using the 69 | new mrc.MRC class (issue #83) 70 | 71 | Fixes 72 | 73 | * The new mrc module correctly reorients the coordinate system based 74 | on mapc, mapr, maps and correctly calculates the origin (issue #76) 75 | * documented Grid attributes, including axis convention (issue #69) 76 | 77 | Deprecations 78 | 79 | * The CCP4 module (replaced by mrc) will be removed in 1.0.0 80 | 81 | 82 | 10/10/2021 eloyfelix, renehamburger1993, lilyminium, jvermaas, xiki-tempula, 83 | IAlibay, orbeckst 84 | 85 | * 0.6.0 86 | 87 | Changes 88 | 89 | * macOS and Windows are also tested (min and max supported Python, #97) 90 | * switched CI to using GitHub actions (#86) 91 | 92 | Enhancements 93 | 94 | * Allow parsing/writing gzipped DX files (PR #70, #99) 95 | * Update doc theme to use sphinx-rtd-theme (#80) 96 | * Delta of OpenDX writes 7 significant figures (#88) 97 | 98 | Fixes 99 | 100 | * fix initialization of mutable instance variable of Grid class (metadata dict) (#71) 101 | * fix multiple __init__ calls (#73) 102 | * interpolation behavior outside of the grid changed to default to a 103 | constant rather than the nearest value (#84) 104 | * corrected resampling behavior to not draw on values outside of the grid (#84) 105 | 106 | 107 | 05/16/2019 giacomofiorin, orbeckst 108 | 109 | * 0.5.0 110 | 111 | Changes 112 | 113 | * supported/tested on Python 2.7, >=3.5 (official 3.4 support was 114 | dropped: it might still work but is not tested anymore) 115 | 116 | Enhancements 117 | 118 | * Allow parsing DX files by NAMD's GridForces module (new typequote keyword, #58) 119 | * New keyword argument ``Grid(grid=, file_format=)`` to set file format when 120 | reading from a file (#33) 121 | 122 | Fixes 123 | 124 | * Allow parsing DX files by Pymol's buggy floating-point parser (#58) 125 | * Fixed loading of pickle files with .pkl suffix 126 | * Better input handling when constructing a Grid object (#59) 127 | 128 | 04/06/2019 rathann, tluchko, orbeckst 129 | 130 | * 0.4.1 131 | 132 | Fixes 133 | 134 | * Fixed testsuite on 32bit architectures (issue #44) 135 | * Improved DX parsing speed (#2) 136 | * Fixed reading in DX files containing scientific notation (PR #52) 137 | * Added missing floordivision to Grid (PR #53) 138 | * fix test on ARM (#51) 139 | * fix incorrect reading of ncstart and nrstart in CCP4 (#57) 140 | * fix that arithemtical operations broke inheritance (#56) 141 | * fix so that subclasses of ndarray are retained on input (#56) 142 | * fix Grid.save(filename) so that it respects the user's filename (#64) 143 | * fix grid equality test g == h (relied on old numpy behavior) 144 | 145 | Changes (do not affect user) 146 | 147 | * use pytest as testing framework (#49) 148 | * use truedivision consistently throughout the package and the tests (PR #52) 149 | * installation now requires scipy (it is much easier to install than it 150 | used to) 151 | 152 | 01/17/2017 orbeckst, kain88-de 153 | 154 | * 0.4.0 155 | 156 | Changes/Enhancements 157 | 158 | * Type of DX arrays is now set based on the numpy array dtype or can 159 | be manually set by the user to increase interoperability with 160 | tools such as PyMOL (issue #35) 161 | 162 | 05/13/2016 kain88-de 163 | 164 | * 0.3.3 165 | 166 | Changes 167 | 168 | * Included tempdir module directly 169 | 170 | 12/11/2015 orbeckst 171 | 172 | * 0.3.2 173 | 174 | Enhancements 175 | 176 | Changes 177 | 178 | * can import without scipy present (scipy.ndimage will only be used 179 | on demand when interpolation of a Grid is requested) (issue #25) 180 | 181 | Fixes 182 | 183 | 184 | 12/07/2015 orbeckst, richardjgowers 185 | 186 | * 0.3.1 187 | 188 | Enhancements 189 | 190 | Changes 191 | 192 | Fixes 193 | 194 | * fixed writing of DX files failed with "NotImplementedError: Only 195 | regularly spaced grids allowed." (issue #21 and 196 | MDAnalysis/mdanalysis#544) 197 | 198 | 09/22/2015 kain88-de, holocronweavers, orbeckst 199 | 200 | * 0.3.0 201 | 202 | Enhancements 203 | 204 | * Python 3 compatible (issue #5) 205 | * added subset of CCP4 format (reading) 206 | 207 | Changes 208 | 209 | * unit tests and coverage 210 | * docs at http://www.mdanalysis.org/GridDataFormats/ are always in 211 | sync with the master branch, 212 | http://griddataformats.readthedocs.org are for releases 213 | 214 | 215 | For previous releases please see the log messages in the git 216 | history. For authors see the file AUTHORS. 217 | -------------------------------------------------------------------------------- /COPYING: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU General Public License is a free, copyleft license for 11 | software and other kinds of works. 12 | 13 | The licenses for most software and other practical works are designed 14 | to take away your freedom to share and change the works. By contrast, 15 | the GNU General Public License is intended to guarantee your freedom to 16 | share and change all versions of a program--to make sure it remains free 17 | software for all its users. We, the Free Software Foundation, use the 18 | GNU General Public License for most of our software; it applies also to 19 | any other work released this way by its authors. You can apply it to 20 | your programs, too. 21 | 22 | When we speak of free software, we are referring to freedom, not 23 | price. Our General Public Licenses are designed to make sure that you 24 | have the freedom to distribute copies of free software (and charge for 25 | them if you wish), that you receive source code or can get it if you 26 | want it, that you can change the software or use pieces of it in new 27 | free programs, and that you know you can do these things. 28 | 29 | To protect your rights, we need to prevent others from denying you 30 | these rights or asking you to surrender the rights. Therefore, you have 31 | certain responsibilities if you distribute copies of the software, or if 32 | you modify it: responsibilities to respect the freedom of others. 33 | 34 | For example, if you distribute copies of such a program, whether 35 | gratis or for a fee, you must pass on to the recipients the same 36 | freedoms that you received. You must make sure that they, too, receive 37 | or can get the source code. And you must show them these terms so they 38 | know their rights. 39 | 40 | Developers that use the GNU GPL protect your rights with two steps: 41 | (1) assert copyright on the software, and (2) offer you this License 42 | giving you legal permission to copy, distribute and/or modify it. 43 | 44 | For the developers' and authors' protection, the GPL clearly explains 45 | that there is no warranty for this free software. For both users' and 46 | authors' sake, the GPL requires that modified versions be marked as 47 | changed, so that their problems will not be attributed erroneously to 48 | authors of previous versions. 49 | 50 | Some devices are designed to deny users access to install or run 51 | modified versions of the software inside them, although the manufacturer 52 | can do so. This is fundamentally incompatible with the aim of 53 | protecting users' freedom to change the software. The systematic 54 | pattern of such abuse occurs in the area of products for individuals to 55 | use, which is precisely where it is most unacceptable. Therefore, we 56 | have designed this version of the GPL to prohibit the practice for those 57 | products. If such problems arise substantially in other domains, we 58 | stand ready to extend this provision to those domains in future versions 59 | of the GPL, as needed to protect the freedom of users. 60 | 61 | Finally, every program is threatened constantly by software patents. 62 | States should not allow patents to restrict development and use of 63 | software on general-purpose computers, but in those that do, we wish to 64 | avoid the special danger that patents applied to a free program could 65 | make it effectively proprietary. To prevent this, the GPL assures that 66 | patents cannot be used to render the program non-free. 67 | 68 | The precise terms and conditions for copying, distribution and 69 | modification follow. 70 | 71 | TERMS AND CONDITIONS 72 | 73 | 0. Definitions. 74 | 75 | "This License" refers to version 3 of the GNU General Public License. 76 | 77 | "Copyright" also means copyright-like laws that apply to other kinds of 78 | works, such as semiconductor masks. 79 | 80 | "The Program" refers to any copyrightable work licensed under this 81 | License. Each licensee is addressed as "you". "Licensees" and 82 | "recipients" may be individuals or organizations. 83 | 84 | To "modify" a work means to copy from or adapt all or part of the work 85 | in a fashion requiring copyright permission, other than the making of an 86 | exact copy. The resulting work is called a "modified version" of the 87 | earlier work or a work "based on" the earlier work. 88 | 89 | A "covered work" means either the unmodified Program or a work based 90 | on the Program. 91 | 92 | To "propagate" a work means to do anything with it that, without 93 | permission, would make you directly or secondarily liable for 94 | infringement under applicable copyright law, except executing it on a 95 | computer or modifying a private copy. Propagation includes copying, 96 | distribution (with or without modification), making available to the 97 | public, and in some countries other activities as well. 98 | 99 | To "convey" a work means any kind of propagation that enables other 100 | parties to make or receive copies. Mere interaction with a user through 101 | a computer network, with no transfer of a copy, is not conveying. 102 | 103 | An interactive user interface displays "Appropriate Legal Notices" 104 | to the extent that it includes a convenient and prominently visible 105 | feature that (1) displays an appropriate copyright notice, and (2) 106 | tells the user that there is no warranty for the work (except to the 107 | extent that warranties are provided), that licensees may convey the 108 | work under this License, and how to view a copy of this License. If 109 | the interface presents a list of user commands or options, such as a 110 | menu, a prominent item in the list meets this criterion. 111 | 112 | 1. Source Code. 113 | 114 | The "source code" for a work means the preferred form of the work 115 | for making modifications to it. "Object code" means any non-source 116 | form of a work. 117 | 118 | A "Standard Interface" means an interface that either is an official 119 | standard defined by a recognized standards body, or, in the case of 120 | interfaces specified for a particular programming language, one that 121 | is widely used among developers working in that language. 122 | 123 | The "System Libraries" of an executable work include anything, other 124 | than the work as a whole, that (a) is included in the normal form of 125 | packaging a Major Component, but which is not part of that Major 126 | Component, and (b) serves only to enable use of the work with that 127 | Major Component, or to implement a Standard Interface for which an 128 | implementation is available to the public in source code form. A 129 | "Major Component", in this context, means a major essential component 130 | (kernel, window system, and so on) of the specific operating system 131 | (if any) on which the executable work runs, or a compiler used to 132 | produce the work, or an object code interpreter used to run it. 133 | 134 | The "Corresponding Source" for a work in object code form means all 135 | the source code needed to generate, install, and (for an executable 136 | work) run the object code and to modify the work, including scripts to 137 | control those activities. However, it does not include the work's 138 | System Libraries, or general-purpose tools or generally available free 139 | programs which are used unmodified in performing those activities but 140 | which are not part of the work. For example, Corresponding Source 141 | includes interface definition files associated with source files for 142 | the work, and the source code for shared libraries and dynamically 143 | linked subprograms that the work is specifically designed to require, 144 | such as by intimate data communication or control flow between those 145 | subprograms and other parts of the work. 146 | 147 | The Corresponding Source need not include anything that users 148 | can regenerate automatically from other parts of the Corresponding 149 | Source. 150 | 151 | The Corresponding Source for a work in source code form is that 152 | same work. 153 | 154 | 2. Basic Permissions. 155 | 156 | All rights granted under this License are granted for the term of 157 | copyright on the Program, and are irrevocable provided the stated 158 | conditions are met. This License explicitly affirms your unlimited 159 | permission to run the unmodified Program. The output from running a 160 | covered work is covered by this License only if the output, given its 161 | content, constitutes a covered work. This License acknowledges your 162 | rights of fair use or other equivalent, as provided by copyright law. 163 | 164 | You may make, run and propagate covered works that you do not 165 | convey, without conditions so long as your license otherwise remains 166 | in force. You may convey covered works to others for the sole purpose 167 | of having them make modifications exclusively for you, or provide you 168 | with facilities for running those works, provided that you comply with 169 | the terms of this License in conveying all material for which you do 170 | not control copyright. Those thus making or running the covered works 171 | for you must do so exclusively on your behalf, under your direction 172 | and control, on terms that prohibit them from making any copies of 173 | your copyrighted material outside their relationship with you. 174 | 175 | Conveying under any other circumstances is permitted solely under 176 | the conditions stated below. Sublicensing is not allowed; section 10 177 | makes it unnecessary. 178 | 179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 180 | 181 | No covered work shall be deemed part of an effective technological 182 | measure under any applicable law fulfilling obligations under article 183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 184 | similar laws prohibiting or restricting circumvention of such 185 | measures. 186 | 187 | When you convey a covered work, you waive any legal power to forbid 188 | circumvention of technological measures to the extent such circumvention 189 | is effected by exercising rights under this License with respect to 190 | the covered work, and you disclaim any intention to limit operation or 191 | modification of the work as a means of enforcing, against the work's 192 | users, your or third parties' legal rights to forbid circumvention of 193 | technological measures. 194 | 195 | 4. Conveying Verbatim Copies. 196 | 197 | You may convey verbatim copies of the Program's source code as you 198 | receive it, in any medium, provided that you conspicuously and 199 | appropriately publish on each copy an appropriate copyright notice; 200 | keep intact all notices stating that this License and any 201 | non-permissive terms added in accord with section 7 apply to the code; 202 | keep intact all notices of the absence of any warranty; and give all 203 | recipients a copy of this License along with the Program. 204 | 205 | You may charge any price or no price for each copy that you convey, 206 | and you may offer support or warranty protection for a fee. 207 | 208 | 5. Conveying Modified Source Versions. 209 | 210 | You may convey a work based on the Program, or the modifications to 211 | produce it from the Program, in the form of source code under the 212 | terms of section 4, provided that you also meet all of these conditions: 213 | 214 | a) The work must carry prominent notices stating that you modified 215 | it, and giving a relevant date. 216 | 217 | b) The work must carry prominent notices stating that it is 218 | released under this License and any conditions added under section 219 | 7. This requirement modifies the requirement in section 4 to 220 | "keep intact all notices". 221 | 222 | c) You must license the entire work, as a whole, under this 223 | License to anyone who comes into possession of a copy. This 224 | License will therefore apply, along with any applicable section 7 225 | additional terms, to the whole of the work, and all its parts, 226 | regardless of how they are packaged. This License gives no 227 | permission to license the work in any other way, but it does not 228 | invalidate such permission if you have separately received it. 229 | 230 | d) If the work has interactive user interfaces, each must display 231 | Appropriate Legal Notices; however, if the Program has interactive 232 | interfaces that do not display Appropriate Legal Notices, your 233 | work need not make them do so. 234 | 235 | A compilation of a covered work with other separate and independent 236 | works, which are not by their nature extensions of the covered work, 237 | and which are not combined with it such as to form a larger program, 238 | in or on a volume of a storage or distribution medium, is called an 239 | "aggregate" if the compilation and its resulting copyright are not 240 | used to limit the access or legal rights of the compilation's users 241 | beyond what the individual works permit. Inclusion of a covered work 242 | in an aggregate does not cause this License to apply to the other 243 | parts of the aggregate. 244 | 245 | 6. Conveying Non-Source Forms. 246 | 247 | You may convey a covered work in object code form under the terms 248 | of sections 4 and 5, provided that you also convey the 249 | machine-readable Corresponding Source under the terms of this License, 250 | in one of these ways: 251 | 252 | a) Convey the object code in, or embodied in, a physical product 253 | (including a physical distribution medium), accompanied by the 254 | Corresponding Source fixed on a durable physical medium 255 | customarily used for software interchange. 256 | 257 | b) Convey the object code in, or embodied in, a physical product 258 | (including a physical distribution medium), accompanied by a 259 | written offer, valid for at least three years and valid for as 260 | long as you offer spare parts or customer support for that product 261 | model, to give anyone who possesses the object code either (1) a 262 | copy of the Corresponding Source for all the software in the 263 | product that is covered by this License, on a durable physical 264 | medium customarily used for software interchange, for a price no 265 | more than your reasonable cost of physically performing this 266 | conveying of source, or (2) access to copy the 267 | Corresponding Source from a network server at no charge. 268 | 269 | c) Convey individual copies of the object code with a copy of the 270 | written offer to provide the Corresponding Source. This 271 | alternative is allowed only occasionally and noncommercially, and 272 | only if you received the object code with such an offer, in accord 273 | with subsection 6b. 274 | 275 | d) Convey the object code by offering access from a designated 276 | place (gratis or for a charge), and offer equivalent access to the 277 | Corresponding Source in the same way through the same place at no 278 | further charge. You need not require recipients to copy the 279 | Corresponding Source along with the object code. If the place to 280 | copy the object code is a network server, the Corresponding Source 281 | may be on a different server (operated by you or a third party) 282 | that supports equivalent copying facilities, provided you maintain 283 | clear directions next to the object code saying where to find the 284 | Corresponding Source. Regardless of what server hosts the 285 | Corresponding Source, you remain obligated to ensure that it is 286 | available for as long as needed to satisfy these requirements. 287 | 288 | e) Convey the object code using peer-to-peer transmission, provided 289 | you inform other peers where the object code and Corresponding 290 | Source of the work are being offered to the general public at no 291 | charge under subsection 6d. 292 | 293 | A separable portion of the object code, whose source code is excluded 294 | from the Corresponding Source as a System Library, need not be 295 | included in conveying the object code work. 296 | 297 | A "User Product" is either (1) a "consumer product", which means any 298 | tangible personal property which is normally used for personal, family, 299 | or household purposes, or (2) anything designed or sold for incorporation 300 | into a dwelling. In determining whether a product is a consumer product, 301 | doubtful cases shall be resolved in favor of coverage. For a particular 302 | product received by a particular user, "normally used" refers to a 303 | typical or common use of that class of product, regardless of the status 304 | of the particular user or of the way in which the particular user 305 | actually uses, or expects or is expected to use, the product. A product 306 | is a consumer product regardless of whether the product has substantial 307 | commercial, industrial or non-consumer uses, unless such uses represent 308 | the only significant mode of use of the product. 309 | 310 | "Installation Information" for a User Product means any methods, 311 | procedures, authorization keys, or other information required to install 312 | and execute modified versions of a covered work in that User Product from 313 | a modified version of its Corresponding Source. The information must 314 | suffice to ensure that the continued functioning of the modified object 315 | code is in no case prevented or interfered with solely because 316 | modification has been made. 317 | 318 | If you convey an object code work under this section in, or with, or 319 | specifically for use in, a User Product, and the conveying occurs as 320 | part of a transaction in which the right of possession and use of the 321 | User Product is transferred to the recipient in perpetuity or for a 322 | fixed term (regardless of how the transaction is characterized), the 323 | Corresponding Source conveyed under this section must be accompanied 324 | by the Installation Information. But this requirement does not apply 325 | if neither you nor any third party retains the ability to install 326 | modified object code on the User Product (for example, the work has 327 | been installed in ROM). 328 | 329 | The requirement to provide Installation Information does not include a 330 | requirement to continue to provide support service, warranty, or updates 331 | for a work that has been modified or installed by the recipient, or for 332 | the User Product in which it has been modified or installed. Access to a 333 | network may be denied when the modification itself materially and 334 | adversely affects the operation of the network or violates the rules and 335 | protocols for communication across the network. 336 | 337 | Corresponding Source conveyed, and Installation Information provided, 338 | in accord with this section must be in a format that is publicly 339 | documented (and with an implementation available to the public in 340 | source code form), and must require no special password or key for 341 | unpacking, reading or copying. 342 | 343 | 7. Additional Terms. 344 | 345 | "Additional permissions" are terms that supplement the terms of this 346 | License by making exceptions from one or more of its conditions. 347 | Additional permissions that are applicable to the entire Program shall 348 | be treated as though they were included in this License, to the extent 349 | that they are valid under applicable law. If additional permissions 350 | apply only to part of the Program, that part may be used separately 351 | under those permissions, but the entire Program remains governed by 352 | this License without regard to the additional permissions. 353 | 354 | When you convey a copy of a covered work, you may at your option 355 | remove any additional permissions from that copy, or from any part of 356 | it. (Additional permissions may be written to require their own 357 | removal in certain cases when you modify the work.) You may place 358 | additional permissions on material, added by you to a covered work, 359 | for which you have or can give appropriate copyright permission. 360 | 361 | Notwithstanding any other provision of this License, for material you 362 | add to a covered work, you may (if authorized by the copyright holders of 363 | that material) supplement the terms of this License with terms: 364 | 365 | a) Disclaiming warranty or limiting liability differently from the 366 | terms of sections 15 and 16 of this License; or 367 | 368 | b) Requiring preservation of specified reasonable legal notices or 369 | author attributions in that material or in the Appropriate Legal 370 | Notices displayed by works containing it; or 371 | 372 | c) Prohibiting misrepresentation of the origin of that material, or 373 | requiring that modified versions of such material be marked in 374 | reasonable ways as different from the original version; or 375 | 376 | d) Limiting the use for publicity purposes of names of licensors or 377 | authors of the material; or 378 | 379 | e) Declining to grant rights under trademark law for use of some 380 | trade names, trademarks, or service marks; or 381 | 382 | f) Requiring indemnification of licensors and authors of that 383 | material by anyone who conveys the material (or modified versions of 384 | it) with contractual assumptions of liability to the recipient, for 385 | any liability that these contractual assumptions directly impose on 386 | those licensors and authors. 387 | 388 | All other non-permissive additional terms are considered "further 389 | restrictions" within the meaning of section 10. If the Program as you 390 | received it, or any part of it, contains a notice stating that it is 391 | governed by this License along with a term that is a further 392 | restriction, you may remove that term. If a license document contains 393 | a further restriction but permits relicensing or conveying under this 394 | License, you may add to a covered work material governed by the terms 395 | of that license document, provided that the further restriction does 396 | not survive such relicensing or conveying. 397 | 398 | If you add terms to a covered work in accord with this section, you 399 | must place, in the relevant source files, a statement of the 400 | additional terms that apply to those files, or a notice indicating 401 | where to find the applicable terms. 402 | 403 | Additional terms, permissive or non-permissive, may be stated in the 404 | form of a separately written license, or stated as exceptions; 405 | the above requirements apply either way. 406 | 407 | 8. Termination. 408 | 409 | You may not propagate or modify a covered work except as expressly 410 | provided under this License. Any attempt otherwise to propagate or 411 | modify it is void, and will automatically terminate your rights under 412 | this License (including any patent licenses granted under the third 413 | paragraph of section 11). 414 | 415 | However, if you cease all violation of this License, then your 416 | license from a particular copyright holder is reinstated (a) 417 | provisionally, unless and until the copyright holder explicitly and 418 | finally terminates your license, and (b) permanently, if the copyright 419 | holder fails to notify you of the violation by some reasonable means 420 | prior to 60 days after the cessation. 421 | 422 | Moreover, your license from a particular copyright holder is 423 | reinstated permanently if the copyright holder notifies you of the 424 | violation by some reasonable means, this is the first time you have 425 | received notice of violation of this License (for any work) from that 426 | copyright holder, and you cure the violation prior to 30 days after 427 | your receipt of the notice. 428 | 429 | Termination of your rights under this section does not terminate the 430 | licenses of parties who have received copies or rights from you under 431 | this License. If your rights have been terminated and not permanently 432 | reinstated, you do not qualify to receive new licenses for the same 433 | material under section 10. 434 | 435 | 9. Acceptance Not Required for Having Copies. 436 | 437 | You are not required to accept this License in order to receive or 438 | run a copy of the Program. Ancillary propagation of a covered work 439 | occurring solely as a consequence of using peer-to-peer transmission 440 | to receive a copy likewise does not require acceptance. However, 441 | nothing other than this License grants you permission to propagate or 442 | modify any covered work. These actions infringe copyright if you do 443 | not accept this License. Therefore, by modifying or propagating a 444 | covered work, you indicate your acceptance of this License to do so. 445 | 446 | 10. Automatic Licensing of Downstream Recipients. 447 | 448 | Each time you convey a covered work, the recipient automatically 449 | receives a license from the original licensors, to run, modify and 450 | propagate that work, subject to this License. You are not responsible 451 | for enforcing compliance by third parties with this License. 452 | 453 | An "entity transaction" is a transaction transferring control of an 454 | organization, or substantially all assets of one, or subdividing an 455 | organization, or merging organizations. If propagation of a covered 456 | work results from an entity transaction, each party to that 457 | transaction who receives a copy of the work also receives whatever 458 | licenses to the work the party's predecessor in interest had or could 459 | give under the previous paragraph, plus a right to possession of the 460 | Corresponding Source of the work from the predecessor in interest, if 461 | the predecessor has it or can get it with reasonable efforts. 462 | 463 | You may not impose any further restrictions on the exercise of the 464 | rights granted or affirmed under this License. For example, you may 465 | not impose a license fee, royalty, or other charge for exercise of 466 | rights granted under this License, and you may not initiate litigation 467 | (including a cross-claim or counterclaim in a lawsuit) alleging that 468 | any patent claim is infringed by making, using, selling, offering for 469 | sale, or importing the Program or any portion of it. 470 | 471 | 11. Patents. 472 | 473 | A "contributor" is a copyright holder who authorizes use under this 474 | License of the Program or a work on which the Program is based. The 475 | work thus licensed is called the contributor's "contributor version". 476 | 477 | A contributor's "essential patent claims" are all patent claims 478 | owned or controlled by the contributor, whether already acquired or 479 | hereafter acquired, that would be infringed by some manner, permitted 480 | by this License, of making, using, or selling its contributor version, 481 | but do not include claims that would be infringed only as a 482 | consequence of further modification of the contributor version. For 483 | purposes of this definition, "control" includes the right to grant 484 | patent sublicenses in a manner consistent with the requirements of 485 | this License. 486 | 487 | Each contributor grants you a non-exclusive, worldwide, royalty-free 488 | patent license under the contributor's essential patent claims, to 489 | make, use, sell, offer for sale, import and otherwise run, modify and 490 | propagate the contents of its contributor version. 491 | 492 | In the following three paragraphs, a "patent license" is any express 493 | agreement or commitment, however denominated, not to enforce a patent 494 | (such as an express permission to practice a patent or covenant not to 495 | sue for patent infringement). To "grant" such a patent license to a 496 | party means to make such an agreement or commitment not to enforce a 497 | patent against the party. 498 | 499 | If you convey a covered work, knowingly relying on a patent license, 500 | and the Corresponding Source of the work is not available for anyone 501 | to copy, free of charge and under the terms of this License, through a 502 | publicly available network server or other readily accessible means, 503 | then you must either (1) cause the Corresponding Source to be so 504 | available, or (2) arrange to deprive yourself of the benefit of the 505 | patent license for this particular work, or (3) arrange, in a manner 506 | consistent with the requirements of this License, to extend the patent 507 | license to downstream recipients. "Knowingly relying" means you have 508 | actual knowledge that, but for the patent license, your conveying the 509 | covered work in a country, or your recipient's use of the covered work 510 | in a country, would infringe one or more identifiable patents in that 511 | country that you have reason to believe are valid. 512 | 513 | If, pursuant to or in connection with a single transaction or 514 | arrangement, you convey, or propagate by procuring conveyance of, a 515 | covered work, and grant a patent license to some of the parties 516 | receiving the covered work authorizing them to use, propagate, modify 517 | or convey a specific copy of the covered work, then the patent license 518 | you grant is automatically extended to all recipients of the covered 519 | work and works based on it. 520 | 521 | A patent license is "discriminatory" if it does not include within 522 | the scope of its coverage, prohibits the exercise of, or is 523 | conditioned on the non-exercise of one or more of the rights that are 524 | specifically granted under this License. You may not convey a covered 525 | work if you are a party to an arrangement with a third party that is 526 | in the business of distributing software, under which you make payment 527 | to the third party based on the extent of your activity of conveying 528 | the work, and under which the third party grants, to any of the 529 | parties who would receive the covered work from you, a discriminatory 530 | patent license (a) in connection with copies of the covered work 531 | conveyed by you (or copies made from those copies), or (b) primarily 532 | for and in connection with specific products or compilations that 533 | contain the covered work, unless you entered into that arrangement, 534 | or that patent license was granted, prior to 28 March 2007. 535 | 536 | Nothing in this License shall be construed as excluding or limiting 537 | any implied license or other defenses to infringement that may 538 | otherwise be available to you under applicable patent law. 539 | 540 | 12. No Surrender of Others' Freedom. 541 | 542 | If conditions are imposed on you (whether by court order, agreement or 543 | otherwise) that contradict the conditions of this License, they do not 544 | excuse you from the conditions of this License. If you cannot convey a 545 | covered work so as to satisfy simultaneously your obligations under this 546 | License and any other pertinent obligations, then as a consequence you may 547 | not convey it at all. For example, if you agree to terms that obligate you 548 | to collect a royalty for further conveying from those to whom you convey 549 | the Program, the only way you could satisfy both those terms and this 550 | License would be to refrain entirely from conveying the Program. 551 | 552 | 13. Use with the GNU Affero General Public License. 553 | 554 | Notwithstanding any other provision of this License, you have 555 | permission to link or combine any covered work with a work licensed 556 | under version 3 of the GNU Affero General Public License into a single 557 | combined work, and to convey the resulting work. The terms of this 558 | License will continue to apply to the part which is the covered work, 559 | but the special requirements of the GNU Affero General Public License, 560 | section 13, concerning interaction through a network will apply to the 561 | combination as such. 562 | 563 | 14. Revised Versions of this License. 564 | 565 | The Free Software Foundation may publish revised and/or new versions of 566 | the GNU General Public License from time to time. Such new versions will 567 | be similar in spirit to the present version, but may differ in detail to 568 | address new problems or concerns. 569 | 570 | Each version is given a distinguishing version number. If the 571 | Program specifies that a certain numbered version of the GNU General 572 | Public License "or any later version" applies to it, you have the 573 | option of following the terms and conditions either of that numbered 574 | version or of any later version published by the Free Software 575 | Foundation. If the Program does not specify a version number of the 576 | GNU General Public License, you may choose any version ever published 577 | by the Free Software Foundation. 578 | 579 | If the Program specifies that a proxy can decide which future 580 | versions of the GNU General Public License can be used, that proxy's 581 | public statement of acceptance of a version permanently authorizes you 582 | to choose that version for the Program. 583 | 584 | Later license versions may give you additional or different 585 | permissions. However, no additional obligations are imposed on any 586 | author or copyright holder as a result of your choosing to follow a 587 | later version. 588 | 589 | 15. Disclaimer of Warranty. 590 | 591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 599 | 600 | 16. Limitation of Liability. 601 | 602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 610 | SUCH DAMAGES. 611 | 612 | 17. Interpretation of Sections 15 and 16. 613 | 614 | If the disclaimer of warranty and limitation of liability provided 615 | above cannot be given local legal effect according to their terms, 616 | reviewing courts shall apply local law that most closely approximates 617 | an absolute waiver of all civil liability in connection with the 618 | Program, unless a warranty or assumption of liability accompanies a 619 | copy of the Program in return for a fee. 620 | 621 | END OF TERMS AND CONDITIONS 622 | 623 | How to Apply These Terms to Your New Programs 624 | 625 | If you develop a new program, and you want it to be of the greatest 626 | possible use to the public, the best way to achieve this is to make it 627 | free software which everyone can redistribute and change under these terms. 628 | 629 | To do so, attach the following notices to the program. It is safest 630 | to attach them to the start of each source file to most effectively 631 | state the exclusion of warranty; and each file should have at least 632 | the "copyright" line and a pointer to where the full notice is found. 633 | 634 | 635 | Copyright (C) 636 | 637 | This program is free software: you can redistribute it and/or modify 638 | it under the terms of the GNU General Public License as published by 639 | the Free Software Foundation, either version 3 of the License, or 640 | (at your option) any later version. 641 | 642 | This program is distributed in the hope that it will be useful, 643 | but WITHOUT ANY WARRANTY; without even the implied warranty of 644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 645 | GNU General Public License for more details. 646 | 647 | You should have received a copy of the GNU General Public License 648 | along with this program. If not, see . 649 | 650 | Also add information on how to contact you by electronic and paper mail. 651 | 652 | If the program does terminal interaction, make it output a short 653 | notice like this when it starts in an interactive mode: 654 | 655 | Copyright (C) 656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 657 | This is free software, and you are welcome to redistribute it 658 | under certain conditions; type `show c' for details. 659 | 660 | The hypothetical commands `show w' and `show c' should show the appropriate 661 | parts of the General Public License. Of course, your program's commands 662 | might be different; for a GUI interface, you would use an "about box". 663 | 664 | You should also get your employer (if you work as a programmer) or school, 665 | if any, to sign a "copyright disclaimer" for the program, if necessary. 666 | For more information on this, and how to apply and follow the GNU GPL, see 667 | . 668 | 669 | The GNU General Public License does not permit incorporating your program 670 | into proprietary programs. If your program is a subroutine library, you 671 | may consider it more useful to permit linking proprietary applications with 672 | the library. If this is what you want to do, use the GNU Lesser General 673 | Public License instead of this License. But first, please read 674 | . 675 | -------------------------------------------------------------------------------- /COPYING.LESSER: -------------------------------------------------------------------------------- 1 | GNU LESSER GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | 9 | This version of the GNU Lesser General Public License incorporates 10 | the terms and conditions of version 3 of the GNU General Public 11 | License, supplemented by the additional permissions listed below. 12 | 13 | 0. Additional Definitions. 14 | 15 | As used herein, "this License" refers to version 3 of the GNU Lesser 16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU 17 | General Public License. 18 | 19 | "The Library" refers to a covered work governed by this License, 20 | other than an Application or a Combined Work as defined below. 21 | 22 | An "Application" is any work that makes use of an interface provided 23 | by the Library, but which is not otherwise based on the Library. 24 | Defining a subclass of a class defined by the Library is deemed a mode 25 | of using an interface provided by the Library. 26 | 27 | A "Combined Work" is a work produced by combining or linking an 28 | Application with the Library. The particular version of the Library 29 | with which the Combined Work was made is also called the "Linked 30 | Version". 31 | 32 | The "Minimal Corresponding Source" for a Combined Work means the 33 | Corresponding Source for the Combined Work, excluding any source code 34 | for portions of the Combined Work that, considered in isolation, are 35 | based on the Application, and not on the Linked Version. 36 | 37 | The "Corresponding Application Code" for a Combined Work means the 38 | object code and/or source code for the Application, including any data 39 | and utility programs needed for reproducing the Combined Work from the 40 | Application, but excluding the System Libraries of the Combined Work. 41 | 42 | 1. Exception to Section 3 of the GNU GPL. 43 | 44 | You may convey a covered work under sections 3 and 4 of this License 45 | without being bound by section 3 of the GNU GPL. 46 | 47 | 2. Conveying Modified Versions. 48 | 49 | If you modify a copy of the Library, and, in your modifications, a 50 | facility refers to a function or data to be supplied by an Application 51 | that uses the facility (other than as an argument passed when the 52 | facility is invoked), then you may convey a copy of the modified 53 | version: 54 | 55 | a) under this License, provided that you make a good faith effort to 56 | ensure that, in the event an Application does not supply the 57 | function or data, the facility still operates, and performs 58 | whatever part of its purpose remains meaningful, or 59 | 60 | b) under the GNU GPL, with none of the additional permissions of 61 | this License applicable to that copy. 62 | 63 | 3. Object Code Incorporating Material from Library Header Files. 64 | 65 | The object code form of an Application may incorporate material from 66 | a header file that is part of the Library. You may convey such object 67 | code under terms of your choice, provided that, if the incorporated 68 | material is not limited to numerical parameters, data structure 69 | layouts and accessors, or small macros, inline functions and templates 70 | (ten or fewer lines in length), you do both of the following: 71 | 72 | a) Give prominent notice with each copy of the object code that the 73 | Library is used in it and that the Library and its use are 74 | covered by this License. 75 | 76 | b) Accompany the object code with a copy of the GNU GPL and this license 77 | document. 78 | 79 | 4. Combined Works. 80 | 81 | You may convey a Combined Work under terms of your choice that, 82 | taken together, effectively do not restrict modification of the 83 | portions of the Library contained in the Combined Work and reverse 84 | engineering for debugging such modifications, if you also do each of 85 | the following: 86 | 87 | a) Give prominent notice with each copy of the Combined Work that 88 | the Library is used in it and that the Library and its use are 89 | covered by this License. 90 | 91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license 92 | document. 93 | 94 | c) For a Combined Work that displays copyright notices during 95 | execution, include the copyright notice for the Library among 96 | these notices, as well as a reference directing the user to the 97 | copies of the GNU GPL and this license document. 98 | 99 | d) Do one of the following: 100 | 101 | 0) Convey the Minimal Corresponding Source under the terms of this 102 | License, and the Corresponding Application Code in a form 103 | suitable for, and under terms that permit, the user to 104 | recombine or relink the Application with a modified version of 105 | the Linked Version to produce a modified Combined Work, in the 106 | manner specified by section 6 of the GNU GPL for conveying 107 | Corresponding Source. 108 | 109 | 1) Use a suitable shared library mechanism for linking with the 110 | Library. A suitable mechanism is one that (a) uses at run time 111 | a copy of the Library already present on the user's computer 112 | system, and (b) will operate properly with a modified version 113 | of the Library that is interface-compatible with the Linked 114 | Version. 115 | 116 | e) Provide Installation Information, but only if you would otherwise 117 | be required to provide such information under section 6 of the 118 | GNU GPL, and only to the extent that such information is 119 | necessary to install and execute a modified version of the 120 | Combined Work produced by recombining or relinking the 121 | Application with a modified version of the Linked Version. (If 122 | you use option 4d0, the Installation Information must accompany 123 | the Minimal Corresponding Source and Corresponding Application 124 | Code. If you use option 4d1, you must provide the Installation 125 | Information in the manner specified by section 6 of the GNU GPL 126 | for conveying Corresponding Source.) 127 | 128 | 5. Combined Libraries. 129 | 130 | You may place library facilities that are a work based on the 131 | Library side by side in a single library together with other library 132 | facilities that are not Applications and are not covered by this 133 | License, and convey such a combined library under terms of your 134 | choice, if you do both of the following: 135 | 136 | a) Accompany the combined library with a copy of the same work based 137 | on the Library, uncombined with any other library facilities, 138 | conveyed under the terms of this License. 139 | 140 | b) Give prominent notice with the combined library that part of it 141 | is a work based on the Library, and explaining where to find the 142 | accompanying uncombined form of the same work. 143 | 144 | 6. Revised Versions of the GNU Lesser General Public License. 145 | 146 | The Free Software Foundation may publish revised and/or new versions 147 | of the GNU Lesser General Public License from time to time. Such new 148 | versions will be similar in spirit to the present version, but may 149 | differ in detail to address new problems or concerns. 150 | 151 | Each version is given a distinguishing version number. If the 152 | Library as you received it specifies that a certain numbered version 153 | of the GNU Lesser General Public License "or any later version" 154 | applies to it, you have the option of following the terms and 155 | conditions either of that published version or of any later version 156 | published by the Free Software Foundation. If the Library as you 157 | received it does not specify a version number of the GNU Lesser 158 | General Public License, you may choose any version of the GNU Lesser 159 | General Public License ever published by the Free Software Foundation. 160 | 161 | If the Library as you received it specifies that a proxy can decide 162 | whether future versions of the GNU Lesser General Public License shall 163 | apply, that proxy's public statement of acceptance of any version is 164 | permanent authorization for you to choose that version for the 165 | Library. 166 | -------------------------------------------------------------------------------- /INSTALL: -------------------------------------------------------------------------------- 1 | =============================== 2 | Installing gridDataFormats 3 | =============================== 4 | 5 | Releases 6 | ======== 7 | 8 | All releases of the package are hosted on the Python Package index so 9 | you can just install them directly with :: 10 | 11 | pip install gridDataFormats 12 | 13 | 14 | Source 15 | ====== 16 | 17 | From the checked out top level directory of the sources:: 18 | 19 | python setup.py install 20 | 21 | If you don't have NumPy_ installed it will download and install it 22 | automatically. 23 | 24 | For some functionality you will also need to install scipy_. 25 | 26 | .. _NumPy: http://numpy.scipy.org/ 27 | .. _scipy: http://www.scipy.org/scipylib/index.html 28 | -------------------------------------------------------------------------------- /MAINTAINER.md: -------------------------------------------------------------------------------- 1 | # Maintainer documentation 2 | 3 | For additional documentation see the [Developer Guide: Release 4 | Management](https://github.com/MDAnalysis/GridDataFormats/wiki/Developer-Guide#release-management) 5 | page. This file gives a brief reminder of what maintainers need to do 6 | for new releases. 7 | 8 | 1. create a release on GitHub using tag `..`. 9 | 1. release on PyPi under https://pypi.org/project/GridDataFormats 10 | 1. release on conda-forge https://anaconda.org/conda-forge/griddataformats 11 | 12 | ## GitHub release 13 | 14 | * We use [semantic versioning](https://semver.org) MAJOR.MINOR.PATCH 15 | (i.e., briefly, major revision changes whenever the API changes in 16 | backwards-incompatible manner, MINOR changes for new features, PATCH 17 | changes for fixes that correct functionality; as long as MAJOR == 0, 18 | we can break the API with increasing MINOR.) 19 | 20 | * Releases are cut from the master branch and tagged with 21 | *MAJOR.MINOR.PATCH* (note: the release tag *determines* the tag 22 | because we use 23 | [versioneer](https://github.com/warner/python-versioneer/blob/master/INSTALL.md#post-installation-usage), 24 | which obtains the release number from the git tag). We do from the 25 | master branch: 26 | 27 | 1. `git tag ..` 28 | 1. `git push --tags` 29 | 30 | * This will automatically trigger a github action (named 'deploy') to using pypa's `build` tool to create a tarball & pure Python wheel and upload it to https://test.pypi.org/project/GridDataFormats 31 | 32 | * Once uploaded to testpypi, please check locally that the testpypi build is working as intended. In a clean environment do: 33 | 34 | 1. `pip install -i https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple GridDataFormats=="version number"` 35 | 2. `pip install pytest` 36 | 3. `pytest --pyargs gridData` 37 | 38 | * Create a 39 | [GitHub release](https://github.com/MDAnalysis/GridDataFormats/releases) 40 | from the tag and name it `..` and add a short description. 41 | 42 | * The GitHub release triggers the `deploy` action to deploy the tarball and wheel to the standard PyPI repository. 43 | 44 | 45 | ## PyPi release 46 | 47 | A GitHub release is automatically deployed to PyPI. 48 | 49 | ## Update Conda-forge package 50 | 51 | *After* a PyPI release update the conda-forge package [feedstock](https://github.com/conda-forge/griddataformats-feedstock). 52 | 53 | ### Automatic 54 | 55 | 1. Wait for the *regro-cf-autotick-bot* to create a PR, based on the PyPI release (can take a few hours). 56 | 2. review the PR 57 | 3. merge the PR 58 | 4. conda packages will be built 59 | 60 | 61 | ### Manual 62 | 63 | Manual updates are rarely necessary. 64 | 65 | If necessary do the following on a local checkout of the package 66 | [feedstock](https://github.com/conda-forge/griddataformats-feedstock) 67 | 68 | 1. create a new branch 69 | 1. conda smithy rerender 70 | 1. update the sha256 in the `meta.yaml` (see the [PyPi downloads 71 | page](https://pypi.org/project/GridDataFormats/#files) for the 72 | sha256 of the tar.gz file) 73 | 1. update version number 74 | 75 | Afterwards upload the new branch to your **own fork** of the feedstock and 76 | generate a PR. Once all tests pass merge the PR and the package will be 77 | published. 78 | 79 | 80 | 81 | ## Documentation 82 | 83 | Documentation is automatically generated in CI and pushed to 84 | the gh-pages branch and appears at https://www.mdanalysis.org/GridDataFormats/. 85 | 86 | 87 | There is also alternative documentation on ReadTheDocs 88 | https://griddataformats.readthedocs.io/, which automatically rebuilds. 89 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst INSTALL CHANGELOG COPYING COPYING.LESSER AUTHORS 2 | include setup.py 3 | 4 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ============================ 2 | README for GridDataFormats 3 | ============================ 4 | 5 | |build| |cov| |docs| |zenodo| |conda| 6 | 7 | The **GridDataFormats** package provides classes to unify reading and 8 | writing n-dimensional datasets. One can read grid data from files, 9 | make them available as a `Grid`_ object, and write out the data again. 10 | 11 | Availability 12 | ------------ 13 | 14 | The package is licensed under the LGPL, v3 (see files COPYING_ and 15 | `COPYING.LESSER`_) and is available 16 | 17 | * from the Python Package Index under the name `GridDataFormats`_ 18 | * as a conda package from the *conda-forge* channel, `conda-forge/griddataformats`_ 19 | * in source from the GitHub repository https://github.com/MDAnalysis/GridDataFormats 20 | 21 | .. _GridDataFormats: 22 | https://pypi.python.org/pypi/GridDataFormats 23 | .. _`conda-forge/griddataformats`: 24 | https://anaconda.org/conda-forge/griddataformats 25 | .. _COPYING: 26 | https://raw.githubusercontent.com/MDAnalysis/GridDataFormats/master/COPYING 27 | .. _`COPYING.LESSER`: 28 | https://raw.githubusercontent.com/MDAnalysis/GridDataFormats/master/COPYING.LESSER 29 | .. _Grid: 30 | https://www.mdanalysis.org/GridDataFormats/gridData/core.html#gridData.core.Grid 31 | 32 | Installation 33 | ------------ 34 | 35 | Installing GridDataFormats with ``pip`` 36 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 37 | 38 | Install with `pip`_:: 39 | 40 | pip install gridDataFormats 41 | 42 | .. _pip: https://pip.pypa.io/ 43 | 44 | 45 | Installing GridDataFormats with ``conda`` 46 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 47 | 48 | Installing *GridDataFormats* from the *conda-forge* channel can be 49 | achieved by adding "conda-forge" to your channels with:: 50 | 51 | conda config --add channels conda-forge 52 | 53 | Once the *conda-forge* channel has been enabled, *GridDataFormats* can 54 | be installed with:: 55 | 56 | conda install griddataformats 57 | 58 | 59 | 60 | Documentation 61 | ------------- 62 | 63 | For the latest docs see the `GridDataFormats docs`_. (Multiple 64 | versions of the docs are also available at 65 | `griddataformats.readthedocs.org`_.) 66 | 67 | 68 | .. _GridDataFormats docs: 69 | https://www.mdanalysis.org/GridDataFormats 70 | .. _`griddataformats.readthedocs.org`: 71 | https://griddataformats.readthedocs.org 72 | 73 | 74 | Contributing 75 | ------------ 76 | 77 | Please use the `issue tracker`_ for bugs and questions. 78 | 79 | **GridDataFormats** is open source and contributions are 80 | welcome. Please fork the repository and submit a `pull request`_. 81 | 82 | .. _issue tracker: 83 | https://github.com/MDAnalysis/GridDataFormats/issues 84 | .. _pull request: 85 | https://github.com/MDAnalysis/GridDataFormats/pulls 86 | 87 | .. |numfocus| image:: https://img.shields.io/badge/powered%20by-NumFOCUS-orange.svg?style=flat&colorA=E1523D&colorB=007D8A 88 | :alt: Powered by NumFOCUS 89 | :target: https://www.numfocus.org/ 90 | 91 | .. |build| image:: https://github.com/MDAnalysis/GridDataFormats/actions/workflows/gh-ci.yaml/badge.svg 92 | :alt: Github Actions Build Status 93 | :target: https://github.com/MDAnalysis/GridDataFormats/actions/workflows/gh-ci.yaml 94 | 95 | .. |cov| image:: https://codecov.io/gh/MDAnalysis/GridDataFormats/branch/master/graph/badge.svg 96 | :alt: Coverage Status 97 | :target: https://codecov.io/gh/MDAnalysis/GridDataFormats 98 | 99 | .. |docs| image:: https://img.shields.io/badge/docs-latest-brightgreen.svg 100 | :alt: Documentation 101 | :target: https://www.mdanalysis.org/GridDataFormats/ 102 | 103 | .. |zenodo| image:: https://zenodo.org/badge/13219/MDAnalysis/GridDataFormats.svg 104 | :alt: Zenodo DOI 105 | :target: https://zenodo.org/badge/latestdoi/13219/MDAnalysis/GridDataFormats 106 | 107 | .. |conda| image:: https://anaconda.org/conda-forge/griddataformats/badges/version.svg 108 | :alt: Anaconda 109 | :target: https://anaconda.org/conda-forge/griddataformats 110 | -------------------------------------------------------------------------------- /ci/environment.yaml: -------------------------------------------------------------------------------- 1 | name: griddata_env 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - pip 6 | - numpy>=1.21 7 | - scipy 8 | - mrcfile 9 | 10 | # documentation 11 | - mdanalysis-sphinx-theme >=1.0.1 12 | - pip: 13 | - sphinx-sitemap -------------------------------------------------------------------------------- /ci/requirements.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | scipy 3 | mrcfile 4 | mdanalysis-sphinx-theme >=1.0.1 5 | sphinx-sitemap 6 | -------------------------------------------------------------------------------- /doc/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = . 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -T -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 14 | # the i18n builder cannot share the environment and doctrees with the others 15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 16 | 17 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 18 | 19 | help: 20 | @echo "Please use \`make ' where is one of" 21 | @echo " html to make standalone HTML files" 22 | @echo " dirhtml to make HTML files named index.html in directories" 23 | @echo " singlehtml to make a single large HTML file" 24 | @echo " pickle to make pickle files" 25 | @echo " json to make JSON files" 26 | @echo " htmlhelp to make HTML files and a HTML help project" 27 | @echo " qthelp to make HTML files and a qthelp project" 28 | @echo " devhelp to make HTML files and a Devhelp project" 29 | @echo " epub to make an epub" 30 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 31 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 32 | @echo " text to make text files" 33 | @echo " man to make manual pages" 34 | @echo " texinfo to make Texinfo files" 35 | @echo " info to make Texinfo files and run them through makeinfo" 36 | @echo " gettext to make PO message catalogs" 37 | @echo " changes to make an overview of all changed/added/deprecated items" 38 | @echo " linkcheck to check all external links for integrity" 39 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 40 | 41 | clean: 42 | -for i in html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex text man changes linkcheck doctest; do \ 43 | rm -rf $(BUILDDIR)/$$i; \ 44 | done 45 | 46 | html: 47 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 48 | @echo 49 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 50 | 51 | dirhtml: 52 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 53 | @echo 54 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 55 | 56 | singlehtml: 57 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 58 | @echo 59 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 60 | 61 | pickle: 62 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 63 | @echo 64 | @echo "Build finished; now you can process the pickle files." 65 | 66 | json: 67 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 68 | @echo 69 | @echo "Build finished; now you can process the JSON files." 70 | 71 | htmlhelp: 72 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 73 | @echo 74 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 75 | ".hhp project file in $(BUILDDIR)/htmlhelp." 76 | 77 | qthelp: 78 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 79 | @echo 80 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 81 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 82 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/gridDataFormats.qhcp" 83 | @echo "To view the help file:" 84 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/gridDataFormats.qhc" 85 | 86 | devhelp: 87 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 88 | @echo 89 | @echo "Build finished." 90 | @echo "To view the help file:" 91 | @echo "# mkdir -p $$HOME/.local/share/devhelp/gridDataFormats" 92 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/gridDataFormats" 93 | @echo "# devhelp" 94 | 95 | epub: 96 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 97 | @echo 98 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 99 | 100 | latex: 101 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 102 | @echo 103 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 104 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 105 | "(use \`make latexpdf' here to do that automatically)." 106 | 107 | latexpdf: 108 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 109 | @echo "Running LaTeX files through pdflatex..." 110 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 111 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 112 | 113 | text: 114 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 115 | @echo 116 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 117 | 118 | man: 119 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 120 | @echo 121 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 122 | 123 | texinfo: 124 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 125 | @echo 126 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 127 | @echo "Run \`make' in that directory to run these through makeinfo" \ 128 | "(use \`make info' here to do that automatically)." 129 | 130 | info: 131 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 132 | @echo "Running Texinfo files through makeinfo..." 133 | make -C $(BUILDDIR)/texinfo info 134 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 135 | 136 | gettext: 137 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 138 | @echo 139 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 140 | 141 | changes: 142 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 143 | @echo 144 | @echo "The overview file is in $(BUILDDIR)/changes." 145 | 146 | linkcheck: 147 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 148 | @echo 149 | @echo "Link check complete; look for any errors in the above output " \ 150 | "or in $(BUILDDIR)/linkcheck/output.txt." 151 | 152 | doctest: 153 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 154 | @echo "Testing of doctests in the sources finished, look at the " \ 155 | "results in $(BUILDDIR)/doctest/output.txt." 156 | -------------------------------------------------------------------------------- /doc/source/_static/logos/AUTHOR: -------------------------------------------------------------------------------- 1 | The MDAnalysis 'Atom' logo was created by Christian Beckstein and is 2 | 3 | Copyright (c) 2011 Christian Beckstein 4 | 5 | MDAnalysis Logo 'Atom' by Christian Beckstein is licensed under a 6 | Creative Commons Attribution-NoDerivs 3.0 Unported License. 7 | To view a copy of this license, visit 8 | http://creativecommons.org/licenses/by-nd/3.0/ or send a letter to Creative 9 | Commons, 444 Castro Street, Suite 900, Mountain View, California, 94041, USA. 10 | 11 | The logo is contained in the file 'mdanalysis-logo.png'. 12 | 13 | Derivatives in the files 'mdanalysis-logo-127x55.png', 14 | 'mdanalysis-logo-200x150.png', 'mdanalysis-logo.ico' were created for 15 | inclusion in MDAnalysis and on websites related to MDAnalysis. They 16 | are distributed under the same license as the 'Atom' logo. 17 | -------------------------------------------------------------------------------- /doc/source/_static/logos/mdanalysis-griddataformats-logo.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MDAnalysis/GridDataFormats/86a4109dea49a1edf08727b0888f307347d7873a/doc/source/_static/logos/mdanalysis-griddataformats-logo.pdf -------------------------------------------------------------------------------- /doc/source/_static/logos/mdanalysis-griddataformats-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MDAnalysis/GridDataFormats/86a4109dea49a1edf08727b0888f307347d7873a/doc/source/_static/logos/mdanalysis-griddataformats-logo.png -------------------------------------------------------------------------------- /doc/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # gridDataFormats documentation build configuration file, created by 4 | # sphinx-quickstart on Tue Jun 30 13:16:50 2015. 5 | # 6 | # This file is execfile()d with the current directory set to its containing dir. 7 | # 8 | # Note that not all possible configuration values are present in this 9 | # autogenerated file. 10 | # 11 | # All configuration values have a default; values that are commented out 12 | # serve to show the default. 13 | 14 | import sys, os 15 | 16 | # If extensions (or modules to document with autodoc) are in another directory, 17 | # add these directories to sys.path here. If the directory is relative to the 18 | # documentation root, use os.path.abspath to make it absolute, like shown here. 19 | # sys.path.insert(0, os.path.abspath('.')) 20 | 21 | # make sure sphinx always uses the current branch 22 | sys.path.insert(0, os.path.abspath("../..")) 23 | 24 | 25 | # -- General configuration ----------------------------------------------------- 26 | 27 | # If your documentation needs a minimal Sphinx version, state it here. 28 | # 29 | # needs_sphinx = '1.0' 30 | 31 | # Add any Sphinx extension module names here, as strings. They can be 32 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 33 | # ones. 34 | extensions = [ 35 | "sphinx.ext.autodoc", 36 | "sphinx.ext.intersphinx", 37 | "sphinx.ext.mathjax", 38 | "sphinx.ext.viewcode", 39 | "sphinx.ext.napoleon", 40 | "sphinx.ext.todo", 41 | "sphinx_sitemap", 42 | ] 43 | 44 | mathjax_path = "https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.0/MathJax.js?config=TeX-AMS-MML_HTMLorMML" 45 | 46 | # for sitemap with https://github.com/jdillard/sphinx-sitemap 47 | site_url = "https://www.mdanalysis.org/GridDataFormats/" 48 | sitemap_url_scheme = "{link}" 49 | 50 | # https://stackoverflow.com/questions/5599254/how-to-use-sphinxs-autodoc-to-document-a-classs-init-self-method 51 | autoclass_content = "both" 52 | 53 | # Add any paths that contain templates here, relative to this directory. 54 | templates_path = ["_templates"] 55 | 56 | # The suffix of source filenames. 57 | source_suffix = ".rst" 58 | 59 | # The encoding of source files. 60 | # source_encoding = 'utf-8-sig' 61 | 62 | # The master toctree document. 63 | master_doc = "index" 64 | 65 | # General information about the project. 66 | project = "GridDataFormats" 67 | authors = ( 68 | "Oliver Beckstein, Jan Domanski, Jesse Johnson, Max Linke, Tyler Luchko, " 69 | "Dominik Mierzejewski, Giacomo Fiorin, Lily Wang, Josh Vermaas, " 70 | "Irfan Alibay,Zhiyi Wu" 71 | ) 72 | copyright = "2007-2023, " + authors 73 | 74 | # The version info for the project you're documenting, acts as replacement for 75 | # |version| and |release|, also used in various other places throughout the 76 | # built documents. 77 | # 78 | # The full version, including alpha/beta/rc tags. 79 | release = __import__("gridData").__version__ 80 | # The short X.Y version. 81 | version = ".".join(release.split(".")[:2]) 82 | 83 | # The language for content autogenerated by Sphinx. Refer to documentation 84 | # for a list of supported languages. 85 | # language = None 86 | 87 | # There are two options for replacing |today|: either, you set today to some 88 | # non-false value, then it is used: 89 | # today = '' 90 | # Else, today_fmt is used as the format for a strftime call. 91 | # today_fmt = '%B %d, %Y' 92 | 93 | # List of patterns, relative to source directory, that match files and 94 | # directories to ignore when looking for source files. 95 | exclude_patterns = [] 96 | 97 | # The reST default role (used for this markup: `text`) to use for all documents. 98 | # default_role = None 99 | 100 | # If true, '()' will be appended to :func: etc. cross-reference text. 101 | # add_function_parentheses = True 102 | 103 | # If true, the current module name will be prepended to all description 104 | # unit titles (such as .. function::). 105 | # add_module_names = True 106 | 107 | # If true, sectionauthor and moduleauthor directives will be shown in the 108 | # output. They are ignored by default. 109 | # show_authors = False 110 | 111 | # The name of the Pygments (syntax highlighting) style to use. 112 | pygments_style = "default" 113 | 114 | # A list of ignored prefixes for module index sorting. 115 | # modindex_common_prefix = [] 116 | 117 | 118 | # -- Options for HTML output --------------------------------------------------- 119 | 120 | # The theme to use for HTML and HTML Help pages. See the documentation for 121 | # a list of builtin themes. 122 | 123 | html_theme = "mdanalysis_sphinx_theme" 124 | 125 | # html_theme_path = [] 126 | 127 | # Theme options are theme-specific and customize the look and feel of a theme 128 | # further. For a list of options available for each theme, see the 129 | # documentation. 130 | # 131 | 132 | 133 | html_theme_options = {"mda_official": True} 134 | 135 | 136 | # options common to RTD and MDAnalysis theme 137 | 138 | # Add any paths that contain custom static files (such as style sheets) here, 139 | # relative to this directory. They are copied after the builtin static files, 140 | # so a file named "default.css" will overwrite the builtin "default.css". 141 | # For RTD theme: custom.css to override theme defaults. 142 | html_static_path = ["_static"] 143 | # html_css_files = [] 144 | 145 | 146 | # The name of an image file (relative to this directory) to place at the top 147 | # of the sidebar. -- use theme (only enable if NOT using html_sidebars) 148 | html_logo = "_static/logos/mdanalysis-griddataformats-logo.png" 149 | 150 | # The name of an image file (within the static path) to use as favicon of the 151 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 152 | # pixels large. 153 | # html_favicon = "_static/logos/mdanalysis-logo.ico" 154 | 155 | 156 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 157 | # using the given strftime format. 158 | # html_last_updated_fmt = '%b %d, %Y' 159 | 160 | # If true, SmartyPants will be used to convert quotes and dashes to 161 | # typographically correct entities. 162 | # html_use_smartypants = True 163 | 164 | 165 | # Additional templates that should be rendered to pages, maps page names to 166 | # template names. 167 | # html_additional_pages = {} 168 | 169 | # If false, no module index is generated. 170 | # html_domain_indices = True 171 | 172 | # If false, no index is generated. 173 | # html_use_index = True 174 | 175 | # If true, the index is split into individual pages for each letter. 176 | # html_split_index = False 177 | 178 | # If true, links to the reST sources are added to the pages. 179 | # html_show_sourcelink = True 180 | 181 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 182 | # html_show_sphinx = True 183 | 184 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 185 | # html_show_copyright = True 186 | 187 | # If true, an OpenSearch description file will be output, and all pages will 188 | # contain a tag referring to it. The value of this option must be the 189 | # base URL from which the finished HTML is served. 190 | html_use_opensearch = "https://www.mdanalysis.org/GridDataFormats" 191 | 192 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 193 | # html_file_suffix = None 194 | 195 | # Output file base name for HTML help builder. 196 | htmlhelp_basename = "GridDataFormatsDoc" 197 | 198 | 199 | # -- Options for LaTeX output -------------------------------------------------- 200 | 201 | latex_elements = { 202 | # The paper size ('letterpaper' or 'a4paper'). 203 | #'papersize': 'letterpaper', 204 | # The font size ('10pt', '11pt' or '12pt'). 205 | #'pointsize': '10pt', 206 | # Additional stuff for the LaTeX preamble. 207 | #'preamble': '', 208 | } 209 | 210 | # Grouping the document tree into LaTeX files. List of tuples 211 | # (source start file, target name, title, author, documentclass [howto/manual]). 212 | latex_documents = [ 213 | ( 214 | "index", 215 | "gridDataFormats.tex", 216 | "gridDataFormats Documentation", 217 | authors, 218 | "manual", 219 | ), 220 | ] 221 | 222 | # The name of an image file (relative to this directory) to place at the top of 223 | # the title page. 224 | # latex_logo = None 225 | 226 | # For "manual" documents, if this is true, then toplevel headings are parts, 227 | # not chapters. 228 | # latex_use_parts = False 229 | 230 | # If true, show page references after internal links. 231 | # latex_show_pagerefs = False 232 | 233 | # If true, show URL addresses after external links. 234 | # latex_show_urls = False 235 | 236 | # Documents to append as an appendix to all manuals. 237 | # latex_appendices = [] 238 | 239 | # If false, no module index is generated. 240 | # latex_domain_indices = True 241 | 242 | 243 | # -- Options for manual page output -------------------------------------------- 244 | 245 | # One entry per manual page. List of tuples 246 | # (source start file, name, description, authors, manual section). 247 | man_pages = [ 248 | ("index", "griddataformats", "gridDataFormats Documentation", authors.split(), 1) 249 | ] 250 | 251 | # If true, show URL addresses after external links. 252 | # man_show_urls = False 253 | 254 | 255 | # -- Options for Texinfo output ------------------------------------------------ 256 | 257 | # Grouping the document tree into Texinfo files. List of tuples 258 | # (source start file, target name, title, author, 259 | # dir menu entry, description, category) 260 | texinfo_documents = [ 261 | ( 262 | "index", 263 | "gridDataFormats", 264 | "gridDataFormats Documentation", 265 | authors, 266 | "gridDataFormats", 267 | "Grid structure for regular data together with common format readers.", 268 | "Miscellaneous", 269 | ), 270 | ] 271 | 272 | # Documents to append as an appendix to all manuals. 273 | # texinfo_appendices = [] 274 | 275 | # If false, no module index is generated. 276 | # texinfo_domain_indices = True 277 | 278 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 279 | # texinfo_show_urls = 'footnote' 280 | 281 | 282 | # Example configuration for intersphinx: refer to the Python standard library. 283 | intersphinx_mapping = { 284 | "python": ("https://docs.python.org/3/", None), 285 | "numpy": ("https://numpy.org/doc/stable/", None), 286 | "scipy": ("https://docs.scipy.org/doc/scipy/", None), 287 | } 288 | -------------------------------------------------------------------------------- /doc/source/gridData/basic.rst: -------------------------------------------------------------------------------- 1 | Basic use 2 | ========= 3 | 4 | In most cases, only one class is important, the 5 | :class:`~gridData.core.Grid`, so we just load this right away:: 6 | 7 | from gridData import Grid 8 | 9 | 10 | Loading data 11 | ------------ 12 | 13 | From a OpenDX file:: 14 | 15 | g = Grid("density.dx") 16 | 17 | (See also :ref:`opendx-read-write` for more information, especially 18 | when working with visualization programs such as PyMOL, VMD, or 19 | Chimera.) 20 | 21 | From a gOpenMol PLT file:: 22 | 23 | g = Grid("density.plt") 24 | 25 | From the output of :func:`numpy.histogramdd`:: 26 | 27 | import numpy 28 | r = numpy.random.randn(100,3) 29 | H, edges = np.histogramdd(r, bins = (5, 8, 4)) 30 | g = Grid(H, edges=edges) 31 | 32 | For other ways to load data, see the docs for :class:`~gridData.core.Grid`. 33 | 34 | 35 | Writing out data 36 | ---------------- 37 | 38 | Some formats support writing data (see 39 | :ref:`supported-file-formats` for more details), using the 40 | :meth:`gridData.core.Grid.export` method:: 41 | 42 | g.export("density.dx") 43 | 44 | The format can also be specified explicitly:: 45 | 46 | g.export("density.pkl", file_format="pickle") 47 | 48 | Some of the exporters (such as for OpenDX, see 49 | :ref:`opendx-read-write`) may take additional, format-specific 50 | keywords, which are documented separately. 51 | 52 | 53 | 54 | Subtracting two densities 55 | ------------------------- 56 | 57 | Assuming one has two densities that were generated on the same grid 58 | positions, stored in files ``A.dx`` and ``B.dx``, one first reads the 59 | data into two :class:`~gridData.core.Grid` objects:: 60 | 61 | A = Grid('A.dx') 62 | B = Grid('B.dx') 63 | 64 | Subtract A from B:: 65 | 66 | C = B - A 67 | 68 | and write out as a dx file:: 69 | 70 | C.export('C.dx') 71 | 72 | The resulting file ``C.dx`` can be visualized with any OpenDX-capable 73 | viewer, or later read-in again. 74 | 75 | 76 | Resampling 77 | ---------- 78 | 79 | Load data:: 80 | 81 | A = Grid('A.dx') 82 | 83 | Interpolate with a cubic spline to twice the sample density:: 84 | 85 | A2 = A.resample_factor(2) 86 | 87 | Downsample to half of the bins in each dimension:: 88 | 89 | Ahalf = A.resample_factor(0.5) 90 | 91 | Resample to the grid of another density, B:: 92 | 93 | B = Grid('B.dx') 94 | A_on_B = A.resample(B.edges) 95 | 96 | or even simpler :: 97 | 98 | A_on_B = A.resample(B) 99 | 100 | .. Note:: The cubic spline generates region with values that did not 101 | occur in the original data; in particular if the original data's 102 | lowest value was 0 then the spline interpolation will probably 103 | produce some values <0 near regions where the density changed 104 | abruptly. 105 | 106 | -------------------------------------------------------------------------------- /doc/source/gridData/core.rst: -------------------------------------------------------------------------------- 1 | .. automodule:: gridData.core 2 | :members: 3 | -------------------------------------------------------------------------------- /doc/source/gridData/formats.rst: -------------------------------------------------------------------------------- 1 | .. -*- mode: rst; coding: utf-8 -*- 2 | 3 | Formats 4 | ======= 5 | 6 | A limited number of commonly used formats can be read or written. The 7 | formats are particularly suitable to interface with molecular 8 | visualization tools such as VMD_, PyMOL_, or Chimera_. 9 | 10 | Adding new formats is not difficult and user-contributed 11 | format reader/writer can be easily integrated---send a `pull request`_. 12 | 13 | .. _supported-file-formats: 14 | 15 | Supported file formats 16 | ---------------------- 17 | 18 | The package can be easily extended. The OpenDX_ format is widely 19 | understood by many molecular viewers and is sufficient for many 20 | applications that were encountered so far. Hence, at the moment only a 21 | small number of file formats is directly supported. 22 | 23 | .. table:: Available file formats in :mod:`gridData` 24 | 25 | ============================ ========== ========= ===== ===== ========================================= 26 | module or class format extension read write remarks 27 | ============================ ========== ========= ===== ===== ========================================= 28 | :mod:`~gridData.OpenDX` OpenDX_ dx x x subset of OpenDX implemented 29 | :mod:`~gridData.gOpenMol` gOpenMol_ plt x 30 | :mod:`~gridData.mrc` CCP4_ ccp4,mrc x subset implemented 31 | :class:`~gridData.core.Grid` pickle pickle x x standard Python pickle of the Grid class 32 | ============================ ========== ========= ===== ===== ========================================= 33 | 34 | 35 | .. _pull request: https://github.com/MDAnalysis/GridDataFormats/pulls 36 | .. _VMD: http://www.ks.uiuc.edu/Research/vmd/ 37 | .. _PyMOL: http://www.pymol.org/ 38 | .. _Chimera: https://www.cgl.ucsf.edu/chimera/ 39 | .. _OpenDX: http://www.opendx.org/ 40 | .. _gOpenMol: http://www.csc.fi/gopenmol/ 41 | .. _CCP4: http://www.ccpem.ac.uk/mrc_format/mrc2014.php 42 | 43 | 44 | Format-specific modules 45 | ----------------------- 46 | 47 | .. toctree:: 48 | :maxdepth: 1 49 | 50 | formats/OpenDX 51 | formats/gOpenMol 52 | formats/mrc 53 | -------------------------------------------------------------------------------- /doc/source/gridData/formats/OpenDX.rst: -------------------------------------------------------------------------------- 1 | .. automodule:: gridData.OpenDX 2 | :members: 3 | 4 | -------------------------------------------------------------------------------- /doc/source/gridData/formats/gOpenMol.rst: -------------------------------------------------------------------------------- 1 | .. automodule:: gridData.gOpenMol 2 | :members: 3 | -------------------------------------------------------------------------------- /doc/source/gridData/formats/mrc.rst: -------------------------------------------------------------------------------- 1 | .. automodule:: gridData.mrc 2 | :members: 3 | -------------------------------------------------------------------------------- /doc/source/gridData/overview.rst: -------------------------------------------------------------------------------- 1 | .. automodule:: gridData 2 | 3 | -------------------------------------------------------------------------------- /doc/source/index.rst: -------------------------------------------------------------------------------- 1 | .. -*- mode: rst; coding: utf-8 -*- 2 | .. gridDataFormats documentation master file, created by 3 | sphinx-quickstart on Tue Jun 30 13:16:50 2015. 4 | You can adapt this file completely to your liking, but it should at least 5 | contain the root `toctree` directive. 6 | 7 | GridDataFormats: Handling volumetric data in Python 8 | =================================================== 9 | 10 | :Release: |release| 11 | :Date: |today| 12 | :Citation: |zenodo| 13 | 14 | The :mod:`gridData` module contains a simple class 15 | :class:`~gridData.core.Grid` that makes it easier to work with data on 16 | a regular grid. A limited number of commonly used formats can be read 17 | and written as described in :ref:`supported-file-formats`. 18 | 19 | The code is available under the `Lesser GNU General Public License, 20 | version 3`_ (see also the files ``COPYING`` and ``COPYING.LESSER`` in 21 | the source distribution). Releases are available from the Python 22 | Package index under GridDataFormats_ and source code is available from 23 | the GitHub repository 24 | https://github.com/MDAnalysis/GridDataFormats. 25 | 26 | .. rubric:: Participating 27 | 28 | Ask questions on the `mdnalysis-discussion mailing list`_ and join the 29 | discussion. 30 | 31 | Please report problems and enhancement requests through the `issue 32 | tracker`_. 33 | 34 | GridDataFormats is open source and welcomes your contributions. Fork 35 | the `GridDataFormats repository on GitHub`_ and submit a pull request. Participate on the 36 | `developer mailing list`_. 37 | 38 | 39 | 40 | .. _`Lesser GNU General Public License, version 3`: 41 | https://www.gnu.org/licenses/lgpl-3.0.en.html 42 | .. _GridDataFormats: https://pypi.python.org/pypi/GridDataFormats/ 43 | .. _issue tracker: https://github.com/MDAnalysis/GridDataFormats/issues 44 | .. _mdnalysis-discussion mailing list: 45 | https://groups.google.com/group/mdnalysis-discussion 46 | .. _developer mailing list: 47 | https://groups.google.com/group/mdnalysis-devel 48 | .. _GridDataFormats repository on GitHub: 49 | https://github.com/MDAnalysis/GridDataFormats 50 | 51 | .. |zenodo| image:: https://zenodo.org/badge/13219/MDAnalysis/GridDataFormats.svg 52 | :alt: Zenodo DOI 53 | :target: https://zenodo.org/badge/latestdoi/13219/MDAnalysis/GridDataFormats 54 | 55 | 56 | .. Contents (sidebar) 57 | 58 | .. toctree:: 59 | :maxdepth: 4 60 | :hidden: 61 | 62 | installation 63 | gridData/overview 64 | gridData/basic 65 | gridData/core 66 | gridData/formats 67 | -------------------------------------------------------------------------------- /doc/source/installation.rst: -------------------------------------------------------------------------------- 1 | Installation 2 | ============ 3 | 4 | GridDataFormats can be easily installed via the :ref:`conda` or 5 | :ref:`pip` package managers. 6 | 7 | It is a pure-python package but it has a few other packages (namely 8 | scipy) as dependencies that contain compiled code. For ease of 9 | installation we recommend :ref:`conda` but 10 | :ref:`pip` and installation from source are also fully 11 | supported. 12 | 13 | 14 | 15 | .. _conda-install: 16 | 17 | Installing GridDataFormats with ``conda`` 18 | ----------------------------------------- 19 | 20 | The `conda`_ package manager installs, runs, and updates whole 21 | environments with all their dependencies. 22 | 23 | Installing *GridDataFormats* from the *conda-forge* channel can be 24 | achieved by adding "conda-forge" to your channels with:: 25 | 26 | conda config --add channels conda-forge 27 | 28 | Once the *conda-forge* channel has been enabled, *GridDataFormats* can 29 | be installed with:: 30 | 31 | conda install griddataformats 32 | 33 | Any missing dependencies will be automatically downloaded and 34 | installed in the appropriate versions. 35 | 36 | You can later update with :: 37 | 38 | conda update griddataformats 39 | 40 | 41 | .. _conda: https://docs.conda.io/ 42 | 43 | .. _pip-install: 44 | 45 | Installing GridDataFormats with ``pip`` 46 | --------------------------------------- 47 | 48 | Install with `pip`_:: 49 | 50 | pip install gridDataFormats 51 | 52 | and you can later update with :: 53 | 54 | pip install --upgrade gridDataFormats 55 | 56 | 57 | `pip` also automatically downloads all missing dependencies and will 58 | attempt to compile them if necessary; this step can fail if you do not 59 | have the correct build environment with the necessary compilers 60 | installed. You should then read the pip_ documentation to learn what 61 | is needed or switch to the :ref:`conda installation`. 62 | 63 | 64 | .. _pip: https://pip.pypa.io/ 65 | 66 | 67 | -------------------------------------------------------------------------------- /examples/extractISOPdb/extractISOPdb.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import re,numpy as npy 4 | import sys,os 5 | #---------------------------------------------------------------------------------------------------------- 6 | # modify your stuff here 7 | 8 | if len(sys.argv)>3 : 9 | d=sys.argv[1] 10 | if not os.path.exists(d): 11 | sys.exit(" ERROR : the dx file provided does not exist . Breaking up.") 12 | else : 13 | sys.exit(" ERROR : please provide the name of the dx file, the isovalue and the outputname :\n python extractISOPdb.py path/my_dx_file.dx outputname.pdb isovalue") 14 | 15 | 16 | inputfile=sys.argv[1] 17 | pathOutput=sys.argv[2] 18 | iso_value=float(sys.argv[3]) 19 | #------------------------------------------------------------------------------------------------------------ 20 | 21 | #don't touch the rest...unless you know what you do :) 22 | 23 | 24 | 25 | f=open(inputfile,"r") 26 | 27 | 28 | #get the axis that shows the most variation during the trajectory, this will be the leading axis 29 | 30 | #read the header - here is an example 31 | header="" 32 | tmp=f.readline() 33 | while tmp[0]!="o" : 34 | header= header + tmp 35 | tmp=f.readline() 36 | #print header 37 | 38 | #read the grid size 39 | r=re.compile('\w+') 40 | gsize=r.findall(tmp) 41 | gsize=[int(gsize[-3]),int(gsize[-2]),int(gsize[-1])] 42 | #print gsize 43 | 44 | #read the origin of the system 45 | line=f.readline().split() 46 | origin=[float(line[-3]),float(line[-2]),float(line[-1])] 47 | #print origin 48 | 49 | #read grid space 50 | line=f.readline().split() 51 | deltax=[float(line[-3]),float(line[-2]),float(line[-1])] 52 | line=f.readline().split() 53 | deltay=[float(line[-3]),float(line[-2]),float(line[-1])] 54 | line=f.readline().split() 55 | deltaz=[float(line[-3]),float(line[-2]),float(line[-1])] 56 | 57 | 58 | #pay attention here, this assumes always orthogonal normalized space, but normally it should be ok 59 | delta=npy.array([deltax[0],deltay[1],deltaz[2]]) 60 | 61 | #read the number of data 62 | f.readline() 63 | r=re.compile('\d+') 64 | n_entries=int(r.findall(f.readline())[2]) 65 | 66 | if(n_entries!=gsize[0]*gsize[1]*gsize[2]) : sys.exit("Error reading the file. The number of expected data points does not correspond to the number of labeled data points in the header.") 67 | 68 | #create a 3D numpy array filled up with 0 69 | 70 | 71 | #initiate xyz counter for reading the grid data 72 | z=0 73 | y=0 74 | x=0 75 | 76 | print "Reading the grid. Depending on the number of data points you have this might take a while...." 77 | path=open(pathOutput,"w") 78 | print "delta=", delta 79 | print "origin=", origin 80 | print "gsize=", gsize 81 | counter=1 82 | print n_entries/3 83 | for count in range(n_entries/3) : 84 | c=f.readline().split() 85 | if(len(c)!=3) : 86 | print "error reading grid data" 87 | sys.exit("exiting the program") 88 | for i in range(3): 89 | if (iso_value<0 and float(c[i]) < iso_value) or (iso_value > 0 and float(c[i]) > iso_value) : 90 | path.write('ATOM %5d C PTH 1 %8.3f%8.3f%8.3f%6.2f%6.2f\n'%(counter,origin[0]+float(x)*delta[0],origin[1]+float(y)*delta[1],origin[2]+float(z)*delta[2],0.0,0.0)) 91 | counter+=1 92 | z+=1 93 | if z >= gsize[2]: 94 | z=0 95 | y+=1 96 | if y >=gsize[1]: 97 | y=0 98 | x+=1 99 | 100 | 101 | path.close() 102 | f.close() 103 | 104 | print "finished writing %s"%(pathOutput) 105 | -------------------------------------------------------------------------------- /examples/extractISOPdb/extractISOPdb_gdf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import argparse 4 | import numpy as np 5 | from gridData import Grid, OpenDX 6 | 7 | def parse_args(): 8 | parser = argparse.ArgumentParser(description='Save grid to PDB a given isovalue') 9 | parser.add_argument('input') 10 | parser.add_argument('output') 11 | parser.add_argument('iso', type=float) 12 | return parser.parse_args() 13 | 14 | def extractISOPdb(input, output, iso): 15 | g = Grid(input) 16 | 17 | # JD: this is clunky but i'm not sure what's a better way 18 | data = np.array(OpenDX.array(3,g.grid).array.flat) 19 | data = data.reshape((len(data)/3, 3)) 20 | 21 | x, y, z = 0, 0, 0 22 | counter = 1 23 | lines = [] 24 | for point in data: 25 | for pos in point: 26 | if (iso<0 and pos < iso) or (iso > 0 and pos > iso) : 27 | line = 'ATOM %5d C PTH 1 %8.3f%8.3f%8.3f%6.2f%6.2f\n'%(counter,g.origin[0]+float(x)*g.delta[0,0], 28 | g.origin[1]+float(y)*g.delta[1,1], 29 | g.origin[2]+float(z)*g.delta[2,2], 0.0,0.0) 30 | lines.append(line) 31 | counter += 1 32 | z+=1 33 | if z >= g.grid.shape[2]: 34 | z=0 35 | y+=1 36 | if y >=g.grid.shape[1]: 37 | y=0 38 | x+=1 39 | 40 | with open(output, "w") as f: 41 | f.writelines(lines) 42 | 43 | return g 44 | if __name__ == "__main__": 45 | args = parse_args() 46 | g = extractISOPdb(args.input, args.output, args.iso) -------------------------------------------------------------------------------- /examples/extractISOPdb/how: -------------------------------------------------------------------------------- 1 | 2 | # save a dx file to a pdb at a given iso value 3 | 4 | python extractISOPdb.py mdpout_freq_grid.dx mdpout_freq_iso_0_1.pdb 0.1 5 | 6 | python extractISOPdb_gdf.py mdpout_freq_grid.dx mdpout_freq_iso_0_1_gdf.pdb 0.1 7 | 8 | # mdpout_freq_iso_0_1_gdf.pdb and mdpout_freq_iso_0_1.pdb should be identical -------------------------------------------------------------------------------- /gridData/OpenDX.py: -------------------------------------------------------------------------------- 1 | # gridData --- python modules to read and write gridded data 2 | # Copyright (c) 2009-2014 Oliver Beckstein 3 | # Released under the GNU Lesser General Public License, version 3 or later. 4 | 5 | r""" 6 | :mod:`~gridData.OpenDX` --- routines to read and write simple OpenDX files 7 | ========================================================================== 8 | 9 | The OpenDX format for multi-dimensional grid data. OpenDX is a free 10 | visualization software, see http://www.opendx.org. 11 | 12 | .. Note:: This module only implements a primitive subset, sufficient 13 | to represent n-dimensional regular grids. 14 | 15 | The OpenDX scalar file format is specified in Appendix `B.2 Data 16 | Explorer Native Files`_ [#OpenDXformat]_. 17 | 18 | If you want to build a dx object from your data you can either use the 19 | convenient :class:`~gridData.core.Grid` class from the top level 20 | module (:class:`gridData.Grid`) or see the lower-level methods 21 | described below. 22 | 23 | 24 | .. _opendx-read-write: 25 | 26 | Reading and writing OpenDX files 27 | -------------------------------- 28 | 29 | If you have OpenDX files from other software and you just want to 30 | **read** it into a Python array then you do not really need to use the 31 | interface in :mod:`gridData.OpenDX`: just use 32 | :class:`~gridData.core.Grid` and load the file:: 33 | 34 | from gridData import Grid 35 | g = Grid("data.dx") 36 | 37 | This should work for files produced by common visualization programs 38 | (VMD_, PyMOL_, Chimera_). The documentation for :mod:`gridData` tells 39 | you more about what to do with the :class:`~gridData.core.Grid` 40 | object. 41 | 42 | If you want to **write** an OpenDX file then you just use the 43 | :meth:`gridData.core.Grid.export` method with `file_format="dx"` (or 44 | just use a filename with extension ".dx"):: 45 | 46 | g.export("data.dx") 47 | 48 | However, some visualization programs do not implement full OpenDX 49 | specifications and only read very specific, "OpenDX-like" 50 | files. :mod:`gridData.OpenDX` tries to be compatible with these 51 | formats. However, sometimes additional help is needed to write an 52 | OpenDX file that can be read by a specific software, as described 53 | below: 54 | 55 | Known issues for writing OpenDX files 56 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 57 | 58 | * APBS require the delta to be written to the seventh significant figure. 59 | The delta is now written to reflect this increase in precision. 60 | 61 | .. versionchanged:: 0.6.0 62 | 63 | * PyMOL_ requires OpenDX files with the type specification "double" in 64 | the `class array` section (see issue `#35`_). By default (since 65 | release 0.4.0), the type is set to the one that most closely 66 | approximates the dtype of the numpy array :attr:`Grid.grid`, which 67 | holds all data. This is often :class:`numpy.float64`, which will 68 | create an OpenDX type "double", which PyMOL will read. 69 | 70 | However, if you want to *force* a specific OpenDX type (such as 71 | "float" or "double", see :attr:`gridData.OpenDX.array.dx_types` for 72 | available values) then you can use the ``type`` keyword argument:: 73 | 74 | g.export("for_pymol.dx", type="double") 75 | 76 | If you always want to be able to read OpenDX files with PyMOL, it is 77 | suggested to always export with ``type="double"``. 78 | 79 | .. versionadded:: 0.4.0 80 | 81 | 82 | 83 | .. _VMD: http://www.ks.uiuc.edu/Research/vmd/ 84 | .. _PyMOL: http://www.pymol.org/ 85 | .. _Chimera: https://www.cgl.ucsf.edu/chimera/ 86 | .. _`#35`: https://github.com/MDAnalysis/GridDataFormats/issues/35 87 | 88 | 89 | 90 | 91 | Building a dx object from a numpy array ``A`` 92 | --------------------------------------------- 93 | 94 | If you have a numpy array ``A`` that represents a density in cartesian 95 | space then you can construct a dx object (named a *field* in OpenDX 96 | parlance) if you provide some additional information that fixes the 97 | coordinate system in space and defines the units along the axes. 98 | 99 | The following data are required: 100 | 101 | grid 102 | numpy nD array (typically a nD histogram) 103 | grid.shape 104 | the shape of the array 105 | origin 106 | the cartesian coordinates of the center of the (0,0,..,0) grid cell 107 | delta 108 | :math:`n \times n` array with the length of a grid cell along 109 | each axis; for regular rectangular grids the off-diagonal 110 | elements are 0 and the diagonal ones correspond to the 111 | 'bin width' of the histogram, eg ``delta[0,0] = 1.0`` (Angstrom) 112 | 113 | The DX data type ("type" in the DX file) is determined from the 114 | :class:`numpy.dtype` of the :class:`numpy.ndarray` that is provided as 115 | the *grid* (or with the *type* keyword argument to 116 | :class:`gridData.OpenDX.array`). 117 | 118 | For example, to build a :class:`field`:: 119 | 120 | dx = OpenDX.field('density') 121 | dx.add('positions', OpenDX.gridpositions(1, grid.shape, origin, delta)) 122 | dx.add('connections', OpenDX.gridconnections(2, grid.shape)) 123 | dx.add('data', OpenDX.array(3, grid)) 124 | 125 | or all with the constructor:: 126 | 127 | dx = OpenDX.field('density', components=dict( 128 | positions=OpenDX.gridpositions(1,grid.shape, d.origin, d.delta), 129 | connections=OpenDX.gridconnections(2, grid.shape), 130 | data=OpenDX.array(3, grid))) 131 | 132 | 133 | Building a dx object from a dx file 134 | ----------------------------------- 135 | 136 | One can also read data from an existing dx file:: 137 | 138 | dx = OpenDX.field(0) 139 | dx.read('file.dx') 140 | 141 | Only simple arrays are read and initially stored as a 1-d 142 | :class:`numpy.ndarray` in the `dx.components['data'].array` with the 143 | :class:`numpy.dtype` determined by the DX type in the file. 144 | 145 | The dx :class:`field` object has a method 146 | :meth:`~OpenDX.field.histogramdd` that produces output identical to 147 | the :func:`numpy.histogramdd` function by taking the stored dimension 148 | and deltas into account. In this way, one can store nD histograms in a 149 | portable and universal manner:: 150 | 151 | histogram, edges = dx.histogramdd() 152 | 153 | .. rubric:; Footnotes 154 | 155 | .. [#OpenDXformat] The original link to the OpenDX file format specs 156 | http://opendx.sdsc.edu/docs/html/pages/usrgu068.htm#HDREDF is dead so I am linking 157 | to an archived copy at the Internet Archive , `B.2 Data Explorer Native Files`_. 158 | 159 | .. _`B.2 Data Explorer Native Files`: 160 | https://web.archive.org/web/20080808140524/http://opendx.sdsc.edu/docs/html/pages/usrgu068.htm 161 | .. http://opendx.sdsc.edu/docs/html/pages/usrgu068.htm#HDREDF 162 | 163 | Classes and functions 164 | --------------------- 165 | 166 | """ 167 | import numpy 168 | import re 169 | import gzip 170 | 171 | import warnings 172 | 173 | # Python 2/3 compatibility (see issue #99) 174 | # and https://bugs.python.org/issue30012 175 | import sys 176 | if sys.version_info >= (3, ): 177 | def _gzip_open(filename, mode="rt"): 178 | return gzip.open(filename, mode) 179 | else: 180 | def _gzip_open(filename, mode="rt"): 181 | return gzip.open(filename) 182 | del sys 183 | 184 | class DXclass(object): 185 | """'class' object as defined by OpenDX""" 186 | def __init__(self,classid): 187 | """id is the object number""" 188 | self.id = classid # serial number of the object 189 | self.name = None # name of the DXclass 190 | self.component = None # component type 191 | self.D = None # dimensions 192 | 193 | def write(self, stream, optstring="", quote=False): 194 | """write the 'object' line; additional args are packed in string""" 195 | classid = str(self.id) 196 | if quote: classid = '"'+classid+'"' 197 | # Only use a *single* space between tokens; both chimera's and pymol's DX parser 198 | # does not properly implement the OpenDX specs and produces garbage with multiple 199 | # spaces. (Chimera 1.4.1, PyMOL 1.3) 200 | to_write = 'object '+classid+' class '+str(self.name)+' '+optstring+'\n' 201 | self._write_line(stream, to_write) 202 | 203 | @staticmethod 204 | def _write_line(stream, line="", quote=False): 205 | """write a line to the file""" 206 | if isinstance(stream, gzip.GzipFile): 207 | line = line.encode() 208 | stream.write(line) 209 | 210 | def read(self, stream): 211 | raise NotImplementedError('Reading is currently not supported.') 212 | 213 | def ndformat(self,s): 214 | """Returns a string with as many repetitions of s as self 215 | has dimensions (derived from shape)""" 216 | return s * len(self.shape) 217 | 218 | def __repr__(self): 219 | return '' 220 | 221 | 222 | class gridpositions(DXclass): 223 | """OpenDX gridpositions class. 224 | 225 | shape D-tuplet describing size in each dimension 226 | origin coordinates of the centre of the grid cell with index 0,0,...,0 227 | delta DxD array describing the deltas 228 | """ 229 | def __init__(self,classid,shape=None,origin=None,delta=None,**kwargs): 230 | if shape is None or origin is None or delta is None: 231 | raise ValueError('all keyword arguments are required') 232 | self.id = classid 233 | self.name = 'gridpositions' 234 | self.component = 'positions' 235 | self.shape = numpy.asarray(shape) # D dimensional shape 236 | self.origin = numpy.asarray(origin) # D vector 237 | self.rank = len(self.shape) # D === rank 238 | 239 | self.delta = numpy.asarray(delta) # DxD array of grid spacings 240 | # gridDataFormats actually provides a simple 1D array with the deltas because only 241 | # regular grids are used but the following is a reminder that OpenDX should be able 242 | # to handle more complicated volume elements 243 | if len(self.delta.shape) == 1: 244 | self.delta = numpy.diag(delta) 245 | if self.delta.shape != (self.rank, self.rank): 246 | # check OpenDX specs for irreg spacing if we want to implement 247 | # anything more complicated 248 | raise NotImplementedError('Only regularly spaced grids allowed, ' 249 | 'not delta={}'.format(self.delta)) 250 | def write(self, stream): 251 | super(gridpositions, self).write( 252 | stream, ('counts '+self.ndformat(' %d')) % tuple(self.shape)) 253 | self._write_line(stream, 'origin %f %f %f\n' % tuple(self.origin)) 254 | for delta in self.delta: 255 | self._write_line( 256 | stream, ('delta ' + 257 | self.ndformat(' {:.7g}').format(*delta) + 258 | '\n')) 259 | 260 | def edges(self): 261 | """Edges of the grid cells, origin at centre of 0,0,..,0 grid cell. 262 | 263 | Only works for regular, orthonormal grids. 264 | """ 265 | return [self.delta[d,d] * numpy.arange(self.shape[d]+1) + self.origin[d]\ 266 | - 0.5*self.delta[d,d] for d in range(self.rank)] 267 | 268 | 269 | class gridconnections(DXclass): 270 | """OpenDX gridconnections class""" 271 | def __init__(self,classid,shape=None,**kwargs): 272 | if shape is None: 273 | raise ValueError('all keyword arguments are required') 274 | self.id = classid 275 | self.name = 'gridconnections' 276 | self.component = 'connections' 277 | self.shape = numpy.asarray(shape) # D dimensional shape 278 | 279 | def write(self, stream): 280 | super(gridconnections, self).write( 281 | stream, ('counts '+self.ndformat(' %d')) % tuple(self.shape)) 282 | 283 | 284 | class array(DXclass): 285 | """OpenDX array class. 286 | 287 | See `Array Objects`_ for details. 288 | 289 | .. _Array Objects: 290 | https://web.archive.org/web/20080808140524/http://opendx.sdsc.edu/docs/html/pages/usrgu068.htm#Header_440 291 | """ 292 | #: conversion from :attr:`numpy.dtype.name` to closest OpenDX array type 293 | #: (round-tripping is not guaranteed to produce identical types); not all 294 | #: types are supported (e.g., strings are missing) 295 | np_types = { 296 | "uint8": "byte", # DX "unsigned byte" equivalent 297 | "int8": "signed byte", 298 | "uint16": "unsigned short", 299 | "int16": "short", # DX "signed short" equivalent 300 | "uint32": "unsigned int", 301 | "int32": "int", # DX "signed int" equivalent 302 | "uint64": "unsigned int", # not explicit in DX, for compatibility 303 | "int64": "int", # not explicit in DX, for compatibility 304 | # "hyper", # ? 305 | "float32": "float", # default 306 | "float64": "double", 307 | "float16": "float", # float16 not available in DX, use float 308 | # numpy "float128 not available, raise error 309 | # "string" not automatically supported 310 | } 311 | #: conversion from OpenDX type to closest :class:`numpy.dtype` 312 | #: (round-tripping is not guaranteed to produce identical types); not all 313 | #: types are supported (e.g., strings and conversion to int64 are missing) 314 | dx_types = { 315 | "byte": "uint8", 316 | "unsigned byte": "uint8", 317 | "signed byte": "int8", 318 | "unsigned short": "uint16", 319 | "short": "int16", 320 | "signed short": "int16", 321 | "unsigned int": "uint32", 322 | "int": "int32", 323 | "signed int": "int32", 324 | # "hyper", # ? 325 | "float": "float32", # default 326 | "double": "float64", 327 | # "string" not automatically supported 328 | } 329 | 330 | def __init__(self, classid, array=None, type=None, typequote='"', 331 | **kwargs): 332 | """ 333 | Parameters 334 | ---------- 335 | classid : int 336 | array : array_like 337 | type : str (optional) 338 | Set the DX type in the output file and cast `array` to 339 | the closest numpy dtype. `type` must be one of the 340 | allowed types in DX files as defined under `Array 341 | Objects`_. The default ``None`` tries to set the type 342 | from the :class:`numpy.dtype` of `array`. 343 | 344 | .. versionadded:: 0.4.0 345 | 346 | Raises 347 | ------ 348 | ValueError 349 | if `array` is not provided; or if `type` is not of the correct 350 | DX type 351 | """ 352 | if array is None: 353 | raise ValueError('array keyword argument is required') 354 | self.id = classid 355 | self.name = 'array' 356 | self.component = 'data' 357 | # detect type https://github.com/MDAnalysis/GridDataFormats/issues/35 358 | if type is None: 359 | self.array = numpy.asarray(array) 360 | try: 361 | self.type = self.np_types[self.array.dtype.name] 362 | except KeyError: 363 | warnings.warn(("array dtype.name = {0} can not be automatically " 364 | "converted to a DX array type. Use the 'type' keyword " 365 | "to manually specify the correct type.").format( 366 | self.array.dtype.name)) 367 | self.type = self.array.dtype.name # will raise ValueError on writing 368 | else: 369 | try: 370 | self.array = numpy.asarray(array, dtype=self.dx_types[type]) 371 | except KeyError: 372 | raise ValueError(("DX type {0} cannot be converted to an " 373 | "appropriate numpy dtype. Available " 374 | "types are: {1}".format(type, 375 | list(self.dx_types.values())))) 376 | self.type = type 377 | self.typequote = typequote 378 | 379 | def write(self, stream): 380 | """Write the *class array* section. 381 | 382 | Parameters 383 | ---------- 384 | stream : stream 385 | 386 | Raises 387 | ------ 388 | ValueError 389 | If the `dxtype` is not a valid type, :exc:`ValueError` is 390 | raised. 391 | 392 | """ 393 | if self.type not in self.dx_types: 394 | raise ValueError(("DX type {} is not supported in the DX format. \n" 395 | "Supported valus are: {}\n" 396 | "Use the type= keyword argument.").format( 397 | self.type, list(self.dx_types.keys()))) 398 | typelabel = (self.typequote+self.type+self.typequote) 399 | super(array, self).write(stream, 'type {0} rank 0 items {1} data follows'.format( 400 | typelabel, self.array.size)) 401 | 402 | # grid data, serialized as a C array (z fastest varying) 403 | # (flat iterator is equivalent to: for x: for y: for z: grid[x,y,z]) 404 | # VMD's DX reader requires exactly 3 values per line 405 | fmt_string = "{:d}" 406 | if (self.array.dtype.kind == 'f' or self.array.dtype.kind == 'c'): 407 | precision = numpy.finfo(self.array.dtype).precision 408 | fmt_string = "{:."+"{:d}".format(precision)+"f}" 409 | values_per_line = 3 410 | values = self.array.flat 411 | while 1: 412 | try: 413 | for i in range(values_per_line): 414 | self._write_line(stream, fmt_string.format(next(values)) + "\t") 415 | self._write_line(stream, '\n') 416 | except StopIteration: 417 | self._write_line(stream, '\n') 418 | break 419 | self._write_line(stream, 'attribute "dep" string "positions"\n') 420 | 421 | class field(DXclass): 422 | """OpenDX container class 423 | 424 | The *field* is the top-level object and represents the whole 425 | OpenDX file. It contains a number of other objects. 426 | 427 | Instantiate a DX object from this class and add subclasses with 428 | :meth:`add`. 429 | 430 | """ 431 | # perhaps this should not derive from DXclass as those are 432 | # objects in field but a field cannot contain itself 433 | def __init__(self,classid='0',components=None,comments=None): 434 | """OpenDX object, which is build from a list of components. 435 | 436 | Parameters 437 | ---------- 438 | 439 | id : str 440 | arbitrary string 441 | components : dict 442 | dictionary of DXclass instances (no sanity check on the 443 | individual ids!) which correspond to 444 | 445 | * positions 446 | * connections 447 | * data 448 | 449 | comments : list 450 | list of strings; each string becomes a comment line 451 | prefixed with '#'. Avoid newlines. 452 | 453 | 454 | A field must have at least the components 'positions', 455 | 'connections', and 'data'. Those components are associated 456 | with objects belonging to the field. When writing a dx file 457 | from the field, only the required objects are dumped to the file. 458 | 459 | (For a more general class that can use field: 460 | Because there could be more objects than components, we keep a 461 | separate object list. When dumping the dx file, first all 462 | objects are written and then the field object describes its 463 | components. Objects are referenced by their unique id.) 464 | 465 | .. Note:: uniqueness of the *id* is not checked. 466 | 467 | 468 | Example 469 | ------- 470 | Create a new dx object:: 471 | 472 | dx = OpenDX.field('density',[gridpoints,gridconnections,array]) 473 | 474 | """ 475 | if components is None: 476 | components = dict(positions=None,connections=None,data=None) 477 | if comments is None: 478 | comments = ['OpenDX written by gridData.OpenDX', 479 | 'from https://github.com/MDAnalysis/GridDataFormats'] 480 | elif type(comments) is not list: 481 | comments = [str(comments)] 482 | self.id = classid # can be an arbitrary string 483 | self.name = 'field' 484 | self.component = None # cannot be a component of a field 485 | self.components = components 486 | self.comments= comments 487 | 488 | def _openfile_writing(self, filename): 489 | """Returns a regular or gz file stream for writing""" 490 | if filename.endswith('.gz'): 491 | return gzip.open(filename, 'wb') 492 | else: 493 | return open(filename, 'w') 494 | 495 | def write(self, filename): 496 | """Write the complete dx object to the file. 497 | 498 | This is the simple OpenDX format which includes the data into 499 | the header via the 'object array ... data follows' statement. 500 | 501 | Only simple regular arrays are supported. 502 | 503 | The format should be compatible with VMD's dx reader plugin. 504 | """ 505 | # comments (VMD chokes on lines of len > 80, so truncate) 506 | maxcol = 80 507 | with self._openfile_writing(str(filename)) as outfile: 508 | for line in self.comments: 509 | comment = '# '+str(line) 510 | self._write_line(outfile, comment[:maxcol]+'\n') 511 | # each individual object 512 | for component, object in self.sorted_components(): 513 | object.write(outfile) 514 | # the field object itself 515 | super(field, self).write(outfile, quote=True) 516 | for component, object in self.sorted_components(): 517 | self._write_line(outfile, 'component "%s" value %s\n' % ( 518 | component, str(object.id))) 519 | 520 | def read(self, stream): 521 | """Read DX field from file. 522 | 523 | dx = OpenDX.field.read(dxfile) 524 | 525 | The classid is discarded and replaced with the one from the file. 526 | """ 527 | DXfield = self 528 | p = DXParser(stream) 529 | p.parse(DXfield) 530 | 531 | def add(self,component,DXobj): 532 | """add a component to the field""" 533 | self[component] = DXobj 534 | 535 | def add_comment(self,comment): 536 | """add comments""" 537 | self.comments.append(comment) 538 | 539 | def sorted_components(self): 540 | """iterator that returns (component,object) in id order""" 541 | for component, object in \ 542 | sorted(self.components.items(), 543 | key=lambda comp_obj: comp_obj[1].id): 544 | yield component, object 545 | 546 | def histogramdd(self): 547 | """Return array data as (edges,grid), i.e. a numpy nD histogram.""" 548 | shape = self.components['positions'].shape 549 | edges = self.components['positions'].edges() 550 | hist = self.components['data'].array.reshape(shape) 551 | return (hist,edges) 552 | 553 | def __getitem__(self,key): 554 | return self.components[key] 555 | 556 | def __setitem__(self,key,value): 557 | self.components[key] = value 558 | 559 | def __repr__(self): 560 | return '' 563 | 564 | 565 | #------------------------------------------------------------ 566 | # DX file parsing 567 | #------------------------------------------------------------ 568 | 569 | class DXParseError(Exception): 570 | """general exception for parsing errors in DX files""" 571 | pass 572 | class DXParserNoTokens(DXParseError): 573 | """raised when the token buffer is exhausted""" 574 | pass 575 | 576 | class Token: 577 | # token categories (values of dx_regex must match up with these categories) 578 | category = {'COMMENT': ['COMMENT'], 579 | 'WORD': ['WORD'], 580 | 'STRING': ['QUOTEDSTRING','BARESTRING','STRING'], 581 | 'WHITESPACE': ['WHITESPACE'], 582 | 'INTEGER': ['INTEGER'], 583 | 'REAL': ['REAL'], 584 | 'NUMBER': ['INTEGER','REAL']} 585 | # cast functions 586 | cast = {'COMMENT': lambda s:re.sub(r'#\s*','',s), 587 | 'WORD': str, 588 | 'STRING': str, 'QUOTEDSTRING': str, 'BARESTRING': str, 589 | 'WHITESPACE': None, 590 | 'NUMBER': float, 'INTEGER': int, 'REAL': float} 591 | 592 | def __init__(self,code,text): 593 | self.code = code # store raw code 594 | self.text = text 595 | def equals(self,v): 596 | return self.text == v 597 | def iscode(self,code): 598 | return self.code in self.category[code] # use many -> 1 mappings 599 | def value(self,ascode=None): 600 | """Return text cast to the correct type or the selected type""" 601 | if ascode is None: 602 | ascode = self.code 603 | return self.cast[ascode](self.text) 604 | def __repr__(self): 605 | return '' 606 | 607 | class DXInitObject(object): 608 | """Storage class that holds data to initialize one of the 'real' 609 | classes such as OpenDX.array, OpenDX.gridconnections, ... 610 | 611 | All variables are stored in args which will be turned into the 612 | arguments for the DX class. 613 | """ 614 | DXclasses = {'gridpositions':gridpositions, 615 | 'gridconnections':gridconnections, 616 | 'array':array, 'field':field, 617 | } 618 | 619 | def __init__(self,classtype,classid): 620 | self.type = classtype 621 | self.id = classid 622 | self.args = dict() 623 | def initialize(self): 624 | """Initialize the corresponding DXclass from the data. 625 | 626 | class = DXInitObject.initialize() 627 | """ 628 | return self.DXclasses[self.type](self.id,**self.args) 629 | def __getitem__(self,k): 630 | return self.args[k] 631 | def __setitem__(self,k,v): 632 | self.args[k] = v 633 | def __repr__(self): 634 | return '' 635 | 636 | class DXParser(object): 637 | """Brain-dead baroque implementation to read a simple (VMD) dx file. 638 | 639 | Requires a OpenDX.field instance. 640 | 641 | 1) scan for 'object' lines: 642 | 'object' id 'class' class [data] 643 | [data ...] 644 | 2) parse data according to class 645 | 3) construct dx field from classes 646 | """ 647 | 648 | # the regexes must match with the categories defined in the Token class 649 | # REAL regular expression will catch both integers and floats. 650 | # Taken from 651 | # https://docs.python.org/3/library/re.html#simulating-scanf 652 | dx_regex = re.compile(r""" 653 | (?P\#.*$) # comment (until end of line) 654 | |(?P(object|class|counts|origin|delta|type|counts|rank|items|data)) 655 | |"(?P[^\"]*)" # string in double quotes (quotes removed) 656 | |(?P\s+) # white space 657 | |(?P[-+]? # true real number (decimal point or 658 | (\d+(\.\d*)?|\.\d+) # scientific notation) and integers 659 | ([eE][-+]?\d+)?) 660 | |(?P[a-zA-Z_][^\s\#\"]+) # unquoted strings, starting with non-numeric 661 | """, re.VERBOSE) 662 | 663 | 664 | def __init__(self, filename): 665 | """Setup a parser for a simple DX file (from VMD) 666 | 667 | >>> DXfield_object = OpenDX.field(id) 668 | >>> p = DXparser('bulk.dx') 669 | >>> p.parse(DXfield_object) 670 | 671 | The field object will be completely rewritten (including the 672 | id if one is found in the input file. The input files 673 | component layout is currently ignored. 674 | 675 | Note that quotes are removed from quoted strings. 676 | """ 677 | self.filename = str(filename) 678 | self.field = field('grid data',comments=['filename: {0}'.format(self.filename)]) 679 | # other variables are initialised every time parse() is called 680 | 681 | self.parsers = {'general':self.__general, 682 | 'comment':self.__comment, 'object':self.__object, 683 | 'gridpositions':self.__gridpositions, 684 | 'gridconnections':self.__gridconnections, 685 | 'array':self.__array, 'field':self.__field, 686 | } 687 | 688 | 689 | def parse(self, DXfield): 690 | """Parse the dx file and construct a DX field object with component classes. 691 | 692 | A :class:`field` instance *DXfield* must be provided to be 693 | filled by the parser:: 694 | 695 | DXfield_object = OpenDX.field(*args) 696 | parse(DXfield_object) 697 | 698 | A tokenizer turns the dx file into a stream of tokens. A 699 | hierarchy of parsers examines the stream. The level-0 parser 700 | ('general') distinguishes comments and objects (level-1). The 701 | object parser calls level-3 parsers depending on the object 702 | found. The basic idea is that of a 'state machine'. There is 703 | one parser active at any time. The main loop is the general 704 | parser. 705 | 706 | * Constructing the dx objects with classtype and classid is 707 | not implemented yet. 708 | * Unknown tokens raise an exception. 709 | """ 710 | 711 | self.DXfield = DXfield # OpenDX.field (used by comment parser) 712 | self.currentobject = None # containers for data 713 | self.objects = [] # | 714 | self.tokens = [] # token buffer 715 | 716 | if self.filename.endswith('.gz'): 717 | with _gzip_open(self.filename, 'rt') as self.dxfile: 718 | self.use_parser('general') 719 | else: 720 | with open(self.filename, 'r') as self.dxfile: 721 | self.use_parser('general') # parse the whole file and populate self.objects 722 | 723 | # assemble field from objects 724 | for o in self.objects: 725 | if o.type == 'field': 726 | # Almost ignore the field object; VMD, for instance, 727 | # does not write components. To make this work 728 | # seamlessly I have to think harder how to organize 729 | # and use the data, eg preping the field object 730 | # properly and the initializing. Probably should also 731 | # check uniqueness of ids etc. 732 | DXfield.id = o.id 733 | continue 734 | c = o.initialize() 735 | self.DXfield.add(c.component,c) 736 | 737 | # free space 738 | del self.currentobject, self.objects 739 | 740 | 741 | 742 | def __general(self): 743 | """Level-0 parser and main loop. 744 | 745 | Look for a token that matches a level-1 parser and hand over control.""" 746 | while 1: # main loop 747 | try: 748 | tok = self.__peek() # only peek, apply_parser() will consume 749 | except DXParserNoTokens: 750 | # save previous DXInitObject 751 | # (kludge in here as the last level-2 parser usually does not return 752 | # via the object parser) 753 | if self.currentobject and self.currentobject not in self.objects: 754 | self.objects.append(self.currentobject) 755 | return # stop parsing and finish 756 | # decision branches for all level-1 parsers: 757 | # (the only way to get out of the lower level parsers!) 758 | if tok.iscode('COMMENT'): 759 | self.set_parser('comment') # switch the state 760 | elif tok.iscode('WORD') and tok.equals('object'): 761 | self.set_parser('object') # switch the state 762 | elif self.__parser is self.__general: 763 | # Either a level-2 parser screwed up or some level-1 764 | # construct is not implemented. (Note: this elif can 765 | # be only reached at the beginning or after comments; 766 | # later we never formally switch back to __general 767 | # (would create inifinite loop) 768 | raise DXParseError('Unknown level-1 construct at '+str(tok)) 769 | 770 | self.apply_parser() # hand over to new parser 771 | # (possibly been set further down the hierarchy!) 772 | 773 | # Level-1 parser 774 | def __comment(self): 775 | """Level-1 parser for comments. 776 | 777 | pattern: #.* 778 | Append comment (with initial '# ' stripped) to all comments. 779 | """ 780 | tok = self.__consume() 781 | self.DXfield.add_comment(tok.value()) 782 | self.set_parser('general') # switch back to general parser 783 | 784 | def __object(self): 785 | """Level-1 parser for objects. 786 | 787 | pattern: 'object' id 'class' type ... 788 | 789 | id ::= integer|string|'"'white space string'"' 790 | type ::= string 791 | """ 792 | self.__consume() # 'object' 793 | classid = self.__consume().text 794 | word = self.__consume().text 795 | if word != "class": 796 | raise DXParseError("reserved word %s should have been 'class'." % word) 797 | # save previous DXInitObject 798 | if self.currentobject: 799 | self.objects.append(self.currentobject) 800 | # setup new DXInitObject 801 | classtype = self.__consume().text 802 | self.currentobject = DXInitObject(classtype=classtype,classid=classid) 803 | 804 | self.use_parser(classtype) 805 | 806 | # Level-2 parser (object parsers) 807 | def __gridpositions(self): 808 | """Level-2 parser for gridpositions. 809 | 810 | pattern: 811 | object 1 class gridpositions counts 97 93 99 812 | origin -46.5 -45.5 -48.5 813 | delta 1 0 0 814 | delta 0 1 0 815 | delta 0 0 1 816 | """ 817 | try: 818 | tok = self.__consume() 819 | except DXParserNoTokens: 820 | return 821 | 822 | if tok.equals('counts'): 823 | shape = [] 824 | try: 825 | while True: 826 | # raises exception if not an int 827 | self.__peek().value('INTEGER') 828 | tok = self.__consume() 829 | shape.append(tok.value('INTEGER')) 830 | except (DXParserNoTokens, ValueError): 831 | pass 832 | if len(shape) == 0: 833 | raise DXParseError('gridpositions: no shape parameters') 834 | self.currentobject['shape'] = shape 835 | elif tok.equals('origin'): 836 | origin = [] 837 | try: 838 | while (self.__peek().iscode('INTEGER') or 839 | self.__peek().iscode('REAL')): 840 | tok = self.__consume() 841 | origin.append(tok.value()) 842 | except DXParserNoTokens: 843 | pass 844 | if len(origin) == 0: 845 | raise DXParseError('gridpositions: no origin parameters') 846 | self.currentobject['origin'] = origin 847 | elif tok.equals('delta'): 848 | d = [] 849 | try: 850 | while (self.__peek().iscode('INTEGER') or 851 | self.__peek().iscode('REAL')): 852 | tok = self.__consume() 853 | d.append(tok.value()) 854 | except DXParserNoTokens: 855 | pass 856 | if len(d) == 0: 857 | raise DXParseError('gridpositions: missing delta parameters') 858 | try: 859 | self.currentobject['delta'].append(d) 860 | except KeyError: 861 | self.currentobject['delta'] = [d] 862 | else: 863 | raise DXParseError('gridpositions: '+str(tok)+' not recognized.') 864 | 865 | 866 | def __gridconnections(self): 867 | """Level-2 parser for gridconnections. 868 | 869 | pattern: 870 | object 2 class gridconnections counts 97 93 99 871 | """ 872 | try: 873 | tok = self.__consume() 874 | except DXParserNoTokens: 875 | return 876 | 877 | if tok.equals('counts'): 878 | shape = [] 879 | try: 880 | while True: 881 | # raises exception if not an int 882 | self.__peek().value('INTEGER') 883 | tok = self.__consume() 884 | shape.append(tok.value('INTEGER')) 885 | except (DXParserNoTokens, ValueError): 886 | pass 887 | if len(shape) == 0: 888 | raise DXParseError('gridconnections: no shape parameters') 889 | self.currentobject['shape'] = shape 890 | else: 891 | raise DXParseError('gridconnections: '+str(tok)+' not recognized.') 892 | 893 | 894 | def __array(self): 895 | """Level-2 parser for arrays. 896 | 897 | pattern: 898 | object 3 class array type double rank 0 items 12 data follows 899 | 0 2 0 900 | 0 0 3.6 901 | 0 -2.0 1e-12 902 | +4.534e+01 .34534 0.43654 903 | attribute "dep" string "positions" 904 | """ 905 | try: 906 | tok = self.__consume() 907 | except DXParserNoTokens: 908 | return 909 | 910 | if tok.equals('type'): 911 | tok = self.__consume() 912 | if not tok.iscode('STRING'): 913 | raise DXParseError('array: type was "%s", not a string.'%\ 914 | tok.text) 915 | self.currentobject['type'] = tok.value() 916 | elif tok.equals('rank'): 917 | tok = self.__consume() 918 | try: 919 | self.currentobject['rank'] = tok.value('INTEGER') 920 | except ValueError: 921 | raise DXParseError('array: rank was "%s", not an integer.'%\ 922 | tok.text) 923 | elif tok.equals('items'): 924 | tok = self.__consume() 925 | try: 926 | self.currentobject['size'] = tok.value('INTEGER') 927 | except ValueError: 928 | raise DXParseError('array: items was "%s", not an integer.'%\ 929 | tok.text) 930 | elif tok.equals('data'): 931 | tok = self.__consume() 932 | if not tok.iscode('STRING'): 933 | raise DXParseError('array: data was "%s", not a string.'%\ 934 | tok.text) 935 | if tok.text != 'follows': 936 | raise NotImplementedError(\ 937 | 'array: Only the "data follows header" format is supported.') 938 | if not self.currentobject['size']: 939 | raise DXParseError("array: missing number of items") 940 | # This is the slow part. Once we get here, we are just 941 | # reading in a long list of numbers. Conversion to floats 942 | # will be done later when the numpy array is created. 943 | 944 | # Don't assume anything about whitespace or the number of elements per row 945 | self.currentobject['array'] = [] 946 | while len(self.currentobject['array']) 3 | # Released under the GNU Lesser General Public License, version 3 or later. 4 | # See the files COPYING and COPYING.LESSER for details. 5 | 6 | r""" 7 | Handling grids of data --- :mod:`gridData` 8 | ========================================== 9 | 10 | Overview 11 | -------- 12 | 13 | This module contains classes that allow importing and exporting of 14 | simple gridded data, A grid is an N-dimensional array that represents 15 | a discrete mesh over a region of space. The array axes are taken to be 16 | parallel to the cartesian axes of this space. Together with this array 17 | we also store the edges, which are are (essentially) the cartesian 18 | coordinates of the intersections of the grid (mesh) lines on the 19 | axes. In this way the grid is anchored in space. 20 | 21 | The :class:`~gridData.core.Grid` object can be resampled at arbitrary 22 | resolution (by interpolating the data). Standard algebraic operations 23 | are defined for grids on a point-wise basis (same as for 24 | :class:`numpy.ndarray`). 25 | 26 | 27 | Description 28 | ----------- 29 | 30 | The package reads grid data from files, makes them available as a 31 | :class:`~gridData.core.Grid` object, and allows one to write out the data again. 32 | 33 | A :class:`~gridData.core.Grid` consists of a rectangular, regular, N-dimensional 34 | array of data. It contains 35 | 36 | (1) The position of the array cell edges. 37 | (2) The array data itself. 38 | 39 | This is equivalent to knowing 40 | 41 | (1) The origin of the coordinate system (i.e. which data cell 42 | corresponds to (0,0,...,0) 43 | (2) The spacing of the grid in each dimension. 44 | (3) The data on a grid. 45 | 46 | :class:`~gridData.core.Grid` objects have some convenient properties: 47 | 48 | * The data is represented as a :class:`numpy.ndarray` in 49 | :attr:`Grid.grid<~gridData.core.Grid.grid>` and thus can be directly 50 | manipulated with all the tools available in NumPy. 51 | 52 | * :class:`Grid` instances can be manipulated arithmetically, e.g. one 53 | can simply add or subtract two of them and get another one, or 54 | multiply by a constant. Note that all operations are defined 55 | point-wise (see the :mod:`numpy` documentation for details) and that 56 | only grids defined on the same cell edges can be combined. 57 | 58 | * A :class:`~gridData.core.Grid` object can also be created from 59 | within python code e.g. from the output of the 60 | :func:`numpy.histogramdd` function. 61 | 62 | * The representation of the data is abstracted from the format that 63 | the files are saved in. This makes it straightforward to add 64 | additional readers for new formats. 65 | 66 | * The data can be written out again in formats that are understood by 67 | other programs such as VMD_, ChimeraX_ or PyMOL_. 68 | 69 | 70 | Reading grid data files 71 | ----------------------- 72 | 73 | Some Formats_ can be read directly from a file on disk:: 74 | 75 | g = Grid(filename) 76 | 77 | *filename* could be, for instance, "density.dx". 78 | 79 | 80 | Constructing a Grid 81 | ------------------- 82 | 83 | Data from an n-dimensional array can be packaged as a :class:`~gridData.core.Grid` 84 | for convenient handling (especially export to other formats). The 85 | :class:`~gridData.core.Grid` class acts as a universal constructor:: 86 | 87 | g = Grid(ndarray, edges=edges) # from histogramdd 88 | g = Grid(ndarray, origin=origin, delta=delta) # from arbitrary data 89 | 90 | g.export(filename, format) # export to the desire format 91 | 92 | See the doc string for :class:`~gridData.core.Grid` for details. 93 | 94 | 95 | Formats 96 | ------- 97 | 98 | For the available file formats see :ref:`supported-file-formats`. 99 | 100 | 101 | .. _VMD: https://www.ks.uiuc.edu/Research/vmd/ 102 | 103 | .. _PyMOL: https://pymol.org/ 104 | 105 | .. _ChimeraX: https://www.cgl.ucsf.edu/chimerax/ 106 | 107 | """ 108 | 109 | from .core import Grid 110 | from . import OpenDX 111 | from . import gOpenMol 112 | from . import mrc 113 | 114 | __all__ = ['Grid', 'OpenDX', 'gOpenMol', 'mrc'] 115 | 116 | from importlib.metadata import version 117 | __version__ = version("GridDataFormats") 118 | 119 | -------------------------------------------------------------------------------- /gridData/core.py: -------------------------------------------------------------------------------- 1 | # gridDataFormats --- python modules to read and write gridded data 2 | # Copyright (c) 2009-2014 Oliver Beckstein 3 | # Released under the GNU Lesser General Public License, version 3 or later. 4 | r""" 5 | Core functionality for storing n-D grids --- :mod:`gridData.core` 6 | ================================================================= 7 | 8 | The :mod:`core` module contains classes and functions that are 9 | independent of the grid data format. In particular this module 10 | contains the :class:`Grid` class that acts as a universal constructor 11 | for specific formats:: 12 | 13 | g = Grid(**kwargs) # construct 14 | g.export(filename, format) # export to the desired format 15 | 16 | Some formats can also be read:: 17 | 18 | g = Grid() # make an empty Grid 19 | g.load(filename) # populate with data from filename 20 | 21 | 22 | Classes and functions 23 | --------------------- 24 | 25 | """ 26 | import os 27 | import errno 28 | import pickle 29 | 30 | import numpy 31 | 32 | # For interpolated grids: need scipy.ndimage but we import it only when needed: 33 | # import scipy 34 | 35 | from . import OpenDX 36 | from . import gOpenMol 37 | from . import mrc 38 | 39 | 40 | def _grid(x): 41 | """Access the underlying ndarray of a Grid object or return the object itself""" 42 | try: 43 | return x.grid 44 | except AttributeError: 45 | return x 46 | 47 | 48 | class Grid(object): 49 | """A multidimensional grid object with origin and grid spacings. 50 | 51 | :class:`Grid` objects can be used in arithmetical calculations 52 | just like numpy arrays *if* they are compatible, i.e., they have 53 | the same shapes and lengths. In order to make arrays compatible, 54 | they an be resampled (:meth:`resample`) on a common grid. 55 | 56 | The attribute :attr:`grid` that holds the data is a standard numpy 57 | array and so the data can be directly manipulated. 58 | 59 | Data can be read from a number of molecular volume/density formats 60 | and written out in different formats with :meth:`export`. 61 | 62 | 63 | Parameters 64 | ---------- 65 | grid : numpy.ndarray or str (optional) 66 | Build the grid either from a histogram or density (a numpy nD 67 | array) or read data from a filename. 68 | 69 | edges : list (optional) 70 | List of arrays, the lower and upper bin edges along the axes 71 | (same as the output by :func:`numpy.histogramdd`) 72 | 73 | origin : :class:`numpy.ndarray` (optional) 74 | Cartesian coordinates of the center of grid position at index 75 | ``[0, 0, ..., 0]``. 76 | 77 | delta : :class:`numpy.ndarray` (optional) 78 | Either ``n x n`` array containing the cell lengths in each dimension, 79 | or ``n x 1`` array for rectangular arrays. 80 | 81 | metadata : dict (optional) 82 | A user defined dictionary of arbitrary key/value pairs 83 | associated with the density; the class does not touch 84 | :attr:`metadata` but stores it with :meth:`save` 85 | 86 | interpolation_spline_order : int (optional) 87 | Order of interpolation function for resampling with 88 | :func:`resample`; cubic splines = 3 and the default is 3 89 | 90 | file_format : str (optional) 91 | Name of the file format; only necessary when `grid` is a 92 | filename (see :meth:`load`) and autodetection of the file 93 | format fails. The default is ``None`` and normally the file 94 | format is guessed from the file extension. 95 | 96 | Raises 97 | ------ 98 | TypeError 99 | If the dimensions of the various input data do not agree with 100 | each other. 101 | ValueError 102 | If some of the required data are not provided in the keyword 103 | arguments, e.g., if only the `grid` is supplied as an array but 104 | not the `edges` or only `grid` and one of `origin` and `delta`. 105 | NotImplementedError 106 | If triclinic (non-orthorhombic) boxes are supplied in `delta` 107 | 108 | .. Note:: `delta` can only be a 1D array of length :attr:`grid.ndim` 109 | 110 | 111 | Attributes 112 | ---------- 113 | grid : :class:`numpy.ndarray` 114 | This array can be any number of dimensions supported by NumPy 115 | in order to represent high-dimensional data. When used with 116 | data that represents real space densities then the **axis 117 | convention in GridDataFormats** is that axis 0 corresponds to 118 | the Cartesian :math:`x` component, axis 1 corresponds to the 119 | :math:`y` component, and axis 2 to the :math:`z` component. 120 | 121 | delta : :class:`numpy.ndarray` 122 | Length of a grid cell (spacing or voxelsize) in :math:`x`, 123 | :math:`y`, :math:`z` dimensions. This is a *1D array* with 124 | length :attr:`Grid.grid.ndim`. 125 | 126 | origin : :class:`numpy.ndarray` 127 | Array with the Cartesian coordinates of the coordinate system 128 | origin, the *center* of cell ``Grid.grid[0, 0, .., 0]``. 129 | 130 | edges : list 131 | List of arrays, one for each axis in :attr:`grid`. Each 1D edge 132 | array describes the *edges* of the grid cells along the 133 | corresponding axis. The length of an edge array for axis ``i`` 134 | is ``grid.shape[i] + 1`` because it contains the lower boundary 135 | for the first cell, the boundaries between all grid cells, and 136 | the upper boundary for the last cell. The edges are assumed to 137 | be regular with spacing indicated in :attr:`delta`, namely 138 | ``Grid.delta[i]`` for axis ``i``. 139 | 140 | midpoints : list 141 | List of arrays, one for each axis in :attr:`grid`. Each 1D 142 | midpoints array contains the *midpoints* of the grid cells along 143 | the corresponding axis. 144 | 145 | metadata : dict 146 | A user-defined dictionary that can be used to annotate the 147 | data. The content is not touched by :class:`Grid`. It is saved 148 | together with the other data with :meth:`save`. 149 | 150 | 151 | Example 152 | ------- 153 | Create a Grid object from data. 154 | 155 | From :func:`numpy.histogramdd`:: 156 | 157 | grid, edges = numpy.histogramdd(...) 158 | g = Grid(grid, edges=edges) 159 | 160 | From an arbitrary grid:: 161 | 162 | g = Grid(grid, origin=origin, delta=delta) 163 | 164 | From a saved file:: 165 | 166 | g = Grid(filename) 167 | 168 | or :: 169 | 170 | g = Grid() 171 | g.load(filename) 172 | 173 | 174 | Notes 175 | ----- 176 | In principle, the dimension (number of axes) is arbitrary but in 177 | practice many formats only support three and almost all 178 | functionality is only tested for this special case. 179 | 180 | The :meth:`export` method with ``format='dx'`` always exports a 3D 181 | object. Other methods might work for an array of any dimension (in 182 | particular the Python pickle output). 183 | 184 | 185 | .. versionchanged:: 0.5.0 186 | New *file_format* keyword argument. 187 | 188 | .. versionchanged:: 0.7.0 189 | CCP4 files are now read with :class:`gridData.mrc.MRC` and not anymore 190 | with the deprecated/buggy `ccp4.CCP4` 191 | 192 | """ 193 | 194 | #: Default format for exporting with :meth:`export`. 195 | default_format = 'DX' 196 | 197 | def __init__(self, grid=None, edges=None, origin=None, delta=None, 198 | metadata=None, interpolation_spline_order=3, 199 | file_format=None): 200 | # file formats are guessed from extension == lower case key 201 | self._exporters = { 202 | 'DX': self._export_dx, 203 | 'PKL': self._export_python, 204 | 'PICKLE': self._export_python, # compatibility 205 | 'PYTHON': self._export_python, # compatibility 206 | } 207 | self._loaders = { 208 | 'CCP4': self._load_mrc, 209 | 'MRC': self._load_mrc, 210 | 'DX': self._load_dx, 211 | 'PLT': self._load_plt, 212 | 'PKL': self._load_python, 213 | 'PICKLE': self._load_python, # compatibility 214 | 'PYTHON': self._load_python, # compatibility 215 | } 216 | 217 | self.metadata = metadata if metadata is not None else {} 218 | self.__interpolated = None # cache for interpolated grid 219 | self.__interpolation_spline_order = interpolation_spline_order 220 | self.interpolation_cval = None # default to using min(grid) 221 | 222 | if grid is not None: 223 | if isinstance(grid, str): 224 | # can probably safely try to load() it... 225 | filename = grid 226 | else: 227 | try: 228 | # Can we read this as a file? 229 | # Use str(x) to work with py.path.LocalPath and pathlib.Path instances 230 | # even for Python < 3.6 231 | with open(str(grid), 'rb'): 232 | pass 233 | except (OSError, IOError): 234 | # no, this is probably an array-like thingy 235 | filename = None 236 | else: 237 | # yes, let's use it as a file 238 | filename = str(grid) 239 | 240 | if filename is not None: 241 | self.load(filename, file_format=file_format) 242 | else: 243 | self._load(grid, edges, metadata, origin, delta) 244 | 245 | @property 246 | def interpolation_spline_order(self): 247 | """Order of the B-spline interpolation of the data. 248 | 249 | 3 = cubic; 4 & 5 are also supported 250 | 251 | Only choose values that are acceptable to 252 | :func:`scipy.ndimage.spline_filter`! 253 | 254 | See Also 255 | -------- 256 | interpolated 257 | """ 258 | 259 | return self.__interpolation_spline_order 260 | 261 | @interpolation_spline_order.setter 262 | def interpolation_spline_order(self, x): 263 | """Setting the ``interpolation_spline_order`` updates :func:`interpolated` 264 | 265 | Because we cache the interpolation function, we need to rebuild the 266 | cache whenever the interpolation order changes: this is 267 | handled by :meth:`_update` 268 | 269 | """ 270 | self.__interpolation_spline_order = x 271 | self._update() 272 | 273 | def resample(self, edges): 274 | """Resample data to a new grid with edges *edges*. 275 | 276 | This method creates a new grid with the data from the current 277 | grid resampled to a regular grid specified by `edges`. The 278 | order of the interpolation is set by 279 | :attr:`Grid.interpolation_spline_order`: change the value 280 | *before* calling :meth:`resample`. 281 | 282 | Parameters 283 | ---------- 284 | edges : tuple of arrays or Grid 285 | edges of the new grid or a :class:`Grid` instance that 286 | provides :attr:`Grid.edges` 287 | 288 | Returns 289 | ------- 290 | Grid 291 | a new :class:`Grid` with the data interpolated over the 292 | new grid cells 293 | 294 | 295 | Examples 296 | -------- 297 | 298 | Providing `edges` (a tuple of three arrays, indicating the 299 | boundaries of each grid cell):: 300 | 301 | g = grid.resample(edges) 302 | 303 | As a convenience, one can also supply another :class:`Grid` as 304 | the argument for this method :: 305 | 306 | g = grid.resample(othergrid) 307 | 308 | and the edges are taken from :attr:`Grid.edges`. 309 | 310 | """ 311 | try: 312 | edges = edges.edges # can also supply another Grid 313 | except AttributeError: 314 | pass 315 | midpoints = self._midpoints(edges) 316 | coordinates = ndmeshgrid(*midpoints) 317 | # feed a meshgrid to generate all points 318 | newgrid = self.interpolated(*coordinates) 319 | return self.__class__(newgrid, edges) 320 | 321 | def resample_factor(self, factor): 322 | """Resample to a new regular grid. 323 | 324 | 325 | Parameters 326 | ---------- 327 | factor : float 328 | The number of grid cells are scaled with `factor` in each 329 | dimension, i.e., ``factor * N_i`` cells along each 330 | dimension i. Must be positive, and cannot result in fewer 331 | than 2 cells along a dimension. 332 | 333 | 334 | Returns 335 | ------- 336 | interpolated grid : Grid 337 | The resampled data are represented on a :class:`Grid` with the new 338 | grid cell sizes. 339 | 340 | See Also 341 | -------- 342 | resample 343 | 344 | 345 | .. versionchanged:: 0.6.0 346 | Previous implementations would not alter the range of the grid edges 347 | being resampled on. As a result, values at the grid edges would creep 348 | steadily inward. The new implementation recalculates the extent of 349 | grid edges for every resampling. 350 | 351 | """ 352 | if float(factor) <= 0: 353 | raise ValueError("Factor must be positive") 354 | # Determine current spacing 355 | spacing = (numpy.array(self._max_edges()) - numpy.array(self._min_edges())) / ( 356 | -1 + numpy.array(self._len_edges())) 357 | # First guess at the new spacing is inversely related to the 358 | # magnification factor. 359 | newspacing = spacing / float(factor) 360 | smidpoints = numpy.array(self._midpoints()) 361 | # We require that the new spacing result in an even subdivision of the 362 | # existing midpoints 363 | newspacing = (smidpoints[:, -1] - smidpoints[:, 0]) / (numpy.maximum( 364 | 1, numpy.floor((smidpoints[:, -1] - smidpoints[:, 0]) / newspacing))) 365 | # How many edge points should there be? It is the number of intervals 366 | # between midpoints + 2 367 | edgelength = 2 + \ 368 | numpy.round((smidpoints[:, -1] - smidpoints[:, 0]) / newspacing) 369 | edges = [numpy.linspace(start, stop, num=int(N), endpoint=True) for (start, stop, N) in zip( 370 | smidpoints[:, 0] - 0.5 * newspacing, smidpoints[:, -1] + 0.5 * newspacing, edgelength)] 371 | return self.resample(edges) 372 | 373 | def _update(self): 374 | """compute/update all derived data 375 | 376 | Can be called without harm and is idem-potent. 377 | 378 | Updates these attributes and methods: 379 | :attr:`origin` 380 | the center of the cell with index 0,0,0 381 | :attr:`midpoints` 382 | centre coordinate of each grid cell 383 | :meth:`interpolated` 384 | spline interpolation function that can generated a value for 385 | coordinate 386 | """ 387 | self.delta = numpy.array(list( 388 | map(lambda e: (e[-1] - e[0]) / (len(e) - 1), self.edges))) 389 | self.midpoints = self._midpoints(self.edges) 390 | self.origin = numpy.array(list(map(lambda m: m[0], self.midpoints))) 391 | if self.__interpolated is not None: 392 | # only update if we are using it 393 | self.__interpolated = self._interpolationFunctionFactory() 394 | 395 | @property 396 | def interpolated(self): 397 | """B-spline function over the data grid(x,y,z). 398 | 399 | The :func:`interpolated` function allows one to obtain data 400 | values for any values of the coordinates:: 401 | 402 | interpolated([x1,x2,...],[y1,y2,...],[z1,z2,...]) -> F[x1,y1,z1],F[x2,y2,z2],... 403 | 404 | The interpolation order is set in 405 | :attr:`Grid.interpolation_spline_order`. 406 | 407 | The interpolated function is computed once and is cached for better 408 | performance. Whenever :attr:`~Grid.interpolation_spline_order` is 409 | modified, :meth:`Grid.interpolated` is recomputed. 410 | 411 | The value for unknown data is set in :attr:`Grid.interpolation_cval` 412 | (TODO: also recompute when ``interpolation_cval`` value is changed.) 413 | 414 | Example 415 | ------- 416 | Example usage for resampling:: 417 | 418 | XX, YY, ZZ = numpy.mgrid[40:75:0.5, 96:150:0.5, 20:50:0.5] 419 | FF = interpolated(XX, YY, ZZ) 420 | 421 | Note 422 | ---- 423 | Values are interpolated with a spline function. It is possible 424 | that the spline will generate values that would not normally 425 | appear in the data. For example, a density is non-negative but 426 | a cubic spline interpolation can generate negative values, 427 | especially at the boundary between 0 and high values. 428 | 429 | Internally, the function uses :func:`scipy.ndimage.map_coordinates` 430 | with ``mode="constant"`` whereby interpolated values outside 431 | the interpolated grid are determined by filling all values beyond 432 | the edge with the same constant value, defined by the 433 | :attr:`interpolation_cval` parameter, which when not set defaults 434 | to the minimum value in the interpolated grid. 435 | 436 | 437 | .. versionchanged:: 0.6.0 438 | Interpolation outside the grid is now performed with 439 | ``mode="constant"`` rather than ``mode="nearest"``, eliminating 440 | extruded volumes when interpolating beyond the grid. 441 | 442 | """ 443 | if self.__interpolated is None: 444 | self.__interpolated = self._interpolationFunctionFactory() 445 | return self.__interpolated 446 | 447 | def _map_edges(self, func, edges=None): 448 | if edges is None: 449 | edges = self.edges 450 | return [func(e) for e in edges] 451 | 452 | def _midpoints(self, edges=None): 453 | return self._map_edges(lambda e: 0.5 * (e[:-1] + e[1:]), edges=edges) 454 | 455 | def _len_edges(self, edges=None): 456 | return self._map_edges(len, edges=edges) 457 | 458 | def _min_edges(self, edges=None): 459 | return self._map_edges(numpy.min, edges=edges) 460 | 461 | def _max_edges(self, edges=None): 462 | return self._map_edges(numpy.max, edges=edges) 463 | 464 | def _guess_format(self, filename, file_format=None, export=True): 465 | if export: 466 | available = self._exporters 467 | else: 468 | available = self._loaders 469 | if file_format is None: 470 | splitted = os.path.splitext(filename) 471 | if splitted[1][1:] in ('gz', ): 472 | file_format = os.path.splitext(splitted[0])[1][1:] 473 | else: 474 | file_format = splitted[1][1:] 475 | file_format = file_format.upper() 476 | if not file_format: 477 | file_format = self.default_format 478 | if file_format not in available: 479 | raise ValueError( 480 | "File format {} not available, choose one of {}".format( 481 | file_format, available.keys())) 482 | return file_format 483 | 484 | def _get_exporter(self, filename, file_format=None): 485 | return self._exporters[self._guess_format(filename, 486 | file_format=file_format, 487 | export=True)] 488 | 489 | def _get_loader(self, filename, file_format=None): 490 | return self._loaders[self._guess_format(filename, 491 | file_format=file_format, 492 | export=False)] 493 | 494 | def _load( 495 | self, 496 | grid=None, 497 | edges=None, 498 | metadata=None, 499 | origin=None, 500 | delta=None): 501 | if edges is not None: 502 | # set up from histogramdd-type data 503 | self.grid = numpy.asanyarray(grid) 504 | self.edges = edges 505 | self._update() 506 | elif origin is not None and delta is not None: 507 | # setup from generic data 508 | origin = numpy.asanyarray(origin) 509 | delta = numpy.asanyarray(delta) 510 | if len(origin) != grid.ndim: 511 | raise TypeError( 512 | "Dimension of origin is not the same as grid dimension.") 513 | if delta.shape == () and numpy.isreal(delta): 514 | delta = numpy.ones(grid.ndim) * delta 515 | elif delta.ndim > 1: 516 | raise NotImplementedError( 517 | "Non-rectangular grids are not supported.") 518 | elif len(delta) != grid.ndim: 519 | raise TypeError("delta should be scalar or array-like of" 520 | "len(grid.ndim)") 521 | # note that origin is CENTER so edges must be shifted by -0.5*delta 522 | self.edges = [origin[dim] + 523 | (numpy.arange(m + 1) - 0.5) * delta[dim] 524 | for dim, m in enumerate(grid.shape)] 525 | self.grid = numpy.asanyarray(grid) 526 | self._update() 527 | else: 528 | raise ValueError( 529 | "Wrong/missing data to set up Grid. Use Grid() or " 530 | "Grid(grid=, edges=) or " 531 | "Grid(grid=, origin=(x0, y0, z0), delta=(dx, dy, dz)):\n" 532 | "grid={0} edges={1} origin={2} delta={3}".format( 533 | grid, edges, origin, delta)) 534 | 535 | def load(self, filename, file_format=None): 536 | """Load saved grid and edges from `filename` 537 | 538 | The :meth:`load` method calls the class's constructor method and 539 | completely resets all values, based on the loaded data. 540 | """ 541 | filename = str(filename) 542 | if not os.path.exists(filename): 543 | # check before we try to detect the file type because 544 | # _guess_fileformat() does not work well with things that 545 | # are not really a file 546 | raise IOError(errno.ENOENT, "file not found", filename) 547 | loader = self._get_loader(filename, file_format=file_format) 548 | loader(filename) 549 | 550 | def _load_python(self, filename): 551 | with open(filename, 'rb') as f: 552 | saved = pickle.load(f) 553 | self._load(grid=saved['grid'], 554 | edges=saved['edges'], 555 | metadata=saved['metadata']) 556 | 557 | def _load_mrc(self, filename): 558 | """Initializes Grid from a MRC/CCP4 file.""" 559 | mrcfile = mrc.MRC(filename) 560 | grid, edges = mrcfile.histogramdd() 561 | self._load(grid=grid, edges=edges, metadata=self.metadata) 562 | # Store header for access from Grid object (undocumented) 563 | # https://github.com/MDAnalysis/GridDataFormats/pull/100#discussion_r782604833 564 | self._mrc_header = mrcfile.header.copy() 565 | 566 | def _load_dx(self, filename): 567 | """Initializes Grid from a OpenDX file.""" 568 | dx = OpenDX.field(0) 569 | dx.read(filename) 570 | grid, edges = dx.histogramdd() 571 | self._load(grid=grid, edges=edges, metadata=self.metadata) 572 | 573 | def _load_plt(self, filename): 574 | """Initialize Grid from gOpenMol plt file.""" 575 | g = gOpenMol.Plt() 576 | g.read(filename) 577 | grid, edges = g.histogramdd() 578 | self._load(grid=grid, edges=edges, metadata=self.metadata) 579 | 580 | def export(self, filename, file_format=None, type=None, typequote='"'): 581 | """export density to file using the given format. 582 | 583 | The format can also be deduced from the suffix of the filename 584 | although the `file_format` keyword takes precedence. 585 | 586 | The default format for :meth:`export` is 'dx'. Use 'dx' for 587 | visualization. 588 | 589 | Implemented formats: 590 | 591 | dx 592 | :mod:`OpenDX` 593 | pickle 594 | pickle (use :meth:`Grid.load` to restore); :meth:`Grid.save` 595 | is simpler than ``export(format='python')``. 596 | 597 | Parameters 598 | ---------- 599 | filename : str 600 | name of the output file 601 | 602 | file_format : {'dx', 'pickle', None} (optional) 603 | output file format, the default is "dx" 604 | 605 | type : str (optional) 606 | for DX, set the output DX array type, e.g., "double" or "float". 607 | By default (``None``), the DX type is determined from the numpy 608 | dtype of the array of the grid (and this will typically result in 609 | "double"). 610 | 611 | .. versionadded:: 0.4.0 612 | 613 | typequote : str (optional) 614 | For DX, set the character used to quote the type string; 615 | by default this is a double-quote character, '"'. 616 | Custom parsers like the one from NAMD-GridForces (backend for MDFF) 617 | expect no quotes, and typequote='' may be used to appease them. 618 | 619 | .. versionadded:: 0.5.0 620 | 621 | """ 622 | filename = str(filename) 623 | exporter = self._get_exporter(filename, file_format=file_format) 624 | exporter(filename, type=type, typequote=typequote) 625 | 626 | # note: the _export_FORMAT() methods all take the filename as a mandatory 627 | # argument. They can process kwargs but they are not required to do 628 | # so. However, they must ignore any kwargs that they are not processing. 629 | 630 | def _export_python(self, filename, **kwargs): 631 | """Pickle the Grid object 632 | 633 | The object is dumped as a dictionary with grid and edges: This 634 | is sufficient to recreate the grid object with ``__init__()``. 635 | """ 636 | data = dict(grid=self.grid, edges=self.edges, metadata=self.metadata) 637 | with open(filename, 'wb') as f: 638 | pickle.dump(data, f, pickle.HIGHEST_PROTOCOL) 639 | 640 | def _export_dx(self, filename, type=None, typequote='"', **kwargs): 641 | """Export the density grid to an OpenDX file. 642 | 643 | The file format is the simplest regular grid array and it is 644 | also understood by VMD's and Chimera's DX reader; PyMOL 645 | requires the dx `type` to be set to "double". 646 | 647 | For the file format see 648 | http://opendx.sdsc.edu/docs/html/pages/usrgu068.htm#HDREDF 649 | 650 | """ 651 | root, ext = os.path.splitext(filename) 652 | filename = root + '.dx' 653 | 654 | comments = [ 655 | 'OpenDX density file written by gridDataFormats.Grid.export()', 656 | 'File format: http://opendx.sdsc.edu/docs/html/pages/usrgu068.htm#HDREDF', 657 | 'Data are embedded in the header and tied to the grid positions.', 658 | 'Data is written in C array order: In grid[x,y,z] the axis z is fastest', 659 | 'varying, then y, then finally x, i.e. z is the innermost loop.'] 660 | 661 | # write metadata in comments section 662 | if self.metadata: 663 | comments.append('Meta data stored with the python Grid object:') 664 | for k in self.metadata: 665 | comments.append(' ' + str(k) + ' = ' + str(self.metadata[k])) 666 | comments.append( 667 | '(Note: the VMD dx-reader chokes on comments below this line)') 668 | 669 | components = dict( 670 | positions=OpenDX.gridpositions(1, self.grid.shape, self.origin, 671 | self.delta), 672 | connections=OpenDX.gridconnections(2, self.grid.shape), 673 | data=OpenDX.array(3, self.grid, type=type, typequote=typequote), 674 | ) 675 | dx = OpenDX.field('density', components=components, comments=comments) 676 | if ext == '.gz': 677 | filename = root + ext 678 | dx.write(filename) 679 | 680 | def save(self, filename): 681 | """Save a grid object to `filename` and add ".pickle" extension. 682 | 683 | Internally, this calls 684 | ``Grid.export(filename, format="python")``. A grid can be 685 | regenerated from the saved data with :: 686 | 687 | g = Grid(filename="grid.pickle") 688 | 689 | .. note:: 690 | The pickle format depends on the Python version and 691 | therefore it is not guaranteed that a grid saved with, say, 692 | Python 2.7 can also be read with Python 3.5. The OpenDX format 693 | is a better alternative for portability. 694 | 695 | """ 696 | self.export(filename, file_format="pickle") 697 | 698 | def centers(self): 699 | """Returns the coordinates of the centers of all grid cells as an iterator. 700 | 701 | 702 | See Also 703 | -------- 704 | :func:`numpy.ndindex` 705 | """ 706 | for idx in numpy.ndindex(self.grid.shape): 707 | yield self.delta * numpy.array(idx) + self.origin 708 | 709 | def check_compatible(self, other): 710 | """Check if `other` can be used in an arithmetic operation. 711 | 712 | `other` is compatible if 713 | 714 | 1) `other` is a scalar 715 | 2) `other` is a grid defined on the same edges 716 | 717 | In order to make `other` compatible, resample it on the same 718 | grid as this one using :meth:`resample`. 719 | 720 | Parameters 721 | ---------- 722 | other : Grid or float or int 723 | Another object to be used for standard arithmetic 724 | operations with this :class:`Grid` 725 | 726 | Raises 727 | ------ 728 | TypeError 729 | if not compatible 730 | 731 | See Also 732 | -------- 733 | :meth:`resample` 734 | """ 735 | 736 | if not (numpy.isreal(other) or self == other): 737 | raise TypeError( 738 | "The argument cannot be arithmetically combined with the grid. " 739 | "It must be a scalar or a grid with identical edges. " 740 | "Use Grid.resample(other.edges) to make a new grid that is " 741 | "compatible with other.") 742 | return True 743 | 744 | def _interpolationFunctionFactory(self, spline_order=None, cval=None): 745 | """Returns a function F(x,y,z) that interpolates any values on the grid. 746 | 747 | _interpolationFunctionFactory(self,spline_order=3,cval=None) --> F 748 | 749 | *cval* is set to :meth:`Grid.grid.min`. *cval* cannot be chosen too 750 | large or too small or NaN because otherwise the spline interpolation 751 | breaks down near that region and produces wild oscillations. 752 | 753 | .. Note:: Only correct for equally spaced values (i.e. regular edges with 754 | constant delta). 755 | .. SeeAlso:: http://www.scipy.org/Cookbook/Interpolation 756 | """ 757 | # for scipy >=0.9: should use scipy.interpolate.griddata 758 | # http://docs.scipy.org/doc/scipy/reference/generated/scipy.interpolate.griddata.html#scipy.interpolate.griddata 759 | # (does it work for nD?) 760 | import scipy.ndimage 761 | 762 | if spline_order is None: 763 | # must be compatible with whatever 764 | # :func:`scipy.ndimage.spline_filter` takes. 765 | spline_order = self.interpolation_spline_order 766 | if cval is None: 767 | cval = self.interpolation_cval 768 | 769 | data = self.grid 770 | if cval is None: 771 | cval = data.min() 772 | try: 773 | # masked arrays, fill with min: should keep spline happy 774 | _data = data.filled(cval) 775 | except AttributeError: 776 | _data = data 777 | 778 | coeffs = scipy.ndimage.spline_filter(_data, order=spline_order) 779 | x0 = self.origin 780 | dx = self.delta 781 | 782 | def _transform(cnew, c0, dc): 783 | return (numpy.atleast_1d(cnew) - c0) / dc 784 | 785 | def interpolatedF(*coordinates): 786 | """B-spline function over the data grid(x,y,z). 787 | 788 | interpolatedF([x1,x2,...],[y1,y2,...],[z1,z2,...]) -> F[x1,y1,z1],F[x2,y2,z2],... 789 | 790 | Example usage for resampling:: 791 | >>> XX,YY,ZZ = numpy.mgrid[40:75:0.5, 96:150:0.5, 20:50:0.5] 792 | >>> FF = _interpolationFunction(XX,YY,ZZ) 793 | """ 794 | _coordinates = numpy.array( 795 | [_transform(coordinates[i], x0[i], dx[i]) for i in range(len( 796 | coordinates))]) 797 | return scipy.ndimage.map_coordinates(coeffs, 798 | _coordinates, 799 | prefilter=False, 800 | mode='constant', 801 | cval=cval) 802 | return interpolatedF 803 | 804 | def __eq__(self, other): 805 | if not isinstance(other, Grid): 806 | return False 807 | return numpy.all( 808 | other.grid == self.grid) and numpy.all( 809 | other.origin == self.origin) and numpy.all( 810 | numpy.all( 811 | other_edge == self_edge) for other_edge, 812 | self_edge in zip( 813 | other.edges, 814 | self.edges)) 815 | 816 | def __ne__(self, other): 817 | return not self.__eq__(other) 818 | 819 | def __add__(self, other): 820 | self.check_compatible(other) 821 | return self.__class__(self.grid + _grid(other), edges=self.edges) 822 | 823 | def __sub__(self, other): 824 | self.check_compatible(other) 825 | return self.__class__(self.grid - _grid(other), edges=self.edges) 826 | 827 | def __mul__(self, other): 828 | self.check_compatible(other) 829 | return self.__class__(self.grid * _grid(other), edges=self.edges) 830 | 831 | def __truediv__(self, other): 832 | self.check_compatible(other) 833 | return self.__class__(self.grid / _grid(other), edges=self.edges) 834 | 835 | def __floordiv__(self, other): 836 | self.check_compatible(other) 837 | return self.__class__(self.grid // _grid(other), edges=self.edges) 838 | 839 | def __pow__(self, other): 840 | self.check_compatible(other) 841 | return self.__class__( 842 | numpy.power( 843 | self.grid, 844 | _grid(other)), 845 | edges=self.edges) 846 | 847 | def __radd__(self, other): 848 | self.check_compatible(other) 849 | return self.__class__(_grid(other) + self.grid, edges=self.edges) 850 | 851 | def __rsub__(self, other): 852 | self.check_compatible(other) 853 | return self.__class__(_grid(other) - self.grid, edges=self.edges) 854 | 855 | def __rmul__(self, other): 856 | self.check_compatible(other) 857 | return self.__class__(_grid(other) * self.grid, edges=self.edges) 858 | 859 | def __rtruediv__(self, other): 860 | self.check_compatible(other) 861 | return self.__class__(_grid(other) / self.grid, edges=self.edges) 862 | 863 | def __rfloordiv__(self, other): 864 | self.check_compatible(other) 865 | return self.__class__(_grid(other) // self.grid, edges=self.edges) 866 | 867 | def __rpow__(self, other): 868 | self.check_compatible(other) 869 | return self.__class__( 870 | numpy.power( 871 | _grid(other), 872 | self.grid), 873 | edges=self.edges) 874 | 875 | def __repr__(self): 876 | try: 877 | bins = self.grid.shape 878 | except AttributeError: 879 | bins = "no" 880 | return '<{0} with {1!r} bins>'.format(self.__class__, bins) 881 | 882 | 883 | def ndmeshgrid(*arrs): 884 | """Return a mesh grid for N dimensions. 885 | 886 | The input are N arrays, each of which contains the values along one axis of 887 | the coordinate system. The arrays do not have to have the same number of 888 | entries. The function returns arrays that can be fed into numpy functions 889 | so that they produce values for *all* points spanned by the axes *arrs*. 890 | 891 | Original from 892 | http://stackoverflow.com/questions/1827489/numpy-meshgrid-in-3d and fixed. 893 | 894 | .. SeeAlso: :func:`numpy.meshgrid` for the 2D case. 895 | """ 896 | # arrs = tuple(reversed(arrs)) <-- wrong on stackoverflow.com 897 | arrs = tuple(arrs) 898 | lens = list(map(len, arrs)) 899 | dim = len(arrs) 900 | 901 | sz = 1 902 | for s in lens: 903 | sz *= s 904 | 905 | ans = [] 906 | for i, arr in enumerate(arrs): 907 | slc = [1] * dim 908 | slc[i] = lens[i] 909 | arr2 = numpy.asanyarray(arr).reshape(slc) 910 | for j, sz in enumerate(lens): 911 | if j != i: 912 | arr2 = arr2.repeat(sz, axis=j) 913 | ans.append(arr2) 914 | 915 | return tuple(ans) 916 | -------------------------------------------------------------------------------- /gridData/gOpenMol.py: -------------------------------------------------------------------------------- 1 | # gridDataFormats --- python modules to read and write gridded data 2 | # Copyright (c) 2009-2014 Oliver Beckstein 3 | # Released under the GNU Lesser General Public License, version 3 or later. 4 | # 5 | # Part of the documentation and format specification: Copyright CSC, 2005 6 | 7 | """ 8 | :mod:`gOpenMol` --- the gOpenMol plt format 9 | =========================================== 10 | 11 | .. _gOpenMol: http://www.csc.fi/english/pages/g0penMol 12 | 13 | The module provides a simple implementation of a reader for gOpenMol_ 14 | *plt* files. Plt files are binary files. The :class:`Plt` reader tries 15 | to guess the endianess of the file, but this can fail (with a 16 | :exc:`TypeError`); you are on your own in this case. 17 | 18 | Only the reader is implemented. If you want to write gridded data use a format 19 | that is more standard, such as OpenDX (see :mod:`OpenDX`). 20 | 21 | 22 | Background 23 | ---------- 24 | 25 | gOpenMol http://www.csc.fi/english/pages/g0penMol plt format. 26 | 27 | Used to be documented at http://www.csc.fi/gopenmol/developers/plt_format.phtml but currently this is only accessible through the internet archive at 28 | http://web.archive.org/web/20061011125817/http://www.csc.fi/gopenmol/developers/plt_format.phtml 29 | 30 | 31 | 32 | Grid data plt file format 33 | ------------------------- 34 | 35 | Copyright CSC, 2005. Last modified: September 23, 2003 09:18:50 36 | 37 | Plot file (plt) format The plot files are regular 3D grid files for plotting of 38 | molecular orbitals, electron densities or other molecular properties. The plot 39 | files are produced by several programs. It is also possible to format/unformat 40 | plot files using the pltfile program in the utility directory. It is also 41 | possible to produce plot files with external (own) programs. Produce first a 42 | formatted text file and use then the pltfile program to unformat the file for 43 | gOpenMol. The format for the plot files are very simple and a description of 44 | the format can be found elsewhere in this manual. gOpenMol can read binary plot 45 | files from different hardware platforms independent of the system type (little 46 | or big endian machines). 47 | 48 | Format of the binary ``*.plt`` file 49 | ................................... 50 | 51 | The ``*.plt`` file binary and formatted file formats are very simple but please 52 | observe that unformatted files written with a FORTRAN program are not pure 53 | binary files because there are file records between the values while pure 54 | binary files do not have any records between the values. gOpenMol should be 55 | able to figure out if the file is pure binary or FORTRAN unformatted but it is 56 | not very well tested. 57 | 58 | Binary ``*.plt`` (grid) file format 59 | ................................... 60 | 61 | Record number and meaning:: 62 | 63 | #1: Integer, rank value must always be = 3 64 | #2: Integer, possible values are 1 ... 50. This value is not used but 65 | it can be used to define the type of surface! 66 | Values used (you can use your own value between 1... 50): 67 | 68 | 1: VSS surface 69 | 2: Orbital/density surface 70 | 3: Probe surface 71 | 200: Gaussian 94/98 72 | 201: Jaguar 73 | 202: Gamess 74 | 203: AutoDock 75 | 204: Delphi/Insight 76 | 205: Grid 77 | 78 | Value 100 is reserved for grid data coming from OpenMol! 79 | 80 | #3: Integer, number of points in z direction 81 | #4: Integer, number of points in y direction 82 | #5: Integer, number of points in x direction 83 | #6: Float, zmin value 84 | #7: Float, zmax value 85 | #8: Float, ymin value 86 | #9: Float, ymax value 87 | #10: Float, xmin value 88 | #11: Float, xmax value 89 | #12 ... Float, grid data values running (x is inner loop, then y and last z): 90 | 91 | 1. Loop in the z direction 92 | 2. Loop in the y direction 93 | 3. Loop in the x direction 94 | 95 | Example:: 96 | 97 | nx=2 ny=1 nz=3 98 | 99 | 0,0,0 1,0,0 y=0, z=0 100 | 0,0,1 1,0,0 y=0, z=1 101 | 0,0,2 1,0,2 y=0, z=2 102 | 103 | The formatted (the first few lines) file can look like:: 104 | 105 | 3 2 106 | 65 65 65 107 | -3.300000e+001 3.200000e+001 -3.300000e+001 3.200000e+001 -3.300000e+001 3.200000e+001 108 | -1.625609e+001 -1.644741e+001 -1.663923e+001 -1.683115e+001 -1.702274e+001 -1.721340e+001 109 | -1.740280e+001 -1.759018e+001 -1.777478e+001 -1.795639e+001 -1.813387e+001 -1.830635e+001 110 | ... 111 | 112 | Formatted ``*.plt`` (grid) file format 113 | ...................................... 114 | 115 | Line numbers and variables on the line:: 116 | 117 | line #1: Integer, Integer. Rank and type of surface (rank is always = 3) 118 | line #2: Integer, Integer, Integer. Zdim, Ydim, Xdim (number of points in the z,y,x directions) 119 | line #3: Float, Float, Float, Float, Float, Float. Zmin, Zmax, Ymin, Ymax, Xmin,Xmax (min and max values) 120 | line #4: ... Float. Grid data values running (x is inner loop, then y and last z) with one or several values per line: 121 | 122 | 1. Loop in the z direction 123 | 2. Loop in the y direction 124 | 3. Loop in the x direction 125 | 126 | Classes 127 | ------- 128 | 129 | """ 130 | import warnings 131 | import struct 132 | import numpy 133 | 134 | class Record(object): 135 | def __init__(self, key, bintype, values=None): 136 | self.key = key 137 | self.bintype = bintype 138 | self.values = values # dict(value='comment', ...) 139 | def is_legal(self, value): 140 | if self.values is None: 141 | return True 142 | return value in self.values 143 | def is_legal_dict(self, d): 144 | return self.is_legal(d[self.key]) 145 | def __repr__(self): 146 | return "Record(%(key)r,%(bintype)r,...)" % vars(self) 147 | 148 | class Plt(object): 149 | """A class to represent a gOpenMol_ plt file. 150 | 151 | Only reading is implemented; either supply a filename to the constructor 152 | >>> G = Plt(filename) 153 | or load the file with the read method 154 | >>> G = Plt() 155 | >>> G.read(filename) 156 | 157 | The data is held in :attr:`GOpenMol.array` and all header information is in 158 | the dict :attr:`GOpenMol.header`. 159 | 160 | :attr:`Plt.shape` 161 | D-tuplet describing size in each dimension 162 | :attr:`Plt.origin` 163 | coordinates of the centre of the grid cell with index 0,0,...,0 164 | :attr:`Plt.delta` 165 | DxD array describing the deltas 166 | 167 | """ 168 | 169 | _header_struct = (Record('rank', 'I', {3:'dimension'}), 170 | Record('surface','I', {1: 'VSS surface', 171 | 2: 'Orbital/density surface', 172 | 3: 'Probe surface', 173 | 42: 'gridcount', 174 | 100: 'OpenMol', 175 | 200: 'Gaussian 94/98', 176 | 201: 'Jaguar', 177 | 202: 'Gamess', 178 | 203: 'AutoDock', 179 | 204: 'Delphi/Insight', 180 | 205: 'Grid', 181 | }), # update in init with all user defined values 182 | Record('nz', 'I'), 183 | Record('ny', 'I'), 184 | Record('nx', 'I'), 185 | Record('zmin', 'f'), 186 | Record('zmax', 'f'), 187 | Record('ymin', 'f'), 188 | Record('ymax', 'f'), 189 | Record('xmin', 'f'), 190 | Record('xmax', 'f')) 191 | _data_bintype = 'f' # write(&value,sizeof(float),1L,output); 192 | 193 | def __init__(self, filename=None): 194 | self.filename = str(filename) 195 | # fix header_struct because I cannot do {...}.update() 196 | rec_surf = [r for r in self._header_struct if r.key == 'surface'][0] 197 | rec_surf.values.update(dict((k,'user-defined') for k in range(4,51) if k != 42)) 198 | # assemble format 199 | self._headerfmt = "".join([r.bintype for r in self._header_struct]) 200 | 201 | if not filename is None: 202 | self.read(filename) 203 | 204 | def read(self, filename): 205 | """Populate the instance from the plt file *filename*.""" 206 | from struct import calcsize, unpack 207 | if not filename is None: 208 | self.filename = str(filename) 209 | with open(self.filename, 'rb') as plt: 210 | h = self.header = self._read_header(plt) 211 | nentries = h['nx'] * h['ny'] * h['nz'] 212 | # quick and dirty... slurp it all in one go 213 | datafmt = h['bsaflag']+str(nentries)+self._data_bintype 214 | a = numpy.array(unpack(datafmt, plt.read(calcsize(datafmt)))) 215 | self.header['filename'] = self.filename 216 | self.array = a.reshape(h['nz'], h['ny'], h['nx']).transpose() # unpack plt in reverse!! 217 | self.delta = self._delta() 218 | self.origin = numpy.array([h['xmin'], h['ymin'], h['zmin']]) + 0.5*numpy.diagonal(self.delta) 219 | self.rank = h['rank'] 220 | 221 | @property 222 | def shape(self): 223 | return self.array.shape 224 | 225 | @property 226 | def edges(self): 227 | """Edges of the grid cells, origin at centre of 0,0,..,0 grid cell. 228 | 229 | Only works for regular, orthonormal grids. 230 | """ 231 | return [self.delta[d,d] * numpy.arange(self.shape[d]+1) + self.origin[d]\ 232 | - 0.5*self.delta[d,d] for d in range(self.rank)] 233 | 234 | def _delta(self): 235 | h = self.header 236 | qmin = numpy.array([h['xmin'],h['ymin'],h['zmin']]) 237 | qmax = numpy.array([h['xmax'],h['ymax'],h['zmax']]) 238 | delta = numpy.abs(qmax - qmin) / self.shape 239 | return numpy.diag(delta) 240 | 241 | def _read_header(self, pltfile): 242 | """Read header bytes, try all possibilities for byte order/size/alignment.""" 243 | nheader = struct.calcsize(self._headerfmt) 244 | names = [r.key for r in self._header_struct] 245 | binheader = pltfile.read(nheader) 246 | def decode_header(bsaflag='@'): 247 | h = dict(zip(names, struct.unpack(bsaflag+self._headerfmt, binheader))) 248 | h['bsaflag'] = bsaflag 249 | return h 250 | for flag in '@=<>': 251 | # try all endinaness and alignment options until we find something that looks sensible 252 | header = decode_header(flag) 253 | if header['rank'] == 3: 254 | break # only legal value according to spec 255 | header = None 256 | if header is None: 257 | raise TypeError("Cannot decode header --- corrupted or wrong format?") 258 | for rec in self._header_struct: 259 | if not rec.is_legal_dict(header): 260 | warnings.warn("Key %s: Illegal value %r" % (rec.key, header[rec.key])) 261 | return header 262 | 263 | def histogramdd(self): 264 | """Return array data as (edges,grid), i.e. a numpy nD histogram.""" 265 | return (self.array, self.edges) 266 | -------------------------------------------------------------------------------- /gridData/mrc.py: -------------------------------------------------------------------------------- 1 | # gridDataFormats --- python modules to read and write gridded data 2 | # Copyright (c) 2009-2021 Oliver Beckstein 3 | # Released under the GNU Lesser General Public License, version 3 or later. 4 | # 5 | 6 | """:mod:`mrc` --- the MRC/CCP4 volumetric data format 7 | =================================================== 8 | 9 | .. versionadded:: 0.7.0 10 | 11 | Reading of MRC/CCP4 volumetric files (`MRC2014 file format`_) using 12 | the mrcfile_ library [Burnley2017]_. 13 | 14 | .. _mrcfile: https://mrcfile.readthedocs.io/ 15 | .. _`MRC2014 file format`: http://www.ccpem.ac.uk/mrc_format/mrc2014.php 16 | 17 | 18 | References 19 | ---------- 20 | 21 | .. [Burnley2017] Burnley T, Palmer C and Winn M (2017) Recent 22 | developments in the CCP-EM software suite. *Acta 23 | Cryst.* D73:469-477. doi: `10.1107/S2059798317007859`_ 24 | 25 | 26 | .. _`10.1107/S2059798317007859`: https://doi.org/10.1107/S2059798317007859 27 | 28 | Classes 29 | ------- 30 | 31 | """ 32 | import numpy as np 33 | import mrcfile 34 | 35 | 36 | class MRC(object): 37 | """Represent a MRC/CCP4 file. 38 | 39 | Load `MRC/CCP4 2014 `_ 3D volumetric data with 40 | the mrcfile_ library. 41 | 42 | Parameters 43 | ---------- 44 | filename : str (optional) 45 | input file (or stream), can be compressed 46 | 47 | Raises 48 | ------ 49 | ValueError 50 | If the unit cell is not orthorhombic or if the data 51 | are not volumetric. 52 | 53 | 54 | Attributes 55 | ---------- 56 | header : numpy.recarray 57 | Header data from the MRC file as a numpy record array. 58 | 59 | array : numpy.ndarray 60 | Data as a 3-dimensional array where axis 0 corresponds to X, 61 | axis 1 to Y, and axis 2 to Z. This order is always enforced, 62 | regardless of the order in the mrc file. 63 | 64 | delta : numpy.ndarray 65 | Diagonal matrix with the voxel size in X, Y, and Z direction 66 | (taken from the :attr:`mrcfile.mrcfile.voxel_size` attribute) 67 | 68 | origin : numpy.ndarray 69 | numpy array with coordinates of the coordinate system origin 70 | (computed from :attr:`header.origin`, the offsets 71 | :attr:`header.origin.nxstart`, :attr:`header.origin.nystart`, 72 | :attr:`header.origin.nzstart` and the spacing :attr:`delta`) 73 | 74 | rank : int 75 | The integer 3, denoting that only 3D maps are read. 76 | 77 | 78 | Notes 79 | ----- 80 | * Only volumetric (3D) densities are read. 81 | * Only orthorhombic unitcells supported (other raise :exc:`ValueError`) 82 | * Only reading is currently supported. 83 | 84 | 85 | .. versionadded:: 0.7.0 86 | 87 | """ 88 | 89 | def __init__(self, filename=None): 90 | self.filename = filename 91 | if filename is not None: 92 | self.read(filename) 93 | 94 | def read(self, filename): 95 | """Populate the instance from the MRC/CCP4 file *filename*.""" 96 | if filename is not None: 97 | self.filename = filename 98 | with mrcfile.open(filename) as mrc: 99 | if not mrc.is_volume(): #pragma: no cover 100 | raise ValueError( 101 | "MRC file {} is not a volumetric density.".format(filename)) 102 | self.header = h = mrc.header.copy() 103 | # check for being orthorhombic 104 | if not np.allclose([h.cellb.alpha, h.cellb.beta, h.cellb.gamma], 105 | [90, 90, 90]): 106 | raise ValueError("Only orthorhombic unitcells are currently " 107 | "supported, not " 108 | "alpha={0}, beta={1}, gamma={2}".format( 109 | h.cellb.alpha, h.cellb.beta, h.cellb.gamma)) 110 | # mrc.data[z, y, x] indexed: convert to x,y,z as used in GridDataFormats 111 | # together with the axes orientation information in mapc/mapr/maps. 112 | # mapc, mapr, maps = 1, 2, 3 for Fortran-ordering and 3, 2, 1 for C-ordering. 113 | # Other combinations are possible. We reorder the data for the general case 114 | # by sorting mapc, mapr, maps in ascending order, i.e., to obtain x,y,z. 115 | # mrcfile provides the data in zyx shape (without regard to map*) so we first 116 | # transpose it to xyz and then reorient with axes_c_order. 117 | # 118 | # All other "xyz" quantitities are also reordered. 119 | axes_order = np.hstack([h.mapc, h.mapr, h.maps]) 120 | axes_c_order = np.argsort(axes_order) 121 | transpose_order = np.argsort(axes_order[::-1]) 122 | self.array = np.transpose(mrc.data, axes=transpose_order) 123 | self.delta = np.diag(np.array([mrc.voxel_size.x, mrc.voxel_size.y, mrc.voxel_size.z])) 124 | # the grid is shifted to the MRC origin by offset 125 | # (assume orthorhombic) 126 | offsets = np.hstack([h.nxstart, h.nystart, h.nzstart])[axes_c_order] * np.diag(self.delta) 127 | # GridData origin is centre of cell at x=col=0, y=row=0 z=seg=0 128 | self.origin = np.hstack([h.origin.x, h.origin.y, h.origin.z]) + offsets 129 | self.rank = 3 130 | 131 | @property 132 | def shape(self): 133 | """Shape of the :attr:`array`""" 134 | return self.array.shape 135 | 136 | @property 137 | def edges(self): 138 | """Edges of the grid cells, origin at centre of 0,0,0 grid cell. 139 | 140 | Only works for regular, orthonormal grids. 141 | """ 142 | # TODO: Add triclinic cell support. 143 | return [self.delta[d, d] * np.arange(self.shape[d] + 1) + 144 | self.origin[d] - 0.5 * self.delta[d, d] 145 | for d in range(self.rank)] 146 | 147 | def histogramdd(self): 148 | """Return array data as (edges,grid), i.e. a numpy nD histogram.""" 149 | return (self.array, self.edges) 150 | 151 | 152 | 153 | -------------------------------------------------------------------------------- /gridData/tests/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | -------------------------------------------------------------------------------- /gridData/tests/datafiles/1jzv.ccp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MDAnalysis/GridDataFormats/86a4109dea49a1edf08727b0888f307347d7873a/gridData/tests/datafiles/1jzv.ccp4 -------------------------------------------------------------------------------- /gridData/tests/datafiles/EMD-3001.map.bz2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MDAnalysis/GridDataFormats/86a4109dea49a1edf08727b0888f307347d7873a/gridData/tests/datafiles/EMD-3001.map.bz2 -------------------------------------------------------------------------------- /gridData/tests/datafiles/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import importlib.resources as importlib_resources 4 | 5 | __all__ = ["DX", "CCP4", "gOpenMol"] 6 | 7 | DX = importlib_resources.files(__name__) / 'test.dx' 8 | DXGZ = importlib_resources.files(__name__) / 'test.dx.gz' 9 | CCP4 = importlib_resources.files(__name__) / 'test.ccp4' 10 | # from http://www.ebi.ac.uk/pdbe/coordinates/files/1jzv.ccp4 11 | # (see issue #57) 12 | CCP4_1JZV = importlib_resources.files(__name__) / '1jzv.ccp4' 13 | # from https://github.com/ccpem/mrcfile/blob/master/tests/test_data/EMD-3001.map.bz2 14 | MRC_EMD3001 = importlib_resources.files(__name__) / 'EMD-3001.map.bz2' 15 | # water density around M2 TM helices of nAChR from MD simulations 16 | # [O. Beckstein and M. S. P. Sansom. Physical Biology 3(2):147-159, 2006] 17 | gOpenMol = importlib_resources.files(__name__) / 'nAChR_M2_water.plt' 18 | -------------------------------------------------------------------------------- /gridData/tests/datafiles/nAChR_M2_water.plt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MDAnalysis/GridDataFormats/86a4109dea49a1edf08727b0888f307347d7873a/gridData/tests/datafiles/nAChR_M2_water.plt -------------------------------------------------------------------------------- /gridData/tests/datafiles/test.ccp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MDAnalysis/GridDataFormats/86a4109dea49a1edf08727b0888f307347d7873a/gridData/tests/datafiles/test.ccp4 -------------------------------------------------------------------------------- /gridData/tests/datafiles/test.dx: -------------------------------------------------------------------------------- 1 | object 1 class gridpositions counts 2 2 2 2 | origin 20.1 3e0 -1e1 3 | delta 1.00 0 0 4 | delta 0 1.00 0 5 | delta 0 0 1.00 6 | object 2 class gridconnections counts 2 2 2 7 | object 3 class array type double rank 0 items 8 data follows 8 | 1.000 1.000 1.000 9 | 1.000 1.e-6 -1.e6 10 | 1.000 1.000 11 | -------------------------------------------------------------------------------- /gridData/tests/datafiles/test.dx.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MDAnalysis/GridDataFormats/86a4109dea49a1edf08727b0888f307347d7873a/gridData/tests/datafiles/test.dx.gz -------------------------------------------------------------------------------- /gridData/tests/test_dx.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from numpy.testing import assert_equal, assert_almost_equal 3 | 4 | import pytest 5 | 6 | import gridData.OpenDX 7 | from gridData import Grid 8 | 9 | from . import datafiles 10 | 11 | @pytest.mark.parametrize("infile", [datafiles.DX, datafiles.DXGZ]) 12 | def test_read_dx(infile): 13 | g = Grid(infile) 14 | POINTS = 8 15 | ref = np.ones(POINTS) 16 | ref[4] = 1e-6 17 | ref[5] = -1e+6 18 | assert_equal(g.grid.flat, ref) 19 | assert_equal(g.grid.size, POINTS) 20 | assert_equal(g.delta, np.ones(3)) 21 | assert_equal(g.origin, np.array([20.1, 3., -10.])) 22 | 23 | @pytest.mark.parametrize("outfile", ["grid.dx", "grid.dx.gz"]) 24 | @pytest.mark.parametrize("nptype,dxtype", [ 25 | ("float16", "float"), 26 | ("float32", "float"), 27 | ("float64", "double"), 28 | ("int64", "int"), 29 | ("int32", "int"), 30 | ("uint32", "unsigned int"), 31 | ("uint64", "unsigned int"), 32 | ("int16", "short"), 33 | ("uint16", "unsigned short"), 34 | ("int8", "signed byte"), 35 | ("uint8", "byte"), 36 | ]) 37 | def test_write_dx(tmpdir, nptype, dxtype, outfile, counts=100, ndim=3): 38 | # conversion from numpy array to DX file 39 | 40 | h, edges = np.histogramdd(np.random.random((counts, ndim)), bins=10) 41 | g = Grid(h, edges) 42 | 43 | # hack the grid to be a different dtype 44 | g.grid = g.grid.astype(nptype) 45 | 46 | assert_equal(g.grid.sum(), counts) 47 | 48 | with tmpdir.as_cwd(): 49 | g.export(outfile) 50 | g2 = Grid(outfile) 51 | 52 | # check that dxtype was written 53 | dx = gridData.OpenDX.field(0) 54 | dx.read(outfile) 55 | data = dx.components['data'] 56 | out_dxtype = data.type 57 | 58 | assert_almost_equal(g.grid, g2.grid, 59 | err_msg="written grid does not match original") 60 | assert_almost_equal( 61 | g.delta, g2.delta, 62 | decimal=6, 63 | err_msg="deltas of written grid do not match original") 64 | 65 | assert_equal(out_dxtype, dxtype) 66 | 67 | @pytest.mark.parametrize("outfile", ["grid.dx", "grid.dx.gz"]) 68 | @pytest.mark.parametrize('nptype', ("complex64", "complex128", "bool_")) 69 | @pytest.mark.filterwarnings("ignore:array dtype.name =") 70 | def test_write_dx_ValueError(tmpdir, nptype, outfile, counts=100, ndim=3): 71 | h, edges = np.histogramdd(np.random.random((counts, ndim)), bins=10) 72 | g = Grid(h, edges) 73 | 74 | # hack the grid to be a different dtype 75 | g.grid = g.grid.astype(nptype) 76 | 77 | with pytest.raises(ValueError): 78 | with tmpdir.as_cwd(): 79 | g.export(outfile) 80 | 81 | 82 | def test_delta_precision(tmpdir): 83 | '''Test if the delta has been written to the 7th significant figure.''' 84 | g = Grid(datafiles.DX) 85 | g.delta = np.array([90, 90, 150]) / 257 86 | with tmpdir.as_cwd(): 87 | g.export("grid.dx") 88 | g2 = Grid("grid.dx") 89 | assert_almost_equal( 90 | g.delta, g2.delta, 91 | decimal=7, 92 | err_msg="deltas of written grid do not match original") 93 | -------------------------------------------------------------------------------- /gridData/tests/test_gOpenMol.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from numpy.testing import (assert_almost_equal, 3 | assert_equal) 4 | 5 | from gridData import Grid 6 | 7 | from . import datafiles 8 | 9 | def test_gOpenMol(): 10 | g = Grid(datafiles.gOpenMol) 11 | POINTS = 192 12 | assert g.grid.size == 165048 13 | assert_almost_equal(g.delta, [1.0, 1.0, 1.0]) 14 | assert_equal(g.grid.shape, (46, 46, 78)) 15 | assert_almost_equal(g.origin, [0.5995016, 0.5995016, 0.5919984]) 16 | assert_almost_equal(g.grid[::20, ::20, ::30], 17 | np.array([[[1.02196848, 0. , 0.88893718], 18 | [0.99051529, 0. , 0.95906246], 19 | [0.96112466, 0. , 0.88996845]], 20 | 21 | [[0.97247058, 0. , 0.91574967], 22 | [1.00237465, 1.34423399, 0.87810922], 23 | [0.97917157, 0. , 0.84717268]], 24 | 25 | [[0.99103099, 0. , 0.86521846], 26 | [0.96421844, 0. , 0. ], 27 | [0.98432779, 0. , 0.8817184 ]]]) 28 | ) 29 | assert_almost_equal(g.grid.mean(), 0.5403224581733577) 30 | -------------------------------------------------------------------------------- /gridData/tests/test_grid.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from numpy.testing import (assert_array_equal, assert_array_almost_equal, 3 | assert_almost_equal) 4 | 5 | import pytest 6 | 7 | from gridData import Grid 8 | 9 | def f_arithmetic(g): 10 | return g + g - 2.5 * g / (g + 5.3) 11 | 12 | @pytest.fixture(scope="class") 13 | def data(): 14 | d = dict( 15 | griddata=np.arange(1, 28).reshape(3, 3, 3), 16 | origin=np.zeros(3), 17 | delta=np.ones(3)) 18 | d['grid'] = Grid(d['griddata'], origin=d['origin'], 19 | delta=d['delta']) 20 | return d 21 | 22 | class TestGrid(object): 23 | @pytest.fixture 24 | def pklfile(self, data, tmpdir): 25 | g = data['grid'] 26 | fn = tmpdir.mkdir('grid').join('grid.dat') 27 | g.save(fn) # always saves as pkl 28 | return fn 29 | 30 | def test_init(self, data): 31 | g = Grid(data['griddata'], origin=data['origin'], 32 | delta=1) 33 | assert_array_equal(g.delta, data['delta']) 34 | 35 | def test_init_wrong_origin(self, data): 36 | with pytest.raises(TypeError): 37 | Grid(data['griddata'], origin=np.ones(4), delta=data['delta']) 38 | 39 | def test_init_wrong_delta(self, data): 40 | with pytest.raises(TypeError): 41 | Grid(data['griddata'], origin=data['origin'], delta=np.ones(4)) 42 | 43 | def test_empty_Grid(self): 44 | g = Grid() 45 | assert isinstance(g, Grid) 46 | 47 | def test_init_missing_delta_ValueError(self, data): 48 | with pytest.raises(ValueError): 49 | Grid(data['griddata'], origin=data['origin']) 50 | 51 | def test_init_missing_origin_ValueError(self, data): 52 | with pytest.raises(ValueError): 53 | Grid(data['griddata'], delta=data['delta']) 54 | 55 | def test_init_wrong_data_exception(self): 56 | with pytest.raises(IOError): 57 | Grid("__does_not_exist__") 58 | 59 | def test_load_wrong_fileformat_ValueError(self): 60 | with pytest.raises(ValueError): 61 | Grid(grid=True, file_format="xxx") 62 | 63 | def test_equality(self, data): 64 | assert data['grid'] == data['grid'] 65 | assert data['grid'] != 'foo' 66 | g = Grid(data['griddata'], origin=data['origin'] + 1, delta=data['delta']) 67 | assert data['grid'] != g 68 | 69 | def test_addition(self, data): 70 | g = data['grid'] + data['grid'] 71 | assert_array_equal(g.grid.flat, (2 * data['griddata']).flat) 72 | g = 2 + data['grid'] 73 | assert_array_equal(g.grid.flat, (2 + data['griddata']).flat) 74 | g = g + data['grid'] 75 | assert_array_equal(g.grid.flat, (2 + (2 * data['griddata'])).flat) 76 | 77 | def test_subtraction(self, data): 78 | g = data['grid'] - data['grid'] 79 | assert_array_equal(g.grid.flat, np.zeros(27)) 80 | g = 2 - data['grid'] 81 | assert_array_equal(g.grid.flat, (2 - data['griddata']).flat) 82 | 83 | def test_multiplication(self, data): 84 | g = data['grid'] * data['grid'] 85 | assert_array_equal(g.grid.flat, (data['griddata'] ** 2).flat) 86 | g = 2 * data['grid'] 87 | assert_array_equal(g.grid.flat, (2 * data['griddata']).flat) 88 | 89 | def test_division(self, data): 90 | g = data['grid'] / data['grid'] 91 | assert_array_equal(g.grid.flat, np.ones(27)) 92 | g = 2 / data['grid'] 93 | assert_array_equal(g.grid.flat, (2 / data['griddata']).flat) 94 | 95 | def test_floordivision(self, data): 96 | g = data['grid'].__floordiv__(data['grid']) 97 | assert_array_equal(g.grid.flat, np.ones(27, dtype=np.int64)) 98 | g = 2 // data['grid'] 99 | assert_array_equal(g.grid.flat, (2 // data['griddata']).flat) 100 | 101 | def test_power(self, data): 102 | g = data['grid'] ** 2 103 | assert_array_equal(g.grid.flat, (data['griddata'] ** 2).flat) 104 | g = 2 ** data['grid'] 105 | assert_array_equal(g.grid.flat, (2 ** data['griddata']).flat) 106 | 107 | def test_compatibility_type(self, data): 108 | assert data['grid'].check_compatible(data['grid']) 109 | assert data['grid'].check_compatible(3) 110 | g = Grid(data['griddata'], origin=data['origin'] - 1, delta=data['delta']) 111 | assert data['grid'].check_compatible(g) 112 | 113 | def test_wrong_compatibile_type(self, data): 114 | with pytest.raises(TypeError): 115 | data['grid'].check_compatible("foo") 116 | 117 | def test_non_orthonormal_boxes(self, data): 118 | delta = np.eye(3) 119 | with pytest.raises(NotImplementedError): 120 | Grid(data['griddata'], origin=data['origin'], delta=delta) 121 | 122 | def test_centers(self, data): 123 | # this only checks the edges. If you know an alternative 124 | # algorithm that isn't an exact duplicate of the one in 125 | # g.centers to test this please implement it. 126 | g = Grid(data['griddata'], origin=np.ones(3), delta=data['delta']) 127 | centers = np.array(list(g.centers())) 128 | assert_array_equal(centers[0], g.origin) 129 | assert_array_equal(centers[-1] - g.origin, 130 | (np.array(g.grid.shape) - 1) * data['delta']) 131 | 132 | def test_resample_factor_failure(self, data): 133 | pytest.importorskip('scipy') 134 | 135 | with pytest.raises(ValueError): 136 | g = data['grid'].resample_factor(0) 137 | 138 | def test_resample_factor(self, data): 139 | pytest.importorskip('scipy') 140 | 141 | g = data['grid'].resample_factor(2) 142 | assert_array_equal(g.delta, np.ones(3) * .5) 143 | # zooming in by a factor of 2. Each subinterval is 144 | # split in half, so 3 gridpoints (2 subintervals) 145 | # becomes 5 gridpoints (4 subintervals) 146 | assert_array_equal(g.grid.shape, np.ones(3) * 5) 147 | # check that the values are identical with the 148 | # correct stride. 149 | assert_array_almost_equal(g.grid[::2, ::2, ::2], 150 | data['grid'].grid) 151 | 152 | def test_load_pickle(self, data, tmpdir): 153 | g = data['grid'] 154 | fn = str(tmpdir.mkdir('grid').join('grid.pkl')) 155 | g.save(fn) 156 | 157 | h = Grid() 158 | h.load(fn) 159 | 160 | assert h == g 161 | 162 | def test_init_pickle_pathobjects(self, data, tmpdir): 163 | g = data['grid'] 164 | fn = tmpdir.mkdir('grid').join('grid.pickle') 165 | g.save(fn) 166 | 167 | h = Grid(fn) 168 | 169 | assert h == g 170 | 171 | @pytest.mark.parametrize("fileformat", ("pkl", "PKL", "pickle", "python")) 172 | def test_load_fileformat(self, data, pklfile, fileformat): 173 | h = Grid(pklfile, file_format="pkl") 174 | assert h == data['grid'] 175 | 176 | # At the moment, reading the file with the wrong parser does not give 177 | # good error messages. 178 | @pytest.mark.xfail 179 | @pytest.mark.parametrize("fileformat", ("ccp4", "plt", "dx")) 180 | def test_load_wrong_fileformat(self, data, pklfile, fileformat): 181 | with pytest.raises('ValueError'): 182 | Grid(pklfile, file_format=fileformat) 183 | 184 | # just check that we can export without stupid failures; detailed 185 | # format checks in separate tests 186 | @pytest.mark.parametrize("fileformat", ("dx", "pkl")) 187 | def test_export(self, data, fileformat, tmpdir): 188 | g = data['grid'] 189 | fn = tmpdir.mkdir('grid_export').join("grid.{}".format(fileformat)) 190 | g.export(fn) # check that path objects work 191 | h = Grid(fn) # use format autodetection 192 | assert g == h 193 | 194 | @pytest.mark.parametrize("fileformat", ("ccp4", "plt")) 195 | def test_export_not_supported(self, data, fileformat, tmpdir): 196 | g = data['grid'] 197 | fn = tmpdir.mkdir('grid_export').join("grid.{}".format(fileformat)) 198 | with pytest.raises(ValueError): 199 | g.export(fn) 200 | 201 | 202 | def test_inheritance(data): 203 | class DerivedGrid(Grid): 204 | pass 205 | 206 | dg = DerivedGrid(data['griddata'], origin=data['origin'], 207 | delta=data['delta']) 208 | result = f_arithmetic(dg) 209 | 210 | assert isinstance(result, DerivedGrid) 211 | 212 | ref = f_arithmetic(data['grid']) 213 | assert_almost_equal(result.grid, ref.grid) 214 | 215 | def test_anyarray(data): 216 | ma = np.ma.MaskedArray(data['griddata']) 217 | mg = Grid(ma, origin=data['origin'], delta=data['delta']) 218 | 219 | assert isinstance(mg.grid, ma.__class__) 220 | 221 | result = f_arithmetic(mg) 222 | ref = f_arithmetic(data['grid']) 223 | 224 | assert_almost_equal(result.grid, ref.grid) 225 | -------------------------------------------------------------------------------- /gridData/tests/test_mrc.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import numpy as np 4 | from numpy.testing import (assert_allclose, 5 | assert_equal) 6 | 7 | from gridData import Grid, mrc 8 | 9 | from . import datafiles 10 | 11 | @pytest.fixture(scope="module") 12 | def g1(): 13 | return Grid(datafiles.CCP4, file_format="MRC") 14 | 15 | @pytest.fixture(scope="module") 16 | def g2(): 17 | data = mrc.MRC() 18 | data.read(datafiles.CCP4) 19 | grid, edges = data.histogramdd() 20 | return Grid(grid=grid, edges=edges) 21 | 22 | def test_ccp4_Grid(g1): 23 | _test_ccp4(g1) 24 | 25 | def test_ccp4_mrc(g2): 26 | _test_ccp4(g2) 27 | 28 | def _test_ccp4(g): 29 | POINTS = 192 30 | assert_equal(g.grid.flat, np.arange(1, POINTS+1)) 31 | assert_equal(g.grid.size, POINTS) 32 | assert_allclose(g.delta, [3./4, .5, 2./3]) 33 | assert_equal(g.origin, np.zeros(3)) 34 | 35 | 36 | 37 | @pytest.fixture(scope="module") 38 | def ccp4data(): 39 | return mrc.MRC(datafiles.CCP4_1JZV) 40 | 41 | @pytest.mark.parametrize('name,value', [ 42 | # nx, ny, nz are named nc, nr, ns in the CCP4 module 43 | ('nx', 96), 44 | ('ny', 76), 45 | ('nz', 70), 46 | ('mode', 2), 47 | ('nxstart', -4), 48 | ('nystart', -23), 49 | ('nzstart', 102), 50 | ('mx', 84), 51 | ('my', 84), 52 | ('mz', 160), 53 | ('cella', np.rec.array((45.8, 45.8, 89.65), 54 | dtype=[('x', '= 40.9.0", 4 | "versioningit", 5 | ] 6 | build-backend = "setuptools.build_meta" 7 | 8 | [project] 9 | name = "GridDataFormats" 10 | description = "Reading and writing of data on regular grids in Python" 11 | license = {file = "COPYING.LESSER" } 12 | authors = [ 13 | {name = "Oliver Beckstein", email = "orbeckst@gmail.com"}, 14 | ] 15 | maintainers = [ 16 | {name = "MDAnalysis", email = "mdanalysis@numfocus.org"}, 17 | ] 18 | classifiers = [ 19 | "Development Status :: 6 - Mature", 20 | "Environment :: Console", 21 | "Intended Audience :: Science/Research", 22 | "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)", 23 | "Operating System :: POSIX", 24 | "Operating System :: MacOS :: MacOS X", 25 | "Operating System :: Microsoft :: Windows", 26 | "Programming Language :: C", 27 | "Programming Language :: Python", 28 | "Programming Language :: Python :: 3.9", 29 | "Programming Language :: Python :: 3.10", 30 | "Programming Language :: Python :: 3.11", 31 | "Programming Language :: Python :: 3.12", 32 | "Programming Language :: Python :: 3.13", 33 | "Topic :: Scientific/Engineering", 34 | "Topic :: Software Development :: Libraries :: Python Modules", 35 | ] 36 | readme = {file = "README.rst", content-type = "text/x-rst"} 37 | requires-python = ">=3.9" 38 | dependencies = [ 39 | "numpy>=1.21", 40 | "scipy", 41 | "mrcfile", 42 | ] 43 | keywords = [ 44 | "science", 45 | "array", 46 | "density", 47 | ] 48 | dynamic = [ 49 | "version", 50 | ] 51 | 52 | [project.optional-dependencies] 53 | test = [ 54 | "pytest", 55 | "numpy", 56 | ] 57 | 58 | [project.urls] 59 | source = "https://github.com/MDAnalysis/GridDataFormats" 60 | documentation = "https://www.mdanalysis.org/GridDataFormats/" 61 | 62 | [tool.setuptools] 63 | zip-safe = true 64 | include-package-data = true 65 | 66 | [tool.setuptools.packages.find] 67 | namespaces = false 68 | include=["gridData"] 69 | exclude=["devtools", "doc", "ci", "examples"] 70 | 71 | [tool.setuptools.package-data] 72 | gridData = [ 73 | "tests/datafiles/*.dx", 74 | "tests/datafiles/*.dx.gz", 75 | "tests/datafiles/*.ccp4", 76 | "tests/datafiles/*.plt", 77 | "tests/datafiles/*.bz2", 78 | ] 79 | 80 | [tool.versioningit] 81 | default-version = "1+unknown" 82 | 83 | [tool.versioningit.vcs] 84 | method = "git" 85 | # the below line expects tags to look like '1.0.2'. 86 | # if prefixing with a v, e.g. 'v1.0.2', change it to ["v*"] 87 | match = ["*"] 88 | 89 | [tool.versioningit.format] 90 | distance = "{base_version}+{distance}.{vcs}{rev}" 91 | dirty = "{base_version}+{distance}.{vcs}{rev}.dirty" 92 | distance-dirty = "{base_version}+{distance}.{vcs}{rev}.dirty" 93 | --------------------------------------------------------------------------------