├── .gitignore ├── .gitmodules ├── .rtd-environment.yml ├── .travis.yml ├── LICENSE.rst ├── MANIFEST.in ├── README.rst ├── ah_bootstrap.py ├── appveyor.yml ├── code-of-conduct.md ├── codecov.yml ├── docs ├── Makefile ├── make.bat └── source │ ├── INSTALLATION.rst │ ├── LICENSE.rst │ ├── README.rst │ ├── TUTORIALS.rst │ ├── conf.py │ └── index.rst ├── logos ├── mirapy-logo-square.png └── mirapy-logo.png ├── mirapy ├── __init__.py ├── _astropy_init.py ├── autoencoder │ ├── __init__.py │ └── models.py ├── classifiers │ ├── __init__.py │ ├── models.py │ └── tests │ │ ├── __init__.py │ │ └── test_models.py ├── conftest.py ├── data │ ├── README.rst │ ├── __init__.py │ ├── load_dataset.py │ └── tests │ │ ├── __init__.py │ │ └── test_load_dataset.py ├── fitting │ ├── __init__.py │ ├── losses.py │ ├── models.py │ ├── optimizers.py │ └── tests │ │ ├── __init__.py │ │ ├── test_losses.py │ │ ├── test_models.py │ │ └── test_optimizers.py ├── tests │ ├── __init__.py │ ├── coveragerc │ └── setup_package.py ├── utils │ ├── __init__.py │ ├── tests │ │ ├── __init__.py │ │ └── test_utils.py │ └── utils.py └── visualization │ ├── __init__.py │ ├── tests │ ├── __init__.py │ └── test_visualize.py │ └── visualize.py ├── paper.bib ├── paper.md ├── readthedocs.yml ├── requirements.txt ├── setup.cfg └── setup.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled files 2 | *.py[cod] 3 | *.a 4 | *.o 5 | *.so 6 | __pycache__ 7 | 8 | # Ignore .c files by default to avoid including generated code. If you want to 9 | # add a non-generated .c extension, use `git add -f filename.c`. 10 | *.c 11 | 12 | # Other generated files 13 | */version.py 14 | */cython_version.py 15 | htmlcov 16 | .coverage 17 | MANIFEST 18 | .ipynb_checkpoints 19 | 20 | # Sphinx 21 | docs/api 22 | docs/_build 23 | 24 | # Eclipse editor project files 25 | .project 26 | .pydevproject 27 | .settings 28 | 29 | # Pycharm editor project files 30 | .idea 31 | 32 | # Floobits project files 33 | .floo 34 | .flooignore 35 | 36 | # Packages/installer info 37 | *.egg 38 | *.egg-info 39 | dist 40 | build 41 | eggs 42 | parts 43 | bin 44 | var 45 | sdist 46 | develop-eggs 47 | .installed.cfg 48 | distribute-*.tar.gz 49 | 50 | # Other 51 | .cache 52 | .tox 53 | .*.sw[op] 54 | *~ 55 | .project 56 | .pydevproject 57 | .settings 58 | .vscode 59 | 60 | # Mac OSX 61 | .DS_Store 62 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "astropy_helpers"] 2 | url = https://github.com/astropy/astropy-helpers.git 3 | path = astropy_helpers 4 | branch = refs/heads/v3.1 5 | -------------------------------------------------------------------------------- /.rtd-environment.yml: -------------------------------------------------------------------------------- 1 | name: mirapy 2 | 3 | channels: 4 | - astropy 5 | 6 | dependencies: 7 | - python>=3.5 8 | - astropy 9 | - Cython 10 | - matplotlib 11 | - numpy 12 | - pip: 13 | - nbsphinx 14 | - sphinx-autoapi -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | # We set the language to c because python isn't supported on the MacOS X nodes 2 | # on Travis. However, the language ends up being irrelevant anyway, since we 3 | # install Python ourselves using conda. 
4 | language: c
5 | 
6 | os:
7 |     - linux
8 | 
9 | # Setting sudo to false opts in to Travis-CI container-based builds.
10 | sudo: false
11 | 
12 | # The apt packages below are needed for sphinx builds. A full list of packages
13 | # that can be included can be found here:
14 | #
15 | # https://github.com/travis-ci/apt-package-whitelist/blob/master/ubuntu-precise
16 | 
17 | addons:
18 |     apt:
19 |         packages:
20 |             - graphviz
21 |             - texlive-latex-extra
22 |             - dvipng
23 | 
24 | env:
25 |     global:
26 | 
27 |         # The following versions are the 'default' for tests, unless
28 |         # overridden underneath. They are defined here in order to save having
29 |         # to repeat them for all configurations.
30 |         - PYTHON_VERSION=3.7
31 |         - NUMPY_VERSION=stable
32 |         - ASTROPY_VERSION=stable
33 |         - MAIN_CMD='python setup.py'
34 |         - SETUP_CMD='test'
35 |         - EVENT_TYPE='pull_request push'
36 | 
37 | 
38 |         # List runtime dependencies for the package that are available as conda
39 |         # packages here.
40 |         - CONDA_DEPENDENCIES='scipy==1.1.0 h5py matplotlib tqdm scikit-learn keras pandas'
41 |         - CONDA_DEPENDENCIES_DOC='sphinx-astropy'
42 | 
43 |         # List other runtime dependencies for the package that are available as
44 |         # pip packages here.
45 |         - PIP_DEPENDENCIES='scipy==1.1.0 autograd opencv-python sphinx_rtd_theme coveralls'
46 | 
47 |         # Conda packages for affiliated packages are hosted in channel
48 |         # "astropy" while builds for astropy LTS with recent numpy versions
49 |         # are in astropy-ci-extras. If your package uses either of these,
50 |         # add the channels to CONDA_CHANNELS along with any other channels
51 |         # you want to use.
52 |         - CONDA_CHANNELS='astropy-ci-extras astropy'
53 | 
54 |         # If there are matplotlib or other GUI tests, uncomment the following
55 |         # line to use the X virtual framebuffer.
56 |         # - SETUP_XVFB=True
57 | 
58 |         # If you want to ignore certain flake8 errors, you can list them
59 |         # in FLAKE8_OPT, for example:
60 |         # - FLAKE8_OPT='--ignore=E501'
61 |         - FLAKE8_OPT=''
62 | 
63 |     matrix:
64 |         # Make sure that egg_info works without dependencies
65 |         - PYTHON_VERSION=3.7 SETUP_CMD='egg_info'
66 | 
67 | 
68 | matrix:
69 | 
70 |     # Don't wait for allowed failures
71 |     fast_finish: true
72 | 
73 |     include:
74 |         # Try MacOS X
75 |         - os: osx
76 |           env: SETUP_CMD='test'
77 | 
78 |         # Do a coverage test.
79 |         - os: linux
80 |           env: SETUP_CMD='test --coverage'
81 | 
82 |         # # Check for sphinx doc build warnings - we do this first because it
83 |         # # may run for a long time
84 |         # - os: linux
85 |         #   env: SETUP_CMD='build_docs -w'
86 |         #        CONDA_DEPENDENCIES=$CONDA_DEPENDENCIES_DOC
87 | 
88 |         # Now try Astropy dev and LTS releases with the default Python version.
89 |         - os: linux
90 |           env: ASTROPY_VERSION=development
91 |                EVENT_TYPE='pull_request push cron'
92 |         - os: linux
93 |           env: ASTROPY_VERSION=lts
94 | 
95 |         # Try all python versions and Numpy versions. Since we can assume that
96 |         # the Numpy developers have taken care of testing Numpy with different
97 |         # versions of Python, we can vary Python and Numpy versions at the same
98 |         # time.
99 | 100 | # - os: linux 101 | # env: PYTHON_VERSION=3.5 NUMPY_VERSION=1.12 MATPLOTLIB_VERSION=1.3 102 | - os: linux 103 | env: PYTHON_VERSION=3.6 NUMPY_VERSION=1.13 104 | - os: linux 105 | env: NUMPY_VERSION=1.14 106 | 107 | # Try numpy pre-release 108 | - os: linux 109 | env: NUMPY_VERSION=prerelease 110 | EVENT_TYPE='pull_request push cron' 111 | 112 | # Do a PEP8 test with flake8 113 | - os: linux 114 | env: MAIN_CMD='flake8 mirapy --count --show-source --statistics $FLAKE8_OPT' SETUP_CMD='' 115 | 116 | allow_failures: 117 | # Do a PEP8 test with flake8 118 | # (allow to fail unless your code completely compliant) 119 | - os: linux 120 | env: MAIN_CMD='flake8 mirapy --count --show-source --statistics $FLAKE8_OPT' SETUP_CMD='' 121 | 122 | install: 123 | 124 | # We now use the ci-helpers package to set up our testing environment. 125 | # This is done by using Miniconda and then using conda and pip to install 126 | # dependencies. Which dependencies are installed using conda and pip is 127 | # determined by the CONDA_DEPENDENCIES and PIP_DEPENDENCIES variables, 128 | # which should be space-delimited lists of package names. See the README 129 | # in https://github.com/astropy/ci-helpers for information about the full 130 | # list of environment variables that can be used to customize your 131 | # environment. In some cases, ci-helpers may not offer enough flexibility 132 | # in how to install a package, in which case you can have additional 133 | # commands in the install: section below. 134 | 135 | - git clone --depth 1 git://github.com/astropy/ci-helpers.git 136 | - source ci-helpers/travis/setup_conda.sh 137 | 138 | # As described above, using ci-helpers, you should be able to set up an 139 | # environment with dependencies installed using conda and pip, but in some 140 | # cases this may not provide enough flexibility in how to install a 141 | # specific dependency (and it will not be able to install non-Python 142 | # dependencies). Therefore, you can also include commands below (as 143 | # well as at the start of the install section or in the before_install 144 | # section if they are needed before setting up conda) to install any 145 | # other dependencies. 146 | 147 | script: 148 | - $MAIN_CMD $SETUP_CMD 149 | 150 | after_success: 151 | # If coveralls.io is set up for this package, uncomment the line below. 152 | # The coveragerc file may be customized as needed for your package. 153 | # - if [[ $SETUP_CMD == *coverage* ]]; then coveralls --rcfile='mirapy/tests/coveragerc'; fi 154 | - bash <(curl -s https://codecov.io/bash) 155 | - coveralls 156 | -------------------------------------------------------------------------------- /LICENSE.rst: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 MiraPy Organisation 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst 2 | include CHANGES.rst 3 | 4 | include ah_bootstrap.py 5 | include setup.cfg 6 | include mirapy/tests/coveragerc 7 | 8 | recursive-include mirapy *.pyx *.c *.pxd 9 | recursive-include docs * 10 | recursive-include licenses * 11 | recursive-include cextern * 12 | recursive-include scripts * 13 | 14 | prune build 15 | prune docs/_build 16 | prune docs/api 17 | 18 | 19 | # the next few stanzas are for astropy_helpers. It's derived from the 20 | # astropy_helpers/MANIFEST.in, but requires additional includes for the actual 21 | # package directory and egg-info. 22 | 23 | include astropy_helpers/README.rst 24 | include astropy_helpers/CHANGES.rst 25 | include astropy_helpers/LICENSE.rst 26 | recursive-include astropy_helpers/licenses * 27 | 28 | include astropy_helpers/ah_bootstrap.py 29 | 30 | recursive-include astropy_helpers/astropy_helpers *.py *.pyx *.c *.h *.rst 31 | recursive-include astropy_helpers/astropy_helpers.egg-info * 32 | # include the sphinx stuff with "*" because there are css/html/rst/etc. 33 | recursive-include astropy_helpers/astropy_helpers/sphinx * 34 | 35 | prune astropy_helpers/build 36 | prune astropy_helpers/astropy_helpers/tests 37 | 38 | 39 | global-exclude *.pyc *.o 40 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | MiraPy: Python Package for Deep Learning in Astronomy 2 | -------------------------------------------------------- 3 | 4 | .. image:: https://img.shields.io/badge/Powered%20by-Keras-red?style=flat-square 5 | :target: http://keras.io 6 | :alt: Powered by Keras Badge 7 | 8 | .. image:: http://img.shields.io/badge/powered%20by-AstroPy-orange.svg?style=flat-square 9 | :target: http://www.astropy.org 10 | :alt: Powered by Astropy Badge 11 | 12 | .. image:: https://img.shields.io/travis/com/mirapy-org/mirapy.svg?style=flat-square&logo=travis%20ci 13 | :target: https://travis-ci.com/mirapy-org/mirapy 14 | :alt: Travis CI 15 | 16 | .. image:: https://readthedocs.org/projects/mirapy/badge/?version=latest&style=flat-square 17 | :target: https://mirapy.readthedocs.io/en/latest/?badge=latest 18 | :alt: Documentation Status 19 | 20 | .. image:: https://img.shields.io/coveralls/github/mirapy-org/mirapy.svg?style=flat-square 21 | :target: https://coveralls.io/github/mirapy-org/mirapy 22 | :alt: Coveralls 23 | 24 | .. image:: https://img.shields.io/badge/chat%20on-Slack-4A154B.svg?style=flat-square&logo=slack 25 | :target: https://join.slack.com/t/mirapy/shared_invite/enQtNjEyNDQwNTI2NDY3LTE3ZmI3M2EyMjdkZWU4NTE2NjkxZjdhYWE4ZjUyODY0NzllNzRlMzZhNThhNWRiMjk4MjNhYWQ3NjA3YjJiNGY 26 | :alt: Slack 27 | 28 | .. 
image:: https://img.shields.io/pypi/v/mirapy.svg?style=flat-square&logo=pypi
29 |     :target: https://pypi.org/project/mirapy/
30 |     :alt: PyPI
31 | 
32 | .. image:: https://img.shields.io/github/license/mirapy-org/mirapy.svg?style=flat-square
33 |     :target: https://github.com/mirapy-org/mirapy/blob/master/LICENSE.rst
34 |     :alt: LICENSE
35 | 
36 | .. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.2908315.svg
37 |     :target: https://doi.org/10.5281/zenodo.2908315
38 |     :alt: Zenodo DOI
39 | 
40 | 
41 | MiraPy is a Python package for Deep Learning in Astronomy. It is built on
42 | Keras and lets you develop ML models that run seamlessly on CPU and GPU. The
43 | aim is to make it easy for astronomers, researchers and students to apply
44 | machine learning techniques to astronomical data.
45 | 
46 | The documentation is available `here <https://mirapy.readthedocs.io/en/latest/>`_.
47 | 
48 | Applications
49 | ------------
50 | 
51 | MiraPy can be used to solve problems in Astronomy with ML techniques, and it will continue to grow to tackle new ones. Following are some of the experiments that you can perform right now:
52 | 
53 | - Classification of X-Ray Binaries using neural network
54 | - Astronomical Image Reconstruction using Autoencoder
55 | - Classification of the first catalog of variable stars by ATLAS
56 | - HTRU1 Pulsar Dataset Image Classification using Convolutional Neural Network
57 | - OGLE Catalogue Variable Star Classification using Recurrent Neural Network (RNN)
58 | - 2D and 3D visualization of feature sets using Principal Component Analysis (PCA)
59 | - Curve Fitting using Autograd (basic implementation)
60 | 
61 | There are more projects that we will add soon and some of them are as follows:
62 | 
63 | - Feature Engineering (Selection, Reduction and Visualization)
64 | - Classification of different states of GRS1905+105 X-Ray Binaries using Recurrent Neural Network (RNN)
65 | - Feature extraction from Images using Autoencoders and its applications in Astronomy
66 | 
67 | You can find the applications of MiraPy in our `tutorial <https://github.com/mirapy-org/tutorials>`_ repository.
68 | 
69 | Installation
70 | ------------
71 | 
72 | Before installing Keras, please install one of its backend engines: TensorFlow, Theano, or CNTK. We recommend the TensorFlow backend. You can find the Keras installation guide `here <https://keras.io/#installation>`_.
73 | 
74 | You can download the package using the ``pip`` package installer::
75 | 
76 |     pip install mirapy
77 | 
78 | You can also build it from the source code::
79 | 
80 |     git clone --recursive https://github.com/mirapy-org/mirapy.git
81 |     cd mirapy
82 |     pip install -r requirements.txt
83 |     python setup.py install
84 | 
85 | Contributing
86 | ------------
87 | 
88 | MiraPy is far from perfect and we would love to see your contributions to the open-source community! In the future, it will be able to do more and in better ways, and we need your suggestions! Tell us what you would like to see as a part of this package on `Slack <https://join.slack.com/t/mirapy/shared_invite/enQtNjEyNDQwNTI2NDY3LTE3ZmI3M2EyMjdkZWU4NTE2NjkxZjdhYWE4ZjUyODY0NzllNzRlMzZhNThhNWRiMjk4MjNhYWQ3NjA3YjJiNGY>`_.
89 | 
90 | 
91 | About Us
92 | --------
93 | 
94 | MiraPy is developed by Swapnil Sharma and Akhil Singhal as their final year 'Major Technical Project' under the guidance of Dr. Arnav Bhavsar at the Indian Institute of Technology, Mandi.
95 | 
96 | License
97 | -------
98 | 
99 | This project is Copyright (c) Swapnil Sharma, Akhil Singhal and licensed under
100 | the terms of the MIT license.
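
Quick Example
-------------

As a quick taste of the API, the sketch below shows how a classifier shipped in
``mirapy.classifiers.models`` might be trained on a prepared feature set. The
class and method names here are illustrative assumptions based on the module
layout above, not a verbatim API reference; see the tutorial repository for
complete, runnable notebooks::

    import numpy as np
    # Hypothetical names based on the mirapy/classifiers/models.py module above
    from mirapy.classifiers.models import XRayBinaryClassifier

    # Toy stand-in data; real feature sets would come from the loaders
    # in mirapy.data.load_dataset
    x_train = np.random.rand(100, 6)
    y_train = np.random.randint(0, 3, 100)

    model = XRayBinaryClassifier(activation='relu')
    model.compile(optimizer='adam')    # builds the underlying Keras model
    model.train(x_train, y_train)      # fit on the training features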
101 | 
--------------------------------------------------------------------------------
/ah_bootstrap.py:
--------------------------------------------------------------------------------
1 | """
2 | This bootstrap module contains code for ensuring that the astropy_helpers
3 | package will be importable by the time the setup.py script runs. It also
4 | includes some workarounds to ensure that a recent-enough version of setuptools
5 | is being used for the installation.
6 | 
7 | This module should be the first thing imported in the setup.py of distributions
8 | that make use of the utilities in astropy_helpers. If the distribution ships
9 | with its own copy of astropy_helpers, this module will first attempt to import
10 | from the shipped copy. However, it will also check PyPI to see if there are
11 | any bug-fix releases on top of the current version that may be useful to get
12 | past platform-specific bugs that have been fixed. When running setup.py, use
13 | the ``--offline`` command-line option to disable the auto-upgrade checks.
14 | 
15 | When this module is imported or otherwise executed it automatically calls a
16 | main function that attempts to read the project's setup.cfg file, which it
17 | checks for a configuration section called ``[ah_bootstrap]``. The presence of
18 | that section, and the options therein, determines the next step taken: if it
19 | contains an option called ``auto_use`` with a value of ``True``, it will
20 | automatically call the main function of this module called
21 | `use_astropy_helpers` (see that function's docstring for full details).
22 | Otherwise no further action is taken and by default the system-installed version
23 | of astropy-helpers will be used (however, ``ah_bootstrap.use_astropy_helpers``
24 | may be called manually from within the setup.py script).
25 | 
26 | This behavior can also be controlled using the ``--auto-use`` and
27 | ``--no-auto-use`` command-line flags. For clarity, an alias for
28 | ``--no-auto-use`` is ``--use-system-astropy-helpers``, and we recommend using
29 | the latter if needed.
30 | 
31 | Additional options in the ``[ah_bootstrap]`` section of setup.cfg have the same
32 | names as the arguments to `use_astropy_helpers`, and can be used to configure
33 | the bootstrap script when ``auto_use = True``.
34 | 
35 | See https://github.com/astropy/astropy-helpers for more details, and for the
36 | latest version of this module.
37 | """ 38 | 39 | import contextlib 40 | import errno 41 | import io 42 | import locale 43 | import os 44 | import re 45 | import subprocess as sp 46 | import sys 47 | 48 | __minimum_python_version__ = (3, 5) 49 | 50 | if sys.version_info < __minimum_python_version__: 51 | print("ERROR: Python {} or later is required by astropy-helpers".format( 52 | __minimum_python_version__)) 53 | sys.exit(1) 54 | 55 | try: 56 | from ConfigParser import ConfigParser, RawConfigParser 57 | except ImportError: 58 | from configparser import ConfigParser, RawConfigParser 59 | 60 | 61 | _str_types = (str, bytes) 62 | 63 | 64 | # What follows are several import statements meant to deal with install-time 65 | # issues with either missing or misbehaving pacakges (including making sure 66 | # setuptools itself is installed): 67 | 68 | # Check that setuptools 1.0 or later is present 69 | from distutils.version import LooseVersion 70 | 71 | try: 72 | import setuptools 73 | assert LooseVersion(setuptools.__version__) >= LooseVersion('1.0') 74 | except (ImportError, AssertionError): 75 | print("ERROR: setuptools 1.0 or later is required by astropy-helpers") 76 | sys.exit(1) 77 | 78 | # typing as a dependency for 1.6.1+ Sphinx causes issues when imported after 79 | # initializing submodule with ah_boostrap.py 80 | # See discussion and references in 81 | # https://github.com/astropy/astropy-helpers/issues/302 82 | 83 | try: 84 | import typing # noqa 85 | except ImportError: 86 | pass 87 | 88 | 89 | # Note: The following import is required as a workaround to 90 | # https://github.com/astropy/astropy-helpers/issues/89; if we don't import this 91 | # module now, it will get cleaned up after `run_setup` is called, but that will 92 | # later cause the TemporaryDirectory class defined in it to stop working when 93 | # used later on by setuptools 94 | try: 95 | import setuptools.py31compat # noqa 96 | except ImportError: 97 | pass 98 | 99 | 100 | # matplotlib can cause problems if it is imported from within a call of 101 | # run_setup(), because in some circumstances it will try to write to the user's 102 | # home directory, resulting in a SandboxViolation. See 103 | # https://github.com/matplotlib/matplotlib/pull/4165 104 | # Making sure matplotlib, if it is available, is imported early in the setup 105 | # process can mitigate this (note importing matplotlib.pyplot has the same 106 | # issue) 107 | try: 108 | import matplotlib 109 | matplotlib.use('Agg') 110 | import matplotlib.pyplot 111 | except: 112 | # Ignore if this fails for *any* reason* 113 | pass 114 | 115 | 116 | # End compatibility imports... 117 | 118 | 119 | # In case it didn't successfully import before the ez_setup checks 120 | import pkg_resources 121 | 122 | from setuptools import Distribution 123 | from setuptools.package_index import PackageIndex 124 | 125 | from distutils import log 126 | from distutils.debug import DEBUG 127 | 128 | 129 | # TODO: Maybe enable checking for a specific version of astropy_helpers? 
130 | DIST_NAME = 'astropy-helpers' 131 | PACKAGE_NAME = 'astropy_helpers' 132 | UPPER_VERSION_EXCLUSIVE = None 133 | 134 | # Defaults for other options 135 | DOWNLOAD_IF_NEEDED = True 136 | INDEX_URL = 'https://pypi.python.org/simple' 137 | USE_GIT = True 138 | OFFLINE = False 139 | AUTO_UPGRADE = True 140 | 141 | # A list of all the configuration options and their required types 142 | CFG_OPTIONS = [ 143 | ('auto_use', bool), ('path', str), ('download_if_needed', bool), 144 | ('index_url', str), ('use_git', bool), ('offline', bool), 145 | ('auto_upgrade', bool) 146 | ] 147 | 148 | 149 | class _Bootstrapper(object): 150 | """ 151 | Bootstrapper implementation. See ``use_astropy_helpers`` for parameter 152 | documentation. 153 | """ 154 | 155 | def __init__(self, path=None, index_url=None, use_git=None, offline=None, 156 | download_if_needed=None, auto_upgrade=None): 157 | 158 | if path is None: 159 | path = PACKAGE_NAME 160 | 161 | if not (isinstance(path, _str_types) or path is False): 162 | raise TypeError('path must be a string or False') 163 | 164 | if not isinstance(path, str): 165 | fs_encoding = sys.getfilesystemencoding() 166 | path = path.decode(fs_encoding) # path to unicode 167 | 168 | self.path = path 169 | 170 | # Set other option attributes, using defaults where necessary 171 | self.index_url = index_url if index_url is not None else INDEX_URL 172 | self.offline = offline if offline is not None else OFFLINE 173 | 174 | # If offline=True, override download and auto-upgrade 175 | if self.offline: 176 | download_if_needed = False 177 | auto_upgrade = False 178 | 179 | self.download = (download_if_needed 180 | if download_if_needed is not None 181 | else DOWNLOAD_IF_NEEDED) 182 | self.auto_upgrade = (auto_upgrade 183 | if auto_upgrade is not None else AUTO_UPGRADE) 184 | 185 | # If this is a release then the .git directory will not exist so we 186 | # should not use git. 187 | git_dir_exists = os.path.exists(os.path.join(os.path.dirname(__file__), '.git')) 188 | if use_git is None and not git_dir_exists: 189 | use_git = False 190 | 191 | self.use_git = use_git if use_git is not None else USE_GIT 192 | # Declared as False by default--later we check if astropy-helpers can be 193 | # upgraded from PyPI, but only if not using a source distribution (as in 194 | # the case of import from a git submodule) 195 | self.is_submodule = False 196 | 197 | @classmethod 198 | def main(cls, argv=None): 199 | if argv is None: 200 | argv = sys.argv 201 | 202 | config = cls.parse_config() 203 | config.update(cls.parse_command_line(argv)) 204 | 205 | auto_use = config.pop('auto_use', False) 206 | bootstrapper = cls(**config) 207 | 208 | if auto_use: 209 | # Run the bootstrapper, otherwise the setup.py is using the old 210 | # use_astropy_helpers() interface, in which case it will run the 211 | # bootstrapper manually after reconfiguring it. 212 | bootstrapper.run() 213 | 214 | return bootstrapper 215 | 216 | @classmethod 217 | def parse_config(cls): 218 | if not os.path.exists('setup.cfg'): 219 | return {} 220 | 221 | cfg = ConfigParser() 222 | 223 | try: 224 | cfg.read('setup.cfg') 225 | except Exception as e: 226 | if DEBUG: 227 | raise 228 | 229 | log.error( 230 | "Error reading setup.cfg: {0!r}\n{1} will not be " 231 | "automatically bootstrapped and package installation may fail." 
232 | "\n{2}".format(e, PACKAGE_NAME, _err_help_msg)) 233 | return {} 234 | 235 | if not cfg.has_section('ah_bootstrap'): 236 | return {} 237 | 238 | config = {} 239 | 240 | for option, type_ in CFG_OPTIONS: 241 | if not cfg.has_option('ah_bootstrap', option): 242 | continue 243 | 244 | if type_ is bool: 245 | value = cfg.getboolean('ah_bootstrap', option) 246 | else: 247 | value = cfg.get('ah_bootstrap', option) 248 | 249 | config[option] = value 250 | 251 | return config 252 | 253 | @classmethod 254 | def parse_command_line(cls, argv=None): 255 | if argv is None: 256 | argv = sys.argv 257 | 258 | config = {} 259 | 260 | # For now we just pop recognized ah_bootstrap options out of the 261 | # arg list. This is imperfect; in the unlikely case that a setup.py 262 | # custom command or even custom Distribution class defines an argument 263 | # of the same name then we will break that. However there's a catch22 264 | # here that we can't just do full argument parsing right here, because 265 | # we don't yet know *how* to parse all possible command-line arguments. 266 | if '--no-git' in argv: 267 | config['use_git'] = False 268 | argv.remove('--no-git') 269 | 270 | if '--offline' in argv: 271 | config['offline'] = True 272 | argv.remove('--offline') 273 | 274 | if '--auto-use' in argv: 275 | config['auto_use'] = True 276 | argv.remove('--auto-use') 277 | 278 | if '--no-auto-use' in argv: 279 | config['auto_use'] = False 280 | argv.remove('--no-auto-use') 281 | 282 | if '--use-system-astropy-helpers' in argv: 283 | config['auto_use'] = False 284 | argv.remove('--use-system-astropy-helpers') 285 | 286 | return config 287 | 288 | def run(self): 289 | strategies = ['local_directory', 'local_file', 'index'] 290 | dist = None 291 | 292 | # First, remove any previously imported versions of astropy_helpers; 293 | # this is necessary for nested installs where one package's installer 294 | # is installing another package via setuptools.sandbox.run_setup, as in 295 | # the case of setup_requires 296 | for key in list(sys.modules): 297 | try: 298 | if key == PACKAGE_NAME or key.startswith(PACKAGE_NAME + '.'): 299 | del sys.modules[key] 300 | except AttributeError: 301 | # Sometimes mysterious non-string things can turn up in 302 | # sys.modules 303 | continue 304 | 305 | # Check to see if the path is a submodule 306 | self.is_submodule = self._check_submodule() 307 | 308 | for strategy in strategies: 309 | method = getattr(self, 'get_{0}_dist'.format(strategy)) 310 | dist = method() 311 | if dist is not None: 312 | break 313 | else: 314 | raise _AHBootstrapSystemExit( 315 | "No source found for the {0!r} package; {0} must be " 316 | "available and importable as a prerequisite to building " 317 | "or installing this package.".format(PACKAGE_NAME)) 318 | 319 | # This is a bit hacky, but if astropy_helpers was loaded from a 320 | # directory/submodule its Distribution object gets a "precedence" of 321 | # "DEVELOP_DIST". However, in other cases it gets a precedence of 322 | # "EGG_DIST". 
However, when activating the distribution it will only be
323 |         # placed early on sys.path if it is treated as an EGG_DIST, so always
324 |         # do that
325 |         dist = dist.clone(precedence=pkg_resources.EGG_DIST)
326 | 
327 |         # Otherwise we found a version of astropy-helpers, so we're done
328 |         # Just activate the found distribution on sys.path--if we did a
329 |         # download this usually happens automatically but it doesn't hurt to
330 |         # do it again
331 |         # Note: Adding the dist to the global working set also activates it
332 |         # (makes it importable on sys.path) by default.
333 | 
334 |         try:
335 |             pkg_resources.working_set.add(dist, replace=True)
336 |         except TypeError:
337 |             # Some (much) older versions of setuptools do not have the
338 |             # replace=True option here. These versions are old enough that all
339 |             # bets may be off anyways, but it's easy enough to work around just
340 |             # in case...
341 |             if dist.key in pkg_resources.working_set.by_key:
342 |                 del pkg_resources.working_set.by_key[dist.key]
343 |             pkg_resources.working_set.add(dist)
344 | 
345 |     @property
346 |     def config(self):
347 |         """
348 |         A `dict` containing the options this `_Bootstrapper` was configured
349 |         with.
350 |         """
351 | 
352 |         return dict((optname, getattr(self, optname))
353 |                     for optname, _ in CFG_OPTIONS if hasattr(self, optname))
354 | 
355 |     def get_local_directory_dist(self):
356 |         """
357 |         Handle importing a vendored package from a subdirectory of the source
358 |         distribution.
359 |         """
360 | 
361 |         if not os.path.isdir(self.path):
362 |             return
363 | 
364 |         log.info('Attempting to import astropy_helpers from {0} {1!r}'.format(
365 |             'submodule' if self.is_submodule else 'directory',
366 |             self.path))
367 | 
368 |         dist = self._directory_import()
369 | 
370 |         if dist is None:
371 |             log.warn(
372 |                 'The requested path {0!r} for importing {1} does not '
373 |                 'exist, or does not contain a copy of the {1} '
374 |                 'package.'.format(self.path, PACKAGE_NAME))
375 |         elif self.auto_upgrade and not self.is_submodule:
376 |             # A version of astropy-helpers was found on the available path, but
377 |             # check to see if a bugfix release is available on PyPI
378 |             upgrade = self._do_upgrade(dist)
379 |             if upgrade is not None:
380 |                 dist = upgrade
381 | 
382 |         return dist
383 | 
384 |     def get_local_file_dist(self):
385 |         """
386 |         Handle importing from a source archive; this also uses setup_requires
387 |         but points easy_install directly to the source archive.
388 | """ 389 | 390 | if not os.path.isfile(self.path): 391 | return 392 | 393 | log.info('Attempting to unpack and import astropy_helpers from ' 394 | '{0!r}'.format(self.path)) 395 | 396 | try: 397 | dist = self._do_download(find_links=[self.path]) 398 | except Exception as e: 399 | if DEBUG: 400 | raise 401 | 402 | log.warn( 403 | 'Failed to import {0} from the specified archive {1!r}: ' 404 | '{2}'.format(PACKAGE_NAME, self.path, str(e))) 405 | dist = None 406 | 407 | if dist is not None and self.auto_upgrade: 408 | # A version of astropy-helpers was found on the available path, but 409 | # check to see if a bugfix release is available on PyPI 410 | upgrade = self._do_upgrade(dist) 411 | if upgrade is not None: 412 | dist = upgrade 413 | 414 | return dist 415 | 416 | def get_index_dist(self): 417 | if not self.download: 418 | log.warn('Downloading {0!r} disabled.'.format(DIST_NAME)) 419 | return None 420 | 421 | log.warn( 422 | "Downloading {0!r}; run setup.py with the --offline option to " 423 | "force offline installation.".format(DIST_NAME)) 424 | 425 | try: 426 | dist = self._do_download() 427 | except Exception as e: 428 | if DEBUG: 429 | raise 430 | log.warn( 431 | 'Failed to download and/or install {0!r} from {1!r}:\n' 432 | '{2}'.format(DIST_NAME, self.index_url, str(e))) 433 | dist = None 434 | 435 | # No need to run auto-upgrade here since we've already presumably 436 | # gotten the most up-to-date version from the package index 437 | return dist 438 | 439 | def _directory_import(self): 440 | """ 441 | Import astropy_helpers from the given path, which will be added to 442 | sys.path. 443 | 444 | Must return True if the import succeeded, and False otherwise. 445 | """ 446 | 447 | # Return True on success, False on failure but download is allowed, and 448 | # otherwise raise SystemExit 449 | path = os.path.abspath(self.path) 450 | 451 | # Use an empty WorkingSet rather than the man 452 | # pkg_resources.working_set, since on older versions of setuptools this 453 | # will invoke a VersionConflict when trying to install an upgrade 454 | ws = pkg_resources.WorkingSet([]) 455 | ws.add_entry(path) 456 | dist = ws.by_key.get(DIST_NAME) 457 | 458 | if dist is None: 459 | # We didn't find an egg-info/dist-info in the given path, but if a 460 | # setup.py exists we can generate it 461 | setup_py = os.path.join(path, 'setup.py') 462 | if os.path.isfile(setup_py): 463 | # We use subprocess instead of run_setup from setuptools to 464 | # avoid segmentation faults - see the following for more details: 465 | # https://github.com/cython/cython/issues/2104 466 | sp.check_output([sys.executable, 'setup.py', 'egg_info'], cwd=path) 467 | 468 | for dist in pkg_resources.find_distributions(path, True): 469 | # There should be only one... 
470 | return dist 471 | 472 | return dist 473 | 474 | def _do_download(self, version='', find_links=None): 475 | if find_links: 476 | allow_hosts = '' 477 | index_url = None 478 | else: 479 | allow_hosts = None 480 | index_url = self.index_url 481 | 482 | # Annoyingly, setuptools will not handle other arguments to 483 | # Distribution (such as options) before handling setup_requires, so it 484 | # is not straightforward to programmatically augment the arguments which 485 | # are passed to easy_install 486 | class _Distribution(Distribution): 487 | def get_option_dict(self, command_name): 488 | opts = Distribution.get_option_dict(self, command_name) 489 | if command_name == 'easy_install': 490 | if find_links is not None: 491 | opts['find_links'] = ('setup script', find_links) 492 | if index_url is not None: 493 | opts['index_url'] = ('setup script', index_url) 494 | if allow_hosts is not None: 495 | opts['allow_hosts'] = ('setup script', allow_hosts) 496 | return opts 497 | 498 | if version: 499 | req = '{0}=={1}'.format(DIST_NAME, version) 500 | else: 501 | if UPPER_VERSION_EXCLUSIVE is None: 502 | req = DIST_NAME 503 | else: 504 | req = '{0}<{1}'.format(DIST_NAME, UPPER_VERSION_EXCLUSIVE) 505 | 506 | attrs = {'setup_requires': [req]} 507 | 508 | # NOTE: we need to parse the config file (e.g. setup.cfg) to make sure 509 | # it honours the options set in the [easy_install] section, and we need 510 | # to explicitly fetch the requirement eggs as setup_requires does not 511 | # get honored in recent versions of setuptools: 512 | # https://github.com/pypa/setuptools/issues/1273 513 | 514 | try: 515 | 516 | context = _verbose if DEBUG else _silence 517 | with context(): 518 | dist = _Distribution(attrs=attrs) 519 | try: 520 | dist.parse_config_files(ignore_option_errors=True) 521 | dist.fetch_build_eggs(req) 522 | except TypeError: 523 | # On older versions of setuptools, ignore_option_errors 524 | # doesn't exist, and the above two lines are not needed 525 | # so we can just continue 526 | pass 527 | 528 | # If the setup_requires succeeded it will have added the new dist to 529 | # the main working_set 530 | return pkg_resources.working_set.by_key.get(DIST_NAME) 531 | except Exception as e: 532 | if DEBUG: 533 | raise 534 | 535 | msg = 'Error retrieving {0} from {1}:\n{2}' 536 | if find_links: 537 | source = find_links[0] 538 | elif index_url != INDEX_URL: 539 | source = index_url 540 | else: 541 | source = 'PyPI' 542 | 543 | raise Exception(msg.format(DIST_NAME, source, repr(e))) 544 | 545 | def _do_upgrade(self, dist): 546 | # Build up a requirement for a higher bugfix release but a lower minor 547 | # release (so API compatibility is guaranteed) 548 | next_version = _next_version(dist.parsed_version) 549 | 550 | req = pkg_resources.Requirement.parse( 551 | '{0}>{1},<{2}'.format(DIST_NAME, dist.version, next_version)) 552 | 553 | package_index = PackageIndex(index_url=self.index_url) 554 | 555 | upgrade = package_index.obtain(req) 556 | 557 | if upgrade is not None: 558 | return self._do_download(version=upgrade.version) 559 | 560 | def _check_submodule(self): 561 | """ 562 | Check if the given path is a git submodule. 563 | 564 | See the docstrings for ``_check_submodule_using_git`` and 565 | ``_check_submodule_no_git`` for further details. 
566 | """ 567 | 568 | if (self.path is None or 569 | (os.path.exists(self.path) and not os.path.isdir(self.path))): 570 | return False 571 | 572 | if self.use_git: 573 | return self._check_submodule_using_git() 574 | else: 575 | return self._check_submodule_no_git() 576 | 577 | def _check_submodule_using_git(self): 578 | """ 579 | Check if the given path is a git submodule. If so, attempt to initialize 580 | and/or update the submodule if needed. 581 | 582 | This function makes calls to the ``git`` command in subprocesses. The 583 | ``_check_submodule_no_git`` option uses pure Python to check if the given 584 | path looks like a git submodule, but it cannot perform updates. 585 | """ 586 | 587 | cmd = ['git', 'submodule', 'status', '--', self.path] 588 | 589 | try: 590 | log.info('Running `{0}`; use the --no-git option to disable git ' 591 | 'commands'.format(' '.join(cmd))) 592 | returncode, stdout, stderr = run_cmd(cmd) 593 | except _CommandNotFound: 594 | # The git command simply wasn't found; this is most likely the 595 | # case on user systems that don't have git and are simply 596 | # trying to install the package from PyPI or a source 597 | # distribution. Silently ignore this case and simply don't try 598 | # to use submodules 599 | return False 600 | 601 | stderr = stderr.strip() 602 | 603 | if returncode != 0 and stderr: 604 | # Unfortunately the return code alone cannot be relied on, as 605 | # earlier versions of git returned 0 even if the requested submodule 606 | # does not exist 607 | 608 | # This is a warning that occurs in perl (from running git submodule) 609 | # which only occurs with a malformatted locale setting which can 610 | # happen sometimes on OSX. See again 611 | # https://github.com/astropy/astropy/issues/2749 612 | perl_warning = ('perl: warning: Falling back to the standard locale ' 613 | '("C").') 614 | if not stderr.strip().endswith(perl_warning): 615 | # Some other unknown error condition occurred 616 | log.warn('git submodule command failed ' 617 | 'unexpectedly:\n{0}'.format(stderr)) 618 | return False 619 | 620 | # Output of `git submodule status` is as follows: 621 | # 622 | # 1: Status indicator: '-' for submodule is uninitialized, '+' if 623 | # submodule is initialized but is not at the commit currently indicated 624 | # in .gitmodules (and thus needs to be updated), or 'U' if the 625 | # submodule is in an unstable state (i.e. has merge conflicts) 626 | # 627 | # 2. SHA-1 hash of the current commit of the submodule (we don't really 628 | # need this information but it's useful for checking that the output is 629 | # correct) 630 | # 631 | # 3. The output of `git describe` for the submodule's current commit 632 | # hash (this includes for example what branches the commit is on) but 633 | # only if the submodule is initialized. 
We ignore this information for
634 |         # now
635 |         _git_submodule_status_re = re.compile(
636 |             '^(?P<status>[+-U ])(?P<sha1>[0-9a-f]{40}) '
637 |             '(?P<submodule>\S+)( .*)?$')
638 | 
639 |         # The stdout should only contain one line--the status of the
640 |         # requested submodule
641 |         m = _git_submodule_status_re.match(stdout)
642 |         if m:
643 |             # Yes, the path *is* a git submodule
644 |             self._update_submodule(m.group('submodule'), m.group('status'))
645 |             return True
646 |         else:
647 |             log.warn(
648 |                 'Unexpected output from `git submodule status`:\n{0}\n'
649 |                 'Will attempt import from {1!r} regardless.'.format(
650 |                     stdout, self.path))
651 |             return False
652 | 
653 |     def _check_submodule_no_git(self):
654 |         """
655 |         Like ``_check_submodule_using_git``, but simply parses the .gitmodules file
656 |         to determine if the supplied path is a git submodule, and does not exec any
657 |         subprocesses.
658 | 
659 |         This can only determine if a path is a submodule--it does not perform
660 |         updates, etc. This function may need to be updated if the format of the
661 |         .gitmodules file is changed between git versions.
662 |         """
663 | 
664 |         gitmodules_path = os.path.abspath('.gitmodules')
665 | 
666 |         if not os.path.isfile(gitmodules_path):
667 |             return False
668 | 
669 |         # This is a minimal reader for gitconfig-style files. It handles a few of
670 |         # the quirks that make gitconfig files incompatible with ConfigParser-style
671 |         # files, but does not support the full gitconfig syntax (just enough
672 |         # needed to read a .gitmodules file).
673 |         gitmodules_fileobj = io.StringIO()
674 | 
675 |         # Must use io.open for cross-Python-compatible behavior wrt unicode
676 |         with io.open(gitmodules_path) as f:
677 |             for line in f:
678 |                 # gitconfig files are more flexible with leading whitespace; just
679 |                 # go ahead and remove it
680 |                 line = line.lstrip()
681 | 
682 |                 # comments can start with either # or ;
683 |                 if line and line[0] in ('#', ';'):
684 |                     continue
685 | 
686 |                 gitmodules_fileobj.write(line)
687 | 
688 |         gitmodules_fileobj.seek(0)
689 | 
690 |         cfg = RawConfigParser()
691 | 
692 |         try:
693 |             cfg.readfp(gitmodules_fileobj)
694 |         except Exception as exc:
695 |             log.warn('Malformatted .gitmodules file: {0}\n'
696 |                      '{1} cannot be assumed to be a git submodule.'.format(
697 |                          exc, self.path))
698 |             return False
699 | 
700 |         for section in cfg.sections():
701 |             if not cfg.has_option(section, 'path'):
702 |                 continue
703 | 
704 |             submodule_path = cfg.get(section, 'path').rstrip(os.sep)
705 | 
706 |             if submodule_path == self.path.rstrip(os.sep):
707 |                 return True
708 | 
709 |         return False
710 | 
711 |     def _update_submodule(self, submodule, status):
712 |         if status == ' ':
713 |             # The submodule is up to date; no action necessary
714 |             return
715 |         elif status == '-':
716 |             if self.offline:
717 |                 raise _AHBootstrapSystemExit(
718 |                     "Cannot initialize the {0} submodule in --offline mode; "
719 |                     "this requires being able to clone the submodule from an "
720 |                     "online repository.".format(submodule))
721 |             cmd = ['update', '--init']
722 |             action = 'Initializing'
723 |         elif status == '+':
724 |             cmd = ['update']
725 |             action = 'Updating'
726 |             if self.offline:
727 |                 cmd.append('--no-fetch')
728 |         elif status == 'U':
729 |             raise _AHBootstrapSystemExit(
730 |                 'Error: Submodule {0} contains unresolved merge conflicts. '
731 |                 'Please complete or abandon any changes in the submodule so that '
732 |                 'it is in a usable state, then try again.'.format(submodule))
733 |         else:
734 |             log.warn('Unknown status {0!r} for git submodule {1!r}. Will '
735 |                      'attempt to use the submodule as-is, but try to ensure '
736 |                      'that the submodule is in a clean state and contains no '
737 |                      'conflicts or errors.\n{2}'.format(status, submodule,
738 |                                                        _err_help_msg))
739 |             return
740 | 
741 |         err_msg = None
742 |         cmd = ['git', 'submodule'] + cmd + ['--', submodule]
743 |         log.warn('{0} {1} submodule with: `{2}`'.format(
744 |             action, submodule, ' '.join(cmd)))
745 | 
746 |         try:
747 |             log.info('Running `{0}`; use the --no-git option to disable git '
748 |                      'commands'.format(' '.join(cmd)))
749 |             returncode, stdout, stderr = run_cmd(cmd)
750 |         except OSError as e:
751 |             err_msg = str(e)
752 |         else:
753 |             if returncode != 0:
754 |                 err_msg = stderr
755 | 
756 |         if err_msg is not None:
757 |             log.warn('An unexpected error occurred updating the git submodule '
758 |                      '{0!r}:\n{1}\n{2}'.format(submodule, err_msg,
759 |                                                _err_help_msg))
760 | 
761 | class _CommandNotFound(OSError):
762 |     """
763 |     An exception raised when a command run with run_cmd is not found on the
764 |     system.
765 |     """
766 | 
767 | 
768 | def run_cmd(cmd):
769 |     """
770 |     Run a command in a subprocess, given as a list of command-line
771 |     arguments.
772 | 
773 |     Returns a ``(returncode, stdout, stderr)`` tuple.
774 |     """
775 | 
776 |     try:
777 |         p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
778 |         # XXX: May block if either stdout or stderr fill their buffers;
779 |         # however for the commands this is currently used for that is
780 |         # unlikely (they should have very brief output)
781 |         stdout, stderr = p.communicate()
782 |     except OSError as e:
783 |         if DEBUG:
784 |             raise
785 | 
786 |         if e.errno == errno.ENOENT:
787 |             msg = 'Command not found: `{0}`'.format(' '.join(cmd))
788 |             raise _CommandNotFound(msg, cmd)
789 |         else:
790 |             raise _AHBootstrapSystemExit(
791 |                 'An unexpected error occurred when running the '
792 |                 '`{0}` command:\n{1}'.format(' '.join(cmd), str(e)))
793 | 
794 | 
795 |     # Can fail if the default locale is not configured properly. See
796 |     # https://github.com/astropy/astropy/issues/2749. For the purposes under
797 |     # consideration 'latin1' is an acceptable fallback.
798 |     try:
799 |         stdio_encoding = locale.getdefaultlocale()[1] or 'latin1'
800 |     except ValueError:
801 |         # Due to an OSX oddity locale.getdefaultlocale() can also crash
802 |         # depending on the user's locale/language settings. See:
803 |         # http://bugs.python.org/issue18378
804 |         stdio_encoding = 'latin1'
805 | 
806 |     # Unlikely to fail at this point but even then let's be flexible
807 |     if not isinstance(stdout, str):
808 |         stdout = stdout.decode(stdio_encoding, 'replace')
809 |     if not isinstance(stderr, str):
810 |         stderr = stderr.decode(stdio_encoding, 'replace')
811 | 
812 |     return (p.returncode, stdout, stderr)
813 | 
814 | 
815 | def _next_version(version):
816 |     """
817 |     Given a parsed version from pkg_resources.parse_version, returns a new
818 |     version string with the next minor version.
819 | 820 | Examples 821 | ======== 822 | >>> _next_version(pkg_resources.parse_version('1.2.3')) 823 | '1.3.0' 824 | """ 825 | 826 | if hasattr(version, 'base_version'): 827 | # New version parsing from setuptools >= 8.0 828 | if version.base_version: 829 | parts = version.base_version.split('.') 830 | else: 831 | parts = [] 832 | else: 833 | parts = [] 834 | for part in version: 835 | if part.startswith('*'): 836 | break 837 | parts.append(part) 838 | 839 | parts = [int(p) for p in parts] 840 | 841 | if len(parts) < 3: 842 | parts += [0] * (3 - len(parts)) 843 | 844 | major, minor, micro = parts[:3] 845 | 846 | return '{0}.{1}.{2}'.format(major, minor + 1, 0) 847 | 848 | 849 | class _DummyFile(object): 850 | """A noop writeable object.""" 851 | 852 | errors = '' # Required for Python 3.x 853 | encoding = 'utf-8' 854 | 855 | def write(self, s): 856 | pass 857 | 858 | def flush(self): 859 | pass 860 | 861 | 862 | @contextlib.contextmanager 863 | def _verbose(): 864 | yield 865 | 866 | @contextlib.contextmanager 867 | def _silence(): 868 | """A context manager that silences sys.stdout and sys.stderr.""" 869 | 870 | old_stdout = sys.stdout 871 | old_stderr = sys.stderr 872 | sys.stdout = _DummyFile() 873 | sys.stderr = _DummyFile() 874 | exception_occurred = False 875 | try: 876 | yield 877 | except: 878 | exception_occurred = True 879 | # Go ahead and clean up so that exception handling can work normally 880 | sys.stdout = old_stdout 881 | sys.stderr = old_stderr 882 | raise 883 | 884 | if not exception_occurred: 885 | sys.stdout = old_stdout 886 | sys.stderr = old_stderr 887 | 888 | 889 | _err_help_msg = """ 890 | If the problem persists consider installing astropy_helpers manually using pip 891 | (`pip install astropy_helpers`) or by manually downloading the source archive, 892 | extracting it, and installing by running `python setup.py install` from the 893 | root of the extracted source code. 894 | """ 895 | 896 | 897 | class _AHBootstrapSystemExit(SystemExit): 898 | def __init__(self, *args): 899 | if not args: 900 | msg = 'An unknown problem occurred bootstrapping astropy_helpers.' 901 | else: 902 | msg = args[0] 903 | 904 | msg += '\n' + _err_help_msg 905 | 906 | super(_AHBootstrapSystemExit, self).__init__(msg, *args[1:]) 907 | 908 | 909 | BOOTSTRAPPER = _Bootstrapper.main() 910 | 911 | 912 | def use_astropy_helpers(**kwargs): 913 | """ 914 | Ensure that the `astropy_helpers` module is available and is importable. 915 | This supports automatic submodule initialization if astropy_helpers is 916 | included in a project as a git submodule, or will download it from PyPI if 917 | necessary. 918 | 919 | Parameters 920 | ---------- 921 | 922 | path : str or None, optional 923 | A filesystem path relative to the root of the project's source code 924 | that should be added to `sys.path` so that `astropy_helpers` can be 925 | imported from that path. 926 | 927 | If the path is a git submodule it will automatically be initialized 928 | and/or updated. 929 | 930 | The path may also be to a ``.tar.gz`` archive of the astropy_helpers 931 | source distribution. In this case the archive is automatically 932 | unpacked and made temporarily available on `sys.path` as a ``.egg`` 933 | archive. 934 | 935 | If `None` skip straight to downloading. 936 | 937 | download_if_needed : bool, optional 938 | If the provided filesystem path is not found an attempt will be made to 939 | download astropy_helpers from PyPI. 
It will then be made temporarily
940 |         available on `sys.path` as a ``.egg`` archive (using the
941 |         ``setup_requires`` feature of setuptools). If the ``--offline`` option
942 |         is given at the command line the value of this argument is overridden
943 |         to `False`.
944 | 
945 |     index_url : str, optional
946 |         If provided, use a different URL for the Python package index than the
947 |         main PyPI server.
948 | 
949 |     use_git : bool, optional
950 |         If `False` no git commands will be used--this effectively disables
951 |         support for git submodules. If the ``--no-git`` option is given at the
952 |         command line the value of this argument is overridden to `False`.
953 | 
954 |     auto_upgrade : bool, optional
955 |         By default, when installing a package from a non-development source
956 |         distribution ah_bootstrap will try to automatically check for patch
957 |         releases to astropy-helpers on PyPI and use the patched version over
958 |         any bundled versions. Setting this to `False` will disable that
959 |         functionality. If the ``--offline`` option is given at the command line
960 |         the value of this argument is overridden to `False`.
961 | 
962 |     offline : bool, optional
963 |         If `True` disable all actions that require an internet connection,
964 |         including downloading packages from the package index and fetching
965 |         updates to any git submodule. Defaults to `False`.
966 |     """
967 | 
968 |     global BOOTSTRAPPER
969 | 
970 |     config = BOOTSTRAPPER.config
971 |     config.update(**kwargs)
972 | 
973 |     # Create a new bootstrapper with the updated configuration and run it
974 |     BOOTSTRAPPER = _Bootstrapper(**config)
975 |     BOOTSTRAPPER.run()
976 | 
--------------------------------------------------------------------------------
/appveyor.yml:
--------------------------------------------------------------------------------
1 | # AppVeyor.com is a Continuous Integration service to build and run tests under
2 | # Windows
3 | 
4 | environment:
5 | 
6 |   global:
7 |       PYTHON: "C:\\conda"
8 |       MINICONDA_VERSION: "latest"
9 |       CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\ci-helpers\\appveyor\\windows_sdk.cmd"
10 |       PYTHON_ARCH: "64" # needs to be set for CMD_IN_ENV to succeed. If a mix
11 |                         # of 32 bit and 64 bit builds are needed, move this
12 |                         # to the matrix section.
13 | 
14 | 
15 |       # List runtime dependencies for the package that are available as conda
16 |       # packages here.
17 |       CONDA_DEPENDENCIES: ""
18 | 
19 |       # List other runtime dependencies for the package that are available as
20 |       # pip packages here.
21 |       PIP_DEPENDENCIES: ""
22 | 
23 |       # Conda packages for affiliated packages are hosted in channel
24 |       # "astropy" while builds for astropy LTS with recent numpy versions
25 |       # are in astropy-ci-extras. If your package uses either of these,
26 |       # add the channels to CONDA_CHANNELS along with any other channels
27 |       # you want to use.
28 |       CONDA_CHANNELS: "astropy-ci-extras astropy"
29 | 
30 |   matrix:
31 | 
32 |       # We test Python 3.7 for Python 3 support.
33 | - PYTHON_VERSION: "3.7" 34 | ASTROPY_VERSION: "stable" 35 | NUMPY_VERSION: "stable" 36 | 37 | platform: 38 | -x64 39 | 40 | install: 41 | - "git clone --depth 1 git://github.com/astropy/ci-helpers.git" 42 | - "powershell ci-helpers/appveyor/install-miniconda.ps1" 43 | - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%" 44 | - "activate test" 45 | 46 | # Not a .NET project, we build the package in the install step instead 47 | build: false 48 | 49 | test_script: 50 | - "%CMD_IN_ENV% python setup.py test" 51 | -------------------------------------------------------------------------------- /code-of-conduct.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to making participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, religion, or sexual identity and orientation. 11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies within all project spaces, and it also applies when 49 | an individual is representing the project or its community in public spaces. 50 | Examples of representing a project or community include using an official 51 | project e-mail address, posting via an official social media account, or acting 52 | as an appointed representative at an online or offline event. Representation of 53 | a project may be further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at [INSERT EMAIL ADDRESS]. 
All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 72 | 73 | [homepage]: https://www.contributor-covenant.org 74 | 75 | For answers to common questions about this code of conduct, see 76 | https://www.contributor-covenant.org/faq 77 | 78 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SOURCEDIR = source 8 | BUILDDIR = build 9 | 10 | # Put it first so that "make" without argument is like "make help". 11 | help: 12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 13 | 14 | .PHONY: help Makefile 15 | 16 | # Catch-all target: route all unknown targets to Sphinx using the new 17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 18 | %: Makefile 19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 
23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/source/INSTALLATION.rst: -------------------------------------------------------------------------------- 1 | Installation 2 | ============ 3 | 4 | You can install the package using the `pip` package installer:: 5 | 6 | pip install mirapy 7 | 8 | You can also build it from the source code:: 9 | 10 | git clone --recursive https://github.com/mirapy-org/mirapy.git 11 | cd mirapy 12 | pip install -r requirements.txt 13 | python setup.py install 14 | -------------------------------------------------------------------------------- /docs/source/LICENSE.rst: -------------------------------------------------------------------------------- 1 | License 2 | ======= 3 | 4 | MIT License 5 | 6 | Copyright (c) 2019 MiraPy Organisation 7 | 8 | Permission is hereby granted, free of charge, to any person obtaining a copy 9 | of this software and associated documentation files (the "Software"), to deal 10 | in the Software without restriction, including without limitation the rights 11 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 12 | copies of the Software, and to permit persons to whom the Software is 13 | furnished to do so, subject to the following conditions: 14 | 15 | The above copyright notice and this permission notice shall be included in all 16 | copies or substantial portions of the Software. 17 | 18 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 19 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 20 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 21 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 22 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 23 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 24 | SOFTWARE. 25 | -------------------------------------------------------------------------------- /docs/source/README.rst: -------------------------------------------------------------------------------- 1 | Introduction 2 | ============ 3 | 4 | MiraPy is a Python package for Deep Learning in Astronomy. It is built using 5 | Keras for developing ML models to run on CPU and GPU seamlessly. The 6 | aim is to make applying machine learning techniques on astronomical data easy 7 | for astronomers, researchers and students. 8 | 9 | Applications 10 | ------------ 11 | 12 | MiraPy can be used for problem-solving using ML techniques and will continue to grow to tackle new problems in Astronomy.
The following are some of the experiments that you can perform right now: 13 | 14 | - Classification of X-Ray Binaries using a neural network 15 | - Astronomical Image Reconstruction using an Autoencoder 16 | - Classification of the first catalog of variable stars by ATLAS 17 | - HTRU1 Pulsar Dataset Image Classification using a Convolutional Neural Network 18 | - Variable star Classification using a Recurrent Neural Network (RNN) 19 | - 2D visualization of feature sets using Principal Component Analysis (PCA) 20 | - Curve Fitting using Autograd (basic implementation) 21 | 22 | There are more projects that we will add soon, some of them as follows: 23 | 24 | - Feature Engineering (Selection, Reduction and Visualization) 25 | - Classification of different states of GRS1905+105 X-Ray Binaries using a Recurrent Neural Network (RNN) 26 | - Feature extraction from Images using Autoencoders and its applications in Astronomy 27 | 28 | You can find the applications of MiraPy in our `tutorial `_ repository. 29 | 30 | In the future, MiraPy will be able to do more, and in better ways, and we need your suggestions! Tell us what you would like to see as a part of this package on `Slack `_. 31 | 32 | Installation 33 | ------------ 34 | 35 | You can install the package using the `pip` package installer:: 36 | 37 | pip install mirapy 38 | 39 | You can also build it from the source code:: 40 | 41 | git clone --recursive https://github.com/mirapy-org/mirapy.git 42 | cd mirapy 43 | pip install -r requirements.txt 44 | python setup.py install 45 | 46 | Contributing 47 | ------------ 48 | 49 | MiraPy is far from perfect and we would love to see your contributions to the open-source community! MiraPy is open source, built on open source, and we'd love to have you hang out in our community. 50 | 51 | About Us 52 | -------- 53 | 54 | MiraPy is developed by `Swapnil Sharma `_ and `Akhil Singhal `_ as their final year 'Major Technical Project' under the guidance of `Dr. Arnav Bhavsar `_ at `Indian Institute of Technology, Mandi `_. 55 | -------------------------------------------------------------------------------- /docs/source/TUTORIALS.rst: -------------------------------------------------------------------------------- 1 | Tutorials 2 | ========= 3 | 4 | You can find the Jupyter notebooks on various applications of MiraPy for problem-solving in Astronomy in our `Github repository `_. 5 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # http://www.sphinx-doc.org/en/master/config 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | # 13 | # import os 14 | # import sys 15 | # sys.path.insert(0, os.path.abspath('.')) 16 | 17 | 18 | # -- Project information ----------------------------------------------------- 19 | 20 | project = 'MiraPy' 21 | copyright = '2019, Swapnil Sharma, Akhil Singhal' 22 | author = 'Swapnil Sharma, Akhil Singhal' 23 | 24 | # The full version, including alpha/beta/rc tags 25 | release = 'v0.1.0' 26 | 27 | 28 | # -- General configuration --------------------------------------------------- 29 | 30 | # Add any Sphinx extension module names here, as strings. They can be 31 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 32 | # ones. 33 | extensions = ['sphinx.ext.autodoc', 'autoapi.extension', 'nbsphinx'] 34 | 35 | # Add any paths that contain templates here, relative to this directory. 36 | templates_path = ['_templates'] 37 | 38 | # List of patterns, relative to source directory, that match files and 39 | # directories to ignore when looking for source files. 40 | # This pattern also affects html_static_path and html_extra_path. 41 | exclude_patterns = [] 42 | 43 | 44 | # -- Options for HTML output ------------------------------------------------- 45 | 46 | # The theme to use for HTML and HTML Help pages. See the documentation for 47 | # a list of builtin themes. 48 | # 49 | html_theme = 'sphinx_rtd_theme' 50 | 51 | html_theme_options = { 52 | 'canonical_url': '', 53 | # 'analytics_id': 'UA-XXXXXXX-1', # Provided by Google in your dashboard 54 | # 'logo_only': True, 55 | 'display_version': True, 56 | 'prev_next_buttons_location': 'bottom', 57 | 'style_external_links': False, 58 | # 'vcs_pageview_mode': '', 59 | 'style_nav_header_background': '#2D4264', 60 | # Toc options 61 | # 'collapse_navigation': True, 62 | 'sticky_navigation': True, 63 | 'navigation_depth': 4, 64 | 'includehidden': True, 65 | 'titles_only': False 66 | } 67 | 68 | # Add any paths that contain custom static files (such as style sheets) here, 69 | # relative to this directory. They are copied after the builtin static files, 70 | # so a file named "default.css" will overwrite the builtin "default.css". 71 | html_static_path = [] 72 | 73 | # ------------------------------------API DOC --------------------------------- 74 | autoapi_dirs = ['../../mirapy'] 75 | autoapi_type = 'python' 76 | autoapi_ignore = ['*tests*', '*_astropy_init*', '*conftest*', '*version*'] 77 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to MiraPy's documentation! 2 | ================================== 3 | 4 | .. image:: ../../logos/mirapy-logo.png 5 | :width: 400 6 | :alt: MiraPy Logo 7 | 8 | MiraPy is a Python package for Deep Learning in Astronomy. It is built using 9 | Keras for developing ML models to run on CPU and GPU seamlessly. The 10 | aim is to make applying machine learning techniques on astronomical data easy 11 | for astronomers, researchers and students. 12 | 13 | Github repository: `mirapy-org/mirapy `_ 14 | 15 | Table of Contents 16 | ================= 17 | 18 | .. 
toctree:: 19 | :maxdepth: 1 20 | 21 | README 22 | INSTALLATION 23 | TUTORIALS 24 | LICENSE 25 | 26 | 27 | Indices and tables 28 | ================== 29 | 30 | * :ref:`genindex` 31 | * :ref:`modindex` 32 | * :ref:`search` 33 | -------------------------------------------------------------------------------- /logos/mirapy-logo-square.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mirapy-org/mirapy/d37cb6c8f8c6630411a5ee1b9c7d5aa6bb6479eb/logos/mirapy-logo-square.png -------------------------------------------------------------------------------- /logos/mirapy-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mirapy-org/mirapy/d37cb6c8f8c6630411a5ee1b9c7d5aa6bb6479eb/logos/mirapy-logo.png -------------------------------------------------------------------------------- /mirapy/__init__.py: -------------------------------------------------------------------------------- 1 | # Licensed under the MIT license - see LICENSE.rst 2 | 3 | # Packages may add whatever they like to this file, but 4 | # should keep this content at the top. 5 | # ---------------------------------------------------------------------------- 6 | from ._astropy_init import * 7 | # ---------------------------------------------------------------------------- 8 | 9 | if not _ASTROPY_SETUP_: 10 | # For egg_info test builds to pass, put package imports here. 11 | from mirapy.fitting import * 12 | -------------------------------------------------------------------------------- /mirapy/_astropy_init.py: -------------------------------------------------------------------------------- 1 | # Licensed under the MIT license - see LICENSE.rst 2 | 3 | __all__ = ['__version__', '__githash__'] 4 | 5 | # this indicates whether or not we are in the package's setup.py 6 | try: 7 | _ASTROPY_SETUP_ 8 | except NameError: 9 | 10 | import builtins 11 | builtins._ASTROPY_SETUP_ = False 12 | 13 | try: 14 | from .version import version as __version__ 15 | except ImportError: 16 | __version__ = '' 17 | try: 18 | from .version import githash as __githash__ 19 | except ImportError: 20 | __githash__ = '' 21 | 22 | 23 | if not _ASTROPY_SETUP_: # noqa 24 | import os 25 | from warnings import warn 26 | from astropy.config.configuration import ( 27 | update_default_config, 28 | ConfigurationDefaultMissingError, 29 | ConfigurationDefaultMissingWarning) 30 | 31 | # Create the test function for self test 32 | from astropy.tests.runner import TestRunner 33 | test = TestRunner.make_test_runner_in(os.path.dirname(__file__)) 34 | test.__test__ = False 35 | __all__ += ['test'] 36 | 37 | # add these here so we only need to cleanup the namespace at the end 38 | config_dir = None 39 | 40 | if not os.environ.get('ASTROPY_SKIP_CONFIG_UPDATE', False): 41 | config_dir = os.path.dirname(__file__) 42 | config_template = os.path.join(config_dir, __package__ + ".cfg") 43 | if os.path.isfile(config_template): 44 | try: 45 | update_default_config( 46 | __package__, config_dir, version=__version__) 47 | except TypeError as orig_error: 48 | try: 49 | update_default_config(__package__, config_dir) 50 | except ConfigurationDefaultMissingError as e: 51 | wmsg = (e.args[0] + 52 | " Cannot install default profile. 
If you are " 53 | "importing from source, this is expected.") 54 | warn(ConfigurationDefaultMissingWarning(wmsg)) 55 | del e 56 | except Exception: 57 | raise orig_error 58 | -------------------------------------------------------------------------------- /mirapy/autoencoder/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mirapy-org/mirapy/d37cb6c8f8c6630411a5ee1b9c7d5aa6bb6479eb/mirapy/autoencoder/__init__.py -------------------------------------------------------------------------------- /mirapy/autoencoder/models.py: -------------------------------------------------------------------------------- 1 | import os 2 | from keras.models import Model, load_model 3 | from keras.layers import * 4 | import matplotlib.pyplot as plt 5 | 6 | 7 | class Autoencoder: 8 | def __init__(self): 9 | """ 10 | Base Class for autoencoder models. 11 | """ 12 | self.model = None 13 | self.history = None 14 | self.dim = None 15 | self.input_img = None 16 | self.encoded = None 17 | self.decoded = None 18 | 19 | def compile(self, optimizer, loss): 20 | """ 21 | Compile model with given configuration. 22 | 23 | :param optimizer: Instance of optimizer. 24 | :param loss: String (name of loss function) or custom function. 25 | """ 26 | pass 27 | 28 | def train(self, x, y, batch_size=32, epochs=100, validation_data=None, 29 | shuffle=True, verbose=1): 30 | """ 31 | Trains the model on the training data with given settings. 32 | 33 | :param x: Numpy array of training data. 34 | :param y: Numpy array of target data. 35 | :param epochs: Integer. Number of epochs during training. 36 | :param batch_size: Number of samples per gradient update. 37 | :param validation_data: Numpy array of validation data. 38 | :param shuffle: Boolean. Shuffles the data before training. 39 | :param verbose: Value is 0, 1, or 2. 40 | """ 41 | pass 42 | 43 | def predict(self, x): 44 | """ 45 | Predicts the output of the model for the given data as input. 46 | 47 | :param x: Input data as Numpy arrays. 48 | """ 49 | pass 50 | 51 | def plot_history(self): 52 | """ 53 | Plots loss vs epoch graph. 54 | """ 55 | plt.plot(self.history.history['loss']) 56 | if 'val_loss' in self.history.history.keys(): 57 | plt.plot(self.history.history['val_loss']) 58 | plt.title('Autoencoder loss') 59 | plt.ylabel('Loss') 60 | plt.xlabel('Epoch') 61 | plt.legend(['Train', 'Test'], loc='upper right') 62 | plt.show() 63 | 64 | def save_model(self, model_name, path='models/'): 65 | """ 66 | Saves a model into an HDF5 file. 67 | 68 | :param model_name: File name. 69 | :param path: Path of directory. 70 | """ 71 | path += model_name 72 | self.model.save(path) 73 | 74 | def load_model(self, model_name, path='models/'): 75 | """ 76 | Loads a model from an HDF5 file. 77 | 78 | :param model_name: File name. 79 | :param path: Path of directory. 80 | """ 81 | path += model_name 82 | if os.path.exists(path): 83 | self.model = load_model(path) 84 | else: 85 | raise FileNotFoundError("Model does not exist") 86 | 87 | def summary(self): 88 | pass 89 | 90 | 91 | class DeNoisingAutoencoder(Autoencoder): 92 | def __init__(self, img_dim, activation='relu', padding='same'): 93 | """ 94 | De-noising autoencoder used for astronomical image reconstruction. 95 | 96 | :param img_dim: Tuple. Dimension of input and output image. 97 | :param activation: String (activation function name). 98 | :param padding: String (type of padding in convolution layers).
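        A minimal end-to-end sketch (the image arrays, optimizer and epoch
        count below are illustrative assumptions, not fixed API values)::

            import numpy as np
            from keras.optimizers import Adam

            x_noisy = np.random.rand(10, 64, 64, 1)  # stand-in noisy inputs
            x_clean = np.random.rand(10, 64, 64, 1)  # stand-in clean targets
            ae = DeNoisingAutoencoder(img_dim=(64, 64))
            ae.compile(optimizer=Adam(lr=0.001), loss='binary_crossentropy')
            ae.train(x_noisy, x_clean, batch_size=2, epochs=1)
            restored = ae.predict(x_noisy)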
99 | """ 100 | self.dim = img_dim 101 | self.input_img = Input(shape=(*img_dim, 1)) 102 | 103 | x = Conv2D(64, (3, 3), activation=activation, padding=padding)( 104 | self.input_img) 105 | x = MaxPooling2D((2, 2), padding=padding)(x) 106 | x = Conv2D(32, (3, 3), activation=activation, padding=padding)(x) 107 | x = MaxPooling2D((2, 2), padding=padding)(x) 108 | x = Conv2D(16, (3, 3), activation=activation, padding=padding)(x) 109 | x = BatchNormalization()(x) 110 | encoded = MaxPooling2D((2, 2), padding=padding)(x) 111 | 112 | x = Conv2D(16, (3, 3), activation=activation, padding=padding)(encoded) 113 | x = UpSampling2D((2, 2))(x) 114 | x = Conv2D(32, (3, 3), activation=activation, padding=padding)(x) 115 | x = UpSampling2D((2, 2))(x) 116 | x = Conv2D(64, (3, 3), activation=activation, padding=padding)(x) 117 | x = BatchNormalization()(x) 118 | x = UpSampling2D((2, 2))(x) 119 | self.decoded = Conv2D(1, (3, 3), activation='sigmoid', 120 | padding=padding)(x) 121 | 122 | def compile(self, optimizer, loss): 123 | """ 124 | Compile model with given configuration. 125 | 126 | :param optimizer: Instance of optimizer. 127 | :param loss: String (name of loss function) or custom function. 128 | """ 129 | self.model = Model(self.input_img, self.decoded) 130 | self.model.compile(optimizer=optimizer, loss=loss) 131 | 132 | def train(self, x, y, batch_size=32, epochs=100, validation_data=None, 133 | shuffle=True, verbose=1): 134 | """ 135 | Trains the model on the training data with given settings. 136 | 137 | :param x: Numpy array of training data. 138 | :param y: Numpy array of target data. 139 | :param epochs: Integer. Number of epochs during training. 140 | :param batch_size: Number of samples per gradient update. 141 | :param validation_data: Numpy array of validation data. 142 | :param shuffle: Boolean. Shuffles the data before training. 143 | :param verbose: Value is 0, 1, or 2. 144 | """ 145 | self.history = self.model.fit(x, y, 146 | epochs=epochs, 147 | batch_size=batch_size, 148 | shuffle=shuffle, 149 | validation_data=validation_data, 150 | verbose=verbose) 151 | 152 | def predict(self, x): 153 | """ 154 | Predicts the output of the model for the given data as input. 155 | 156 | :param x: Input data as Numpy arrays. 157 | """ 158 | return self.model.predict(x) 159 | 160 | def show_image_pairs(self, original_images, decoded_images, max_images): 161 | """ 162 | Displays pairs of original and decoded images in a grid using Matplotlib. 163 | 164 | :param original_images: Array of original images. 165 | :param decoded_images: Array of decoded images. 166 | :param max_images: Integer. Maximum number of images in a row.
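        Example sketch (assumes ``ae`` is a trained ``DeNoisingAutoencoder``
        and ``x_noisy`` is an array of input images)::

            decoded = ae.predict(x_noisy)
            ae.show_image_pairs(x_noisy, decoded, max_images=5)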
167 | """ 168 | n = min(max_images, len(decoded_images)) 169 | 170 | plt.figure(figsize=(20, 8)) 171 | for i in range(n): 172 | ax = plt.subplot(2, n, i + 1) 173 | plt.imshow(original_images[i].reshape(self.dim)) 174 | plt.gray() 175 | ax.get_xaxis().set_visible(False) 176 | ax.get_yaxis().set_visible(False) 177 | 178 | ax = plt.subplot(2, n, i + 1 + n) 179 | plt.imshow(decoded_images[i].reshape(self.dim)) 180 | plt.gray() 181 | ax.get_xaxis().set_visible(False) 182 | ax.get_yaxis().set_visible(False) 183 | plt.show() 184 | -------------------------------------------------------------------------------- /mirapy/classifiers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mirapy-org/mirapy/d37cb6c8f8c6630411a5ee1b9c7d5aa6bb6479eb/mirapy/classifiers/__init__.py -------------------------------------------------------------------------------- /mirapy/classifiers/models.py: -------------------------------------------------------------------------------- 1 | import os 2 | from keras.optimizers import * 3 | from keras.models import load_model, Sequential 4 | # from keras.layers import Input, Dense, LSTM, Dropout 5 | from keras.layers import * 6 | import matplotlib.pyplot as plt 7 | 8 | 9 | class Classifier: 10 | def __init__(self): 11 | """ 12 | Base class for classification models. It provides general abstract 13 | methods required for applying machine learning techniques. 14 | """ 15 | 16 | self.model = None 17 | self.optimizer = None 18 | self.activation = None 19 | self.history = None 20 | 21 | def compile(self, optimizer, loss='mean_squared_error'): 22 | """ 23 | Compile model with given configuration. 24 | 25 | :param optimizer: Instance of optimizer. 26 | :param loss: String (name of loss function) or custom function. 27 | """ 28 | pass 29 | 30 | def save_model(self, model_name, path='models/'): 31 | """ 32 | Saves a model into an HDF5 file. 33 | 34 | :param model_name: File name. 35 | :param path: Path of directory. 36 | """ 37 | path += model_name 38 | self.model.save(path) 39 | 40 | def load_model(self, model_name, path='models/'): 41 | """ 42 | Loads a model from an HDF5 file. 43 | 44 | :param model_name: File name. 45 | :param path: Path of directory. 46 | """ 47 | path += model_name 48 | if os.path.exists(path): 49 | self.model = load_model(path) 50 | else: 51 | raise FileNotFoundError("Model does not exist") 52 | 53 | def train(self, x_train, y_train, epochs, batch_size, reset_weights, 54 | class_weight, validation_data, verbose): 55 | """ 56 | Trains the model on the training data with given settings. 57 | 58 | :param x_train: Numpy array of training data. 59 | :param y_train: Numpy array of target data. 60 | :param epochs: Integer. Number of epochs during training. 61 | :param batch_size: Number of samples per gradient update. 62 | :param reset_weights: Boolean. Set true to reset the weights of model. 63 | :param class_weight: Dictionary. Weights of classes in loss function 64 | during training. 65 | :param validation_data: Numpy array of validation data. 66 | :param verbose: Value is 0, 1, or 2. 67 | """ 68 | pass 69 | 70 | def predict(self, x): 71 | """ 72 | Predicts the output of the model for the given data as input. 73 | 74 | :param x: Input data as Numpy arrays. 75 | """ 76 | pass 77 | 78 | def plot_history(self): 79 | """ 80 | Plots loss vs epoch graph.
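        Intended to be called after ``train``, which populates
        ``self.history``; a minimal sketch::

            clf.train(x_train, y_train)
            clf.plot_history()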
81 | """ 82 | plt.plot(self.history.history['loss']) 83 | if 'val_loss' in self.history.history.keys(): 84 | plt.plot(self.history.history['val_loss']) 85 | plt.title('Model loss') 86 | plt.ylabel('Loss') 87 | plt.xlabel('Epoch') 88 | plt.legend(['Train', 'Test'], loc='upper right') 89 | plt.show() 90 | 91 | def reset(self): 92 | """ 93 | Resets all weights of the model. 94 | """ 95 | self.model.reset_states() 96 | 97 | 98 | class XRayBinaryClassifier(Classifier): 99 | def __init__(self, activation='relu'): 100 | """ 101 | Classification model for X-Ray Binaries (black hole, pulsar and non-pulsar systems). 102 | 103 | :param activation: String (activation function name). 104 | """ 105 | self.activation = activation 106 | 107 | model = Sequential() 108 | model.add(Dense(32, input_shape=(3,), activation=self.activation)) 109 | model.add(Dense(32, activation=self.activation)) 110 | model.add(Dense(16, activation=self.activation)) 111 | model.add(Dense(3, activation='softmax'))  # one unit per class: BH, P, NP 112 | self.model = model 113 | 114 | def compile(self, optimizer=Adam(lr=0.0001, decay=1e-6), 115 | loss='sparse_categorical_crossentropy'): 116 | """ 117 | Compile model with given configuration. 118 | 119 | :param optimizer: Instance of optimizer. 120 | :param loss: String (name of loss function) or custom function. 121 | """ 122 | self.optimizer = optimizer 123 | self.model.compile(self.optimizer, 124 | loss=loss, metrics=['accuracy']) 125 | 126 | def train(self, x_train, y_train, epochs=50, batch_size=100, 127 | reset_weights=True, class_weight=None, validation_data=None, 128 | verbose=1): 129 | """ 130 | Trains the model on the training data with given settings. 131 | 132 | :param x_train: Numpy array of training data. 133 | :param y_train: Numpy array of target data. 134 | :param epochs: Integer. Number of epochs during training. 135 | :param batch_size: Number of samples per gradient update. 136 | :param reset_weights: Boolean. Set true to reset the weights of model. 137 | :param class_weight: Dictionary. Weights of classes in loss function 138 | during training. 139 | :param validation_data: Numpy array of validation data. 140 | :param verbose: Value is 0, 1, or 2. 141 | """ 142 | if reset_weights: 143 | self.reset() 144 | 145 | self.history = self.model.fit(x_train, y_train, batch_size=batch_size, 146 | epochs=epochs, 147 | validation_data=validation_data, 148 | class_weight=class_weight, shuffle=True, 149 | verbose=verbose) 150 | 151 | def predict(self, x): 152 | """ 153 | Predicts the output of the model for the given data as input. 154 | 155 | :param x: Input data as Numpy arrays. 156 | :return: Predicted class for Input data. 157 | """ 158 | return self.model.predict_classes(x) 159 | 160 | 161 | class AtlasVarStarClassifier(Classifier): 162 | 163 | def __init__(self, activation='relu', input_size=22, num_classes=9): 164 | """ 165 | Classification model for ATLAS variable stars. 166 | 167 | :param activation: String (activation function name). 168 | :param input_size: Integer. Dimension of Feature Vector. 169 | :param num_classes: Integer. Number of Classes.
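        A usage sketch (the feature matrix and one-hot labels below are
        illustrative assumptions)::

            import numpy as np
            from keras.utils import to_categorical

            x = np.random.rand(100, 22)  # 22 features per star
            y = to_categorical(np.random.randint(9, size=100), num_classes=9)
            clf = AtlasVarStarClassifier(input_size=22, num_classes=9)
            clf.compile()
            clf.train(x, y, epochs=1, batch_size=10)
            predictions = clf.predict(x)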
170 | """ 171 | self.activation = activation 172 | self.history = None 173 | 174 | model = Sequential() 175 | model.add(Dense(64, input_shape=(input_size,), 176 | activation=self.activation)) 177 | model.add(Dense(64, activation=self.activation)) 178 | model.add(Dense(32, activation=self.activation)) 179 | model.add(Dense(16, activation=self.activation)) 180 | model.add(Dense(num_classes, activation='softmax')) 181 | self.model = model 182 | 183 | def compile(self, optimizer=Adam(lr=0.01, decay=0.01), 184 | loss='mean_squared_error'): 185 | """ 186 | Compile model with given configuration. 187 | 188 | :param optimizer: Instance of optimizer. 189 | :param loss: String (name of loss function) or custom function. 190 | """ 191 | self.optimizer = optimizer 192 | self.model.compile(self.optimizer, 193 | loss=loss, metrics=['accuracy']) 194 | 195 | def train(self, x_train, y_train, epochs=50, batch_size=100, 196 | reset_weights=True, class_weight=None, validation_data=None, 197 | verbose=1): 198 | """ 199 | Trains the model on the training data with given settings. 200 | 201 | :param x_train: Numpy array of training data. 202 | :param y_train: Numpy array of target data. 203 | :param epochs: Integer. Number of epochs during training. 204 | :param batch_size: Number of samples per gradient update. 205 | :param reset_weights: Boolean. Set true to reset the weights of model. 206 | :param class_weight: Dictionary. Weights of classes in loss function 207 | during training. 208 | :param validation_data: Numpy array of validation data. 209 | :param verbose: Value is 0, 1, or 2. 210 | """ 211 | if reset_weights: 212 | self.reset() 213 | 214 | self.history = self.model.fit(x_train, y_train, batch_size=batch_size, 215 | epochs=epochs, 216 | validation_data=validation_data, 217 | class_weight=class_weight, shuffle=True, 218 | verbose=verbose) 219 | 220 | def predict(self, x): 221 | """ 222 | Predicts the output of the model for the given data as input. 223 | 224 | :param x: Input data as Numpy arrays. 225 | :return: Predicted class for Input data. 226 | """ 227 | return self.model.predict_classes(x) 228 | 229 | 230 | class OGLEClassifier(Classifier): 231 | 232 | def __init__(self, activation='relu', input_size=50, num_classes=5): 233 | """ 234 | Feature classification model for OGLE variable star 235 | time-series dataset. 236 | 237 | :param activation: String (activation function name). 238 | :param input_size: Integer. Dimension of Feature Vector. 239 | :param num_classes: Integer. Number of Classes. 240 | """ 241 | self.activation = activation 242 | self.history = None 243 | 244 | model = Sequential() 245 | model.add(LSTM(units=64, input_shape=(input_size, 1))) 246 | model.add(Dense(64, activation=self.activation)) 247 | model.add(Dropout(0.2)) 248 | model.add(Dense(16, activation=self.activation)) 249 | model.add(Dense(num_classes, activation='softmax')) 250 | self.model = model 251 | 252 | def compile(self, optimizer='adam', loss='categorical_crossentropy'): 253 | """ 254 | Compile model with given configuration. 255 | 256 | :param optimizer: Instance of optimizer. 257 | :param loss: String (name of loss function) or custom function. 258 | """ 259 | self.optimizer = optimizer 260 | self.model.compile(self.optimizer, loss=loss, metrics=['accuracy']) 261 | 262 | def train(self, x_train, y_train, epochs=50, batch_size=100, 263 | reset_weights=True, class_weight=None, validation_data=None, 264 | verbose=1): 265 | """ 266 | Trains the model on the training data with given settings. 
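        The LSTM input layer expects ``x_train`` of shape
        ``(n_samples, input_size, 1)``, such as the array returned by
        ``load_ogle_dataset``, together with one-hot encoded ``y_train``.
        A hedged sketch (the path and class names are illustrative)::

            from sklearn.preprocessing import LabelEncoder
            from keras.utils import to_categorical
            from mirapy.data.load_dataset import load_ogle_dataset

            x, y = load_ogle_dataset('ogle', classes=['cep', 'rrlyr'])
            labels = to_categorical(LabelEncoder().fit_transform(y), 2)
            clf = OGLEClassifier(num_classes=2)
            clf.compile()
            clf.train(x, labels, epochs=1)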
267 | 268 | :param x_train: Numpy array of training data. 269 | :param y_train: Numpy array of target data. 270 | :param epochs: Integer. Number of epochs during training. 271 | :param batch_size: Number of samples per gradient update. 272 | :param reset_weights: Boolean. Set true to reset the weights of model. 273 | :param class_weight: Dictionary. Weights of classes in loss function 274 | during training. 275 | :param validation_data: Numpy array of validation data. 276 | :param verbose: Value is 0, 1, or 2. 277 | """ 278 | if reset_weights: 279 | self.reset() 280 | 281 | self.history = self.model.fit(x_train, y_train, batch_size=batch_size, 282 | epochs=epochs, 283 | validation_data=validation_data, 284 | class_weight=class_weight, shuffle=True, 285 | verbose=verbose) 286 | 287 | def predict(self, x): 288 | """ 289 | Predicts the output of the model for the given data as input. 290 | 291 | :param x: Input data as Numpy arrays. 292 | :return: Predicted class for Input data. 293 | """ 294 | return self.model.predict_classes(x) 295 | 296 | 297 | class HTRU1Classifier(Classifier): 298 | def __init__(self, input_dim, activation='relu', padding='same', 299 | dropout=0.25, num_classes=2): 300 | """ 301 | CNN classification of pulsar and non-pulsar data released by the HTRU 302 | survey as Data Release 1. The dataset has the same structure as the 303 | CIFAR-10 dataset. 304 | 305 | :param input_dim: Tuple. Dimension of input data. 306 | :param activation: String. Activation function name. 307 | :param padding: String. Padding type. 308 | :param dropout: Float between 0 and 1. Dropout value. 309 | :param num_classes: Integer. Number of classes. 310 | """ 311 | self.input_dim = input_dim 312 | self.activation = activation 313 | self.padding = padding 314 | self.history = None 315 | 316 | self.model = Sequential() 317 | self.model.add(Conv2D(32, (3, 3), padding=padding, 318 | input_shape=input_dim)) 319 | self.model.add(Activation(activation)) 320 | self.model.add(Conv2D(32, (3, 3))) 321 | self.model.add(Activation(activation)) 322 | self.model.add(MaxPooling2D(pool_size=(2, 2))) 323 | self.model.add(Dropout(dropout)) 324 | 325 | self.model.add(Conv2D(64, (3, 3), padding=padding)) 326 | self.model.add(Activation(activation)) 327 | self.model.add(Conv2D(64, (3, 3))) 328 | self.model.add(Activation(activation)) 329 | self.model.add(MaxPooling2D(pool_size=(2, 2))) 330 | self.model.add(Dropout(dropout)) 331 | 332 | self.model.add(Flatten()) 333 | self.model.add(Dense(512)) 334 | self.model.add(Activation(activation)) 335 | self.model.add(Dropout(0.5)) 336 | self.model.add(Dense(num_classes)) 337 | self.model.add(Activation('softmax')) 338 | 339 | def compile(self, optimizer, loss='categorical_crossentropy'): 340 | """ 341 | Compile model with given configuration. 342 | 343 | :param optimizer: Instance of optimizer. 344 | :param loss: String (name of loss function) or custom function. 345 | """ 346 | self.model.compile(loss=loss, optimizer=optimizer) 347 | 348 | def train(self, x_train, y_train, epochs=100, batch_size=32, 349 | reset_weights=True, class_weight=None, validation_data=None, 350 | verbose=1): 351 | if reset_weights: 352 | self.reset() 353 | 354 | self.history = self.model.fit(x_train, y_train, batch_size=batch_size, 355 | epochs=epochs, 356 | validation_data=validation_data, 357 | class_weight=class_weight, shuffle=True, 358 | verbose=verbose) 359 | 360 | def predict(self, x): 361 | """ 362 | Predicts the output of the model for the given data as input. 363 | 364 | :param x: Input data as Numpy arrays.
365 | :return: Predicted class for Input data. 366 | """ 367 | return self.model.predict_classes(x) 368 | -------------------------------------------------------------------------------- /mirapy/classifiers/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mirapy-org/mirapy/d37cb6c8f8c6630411a5ee1b9c7d5aa6bb6479eb/mirapy/classifiers/tests/__init__.py -------------------------------------------------------------------------------- /mirapy/classifiers/tests/test_models.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pytest 3 | import numpy as np 4 | from mirapy.data import load_dataset 5 | from mirapy.classifiers import models 6 | 7 | def test_XRayBinaryClassifier(): 8 | x = np.array([[1., 2., 3.], [4., 5., 6.], [7., 8., 9.]]) 9 | y = np.array([0, 1, 2]) 10 | model = models.XRayBinaryClassifier('relu') 11 | model.compile() 12 | model.train(x, y) 13 | y_pred = model.predict(x) 14 | 15 | assert type(y_pred) == np.ndarray -------------------------------------------------------------------------------- /mirapy/conftest.py: -------------------------------------------------------------------------------- 1 | # This file is used to configure the behavior of pytest when using the Astropy 2 | # test infrastructure. 3 | 4 | from astropy.version import version as astropy_version 5 | if astropy_version < '3.0': 6 | # With older versions of Astropy, we actually need to import the pytest 7 | # plugins themselves in order to make them discoverable by pytest. 8 | from astropy.tests.pytest_plugins import * 9 | else: 10 | # As of Astropy 3.0, the pytest plugins provided by Astropy are 11 | # automatically made available when Astropy is installed. This means it's 12 | # not necessary to import them here, but we still need to import global 13 | # variables that are used for configuration. 14 | from astropy.tests.plugins.display import PYTEST_HEADER_MODULES, TESTED_VERSIONS 15 | 16 | from astropy.tests.helper import enable_deprecations_as_exceptions 17 | 18 | ## Uncomment the following line to treat all DeprecationWarnings as 19 | ## exceptions. For Astropy v2.0 or later, there are 2 additional keywords, 20 | ## as follow (although default should work for most cases). 21 | ## To ignore some packages that produce deprecation warnings on import 22 | ## (in addition to 'compiler', 'scipy', 'pygments', 'ipykernel', and 23 | ## 'setuptools'), add: 24 | ## modules_to_ignore_on_import=['module_1', 'module_2'] 25 | ## To ignore some specific deprecation warning messages for Python version 26 | ## MAJOR.MINOR or later, add: 27 | ## warnings_to_ignore_by_pyver={(MAJOR, MINOR): ['Message to ignore']} 28 | # enable_deprecations_as_exceptions() 29 | 30 | ## Uncomment and customize the following lines to add/remove entries from 31 | ## the list of packages for which version numbers are displayed when running 32 | ## the tests. Making it pass for KeyError is essential in some cases when 33 | ## the package uses other astropy affiliated packages. 34 | # try: 35 | # PYTEST_HEADER_MODULES['Astropy'] = 'astropy' 36 | # PYTEST_HEADER_MODULES['scikit-image'] = 'skimage' 37 | # del PYTEST_HEADER_MODULES['h5py'] 38 | # except (NameError, KeyError): # NameError is needed to support Astropy < 1.0 39 | # pass 40 | 41 | ## Uncomment the following lines to display the version number of the 42 | ## package rather than the version number of Astropy in the top line when 43 | ## running the tests. 
44 | # import os 45 | # 46 | ## This is to figure out the package version, rather than 47 | ## using Astropy's 48 | # try: 49 | # from .version import version 50 | # except ImportError: 51 | # version = 'dev' 52 | # 53 | # try: 54 | # packagename = os.path.basename(os.path.dirname(__file__)) 55 | # TESTED_VERSIONS[packagename] = version 56 | # except NameError: # Needed to support Astropy <= 1.0.0 57 | # pass 58 | -------------------------------------------------------------------------------- /mirapy/data/README.rst: -------------------------------------------------------------------------------- 1 | Data directory 2 | ============== 3 | 4 | This directory contains data files included with the package source 5 | code distribution. Note that this is intended only for relatively small files 6 | - large files should be externally hosted and downloaded as needed. 7 | 8 | -------------------------------------------------------------------------------- /mirapy/data/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mirapy-org/mirapy/d37cb6c8f8c6630411a5ee1b9c7d5aa6bb6479eb/mirapy/data/__init__.py -------------------------------------------------------------------------------- /mirapy/data/load_dataset.py: -------------------------------------------------------------------------------- 1 | import os 2 | import numpy as np 3 | from scipy.signal import convolve2d 4 | from tqdm import tqdm 5 | import cv2 6 | import pandas as pd 7 | from sklearn.preprocessing import StandardScaler 8 | 9 | from mirapy.utils import unpickle 10 | 11 | 12 | def load_messier_catalog_images(path, img_size=None, disable_tqdm=False): 13 | """ 14 | Data loader for Messier catalog images. The images are available 15 | in the `messier-catalog-images` repository of the MiraPy organisation. 16 | 17 | :param path: String. Directory path. 18 | :param img_size: Final dimensions of the image. 19 | :param disable_tqdm: Boolean. Set True to disable progress bar. 20 | :return: Array of images. 21 | """ 22 | images = [] 23 | for filename in tqdm(os.listdir(path), disable=disable_tqdm): 24 | filepath = os.path.join(path, filename) 25 | img = cv2.imread(filepath, cv2.IMREAD_GRAYSCALE) 26 | img = img/img.max() 27 | img = img * 255. 28 | if img_size: 29 | img = cv2.resize(img, img_size) 30 | images.append(np.array(img)) 31 | return np.array(images) 32 | 33 | 34 | def prepare_messier_catalog_images(images, psf, sigma): 35 | """ 36 | Function to apply convolution and add noise from a Poisson distribution 37 | to an array of images. 38 | 39 | :param images: Array of images. 40 | :param psf: Point Spread Function (PSF). 41 | :param sigma: Float. Standard deviation of the added noise. 42 | :return: Original image arrays and convolved image arrays. 43 | """ 44 | images = np.array(images).astype('float32') / 255. 45 | x_conv2d = [convolve2d(I, psf, 'same') for I in images] 46 | x_conv2d_noisy = [I + sigma * np.random.poisson(I) for I in x_conv2d] 47 | return images, x_conv2d_noisy 48 | 49 | 50 | def load_xray_binary_data(path, standard_scaler=True): 51 | """ 52 | Loads the X-Ray Binary dataset from a directory. 53 | 54 | :param path: Path to the directory. 55 | :param standard_scaler: Bool. Standardize data or not. 56 | :return: Dataset and Class labels.
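        Example (a sketch; ``xray_data/`` stands in for a directory of
        ``.asc`` observation files)::

            x, y = load_xray_binary_data('xray_data/', standard_scaler=True)
            # x: (n_samples, 3) scaled intensity/colour features
            # y: class labels from {'BH', 'P', 'NP'}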
57 | """ 58 | asc_files = [os.path.join(dp, f) 59 | for dp, dn, filenames in os.walk(path) 60 | for f in filenames if os.path.splitext(f)[1] == '.asc'] 61 | datapoints = [] 62 | for path in asc_files: 63 | with open(path, 'r') as f: 64 | lis = [line.split() for line in f] 65 | for l in lis: 66 | if len(l) == 6: 67 | l[1] = l[0] + " " + l[1] 68 | l.remove(l[0]) 69 | datapoints += lis 70 | 71 | bh_keys = ['CygX-1 HMBH', 'LMCX-1 HMBH', 'J1118+480 LMBH', 72 | 'J1550m564 LMBH', 'J1650-500 LMBH', 'J1655-40 LMBH', 73 | 'GX339-4 LMBH', 'J1859+226 LMBH', 'GRS1915+105 LMBH'] 74 | pulsar_keys = ['J0352+309 Pulsar', 'J1901+03 Pulsar', 'J1947+300 Pulsar', 75 | 'J2030p375 Pulsar', 'J1538-522 Pulsar', 'CenX-3 Pulsar', 76 | 'HerX-1 Pulsar', 'SMCX-1 Pulsar', 'VelaX-1 Pulsar'] 77 | nonpulsar_keys = ['ScoX-1 Zsource', 'GX9+1 Atoll', 'GX17+2 Zsource', 78 | 'CygX-2 Zsource', 'GX9+9 Atoll', 'GX349+2 Zsource'] 79 | 80 | for i, _ in enumerate(datapoints): 81 | system = datapoints[i][0] 82 | if system in bh_keys: 83 | datapoints[i][0] = 'BH' 84 | elif system in pulsar_keys: 85 | datapoints[i][0] = 'P' 86 | elif system in nonpulsar_keys: 87 | datapoints[i][0] = 'NP' 88 | 89 | rawdf = pd.DataFrame(datapoints) 90 | rawdf.columns = ['class', 'date', 'intensity', 'c1', 'c2'] 91 | rawdf = rawdf.drop('date', 1) 92 | rawdf = rawdf.convert_objects(convert_numeric=True) 93 | df = rawdf.copy() 94 | 95 | scale_features = ['intensity', 'c1', 'c2'] 96 | if standard_scaler: 97 | ss = StandardScaler() 98 | df[scale_features] = ss.fit_transform(df[scale_features]) 99 | 100 | x = df.drop('class', axis=1).values 101 | y = df['class'].values 102 | 103 | return x, y 104 | 105 | 106 | def load_atlas_star_data(path, standard_scaler=True, feat_list=None): 107 | """ 108 | Loads ATLAS variable star dataset from directory. 109 | 110 | :param path: Path to the directory. 111 | :param standard_scaler: Bool. Standardize data or not. 112 | :param feat_list: List of features to include in dataset. 113 | :return: Dataset and Class labels. 114 | """ 115 | df = pd.read_csv(path) 116 | y = df['CLASS'] 117 | 118 | # features selected using GradientBoost feature selection 119 | # (non-zero second decimal place) 120 | 121 | if feat_list is None: 122 | feat_list = ["fp_timerev", "fp_powerterm", "fp_phase180", 123 | "fp_hifreq", "fp_PPFAPshort1", "fp_period", 124 | "fp_fournum", "fp_multfac", "vf_percentile10", 125 | "fp_PPFAPshort3", "fp_PPFAPshort4", "vf_S_K", 126 | "ls_Cchin", "vf_wsd", "vf_percentile75", 127 | "fp_domperiod", "ls_RMS", "ls_Pday", "vf_percentile25", 128 | "fp_magrms_o", "fp_origLogFAP", "vf_percentile5"] 129 | 130 | list_cols = list(df) 131 | for f in feat_list: 132 | if f not in list_cols: 133 | raise AssertionError("Key "+f + " not in dataframe") 134 | 135 | for f in list_cols: 136 | if f in feat_list: 137 | continue 138 | df.drop(f, axis=1, inplace=True) 139 | 140 | x = df.iloc[:, 0:] 141 | y = y.values 142 | x = x.values 143 | 144 | if standard_scaler: 145 | sc = StandardScaler() 146 | x = sc.fit_transform(x) 147 | 148 | return x, y 149 | 150 | 151 | # handle class inequality 152 | def load_ogle_dataset(path, classes, time_len=50, pad=False): 153 | """ 154 | Loads OGLE variable star time series data from directory. 155 | 156 | :param path: Path to the directory. 157 | :param classes: Classes to include in dataset. 158 | :param time_len: Length of time series data. 159 | :param pad: Bool. Pad zeroes or not. 160 | :return: Dataset and Class labels. 
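        Example (an illustrative sketch; the directory is expected to follow
        the ``<path>/<class>/I`` layout used below)::

            x, y = load_ogle_dataset('ogle', classes=['cep', 'rrlyr'],
                                     time_len=50)
            # x: (n_samples, 50, 1) light-curve magnitudes, y: class names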
161 | """ 162 | mag, y = [], [] 163 | for class_ in classes: 164 | folder = path + '/' + class_ + '/I' 165 | for file in os.listdir(folder): 166 | num_lines = sum(1 for line in open(folder + '/' + file)) 167 | mag_i, j = [0 for i in range(time_len)], 0 168 | 169 | if not pad and num_lines < time_len: 170 | continue 171 | for line in open(folder + '/' + file): 172 | try: 173 | _, b, _ = line.split(' ') 174 | except Exception: 175 | break 176 | mag_i[j] = float(b) 177 | j += 1 178 | if j == time_len or j == num_lines: 179 | mag.append(np.array(mag_i)) 180 | y.append(class_) 181 | break 182 | 183 | mag = np.array(mag) 184 | mag = mag.reshape(mag.shape[0], mag.shape[1], 1) 185 | return mag, y 186 | 187 | 188 | def load_htru1_data(data_dir='htru1-batches-py'): 189 | x_train = None 190 | y_train = [] 191 | 192 | for i in range(1, 6): 193 | x_train_dict = unpickle(data_dir + "/data_batch_{}".format(i)) 194 | if i == 1: 195 | x_train = x_train_dict[b'data'] 196 | else: 197 | x_train = np.vstack((x_train, x_train_dict[b'data'])) 198 | y_train += x_train_dict[b'labels'] 199 | 200 | x_train = x_train.reshape((len(x_train), 3, 32, 32)) 201 | x_train = np.rollaxis(x_train, 1, 4) 202 | y_train = np.array(y_train) 203 | 204 | x_test_dict = unpickle(data_dir + "/test_batch") 205 | x_test = x_test_dict[b'data'] 206 | y_test = x_test_dict[b'labels'] 207 | 208 | x_test = np.array(x_test).reshape((len(x_test), 3, 32, 32)) 209 | x_test = np.rollaxis(x_test, 1, 4) 210 | y_test = np.array(y_test) 211 | 212 | return x_train, y_train, x_test, y_test 213 | -------------------------------------------------------------------------------- /mirapy/data/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mirapy-org/mirapy/d37cb6c8f8c6630411a5ee1b9c7d5aa6bb6479eb/mirapy/data/tests/__init__.py -------------------------------------------------------------------------------- /mirapy/data/tests/test_load_dataset.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pytest 3 | import numpy as np 4 | import cv2 5 | from mirapy.utils import get_psf_airy 6 | from mirapy.data import load_dataset 7 | 8 | 9 | def test_load_xray_binary_data(): 10 | path = 'test_XRayBinary/' 11 | filename = 'test.asc' 12 | 13 | os.mkdir(path) 14 | with open(path+filename, 'w') as f: 15 | f.write('J0352+309 Pulsar\t50139.6\t0.363137\t0.995448\t1.54628\n') 16 | x, y = load_dataset.load_xray_binary_data(path) 17 | 18 | os.remove(path+filename) 19 | os.rmdir(path) 20 | 21 | assert len(x) == 1 and len(x) == len(y) 22 | 23 | def test_messier_catalog_images(): 24 | path = 'test_messier_catalog_image/' 25 | filename = 'test.png' 26 | img = np.zeros([100,100,3],dtype=np.uint8) 27 | 28 | os.mkdir(path) 29 | cv2.imwrite(path+filename, img) 30 | 31 | imgs = load_dataset.load_messier_catalog_images(path) 32 | 33 | os.remove(path+filename) 34 | os.rmdir(path) 35 | 36 | assert len(imgs) == 1 37 | 38 | psf = get_psf_airy(100, 2) 39 | imgs, imgs_noisy = load_dataset.prepare_messier_catalog_images(imgs, psf, psf) 40 | 41 | assert len(imgs) == len(imgs_noisy) 42 | -------------------------------------------------------------------------------- /mirapy/fitting/__init__.py: -------------------------------------------------------------------------------- 1 | from mirapy.fitting.models import Model1D, Gaussian1D 2 | from mirapy.fitting.losses import mean_squared_error, negative_log_likelihood 3 | from mirapy.fitting.optimizers import 
ParameterEstimation 4 | -------------------------------------------------------------------------------- /mirapy/fitting/losses.py: -------------------------------------------------------------------------------- 1 | import autograd.numpy as np 2 | from autograd.scipy.stats import norm 3 | 4 | 5 | def negative_log_likelihood(y_true, y_pred): 6 | """ 7 | Function for negative log-likelihood error. 8 | 9 | :param y_true: Array of true values. 10 | :param y_pred: Array of predicted values. 11 | :return: Float. Loss value. 12 | """ 13 | ll = norm.logpdf(y_true, y_pred) 14 | return -np.sum(ll) 15 | 16 | 17 | def mean_squared_error(y_true, y_pred): 18 | """ 19 | Function for mean squared error. 20 | 21 | :param y_true: Array of true values. 22 | :param y_pred: Array of predicted values. 23 | :return: Float. Loss value. 24 | """ 25 | return np.sum((y_true-y_pred)**2)/len(y_true) 26 | -------------------------------------------------------------------------------- /mirapy/fitting/models.py: -------------------------------------------------------------------------------- 1 | import autograd.numpy as np 2 | 3 | 4 | class Model1D: 5 | def __init__(self): 6 | """ 7 | Base class for 1-D model. 8 | """ 9 | pass 10 | 11 | def __call__(self, x): 12 | """ 13 | Return the value of evaluate function by calling it. 14 | 15 | :param x: Array of 1-D input values. 16 | :return: Return the output of the evaluate function. 17 | """ 18 | return self.evaluate(x) 19 | 20 | def evaluate(self, x): 21 | """ 22 | Return the value of a model of the given input. 23 | 24 | :param x: Array of 1-D input values. 25 | :return: Return the output of the model. 26 | """ 27 | pass 28 | 29 | def set_params_from_array(self, params): 30 | """ 31 | Sets the parameters of the model from an array. 32 | 33 | :param params: Array of parameter values. 34 | """ 35 | pass 36 | 37 | def get_params_as_array(self): 38 | """ 39 | Returns the parameters of the model as an array. 40 | """ 41 | pass 42 | 43 | 44 | class Gaussian1D(Model1D): 45 | def __init__(self, amplitude=1., mean=0., stddev=1.): 46 | """ 47 | One dimensional Gaussian model. 48 | 49 | :param amplitude: Amplitude. 50 | :param mean: Mean. 51 | :param stddev: Standard deviation. 52 | """ 53 | self.amplitude = amplitude 54 | self.mean = mean 55 | self.stddev = stddev 56 | 57 | def evaluate(self, x): 58 | """ 59 | Return the value of Gaussian model of the given input. 60 | 61 | :param x: Array of 1-D input values. 62 | :return: Return the output of the model. 63 | """ 64 | return self.amplitude * np.exp(-0.5 * 65 | (x-self.mean) ** 2 / self.stddev ** 2) 66 | 67 | def set_params_from_array(self, params): 68 | """ 69 | Sets the parameters of the model from an array. 70 | 71 | :param params: Array of parameter values. 72 | """ 73 | if len(params) != 3: 74 | raise ValueError("The length of the parameter array must be 3") 75 | 76 | self.amplitude = params[0] 77 | self.mean = params[1] 78 | self.stddev = params[2] 79 | 80 | def get_params_as_array(self): 81 | """ 82 | Returns the parameters of the model as an array. 
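        A sketch of the full fitting round trip (it mirrors the usage in
        ``mirapy/fitting/tests/test_optimizers.py``; the target parameters
        are illustrative)::

            import autograd.numpy as np
            from mirapy.fitting import ParameterEstimation, mean_squared_error

            x = np.linspace(-10., 10., 200)
            y = Gaussian1D(3., 4., 2.)(x)
            parest = ParameterEstimation(x, y, Gaussian1D(1., 1., 1.),
                                         mean_squared_error)
            parest.fit()
            parest.get_model().get_params_as_array()  # approx. [3., 4., 2.]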
83 | """ 84 | return np.array([self.amplitude, self.mean, self.stddev]) 85 | -------------------------------------------------------------------------------- /mirapy/fitting/optimizers.py: -------------------------------------------------------------------------------- 1 | from scipy.optimize import minimize 2 | from autograd import grad 3 | from copy import deepcopy 4 | 5 | 6 | class ParameterEstimation: 7 | def __init__(self, x, y, model, loss_function, callback=None): 8 | """ 9 | Base class of parameter estimation of a model using regression. 10 | 11 | :param x: Array of input values. 12 | :param y: Array of target values. 13 | :param model: Model instance. 14 | :param loss_function: Instance of loss function. 15 | :param callback: Callback function. 16 | """ 17 | self.x = x 18 | self.y = y 19 | self.init_model = deepcopy(model) 20 | self.model = deepcopy(model) 21 | self.p_init = model.get_params_as_array() 22 | self.loss_function = loss_function 23 | self.callback = callback 24 | self.results = None 25 | 26 | def regression_function(self, params): 27 | """ 28 | Return the output of loss function. 29 | 30 | :param params: Array of new parameters of the model. 31 | :return: Output of loss function. 32 | """ 33 | self.model.set_params_from_array(params) 34 | y_true = self.y 35 | y_pred = self.model(self.x) 36 | return self.loss_function(y_true, y_pred) 37 | 38 | def get_model(self): 39 | """ 40 | Returns a copy of model used in estimation. 41 | 42 | :return: Model instance. 43 | """ 44 | model = deepcopy(self.init_model) 45 | if self.results is not None: 46 | model.set_params_from_array(self.results.x) 47 | return model 48 | 49 | def fit(self): 50 | """ 51 | Fits the data into the model using regression. 52 | 53 | :return: Returns the result. 54 | """ 55 | results = minimize(self.regression_function, self.p_init, 56 | method='L-BFGS-B', 57 | jac=grad(self.regression_function), 58 | callback=self.callback) 59 | self.results = results 60 | return results 61 | -------------------------------------------------------------------------------- /mirapy/fitting/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mirapy-org/mirapy/d37cb6c8f8c6630411a5ee1b9c7d5aa6bb6479eb/mirapy/fitting/tests/__init__.py -------------------------------------------------------------------------------- /mirapy/fitting/tests/test_losses.py: -------------------------------------------------------------------------------- 1 | from mirapy.fitting.losses import * 2 | import pytest 3 | 4 | 5 | def test_losses(): 6 | a = np.random.rand(10) 7 | b = np.random.rand(10) 8 | 9 | loss1 = negative_log_likelihood(a, b) 10 | loss2 = mean_squared_error(a, b) 11 | 12 | assert (loss1 > 0.0) 13 | assert (loss2 > 0.0) -------------------------------------------------------------------------------- /mirapy/fitting/tests/test_models.py: -------------------------------------------------------------------------------- 1 | from mirapy.fitting import Model1D, Gaussian1D 2 | import pytest 3 | 4 | 5 | def test_model1d(): 6 | model = Model1D() 7 | model.evaluate([]) 8 | model.get_params_as_array() 9 | model.set_params_from_array([]) 10 | 11 | def test_gaussian1d_error(): 12 | model = Gaussian1D() 13 | 14 | with pytest.raises(ValueError): 15 | model.set_params_from_array([]) 16 | -------------------------------------------------------------------------------- /mirapy/fitting/tests/test_optimizers.py: 
-------------------------------------------------------------------------------- 1 | from mirapy.fitting import Gaussian1D, mean_squared_error, ParameterEstimation 2 | import autograd.numpy as np 3 | from astropy.modeling import models, fitting 4 | 5 | 6 | def test_parameter_estimation(): 7 | x = np.linspace(-10., 10., 200) 8 | 9 | amplitude = 3. 10 | x_0 = 4. 11 | sigma = 2. 12 | noise = 0.2 13 | 14 | model = Gaussian1D(amplitude, x_0, sigma) 15 | y = model(x) 16 | 17 | np.random.seed(0) 18 | y += np.random.normal(0., noise, x.shape) 19 | 20 | # parameter estimation using MiraPy 21 | init_model = Gaussian1D(1., 1., 1.) 22 | parest = ParameterEstimation(x, y, init_model, mean_squared_error) 23 | parest.fit() 24 | best_model = parest.get_model() 25 | 26 | # parameter estimation using Astropy 27 | 28 | g_init = models.Gaussian1D(amplitude=1., mean=1., stddev=1.) 29 | pfit = fitting.LevMarLSQFitter() 30 | new_model = pfit(g_init, x, y) 31 | 32 | assert np.all(np.isclose(best_model(x), new_model(x), atol=0.01)) 33 | -------------------------------------------------------------------------------- /mirapy/tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Licensed under the MIT license - see LICENSE.rst 2 | """ 3 | This module contains package tests. 4 | """ 5 | -------------------------------------------------------------------------------- /mirapy/tests/coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = {packagename} 3 | omit = 4 | {packagename}/_astropy_init* 5 | {packagename}/conftest* 6 | {packagename}/cython_version* 7 | {packagename}/setup_package* 8 | {packagename}/*/setup_package* 9 | {packagename}/*/*/setup_package* 10 | {packagename}/tests/* 11 | {packagename}/*/tests/* 12 | {packagename}/*/*/tests/* 13 | {packagename}/version* 14 | 15 | [report] 16 | exclude_lines = 17 | # Have to re-enable the standard pragma 18 | pragma: no cover 19 | 20 | # Don't complain about packages we have installed 21 | except ImportError 22 | 23 | # Don't complain if tests don't hit assertions 24 | raise AssertionError 25 | raise NotImplementedError 26 | 27 | # Don't complain about script hooks 28 | def main\(.*\): 29 | 30 | # Ignore branches that don't pertain to this version of Python 31 | pragma: py{ignore_python_version} -------------------------------------------------------------------------------- /mirapy/tests/setup_package.py: -------------------------------------------------------------------------------- 1 | # import os 2 | 3 | # If this package has test data in the tests/data directory, add them to 4 | # the paths here, see the commented example 5 | paths = ['coveragerc', 6 | # os.path.join('data', '*fits') 7 | ] 8 | 9 | def get_package_data(): 10 | return { 11 | _ASTROPY_PACKAGE_NAME_ + '.tests': paths} 12 | -------------------------------------------------------------------------------- /mirapy/utils/__init__.py: -------------------------------------------------------------------------------- 1 | from .utils import * 2 | -------------------------------------------------------------------------------- /mirapy/utils/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mirapy-org/mirapy/d37cb6c8f8c6630411a5ee1b9c7d5aa6bb6479eb/mirapy/utils/tests/__init__.py -------------------------------------------------------------------------------- /mirapy/utils/tests/test_utils.py: 
-------------------------------------------------------------------------------- 1 | import os 2 | import numpy as np 3 | import pickle 4 | from keras.preprocessing.image import ImageDataGenerator 5 | from mirapy import utils 6 | import pytest 7 | 8 | 9 | def test_get_psf_airy(): 10 | a = utils.get_psf_airy(1, 1) 11 | b = np.array([[1.]]) 12 | assert (a == b).all() 13 | 14 | def test_psnr(): 15 | a = np.random.rand(10, 10) 16 | b = np.random.rand(10, 10) 17 | psnr = utils.psnr(a, b) 18 | assert psnr >= 0 and psnr <= 100 19 | 20 | def test_image_augmentation(): 21 | a = np.array([np.random.rand(128, 128)]) 22 | datagen = ImageDataGenerator(rotation_range=40, 23 | width_shift_range=0.2, 24 | height_shift_range=0.2, 25 | zoom_range=0.2, 26 | horizontal_flip=True) 27 | b = utils.image_augmentation(a, datagen, 10) 28 | print(type(b)) 29 | assert type(a) == type(b) 30 | 31 | def test_append_one_to_shape(): 32 | a = np.array([[1, 2], [3, 4]]) 33 | b = utils.append_one_to_shape(a) 34 | assert ((a.shape + (1,)) == b.shape) 35 | 36 | def test_unpickle(): 37 | a = np.array([1, 2, 3, 4, 5]) 38 | filename = "test.pkl" 39 | 40 | with open(filename, 'wb') as f: 41 | pickle.dump(a, f) 42 | 43 | b = utils.unpickle(filename) 44 | os.remove(filename) 45 | 46 | assert (a == b).all() 47 | 48 | def test_to_numeric(): 49 | a = np.array([[0.2, 0.8], [0.6, 0.4]]) 50 | b = np.array([1, 0]) 51 | assert (utils.to_numeric(a) == b).all() 52 | 53 | def test_accuracy_per_class(): 54 | a = utils.accuracy_per_class(np.array([1, 2]), np.array([3, 4])) 55 | b = np.array([1.]) 56 | assert (a == b).all() 57 | -------------------------------------------------------------------------------- /mirapy/utils/utils.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import scipy.special 3 | from tqdm import tqdm 4 | import pickle 5 | from sklearn.metrics import confusion_matrix 6 | from keras.preprocessing.image import img_to_array 7 | 8 | 9 | def get_psf_airy(n, nr): 10 | """ 11 | Calculates an Airy-disk Point Spread Function (PSF). 12 | 13 | :param n: Integer. Size (in pixels) of the square PSF array. 14 | :param nr: Float. Radial scale factor that sets the extent of the Airy pattern. 15 | :return: Numpy array of the Point Spread Function. 16 | """ 17 | xpsf = np.linspace(-1, 1, n) 18 | xg, yg = np.meshgrid(xpsf, xpsf) 19 | r = np.sqrt(xg**2+yg**2)*np.pi*nr 20 | psf = (scipy.special.j1(r)/r)**2 21 | psf = psf/psf.sum() 22 | return psf 23 | 24 | 25 | def image_augmentation(images, image_data_generator, num_of_augumentations, 26 | disable=False): 27 | """ 28 | Forms augmented images for an input array of images. 29 | 30 | :param images: numpy array of Images. 31 | :param image_data_generator: Keras image generator object. 32 | :param num_of_augumentations: Number of augmentations of each image. 33 | :param disable: Bool. Disable/enable tqdm progress bar. 34 | :return: Numpy array of augmented images. 35 | """ 36 | images_aug = [] 37 | for image in tqdm(images, disable=disable): 38 | img_dim = image.shape 39 | img_array = img_to_array(image) 40 | img_array = img_array.reshape((1,) + img_array.shape) 41 | i = 0 42 | for batch in image_data_generator.flow(img_array, batch_size=1): 43 | i += 1 44 | img = batch[0] 45 | img = img.reshape(img_dim) 46 | images_aug.append(img) 47 | 48 | if i >= num_of_augumentations: 49 | break 50 | 51 | images_aug = np.array(images_aug) 52 | return images_aug 53 | 54 | 55 | def psnr(img1, img2): 56 | """ 57 | Calculates the Peak Signal to Noise Ratio (PSNR) value. 58 | 59 | :param img1: Float. Array of first image. 60 | :param img2: Float. Array of second image. 61 | :return: Float. PSNR value of img1 and img2.
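        With images normalised to a peak value of 1, this reduces to
        ``PSNR = -10 * log10(MSE)``, which is what the implementation below
        computes; for example::

            psnr(np.ones((8, 8)), np.full((8, 8), 0.9))  # -> 20.0 dB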
62 | """ 63 | mse = np.mean((img1 - img2) ** 2) 64 | return -10 * np.log10(mse)  # equals 10*log10(MAX**2/MSE) with MAX = 1 65 | 66 | 67 | def append_one_to_shape(x): 68 | """ 69 | Appends a trailing axis of size one (e.g. a channel axis) to the input array. 70 | 71 | :param x: Input array. 72 | :return: Reshaped array with an extra trailing axis of size one. 73 | """ 74 | x_shape = x.shape 75 | # reshape directly to the original shape plus a trailing axis of one 76 | x = np.reshape(x, (*x_shape, 1)) 77 | return x 78 | 79 | 80 | def unpickle(file): 81 | """ 82 | Unpickle and read a file. 83 | 84 | :param file: Pickle file to read. 85 | :return: Data loaded from the pickle file. 86 | """ 87 | with open(file, 'rb') as fo: 88 | data = pickle.load(fo, encoding='bytes') 89 | return data 90 | 91 | 92 | def to_numeric(y): 93 | """ 94 | Convert a numpy array of class-probability vectors to an array of class indices. 95 | 96 | :param y: Numpy array of probability (or one-hot) vectors. 97 | :return: Numpy array of class indices. 98 | """ 99 | return np.array([np.argmax(value) for value in y]) 100 | 101 | 102 | def accuracy_per_class(y_true, y_pred): 103 | """ 104 | Computes the classification accuracy for each class. 105 | 106 | :param y_true: True classes (as probability or one-hot vectors). 107 | :param y_pred: Predicted classes (as probability or one-hot vectors). 108 | :return: Numpy array of per-class accuracies. 109 | """ 110 | y_true = to_numeric(y_true) 111 | y_pred = to_numeric(y_pred) 112 | 113 | cm = confusion_matrix(y_true, y_pred) 114 | return cm.diagonal() / cm.sum(axis=1) 115 | -------------------------------------------------------------------------------- /mirapy/visualization/__init__.py: -------------------------------------------------------------------------------- 1 | from .visualize import * 2 | -------------------------------------------------------------------------------- /mirapy/visualization/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mirapy-org/mirapy/d37cb6c8f8c6630411a5ee1b9c7d5aa6bb6479eb/mirapy/visualization/tests/__init__.py -------------------------------------------------------------------------------- /mirapy/visualization/tests/test_visualize.py: -------------------------------------------------------------------------------- 1 | from mirapy.visualization import visualize 2 | import numpy as np 3 | import pytest 4 | 5 | 6 | def test_visualize_2d(): 7 | a = np.array([[1, 2], [3, 4]]) 8 | b = np.array([1, 2]) 9 | visualize.visualize_2d(a, b) 10 | 11 | def test_visualize_3d(): 12 | a = np.array([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]]) 13 | b = np.array([1, 2, 3]) 14 | visualize.visualize_3d(a, b) 15 | -------------------------------------------------------------------------------- /mirapy/visualization/visualize.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from sklearn.decomposition import PCA 3 | import matplotlib.pyplot as plt 4 | from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 -- registers the 3D projection; do not remove 5 | 6 | 7 | def visualize_2d(x, y): 8 | """ 9 | Function for 2D visualization of data using Principal Component Analysis 10 | (PCA). 11 | 12 | :param x: Array of features. 13 | :param y: Array of target values.
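Illustrative usage with hypothetical data: >>> import numpy as np >>> x = np.random.rand(100, 5) >>> y = np.random.randint(0, 3, 100) >>> visualize_2d(x, y) which draws a scatter plot of the first two principal components, coloured by target value.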
14 | """ 15 | pca = PCA(n_components=2) 16 | principal_components = pca.fit_transform(x) 17 | principal_df = pd.DataFrame(data=principal_components, 18 | columns=['PC 1', 'PC 2']) 19 | target = pd.DataFrame(y, columns=['target']) 20 | new_df = pd.concat([principal_df, target], axis=1) 21 | 22 | fig = plt.figure(figsize=(10, 10)) 23 | ax = fig.add_subplot(1, 1, 1) 24 | ax.set_xlabel('Principal Component 1') 25 | ax.set_ylabel('Principal Component 2') 26 | ax.set_title('2D Feature Visualization') 27 | 28 | im = ax.scatter(new_df[['PC 1']], new_df[['PC 2']], 29 | c=new_df[['target']].values, s=2) 30 | fig.colorbar(im, ax=ax) 31 | 32 | ax.grid() 33 | 34 | 35 | def visualize_3d(x, y): 36 | """ 37 | Function for 3D visualization of data using Principal Component Analysis 38 | (PCA). 39 | 40 | :param x: Array of features. 41 | :param y: Array of target values. 42 | """ 43 | pca = PCA(n_components=3) 44 | principal_components = pca.fit_transform(x) 45 | principal_df = pd.DataFrame(data=principal_components, 46 | columns=['PC 1', 'PC 2', 'PC 3']) 47 | target = pd.DataFrame(y, columns=['target']) 48 | new_df = pd.concat([principal_df, target], axis=1) 49 | 50 | fig = plt.figure(figsize=(10, 10)) 51 | ax = fig.add_subplot(111, projection='3d') 52 | 53 | im = ax.scatter(new_df[['PC 1']], new_df[['PC 2']], new_df[['PC 3']], c=y, 54 | s=2) 55 | 56 | ax.set_xlabel('Principal Component 1') 57 | ax.set_ylabel('Principal Component 2') 58 | ax.set_zlabel('Principal Component 3') 59 | ax.set_title('3D Feature Visualization') 60 | fig.colorbar(im, ax=ax) 61 | ax.grid() 62 | -------------------------------------------------------------------------------- /paper.bib: -------------------------------------------------------------------------------- 1 | @misc{chollet2015keras, 2 | title={Keras}, 3 | author={Chollet, Fran\c{c}ois and others}, 4 | year={2015}, 5 | howpublished={\url{https://keras.io}}, 6 | } 7 | @article{Heinze_2018, 8 | doi = {10.3847/1538-3881/aae47f}, 9 | url = {https://doi.org/10.3847%2F1538-3881%2Faae47f}, 10 | year = 2018, 11 | month = {nov}, 12 | publisher = {American Astronomical Society}, 13 | volume = {156}, 14 | number = {5}, 15 | pages = {241}, 16 | author = {A. N. Heinze and J. L. Tonry and L. Denneau and H. Flewelling and B. Stalder and A. Rest and K. W. Smith and S. J. Smartt and H. Weiland}, 17 | title = {A First Catalog of Variable Stars Measured by the Asteroid Terrestrial-impact Last Alert System ({ATLAS})}, 18 | journal = {The Astronomical Journal}, 19 | abstract = {The Asteroid Terrestrial-impact Last Alert System (ATLAS) carries out its primary planetary defense mission by surveying about 13,000 deg2 at least four times per night. The resulting data set is useful for the discovery of variable stars to a magnitude limit fainter than r ∼ 18, with amplitudes down to 0.02 mag for bright objects. Here, we present a Data Release One catalog of variable stars based on analyzing the light curves of 142 million stars that were measured at least 100 times in the first two years of ATLAS operations. Using a Lomb–Scargle periodogram and other variability metrics, we identify 4.7 million candidate variables. Through the Space Telescope Science Institute, we publicly release light curves for all of them, together with a vector of 169 classification features for each star. 
We do this at the level of unconfirmed candidate variables in order to provide the community with a large set of homogeneously analyzed photometry and to avoid pre-judging which types of objects others may find most interesting. We use machine learning to classify the candidates into 15 different broad categories based on light-curve morphology. About 10% (427,000 stars) pass extensive tests designed to screen out spurious variability detections: we label these as “probable” variables. Of these, 214,000 receive specific classifications as eclipsing binaries, pulsating, Mira-type, or sinusoidal variables: these are the “classified” variables. New discoveries among the probable variables number 315,000, while 141,000 of the classified variables are new, including about 10,400 pulsating variables, 2060 Mira stars, and 74,700 eclipsing binaries.} 20 | } 21 | @article{1810.09489, 22 | Author = {A. Udalski and I. Soszyński and P. Pietrukowicz and M. K. Szymański and D. M. Skowron and J. Skowron and P. Mróz and R. Poleski and S. Kozłowski and K. Ulaczyk and K. Rybicki and P. Iwanek and M. Wrona}, 23 | Title = {OGLE Collection of Galactic Cepheids}, 24 | Year = {2018}, 25 | Eprint = {arXiv:1810.09489}, 26 | Howpublished = {2018, Acta Astron., 68, 315}, 27 | Doi = {10.32023/0001-5237/68.4.1}, 28 | } 29 | @article{Gopalan_2015, 30 | doi = {10.1088/0004-637x/809/1/40}, 31 | url = {https://doi.org/10.1088%2F0004-637x%2F809%2F1%2F40}, 32 | year = 2015, 33 | month = {aug}, 34 | publisher = {{IOP} Publishing}, 35 | volume = {809}, 36 | number = {1}, 37 | pages = {40}, 38 | author = {Giri Gopalan and Saeqa Dil Vrtilek and Luke Bornn}, 39 | title = {{CLASSIFYING} X-{RAY} {BINARIES}: A {PROBABILISTIC} {APPROACH}}, 40 | journal = {The Astrophysical Journal}, 41 | abstract = {In X-ray binary star systems consisting of a compact object that accretes material from an orbiting secondary star, there is no straightforward means to decide whether the compact object is a black hole or a neutron star. To assist in this process, we develop a Bayesian statistical model that makes use of the fact that X-ray binary systems appear to cluster based on their compact object type when viewed from a three-dimensional coordinate system derived from X-ray spectral data where the first coordinate is the ratio of counts in the mid- to low-energy band (color 1), the second coordinate is the ratio of counts in the high- to low-energy band (color 2), and the third coordinate is the sum of counts in all three bands. We use this model to estimate the probabilities of an X-ray binary system containing a black hole, non-pulsing neutron star, or pulsing neutron star. In particular, we utilize a latent variable model in which the latent variables follow a Gaussian process prior distribution, and hence we are able to induce the spatial correlation which we believe exists between systems of the same type. The utility of this approach is demonstrated by the accurate prediction of system types using Rossi X-ray Timing Explorer All Sky Monitor data, but it is not flawless. In particular, non-pulsing neutron systems containing “bursters” that are close to the boundary demarcating systems containing black holes tend to be classified as black hole systems. As a byproduct of our analyses, we provide the astronomer with the public R code which can be used to predict the compact object type of XRBs given training data.} 42 | } 43 | @article{10.1093/mnras/stu1188, 44 | author = {Morello, V. and Barr, E. D. and Bailes, M. and Flynn, C. M. and Keane, E. 
F. and van Straten, W.}, 45 | title = "{SPINN: a straightforward machine learning solution to the pulsar candidate selection problem}", 46 | journal = {Monthly Notices of the Royal Astronomical Society}, 47 | volume = {443}, 48 | number = {2}, 49 | pages = {1651-1662}, 50 | year = {2014}, 51 | month = {07}, 52 | abstract = "{We describe SPINN (Straightforward Pulsar Identification using Neural Networks), a high-performance machine learning solution developed to process increasingly large data outputs from pulsar surveys. SPINN has been cross-validated on candidates from the southern High Time Resolution Universe (HTRU) survey and shown to identify every known pulsar found in the survey data while maintaining a false positive rate of 0.64 per cent. Furthermore, it ranks 99 per cent of pulsars among the top 0.11 per cent of candidates, and 95 per cent among the top 0.01 per cent. In conjunction with the peasoup pipeline, it has already discovered four new pulsars in a re-processing of the intermediate Galactic latitude area of HTRU, three of which have spin periods shorter than 5 ms. SPINN's ability to reduce the amount of candidates to visually inspect by up to four orders of magnitude makes it a very promising tool for future large-scale pulsar surveys. In an effort to provide a common testing ground for pulsar candidate selection tools and stimulate interest in their development, we also make publicly available the set of candidates on which SPINN was cross-validated.}", 53 | issn = {0035-8711}, 54 | doi = {10.1093/mnras/stu1188}, 55 | url = {https://doi.org/10.1093/mnras/stu1188}, 56 | eprint = {http://oup.prod.sis.lan/mnras/article-pdf/443/2/1651/3623597/stu1188.pdf}, 57 | } 58 | @article{Barnes_2016, 59 | doi = {10.3847/0004-637x/829/2/89}, 60 | url = {https://doi.org/10.3847%2F0004-637x%2F829%2F2%2F89}, 61 | year = 2016, 62 | month = {sep}, 63 | publisher = {American Astronomical Society}, 64 | volume = {829}, 65 | number = {2}, 66 | pages = {89}, 67 | author = {G. Barnes and K. D. Leka and C. J. Schrijver and T. Colak and R. Qahwaji and O. W. Ashamari and Y. Yuan and J. Zhang and R. T. J. McAteer and D. S. Bloomfield and P. A. Higgins and P. T. Gallagher and D. A. Falconer and M. K. Georgoulis and M. S. Wheatland and C. Balch and T. Dunn and E. L. Wagner}, 68 | title = {A {COMPARISON} {OF} {FLARE} {FORECASTING} {METHODS}. I. {RESULTS} {FROM} {THE} {\textquotedblleft}{ALL}-{CLEAR}{\textquotedblright} {WORKSHOP}}, 69 | journal = {The Astrophysical Journal}, 70 | abstract = {Solar flares produce radiation that can have an almost immediate effect on the near-Earth environment, making it crucial to forecast flares in order to mitigate their negative effects. The number of published approaches to flare forecasting using photospheric magnetic field observations has proliferated, with varying claims about how well each works. Because of the different analysis techniques and data sets used, it is essentially impossible to compare the results from the literature. This problem is exacerbated by the low event rates of large solar flares. The challenges of forecasting rare events have long been recognized in the meteorology community, but have yet to be fully acknowledged by the space weather community. 
During the interagency workshop on “all clear” forecasts held in Boulder, CO in 2009, the performance of a number of existing algorithms was compared on common data sets, specifically line-of-sight magnetic field and continuum intensity images from the Michelson Doppler Imager, with consistent definitions of what constitutes an event. We demonstrate the importance of making such systematic comparisons, and of using standard verification statistics to determine what constitutes a good prediction scheme. When a comparison was made in this fashion, no one method clearly outperformed all others, which may in part be due to the strong correlations among the parameters used by different methods to characterize an active region. For M-class flares and above, the set of methods tends toward a weakly positive skill score (as measured with several distinct metrics), with no participating method proving substantially better than climatological forecasts.} 71 | } 72 | @article{astropy:2018, 73 | Adsnote = {Provided by the SAO/NASA Astrophysics Data System}, 74 | Adsurl = {https://ui.adsabs.harvard.edu/#abs/2018AJ....156..123T}, 75 | Author = {{Price-Whelan}, A.~M. and {Sip{\H{o}}cz}, B.~M. and {G{\"u}nther}, H.~M. and {Lim}, P.~L. and {Crawford}, S.~M. and {Conseil}, S. and {Shupe}, D.~L. and {Craig}, M.~W. and {Dencheva}, N. and {Ginsburg}, A. and {VanderPlas}, J.~T. and {Bradley}, L.~D. and {P{\'e}rez-Su{\'a}rez}, D. and {de Val-Borro}, M. and {Paper Contributors}, (Primary and {Aldcroft}, T.~L. and {Cruz}, K.~L. and {Robitaille}, T.~P. and {Tollerud}, E.~J. and {Coordination Committee}, (Astropy and {Ardelean}, C. and {Babej}, T. and {Bach}, Y.~P. and {Bachetti}, M. and {Bakanov}, A.~V. and {Bamford}, S.~P. and {Barentsen}, G. and {Barmby}, P. and {Baumbach}, A. and {Berry}, K.~L. and {Biscani}, F. and {Boquien}, M. and {Bostroem}, K.~A. and {Bouma}, L.~G. and {Brammer}, G.~B. and {Bray}, E.~M. and {Breytenbach}, H. and {Buddelmeijer}, H. and {Burke}, D.~J. and {Calderone}, G. and {Cano Rodr{\'\i}guez}, J.~L. and {Cara}, M. and {Cardoso}, J.~V.~M. and {Cheedella}, S. and {Copin}, Y. and {Corrales}, L. and {Crichton}, D. and {D{\textquoteright}Avella}, D. and {Deil}, C. and {Depagne}, {\'E}. and {Dietrich}, J.~P. and {Donath}, A. and {Droettboom}, M. and {Earl}, N. and {Erben}, T. and {Fabbro}, S. and {Ferreira}, L.~A. and {Finethy}, T. and {Fox}, R.~T. and {Garrison}, L.~H. and {Gibbons}, S.~L.~J. and {Goldstein}, D.~A. and {Gommers}, R. and {Greco}, J.~P. and {Greenfield}, P. and {Groener}, A.~M. and {Grollier}, F. and {Hagen}, A. and {Hirst}, P. and {Homeier}, D. and {Horton}, A.~J. and {Hosseinzadeh}, G. and {Hu}, L. and {Hunkeler}, J.~S. and {Ivezi{\'c}}, {\v{Z}}. and {Jain}, A. and {Jenness}, T. and {Kanarek}, G. and {Kendrew}, S. and {Kern}, N.~S. and {Kerzendorf}, W.~E. and {Khvalko}, A. and {King}, J. and {Kirkby}, D. and {Kulkarni}, A.~M. and {Kumar}, A. and {Lee}, A. and {Lenz}, D. and {Littlefair}, S.~P. and {Ma}, Z. and {Macleod}, D.~M. and {Mastropietro}, M. and {McCully}, C. and {Montagnac}, S. and {Morris}, B.~M. and {Mueller}, M. and {Mumford}, S.~J. and {Muna}, D. and {Murphy}, N.~A. and {Nelson}, S. and {Nguyen}, G.~H. and {Ninan}, J.~P. and {N{\"o}the}, M. and {Ogaz}, S. and {Oh}, S. and {Parejko}, J.~K. and {Parley}, N. and {Pascual}, S. and {Patil}, R. and {Patil}, A.~A. and {Plunkett}, A.~L. and {Prochaska}, J.~X. and {Rastogi}, T. and {Reddy Janga}, V. and {Sabater}, J. and {Sakurikar}, P. and {Seifert}, M. and {Sherbert}, L.~E. and {Sherwood-Taylor}, H. and {Shih}, A.~Y. 
and {Sick}, J. and {Silbiger}, M.~T. and {Singanamalla}, S. and {Singer}, L.~P. and {Sladen}, P.~H. and {Sooley}, K.~A. and {Sornarajah}, S. and {Streicher}, O. and {Teuben}, P. and {Thomas}, S.~W. and {Tremblay}, G.~R. and {Turner}, J.~E.~H. and {Terr{\'o}n}, V. and {van Kerkwijk}, M.~H. and {de la Vega}, A. and {Watkins}, L.~L. and {Weaver}, B.~A. and {Whitmore}, J.~B. and {Woillez}, J. and {Zabalza}, V. and {Contributors}, (Astropy}, 76 | Doi = {10.3847/1538-3881/aabc4f}, 77 | Eid = {123}, 78 | Journal = {\aj}, 79 | Keywords = {methods: data analysis, methods: miscellaneous, methods: statistical, reference systems, Astrophysics - Instrumentation and Methods for Astrophysics}, 80 | Month = Sep, 81 | Pages = {123}, 82 | Primaryclass = {astro-ph.IM}, 83 | Title = {{The Astropy Project: Building an Open-science Project and Status of the v2.0 Core Package}}, 84 | Volume = {156}, 85 | Year = 2018, 86 | Bdsk-Url-1 = {https://doi.org/10.3847/1538-3881/aabc4f}} 87 | @article{scikit-learn, 88 | title={Scikit-learn: Machine Learning in {P}ython}, 89 | author={Pedregosa, F. and Varoquaux, G. and Gramfort, A. and Michel, V. 90 | and Thirion, B. and Grisel, O. and Blondel, M. and Prettenhofer, P. 91 | and Weiss, R. and Dubourg, V. and Vanderplas, J. and Passos, A. and 92 | Cournapeau, D. and Brucher, M. and Perrot, M. and Duchesnay, E.}, 93 | journal={Journal of Machine Learning Research}, 94 | volume={12}, 95 | pages={2825--2830}, 96 | year={2011} 97 | } 98 | @article{scikit-image, 99 | title = {scikit-image: image processing in {P}ython}, 100 | author = {van der Walt, {S}t\'efan and {S}ch\"onberger, {J}ohannes {L}. and 101 | {Nunez-Iglesias}, {J}uan and {B}oulogne, {F}ran\c{c}ois and {W}arner, 102 | {J}oshua {D}. and {Y}ager, {N}eil and {G}ouillart, {E}mmanuelle and 103 | {Y}u, {T}ony and the scikit-image contributors}, 104 | year = {2014}, 105 | month = {6}, 106 | keywords = {Image processing, Reproducible research, Education, 107 | Visualization, Open source, Python, Scientific programming}, 108 | volume = {2}, 109 | pages = {e453}, 110 | journal = {PeerJ}, 111 | issn = {2167-8359}, 112 | url = {https://doi.org/10.7717/peerj.453}, 113 | doi = {10.7717/peerj.453} 114 | } 115 | @Misc{scipy, 116 | author = {Eric Jones and Travis Oliphant and Pearu Peterson and others}, 117 | title = {{SciPy}: Open source scientific tools for {Python}}, 118 | year = {2001--}, 119 | url = "http://www.scipy.org/", 120 | note = {[Online; accessed ]} 121 | } 122 | @book{oliphant2006guide, 123 | title={A guide to NumPy}, 124 | author={Oliphant, Travis E}, 125 | volume={1}, 126 | year={2006}, 127 | publisher={Trelgol Publishing USA} 128 | } 129 | @Article{Hunter:2007, 130 | Author = {Hunter, J. 
D.}, 131 | Title = {Matplotlib: A 2D graphics environment}, 132 | Journal = {Computing in Science \& Engineering}, 133 | Volume = {9}, 134 | Number = {3}, 135 | Pages = {90--95}, 136 | abstract = {Matplotlib is a 2D graphics package used for Python for 137 | application development, interactive scripting, and publication-quality 138 | image generation across user interfaces and operating systems.}, 139 | publisher = {IEEE COMPUTER SOC}, 140 | doi = {10.1109/MCSE.2007.55}, 141 | year = 2007 142 | } 143 | -------------------------------------------------------------------------------- /paper.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: 'MiraPy: Python Package for Deep Learning in Astronomy' 3 | tags: 4 | - Python 5 | - astronomy 6 | - deep learning 7 | - machine learning 8 | - image reconstruction 9 | - variable star classification 10 | - curve fitting 11 | authors: 12 | - name: Swapnil Sharma 13 | orcid: 0000-0002-0329-9314 14 | affiliation: 1 15 | - name: Akhil Singhal 16 | affiliation: 1 17 | - name: Arnav Bhavsar 18 | affiliation: 1 19 | affiliations: 20 | - name: Indian Institute of Technology, Mandi 21 | index: 1 22 | date: 1 July 2019 23 | bibliography: paper.bib 24 | --- 25 | 26 | # Summary 27 | 28 | MiraPy is a Python package [@astropy:2018] for Deep Learning in Astronomy. It is built on 29 | Keras [@chollet2015keras], so the models it provides run seamlessly on both CPU and GPU. The 30 | aim is to make it easy for astronomers, researchers and students to apply machine 31 | learning techniques to astronomical data. 32 | 33 | MiraPy can already be used to solve a variety of astronomical problems with ML techniques, and it will continue to grow to tackle new ones. 34 | 35 | # Applications 36 | 37 | The following experiments can be performed with the current release: 38 | 39 | - Classification of X-Ray Binaries using a neural network [@Gopalan_2015] 40 | - Astronomical Image Reconstruction using an Autoencoder 41 | - Classification of the first catalog of variable stars by ATLAS [@Heinze_2018] 42 | - HTRU1 Pulsar Dataset Image Classification using a Convolutional Neural Network [@10.1093/mnras/stu1188] 43 | - OGLE Catalogue Variable Star Classification using a Recurrent Neural Network (RNN) [@1810.09489] 44 | - 2D and 3D visualization of feature sets using Principal Component Analysis (PCA) [@Barnes_2016] 45 | - Curve Fitting using Autograd (basic implementation; see the sketch below) 46 |
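47 | # Example 48 | 49 | As an illustration of the curve-fitting API, the sketch below is adapted from the package's own test suite for `mirapy.fitting`; it fits a Gaussian model to noisy data: 50 | 51 | ```python 52 | import autograd.numpy as np 53 | from mirapy.fitting import Gaussian1D, mean_squared_error, ParameterEstimation 54 | 55 | # generate a noisy Gaussian signal 56 | x = np.linspace(-10., 10., 200) 57 | y = Gaussian1D(3., 4., 2.)(x) + np.random.normal(0., 0.2, x.shape) 58 | 59 | # fit a model to the data, starting from a rough initial guess 60 | init_model = Gaussian1D(1., 1., 1.) 61 | parest = ParameterEstimation(x, y, init_model, mean_squared_error) 62 | parest.fit() 63 | best_model = parest.get_model()  # model with the estimated parameters 64 | ``` 65 |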
66 | # Acknowledgements 67 | MiraPy is developed by Swapnil Sharma and Akhil Singhal as their final year 'Major Technical Project' under the guidance of Dr. Arnav Bhavsar at Indian Institute of Technology, Mandi. 68 | This work also makes use of scikit-learn [@scikit-learn], scikit-image [@scikit-image], SciPy [@scipy], NumPy [@oliphant2006guide] and Matplotlib [@Hunter:2007]. 69 | 70 | # References 71 | -------------------------------------------------------------------------------- /readthedocs.yml: -------------------------------------------------------------------------------- 1 | conda: 2 | file: .rtd-environment.yml 3 | 4 | python: 5 | setup_py_install: true -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # Also update setup.cfg (if needed) 2 | astropy 3 | numpy>=1.11.0 4 | scipy>=0.18.0 5 | matplotlib>=1.3 6 | autograd 7 | nbsphinx -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [build_sphinx] 2 | source-dir = docs/source 3 | build-dir = docs/build 4 | all_files = 1 5 | 6 | [build_docs] 7 | source-dir = docs/source 8 | build-dir = docs/build 9 | all_files = 1 10 | 11 | [upload_docs] 12 | upload-dir = docs/build/html 13 | show-response = 1 14 | 15 | [tool:pytest] 16 | minversion = 3.0 17 | norecursedirs = build docs/build 18 | doctest_plus = enabled 19 | addopts = -p no:warnings 20 | 21 | [ah_bootstrap] 22 | auto_use = True 23 | 24 | [flake8] 25 | exclude = extern,sphinx,*parsetab.py 26 | 27 | [pycodestyle] 28 | exclude = extern,sphinx,*parsetab.py 29 | 30 | [metadata] 31 | package_name = mirapy 32 | description = Python package for Deep Learning in Astronomy 33 | long_description = MiraPy is a Python package for Deep Learning in Astronomy. It is built using Keras for developing ML models to run on CPU and GPU seamlessly. The aim is to make applying machine learning techniques to astronomical data easy for astronomers, researchers and students. 34 | author = Swapnil Sharma, Akhil Singhal 35 | author_email = swap.sha96@gmail.com, akhilsinghal1234@gmail.com 36 | license = MIT 37 | url = https://github.com/swapsha96/MiraPy 38 | edit_on_github = False 39 | github_project = swapsha96/MiraPy 40 | # install_requires should be formatted as a comma-separated list, e.g.: 41 | # install_requires = astropy, scipy, matplotlib 42 | install_requires = astropy, numpy>=1.11.0, scipy>=0.18.0, matplotlib>=1.3, autograd 43 | # version should be PEP440 compatible (https://www.python.org/dev/peps/pep-0440/) 44 | version = 0.1.0 45 | # Note: you will also need to change this in your package's __init__.py 46 | minimum_python_version = 3.5 47 | 48 | [entry_points] 49 | # Template example entry point, disabled because packagename.example_mod does not exist: 50 | # astropy-package-template-example = packagename.example_mod:main 51 | 52 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Licensed under the MIT license - see LICENSE.rst 3 | # Note: This file needs to be Python 2 / <3.6 compatible, so that the nice 4 | # "This package only supports Python 3.x+" error prints without syntax errors etc.
5 | 6 | import glob 7 | import os 8 | import sys 9 | try: 10 | from configparser import ConfigParser 11 | except ImportError: 12 | from ConfigParser import ConfigParser 13 | 14 | # Get some values from the setup.cfg 15 | conf = ConfigParser() 16 | conf.read(['setup.cfg']) 17 | metadata = dict(conf.items('metadata')) 18 | 19 | PACKAGENAME = metadata.get('package_name', 'mirapy') 20 | DESCRIPTION = metadata.get('description', 'Python package for Machine Learning in Astronomy') 21 | AUTHOR = metadata.get('author', 'Swapnil Sharma, Akhil Singhal') 22 | AUTHOR_EMAIL = metadata.get('author_email', '') 23 | LICENSE = metadata.get('license', 'unknown') 24 | URL = metadata.get('url', 'https://github.com/swapsha96/MiraPy') 25 | __minimum_python_version__ = metadata.get("minimum_python_version", "3.5") 26 | 27 | # Enforce Python version check - this is the same check as in __init__.py but 28 | # this one has to happen before importing ah_bootstrap. 29 | if sys.version_info < tuple((int(val) for val in __minimum_python_version__.split('.'))): 30 | sys.stderr.write("ERROR: mirapy requires Python {} or later\n".format(__minimum_python_version__)) 31 | sys.exit(1) 32 | 33 | # Import ah_bootstrap after the python version validation 34 | 35 | import ah_bootstrap 36 | from setuptools import setup 37 | 38 | import builtins 39 | builtins._ASTROPY_SETUP_ = True 40 | 41 | from astropy_helpers.setup_helpers import (register_commands, get_debug_option, 42 | get_package_info) 43 | from astropy_helpers.git_helpers import get_git_devstr 44 | from astropy_helpers.version_helpers import generate_version_py 45 | 46 | 47 | # order of priority for long_description: 48 | # (1) set in setup.cfg, 49 | # (2) load LONG_DESCRIPTION.rst, 50 | # (3) load README.rst, 51 | # (4) package docstring 52 | readme_glob = 'README*' 53 | _cfg_long_description = metadata.get('long_description', '') 54 | if _cfg_long_description: 55 | LONG_DESCRIPTION = _cfg_long_description 56 | 57 | elif os.path.exists('LONG_DESCRIPTION.rst'): 58 | with open('LONG_DESCRIPTION.rst') as f: 59 | LONG_DESCRIPTION = f.read() 60 | 61 | elif len(glob.glob(readme_glob)) > 0: 62 | with open(glob.glob(readme_glob)[0]) as f: 63 | LONG_DESCRIPTION = f.read() 64 | 65 | else: 66 | # Get the long description from the package's docstring 67 | __import__(PACKAGENAME) 68 | package = sys.modules[PACKAGENAME] 69 | LONG_DESCRIPTION = package.__doc__ 70 | 71 | # Store the package name in a built-in variable so it's easy 72 | # to get from other parts of the setup infrastructure 73 | builtins._ASTROPY_PACKAGE_NAME_ = PACKAGENAME 74 | 75 | # VERSION should be PEP440 compatible (http://www.python.org/dev/peps/pep-0440) 76 | VERSION = metadata.get('version', '0.0.dev') 77 | 78 | # Indicates if this version is a release version 79 | RELEASE = 'dev' not in VERSION 80 | 81 | if not RELEASE: 82 | VERSION += get_git_devstr(False) 83 | 84 | # Populate the dict of setup command overrides; this should be done before 85 | # invoking any other functionality from distutils since it can potentially 86 | # modify distutils' behavior. 
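# (Illustrative note: with a typical astropy_helpers checkout the mapping returned below includes command overrides such as 'test' and 'build_docs'; e.g. sorted(cmdclassd) lists the registered command names. The exact contents depend on the installed astropy_helpers version.)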
87 | cmdclassd = register_commands(PACKAGENAME, VERSION, RELEASE) 88 | 89 | # Freeze build information in version.py 90 | generate_version_py(PACKAGENAME, VERSION, RELEASE, 91 | get_debug_option(PACKAGENAME)) 92 | 93 | # Treat everything in scripts except README* as a script to be installed 94 | scripts = [fname for fname in glob.glob(os.path.join('scripts', '*')) 95 | if not os.path.basename(fname).startswith('README')] 96 | 97 | 98 | # Get configuration information from all of the various subpackages. 99 | # See the docstring for setup_helpers.update_package_files for more 100 | # details. 101 | package_info = get_package_info() 102 | 103 | # Add the project-global data 104 | package_info['package_data'].setdefault(PACKAGENAME, []) 105 | package_info['package_data'][PACKAGENAME].append('data/*') 106 | 107 | # Define entry points for command-line scripts: each '[entry_points]' item of the form 'name = package.module:function' in setup.cfg becomes a console script named 'name' 108 | entry_points = {'console_scripts': []} 109 | 110 | if conf.has_section('entry_points'): 111 | entry_point_list = conf.items('entry_points') 112 | for entry_point in entry_point_list: 113 | entry_points['console_scripts'].append('{0} = {1}'.format( 114 | entry_point[0], entry_point[1])) 115 | 116 | # Include all .c files, recursively, including those generated by 117 | # Cython, since we cannot do this in MANIFEST.in with a "dynamic" 118 | # directory name. 119 | c_files = [] 120 | for root, dirs, files in os.walk(PACKAGENAME): 121 | for filename in files: 122 | if filename.endswith('.c'): 123 | c_files.append( 124 | os.path.join( 125 | os.path.relpath(root, PACKAGENAME), filename)) 126 | package_info['package_data'][PACKAGENAME].extend(c_files) 127 | 128 | with open('requirements.txt') as f: 129 | required = f.read().splitlines()  # note: currently unused; install_requires is taken from setup.cfg below 130 | 131 | # Note that requires and provides should not be included in the call to 132 | # ``setup``, since these are now deprecated. See this link for more details: 133 | # https://groups.google.com/forum/#!topic/astropy-dev/urYO8ckB2uM 134 | 135 | setup(name=PACKAGENAME, 136 | version=VERSION, 137 | description=DESCRIPTION, 138 | scripts=scripts, 139 | install_requires=[s.strip() for s in metadata.get('install_requires', 'astropy').split(',')], 140 | author=AUTHOR, 141 | author_email=AUTHOR_EMAIL, 142 | license=LICENSE, 143 | url=URL, 144 | long_description=LONG_DESCRIPTION, 145 | cmdclass=cmdclassd, 146 | zip_safe=False, 147 | use_2to3=False, 148 | entry_points=entry_points, 149 | python_requires='>={}'.format(__minimum_python_version__), 150 | **package_info 151 | ) 152 | --------------------------------------------------------------------------------