├── .gitignore
├── HISTORY.rst
├── .coveralls.yml
├── conda-requirements.txt
├── examples
│   ├── red
│   │   └── 020_LightCurve_00022.fits.gz
│   ├── constant
│   │   └── 020_LightCurve_00001.fits.gz
│   └── loggauss
│       └── 020_LightCurve_00001.fits.gz
├── .coveragerc
├── MANIFEST.in
├── setup.cfg
├── .github
│   └── workflows
│       └── test.yml
├── .travis.yml
├── setup.py
├── Makefile
├── README.rst
├── scripts
│   ├── quick_ero.py
│   └── bexvar_ero.py
├── experimental
│   ├── periodic_ero.py
│   ├── cplar_ero.py
│   ├── bexvar2_ero.py
│   └── cplar_ero_full.py
└── COPYING
/.gitignore:
--------------------------------------------------------------------------------
1 | *.fits
2 | *.png
3 | *.pdf
4 |
--------------------------------------------------------------------------------
/HISTORY.rst:
--------------------------------------------------------------------------------
1 | Changelog
2 | ----------
3 |
--------------------------------------------------------------------------------
/.coveralls.yml:
--------------------------------------------------------------------------------
1 | repo_token: ZvC01ANuubKpXKvAKyGgcmxyRCl74l6Ww
2 |
--------------------------------------------------------------------------------
/conda-requirements.txt:
--------------------------------------------------------------------------------
1 | numpy
2 | scipy
3 | ultranest
4 | astropy
5 | tqdm
6 | joblib
7 | matplotlib
8 | corner
9 |
--------------------------------------------------------------------------------
/examples/red/020_LightCurve_00022.fits.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/JohannesBuchner/bexvar/main/examples/red/020_LightCurve_00022.fits.gz
--------------------------------------------------------------------------------
/examples/constant/020_LightCurve_00001.fits.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/JohannesBuchner/bexvar/main/examples/constant/020_LightCurve_00001.fits.gz
--------------------------------------------------------------------------------
/examples/loggauss/020_LightCurve_00001.fits.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/JohannesBuchner/bexvar/main/examples/loggauss/020_LightCurve_00001.fits.gz
--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
1 | #
2 | # .coveragerc to control coverage.py
3 | #
4 |
5 | [run]
6 | branch = True
7 | include =
8 | scripts/*
9 | experimental/*
10 |
11 |
12 | [report]
13 | exclude_lines =
14 | pragma: no cover
15 | def __repr__
16 | if __name__ == .__main__.:
17 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include CONTRIBUTING.rst
2 | include HISTORY.rst
3 | include COPYING
4 | include README.rst
5 |
6 | recursive-include tests *
7 | recursive-exclude * __pycache__
8 | recursive-exclude * *.py[co]
9 |
10 | recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif
11 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [bumpversion]
2 | current_version = 1.1.1
3 | commit = True
4 | tag = True
5 |
6 | [bumpversion:file:setup.py]
7 | search = version='{current_version}'
8 | replace = version='{new_version}'
9 |
10 | [bumpversion:file:scripts/bexvar_ero.py]
11 | search = __version__ = '{current_version}'
12 | replace = __version__ = '{new_version}'
13 |
14 | [bumpversion:file:scripts/quick_ero.py]
15 | search = __version__ = '{current_version}'
16 | replace = __version__ = '{new_version}'
17 |
18 | [flake8]
19 | exclude = docs
20 | ignore = E501,F401,E128,E231,E124
21 |
22 | [aliases]
23 | test = pytest
24 |
25 | [tool:pytest]
26 | collect_ignore = ['setup.py']
27 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: Test
2 | on:
3 | push:
4 | pull_request:
5 | schedule:
6 | - cron: '42 4 5,20 * *'
7 | jobs:
8 | Test:
9 | runs-on: ubuntu-latest
10 | #strategy:
11 | # matrix:
12 | # python-version: [3.8]
13 |
14 | steps:
15 | - uses: actions/checkout@v2
16 |
17 | - name: Set up Python ${{ matrix.python-version }}
18 | uses: actions/setup-python@v2
19 | with:
20 | python-version: 3.8
21 | # python-version: ${{ matrix.python-version }}
22 |
23 | - name: Install dependencies
24 | run: |
25 | python -m pip install --upgrade pip
26 | pip install PyYAML coveralls rst2html5 cython cmdstancache brokenaxes git+https://github.com/JohannesBuchner/stan_utility.git#egg=stan-utility
27 | if [ -f conda-requirements.txt ]; then pip install -r conda-requirements.txt; fi
28 | install_cmdstan
29 | - run: python setup.py install
30 | - run: coverage run -a scripts/quick_ero.py examples/*/020_LightCurve_*.fits.gz
31 | - run: coverage run -a scripts/bexvar_ero.py examples/constant/020_LightCurve_00001.fits.gz
32 | - run: coverage run -a experimental/cplar_ero.py examples/red/020_LightCurve_00022.fits.gz
33 | #- run: coverage run -a experimental/periodic_ero.py examples/constant/020_LightCurve_00001.fits.gz
34 | - run: coveralls
35 | - run: make docs
36 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 |
3 | sudo: false
4 |
5 | #python:
6 | # - "3.5"
7 | # - "3.6"
8 | # - "3.9"
9 |
10 | install:
11 | # Fetch and install conda
12 | # -----------------------
13 | - export CONDA_BASE="http://repo.continuum.io/miniconda/Miniconda"
14 | - wget ${CONDA_BASE}3-latest-Linux-x86_64.sh -O miniconda.sh;
15 | - bash miniconda.sh -b -p ${HOME}/miniconda
16 | - export PATH="${HOME}/miniconda/bin:${PATH}"
17 |
18 | # Create the testing environment
19 | # ------------------------------
20 | - conda config --set always_yes true
21 | - conda config --set changeps1 no
22 | - conda config --set show_channel_urls true
23 | - conda config --add channels conda-forge
24 | - conda update --quiet conda
25 | - ENV_NAME="test-environment"
26 | - conda create --quiet -n ${ENV_NAME} python=${TRAVIS_PYTHON_VERSION}
27 | - source activate ${ENV_NAME}
28 |
29 | # Customise the testing environment
30 | # ---------------------------------
31 | - conda install --quiet --file conda-requirements.txt cython
32 | - pip install coveralls rst2html5
33 |
34 |   # Summarise environment
35 | # ---------------------
36 | - conda list
37 | - conda info -a
38 |
39 | # Install and test
40 | - python setup.py install
41 |
42 | script:
43 | - make docs
44 |   - coverage run -a scripts/quick_ero.py examples/*/020_LightCurve_*.fits.gz
45 |   - coverage run -a scripts/bexvar_ero.py examples/constant/020_LightCurve_00001.fits.gz
46 |   - coverage run -a experimental/cplar_ero.py examples/red/020_LightCurve_00022.fits.gz
47 |   - coverage run -a experimental/periodic_ero.py examples/constant/020_LightCurve_00001.fits.gz
48 |
49 | after_success: coveralls
50 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | import os
4 |
5 | try:
6 | from setuptools import setup
7 | except ImportError:
8 | from distutils.core import setup
9 |
10 | try:
11 | with open('README.rst') as readme_file:
12 | readme = readme_file.read()
13 |
14 | with open('HISTORY.rst') as history_file:
15 | history = history_file.read()
16 | except IOError:
17 | with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme_file:
18 | readme = readme_file.read()
19 |
20 | with open(os.path.join(os.path.dirname(__file__), 'HISTORY.rst')) as history_file:
21 | history = history_file.read()
22 |
23 | requirements = ['numpy', 'scipy', 'ultranest', 'matplotlib', 'astropy']
24 |
25 | setup_requirements = ['pytest-runner', ]
26 |
27 | test_requirements = ['pytest>=3', ]
28 |
29 | setup(
30 | author="Johannes Buchner",
31 | author_email='johannes.buchner.acad@gmx.com',
32 | python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*',
33 | classifiers=[
34 | 'Development Status :: 2 - Pre-Alpha',
35 | 'Intended Audience :: Developers',
36 |         'License :: OSI Approved :: GNU Affero General Public License v3',
37 | 'Natural Language :: English',
38 | 'Programming Language :: Python :: 3.5',
39 | 'Programming Language :: Python :: 3.6',
40 | 'Programming Language :: Python :: 3.7',
41 | 'Programming Language :: Python :: 3.8',
42 | 'Programming Language :: Python :: 3.9',
43 | ],
44 | description="Bayesian excess variance for Poisson data time series with backgrounds.",
45 | install_requires=requirements,
46 |     license="GNU Affero General Public License v3",
47 | long_description=readme + '\n\n' + history,
48 | keywords='bexvar',
49 | name='bexvar',
50 | scripts=['scripts/bexvar_ero.py', 'scripts/quick_ero.py'],
51 | setup_requires=setup_requirements,
52 | url='https://github.com/JohannesBuchner/bexvar',
53 | version='1.1.1',
54 | )
55 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | .PHONY: clean clean-test clean-pyc clean-build clean-doc docs help show
2 | .DEFAULT_GOAL := help
3 | .SECONDARY:
4 |
5 | export PRINT_HELP_PYSCRIPT
6 |
7 | PYTHON := python3
8 |
9 | help:
10 | @$(PYTHON) -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST)
11 |
12 | clean: clean-build clean-pyc clean-test clean-doc ## remove all build, test, coverage and Python artifacts
13 |
14 | clean-build: ## remove build artifacts
15 | rm -fr build/
16 | rm -fr dist/
17 | rm -fr .eggs/
18 | find . -name '*.egg-info' -exec rm -fr {} +
19 | find . -name '*.egg' -exec rm -f {} +
20 |
21 | clean-pyc: ## remove Python file artifacts
22 | find . -name '*.pyc' -exec rm -f {} +
23 | find . -name '*.pyo' -exec rm -f {} +
24 | find . -name '*~' -exec rm -f {} +
25 | find . -name '__pycache__' -exec rm -fr {} +
26 | find . -name '*.so' -exec rm -f {} +
27 | find . -name '*.c' -exec rm -f {} +
28 |
29 | clean-test: ## remove test and coverage artifacts
30 | rm -fr .tox/
31 | rm -f .coverage
32 | rm -fr htmlcov/
33 | rm -fr .pytest_cache
34 |
35 | clean-doc:
36 | rm -rf README.html
37 |
38 | #lint: ## check style with flake8
39 | #	flake8 scripts experimental
40 |
41 | test: ## run the example scripts on the bundled light curves
42 | 	${PYTHON} scripts/quick_ero.py examples/*/020_LightCurve_*.fits.gz
43 | rst2html5.py README.rst > README.html
44 |
45 | test-all: ## run tests on every Python version with tox
46 | tox
47 |
48 | show: flatdist.txt.gz_out_gauss/plots/corner.pdf
49 | xdg-open $^
50 |
51 | coverage: ## check code coverage quickly with the default Python
52 | 	coverage run --source scripts,experimental -m pytest
53 | coverage report -m
54 | coverage html
55 | $(BROWSER) htmlcov/index.html
56 |
57 | docs: ## render README.rst to HTML
58 | # python3 tutorial/run.py
59 | rst2html5.py README.rst > README.html
60 |
61 | release: dist ## package and upload a release
62 | twine upload -s dist/*.tar.gz
63 |
64 | dist: clean ## builds source and wheel package
65 | $(PYTHON) setup.py sdist
66 | $(PYTHON) setup.py bdist_wheel
67 | ls -l dist
68 |
69 | install: clean ## install the package to the active Python's site-packages
70 | $(PYTHON) setup.py install
71 |
--------------------------------------------------------------------------------
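
Note: the Makefile above exports PRINT_HELP_PYSCRIPT for the `help` target but
never defines it, so `make help` prints nothing. The Makefile appears to be
derived from the cookiecutter-pypackage template; the customary definition from
that template, which would restore `make help` if placed before the export
line, is:

    define PRINT_HELP_PYSCRIPT
    import re, sys

    for line in sys.stdin:
        match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line)
        if match:
            target, help = match.groups()
            print("%-20s %s" % (target, help))
    endef
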
/README.rst:
--------------------------------------------------------------------------------
1 | bexvar
2 | ==================
3 |
4 | Bayesian excess variance for Poisson data time series with backgrounds.
5 | Excess variance is over-dispersion beyond the observational Poisson noise,
6 | caused by an astrophysical source.
7 |
8 | * `Introduction <#introduction>`_
9 | * `Methods <#methods>`_
10 | * `Install <#install>`_
11 | * `Example <#example>`_
12 |
13 | Introduction
14 | -------------------
15 |
16 | In high-energy astrophysics, the analysis of photon count time series
17 | is common. Examples include the detection of gamma-ray bursts,
18 | periodicity searches in pulsars, or the characterisation of
19 | damped random walk-like accretion in the X-ray emission of
20 | active galactic nuclei.
21 |
22 | Methods
23 | --------------
24 |
25 | Paper: https://arxiv.org/abs/2106.14529
26 |
27 | This repository provides new statistical analysis methods for light curves.
28 | They can deal with
29 |
30 | * very low count statistics (0 or a few counts per time bin)
31 | * (potentially variable) instrument sensitivity
32 | * (potentially variable) backgrounds, measured simultaneously in an 'off' region.
33 |
34 | The tools can read eROSITA light curves. Contributions that can read other
35 | file formats are welcome.
36 |
37 | The ``bexvar_ero.py`` tool computes posterior distributions on the Bayesian
38 | excess variance and the source count rate.
39 | 
40 | ``quick_ero.py`` computes simpler statistics, including Bayesian blocks,
41 | fractional variance, the normalised excess variance, and
42 | the amplitude maximum deviation statistic.
43 |
44 | Licence
45 | --------
46 | AGPLv3 (see COPYING file). Contact me if you need a different licence.
47 |
48 | Install
49 | --------
50 |
51 | .. image:: https://img.shields.io/pypi/v/bexvar.svg
52 | :target: https://pypi.python.org/pypi/bexvar
53 |
54 | .. image:: https://github.com/JohannesBuchner/bexvar/actions/workflows/test.yml/badge.svg
55 | :target: https://github.com/JohannesBuchner/bexvar/actions/workflows/test.yml
56 |
57 | .. image:: https://img.shields.io/badge/astroph.HE-arXiv%3A2106.14529-B31B1B.svg
58 | :target: https://arxiv.org/abs/2106.14529
59 | :alt: Publication
60 |
61 |
62 |
63 | Install as usual::
64 |
65 | $ pip3 install bexvar
66 |
67 | This also installs the required `UltraNest <https://johannesbuchner.github.io/UltraNest/>`_
68 | Python package.
69 |
70 |
71 | Example
72 | ----------
73 |
74 | Run with::
75 |
76 | $ bexvar_ero.py 020_LightCurve_00001.fits
77 |
78 | Run simpler variability analyses with::
79 |
80 | $ quick_ero.py 020_LightCurve_*.fits.gz
81 |
82 |
83 | Contributing
84 | --------------
85 |
86 | Contributions are welcome. Please open pull requests
87 | with code contributions, or issues for bugs and questions.
88 |
89 | Contributors include:
90 |
91 | * Johannes Buchner
92 | * David Bogensberger
93 |
94 | If you use this software, please cite this paper: https://arxiv.org/abs/2106.14529
95 |
96 | See also
97 | --------
98 | * https://github.com/rarcodia/eRebin for rebinning eROSITA light curves to eroDays
99 |
--------------------------------------------------------------------------------
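
The scripts are command-line tools, but the core computation in
scripts/bexvar_ero.py is compact enough to sketch standalone. Below is a
minimal illustration (not a packaged API; all input numbers are synthetic) of
the two steps: a per-bin posterior over the log source count rate,
marginalised over the background, and the log-normal scatter likelihood that
the nested sampler explores:

    import numpy as np
    import scipy.stats
    import scipy.special

    # synthetic single-band light curve (hypothetical values):
    src_counts = np.array([3, 0, 5, 2, 8])      # counts in the source region per bin
    bkg_counts = np.array([10, 12, 9, 11, 30])  # counts in the background region per bin
    bkg_area = 10.0         # background region is 10x the source region
    rate_conversion = 25.0  # FRACEXP * TIMEDEL per bin, in seconds

    log_src_crs_grid = np.linspace(-3, 1, 400)
    u = np.linspace(0, 1, 100)[1:-1]

    def bin_posterior(k_src, k_bkg):
        # background rate quantiles from its gamma posterior, scaled to the source region
        bkg_cr = scipy.special.gammaincinv(k_bkg + 1, u) / bkg_area
        weights = np.array([
            scipy.stats.poisson.pmf(k_src, 10**logr * rate_conversion + bkg_cr).mean()
            for logr in log_src_crs_grid])
        return weights / weights.sum()

    pdfs = np.array([bin_posterior(k, b) for k, b in zip(src_counts, bkg_counts)])

    def loglike(log_mean, log_sigma):
        # probability of each grid log-rate under the log-normal scatter model,
        # combined with the per-bin posteriors, as in bexvar_ero.py
        variance_pdf = scipy.stats.norm.pdf(log_src_crs_grid, log_mean, log_sigma)
        return np.log((variance_pdf * pdfs).mean(axis=1) + 1e-100).sum()

    print(loglike(-1.0, 0.1), loglike(-1.0, 1.0))
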
/scripts/quick_ero.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | Given an eROSITA SRCTOOL light curve, computes various variability statistics.
4 |
5 | Run as:
6 |
7 | $ python quick_ero.py 020_LightCurve_00001.fits
8 |
9 | """
10 |
11 | from numpy import log
12 | import numpy as np
13 | import sys
14 | from astropy.table import Table
15 | from astropy.stats import bayesian_blocks
16 |
17 | __version__ = '1.1.1'
18 | __author__ = 'Johannes Buchner'
19 |
20 | def calc_Fvar(flux, flux_err):
21 | assert flux.shape == flux_err.shape
22 | assert np.isfinite(flux).all()
23 | assert np.isfinite(flux_err).all()
24 | flux_mean = np.nanmean(flux)
25 | n_points = len(flux)
26 |
27 | s_square = np.nansum((flux - flux_mean) ** 2) / (n_points - 1)
28 | sig_square = np.nansum(flux_err ** 2) / n_points
29 |
30 | nev = (s_square - sig_square) / flux_mean**2
31 | if not nev > 0.001:
32 | nev = 0.001
33 | fvar = np.sqrt(nev)
34 |
35 | sigxserr_a = np.sqrt(2 / n_points) * (sig_square / flux_mean ** 2)
36 | sigxserr_b = np.sqrt(sig_square / n_points) * (2 * fvar / flux_mean)
37 | sigxserr = np.sqrt(sigxserr_a**2 + sigxserr_b**2)
38 | fvar_err = sigxserr / (2 * fvar)
39 |
40 | return nev, sigxserr, fvar, fvar_err, flux_mean, n_points
41 |
42 |
43 | def calc_MAD(flux, flux_err):
44 | assert flux.shape == flux_err.shape
45 | i = flux.argmin()
46 | j = flux.argmax()
47 | up = flux[j] - flux_err[j]
48 | lo = flux[i] + flux_err[i]
49 |
50 | diff = up - lo
51 | mad_sig = diff / (flux_err[i]**2 + flux_err[j]**2)**0.5
52 |
53 | return mad_sig, diff
54 |
55 | # 1-sigma quantiles and median
56 | #quantiles = scipy.stats.norm().cdf([-1, 0, 1])
57 |
58 | opened_bands = set()  # bands whose output file was already (re)created in this run
59 | for filename in sys.argv[1:]:
60 | print(filename, end="\r")
61 | lc_all = Table.read(filename, hdu='RATE', format='fits')
62 | nbands = lc_all['COUNTS'].shape[1]
63 | for band in range(nbands):
64 | lc = lc_all[lc_all['FRACEXP'][:,band] > 0.1]
65 | if len(lc['TIME']) == 0:
66 | continue
67 | x = lc['TIME'] - lc['TIME'][0]
68 | bc = lc['BACK_COUNTS'][:,band]
69 | c = lc['COUNTS'][:,band]
70 | bgarea = 1. / lc['BACKRATIO']
71 | rate_conversion = lc['FRACEXP'][:,band] * lc['TIMEDEL']
72 |
73 | # compute rate and rate_err ourselves, because SRCTOOL has nans
74 | rate = (c - bc / bgarea) / rate_conversion
75 | sigma_src = (c + 0.75)**0.5 + 1
76 | sigma_bkg = (bc + 0.75)**0.5 + 1
77 |         rate_err = (sigma_src**2 + sigma_bkg**2 / bgarea**2)**0.5 / rate_conversion
78 | assert np.isfinite(rate).all(), rate
79 | assert np.isfinite(rate_err).all(), rate_err
80 |
81 | nev, nev_err, fvar, fvar_err, cr_mean, n_points = calc_Fvar(rate, rate_err)
82 | mad_sig, mad = calc_MAD(rate, rate_err)
83 |
84 | cr = rate.mean()
85 | cr_err = np.mean(rate_err**2)**0.5
86 |
87 | # parameters from eronrta email "regarding BBlocks Alerts" from 18.12.2019
88 | fp_rate = 0.01
89 | ncp_prior = 4 - log(73.53 * fp_rate * len(rate)**-0.478)
90 | edges = bayesian_blocks(t=x, x=rate, sigma=rate_err,
91 | fitness='measures', ncp_prior=ncp_prior)
92 |         nbblocks = len(edges) - 1
93 |
94 |         with open('quickstats_%d.txt' % band, 'a' if band in opened_bands else 'w') as out:
95 | out.write(filename + "\t")
96 | out.write(filename.replace('020_LightCurve_', '').replace('.fits', '').replace('.gz', '') + "\t")
97 | out.write("%d\t" % n_points)
98 | out.write("%f\t" % cr)
99 | out.write("%f\t" % cr_err)
100 | out.write("%f\t" % nev)
101 | out.write("%f\t" % nev_err)
102 | out.write("%f\t" % (nev/nev_err))
103 | out.write("%f\t" % fvar)
104 | out.write("%f\t" % fvar_err)
105 | out.write("%f\t" % (fvar/fvar_err))
106 | out.write("%f\t" % mad)
107 | out.write("%f\t" % mad_sig)
108 | out.write("%d\n" % nbblocks)
109 |             opened_bands.add(band)
110 | print()
111 |
--------------------------------------------------------------------------------
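
The normalised excess variance and fractional variability computed by
calc_Fvar above follow the standard definitions (e.g. Vaughan et al. 2003):
sigma_NXS^2 = (S^2 - <sigma_err^2>) / <x>^2 and F_var = sqrt(sigma_NXS^2).
A self-contained check on synthetic data (all numbers hypothetical):

    import numpy as np

    rng = np.random.default_rng(42)
    n = 50
    flux_err = np.full(n, 0.3)
    # intrinsically variable source (~20% scatter) plus measurement noise:
    flux = 2.0 * np.exp(rng.normal(0, 0.2, n)) + rng.normal(0, 0.3, n)

    flux_mean = flux.mean()
    s_square = ((flux - flux_mean) ** 2).sum() / (n - 1)  # total sample variance
    sig_square = (flux_err ** 2).sum() / n                # mean squared measurement error
    nev = (s_square - sig_square) / flux_mean ** 2        # normalised excess variance
    fvar = np.sqrt(max(nev, 0.001))                       # fractional variability
    print(nev, fvar)  # expect nev near 0.04, the intrinsic fractional scatter squared
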
/experimental/periodic_ero.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | Given an eROSITA SRCTOOL light curve, computes a Bayesian periodogram.
4 |
5 | The model is:
6 |
7 | rate = B + A * ((1 + sin((t/P + p) * 2 * pi)) / 2)**shape
8 |
9 | B: base rate
10 | A: variable rate
11 | P: period
12 | p: phase
13 | shape: signal shape parameter (30 for QPEs, 1 for sine)
14 |
15 | Example:
16 |
17 | $ python periodic_ero.py 020_LightCurve_00001.fits
18 |
19 | It will make a few visualisations and a file containing the resulting parameters.
20 |
21 | """
22 |
23 | import sys
24 | import argparse
25 |
26 | import joblib
27 | import matplotlib.pyplot as plt
28 | from numpy import log, log10, pi, sin
29 | import numpy as np
30 | import scipy.stats, scipy.optimize, scipy.special
31 | from astropy.table import Table
32 | import tqdm.auto as tqdm
33 | #from getdist import MCSamples, plots
34 |
35 | mem = joblib.Memory('.', verbose=False)
36 |
37 | # 1-sigma quantiles and median
38 | quantiles = scipy.stats.norm().cdf([-1, 0, 1])
39 |
40 | N = 1000
41 | M = 1000
42 |
43 | @mem.cache
44 | def estimate_source_cr_marginalised(log_src_crs_grid, src_counts, bkg_counts, bkg_area, rate_conversion):
45 |     """ Compute the PDF over the log(source count rate) grid log_src_crs_grid
46 |     for observing src_counts counts in the source region,
47 |     and bkg_counts counts in a background region bkg_area times larger.
48 |
49 | """
50 | # background counts give background cr deterministically
51 | u = np.linspace(0, 1, N)[1:-1]
52 | def prob(log_src_cr):
53 | src_cr = 10**log_src_cr * rate_conversion
54 | bkg_cr = scipy.special.gammaincinv(bkg_counts + 1, u) / bkg_area
55 | like = scipy.stats.poisson.pmf(src_counts, src_cr + bkg_cr).mean()
56 | return like
57 |
58 | weights = np.array([prob(log_src_cr) for log_src_cr in log_src_crs_grid])
59 | if weights.sum() == 0:
60 |         print("WARNING: all weights are zero; log10 of max total rate:", np.log10(src_counts.max() / rate_conversion))
61 | weights /= weights.sum()
62 |
63 | return weights
64 |
65 | def model(time, base, ampl, period, phase, shape):
66 | return base + ampl * ((1 + sin((time / period + phase) * 2 * pi)) / 2)**shape
67 |
68 | class HelpfulParser(argparse.ArgumentParser):
69 | def error(self, message):
70 | sys.stderr.write('error: %s\n' % message)
71 | self.print_help()
72 | sys.exit(2)
73 |
74 | parser = HelpfulParser(description=__doc__,
75 | epilog="""Johannes Buchner (C) 2020 """,
76 | formatter_class=argparse.ArgumentDefaultsHelpFormatter)
77 | parser.add_argument('lightcurve', type=str, help="eROSITA light curve fits file")
78 | parser.add_argument("--band", type=int, default=0, help="Energy band")
79 | parser.add_argument("--fracexp_min", type=float, default=0.1, help="Smallest fractional exposure to consider")
80 | parser.add_argument("--shape_mean", type=float, default=1, help="Expected shape. Use 1 for sine, 30 for QPE.")
81 | parser.add_argument("--shape_std", type=float, default=0.5, help="Expected shape diversity in dex.")
82 |
83 | args = parser.parse_args()
84 |
85 | filename = args.lightcurve
86 | band = args.band
87 |
88 | lc_all = Table.read(filename, hdu='RATE', format='fits')
89 | if lc_all['COUNTS'].ndim == 1:
90 | lc = lc_all[lc_all['FRACEXP'] > args.fracexp_min]
91 | bc = lc['BACK_COUNTS'].astype(int)
92 | c = lc['COUNTS'].astype(int)
93 | rate_conversion = lc['FRACEXP'] * lc['TIMEDEL']
94 | rate = lc['RATE']
95 | rate_err=lc['RATE_ERR']
96 | else:
97 | nbands = lc_all['COUNTS'].shape[1]
98 | print("band %d" % band)
99 | lc = lc_all[lc_all['FRACEXP'][:,band] > args.fracexp_min]
100 | bc = lc['BACK_COUNTS'][:,band].astype(int)
101 | c = lc['COUNTS'][:,band]
102 | rate_conversion = lc['FRACEXP'][:,band] * lc['TIMEDEL']
103 | rate = lc['RATE'][:,band]
104 | rate_err=lc['RATE_ERR'][:,band]
105 |
106 | bgarea = 1. / lc['BACKRATIO']
107 | t0 = lc['TIME'][0]
108 | x = lc['TIME'] - t0
109 |
110 | if np.log10(c / rate_conversion + 0.001).max() > 2:
111 | log_src_crs_grid = np.linspace(-2, np.log10(c / rate_conversion).max() + 0.5, M)
112 | else:
113 | log_src_crs_grid = np.linspace(-2, 2, M)
114 |
115 | print("preparing time bin posteriors...")
116 | src_posteriors_list = []
117 | for xi, ci, bci, bgareai, rate_conversioni in zip(tqdm.tqdm(x), c, bc, bgarea, rate_conversion):
118 | # print(xi, ci, bci, bgareai, rate_conversioni)
119 | pdf = estimate_source_cr_marginalised(log_src_crs_grid, ci, bci, bgareai, rate_conversioni)
120 | src_posteriors_list.append(pdf)
121 | src_posteriors = np.array(src_posteriors_list)
122 |
123 | print("running qpe...")
124 | outprefix = '%s-band%d-fracexp%s-expsine' % (filename, band, args.fracexp_min)
125 | time = x
126 |
127 | parameter_names = ['jitter', 'logbase', 'logampl', 'logperiod', 'phase', 'shape']
128 | rv_gamma = scipy.stats.norm(np.log10(args.shape_mean), args.shape_std)
129 | rv_base = scipy.stats.norm(0, 2)
130 | rv_ampl = scipy.stats.norm(0, 2)
131 | logTmax = log10((time[-1] - time[0]) * 5)
132 | logTmin = log10(np.min(time[1:] - time[:-1]))
133 |
134 | def transform(cube):
135 | params = cube.copy()
136 | params[0] = 10**(cube[0]*2 - 2)
137 | params[1] = rv_base.ppf(cube[1])
138 | params[2] = rv_ampl.ppf(cube[2])
139 | params[3] = cube[3]*(logTmax - logTmin) + logTmin
140 | params[4] = cube[4]
141 | params[5] = 10**rv_gamma.ppf(cube[5])
142 | return params
143 |
144 | def loglike(params):
145 | jitter, log_base, log_amplfrac, log_period, phase, shape = params
146 | # predict model:
147 | model_logmean = log10(model(time, 10**log_base, 10**(log_base + log_amplfrac), 10**log_period, phase, shape))
148 |
149 | # compute for each grid log-countrate its probability, according to log_mean, log_sigma
150 | variance_pdf = np.exp(-0.5 * ((log_src_crs_grid.reshape((1, -1)) - model_logmean.reshape((-1, 1))) / jitter)**2) / (2 * pi * jitter**2)**0.5
151 | # multiply that probability with the precomputed probabilities (pdfs)
152 | like = log((variance_pdf * src_posteriors).mean(axis=1) + 1e-100).sum()
153 | if not np.isfinite(like):
154 | like = -1e300
155 | return like
156 |
157 | from ultranest import ReactiveNestedSampler
158 | import ultranest.stepsampler
159 | sampler = ReactiveNestedSampler(parameter_names, loglike, transform=transform,
160 | log_dir=outprefix, resume=True)
161 | sampler.stepsampler = ultranest.stepsampler.RegionSliceSampler(nsteps=40, max_nsteps=40, adaptive_nsteps='move-distance')
162 | samples = sampler.run(frac_remain=0.5)['samples']
163 | print("running qpe... done")
164 |
165 | print("plotting ...")
166 |
167 | sampler.plot()
168 |
169 | plt.figure(figsize=(12, 4))
170 | from brokenaxes import brokenaxes
171 |
172 | x1 = x + lc['TIMEDEL'] * 2
173 | x0 = x - lc['TIMEDEL'] * 3
174 | mask_breaks = x0[1:] > x1[:-1]
175 | xlims = list(zip([x0.min()] + list(x0[1:][mask_breaks]), list(x1[:-1][mask_breaks]) + [x1.max()]))
176 | xm = np.hstack([np.linspace(lo, hi, 40) for lo, hi in xlims])
177 | xm[::40] = np.nan
178 |
179 | print(xlims)
180 | from ultranest.plot import PredictionBand
181 |
182 | bax = brokenaxes(xlims=xlims, hspace=.05)
183 |
184 | for jitter, log_base, log_amplfrac, log_period, phase, shape in tqdm.tqdm(samples[:100]):
185 | model_logmean = model(xm, 10**log_base, 10**(log_base + log_amplfrac), 10**log_period, phase, shape)
186 | bax.plot(xm, model_logmean, color='orange', alpha=0.5)
187 |
188 | bax.errorbar(x, y=rate, yerr=rate_err, marker='x', ls=' ')
189 | bax.set_xlabel('Time [s] - %s' % t0)
190 | bax.set_ylabel('Count rate [cts/s]')
191 | plt.yscale('log')
192 | plt.savefig(outprefix + '.pdf', bbox_inches='tight')
193 | plt.close()
194 |
195 | del xm, band
196 | xm = np.linspace(x0.min() - (x1.max() + x0.min()), x1.max() + (x1.max() + x0.min()), 4000)
197 | plt.figure(figsize=(12, 4))
198 | band = PredictionBand(xm)
199 | for jitter, log_base, log_amplfrac, log_period, phase, shape in tqdm.tqdm(samples[:4000]):
200 | #if 10**log_period < 10000: continue
201 | band.add(model(xm, 10**log_base, 10**(log_base + log_amplfrac), 10**log_period, phase, shape))
202 | # plt.plot(xm, model_logmean, color='orange', alpha=0.5)
203 |
204 | band.line(color='orange')
205 | band.shade(q=0.45, color='orange', alpha=0.2)
206 | band.shade(color='orange', alpha=0.2)
207 | plt.errorbar(x, y=rate, yerr=rate_err, marker='x', ls=' ')
208 | plt.xlabel('Time [s] - %s' % t0)
209 | plt.ylabel('Count rate [cts/s]')
210 | plt.yscale('log')
211 | plt.savefig(outprefix + '-full.pdf', bbox_inches='tight')
212 | plt.close()
213 |
--------------------------------------------------------------------------------
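
The role of the shape parameter in the model above is easiest to see by
evaluating the model directly: shape=1 gives a sine-like modulation, while
shape=30 keeps the rate near the base level except for brief flares, as in
quasi-periodic eruptions. A sketch with hypothetical parameter values:

    import numpy as np
    from numpy import sin, pi

    def model(time, base, ampl, period, phase, shape):
        return base + ampl * ((1 + sin((time / period + phase) * 2 * pi)) / 2)**shape

    t = np.linspace(0, 2, 9)  # two periods, sampled coarsely
    print(np.round(model(t, base=0.1, ampl=1.0, period=1.0, phase=0.0, shape=1), 3))
    # -> oscillates smoothly between 0.1 and 1.1
    print(np.round(model(t, base=0.1, ampl=1.0, period=1.0, phase=0.0, shape=30), 3))
    # -> stays near 0.1 except for narrow spikes to 1.1 where sin(...) = 1
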
/scripts/bexvar_ero.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | Given an eROSITA SRCTOOL light curve, computes a Bayesian excess
4 | variance by estimating the mean and variance of
5 | the log of the count rate.
6 |
7 | The model allows a different source count rate in each time bin,
8 | but marginalises over this.
9 | First the source count rate PDF is determined in each time bin,
10 | on a fixed source count grid. The nested sampling algorithm
11 | explores mean and variance combinations and
12 | combines the source count rate PDFs with the mean/variance
13 | information.
14 | No time information is used (order is irrelevant).
15 |
16 | Run as:
17 |
18 | $ python bexvar_ero.py 020_LightCurve_00001.fits
19 |
20 | It will make a few visualisations and a file containing the resulting parameters.
21 |
22 | * 020_LightCurve_00001.fits-bexvar-<band>-corner.png:
23 |
24 | * plot of the intrinsic source count rate and its excess variance
25 |
26 | * 020_LightCurve_00001.fits-bexvar-<band>.png:
27 |
28 | * plot of the light curve and estimated intrinsic rates
29 |
30 | * 020_LightCurve_00001.fits-bexvar-<band>.fits:
31 |
32 | * Bayesian light curve rates in table, with columns
33 |
34 | * 'TIME': time (from SRCTOOL light curve)
35 | * 'RATE': source count rate
36 | * 'RATE_LO': source count rate, lower 1 sigma quantile
37 | * 'RATE_HI': source count rate, upper 1 sigma quantile
38 |
39 | * header:
40 |
41 | * 'pvar_p1': probability that the excess variance exceeds 0.1 dex,
42 | * 'pvar_p3': probability that the excess variance exceeds 0.3 dex,
43 | * 'pvar_1': probability that the excess variance exceeds 1 dex,
44 | * 'rate': estimated mean log(count rate),
45 | * 'rate_err': uncertainty of the mean log(count rate),
46 | * 'scatt': estimated scatter of the log(count rate) in dex,
47 | * 'scatt_lo': lower 1 sigma quantile of the estimated scatter of the log(count rate) in dex,
48 | * 'scatt_hi': upper 1 sigma quantile of the estimated scatter of the log(count rate) in dex,
49 |
50 |
51 | Authors: Johannes Buchner, David Bogensberger
52 |
53 | """
54 |
55 | import matplotlib.pyplot as plt
56 | from numpy import log
57 | import numpy as np
58 | import scipy.stats, scipy.optimize, scipy.special
59 | import sys
60 | from astropy.table import Table
61 |
62 | __version__ = '1.1.1'
63 | __author__ = 'Johannes Buchner'
64 |
65 | # 1-sigma quantiles and median
66 | quantiles = scipy.stats.norm().cdf([-1, 0, 1])
67 |
68 | N = 1000
69 | M = 1000
70 |
71 | def lscg_gen(src_counts, bkg_counts, bkg_area, rate_conversion, density_gp):
72 | """
73 |     Generates a log_src_crs_grid applicable to this particular light curve,
74 |     with appropriately chosen limits, for a faster and more accurate
75 |     run of estimate_source_cr_marginalised and bexvar.
76 | """
77 | # lowest count rate
78 | a = scipy.special.gammaincinv(src_counts + 1, 0.001) / rate_conversion
79 | # highest background count rate
80 | b = scipy.special.gammaincinv(bkg_counts + 1, 0.999) / (rate_conversion * bkg_area)
81 | mindiff = min(a - b)
82 | if mindiff > 0: # background-subtracted rate is positive
83 | m0 = np.log10(mindiff)
84 | else: # more background than source -> subtraction negative somewhere
85 | m0 = -1
86 | # highest count rate (including background)
87 | c = scipy.special.gammaincinv(src_counts + 1, 0.999) / rate_conversion
88 | m1 = np.log10(c.max())
89 | # print(src_counts, bkg_counts, a, b, m0, m1)
90 |
91 | # add a bit of padding to the bottom and top
92 | lo = m0 - 0.05 * (m1 - m0)
93 | hi = m1 + 0.05 * (m1 - m0)
94 | span = hi - lo
95 | if lo < -1:
96 | log_src_crs_grid = np.linspace(-1.0, hi, int(np.ceil(density_gp * (hi + 1.0))))
97 | else:
98 | log_src_crs_grid = np.linspace(lo, hi, int(np.ceil(density_gp * 1.05 * span)))
99 |
100 | return log_src_crs_grid
101 |
102 | def estimate_source_cr_marginalised(log_src_crs_grid, src_counts, bkg_counts, bkg_area, rate_conversion):
103 |     """ Compute the PDF over the log(source count rate) grid log_src_crs_grid
104 |     for observing src_counts counts in the source region,
105 |     and bkg_counts counts in a background region bkg_area times larger.
106 |
107 | """
108 | # background counts give background cr deterministically
109 | u = np.linspace(0, 1, N)[1:-1]
110 | bkg_cr = scipy.special.gammaincinv(bkg_counts + 1, u) / bkg_area
111 | def prob(log_src_cr):
112 | src_cr = 10**log_src_cr * rate_conversion
113 | like = scipy.stats.poisson.pmf(src_counts, src_cr + bkg_cr).mean()
114 | return like
115 |
116 | weights = np.array([prob(log_src_cr) for log_src_cr in log_src_crs_grid])
117 | if not weights.sum() > 0:
118 | print("WARNING: Weight problem! sum is", weights.sum(), np.log10(src_counts.max() / rate_conversion), log_src_crs_grid[0], log_src_crs_grid[-1])
119 | weights /= weights.sum()
120 |
121 | return weights
122 |
123 | def bexvar(log_src_crs_grid, pdfs):
124 | """
125 |     Assumes that the source count rate is log-normally distributed.
126 |     Returns posterior samples of the mean and std of that distribution.
127 | 
128 |     pdfs: PDFs for each time bin,
129 |     defined over the log-source count rate grid log_src_crs_grid.
130 | 
131 |     Returns (log_mean, log_std), each an array of posterior samples.
132 | """
133 |
134 | def transform(cube):
135 | params = cube.copy()
136 | params[0] = cube[0] * (log_src_crs_grid[-1] - log_src_crs_grid[0]) + log_src_crs_grid[0]
137 | params[1] = 10**(cube[1]*4 - 2)
138 | return params
139 |
140 | def loglike(params):
141 | log_mean = params[0]
142 | log_sigma = params[1]
143 | # compute for each grid log-countrate its probability, according to log_mean, log_sigma
144 | variance_pdf = scipy.stats.norm.pdf(log_src_crs_grid, log_mean, log_sigma)
145 | # multiply that probability with the precomputed probabilities (pdfs)
146 | likes = log((variance_pdf.reshape((1, -1)) * pdfs).mean(axis=1) + 1e-100)
147 | like = likes.sum()
148 | if not np.isfinite(like):
149 | like = -1e300
150 | return like
151 |
152 |
153 | from ultranest import ReactiveNestedSampler
154 | sampler = ReactiveNestedSampler(['logmean', 'logsigma'], loglike,
155 | transform=transform, vectorized=False)
156 | samples = sampler.run(viz_callback=False)['samples']
157 | sampler.print_results()
158 | log_mean, log_sigma = samples.transpose()
159 |
160 | return log_mean, log_sigma
161 |
162 | filename = sys.argv[1]
163 |
164 |
165 | lc_all = Table.read(filename, hdu='RATE', format='fits')
166 | nbands = lc_all['COUNTS'].shape[1]
167 | for band in range(nbands):
168 | print("band %d" % band)
169 | lc = lc_all[lc_all['FRACEXP'][:,band] > 0.1]
170 | x = lc['TIME'] - lc['TIME'][0]
171 | bc = lc['BACK_COUNTS'][:,band]
172 | c = lc['COUNTS'][:,band]
173 | bgarea = 1. / lc['BACKRATIO']
174 | fe = lc['FRACEXP'][:,band]
175 | rate_conversion = fe * lc['TIMEDEL']
176 |
177 | log_src_crs_grid = lscg_gen(c, bc, bgarea, rate_conversion, 100)
178 |
179 | src_posteriors = []
180 |
181 | print("preparing time bin posteriors...")
182 | for xi, ci, bci, bgareai, rate_conversioni in zip(x, c, bc, bgarea, rate_conversion):
183 | # print(xi, ci, bci, bgareai, rate_conversioni)
184 | pdf = estimate_source_cr_marginalised(log_src_crs_grid, ci, bci, bgareai, rate_conversioni)
185 | src_posteriors.append(pdf)
186 |
187 | src_posteriors = np.array(src_posteriors)
188 |
189 | print("plotting data...")
190 | cdfs = np.cumsum(src_posteriors, axis=1)
191 |
192 | rate_lo, rate_mid, rate_hi = [np.array([10**np.interp(q, cdf, log_src_crs_grid)
193 | for xi, cdf in zip(x, cdfs)])
194 | for q in quantiles]
195 | plt.errorbar(x=x, y=rate_mid, yerr=[rate_mid - rate_lo, rate_hi - rate_mid],
196 | marker='x', color='k', capsize=3, label='Bayesian rate estimates')
197 | plt.plot(x, c / rate_conversion, 'o ', label='counts', color='k')
198 | plt.plot(x, bc / bgarea / rate_conversion, 'o ', label='background contribution', color='r')
199 | #plt.errorbar(x, y=lc['RATE'][:,band], yerr=lc['RATE_ERR'][:,band], marker='s', linestyle=' ',
200 | # label='naive estimator')
201 |
202 | print("running bexvar...")
203 | logcr_mean, logcr_sigma = bexvar(log_src_crs_grid, src_posteriors)
204 | print("running bexvar... done")
205 |
206 | # plot mean count rate:
207 | lo, mid, hi = scipy.stats.mstats.mquantiles(10**logcr_mean, quantiles)
208 | l = plt.hlines(mid, x.min(), x.max(), color='navy',
209 | linestyles='-', alpha=0.5, label='intrinsic source rate')
210 | # plot its uncertainty:
211 | plt.fill_between([x.min(), x.max()], lo, hi,
212 | alpha=0.5, color=l.get_color(), lw=0)
213 |
214 | # plot scatter:
215 | lo, mid, hi = scipy.stats.mstats.mquantiles(10**(logcr_mean - logcr_sigma), quantiles)
216 | lo2, mid2, hi2 = scipy.stats.mstats.mquantiles(10**(logcr_mean + logcr_sigma), quantiles)
217 | l = plt.hlines([mid, mid2], x.min(), x.max(), color='orange',
218 | alpha=0.5, label='intrinsic scatter', linestyles=['--', '--'])
219 | plt.fill_between([x.min(), x.max()], lo, hi, alpha=0.5, color=l.get_color(), lw=0)
220 | plt.fill_between([x.min(), x.max()], lo2, hi2, alpha=0.5, color=l.get_color(), lw=0)
221 |
222 | plt.legend(loc='best')
223 | plt.ylabel('Count rate [cts/s]')
224 | plt.xlabel('Time [s] - %d' % lc['TIME'][0])
225 | plt.yscale('log')
226 | plt.savefig(filename + '-bexvar-%d.png' % band, bbox_inches='tight')
227 | plt.close()
228 |
229 | import corner
230 | corner.corner(np.transpose([logcr_mean, np.log10(logcr_sigma)]),
231 | labels=['log(source count rate)', 'log(log-scatter)'])
232 | plt.savefig(filename + '-bexvar-%d-corner.png' % band, bbox_inches='tight')
233 | plt.close()
234 | lo, mid, hi = scipy.stats.mstats.mquantiles(logcr_sigma, quantiles)
235 |
236 | # compute rate and rate_err ourselves, because SRCTOOL has nans
237 | rate = (c - bc / bgarea) / rate_conversion
238 | sigma_src = (c + 0.75)**0.5 + 1
239 | sigma_bkg = (bc + 0.75)**0.5 + 1
240 |     rate_err = (sigma_src**2 + sigma_bkg**2 / bgarea**2)**0.5 / rate_conversion
241 |
242 | stats = dict(
243 | pvar_p1=(logcr_sigma>0.1).mean(),
244 | pvar_p3=(logcr_sigma>0.3).mean(),
245 | pvar_1=(logcr_sigma>1.0).mean(),
246 | rate=logcr_mean.mean(),
247 | rate_err=logcr_mean.std(),
248 | scatt=mid,
249 | scatt_lo=lo,
250 | scatt_hi=hi,
251 | )
252 | for k, v in stats.items():
253 | print(k, v)
254 |
255 | t = Table(data=[lc['TIME'], rate_mid, rate_lo, rate_hi],
256 | names=['TIME', 'RATE', 'RATE_LO', 'RATE_HI'],
257 | meta=stats
258 | )
259 | t.write(filename + '-bexvar-%d.fits' % band, format='fits',
260 | overwrite=True)
261 |
--------------------------------------------------------------------------------
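
The FITS table written at the end of the script can be inspected afterwards
with astropy; the summary statistics land in the table metadata, upper-cased
into FITS header keywords. A usage sketch, assuming the script was already run
and produced 020_LightCurve_00001.fits-bexvar-0.fits for band 0:

    from astropy.table import Table

    t = Table.read('020_LightCurve_00001.fits-bexvar-0.fits')
    print(t['TIME', 'RATE', 'RATE_LO', 'RATE_HI'][:5])
    # the stats dict written via meta= above becomes header keywords:
    for key in 'PVAR_P1', 'PVAR_P3', 'PVAR_1', 'SCATT', 'SCATT_LO', 'SCATT_HI':
        print(key, t.meta.get(key))
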
/experimental/cplar_ero.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Continuous Poisson log-autoregressive model for eROSITA light curves.
5 | 
6 | The model assumes an autoregressive process for the instantaneous log count rate
7 | in each time bin. For the background, each time bin is estimated independently.
8 | 
9 | The autoregressive model can handle light curve gaps. Its assumption
10 | is that the power spectrum transitions from white noise with some amplitude (sigma)
11 | to a power law with index -2, at a characteristic damping time-scale (tau).
12 |
13 | sigma, tau and the mean count rate (c) are the most important parameters of this model.
14 |
15 | """
16 |
17 | import numpy as np
18 | import matplotlib.pyplot as plt
19 | from numpy import log, pi, exp
20 | import sys
21 | import tqdm
22 | from astropy.table import Table
23 | import cmdstancache
24 | from brokenaxes import brokenaxes
25 | import corner
26 | from ultranest.plot import PredictionBand
27 |
28 |
29 | model_code = """
30 | data {
31 | int N;
32 |
33 | // duration of time bin
34 | real dt;
35 |   // time elapsed between the end of the previous time bin and the end of the current one;
36 |   // this may be larger than dt when there are gaps
37 | array[N-1] real tsteps;
38 | array[N] int z_obs;
39 | array[N] int B_obs;
40 | vector[N] Barearatio;
41 | vector[N] countrate_conversion;
42 | vector[N] Bcountrate_conversion;
43 |
44 | real prior_logBc_mean;
45 | real prior_logBc_std;
46 | real prior_logc_mean;
47 | real prior_lognoise_mean;
48 | real prior_logc_std;
49 | real prior_lognoise_std;
50 | real prior_logtau_mean;
51 | real prior_logtau_std;
52 | }
53 | transformed data {
54 | vector[N] logBarearatio = log(Barearatio);
55 | vector[N] logcountrate_conversion = log(countrate_conversion);
56 | vector[N] logBcountrate_conversion = log(Bcountrate_conversion);
57 | real logminduration = log(dt);
58 | real T = sum(tsteps);
59 | real logT = log(T);
60 | }
61 | parameters {
62 | // auto-regression time-scale
63 | real logtau;
64 | // mean
65 | real logc;
66 | // sigma
67 | real lognoise;
68 | // unit normal noise deviations
69 | vector[N] W;
70 |
71 | // intrinsic background count rate at each time bin
72 | vector[N] logBy;
73 | }
74 | transformed parameters {
75 | // linear transformations of the log parameters
76 | real phi;
77 | real tau;
78 | real noise;
79 | real c;
80 | // intrinsic source count rate at each time bin
81 | vector[N] logy;
82 |
83 | // transform parameters to linear space
84 | tau = exp(logtau);
85 | noise = exp(lognoise);
86 | c = exp(logc);
87 | phi = exp(-dt / tau);
88 |
89 | // apply centering to real units, AR(1) formulas:
90 | logy[1] = logc + W[1] * noise;
91 | for (i in 2:N) {
92 | logy[i] = logc + exp(-tsteps[i-1] / tau) * (logy[i-1] - logc) + W[i] * noise * tsteps[i-1] / dt;
93 | }
94 | }
95 | model {
96 | // correct for observing efficiency
97 | vector[N] logysrcarea = logy + logcountrate_conversion;
98 | vector[N] logybkgarea = logBy + logBarearatio + logBcountrate_conversion;
99 | vector[N] logytot;// = logysrcarea;
100 | // sum source and background together
101 | for (i in 1:N) {
102 | logytot[i] = log_sum_exp(logysrcarea[i], logybkgarea[i]);
103 | }
104 |
105 | logBy ~ normal(prior_logBc_mean, prior_logBc_std);
106 | B_obs ~ poisson_log(logBy + logBcountrate_conversion);
107 |
108 | // source AR process priors:
109 | logtau ~ normal(prior_logtau_mean, prior_logtau_std);
110 | logc ~ normal(prior_logc_mean, prior_logc_std);
111 | lognoise ~ normal(prior_lognoise_mean, prior_lognoise_std);
112 | logy ~ normal(0, 5); // stay within a reasonable range
113 | W ~ std_normal();
114 | // comparison to total source region counts
115 | z_obs ~ poisson_log(logytot);
116 | }
117 | """
118 |
119 |
120 | np.random.seed(1)
121 |
122 | filename = sys.argv[1]
123 |
124 |
125 | lc_all = Table.read(filename, hdu='RATE', format='fits')
126 | nbands = lc_all['COUNTS'].shape[1]
127 | if len(sys.argv) > 2:
128 | band = int(sys.argv[2])
129 | else:
130 | band = 0
131 |
132 | fracexp = lc_all['FRACEXP'][:,band]
133 | print("band %d" % band)
134 |
135 | print(fracexp.min(), fracexp.max())
136 | lc = lc_all[fracexp > 0.1 * fracexp.max()]
137 | bc = lc['BACK_COUNTS'][:,band].value
138 | c = lc['COUNTS'][:,band].value
139 | bgarea = np.array(1. / lc['BACKRATIO'].value)
140 | # length of the time bin
141 | dt = lc['TIMEDEL']
142 | assert dt.max() == dt.min()
143 | dt = dt.min()
144 | print("dt:", dt)
145 | # TIME is the mid point of the light curve bin
146 | # here we want the starting point:
147 | x_start = lc['TIME'] - lc['TIME'][0] - lc['TIMEDEL'] / 2.0
148 | x_end = lc['TIME'] - lc['TIME'][0] + lc['TIMEDEL'] / 2.0
149 | x = (lc['TIME'] - lc['TIME'][0])
150 | tsteps = (x_end[1:] - x_end[:-1]).value
151 | assert (tsteps > 0).all(), np.unique(tsteps)
152 | assert (bc.astype(int) == bc).all(), bc
153 | prefix = sys.argv[1] + '-%d-cplar1b' % band
154 | fe = lc['FRACEXP'][:,band].value
155 | rate_conversion = fe * dt
156 | #print("tsteps:", tsteps.sum())
157 |
158 | N = len(x_start)
159 | data = dict(
160 | # provide observations
161 | N=N, z_obs=c, B_obs=bc.astype(int),
162 | # additional information about the sampling:
163 | dt=dt, Barearatio=1. / bgarea, tsteps=tsteps,
164 | # source count rate is modulated by fracexp
165 | countrate_conversion=rate_conversion,
166 | # background count rate is modulated by fracexp, except in the hard band,
167 | # where it is assumed constant (particle background dominated)
168 | Bcountrate_conversion=rate_conversion*0 + 1 if band == 2 else rate_conversion,
169 | # background count rates expected:
170 | prior_logBc_mean=0, prior_logBc_std=np.log(10),
171 | # source count rates expected:
172 | prior_logc_mean=0, prior_logc_std=5,
173 | # expected noise level is 10% +- 2 dex
174 | prior_lognoise_mean=np.log(0.1), prior_lognoise_std=np.log(100),
175 | prior_logtau_mean=np.log(x.max()), prior_logtau_std=3,
176 | # prefer long correlation time-scales; the data have to convince us that the
177 | # data points scatter.
178 | # prior_logtau_mean=np.log(x.max()), prior_logtau_std=10 * np.log(x.max() / dt),
179 | # prefer short correlation time-scales; the data have to convince us that the
180 | # data points are similar.
181 | #prior_logtau_mean=np.log(dt), prior_logtau_std=np.log(1000),
182 | )
183 |
184 | # print(c, bc.astype(int), bgarea, rate_conversion, Nsteps.astype(int), dt)
185 |
186 | # Continuous Poisson Log-Auto-Regressive 1 with Background
187 | stan_variables, method_variables = cmdstancache.run_stan(model_code, data=data,
188 | adapt_delta=0.99, max_treedepth=12,
189 | #warmup=5000, iter=10000,
190 | seed=1)
191 | #control=dict(max_treedepth=14))
192 |
193 | paramnames = []
194 | badlist = ['lp__', 'phi', 'Bphi']
195 | #badlist += ['log' + k for k in la.keys()]
196 | # remove linear parameters, only show log:
197 | badlist += [k.replace('log', '') for k in stan_variables.keys()
198 | if 'log' in k and k.replace('log', '') in stan_variables.keys()]
199 |
200 | typical_step = max(np.median(tsteps), dt * 5)
201 |
202 | for broken in False, True:
203 |
204 | fig = plt.figure(figsize=(15, 5))
205 |
206 | if broken:
207 | # find wide gaps in the light curves:
208 | i, = np.where(tsteps > typical_step * 20)
209 | xlims = list(zip([x_start[0] - typical_step] + list(x_start[i+1] + typical_step), list(x_end[i] - typical_step) + [x_end[-1] + typical_step]))
210 | bax = brokenaxes(xlims=xlims, hspace=0.05)
211 | else:
212 | bax = plt.gca()
213 | bax.plot(x, c / rate_conversion, 'o ')
214 | bax.plot(x, bc / bgarea / rate_conversion, 'o ')
215 | y = np.exp(stan_variables['logy'].reshape((-1, N)))
216 | bax.errorbar(
217 | x=x, xerr=dt,
218 | y=np.median(y, axis=0),
219 |         yerr=np.abs(np.quantile(y, [0.005, 0.995], axis=0) - np.median(y, axis=0)),
220 | color='k', ls=' ', elinewidth=0.1, capsize=0,
221 | )
222 | By = np.exp(stan_variables['logBy'].reshape((-1, N))) / bgarea
223 | bax.errorbar(
224 | x=x, xerr=dt,
225 | y=np.median(By, axis=0),
226 |         yerr=np.abs(np.quantile(By, [0.005, 0.995], axis=0) - np.median(By, axis=0)),
227 | color='orange', ls=' ', elinewidth=0.1, capsize=0,
228 | )
229 | bax.set_yscale('log')
230 | bax.set_xlabel('Time')
231 | bax.set_ylabel('Count rate [cts/s]')
232 | bax.set_ylim(min(((bc + 0.1) / bgarea / rate_conversion).min(), ((c + 0.1) / rate_conversion).min()) / 10, None)
233 | fig.savefig(prefix + '_t%s.pdf' % ('_broken' if broken else ''), bbox_inches='tight')
234 | plt.close(fig)
235 |
236 |
237 | print("priors:")
238 | for k, v in sorted(data.items()):
239 | if k.startswith('prior'):
240 | # convert to base 10 for easier reading
241 |         print("%20s: " % k, v / log(10) if 'log' in k else v)
242 |
243 | samples = []
244 |
245 | print("posteriors:")
246 | for k in sorted(stan_variables.keys()):
247 | print('%20s: %.4f +- %.4f' % (k, stan_variables[k].mean(), stan_variables[k].std()))
248 | if k not in badlist and stan_variables[k].ndim == 1:
249 | # convert to base 10 for easier reading
250 | samples.append(stan_variables[k] / log(10) if k.startswith('log') else stan_variables[k])
251 | paramnames.append(k)
252 | elif stan_variables[k].ndim > 1:
253 | plt.figure()
254 | plt.hist(stan_variables[k].mean(axis=1), histtype='step', bins=40, label='over bins')
255 | plt.hist(stan_variables[k].mean(axis=0), histtype='step', bins=40, label='over realisations')
256 | plt.yscale('log')
257 | plt.xlabel(k)
258 | plt.legend(title='average')
259 | plt.savefig(prefix + "_hist_%s.pdf" % k, bbox_inches='tight')
260 | plt.close()
261 |
262 | samples = np.transpose(samples)
263 | print(paramnames)
264 | corner.corner(samples, labels=paramnames)
265 | plt.savefig(prefix + "_corner_log.pdf", bbox_inches='tight')
266 | plt.close()
267 | corner.corner(10**(samples), labels=[k.replace('log', '') for k in paramnames])
268 | plt.savefig(prefix + "_corner.pdf", bbox_inches='tight')
269 | plt.close()
270 |
271 | # switch to units of seconds here
272 | omega = np.linspace(0, 10. / dt, 10000)
273 | # longest duration: entire observation
274 | omega1 = 1. / x.max()
275 | # Nyquist frequency: twice the bin duration
276 | omega0 = 1. / (2 * dt)
277 |
278 | pband2 = PredictionBand(omega)
279 |
280 | for tausample, sigma in zip(tqdm.tqdm(stan_variables['tau'].flatten()), stan_variables['noise'].flatten()):
281 | DT = 1 # unit: seconds
282 | phi = exp(-DT / tausample)
283 | gamma = DT / tausample
284 | specdens = (2 * pi)**0.5 * sigma**2 / (1 - phi**2) * gamma / (pi * (gamma**2 + omega**2))
285 | pband2.add(specdens)
286 |
287 | pband2.line(color='r')
288 | pband2.shade(color='r', alpha=0.5)
289 | pband2.shade(q=0.95/2, color='r', alpha=0.1)
290 |
291 | plt.xlabel('Frequency [Hz]')
292 | plt.ylabel('Power spectral density (PSD)')
293 | plt.xscale('log')
294 | plt.yscale('log')
295 | ylo, yhi = plt.ylim()
296 | # mark observing window:
297 | plt.vlines([omega0, omega1], ylo, yhi, ls='--', color='k', alpha=0.5)
298 | plt.ylim(ylo, yhi)
299 | #plt.xlim(2 / (N * T), 1000 * 2 / (N * T))
300 | plt.savefig(prefix + '_F.pdf', bbox_inches='tight')
301 | plt.close()
302 |
--------------------------------------------------------------------------------
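
The AR(1) recursion in the Stan "transformed parameters" block above
corresponds to the following forward simulation; a sketch with hypothetical
parameter values, useful for checking what a given (tau, noise, c) combination
implies for a light curve:

    import numpy as np

    rng = np.random.default_rng(0)
    dt, n = 100.0, 200                  # bin length [s] and number of bins
    tsteps = np.full(n - 1, dt)         # contiguous bins; gaps would enlarge entries
    tau, noise = 2000.0, 0.3            # damping time-scale [s], per-step scatter
    logc = np.log(1.0)                  # log mean count rate

    logy = np.empty(n)
    logy[0] = logc + rng.normal() * noise
    for i in range(1, n):
        # mirror the Stan recursion: decay towards the mean over the elapsed
        # time, with noise scaled by the step length, as written in the model
        logy[i] = (logc + np.exp(-tsteps[i - 1] / tau) * (logy[i - 1] - logc)
                   + rng.normal() * noise * tsteps[i - 1] / dt)
    rate = np.exp(logy)
    print(rate.mean(), rate.std())
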
/experimental/bexvar2_ero.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | Given an eROSITA SRCTOOL light curve, computes a Bayesian excess
4 | variance by estimating the mean and variance of
5 | the log of the count rate.
6 |
7 | The model allows a different source count rate in each time bin,
8 | but marginalises over this.
9 | First the source count rate PDF is determined in each time bin,
10 | on a fixed source count grid. The nested sampling algorithm
11 | explores mean and variance combinations and
12 | combines the source count rate PDFs with the mean/variance
13 | information.
14 | No time information is used (order is irrelevant).
15 |
16 | Run as:
17 |
18 | $ python bexvar2_ero.py 020_LightCurve_00001.fits
19 |
20 | It will make a few visualisations and a file containing the resulting parameters.
21 |
22 | * 020_LightCurve_00001.fits-bexvar-<band>-corner.png:
23 |
24 | * plot of the intrinsic source count rate and its excess variance
25 |
26 | * 020_LightCurve_00001.fits-bexvar-<band>.png:
27 |
28 | * plot of the light curve and estimated intrinsic rates
29 |
30 | * 020_LightCurve_00001.fits-bexvar-<band>.fits:
31 |
32 | * Bayesian light curve rates in table, with columns
33 |
34 | * 'TIME': time (from SRCTOOL light curve)
35 | * 'RATE': source count rate
36 | * 'RATE_LO': source count rate, lower 1 sigma quantile
37 | * 'RATE_HI': source count rate, upper 1 sigma quantile
38 |
39 | * header:
40 |
41 | * 'pvar_p1': probability that the excess variance exceeds 0.1 dex,
42 | * 'pvar_p3': probability that the excess variance exceeds 0.3 dex,
43 | * 'pvar_1': probability that the excess variance exceeds 1 dex,
44 | * 'rate': estimated mean log(count rate),
45 | * 'rate_err': uncertainty of the mean log(count rate),
46 | * 'scatt': estimated scatter of the log(count rate) in dex,
47 | * 'scatt_lo': lower 1 sigma quantile of the estimated scatter of the log(count rate) in dex,
48 | * 'scatt_hi': upper 1 sigma quantile of the estimated scatter of the log(count rate) in dex,
49 |
50 |
51 | Authors: Johannes Buchner, David Bogensberger
52 |
53 | """
54 |
55 | import matplotlib.pyplot as plt
56 | from numpy import log
57 | import numpy as np
58 | import scipy.stats, scipy.optimize, scipy.special
59 | import sys
60 | from astropy.table import Table
61 |
62 | __version__ = '1.0.1'
63 | __author__ = 'Johannes Buchner'
64 |
65 | # 1-sigma quantiles and median
66 | quantiles = scipy.stats.norm().cdf([-1, 0, 1])
67 |
68 | N = 1000
69 | M = 1000
70 |
71 | def lscg_gen(src_counts, bkg_counts, bkg_area, rate_conversion, density_gp):
72 | """
73 |     Generates a log_src_crs_grid applicable to this particular light curve,
74 |     with appropriately chosen limits, for a faster and more accurate
75 |     run of estimate_source_cr_marginalised and bexvar.
76 | """
77 | # lowest count rate
78 | a = scipy.special.gammaincinv(src_counts + 1, 0.001) / rate_conversion
79 | # highest background count rate
80 | b = scipy.special.gammaincinv(bkg_counts + 1, 0.999) / (rate_conversion * bkg_area)
81 | mindiff = min(a - b)
82 | if mindiff > 0: # background-subtracted rate is positive
83 | m0 = np.log10(mindiff)
84 | else: # more background than source -> subtraction negative somewhere
85 | m0 = -1
86 | # highest count rate (including background)
87 | c = scipy.special.gammaincinv(src_counts + 1, 0.999) / rate_conversion
88 | m1 = np.log10(c.max())
89 | # print(src_counts, bkg_counts, a, b, m0, m1)
90 |
91 | # add a bit of padding to the bottom and top
92 | lo = m0 - 0.05 * (m1 - m0)
93 | hi = m1 + 0.05 * (m1 - m0)
94 | span = hi - lo
95 | if lo < -1:
96 | log_src_crs_grid = np.linspace(-1.0, hi, int(np.ceil(density_gp * (hi + 1.0))))
97 | else:
98 | log_src_crs_grid = np.linspace(lo, hi, int(np.ceil(density_gp * 1.05 * span)))
99 |
100 | return log_src_crs_grid
101 |
102 | def estimate_source_cr_marginalised(log_src_crs_grid, src_counts, bkg_counts, bkg_area, rate_conversion):
103 |     """ Compute the PDF over the log(source count rate) grid log_src_crs_grid
104 |     for observing src_counts counts in the source region,
105 |     and bkg_counts counts in a background region bkg_area times larger.
106 |
107 | """
108 | # background counts give background cr deterministically
109 | u = np.linspace(0, 1, N)[1:-1]
110 | bkg_cr = scipy.special.gammaincinv(bkg_counts + 1, u) / bkg_area
111 | def prob(log_src_cr):
112 | src_cr = 10**log_src_cr * rate_conversion
113 | like = scipy.stats.poisson.pmf(src_counts, src_cr + bkg_cr).mean()
114 | return like
115 |
116 | weights = np.array([prob(log_src_cr) for log_src_cr in log_src_crs_grid])
117 | if not weights.sum() > 0:
118 | print("WARNING: Weight problem! sum is", weights.sum(), np.log10(src_counts.max() / rate_conversion), log_src_crs_grid[0], log_src_crs_grid[-1])
119 | weights /= weights.sum()
120 |
121 | return weights
122 |
123 | def bexvar(log_src_crs_grid, pdfs):
124 | """
125 |     Assumes that the source count rate is log-normally distributed.
126 |     Returns posterior samples of the mean and std of that distribution.
127 | 
128 |     pdfs: PDFs for each time bin,
129 |     defined over the log-source count rate grid log_src_crs_grid.
130 | 
131 |     Returns (log_mean, log_std), each an array of posterior samples.
132 | """
133 |
134 | gaussian_means = []
135 | gaussian_stdevs = []
136 | numerical_pdfs = []
137 | for pdf_in in pdfs:
138 | pdf = pdf_in / pdf_in.sum()
139 | gauss_mean = np.average(log_src_crs_grid, weights=pdf)
140 | gauss_sigma = np.sqrt(np.cov(log_src_crs_grid, aweights=pdf))
141 | rv = scipy.stats.norm(gauss_mean, gauss_sigma)
142 | gauss_logpdf = rv.logpdf(log_src_crs_grid)
143 | gauss_pdf = rv.pdf(log_src_crs_grid)
144 | gauss_norm = gauss_pdf.sum()
145 | surprise = np.sum(((gauss_logpdf - np.log(gauss_norm)) / np.log(2) - np.log2((pdf + 1e-300))) * (gauss_pdf / gauss_norm))
146 | print("checking whether analytic integration is possible...", surprise, gauss_pdf[0] / (gauss_pdf.max()))
147 | if surprise < 0.1 and gauss_pdf[0] < gauss_pdf.max() / 100:
148 | gaussian_means.append(gauss_mean)
149 | gaussian_stdevs.append(gauss_sigma)
150 | else:
151 | numerical_pdfs.append(pdf_in)
152 |
153 | numerical_pdfs = np.array(numerical_pdfs)
154 | gaussian_means = np.array(gaussian_means)
155 | gaussian_stdevs = np.array(gaussian_stdevs)
156 | has_numerical_pdfs = len(numerical_pdfs) > 0
157 | has_gaussian_pdfs = len(gaussian_stdevs) > 0
158 | print("using %d analytic, %d numerical integrals" % (len(gaussian_stdevs), len(numerical_pdfs)))
159 |
160 | def transform(cube):
161 | params = cube.copy()
162 | params[0] = cube[0] * (log_src_crs_grid[-1] - log_src_crs_grid[0]) + log_src_crs_grid[0]
163 | params[1] = 10**(cube[1]*4 - 2)
164 | return params
165 |
166 | def loglike(params):
167 | log_mean = params[0]
168 | log_sigma = params[1]
169 |
170 | like = 0
171 | if has_numerical_pdfs:
172 | # compute for each grid log-countrate its probability, according to log_mean, log_sigma
173 | variance_pdf = scipy.stats.norm.pdf(log_src_crs_grid, log_mean, log_sigma)
174 | # multiply that probability with the precomputed probabilities (pdfs)
175 | numerical_likes = log((variance_pdf.reshape((1, -1)) * numerical_pdfs).mean(axis=1) + 1e-100)
176 | like += numerical_likes.sum()
177 |
178 | if has_gaussian_pdfs:
179 | # compute integral of the product of two gaussians analytically
180 | total_variance = log_sigma**2 + gaussian_stdevs**2
181 | gauss_likes = -0.5 * (log_mean - gaussian_means)**2 / total_variance - 0.5 * np.log(2 * np.pi * total_variance)
182 | like += gauss_likes.sum()
183 |
184 | if not np.isfinite(like):
185 | like = -1e300
186 |
187 | return like
188 |
189 |
190 | from ultranest import ReactiveNestedSampler
191 | sampler = ReactiveNestedSampler(['logmean', 'logsigma'], loglike,
192 | transform=transform, vectorized=False)
193 | samples = sampler.run(viz_callback=False)['samples']
194 | sampler.print_results()
195 | log_mean, log_sigma = samples.transpose()
196 |
197 | return log_mean, log_sigma
198 |
199 | filename = sys.argv[1]
200 |
201 |
202 | lc_all = Table.read(filename, hdu='RATE', format='fits')
203 | nbands = lc_all['COUNTS'].shape[1]
204 | for band in range(nbands):
205 | print("band %d" % band)
206 | lc = lc_all[lc_all['FRACEXP'][:,band] > 0.1]
207 | x = lc['TIME'] - lc['TIME'][0]
208 | bc = lc['BACK_COUNTS'][:,band].astype(int)
209 | c = lc['COUNTS'][:,band]
210 | bgarea = 1. / lc['BACKRATIO']
211 | fe = lc['FRACEXP'][:,band]
212 | rate_conversion = fe * lc['TIMEDEL']
213 |
214 | log_src_crs_grid = lscg_gen(c, bc, bgarea, rate_conversion, 100)
215 |
216 | src_posteriors = []
217 |
218 | print("preparing time bin posteriors...")
219 | for xi, ci, bci, bgareai, rate_conversioni in zip(x, c, bc, bgarea, rate_conversion):
220 | # print(xi, ci, bci, bgareai, rate_conversioni)
221 | pdf = estimate_source_cr_marginalised(log_src_crs_grid, ci, bci, bgareai, rate_conversioni)
222 | src_posteriors.append(pdf)
223 |
224 | src_posteriors = np.array(src_posteriors)
225 |
226 | print("plotting data...")
227 | cdfs = np.cumsum(src_posteriors, axis=1)
228 |
229 | rate_lo, rate_mid, rate_hi = [np.array([10**np.interp(q, cdf, log_src_crs_grid)
230 | for xi, cdf in zip(x, cdfs)])
231 | for q in quantiles]
232 | plt.errorbar(x=x, y=rate_mid, yerr=[rate_mid - rate_lo, rate_hi - rate_mid],
233 | marker='x', color='k', capsize=3, label='Bayesian rate estimates')
234 | plt.plot(x, c / rate_conversion, 'o ', label='counts', color='k')
235 | plt.plot(x, bc / bgarea / rate_conversion, 'o ', label='background contribution', color='r')
236 | #plt.errorbar(x, y=lc['RATE'][:,band], yerr=lc['RATE_ERR'][:,band], marker='s', linestyle=' ',
237 | # label='naive estimator')
238 |
239 | print("running bexvar...")
240 | logcr_mean, logcr_sigma = bexvar(log_src_crs_grid, src_posteriors)
241 | print("running bexvar... done")
242 |
243 | # plot mean count rate:
244 | lo, mid, hi = scipy.stats.mstats.mquantiles(10**logcr_mean, quantiles)
245 | l = plt.hlines(mid, x.min(), x.max(), color='navy',
246 | linestyles='-', alpha=0.5, label='intrinsic source rate')
247 | # plot its uncertainty:
248 | plt.fill_between([x.min(), x.max()], lo, hi,
249 | alpha=0.5, color=l.get_color(), lw=0)
250 |
251 | # plot scatter:
252 | lo, mid, hi = scipy.stats.mstats.mquantiles(10**(logcr_mean - logcr_sigma), quantiles)
253 | lo2, mid2, hi2 = scipy.stats.mstats.mquantiles(10**(logcr_mean + logcr_sigma), quantiles)
254 | l = plt.hlines([mid, mid2], x.min(), x.max(), color='orange',
255 | alpha=0.5, label='intrinsic scatter', linestyles=['--', '--'])
256 | plt.fill_between([x.min(), x.max()], lo, hi, alpha=0.5, color=l.get_color(), lw=0)
257 | plt.fill_between([x.min(), x.max()], lo2, hi2, alpha=0.5, color=l.get_color(), lw=0)
258 |
259 | plt.legend(loc='best')
260 | plt.ylabel('Count rate [cts/s]')
261 | plt.xlabel('Time [s] - %d' % lc['TIME'][0])
262 | plt.yscale('log')
263 | plt.savefig(filename + '-bexvar-%d.png' % band, bbox_inches='tight')
264 | plt.close()
265 |
266 | import corner
267 | corner.corner(np.transpose([logcr_mean, np.log10(logcr_sigma)]),
268 | labels=['log(source count rate)', 'log(log-scatter)'])
269 | plt.savefig(filename + '-bexvar-%d-corner.png' % band, bbox_inches='tight')
270 | plt.close()
271 | lo, mid, hi = scipy.stats.mstats.mquantiles(logcr_sigma, quantiles)
272 |
273 | # compute rate and rate_err ourselves, because SRCTOOL gives NaNs
274 | rate = (c - bc / bgarea) / rate_conversion
275 | sigma_src = (c + 0.75)**0.5 + 1  # Gehrels (1986) approximate Poisson error
276 | sigma_bkg = (bc + 0.75)**0.5 + 1
277 | rate_err = (sigma_src**2 + sigma_bkg**2 / bgarea**2)**0.5 / rate_conversion  # bgarea enters squared in the variance
278 |
279 | stats = dict(
280 | pvar_p1=(logcr_sigma>0.1).mean(),
281 | pvar_p3=(logcr_sigma>0.3).mean(),
282 | pvar_1=(logcr_sigma>1.0).mean(),
283 | rate=logcr_mean.mean(),
284 | rate_err=logcr_mean.std(),
285 | scatt=mid,
286 | scatt_lo=lo,
287 | scatt_hi=hi,
288 | )
289 | for k, v in stats.items():
290 | print(k, v)
291 |
292 | t = Table(data=[lc['TIME'], rate_mid, rate_lo, rate_hi],
293 | names=['TIME', 'RATE', 'RATE_LO', 'RATE_HI'],
294 | meta=stats
295 | )
296 | t.write(filename + '-bexvar-%d.fits' % band, format='fits',
297 | overwrite=True)
298 |
--------------------------------------------------------------------------------
/experimental/cplar_ero_full.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | Continuous poisson log-autoregressive model for eROSITA light curves.
5 |
6 | The model assumes a autoregressive model for the instantaneous log-count rate
7 | in each time bin. For the background, each time bin is estimated independently.
8 |
9 | The autoregressive model can take care of lightcurve gaps. Its assumption
10 | is that the power spectrum transitions from white noise with some amplitude (sigma)
11 | to a powerlaw with index -2, at a characteristic dampening time-scale (tau).
12 |
13 | sigma, tau and the mean count rate (c) are the most important parameters of this model.
14 |
15 | """
16 |
17 | import numpy as np
18 | import matplotlib.pyplot as plt
19 | from numpy import log, pi, exp
20 | import sys
21 | import tqdm
22 | import h5py
23 | import datetime
24 | from brokenaxes import brokenaxes
25 | import astropy.time, astropy.units  # astropy.units is used in tomjd() below
26 | from astropy.table import Table
27 | import cmdstancache
28 | import corner
29 | from ultranest.plot import PredictionBand
30 |
31 |
32 | model_code = """
33 | data {
34 | int N;
35 |
36 | // duration of time bin
37 | real dt;
38 | // time between the end of the previous time bin and the end of the current time bin;
39 | // this may differ from dt when there are gaps
40 | real tstep;
41 | array[N] int z_obs;
42 | array[N] int B_obs;
43 | vector[N] Barearatio;
44 | vector[N] countrate_conversion;
45 | vector[N] Bcountrate_conversion;
46 | int N_good;
47 | array[N] int mask_good;
48 |
49 | real prior_logc_mean;
50 | real prior_lognoise_mean;
51 | real prior_logc_std;
52 | real prior_lognoise_std;
53 | real prior_logtau_mean;
54 | real prior_logtau_std;
55 | }
56 | transformed data {
57 | vector[N] logBarearatio = log(Barearatio);
58 | vector[N] logcountrate_conversion = log(countrate_conversion);
59 | vector[N] logBcountrate_conversion = log(Bcountrate_conversion);
60 | real logminduration = log(dt);
61 | real T = tstep * N;
62 | real logT = log(T);
63 |
64 | array[N_good] int B_obs_filtered;
65 | array[N_good] int z_obs_filtered;
66 | {
67 | int j = 0;
68 | for (i in 1:N) {
69 | if (mask_good[i] == 1) {
70 | j += 1;
71 | B_obs_filtered[j] = B_obs[i];
72 | z_obs_filtered[j] = z_obs[i];
73 | }
74 | }
75 | }
76 | }
77 | parameters {
78 | // auto-regression time-scale
79 | real logtau;
80 | // mean
81 | real logc;
82 | // sigma
83 | real lognoise;
84 | // unit normal noise deviations
85 | vector[N] W;
86 |
87 | // intrinsic background count rate at each time bin
88 | vector[N] logBy;
89 |
90 | // hyper-parameter for background count rates
91 | real prior_logBc_mean;
92 | real<lower=0, upper=5> prior_logBc_std;  // bounds must match the uniform prior below
93 | }
94 | transformed parameters {
95 | // linear transformations of the log parameters
96 | real phi;
97 | real tau;
98 | real noise;
99 | real c;
100 | // intrinsic source count rate at each time bin
101 | vector[N] logy;
102 |
103 | // transform parameters to linear space
104 | tau = exp(logtau);
105 | noise = exp(lognoise);
106 | c = exp(logc);
107 | phi = exp(-dt / tau);
108 |
109 | // apply centering to real units, AR(1) formulas:
110 | logy[1] = logc + W[1] * noise;
111 | for (i in 2:N) {
112 | logy[i] = logc + exp(-tstep / tau) * (logy[i-1] - logc) + W[i] * noise * tstep / dt;
113 | // print(i, ": ", logc, " ", tstep, " ", tau, " ", dt, " ", logy[i-1], " ", logy[i], " ", W[i], " ", noise);
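| // editor's note: for tstep == dt this is the standard AR(1) update with
| // phi = exp(-dt / tau), whose stationary standard deviation is
| // noise / sqrt(1 - phi^2); the tstep / dt factor rescales the innovation
| // across gaps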
114 | }
115 | }
116 | model {
117 | // correct for observing efficiency
118 | vector[N] logysrcarea = logy + logcountrate_conversion;
119 | vector[N] logybkgarea = logBy + logBarearatio + logBcountrate_conversion;
120 | vector[N_good] logytot_filtered;
121 | vector[N_good] logBy_filtered;
122 | // sum source and background together
123 | {
124 | int j = 0;
125 | for (i in 1:N) {
126 | if (mask_good[i] == 1) {
127 | j += 1;
128 | logytot_filtered[j] = log_sum_exp(logysrcarea[i], logybkgarea[i]);
129 | logBy_filtered[j] = logBy[i] + logBcountrate_conversion[i];
130 | // print(i, ": ", logytot_filtered[j], " ", logy[i], " ", logcountrate_conversion[i], logysrcarea[i], " ", logybkgarea[i]);
131 | }
132 | }
133 | }
134 |
135 | prior_logBc_std ~ uniform(0, 5);
136 | logBy ~ normal(prior_logBc_mean, prior_logBc_std);
137 | B_obs_filtered ~ poisson_log(logBy_filtered);
138 |
139 | // source AR process priors:
140 | logtau ~ normal(prior_logtau_mean, prior_logtau_std);
141 | logc ~ normal(prior_logc_mean, prior_logc_std);
142 | lognoise ~ normal(prior_lognoise_mean, prior_lognoise_std);
143 | //logy ~ normal(0, 5); // stay within a reasonable range
144 | W ~ std_normal();
145 | // comparison to total source region counts
146 | z_obs_filtered ~ poisson_log(logytot_filtered);
147 | }
148 | """
149 |
150 |
151 |
152 | filename = sys.argv[1]
153 |
154 | seed = 1
155 |
156 | lc_all = Table.read(filename, hdu='RATE', format='fits')
157 | nbands = lc_all['COUNTS'].shape[1]
158 | if len(sys.argv) > 2:
159 | band = int(sys.argv[2])
160 | else:
161 | band = 0
162 |
163 | fracexp = lc_all['FRACEXP'][:,band]
164 | indices_good_fracexp, = np.where(fracexp > fracexp.max() * 0.1)
165 | print("band %d" % band)
166 |
167 | print(fracexp.min(), fracexp.max())
168 | # only model the part of the light curve where we have data;
169 | # skip the last bin, because its TIMEDEL differs
170 | lc = lc_all[indices_good_fracexp.min() : min(len(fracexp) - 1, indices_good_fracexp.max() + 1)]
171 | bc = lc['BACK_COUNTS'][:,band].value
172 | c = lc['COUNTS'][:,band].value
173 | bgarea = np.array(1. / lc['BACKRATIO'].value)
174 | # length of the time bin
175 | dt = lc['TIMEDEL']
176 | assert dt.max() == dt.min(), (dt.max(), dt.min())
177 | dt = dt.min()
178 | print("dt:", dt)
179 | # TIME is the midpoint of the light-curve bin;
180 | # here we want the start and end points:
181 | t0 = lc['TIME'][0]
182 | x_start = lc['TIME'] - t0 - lc['TIMEDEL'] / 2.0
183 | x_end = lc['TIME'] - t0 + lc['TIMEDEL'] / 2.0
184 | x = (lc['TIME'] - t0)
185 | tsteps = (x_end[1:] - x_end[:-1]).value
186 | assert (tsteps == dt).all(), np.unique(tsteps)
187 | assert (bc.astype(int) == bc).all(), bc
188 | prefix = sys.argv[1] + '-%d-cplar1full' % band
189 | fe = lc['FRACEXP'][:,band].value
190 | rate_conversion = fe * dt
191 | mask_good = fe > fe.max() * 0.1
192 | assert (fe[mask_good] > 0).all()
193 |
194 | """
195 | fig, axs = plt.subplots(2, 1, figsize=(25, 10), sharex=True)
196 | bax = axs[0]
197 | bax.plot(lc_all['TIME'], fracexp, 'o ', ms=2)
198 | bax.plot(lc['TIME'], fe, 's-', ms=2)
199 | bax.set_xlabel('Time [s]')
200 | bax.set_ylabel('FRACEXP')
201 | bax.set_xlim(lc_all['TIME'].min(), lc_all['TIME'].max())
202 | bax = axs[1]
203 | bax.plot(lc_all['TIME'], lc_all['COUNTS'][:,band].value, 'o ', ms=2)
204 | bax.plot(lc['TIME'], c, 's-', ms=2)
205 | bax.set_xlabel('Time [s]')
206 | bax.set_ylabel('COUNTS')
207 | bax.set_ylim(1, None)
208 | bax.set_yscale('log')
209 | bax.set_xlim(lc_all['TIME'].min(), lc_all['TIME'].max())
210 | fig.savefig(prefix + '_fracexp.pdf')
211 | plt.close()
212 | """
213 |
214 | #print("tsteps:", tsteps.sum())
215 |
216 | N = len(x_start)
217 | data = dict(
218 | # provide observations
219 | N=N, z_obs=c, B_obs=bc.astype(int),
220 | # additional information about the sampling:
221 | dt=dt, Barearatio=1. / bgarea, tstep=dt,
222 | # source count rate is modulated by fracexp
223 | countrate_conversion=rate_conversion,
224 | # background count rate is modulated by fracexp, except in the hard band,
225 | # where it is assumed constant (particle background dominated)
226 | Bcountrate_conversion=(rate_conversion * 0 + 1) if band == 2 else rate_conversion,
227 | N_good = mask_good.sum(), mask_good=mask_good * 1,
228 | # source count rates expected:
229 | prior_logc_mean=0, prior_logc_std=5,
230 | # expected noise level is 10% +- 2 dex
231 | prior_lognoise_mean=np.log(0.1), prior_lognoise_std=np.log(100),
232 | prior_logtau_mean=np.log(x.max()), prior_logtau_std=3,
233 | # prefer long correlation time-scales; the data have to convince us that the
234 | # data points scatter.
235 | # prior_logtau_mean=np.log(x.max()), prior_logtau_std=10 * np.log(x.max() / dt),
236 | # prefer short correlation time-scales; the data have to convince us that the
237 | # data points are similar.
238 | #prior_logtau_mean=np.log(dt), prior_logtau_std=np.log(1000),
239 | )
240 |
241 | def init_function(seed):
242 | # guess good starting parameters for the chain
243 | rng = np.random.RandomState(seed)
244 |
245 | guess = dict(
246 | # start with short time-scales: all bins independent
247 | logtau=np.log(dt / 2),
248 | # background count rates; estimated from background counts
249 | logBy=np.array(np.log((bc + 0.1) / (0.0001 + data['Bcountrate_conversion']))),
250 | # background count rates expected:
251 | prior_logBc_mean=np.median(np.log((bc + 0.1) / (0.0001 + data['Bcountrate_conversion']))),
252 | prior_logBc_std=np.log(10),
253 | # average count rate; estimated from counts
254 | logc=np.median(np.log(((c + 0.1) / (0.0001 + data['countrate_conversion'])))),
255 | # minimal noise
256 | lognoise=np.log(1e-10),
257 | W=rng.normal(size=N),
258 |
259 | )
260 | # print("initial guess:", guess)
261 | return guess
262 |
263 | # Continuous Poisson Log-Auto-Regressive 1 with Background
264 | stan_variables, method_variables = cmdstancache.run_stan(
265 | model_code, data=data,
266 | # adapt_delta=0.98, max_treedepth=12,
267 | #show_console=True,
268 | refresh=10,
269 | inits=init_function(seed),
270 | # iter_warmup=2000, iter_sampling=1000,
271 | seed=seed)
272 |
273 | paramnames = []
274 | badlist = ['lp__', 'phi', 'Bphi']
275 | #badlist += ['log' + k for k in la.keys()]
276 | # remove linear parameters, only show log:
277 | badlist += [k.replace('log', '') for k in stan_variables.keys()
278 | if 'log' in k and k.replace('log', '') in stan_variables.keys()]
279 |
280 | typical_step = max(np.median(tsteps), dt * 5)
281 |
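| # convert mission time (seconds since the reference epoch, presumably
| # MJDREF = 51543.875 for eROSITA) to MJD relative to the plotting epoch mjd0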
282 | def tomjd(t_seconds):
283 | return (astropy.time.Time(51543.875, format='mjd') + (t_seconds + t0) * astropy.units.s).mjd - mjd0
284 |
285 | mjd0 = 58828
286 | x_mjd = tomjd(x.value)
287 | print(x_mjd, x.value)
288 |
289 | for broken in False, True:
290 | print("plotting time series...")
291 | fig = plt.figure(figsize=(15, 5))
292 | if broken:
293 | i, = np.where((x_end[mask_good][1:] - x_end[mask_good][:-1]).value > typical_step * 20)
294 | xlims = list(
295 | zip([tomjd(x_start.value[mask_good][0] - 3 * typical_step)] + list(tomjd(x_start.value[mask_good][i+1] - 3 * typical_step)),
296 | list(tomjd(x_end.value[mask_good][i] + 3 * typical_step)) + [tomjd(x_end.value[mask_good][-1] + 3 * typical_step)]))
297 | print(xlims)
298 | bax = brokenaxes(xlims=xlims, hspace=0.05)
299 | else:
300 | bax = plt.gca()
301 |
302 | bax.plot(x_mjd[mask_good], ((c + 0.1) / rate_conversion)[mask_good], 'x ', ms=4, label='source count rate', color='tab:blue')
303 | bax.plot(x_mjd[mask_good], ((bc + 0.1) / bgarea / rate_conversion)[mask_good], '+ ', ms=4, label='background count rate', color='gray')
304 | y = np.exp(stan_variables['logy'].reshape((-1, N)))
305 | bax.plot(x_mjd, np.median(y, axis=0), color='navy')
306 | bax.fill_between(
307 | x_mjd,
308 | np.quantile(y, 0.005, axis=0),
309 | np.quantile(y, 0.995, axis=0),
310 | color='navy', alpha=0.4, label='source log-AR(1) model',
311 | )
312 | By = np.exp(stan_variables['logBy'].reshape((-1, N))) / bgarea
313 | bax.plot(x_mjd[mask_good], np.median(By, axis=0)[mask_good], color='gray')
314 | bax.fill_between(
315 | np.where(mask_good, x_mjd, np.nan),
316 | np.where(mask_good, np.quantile(By, 0.005, axis=0), np.nan),
317 | np.where(mask_good, np.quantile(By, 0.995, axis=0), np.nan),
318 | color='gray', alpha=0.4, label='background LogNormal model',
319 | )
320 | bax.set_yscale('log')
321 | bax.set_xlabel('Time [MJD - %d]' % mjd0)
322 | bax.set_ylabel('Count rate [cts/s]')
323 | bax.set_title(
324 | 'ID=%s :%.2f #=%d' % (
325 | filename.replace('.fits', '').replace('020_LightCurve_', ''),
326 | c.mean(), len(c)))
327 |
328 | if broken:
329 | bax.set_ylim(
330 | ((c + 0.1) / rate_conversion)[mask_good].min(),
331 | ((c + 0.1) / rate_conversion)[mask_good].max(),
332 | )
333 | ymax = ((c + 0.1) / rate_conversion)[mask_good].max()
334 | else:
335 | bax.set_xlim(x_mjd.min(), x_mjd.max())
336 | bax.set_ylim(bax.get_ylim())  # freeze the autoscaled limits
337 | ymax = bax.get_ylim()[1]
338 |
339 | for year in np.arange(2020, 2023, 0.5):
340 | year_mjd = (astropy.time.Time(datetime.datetime(int(year), int(year * 12) % 12 + 1, 1), format='datetime')).mjd - mjd0
341 | if x_mjd.min() < float(year_mjd) < x_mjd.max():
342 | try:
343 | bax.text(
344 | year_mjd,
345 | ymax * 0.99,
346 | '%g ' % year, rotation=90, size=6, ha='center', va='top',
347 | )
348 | except ValueError:
349 | pass
350 | bax.legend()
351 | if broken:
352 | fig.savefig(prefix + '_t_s.pdf')
353 | else:
354 | fig.savefig(prefix + '_t.pdf')
355 | plt.close()
356 |
357 | print("priors:")
358 | for k, v in sorted(data.items()):
359 | if k.startswith('prior'):
360 | # convert natural-log quantities to base 10 (dex) for easier reading
361 | print("%20s: " % k, v / log(10) if 'log' in k else v)
362 |
363 | samples = []
364 |
365 | with h5py.File(prefix + "_post.h5", 'w') as fout:
366 | for k, v in stan_variables.items():
367 | if k not in badlist:
368 | print("storing", k)
369 | fout.create_dataset(k, data=v, shuffle=True, compression='gzip')
370 |
371 | print("posteriors:")
372 | for k in sorted(stan_variables.keys()):
373 | print('%20s: %.4f +- %.4f' % (k, stan_variables[k].mean(), stan_variables[k].std()))
374 | if k not in badlist and stan_variables[k].ndim == 1:
375 | # convert natural-log quantities to base 10 for easier reading
376 | samples.append(stan_variables[k] / log(10) if 'log' in k else stan_variables[k])
377 | paramnames.append(k)
378 | elif stan_variables[k].ndim > 1 and False:
379 | plt.figure()
380 | plt.hist(stan_variables[k].mean(axis=1), histtype='step', bins=40, label='over bins')
381 | plt.hist(stan_variables[k].mean(axis=0), histtype='step', bins=40, label='over realisations')
382 | plt.yscale('log')
383 | plt.xlabel(k)
384 | plt.legend(title='average')
385 | plt.savefig(prefix + "_hist_%s.pdf" % k, bbox_inches='tight')
386 | plt.close()
387 |
388 | samples = np.transpose(samples)
389 | if False:
390 | print('making corner plots ...')
391 | corner.corner(samples, labels=paramnames)
392 | plt.savefig(prefix + "_corner_log.pdf", bbox_inches='tight')
393 | plt.close()
394 | corner.corner(10**(samples), labels=[k.replace('log', '') for k in paramnames])
395 | plt.savefig(prefix + "_corner.pdf", bbox_inches='tight')
396 | plt.close()
397 |
398 | print("saving samples...")
399 | np.savetxt(
400 | prefix + 'LC_netrate.txt.gz',
401 | np.transpose(np.vstack((lc['TIME'].reshape((1, -1)), x.reshape((1, -1)), y[::40]))),
402 | fmt='%.5f', #delimiter=',',
403 | )
404 |
405 | print("plotting fourier transforms ...")
406 | # switch to units of seconds here
407 | omega = np.linspace(0, 10. / dt, 10000)
408 | # lowest accessible frequency: inverse of the observation duration
409 | omega1 = 1. / x.max()
410 | # Nyquist frequency: inverse of twice the bin duration
411 | omega0 = 1. / (2 * dt)
412 |
413 | pband2 = PredictionBand(omega)
414 |
415 | for tausample, sigma in zip(tqdm.tqdm(stan_variables['tau'].flatten()), stan_variables['noise'].flatten()):
416 | DT = 1 # unit: seconds
417 | phi = exp(-DT / tausample)
418 | gamma = DT / tausample
419 | specdens = (2 * pi)**0.5 * sigma**2 / (1 - phi**2) * gamma / (pi * (gamma**2 + omega**2))
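| # editor's note: this is the Lorentzian PSD of the AR(1)/OU process: flat for
| # omega << gamma = 1/tau and falling as omega^-2 above; sigma^2 / (1 - phi^2)
| # is the stationary variance, and gamma / (pi * (gamma^2 + omega^2)) a
| # normalised Lorentzian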
420 | pband2.add(specdens / x.max() / dt)
421 | print('factors:', x.max(), dt, len(x))
422 | pband2.line(color='r')
423 | pband2.shade(color='r', alpha=0.5)
424 | pband2.shade(q=0.95/2, color='r', alpha=0.1)
425 |
426 | # handle inferred damped random walk realisations here:
427 | #from astropy.timeseries import LombScargle
428 | def fourier_periodogram(t, y):
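| # editor's note: plain FFT periodogram; assumes uniform sampling and uses only
| # t[1] - t[0] as the sampling interval; the 1/N factor is the standard
| # periodogram normalisation of |FFT|^2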
429 | N = len(t)
430 | frequency = np.fft.fftfreq(N, t[1] - t[0])
431 | y_fft = np.fft.fft(y)
432 | positive = (frequency > 0)
433 | return frequency[positive], (1. / N) * abs(y_fft[positive]) ** 2
434 |
435 | #from ducc0.fft import genuine_fht
436 | #from scipy.ndimage import gaussian_filter
437 | #f = 1. / np.array(x)[1:][::-1]
438 | #pbandf = PredictionBand(f)
439 | #pbandf = PredictionBand(f[len(f) // 2:])
440 | #pbandf = PredictionBand(f[1::2])
441 | #pbandf = PredictionBand(1. / dt / (1 + np.arange(len(f[1::2])))[::-1])
442 | pbandf = None
443 | t = np.array(x)
444 |
445 | for y_realisation in tqdm.tqdm(y):
446 | # take the fourier transform of y_realisation
447 | #fourier_spectrum_twice = (genuine_fht(y_realisation))**2 * len(x)
448 | #fourier_spectrum = fourier_spectrum_twice[: len(fourier_spectrum_twice) // 2]
449 | # make a bit smoother
450 | #smooth_fourier_spectrum = gaussian_filter(
451 | # fourier_spectrum[1:],
452 | # sigma=2, truncate=100, mode='nearest')
453 | #print(smooth_fourier_spectrum.shape)
454 | #pbandf.add(smooth_fourier_spectrum)
455 | #frequency, power = LombScargle(t, y_realisation, normalization='psd').autopower()
456 | frequency, power = fourier_periodogram(t, y_realisation)
457 | if pbandf is None:
458 | pbandf = PredictionBand(frequency)
459 | pbandf.add(power)
460 |
461 | #plt.figure()
462 | pbandf.line(color='navy')
463 | pbandf.shade(color='navy', alpha=0.5)
464 | pbandf.shade(q=0.95/2, color='navy', alpha=0.1)
465 |
466 | plt.xlabel('Frequency [Hz]')
467 | plt.ylabel('Power spectral density (PSD)')
468 | plt.xscale('log')
469 | plt.yscale('log')
470 | ylo, yhi = plt.ylim()
471 | # mark observing window:
472 | plt.vlines([omega0, omega1], ylo, yhi, ls='--', color='k', alpha=0.5)
473 | plt.ylim(ylo, yhi)
474 | #plt.xlim(2 / (N * T), 1000 * 2 / (N * T))
475 | plt.savefig(prefix + '_F.pdf', bbox_inches='tight')
476 | plt.close()
477 |
478 | #
479 |
--------------------------------------------------------------------------------
/COPYING:
--------------------------------------------------------------------------------
1 | GNU AFFERO GENERAL PUBLIC LICENSE
2 | Version 3, 19 November 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU Affero General Public License is a free, copyleft license for
11 | software and other kinds of works, specifically designed to ensure
12 | cooperation with the community in the case of network server software.
13 |
14 | The licenses for most software and other practical works are designed
15 | to take away your freedom to share and change the works. By contrast,
16 | our General Public Licenses are intended to guarantee your freedom to
17 | share and change all versions of a program--to make sure it remains free
18 | software for all its users.
19 |
20 | When we speak of free software, we are referring to freedom, not
21 | price. Our General Public Licenses are designed to make sure that you
22 | have the freedom to distribute copies of free software (and charge for
23 | them if you wish), that you receive source code or can get it if you
24 | want it, that you can change the software or use pieces of it in new
25 | free programs, and that you know you can do these things.
26 |
27 | Developers that use our General Public Licenses protect your rights
28 | with two steps: (1) assert copyright on the software, and (2) offer
29 | you this License which gives you legal permission to copy, distribute
30 | and/or modify the software.
31 |
32 | A secondary benefit of defending all users' freedom is that
33 | improvements made in alternate versions of the program, if they
34 | receive widespread use, become available for other developers to
35 | incorporate. Many developers of free software are heartened and
36 | encouraged by the resulting cooperation. However, in the case of
37 | software used on network servers, this result may fail to come about.
38 | The GNU General Public License permits making a modified version and
39 | letting the public access it on a server without ever releasing its
40 | source code to the public.
41 |
42 | The GNU Affero General Public License is designed specifically to
43 | ensure that, in such cases, the modified source code becomes available
44 | to the community. It requires the operator of a network server to
45 | provide the source code of the modified version running there to the
46 | users of that server. Therefore, public use of a modified version, on
47 | a publicly accessible server, gives the public access to the source
48 | code of the modified version.
49 |
50 | An older license, called the Affero General Public License and
51 | published by Affero, was designed to accomplish similar goals. This is
52 | a different license, not a version of the Affero GPL, but Affero has
53 | released a new version of the Affero GPL which permits relicensing under
54 | this license.
55 |
56 | The precise terms and conditions for copying, distribution and
57 | modification follow.
58 |
59 | TERMS AND CONDITIONS
60 |
61 | 0. Definitions.
62 |
63 | "This License" refers to version 3 of the GNU Affero General Public License.
64 |
65 | "Copyright" also means copyright-like laws that apply to other kinds of
66 | works, such as semiconductor masks.
67 |
68 | "The Program" refers to any copyrightable work licensed under this
69 | License. Each licensee is addressed as "you". "Licensees" and
70 | "recipients" may be individuals or organizations.
71 |
72 | To "modify" a work means to copy from or adapt all or part of the work
73 | in a fashion requiring copyright permission, other than the making of an
74 | exact copy. The resulting work is called a "modified version" of the
75 | earlier work or a work "based on" the earlier work.
76 |
77 | A "covered work" means either the unmodified Program or a work based
78 | on the Program.
79 |
80 | To "propagate" a work means to do anything with it that, without
81 | permission, would make you directly or secondarily liable for
82 | infringement under applicable copyright law, except executing it on a
83 | computer or modifying a private copy. Propagation includes copying,
84 | distribution (with or without modification), making available to the
85 | public, and in some countries other activities as well.
86 |
87 | To "convey" a work means any kind of propagation that enables other
88 | parties to make or receive copies. Mere interaction with a user through
89 | a computer network, with no transfer of a copy, is not conveying.
90 |
91 | An interactive user interface displays "Appropriate Legal Notices"
92 | to the extent that it includes a convenient and prominently visible
93 | feature that (1) displays an appropriate copyright notice, and (2)
94 | tells the user that there is no warranty for the work (except to the
95 | extent that warranties are provided), that licensees may convey the
96 | work under this License, and how to view a copy of this License. If
97 | the interface presents a list of user commands or options, such as a
98 | menu, a prominent item in the list meets this criterion.
99 |
100 | 1. Source Code.
101 |
102 | The "source code" for a work means the preferred form of the work
103 | for making modifications to it. "Object code" means any non-source
104 | form of a work.
105 |
106 | A "Standard Interface" means an interface that either is an official
107 | standard defined by a recognized standards body, or, in the case of
108 | interfaces specified for a particular programming language, one that
109 | is widely used among developers working in that language.
110 |
111 | The "System Libraries" of an executable work include anything, other
112 | than the work as a whole, that (a) is included in the normal form of
113 | packaging a Major Component, but which is not part of that Major
114 | Component, and (b) serves only to enable use of the work with that
115 | Major Component, or to implement a Standard Interface for which an
116 | implementation is available to the public in source code form. A
117 | "Major Component", in this context, means a major essential component
118 | (kernel, window system, and so on) of the specific operating system
119 | (if any) on which the executable work runs, or a compiler used to
120 | produce the work, or an object code interpreter used to run it.
121 |
122 | The "Corresponding Source" for a work in object code form means all
123 | the source code needed to generate, install, and (for an executable
124 | work) run the object code and to modify the work, including scripts to
125 | control those activities. However, it does not include the work's
126 | System Libraries, or general-purpose tools or generally available free
127 | programs which are used unmodified in performing those activities but
128 | which are not part of the work. For example, Corresponding Source
129 | includes interface definition files associated with source files for
130 | the work, and the source code for shared libraries and dynamically
131 | linked subprograms that the work is specifically designed to require,
132 | such as by intimate data communication or control flow between those
133 | subprograms and other parts of the work.
134 |
135 | The Corresponding Source need not include anything that users
136 | can regenerate automatically from other parts of the Corresponding
137 | Source.
138 |
139 | The Corresponding Source for a work in source code form is that
140 | same work.
141 |
142 | 2. Basic Permissions.
143 |
144 | All rights granted under this License are granted for the term of
145 | copyright on the Program, and are irrevocable provided the stated
146 | conditions are met. This License explicitly affirms your unlimited
147 | permission to run the unmodified Program. The output from running a
148 | covered work is covered by this License only if the output, given its
149 | content, constitutes a covered work. This License acknowledges your
150 | rights of fair use or other equivalent, as provided by copyright law.
151 |
152 | You may make, run and propagate covered works that you do not
153 | convey, without conditions so long as your license otherwise remains
154 | in force. You may convey covered works to others for the sole purpose
155 | of having them make modifications exclusively for you, or provide you
156 | with facilities for running those works, provided that you comply with
157 | the terms of this License in conveying all material for which you do
158 | not control copyright. Those thus making or running the covered works
159 | for you must do so exclusively on your behalf, under your direction
160 | and control, on terms that prohibit them from making any copies of
161 | your copyrighted material outside their relationship with you.
162 |
163 | Conveying under any other circumstances is permitted solely under
164 | the conditions stated below. Sublicensing is not allowed; section 10
165 | makes it unnecessary.
166 |
167 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
168 |
169 | No covered work shall be deemed part of an effective technological
170 | measure under any applicable law fulfilling obligations under article
171 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
172 | similar laws prohibiting or restricting circumvention of such
173 | measures.
174 |
175 | When you convey a covered work, you waive any legal power to forbid
176 | circumvention of technological measures to the extent such circumvention
177 | is effected by exercising rights under this License with respect to
178 | the covered work, and you disclaim any intention to limit operation or
179 | modification of the work as a means of enforcing, against the work's
180 | users, your or third parties' legal rights to forbid circumvention of
181 | technological measures.
182 |
183 | 4. Conveying Verbatim Copies.
184 |
185 | You may convey verbatim copies of the Program's source code as you
186 | receive it, in any medium, provided that you conspicuously and
187 | appropriately publish on each copy an appropriate copyright notice;
188 | keep intact all notices stating that this License and any
189 | non-permissive terms added in accord with section 7 apply to the code;
190 | keep intact all notices of the absence of any warranty; and give all
191 | recipients a copy of this License along with the Program.
192 |
193 | You may charge any price or no price for each copy that you convey,
194 | and you may offer support or warranty protection for a fee.
195 |
196 | 5. Conveying Modified Source Versions.
197 |
198 | You may convey a work based on the Program, or the modifications to
199 | produce it from the Program, in the form of source code under the
200 | terms of section 4, provided that you also meet all of these conditions:
201 |
202 | a) The work must carry prominent notices stating that you modified
203 | it, and giving a relevant date.
204 |
205 | b) The work must carry prominent notices stating that it is
206 | released under this License and any conditions added under section
207 | 7. This requirement modifies the requirement in section 4 to
208 | "keep intact all notices".
209 |
210 | c) You must license the entire work, as a whole, under this
211 | License to anyone who comes into possession of a copy. This
212 | License will therefore apply, along with any applicable section 7
213 | additional terms, to the whole of the work, and all its parts,
214 | regardless of how they are packaged. This License gives no
215 | permission to license the work in any other way, but it does not
216 | invalidate such permission if you have separately received it.
217 |
218 | d) If the work has interactive user interfaces, each must display
219 | Appropriate Legal Notices; however, if the Program has interactive
220 | interfaces that do not display Appropriate Legal Notices, your
221 | work need not make them do so.
222 |
223 | A compilation of a covered work with other separate and independent
224 | works, which are not by their nature extensions of the covered work,
225 | and which are not combined with it such as to form a larger program,
226 | in or on a volume of a storage or distribution medium, is called an
227 | "aggregate" if the compilation and its resulting copyright are not
228 | used to limit the access or legal rights of the compilation's users
229 | beyond what the individual works permit. Inclusion of a covered work
230 | in an aggregate does not cause this License to apply to the other
231 | parts of the aggregate.
232 |
233 | 6. Conveying Non-Source Forms.
234 |
235 | You may convey a covered work in object code form under the terms
236 | of sections 4 and 5, provided that you also convey the
237 | machine-readable Corresponding Source under the terms of this License,
238 | in one of these ways:
239 |
240 | a) Convey the object code in, or embodied in, a physical product
241 | (including a physical distribution medium), accompanied by the
242 | Corresponding Source fixed on a durable physical medium
243 | customarily used for software interchange.
244 |
245 | b) Convey the object code in, or embodied in, a physical product
246 | (including a physical distribution medium), accompanied by a
247 | written offer, valid for at least three years and valid for as
248 | long as you offer spare parts or customer support for that product
249 | model, to give anyone who possesses the object code either (1) a
250 | copy of the Corresponding Source for all the software in the
251 | product that is covered by this License, on a durable physical
252 | medium customarily used for software interchange, for a price no
253 | more than your reasonable cost of physically performing this
254 | conveying of source, or (2) access to copy the
255 | Corresponding Source from a network server at no charge.
256 |
257 | c) Convey individual copies of the object code with a copy of the
258 | written offer to provide the Corresponding Source. This
259 | alternative is allowed only occasionally and noncommercially, and
260 | only if you received the object code with such an offer, in accord
261 | with subsection 6b.
262 |
263 | d) Convey the object code by offering access from a designated
264 | place (gratis or for a charge), and offer equivalent access to the
265 | Corresponding Source in the same way through the same place at no
266 | further charge. You need not require recipients to copy the
267 | Corresponding Source along with the object code. If the place to
268 | copy the object code is a network server, the Corresponding Source
269 | may be on a different server (operated by you or a third party)
270 | that supports equivalent copying facilities, provided you maintain
271 | clear directions next to the object code saying where to find the
272 | Corresponding Source. Regardless of what server hosts the
273 | Corresponding Source, you remain obligated to ensure that it is
274 | available for as long as needed to satisfy these requirements.
275 |
276 | e) Convey the object code using peer-to-peer transmission, provided
277 | you inform other peers where the object code and Corresponding
278 | Source of the work are being offered to the general public at no
279 | charge under subsection 6d.
280 |
281 | A separable portion of the object code, whose source code is excluded
282 | from the Corresponding Source as a System Library, need not be
283 | included in conveying the object code work.
284 |
285 | A "User Product" is either (1) a "consumer product", which means any
286 | tangible personal property which is normally used for personal, family,
287 | or household purposes, or (2) anything designed or sold for incorporation
288 | into a dwelling. In determining whether a product is a consumer product,
289 | doubtful cases shall be resolved in favor of coverage. For a particular
290 | product received by a particular user, "normally used" refers to a
291 | typical or common use of that class of product, regardless of the status
292 | of the particular user or of the way in which the particular user
293 | actually uses, or expects or is expected to use, the product. A product
294 | is a consumer product regardless of whether the product has substantial
295 | commercial, industrial or non-consumer uses, unless such uses represent
296 | the only significant mode of use of the product.
297 |
298 | "Installation Information" for a User Product means any methods,
299 | procedures, authorization keys, or other information required to install
300 | and execute modified versions of a covered work in that User Product from
301 | a modified version of its Corresponding Source. The information must
302 | suffice to ensure that the continued functioning of the modified object
303 | code is in no case prevented or interfered with solely because
304 | modification has been made.
305 |
306 | If you convey an object code work under this section in, or with, or
307 | specifically for use in, a User Product, and the conveying occurs as
308 | part of a transaction in which the right of possession and use of the
309 | User Product is transferred to the recipient in perpetuity or for a
310 | fixed term (regardless of how the transaction is characterized), the
311 | Corresponding Source conveyed under this section must be accompanied
312 | by the Installation Information. But this requirement does not apply
313 | if neither you nor any third party retains the ability to install
314 | modified object code on the User Product (for example, the work has
315 | been installed in ROM).
316 |
317 | The requirement to provide Installation Information does not include a
318 | requirement to continue to provide support service, warranty, or updates
319 | for a work that has been modified or installed by the recipient, or for
320 | the User Product in which it has been modified or installed. Access to a
321 | network may be denied when the modification itself materially and
322 | adversely affects the operation of the network or violates the rules and
323 | protocols for communication across the network.
324 |
325 | Corresponding Source conveyed, and Installation Information provided,
326 | in accord with this section must be in a format that is publicly
327 | documented (and with an implementation available to the public in
328 | source code form), and must require no special password or key for
329 | unpacking, reading or copying.
330 |
331 | 7. Additional Terms.
332 |
333 | "Additional permissions" are terms that supplement the terms of this
334 | License by making exceptions from one or more of its conditions.
335 | Additional permissions that are applicable to the entire Program shall
336 | be treated as though they were included in this License, to the extent
337 | that they are valid under applicable law. If additional permissions
338 | apply only to part of the Program, that part may be used separately
339 | under those permissions, but the entire Program remains governed by
340 | this License without regard to the additional permissions.
341 |
342 | When you convey a copy of a covered work, you may at your option
343 | remove any additional permissions from that copy, or from any part of
344 | it. (Additional permissions may be written to require their own
345 | removal in certain cases when you modify the work.) You may place
346 | additional permissions on material, added by you to a covered work,
347 | for which you have or can give appropriate copyright permission.
348 |
349 | Notwithstanding any other provision of this License, for material you
350 | add to a covered work, you may (if authorized by the copyright holders of
351 | that material) supplement the terms of this License with terms:
352 |
353 | a) Disclaiming warranty or limiting liability differently from the
354 | terms of sections 15 and 16 of this License; or
355 |
356 | b) Requiring preservation of specified reasonable legal notices or
357 | author attributions in that material or in the Appropriate Legal
358 | Notices displayed by works containing it; or
359 |
360 | c) Prohibiting misrepresentation of the origin of that material, or
361 | requiring that modified versions of such material be marked in
362 | reasonable ways as different from the original version; or
363 |
364 | d) Limiting the use for publicity purposes of names of licensors or
365 | authors of the material; or
366 |
367 | e) Declining to grant rights under trademark law for use of some
368 | trade names, trademarks, or service marks; or
369 |
370 | f) Requiring indemnification of licensors and authors of that
371 | material by anyone who conveys the material (or modified versions of
372 | it) with contractual assumptions of liability to the recipient, for
373 | any liability that these contractual assumptions directly impose on
374 | those licensors and authors.
375 |
376 | All other non-permissive additional terms are considered "further
377 | restrictions" within the meaning of section 10. If the Program as you
378 | received it, or any part of it, contains a notice stating that it is
379 | governed by this License along with a term that is a further
380 | restriction, you may remove that term. If a license document contains
381 | a further restriction but permits relicensing or conveying under this
382 | License, you may add to a covered work material governed by the terms
383 | of that license document, provided that the further restriction does
384 | not survive such relicensing or conveying.
385 |
386 | If you add terms to a covered work in accord with this section, you
387 | must place, in the relevant source files, a statement of the
388 | additional terms that apply to those files, or a notice indicating
389 | where to find the applicable terms.
390 |
391 | Additional terms, permissive or non-permissive, may be stated in the
392 | form of a separately written license, or stated as exceptions;
393 | the above requirements apply either way.
394 |
395 | 8. Termination.
396 |
397 | You may not propagate or modify a covered work except as expressly
398 | provided under this License. Any attempt otherwise to propagate or
399 | modify it is void, and will automatically terminate your rights under
400 | this License (including any patent licenses granted under the third
401 | paragraph of section 11).
402 |
403 | However, if you cease all violation of this License, then your
404 | license from a particular copyright holder is reinstated (a)
405 | provisionally, unless and until the copyright holder explicitly and
406 | finally terminates your license, and (b) permanently, if the copyright
407 | holder fails to notify you of the violation by some reasonable means
408 | prior to 60 days after the cessation.
409 |
410 | Moreover, your license from a particular copyright holder is
411 | reinstated permanently if the copyright holder notifies you of the
412 | violation by some reasonable means, this is the first time you have
413 | received notice of violation of this License (for any work) from that
414 | copyright holder, and you cure the violation prior to 30 days after
415 | your receipt of the notice.
416 |
417 | Termination of your rights under this section does not terminate the
418 | licenses of parties who have received copies or rights from you under
419 | this License. If your rights have been terminated and not permanently
420 | reinstated, you do not qualify to receive new licenses for the same
421 | material under section 10.
422 |
423 | 9. Acceptance Not Required for Having Copies.
424 |
425 | You are not required to accept this License in order to receive or
426 | run a copy of the Program. Ancillary propagation of a covered work
427 | occurring solely as a consequence of using peer-to-peer transmission
428 | to receive a copy likewise does not require acceptance. However,
429 | nothing other than this License grants you permission to propagate or
430 | modify any covered work. These actions infringe copyright if you do
431 | not accept this License. Therefore, by modifying or propagating a
432 | covered work, you indicate your acceptance of this License to do so.
433 |
434 | 10. Automatic Licensing of Downstream Recipients.
435 |
436 | Each time you convey a covered work, the recipient automatically
437 | receives a license from the original licensors, to run, modify and
438 | propagate that work, subject to this License. You are not responsible
439 | for enforcing compliance by third parties with this License.
440 |
441 | An "entity transaction" is a transaction transferring control of an
442 | organization, or substantially all assets of one, or subdividing an
443 | organization, or merging organizations. If propagation of a covered
444 | work results from an entity transaction, each party to that
445 | transaction who receives a copy of the work also receives whatever
446 | licenses to the work the party's predecessor in interest had or could
447 | give under the previous paragraph, plus a right to possession of the
448 | Corresponding Source of the work from the predecessor in interest, if
449 | the predecessor has it or can get it with reasonable efforts.
450 |
451 | You may not impose any further restrictions on the exercise of the
452 | rights granted or affirmed under this License. For example, you may
453 | not impose a license fee, royalty, or other charge for exercise of
454 | rights granted under this License, and you may not initiate litigation
455 | (including a cross-claim or counterclaim in a lawsuit) alleging that
456 | any patent claim is infringed by making, using, selling, offering for
457 | sale, or importing the Program or any portion of it.
458 |
459 | 11. Patents.
460 |
461 | A "contributor" is a copyright holder who authorizes use under this
462 | License of the Program or a work on which the Program is based. The
463 | work thus licensed is called the contributor's "contributor version".
464 |
465 | A contributor's "essential patent claims" are all patent claims
466 | owned or controlled by the contributor, whether already acquired or
467 | hereafter acquired, that would be infringed by some manner, permitted
468 | by this License, of making, using, or selling its contributor version,
469 | but do not include claims that would be infringed only as a
470 | consequence of further modification of the contributor version. For
471 | purposes of this definition, "control" includes the right to grant
472 | patent sublicenses in a manner consistent with the requirements of
473 | this License.
474 |
475 | Each contributor grants you a non-exclusive, worldwide, royalty-free
476 | patent license under the contributor's essential patent claims, to
477 | make, use, sell, offer for sale, import and otherwise run, modify and
478 | propagate the contents of its contributor version.
479 |
480 | In the following three paragraphs, a "patent license" is any express
481 | agreement or commitment, however denominated, not to enforce a patent
482 | (such as an express permission to practice a patent or covenant not to
483 | sue for patent infringement). To "grant" such a patent license to a
484 | party means to make such an agreement or commitment not to enforce a
485 | patent against the party.
486 |
487 | If you convey a covered work, knowingly relying on a patent license,
488 | and the Corresponding Source of the work is not available for anyone
489 | to copy, free of charge and under the terms of this License, through a
490 | publicly available network server or other readily accessible means,
491 | then you must either (1) cause the Corresponding Source to be so
492 | available, or (2) arrange to deprive yourself of the benefit of the
493 | patent license for this particular work, or (3) arrange, in a manner
494 | consistent with the requirements of this License, to extend the patent
495 | license to downstream recipients. "Knowingly relying" means you have
496 | actual knowledge that, but for the patent license, your conveying the
497 | covered work in a country, or your recipient's use of the covered work
498 | in a country, would infringe one or more identifiable patents in that
499 | country that you have reason to believe are valid.
500 |
501 | If, pursuant to or in connection with a single transaction or
502 | arrangement, you convey, or propagate by procuring conveyance of, a
503 | covered work, and grant a patent license to some of the parties
504 | receiving the covered work authorizing them to use, propagate, modify
505 | or convey a specific copy of the covered work, then the patent license
506 | you grant is automatically extended to all recipients of the covered
507 | work and works based on it.
508 |
509 | A patent license is "discriminatory" if it does not include within
510 | the scope of its coverage, prohibits the exercise of, or is
511 | conditioned on the non-exercise of one or more of the rights that are
512 | specifically granted under this License. You may not convey a covered
513 | work if you are a party to an arrangement with a third party that is
514 | in the business of distributing software, under which you make payment
515 | to the third party based on the extent of your activity of conveying
516 | the work, and under which the third party grants, to any of the
517 | parties who would receive the covered work from you, a discriminatory
518 | patent license (a) in connection with copies of the covered work
519 | conveyed by you (or copies made from those copies), or (b) primarily
520 | for and in connection with specific products or compilations that
521 | contain the covered work, unless you entered into that arrangement,
522 | or that patent license was granted, prior to 28 March 2007.
523 |
524 | Nothing in this License shall be construed as excluding or limiting
525 | any implied license or other defenses to infringement that may
526 | otherwise be available to you under applicable patent law.
527 |
528 | 12. No Surrender of Others' Freedom.
529 |
530 | If conditions are imposed on you (whether by court order, agreement or
531 | otherwise) that contradict the conditions of this License, they do not
532 | excuse you from the conditions of this License. If you cannot convey a
533 | covered work so as to satisfy simultaneously your obligations under this
534 | License and any other pertinent obligations, then as a consequence you may
535 | not convey it at all. For example, if you agree to terms that obligate you
536 | to collect a royalty for further conveying from those to whom you convey
537 | the Program, the only way you could satisfy both those terms and this
538 | License would be to refrain entirely from conveying the Program.
539 |
540 | 13. Remote Network Interaction; Use with the GNU General Public License.
541 |
542 | Notwithstanding any other provision of this License, if you modify the
543 | Program, your modified version must prominently offer all users
544 | interacting with it remotely through a computer network (if your version
545 | supports such interaction) an opportunity to receive the Corresponding
546 | Source of your version by providing access to the Corresponding Source
547 | from a network server at no charge, through some standard or customary
548 | means of facilitating copying of software. This Corresponding Source
549 | shall include the Corresponding Source for any work covered by version 3
550 | of the GNU General Public License that is incorporated pursuant to the
551 | following paragraph.
552 |
553 | Notwithstanding any other provision of this License, you have
554 | permission to link or combine any covered work with a work licensed
555 | under version 3 of the GNU General Public License into a single
556 | combined work, and to convey the resulting work. The terms of this
557 | License will continue to apply to the part which is the covered work,
558 | but the work with which it is combined will remain governed by version
559 | 3 of the GNU General Public License.
560 |
561 | 14. Revised Versions of this License.
562 |
563 | The Free Software Foundation may publish revised and/or new versions of
564 | the GNU Affero General Public License from time to time. Such new versions
565 | will be similar in spirit to the present version, but may differ in detail to
566 | address new problems or concerns.
567 |
568 | Each version is given a distinguishing version number. If the
569 | Program specifies that a certain numbered version of the GNU Affero General
570 | Public License "or any later version" applies to it, you have the
571 | option of following the terms and conditions either of that numbered
572 | version or of any later version published by the Free Software
573 | Foundation. If the Program does not specify a version number of the
574 | GNU Affero General Public License, you may choose any version ever published
575 | by the Free Software Foundation.
576 |
577 | If the Program specifies that a proxy can decide which future
578 | versions of the GNU Affero General Public License can be used, that proxy's
579 | public statement of acceptance of a version permanently authorizes you
580 | to choose that version for the Program.
581 |
582 | Later license versions may give you additional or different
583 | permissions. However, no additional obligations are imposed on any
584 | author or copyright holder as a result of your choosing to follow a
585 | later version.
586 |
587 | 15. Disclaimer of Warranty.
588 |
589 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
590 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
591 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
592 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
593 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
594 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
595 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
596 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
597 |
598 | 16. Limitation of Liability.
599 |
600 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
601 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
602 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
603 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
604 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
605 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
606 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
607 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
608 | SUCH DAMAGES.
609 |
610 | 17. Interpretation of Sections 15 and 16.
611 |
612 | If the disclaimer of warranty and limitation of liability provided
613 | above cannot be given local legal effect according to their terms,
614 | reviewing courts shall apply local law that most closely approximates
615 | an absolute waiver of all civil liability in connection with the
616 | Program, unless a warranty or assumption of liability accompanies a
617 | copy of the Program in return for a fee.
618 |
619 | END OF TERMS AND CONDITIONS
620 |
621 | How to Apply These Terms to Your New Programs
622 |
623 | If you develop a new program, and you want it to be of the greatest
624 | possible use to the public, the best way to achieve this is to make it
625 | free software which everyone can redistribute and change under these terms.
626 |
627 | To do so, attach the following notices to the program. It is safest
628 | to attach them to the start of each source file to most effectively
629 | state the exclusion of warranty; and each file should have at least
630 | the "copyright" line and a pointer to where the full notice is found.
631 |
632 | <one line to give the program's name and a brief idea of what it does.>
633 | Copyright (C) <year>  <name of author>
634 |
635 | This program is free software: you can redistribute it and/or modify
636 | it under the terms of the GNU Affero General Public License as published by
637 | the Free Software Foundation, either version 3 of the License, or
638 | (at your option) any later version.
639 |
640 | This program is distributed in the hope that it will be useful,
641 | but WITHOUT ANY WARRANTY; without even the implied warranty of
642 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
643 | GNU Affero General Public License for more details.
644 |
645 | You should have received a copy of the GNU Affero General Public License
646 | along with this program. If not, see <https://www.gnu.org/licenses/>.
647 |
648 | Also add information on how to contact you by electronic and paper mail.
649 |
650 | If your software can interact with users remotely through a computer
651 | network, you should also make sure that it provides a way for users to
652 | get its source. For example, if your program is a web application, its
653 | interface could display a "Source" link that leads users to an archive
654 | of the code. There are many ways you could offer source, and different
655 | solutions will be better for different programs; see section 13 for the
656 | specific requirements.
657 |
658 | You should also get your employer (if you work as a programmer) or school,
659 | if any, to sign a "copyright disclaimer" for the program, if necessary.
660 | For more information on this, and how to apply and follow the GNU AGPL, see
661 | <https://www.gnu.org/licenses/>.
662 |
--------------------------------------------------------------------------------