├── .coveragerc ├── .dockerignore ├── .github └── workflows │ └── pytest.yml ├── .gitignore ├── COPYING ├── LICENSE ├── MANIFEST.in ├── README.md ├── initial.requirements.txt ├── matplotlibrc ├── pytest.ini ├── requirements.dev.txt ├── requirements.txt ├── setup.py ├── stream2segment ├── __init__.py ├── cli.py ├── download │ ├── __init__.py │ ├── db │ │ ├── __init__.py │ │ ├── inspection │ │ │ ├── __init__.py │ │ │ ├── main.py │ │ │ ├── static │ │ │ │ └── js │ │ │ │ │ └── leafletmap.js │ │ │ └── templates │ │ │ │ ├── base.html │ │ │ │ └── dstats.html │ │ ├── management.py │ │ └── models.py │ ├── exc.py │ ├── inputvalidation.py │ ├── log.py │ ├── main.py │ ├── modules │ │ ├── __init__.py │ │ ├── channels.py │ │ ├── datacenters.py │ │ ├── events.py │ │ ├── mseedlite.py │ │ ├── segments.py │ │ ├── stations.py │ │ ├── stationsearch.py │ │ └── utils.py │ └── url.py ├── io │ ├── __init__.py │ ├── cli.py │ ├── db │ │ ├── __init__.py │ │ ├── inspection.py │ │ ├── models.py │ │ ├── pdsql.py │ │ └── sqlconstructs.py │ ├── inputvalidation.py │ └── log.py ├── process │ ├── __init__.py │ ├── db │ │ ├── __init__.py │ │ ├── models.py │ │ └── sqlevalexpr.py │ ├── funclib │ │ ├── __init__.py │ │ ├── coda.py │ │ ├── ndarrays.py │ │ └── traces.py │ ├── gui │ │ ├── __init__.py │ │ ├── main.py │ │ └── webapp │ │ │ ├── __init__.py │ │ │ ├── mainapp │ │ │ ├── __init__.py │ │ │ ├── core.py │ │ │ ├── db.py │ │ │ └── views.py │ │ │ ├── static │ │ │ └── js │ │ │ │ └── mainapp.js │ │ │ └── templates │ │ │ ├── base.html │ │ │ └── mainapp.html │ ├── inspectimport.py │ ├── log.py │ ├── main.py │ └── writers.py ├── resources │ ├── __init__.py │ ├── eidars.txt │ ├── program_version │ ├── templates │ │ ├── The-Segment-object.ipynb │ │ ├── Using-Stream2segment-in-your-Python-code.ipynb │ │ ├── __init__.py │ │ ├── create_wiki.py │ │ ├── download.yaml │ │ ├── example.db.sqlite │ │ ├── gui.py │ │ ├── gui.yaml │ │ ├── paramtable.py │ │ ├── paramtable.yaml │ │ ├── save2fs.py │ │ └── save2fs.yaml │ └── traveltimes │ │ ├── __init__.py │ │ ├── ak135_ttp+.npz │ │ ├── ak135_tts+.npz │ │ ├── iasp91_ttp+.npz │ │ └── iasp91_tts+.npz └── traveltimes │ ├── __init__.py │ ├── ttcreator.py │ └── ttloader.py └── tests ├── __init__.py ├── conftest.py ├── data ├── 20091217_231838.FR.ESCA.00.HHZ.SAC ├── BS.*.*.*.2016-06-05.21:05-09.47.mseed ├── GE.FLT1..HH?.mseed ├── GE.FLT1.xml ├── IA.BAKI..BHZ.D.2016.004.head ├── ak135_ttp+_10.npz ├── ak135_ttp+_5.npz ├── ak135_tts+_10.npz ├── ak135_tts+_5.npz ├── channel_query_response.txt ├── db.no_event_type_column.sqlite ├── db.no_event_type_column.yaml ├── download-network-filter.yaml ├── download_poligon_article.yaml ├── eida_routing_service_response.txt ├── eidatoken ├── event_request_sample_iris.txt ├── event_request_sample_isc.isf ├── iasp91_ttp+_10.npz ├── iasp91_ttp+_5.npz ├── iasp91_tts+_10.npz ├── iasp91_tts+_5.npz ├── inventory_GE.APE.xml ├── isc_response.txt ├── jupyter.example.process.yaml ├── jupyter.example.sqlite ├── processingmodule.noop.oldversion.py ├── processingmodule.noop.py └── trace_GE.APE.mseed ├── download ├── __init__.py ├── db │ ├── test_cli_classlabels.py │ ├── test_cli_ddrop.py │ ├── test_cli_update_metadata.py │ ├── test_dl_commands.py │ └── test_dstats.py ├── test_download.py ├── test_download2.py ├── test_download_auth.py ├── test_download_real_with_network_filter.py ├── test_inputvalidation.py ├── test_mseedlite.py ├── test_u_download.py ├── test_u_download_01_events.py ├── test_u_download_02_datacenters.py ├── test_u_download_03_channels.py ├── 
test_u_download_04_merge_events_stations.py ├── test_u_download_05_prepare_for_download.py ├── test_u_download_06_download_save_segments.py ├── test_u_download_modules_utils.py └── test_url.py ├── io ├── __init__.py ├── test_fdsn_url.py ├── test_pdsql.py └── test_pdsql_dtypes.py ├── misc ├── __init__.py ├── test_click_shallow_cmcall.py ├── test_compress_decompress.py ├── test_notebook.py ├── test_request.py ├── test_resources.py ├── test_ttimes.py └── test_utils.py └── process ├── __init__.py ├── db ├── __init__.py ├── test_db.py ├── test_db_segment_obspy_methods.py ├── test_dbqueries.py └── test_inspection.py ├── funclib ├── __init__.py ├── test_coda.py ├── test_ndarrays.py ├── test_ndarrays_response_spectrum.py ├── test_traces.py └── test_traces_with_local_mseeds.py ├── gui ├── __init__.py └── test_webgui.py ├── test_imap.py ├── test_processing.py ├── test_sqlevalexpr.py └── test_u_various_utilities.py /.coveragerc: -------------------------------------------------------------------------------- 1 | # coverage config file,. For info see https://coverage.readthedocs.io/en/latest/cmd.html#cmd-report 2 | 3 | [run] 4 | omit = 5 | */traveltimes/* 6 | */data/* 7 | */docs/* 8 | source = stream2segment/* 9 | 10 | [report] 11 | # Regexes for lines to exclude from consideration 12 | exclude_lines = 13 | # Have to re-enable the standard pragma 14 | pragma: no cover 15 | 16 | # Don't complain about missing debug-only code: 17 | def __repr__ 18 | if self\.debug 19 | 20 | # Don't complain if tests don't hit defensive assertion code: 21 | raise AssertionError 22 | raise NotImplementedError 23 | 24 | # Don't complain if non-runnable code isn't run: 25 | if __name__ == .__main__.: 26 | 27 | # skip missing imports usually for Py2 compatibility 28 | except ImportError -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | **/__pycache__ 2 | **/*.pyc -------------------------------------------------------------------------------- /.github/workflows/pytest.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: Python >= 3.8 5 | 6 | on: 7 | 8 | workflow_dispatch: # enable run manually the workflow 9 | 10 | push: 11 | branches: [ master ] 12 | paths-ignore: 13 | - "**/README.md" 14 | - "**/LICENSE" 15 | - "**/COPYING" 16 | - "stream2segment/resources/traveltimes/**" 17 | 18 | pull_request: 19 | branches: [ master, dev ] 20 | paths-ignore: 21 | - "**/README.md" 22 | - "**/LICENSE" 23 | - "**/COPYING" 24 | - "stream2segment/resources/traveltimes/**" 25 | 26 | jobs: 27 | build: 28 | # Reminder (commented for the moment): 29 | # here we run if the pull request source is not dev (because for dev we already run this test, see above) 30 | # https://github.com/orgs/community/discussions/30794#discussioncomment-3482788 31 | # https://docs.github.com/en/actions/learn-github-actions/contexts#github-context 32 | # if: ${{ github.head_ref || github.ref_name }} != 'dev' 33 | runs-on: ubuntu-latest 34 | strategy: 35 | fail-fast: false 36 | matrix: 37 | python-version: ["3.8", "3.9", "3.10", "3.11"] 38 | 39 | # service containers to run with `postgres-job` 40 | services: 41 | # label used to access the service container 42 | 
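# note: the settings below must match the database URL passed to pytest in the
# "Test with pytest" step at the end of this workflow
# (postgresql://postgres:postgres@localhost/s2s_test)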
postgres: 43 | # Docker Hub image 44 | image: postgres:latest 45 | # service environment variables 46 | env: 47 | # The hostname used to communicate with the PostgreSQL service container 48 | POSTGRES_HOST: localhost 49 | # optional (defaults to `postgres`) 50 | POSTGRES_DB: s2s_test 51 | # required 52 | POSTGRES_PASSWORD: postgres 53 | # optional (defaults to `5432`) 54 | POSTGRES_PORT: 5432 55 | # optional (defaults to `postgres`) 56 | POSTGRES_USER: postgres 57 | ports: 58 | # maps tcp port 5432 on service container to the host 59 | - 5432:5432 60 | # set health checks to wait until postgres has started 61 | options: >- 62 | --health-cmd pg_isready 63 | --health-interval 10s 64 | --health-timeout 5s 65 | --health-retries 5 66 | 67 | steps: 68 | - uses: actions/checkout@v2 69 | - name: Set up Python ${{ matrix.python-version }} 70 | uses: actions/setup-python@v2 71 | with: 72 | python-version: ${{ matrix.python-version }} 73 | - name: Install dependencies 74 | run: | 75 | pip install --upgrade pip setuptools wheel 76 | pip install -r requirements.dev.txt 77 | pip install jupyterlab 78 | pip install . 79 | - name: Test with pytest 80 | run: | 81 | pytest -xvvv --dburl postgresql://postgres:postgres@localhost/s2s_test 82 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Added by me: 2 | .DS_Store 3 | .env 4 | *.sqlite 5 | tmp/ 6 | skip/ 7 | .idea/ 8 | 9 | # Byte-compiled / optimized / DLL files 10 | __pycache__/ 11 | *.py[cod] 12 | *$py.class 13 | 14 | # C extensions 15 | *.so 16 | 17 | # Distribution / packaging 18 | .Python 19 | env/ 20 | build/ 21 | develop-eggs/ 22 | dist/ 23 | downloads/ 24 | eggs/ 25 | .eggs/ 26 | lib/ 27 | lib64/ 28 | parts/ 29 | sdist/ 30 | var/ 31 | *.egg-info/ 32 | .installed.cfg 33 | *.egg 34 | 35 | # PyInstaller 36 | # Usually these files are written by a python script from a template 37 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 38 | *.manifest 39 | *.spec 40 | 41 | # Installer logs 42 | pip-log.txt 43 | pip-delete-this-directory.txt 44 | 45 | # Unit test / coverage reports 46 | htmlcov/ 47 | .tox/ 48 | .coverage 49 | .coverage.* 50 | .cache 51 | nosetests.xml 52 | coverage.xml 53 | *,cover 54 | .hypothesis/ 55 | 56 | # Translations 57 | *.mo 58 | *.pot 59 | 60 | # Django stuff: 61 | *.log 62 | 63 | # Sphinx documentation 64 | docs/_build/ 65 | 66 | # PyBuilder 67 | target/ 68 | 69 | #Ipython Notebook 70 | .ipynb_checkpoints 71 | -------------------------------------------------------------------------------- /COPYING: -------------------------------------------------------------------------------- 1 | GNU General Public License, Version 3, 29 June 2007 2 | 3 | Copyright © 2018 Helmholtz Centre Potsdam GFZ German Research Centre 4 | for Geosciences, Potsdam, Germany (Riccardo Zaccarelli). 5 | 6 | stream2segment is free software: you can redistribute it and/or 7 | modify it under the terms of the GNU General Public License 8 | as published by the Free Software Foundation, either version 3 9 | of the License, or (at your option) any later version. 10 | stream2segment is distributed in the hope that it will be useful, 11 | but WITHOUT ANY WARRANTY; without even the implied warranty 12 | of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 13 | See the GNU General Public License for more details. 
14 | You should have received a copy of the GNU General Public License 15 | along with this program. If not, see http://www.gnu.org/licenses/. -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | recursive-include stream2segment/resources * 2 | recursive-include stream2segment/download/db/inspection/static * 3 | recursive-include stream2segment/download/db/inspection/templates * 4 | recursive-include stream2segment/process/gui/webapp/static * 5 | recursive-include stream2segment/process/gui/webapp/templates * -------------------------------------------------------------------------------- /initial.requirements.txt: -------------------------------------------------------------------------------- 1 | # Old requirements, to use for checking backward compatibility: versions below 2 | # the ones listed below are not compatible and were never meant to be 3 | 4 | # This was the initial requirements.txt 5 | 6 | # certifi==2017.7.27.1 7 | # chardet==3.0.4 8 | # click==6.7 9 | # cycler==0.10.0 10 | # decorator==4.1.2 11 | # Flask==0.12.3 12 | # future==0.16.0 13 | # futures==3.1.1 14 | # idna==2.6 15 | # itsdangerous==0.24 16 | # Jinja2==2.9.6 17 | # lxml==4.0.0 18 | # MarkupSafe==1.0 19 | # matplotlib==2.0.2 20 | # numpy==1.13.1 21 | # obspy==1.0.3 22 | # pandas==0.20.3 23 | # psutil==5.3.1 24 | # psycopg2==2.7.3.1 25 | # pyparsing==2.2.0 26 | # python-dateutil==2.6.1 27 | # pytz==2017.2 28 | # PyYAML==3.12 29 | # requests==2.18.4 30 | # scipy==0.19.1 31 | # six==1.11.0 32 | # SQLAlchemy==1.1.14 33 | # urllib3==1.22 34 | # Werkzeug==0.12.2 35 | 36 | 37 | # requirements.dev.txt: 38 | 39 | # astroid==1.5.3 40 | # certifi==2017.7.27.1 41 | # chardet==3.0.4 42 | # click==6.7 43 | # coverage==4.4.1 44 | # cycler==0.10.0 45 | # decorator==4.1.2 46 | # Flask==0.12.3 47 | # future==0.16.0 48 | # futures==3.1.1 49 | # idna==2.6 50 | # isort==4.2.15 51 | # itsdangerous==0.24 52 | # Jinja2==2.9.6 53 | # lazy-object-proxy==1.3.1 54 | # lxml==4.0.0 55 | # MarkupSafe==1.0 56 | # matplotlib==2.0.2 57 | # mccabe==0.6.1 58 | # mock==2.0.0 59 | # numpy==1.13.1 60 | # obspy==1.0.3 61 | # pandas==0.20.3 62 | # pbr==3.1.1 63 | # pep8==1.7.0 64 | # psutil==5.3.1 65 | # psycopg2==2.7.3.1 66 | # py==1.4.34 67 | # pylint==1.7.2 68 | # pyparsing==2.2.0 69 | # pytest==3.2.2 70 | # pytest-cov==2.5.1 71 | # pytest-mock==1.6.3 72 | # python-dateutil==2.6.1 73 | # pytz==2017.2 74 | # PyYAML==3.12 75 | # requests==2.20.0 76 | # scipy==0.19.1 77 | # six==1.11.0 78 | # SQLAlchemy==1.1.14 79 | # urllib3==1.22 80 | # Werkzeug==0.12.2 81 | # wrapt==1.10.11 82 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | # pytest.ini. 
Needed to avoid _pytest.pathlib.ImportPathMismatchError in GitHub Actions 2 | [pytest] 3 | testpaths = 4 | tests 5 | python_files = test_*.py 6 | addopts = -rf --import-mode=importlib -------------------------------------------------------------------------------- /requirements.dev.txt: -------------------------------------------------------------------------------- 1 | astroid==2.15.4 2 | blinker==1.6.2 3 | blosc2==2.0.0 4 | certifi==2023.5.7 5 | charset-normalizer==3.1.0 6 | click==8.1.3 7 | contourpy==1.0.7 8 | coverage==7.2.5 9 | cycler==0.11.0 10 | Cython==0.29.34 11 | decorator==5.1.1 12 | dill==0.3.6 13 | Flask==2.3.2 14 | fonttools==4.39.4 15 | greenlet==2.0.2 16 | idna==3.4 17 | iniconfig==2.0.0 18 | isort==5.12.0 19 | itsdangerous==2.1.2 20 | Jinja2==3.1.2 21 | kiwisolver==1.4.4 22 | lazy-object-proxy==1.9.0 23 | lxml==4.9.2 24 | MarkupSafe==2.1.2 25 | matplotlib==3.7.1 26 | mccabe==0.7.0 27 | msgpack==1.0.5 28 | numexpr==2.8.4 29 | numpy==1.24.3 30 | obspy==1.4.0 31 | packaging==23.1 32 | pandas==2.0.1 33 | pep8==1.7.1 34 | Pillow==9.5.0 35 | platformdirs==3.5.0 36 | pluggy==1.0.0 37 | psutil==5.9.5 38 | psycopg2==2.9.6 39 | py-cpuinfo==9.0.0 40 | pylint==2.17.4 41 | pyparsing==3.0.9 42 | pytest==7.3.1 43 | pytest-cov==4.0.0 44 | pytest-mock==3.10.0 45 | python-dateutil==2.8.2 46 | pytz==2023.3 47 | PyYAML==6.0 48 | requests==2.30.0 49 | scipy==1.10.1 50 | six==1.16.0 51 | SQLAlchemy==2.0.13 52 | -e git+https://github.com/rizac/stream2segment.git@ea3d7f4b7e4b89f9f58e93f375569272f50e8af2#egg=stream2segment 53 | tables==3.8.0 54 | tomlkit==0.11.8 55 | typing_extensions==4.5.0 56 | tzdata==2023.3 57 | urllib3==2.0.2 58 | Werkzeug==2.3.4 59 | wrapt==1.15.0 60 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | blinker==1.6.2 2 | blosc2==2.0.0 3 | certifi==2023.5.7 4 | charset-normalizer==3.1.0 5 | click==8.1.3 6 | contourpy==1.0.7 7 | cycler==0.11.0 8 | Cython==0.29.34 9 | decorator==5.1.1 10 | Flask==2.3.2 11 | fonttools==4.39.4 12 | greenlet==2.0.2 13 | idna==3.4 14 | itsdangerous==2.1.2 15 | Jinja2==3.1.2 16 | kiwisolver==1.4.4 17 | lxml==4.9.2 18 | MarkupSafe==2.1.2 19 | matplotlib==3.7.1 20 | msgpack==1.0.5 21 | numexpr==2.8.4 22 | numpy==1.24.3 23 | obspy==1.4.0 24 | packaging==23.1 25 | pandas==2.0.1 26 | Pillow==9.5.0 27 | psutil==5.9.5 28 | psycopg2==2.9.6 29 | py-cpuinfo==9.0.0 30 | pyparsing==3.0.9 31 | python-dateutil==2.8.2 32 | pytz==2023.3 33 | PyYAML==6.0 34 | requests==2.30.0 35 | scipy==1.10.1 36 | six==1.16.0 37 | SQLAlchemy==2.0.13 38 | -e git+https://github.com/rizac/stream2segment.git@ea3d7f4b7e4b89f9f58e93f375569272f50e8af2#egg=stream2segment 39 | tables==3.8.0 40 | typing_extensions==4.5.0 41 | tzdata==2023.3 42 | urllib3==2.0.2 43 | Werkzeug==2.3.4 44 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | """A setuptools based setup module. 
2 | Taken from: 3 | https://github.com/pypa/sampleproject/blob/master/setup.py 4 | 5 | See also: 6 | http://python-packaging-user-guide.readthedocs.org/en/latest/distributing/ 7 | 8 | Additional links: 9 | https://packaging.python.org/en/latest/distributing.html 10 | https://github.com/pypa/sampleproject 11 | """ 12 | from __future__ import print_function 13 | 14 | # Always prefer setuptools over distutils 15 | from setuptools import setup, find_packages 16 | # To use a consistent encoding 17 | from codecs import open 18 | from os import path 19 | 20 | here = path.abspath(path.dirname(__file__)) 21 | 22 | # Get the long description from the README file 23 | with open(path.join(here, 'README.md'), encoding='utf-8') as f: 24 | long_description = f.read() 25 | 26 | # http://stackoverflow.com/questions/2058802/how-can-i-get-the-version-defined-in-setup-py-setuptools-in-my-package 27 | version = "" 28 | with open(path.join(here, 'stream2segment', 'resources', 'program_version')) as version_file: 29 | version = version_file.read().strip() 30 | 31 | setup( 32 | name='stream2segment', 33 | 34 | # Versions should comply with PEP440. For a discussion on single-sourcing 35 | # the version across setup.py and the project code, see 36 | # https://packaging.python.org/en/latest/single_source_version.html 37 | version=version, 38 | 39 | description='A python project to download, process and visualize ' 40 | 'event-based seismic waveforms', 41 | long_description=long_description, 42 | 43 | # The project's main homepage. 44 | url='https://github.com/rizac/stream2segment', 45 | 46 | # Author details 47 | author='riccardo zaccarelli', 48 | author_email='rizac@gfz-potsdam.de', # FIXME: what to provide? 49 | 50 | # Choose your license 51 | license='GNU', 52 | 53 | # See https://pypi.python.org/pypi?%3Aaction=list_classifiers 54 | classifiers=[ 55 | # How mature is this project? Common values are 56 | # 3 - Alpha 57 | # 4 - Beta 58 | # 5 - Production/Stable 59 | 'Development Status :: 4 - Beta', 60 | 61 | # Indicate who your project is intended for 62 | 'Intended Audience :: Science/Research', 63 | 'Topic :: Scientific/Engineering', 64 | 65 | # Pick your license as you wish (should match "license" above) 66 | 'License :: OSI Approved :: GNU License', 67 | 68 | 69 | # Specify the Python versions you support here. 70 | # 'Programming Language :: Python :: 3.5', 71 | # 'Programming Language :: Python :: 3.6', 72 | # 'Programming Language :: Python :: 3.7', 73 | 'Programming Language :: Python :: 3.8', 74 | 'Programming Language :: Python :: 3.9', 75 | 'Programming Language :: Python :: 3.10', 76 | 'Programming Language :: Python :: 3.11', 77 | ], 78 | 79 | # What does your project relate to? 80 | keywords='download seismic waveforms related to events', 81 | 82 | # You can just specify the packages manually here if your project is 83 | # simple. Or you can use find_packages(). 84 | packages=find_packages(exclude=['contrib', 'docs', 'tests', 'htmlcov']), 85 | 86 | # Alternatively, if you want to distribute just a my_module.py, uncomment 87 | # this: 88 | # py_modules=["my_module"], 89 | 90 | # List run-time dependencies here. These will be installed by pip when 91 | # your project is installed. 
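# (note: these are lower bounds; the pinned versions actually used for testing
# are listed in requirements.txt and requirements.dev.txt)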
For info see: 92 | # https://packaging.python.org/en/latest/requirements.html 93 | install_requires=['PyYAML>=3.12', 94 | 'numpy>=1.13.1', 95 | 'tables>=3.5.2', 96 | 'pandas>=0.20.3', 97 | 'obspy>=1.0.3', 98 | 'Flask>=0.12.3', 99 | 'psycopg2>=2.7.3.1', 100 | 'psutil>=5.3.1', 101 | 'SQLAlchemy>=1.1.14', 102 | 'click>=6.7' 103 | ], 104 | 105 | # List additional groups of dependencies here (e.g. development 106 | # dependencies). You can install these using the following syntax, 107 | # for example: 108 | # $ pip install -e .[dev,test] (pip install -e ".[dev,test]" in zsh) 109 | extras_require={ 110 | # use latest versions. Without boundaries 111 | 'dev': ['pep8>=1.7.0', 112 | 'pylint>=1.7.2', 113 | 'pytest>=3.2.2', 114 | 'pytest-cov>=2.5.1', 115 | 'pytest-mock>=1.6.2'], 116 | 'jupyter': ['jupyter>=1.0.0'] 117 | }, 118 | 119 | # If there are data files included in your packages that need to be 120 | # installed, specify them here. If using Python 2.6 or less, then these 121 | # have to be included in MANIFEST.in as well. 122 | # 123 | # package_data={ 124 | # 'sample': ['package_data.dat'], 125 | # }, 126 | 127 | # make the installation process copy also the package data (see MANIFEST.in) 128 | # for info see https://python-packaging.readthedocs.io/en/latest/non-code-files.html 129 | include_package_data=True, 130 | 131 | # Although 'package_data' is the preferred approach, in some case you may 132 | # need to place data files outside of your packages. See: 133 | # http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa 134 | # In this case, 'data_file' will be installed into '/my_data' 135 | # 136 | # data_files=[('my_data', ['data/data_file'])], 137 | 138 | # To provide executable scripts, use entry points in preference to the 139 | # "scripts" keyword. Entry points provide cross-platform support and allow 140 | # pip to create the appropriate form of executable for the target platform. 141 | entry_points={ 142 | 'console_scripts': [ 143 | 'stream2segment=stream2segment.cli:cli', 144 | 's2s=stream2segment.cli:cli', 145 | ], 146 | }, 147 | ) 148 | -------------------------------------------------------------------------------- /stream2segment/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /stream2segment/download/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/stream2segment/download/__init__.py -------------------------------------------------------------------------------- /stream2segment/download/db/__init__.py: -------------------------------------------------------------------------------- 1 | from stream2segment.io.db import get_session as _get_session, DbNotFound, is_sqlite 2 | 3 | 4 | def get_session(dburl, scoped=False, **engine_kwargs): 5 | """Returns an SqlAlchemy session object for downloading data""" 6 | try: 7 | sess = _get_session(dburl, scoped, check_db_existence=not is_sqlite(dburl), 8 | **engine_kwargs) 9 | except DbNotFound as dbnf: 10 | raise ValueError('%s. Did you create the database first?' 
% str(dbnf)) 11 | 12 | # Note: this creates the SCHEMA, not the database 13 | # the import below is in the function because slightly time consuming: 14 | from stream2segment.download.db.models import Base 15 | try: 16 | Base.metadata.create_all(sess.get_bind()) 17 | except Exception as exc: 18 | raise ValueError('Error creating tables. Possible reason: tables created ' 19 | 'with an older version or with a different program ' 20 | '(original error: %s)' % str(exc)) 21 | return sess 22 | 23 | 24 | -------------------------------------------------------------------------------- /stream2segment/download/db/inspection/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/stream2segment/download/db/inspection/__init__.py -------------------------------------------------------------------------------- /stream2segment/download/db/inspection/templates/base.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | {{ title }} 5 | {% block head_meta %} 6 | 7 | 8 | {% endblock %} 9 | 10 | 20 | {% block head_scripts %} 21 | {% if use_axios %} 22 | 23 | {% endif %} 24 | {% if use_plotly %} 25 | 26 | {% endif %} 27 | {% if use_leaflet %} 28 | 29 | {% endif %} 30 | {% endblock %} 31 | {% block head_stylesheets %} 32 | {% if use_leaflet %} 33 | 34 | {% endif %} 35 | {% if use_bootstrap_css %} 36 | 37 | {% endif %} 38 | {% endblock %} 39 | 40 | 41 | 42 | {% block body_content %} 43 | {% endblock %} 44 | 45 | 46 | {% block after_body %} 47 | {% endblock %} 48 | 49 | -------------------------------------------------------------------------------- /stream2segment/download/db/inspection/templates/dstats.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% set use_leaflet = true %} 3 | {% set use_axios = false %} 4 | {% set use_plotly = false %} 5 | {% set use_bootstrap_css = true %} 6 | 7 | {% block head_scripts %} 8 | {{ super() }} 9 | 23 | {% endblock %} 24 | 25 | {% block head_css %} 26 | {{ super() }} 27 | .leaflet-container{ font-family: inherit !important; } 28 | .colorbar{ 29 | background: linear-gradient(to right, rgba(255, 190, 190, 1) , red); 30 | height: 1em; 31 | width: 2em; 32 | display: inline-block; 33 | border: 1px solid #666; 34 | vertical-align: middle; 35 | } 36 | {% endblock %} 37 | 38 | {% block body_content %} 39 |
40 |
41 | 42 | 44 | 45 | 46 |
47 | 48 | 49 | 50 | 55 | 56 | 57 | 58 | 69 | 70 | 71 | 72 | 77 | 78 | 79 |
Legend
51 | 52 | 53 | 54 | Station (click for details)
59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | Station size: number of requested segments
73 | 0% 74 | 75 | 100% 76 | Station color: % of requested segments in the selected categories
80 |
81 | 82 | 84 |
86 | 87 |
88 |
Segments (categories)
89 |
select:
90 | 92 | 94 |
95 | 96 | 97 | {% for code in codes %} 98 | 99 | 108 | 109 | 110 | {% endfor %} 111 |
CategoryDescription
100 | 106 | 107 | {{code[1]}}
112 | 113 |
114 |
Data-centers
115 |
select:
116 | 118 | 120 |
121 | 122 | 123 | 124 | 125 | 126 | 127 | {% for id, url in datacenters.items() %} 128 | 129 | 137 | 138 | 139 | 140 | 141 | {% endfor %} 142 |
URLRequested segmentsSelected
segments
130 | 135 | 136 |
143 | 144 |
145 |
Downloads
146 |
select:
147 | 149 | 151 |
152 | 153 | 154 | 155 | 156 | 157 | 158 | {% for id, values in downloads.items() %} 159 | 160 | 169 | 170 | 182 | 183 | {% endfor %} 184 |
IdExecutedEvent query parameters
161 | 167 | 168 | {{ values[0] | replace("T", "
") | safe }}
171 | {%- if values[1] -%} 172 | {% for key, value in values[1].items() %} 173 | {% if loop.index0 > 0 %} 174 |
175 | {% endif %} 176 | {{ key }}: {{ value | tojson }} 177 | {% endfor %} 178 | {%- else -%} 179 | N/A 180 | {%- endif -%} 181 |
185 |
186 |
187 | 188 |
189 | UPDATING MAP ... 190 |
191 | {% endblock %}
192 |
193 | {% block after_body %}
194 |
195 |
196 |
207 | {% endblock %}
208 |
--------------------------------------------------------------------------------
/stream2segment/download/db/management.py:
--------------------------------------------------------------------------------
1 | """
2 | Database management functions
3 | """
4 |
5 | # Import input so that it can be mocked in tests:
6 | from builtins import input
7 |
8 | from sqlalchemy import func
9 | from sqlalchemy.exc import SQLAlchemyError
10 |
11 | from stream2segment.io.db import close_session
12 | from stream2segment.io.inputvalidation import validate_param
13 | from stream2segment.download.db import get_session
14 | from stream2segment.download.db.models import Class, Download, Segment
15 |
16 |
17 | def classlabels(dburl, *, add, rename, delete):
18 | """Configure the class labels of the database at the given URL.
19 | Return a dict of class labels (mapped to their
20 | description) in the db after the operation
21 |
22 | :param add: Class labels to add as a Dict[str, str]. The dict keys are
23 | the new class labels, the dict values are the label description
24 | :param rename: Class labels to rename as Dict[str, Sequence[str]]
25 | The dict keys are the old class labels, and the dict values are
26 | a 2-element sequence (e.g., list/tuple) denoting the new class label
27 | and the new description. The latter can be None (= do not modify
28 | the description, just change the label)
29 | :param delete: Class labels to delete, as Sequence[str] denoting the class
30 | labels to delete
31 | """
32 | # create the session, raising a BadParam (associated to the name 'dburl') in case of errors:
33 | session = validate_param("dburl", dburl, get_session)
34 | try:
35 | configure_classlabels(session, add=add, rename=rename, delete=delete)
36 | return {c.label: c.description for c in session.query(Class)}
37 | finally:
38 | close_session(session)
39 |
40 |
41 | def configure_classlabels(session, *, add, rename, delete, commit=True):
42 | """Configure the class labels of the database bound to the given session.
43 | Lower-level version of `classlabels`: it accepts
44 | a `session` object and an optional `commit` flag (to defer the commit if needed)
45 |
46 | :param add: Class labels to add as a Dict[str, str]. The dict keys are
47 | the new class labels, the dict values are the label description
48 | :param rename: Class labels to rename as Dict[str, Sequence[str]]
49 | The dict keys are the old class labels, and the dict values are
50 | a 2-element sequence (e.g., list/tuple) denoting the new class label
51 | and the new description. The latter can be None (= do not modify
52 | the description, just change the label)
53 | :param delete: Class labels to delete, as Sequence[str] denoting the class
54 | labels to delete
55 | :param commit: boolean (default True) whether to commit (save changes
56 | to the database).
If True and the commit fails, the session is
57 | rolled back before raising
58 | """
59 | db_classes = {c.label: c for c in session.query(Class)}
60 | if add:
61 | for label, description in add.items():
62 | if label in db_classes: # unique constraint
63 | continue
64 | class_label = Class(label=label, description=description)
65 | session.add(class_label)
66 | db_classes[label] = class_label
67 |
68 | if rename:
69 | for label, (new_label, new_description) in rename.items():
70 | if label not in db_classes: # unique constraint
71 | continue
72 | db_classes[label].label = new_label
73 | if new_description is not None:
74 | db_classes[label].description = new_description
75 |
76 | if delete:
77 | for label in delete:
78 | if label in db_classes:
79 | session.delete(db_classes[label])
80 |
81 | if commit:
82 | try:
83 | session.commit()
84 | except SQLAlchemyError:
85 | session.rollback()
86 | raise
87 |
88 |
89 | def drop(dburl, download_ids, confirm=True):
90 | """Drop data from the database by download id(s); all associated segments are dropped as well.
91 |
92 | :param confirm: if not None (the default is True), ask for confirmation
93 | before deleting: the answer is read from the command line interface via the
94 | builtin `input` function ("y" proceeds, any other value stops).
95 | Set to None to disable the confirmation (but do it at your own risk)
96 |
97 | :return: None if `confirm` is not None and the user decided not to drop via user
98 | input, otherwise a dict of deleted download ids mapped to either:
99 | - an int (the number of segments deleted)
100 | - an exception (if the download id could not be deleted)
101 | """
102 | ret = {}
103 | # create the session, raising a BadParam (associated to the name 'dburl') in case of errors:
104 | session = validate_param('dburl', dburl, get_session)
105 | try:
106 | ids = [_[0] for _ in
107 | session.query(Download.id).filter(Download.id.in_(download_ids))]
108 | if not ids:
109 | return ret
110 | if confirm is not None:
111 | segs = session.query(func.count(Segment.id)).\
112 | filter(Segment.download_id.in_(ids)).scalar()
113 | val = input('Do you want to delete %d download execution(s) '
114 | '(id=%s) and the associated %d segment(s) from the '
115 | 'database [y|n]?' % (len(ids), str(ids), segs))
116 | if val.lower().strip() != 'y':
117 | return None
118 |
119 | for did in ids:
120 | ret[did] = session.query(func.count(Segment.id)).\
121 | filter(Segment.download_id == did).scalar()
122 | try:
123 | session.query(Download).filter(Download.id == did).delete()
124 | session.commit()
125 | # be sure about how many segments we deleted:
126 | ret[did] -= session.query(func.count(Segment.id)).\
127 | filter(Segment.download_id == did).scalar()
128 | except Exception as exc:
129 | session.rollback()
130 | ret[did] = exc
131 | return ret
132 | finally:
133 | close_session(session)
134 |
--------------------------------------------------------------------------------
/stream2segment/download/db/models.py:
--------------------------------------------------------------------------------
1 | """
2 | s2s Download database ORM
3 |
4 | :date: Jul 15, 2016
5 |
6 | ..
moduleauthor:: Riccardo Zaccarelli
7 | """
8 |
9 | from sqlalchemy import event
10 | # import declarative_base from io.db to stay sqlalchemy 1.x vs 2.x compliant:
11 | from stream2segment.io.db import models, declarative_base
12 |
13 |
14 | Base = declarative_base(cls=models.Base)
15 |
16 |
17 | class Download(Base, models.Download): # noqa
18 | """Model representing the executed downloads"""
19 | pass
20 |
21 |
22 | class Event(Base, models.Event): # noqa
23 | """Model representing a seismic Event"""
24 | pass
25 |
26 |
27 | class WebService(Base, models.WebService):
28 | """Model representing a web service (e.g., event web service)"""
29 | pass
30 |
31 |
32 | class DataCenter(Base, models.DataCenter):
33 | """Model representing a Data center (data provider, e.g. EIDA Node)"""
34 | pass
35 |
36 |
37 | # listen for insertions and updates and check DataCenter URLs (the call below
38 | # is the same as decorating check_datacenter_urls_fdsn with '@event.listens_for'):
39 | event.listens_for(DataCenter, 'before_insert')(models.check_datacenter_urls_fdsn)
40 | event.listens_for(DataCenter, 'before_update')(models.check_datacenter_urls_fdsn)
41 |
42 |
43 | class Station(Base, models.Station):
44 | """Model representing a Station"""
45 | pass
46 |
47 |
48 | class Channel(Base, models.Channel):
49 | """Model representing a Channel"""
50 | pass
51 |
52 |
53 | class Segment(Base, models.Segment):
54 | """Model representing a Waveform segment"""
55 | pass
56 |
57 |
58 | class Class(Base, models.Class):
59 | """Model representing a segment class label"""
60 | pass
61 |
62 |
63 | class ClassLabelling(Base, models.ClassLabelling):
64 | """Model representing a class labelling (or segment annotation), i.e. a
65 | pair (segment, class label)"""
66 | pass
67 |
--------------------------------------------------------------------------------
/stream2segment/download/exc.py:
--------------------------------------------------------------------------------
1 | """Download exceptions module"""
2 |
3 |
4 | class QuitDownload(Exception):
5 | """Abstract-like class representing an Exception to be raised
6 | as soon as something causes no segments to be downloaded.
7 |
8 | **IMPORTANT**: This class should not be raised directly. The user should raise
9 | a :class:`NothingToDownload` or :class:`FailedDownload` (see their
10 | documentation) because those are the exceptions caught and handled by the program
11 | """
12 |
13 | def __init__(self, exc_or_msg):
14 | """Create a new QuitDownload instance
15 |
16 | :param exc_or_msg: an Exception or a message string. If string, it is
17 | usually passed via the :func:`formatmsg` function in order to
18 | provide harmonized message formats
19 | """
20 | if isinstance(exc_or_msg, KeyError): # just re-format key errors
21 | exc_or_msg = 'KeyError: %s' % str(exc_or_msg)
22 | super(QuitDownload, self).__init__(str(exc_or_msg))
23 |
24 |
25 | class NothingToDownload(QuitDownload):
26 | """Exception that should be raised whenever the download process has no
27 | segments to download according to the user's settings. Currently,
28 | stream2segment catches these Exceptions, logging their message at level
29 | INFO and returning a 0 (=successful) status code
30 |
31 | This class and :class:`FailedDownload` both inherit from
32 | :class:`QuitDownload`.
33 | """
34 | pass
35 |
36 |
37 | class FailedDownload(QuitDownload):
38 | """Exception that should be raised whenever the download process could not
39 | proceed.
E.g., a download error (e.g., no internet connection) prevents
40 | fetching any data. Currently, stream2segment catches these Exceptions, logging
41 | their message at level CRITICAL or ERROR and returning a nonzero
42 | (=unsuccessful) status code
43 |
44 | This class and :class:`NothingToDownload` both inherit from
45 | :class:`QuitDownload`
46 | """
47 | pass
--------------------------------------------------------------------------------
/stream2segment/download/log.py:
--------------------------------------------------------------------------------
1 | """
2 | Log utilities for the download routine
3 | """
4 |
5 | import logging
6 | import os
7 | import sys
8 |
9 | from stream2segment.download.db import models as ddb
10 | from stream2segment.io.log import LevelFilter
11 |
12 |
13 | def configlog4download(logger, logfile_path='', verbose=False):
14 | """Configure the logger, setting it to `INFO` level with a list of
15 | default handlers:
16 |
17 | - If `logfile_path` is not the empty string, a :class:`DbStreamHandler`
18 | (streaming to that file) will capture all INFO, ERROR and WARNING level
19 | messages, and when its finalize() method is called, flushes the file
20 | content to the database (deleting the file if needed). This assures that
21 | if `DbStreamHandler.finalize` is not called, possibly due to an
22 | exception, the file can still be inspected. See :func:`logfilepath` if you
23 | want to automatically create a log file path in the same directory of a
24 | given download config file.
25 |
26 | - If `verbose` is True (False by default), a :class:`StreamHandler`
27 | (streaming to standard output) will capture ONLY messages of level INFO
28 | (20), ERROR (40) and CRITICAL (50), ideal for showing relevant
29 | information to the user on a terminal
30 |
31 | Depending on the arguments, 0, 1 or 2 handlers are thus added to the
32 | logger.
33 |
34 | Implementation detail: this method modifies these values for performance
35 | reasons:
36 | ```
37 | logging._srcfile = None
38 | logging.logThreads = 0
39 | logging.logProcesses = 0
40 | ```
41 |
42 | :return: None (the added handlers are accessible via `logger.handlers`)
43 | """
44 | # https://docs.python.org/2/howto/logging.html#optimization:
45 | logging._srcfile = None # pylint: disable=protected-access
46 | logging.logThreads = 0
47 | logging.logProcesses = 0
48 |
49 | logger.setLevel(logging.INFO) # necessary to forward to handlers
50 |
51 | # add handlers:
52 | db_streamer, sysout_streamer = None, None
53 |
54 | if logfile_path:
55 | db_streamer = DbStreamHandler(logfile_path)
56 | logger.addHandler(db_streamer)
57 |
58 | if verbose:
59 | sysout_streamer = logging.StreamHandler(sys.stdout)
60 | sysout_streamer.setFormatter(logging.Formatter('%(message)s'))
61 | # configure the levels we want to print (20: info, 40: error, 50: critical)
62 | l_filter = LevelFilter((20, 40, 50))
63 | sysout_streamer.addFilter(l_filter)
64 | # set minimum level (for safety):
65 | sysout_streamer.setLevel(min(l_filter.levels))
66 | logger.addHandler(sysout_streamer)
67 |
68 | # return db_streamer, sysout_streamer
69 |
70 | # custom StreamHandler: count errors and warnings:
71 | # handlers = []
72 | # if logfile_path:
73 | # handlers.append(DbStreamHandler(logfile_path))
74 | # if verbose:
75 | # handlers.append(SysOutStreamHandler(sys.stdout))
76 | # for hand in handlers:
77 | # logger.addHandler(hand)
78 | # return handlers
79 |
80 |
81 | class DbStreamHandler(logging.FileHandler):
82 | """A `logging.FileHandler` which counts errors and warnings.
See
83 | https://stackoverflow.com/q/812477. The `finalize` method of this class takes
84 | a database session and an id of the table 'downloads' (referring to the current
85 | download), and writes the content of the log file to the database, deleting the
86 | handler's file. **You should always explicitly call finalize() to assure the log
87 | is written to the database**. For an example using SQL-Alchemy log rows
88 | (slightly different case but informative) see:
89 | http://docs.pylonsproject.org/projects/pyramid_cookbook/en/latest/logging/sqlalchemy_logger.html
90 | """
91 | def __init__(self, filepath, min_level=20):
92 | """
93 | Initialize a DbStreamHandler
94 |
95 | :param min_level: this handler's level
96 | (https://docs.python.org/3/library/logging.html#logging.Handler.setLevel)
97 | """
98 | # w+: allows reading without closing first:
99 | super(DbStreamHandler, self).__init__(filepath, mode='w+')
100 | # access the stream with self.stream
101 | self.errors = 0
102 | self.warnings = 0
103 | self.criticals = 0 # one should be enough
104 | # configure level and formatter
105 | self.setLevel(min_level)
106 | self.setFormatter(logging.Formatter('[%(levelname)s] %(message)s'))
107 |
108 | def emit(self, record):
109 | if record.levelno == 30:
110 | self.warnings += 1
111 | elif record.levelno == 40:
112 | self.errors += 1
113 | elif record.levelno == 50:
114 | self.criticals += 1
115 | super(DbStreamHandler, self).emit(record)
116 | # (superclass logging.FileHandler flushes every emit)
117 |
118 | def finalize(self, session, download_id, removefile=True):
119 | """Write the log content to the db, close this handler
120 | and optionally remove the underlying file"""
121 | # the super-class sets the stream to None when closing, so we might
122 | # check this to see if we closed it already:
123 | if self.stream is None:
124 | return
125 | # we experienced a NoneType error which we could not reproduce
126 | # deterministically, so the check above guards against it (we know that
127 | # self.stream == None => already closed)
128 |
129 | super(DbStreamHandler, self).flush() # for safety
130 | self.stream.seek(0) # offset of 0
131 | logcontent = self.stream.read() # read again
132 | try:
133 | super(DbStreamHandler, self).close()
134 | except:
135 | pass
136 | if removefile:
137 | try:
138 | os.remove(self.baseFilename)
139 | except:
140 | pass
141 | Download = ddb.Download
142 | session.query(Download).filter(Download.id == download_id).\
143 | update({Download.log.key: logcontent,
144 | Download.errors.key: self.errors,
145 | Download.warnings.key: self.warnings})
146 | session.commit()
--------------------------------------------------------------------------------
/stream2segment/download/modules/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/stream2segment/download/modules/__init__.py
--------------------------------------------------------------------------------
/stream2segment/io/cli.py:
--------------------------------------------------------------------------------
1 | """
2 | Command line interface IO utilities
3 |
4 | ..
moduleauthor:: Riccardo Zaccarelli
5 | """
6 |
7 | import sys
8 | from contextlib import contextmanager
9 | from itertools import chain
10 |
11 | from click import progressbar as click_progressbar
12 |
13 |
14 | def ascii_decorate(string, frame=None):
15 | """Decorate the string with a frame in unicode decoration characters,
16 | and return the decorated string
17 |
18 | :param string: a single- or multi-line string
19 | :param frame: list of characters or string. The string/list can have length 1, 3 or 8:
20 | 1 character/list defines the decorator character. E.g. '#' or ('#',)
21 | 3 characters/lists define the (top, mid, bottom) characters. E.g. ("=", "|", "-")
22 | 8 characters define the (topleft, topcenter, topright, midleft, midright,
23 | bottomleft, bottomcenter, bottomright) characters. When None or missing,
24 | this argument defaults to "╔═╗║║╚═╝"
25 | """
26 | if not string:
27 | return ''
28 | if not frame:
29 | frame = "╔", "═", "╗", "║", "║", "╚", "═", "╝"
30 | if len(frame) == 1:
31 | frame = frame * 8
32 | elif len(frame) == 3:
33 | frame = [frame[0]] * 3 + [frame[1]] * 2 + [frame[2]] * 3  # 3 top, 2 mid, 3 bottom
34 |
35 | linez = string.splitlines()
36 | maxlen = max(len(l) for l in linez)
37 | frmt = "%s {:<%d} %s" % (frame[3], maxlen, frame[4])
38 | hline_top = frame[0] + frame[1] * (maxlen + 2) + frame[2]
39 | hline_bottom = frame[-3] + frame[-2] * (maxlen + 2) + frame[-1]
40 |
41 | return "\n".join(chain([hline_top],
42 | (frmt.format(l) for l in linez),
43 | [hline_bottom]))
44 |
45 |
46 | class Nop:
47 | """Dummy class (no-op), used to yield a contextmanager where each method
48 | is no-op. Used in `get_progressbar`
49 | """
50 | # https://stackoverflow.com/a/24946360
51 | def __init__(self, *a, **kw):
52 | pass
53 |
54 | @staticmethod
55 | def __nop(*args, **kw):
56 | pass
57 |
58 | def __getattr__(self, _):
59 | return self.__nop
60 |
61 |
62 | @contextmanager
63 | def get_progressbar(show, **kw):
64 | """Return a `click.progressbar` if `show` is True, otherwise a No-op
65 | class, so that we can run programs by simply doing:
66 | ```
67 | isterminal = True # or False for no-op class
68 | with get_progressbar(isterminal, length=..., ...) as bar:
69 | # do your stuff ...
and then: 70 | bar.update(num_increments) # this is no-op if `isterminal` is False 71 | ``` 72 | """ 73 | if not show or kw.get('length', 1) == 0: 74 | yield Nop(**kw) 75 | else: 76 | # some custom setup if missing: 77 | # (note that progressbar characters render differently across OSs: 78 | # after some attempts, I found out the best for mac - which is the 79 | # default - and Ubuntu): 80 | is_linux = sys.platform.startswith('linux') 81 | kw.setdefault('fill_char', "▮" if is_linux else "●") 82 | kw.setdefault('empty_char', "▯" if is_linux else "○") 83 | kw.setdefault('bar_template', '%(label)s %(bar)s %(info)s') 84 | with click_progressbar(**kw) as pbar: 85 | yield pbar -------------------------------------------------------------------------------- /stream2segment/io/db/__init__.py: -------------------------------------------------------------------------------- 1 | import re 2 | import os 3 | from contextlib import contextmanager 4 | 5 | from sqlalchemy.exc import ProgrammingError, OperationalError, SQLAlchemyError 6 | from sqlalchemy.orm.scoping import scoped_session 7 | from sqlalchemy.engine import create_engine 8 | from sqlalchemy.orm.session import sessionmaker 9 | from sqlalchemy import text, __version__ as __sa_version__ 10 | 11 | 12 | sqlalchemy_version = float(".".join(__sa_version__.split('.')[0:2])) # https://stackoverflow.com/a/75634238 13 | 14 | # IMPORTS to be called from the codebase to fix sqlalchemy 1.x vs 2.x changes: 15 | 16 | if sqlalchemy_version >= 2: 17 | from sqlalchemy.orm import declarative_base # noqa 18 | else: 19 | from sqlalchemy.ext.declarative import declarative_base # noqa 20 | 21 | 22 | def get_session(dbpath, scoped=False, check_db_existence=True, **engine_args): 23 | """Create an SQLAlchemy session for IO database operations 24 | 25 | :param dbpath: the path to the database, e.g. sqlite:///path_to_my_dbase.sqlite 26 | :param scoped: boolean (False by default) if the session must be scoped session 27 | :param check_db_existence: True by default, will raise a :class:`DbNotFound` if the 28 | database does not exist 29 | :param engine_args: optional keyword argument values for the 30 | `create_engine` method. E.g., let's provide two engine arguments, 31 | `echo` and `connect_args`: 32 | ``` 33 | get_session(dbpath, ..., echo=True, connect_args={'connect_timeout': 10}) 34 | ``` 35 | For info see: 36 | https://docs.sqlalchemy.org/en/14/core/engines.html#sqlalchemy.create_engine.params.connect_args 37 | """ 38 | if not isinstance(dbpath, str): 39 | raise TypeError('string required, %s found' % str(type(dbpath))) 40 | 41 | try: 42 | # set max timeout if not set 43 | if is_postgres(dbpath): 44 | timeout = 20 # in seconds 45 | engine_args.setdefault('connect_args', {}) 46 | engine_args['connect_args'].setdefault('connect_timeout', timeout) 47 | engine = create_engine(dbpath, **engine_args) 48 | except (SQLAlchemyError, ValueError) as _: 49 | # ValueError: 'postgresql://4:a6gfds' (cannot create port) 50 | raise ValueError('Cannot create a db engine. 
Possible reason: '
51 | 'the URL is not well formed or contains typos '
52 | '(original error: %s)' % str(_))
53 |
54 | if check_db_existence:
55 | # (the only case when we don't care if the database exists is when
56 | # we have sqlite and we are downloading)
57 | if not database_exists(engine):
58 | raise DbNotFound(dbpath)
59 |
60 | session_factory = sessionmaker(bind=engine)
61 |
62 | if not scoped:
63 | # create a Session
64 | return session_factory()
65 |
66 | return scoped_session(session_factory)
67 |
68 |
69 | class DbNotFound(ValueError):
70 | """DbNotFound is an exception raised when the database could not be found. This
71 | happens basically when either the db does not exist, or any entry (user, password,
72 | host) is wrong. E.g.: this connects to an engine but might raise this exception:
73 | postgresql://<user>:<password>@<host>.gfz-potsdam.de/me
74 | whereas this does not even raise this exception and fails when creating an engine:
75 | wrong_dialect_and_driver://<user>:<password>@<host>.gfz-potsdam.de/me
76 | """
77 | def __init__(self, dburl):
78 | super().__init__(dburl)
79 |
80 | @property
81 | def dburl(self):
82 | return self.args[0]
83 |
84 | def __str__(self):
85 | # Warning: if you change the message below, check also the message raised in
86 | # `stream2segment.download.db::valid_session` that relies upon it
87 | return 'Database not accessible. Possible reason: wrong user/password/host ' \
88 | 'in the URL, timeout (do you use VPN?) or the db does not exist'
89 |
90 |
91 | def is_sqlite(dburl):
92 | return isinstance(dburl, str) and dburl.lower().startswith('sqlite')
93 |
94 |
95 | def is_postgres(dburl):
96 | return isinstance(dburl, str) and dburl.lower().startswith('postgres')
97 |
98 |
99 | def get_dbname(dburl):
100 | return dburl[dburl.rfind('/') + 1:]
101 |
102 |
103 | def database_exists(url_or_engine):
104 | """Return True if the database exists. Works for Postgres, MySQL, SQLite.
105 |
106 | :param url_or_engine: SQLAlchemy engine or string denoting a database URL.
107 | """
108 | # We adopt a quick and dirty solution from https://stackoverflow.com/a/3670000
109 | # slightly modified because, contrary to what is claimed there, it does not work for sqlite
110 | # (a db is created if it does not exist). For a more sophisticated solution, see:
111 | # https://sqlalchemy-utils.readthedocs.io/en/latest/_modules/sqlalchemy_utils/functions/database.html#database_exists
112 |
113 | # Is it sqlite?
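# (a SQLite URL denotes a local file, so existence reduces to a file-system
# check; for server databases such as Postgres we attempt a real query below)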
114 | url_ = get_url(url_or_engine) 115 | if is_sqlite(url_): 116 | return os.path.isfile(_extract_file_path(url_)) 117 | 118 | with _engine(url_or_engine) as engine: 119 | try: 120 | with engine.begin() as conn: 121 | conn.execute(text('SELECT 1')) 122 | return True 123 | except (ProgrammingError, OperationalError) as _: 124 | return False 125 | 126 | 127 | def _extract_file_path(sqlite_url): 128 | return os.path.abspath(sqlite_url[10:]) # remove sqlite:/// 129 | 130 | 131 | def get_url(url_or_engine): 132 | """Return the URL from the given argument (if already url, return the argument) 133 | """ 134 | if isinstance(url_or_engine, str): 135 | return url_or_engine 136 | return str(url_or_engine.url) 137 | 138 | 139 | @contextmanager 140 | def _engine(url_or_engine): 141 | engine = url_or_engine 142 | engine_needs_disposal = False 143 | if isinstance(url_or_engine, str): 144 | engine_needs_disposal = True 145 | engine = create_engine(url_or_engine) 146 | try: 147 | yield engine 148 | finally: 149 | if engine_needs_disposal: 150 | engine.dispose() 151 | 152 | 153 | def close_session(session, dispose_engine=True): 154 | """Close the SQLAlchemy session 155 | https://docs.sqlalchemy.org/en/13/orm/session_basics.html#closing 156 | and the underline engine accessible via `session.get_bind()` 157 | https://docs.sqlalchemy.org/en/14/core/connections.html?highlight=dispose#engine-disposal 158 | unless `dispose_engine` is False (default: True). 159 | 160 | :param session: a SQLAlchemy session 161 | :param dispose_engine: boolean (default True when missing) close also the 162 | underlying engine 163 | :return: True if all required operation(s) where performed with no exceptions, 164 | False otherwise 165 | """ 166 | ret = True 167 | try: 168 | session.close() 169 | except Exception: 170 | ret = False 171 | if dispose_engine: 172 | try: 173 | session.get_bind().dispose() 174 | except Exception: 175 | ret = False 176 | return ret 177 | 178 | 179 | def secure_dburl(dburl): 180 | """Return a printable database name by removing passwords, if any 181 | 182 | :param dburl: database path as string in the format: 183 | dialect+driver://username:password@host:port/database 184 | For info see: 185 | http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls 186 | """ 187 | return re.sub(r"://(.*?):(.*)@", r"://\1:***@", dburl) -------------------------------------------------------------------------------- /stream2segment/io/db/sqlconstructs.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module for registering non-standard SQL constructs 3 | 4 | The following SQL functions work with both SQLite and Postgres. If you 5 | add support for new databases, you should modify the code below. 
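(A hypothetical example: adding MySQL support for `strpos` would mean registering
one more compilation variant, i.e. a function decorated with
`@compiles(strpos, 'mysql')`, analogous to the SQLite variants below.)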
For info: 6 | http://docs.sqlalchemy.org/en/latest/core/compiler.html#further-examples) 7 | """ 8 | 9 | from sqlalchemy import Integer, String, Float 10 | from sqlalchemy.ext.compiler import compiles 11 | from sqlalchemy.sql.expression import FunctionElement 12 | 13 | 14 | class strpos(FunctionElement): 15 | name = 'strpos' 16 | type = Integer() 17 | inherit_cache = True 18 | 19 | 20 | @compiles(strpos) 21 | def standard_strpos(element, compiler, **kw): 22 | """delegates strpos to the strpos db function""" 23 | return compiler.visit_function(element) 24 | 25 | 26 | @compiles(strpos, 'sqlite') 27 | def sqlite_strpos(element, compiler, **kw): 28 | return "instr(%s)" % compiler.process(element.clauses) 29 | # return func.instr(compiler.process(element.clauses)) 30 | 31 | 32 | # function `concat` 33 | 34 | class concat(FunctionElement): 35 | name = 'concat' 36 | type = String() 37 | inherit_cache = True 38 | 39 | 40 | @compiles(concat) 41 | def standard_concat(element, compiler, **kw): 42 | return compiler.visit_function(element) 43 | 44 | 45 | @compiles(concat, 'sqlite') 46 | def sqlite_concat(element, compiler, **kw): 47 | return " || ".join(compiler.process(c) for c in element.clauses) 48 | 49 | 50 | # two utility functions to return the timestamp from a datetime 51 | def _duration_sqlite(start, end): 52 | """Return the time in seconds since 1970 as floating point for of the 53 | specified argument (a datetime in sqlite format) 54 | """ 55 | # note: sqlite time format is bizarre. They have %s: timestamp in SECONDS 56 | # since 1970, %f seconds only (with 3 decimal digits WTF?) and %S: seconds 57 | # part (integer). Thus to have a floating point value with 3 decimal digits 58 | # we should return: 59 | # ``` 60 | # round(strftime('%s',{}) + strftime('%f',{}) - strftime('%S',{}), 3)".\ 61 | # format(dtime) 62 | # ``` 63 | # However, for performance reasons we think it's sufficient to return the 64 | # seconds, thus we keep it more simple with the use round at the end to 65 | # coerce to float with 3 decimal digits, for safety (yes, round in sqlite 66 | # returns a float) and avoid integer divisions when needed but proper 67 | # floating point arithmentic 68 | return ("round(strftime('%s',{1})+strftime('%f',{1})-strftime('%S',{1}) - " 69 | "(strftime('%s',{0})+strftime('%f',{0})-strftime('%S',{0})), 3)").\ 70 | format(start, end) 71 | 72 | 73 | def _duration_postgres(start, end): 74 | """Return the time in seconds since 1970 as floating point for of the 75 | specified argument (a datetime in postgres format) 76 | """ 77 | # Note: we use round at the end to coerce to float with 3 decimal digits, 78 | # for safety and avoid integer divisions when needed but proper floating 79 | # point arithmentic 80 | return "round(EXTRACT(EPOCH FROM ({1}-{0}))::numeric, 3)".format(start, 81 | end) 82 | 83 | 84 | # function `duration_sec` 85 | 86 | class duration_sec(FunctionElement): 87 | name = 'duration_sec' 88 | type = Float() 89 | inherit_cache = True 90 | 91 | 92 | @compiles(duration_sec) 93 | def standard_duration_sec(element, compiler, **kw): 94 | starttime, endtime = [compiler.process(c) for c in element.clauses] 95 | return _duration_postgres(starttime, endtime) 96 | 97 | 98 | @compiles(duration_sec, 'sqlite') 99 | def sqlite_duration_sec(element, compiler, **kw): 100 | starttime, endtime = [compiler.process(c) for c in element.clauses] 101 | return _duration_sqlite(starttime, endtime) 102 | 103 | 104 | # function `missing_data_sec` 105 | 106 | class missing_data_sec(FunctionElement): 107 | name = 
'missing_data_sec' 108 | type = Float() 109 | inherit_cache = True 110 | 111 | 112 | @compiles(missing_data_sec) 113 | def standard_missing_data_sec(element, compiler, **kw): 114 | start, end, request_start, request_end = [compiler.process(c) 115 | for c in element.clauses] 116 | return "({1}) - ({0})".format(_duration_postgres(start, end), 117 | _duration_postgres(request_start, request_end)) 118 | 119 | 120 | @compiles(missing_data_sec, 'sqlite') 121 | def sqlite_missing_data_sec(element, compiler, **kw): 122 | start, end, request_start, request_end = [compiler.process(c) 123 | for c in element.clauses] 124 | return "({1}) - ({0})".format(_duration_sqlite(start, end), 125 | _duration_sqlite(request_start, request_end)) 126 | 127 | 128 | # function `missing_data_ratio` 129 | 130 | class missing_data_ratio(FunctionElement): 131 | name = 'missing_data_ratio' 132 | type = Float() 133 | inherit_cache = True 134 | 135 | 136 | @compiles(missing_data_ratio) 137 | def standard_missing_data_ratio(element, compiler, **kw): 138 | start, end, request_start, request_end = [compiler.process(c) 139 | for c in element.clauses] 140 | return "1.0 - (({0}) / ({1}))".format(_duration_postgres(start, end), 141 | _duration_postgres(request_start, request_end)) 142 | 143 | 144 | @compiles(missing_data_ratio, 'sqlite') 145 | def sqlite_missing_data_ratio(element, compiler, **kw): 146 | start, end, request_start, request_end = [compiler.process(c) 147 | for c in element.clauses] 148 | return "1.0 - (({0}) / ({1}))".format(_duration_sqlite(start, end), 149 | _duration_sqlite(request_start, request_end)) 150 | 151 | 152 | # function `deg2km` 153 | 154 | class deg2km(FunctionElement): 155 | name = 'deg2km' 156 | type = Float() 157 | inherit_cache = True 158 | 159 | 160 | @compiles(deg2km) 161 | def standard_deg2km(element, compiler, **kw): 162 | deg = compiler.process(list(element.clauses)[0]) 163 | return "%s * (2.0 * 6371 * 3.14159265359 / 360.0)" % deg 164 | 165 | 166 | # function `substr` 167 | 168 | class substr(FunctionElement): 169 | name = 'substr' 170 | type = String() 171 | inherit_cache = True 172 | 173 | 174 | @compiles(substr) 175 | def standard_substr(element, compiler, **kw): 176 | clauses = list(element.clauses) 177 | column = compiler.process(clauses[0]) 178 | start = compiler.process(clauses[1]) 179 | leng = compiler.process(clauses[2]) 180 | return "substr(%s, %s, %s)" % (column, start, leng) 181 | 182 | -------------------------------------------------------------------------------- /stream2segment/io/log.py: -------------------------------------------------------------------------------- 1 | """ 2 | Class handling logger for downloading and processing 3 | 4 | :date: Feb 20, 2017 5 | 6 | .. moduleauthor:: Riccardo Zaccarelli 7 | """ 8 | import time 9 | from datetime import datetime, timedelta 10 | 11 | 12 | class LevelFilter: # noqa 13 | """Logging filter that logs only messages in a set of levels (the base filter 14 | class only allows events which are below a certain point in the logger hierarchy). 
15 | 16 | Usage: `logger.addFilter(LevelFilter((20, 40, 50)))` 17 | """ 18 | 19 | # note: looking at the code, it seems that we do not need to inherit from 20 | # logging.Filter 21 | def __init__(self, levels): 22 | """Initialize a LevelFilter 23 | 24 | :param levels: iterable of `int`s representing different logging levels: 25 | ``` 26 | CRITICAL 50 27 | ERROR 40 28 | WARNING 30 29 | INFO 20 30 | DEBUG 10 31 | NOTSET 0 32 | ``` 33 | """ 34 | self.levels = set(levels) 35 | 36 | def filter(self, record): 37 | """Filter record according to its level number""" 38 | return record.levelno in self.levels 39 | 40 | 41 | def logfilepath(filepath): 42 | """Return a log file path associated with the given `filepath`, i.e.: 43 | `filepath + ".[now].log"` where [now] is the current date-time in ISO 44 | format, rounded to the closest second 45 | 46 | :param filepath: a file path serving as base for the log file path. The 47 | file does not need to exist but if you want to use the returned file 48 | for logging (the usual case), its parent directory must exist 49 | """ 50 | _now = datetime.utcnow().replace(microsecond=0).isoformat() 51 | return filepath + (".%s.log" % _now) 52 | 53 | 54 | def close_logger(logger): 55 | """Close all logger handlers and remove them from the logger""" 56 | handlers = logger.handlers[:] 57 | for handler in handlers: 58 | try: 59 | handler.close() # the handler might be already closed: ignore 60 | except Exception: # noqa 61 | pass 62 | logger.removeHandler(handler) 63 | 64 | 65 | def elapsed_time(t0_sec, t1_sec=None): 66 | """Time elapsed from `t0_sec` until `t1_sec`, as `timedelta` object rounded 67 | to seconds. If `t1_sec` is None, it will default to `time.time()` (the 68 | current time since the epoch, in seconds) 69 | 70 | :param t0_sec: (float) the start time in seconds. Usually it is the result 71 | of a previous call to `time.time()`, before starting a process whose 72 | duration has to be monitored 73 | :param t1_sec: (float) the end time in seconds. If None, it defaults to 74 | `time.time()` (current time since the epoch, in seconds) 75 | 76 | :return: a timedelta object, rounded to seconds 77 | """ 78 | return timedelta(seconds=round((time.time() if t1_sec is None else t1_sec) - t0_sec)) 79 | 80 | 81 | -------------------------------------------------------------------------------- /stream2segment/process/db/__init__.py: -------------------------------------------------------------------------------- 1 | # import here for namespacing reasons (https://stackoverflow.com/a/18979314): 2 | from stream2segment.io.db import get_session 3 | -------------------------------------------------------------------------------- /stream2segment/process/funclib/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/stream2segment/process/funclib/__init__.py -------------------------------------------------------------------------------- /stream2segment/process/funclib/coda.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module for coda analysis. 3 | Not yet implemented (future feature in new versions) 4 | 5 | Created on Jul 25, 2016 6 | 7 | .. moduleauthor:: Jessie Mayor 8 | .. 
moduleauthor:: Riccardo Zaccarelli 9 | """ 10 | import numpy as np 11 | import scipy.stats 12 | 13 | from obspy.signal.filter import bandpass 14 | from obspy.signal.trigger import classic_sta_lta 15 | 16 | 17 | # cycle = number of "mean" periods in a window; signal is an array, 18 | # fm = mean frequency of the (filtered) signal, dt = sampling interval 19 | def mysmooth(signal, time, fm, cycle, dt): 20 | """ 21 | Return the envelope of the signal and its corresponding time, smoothed out of natural 22 | variations by means of a moving-average window spanning the given number of cycles. 23 | Note that the signal is under-sampled depending on the number of cycles. 24 | 25 | :param signal: energy, computed as the square of the velocigram (from Obspy.core.Trace) 26 | :type signal: array (units depend on the input trace) 27 | :param time: time corresponding to the trace (st.times() for an Obspy trace object) 28 | :type time: array, in seconds 29 | :param fm: mean frequency of the band-pass filter 30 | :type fm: float, in Hertz 31 | :param cycle: number of cycles in the moving window (1 cycle = 1 period) 32 | :type cycle: float (dimensionless) 33 | :param dt: sampling interval of the data 34 | :type dt: float, in seconds 35 | """ 36 | signal = list(signal) 37 | # window length in time 38 | window = cycle / fm 39 | # number of points in the moving window = window duration divided by the time step 40 | npts = int(window // dt) 41 | half_npts = npts // 2 # as int 42 | signal_smooth = [] 43 | time_smooth = [] 44 | for i in range(0, len(signal) - half_npts, half_npts): # iterate over the signal points 45 | end_ = i + npts 46 | signal_smooth.append(np.mean(signal[i:end_])) 47 | time_smooth.append(time[i + half_npts]) 48 | return (signal_smooth, np.array(time_smooth)) 49 | 50 | 51 | def group(indices_list): 52 | """ 53 | Group consecutive indices, yielding a (first, last) tuple for each run of contiguous values 54 | :param indices_list: list of indices 55 | :type indices_list: list 56 | """ 57 | first = last = indices_list[0] 58 | for n in indices_list[1:]: 59 | if n - 1 == last: # Part of the group, bump the end 60 | last = n 61 | else: # Not part of the group, yield current group and start a new one 62 | yield first, last 63 | first = last = n 64 | yield first, last # Yield the last group 65 | 66 | 67 | def analyze_coda(trace, fm=6, cycle=10, noise_level=16, Lw=50, noise_duration=5, subwdw_length=5, 68 | subwdw_length_rec=2.5): 69 | """ 70 | Return the correlation coefficient of the coda part of the signal: the onset of the coda 71 | is selected as the maximum amplitude time and the coda duration is Lw. 72 | 73 | :param trace: an obspy.core.Trace object 74 | :return: a tuple of the form (or None, if no reliable coda is found): 75 | (slope_start_time, slope, intercept, R, pvalue, stderr) 76 | where slope_start_time is an obspy UTCDateTime object. For the other values, see: 77 | http://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.stats.linregress.html 78 | for details 79 | :rtype: see return field 80 | """ 81 | st = trace 82 | try: 83 | st.data = bandpass(st.data, freqmin=4, freqmax=8, df=st.stats. 
sampling_rate, corners=2) 84 | except ValueError: 85 | return None 86 | if (st.stats.npts*st.stats.delta) > 100: 87 | st.detrend('demean') # center the signal at zero 88 | energy = st.data * st.data 89 | t = st.times() 90 | st_smooth, t_smooth = mysmooth(energy, t, fm, cycle, st.stats.delta) 91 | imax = st_smooth.index(max(st_smooth)) 92 | new_dt = round(t_smooth[1]-t_smooth[0], 2) 93 | sec = int(noise_duration // new_dt) # number of samples at the start of the signal 94 | noise = st_smooth[0:sec] # take noise_duration seconds for the noise mean 95 | # df=st.stats.sampling_rate 96 | # df = 1/new_dt 97 | 98 | # chosen values: 2 and 5 (in samples) 99 | cft = classic_sta_lta(noise, nsta=2, nlta=5) 100 | stalta = np.where(cft > 3)[0] # chosen threshold: 3 101 | # if actual signal is detected within the noise window, this is no good: 102 | if len(stalta) > 0: 103 | return None # a reliable noise mean cannot be defined 104 | else: 105 | noisedata = noise 106 | # ---- end of noise mean estimation ---------------------------------------- 107 | # ##### coda duration: from the envelope maximum ------> until signal/noise ratio < threshold ####### 108 | j = 0 109 | start = imax 110 | end_ = start + int(subwdw_length // new_dt) # take a 5 s moving window 111 | # rec_window = new_dt/2. # 50% overlap 112 | n_rec = int(subwdw_length_rec // new_dt) # number of overlapping points: 2.5 s chosen 113 | ratio = [] 114 | while j < len(st_smooth[imax:imax+int(Lw // new_dt)]): 115 | ratio.append(np.mean(st_smooth[start:end_]) / np.mean(noisedata)) 116 | j = j+n_rec 117 | start = start+n_rec 118 | end_ = start + int(subwdw_length // new_dt) 119 | # where, within the Lw-seconds coda window, is the signal above the noise level? 120 | indok = np.where(np.array(ratio) > noise_level)[0] 121 | ret_vals = None 122 | if len(indok) > 0: 123 | doublons = list(group(indok)) 124 | if (len(doublons) == 1) and (doublons[0][-1] == len(ratio)-1) or (doublons[0][0] == 0) \ 125 | and (doublons[0][-1] == len(ratio)-1): 126 | # i.e., a coda (or at least a ratio > noise_level) is detected, and 127 | # a length of at least 20 seconds is chosen 128 | coda = st_smooth[imax:imax+int(Lw // new_dt)] # smoothed data 129 | 130 | # tcoda = t_smooth[imax:imax+int(Lw/new_dt)] 131 | 132 | # raw=st.data[imax:imax+int(Lw/new_dt)]# raw data 133 | 134 | # test the coda for an actual "slope", playing with the 135 | # correlation coefficient 136 | 137 | # tr is the coda trace 138 | coda = np.log10(coda) # work with the log of the coda to get a slope 139 | n_pts = len(coda) # number of points in the coda 140 | # window=5 141 | # rec=2.5 142 | 143 | # number of points in the 5-second window 144 | wdw_npts = int(subwdw_length // new_dt) 145 | # number of points in the overlap window: 146 | wdw_rec = int(subwdw_length_rec // new_dt) 147 | # maximum bound to reach when building 5-second windows: 148 | n_max = int(n_pts // wdw_npts) 149 | start = 0 150 | end = wdw_npts 151 | 152 | means = [] 153 | x_means = [] 154 | k = 0 155 | while end < n_max * wdw_npts: 156 | means.append(np.mean(coda[start: end])) 157 | x_means.append(k) 158 | k = k + 1 159 | start = start + wdw_rec 160 | end = end + wdw_rec 161 | slope, intercept, R, pvalue, stderr = scipy.stats.linregress(x_means, means) # @UndefinedVariable 162 | start_time = st.stats.starttime + t_smooth[imax] 163 | ret_vals = (start_time, slope, intercept, R, pvalue, stderr) 164 | 165 | return ret_vals 166 | 
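A minimal, hypothetical usage sketch of `analyze_coda` (the file path is illustrative only; keyword defaults as defined above; the function returns `None` when no reliable coda is found):

```python
# Hypothetical usage sketch (file path and printout are illustrative only):
from obspy import read

trace = read("/path/to/waveform.mseed")[0]  # assumed single-trace miniSEED
result = analyze_coda(trace)  # defaults: fm=6, cycle=10, noise_level=16, Lw=50
if result is None:
    print("no coda detected (or the noise window was not quiet enough)")
else:
    start_time, slope, intercept, r_coeff, pvalue, stderr = result
    print("coda onset:", start_time, "log-envelope slope:", slope)
```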
-------------------------------------------------------------------------------- /stream2segment/process/gui/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Decorators for making functions displayable on the 3 | Graphical User Interface (GUI) 4 | 5 | :date: Sep 19, 2017 6 | 7 | .. moduleauthor:: Riccardo Zaccarelli 8 | """ 9 | 10 | 11 | def preprocess(func): 12 | """Decorator to add the attribute `func._s2s_att = "gui.preprocess"`""" 13 | func._s2s_att = "gui.preprocess" # pylint: disable=protected-access 14 | return func 15 | 16 | 17 | def customplot(func): # DEPRECATED: backward compatibility 18 | """Decorator to add the attribute `func._s2s_att = "gui.customplot"`""" 19 | func._s2s_att = "gui.customplot" # pylint: disable=protected-access (overwritten by plot('b') below) 20 | return plot('b')(func) 21 | 22 | 23 | def sideplot(func): # DEPRECATED: backward compatibility 24 | """Decorator to add the attribute `func._s2s_att = "gui.sideplot"`""" 25 | return plot('r', xaxis={'type': 'log'}, yaxis={'type': 'log'})(func) 26 | 27 | 28 | def plot(*args, **kwargs): 29 | """Decorator to add the attribute `func._s2s_att` = "gui.plot" and the given 30 | properties 31 | 32 | :param kwargs: `position` ('b' for bottom, the default, or 'r' for right), 33 | `xaxis`, `yaxis` (both dicts of plotly axis properties, default: None, i.e. 34 | empty dict. For info on axes, see: https://plot.ly/python/axes/) 35 | """ 36 | position = kwargs.get('position', 'b') 37 | xaxis = kwargs.get('xaxis', None) 38 | yaxis = kwargs.get('yaxis', None) 39 | 40 | # Here we want to allow @decorator, @decorator() and @decorator(position='b',...) 41 | # Solution hint here: https://stackoverflow.com/q/3931627 42 | 43 | # First define the decorator wrapper: 44 | def decorator(func): 45 | """Set the attributes on the function in order to make it recognizable as 46 | gui func 47 | """ 48 | func._s2s_att = 'gui.plot' # pylint: disable=protected-access 49 | func._s2s_position = position # pylint: disable=protected-access 50 | func._s2s_xaxis = xaxis or {} # pylint: disable=protected-access 51 | func._s2s_yaxis = yaxis or {} # pylint: disable=protected-access 52 | return func 53 | 54 | if len(args) == 1 and hasattr(args[0], '__call__') and not kwargs: 55 | # we called @gui.plot (with no arguments nor brackets) 56 | return decorator(args[0]) 57 | 58 | # Otherwise, we have to parse args, as the decorator might have been called 59 | # with positional arguments... 60 | if len(args) > 3: 61 | raise SyntaxError('@gui.plot: 0 to 3 positional arguments expected, ' 62 | '%d received' % len(args)) 63 | 64 | if len(args) >= 1: 65 | position = args[0] 66 | if len(args) >= 2: 67 | xaxis = args[1] 68 | if len(args) == 3: 69 | yaxis = args[2] 70 | 71 | return decorator 72 | 73 | 74 | def get_func_attrs(func): 75 | """Return the function attributes for a function decorated with the decorators of 76 | this module: `attname, position, xaxis, yaxis`. Check for attname first: if empty 77 | string, the function is not a gui decorated function 78 | """ 79 | return getattr(func, '_s2s_att', ''), \ 80 | getattr(func, '_s2s_position', 'b'), \ 81 | getattr(func, '_s2s_xaxis', {}), \ 82 | getattr(func, '_s2s_yaxis', {}) 83 | -------------------------------------------------------------------------------- /stream2segment/process/gui/main.py: -------------------------------------------------------------------------------- 1 | """ 2 | Functions for launching the web app 3 | 4 | :date: Jun 20, 2016 5 | 6 | .. 
moduleauthor:: 7 | """ 8 | import uuid 9 | from webbrowser import open as open_in_browser 10 | import random 11 | import threading 12 | 13 | from flask import Flask 14 | 15 | from stream2segment.io import yaml_load 16 | from stream2segment.io.inputvalidation import validate_param, BadParam 17 | from stream2segment.process import get_default_segments_selection 18 | from stream2segment.process.inspectimport import load_source 19 | from stream2segment.process.db import get_session 20 | 21 | 22 | 23 | def show_gui(dburl, pyfile, configfile): 24 | """Show plots of the downloaded data in a dynamic web page on the system browser""" 25 | session, pymodule, config_dict, segments_selection = \ 26 | load_config_for_visualization(dburl, pyfile, configfile) 27 | run_in_browser(create_s2s_show_app(session, pymodule, config_dict, 28 | segments_selection)) 29 | return 0 30 | 31 | 32 | def load_config_for_visualization(dburl, pyfile=None, config=None): 33 | """Check visualization arguments and return a tuple of well-formed args. 34 | Raise :class:`BadParam` if any param is invalid 35 | """ 36 | # in the process and download routines, validation is implemented in a separate 37 | # inputvalidation.py module; here, for the moment, we keep it inline 38 | session = validate_param('dburl', dburl, get_session, scoped=True) 39 | pymodule = None if not pyfile else validate_param('pyfile', pyfile, load_source) 40 | config_dict = {} if not config else validate_param('configfile', config, yaml_load) 41 | seg_sel = get_default_segments_selection() 42 | # Add constraints on traces with gaps. This is not only to avoid plotting traces 43 | # with gaps, but also to show users an example of a segment selection expression. 44 | seg_sel['maxgap_numsamples'] = '[-0.5, 0.5]' 45 | 46 | return session, pymodule, config_dict, seg_sel 47 | 48 | 49 | def create_s2s_show_app(session, pymodule=None, config=None, segments_selection=None): 50 | """Create a new app for visualization. Note that `config` is the 51 | stream2segment GUI config, not the config passed to Flask via 52 | `app.config.from_pyfile`. 
53 | """ 54 | from stream2segment.process.gui import webapp 55 | # http://flask.pocoo.org/docs/0.12/patterns/appfactories/#basic-factories 56 | app = Flask(webapp.__name__) 57 | 58 | from stream2segment.process.gui.webapp.mainapp import core 59 | seg_count = core.init(app, session, pymodule, config, segments_selection) 60 | if seg_count < 1: 61 | raise ValueError('No plottable waveform found in the database') 62 | core.reset_segment_ids_array(seg_count) 63 | 64 | # Note that the template_folder of the Blueprint and the static paths in 65 | # the HTML are relative to the path of THIS MODULE, so the lines below must 66 | # be executed HERE (otherwise, all static paths in the HTML would have to change): 67 | from stream2segment.process.gui.webapp.mainapp.views import main_app 68 | app.register_blueprint(main_app) 69 | 70 | return app 71 | 72 | 73 | def run_in_browser(app, port=None, debug=False): 74 | app.config.update( 75 | ENV='development', # https://stackoverflow.com/a/53919435, 76 | # DEBUG = True, 77 | # although we do not use sessions (which write cookies client side), 78 | # we set a secret key nevertheless: 79 | # https://www.tutorialspoint.com/flask/flask_sessions.htm 80 | SECRET_KEY=str(uuid.uuid4()) 81 | ) 82 | if port is None: 83 | port = 5000 + random.randint(0, 999) 84 | url = "http://127.0.0.1:{0}".format(port) 85 | if not debug: 86 | threading.Timer(1.25, lambda: open_in_browser(url)).start() 87 | app.run(port=port, debug=debug) 88 | -------------------------------------------------------------------------------- /stream2segment/process/gui/webapp/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/stream2segment/process/gui/webapp/__init__.py -------------------------------------------------------------------------------- /stream2segment/process/gui/webapp/mainapp/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/stream2segment/process/gui/webapp/mainapp/__init__.py -------------------------------------------------------------------------------- /stream2segment/process/gui/webapp/mainapp/views.py: -------------------------------------------------------------------------------- 1 | """ 2 | Views for the web app (processing) 3 | 4 | :date: Jun 20, 2016 5 | 6 | .. 
moduleauthor:: Riccardo Zaccarelli 7 | """ 8 | from flask import (render_template, request, jsonify, Blueprint) 9 | from werkzeug.exceptions import HTTPException 10 | 11 | from stream2segment.process.gui.webapp.mainapp import core 12 | 13 | # http://flask.pocoo.org/docs/0.12/patterns/appfactories/#basic-factories: 14 | # Note that the template_folder and the static paths in the HTML are relative 15 | # to the path of the module WHERE we register this blueprint 16 | # (stream2segment.gui.main) 17 | 18 | 19 | main_app = Blueprint('main_app', __name__, template_folder='templates') 20 | 21 | 22 | @main_app.errorhandler(HTTPException) 23 | def handle_exception(e): 24 | """Return JSON instead of HTML for HTTP errors.""" 25 | import sys 26 | exc_i = sys.exc_info() 27 | return jsonify({ 28 | 'message': str(exc_i[1].__class__.__name__) + ": " + str(exc_i[1]), 29 | 'traceback': '' # do not provide it for the moment 30 | }), 500 31 | 32 | 33 | @main_app.route("/") 34 | def main(): 35 | ud_plots = core.userdefined_plots 36 | data = core.get_init_data(metadata=True, classes=True) 37 | classes = data['classes'] 38 | metadata = data['metadata'] 39 | r_plots = [{**p, 'name': n} for n, p in ud_plots.items() if p['position'] == 'r'] 40 | b_plots = [{**p, 'name': n} for n, p in ud_plots.items() if p['position'] == 'b'] 41 | pp_func = core.get_preprocess_function() 42 | pp_func_doc = core.get_func_doc(pp_func) 43 | pp_func_defined = pp_func not in (core._default_preprocessfunc, None) 44 | return render_template('mainapp.html', 45 | num_segments=len(core.g_segment_ids), 46 | title=core.get_db_url(safe=True), 47 | rightPlots=r_plots, 48 | bottomPlots=b_plots, 49 | metadata=metadata, 50 | classes=classes, 51 | preprocess_func_on=pp_func_defined, 52 | preprocessfunc_doc=pp_func_doc) 53 | 54 | 55 | @main_app.route("/get_config", methods=['POST']) 56 | def get_config(): 57 | asstr = (request.get_json() or {}).get('as_str', False) 58 | return jsonify(core.get_config(asstr)) 59 | 60 | 61 | @main_app.route("/get_selection", methods=['POST']) 62 | def get_selection(): 63 | return jsonify(core.get_select_conditions()) 64 | 65 | 66 | @main_app.route("/set_config", methods=['POST']) 67 | def set_config(): 68 | data = request.get_json() 69 | new_config = core.validate_config_str(data['data']) 70 | core.reset_global_vars(new_config, None) 71 | return jsonify(new_config) 72 | 73 | 74 | @main_app.route("/set_selection", methods=['POST']) 75 | def set_selection(): 76 | sel_conditions = request.get_json() or None 77 | # sel_conditions = None: do not update conditions but use the already loaded ones 78 | if sel_conditions: 79 | # remove space-only and empty strings in expressions: 80 | sel_conditions = {k: v for k, v in sel_conditions.items() if v and v.strip()} 81 | num_segments = core.get_segments_count(sel_conditions) 82 | if num_segments < 1: 83 | raise ValueError('No segment matching the current selection') 84 | core.reset_global_vars(None, sel_conditions) 85 | core.reset_segment_ids_array(num_segments) 86 | return jsonify(num_segments) 87 | 88 | 89 | @main_app.route("/get_segment_data", methods=['POST']) 90 | def get_segment_data(): 91 | """Return the response for the segment data (and/or metadata)""" 92 | data = request.get_json() 93 | seg_index = data['seg_index'] 94 | seg_count = data['seg_count'] 95 | seg_id = core.get_segment_id(seg_index, seg_count) 96 | plot_names = data.get('plot_names', {}) 97 | preprocessed = data.get('pre_processed', False) 98 | zooms = data.get('zooms', None) 99 | all_components = 
data.get('all_components', False) 100 | attributes = data.get('attributes', False) 101 | classes = data.get('classes', False) 102 | return jsonify(core.get_segment_data(seg_id, 103 | plot_names, all_components, 104 | preprocessed, zooms, 105 | attributes, classes)) 106 | 107 | 108 | @main_app.route("/set_class_id", methods=['POST']) 109 | def set_class_id(): 110 | data = request.get_json() 111 | seg_index = data['seg_index'] 112 | seg_count = data['seg_count'] 113 | seg_id = core.get_segment_id(seg_index, seg_count) 114 | return jsonify(core.set_class_id(seg_id, data['class_id'], data['value'])) 115 | 116 | -------------------------------------------------------------------------------- /stream2segment/process/gui/webapp/static/js/mainapp.js: -------------------------------------------------------------------------------- 1 | function setInfoMessage(msg){ 2 | var elm = document.getElementById('message-dialog'); 3 | elm.style.color = 'inherit'; 4 | elm.querySelector('.loader').style.display=''; 5 | elm.querySelector('.btn-close').style.display='none'; 6 | elm.querySelector('.message').innerHTML = msg || ""; 7 | setDivVisible(elm, !!msg); 8 | } 9 | 10 | function setErrorMessage(msg){ 11 | var elm = document.getElementById('message-dialog'); 12 | elm.style.color = 'red'; 13 | elm.querySelector('.loader').style.display='none'; 14 | elm.querySelector('.btn-close').style.display=''; 15 | elm.querySelector('.message').innerHTML = msg || ""; 16 | setDivVisible(elm, !!msg); 17 | } 18 | 19 | function isDivVisible(div){ 20 | if (typeof div === 'string') {div = document.getElementById(div); } 21 | return !div.classList.contains('d-none'); 22 | } 23 | 24 | function setDivVisible(div, value){ 25 | if (typeof div === 'string') {div = document.getElementById(div); } 26 | if (value){ 27 | div.classList.remove('d-none'); 28 | }else{ 29 | div.classList.add('d-none'); 30 | } 31 | } 32 | 33 | axios.interceptors.response.use((response) => { 34 | setInfoMessage(""); 35 | return response; 36 | }, (error) => { 37 | var msg = 'Internal Server Error'; 38 | var response = error.response; 39 | if(response.data && response.data.message){ 40 | msg = response.data.message.replaceAll("\n", "
"); 41 | if (response.data.traceback){ 42 | msg += "
Traceback: " + response.data.traceback + '
' 43 | } 44 | } 45 | setErrorMessage(msg); 46 | return Promise.reject(error.message); 47 | }); 48 | 49 | function setSegmentsSelection(inputElements){ 50 | setInfoMessage("Selecting segments ... (it might take a while for large databases)"); 51 | var segmentsSelection = {}; 52 | for(var att of Object.keys(inputElements)){ 53 | var val = inputElements[att].value; 54 | if (val && val.trim()){ 55 | segmentsSelection[att] = val; 56 | } 57 | } 58 | return axios.post("/set_selection", segmentsSelection, {headers: {'Content-Type': 'application/json'}}).then(response => { 59 | return response; 60 | }); 61 | } 62 | 63 | function getSegmentsSelection(inputElements){ 64 | // queries the current segments selection and puts the selection expressions into the given input elements 65 | return axios.post("/get_selection", {}, {headers: {'Content-Type': 'application/json'}}).then(response => { 66 | for(var attname of Object.keys(inputElements)){ 67 | inputElements[attname].value = response.data[attname] || ""; 68 | inputElements[attname].dispatchEvent(new Event("input")); // notify listeners 69 | } 70 | return response; 71 | }); 72 | } 73 | 74 | function get_segment_data(segmentIndex, segmentsCount, plots, tracesArePreprocessed, mainPlotShowsAllComponents, 75 | attrElements, classElements, descElement){ 76 | /** 77 | * Main function to update the GUI from a given segment. 78 | * plots: Array of 3-elements Arrays, where the 3 elements are: 79 | * [Python function name (string), destination
<div> id (string), plotly layout (Object)] 80 | * tracesArePreprocessed: boolean denoting if the traces should be pre-processed 81 | * mainPlotShowsAllComponents: boolean denoting if the main trace should plot all 3 components / orientations 82 | * attrElements: Object of segment attributes (string) mapped to the HTML element whose 83 | * innerHTML should be set to the relative segment attr value (each element innerHTML is assumed 84 | * to be empty). If null / undefined, segment 85 | * attr are not fetched and nothing is set 86 | * classElements: Object of DB classes ids (integer) mapped to the input[type=checkbox] 87 | * element whose checked state should be set true or false depending on whether the segment 88 | * has the relative class label assigned or not (each input.checked property is assumed to be false). 89 | * If null / undefined, segment classes are not fetched and nothing happens 90 | * this method returns a Promise with argument an Object of metadata (e.g. 'id', 'event.latitude') 91 | * mapped to their value. The key 'class.id' is mapped to an Array of ids. If attrElements and 92 | * classElements are null, the returned Object is empty 93 | */ 94 | var funcName2ID = {}; 95 | var funcName2Layout = {}; 96 | for (var [fName, divId, layout] of plots){ 97 | funcName2ID[fName] = divId; 98 | funcName2Layout[fName] = layout; 99 | } 100 | var params = { 101 | seg_index: segmentIndex, 102 | seg_count: segmentsCount, 103 | pre_processed: tracesArePreprocessed, 104 | zooms: null, // not used 105 | plot_names: Object.keys(funcName2ID), 106 | all_components: mainPlotShowsAllComponents, 107 | attributes: !!attrElements, 108 | classes: !!classElements 109 | } 110 | 111 | setInfoMessage("Fetching and computing data (it might take a while) ..."); 112 | return axios.post("/get_segment_data", params, {headers: {'Content-Type': 'application/json'}}).then(response => { 113 | for (var name of Object.keys(response.data.plotData)){ 114 | var data = response.data.plotData[name]; 115 | var layout = Object.assign({}, funcName2Layout[name], response.data.plotLayout[name] || {}); 116 | redrawPlot(funcName2ID[name], data, layout); 117 | } 118 | var ret = {}; 119 | // update metadata if needed: 120 | if (attrElements){ 121 | for (var att of response.data.attributes){ 122 | attrElements[att.label].innerHTML = att.value; 123 | ret[att.label] = att.value; 124 | } 125 | if (descElement){ 126 | descElement.innerHTML = response.data.description 127 | } 128 | } 129 | ret['class.id'] = []; 130 | // update classes if needed: 131 | if (classElements){ 132 | for (var classId of response.data.classes){ 133 | ret['class.id'].push(classId); 134 | classElements[classId].checked=true; 135 | } 136 | } 137 | return ret; 138 | }); 139 | } 140 | 141 | function getPageFontInfo(){ 142 | var style = window.getComputedStyle(document.body); 143 | var fsize = parseFloat(style.getPropertyValue('font-size')); 144 | var ffamily = style.getPropertyValue('font-family'); 145 | return { 146 | 'size': isNaN(fsize) ? 
15 : fsize, 147 | 'family': ffamily || 'sans-serif' 148 | } 149 | } 150 | 151 | function redrawPlot(divId, plotlyData, plotlyLayout){ 152 | var div = document.getElementById(divId); 153 | var initialized = !!div.layout; 154 | var font = getPageFontInfo(); 155 | var _ff = window.getComputedStyle(document.body).getPropertyValue('font-family'); 156 | var layout = { // set default layout (and merge later with plotlyLayout, if given) 157 | margin:{'l': 10, 't':10, 'b':10, 'r':10}, 158 | pad: 0, 159 | autosize: true, 160 | paper_bgcolor: 'rgba(0,0,0,0)', 161 | font: font, 162 | xaxis: { 163 | autorange: true, 164 | automargin: true, 165 | tickangle: 0, 166 | linecolor: '#aaa', 167 | linewidth: 1, 168 | mirror: true 169 | }, 170 | yaxis: { 171 | autorange: true, 172 | automargin: true, 173 | linecolor: '#aaa', 174 | linewidth: 1, 175 | mirror: true 176 | //fixedrange: true 177 | }, 178 | annotations: [], 179 | legend: { 180 | xanchor:'right', 181 | font: { 182 | size: font.size *.9, 183 | family: font.family, 184 | }, 185 | x:0.99 186 | } 187 | }; 188 | // deep merge plotlyLayout into layout 189 | var objs = [[plotlyLayout, layout]]; // [src, dest] 190 | while (objs.length){ 191 | var [src, dest] = objs.shift(); // remove 1st element 192 | Object.keys(src).forEach(key => { 193 | if ((typeof src[key] === 'object') && (typeof dest[key] === 'object')){ 194 | objs.push([src[key], dest[key]]); 195 | }else{ 196 | dest[key] = src[key]; 197 | } 198 | }) 199 | } 200 | // if data is a string, put it as message: 201 | if (typeof plotlyData === 'string'){ 202 | layout.annotations || (layout.annotations = []); 203 | layout.annotations.push({ 204 | xref: 'paper', 205 | yref: 'paper', 206 | x: 0.5, // 0.01, 207 | xanchor: 'center', 208 | y: 0.5, //.98, 209 | yanchor: 'middle', 210 | text: plotlyData.replace("\n", "
"), 211 | showarrow: false, 212 | bordercolor: '#ffffff', // '#c7c7c7', 213 | bgcolor: '#C0392B', 214 | font: { 215 | size: font.size *.9, 216 | family: font.family, 217 | color: '#FFFFFF' 218 | } 219 | }); 220 | plotlyData = []; 221 | } 222 | // plot (use plotly react if the plot is already set cause it's faster than newPlot): 223 | if (!initialized){ 224 | var config = { 225 | displaylogo: false, 226 | showLink: false, 227 | modeBarButtonsToRemove: ['sendDataToCloud'] 228 | }; 229 | Plotly.newPlot(div, plotlyData, layout, config); 230 | }else{ 231 | Plotly.react(div, plotlyData, layout); 232 | } 233 | } 234 | 235 | function setConfig(aceEditor){ 236 | // query config and show form only upon successful response: 237 | return axios.post("/get_config", {as_str: true}, {headers: {'Content-Type': 'application/json'}}).then(response => { 238 | aceEditor.setValue(response.data); 239 | aceEditor.clearSelection(); 240 | return response; 241 | }); 242 | } 243 | -------------------------------------------------------------------------------- /stream2segment/process/gui/webapp/templates/base.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | {{ title }} 5 | {% block head_meta %} 6 | 7 | 8 | {% endblock %} 9 | 10 | 20 | {% block head_scripts %} 21 | {% if use_axios %} 22 | 23 | {% endif %} 24 | {% if use_plotly %} 25 | 26 | {% endif %} 27 | {% if use_leaflet %} 28 | 29 | {% endif %} 30 | {% endblock %} 31 | {% block head_stylesheets %} 32 | {% if use_leaflet %} 33 | 34 | {% endif %} 35 | {% if use_bootstrap_css %} 36 | 37 | {% endif %} 38 | {% endblock %} 39 | 40 | 41 | 42 | {% block body_content %} 43 | {% endblock %} 44 | 45 | 46 | {% block after_body %} 47 | {% endblock %} 48 | 49 | -------------------------------------------------------------------------------- /stream2segment/process/inspectimport.py: -------------------------------------------------------------------------------- 1 | """ 2 | inspect+importlib functions for stream2segment 3 | 4 | March 22, 2020 5 | 6 | .. moduleauthor:: 7 | """ 8 | import os 9 | import importlib.util 10 | import inspect 11 | 12 | 13 | def _getmodulename(pyfilepath): 14 | """Return a (most likely) unique module name for a python source file 15 | loaded as a module 16 | """ 17 | # In both python2 and 3, the builtin function importing a module from file 18 | # needs two arguments, a 'file path' and a 'name'. It's not clear why the 19 | # letter is necessary and does not default to, e.g., the filepath 's name. 20 | # We build the name here following these conventions: 21 | # 1. The name must be UNIQUE: otherwise when importing the second file the 22 | # module of the former is actually returned 23 | # 2. Names should NOT contain dots, as otherwise a 24 | # `RuntimeWarning: Parent module ... not found` is issued. 
25 | return os.path.abspath(os.path.realpath(pyfilepath)).replace(".", "_dot_").\ 26 | replace(os.path.sep, "_pathsep_") 27 | # note above: os.path.sep returns '/' on mac, os.pathsep returns ':' 28 | 29 | 30 | def load_source(pyfilepath): 31 | """Load a source python file and return it""" 32 | name = _getmodulename(pyfilepath) 33 | spec = importlib.util.spec_from_file_location(name, pyfilepath) # noqa 34 | mod_ = importlib.util.module_from_spec(spec) # noqa 35 | spec.loader.exec_module(mod_) 36 | return mod_ 37 | 38 | 39 | def is_mod_function(pymodule, func, include_classes=False): 40 | """Return True if the python function `func` is a function (or class if 41 | `include_classes` is True) defined (and not imported) in the Python 42 | module `pymodule` 43 | """ 44 | is_candidate = inspect.isfunction(func) or \ 45 | (include_classes and inspect.isclass(func)) 46 | # check that the source file is the module (i.e. not imported). NOTE that 47 | # getsourcefile might raise (not the case for functions or classes) 48 | return is_candidate and os.path.abspath(inspect.getsourcefile(pymodule)) == \ 49 | os.path.abspath(inspect.getsourcefile(func)) 50 | 51 | 52 | def iterfuncs(pymodule, include_classes=False): 53 | """Return an iterator over all functions (or classes if `include_classes` 54 | is True) defined (and not imported) in the given python module `pymodule` 55 | """ 56 | for func in pymodule.__dict__.values(): 57 | if is_mod_function(pymodule, func, include_classes): 58 | yield func 59 | 60 | 61 | -------------------------------------------------------------------------------- /stream2segment/process/log.py: -------------------------------------------------------------------------------- 1 | """ 2 | Log utilities for the process routine 3 | """ 4 | import logging 5 | import sys 6 | 7 | from stream2segment.io.log import LevelFilter 8 | 9 | 10 | def configlog4processing(logger, logfile_path='', verbose=False): 11 | """Configure the logger, setting it to an `INFO` level with a list of 12 | default handlers: 13 | 14 | - If `logfile_path` is given (not empty), a :class:`logging.FileHandler` ( 15 | streaming to that file) will capture all messages of at least level INFO 16 | (e.g., INFO, WARNING, ERROR). 17 | See :func:`logfilepath` if you want to create automatically a log file 18 | path in the same directory of a given processing file. 19 | 20 | - If `verbose` = True, a :class:`StreamHandler` (streaming to standard 21 | output) will capture ONLY messages of level INFO (20), ERROR (40) and 22 | CRITICAL (50), ideal for showing relevant information to the user on a 23 | terminal 24 | 25 | The returned list can thus contain 0, 1 or 2 handlers depending on the 26 | arguments. 
27 | 28 | Implementation detail: this method modifies these values for performance 29 | reasons: 30 | ``` 31 | logging._srcfile = None 32 | logging.logThreads = 0 33 | logging.logProcesses = 0 34 | ``` 35 | 36 | :return: a list of handlers added to the logger 37 | """ 38 | # https://docs.python.org/2/howto/logging.html#optimization: 39 | logging._srcfile = None # pylint: disable=protected-access 40 | logging.logThreads = 0 41 | logging.logProcesses = 0 42 | 43 | logger.setLevel(logging.INFO) # necessary to forward to handlers 44 | handlers = [] 45 | if logfile_path: 46 | handlers.append(logging.FileHandler(logfile_path, mode='w')) 47 | if verbose: 48 | sysout_streamer = logging.StreamHandler(sys.stdout) 49 | sysout_streamer.setFormatter(logging.Formatter('%(message)s')) 50 | # configure the levels we want to print (20: info, 40: error, 50: critical) 51 | l_filter = LevelFilter((20, 40, 50)) 52 | sysout_streamer.addFilter(l_filter) 53 | # set minimum level (for safety): 54 | sysout_streamer.setLevel(min(l_filter.levels)) 55 | handlers.append(sysout_streamer) 56 | 57 | for hand in handlers: 58 | logger.addHandler(hand) 59 | return handlers -------------------------------------------------------------------------------- /stream2segment/resources/__init__.py: -------------------------------------------------------------------------------- 1 | from os import listdir 2 | from os.path import abspath, dirname, join, splitext 3 | 4 | PATH = abspath(dirname(__file__)) 5 | 6 | 7 | def get_resource_abspath(*paths): 8 | """Return the resource file as the concatenation of this module path (resource 9 | directory) and any members of `*paths`. 10 | Same as `os.path.join(resource_directory, *paths)` 11 | 12 | :param paths: filename(s) relative to the resource directory 13 | """ 14 | return join(PATH, *paths) 15 | 16 | 17 | def get_ttable_fpath(basename): 18 | """Return the file for the given travel times table 19 | 20 | :param basename: the file name (with or without extension) located under 21 | `get_traveltimestables_dirpath()` 22 | """ 23 | if not splitext(basename)[1]: 24 | basename += ".npz" 25 | return join(get_resource_abspath("traveltimes"), basename) 26 | 27 | 28 | def get_templates_fpaths(*filenames): 29 | """Return the template file paths with given filename(s) inside the package 30 | `templates` of the `resource` directory. If filenames is empty (no 31 | arguments), returns all files (no dir) in the `templates` directory 32 | 33 | :param filenames: a list of file names relative to the templates directory. 
34 | With no argument,returns all valid files inside that directory 35 | """ 36 | templates_path = get_resource_abspath("templates") 37 | if not filenames: 38 | filenames = listdir(templates_path) 39 | 40 | return list(join(templates_path, _name) for _name in filenames) 41 | 42 | 43 | def get_templates_fpath(filename): 44 | """Return the template file path with given filename inside the package 45 | `templates` of the `resource` directory 46 | 47 | :param filename: a filename relative to the templates directory 48 | """ 49 | return get_templates_fpaths(filename)[0] -------------------------------------------------------------------------------- /stream2segment/resources/program_version: -------------------------------------------------------------------------------- 1 | 3.3.0 -------------------------------------------------------------------------------- /stream2segment/resources/templates/__init__.py: -------------------------------------------------------------------------------- 1 | """This module holds doc strings to be injected via jinja2 into the templates when 2 | running `s2s init`. 3 | Any NON-PRIVATE variable name (i.e., without leading underscore '_') of this module 4 | can be injected in a template file in the usual way, e.g.: 5 | {{ PROCESS_PY_BANDPASSFUNC }} 6 | 7 | .. moduleauthor:: Riccardo Zaccarelli 8 | """ 9 | from stream2segment.download.modules.utils import EVENTWS_MAPPING 10 | # DO NOT REMOVE IMPORT BELOW, IT IS USED IN TEMPLATES: 11 | from stream2segment.process.writers import SEGMENT_ID_COLNAME 12 | 13 | _WIKI_BASE_URL = 'https://github.com/rizac/stream2segment/wiki' 14 | 15 | USING_S2S_IN_YOUR_PYTHON_CODE_WIKI_URL = \ 16 | _WIKI_BASE_URL + '/using-stream2segment-in-your-python-code' 17 | 18 | THE_SEGMENT_OBJECT_WIKI_URL = _WIKI_BASE_URL + '/the-segment-object' 19 | 20 | THE_SEGMENT_OBJECT_ATTRS_AND_METHS = \ 21 | THE_SEGMENT_OBJECT_WIKI_URL + '#attributes-and-methods' 22 | 23 | THE_SEGMENT_OBJECT_WIKI_URL_SEGMENT_SELECTION = \ 24 | THE_SEGMENT_OBJECT_WIKI_URL + '#segments-selection' 25 | 26 | 27 | PROCESS_PY_BANDPASSFUNC = """ 28 | Apply a pre-process on the given segment waveform by filtering the signal and 29 | removing the instrumental response. 30 | 31 | This function is used for processing (see `main` function) and visualization 32 | (see the `@gui.preprocess` decorator and its documentation above) 33 | 34 | The steps performed are: 35 | 1. Sets the max frequency to 0.9 of the Nyquist frequency (sampling rate /2) 36 | (slightly less than Nyquist seems to avoid artifacts) 37 | 2. Offset removal (subtract the mean from the signal) 38 | 3. Tapering 39 | 4. Pad data with zeros at the END in order to accommodate the filter transient 40 | 5. Apply bandpass filter, where the lower frequency is magnitude dependent 41 | 6. Remove padded elements 42 | 7. Remove the instrumental response 43 | 44 | IMPORTANT: This function modifies the segment stream in-place: further calls to 45 | `segment.stream()` will return the pre-processed stream. During visualization, this 46 | is not an issue because Stream2segment always caches a copy of the raw trace. 47 | During processing (see `main` function) you need to be more careful: if needed, you 48 | can store the raw stream beforehand (`raw_trace=segment.stream().copy()`) or reload 49 | the segment stream afterwards with `segment.stream(reload=True)`. 
50 | 51 | :return: a Trace object (a Stream is also a valid value for functions decorated with 52 | `@gui.preprocess`) 53 | """ 54 | 55 | 56 | DOWNLOAD_EVENTWS_LIST = '\n'.join('%s"%s": %s' % ('# ' if i > 0 else '', str(k), str(v)) 57 | for i, (k, v) in enumerate(EVENTWS_MAPPING.items())) 58 | 59 | # setting up DOCVARS: 60 | DOCVARS = {k: v.strip() for k, v in globals().items() 61 | if hasattr(v, 'strip') and not k.startswith('_')} 62 | -------------------------------------------------------------------------------- /stream2segment/resources/templates/create_wiki.py: -------------------------------------------------------------------------------- 1 | """Script module generating wiki pages from the current Jupyter notebooks 2 | 3 | WARNING: This script module is not used. The generation of the wiki pages is issued by 4 | means of normal commands on the terminal (see the "Updating wiki" section in the README). 5 | """ 6 | import os 7 | from os.path import isdir 8 | 9 | import nbformat 10 | from nbconvert.preprocessors import ExecutePreprocessor 11 | 12 | from stream2segment.resources import get_resource_abspath 13 | 14 | for fle_ in ['jupyter.example.ipynb', 'the-segment-object.ipynb']: 15 | fle = get_resource_abspath('templates', fle_) 16 | with open(fle) as f: 17 | nb = nbformat.read(f, as_version=4) 18 | ep = ExecutePreprocessor(timeout=600) # , kernel_name='python3') 19 | cwd = os.path.dirname(fle) 20 | ep.preprocess(nb, {'metadata': {'path': cwd}}) 21 | 22 | if __name__ == "__main__": 23 | import sys 24 | argv = sys.argv 25 | if len(argv) < 2: 26 | print('Please provide the directory of the stream2segment wiki git repo') 27 | sys.exit(1) 28 | repo = argv[1] 29 | if not isdir(repo): 30 | print('"%s" is not an existing directory' % repo) 31 | sys.exit(1) 32 | -------------------------------------------------------------------------------- /stream2segment/resources/templates/example.db.sqlite: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/stream2segment/resources/templates/example.db.sqlite -------------------------------------------------------------------------------- /stream2segment/resources/templates/gui.yaml: -------------------------------------------------------------------------------- 1 | # ======================================================================= 2 | # Stream2segment configuration file / visualization routine (Browser GUI) 3 | # ======================================================================= 4 | # 5 | # This is a configuration file for all gui-decorated functions in the 6 | # associated Python module (generated via `s2s init`) 7 | # 8 | # This file is written in YAML syntax. For info see: http://docs.ansible.com/ansible/latest/YAMLSyntax.html 9 | 10 | 11 | # settings for computing the 'signal' and 'noise' time windows on a segment waveform 12 | sn_windows: 13 | # set the separator between noise part and signal part, in seconds relative to each 14 | # segment arrival time: 15 | arrival_time_shift: -2.0 16 | # define the signal window [start, end] within the signal part (see above).
Each number 17 | # is given as ratio of the total signal sum of squares (so 0.1 is the time when the 18 | # signal cumulative reaches 10% of the total) 19 | signal_window: [0.1, 0.9] 20 | 21 | # settings for the sn (signal-to-noise) spectra implemented in the associated python module 22 | sn_spectra: 23 | taper: 24 | max_percentage: 0.05 25 | type: 'hann' 26 | smoothing_wlen_ratio: 0.05 # 0 for no smoothing 27 | type: 'amp' # if 'pow', then power spectra are computed, otherwise if 'amp', amplitude spectra are computed 28 | 29 | # settings for the pre-process function implemented in the associated python module 30 | preprocess: 31 | remove_response_water_level: 60 32 | remove_response_output: 'ACC' 33 | bandpass_freq_max: 30 # the max frequency, in Hz: 34 | bandpass_max_nyquist_ratio: 0.9 35 | bandpass_corners: 2 36 | 37 | # settings for the wood-anderson implemented in the associated python module 38 | paz_wa: 39 | sensitivity: 2800 40 | zeros: 41 | - '0j' 42 | poles: 43 | - '-6.2832-4.7124j' 44 | - '-6.2832+4.7124j' 45 | gain: 1 46 | 47 | # savitzky_golay: 48 | savitzky_golay: 49 | wsize: 31 # window size in pts 50 | order: 4 # polynomial order to use to fit and smooth data 51 | deriv: 2 # the derivative (1st, second, ...) 52 | -------------------------------------------------------------------------------- /stream2segment/resources/templates/paramtable.yaml: -------------------------------------------------------------------------------- 1 | # ====================================================== 2 | # Stream2segment configuration file / processing routine 3 | # ====================================================== 4 | # 5 | # This is a configuration file for the stream2segment `imap` and `process` functions. 6 | # See associated Python module (generated via `s2s init`) for details 7 | # 8 | # This file is written in YAML syntax. For info see: http://docs.ansible.com/ansible/latest/YAMLSyntax.html 9 | 10 | 11 | # settings for computing the 'signal' and 'noise' time windows on a segment waveform 12 | sn_windows: 13 | # set the separator between noise part and signal part, in seconds relative to each 14 | # segment arrival time: 15 | arrival_time_shift: -2.0 16 | # define the signal window [start, end] within the signal part (see above). Each number 17 | # is given as ratio of the total signal sum of squares (so 0.1 is the time when the 18 | # signal cumulative reaches 10% of the total) 19 | signal_window: [0.1, 0.9] 20 | 21 | # settings for the sn (signal-to-noise) spectra implemented in the associated python module 22 | sn_spectra: 23 | taper: 24 | max_percentage: 0.05 25 | type: 'hann' 26 | smoothing_wlen_ratio: 0.05 # 0 for no smoothing 27 | type: 'amp' # if 'pow', then power spectra are computed, otherwise if 'amp', amplitude spectra are computed 28 | 29 | # settings for the pre-process function implemented in the associated python module 30 | preprocess: 31 | remove_response_water_level: 60 32 | remove_response_output: 'ACC' 33 | bandpass_freq_max: 30 # the max frequency, in Hz: 34 | bandpass_max_nyquist_ratio: 0.9 35 | bandpass_corners: 2 36 | 37 | # settings for the wood-anderson implemented in the associated python module 38 | paz_wa: 39 | sensitivity: 2800 40 | zeros: 41 | - '0j' 42 | poles: 43 | - '-6.2832-4.7124j' 44 | - '-6.2832+4.7124j' 45 | gain: 1 46 | 47 | # savitzky_golay: 48 | savitzky_golay: 49 | wsize: 31 # window size in pts 50 | order: 4 # polynomial order to use to fit and smooth data 51 | deriv: 2 # the derivative (1st, second, ...) 
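# Note: like everything else in this file, the parameters above are made available
# to the processing function through its `config` dict argument; e.g., with this
# template, config['savitzky_golay']['wsize'] would be 31 (see the associated
# Python module)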
52 | 53 | # thresholds for the multievent (heuristic) algorithm 54 | multievent_thresholds: 55 | inside_tmin_tmax_inpercent: 0.90 56 | inside_tmin_tmax_insec: 10.0 57 | after_tmax_inpercent: 0.10 58 | 59 | # other custom parameters used in the associated python module 60 | amp_ratio_threshold: 0.8 61 | snr_threshold: 3 62 | freqs_interp: 63 | - 0.1 64 | - 0.106365 65 | - 0.113136 66 | - 0.120337 67 | - 0.127997 68 | - 0.136145 69 | - 0.144811 70 | - 0.154028 71 | - 0.163833 72 | - 0.174261 73 | - 0.185354 74 | - 0.197152 75 | - 0.209701 76 | - 0.22305 77 | - 0.237248 78 | - 0.252349 79 | - 0.268412 80 | - 0.285497 81 | - 0.30367 82 | - 0.323 83 | - 0.34356 84 | - 0.365429 85 | - 0.388689 86 | - 0.413431 87 | - 0.439747 88 | - 0.467739 89 | - 0.497512 90 | - 0.52918 91 | - 0.562864 92 | - 0.598692 93 | - 0.636801 94 | - 0.677336 95 | - 0.72045 96 | - 0.766309 97 | - 0.815088 98 | - 0.866971 99 | - 0.922156 100 | - 0.980855 101 | - 1.04329 102 | - 1.1097 103 | - 1.18033 104 | - 1.25547 105 | - 1.33538 106 | - 1.42038 107 | - 1.5108 108 | - 1.60696 109 | - 1.70925 110 | - 1.81805 111 | - 1.93378 112 | - 2.05687 113 | - 2.18779 114 | - 2.32705 115 | - 2.47518 116 | - 2.63273 117 | - 2.80031 118 | - 2.97856 119 | - 3.16816 120 | - 3.36982 121 | - 3.58432 122 | - 3.81248 123 | - 4.05516 124 | - 4.31328 125 | - 4.58784 126 | - 4.87987 127 | - 5.19049 128 | - 5.52088 129 | - 5.8723 130 | - 6.24609 131 | - 6.64368 132 | - 7.06657 133 | - 7.51638 134 | - 7.99483 135 | - 8.50372 136 | - 9.04501 137 | - 9.62076 138 | - 10.2332 139 | - 10.8845 140 | - 11.5774 141 | - 12.3143 142 | - 13.0982 143 | - 13.9319 144 | - 14.8187 145 | - 15.762 146 | - 16.7653 147 | - 17.8324 148 | - 18.9675 149 | - 20.1749 150 | - 21.4591 151 | - 22.825 152 | - 24.2779 153 | - 25.8233 154 | - 27.467 155 | - 29.2154 156 | - 31.075 157 | - 33.0531 158 | - 35.157 159 | - 37.3949 160 | - 39.7752 161 | - 42.307 162 | - 45. 163 | -------------------------------------------------------------------------------- /stream2segment/resources/templates/save2fs.py: -------------------------------------------------------------------------------- 1 | """ 2 | Stream2segment processing+visualization module saving raw and pre-processed segments 3 | from the database on the file system. 4 | 5 | {{ PROCESS_PY_MAIN }} 6 | """ 7 | import os 8 | # OrderedDict is a python dict that returns its keys in the order they are inserted 9 | # (a normal python dict returns its keys in arbitrary order) 10 | # Useful e.g. in "main" if we want to control the *order* of the columns in the output csv 11 | from collections import OrderedDict 12 | from datetime import datetime, timedelta # always useful 13 | from math import factorial # for savitzky_golay function 14 | 15 | # import numpy for efficient computation: 16 | import numpy as np 17 | # import obspy core classes (when working with times, use obspy UTCDateTime when possible): 18 | from obspy import Trace, Stream, UTCDateTime 19 | from obspy.geodetics import degrees2kilometers as d2km 20 | # decorators needed to setup this module @gui.sideplot, @gui.preprocess @gui.customplot: 21 | from stream2segment.process import gui, SkipSegment 22 | # strem2segment functions for processing obspy Traces. 
This is just a list of possible functions 23 | # to show how to import them: 24 | from stream2segment.process.funclib.traces import bandpass, cumsumsq,\ 25 | fft, ampspec, powspec, timeof 26 | # stream2segment functions for processing numpy arrays: 27 | from stream2segment.process.funclib.ndarrays import triangsmooth, snr 28 | 29 | 30 | def assert1trace(stream): 31 | """asserts the stream has only one trace, raising an Exception if it's not the case, 32 | as this is the pre-condition for all processing functions implemented here. 33 | Note that, due to the way we download data, a stream with more than one trace is 34 | most likely due to gaps / overlaps""" 35 | # stream.get_gaps() is slower as it does more than checking the stream length 36 | if len(stream) != 1: 37 | raise SkipSegment("%d traces (probably gaps/overlaps)" % len(stream)) 38 | 39 | 40 | def main(segment, config): 41 | """{{ PROCESS_PY_MAINFUNC | indent }} 42 | """ 43 | stream = segment.stream() 44 | assert1trace(stream) # raise and return if stream has more than one trace 45 | trace = stream[0] # work with the (surely) one trace now 46 | 47 | # discard saturated signals (according to the threshold set in the config file): 48 | amp_ratio = np.true_divide(np.nanmax(np.abs(trace.data)), 2**23) 49 | if amp_ratio >= config['amp_ratio_threshold']: 50 | raise SkipSegment('possibly saturated (amplitude ratio exceeds threshold)') 51 | 52 | original_trace = trace.copy() # keep track of the original mseed 53 | 54 | # bandpass the trace, according to the event magnitude. 55 | # WARNING: this modifies the segment.stream() permanently! 56 | # If you want to preserve the original stream, store trace.copy() beforehand. 57 | # Also, use a try/except: sometimes Inventories are corrupted and obspy raises 58 | # a TypeError, which would break the WHOLE processing execution. 59 | # Raising a SkipSegment will stop the execution of the currently processed 60 | # segment only (logging the error message): 61 | try: 62 | processed_trace = bandpass_remresp(segment, config) 63 | except TypeError as type_error: 64 | raise SkipSegment("Error in 'bandpass_remresp': %s" % str(type_error)) 65 | 66 | stream_path = segment.sds_path(config['root_dir']) 67 | basedir = os.path.dirname(stream_path) 68 | path1 = stream_path + ".s2s.raw.mseed" 69 | path2 = stream_path + ".s2s.processed.mseed" 70 | # ensure directories exist, making all intermediate paths if needed: 71 | if not os.path.exists(basedir): 72 | os.makedirs(basedir) 73 | original_trace.write(path1, format='MSEED') 74 | processed_trace.write(path2, format='MSEED') 75 | 76 | 77 | @gui.preprocess 78 | def bandpass_remresp(segment, config): 79 | """{{ PROCESS_PY_BANDPASSFUNC | indent }} 80 | """ 81 | stream = segment.stream() 82 | assert1trace(stream) # raise and return if stream has more than one trace 83 | trace = stream[0] 84 | 85 | inventory = segment.inventory() 86 | 87 | # define some parameters: 88 | evt = segment.event 89 | conf = config['preprocess'] 90 | # note: with copy=False, the bandpass call below modifies the trace in-place! 
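# (mag2freq, defined at the end of this module, maps the event magnitude to the
# bandpass lower corner frequency: e.g. a magnitude-5.0 event yields 0.2 Hz)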
91 | trace = bandpass(trace, mag2freq(evt.magnitude), freq_max=conf['bandpass_freq_max'], 92 | max_nyquist_ratio=conf['bandpass_max_nyquist_ratio'], 93 | corners=conf['bandpass_corners'], copy=False) 94 | trace.remove_response(inventory=inventory, output=conf['remove_response_output'], 95 | water_level=conf['remove_response_water_level']) 96 | return trace 97 | 98 | 99 | def mag2freq(magnitude): 100 | '''returns a frequency in Hz from a given magnitude''' 101 | if magnitude <= 4.5: 102 | freq_min = 0.4 103 | elif magnitude <= 5.5: 104 | freq_min = 0.2 105 | elif magnitude <= 6.5: 106 | freq_min = 0.1 107 | else: 108 | freq_min = 0.05 109 | return freq_min 110 | -------------------------------------------------------------------------------- /stream2segment/resources/templates/save2fs.yaml: -------------------------------------------------------------------------------- 1 | # {{ PROCESS_YAML_MAIN }} 2 | 3 | # {{ PROCESS_YAML_SEGMENTSELECT }} 4 | segments_selection: 5 | has_data: 'true' 6 | maxgap_numsamples: '[-0.5, 0.5]' 7 | # missing_data_sec: '<120' 8 | # missing_data_ratio: '<0.5' 9 | # id: '<300' 10 | # event.time: "(2014-01-01T00:00:00, 2014-12-31T23:59:59)" 11 | # event.latitude: "[24, 70]" 12 | # event.longitude: "[-11, 24]" 13 | 14 | # {{ PROCESS_YAML_SNWINDOWS }} 15 | sn_windows: 16 | arrival_time_shift: -2.0 # programmatically shifts the arrival time for every segment (in seconds) 17 | signal_window: [0.1, 0.9] # either a number (in seconds) or interval relative to the % of the cumulative 18 | 19 | # settings for the pre-process function implemented in the associated python module 20 | preprocess: 21 | remove_response_water_level: 60 22 | remove_response_output: 'ACC' 23 | bandpass_freq_max: 30 # the max frequency, in Hz: 24 | bandpass_max_nyquist_ratio: 0.9 25 | bandpass_corners: 2 26 | 27 | # the output root path where to store the files used in the associated python module: 28 | root_dir: '/var/tmp' 29 | 30 | # other custom parameters used in the associated python module: 31 | amp_ratio_threshold: 0.8 32 | 33 | # {{ PROCESS_YAML_ADVANCEDSETTINGS }} 34 | -------------------------------------------------------------------------------- /stream2segment/resources/traveltimes/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/stream2segment/resources/traveltimes/__init__.py -------------------------------------------------------------------------------- /stream2segment/resources/traveltimes/ak135_ttp+.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/stream2segment/resources/traveltimes/ak135_ttp+.npz -------------------------------------------------------------------------------- /stream2segment/resources/traveltimes/ak135_tts+.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/stream2segment/resources/traveltimes/ak135_tts+.npz -------------------------------------------------------------------------------- /stream2segment/resources/traveltimes/iasp91_ttp+.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/stream2segment/resources/traveltimes/iasp91_ttp+.npz 
-------------------------------------------------------------------------------- /stream2segment/resources/traveltimes/iasp91_tts+.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/stream2segment/resources/traveltimes/iasp91_tts+.npz -------------------------------------------------------------------------------- /stream2segment/traveltimes/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/stream2segment/traveltimes/__init__.py -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /tests/data/20091217_231838.FR.ESCA.00.HHZ.SAC: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/data/20091217_231838.FR.ESCA.00.HHZ.SAC -------------------------------------------------------------------------------- /tests/data/BS.*.*.*.2016-06-05.21:05-09.47.mseed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/data/BS.*.*.*.2016-06-05.21:05-09.47.mseed -------------------------------------------------------------------------------- /tests/data/GE.FLT1..HH?.mseed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/data/GE.FLT1..HH?.mseed -------------------------------------------------------------------------------- /tests/data/IA.BAKI..BHZ.D.2016.004.head: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/data/IA.BAKI..BHZ.D.2016.004.head -------------------------------------------------------------------------------- /tests/data/ak135_ttp+_10.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/data/ak135_ttp+_10.npz -------------------------------------------------------------------------------- /tests/data/ak135_ttp+_5.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/data/ak135_ttp+_5.npz -------------------------------------------------------------------------------- /tests/data/ak135_tts+_10.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/data/ak135_tts+_10.npz -------------------------------------------------------------------------------- /tests/data/ak135_tts+_5.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/data/ak135_tts+_5.npz 
-------------------------------------------------------------------------------- /tests/data/channel_query_response.txt: -------------------------------------------------------------------------------- 1 | #Network|Station|Location|Channel|Latitude|Longitude|Elevation|Depth|Azimuth|Dip|SensorDescription|Scale|ScaleFreq|ScaleUnits|SampleRate|StartTime|EndTime 2 | 1G|SA03||BHE|70.5034|29.06682|272.0|0.0|90.0|0.0|GFZ:1G2012:STS-2/N/g=1500|943681500.0|0.02|M/S|20.0|2012-08-03T00:00:00|2017-12-31T00:00:00 3 | 1G|SA03||BHN|70.5034|29.06682|272.0|0.0|0.0|0.0|GFZ:1G2012:STS-2/N/g=1500|943681500.0|0.02|M/S|20.0|2012-08-03T00:00:00|2017-12-31T00:00:00 4 | 1G|SA03||BHZ|70.5034|29.06682|272.0|0.0|0.0|-90.0|GFZ:1G2012:STS-2/N/g=1500|943681500.0|0.02|M/S|20.0|2012-08-03T00:00:00|2017-12-31T00:00:00 5 | 1G|SA05||HHE|70.28404|31.00829|0.0|0.0|90.0|0.0|GFZ:1G2012:STS-2/G3/g=1500|1500000000.0|0.02|M/S|100.0|2013-06-12T00:00:00|2017-12-31T00:00:00 6 | 1G|SA05||HHN|70.28404|31.00829|0.0|0.0|0.0|0.0|GFZ:1G2012:STS-2/G3/g=1500|1500000000.0|0.02|M/S|100.0|2013-06-12T00:00:00|2017-12-31T00:00:00 7 | 1G|SA05||HHZ|70.28404|31.00829|0.0|0.0|0.0|-90.0|GFZ:1G2012:STS-2/G3/g=1500|1500000000.0|0.02|M/S|100.0|2013-06-12T00:00:00|2017-12-31T00:00:00 8 | 2F|CAPIN||BHE|31.70834|104.28955|883.0|0.0|90.0|0.0|GFZ:2F2012:Trillium-Compact/g=750|3072000000.0|1.0|M/S|50.0|2012-07-30T03:00:00|2013-10-08T01:53:00 9 | 2F|CAPIN||BHN|31.70834|104.28955|883.0|0.0|0.0|0.0|GFZ:2F2012:Trillium-Compact/g=750|3072000000.0|1.0|M/S|50.0|2012-07-30T03:00:00|2013-10-08T01:53:00 10 | 2F|CAPIN||BHZ|31.70834|104.28955|883.0|0.0|0.0|-90.0|GFZ:2F2012:Trillium-Compact/g=750|3072000000.0|1.0|M/S|50.0|2012-07-30T03:00:00|2013-10-08T01:53:00 11 | DK|SFJD|00|BHE|66.996|-50.6215|330.0|0.0|90.0|0.0|GFZ:DK1980:STS-1/VBB/g=2400|4026528000.0|0.02|M/S|20.0|2008-09-20T00:00:00| 12 | DK|SFJD|00|BHN|66.996|-50.6215|330.0|0.0|0.0|0.0|GFZ:DK1980:STS-1/VBB/g=2400|4026528000.0|0.02|M/S|20.0|2008-09-20T00:00:00| 13 | DK|SFJD|00|BHZ|66.996|-50.6215|330.0|0.0|0.0|-90.0|GFZ:DK1980:STS-1/VBB/g=2400|4026528000.0|0.02|M/S|20.0|2008-09-20T00:00:00| 14 | DK|SFJD|00|LHE|66.996|-50.6215|330.0|0.0|90.0|0.0|GFZ:DK1980:STS-1/VBB/g=2400|4026528000.0|0.02|M/S|1.0|2008-09-20T00:00:00| 15 | DK|SFJD|00|LHN|66.996|-50.6215|330.0|0.0|0.0|0.0|GFZ:DK1980:STS-1/VBB/g=2400|4026528000.0|0.02|M/S|1.0|2008-09-20T00:00:00| 16 | DK|SFJD|00|LHZ|66.996|-50.6215|330.0|0.0|0.0|-90.0|GFZ:DK1980:STS-1/VBB/g=2400|4026528000.0|0.02|M/S|1.0|2008-09-20T00:00:00| 17 | DK|SFJD|00|VHE|66.996|-50.6215|330.0|0.0|90.0|0.0|GFZ:DK1980:STS-1/VBB/g=2400|16106112000.0|0.02|M/S|0.1|2008-09-20T00:00:00| 18 | DK|SFJD|00|VHN|66.996|-50.6215|330.0|0.0|0.0|0.0|GFZ:DK1980:STS-1/VBB/g=2400|16106112000.0|0.02|M/S|0.1|2008-09-20T00:00:00| 19 | DK|SFJD|00|VHZ|66.996|-50.6215|330.0|0.0|0.0|-90.0|GFZ:DK1980:STS-1/VBB/g=2400|16106112000.0|0.02|M/S|0.1|2008-09-20T00:00:00| 20 | DK|SFJD|10|HHE|66.996|-50.6215|330.0|0.0|90.0|0.0|GFZ:DK1980:STS-2/N/g=20000|33554400000.0|0.02|M/S|100.0|2008-09-20T00:00:00| 21 | DK|SFJD|10|HHN|66.996|-50.6215|330.0|0.0|0.0|0.0|GFZ:DK1980:STS-2/N/g=20000|33554400000.0|0.02|M/S|100.0|2008-09-20T00:00:00| 22 | DK|SFJD|10|HHZ|66.996|-50.6215|330.0|0.0|0.0|-90.0|GFZ:DK1980:STS-2/N/g=20000|33554400000.0|0.02|M/S|100.0|2008-09-20T00:00:00| 23 | DK|SFJD|10|LHE|66.996|-50.6215|330.0|0.0|90.0|0.0|GFZ:DK1980:STS-2/N/g=20000|33554400000.0|0.02|M/S|1.0|2008-09-20T00:00:00| 24 | DK|SFJD|10|LHN|66.996|-50.6215|330.0|0.0|0.0|0.0|GFZ:DK1980:STS-2/N/g=20000|33554400000.0|0.02|M/S|1.0|2008-09-20T00:00:00| 25 | 
DK|SFJD|10|LHZ|66.996|-50.6215|330.0|0.0|0.0|-90.0|GFZ:DK1980:STS-2/N/g=20000|33554400000.0|0.02|M/S|1.0|2008-09-20T00:00:00| 26 | DK|SFJD|10|SHE|66.996|-50.6215|330.0|0.0|90.0|0.0|GFZ:DK1980:STS-2/N/g=20000|33554400000.0|0.02|M/S|40.0|2008-09-20T00:00:00| 27 | DK|SFJD|10|SHN|66.996|-50.6215|330.0|0.0|0.0|0.0|GFZ:DK1980:STS-2/N/g=20000|33554400000.0|0.02|M/S|40.0|2008-09-20T00:00:00| 28 | DK|SFJD|10|SHZ|66.996|-50.6215|330.0|0.0|0.0|-90.0|GFZ:DK1980:STS-2/N/g=20000|33554400000.0|0.02|M/S|40.0|2008-09-20T00:00:00| 29 | DK|SFJD|10|VHE|66.996|-50.6215|330.0|0.0|90.0|0.0|GFZ:DK1980:STS-2/N/g=20000|134217600000.0|0.02|M/S|0.1|2008-09-20T00:00:00| 30 | DK|SFJD|10|VHN|66.996|-50.6215|330.0|0.0|0.0|0.0|GFZ:DK1980:STS-2/N/g=20000|134217600000.0|0.02|M/S|0.1|2008-09-20T00:00:00| 31 | DK|SFJD|10|VHZ|66.996|-50.6215|330.0|0.0|0.0|-90.0|GFZ:DK1980:STS-2/N/g=20000|134217600000.0|0.02|M/S|0.1|2008-09-20T00:00:00| 32 | DK|SFJD|20|HNE|66.996|-50.6215|330.0|0.0|90.0|0.0|GFZ:DK1980:FBA-EST/10.0/g=1|427566.942|1.0|M/S**2|100.0|2008-09-20T00:00:00| 33 | DK|SFJD|20|HNN|66.996|-50.6215|330.0|0.0|0.0|0.0|GFZ:DK1980:FBA-EST/10.0/g=1|427566.942|1.0|M/S**2|100.0|2008-09-20T00:00:00| 34 | DK|SFJD|20|HNZ|66.996|-50.6215|330.0|0.0|0.0|-90.0|GFZ:DK1980:FBA-EST/10.0/g=1|427566.942|1.0|M/S**2|100.0|2008-09-20T00:00:00| 35 | DK|SFJD|20|LNE|66.996|-50.6215|330.0|0.0|90.0|0.0|GFZ:DK1980:FBA-EST/10.0/g=1|427566.942|1.0|M/S**2|1.0|2008-09-20T00:00:00| 36 | DK|SFJD|20|LNN|66.996|-50.6215|330.0|0.0|0.0|0.0|GFZ:DK1980:FBA-EST/10.0/g=1|427566.942|1.0|M/S**2|1.0|2008-09-20T00:00:00| 37 | DK|SFJD|20|LNZ|66.996|-50.6215|330.0|0.0|0.0|-90.0|GFZ:DK1980:FBA-EST/10.0/g=1|427566.942|1.0|M/S**2|1.0|2008-09-20T00:00:00| -------------------------------------------------------------------------------- /tests/data/db.no_event_type_column.sqlite: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/data/db.no_event_type_column.sqlite -------------------------------------------------------------------------------- /tests/data/db.no_event_type_column.yaml: -------------------------------------------------------------------------------- 1 | dburl: sqlite:///db.no_event_type_column.sqlite 2 | starttime: '2021-05-01' 3 | endtime: '2021-05-02' 4 | eventws: http://geofon.gfz-potsdam.de/fdsnws/event/1/query 5 | minlatitude: null 6 | maxlatitude: null 7 | minlongitude: null 8 | maxlongitude: null 9 | mindepth: null 10 | maxdepth: null 11 | minmagnitude: 5.7 12 | maxmagnitude: null # 5.81 13 | eventws_params: null 14 | channel: 15 | - BHZ 16 | network: '*' 17 | station: '*' 18 | location: '*' 19 | min_sample_rate: 60 20 | update_metadata: false 21 | inventory: true 22 | search_radius: 23 | min: 5 24 | max: 98 25 | dataws: 26 | - eida 27 | - iris 28 | traveltimes_model: ak135_ttp+ 29 | timespan: 30 | - 5.0 31 | - 10.0 32 | restricted_data: '' 33 | retry_seg_not_found: true 34 | retry_url_err: true 35 | retry_mseed_err: false 36 | retry_client_err: true 37 | retry_server_err: true 38 | retry_timespan_err: true 39 | advanced_settings: 40 | routing_service_url: http://www.orfeus-eu.org/eidaws/routing/1/query 41 | max_concurrent_downloads: null 42 | e_timeout: 5 43 | s_timeout: 120 44 | i_timeout: 60 45 | w_timeout: 30 46 | download_blocksize: 1048576 47 | db_buf_size: 100 48 | -------------------------------------------------------------------------------- /tests/data/eidatoken: 
-------------------------------------------------------------------------------- 1 | -----BEGIN PGP SIGNED MESSAGE----- 2 | Hash: SHA1 3 | 4 | {"valid_until": "2019-02-16T13:52:59.585382Z", "cn": "Dino Bindi", "memberof": "/epos/alparray;/epos;/", "sn": "Bindi", "issued": "2019-01-17T13:52:59.585393Z", "mail": "bindi@gfz-potsdam.de", "givenName": "Dino", "expiration": "1m"} 5 | -----BEGIN PGP SIGNATURE----- 6 | Version: GnuPG v1 7 | 8 | iQEcBAEBAgAGBQJcQIi7AAoJEEFpzp0AlwdXCpAH/1QM6qrm5afs/AkvpK63oRqL 9 | y62mwwiD+wTn4NJonPpiy0AjxfiYOe10M3hrIo1cvI0CSrcLAvgmdZABDLoULvus 10 | bF6A5y3IaHLI2PLT8/BlN88YnLh6wretePK99xKqPCzeK05puwXcRqSuPz1hX5fH 11 | 30KunrvI5TeBEcNvDEM31VEN+Nr88FiiDNdFQTNRS3LT55lUzqevYV5/Sr+Q8+L5 12 | 6JymJGtAQCt8CyrK4yt8sCX26A6EkNaKeDR3JAXyGQalK7hL2q4eOsSko7hZ9WW3 13 | NW0AXR6jTzMnEz8MEFbSk/JG/MTFV7E05T/x6mg1Df+qfvtbmP8IezdcUNnGalw= 14 | =cvU6 15 | -----END PGP SIGNATURE----- 16 | -------------------------------------------------------------------------------- /tests/data/event_request_sample_iris.txt: -------------------------------------------------------------------------------- 1 | #EventID | Time | Latitude | Longitude | Depth/km | Author | Catalog | Contributor | ContributorID | MagType | Magnitude | MagAuthor | EventLocationName 2 | 4258220|2011-01-08T00:04:29|50.1178|19.1442|0.0|ISC|ISC|ISC|15916121|ML|2.0|IPEC|POLAND 3 | 4258219|2011-01-08T00:03:54|37.6272|23.568|12.4|THE|ISC|ISC|16868827|ML|2.0|ATH|SOUTHERN GREECE 4 | 4258218|2011-01-08T00:01:31|31.09|131.31|32.0|JMA|ISC|ISC|600516599||1.1|JMA|KYUSHU, JAPAN 5 | 4258217|2011-01-08T00:00:39|34.24|135.41|5.0|JMA|ISC|ISC|600516598||0.4|JMA|NEAR S. COAST OF WESTERN HONSHU 6 | -------------------------------------------------------------------------------- /tests/data/event_request_sample_isc.isf: -------------------------------------------------------------------------------- 1 | DATA_TYPE EVENT IMS1.0 2 | ISC Bulletin 3 | Event 600516598 Near south coast of western Honshu 4 | Date Time Err RMS Latitude Longitude Smaj Smin Az Depth Err Ndef Nsta Gap mdist Mdist Qual Author OrigID 5 | 2011/01/08 00:00:39.70 0.10 34.2400 135.4100 1.1 0.9 -1 5.0 4.0 JMA 00353623 6 | 7 | Magnitude Err Nsta Author OrigID 8 | 0.4 JMA 00353623 9 | 10 | Event 600516599 Kyushu 11 | Date Time Err RMS Latitude Longitude Smaj Smin Az Depth Err Ndef Nsta Gap mdist Mdist Qual Author OrigID 12 | 2011/01/08 00:01:31.90 0.20 31.0900 131.3100 3.3 1.9 -1 32.0 4.0 JMA 00353624 13 | 14 | Magnitude Err Nsta Author OrigID 15 | 1.1 JMA 00353624 16 | 17 | Event 16868827 Southern Greece 18 | Date Time Err RMS Latitude Longitude Smaj Smin Az Depth Err Ndef Nsta Gap mdist Mdist Qual Author OrigID 19 | 2011/01/08 00:03:54.40 0.500 37.6272 23.5680 0.9 0.5 120 12.4 0.6 55 30 139 0.29 ke THE 00984253 20 | 21 | Magnitude Err Nsta Author OrigID 22 | ML 2.1 0.2 12 THE 00984253 23 | 24 | Event 15916121 Poland 25 | Date Time Err RMS Latitude Longitude Smaj Smin Az Depth Err Ndef Nsta Gap mdist Mdist Qual Author OrigID 26 | 2011/01/08 00:04:29.93 0.86 1.169 50.1178 19.1442 5.747 3.054 12 0.0f 38 24 143 0.43 3.75 m i ki ISC 01765749 27 | 28 | 29 | STOP 30 | -------------------------------------------------------------------------------- /tests/data/iasp91_ttp+_10.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/data/iasp91_ttp+_10.npz -------------------------------------------------------------------------------- 
/tests/data/iasp91_ttp+_5.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/data/iasp91_ttp+_5.npz -------------------------------------------------------------------------------- /tests/data/iasp91_tts+_10.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/data/iasp91_tts+_10.npz -------------------------------------------------------------------------------- /tests/data/iasp91_tts+_5.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/data/iasp91_tts+_5.npz -------------------------------------------------------------------------------- /tests/data/isc_response.txt: -------------------------------------------------------------------------------- 1 | DATA_TYPE EVENT IMS1.0 2 | ISC Bulletin 3 | Event 15938379 Germany 4 | Date Time Err RMS Latitude Longitude Smaj Smin Az Depth Err Ndef Nsta Gap mdist Mdist Qual Author OrigID 5 | 2011/01/18 03:54:57.90 0.21 0.430 49.0041 8.2923 3.5 2.2 95 5.0f 268 0.32 2.30 ke LDG 15485443 6 | 7 | Magnitude Err Nsta Author OrigID 8 | Md 2.0 0.1 3 LDG 15485443 9 | Ml 1.9 0.4 7 LDG 15485443 10 | 11 | Event 15938049 Germany 12 | Date Time Err RMS Latitude Longitude Smaj Smin Az Depth Err Ndef Nsta Gap mdist Mdist Qual Author OrigID 13 | 2011/01/19 01:26:50.20 0.03 0.270 48.1615 9.0633 0.7 0.5 133 10.0 0.0 62 0.10 3.92 ke LDG 15420641 14 | 15 | Magnitude Err Nsta Author OrigID 16 | Md 2.0 0.2 3 LDG 15420641 17 | Ml 2.1 0.3 14 LDG 15420641 18 | 19 | Event 601241379 Czech and Slovak Republics 20 | Date Time Err RMS Latitude Longitude Smaj Smin Az Depth Err Ndef Nsta Gap mdist Mdist Qual Author OrigID 21 | 2011/01/19 15:05:06.42 0.23 0.050 49.0557 16.6001 2.0 1.5 127 0.0 0.0 5 12 1.23 1.23 sm VIE 01111198 22 | 23 | Magnitude Err Nsta Author OrigID 24 | mb 0.8 0.5 2 VIE 01111198 25 | ml 2.0 0.3 3 VIE 01111198 26 | 27 | Event 15960438 Poland 28 | Date Time Err RMS Latitude Longitude Smaj Smin Az Depth Err Ndef Nsta Gap mdist Mdist Qual Author OrigID 29 | 2011/01/20 05:00:54.85 0.41 1.307 51.5797 16.2084 3.426 2.472 23 0.0f 232 174 37 0.74 83.72 m i sr ISC 01767565 30 | 31 | Magnitude Err Nsta Author OrigID 32 | mb 3.8 0.2 11 ISC 01767565 33 | 34 | Event 15960448 Poland 35 | Date Time Err RMS Latitude Longitude Smaj Smin Az Depth Err Ndef Nsta Gap mdist Mdist Qual Author OrigID 36 | 2011/01/20 18:14:12.10 0.41 0.660 51.5229 16.1293 5.9 3.3 15 1.0f 20 12 259 0.69 2.90 ki CSEM 01644129 37 | 38 | Event 600516600 Hokkaido region 39 | Date Time Err RMS Latitude Longitude Smaj Smin Az Depth Err Ndef Nsta Gap mdist Mdist Qual Author OrigID 40 | 2011/01/08 00:08:41.60 0.10 41.6700 141.0900 1.1 0.8 -1 99.0 1.0 JMA 00353625 41 | 42 | Magnitude Err Nsta Author OrigID 43 | 2.0 JMA 00353625 44 | 45 | Event 16442119 Austria 46 | Date Time Err RMS Latitude Longitude Smaj Smin Az Depth Err Ndef Nsta Gap mdist Mdist Qual Author OrigID 47 | 2011/01/22 03:36:15.50 0.56 0.890 47.2365 11.5528 12.9 4.3 145 3.2 5.9 17 11 178 0.10 2.16 ke CSEM 01656162 48 | 49 | Magnitude Err Nsta Author OrigID 50 | ML 2.1 0.6 4 CSEM 01656162 51 | 52 | 53 | STOP 54 | -------------------------------------------------------------------------------- /tests/data/jupyter.example.process.yaml: 
-------------------------------------------------------------------------------- 1 | segments_selection: 2 | has_data: "true" -------------------------------------------------------------------------------- /tests/data/jupyter.example.sqlite: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/data/jupyter.example.sqlite -------------------------------------------------------------------------------- /tests/data/processingmodule.noop.oldversion.py: -------------------------------------------------------------------------------- 1 | def main(segment, config): 2 | pass -------------------------------------------------------------------------------- /tests/data/processingmodule.noop.py: -------------------------------------------------------------------------------- 1 | from stream2segment.process import SkipSegment 2 | 3 | def main(segment, config): 4 | pass -------------------------------------------------------------------------------- /tests/data/trace_GE.APE.mseed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/data/trace_GE.APE.mseed -------------------------------------------------------------------------------- /tests/download/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/download/__init__.py -------------------------------------------------------------------------------- /tests/download/db/test_cli_classlabels.py: -------------------------------------------------------------------------------- 1 | """ 2 | Created on Feb 14, 2017 3 | 4 | @author: riccardo 5 | """ 6 | import os 7 | from click.testing import CliRunner 8 | from unittest.mock import patch 9 | 10 | import pytest 11 | 12 | from stream2segment.cli import cli 13 | from stream2segment.resources import get_templates_fpath 14 | from stream2segment.download.db.models import Class, ClassLabelling 15 | 16 | 17 | class Test: 18 | 19 | pyfile = get_templates_fpath("paramtable.py") 20 | 21 | @property 22 | def logfilecontent(self): 23 | assert os.path.isfile(self._logfilename) 24 | with open(self._logfilename) as opn: 25 | return opn.read() 26 | 27 | # The class-level `init` fixture is marked with autouse=true which implies that all test 28 | # methods in the class will use this fixture without a need to state it in the test 29 | # function signature or with a class-level usefixtures decorator. For info see: 30 | # https://docs.pytest.org/en/latest/fixture.html#autouse-fixtures-xunit-setup-on-steroids 31 | @pytest.fixture(autouse=True) 32 | def init(self, request, pytestdir, db4process): 33 | db4process.create(to_file=True) 34 | session = db4process.session 35 | 36 | class patches: 37 | # paths container for class-level patchers used below. 
Hopefully 38 | # will mek easier debug when refactoring/move functions 39 | get_session = 'stream2segment.download.db.management.get_session' 40 | # close_session = 'stream2segment.process.main.close_session' 41 | # configlog4processing = 'stream2segment.process.main.configlog4processing' 42 | 43 | # sets up the mocked functions: db session handling (using the already created 44 | # session) and log file handling: 45 | with patch(patches.get_session, return_value=session): 46 | yield 47 | 48 | def inlogtext(self, string): 49 | '''Checks that `string` is in log text. 50 | The assertion `string in self.logfilecontent` fails in py3.5, although the differences 51 | between characters is the same position is zero. We did not find any better way than 52 | fixing it via this cumbersome function''' 53 | logtext = self.logfilecontent 54 | i = 0 55 | while len(logtext[i:i+len(string)]) == len(string): 56 | if (sum(ord(a)-ord(b) for a, b in zip(string, logtext[i:i+len(string)]))) == 0: 57 | return True 58 | i += 1 59 | return False 60 | 61 | # ## ======== ACTUAL TESTS: ================================ 62 | 63 | @patch('stream2segment.cli.input', side_effect=lambda *a, **kw: 'y') 64 | def test_classlabel_cmd(self, mock_input, 65 | # fixtures: 66 | db4process): 67 | 68 | def get_classes(session): 69 | return [{ 70 | 'id': c.id, 71 | 'label': c.label, 72 | 'description': c.description 73 | } for c in session.query(Class)] 74 | 75 | classes = get_classes(db4process.session) 76 | assert not classes 77 | runner = CliRunner() 78 | # test add a class from the command line argument 79 | result = runner.invoke(cli, ['db', 'classlabel', 80 | '-d', db4process.dburl, 81 | '--add', 'label', 'description']) 82 | assert not result.exception 83 | assert 'label (description)' in result.output 84 | classes = get_classes(db4process.session) 85 | assert classes[0]['label'] == 'label' 86 | assert classes[0]['description'] == 'description' 87 | # store id to be sure we will have from now on the same id: 88 | id_ = classes[0]['id'] 89 | 90 | # test rename a class from the command line argument 91 | # only label, no description 92 | result = runner.invoke(cli, ['db', 'classlabel', 93 | '-d', db4process.dburl, 94 | '--rename', 'label', 'label2', '']) 95 | assert not result.exception 96 | assert 'label2 (description)' in result.output 97 | classes = get_classes(db4process.session) 98 | assert classes[0]['label'] == 'label2' 99 | assert classes[0]['description'] == 'description' 100 | assert classes[0]['id'] == id_ 101 | 102 | # test rename a class and the description from the command line argument 103 | # only label, no description 104 | result = runner.invoke(cli, ['db', 'classlabel', 105 | '-d', db4process.dburl, 106 | '--rename', 'label2', 'label2', 107 | 'description2']) 108 | assert not result.exception 109 | assert 'label2 (description2)' in result.output 110 | classes = get_classes(db4process.session) 111 | assert classes[0]['label'] == 'label2' 112 | assert classes[0]['description'] == 'description2' 113 | assert classes[0]['id'] == id_ 114 | 115 | # add a class labelling 116 | assert len(db4process.session.query(ClassLabelling).all()) == 0 117 | segments = db4process.segments(False, False, False).all() 118 | cl = ClassLabelling(class_id=classes[0]['id'], segment_id=segments[0].id) 119 | db4process.session.add(cl) 120 | db4process.session.commit() 121 | assert len(db4process.session.query(ClassLabelling).all()) == 1 122 | 123 | # test delete a class from the command line argument 124 | # (non existing label) 125 | 
ccount = mock_input.call_count 126 | assert ccount > 0 127 | result = runner.invoke(cli, ['db', 'classlabel', 128 | '--no-prompt' 129 | '-d', db4process.dburl, 130 | '--delete', 'label']) 131 | assert mock_input.call_count == ccount 132 | # The method assert result.exception 133 | # still same class: 134 | classes = get_classes(db4process.session) 135 | assert classes[0]['label'] == 'label2' 136 | assert classes[0]['description'] == 'description2' 137 | assert classes[0]['id'] == id_ 138 | 139 | # test delete a class from the command line argument 140 | result = runner.invoke(cli, ['db', 'classlabel', 141 | '-d', db4process.dburl, 142 | '--delete', 'label2']) 143 | assert not result.exception 144 | assert 'None' in result.output 145 | assert mock_input.call_count == ccount + 1 146 | classes = get_classes(db4process.session) 147 | assert not classes 148 | assert len(db4process.session.query(ClassLabelling).all()) == 0 149 | -------------------------------------------------------------------------------- /tests/download/db/test_cli_ddrop.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: utf-8 -*- 2 | """ 3 | Created on Feb 14, 2017 4 | 5 | @author: riccardo 6 | """ 7 | from itertools import cycle, chain 8 | from collections import defaultdict 9 | from datetime import datetime, timedelta 10 | 11 | from unittest.mock import patch 12 | import pytest 13 | from click.testing import CliRunner 14 | 15 | from stream2segment.cli import cli 16 | from stream2segment.download.db.models import (Event, Station, WebService, Segment, 17 | Channel, Download, DataCenter) 18 | from stream2segment.download.modules.utils import s2scodes 19 | 20 | 21 | class patches: 22 | # paths container for patchers used below. Hopefully 23 | # will mek easier debug when refactoring/move functions 24 | input = 'stream2segment.download.db.management.input' 25 | get_session = 'stream2segment.download.db.management.get_session' 26 | 27 | 28 | class Test: 29 | 30 | # define ONCE HERE THE command name, so if we change it in the cli it will be easier to fix here 31 | CMD_PREFIX = ['db', 'drop'] 32 | 33 | # execute this fixture always even if not provided as argument: 34 | # https://docs.pytest.org/en/documentation-restructure/how-to/fixture.html#autouse-fixtures-xunit-setup-on-steroids 35 | @pytest.fixture(autouse=True) 36 | def init(self, request, db, data): 37 | # re-init a sqlite database (no-op if the db is not sqlite): 38 | db.create(to_file=True) 39 | 40 | # setup a run_id: 41 | self.downloads = [Download(id=1), Download(id=2), Download(id=3)] 42 | db.session.add_all(self.downloads) 43 | db.session.commit() 44 | 45 | wss = WebService(id=1, url='eventws') 46 | db.session.add(wss) 47 | db.session.commit() 48 | 49 | # setup an event: 50 | ev1 = Event(id=1, webservice_id=wss.id, event_id='ev1', latitude=8, longitude=9, 51 | magnitude=5, depth_km=4, time=datetime.utcnow()) 52 | db.session.add_all([ev1]) 53 | db.session.commit() 54 | 55 | dc1 = DataCenter(station_url='asd', dataselect_url='www.dc1/dataselect/query') 56 | db.session.add_all([dc1]) 57 | db.session.commit() 58 | 59 | # d1 has one station 60 | s_d1 = Station(datacenter_id=dc1.id, latitude=11, longitude=11, network='N1', station='S1', 61 | start_time=datetime.utcnow()) 62 | s_d2 = Station(datacenter_id=dc1.id, latitude=22.1, longitude=22.1, network='N1', 63 | station='S2a', start_time=datetime.utcnow()) 64 | s2_d2 = Station(datacenter_id=dc1.id, latitude=22.2, longitude=22.2, network='N2', 65 | station='S2b', 
start_time=datetime.utcnow()) 66 | db.session.add_all([s_d1, s_d2, s2_d2]) 67 | db.session.commit() 68 | 69 | # we are about to add 3 stations * 4 channels = 12 channels 70 | # we add also 1 segment pre channel 71 | # the segments data is as follows (data, download_code, maxgap) 72 | # the first 4 SEGMENTS are download id = self.downloads[0].id 73 | # the last 8 are download id = self.downloads[1].id 74 | seg_data = ([None, s2scodes.url_err, None, self.downloads[0].id], 75 | [None, s2scodes.mseed_err, None, self.downloads[0].id], 76 | [None, None, None, self.downloads[0].id], 77 | [None, s2scodes.timespan_err, None, self.downloads[0].id], 78 | # station s_d2: 79 | [b'x', 200, 0.2, self.downloads[1].id], 80 | [b'x', s2scodes.timespan_warn, 3.9, self.downloads[1].id], 81 | [b'x', 200, 0.6, self.downloads[1].id], 82 | [b'x', 200, 0.3, self.downloads[1].id], 83 | # station s_d3: 84 | [b'x', 200, 0.1, self.downloads[1].id], 85 | [b'x', s2scodes.timespan_warn, 3.9, self.downloads[1].id], 86 | [b'x', 400, None, self.downloads[1].id], 87 | [b'x', 500, None, self.downloads[1].id], 88 | ) 89 | 90 | i = 0 91 | for s in [s_d1, s_d2, s2_d2]: 92 | for cha in ['HHZ', 'HHE', 'HHN', 'ABC']: 93 | c = Channel(station_id=s.id, location='', channel=cha, sample_rate=56.7) 94 | db.session.add(c) 95 | db.session.commit() 96 | 97 | data, code, gap, did = seg_data[i] 98 | i += 1 99 | seg = Segment(channel_id=c.id, datacenter_id=s.datacenter_id, 100 | event_id=ev1.id, download_id=did, 101 | event_distance_deg=35, request_start=datetime.utcnow(), 102 | arrival_time=datetime.utcnow(), 103 | request_end=datetime.utcnow() + timedelta(seconds=5), data=data, 104 | download_code=code, maxgap_numsamples=gap) 105 | db.session.add(seg) 106 | db.session.commit() 107 | 108 | with patch(patches.get_session, return_value=db.session) as mock_session: 109 | yield 110 | 111 | def get_db_status(self, session): 112 | ddic = defaultdict(list) 113 | for did, segid in session.query(Segment.download_id, Segment.id).order_by(Segment.download_id): 114 | ddic[did].append(segid) 115 | for did in session.query(Download.id): 116 | if did[0] not in ddic: 117 | ddic[did[0]] 118 | return dict(ddic) 119 | 120 | # ## ======== ACTUAL TESTS: ================================ 121 | 122 | @patch(patches.input, return_value='y') 123 | def test_simple_ddrop_boundary_cases(self, mock_input, 124 | # oytest fixtures: 125 | db): 126 | '''test boundary cases for ddrop''' 127 | 128 | runner = CliRunner() 129 | # text no download id provided 130 | result = runner.invoke(cli, self.CMD_PREFIX + ['--dburl', db.dburl]) 131 | assert result.exception 132 | assert result.exit_code != 0 133 | assert not mock_input.called 134 | # click outputs slightly different messages depending on version: 135 | assert ('Missing option "-did" / "--download-id"' in result.output) \ 136 | or ("Missing option '-did' / '--download-id'" in result.output) 137 | 138 | # text output, to file 139 | result = runner.invoke(cli, self.CMD_PREFIX + ['--dburl', db.dburl, '-did', 4]) 140 | assert not result.exception 141 | assert "Nothing to delete" in result.output 142 | assert result.exit_code == 0 143 | assert not mock_input.called 144 | 145 | 146 | @pytest.mark.parametrize('ids_to_delete', [(1,), (2,), (3,), 147 | (1, 2), (1, 3), (2, 3), 148 | (1, 2, 3)]) 149 | @patch(patches.input, return_value='y') 150 | def test_simple_ddrop(self, mock_input, ids_to_delete, 151 | # oytest fixtures: 152 | db): 153 | '''test ddrop with different cases''' 154 | 155 | db_status = self.get_db_status(db.session) 156 
| runner = CliRunner() 157 | expected_deleted_seg_ids = [segid for id2delete in ids_to_delete 158 | for segid in db_status[id2delete]] 159 | # add list of the form ['--did', 1, '--did', 2, ...]: 160 | dids_args = [item for pair in zip(cycle(['-did']), ids_to_delete) for item in pair] 161 | # db.session.query(Download.id, ).join(Download.segments) 162 | result = runner.invoke(cli, self.CMD_PREFIX + ['--dburl', db.dburl] + 163 | dids_args) 164 | assert not result.exception 165 | assert mock_input.called 166 | for ddd in ids_to_delete: 167 | expected_str = 'Download id=%d: DELETED (%d associated segments deleted)' % \ 168 | (ddd, len(db_status[ddd])) 169 | assert expected_str in result.output 170 | expected_dids_remained = sorted(set(db_status) - set(ids_to_delete)) 171 | assert sorted(set(_[0] for _ in db.session.query(Download.id))) == \ 172 | expected_dids_remained 173 | expected_segids_remained = sorted(chain(*[db_status[_] for _ in expected_dids_remained])) 174 | assert sorted(_[0] for _ in db.session.query(Segment.id)) == \ 175 | expected_segids_remained 176 | 177 | 178 | 179 | -------------------------------------------------------------------------------- /tests/download/test_download_real_with_network_filter.py: -------------------------------------------------------------------------------- 1 | """ 2 | Created on Feb 4, 2016 3 | 4 | @author: riccardo 5 | """ 6 | import sys 7 | from http.client import HTTPException 8 | from urllib.error import URLError 9 | 10 | from stream2segment.download.exc import NothingToDownload 11 | from io import StringIO 12 | from unittest.mock import patch 13 | import socket 14 | 15 | import pandas as pd 16 | import pytest 17 | 18 | from stream2segment.cli import cli 19 | from stream2segment.download.log import configlog4download 20 | from stream2segment.download.db.models import Station, WebService 21 | from stream2segment.download.url import urlread 22 | from stream2segment.download.modules.channels import get_post_data as origi_get_post_data 23 | 24 | 25 | def no_connection(): 26 | from stream2segment.download.url import HTTPError 27 | try: 28 | data, err, code = urlread("https://geofon.gfz-potsdam.de/") 29 | return err is None or isinstance(err, (socket.error, URLError, HTTPError, HTTPException)) 30 | except Exception: # noqa 31 | return True 32 | 33 | 34 | class patches: 35 | # paths container for class-level patchers used below. 
Hopefully 36 | # will mek easier debug when refactoring/move functions 37 | # urlopen = 'stream2segment.download.url.urlopen' 38 | get_session = 'stream2segment.download.inputvalidation.get_session' 39 | close_session = 'stream2segment.download.main.close_session' 40 | # yaml_load = 'stream2segment.download.inputvalidation.yaml_load' 41 | # ThreadPool = 'stream2segment.download.url.ThreadPool' 42 | configlog4download = 'stream2segment.download.main.configlog4download' 43 | download_save_segments = 'stream2segment.download.main.download_save_segments' 44 | get_events_df = 'stream2segment.download.main.get_events_df' 45 | get_post_data = 'stream2segment.download.modules.channels.get_post_data' 46 | mock_merge_event_stations = 'stream2segment.download.main.merge_events_stations' 47 | 48 | 49 | @pytest.mark.skipif(no_connection(), 50 | reason="no internet connection") 51 | @pytest.mark.skipif(sys.version_info < (3,7), 52 | reason="requires python3.7+") 53 | @patch(patches.get_session) 54 | @patch(patches.close_session) 55 | @patch(patches.configlog4download) 56 | @patch(patches.download_save_segments) 57 | @patch(patches.get_events_df) 58 | @patch(patches.get_post_data) 59 | def test_real_run_old_buggy_network_filter(mock_get_post_data, 60 | mock_get_events_df, 61 | mock_download_save_segments, 62 | mock_config4download, 63 | mock_close_session, mock_get_session, 64 | # fixtures: 65 | db, clirunner, pytestdir, data): 66 | """This tess a REAL download run with an OLD bug when providing filtering on network 67 | and stations with negations only. We just test that the correct 'NothingToDownload' 68 | messages are issued. The download of segments and inventories (the time consuming 69 | part) is mocked and raises NothingToDownload (we just want to test stations and 70 | network) 71 | """ 72 | if db.is_postgres: 73 | # THIS TEST IS JUST ENOUGH WITH ONE DB (USE SQLITE BECAUSE POSTGRES MIGHT NOT BE 74 | # SETUP FOR TESTS) 75 | return 76 | 77 | db.create(to_file=False) 78 | 79 | ws = WebService(name='isc', type='event', url='http://www.isc.ac.uk/fdsnws/event/1/query') 80 | db.session.add(ws) 81 | db.session.commit() 82 | ws_id = ws.id 83 | 84 | # mock just one event downloaded. 
The event below is a RELa event (we took the 85 | # 1st one only): 86 | d = pd.read_csv(StringIO("""event_id,time,latitude,longitude,depth_km,author,catalog,contributor,contributor_id,mag_type,magnitude,mag_author,event_location_name,event_type,webservice_id,id 87 | 750359 P,2000-01-03T18:28:35,42.2585,2.5413,6.9,MDD,ISC,ISC,1750359 P,mb,4.3,MDD,yrenees,,1,1"""), sep=',') 88 | d['time'] = pd.to_datetime(d['time']) 89 | d['event_type'] = d['event_type'].astype(str) 90 | d['webservice_id'].at[0] = ws_id 91 | 92 | mock_get_events_df.return_value = d 93 | 94 | mock_get_session.return_value=db.session 95 | # (close_session is ignored, as we will close the session with the db ficture) 96 | # Now define the mock for the config4download option 97 | logfilepath = pytestdir.newfile('.log') 98 | def c4d(logger, logfilebasepath, verbose): 99 | # config logger as usual, but redirects to a temp file 100 | # that will be deleted by pytest, instead of polluting the program 101 | # package: 102 | ret = configlog4download(logger, logfilepath, verbose) 103 | return ret 104 | 105 | mock_config4download.side_effect = c4d 106 | 107 | def mock_get_post_data_side_effect(*a, **kw): 108 | ret = origi_get_post_data(*a, **kw) 109 | return ret.replace('*', '') 110 | mock_get_post_data.side_effect = mock_get_post_data_side_effect 111 | 112 | # mock download save segments: raise NothingToDownload to speed up things: 113 | def func_(*a, **kw): 114 | raise NothingToDownload() 115 | mock_download_save_segments.side_effect = func_ 116 | 117 | cfg_file = data.path("download-network-filter.yaml") 118 | 119 | result = clirunner.invoke(cli, ['download', 120 | '-c', cfg_file, 121 | '--dburl', db.dburl, 122 | ]) 123 | assert not clirunner.ok(result) 124 | assert 'No station found' in result.output 125 | 126 | 127 | @pytest.mark.skipif(no_connection(), 128 | reason="no internet connection") 129 | @pytest.mark.skipif(sys.version_info < (3,7), 130 | reason="requires python3.7+") 131 | @patch(patches.get_session) 132 | @patch(patches.close_session) 133 | @patch(patches.configlog4download) 134 | @patch(patches.mock_merge_event_stations) 135 | @patch(patches.get_events_df) 136 | def test_real_run(mock_get_events_df, mock_merge_event_stations, mock_config4download, 137 | mock_close_session, mock_get_session, 138 | # fixtures: 139 | db, clirunner, pytestdir, data): 140 | """This tess a REAL download run providing filtering on network and stations 141 | The download of segments and inventories (the time consuming part) is mocked 142 | and raises NothingToDownload (we just want to test stations and netowrk) 143 | """ 144 | if db.is_postgres: 145 | # THIS TEST IS JUST ENOUGH WITH ONE DB (USE SQLITE BECAUSE POSTGRES MIGHT NOT BE 146 | # SETUP FOR TESTS) 147 | return 148 | 149 | db.create(to_file=False) 150 | 151 | ws = WebService(name='isc', type='event', url='http://www.isc.ac.uk/fdsnws/event/1/query') 152 | db.session.add(ws) 153 | db.session.commit() 154 | ws_id = ws.id 155 | 156 | # mock just one event downloaded. 
The event below is a RELa event (we took the 157 | # 1st one only): 158 | d = pd.read_csv(StringIO("""event_id,time,latitude,longitude,depth_km,author,catalog,contributor,contributor_id,mag_type,magnitude,mag_author,event_location_name,event_type,webservice_id,id 159 | 750359 P,2000-01-03T18:28:35,42.2585,2.5413,6.9,MDD,ISC,ISC,1750359 P,mb,4.3,MDD,yrenees,,1,1"""), sep=',') 160 | d['time'] = pd.to_datetime(d['time']) 161 | d['event_type'] = d['event_type'].astype(str) 162 | d['webservice_id'].at[0] = ws_id 163 | 164 | mock_get_events_df.return_value = d 165 | 166 | mock_get_session.return_value=db.session 167 | # (close_session is ignored, as we will close the session with the db ficture) 168 | # Now define the mock for the config4download option 169 | logfilepath = pytestdir.newfile('.log') 170 | def c4d(logger, logfilebasepath, verbose): 171 | # config logger as usual, but redirects to a temp file 172 | # that will be deleted by pytest, instead of polluting the program 173 | # package: 174 | ret = configlog4download(logger, logfilepath, verbose) 175 | return ret 176 | 177 | mock_config4download.side_effect = c4d 178 | 179 | # mock the first function after channels are saved to skip useless stuff 180 | # raise NothingToDownload to speed up things: 181 | def func_(*a, **kw): 182 | raise NothingToDownload('YES') 183 | mock_merge_event_stations.side_effect = func_ 184 | 185 | cfg_file = data.path("download-network-filter.yaml") 186 | 187 | result = clirunner.invoke(cli, ['download', 188 | '-c', cfg_file, 189 | '--dburl', db.dburl, 190 | ]) 191 | assert clirunner.ok(result) 192 | # test we have downloaded some networks (not included in the negation filters): 193 | # WARNING: THIS TEST MIGHT RAISE A FALSE POSITIVE, I.E. WHEN TESTS FAIL 194 | # DUE TO CONNECTION ERRORS 195 | assert db.session.query(Station).filter((Station.network.in_(['CH', 'FR', 'IV']))).all() 196 | -------------------------------------------------------------------------------- /tests/download/test_url.py: -------------------------------------------------------------------------------- 1 | """ 2 | Created on Nov 18, 2016 3 | 4 | @author: riccardo 5 | """ 6 | import threading 7 | from itertools import cycle 8 | from subprocess import call 9 | 10 | from unittest.mock import Mock, patch, MagicMock 11 | import pytest 12 | 13 | from stream2segment.download.url import _ismainthread, read_async 14 | from stream2segment.download.url import URLError 15 | 16 | 17 | class Test: 18 | 19 | # execute this fixture always even if not provided as argument: 20 | # https://docs.pytest.org/en/documentation-restructure/how-to/fixture.html#autouse-fixtures-xunit-setup-on-steroids 21 | @pytest.fixture(autouse=True) 22 | def init(self, request): 23 | 24 | self.urls = ["http://sdgfjvkherkdfvsffd", 25 | "http://www.google.com", 26 | # "http://www.apple.com", 27 | # "http://www.microsoft.com", 28 | # "http://www.amazon.com", 29 | # "http://www.facebook.com" 30 | ] 31 | self.thread = threading.current_thread() 32 | 33 | self.successes = [] 34 | self.errors = [] 35 | self.cancelled = [] 36 | self.progress = 0 37 | 38 | with patch('stream2segment.download.url.urlopen') as mock_urlopen: 39 | self.mock_urlopen = mock_urlopen 40 | yield 41 | 42 | def read_async(self, *a, **v): 43 | for obj, url, result, exc, code in read_async(*a, **v): 44 | assert _ismainthread() 45 | self.progress += 1 46 | if exc: 47 | self.errors.append(exc) 48 | else: 49 | self.successes.append(result) 50 | 51 | def read_async_raise_exc_in_called_func(self, *a, **v): 52 | """it is 
easy to check what happens if an unknown exception is raised from urllib: just mock it 53 | but what about an exception raised in the caller body, if urlread is ok? Check it here 54 | """ 55 | for obj, url, result, exc, code in read_async(*a, **v): 56 | assert _ismainthread() 57 | raise KeyboardInterrupt() 58 | # self.progress += 1 59 | # if exc: 60 | # self.errors.append(exc) 61 | # else: 62 | # self.successes.append(result) 63 | 64 | def config_urlopen(self, read_side_effect_as_list, sleep_time=None): 65 | a = Mock() 66 | read_side_effect_as_cycle = cycle(read_side_effect_as_list) 67 | def retfunc(*a, **v): 68 | if sleep_time: 69 | call(["sleep", "{:d}".format(sleep_time)]) 70 | # time.sleep(sleep_time) 71 | val = next(read_side_effect_as_cycle) 72 | if isinstance(val, Exception): 73 | raise val 74 | else: 75 | return val 76 | a.read.side_effect = retfunc # returns each item in list 77 | ret = MagicMock() 78 | ret.__enter__.return_value = a 79 | self.mock_urlopen.return_value = ret 80 | 81 | @property 82 | def mock_urlread(self): 83 | return self.mock_urlopen.return_value.__enter__.return_value.read 84 | 85 | def test_mocking_urlread(self): 86 | """Tests onsuccess. WE mock urllib2urlopen.read to return user defined strings""" 87 | 88 | data = [b'none', b'', b'google', b''] # supply an empty string otherwise urllib.read does not stop 89 | self.config_urlopen(data) 90 | 91 | # self.urls has a valid url (which should execute onsuccess) and an invalid one 92 | # which should execute onerror) 93 | successes = [] 94 | self.read_async(self.urls) 95 | 96 | assert len(self.successes) == 2 97 | 98 | data_ = list(self.successes) 99 | for res in data: 100 | if not res: # the empty byte is not returned, it serves only to stop urlread 101 | continue 102 | assert res in data_ 103 | 104 | assert self.mock_urlread.call_count == len(data) 105 | 106 | assert self.progress == 2 107 | 108 | def test_urlerrors(self): 109 | """Tests onerror. WE mock urllib2urlopen.read to raise an excpected Exception""" 110 | 111 | self.config_urlopen([URLError("")]) 112 | 113 | # self.urls has a valid url (which should execute onsuccess) and an invalid one 114 | # which should execute onerror) 115 | self.read_async(self.urls) 116 | 117 | assert len(self.errors) == 2 118 | assert self.mock_urlread.call_count == len(self.urls) 119 | 120 | assert self.progress == 2 121 | 122 | def test_general_exception_from_urlopen(self): 123 | self.config_urlopen([ValueError("")], sleep_time=None) 124 | 125 | # self.urls has a valid url (which should execute onsuccess) and an invalid one 126 | # which should execute onerror) 127 | with pytest.raises(ValueError): 128 | self.read_async(self.urls) 129 | assert self.progress == 0 130 | 131 | def test_general_exception_inside_yield(self): 132 | data = [b'none', b''] * 10000 # supply an empty string otherwise urllib.read does not stop 133 | self.config_urlopen(data) # , sleep_time=1) 134 | 135 | # self.urls has a valid url (which should execute onsuccess) and an invalid one 136 | # which should execute onerror) 137 | with pytest.raises(KeyboardInterrupt): 138 | self.read_async_raise_exc_in_called_func(self.urls) 139 | assert self.progress == 0 140 | # set the totalcounts of mock_urlread: 2 * len(url): 141 | totalcounts = 2 * len(self.urls) 142 | # assert we stopped before reading all url(s). 
Relax the condition by putting <=, as 143 | # if self.mock_urlread.call_count == totalcounts does not mean the test failed, it 144 | # can be due to the fact that we mock io-bound operations in urlread with non-io bound operations 145 | assert self.mock_urlread.call_count <= totalcounts 146 | 147 | # same regardless of urllib2 returned value: 148 | self.config_urlopen([URLError("")], sleep_time=None) 149 | # self.urls has a valid url (which should execute onsuccess) and an invalid one 150 | # which should execute onerror) 151 | with pytest.raises(KeyboardInterrupt): 152 | self.read_async_raise_exc_in_called_func(self.urls) 153 | assert self.progress == 0 154 | -------------------------------------------------------------------------------- /tests/io/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/io/__init__.py -------------------------------------------------------------------------------- /tests/io/test_fdsn_url.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Mar 27, 2021 4 | 5 | @author: riccardo 6 | """ 7 | from itertools import product 8 | 9 | import pytest 10 | 11 | from stream2segment.io import Fdsnws 12 | 13 | 14 | def test_models_fdsn_url_1(): 15 | for url in ["mock/fdsnws/station/1/query", 16 | "mock/fdsnws/station/1/query?", 17 | "https://mock/fdsnws/station/1/query", 18 | "http://mock/fdsnws/station/1/query?", 19 | "https://mock/fdsnws/station/1/", 20 | "https://mock/fdsnws/station/1", 21 | "http://mock/fdsnws/station/1/query?h=8&b=76", 22 | "https://mock/fdsnws/station/1/auth?h=8&b=76", 23 | # "mock/station/fdsnws/station/1/" # invalid (see test_resif below) 24 | ]: 25 | fdsn = Fdsnws(url) 26 | expected_scheme = 'https' if url.startswith('https://') else 'http' 27 | assert fdsn.site == '%s://mock' % expected_scheme 28 | assert fdsn.service == Fdsnws.STATION 29 | assert str(fdsn.majorversion) == str(1) 30 | normalizedurl = fdsn.url() 31 | assert normalizedurl == '%s://mock/fdsnws/station/1/query' % expected_scheme 32 | for service in list(Fdsnws.SERVICES) + ['abc']: 33 | assert fdsn.url(service) == normalizedurl.replace('station', service) 34 | 35 | assert fdsn.url(majorversion=55) == normalizedurl.replace('1', '55') 36 | assert fdsn.url(majorversion='1.1') == normalizedurl.replace('1', '1.1') 37 | 38 | for method in list(Fdsnws.METHODS) + ['abcdefg']: 39 | assert fdsn.url(method=method) == normalizedurl.replace('query', method) 40 | 41 | for url in ["fdsnws/station/1/query", 42 | "/fdsnws/station/1/query", 43 | "http:mysite.org/fdsnws/dataselect/1", # Note: this has invalid scheme 44 | "http:mysite.org/and/another/path/fdsnws/dataselect/1", 45 | "http://mysite.org/and/another/path/fdsnws/dataselect/1", 46 | "http://www.google.com", 47 | "https://mock/fdsnws/station/abc/1/whatever/abcde?h=8&b=76", 48 | "https://mock/fdsnws/station/", "https://mock/fdsnws/station", 49 | "https://mock/fdsnws/station/1/abcde?h=8&b=76", 50 | "https://mock/fdsnws/station/1/whatever/abcde?h=8&b=76", 51 | "mock/station/fdsnws/station/1/", 52 | "http://ws.resif.fr/ph5/fdsnws/dataselect/1/query"]: 53 | with pytest.raises(ValueError): 54 | Fdsnws(url) 55 | 56 | 57 | def test_resif_url(): 58 | with pytest.raises(ValueError): 59 | url1 = Fdsnws("http://ws.resif.fr/ph5/fdsnws/dataselect/1/query").url() 60 | 61 | url1 = 
Fdsnws("http://ws.resif.fr/ph5/fdsnws/dataselect/1/query", 62 | strict_path=False).url() 63 | url2 = Fdsnws("http://ws.resif.fr/fdsnws/dataselect/1/query").url() 64 | assert url1 != url2 65 | assert url1.replace("/ph5", "") == url2 66 | 67 | 68 | def test_models_fdsn_url(): 69 | url_ = 'abc.org/fdsnws/station/1' 70 | for (pre, post, slash) in product(['', 'http://', 'https://'], 71 | ['' ] + list(Fdsnws.METHODS), 72 | ['', '/', '?'] 73 | ): 74 | if not post and slash == '?': 75 | continue # do not test "abc.org/fdsnws/station/1?" it's invalid 76 | elif slash == '?': 77 | asd = 6 78 | url = pre + url_ + ('/' if post else '') + post + slash 79 | fdsn = Fdsnws(url) 80 | if url.startswith('https'): 81 | assert fdsn.site == 'https://abc.org' 82 | else: 83 | assert fdsn.site == 'http://abc.org' 84 | assert fdsn.service == Fdsnws.STATION 85 | assert fdsn.majorversion == '1' 86 | 87 | normalizedurl = fdsn.url() 88 | for service in list(Fdsnws.SERVICES) + ['abc']: 89 | assert fdsn.url(service) == normalizedurl.replace('station', service) 90 | 91 | assert fdsn.url(majorversion=55) == normalizedurl.replace('1', '55') 92 | 93 | for method in list(Fdsnws.METHODS) + ['abcdefg']: 94 | assert fdsn.url(method=method) == normalizedurl.replace('query', method) 95 | 96 | 97 | @pytest.mark.parametrize(['url_'], 98 | [ 99 | ('',), 100 | ('/fdsnws/station/1',), 101 | ('fdsnws/station/1/',), 102 | ('fdsnws/station/1/query',), 103 | ('fdsnws/station/1/query/',), 104 | ('abc.org',), 105 | ('abc.org/',), 106 | ('abc.org/fdsnws',), 107 | ('abc.org/fdsnws/',), 108 | ('abc.org/fdsnws/bla',), 109 | ('abc.org/fdsnws/bla/',), 110 | ('abc.org/fdsnws/bla/1',), 111 | ('abc.org/fdsnws/bla/1r',), 112 | ('abc.org/fdsnws/station/a',), 113 | ('abc.org/fdsnws/station/b/',), 114 | ('abc.org//fdsnws/station/1.1/',), 115 | # ('abc.org/fdsnws/station/1?',), 116 | ('abc.org/fdsnws/station/1.1//',), 117 | ('abc.org/fdsnws/station/1.1/bla',), 118 | ('abc.org/fdsnws/station/1.1/bla/',),]) 119 | def test_models_bad_fdsn_url(url_): 120 | for url in [url_, 'http://' + url_, 'https://'+url_]: 121 | with pytest.raises(ValueError): 122 | Fdsnws(url) 123 | -------------------------------------------------------------------------------- /tests/misc/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/misc/__init__.py -------------------------------------------------------------------------------- /tests/misc/test_compress_decompress.py: -------------------------------------------------------------------------------- 1 | """ 2 | Created on Sep 14, 2017 3 | 4 | @author: riccardo 5 | """ 6 | import os 7 | from itertools import product 8 | 9 | from stream2segment.download.modules.stations import compress 10 | from stream2segment.process.db.models import decompress 11 | 12 | 13 | def test_compress_decompress(): 14 | """tests compression and decompression functions""" 15 | bytesdata = b"\x00"+os.urandom(1024*1024)+b"\x00" 16 | for comp, compresslevel in product(['bz2', 'zlib', 'gzip', 'zip'], list(range(1, 10))): 17 | compr_ = compress(bytesdata, comp, compresslevel) 18 | # assert len(compr_) <= len(self.data) 19 | dec = decompress(compr_) 20 | assert dec != compr_ 21 | # now test that a non compressed file is returned as-it-is: 22 | assert decompress(bytesdata) == bytesdata 23 | -------------------------------------------------------------------------------- /tests/misc/test_notebook.py: 
-------------------------------------------------------------------------------- 1 | try: 2 | import nbformat 3 | from nbconvert.preprocessors import ExecutePreprocessor 4 | except ImportError: 5 | nbformat = None 6 | # import pytest 7 | # pytest.mark.skip("Jupyter not installed, not testing notebooks correctness") 8 | 9 | import pytest 10 | import os 11 | from stream2segment.process import get_segment_help 12 | from stream2segment.resources import get_resource_abspath 13 | 14 | 15 | # pytest.skip(allow_module_level=True) 16 | 17 | def test_segment_help(): 18 | """This test check that we did not add any new method or attribute to the Segment 19 | object without considering it in the doc (either make it hidden or visible) 20 | """ 21 | get_segment_help() 22 | 23 | 24 | @pytest.mark.skipif(nbformat is None, 25 | reason="Jupyter not installed, not testing notebooks correctness") 26 | def test_notebook(data): 27 | 28 | # cwd = os.getcwd() 29 | for fle_ in ['Using-Stream2segment-in-your-Python-code.ipynb', 30 | 'The-Segment-object.ipynb']: 31 | fle = get_resource_abspath('templates', fle_) 32 | with open(fle) as f: 33 | nb = nbformat.read(f, as_version=4) 34 | ep = ExecutePreprocessor(timeout=600) # , kernel_name='python3') 35 | cwd = os.path.dirname(fle) 36 | ep.preprocess(nb, {'metadata': {'path': cwd}}) 37 | 38 | def test_imap(capsys): 39 | def my_processing_function(segment, config): 40 | """simple processing function. Take the segment stream and remove its instrumental response""" 41 | # Get ObsPy Trace object. If the waveform has no gapos/overlaps, the trace is the only element 42 | # of the segment stream object (otherwise the stream will have several traces): 43 | trace = segment.stream()[0] 44 | # remove the instrumental response of the Trace: 45 | # get ObsPy Inventory object: 46 | inventory = segment.inventory() 47 | # remove the response: 48 | trace_remresp = trace.remove_response(inventory) # see caveat below 49 | # return the segment.id, the event magnitude, the original trace and the trace with response removed 50 | return segment.id, segment.event.magnitude, segment.stream()[0], trace_remresp 51 | 52 | # create the selection dict. This dict select a single segment (id=2) for illustrative purposes: 53 | segments_selection = { 54 | 'has_data': 'true', 55 | 'maxgap_numsamples': '[-0.5, 0.5]', 56 | 'event_distance_deg': '[70, 80]' 57 | # other optional attributes (see cheatsheet below for details): 58 | # missing_data_sec: '<120' 59 | # missing_data_ratio: '<0.5' 60 | # id: '<300' 61 | # event.time: "(2014-01-01T00:00:00, 2014-12-31T23:59:59)" 62 | # event.latitude: "[24, 70]" 63 | # event.longitude: "[-11, 24]" 64 | } 65 | 66 | from stream2segment.process import SkipSegment 67 | def my_processing_function_raising(segment, config): 68 | if segment.sample_rate < 30: 69 | raise SkipSegment("segment sample rate too low") 70 | # ... 
71 | 72 | import os 73 | dbpath = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))), 74 | 'stream2segment', 'resources', 'templates', 'example.db.sqlite') 75 | dburl = 'sqlite:///' + dbpath 76 | 77 | from stream2segment.process import imap 78 | 79 | for (segment_id, mag, trace, trace_remresp) in imap(my_processing_function, dburl, 80 | segments_selection): 81 | print() 82 | print('Segment Id: %d (event magnitude: %.1f)' % (segment_id, mag)) 83 | print('Segment trace (first three points):') 84 | print(' - Counts units (no response removed): %s' % trace.data[:3]) 85 | print(' - Physical units (response removed): %s' % trace_remresp.data[:3]) -------------------------------------------------------------------------------- /tests/misc/test_request.py: -------------------------------------------------------------------------------- 1 | """ 2 | Created on Sep 18, 2018 3 | 4 | @author: rizac 5 | """ 6 | 7 | import pytest 8 | from urllib.request import Request 9 | 10 | from stream2segment.download.url import read_async, urlread 11 | 12 | 13 | def no_connection(): 14 | from stream2segment.download.url import HTTPError 15 | try: 16 | data, err, code = urlread("https://geofon.gfz-potsdam.de/") 17 | return not (err is None or isinstance(err, HTTPError)) # online: any response, even an HTTP error status 18 | except Exception: # noqa 19 | return True 20 | 21 | 22 | @pytest.mark.skipif(no_connection(), 23 | reason="no internet connection") 24 | def test_request(): 25 | '''This tests `read_async` in case of a REAL connection. Ignored if the computer 26 | is not online''' 27 | 28 | post_data_str = """* * * HH?,HL?,HN? 2017-01-01T00:00:00 2017-06-01T00:00:00 29 | format=text 30 | level=channel""" 31 | urls = ["http://geofon.gfz-potsdam.de/fdsnws/station/1/query", 32 | "http://geofon.gfz-potsdam.de/fdsnws/station/1/query2"] 33 | ids = [1] 34 | iterable = ((id_, Request(url, 35 | data=('format=text\nlevel=channel\n'+post_data_str).encode('utf8'))) 36 | for url, id_ in zip(urls, ids)) 37 | 38 | for obj, url, result, exc, code in read_async(iterable, urlkey=lambda obj: obj[-1], 39 | blocksize=1048576, 40 | max_workers=None, 41 | decode='utf8', timeout=120): 42 | 43 | pass 44 | # r = Request("http://geofon.gfz-potsdam.de/fdsnws/station/1/query", 45 | # data="""* * * HH?,HL?,HN?
2017-01-01T00:00:00 2017-06-01T00:00:00 46 | # format=text 47 | # level=channel""".encode('utf8')) 48 | # 49 | # urlread(r) 50 | 51 | 52 | 53 | from stream2segment.cli import cli 54 | 55 | @pytest.mark.skipif(no_connection(), 56 | reason="no internet connection") 57 | def test_no_eventtype_column_db(clirunner, pytestdir, data): 58 | result = clirunner.invoke(cli, ['download', 59 | '-c', data.path('db.no_event_type_column.yaml'), 60 | ]) 61 | assert ("No row saved to table 'events' (error: table events " 62 | "has no column named event_type)") in result.output 63 | assert result.exit_code != 0 64 | 65 | 66 | 67 | 68 | -------------------------------------------------------------------------------- /tests/misc/test_resources.py: -------------------------------------------------------------------------------- 1 | """ 2 | Created on 14 Mar 2018 3 | 4 | @author: riccardo 5 | """ 6 | import os 7 | 8 | from stream2segment.resources import (get_resource_abspath, get_templates_fpaths, 9 | get_templates_fpath) 10 | from stream2segment.io import yaml_load 11 | 12 | 13 | def test_yaml_load(): 14 | # NB: all dict keys must be strings 15 | dic1 = {'a': 7, '5': 'h'} 16 | dic2 = {'a': 7, '7': 'h'} 17 | d = yaml_load(dic1, **dic2) 18 | assert d['a'] == 7 19 | assert d['5'] == 'h' 20 | assert d['7'] == 'h' 21 | assert sorted(d.keys()) == sorted(['a', '5', '7']) 22 | 23 | dic1 = {'a': 7, '5': 'h', 'v': {1: 2, 3: 4}} 24 | dic2 = {'a': 7, '7': 'h', 'v': {1: 2, 3: 5}} 25 | d = yaml_load(dic1, **dic2) 26 | assert d['a'] == 7 27 | assert d['5'] == 'h' 28 | assert d['7'] == 'h' 29 | assert d['v'][1] == 2 30 | assert d['v'][3] == 5 31 | assert sorted(d.keys()) == sorted(['a', '5', '7', 'v']) 32 | 33 | # the test below was testing the merging of eventws params, which now holds 34 | # non-required FDSN event parameters only. It is also quite cryptic and undocumented, so skip it: 35 | 36 | # dic1 = yaml_load(get_templates_fpath('download.yaml')) 37 | # key2test = 'minlat' 38 | # # This will also assert minlat is a valid key.
Otherwise, change to a valid key: 39 | # val2test = dic1['eventws_query_args'][key2test] 40 | # dic2 = yaml_load(get_templates_fpath('download.yaml'), 41 | # eventws_query_args={key2test: val2test - 1.1, 'wawa': 45.5}) 42 | # assert dic2['eventws_query_args'][key2test] == val2test - 1.1 43 | # assert dic2['eventws_query_args']['wawa'] == 45.5 44 | # 45 | # keys1 = set(dic1['eventws_query_args']) 46 | # keys2 = set(dic2['eventws_query_args']) 47 | # 48 | # assert keys1 - keys2 == set() 49 | # assert keys2 - keys1 == set(['wawa']) 50 | 51 | from os.path import abspath 52 | 53 | def test_templates_fpath(): 54 | basedir = get_resource_abspath("templates") 55 | 56 | assert abspath(basedir) == abspath(get_templates_fpaths('')[0]) == abspath(get_templates_fpath('')) 57 | 58 | res = get_templates_fpaths() 59 | assert sorted(res) == sorted(os.path.join(basedir, n) for n in os.listdir(basedir)) 60 | 61 | filenames = ['a', 'b'] 62 | res = get_templates_fpaths(*filenames) 63 | assert sorted(res) == sorted(os.path.join(basedir, n) for n in filenames) 64 | 65 | -------------------------------------------------------------------------------- /tests/misc/test_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | Created on Dec 12, 2016 3 | 4 | @author: riccardo 5 | """ 6 | from io import StringIO, BytesIO 7 | from urllib.request import Request 8 | from unittest.mock import Mock, patch 9 | import pytest 10 | from click.termui import progressbar 11 | 12 | from stream2segment.download.url import (urlread, URLError, socket, HTTPError) 13 | from stream2segment.io.cli import Nop, get_progressbar 14 | from stream2segment.io.db import secure_dburl 15 | from stream2segment.download.modules.utils import formatmsg 16 | 17 | 18 | DEFAULT_TIMEOUT = socket._GLOBAL_DEFAULT_TIMEOUT # noqa 19 | 20 | 21 | @patch('stream2segment.download.url.urlopen') 22 | def test_utils_url_read(mock_urlopen): 23 | 24 | def side_effect(argss): 25 | return StringIO(argss) 26 | 27 | mockread = Mock() 28 | class mybytesio: 29 | 30 | def __init__(self, url, **kwargs): 31 | mockread.reset_mock() 32 | if isinstance(url, Exception): 33 | self.a = url 34 | else: 35 | self.code = 200 36 | self.msg = 'Ok' 37 | self.a = BytesIO(url) 38 | 39 | def read(self, *a, **kw): 40 | if isinstance(self.a, Exception): 41 | raise self.a 42 | mockread(*a, **kw) 43 | return self.a.read(*a, **kw) 44 | 45 | def close(self, *a, **kw): 46 | if not isinstance(self.a, Exception): 47 | self.a.close() 48 | 49 | def __enter__(self,*a,**v): 50 | return self 51 | 52 | def __exit__(self, *a, **kw): 53 | return self.close(*a, **kw) 54 | 55 | 56 | mock_urlopen.side_effect = lambda url, **kw: mybytesio(url, **kw) 57 | with pytest.raises(TypeError): 58 | urlread('', "name") 59 | 60 | val = b'url' 61 | blockSize = 1024 * 1024 62 | assert urlread(val, blockSize)[0] == val 63 | mock_urlopen.assert_called_with(val) # , timeout=DEFAULT_TIMEOUT) 64 | assert mockread.call_count == 2 65 | mockread.assert_called_with(blockSize) 66 | 67 | mock_urlopen.side_effect = lambda url, **kw: mybytesio(url, **kw) 68 | 69 | assert urlread(val, arg_to_read=56)[0] == val 70 | mock_urlopen.assert_called_with(val, arg_to_read=56) 71 | assert mockread.call_count == 1 # because blocksize is -1 72 | 73 | mock_urlopen.side_effect = lambda url, **kw: mybytesio(URLError('wat?')) 74 | d, e, c = urlread(val) 75 | assert isinstance(e, URLError) 76 | 77 | mock_urlopen.side_effect = lambda url, **kw: mybytesio(socket.timeout()) 78 | d, e, c = urlread(val) 79 | assert 
isinstance(e, socket.error) 80 | 81 | mock_urlopen.side_effect = lambda url, **kw: mybytesio(HTTPError('url', 500, '?', None, None)) 82 | d, e, c = urlread(val) 83 | assert isinstance(e, HTTPError) 84 | 85 | err = HTTPError('url', 500, '?', None, None) 86 | mock_urlopen.side_effect = lambda url, **kw: mybytesio(err) 87 | assert urlread(val) == (None, err, 500) 88 | 89 | 90 | @pytest.mark.parametrize('input, expected_result', 91 | [ 92 | ("postgresql://scott:@localhost/mydatabase", 93 | "postgresql://scott:***@localhost/mydatabase"), 94 | ("postgresql://scott:tiger@localhost/mydatabase", 95 | "postgresql://scott:***@localhost/mydatabase"), 96 | ('postgresql+psycopg2://scott:tiger@localhost/mydatabase', 97 | 'postgresql+psycopg2://scott:***@localhost/mydatabase'), 98 | ('postgresql+pg8000://scott:tiger@localhost/mydatabase', 99 | 'postgresql+pg8000://scott:***@localhost/mydatabase'), 100 | ('mysql://scott:tiger@localhost/foo', 101 | 'mysql://scott:***@localhost/foo'), 102 | ('mysql+mysqldb://scott:tiger@localhost/foo', 103 | 'mysql+mysqldb://scott:***@localhost/foo'), 104 | ('sqlite:////absolute/path/to/foo.db', 105 | 'sqlite:////absolute/path/to/foo.db') 106 | ], 107 | ) 108 | def test_secure_dburl(input, expected_result): 109 | assert secure_dburl(input) == expected_result 110 | 111 | # IF RUNNING WITH ECLIPSE, UNCOMMENT THE LINE BELOW: 112 | # @pytest.mark.skip(reason="fails if run from within Eclipse because of a cryptic bytes vs string problem") 113 | @patch("stream2segment.io.cli.Nop", side_effect=lambda *a, **v: Nop(*a, **v)) 114 | @patch("stream2segment.io.cli.click_progressbar", side_effect=lambda *a, **v: progressbar(*a, **v)) 115 | def test_progressbar(mock_pbar, mock_nop): 116 | '''this test has problems with Eclipse''' 117 | N = 5 118 | with get_progressbar(False) as bar: # no-op 119 | for i in range(N): 120 | bar.update(i) 121 | assert mock_nop.call_count == 1 122 | assert mock_pbar.call_count == 0 123 | 124 | with get_progressbar(False, length=0) as bar: # no-op 125 | for i in range(N): 126 | bar.update(i) 127 | assert mock_nop.call_count == 2 128 | assert mock_pbar.call_count == 0 129 | 130 | with get_progressbar(False, length=10) as bar: # still a no-op: show is False 131 | for i in range(N): 132 | bar.update(i) 133 | assert mock_nop.call_count == 3 134 | assert mock_pbar.call_count == 0 135 | 136 | with get_progressbar(True, length=0) as bar: # no-op: zero length 137 | for i in range(N): 138 | bar.update(i) 139 | assert mock_nop.call_count == 4 140 | assert mock_pbar.call_count == 0 141 | 142 | with get_progressbar(True, length=10) as bar: # normal progressbar 143 | for i in range(N): 144 | bar.update(i) 145 | assert mock_nop.call_count == 4 146 | assert mock_pbar.call_count == 1 147 | 148 | 149 | # IF RUNNING WITH ECLIPSE, UNCOMMENT THE LINE BELOW: 150 | # @pytest.mark.skip(reason="fails if run from within Eclipse because of a cryptic bytes vs string problem") 151 | def test_progressbar_functional(): 152 | """this test has problems with Eclipse""" 153 | N = 5 154 | with get_progressbar(False) as bar: # no-op 155 | for i in range(N): 156 | bar.update(i) 157 | 158 | with get_progressbar(False, length=0) as bar: # no-op 159 | for i in range(N): 160 | bar.update(i) 161 | 162 | with get_progressbar(False, length=10) as bar: # still a no-op: show is False 163 | for i in range(N): 164 | bar.update(i) 165 | 166 | with get_progressbar(True, length=0) as bar: # no-op: zero length 167 | for i in range(N): 168 | bar.update(i) 169 | 170 | with get_progressbar(True, length=10) as bar: # normal progressbar 171 | for i in range(N): 172 | bar.update(i)
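# (Editor's note, inferred from the assertions in the two tests above and
# hedged accordingly: `get_progressbar` yields a real click progressbar only
# when its first argument is True and `length` is a positive number; in all
# other cases it yields a silent Nop exposing the same `update` interface:
#
#     with get_progressbar(True, length=100) as bar:   # real progress bar
#         bar.update(50)
#     with get_progressbar(False, length=100) as bar:  # silent no-op
#         bar.update(50)
# )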
173 | 174 | 175 | def test_formatmsg(): 176 | req = Request('http://mysite/query', data='a'*1000) 177 | msg = formatmsg("action", "errmsg", req) 178 | expected = ("action (errmsg). url: http://mysite/query, POST data:\n%s\n" 179 | "...(showing first 200 characters only)") % ('a' * 200) 180 | assert msg == expected 181 | 182 | req = Request('http://mysite/query', data='a\n'*5) 183 | msg = formatmsg("action", "errmsg", req) 184 | expected = ("action (errmsg). url: http://mysite/query, POST data:\n%s") % ('a\n' * 5) 185 | assert msg == expected.strip() 186 | 187 | req = Request('http://mysite/query', data=b'a\n'*5) 188 | msg = formatmsg("action", "errmsg", req) 189 | expected = ("action (errmsg). url: http://mysite/query, POST data:\n" 190 | "b'a\\na\\na\\na\\na\\n'") 191 | assert msg == expected.strip() 192 | -------------------------------------------------------------------------------- /tests/process/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/process/__init__.py -------------------------------------------------------------------------------- /tests/process/db/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/process/db/__init__.py -------------------------------------------------------------------------------- /tests/process/db/test_dbqueries.py: -------------------------------------------------------------------------------- 1 | """ 2 | Created on Jul 15, 2016 3 | 4 | @author: riccardo 5 | """ 6 | from datetime import datetime 7 | 8 | import pytest 9 | 10 | from stream2segment.process.db.models import (Event, WebService, Channel, Station, \ 11 | DataCenter, Segment, Download) 12 | 13 | 14 | class Test: 15 | 16 | # execute this fixture always even if not provided as argument: 17 | # https://docs.pytest.org/en/documentation-restructure/how-to/fixture.html#autouse-fixtures-xunit-setup-on-steroids 18 | @pytest.fixture(autouse=True) 19 | def init(self, request, db, data): 20 | # re-init: 21 | db.create(to_file=False, process=True) 22 | 23 | dc = DataCenter(station_url="345fbgfnyhtgrefs", dataselect_url='edfawrefdc') 24 | db.session.add(dc) 25 | 26 | utcnow = datetime.utcnow() 27 | 28 | run = Download(run_time=utcnow) 29 | db.session.add(run) 30 | 31 | ws = WebService(url='webserviceurl') 32 | db.session.add(ws) 33 | db.session.commit() 34 | 35 | id = '__abcdefghilmnopq' 36 | e = Event(event_id=id, webservice_id=ws.id, time=utcnow, latitude=89.5, longitude=6, 37 | depth_km=7.1, magnitude=56) 38 | db.session.add(e) 39 | 40 | db.session.commit() # refresh datacenter id (also flush works) 41 | 42 | d = datetime.utcnow() 43 | 44 | s = Station(network='network', station='station', datacenter_id=dc.id, latitude=90, 45 | longitude=-45, start_time=d) 46 | db.session.add(s) 47 | 48 | def test_query4gui(self, db): 49 | s = db.session.query(Station).first() 50 | e = db.session.query(Event).first() 51 | dc = db.session.query(DataCenter).first() 52 | run = db.session.query(Download).first() 53 | 54 | channels = [ 55 | Channel(location='00', channel='HHE', sample_rate=6), 56 | Channel(location='00', channel='HHN', sample_rate=6), 57 | Channel(location='00', channel='HHZ', sample_rate=6), 58 | Channel(location='00', channel='HHW', sample_rate=6), 59 | 60 | Channel(location='10', channel='HHE', sample_rate=6), 61 | Channel(location='10', channel='HHN', sample_rate=6), 62 | Channel(location='10', channel='HHZ', sample_rate=6), 63 | 64 | Channel(location='', channel='HHE', sample_rate=6), 65 | Channel(location='', channel='HHN', sample_rate=6), 66 | 67 | Channel(location='30', channel='HHZ', sample_rate=6)]
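# (Editor's note on the expected lengths defined right below: they appear to
# be simply the sizes of each location group above -- four '00' channels,
# three '10', two '' and one '30' -- presumably because a segment's siblings
# are the segments of the same station and location recorded on the other
# components.)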
68 | # expected lengths when querying for gui below. CHANGE THIS 69 | # IF YOU CHANGE THE PREVIOUS channels VARIABLE 70 | expected_lengths = [4, 4, 4, 4, 3, 3, 3, 2, 2, 1] 71 | 72 | s.channels.extend(channels) 73 | db.session.commit() 74 | 75 | args = dict(request_start=datetime.utcnow(), 76 | request_end=datetime.utcnow(), 77 | event_distance_deg=9, 78 | arrival_time=datetime.utcnow(), 79 | data=b'', 80 | event_id=e.id, 81 | datacenter_id=dc.id, 82 | download_id=run.id) 83 | segments = [] 84 | # the channels have ids now (committed above), so segments can reference them: 85 | for c in channels: 86 | segments.append(Segment(channel_id=c.id, **args)) 87 | 88 | db.session.add_all(segments) 89 | db.session.commit() 90 | 91 | for leng, segment in zip(expected_lengths, segments): 92 | # assert each segment has the expected number of siblings. Note that leng INCLUDES the current 93 | # segment whereas siblings() DOES NOT, so compare to leng-1: 94 | assert segment.siblings().count() == leng-1 95 | # assert getallcomponents(db.session, segment.id).count() == leng 96 | -------------------------------------------------------------------------------- /tests/process/db/test_inspection.py: -------------------------------------------------------------------------------- 1 | """ 2 | Created on Jul 15, 2016 3 | 4 | @author: riccardo 5 | """ 6 | from sqlalchemy.ext.hybrid import hybrid_property 7 | 8 | import stream2segment.io.db.inspection as insp 9 | from stream2segment.io.db.inspection import get_related_models 10 | from stream2segment.process.db.models import Segment 11 | 12 | 13 | def test_attnames(): 14 | # Attach to the Segment class a property that is not Queriable, so that 15 | # `attnames(qatt=False)` returns something (currently, it yields nothing).
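# (Editor's illustration, hypothetical class, not part of the test: a
# `hybrid_property` getter can work on instances yet raise when evaluated on
# the class, because the class-level expression accesses something the class
# does not define:
#
#     from sqlalchemy.ext.hybrid import hybrid_property
#
#     class Example:  # stand-in for a mapped class
#         @hybrid_property
#         def network(self):
#             return self.station.network  # ok on instances with a `station`, raises on the class
# )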
16 | # A hacky but efficient way is to attach a hybrid 17 | # property that raises: 18 | def raising_func(self): 19 | # this raises because we will access this property on the class, which 20 | # does not have segment.station defined yet: 21 | return self.station.network 22 | Segment._non_queriable_att = hybrid_property(raising_func) 23 | # Now, attnames below will try to check: 24 | # isinstance(Segment._non_queriable_att, QueriableAttribute) -> raises 25 | # => attnames will determine that the attribute is not queriable 26 | 27 | try: 28 | # queryable attributes keyed by their argument name: 29 | qatts = {'pkey': ['id'], 30 | 'fkey': ['event_id'], 31 | 'col': ['data', 'event_id', 'id'], 32 | 'rel': ['station'], 33 | 'qatt': ['id', 'event_id', 'data', 'station', 'has_data']} 34 | 35 | qatts = {'pkey', 'fkey', 'col', 'rel', 'qatt'} 36 | 37 | segment = Segment() 38 | assert sorted(insp.attnames(Segment)) == sorted(insp.attnames(segment)) 39 | 40 | def attnames(**args): 41 | return list(insp.attnames(Segment, **args)) 42 | 43 | anames = attnames() 44 | # assert we do NOT have stream and inventory: 45 | assert len(set(['stream', 'inventory', 'url']) & set(anames)) == 0 46 | # # assert we also have other expected attributes: 47 | # for k in qatts: 48 | # assert len(set(qatts[k]) & set(anames)) == len(qatts[k]) 49 | 50 | _ = attnames(pkey=True, fkey=True) 51 | assert not _ 52 | 53 | _ = attnames(qatt=False) 54 | assert sorted(_) == \ 55 | sorted(attnames(**{_: False for _ in qatts})) 56 | assert '_non_queriable_att' in set(_) 57 | 58 | assert attnames(qatt=False, pkey=True) == attnames(qatt=False, fkey=True) == \ 59 | attnames(qatt=False, col=True) == attnames(qatt=False, col=True, rel=False) == [] 60 | 61 | assert sorted(attnames(pkey=True)) == sorted(attnames(pkey=True, col=True)) 62 | assert sorted(attnames(fkey=True)) == sorted(attnames(fkey=True, col=True)) 63 | assert sorted(attnames(pkey=True)) == sorted(attnames(pkey=True, qatt=True)) 64 | 65 | 66 | _ = set(attnames(pkey=True)) 67 | assert _ & set(attnames(col=True)) == _ 68 | assert _ & set(attnames(qatt=True)) == _ 69 | 70 | _ = set(attnames(fkey=True)) 71 | assert _ & set(attnames(col=True)) == _ 72 | assert _ & set(attnames(qatt=True)) == _ 73 | 74 | _ = set(attnames(rel=True)) 75 | assert _ & set(attnames(qatt=True)) == _ 76 | 77 | assert sorted(attnames(qatt=True, rel=True)) == sorted(attnames(rel=True)) 78 | 79 | assert not set(attnames(qatt=True, rel=False)) - set(attnames(qatt=True)) 80 | assert set(attnames(qatt=True)) - set(attnames(qatt=True, rel=False)) 81 | 82 | relnames = set(get_related_models(Segment).keys()) 83 | assert sorted(attnames(rel=True)) == sorted(relnames) 84 | assert not (relnames - {'download', 'station', 'classes', 'channel', 85 | 'event', 'datacenter'}) 86 | 87 | finally: 88 | if hasattr(Segment, '_non_queriable_att'): 89 | del Segment._non_queriable_att 90 | assert not hasattr(Segment, '_non_queriable_att') 91 | 92 | # all_attnames = set(insp.attnames(Segment)) 93 | # for pkey, fkey, col, rel, qatt in product([[False, True, None]] * 5): 94 | # attnames = set(insp.attnames(Segment, pkey, fkey, col, rel, qatt)) 95 | # 96 | # if qatt is False: 97 | # fkey = pkey = col = rel = False 98 | # 99 | # if col is False: 100 | # fkey = pkey = False 101 | # 102 | # if pkey is False: 103 | # 104 | # 105 | # expected_attnames = set(qatts[comb[0]]).intersection( 106 | # *[qatts[c] for c in comb[1:]]) 107 | # if not expected_attnames: 108 | # assert not attnames 109 | # else: 110 |
# try: 111 | # assert len(expected_attnames & set(attnames)) == len(expected_attnames) 112 | # except AssertionError: 113 | # asd = 9 114 | 115 | 116 | 117 | # combine all possible arguments: 118 | # count = 0 119 | # for k in range(1, len(qatts)+1): 120 | # for comb in combinations(qatts, k): 121 | # attnames = list(insp.attnames(Segment, **{_: True for _ in comb})) 122 | # 123 | # for 124 | # 125 | # for seg in [Segment, segment]: 126 | # count += 1 127 | # attnames = list(insp.attnames(Segment, **{_: True for _ in comb})) 128 | # expected_attnames = set(qatts[comb[0]]).intersection(*[qatts[c] for c in comb[1:]]) 129 | # if not expected_attnames: 130 | # assert not attnames 131 | # else: 132 | # try: 133 | # assert len(expected_attnames & set(attnames)) == len(expected_attnames) 134 | # except AssertionError: 135 | # asd = 9 136 | # assert not (set(get_related_models(Segment).keys()) - 137 | # {'download', 'station', 'classes', 'channel', 'event', 'datacenter'}) -------------------------------------------------------------------------------- /tests/process/funclib/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/process/funclib/__init__.py -------------------------------------------------------------------------------- /tests/process/funclib/test_coda.py: -------------------------------------------------------------------------------- 1 | """ 2 | Created on Jul 25, 2016 3 | 4 | @author: riccardo 5 | """ 6 | from stream2segment.process.funclib import coda as coda_module 7 | 8 | 9 | def test_coda_jessie_mseed(data): 10 | mseed = data.read_stream("20091217_231838.FR.ESCA.00.HHZ.SAC") 11 | trace = mseed[0] 12 | coda_result = coda_module.analyze_coda(trace) 13 | coda_start_time = coda_result[0] 14 | assert coda_start_time > trace.stats.starttime 15 | coda_slope = coda_result[1] 16 | assert coda_slope < 0 17 | 18 | 19 | def test_coda_low_noise_level(data): 20 | mseed = data.read_stream("trace_GE.APE.mseed") 21 | ret = coda_module.analyze_coda(mseed[0]) 22 | assert ret is None 23 | -------------------------------------------------------------------------------- /tests/process/funclib/test_ndarrays_response_spectrum.py: -------------------------------------------------------------------------------- 1 | """ 2 | Created on Nov 28, 2017 3 | 4 | @author: riccardo 5 | """ 6 | import numpy as np 7 | import pytest 8 | from obspy.core.trace import Trace 9 | 10 | from stream2segment.process.funclib.ndarrays import ResponseSpectrum as a_ResponseSpectrum 11 | from stream2segment.process.funclib.traces import ResponseSpectrum as t_ResponseSpectrum 12 | from stream2segment.process.funclib.ndarrays import respspec as a_rs 13 | from stream2segment.process.funclib.traces import respspec as t_rs 14 | 15 | 16 | @pytest.fixture(scope='module') 17 | def shareddata(request): 18 | accel = np.array([1, 2, 1, 2, 1, 2]) 19 | periods = np.array([1, 2]) 20 | deltat = 0.1 21 | trace = Trace(data=accel, header={'delta': deltat}) 22 | return accel, periods, deltat, trace 23 | 24 | 25 | def test_abstract(shareddata): 26 | accel, periods, deltat, trace = shareddata 27 | 28 | with pytest.raises(NotImplementedError): 29 | a_ResponseSpectrum(accel, deltat, periods).evaluate() 30 | 31 | with pytest.raises(NotImplementedError): 32 | t_ResponseSpectrum(trace, periods).evaluate() 33 | 34 | 35 | def test_arrays_traces_response_spectra(shareddata): 36 | '''this test just assures everything 
goes right without errors''' 37 | # FIXME: implement better tests!!! 38 | accel, periods, deltat, trace = shareddata 39 | 40 | tuple1a = a_rs('NewmarkBeta', accel, deltat, periods) 41 | tuple1b = a_rs('NigamJennings', accel, deltat, periods) 42 | tuple2a = t_rs('NewmarkBeta', trace, periods) 43 | tuple2b = t_rs('NigamJennings', trace, periods) 44 | 45 | # compare dicts: 46 | for tup1, tup2 in [[tuple1a, tuple2a], [tuple1b, tuple2b]]: 47 | for dic1, dic2 in zip(tup1, tup2): 48 | if hasattr(dic1, 'keys'): 49 | vals = [[dic1[key], dic2[key]] for key in dic1] 50 | else: 51 | vals = [[dic1, dic2]] 52 | for val1, val2 in vals: 53 | try: 54 | assert val1 == val2 55 | except ValueError: 56 | # arrays, assert allclose: 57 | assert np.allclose(val1, val2, atol=0, equal_nan=True) 58 | -------------------------------------------------------------------------------- /tests/process/funclib/test_traces.py: -------------------------------------------------------------------------------- 1 | """ 2 | Created on May 12, 2017 3 | 4 | @author: riccardo 5 | """ 6 | import pytest 7 | import numpy as np 8 | 9 | from stream2segment.process.funclib.traces import cumsumsq 10 | 11 | 12 | class Test: 13 | 14 | # execute this fixture always even if not provided as argument: 15 | # https://docs.pytest.org/en/documentation-restructure/how-to/fixture.html#autouse-fixtures-xunit-setup-on-steroids 16 | @pytest.fixture(autouse=True) 17 | def init(self, request, data): 18 | self.mseed = data.read_stream("trace_GE.APE.mseed") 19 | 20 | def testCum(self): 21 | t = self.mseed[0] 22 | # we did not write any processing to the trace: 23 | assert 'processing' not in t.stats or not t.stats.processing 24 | c1 = cumsumsq(t) 25 | assert t is not c1 26 | assert not np.allclose(t.data, c1.data, equal_nan=True) 27 | assert max(c1.data) <= 1 28 | # we wrote processing information in the trace: 29 | assert c1.stats.processing 30 | assert cumsumsq.__name__ in c1.stats.processing[0] 31 | 32 | c3 = cumsumsq(t, normalize=False) 33 | assert t is not c3 34 | assert not np.allclose(c1.data, c3.data, equal_nan=True) 35 | assert max(c3.data) > 1 36 | # we wrote processing information in the trace: 37 | assert c3.stats.processing 38 | assert cumsumsq.__name__ in c3.stats.processing[0] 39 | 40 | c2 = cumsumsq(t, copy=False) 41 | assert t is c2 42 | assert max(c2.data) <= 1 43 | assert np.allclose(c1.data, c2.data, equal_nan=True) 44 | # we wrote processing information in the trace: 45 | assert t.stats.processing 46 | assert cumsumsq.__name__ in c3.stats.processing[0] 47 | 48 | 49 | def test_searchsorted(): 50 | """this test is just a check to assure that the new implementation of timeswhere 51 | works as the original code""" 52 | arr = [1, 4.5, 6] 53 | tosearch = [-1, 3, 4.5, 6.0, 8.1] 54 | assert (np.array([np.searchsorted(arr, v) for v in tosearch]) \ 55 | == np.searchsorted(arr, tosearch)).all() 56 | 57 | 58 | -------------------------------------------------------------------------------- /tests/process/funclib/test_traces_with_local_mseeds.py: -------------------------------------------------------------------------------- 1 | """ 2 | Created on Feb 23, 2016 3 | 4 | @author: riccardo 5 | """ 6 | import pytest 7 | import numpy as np 8 | from unittest.mock import patch 9 | from obspy.core import Trace 10 | 11 | from stream2segment.process.funclib.ndarrays import fft as orig_fft 12 | from stream2segment.process.funclib.traces import fft, bandpass, dfreq 13 | 14 | 15 | @pytest.mark.parametrize('arr, arr_len_after_trim, fft_npts', 16 | [([1, 2, 3, 4, 
5, 6], 6, 4), 17 | ([1, 2, 3, 4, 5], 5, 3), 18 | ([1, 2, 3, 4], 4, 3), 19 | ([1, 2, 3], 3, 2), 20 | ]) 21 | @patch('stream2segment.process.funclib.traces._fft', 22 | side_effect=lambda *a, **k: orig_fft(*a, **k)) 23 | def test_fft(mock_mseed_fft, arr, arr_len_after_trim, fft_npts): 24 | t = Trace(np.array(arr)) 25 | df, f = fft(t) 26 | assert len(mock_mseed_fft.call_args[0][0]) == arr_len_after_trim 27 | assert len(f) == fft_npts 28 | assert df == dfreq(t.data, t.stats.delta) 29 | freqs0 = np.linspace(0, len(f) * df, len(f), endpoint=False) 30 | freqs, f = fft(t, return_freqs=True) 31 | assert (freqs == freqs0).all() # also assures they have the same length 32 | assert np.allclose(freqs[1] - freqs[0], df) 33 | 34 | 35 | @pytest.fixture(scope="module") 36 | def _data(data): 37 | """returns a dict with fields 'mseed', 'mseed_ACC', 'mseed_VEL', 'mseed_DISP' (all Streams; 38 | the last three have the response removed) and 'inventory' (the stream inventory object 39 | used to remove the response)""" 40 | inv_name = 'inventory_GE.APE.xml' 41 | inv_obj = data.read_inv(inv_name) 42 | ret = {'inventory': data.read_inv(inv_name)} 43 | for inv_output in [None, 'ACC', 'VEL', 'DISP']: 44 | key = 'mseed' + ('' if not inv_output else "_" + inv_output) 45 | ret[key] = data.read_stream('trace_GE.APE.mseed', inv_name if inv_output else None, 46 | inv_output) 47 | return ret 48 | 49 | 50 | def test_bandpass(_data): 51 | trace = _data['mseed'][0] 52 | res = bandpass(trace, 2, 3) 53 | assert not np.array_equal(trace.data, res.data) 54 | assert trace.stats.starttime == res.stats.starttime 55 | assert trace.stats.endtime == res.stats.endtime 56 | assert trace.stats.npts == res.stats.npts 57 | assert len(trace.data) == len(res.data) 58 | -------------------------------------------------------------------------------- /tests/process/gui/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rizac/stream2segment/eb9de953fcaf3999a400138fd43e16404d5cd181/tests/process/gui/__init__.py -------------------------------------------------------------------------------- /tests/process/test_imap.py: -------------------------------------------------------------------------------- 1 | """ 2 | Created on Feb 14, 2017 3 | 4 | @author: riccardo 5 | """ 6 | import os 7 | 8 | from unittest.mock import patch 9 | import pytest 10 | 11 | from stream2segment.io.inputvalidation import BadParam 12 | from stream2segment.process.main import imap 13 | from stream2segment.process import SkipSegment 14 | 15 | 16 | class patches: 17 | # container for the patch paths of the class-level patchers used below; hopefully 18 | # this makes debugging easier when refactoring/moving functions 19 | get_session = 'stream2segment.process.main.get_session' 20 | close_session = 'stream2segment.process.main.close_session' 21 | configlog4processing = 'stream2segment.process.main.configlog4processing' 22 | 23 | class Test: 24 | 25 | # The class-level `init` fixture is marked with autouse=True which implies that all test 26 | # methods in the class will use this fixture without a need to state it in the test 27 | # function signature or with a class-level usefixtures decorator. For info see: 28 | # https://docs.pytest.org/en/latest/fixture.html#autouse-fixtures-xunit-setup-on-steroids
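# (Editor's aside, a minimal generic example of the autouse pattern described
# above; names are made up and not part of the suite:
#
#     class TestSomething:
#         @pytest.fixture(autouse=True)
#         def setup(self):
#             self.resource = object()  # hypothetical setup
#             yield                     # teardown code would follow here
#
#         def test_uses_setup(self):    # runs with `setup` applied implicitly
#             assert self.resource is not None
# )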
29 | @pytest.fixture(autouse=True) 30 | def init(self, request, pytestdir, db4process): 31 | db4process.create(to_file=True) 32 | session = db4process.session 33 | 34 | # sets up the mocked functions: db session handling (using the already created 35 | # session) and log file handling: 36 | with patch(patches.get_session, return_value=session): 37 | with patch(patches.close_session, side_effect=lambda *a, **v: None): 38 | # with patch('stream2segment.main.configlog4processing') as mock2: 39 | # 40 | # def clogd(logger, logfilebasepath, verbose): 41 | # # config logger as usual, but redirects to a temp file 42 | # # that will be deleted by pytest, instead of polluting the program 43 | # # package: 44 | # ret = o_configlog4processing(logger, 45 | # pytestdir.newfile('.log') \ 46 | # if logfilebasepath else None, 47 | # verbose) 48 | # if ret: 49 | # self._logfilename = ret[0].baseFilename 50 | # return ret 51 | # 52 | # mock2.side_effect = clogd 53 | # 54 | # yield 55 | yield 56 | 57 | # ## ======== ACTUAL TESTS: ================================ 58 | 59 | # Recall: we have 6 segments, issued from all combinations of 60 | # station_inventory in [true, false] and segment.data in [ok, with_gaps, empty] 61 | # use db4process(with_inventory, with_data, with_gap) to return the sqlalchemy query for 62 | # those segments. For info see db4process in conftest.py 63 | @pytest.mark.parametrize("advanced_settings, cmdline_opts", 64 | [({}, []), 65 | ({'segments_chunksize': 1}, []), 66 | ({'segments_chunksize': 1}, ['--multi-process']), 67 | ({}, ['--multi-process']), 68 | ({'segments_chunksize': 1}, ['--multi-process', '--num-processes', '1']), 69 | ({}, ['--multi-process', '--num-processes', '1'])]) 70 | def test_imap(self, advanced_settings, 71 | cmdline_opts, 72 | # fixtures: 73 | pytestdir, db4process, capsys): 74 | """test `imap` with different chunk sizes and multi-process options, 75 | including processing functions that skip, raise or get a bad config 76 | """ 77 | # set values which will override the yaml config in templates folder: 78 | cfg = {'snr_threshold': 0} 79 | seg_sel = {'has_data': 'true', 'id': '>3'} 80 | # if advanced_settings: 81 | # config_overrides['advanced_settings'] = advanced_settings 82 | 83 | # ret = {'a', 1} 84 | # cfg = yaml_load(pytestdir.yamlfile(get_templates_fpath('save2fs.yaml'), 85 | # **config_overrides)) 86 | # with capsys.disabled(): 87 | def func(segment, config): 88 | assert config == {} 89 | return segment.id 90 | 91 | for res in imap(func, db4process.dburl, seg_sel, None): 92 | # returned ids must match the selection above ('id': '>3'): 93 | assert res > 3 94 | 95 | 96 | def func(segment, config): 97 | assert cfg is config 98 | raise SkipSegment('a-6') 99 | 100 | count = 0 101 | for res in imap(func, db4process.dburl, seg_sel, cfg): 102 | 103 | 104 | count += 1 105 | 106 | assert count == 0 107 | 108 | def func(segment, config): 109 | raise ValueError('a-6') 110 | 111 | count = 0 112 | with pytest.raises(ValueError): 113 | for res in imap(func, db4process.dburl, seg_sel, cfg): 114 | count += 1 115 | 116 | assert count == 0 117 | 118 | 119 | def func(segment, config): 120 | raise ValueError('a-6') 121 | 122 | count = 0 123 | for res in imap(func, db4process.dburl, seg_sel, cfg, 124 | skip_exceptions=[ValueError]): 125 | count += 1 126 | 127 | assert count == 0 128 | 129 | def func(segment, config): 130 | return segment.id 131 | 132 | with pytest.raises(BadParam) as bparam: 133 | for res in imap(func, db4process.dburl, seg_sel, 'abc'): 134 | pass
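# (Editor's note on the `seg_sel` dicts used above: segment selection values
# are string expressions on segment attributes. An illustrative, hedged
# example reusing attributes that appear elsewhere in this test suite:
#
#     segments_selection = {'has_data': 'true',
#                           'id': '>3',
#                           'maxgap_numsamples': '[-0.5, 0.5]'}
#     for result in imap(my_function, dburl, segments_selection, config):
#         ...
# )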
135 | 136 | 137 | def test_imap_wrong_sqlite(# fixtures: 138 | pytestdir): 139 | """test a non-existing sqlite file, checking that we get the right error 140 | """ 141 | 142 | # first test, provide a fake dburl that does not exist: 143 | with pytest.raises(BadParam) as bparam: 144 | fname = pytestdir.newfile('.sqlite', create=False) 145 | assert not os.path.isfile(fname) # for safety 146 | dburl = 'sqlite:///' + fname 147 | assert not os.path.isfile(dburl[10:]) 148 | for res in imap(lambda *a, **v: 0, dburl, {}): 149 | pass 150 | 151 | assert 'dburl' in str(bparam.value) 152 | -------------------------------------------------------------------------------- /tests/process/test_u_various_utilities.py: -------------------------------------------------------------------------------- 1 | """ 2 | Created on Oct 7, 2017 3 | 4 | @author: riccardo 5 | """ 6 | from io import BytesIO 7 | import time 8 | from datetime import datetime 9 | import pandas as pd 10 | 11 | from unittest.mock import patch 12 | import pytest 13 | import numpy as np 14 | from obspy.core.stream import read 15 | 16 | from stream2segment.process import SkipSegment, load_ints_from_txt, save_ints_to_txt 17 | from stream2segment.process.db.models import get_stream 18 | from stream2segment.process.main import get_slices 19 | from stream2segment.process.writers import HDFWriter 20 | 21 | 22 | class MockSegment: 23 | def __init__(self, data): 24 | self.data = data 25 | 26 | 27 | def test_get_stream(data): # <- data is a pytest fixture 28 | """test our get_stream (which calls obspy._read) against obspy.read. 29 | Rationale: process.db.get_stream reads a stream from a sequence of 30 | bytes (fetched from our database). obspy's read supports file-like objects such as 31 | BytesIO, great right? No, because on error it tries to write to file and 32 | retry. This is absolutely insane. To avoid this, process.db.get_stream 33 | calls obspy._read instead. 34 | In this test, we want to assure that obspy still has this weird 35 | implementation and that our get_stream is correct (_read is private, it 36 | might be moved in the future) 37 | """ 38 | # PLEASE NOTE: we want to mock NamedTemporaryFile as used in obspy, 39 | # to check that it's called. Problem is, around obspy version 1.2 40 | # they refactored and moved the packages. Thus, try-catch: 41 | try: 42 | from obspy.core.stream import NamedTemporaryFile 43 | patch_str = 'obspy.core.stream.NamedTemporaryFile' 44 | except ImportError: 45 | from obspy.core.util.base import NamedTemporaryFile 46 | patch_str = 'obspy.core.util.base.NamedTemporaryFile' 47 | 48 | with patch(patch_str, return_value=NamedTemporaryFile()) as mock_ntf: 49 | mseeddata = data.read('trace_GE.APE.mseed') 50 | 51 | segment = MockSegment(mseeddata) 52 | tobspy = time.time() 53 | stream_obspy = read(BytesIO(mseeddata)) 54 | tobspy = time.time() - tobspy 55 | tme = time.time() 56 | stream_me = get_stream(segment) 57 | tme = time.time() - tme 58 | # our routine WAS faster; as of 2022 it might not be anymore,
so the assertion below is left commented out: 59 | # assert tme < tobspy 60 | assert (stream_obspy[0].data == stream_me[0].data).all() 61 | assert not mock_ntf.called 62 | 63 | with pytest.raises(TypeError): 64 | stream_obspy = read(BytesIO(mseeddata[:5])) 65 | assert mock_ntf.called 66 | 67 | mock_ntf.reset_mock() 68 | segment = MockSegment(mseeddata[:5]) 69 | with pytest.raises(SkipSegment): 70 | stream_me = get_stream(segment) 71 | assert not mock_ntf.called 72 | 73 | 74 | @pytest.mark.parametrize('input, expected_result', 75 | [ 76 | ((340, 113), [(0, 113), (113, 226), (226, 340)]), 77 | ((338, 113), [(0, 112), (112, 225), (225, 338)]), 78 | ((339, 113), [(0, 113), (113, 226), (226, 339)]) 79 | ], 80 | ) 81 | def test_get_slices(input, expected_result): 82 | expected_list = list(range(input[0])) 83 | assert len(expected_list) == input[0] 84 | real_list = [] 85 | slices = list(get_slices(*input)) 86 | assert len(slices) == len(expected_result) 87 | for (s, e), expected in zip(slices, expected_result): 88 | assert (s, e) == expected 89 | real_list += list(range(s, e)) 90 | assert real_list == expected_list 91 | 92 | # test with arrays as first argument. Use numpy arrays of dimension two to provide a more 93 | # general case: 94 | expected_list = np.array([[i, 2] for i in expected_list]) 95 | slices2 = list(get_slices(expected_list, input[1])) 96 | assert len(slices2) == len(slices) 97 | for nparray, (s, e) in zip(slices2, slices): 98 | assert np.array_equal(nparray, expected_list[s:e]) 99 | # test for safety that we get until the last element: 100 | assert np.array_equal(nparray[-1], expected_list[-1]) 101 | 102 | 103 | def test_writer_hdf( 104 | # fixtures: 105 | pytestdir): 106 | file = pytestdir.newfile('.hd') 107 | writer = HDFWriter(file, True) 108 | writer.chunksize = 1 109 | 110 | df1 = pd.DataFrame([{ 111 | 'str': 'a', 112 | 'dtime': datetime.utcnow(), 113 | 'float': 1.1, 114 | 'int': 1, 115 | 'bool': True 116 | }]) 117 | 118 | df2 = pd.DataFrame([{ 119 | 'str': 'abc', 120 | 'dtime': datetime.utcnow(), 121 | 'float': float('nan'), 122 | 'int': 1, 123 | 'bool': True 124 | }]) 125 | 126 | with pytest.raises(Exception): 127 | with writer: 128 | writer.write(1, df1) 129 | writer.write(2, df2) 130 | 131 | writer = HDFWriter(file, False, {'min_itemsize': {'str': 10}}) 132 | with writer: 133 | writer.write(1, df1) 134 | writer.write(2, df2) 135 | aps = writer.already_processed_segments() 136 | assert list(aps) == [1, 2] 137 | 138 | writer = HDFWriter(file, True, {'min_itemsize': {'str': 10}}) 139 | with writer: 140 | writer.write(3, df2.loc[0, :]) # series 141 | writer.write(4, df2.loc[0, :].to_dict()) 142 | aps = writer.already_processed_segments() 143 | assert list(aps) == [1, 2, 3, 4] 144 | 145 | 146 | @pytest.mark.parametrize('sep', [None, ' ', '\t', '\n']) 147 | def test_read_from_file(sep, # fixtures: 148 | pytestdir): 149 | import os 150 | outfile = os.path.join(pytestdir.makedir(), "ids.txt") 151 | ints = np.random.randint(0, 1000000, 10, dtype=int) 152 | save_ints_to_txt(outfile, ints, sep) 153 | ints2 = load_ints_from_txt(outfile, sep, False) 154 | assert np.all(ints2 == ints) 155 | assert not isinstance(ints2, list) 156 | ints2 = load_ints_from_txt(outfile, sep) 157 | assert np.all(ints2 == ints) 158 | assert isinstance(ints2, list) --------------------------------------------------------------------------------
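# (Editor's closing sketch of the `get_slices` contract exercised in
# test_get_slices above, expected output copied from the parametrization;
# hedged, not part of the suite:
#
#     from stream2segment.process.main import get_slices
#
#     list(get_slices(340, 113))  # -> [(0, 113), (113, 226), (226, 340)]
#     # consecutive (start, end) pairs covering range(340) with no gaps or
#     # overlaps, each chunk roughly 113 items long.
# )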