├── src
│   ├── __init__.py
│   ├── apps
│   │   ├── __init__.py
│   │   └── arraytool.py
│   ├── cluster
│   │   ├── __init__.py
│   │   ├── TestCluster.py
│   │   ├── ClusterProgs.py
│   │   ├── callcluster.py
│   │   └── evaluateCluster.py
│   ├── common
│   │   ├── __init__.py
│   │   ├── CommonProgs.py
│   │   ├── ObspyFkt.py
│   │   ├── Program.py
│   │   ├── Globals.py
│   │   ├── DataTypes.py
│   │   └── Logfile.py
│   ├── data
│   │   ├── __init__.py
│   │   ├── prem-no-ocean.l.nd
│   │   ├── ak135-f-continental.l.nd
│   │   ├── ak135-f-average-no-ocean.l.nd
│   │   ├── prem.l.nd
│   │   ├── ak135-f-average.l.nd
│   │   ├── prem-no-ocean.m.nd
│   │   ├── prem.m.nd
│   │   ├── ak135-f-continental.m.nd
│   │   ├── ak135-f-average-no-ocean.m.nd
│   │   ├── ak135-f-average.m.nd
│   │   ├── prem-no-ocean.f.nd
│   │   ├── prem.f.nd
│   │   ├── ak135-f-average.vf.csv
│   │   └── ak135-f-average.vf.nd
│   ├── process
│   │   ├── __init__.py
│   │   ├── Version.py
│   │   ├── trigger.py
│   │   ├── times.py
│   │   ├── ProcessProgs.py
│   │   ├── noise_addition.py
│   │   ├── deserializer.py
│   │   ├── xcorrfilter.py
│   │   ├── noise_analyser.py
│   │   ├── stacking.py
│   │   └── waveform.py
│   ├── tools
│   │   ├── __init__.py
│   │   ├── palantiri_init.py
│   │   ├── eventsearch.py
│   │   ├── create.py
│   │   └── ak135.model
│   ├── skeleton
│   │   ├── __init__.py
│   │   ├── global.conf
│   │   ├── example.yaml
│   │   ├── plot_cluster.py
│   │   └── example.config
│   └── waveform
│       ├── __init__.py
│       ├── Version.py
│       └── DataDir.py
├── example
│   ├── tttgrid
│   │   └── .gitkeep
│   ├── global.conf
│   └── events
│       └── EGYPT_1995-11-22T22-16-55
│           └── EGYPT_1995-11-22T22-16-55.config
├── docs
│   ├── configuration
│   │   ├── synthetic.rst
│   │   ├── index.rst
│   │   ├── global.rst
│   │   └── event.rst
│   ├── overview.rst
│   ├── errors.rst
│   ├── Makefile
│   ├── setup.rst
│   ├── index.rst
│   ├── processing.rst
│   └── conf.py
├── requirements.txt
├── setup.cfg
├── README.md
└── setup.py
/src/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/apps/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/cluster/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/common/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/data/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/process/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/tools/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/example/tttgrid/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/skeleton/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/waveform/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/docs/configuration/synthetic.rst:
--------------------------------------------------------------------------------
1 | syn.conf
2 | ==============
3 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | utm
2 | mpld3
3 | numpy
4 | scipy
5 | pyrocko
6 |
--------------------------------------------------------------------------------
/src/process/Version.py:
--------------------------------------------------------------------------------
1 | VERSION_STRING = 'Version 0.4 Sept. 2019'
2 |
--------------------------------------------------------------------------------
/src/waveform/Version.py:
--------------------------------------------------------------------------------
1 | VERSION_STRING = 'Version 0.2 - 8.May 2018'
2 |
--------------------------------------------------------------------------------
/src/common/CommonProgs.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 |
5 | def start():
6 |
7 | if sys.argv[1] != 'new_version':
8 | return False
9 |
10 | at = os.path.join(os.getcwd(), 'Common', 'NewVersion.py')
11 |
12 | os.system(sys.executable + ' ' + at)
13 | return True
14 |
--------------------------------------------------------------------------------
/src/cluster/TestCluster.py:
--------------------------------------------------------------------------------
1 |
2 | import sys
3 |
4 | import Basic
5 | from Program import startTest # from Common
6 |
7 | Basic.onlyForInternalUse()
8 |
9 | sys.argv = startTest('cluster', workingDir='tools')
10 |
11 | import cluster2 # from cluster
12 | cluster2.MainProc()
13 |
14 |
--------------------------------------------------------------------------------
/docs/overview.rst:
--------------------------------------------------------------------------------
1 |
2 | Overview
3 | ========
4 |
5 | Palantiri is an open source seismology toolbox for backprojection using a multi-array approach.
6 | Seismic backprojection is a method in which the reciprocity theorem is applied to seismic waveforms to image the time-dependent evolution of a coseismic rupture.
7 |
8 | .. raw:: html
9 |
10 |
11 |
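12 | In schematic form (a simplified sketch, not a description of the exact implementation), the
13 | waveform :math:`u_i` recorded at station or array :math:`i` is shifted by the pre-calculated
14 | travel time :math:`T_i(\mathbf{x})` from a candidate source location :math:`\mathbf{x}` and stacked:
15 |
16 | .. math::
17 |
18 |     s(\mathbf{x}, t) = \sum_{i=1}^{N} u_i(t + T_i(\mathbf{x}))
19 |
20 | The coherence (semblance) of such stacks, evaluated in sliding time windows, is what is mapped
21 | over the source region and through time.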
--------------------------------------------------------------------------------
/src/skeleton/global.conf:
--------------------------------------------------------------------------------
1 | [eventsearchparameter]
2 |
3 | date_min = 1995-11-22T20:00:00.0
4 | date_max = 1995-11-22T24:00:00.0
5 | magmin = 2.5
6 | catalog = GCMT
7 | resultlimit = 10
8 |
9 | [stationsearchparameter]
10 |
11 | mindist = 23
12 | maxdist = 93
13 | blacklist = SY
14 |
15 | [acquistionparameter]
16 | # relative to origin time [s]
17 | tmin = 0
18 | tmax = 3600
19 | duration = 0
20 | debug = 0
21 |
--------------------------------------------------------------------------------
/example/global.conf:
--------------------------------------------------------------------------------
1 | [eventsearchparameter]
2 |
3 | date_min = 1995-11-22T20:00:00.0
4 | date_max = 1995-11-22T24:00:00.0
5 | magmin = 2.5
6 | catalog = GCMT
7 | resultlimit = 10
8 |
9 | [stationsearchparameter]
10 |
11 | mindist = 23
12 | maxdist = 93
13 | blacklist = SY
14 |
15 | [acquistionparameter]
16 | # relative to origin time [s]
17 | tmin = 0
18 | tmax = 3600
19 | duration = 0
20 | debug = 0
21 |
22 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | description-file=README.md
3 | license-file=LICENSE
4 |
5 | [bdist_wheel]
6 | universal=1
7 |
8 | [build_ext]
9 | inplace=0
10 |
11 | [nosetests]
12 | verbosity=2
13 | detailed-errors=1
14 | #with-coverage=0
15 | #cover-erase=1
16 | #cover-package=grond
17 |
18 | [coverage:report]
19 | exclude_lines =
20 | def __repr__
21 | def __str__
22 | raise AssertionError
23 | raise NotImplementedError
24 | raise ValueError
25 | raise
26 | if __name__ == .__main__.:
27 | logger.error
28 |
--------------------------------------------------------------------------------
/docs/configuration/index.rst:
--------------------------------------------------------------------------------
1 | Configuration
2 | ==============
3 |
4 | Here we explain the different configuration files and all adjustable parameters.
5 |
6 | From here on we will refer to the directory in which all work is carried out as the palantiri main workdir.
7 | To create the palantiri main workdir, type the following command in your console::
8 |
9 | palantiri_init foldername
10 |
11 | Palantiri uses three different configuration files, which specify the user input.
12 |
13 | .. toctree::
14 | :maxdepth: 2
15 |
16 | global
17 | event
18 | synthetic
19 |
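20 | As an orientation, the example shipped with this repository lays these files out as follows
21 | (the synthetic configuration, syn.conf, is described in its own section)::
22 |
23 |     workdir/
24 |     ├── global.conf                                  general event and data search options
25 |     └── events/
26 |         └── EGYPT_1995-11-22T22-16-55/
27 |             └── EGYPT_1995-11-22T22-16-55.config     event-specific processing options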
--------------------------------------------------------------------------------
/docs/errors.rst:
--------------------------------------------------------------------------------
1 | Common errors and fixes
2 | =======================
3 |
4 | A list of common errors and how to avoid/fix them.
5 |
6 | 1. Time window too short
7 | ------------------------
8 | If the time window given by duration and forerun is too short in comparison to the
9 | step and window size, no semblance can be reliably calculated.
10 | The following error will appear:
11 |
12 |
13 | .. highlight:: console
14 |
15 | ::
16 |
17 |    ValueError: zero-size array to reduction operation maximum which has no identity
18 |
19 | A simple fix is to choose a forerun + duration length that is at least equal to the step size, as illustrated below.
20 |
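21 | For example (hypothetical values, shown schematically; set them wherever your event configuration
22 | defines these parameters)::
23 |
24 |     step     = 10
25 |     winlen   = 8
26 |     forerun  = 5
27 |     duration = 40
28 |
29 | Here forerun + duration (45 s) comfortably exceeds the step size (10 s).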
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SPHINXPROJ = palantiri
8 | SOURCEDIR = .
9 | BUILDDIR = ../..
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/src/skeleton/example.yaml:
--------------------------------------------------------------------------------
1 | --- !palantiri.tools.config.Config
2 | name: backprojection_config
3 | config: !palantiri.tools.config.PalantiriConfig
4 | blacklist: []
5 | ttphases: ["P"]
6 |
7 | config_data: !palantiri.tools.config.PalantiriDataConfig
8 | quantity: "velocity"
9 | config_syn: !palantiri.tools.config.PalantiriSyntheticConfig
10 | synthetic_test: False
11 | config_weight: !palantiri.tools.config.PalantiriWeightConfig
12 | weight_by_azimuth: False
13 | config_filter: !palantiri.tools.config.PalantiriFilterConfig
14 | flo: [0.03, 1]
15 | fhi: [1., 1.5]
16 | ns: [4, 4]
17 | name: ["1", "2"]
18 | config_geometry: !palantiri.tools.config.PalantiriGeometryConfig
19 | dimx: 25
20 | depths: [12,12,1]
21 | config_xcorr: !palantiri.tools.config.PalantiriXcorrConfig
22 | xcorr: True
23 | config_cluster: !palantiri.tools.config.ClusterConfig
24 | cluster: []
25 |
--------------------------------------------------------------------------------
/src/common/ObspyFkt.py:
--------------------------------------------------------------------------------
1 | import obspy
2 | import obspy.core
3 | from obspy.geodetics import locations2degrees
4 | from palantiri.common import Basic
5 | from obspy.geodetics import kilometers2degrees
6 |
7 | def loc2degrees(a, b):
8 |
9 | if type(a) is dict:
10 | a1 = Basic.dictToLocation(a)
11 | else:
12 | a1 = a
13 |
14 | if type(b) is dict:
15 | b1 = Basic.dictToLocation(b)
16 | else:
17 | b1 = b
18 |
19 | delta = locations2degrees(float(a1.lat), float(a1.lon), float(b1.lat),
20 | float(b1.lon))
21 | return delta
22 |
23 |
24 | def obs_TravelTimes(delta1, depth1):
25 |
26 | model = obspy.taup.TauPyModel(model='ak135')
27 | return model.get_travel_times(distance_in_degree=delta1,
28 | source_depth_in_km=float(depth1))
29 |
30 | def obs_kilometer2degrees(km):
31 | return kilometers2degrees(float(km))
32 |
--------------------------------------------------------------------------------
/src/process/trigger.py:
--------------------------------------------------------------------------------
1 | import os
2 | from obspy.signal.trigger import recursive_sta_lta
3 | from obspy.signal.trigger import plot_trigger as plotTrigger
4 | from obspy.core.trace import Trace, Stats
5 | import numpy as np
6 |
7 |
8 | def writeSembMaxValue(sembmaxvalue, sembmaxlat, sembmaxlon, ntimes, Config,
9 | Folder):
10 |
11 | fobjsembmax = open(os.path.join(Folder['semb'], 'sembmaxvalue.txt'),'w')
12 |
13 | for i in range(ntimes):
14 | fobjsembmax.write('%d %d %.20f %.2f %.2f\n'%(i,i*float(Config.config_filter.step),sembmaxvalue[i],sembmaxlat[i],sembmaxlon[i]))
15 |
16 | fobjsembmax.close()
17 |
18 |
19 | def semblancestalta(sembmaxvaluevector, sembmaxlatvector, sembmaxlonvector):
20 |
21 | data = np.array(sembmaxvaluevector, dtype=np.float64)
22 |
23 | tr = Trace(data, header=None)
24 |
25 | sta = 0.5
26 | lta = 4
27 | cft = recursive_sta_lta(tr, int(sta * tr.stats.sampling_rate), int(lta * tr.stats.sampling_rate))
28 |
29 | thrOn = 0.5
30 | thrOff = 1.5
31 | plotTrigger(tr, cft, thrOn, thrOff)
32 |
--------------------------------------------------------------------------------
/src/process/times.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from obspy.core.utcdatetime import UTCDateTime
3 |
4 | from palantiri.common import Logfile
5 |
6 |
7 | def calculateTimeWindows(mint, maxt, Config, Origin, switch):
8 |
9 | tw = {}
10 | st = str(Origin.time)[:-1]
11 |
12 | if switch == 0:
13 | winlen = float(Config.config_filter.winlen)
14 | if switch == 1:
15 | winlen = float(Config.config_filter.winlen_f2)
16 |
17 | tw['start'] = UTCDateTime(UTCDateTime(st)+(mint-float(Config.config_filter.forerun)))
18 | tw['end'] = tw['start']+float(Config.config_filter.duration)
19 | #tw['end'] = UTCDateTime(UTCDateTime(st)+(maxt+float(Config['duration'])+winlen))
20 | #timespan = UTCDateTime(UTCDateTime(st)+(maxt+float(Config['duration'])+winlen)) - UTCDateTime(UTCDateTime(st)+(mint-int(Config['forerun'])))
21 | timespan = tw['end']-tw['start']
22 | Logfile.red('ORIGIN TIME %s ' % UTCDateTime(st))
23 | Logfile.red('TIME WINDOW: %s - %s ' % (tw['start'], tw['end']) )
24 | Logfile.red('TIME SPAN: %s Minutes ' % (timespan/60))
25 |
26 | return tw
27 |
--------------------------------------------------------------------------------
/src/common/Program.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | from palantiri.common import Basic, Globals, Logfile
4 |
5 |
6 | class MainObj(object):
7 |
8 | def __init__(self, externClass, version, runTimeLog=None, errorLog=None):
9 |
10 | self.version = version
11 | self.runTimeLog = runTimeLog
12 | self.errorLog = errorLog
13 | self.extern = externClass
14 |
15 | def run(self):
16 |
17 | Logfile.init(self.runTimeLog, self.errorLog)
18 | Logfile.setStartMsg(self.version)
19 |
20 | if not self.extern.init():
21 | Logfile.abort()
22 |
23 | try:
24 | ret = self.extern.process()
25 |
26 | if ret:
27 | msg = 'Palantiri finished'
28 | else:
29 | msg = 'Palantiri finished with error - maybe Sauron looked back?'
30 |
31 | except KeyboardInterrupt:
32 | msg = 'Gandalf made Pippin drop the Palantiri by Control C'
33 | ret = False
34 |
35 | self.extern.finish()
36 | Logfile.showLabel(msg)
37 | return ret
38 |
--------------------------------------------------------------------------------
/docs/configuration/global.rst:
--------------------------------------------------------------------------------
1 | global.conf
2 | ==============
3 |
4 | The file global.conf, which is located in the palantiri main workdir, sets the general download options regarding data and event selection.
5 | An example file is created by the palantiri_init command. The user can give date_min, date_max and a minimum magnitude (magmin) to specify which events should be searched for.
6 | All FDSN catalogs are supported.
7 |
8 | An example configuration file::
9 |
10 | [eventsearchparameter]
11 |
12 | date_min = 1995-11-22T20:00:00.0
13 | date_max = 1995-11-22T24:00:00.0
14 | magmin = 2.5
15 | catalog = GCMT
16 | resultlimit = 10
17 |
18 | [stationsearchparameter]
19 |
20 | blacklist = SY
21 |
22 | In this example the [eventsearchparameter] section selects earthquakes above magnitude 2.5, occurring between 1995-11-22T20:00:00.0 and 1995-11-22T24:00:00.0, from the GCMT catalog.
23 | We display only the first 10 results.
24 |
25 | The stationsearchparameter 'blacklist' is used to exclude the listed stations from the clustering and all further processing.
26 | A comma-separated list is expected.
27 |
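28 | The skeleton global.conf shipped with the package additionally contains distance limits for the
29 | station search, presumably the minimum and maximum epicentral distance in degrees::
30 |
31 |     [stationsearchparameter]
32 |
33 |     mindist = 23
34 |     maxdist = 93
35 |     blacklist = SY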
--------------------------------------------------------------------------------
/docs/setup.rst:
--------------------------------------------------------------------------------
1 |
2 | Setup
3 | ========
4 |
5 |
6 | Installation from source
7 | ^^^^^^^^^^^^^^^^^^^^^^^^
8 |
9 | Clone the repository with ``git clone https://git.pyrocko.org/Palantiri``
10 | and then run, in the cloned directory::
11 |
12 | sudo python3 setup.py install
13 |
14 |
15 | Note: This tool is written in python3.
16 | A python2 branch is available as backup but not maintained.
17 |
18 |
19 | Prerequisites
20 | ^^^^^^^^^^^^^
21 |
22 | It is necessary to install the `pyrocko <https://pyrocko.org/>`_ software package, which is used for
23 | the basic waveform handling and manipulation. Please follow the pyrocko installation guide before installing this software.
24 |
25 | All prerequisites listed for the pyrocko software are assumed to be installed for the usage of this software package.
26 |
27 | For some advanced functionality (e.g. array response analysis) it is also necessary to install the
28 | obspy package (https://github.com/obspy/obspy).
29 | Some further basic requirements for plotting can be installed with::
30 |
31 | sudo pip3 install pyproj basemap affine
32 |
33 | These are only necessary for plotting and can be omitted.
34 |
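35 | After installation, the console commands registered in setup.py (for example ``palantiri_init``,
36 | ``palantiri_eventsearch``, ``palantiri_cluster`` and ``palantiri_process``) should be available
37 | on your path. A quick check is to create a fresh work directory (the name is arbitrary)::
38 |
39 |     palantiri_init testdir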
--------------------------------------------------------------------------------
/src/cluster/ClusterProgs.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | from palantiri.common import Basic
4 | from palantiri.common import Globals
5 |
6 |
7 | def Usage():
8 | return 'arraytool.py cluster event_name [automatic array clustering and print arrayconfig]'
9 |
10 |
11 | class Intern(object):
12 |
13 | def __init__(self): dummy = 0
14 |
15 | def error(self, text):
16 | print('\nError: ' + text + '\n')
17 | print('Usage: ' + Usage())
18 | sys.exit('\n*** Palantiri clustering aborted ***')
19 |
20 | def checkProgramParameter(self, nMinParams, nMaxParams):
21 |
22 |         if len(sys.argv) < nMinParams:
23 |             self.error('event name missing')
24 |         if len(sys.argv) > nMaxParams:
25 |             self.error('Too many parameters')
26 |
27 |         eventName = sys.argv[2]  # read only after the length checks
28 |
29 | if not Globals.checkEventDirParameter(eventName):
30 |
31 | s = 'Invalid parameter - <' + eventName + '>'
32 | s += '\n '
33 | s += 'Cannot find directory ' + Globals.EventDir()
34 | self.error(s)
35 |
36 |
37 | def start(config):
38 |
39 | intern = Intern()
40 |
41 | if sys.argv[1] == 'cluster':
42 | intern.checkProgramParameter(3, 4)
43 |
44 | workDir = [Globals.EventDir(), 'tmp2', 'cluster']
45 | workDir = ['cluster']
46 | cmd = "palantiri_cluster" + ' -f ' + Globals.EventDir()
47 |
48 | else:
49 | return False
50 |
51 | Basic.changeDirectory(workDir)
52 |
53 | os.system(cmd)
54 | return True
55 |
--------------------------------------------------------------------------------
/src/cluster/callcluster.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | from optparse import OptionParser
4 | from palantiri.tools import config
5 | from palantiri.tools.config import Event, Config, PalantiriConfig, PalantiriDataConfig, PalantiriXcorrConfig, PalantiriFilterConfig, PalantiriWeightConfig, PalantiriGeometryConfig, PalantiriSyntheticConfig
6 | from pyrocko import guts
7 | parser = OptionParser(usage="%prog -f eventpath ")
8 | parser.add_option("-f", "--evpath", type="string", dest="evpath", help="evpath")
9 | (options, args) = parser.parse_args()
10 | options.evpath = args[0]
11 |
12 |
13 | def init():
14 |
15 | C = config.Config(options.evpath)
16 | print(options.evpath)
17 | Config = C.parseConfig('config')
18 | yaml_file = C.parseConfig('yaml')
19 | cfg = guts.load(filename=yaml_file[0])
20 | tests = int(cfg.config_cluster.runs)
21 |
22 | import palantiri
23 | path = palantiri.__path__
24 | at = os.path.join(path[0], 'cluster/cluster2.py')
25 | cmd = sys.executable + ' ' + at + ' -f ' + options.evpath
26 | print('cmd = ', cmd)
27 |
28 | for i in range(tests):
29 | print('RUN: ', i)
30 | os.system(cmd)
31 |
32 | cmd = ('%s evaluatecluster.py -f %s') % (sys.executable,
33 | os.path.join(options.evpath,
34 | 'cluster'))
35 |
36 | at = os.path.join(path[0], 'cluster/evaluateCluster.py')
37 | cmd = sys.executable + ' ' + at + ' -f ' + os.path.join(options.evpath,
38 | "cluster")
39 | os.system(cmd)
40 |
41 |
42 | def main():
43 | init()
44 |
--------------------------------------------------------------------------------
/src/tools/palantiri_init.py:
--------------------------------------------------------------------------------
1 | import os
2 | import fnmatch
3 | import sys
4 | from optparse import OptionParser
5 | import logging
6 | import imp
7 | import obspy.core
8 | from palantiri.process import ProcessProgs
9 | from palantiri.cluster import ClusterProgs
10 | from palantiri.common import CommonProgs
11 | from palantiri.tools import config as config
12 | import palantiri
13 | import shutil
14 |
15 | logger = logging.getLogger(sys.argv[0])
16 | logger.setLevel(logging.DEBUG)
17 |
18 |
19 | def main():
20 | if len(sys.argv) < 2:
21 | print("workdir path/name missing")
22 | quit()
23 |
24 | foldername = sys.argv[1]
25 | workfolder = os.path.join(os.getcwd(), './', foldername)
26 | eventsfolder = os.path.join(os.getcwd(), './', foldername, 'events')
27 | tttgridsfolder = os.path.join(os.getcwd(), './', foldername, 'tttgrid')
28 | tmpfolder = os.path.join(os.getcwd(), './', foldername, 'tmpProcess')
29 |
30 | if os.access(workfolder, os.F_OK) is False:
31 | os.makedirs(workfolder)
32 | os.makedirs(tmpfolder)
33 | os.makedirs(tttgridsfolder)
34 | os.makedirs(eventsfolder)
35 |
36 | logger.info('\033[31m Working Super-FOLDER CREATED \033[0m \n')
37 |
38 | else:
39 | print("workdir already exists!")
40 | quit()
41 |
42 |
43 | dstfolder = foldername
44 | dstfile = ('global.conf')
45 | path = palantiri.__path__
46 | src = os.path.join(path[0], 'skeleton', 'global.conf')
47 | dst = os.path.join(dstfolder, dstfile)
48 | logger.info('\033[31m Created work directory \
49 | %s \033[0m \n' % (dstfolder.split('/')[-1]))
50 | shutil.copy(src, dst)
51 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 |
2 | Palantiri Manual
3 | ================
4 |
5 | Palantiri is a command-line based open source toolbox for seismological backprojection to investigate source properties
6 | based on teleseismic data. Palantiri allows downloading data and clustering stations into synthetic arrays.
7 | Bootstrapping the weights of these arrays allows investigating the uncertainty of the backprojection results.
8 | The tool allows for a number of fast synthetic tests, which are made possible by using Green's function stores from the Pyrocko package.
9 | For fast processing, traveltime grids are pre-calculated. An arbitrary number of processes with different filter settings can be run.
10 | Backprojections can be carried out either on a single planar grid or in 3-D.
11 |
12 |
13 |
14 | .. raw:: html
15 |
16 |
17 |
18 | Features
19 | --------
20 |
21 | Palantiri provides:
22 |
23 | * time domain backprojection
24 | * array forming by clustering
25 | * automatic download of data
26 |
27 |
28 |
29 | Contents
30 | --------
31 |
32 | .. toctree::
33 | :maxdepth: 2
34 |
35 | overview
36 | setup
37 | configuration/index
38 | processing
39 | errors
40 |
41 | Indices and tables
42 | ------------------
43 |
44 | * :ref:`genindex`
45 | * :ref:`modindex`
46 | * :ref:`search`
47 |
48 | Literature
49 | ------------------
50 | Krüger, F., & Ohrnberger, M. (2005).
51 | Tracking the rupture of the Mw = 9.3 Sumatra earthquake over 1,150 km at teleseismic distance.
52 | Nature, 435(7044), 937.
53 |
54 | Rössler, D., Krueger, F., Ohrnberger, M., & Ehlert, L. (2010).
55 | Rapid characterisation of large earthquakes by multiple seismic broadband arrays.
56 | Natural Hazards and Earth System Sciences, 10(4), 923-932.
57 |
58 |
59 | More resources:
60 | ---------------
61 |
62 | * `Green's Mill - Online resource for pre-calculated Green's functions `_
63 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Palantiri
2 |
3 | ### A seismological backprojection array tool
4 |
5 | GitHub is restricting access to their services based on user nationality and residence. Such restrictions are incompatible with scientific standards in international research communities like seismology. See the statement at https://pyrocko.org/.
6 |
7 | As researchers, we are obligated to retain open access to all. To achieve this, we are now migrating our code repositories away from GitHub to a new safe home. The new home of the Palantiri repository is at https://git.pyrocko.org/asteinbe/Palantiri.
8 |
9 | To ensure a smooth transition, we will keep a version of the code repository at GitHub until 2020-01-01.
10 |
11 |
12 | ## Documentation
13 |
14 | WIP Documentation: https://braunfuss.github.io/Palantiri/
15 |
16 |
17 | ## Citation
18 |
19 |
20 | ## License
21 | GNU General Public License, Version 3, 29 June 2007
22 |
23 | Copyright © 2018 University of Potsdam, Potsdam, Germany and University of Kiel, Kiel, Germany
24 | Palantiri is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
25 | Palantiri is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
26 | You should have received a copy of the GNU General Public License along with this program. If not, see <https://www.gnu.org/licenses/>.
27 |
28 | ## Contact
29 | * Andreas Steinberg;
30 | andreas.steinberg@ifg.uni-kiel.de
31 |
32 | * Frank Krüger;
33 | kruegerf@geo.uni-potsdam.de
34 |
35 |
36 | ```
37 | University of Kiel
38 | Institute of Geosciences
39 | Otto-Hahn-Platz 1
40 | 24118 Kiel, Germany
41 |
42 | ```
43 |
44 | Avatar image by xDisciplExX, https://www.deviantart.com/xdisciplexx/art/Palantir-Stock-PNG-458559037, under the Creative Commons Attribution-Noncommercial 3.0 License
45 |
46 |
--------------------------------------------------------------------------------
/src/process/ProcessProgs.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | from palantiri.common import Basic
4 | from palantiri.common import Globals
5 |
6 |
7 | def Usage():
8 | return 'arraytool.py process event_name'
9 |
10 |
11 | class Intern(object):
12 |
13 | def __init__(self):
14 | dummy = 0
15 |
16 | def error(self, text):
17 | print('\nError: ' + text + '\n')
18 | print('Usage: ' + Usage())
19 |         sys.exit('\n*** Gandalf made Pippin drop the Palantiri ***')
20 |
21 | def checkProgramParameter(self, nMinParams, nMaxParams):
22 |
23 |
24 |         if len(sys.argv) < nMinParams:
25 |             self.error('event name missing')
26 |         eventName = sys.argv[2]  # read only after the length check
27 |         if not Globals.checkEventDirParameter(eventName):
28 | s = 'Invalid parameter - <' + eventName + '>'
29 | s += '\n '
30 | s += 'Cannot find directory ' + Globals.EventDir()
31 | self.error(s)
32 |
33 |
34 | def start(config):
35 | intern = Intern()
36 | if sys.argv[1] == 'process':
37 | force = False
38 | for argv in sys.argv[1:]:
39 | if argv == '--force':
40 | force = True
41 |
42 | intern.checkProgramParameter(3, 4)
43 |
44 | path = Globals.EventDir()
45 | path_emp = Globals.EventDir_emp()
46 | try:
47 | path_emp = Globals.EventDir_emp()
48 | at = os.path.join(os.getcwd(), 'Process', 'main.py')
49 | workDir = [path, 'tmp2', 'process']
50 | workDir = ['tmpProcess']
51 | if force is False:
52 | cmd = "palantiri_process" + ' -f ' + path + ' -e ' + path_emp
53 | else:
54 |                 cmd = "palantiri_process" + ' -f ' + path + ' -e ' + path_emp + ' --force'
55 |
56 | except Exception:
57 | at = os.path.join(os.getcwd(), 'Process', 'main.py')
58 | workDir = [path, 'tmp2', 'process']
59 | workDir = ['tmpProcess']
60 | if force is False:
61 | cmd = "palantiri_process" + ' -f ' + path
62 | else:
63 |                 cmd = "palantiri_process" + ' -f ' + path + ' --force'
64 |
65 | else:
66 | return False
67 |
68 | Basic.changeDirectory(workDir)
69 | os.system(cmd)
70 | return True
71 |
--------------------------------------------------------------------------------
/src/apps/arraytool.py:
--------------------------------------------------------------------------------
1 | import os
2 | import fnmatch
3 | import sys
4 | from optparse import OptionParser
5 | import logging
6 | import imp
7 | import obspy.core
8 | from palantiri.process import ProcessProgs
9 | from palantiri.cluster import ClusterProgs
10 | from palantiri.common import CommonProgs
11 | from palantiri.tools import config as config
12 |
13 | logger = logging.getLogger(sys.argv[0])
14 | logger.setLevel(logging.DEBUG)
15 |
16 | formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
17 |
18 | ch = logging.StreamHandler()
19 | ch.setLevel(logging.DEBUG)
20 | ch.setFormatter(formatter)
21 | logger.addHandler(ch)
22 |
23 | usage = ''' %prog [options] [args]
24 | %prog list [list all events from event folder]
25 | %prog process eventname
26 | %prog --force
27 | '''
28 |
29 |
30 | def folderContent(p):
31 | '''
32 | method to lookup necessary config files for event processing in
33 | the event folders
34 |
35 | return list of eventname if config and origin file are existing
36 | '''
37 | L = []
38 | for root, dirs, files in os.walk(p):
39 | flags = 0
40 |
41 | for i in files:
42 | if fnmatch.fnmatch(i, '*.config'):
43 | flags += 1
44 | if fnmatch.fnmatch(i, '*.origin'):
45 | flags += 1
46 |
47 | if flags == 2:
48 | name = root.split('/')
49 | L.append(name[-1:])
50 | return L
51 |
52 |
53 | def listEvents():
54 | '''
55 |     method to list the entries of the events folder and print them
56 |     (no filtering is done here; see folderContent for the check that
57 |     config and origin files exist)
58 |
59 | '''
60 |
61 | for item in os.listdir(os.path.join(os.getcwd(), 'events')):
62 | print(item)
63 |
64 |
65 | def parseArguments(args):
66 | '''
67 | parse arguments of main and entry script for arraytool
68 |
69 | do what you gave as commandline argument to main script
70 | '''
71 |
72 | dir = 'tools'
73 | if ProcessProgs.start(config):
74 | return
75 | if ClusterProgs.start(config):
76 | return
77 |
78 | print(sys.argv[1])
79 | if sys.argv[1] == 'list':
80 | listEvents()
81 |
82 | else:
83 | logger.info('\033[31m Option not available \033[0m')
84 |
85 |
86 | def main():
87 | if len(sys.argv) > 1:
88 | parseArguments(sys.argv)
89 |
--------------------------------------------------------------------------------
/src/waveform/DataDir.py:
--------------------------------------------------------------------------------
1 |
2 | import os
3 | import sys
4 | import platform
5 |
6 | WINDOWS =(platform.system() == 'Windows')
7 |
8 | # add local directories to import path
9 |
10 | sys.path.append('../Common/')
11 |
12 | #import fnmatch
13 |
14 | import obspy.core.trace
15 |
16 | # Import from Common
17 |
18 | import Globals # Own global data
19 | import Basic # Own module with basic functions
20 | import Logfile # Implements logfile
21 | from DataTypes import Station
22 |
23 | DATA_DIR = 'data'  # root name of data directory (relative to event dir)
24 | FILE_NAME_FORMAT = '%s.%s.%s.%s.D.%s.%s'
25 |
26 | # -------------------------------------------------------------------------------------------------
27 |
28 | def filename(trace, day):
29 |
30 | postfix = str("%03d" % day.julday)
31 |
32 | if type(trace) is obspy.core.trace.Trace:
33 | t= trace.stats
34 | filename =(FILE_NAME_FORMAT) %(t.network, t.station, t.location, t.channel,
35 | t.starttime.year, postfix)
36 | else:
37 | Logfile.exception('DataDir.filename', str(type(trace)))
38 | Logfile.abort('')
39 |
40 | #Logfile.add(filename)
41 | return filename
42 |
43 | # -------------------------------------------------------------------------------------------------
44 |
45 | def getFileNames(eventDir=None):
46 |
47 |     if eventDir is None: eventDir = Globals.EventDir()
48 |
49 | names = []
50 | path = os.path.join(eventDir, DATA_DIR)
51 |
52 | for root,dirs,files in os.walk(path):
53 | for s in files: names.append(s)
54 |
55 | #Logfile.addLines(names)
56 | return sorted(names)
57 |
58 | # -------------------------------------------------------------------------------------------------
59 |
60 | def getNetworks(eventDir=None):
61 |
62 | files= getFileNames(eventDir)
63 | networks = []
64 |
65 | for s in files:
66 | net = str.split(s, '.')[0]
67 | networks.append(net)
68 |
69 | networks = sorted(set(networks))
70 | #Logfile.addLines(networks)
71 |
72 | return networks
73 |
74 | def isNetwork(network, eventDir=None):
75 |
76 | assert network != None
77 | return network in getNetworks(eventDir)
78 |
79 | # -------------------------------------------------------------------------------------------------
80 |
81 |
82 |
83 |
--------------------------------------------------------------------------------
/src/process/noise_addition.py:
--------------------------------------------------------------------------------
1 | from pyrocko.client import catalog
2 | import logging
3 | import numpy as num
4 | from pyrocko.guts import Int, Bool, Float, String
5 | from pyrocko.gf.meta import OutOfBounds
6 | from pyrocko import io, pile
7 |
8 |
9 | def get_phase_arrival_time(engine, source, station, wavename, store_id):
10 | """
11 | Get arrival time from Greens Function store for respective
12 | :class:`pyrocko.gf.seismosizer.Target`,
13 | :class:`pyrocko.gf.meta.Location` pair.
14 |
15 | Parameters
16 | ----------
17 | engine : :class:`pyrocko.gf.seismosizer.LocalEngine`
18 | source : :class:`pyrocko.gf.meta.Location`
19 | can be therefore :class:`pyrocko.gf.seismosizer.Source` or
20 | :class:`pyrocko.model.Event`
21 |     station : :class:`pyrocko.model.Station`
22 | wavename : string
23 | of the tabulated phase_def that determines the phase arrival
24 |
25 | Returns
26 | -------
27 | scalar, float of the arrival time of the wave
28 | """
29 | store = engine.get_store(store_id)
30 | dist = station.distance_to(source)
31 | depth = source.depth
32 | return store.t(wavename, (depth, dist)) + source.time
33 |
34 |
35 | def add_noise(traces, engine, event, stations, store_id,
36 | phase_def='P'):
37 | '''
38 |     Estimate mean and variance of the noise before the P-phase onset and add
39 |     Gaussian noise with these statistics to each trace
40 |
41 | Parameters
42 | ----------
43 |     traces : list
44 | of :class:`pyrocko.trace.Trace` containing observed data
45 | event : :class:`pyrocko.meta.Event`
46 | reference event from catalog
47 | phase_def : :class:'pyrocko.gf.Timing'
48 |
49 | Returns
50 | -------
51 |     list of :class:`pyrocko.trace.Trace` objects with noise added
52 | '''
53 | noised_traces = []
54 | for tr, station in zip(traces, stations):
55 |
56 | if tr is None:
57 | pass
58 | else:
59 | arrival_time = get_phase_arrival_time(
60 | engine=engine, source=event,
61 | station=station, wavename=phase_def,
62 | store_id=store_id)
63 | extracted = tr.chop(tr.tmin, arrival_time-15,
64 | inplace=False)
65 |
66 | mean = num.mean(extracted.ydata)
67 | var = num.var(extracted.ydata)
68 | noise_data = num.random.normal(loc=mean,
69 | scale=var,
70 | size=num.shape(tr.ydata))
71 | tr.ydata = tr.ydata+(noise_data)
72 | noised_traces.append(tr)
73 | return noised_traces
74 |
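75 | # Illustrative usage sketch (not part of the shipped workflow; the engine setup and the
76 | # store name below are assumptions chosen for demonstration only):
77 | #
78 | #     from pyrocko.gf import LocalEngine
79 | #     engine = LocalEngine(store_superdirs=['gf_stores'])
80 | #     noised = add_noise(traces, engine, event, stations,
81 | #                        store_id='my_gf_store', phase_def='P')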
--------------------------------------------------------------------------------
/src/common/Globals.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import getpass
4 | import platform
5 | from palantiri.common import Basic
6 | from palantiri.common import Logfile
7 | from palantiri.common import ConfigFile
8 |
9 | isClient = False
10 | isDebug= False
11 |
12 | ConfigDict = None # Configuration file as dictionary
13 | ProtFileDir = None # directory for protocol files
14 |
15 | EVENTS = 'events'
16 |
17 | _eventDir = None
18 | _eventDir_emp = None
19 |
20 |
21 | def setEventDir(s):
22 | global _eventDir
23 |
24 | _eventDir = s
25 | return s
26 |
27 | def setEventDir_emp(s):
28 | global _eventDir_emp
29 |
30 | _eventDir_emp = s
31 | return s
32 |
33 | def EventDir(): # event directory
34 | global _eventDir
35 |
36 | s = os.path.join(os.getcwd(), EVENTS)
37 | n = len(sys.argv)
38 |
39 | eventDir = os.path.join(s, sys.argv[2])
40 |
41 | return eventDir
42 |
43 |
44 | def EventDir_emp(): # event directory
45 | global _eventDir_emp
46 |
47 | s = os.path.join(os.getcwd(), EVENTS)
48 | n = len(sys.argv)
49 | try:
50 | _eventDir_emp = os.path.join(s, sys.argv[3])
51 | except IndexError:
52 | _eventDir_emp = None
53 | return _eventDir_emp
54 |
55 | def TempFileName(name):
56 |
57 |     assert ProtFileDir is not None
58 | Basic.createDirectory(ProtFileDir)
59 | return os.path.join(ProtFileDir,name)
60 |
61 | def KeyfileFolder():
62 | return os.path.join(EventDir(), ConfigDict ['keyfilefolder'])
63 |
64 | # -------------------------------------------------------------------------------------------------
65 |
66 | def _error(text):
67 | print('\nError: ' + text + '\n')
68 | sys.exit(1)
69 |
70 |
71 | def checkNrOfParameter(nMin, nMax):
72 |
73 | if len(sys.argv) < nMin: _error('event name missing')
74 | if len(sys.argv) > nMax: _error('Too many parameters')
75 |
76 |
77 |
78 | def checkEventDirParameter(param):
79 |
80 | s1 = os.path.basename(param)
81 | dir = os.path.join(os.getcwd(), EVENTS, s1)
82 | return os.path.isdir(dir)
83 |
84 |
85 | def init(configFileName = None):
86 |
87 | global EventDir, ProtFileDir, ConfigDict, isDebug
88 |
89 | ProtFileDir = os.path.join(EventDir(), 'tmp1')
90 |
91 | if True: # not isClient:
92 | ConfigDict = ConfigFile.readGlobalConf(configFileName)
93 |
94 |     if ConfigDict is None: return False
95 | key = 'DEBUG_FLAG'
96 |
97 | if not isClient:
98 | if isDebug: Logfile.add('Debugging is on')
99 |
100 | return True
101 |
--------------------------------------------------------------------------------
/src/data/prem-no-ocean.l.nd:
--------------------------------------------------------------------------------
1 | 0. 5.8 3.2 2.6 1456. 600.
2 | 15. 5.8 3.2 2.6 1456. 600.
3 | 15. 6.8 3.9 2.9 1350. 600.
4 | 24.4 6.8 3.9 2.9 1350. 600.
5 | mantle
6 | 24.4 8.111 4.491 3.381 1446. 600.
7 | 80. 8.077 4.47 3.375 1448. 600.
8 | 80. 8.077 4.47 3.375 195.5 80.
9 | 220. 7.99 4.419 3.36 195.8 80.
10 | 220. 8.559 4.644 3.436 362.9 143.
11 | 400. 8.905 4.77 3.543 372.3 143.
12 | 400. 9.134 4.933 3.724 366.3 143.
13 | 600. 10.16 5.516 3.976 362.3 143.
14 | 600. 10.16 5.516 3.976 362.3 143.
15 | 670. 10.27 5.57 3.992 362.9 143.
16 | 670. 10.75 5.945 4.381 759.3 312.
17 | 771. 11.07 6.24 4.443 730.4 312.
18 | 771. 11.08 6.229 4.445 734.4 312.
19 | 1165. 11.72 6.547 4.674 745. 312.
20 | 1460. 12.16 6.674 4.843 772.1 312.
21 | 2741. 13.68 7.273 5.495 820.6 312.
22 | 2741. 13.68 7.266 5.491 822.2 312.
23 | 2891. 13.72 7.265 5.566 826.7 312.
24 | outer-core
25 | 2891. 8.092 0. 9.927 5.782E+04 0.
26 | 3569. 9.07 0. 10.87 5.782E+04 0.
27 | 4246. 9.752 0. 11.57 5.782E+04 0.
28 | 5150. 10.37 0. 12.2 5.782E+04 0.
29 | inner-core
30 | 5150. 11.04 3.51 12.77 621.3 84.6
31 | 5760. 11.21 3.633 13.02 598.8 84.6
32 | 6371. 11.28 3.673 13.1 592.2 84.6
33 |
--------------------------------------------------------------------------------
/src/common/DataTypes.py:
--------------------------------------------------------------------------------
1 | class Station(object):
2 |
3 | def __init__(self, net, sta, loc, comp, lat=0, lon=0, ele=0, dip=0,
4 | azi=0, gain=0, inst=None):
5 |
6 | self.net = net
7 | self.sta = sta
8 | self.loc = loc
9 | self.comp = comp
10 | self.site = '???'
11 | self.lat = lat
12 | self.lon = lon
13 | self.ele = ele
14 | self.dip = dip
15 | self.azi = azi
16 | self.gain = gain
17 | self.inst = inst
18 | self.provider = None
19 |
20 | def fullName(self):
21 | return self.net + '.' + self.sta + '.' + self.loc + '.' + self.comp
22 |
23 | def stationName(self):
24 | return self.net + '.' + self.sta
25 |
26 | def location(self):
27 | return Location(self.lat, self.lon)
28 |
29 |
30 | class Location(object):
31 |
32 | def __init__(self, lat, lon):
33 |
34 | self.lat = float(lat)
35 | self.lon = float(lon)
36 |
37 | def __str__(self):
38 | return('(%f,%f)') % (self.lat, self.lon)
39 |
40 | def __eq__(self, other):
41 | return(self.lat == other.lat and self.lon == other.lon)
42 |
43 | def set(self, d):
44 |
45 | self.lat = d.lat
46 | self.lon = d.lon
47 |
48 |
49 | def dictToLocation(d):
50 |
51 | lat = float(d['lat'])
52 | lon = float(d['lon'])
53 | return Location(lat, lon)
54 |
55 |
56 | def _getDelim(name):
57 |
58 | if '.' in name:
59 | return '.'
60 | elif '_' in name:
61 | return '_'
62 |
63 | assert False
64 |
65 |
66 | def toStationNames(strings):
67 |
68 | names = []
69 |
70 | if len(strings) == 0:
71 | return names
72 |
73 | delim = _getDelim(strings[0])
74 |
75 | for i in range(len(strings)):
76 | s = strings[i].split(delim)
77 | names.append(s[0] + delim + s[1])
78 |
79 | return names
80 |
81 |
82 | def toNetwork(name):
83 | s = name.split(_getDelim(name))
84 | return s[0]
85 |
86 |
87 | def toStation(name):
88 | s = name.split(_getDelim(name))
89 | return s[1]
90 |
91 |
92 | def toNetAndStation(name):
93 | return toNetwork(name) + _getDelim(name) + toStation(name)
94 |
95 |
96 | def isSameNetwork(s1,s2):
97 | return(toNetwork(s1) == toNetwork(s2))
98 |
99 |
100 | def toNetworkNames(strings):
101 |
102 | names = []
103 |
104 | for i in range(len(strings)):
105 | names.append(toNetwork(strings[i]))
106 |
107 | return names
108 |
109 |
110 | def selectNetwork(stationList, network):
111 |
112 | result = []
113 |
114 | for s in stationList:
115 | if s.startswith(network):
116 | result.append(s)
117 |
118 | return result
119 |
--------------------------------------------------------------------------------
/src/data/ak135-f-continental.l.nd:
--------------------------------------------------------------------------------
1 | 0. 5.8 3.46 2.6 1264. 600.
2 | 20. 5.8 3.46 2.6 1264. 600.
3 | 20. 6.5 3.85 2.9 1283. 600.
4 | 35. 6.5 3.85 2.9 1283. 600.
5 | mantle
6 | 35. 8.04 4.48 3.58 1449. 600.
7 | 77.5 8.045 4.49 3.5 1445. 600.
8 | 77.5 8.045 4.49 3.5 180.6 75.
9 | 120. 8.05 4.5 3.427 180. 75.
10 | 120. 8.05 4.5 3.427 182.6 76.06
11 | 165. 8.175 4.509 3.371 188.7 76.55
12 | 210. 8.301 4.518 3.324 201. 79.4
13 | 210. 8.3 4.52 3.321 336.9 133.3
14 | 410. 9.03 4.871 3.504 376.5 146.1
15 | 410. 9.36 5.08 3.929 414.1 162.7
16 | 660. 10.2 5.611 3.918 428.5 172.9
17 | 660. 10.79 5.965 4.229 1349. 549.6
18 | 764. 11.07 6.215 4.391 1276. 536.7
19 | 1180. 11.75 6.503 4.699 1205. 491.9
20 | 1700. 12.45 6.786 4.955 1126. 445.8
21 | 2220. 13.05 7.018 5.21 1023. 394.3
22 | 2740. 13.64 7.246 5.44 933.6 350.9
23 | 2740. 13.65 7.248 5.694 725.4 272.7
24 | 2892. 13.66 7.282 5.773 724.5 274.5
25 | outer-core
26 | 2892. 7.99 0. 9.943 5.782E+04 0.
27 | 3571. 9.081 0. 10.88 5.782E+04 0.
28 | 4249. 9.78 0. 11.56 5.782E+04 0.
29 | 5154. 10.34 0. 12.16 5.782E+04 0.
30 | inner-core
31 | 5154. 11.04 3.509 12.71 631.1 85.03
32 | 5702. 11.2 3.626 12.93 608.5 85.03
33 | 6371. 11.28 3.676 13.03 599.9 85.03
34 |
--------------------------------------------------------------------------------
/src/data/ak135-f-average-no-ocean.l.nd:
--------------------------------------------------------------------------------
1 | 0. 5.8 3.2 2.6 1478. 600.
2 | 10. 5.8 3.2 2.6 1478. 600.
3 | 10. 6.8 3.9 2.92 1368. 600.
4 | 18. 6.8 3.9 2.92 1368. 600.
5 | mantle
6 | 18. 8.036 4.486 3.638 949.7 394.6
7 | 80. 8.04 4.481 3.5 1008. 417.6
8 | 80. 8.045 4.49 3.502 182. 75.6
9 | 120. 8.05 4.5 3.427 182.6 76.06
10 | 120. 8.05 4.5 3.427 182.6 76.06
11 | 165. 8.175 4.509 3.371 188.7 76.55
12 | 210. 8.301 4.518 3.324 201. 79.4
13 | 210. 8.3 4.52 3.321 336.9 133.3
14 | 410. 9.03 4.871 3.504 376.5 146.1
15 | 410. 9.36 5.08 3.929 414.1 162.7
16 | 660. 10.2 5.611 3.918 428.5 172.9
17 | 660. 10.79 5.965 4.229 1349. 549.6
18 | 764. 11.07 6.215 4.391 1276. 536.7
19 | 1180. 11.75 6.503 4.699 1205. 491.9
20 | 1700. 12.45 6.786 4.955 1126. 445.8
21 | 2220. 13.05 7.018 5.21 1023. 394.3
22 | 2740. 13.64 7.246 5.44 933.6 350.9
23 | 2740. 13.65 7.248 5.694 725.4 272.7
24 | 2892. 13.66 7.282 5.773 724.5 274.5
25 | outer-core
26 | 2892. 7.99 0. 9.943 5.782E+04 0.
27 | 3571. 9.081 0. 10.88 5.782E+04 0.
28 | 4249. 9.78 0. 11.56 5.782E+04 0.
29 | 5154. 10.34 0. 12.16 5.782E+04 0.
30 | inner-core
31 | 5154. 11.04 3.509 12.71 631.1 85.03
32 | 5702. 11.2 3.626 12.93 608.5 85.03
33 | 6371. 11.28 3.676 13.03 599.9 85.03
34 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from setuptools import setup
4 | from setuptools.command.install import install
5 |
6 |
7 | class CustomInstallCommand(install):
8 | def run(self):
9 | install.run(self)
10 |
11 |
12 | setup(
13 | cmdclass={
14 | 'install': CustomInstallCommand,
15 | },
16 |
17 | name='palantiri',
18 |
19 | description='A python based seismological backprojection tool.\
20 | In Quenya, palantíri means "far-seeing"',
21 |
22 | version='0.4',
23 |
24 | author='Andreas Steinberg',
25 |
26 | author_email='andreas.steinberg@ifg.uni-kiel.de',
27 |
28 | packages=[
29 | 'palantiri',
30 | 'palantiri.apps',
31 | 'palantiri.cluster',
32 | 'palantiri.common',
33 | 'palantiri.process',
34 | 'palantiri.skeleton',
35 | 'palantiri.tools',
36 | 'palantiri.data',
37 | 'palantiri.waveform',
38 | ],
39 | python_requires='>=3.5',
40 | entry_points={
41 | 'console_scripts': [
42 | 'bat = palantiri.apps.arraytool:main',
43 | 'palantiri_plot = palantiri.apps.plot:main',
44 | 'palantiri_down = palantiri.apps.palantiri_down:main',
45 | 'palantiri_geometry_export = palantiri.apps.palantiri_geometry_export:main',
46 | 'palantiri_eventsearch = palantiri.tools.eventsearch:main',
47 | 'palantiri_cluster = palantiri.cluster.callcluster:main',
48 | 'palantiri_create = palantiri.tools.create:main',
49 | 'palantiri_process = palantiri.process.main:main',
50 | 'palantiri_init = palantiri.tools.palantiri_init:main',
51 |
52 | ]
53 | },
54 | package_dir={'palantiri': 'src'},
55 |
56 | package_data={
57 | 'palantiri': [
58 | 'data/*nd',
59 | 'skeleton/global.conf',
60 | 'skeleton/example.yaml',
61 | 'skeleton/example.config']},
62 | data_files=[],
63 |
64 | license='GPLv3',
65 |
66 | classifiers=[
67 | 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
68 | 'Development Status :: 5 - Production/Stable',
69 | 'Intended Audience :: Science/Research',
70 | 'Programming Language :: Python :: 2.7',
71 | 'Programming Language :: Python :: 3',
72 | 'Programming Language :: Python :: Implementation :: CPython',
73 | 'Topic :: Scientific/Engineering',
74 | 'Topic :: Scientific/Engineering :: Physics',
75 | 'Topic :: Scientific/Engineering :: Visualization',
76 | 'Topic :: Scientific/Engineering :: Information Analysis',
77 | 'Topic :: Software Development :: Libraries :: Application Frameworks',
78 | ],
79 |
80 | keywords=[
81 | 'seismology, waveform analysis, earthquake modelling, geophysics,'
82 | ' backprojection'],
83 | )
84 |
--------------------------------------------------------------------------------
/src/tools/eventsearch.py:
--------------------------------------------------------------------------------
1 | from configparser import ConfigParser
2 | from urllib.parse import urlencode
3 | from urllib.request import urlopen
4 |
5 |
6 | class Event(object):
7 | def __init__(self, id, region, lat, lon, otime, mag):
8 | self.id = id
9 | self.region = region
10 | self.lat = lat
11 | self.lon = lon
12 | self.otime = otime
13 | self.mag = mag
14 |
15 | def __str__(self):
16 | return 'EventID: {0:10} ---> {1:35} {2:30} {3:10}{4:12}{5:12}'.format(
17 | self.id, self.region, self.otime, self.mag, self.lat, self.lon)
18 |
19 |
20 | def init():
21 | cDict = {}
22 |     parser = ConfigParser()
23 | parser.read('global.conf')
24 | for section_name in parser.sections():
25 | for name, value in parser.items(section_name):
26 | cDict[name] = value
27 | return cDict
28 |
29 |
30 | def parseIrisEventWebservice(searchparameter):
31 |
32 | if not searchparameter['resultlimit']:
33 | searchparameter['resultlimit'] = 10
34 |
35 | url = 'http://service.iris.edu/fdsnws/event/1/query?'
36 | catalog = searchparameter['catalog']
37 | if len(catalog) > 1:
38 | parameter = urlencode({
39 | 'catalog': searchparameter['catalog'],
40 | 'minmag': searchparameter['magmin'],
41 | 'starttime': searchparameter['date_min'],
42 | 'endtime': searchparameter['date_max'],
43 | 'format': 'text',
44 | 'orderby': 'time',
45 | 'limit': searchparameter['resultlimit'],
46 | })
47 | else:
48 | parameter = urlencode({
49 | 'minmag': searchparameter['magmin'],
50 | 'starttime': searchparameter['date_min'],
51 | 'endtime': searchparameter['date_max'],
52 | 'format': 'text',
53 | 'orderby': 'time',
54 | 'limit': searchparameter['resultlimit'],
55 | })
56 | u = ('%s%s') % (url, parameter)
57 |
58 | data = urlopen(u).read()
59 | data = data.decode('utf_8')
60 | data = data.split('\n')
61 | dl = data[1:]
62 |
63 | EL = []
64 | for i in dl:
65 | if len(i) != 0:
66 | i = i.split('|')
67 | EL.append(Event(i[0], i[12], i[2], i[3], i[1], i[10]))
68 |
69 | print('\n\n # to get data for event use eventID #\n\n')
70 |     if len(EL) != 0:
71 | for event in EL:
72 | print(event)
73 | else:
74 | print('\033[31m No event entry found \033[0m\n')
75 |
76 |
77 | def searchEvent(searchparameter):
78 |
79 | parseIrisEventWebservice(searchparameter)
80 |
81 |
82 | def main():
83 | options = init()
84 | searchEvent(options)
85 |
--------------------------------------------------------------------------------
/src/data/prem.l.nd:
--------------------------------------------------------------------------------
1 | 0. 1.45 0. 1.02 5.782E+04 0.
2 | 3. 1.45 0. 1.02 5.782E+04 0.
3 | 3. 5.8 3.2 2.6 1456. 600.
4 | 15. 5.8 3.2 2.6 1456. 600.
5 | 15. 6.8 3.9 2.9 1350. 600.
6 | 24.4 6.8 3.9 2.9 1350. 600.
7 | mantle
8 | 24.4 8.111 4.491 3.381 1446. 600.
9 | 80. 8.077 4.47 3.375 1448. 600.
10 | 80. 8.077 4.47 3.375 195.5 80.
11 | 220. 7.99 4.419 3.36 195.8 80.
12 | 220. 8.559 4.644 3.436 362.9 143.
13 | 400. 8.905 4.77 3.543 372.3 143.
14 | 400. 9.134 4.933 3.724 366.3 143.
15 | 600. 10.16 5.516 3.976 362.3 143.
16 | 600. 10.16 5.516 3.976 362.3 143.
17 | 670. 10.27 5.57 3.992 362.9 143.
18 | 670. 10.75 5.945 4.381 759.3 312.
19 | 771. 11.07 6.24 4.443 730.4 312.
20 | 771. 11.08 6.229 4.445 734.4 312.
21 | 1165. 11.72 6.547 4.674 745. 312.
22 | 1460. 12.16 6.674 4.843 772.1 312.
23 | 2741. 13.68 7.273 5.495 820.6 312.
24 | 2741. 13.68 7.266 5.491 822.2 312.
25 | 2891. 13.72 7.265 5.566 826.7 312.
26 | outer-core
27 | 2891. 8.092 0. 9.927 5.782E+04 0.
28 | 3569. 9.07 0. 10.87 5.782E+04 0.
29 | 4246. 9.752 0. 11.57 5.782E+04 0.
30 | 5150. 10.37 0. 12.2 5.782E+04 0.
31 | inner-core
32 | 5150. 11.04 3.51 12.77 621.3 84.6
33 | 5760. 11.21 3.633 13.02 598.8 84.6
34 | 6371. 11.28 3.673 13.1 592.2 84.6
35 |
--------------------------------------------------------------------------------
/src/data/ak135-f-average.l.nd:
--------------------------------------------------------------------------------
1 | 0. 1.45 0. 1.02 5.782E+04 0.
2 | 3. 1.45 0. 1.02 5.782E+04 0.
3 | 3. 1.65 1. 2. 163.3 80.
4 | 3.3 1.65 1. 2. 163.3 80.
5 | 3.3 5.8 3.2 2.6 1478. 600.
6 | 10. 5.8 3.2 2.6 1478. 600.
7 | 10. 6.8 3.9 2.92 1368. 600.
8 | 18. 6.8 3.9 2.92 1368. 600.
9 | mantle
10 | 18. 8.036 4.486 3.638 949.7 394.6
11 | 80. 8.04 4.481 3.5 1008. 417.6
12 | 80. 8.045 4.49 3.502 182. 75.6
13 | 120. 8.05 4.5 3.427 182.6 76.06
14 | 120. 8.05 4.5 3.427 182.6 76.06
15 | 165. 8.175 4.509 3.371 188.7 76.55
16 | 210. 8.301 4.518 3.324 201. 79.4
17 | 210. 8.3 4.52 3.321 336.9 133.3
18 | 410. 9.03 4.871 3.504 376.5 146.1
19 | 410. 9.36 5.08 3.929 414.1 162.7
20 | 660. 10.2 5.611 3.918 428.5 172.9
21 | 660. 10.79 5.965 4.229 1349. 549.6
22 | 764. 11.07 6.215 4.391 1276. 536.7
23 | 1180. 11.75 6.503 4.699 1205. 491.9
24 | 1700. 12.45 6.786 4.955 1126. 445.8
25 | 2220. 13.05 7.018 5.21 1023. 394.3
26 | 2740. 13.64 7.246 5.44 933.6 350.9
27 | 2740. 13.65 7.248 5.694 725.4 272.7
28 | 2892. 13.66 7.282 5.773 724.5 274.5
29 | outer-core
30 | 2892. 7.99 0. 9.943 5.782E+04 0.
31 | 3571. 9.081 0. 10.88 5.782E+04 0.
32 | 4249. 9.78 0. 11.56 5.782E+04 0.
33 | 5154. 10.34 0. 12.16 5.782E+04 0.
34 | inner-core
35 | 5154. 11.04 3.509 12.71 631.1 85.03
36 | 5702. 11.2 3.626 12.93 608.5 85.03
37 | 6371. 11.28 3.676 13.03 599.9 85.03
38 |
--------------------------------------------------------------------------------
/src/process/deserializer.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from palantiri.common import Basic
3 | from collections import OrderedDict
4 |
5 | '''
6 | module for deserializing pickled data from different processes
7 | '''
8 |
9 |
10 | def deserializeTTT(numproc, flag_rpe=False):
11 |
12 | L = []
13 |
14 | for i in range(numproc):
15 | if flag_rpe is True:
16 | data = Basic.loadDump(str(i)+'-ttt_emp.pkl')
17 |
18 | else:
19 |
20 | data = Basic.loadDump(str(i)+'-ttt.pkl')
21 |
22 | if data is not None:
23 | L.append(data)
24 |
25 | TTTGridMap = OrderedDict()
26 |
27 | for i in L:
28 | if sys.version_info.major >= 3:
29 | for j in sorted(i.keys()):
30 | TTTGridMap[j] = i[j]
31 | else:
32 | for j in i.keys():
33 | TTTGridMap[j] = i[j]
34 |
35 | return TTTGridMap
36 |
37 |
38 | def deserializeTTT_cube(numproc):
39 |
40 | L = []
41 |
42 | for i in range(numproc):
43 | data = Basic.loadDump(str(i)+'-ttt.pkl')
44 |
45 | if data is not None:
46 | L.append(data)
47 |
48 | TTTGridMap = OrderedDict()
49 |
50 | for i in L:
51 | if sys.version_info.major >= 3:
52 | for j in sorted(i.keys()):
53 | TTTGridMap[j] = i[j]
54 | else:
55 | for j in i.keys():
56 | TTTGridMap[j] = i[j]
57 |
58 | return TTTGridMap
59 |
60 |
61 | def deserializeMinTMaxT(numproc, flag_rpe=False):
62 |
63 | L = []
64 | for i in range(numproc):
65 | if flag_rpe is True:
66 | data = Basic.loadDump('minmax-emp'+str(i)+'.pkl')
67 | else:
68 | data = Basic.loadDump('minmax-'+str(i)+'.pkl')
69 | if data is not None:
70 | L.append(data)
71 |
72 | mint = min([x.mint for x in L])
73 | maxt = max([x.maxt for x in L])
74 |
75 | return mint, maxt
76 |
77 |
78 | def deserializeSembDict(numproc, flag_rpe=False):
79 |
80 | L = []
81 |
82 | for i in range(numproc):
83 | if flag_rpe is True:
84 | data = Basic.loadDump('sembDict-emp'+str(i)+'.pkl')
85 | else:
86 | data = Basic.loadDump('sembDict-'+str(i)+'.pkl')
87 | if data is not None:
88 | L.append(data)
89 |
90 | sembDict = OrderedDict()
91 |
92 | for i in L:
93 | if sys.version_info.major >= 3:
94 | for j in sorted(i.keys()):
95 | sembDict[j] = i[j]
96 | else:
97 | for j in i.keys():
98 | sembDict[j] = i[j]
99 |
100 | return sembDict
101 |
102 |
103 | def deserializeSembMaxFile(numproc, flag_rpe=False):
104 |
105 | L = []
106 |
107 | for i in range(numproc):
108 | if flag_rpe is True:
109 | data = Basic.loadDump('sembMAX-emp'+str(i)+'.pkl')
110 | else:
111 | data = Basic.loadDump('sembMAX-'+str(i)+'.pkl')
112 | if data is not None:
113 | L.append(data)
114 |
115 | sembMax = OrderedDict()
116 |
117 | for i in L:
118 | for j in i.keys():
119 | sembMax[j] = i[j]
120 |
121 | return sembMax
122 |
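123 | # Illustrative call pattern (a sketch; it assumes the per-process pickle files named above have
124 | # already been written by the parallel workers):
125 | #
126 | #     ttt_map = deserializeTTT(numproc=4)
127 | #     mint, maxt = deserializeMinTMaxT(numproc=4)
128 | #     semb = deserializeSembDict(numproc=4)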
--------------------------------------------------------------------------------
/src/data/prem-no-ocean.m.nd:
--------------------------------------------------------------------------------
1 | 0. 5.8 3.2 2.6 1456. 600.
2 | 15. 5.8 3.2 2.6 1456. 600.
3 | 15. 6.8 3.9 2.9 1350. 600.
4 | 24.4 6.8 3.9 2.9 1350. 600.
5 | mantle
6 | 24.4 8.111 4.491 3.381 1446. 600.
7 | 80. 8.077 4.47 3.375 1448. 600.
8 | 80. 8.077 4.47 3.375 195.5 80.
9 | 220. 7.99 4.419 3.36 195.8 80.
10 | 220. 8.559 4.644 3.436 362.9 143.
11 | 400. 8.905 4.77 3.543 372.3 143.
12 | 400. 9.134 4.933 3.724 366.3 143.
13 | 600. 10.16 5.516 3.976 362.3 143.
14 | 600. 10.16 5.516 3.976 362.3 143.
15 | 670. 10.27 5.57 3.992 362.9 143.
16 | 670. 10.75 5.945 4.381 759.3 312.
17 | 771. 11.07 6.24 4.443 730.4 312.
18 | 771. 11.07 6.24 4.443 730.4 312.
19 | 971. 11.42 6.378 4.563 744. 312.
20 | 1171. 11.73 6.564 4.678 742.2 312.
21 | 1371. 12.02 6.619 4.79 766.2 312.
22 | 1571. 12.29 6.725 4.898 775.5 312.
23 | 1771. 12.54 6.825 5.003 784. 312.
24 | 1971. 12.78 6.92 5.106 792. 312.
25 | 2171. 13.02 7.011 5.207 799.8 312.
26 | 2371. 13.25 7.1 5.307 807.4 312.
27 | 2571. 13.45 7.189 5.407 811.7 312.
28 | 2741. 13.68 7.266 5.491 822.2 312.
29 | 2741. 13.68 7.266 5.491 822.2 312.
30 | 2891. 13.72 7.265 5.566 826.7 312.
31 | outer-core
32 | 2891. 8.083 0. 9.919 5.782E+04 0.
33 | 3456. 8.923 0. 10.72 5.782E+04 0.
34 | 4020. 9.544 0. 11.35 5.782E+04 0.
35 | 4585. 10. 0. 11.83 5.782E+04 0.
36 | 5150. 10.36 0. 12.18 5.782E+04 0.
37 | inner-core
38 | 5150. 11.04 3.507 12.76 622. 84.6
39 | 5577. 11.17 3.602 12.96 604.5 84.6
40 | 6005. 11.25 3.655 13.06 595.1 84.6
41 | 6371. 11.27 3.669 13.09 593. 84.6
42 |
--------------------------------------------------------------------------------
/src/data/prem.m.nd:
--------------------------------------------------------------------------------
1 | 0. 1.45 0. 1.02 5.782E+04 0.
2 | 3. 1.45 0. 1.02 5.782E+04 0.
3 | 3. 5.8 3.2 2.6 1456. 600.
4 | 15. 5.8 3.2 2.6 1456. 600.
5 | 15. 6.8 3.9 2.9 1350. 600.
6 | 24.4 6.8 3.9 2.9 1350. 600.
7 | mantle
8 | 24.4 8.111 4.491 3.381 1446. 600.
9 | 80. 8.077 4.47 3.375 1448. 600.
10 | 80. 8.077 4.47 3.375 195.5 80.
11 | 220. 7.99 4.419 3.36 195.8 80.
12 | 220. 8.559 4.644 3.436 362.9 143.
13 | 400. 8.905 4.77 3.543 372.3 143.
14 | 400. 9.134 4.933 3.724 366.3 143.
15 | 600. 10.16 5.516 3.976 362.3 143.
16 | 600. 10.16 5.516 3.976 362.3 143.
17 | 670. 10.27 5.57 3.992 362.9 143.
18 | 670. 10.75 5.945 4.381 759.3 312.
19 | 771. 11.07 6.24 4.443 730.4 312.
20 | 771. 11.07 6.24 4.443 730.4 312.
21 | 971. 11.42 6.378 4.563 744. 312.
22 | 1171. 11.73 6.564 4.678 742.2 312.
23 | 1371. 12.02 6.619 4.79 766.2 312.
24 | 1571. 12.29 6.725 4.898 775.5 312.
25 | 1771. 12.54 6.825 5.003 784. 312.
26 | 1971. 12.78 6.92 5.106 792. 312.
27 | 2171. 13.02 7.011 5.207 799.8 312.
28 | 2371. 13.25 7.1 5.307 807.4 312.
29 | 2571. 13.45 7.189 5.407 811.7 312.
30 | 2741. 13.68 7.266 5.491 822.2 312.
31 | 2741. 13.68 7.266 5.491 822.2 312.
32 | 2891. 13.72 7.265 5.566 826.7 312.
33 | outer-core
34 | 2891. 8.083 0. 9.919 5.782E+04 0.
35 | 3456. 8.923 0. 10.72 5.782E+04 0.
36 | 4020. 9.544 0. 11.35 5.782E+04 0.
37 | 4585. 10. 0. 11.83 5.782E+04 0.
38 | 5150. 10.36 0. 12.18 5.782E+04 0.
39 | inner-core
40 | 5150. 11.04 3.507 12.76 622. 84.6
41 | 5577. 11.17 3.602 12.96 604.5 84.6
42 | 6005. 11.25 3.655 13.06 595.1 84.6
43 | 6371. 11.27 3.669 13.09 593. 84.6
44 |
--------------------------------------------------------------------------------
/src/data/ak135-f-continental.m.nd:
--------------------------------------------------------------------------------
1 | 0. 5.8 3.46 2.6 1264. 600.
2 | 20. 5.8 3.46 2.6 1264. 600.
3 | 20. 6.5 3.85 2.9 1283. 600.
4 | 35. 6.5 3.85 2.9 1283. 600.
5 | mantle
6 | 35. 8.04 4.48 3.58 1449. 600.
7 | 77.5 8.045 4.49 3.5 1445. 600.
8 | 77.5 8.045 4.49 3.5 180.6 75.
9 | 120. 8.05 4.5 3.427 180. 75.
10 | 120. 8.05 4.5 3.427 182.6 76.06
11 | 165. 8.175 4.509 3.371 188.7 76.55
12 | 210. 8.301 4.518 3.324 201. 79.4
13 | 210. 8.3 4.519 3.323 338.2 133.7
14 | 300. 8.628 4.679 3.401 353.6 138.7
15 | 410. 9.03 4.87 3.506 377.5 146.5
16 | 410. 9.36 5.08 3.929 414.1 162.7
17 | 660. 10.2 5.611 3.918 428.5 172.9
18 | 660. 10.79 5.965 4.24 1349. 549.5
19 | 764. 11.07 6.215 4.359 1276. 537.1
20 | 849.1 11.21 6.272 4.463 1263. 527.3
21 | 1038. 11.52 6.407 4.616 1230. 506.9
22 | 1227. 11.81 6.527 4.71 1198. 488.
23 | 1416. 12.08 6.636 4.812 1168. 470.4
24 | 1605. 12.33 6.735 4.909 1141. 454.4
25 | 1795. 12.55 6.827 5.004 1111. 437.7
26 | 1984. 12.78 6.913 5.096 1065. 415.6
27 | 2173. 13. 6.997 5.186 1037. 400.6
28 | 2362. 13.21 7.079 5.273 995.4 381.2
29 | 2551. 13.43 7.162 5.357 968.1 367.3
30 | 2740. 13.65 7.248 5.439 932.5 350.6
31 | 2740. 13.65 7.248 5.693 722.7 271.7
32 | 2790. 13.65 7.259 5.72 726.9 274.
33 | 2839. 13.66 7.27 5.746 725.1 274.
34 | 2892. 13.66 7.282 5.772 723.1 274.
35 | outer-core
36 | 2892. 7.972 0. 9.928 5.782E+04 0.
37 | 3344. 8.744 0. 10.59 5.782E+04 0.
38 | 3797. 9.338 0. 11.12 5.782E+04 0.
39 | 4249. 9.759 0. 11.55 5.782E+04 0.
40 | 4702. 10.09 0. 11.88 5.782E+04 0.
41 | 5154. 10.32 0. 12.14 5.782E+04 0.
42 | inner-core
43 | 5154. 11.04 3.505 12.7 632.8 85.03
44 | 5397. 11.12 3.567 12.82 619.2 85.03
45 | 5884. 11.23 3.647 12.97 605. 85.03
46 | 6371. 11.27 3.672 13.02 600.6 85.03
47 |
--------------------------------------------------------------------------------
/src/data/ak135-f-average-no-ocean.m.nd:
--------------------------------------------------------------------------------
1 | 0. 5.8 3.2 2.6 1478. 600.
2 | 10. 5.8 3.2 2.6 1478. 600.
3 | 10. 6.8 3.9 2.92 1368. 600.
4 | 18. 6.8 3.9 2.92 1368. 600.
5 | mantle
6 | 18. 8.036 4.486 3.638 949.7 394.6
7 | 80. 8.04 4.481 3.5 1008. 417.6
8 | 80. 8.045 4.49 3.502 182. 75.6
9 | 120. 8.05 4.5 3.427 182.6 76.06
10 | 120. 8.05 4.5 3.427 182.6 76.06
11 | 165. 8.175 4.509 3.371 188.7 76.55
12 | 210. 8.301 4.518 3.324 201. 79.4
13 | 210. 8.3 4.519 3.323 338.2 133.7
14 | 300. 8.628 4.679 3.401 353.6 138.7
15 | 410. 9.03 4.87 3.506 377.5 146.5
16 | 410. 9.36 5.08 3.929 414.1 162.7
17 | 660. 10.2 5.611 3.918 428.5 172.9
18 | 660. 10.79 5.965 4.24 1349. 549.5
19 | 764. 11.07 6.215 4.359 1276. 537.1
20 | 849.1 11.21 6.272 4.463 1263. 527.3
21 | 1038. 11.52 6.407 4.616 1230. 506.9
22 | 1227. 11.81 6.527 4.71 1198. 488.
23 | 1416. 12.08 6.636 4.812 1168. 470.4
24 | 1605. 12.33 6.735 4.909 1141. 454.4
25 | 1795. 12.55 6.827 5.004 1111. 437.7
26 | 1984. 12.78 6.913 5.096 1065. 415.6
27 | 2173. 13. 6.997 5.186 1037. 400.6
28 | 2362. 13.21 7.079 5.273 995.4 381.2
29 | 2551. 13.43 7.162 5.357 968.1 367.3
30 | 2740. 13.65 7.248 5.439 932.5 350.6
31 | 2740. 13.65 7.248 5.693 722.7 271.7
32 | 2790. 13.65 7.259 5.72 726.9 274.
33 | 2839. 13.66 7.27 5.746 725.1 274.
34 | 2892. 13.66 7.282 5.772 723.1 274.
35 | outer-core
36 | 2892. 7.972 0. 9.928 5.782E+04 0.
37 | 3344. 8.744 0. 10.59 5.782E+04 0.
38 | 3797. 9.338 0. 11.12 5.782E+04 0.
39 | 4249. 9.759 0. 11.55 5.782E+04 0.
40 | 4702. 10.09 0. 11.88 5.782E+04 0.
41 | 5154. 10.32 0. 12.14 5.782E+04 0.
42 | inner-core
43 | 5154. 11.04 3.505 12.7 632.8 85.03
44 | 5397. 11.12 3.567 12.82 619.2 85.03
45 | 5884. 11.23 3.647 12.97 605. 85.03
46 | 6371. 11.27 3.672 13.02 600.6 85.03
47 |
--------------------------------------------------------------------------------
/src/process/xcorrfilter.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | sys.path.append('../tools/')
4 | sys.path.append('../common/')
5 |
6 | import logging
7 |
8 | import obspy.core
9 | from obspy.core import read
10 | from obspy.core.utcdatetime import UTCDateTime
11 | import obspy.signal.cross_correlation
12 |
13 | import Logfile
14 | from ObspyFkt import loc2degrees, obs_TravelTimes
15 |
16 | logger = logging.getLogger('ARRAY-MP')
17 |
18 |
19 | class Xcorr(object):
20 |
21 | def __init__(self, Origin, StationMeta, EventPath):
22 |
23 | self.Origin = Origin
24 | self.StationMeta = StationMeta
25 | self.EventPath = EventPath
26 |
27 | def traveltimes(self):
28 |
29 | Logfile.red('Enter AUTOMATIC FILTER')
30 | T = []
31 | Wdict = {}
32 |
33 | for i in self.StationMeta:
34 | de = loc2degrees(self.Origin, i)
35 | tt = obs_TravelTimes(de, self.Origin.depth)
36 |
37 | if tt[0]['phase_name'] == 'P':
38 | time = tt[0]['time']
39 | T.append(time)
40 | tw = self.calculateTimeWindows(time)
41 | w = self.readWaveformsCross(i, tw)
42 | Wdict[i.getName()] = w
43 |
44 | Logfile.red('Exit AUTOMATIC FILTER')
45 |
46 | return Wdict
47 |
48 | def calculateTimeWindows(self, mint):
49 | tw = {}
50 | st = str(self.Origin.time)[:-1]
51 |
52 | tw['start'] = UTCDateTime(UTCDateTime(st)+(mint-5))
53 | tw['end'] = tw['start'] + 20
54 |
55 | Logfile.add(' ORIGIN TIME %s' % UTCDateTime(self.Origin.time))
56 | Logfile.add(' TIME WINDOW: %s - %s' % (tw['start'], tw['end']))
57 | return tw
58 |
59 | def filterWaveform(self, Waveform):
60 |
61 | Logfile.red('Filter Waveform:')
62 |
63 | for i in Waveform:
64 | i.detrend("simple")
65 | i.filter("bandpass", freqmin=0.05, freqmax=1, corners=3,
66 | zerophase=False)
67 |
68 | return Waveform
69 |
70 | def readWaveformsCross(self, station, tw):
71 |
72 | time = self.Origin.time
73 | ts = time.split('T')
74 |
75 | datet = ts[0]
76 | datet = datet.split('-')
77 | year = datet[0].strip()
78 | month = datet[1]
79 | day = datet[2]
80 |
81 | julday = UTCDateTime(int(year), int(month), int(day)).julday
82 | julday = "%03d" % julday
83 | sdspath = os.path.join(self.EventPath, 'data', year)
84 |
85 | if station.loc == '--':
86 | station.loc = ''
87 |
88 | streamData = station.net+'.'+station.sta+'.'+station.loc+'.'\
89 | + station.comp+'.D.'+str(year)+'.'+str(julday)
90 | entry = os.path.join(sdspath, station.net, station.sta,
91 | station.comp+'.D', streamData)
92 | st = read(entry, format="MSEED", starttime=tw['start'],
93 | endtime=tw['end'], nearest_sample=True)
94 |
95 | if len(st.getGaps()) > 0:
96 | st.merge(method=0, fill_value='interpolate',
97 | interpolation_samples=0)
98 |
99 | st[0].stats.starttime = UTCDateTime(1000)
100 |
101 | stream = self.filterWaveform(st)
102 | return stream
103 |
104 | def doXcorr(self):
105 |
106 | StreamDict = self.traveltimes()
107 | corrDict = {}
108 |
109 | for stream in StreamDict.keys():
110 | ref = StreamDict[stream][0].data
111 |
112 | Logfile.red('Enter Xcorr Procedure')
113 |
114 | for stream in StreamDict.keys():
115 | a, b = obspy.signal.cross_correlation.xcorr(ref,
116 | StreamDict[stream][0],
117 | 100)
118 | shift = a / StreamDict[stream][0].stats.sampling_rate
119 | corrDict[stream] = shift
120 |
121 | Logfile.red('Leave Xcorr Procedure')
122 | return corrDict
123 |
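124 | # Sketch of the shift computation done in doXcorr(), with plain numpy instead
125 | # of obspy (illustrative values only): the lag of the cross-correlation maximum
126 | # between a reference trace and a station trace, converted to seconds.
127 | #
128 | #     import numpy as np
129 | #     sampling_rate = 20.0
130 | #     ref = np.random.randn(2000)
131 | #     other = np.roll(ref, 40)                     # copy delayed by 40 samples
132 | #     cc = np.correlate(other, ref, mode='full')
133 | #     lag = np.argmax(cc) - (len(ref) - 1)         # -> 40 samples
134 | #     shift = lag / sampling_rate                  # -> 2.0 s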
--------------------------------------------------------------------------------
/src/data/ak135-f-average.m.nd:
--------------------------------------------------------------------------------
1 | 0. 1.45 0. 1.02 5.782E+04 0.
2 | 3. 1.45 0. 1.02 5.782E+04 0.
3 | 3. 1.65 1. 2. 163.3 80.
4 | 3.3 1.65 1. 2. 163.3 80.
5 | 3.3 5.8 3.2 2.6 1478. 600.
6 | 10. 5.8 3.2 2.6 1478. 600.
7 | 10. 6.8 3.9 2.92 1368. 600.
8 | 18. 6.8 3.9 2.92 1368. 600.
9 | mantle
10 | 18. 8.036 4.486 3.638 949.7 394.6
11 | 80. 8.04 4.481 3.5 1008. 417.6
12 | 80. 8.045 4.49 3.502 182. 75.6
13 | 120. 8.05 4.5 3.427 182.6 76.06
14 | 120. 8.05 4.5 3.427 182.6 76.06
15 | 165. 8.175 4.509 3.371 188.7 76.55
16 | 210. 8.301 4.518 3.324 201. 79.4
17 | 210. 8.3 4.519 3.323 338.2 133.7
18 | 300. 8.628 4.679 3.401 353.6 138.7
19 | 410. 9.03 4.87 3.506 377.5 146.5
20 | 410. 9.36 5.08 3.929 414.1 162.7
21 | 660. 10.2 5.611 3.918 428.5 172.9
22 | 660. 10.79 5.965 4.24 1349. 549.5
23 | 764. 11.07 6.215 4.359 1276. 537.1
24 | 849.1 11.21 6.272 4.463 1263. 527.3
25 | 1038. 11.52 6.407 4.616 1230. 506.9
26 | 1227. 11.81 6.527 4.71 1198. 488.
27 | 1416. 12.08 6.636 4.812 1168. 470.4
28 | 1605. 12.33 6.735 4.909 1141. 454.4
29 | 1795. 12.55 6.827 5.004 1111. 437.7
30 | 1984. 12.78 6.913 5.096 1065. 415.6
31 | 2173. 13. 6.997 5.186 1037. 400.6
32 | 2362. 13.21 7.079 5.273 995.4 381.2
33 | 2551. 13.43 7.162 5.357 968.1 367.3
34 | 2740. 13.65 7.248 5.439 932.5 350.6
35 | 2740. 13.65 7.248 5.693 722.7 271.7
36 | 2790. 13.65 7.259 5.72 726.9 274.
37 | 2839. 13.66 7.27 5.746 725.1 274.
38 | 2892. 13.66 7.282 5.772 723.1 274.
39 | outer-core
40 | 2892. 7.972 0. 9.928 5.782E+04 0.
41 | 3344. 8.744 0. 10.59 5.782E+04 0.
42 | 3797. 9.338 0. 11.12 5.782E+04 0.
43 | 4249. 9.759 0. 11.55 5.782E+04 0.
44 | 4702. 10.09 0. 11.88 5.782E+04 0.
45 | 5154. 10.32 0. 12.14 5.782E+04 0.
46 | inner-core
47 | 5154. 11.04 3.505 12.7 632.8 85.03
48 | 5397. 11.12 3.567 12.82 619.2 85.03
49 | 5884. 11.23 3.647 12.97 605. 85.03
50 | 6371. 11.27 3.672 13.02 600.6 85.03
51 |
--------------------------------------------------------------------------------
/src/data/prem-no-ocean.f.nd:
--------------------------------------------------------------------------------
1 | 0. 5.8 3.2 2.6 1456. 600.
2 | 15. 5.8 3.2 2.6 1456. 600.
3 | 15. 6.8 3.9 2.9 1350. 600.
4 | 24.4 6.8 3.9 2.9 1350. 600.
5 | mantle
6 | 24.4 8.111 4.491 3.381 1446. 600.
7 | 80. 8.077 4.47 3.375 1448. 600.
8 | 80. 8.077 4.47 3.375 195.5 80.
9 | 150. 8.034 4.444 3.367 195.7 80.
10 | 220. 7.99 4.419 3.36 195.8 80.
11 | 220. 8.559 4.644 3.436 362.9 143.
12 | 310. 8.732 4.707 3.49 367.7 143.
13 | 400. 8.905 4.77 3.543 372.3 143.
14 | 400. 9.134 4.933 3.724 366.3 143.
15 | 500. 9.646 5.224 3.85 364.2 143.
16 | 600. 10.16 5.516 3.976 362.3 143.
17 | 600. 10.16 5.516 3.976 362.3 143.
18 | 670. 10.27 5.57 3.992 362.9 143.
19 | 670. 10.75 5.945 4.381 759.3 312.
20 | 771. 11.07 6.24 4.443 730.4 312.
21 | 771. 11.07 6.24 4.443 730.4 312.
22 | 971. 11.42 6.378 4.563 744. 312.
23 | 1171. 11.73 6.564 4.678 742.2 312.
24 | 1371. 12.02 6.619 4.79 766.2 312.
25 | 1571. 12.29 6.725 4.898 775.5 312.
26 | 1771. 12.54 6.825 5.003 784. 312.
27 | 1971. 12.78 6.92 5.106 792. 312.
28 | 2171. 13.02 7.011 5.207 799.8 312.
29 | 2371. 13.25 7.1 5.307 807.4 312.
30 | 2571. 13.45 7.189 5.407 811.7 312.
31 | 2741. 13.68 7.266 5.491 822.2 312.
32 | 2741. 13.68 7.266 5.491 822.2 312.
33 | 2771. 13.69 7.266 5.506 823.1 312.
34 | 2891. 13.72 7.265 5.566 826.7 312.
35 | outer-core
36 | 2891. 8.065 0. 9.903 5.782E+04 0.
37 | 2971. 8.199 0. 10.03 5.782E+04 0.
38 | 3171. 8.513 0. 10.33 5.782E+04 0.
39 | 3371. 8.796 0. 10.6 5.782E+04 0.
40 | 3571. 9.05 0. 10.85 5.782E+04 0.
41 | 3771. 9.279 0. 11.08 5.782E+04 0.
42 | 3971. 9.484 0. 11.29 5.782E+04 0.
43 | 4171. 9.669 0. 11.48 5.782E+04 0.
44 | 4371. 9.835 0. 11.65 5.782E+04 0.
45 | 4571. 9.986 0. 11.81 5.782E+04 0.
46 | 4771. 10.12 0. 11.95 5.782E+04 0.
47 | 4971. 10.25 0. 12.07 5.782E+04 0.
48 | 5150. 10.36 0. 12.17 5.782E+04 0.
49 | inner-core
50 | 5150. 11.03 3.504 12.76 622.6 84.6
51 | 5171. 11.04 3.51 12.77 621.5 84.6
52 | 5371. 11.11 3.558 12.87 612.4 84.6
53 | 5571. 11.16 3.598 12.95 605.2 84.6
54 | 5771. 11.21 3.628 13.01 599.8 84.6
55 | 5971. 11.24 3.65 13.05 596. 84.6
56 | 6171. 11.26 3.663 13.08 593.7 84.6
57 | 6371. 11.27 3.668 13.09 593.4 84.6
58 |
59 |
--------------------------------------------------------------------------------
/src/data/prem.f.nd:
--------------------------------------------------------------------------------
1 | 0. 1.45 0. 1.02 5.782E+04 0.
2 | 3. 1.45 0. 1.02 5.782E+04 0.
3 | 3. 5.8 3.2 2.6 1456. 600.
4 | 15. 5.8 3.2 2.6 1456. 600.
5 | 15. 6.8 3.9 2.9 1350. 600.
6 | 24.4 6.8 3.9 2.9 1350. 600.
7 | mantle
8 | 24.4 8.111 4.491 3.381 1446. 600.
9 | 80. 8.077 4.47 3.375 1448. 600.
10 | 80. 8.077 4.47 3.375 195.5 80.
11 | 150. 8.034 4.444 3.367 195.7 80.
12 | 220. 7.99 4.419 3.36 195.8 80.
13 | 220. 8.559 4.644 3.436 362.9 143.
14 | 310. 8.732 4.707 3.49 367.7 143.
15 | 400. 8.905 4.77 3.543 372.3 143.
16 | 400. 9.134 4.933 3.724 366.3 143.
17 | 500. 9.646 5.224 3.85 364.2 143.
18 | 600. 10.16 5.516 3.976 362.3 143.
19 | 600. 10.16 5.516 3.976 362.3 143.
20 | 670. 10.27 5.57 3.992 362.9 143.
21 | 670. 10.75 5.945 4.381 759.3 312.
22 | 771. 11.07 6.24 4.443 730.4 312.
23 | 771. 11.07 6.24 4.443 730.4 312.
24 | 971. 11.42 6.378 4.563 744. 312.
25 | 1171. 11.73 6.564 4.678 742.2 312.
26 | 1371. 12.02 6.619 4.79 766.2 312.
27 | 1571. 12.29 6.725 4.898 775.5 312.
28 | 1771. 12.54 6.825 5.003 784. 312.
29 | 1971. 12.78 6.92 5.106 792. 312.
30 | 2171. 13.02 7.011 5.207 799.8 312.
31 | 2371. 13.25 7.1 5.307 807.4 312.
32 | 2571. 13.45 7.189 5.407 811.7 312.
33 | 2741. 13.68 7.266 5.491 822.2 312.
34 | 2741. 13.68 7.266 5.491 822.2 312.
35 | 2771. 13.69 7.266 5.506 823.1 312.
36 | 2891. 13.72 7.265 5.566 826.7 312.
37 | outer-core
38 | 2891. 8.065 0. 9.903 5.782E+04 0.
39 | 2971. 8.199 0. 10.03 5.782E+04 0.
40 | 3171. 8.513 0. 10.33 5.782E+04 0.
41 | 3371. 8.796 0. 10.6 5.782E+04 0.
42 | 3571. 9.05 0. 10.85 5.782E+04 0.
43 | 3771. 9.279 0. 11.08 5.782E+04 0.
44 | 3971. 9.484 0. 11.29 5.782E+04 0.
45 | 4171. 9.669 0. 11.48 5.782E+04 0.
46 | 4371. 9.835 0. 11.65 5.782E+04 0.
47 | 4571. 9.986 0. 11.81 5.782E+04 0.
48 | 4771. 10.12 0. 11.95 5.782E+04 0.
49 | 4971. 10.25 0. 12.07 5.782E+04 0.
50 | 5150. 10.36 0. 12.17 5.782E+04 0.
51 | inner-core
52 | 5150. 11.03 3.504 12.76 622.6 84.6
53 | 5171. 11.04 3.51 12.77 621.5 84.6
54 | 5371. 11.11 3.558 12.87 612.4 84.6
55 | 5571. 11.16 3.598 12.95 605.2 84.6
56 | 5771. 11.21 3.628 13.01 599.8 84.6
57 | 5971. 11.24 3.65 13.05 596. 84.6
58 | 6171. 11.26 3.663 13.08 593.7 84.6
59 | 6371. 11.27 3.668 13.09 593.4 84.6
60 |
61 |
--------------------------------------------------------------------------------
/src/skeleton/plot_cluster.py:
--------------------------------------------------------------------------------
1 | from affine import Affine
2 | from pyproj import Proj, transform
3 | from mpl_toolkits.basemap import Basemap
4 | import numpy as num
5 | import numpy as np
6 | import matplotlib.pyplot as plt
7 | from mpl_toolkits.basemap import Basemap
8 | import os
9 | from pathlib import Path
10 | import sys
11 | from matplotlib.pyplot import cm
12 | from matplotlib.widgets import Slider
13 | from pylab import plot, show, figure, scatter, axes, draw
14 | from itertools import cycle
15 | import random
16 | import csv
17 | from obspy.imaging.beachball import beach
18 | from matplotlib.patches import Circle, Polygon
19 | w=25480390.0
20 |
21 | def shoot(lon, lat, azimuth, maxdist=None):
22 | """Shooter Function
23 | Original javascript on http://williams.best.vwh.net/gccalc.htm
24 | Translated to python by Thomas Lecocq
25 | """
26 | glat1 = lat * np.pi / 180.
27 | glon1 = lon * np.pi / 180.
28 | s = maxdist / 1.852
29 | faz = azimuth * np.pi / 180.
30 |
31 | EPS= 0.00000000005
32 | if ((np.abs(np.cos(glat1)) < EPS) and not (np.abs(np.sin(faz)) < EPS)):
33 | raise ValueError("Only N-S courses are meaningful, starting at a pole!")
34 |
35 | a = 6378.13 / 1.852
36 | f = 1 / 298.257223563
37 | r = 1 - f
38 | tu = r * np.tan(glat1)
39 | sf = np.sin(faz)
40 | cf = np.cos(faz)
41 | if (cf == 0):
42 | b = 0.
43 | else:
44 | b = 2. * np.arctan2(tu, cf)
45 |
46 | cu = 1. / np.sqrt(1 + tu * tu)
47 | su = tu * cu
48 | sa = cu * sf
49 | c2a = 1 - sa * sa
50 | x = 1. + np.sqrt(1. + c2a * (1. / (r * r) - 1.))
51 | x = (x - 2.) / x
52 | c = 1. - x
53 | c = (x * x / 4. + 1.) / c
54 | d = (0.375 * x * x - 1.) * x
55 | tu = s / (r * a * c)
56 | y = tu
57 | c = y + 1
58 | while (np.abs(y - c) > EPS):
59 |
60 | sy = np.sin(y)
61 | cy = np.cos(y)
62 | cz = np.cos(b + y)
63 | e = 2. * cz * cz - 1.
64 | c = y
65 | x = e * cy
66 | y = e + e - 1.
67 | y = (((sy * sy * 4. - 3.) * y * cz * d / 6. + x) *
68 | d / 4. - cz) * sy * d + tu
69 |
70 | b = cu * cy * cf - su * sy
71 | c = r * np.sqrt(sa * sa + b * b)
72 | d = su * cy + cu * sy * cf
73 | glat2 = (np.arctan2(d, c) + np.pi) % (2*np.pi) - np.pi
74 | c = cu * cy - su * sy * cf
75 | x = np.arctan2(sy * sf, c)
76 | c = ((-3. * c2a + 4.) * f + 4.) * c2a * f / 16.
77 | d = ((e * cy * c + cz) * sy * c + y) * sa
78 | glon2 = ((glon1 + x - (1. - c) * d * f + np.pi) % (2*np.pi)) - np.pi
79 |
80 | baz = (np.arctan2(sa, b) + np.pi) % (2 * np.pi)
81 |
82 | glon2 *= 180./np.pi
83 | glat2 *= 180./np.pi
84 | baz *= 180./np.pi
85 |
86 | return (glon2, glat2, baz)
87 |
88 | def great(m, startlon, startlat, azimuth,*args, **kwargs):
89 | glon1 = startlon
90 | glat1 = startlat
91 | glon2 = glon1
92 | glat2 = glat1
93 |
94 | step = 50
95 |
96 | glon2, glat2, baz = shoot(glon1, glat1, azimuth, step)
97 | if azimuth-180 >= 0:
98 | while glon2 <= startlon:
99 | m.drawgreatcircle(glon1, glat1, glon2, glat2,del_s=50,**kwargs)
100 | azimuth = baz + 180.
101 | glat1, glon1 = (glat2, glon2)
102 |
103 | glon2, glat2, baz = shoot(glon1, glat1, azimuth, step)
104 | elif azimuth-180 < 0:
105 | while glon2 >= startlon:
106 | m.drawgreatcircle(glon1, glat1, glon2, glat2,del_s=50,**kwargs)
107 | azimuth = baz + 180.
108 | glat1, glon1 = (glat2, glon2)
109 |
110 | glon2, glat2, baz = shoot(glon1, glat1, azimuth, step)
111 |
112 | def equi(m, centerlon, centerlat, radius, *args, **kwargs):
113 | glon1 = centerlon
114 | glat1 = centerlat
115 | X = []
116 | Y = []
117 | for azimuth in range(0, 360):
118 | glon2, glat2, baz = shoot(glon1, glat1, azimuth, radius)
119 | X.append(glon2)
120 | Y.append(glat2)
121 | X.append(X[0])
122 | Y.append(Y[0])
123 | X,Y = m(X,Y)
124 | plt.plot(X,Y,color='gray',**kwargs)
125 |
126 |
127 |
128 | stats = 'event.stations'
129 |
130 | # read the clustered station list written by the clustering step
131 |
132 | f = open(stats, "r")
133 | lines=f.readlines()
134 | result=[]
135 | for x in lines:
136 | result.append(x.split(' ')[:])
137 | f.close()
138 | event_cor_y = 37.6199
139 | event_cor_x = 95.8853
140 | lat_0 = 37.6199
141 | lon_0 = 95.8853
142 |
143 | map = Basemap(width=29000000,height=29000000,
144 | resolution='l',projection='stere',\
145 | lat_ts=event_cor_y,lat_0=event_cor_y,lon_0=event_cor_x)
146 | map.drawcoastlines()
147 |
148 | ax = plt.gca()
149 | np1 = [101, 60, 83]
150 | x, y = map(95.8853,37.6199)
151 |
152 | beach1 = beach(np1, xy=(x, y), width=700000)
153 | ax.add_collection(beach1)
154 |
155 | x = []
156 | y = []
157 | z = []
158 | for i in result:
159 | x.append(i[:][2])
160 | y.append(i[:][1])
161 | z.append(int(i[:][3]))
162 |
163 |
164 | colors = cm.nipy_spectral(np.linspace(0,1,np.max(z)+1))
165 |
166 | x = np.asarray(x)
167 | y = np.asarray(y)
168 |
169 | x, y = map(x,y)
170 | map.scatter(x,y,c=[colors[index] for index in z])
171 | try:
172 | plt.text(x[0],y[0],'r'+str(data[0,0])[:], fontsize=12)
173 | except:
174 | pass
175 |
176 | radii = [1000,5000,10000]
177 | for radius in radii:
178 | equi(map, lon_0, lat_0, radius,lw=2.)
179 | plt.show()
180 |
--------------------------------------------------------------------------------
/src/process/noise_analyser.py:
--------------------------------------------------------------------------------
1 | from pyrocko.client import catalog
2 |
3 | import logging
4 | import numpy as num
5 | from pyrocko.guts import Int, Bool, Float, String
6 | from pyrocko.gf.meta import OutOfBounds
7 |
8 | logger = logging.getLogger('NoiseAnalyser')
9 |
10 |
11 | def get_phase_arrival_time(engine, source, station, wavename, store_id):
12 | """
13 | Get arrival time from Greens Function store for respective
14 | :class:`pyrocko.gf.seismosizer.Target`,
15 | :class:`pyrocko.gf.meta.Location` pair.
16 |
17 | Parameters
18 | ----------
19 | engine : :class:`pyrocko.gf.seismosizer.LocalEngine`
20 | source : :class:`pyrocko.gf.meta.Location`
21 | can be therefore :class:`pyrocko.gf.seismosizer.Source` or
22 | :class:`pyrocko.model.Event`
23 | station : station object used for the source-receiver distance
24 | wavename : string of the tabulated phase_def that determines the phase arrival
25 | store_id : string identifying the Greens Function store to use
26 |
27 | Returns
28 | -------
29 | scalar, float of the arrival time of the wave
30 | """
31 | store = engine.get_store(store_id)
32 | dist = station.distance_to(source)
33 | depth = source.depth
34 | return store.t(wavename, (depth, dist)) + source.time
35 |
36 |
37 | def seismic_noise_variance(traces, engine, event, stations,
38 | nwindows, pre_event_noise_duration,
39 | check_events, phase_def, store_id):
40 | '''
41 | Calculate variance of noise (half an hour) before P-Phase onset, and check
42 | for other events interfering
43 |
44 | Parameters
45 | ----------
46 | traces : list
47 | of :class:`pyrocko.trace.Trace` containing observed data
48 | engine : :class:`pyrocko.gf.seismosizer.LocalEngine`
49 | processing object for synthetics calculation
50 | event : :class:`pyrocko.meta.Event`
51 | reference event from catalog
52 | stations : list
53 | of station objects matching the traces
54 | nwindows : integer
55 | if given, windowed analysis of noise, else
56 | variance is calculated on the entire pre-event
57 | noise
58 | phase_def : :class:`pyrocko.gf.Timing`
59 |
60 | Returns
61 | -------
62 | :class:`numpy.ndarray`
63 | '''
64 |
65 | # per-station noise variances and event-contamination weights
66 | global_cmt_catalog = catalog.GlobalCMT()
67 | var_ds = []
68 | ev_ws = []
69 | for tr, station in zip(traces, stations):
70 | stat_w = 1.
71 | if tr is None:
72 | var_ds.append(0.)
73 | ev_ws.append(0.)
74 | else:
75 |
76 | arrival_time = get_phase_arrival_time(
77 | engine=engine, source=event,
78 | station=station, wavename=phase_def,
79 | store_id=store_id)
80 | if check_events:
81 | events = global_cmt_catalog.get_events(
82 | time_range=(
83 | arrival_time-pre_event_noise_duration-50.*60.,
84 | arrival_time),
85 | magmin = 4.)
86 | ev_sum = 0.
87 | for ev in events:
88 | ev_sum += ev.magnitude
89 | for ev in events:
90 | try:
91 | arrival_time_pre = get_phase_arrival_time(
92 | engine=engine,
93 | source=ev,
94 | station=station,
95 | wavename=phase_def,
96 | store_id=store_id)
97 | if arrival_time_pre > arrival_time \
98 | - pre_event_noise_duration \
99 | and arrival_time_pre < arrival_time:
100 |
101 | stat_w = 0.
102 | logger.info(
103 | 'Noise analyser found event %s phase onset of '
104 | '%s for %s' % (
105 | ev.name, phase_def, station))
106 |
107 | if arrival_time_pre > arrival_time-30.*60.\
108 | and arrival_time_pre < arrival_time - \
109 | pre_event_noise_duration:
110 | stat_w *= 0.5*(ev.magnitude/ev_sum)
111 | logger.info(
112 | 'Noise analyser found event %s possibly '
113 | 'contaminating the noise' % ev.name)
114 | # 0.5 arbitrary
115 | except Exception:
116 | pass
117 | ev_ws.append(stat_w)
118 |
119 | if nwindows == 1:
120 | vtrace_var = num.nanvar(tr.ydata)
121 | var_ds.append(vtrace_var)
122 | else:
123 | win = arrival_time -(arrival_time -
124 | pre_event_noise_duration)
125 | win_len = win/nwindows
126 | v_traces_w = []
127 | for i in range(0, nwindows):
128 | vtrace_w = tr.chop(
129 | tmin=win+win_len*i,
130 | tmax=arrival_time+win_len*i+1,
131 | inplace=False)
132 | v_traces_w.append(vtrace_w.ydata)
133 | v_traces_w = num.nanmean(v_traces_w)
134 | var_ds.append(v_traces_w)
135 | var_ds = num.array(var_ds, dtype=float)
136 | ev_ws = num.array(ev_ws, dtype=float)
137 | return var_ds, ev_ws
138 |
139 |
140 | def analyse(traces, engine, event, stations,
141 | pre_event_noise_duration, store_id, nwindows=1,
142 | check_events=True, phase_def='P'):
143 |
144 | if pre_event_noise_duration == 0:
145 | return
146 |
147 | var_ds, ev_ws = seismic_noise_variance(
148 | traces, engine, event, stations,
149 | nwindows, pre_event_noise_duration,
150 | check_events, phase_def, store_id)
151 | norm_noise = num.nanmedian(var_ds)
152 | if norm_noise == 0:
153 | logger.info('Noise Analyser returned a weight of 0 for \
154 | all stations')
155 |
156 | weights = norm_noise/var_ds
157 | if check_events:
158 | weights = weights*ev_ws
159 | return num.mean(weights)
160 |
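161 | # Minimal sketch of the weighting computed in analyse() (illustrative numbers):
162 | # each station gets the median pre-event noise variance divided by its own
163 | # variance, optionally damped by the event-contamination weights ev_ws.
164 | #
165 | #     import numpy as num
166 | #     var_ds = num.array([1.0, 4.0, 0.5, 2.0])
167 | #     ev_ws = num.array([1.0, 1.0, 1.0, 0.5])
168 | #     weights = num.nanmedian(var_ds) / var_ds * ev_ws
169 | #     # quiet stations end up with weights > 1, noisy or contaminated ones < 1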
--------------------------------------------------------------------------------
/src/process/stacking.py:
--------------------------------------------------------------------------------
1 | """
2 | Modified utility module of the EQcorrscan package, collecting different
3 | methods of stacking seismic signals in one place.
4 |
5 | Modified by: Andreas Steinberg
6 | :copyright:
7 | EQcorrscan developers.
8 |
9 | :license:
10 | GNU Lesser General Public License, Version 3
11 | (https://www.gnu.org/copyleft/lesser.html)
12 | """
13 | from __future__ import absolute_import
14 | from __future__ import division
15 | from __future__ import print_function
16 | from __future__ import unicode_literals
17 |
18 | import numpy as np
19 |
20 | from scipy.signal import hilbert
21 | from copy import deepcopy
22 |
23 |
24 | def linstack(streams, normalize=True):
25 | """
26 | Compute the linear stack of a series of seismic streams of \
27 | multiplexed data.
28 |
29 | :type streams: list
30 | :param streams: List of streams to stack
31 | :type normalize: bool
32 | :param normalize: Normalize traces before stacking, normalizes by the RMS \
33 | amplitude.
34 |
35 | :returns: stacked data
36 | :rtype: :class:`obspy.core.stream.Stream`
37 | """
38 | stack = streams[np.argmax([len(stream) for stream in streams])].copy()
39 | if normalize:
40 | for tr in stack:
41 | tr.data = tr.data / np.sqrt(np.mean(np.square(tr.data)))
42 | tr.data = np.nan_to_num(tr.data)
43 | for i in range(1, len(streams)):
44 | for tr in stack:
45 | matchtr = streams[i].select(station=tr.stats.station,
46 | channel=tr.stats.channel)
47 | if matchtr:
48 | # Normalize the data before stacking
49 | if normalize:
50 | norm = matchtr[0].data /\
51 | np.sqrt(np.mean(np.square(matchtr[0].data)))
52 | norm = np.nan_to_num(norm)
53 | else:
54 | norm = matchtr[0].data
55 | tr.data = np.sum((norm, tr.data), axis=0)
56 | return stack
57 |
58 |
59 | def PWS_stack(streams, weight=2, normalize=True):
60 | """
61 | Compute the phase weighted stack of a series of streams.
62 |
63 | .. note:: It is recommended to align the traces before stacking.
64 |
65 | :type streams: list
66 | :param streams: List of :class:`obspy.core.stream.Stream` to stack.
67 | :type weight: float
68 | :param weight: Exponent to the phase stack used for weighting.
69 | :type normalize: bool
70 | :param normalize: Normalize traces before stacking.
71 |
72 | :return: Stacked stream.
73 | :rtype: :class:`obspy.core.stream.Stream`
74 | """
75 | # First get the linear stack which we will weight by the phase stack
76 | Linstack = linstack(streams)
77 | # Compute the instantaneous phase
78 | instaphases = []
79 | print("Computing instantaneous phase")
80 | for stream in streams:
81 | instaphase = stream.copy()
82 | for tr in instaphase:
83 | analytic = hilbert(tr.data)
84 | envelope = np.sqrt(np.sum((np.square(analytic),
85 | np.square(tr.data)), axis=0))
86 | tr.data = analytic / envelope
87 | instaphases.append(instaphase)
88 | # Compute the phase stack
89 | print("Computing the phase stack")
90 | Phasestack = linstack(instaphases, normalize=normalize)
91 | # Compute the phase-weighted stack
92 | for tr in Phasestack:
93 | tr.data = Linstack.select(station=tr.stats.station)[0].data *\
94 | np.abs(tr.data ** weight)
95 | return Phasestack
96 |
97 |
98 | def align_traces(trace_list, shift_len, master=False, positive=False,
99 | plot=False):
100 | """
101 | Align traces relative to each other based on their cross-correlation value.
102 |
103 | Uses the :func:`eqcorrscan.core.match_filter.normxcorr2` function to find
104 | the optimum shift to align traces relative to a master event. Either uses
105 | a given master to align traces, or uses the trace with the highest MAD
106 | amplitude.
107 |
108 | :type trace_list: list
109 | :param trace_list: List of traces to align
110 | :type shift_len: int
111 | :param shift_len: Length to allow shifting within in samples
112 | :type master: obspy.core.trace.Trace
113 | :param master: Master trace to align to, if set to False will align to \
114 | the largest amplitude trace (default)
115 | :type positive: bool
116 | :param positive: Return the maximum positive cross-correlation, or the \
117 | absolute maximum, defaults to False (absolute maximum).
118 | :type plot: bool
119 | :param plot: If true, will plot each trace aligned with the master.
120 |
121 | :returns: list of shifts and correlations for best alignment in seconds.
122 | :rtype: list
123 | """
124 | from eqcorrscan.core.match_filter import normxcorr2
125 | from eqcorrscan.utils.plotting import xcorr_plot
126 | traces = deepcopy(trace_list)
127 | if not master:
128 | # Use trace with largest MAD amplitude as master
129 | master = traces[0]
130 | MAD_master = np.median(np.abs(master.data))
131 | for i in range(1, len(traces)):
132 | if np.median(np.abs(traces[i].data)) > MAD_master:
133 | master = traces[i]
134 | MAD_master = np.median(np.abs(master.data))
135 | else:
136 | print('Using master given by user')
137 | shifts = []
138 | ccs = []
139 | for i in range(len(traces)):
140 | if not master.stats.sampling_rate == traces[i].stats.sampling_rate:
141 | raise ValueError('Sampling rates not the same')
142 | cc_vec = normxcorr2(template=traces[i].data.
143 | astype(np.float32)[shift_len:-shift_len],
144 | image=master.data.astype(np.float32))
145 | cc_vec = cc_vec[0]
146 | shift = np.abs(cc_vec).argmax()
147 | cc = cc_vec[shift]
148 | if plot:
149 | xcorr_plot(template=traces[i].data.
150 | astype(np.float32)[shift_len:-shift_len],
151 | image=master.data.astype(np.float32), shift=shift,
152 | cc=cc)
153 | shift -= shift_len
154 | if cc < 0 and positive:
155 | cc = cc_vec.max()
156 | shift = cc_vec.argmax() - shift_len
157 | shifts.append(shift / master.stats.sampling_rate)
158 | ccs.append(cc)
159 | return shifts, ccs
160 |
161 |
162 | if __name__ == "__main__":
163 | import doctest
164 | doctest.testmod()
165 |
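166 | # Conceptual sketch of the phase weighting done in PWS_stack(), on plain numpy
167 | # arrays instead of obspy Streams (illustrative only): unit phasors of the
168 | # analytic signal are stacked, and their coherence weights the linear stack.
169 | #
170 | #     import numpy as np
171 | #     from scipy.signal import hilbert
172 | #     traces = [np.random.randn(1000) for _ in range(5)]
173 | #     lin = np.sum(traces, axis=0)
174 | #     phasors = [hilbert(tr) / np.abs(hilbert(tr)) for tr in traces]
175 | #     coherence = np.abs(np.mean(phasors, axis=0))
176 | #     pws = lin * coherence ** 2                   # weight exponent 2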
--------------------------------------------------------------------------------
/src/tools/create.py:
--------------------------------------------------------------------------------
1 | import os
2 | from configparser import ConfigParser
3 | import sys
4 | import shutil
5 | import logging
6 | from urllib.request import urlopen
7 | import dateutil.parser
8 | from obspy.core.utcdatetime import UTCDateTime
9 |
10 | logger = logging.getLogger('ARRAY-MP')
11 | logger.setLevel(logging.DEBUG)
12 | formatter = logging.Formatter("%(asctime)s - %(name)s -\
13 | %(levelname)s - %(message)s")
14 | ch = logging.StreamHandler()
15 | ch.setLevel(logging.DEBUG)
16 | ch.setFormatter(formatter)
17 | logger.addHandler(ch)
18 |
19 |
20 | def init():
21 | '''
22 | Parse the global configuration file (global.conf).
23 |
24 | Returns a dictionary of all configuration options.
25 | '''
26 | confname = os.path.join(os.getcwd(), '..', 'global.conf')
27 |
28 | cDict = {}
29 | parser = ConfigParser()
30 | parser.read(confname)
31 | for section_name in parser.sections():
32 | for name, value in parser.items(section_name):
33 | cDict[name] = value
34 |
35 | logger.info('\033[31m Global Configuration %s \033[0m \n' % (cDict))
36 | return cDict
37 |
38 |
39 | def parseEvent(eventID):
40 |
41 | eventID = eventID[1].replace('_', '')
42 |
43 | url = 'http://service.iris.edu/fdsnws/event/1/query?eventid=%s&format=text' % (eventID)
44 | data = urlopen(url).read()
45 | data = data.decode('utf_8')
46 | data = data.split('\n')
47 | dl = data[1:]
48 | i = dl[0]
49 | i = i.split('|')
50 | time = i[1].replace(':', '-').strip()
51 | name = i[12].replace(' ', '-').strip()
52 | eventname = ('%s_%s') % (name, time)
53 |
54 | return eventname
55 |
56 |
57 | def createWorkingDirectory(args):
58 | '''
59 | method to create working directory of event to be processed
60 |
61 | return folder path and event_id
62 | '''
63 | foldername = parseEvent(args)
64 | absfolder = os.path.join(os.getcwd(), 'events', foldername)
65 |
66 | if os.access(absfolder, os.F_OK) is False:
67 | os.makedirs(absfolder)
68 | logger.info('\033[31m WORKING FOLDER CREATED \033[0m \n')
69 |
70 | logger.info('\033[31m Folder: %s EventID: %s \033[0m \n' % (absfolder,
71 | foldername))
72 |
73 | return absfolder, foldername
74 |
75 |
76 | def writeOriginFile(path, ev_id):
77 | '''
78 | method to write origin(event) file in the event directory to be processed
79 |
80 | return origin time of event
81 | '''
82 | fname = os.path.basename(path)+'.origin'
83 | fobj = open(os.path.join(path, fname), 'w')
84 | fobj.write('[origin]\n\n')
85 |
86 | eventID = ev_id[1].replace('_', '')
87 |
88 | url = 'http://service.iris.edu/fdsnws/event/1/query?eventid=%s&format=text' % (eventID)
89 | data = urlopen(url).read()
90 | data = data.decode('utf_8')
91 | data = data.split('\n')
92 | dl = data[1:]
93 | i = dl[0]
94 | i = i.split('|')
95 | time = str(dateutil.parser.parse(i[1]))[:19]
96 | fobj.write('region = %s\n' % i[12].strip())
97 | fobj.write('lat = %s\n' % i[2])
98 | fobj.write('lon = %s\n' % i[3])
99 | fobj.write('depth = %s\n' % i[4])
100 | fobj.write('time = %s\n' % time)
101 | fobj.write('strike = -999\n')
102 | fobj.write('dip = -999\n')
103 | fobj.write('rake = -999\n')
104 |
105 | fobj.close()
106 |
107 | return time
108 |
109 |
110 | def writeSynFile(path, ev_id):
111 | '''
112 | method to write synthetic input(event) file in the event directory to be
113 | processed
114 |
115 | '''
116 | fname = os.path.basename(path)+'.syn'
117 | fobj = open(os.path.join(path, fname), 'w')
118 | fobj.write('[synthetic parameter]\n\n')
119 |
120 | eventID = ev_id[1].replace('_', '')
121 |
122 | url = 'http://service.iris.edu/fdsnws/event/1/query?eventid=%s&format=text' % (eventID)
123 | data = urlopen(url).read()
124 | data = data.decode('utf_8')
125 | data = data.split('\n')
126 | dl = data[1:]
127 | i = dl[0]
128 | i = i.split('|')
129 | time = i[1]
130 | fobj.write('region = %s\n' % i[12].strip())
131 | fobj.write('nsources = 1\n')
132 | fobj.write('lat_0 = %s\n' % i[2])
133 | fobj.write('lon_0 = %s\n' % i[3])
134 | fobj.write('depth_0 = %s\n' % i[4])
135 | fobj.write('time_0 = %sZ\n' % i[1])
136 | fobj.write('strike_0 = -999\n')
137 | fobj.write('dip_0 = -999\n')
138 | fobj.write('rake_0 = -999\n')
139 | fobj.write('width_0 = -999\n')
140 | fobj.write('length_0 = -999\n')
141 | fobj.write('slip_0 = -999\n')
142 | fobj.write('nucleation_x_0 = 0\n')
143 | fobj.write('nucleation_y_0 = 0\n')
144 | fobj.write('store = store_id\n')
145 | fobj.write('store_superdirs = dir of store\n')
146 | fobj.write('use_specific_stf = 0\n')
147 | fobj.write('stf = stf=gf.HalfSinusoidSTF()\n')
148 | fobj.write('source = RectangularSource\n')
149 |
150 | fobj.close()
151 |
152 | return time
153 |
154 |
155 | def copyConfigSkeleton(evfolder):
156 | '''
157 | method to copy the example config from skeleton directory to the
158 | event directory
159 | '''
160 | logger.info('\033[31m Copy example.config to %s \033[0m \n' % (evfolder))
161 |
162 | dstfile = os.path.split(evfolder)[1]+'.config'
163 | dstfile_yaml = os.path.split(evfolder)[1]+'.yaml'
164 | import palantiri
165 | path = palantiri.__path__
166 | src = os.path.join(path[0], 'skeleton', 'example.config')
167 | src_yaml = os.path.join(path[0], 'skeleton', 'example.yaml')
168 | dst = os.path.join(evfolder, dstfile)
169 | dst_yaml = os.path.join(evfolder, dstfile_yaml)
170 | logger.info('\033[31m Created event directory \
171 | %s \033[0m \n' % (evfolder.split('/')[-1]))
172 | shutil.copy(src, dst)
173 | shutil.copy(src_yaml, dst_yaml)
174 |
175 | event = evfolder.split('/')[-1]
176 | eventname = event.split('_')[0]
177 | time = event.split('_')[-1]
178 | time = UTCDateTime(time)
179 | time = time.format_iris_web_service()
180 | time_p1 = time[0:10]
181 | time_p2 = time[11:-1]
182 | time_pf = time_p1 + " " + time_p2
183 |
184 | logger.info('\033[31mNEXT PROCESSING STEP (e.g.): \n\n \
185 | palantiri_down {evdirectory} "%s" 10352.323104588522 0.1 10 --radius-min=1110 %s \n\n\033[0m'.format(evdirectory=str(event.strip('[]'))) %(time_pf, eventname))
186 |
187 |
188 | def main():
189 |
190 | if len(sys.argv) == 2:
191 | absf, evid = createWorkingDirectory(sys.argv)
192 | writeSynFile(absf, sys.argv)
193 | writeOriginFile(absf, sys.argv)
194 | copyConfigSkeleton(absf)
195 | else:
196 | logger.info('\033[31m Nothing to do %s \033[0m \n')
197 |
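198 | # Sketch of the fdsnws-event text format parsed above (hypothetical values):
199 | # each data line is pipe-separated, and the fields used here are
200 | # i[1] = time, i[2] = latitude, i[3] = longitude, i[4] = depth/km and
201 | # i[12] = event location name.
202 | #
203 | #     line = ('1234567|2010-01-01T00:00:00|12.34|56.78|10.0|ISC|ISC|ISC|'
204 | #             '1234|MW|6.5|GCMT|SOME REGION')
205 | #     i = line.split('|')
206 | #     time, lat, lon, depth, region = i[1], i[2], i[3], i[4], i[12].strip()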
--------------------------------------------------------------------------------
/src/cluster/evaluateCluster.py:
--------------------------------------------------------------------------------
1 | import os
2 | from optparse import OptionParser
3 | import fnmatch
4 |
5 | from palantiri.common import Logfile
6 | from palantiri.common.ObspyFkt import loc2degrees, obs_kilometer2degrees
7 | from palantiri.tools.config import Config
8 | from palantiri.cluster.cluster2 import Centroid, Station
9 | from palantiri.common.ConfigFile import ConfigObj
10 |
11 | parser = OptionParser(usage="%prog -f eventpath ")
12 | parser.add_option("-f", "--evpath", type="string", dest="evpath", help="evpath")
13 |
14 | (options, args) = parser.parse_args()
15 |
16 |
17 | class Result(object):
18 |
19 | def __init__(self, meanvalue, minvalue, centroidcount, usedstationcount,
20 | path):
21 |
22 | self.meanvalue = meanvalue
23 | self.minvalue = minvalue
24 | self.centroidcount = centroidcount
25 | self.usedstationcount = usedstationcount
26 | self.path = path
27 |
28 |
29 | class BestSolution(object):
30 |
31 | def __init__(self, station, cluster, lat, lon):
32 |
33 | self.station = station
34 | self.cluster = cluster
35 | self.lat = lat
36 | self.lon = lon
37 |
38 |
39 | def getStatistics(clusterresults):
40 | p = os.path.join(options.evpath, 'stat.dat')
41 | fobjstat = open(p, 'w')
42 | resDict = {}
43 | for root, dirs, files in os.walk(clusterresults):
44 | for i in files:
45 | if i == 'event.statistic':
46 | fname = os.path.join(root, i)
47 | bspath = os.path.join('/',*fname.split('/')[:-1])
48 | fobj = open(fname, 'r')
49 |
50 | for line in fobj:
51 | line = line.split()
52 | resDict[fname] = Result(line[0], line[1], line[2],
53 | line[3], bspath)
54 | fobjstat.write(('%s: %s %s %s %s\n') % (fname, line[0],
55 | line[1], line[2], line[3]))
56 | fobj.close()
57 |
58 | fobjstat.close()
59 | return resDict
60 |
61 |
62 | def getBestSolution(resultDictionary):
63 |
64 | bestsolution = -100000
65 | for i in resultDictionary.keys():
66 | resultDictionary[i].meanvalue = float(resultDictionary[i].meanvalue)
67 |
68 | if bestsolution < resultDictionary[i].meanvalue:
69 | bestsolution = resultDictionary[i].meanvalue
70 |
71 | L = []
72 | for j in resultDictionary.keys():
73 | if bestsolution == resultDictionary[j].meanvalue:
74 | L.append(resultDictionary[j])
75 |
76 | return L[0]
77 |
78 |
79 | def copycluster2EventConfig(clusterDict, evpath):
80 |
81 | epath = os.path.join(*evpath.split('/')[:-1])
82 | t = epath.split(os.path.sep)[-1]
83 | fname = t+'.config'
84 | fullfname = os.path.join(epath, fname)
85 | L = []
86 | fobj = open(fullfname,'r')
87 |
88 | for index,line in enumerate(fobj):
89 | L.append(line)
90 |
91 | if fnmatch.fnmatch(line,'*array parameter*'):
92 | firstend = index+1
93 |
94 | if fnmatch.fnmatch(line,'*beamforming method*'):
95 | secondbegin = index
96 |
97 | fobj.close()
98 |
99 | Confpart1 = L[:firstend]
100 | Confpart2 = L[secondbegin:]
101 |
102 | fobj = open(fullfname,'w')
103 | fobj.write(''.join(Confpart1))
104 | nlist=''
105 | for i in clusterDict.keys():
106 | if len(clusterDict[i]) > 0:
107 | nlist += 'r'+str(i)+','
108 |
109 | fobj.write(('networks=%s\n')%(nlist[:-1]))
110 | fobj.write('\n')
111 |
112 | for i in clusterDict.keys():
113 | if len(clusterDict[i]) > 0:
114 | aname = 'r'+str(i)
115 | fobj.write(('%s=%s\n')%(aname,clusterDict[i][:-1]))
116 | fobj.write(('%srefstation=\n')%(aname))
117 | fobj.write(('%sphase=P\n')%(aname))
118 |
119 | fobj.write('\n')
120 | fobj.write(''.join(Confpart2))
121 | fobj.close()
122 |
123 |
124 | def printBestSolution(solution):
125 |
126 | maxline = -100
127 | L = []
128 | M = []
129 | print(solution.path)
130 | solution.path = solution.path[1:]
131 | Logfile.add('eventpath: ', os.path.join(solution.path,'event.stations'))
132 | fobj = open(os.path.join(solution.path, 'event.stations'),'r')
133 | for line in fobj:
134 | line = line.split()
135 | M.append(float(line[3]))
136 | L.append(BestSolution(line[0], float(line[3]), float(line[1]),
137 | float(line[2])))
138 |
139 | fobj.close()
140 |
141 | maxline = max(M)
142 | C = {}
143 | fobjarrays = open(os.path.join(os.path.join(
144 | *solution.path.split('/')[:-2]),
145 | 'arraycluster.dat'),'w')
146 | for i in range(int(maxline)+1):
147 | array = ''
148 |
149 | for j in L:
150 | if int(j.cluster) == i:
151 | array+=j.station+'|'
152 |
153 | ar = 'r'+str(i)+'='+array[:-1]+'\n'
154 | C[i] = array
155 | print(ar,len(ar))
156 | fobjarrays.write(ar)
157 |
158 | fobjarrays.close()
159 |
160 | return C
161 |
162 |
163 | def copyAndShowBestSolution(solution):
164 |
165 | dest = solution.path
166 | src = os.path.join('/',*solution.path.split('/')[:-4])
167 | src = os.path.join(src,'skeleton','clusterplot.sh')
168 |
169 |
170 | def filterBestSolution(solution):
171 |
172 | evp = os.path.join('/',*solution.path.split('/')[:-2])
173 | C= Config(evp)
174 | Conf = C.parseConfig('config')
175 | cfg = ConfigObj(dict=Conf)
176 |
177 | SL = []
178 | M= []
179 | fobj = open(os.path.join(solution.path, 'event.stations'),'r')
180 |
181 | for s in fobj:
182 | try:
183 | line = s.split()
184 | net,sta,loc,comp = line[0].split('.')
185 |
186 | slat= line[1]
187 | slon= line[2]
188 | smember = line[3]
189 |
190 | M.append(smember)
191 | SL.append(Station(net,sta,loc,comp,lat=slat,lon=slon,member=smember))
192 |
193 | except:
194 | Logfile.exception('filterBestSolution', '<' + s + '>')
195 | continue
196 |
197 | fobj.close()
198 |
199 | M = list(set(M))
200 |
201 | Logfile.add('number of clusters ' + str(len(M)),
202 | 'number of stations ' + str(len(SL)))
203 |
204 | kd = obs_kilometer2degrees(cfg.Distance('intraclusterdistance'))
205 | Logfile.add('icd ' + str(kd))
206 |
207 | maxdist = -1
208 |
209 | for i in SL:
210 | counter = 0
211 |
212 | for k in SL:
213 | if i.member == '8' and k.member == '8':
214 | if i.getName() != k.getName():
215 | delta = loc2degrees(i, k)
216 |
217 | if delta > maxdist:
218 | maxdist = delta
219 |
220 | if delta < kd:
221 | counter +=1
222 |
223 | print(i, 'less than allowed ', counter)
224 |
225 | print('maxdist ', maxdist)
226 |
227 |
228 | if __name__ == "__main__":
229 | rD = getStatistics(options.evpath)
230 | bs = getBestSolution(rD)
231 | CD = printBestSolution(bs)
232 | copyAndShowBestSolution(bs)
233 |
234 | copycluster2EventConfig(CD, options.evpath)
235 |
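236 | # The arraycluster.dat lines written by printBestSolution() use the same
237 | # pipe-separated station list convention as the [array parameter] section of
238 | # the event config, e.g. (station codes purely illustrative):
239 | #
240 | #     r0=XK.B03SL..BHZ|XK.B04KH..BHZ|XK.B05MO..BHZ
241 | #     r1=XK.B06OR..BHZ|XK.B07DX..BHZ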
--------------------------------------------------------------------------------
/src/skeleton/example.config:
--------------------------------------------------------------------------------
1 | [clusterparameter]
2 |
3 | maxcluster = 100
4 |
5 | minStationAroundInitialcluster = 10
6 | initialstationdistance = 7
7 | cutoff = 10
8 | runs = 1
9 |
10 | #minimum distance of initial centroids
11 | centroidminDistance = 2
12 |
13 | comparedelta = 2
14 |
15 | #maximum distance from station to cluster center
16 | stationdistance = 5
17 | minclusterStation = 5
18 |
19 |
20 | [traveltime calculation options]
21 |
22 | tttopt = -ph P
23 |
24 | [algorithm method]
25 | #2 kmeans
26 | cam = 2
27 |
28 | [data]
29 |
30 | # Phase to consider [right now only P and S possible!]
31 | ttphases=P
32 | # resampling data to frequency in Hz or s, should match your gf store
33 | new_frequence = 0.5
34 | # if the download was done with the pyrocko_download command, set to 1
35 | pyrocko_download = 1
36 | # if the download was done with pyrocko you can choose between velocity and displacement
37 | quantity = velocity
38 |
39 | #if not using pyrocko download:
40 | export_unfiltered=false
41 | export_filtered=false
42 | export_resampled=false
43 |
44 | # if colosseo synthetics should be used, set to 1
45 | colesseo_input = 0
46 | # give the colosseo scenario.yml file
47 | colosseo_scenario_yml = /media/asteinbe/data/asteinbe/mydl/scenario.yml
48 |
49 | [synthetic test]
50 |
51 | # do a synthetic test with a real station distribution; specify the
52 | # parameters in the event folder's event.syn file
53 | synthetic_test = 1
54 | # add noise to the synthetics, based on the variance of the real stations
55 | # (covariance of noise is not enabled right now)
56 | synthetic_test_add_noise = 0
57 | synthetic_test_pertub_arrivals = 0
58 | shift_max = 4 # [s] also the maximum shift for empirical corrections
59 | # weight the arrays based on equal weights per azimuth. The azimuth range is
60 | # divided into 12 blocks; the arrays in each block get a combined weight of 1.
61 | weight_by_azimuth = 1
62 | # bootstrap the arrays to estimate the uncertainty:
63 | bootstrap_array_weights = 0
64 | # number of bootstraps to carry out:
65 | n_bootstrap = 0
66 |
67 | [general parameter]
68 | # enable to run an optimization for the time shifts that lead to the highest semblance (foreshock/aftershock)
69 | correct_shifts_empirical_run = 0
70 | # set to 1 to enable shift correction, either empirical (switch below) or xcorr (default)
71 | correct_shifts = 0
72 | # enable to apply empirical time shifts; correct_shifts_empirical_run then needs to be disabled
73 | correct_shifts_empirical = 0
74 | # enable to use a brute-force search for a common array-wide time shift
75 | correct_shifts_empirical_manual = 0
76 | # enable to use a brute-force search for station-based time shifts
77 | correct_shifts_empirical_manual_station_wise = 0
78 |
79 | # dimx of grid for empirical correction (should be the same as for the main process in most cases)
80 | dimx_emp = 50
81 | # dimy of grid (should be the same as for the main process in most cases)
82 | dimy_emp = 50
83 | # step length in s.
84 | step_emp = 2
85 | # window length in s.
86 | winlen_emp = 8
87 |
88 | #calculate semblance from all arrays in a combined step (else each array's semblance is calculated
89 | # separately and then combined, weighted by the azimuthal distribution of arrays)
90 | combine_all = 0
91 | #normalize all semblances to 0-1
92 | norm_all=1
93 | #after each depth/filter inspect the semblance
94 | inspect_semb = 0
95 |
96 | #depths= from,to,steps relative to depth in origin config
97 | depths=0,0,5
98 | # run each depth step with a number of filter(s), used for high vs. low freq.
99 | filters=2
100 | # dimx of grid
101 | dimx = 50
102 | # dimy of grid
103 | dimy = 50
104 | # min distance to origin of stations
105 | minDist = 23
106 | # max distance to origin of stations
107 | maxDist = 93
108 | # step length in s.
109 | step = 2
110 | # window length in s.
111 | winlen = 8
112 | # step length in s.
113 | step_f2 = 2
114 | # window length in s.
115 | winlen_f2 = 8
116 | # length of data before phase onset in s.
117 | forerun = 10
118 | # length of data after phase onset in s.
119 | duration = 20
120 | security = 200
121 | # gridspacing in degree
122 | gridspacing = 0.025
123 | # save processed waveforms and load them in later backprojection runs
124 | load_wdf = 0
125 |
126 | traveltime_model = ak135-f-continental.m.nd
127 |
128 | #apply futterman_attenuation to S waves
129 | futterman_attenuation = 0
130 |
131 | [Optimization parameter]
132 | # Optimize for a model with array responses as input
133 | # enable optimization
134 | optimize = 0
135 | # enable optimization of combined semblance
136 | optimize_all = 0
137 |
138 |
139 |
140 | [process parameter]
141 | #number of cores for traveltime calculation
142 | ncore = 2
143 | # create output of compressed sensing as grid [warning: experimental]
144 | cs = 0
145 | #weight array contributions by noise variance (pre-event)
146 | weight_by_noise = 0
147 | # shift the traces to theoretical onset, disregarding curved travel times
148 | # produces better crosscorrelations, but only valid for small arrays
149 | shift_by_phase_onset = 0
150 | # use a phase weighted stacking
151 | shift_by_phase_pws = 0
152 | # shift by crosscorrelation
153 | shift_by_phase_cc = 0
154 | # create an obspy array response
155 | array_response = 0
156 |
157 | [focal mechanism solution values from event file]
158 | #not used right now
159 | fm = 1
160 |
161 | [automatic picker and correlation parameter]
162 |
163 | xcorr=1
164 | # for manual phase shift picking for each array set autoxcorrcorrectur to 1:
165 | autoxcorrcorrectur = 1
166 | # crosscorrelation threshold for excluding stations
167 | xcorrtreshold = 0.6
168 |
169 | #filter for the reference station of the automatic picker
170 | #should match your main filter
171 | refstationfreqmin=0.03
172 | refstationfreqmax=1.00
173 | refstationcorners=2
174 | refstationzph=false
175 |
176 | #STA/LTA parameter
177 | refsta=0.5
178 | reflta=4
179 |
180 | [array parameter]
181 | #example only!
182 | networks=r1
183 |
184 | r1=XK.B03SL..BHZ|XK.B04KH..BHZ|XK.B05MO..BHZ|XK.B06OR..BHZ|XK.B07DX..BHZ|XK.B08TS..BHZ|XK.B09NK..BHZ|XK.B10PP..BHZ|XK.B11ET..BHZ|XK.B12SS..BHZ|XK.B13NX..BHZ|XK.B14MH..BHZ|XK.B15MW..BHZ|XK.B17CI..BHZ
185 | r1refstation=
186 | r1phase=P
187 |
188 |
189 | [beamforming method]
190 | #delaysum
191 | #capon
192 | beam = delaysum
193 |
194 | [filterparameter]
195 | #use dynamic filtering (attached to theoretical moment release)
196 | dynamic_filter = 0
197 | # define main filter for picking etc:
198 | filterswitch=1
199 | ###############################################################
200 | #Parameter for first filter
201 | #bp butterworth
202 |
203 | # low cut corner frequency
204 | flo = 0.08
205 |
206 | # high cut corner frequency
207 | fhi = 0.24
208 |
209 | # number of filter sections
210 | ns = 4
211 |
212 | # TRUE -> zero phase filter
213 | zph = false
214 |
215 |
216 | ###############################################################
217 | #Parameter for second filter
218 | #bp butterworth
219 |
220 | # low cut corner frequency
221 | flo2 = 0.1
222 |
223 | # high cut corner frequency
224 | fhi2 = 0.5
225 |
226 | # number of filter sections
227 | ns2 = 4
228 |
229 | # TRUE -> zero phase filter
230 | zph2 = false
231 |
232 | ###############################################################
233 | #Alternative lowpass filter example
234 | #lowpass butterworth
235 | l_fc = 1.5
236 | # number of filter sections
237 | l_ns = 4
238 | # TRUE -> zero phase filter
239 | l_zph = false
240 |
241 | ###############################################################
242 | #Alternative highpass filter example
243 | #highpass butterworth
244 | h_fc = 2
245 | # number of filter sections
246 | h_ns = 4
247 | # TRUE -> zero phase filter
248 | h_zph = false
249 |
250 | ##################################################################
251 |
--------------------------------------------------------------------------------
/src/data/ak135-f-average.vf.csv:
--------------------------------------------------------------------------------
1 | 0.0000 5.8000 3.2000 2.6000 1478.3000 599.9900
2 | 3.3000 5.8000 3.2000 2.6000 1478.3000 599.9900
3 | 3.3000 5.8000 3.2000 2.6000 1478.3000 599.9900
4 | 10.0000 5.8000 3.2000 2.6000 1478.3000 599.9900
5 | 10.0000 6.8000 3.9000 2.9200 1368.0200 599.9900
6 | 18.0000 6.8000 3.9000 2.9200 1368.0200 599.9900
7 | 18.0000 8.0355 4.4839 3.6410 950.5000 394.6200
8 | 43.0000 8.0379 4.4856 3.5801 972.7700 403.9300
9 | 80.0000 8.0400 4.4800 3.5020 1008.7100 417.5900
10 | 80.0000 8.0450 4.4900 3.5020 182.0300 75.6000
11 | 120.0000 8.0505 4.5000 3.4268 182.5700 76.0600
12 | 165.0000 8.1750 4.5090 3.3711 188.7200 76.5500
13 | 210.0000 8.3007 4.5184 3.3243 200.9700 79.4000
14 | 210.0000 8.3007 4.5184 3.3243 338.4700 133.7200
15 | 260.0000 8.4822 4.6094 3.3663 346.3700 136.3800
16 | 310.0000 8.6650 4.6964 3.4110 355.8500 139.3800
17 | 360.0000 8.8476 4.7832 3.4577 366.3400 142.7600
18 | 410.0000 9.0302 4.8702 3.5068 377.9300 146.5700
19 | 410.0000 9.3601 5.0806 3.9317 413.6600 162.5000
20 | 460.0000 9.5280 5.1864 3.9273 417.3200 164.8700
21 | 510.0000 9.6962 5.2922 3.9233 419.9400 166.8000
22 | 560.0000 9.8640 5.3989 3.9218 422.5500 168.7800
23 | 610.0000 10.0320 5.5047 3.9206 425.5100 170.8200
24 | 660.0000 10.2000 5.6104 3.9201 428.6900 172.9300
25 | 660.0000 10.7909 5.9607 4.2387 1350.5400 549.4500
26 | 710.0000 10.9222 6.0898 4.2986 1311.1700 543.4800
27 | 760.0000 11.0553 6.2100 4.3565 1277.9300 537.6300
28 | 809.5000 11.1355 6.2424 4.4118 1269.4400 531.9100
29 | 859.0000 11.2228 6.2799 4.4650 1260.6800 526.3200
30 | 908.5000 11.3068 6.3164 4.5162 1251.6900 520.8300
31 | 958.0000 11.3897 6.3519 4.5654 1243.0200 515.4600
32 | 1007.5000 11.4704 6.3860 4.5926 1234.5400 510.2000
33 | 1057.0000 11.5493 6.4182 4.6198 1226.5200 505.0500
34 | 1106.5000 11.6265 6.4514 4.6467 1217.9100 500.0000
35 | 1156.0000 11.7020 6.4822 4.6735 1210.0200 495.0500
36 | 1205.5000 11.7768 6.5131 4.7001 1202.0400 490.2000
37 | 1255.0000 11.8491 6.5431 4.7266 1193.9900 485.4400
38 | 1304.5000 11.9208 6.5728 4.7528 1186.0600 480.7700
39 | 1354.0000 11.9891 6.6009 4.7790 1178.1900 476.1900
40 | 1403.5000 12.0571 6.6285 4.8050 1170.5300 471.7000
41 | 1453.0000 12.1247 6.6554 4.8307 1163.1600 467.2900
42 | 1502.5000 12.1912 6.6813 4.8562 1156.0400 462.9600
43 | 1552.0000 12.2558 6.7070 4.8817 1148.7600 458.7200
44 | 1601.5000 12.3181 6.7323 4.9069 1141.3200 454.5500
45 | 1651.0000 12.3813 6.7579 4.9321 1134.0100 450.4500
46 | 1700.5000 12.4427 6.7820 4.9570 1127.0200 446.4300
47 | 1750.0000 12.5030 6.8056 4.9817 1120.0900 442.4800
48 | 1799.5000 12.5638 6.8289 5.0062 1108.5800 436.6800
49 | 1849.0000 12.6226 6.8517 5.0306 1097.1600 431.0300
50 | 1898.5000 12.6807 6.8743 5.0548 1085.9700 425.5300
51 | 1948.0000 12.7384 6.8972 5.0789 1070.3800 418.4100
52 | 1997.5000 12.7956 6.9194 5.1027 1064.2300 414.9400
53 | 2047.0000 12.8524 6.9416 5.1264 1058.0300 411.5200
54 | 2096.5000 12.9093 6.9625 5.1499 1048.0900 406.5000
55 | 2146.0000 12.9663 6.9852 5.1732 1042.0700 403.2300
56 | 2195.5000 13.0226 7.0069 5.1963 1032.1400 398.4100
57 | 2245.0000 13.0786 7.0286 5.2192 1018.3800 392.1600
58 | 2294.5000 13.1337 7.0504 5.2420 1008.7900 387.6000
59 | 2344.0000 13.1895 7.0722 5.2646 999.4400 383.1400
60 | 2393.5000 13.2465 7.0932 5.2870 990.7700 378.7900
61 | 2443.0000 13.3017 7.1144 5.3092 985.6300 375.9400
62 | 2492.5000 13.3584 7.1368 5.3313 976.8100 371.7500
63 | 2542.0000 13.4156 7.1584 5.3531 968.4600 367.6500
64 | 2591.5000 13.4741 7.1804 5.3748 960.3600 363.6400
65 | 2640.0000 13.5311 7.2031 5.3962 952.0000 359.7100
66 | 2690.0000 13.5899 7.2253 5.4176 940.8800 354.6100
67 | 2740.0000 13.6498 7.2485 5.4387 933.2100 350.8800
68 | 2740.0000 13.6498 7.2485 5.6934 722.7300 271.7400
69 | 2789.6700 13.6533 7.2593 5.7196 726.8700 273.9700
70 | 2839.3300 13.6570 7.2700 5.7458 725.1100 273.9700
71 | 2891.5000 13.6601 7.2817 5.7721 723.1200 273.9700
72 | 2891.5000 8.0000 0.0000 9.9145 57822.0000 0.0000
73 | 2939.3300 8.0382 0.0000 9.9942 57822.0000 0.0000
74 | 2989.6600 8.1283 0.0000 10.0722 57822.0000 0.0000
75 | 3039.9900 8.2213 0.0000 10.1485 57822.0000 0.0000
76 | 3090.3200 8.3122 0.0000 10.2233 57822.0000 0.0000
77 | 3140.6600 8.4001 0.0000 10.2964 57822.0000 0.0000
78 | 3190.9900 8.4861 0.0000 10.3679 57822.0000 0.0000
79 | 3241.3200 8.5692 0.0000 10.4378 57822.0000 0.0000
80 | 3291.6500 8.6496 0.0000 10.5062 57822.0000 0.0000
81 | 3341.9800 8.7283 0.0000 10.5731 57822.0000 0.0000
82 | 3392.3100 8.8036 0.0000 10.6385 57822.0000 0.0000
83 | 3442.6400 8.8761 0.0000 10.7023 57822.0000 0.0000
84 | 3492.9700 8.9461 0.0000 10.7647 57822.0000 0.0000
85 | 3543.3000 9.0138 0.0000 10.8257 57822.0000 0.0000
86 | 3593.6400 9.0792 0.0000 10.8852 57822.0000 0.0000
87 | 3643.9700 9.1426 0.0000 10.9434 57822.0000 0.0000
88 | 3694.3000 9.2042 0.0000 11.0001 57822.0000 0.0000
89 | 3744.6300 9.2634 0.0000 11.0555 57822.0000 0.0000
90 | 3794.9600 9.3205 0.0000 11.1095 57822.0000 0.0000
91 | 3845.2900 9.3760 0.0000 11.1623 57822.0000 0.0000
92 | 3895.6200 9.4297 0.0000 11.2137 57822.0000 0.0000
93 | 3945.9500 9.4814 0.0000 11.2639 57822.0000 0.0000
94 | 3996.2800 9.5306 0.0000 11.3127 57822.0000 0.0000
95 | 4046.6200 9.5777 0.0000 11.3604 57822.0000 0.0000
96 | 4096.9500 9.6232 0.0000 11.4069 57822.0000 0.0000
97 | 4147.2800 9.6673 0.0000 11.4521 57822.0000 0.0000
98 | 4197.6100 9.7100 0.0000 11.4962 57822.0000 0.0000
99 | 4247.9400 9.7513 0.0000 11.5391 57822.0000 0.0000
100 | 4298.2700 9.7914 0.0000 11.5809 57822.0000 0.0000
101 | 4348.6000 9.8304 0.0000 11.6216 57822.0000 0.0000
102 | 4398.9300 9.8682 0.0000 11.6612 57822.0000 0.0000
103 | 4449.2600 9.9051 0.0000 11.6998 57822.0000 0.0000
104 | 4499.6000 9.9410 0.0000 11.7373 57822.0000 0.0000
105 | 4549.9300 9.9761 0.0000 11.7737 57822.0000 0.0000
106 | 4600.2600 10.0103 0.0000 11.8092 57822.0000 0.0000
107 | 4650.5900 10.0439 0.0000 11.8437 57822.0000 0.0000
108 | 4700.9200 10.0768 0.0000 11.8772 57822.0000 0.0000
109 | 4751.2500 10.1095 0.0000 11.9098 57822.0000 0.0000
110 | 4801.5800 10.1415 0.0000 11.9414 57822.0000 0.0000
111 | 4851.9100 10.1739 0.0000 11.9722 57822.0000 0.0000
112 | 4902.2400 10.2049 0.0000 12.0001 57822.0000 0.0000
113 | 4952.5800 10.2329 0.0000 12.0311 57822.0000 0.0000
114 | 5002.9100 10.2565 0.0000 12.0593 57822.0000 0.0000
115 | 5053.2400 10.2745 0.0000 12.0867 57822.0000 0.0000
116 | 5103.5700 10.2854 0.0000 12.1133 57822.0000 0.0000
117 | 5153.5000 10.2890 0.0000 12.1391 57822.0000 0.0000
118 | 5153.5000 11.0427 3.5043 12.7037 633.2600 85.0300
119 | 5204.6100 11.0585 3.5187 12.7289 629.8900 85.0300
120 | 5255.3200 11.0718 3.5314 12.7530 626.8700 85.0300
121 | 5306.0400 11.0850 3.5435 12.7760 624.0800 85.0300
122 | 5356.7500 11.0983 3.5551 12.7980 621.5000 85.0300
123 | 5407.4600 11.1166 3.5661 12.8188 619.7100 85.0300
124 | 5458.1700 11.1316 3.5765 12.8387 617.7800 85.0300
125 | 5508.8900 11.1457 3.5864 12.8574 615.9300 85.0300
126 | 5559.6000 11.1590 3.5957 12.8751 614.2100 85.0300
127 | 5610.3100 11.1715 3.6044 12.8917 612.6200 85.0300
128 | 5661.0200 11.1832 3.6126 12.9072 611.1200 85.0300
129 | 5711.7400 11.1941 3.6202 12.9217 609.7400 85.0300
130 | 5762.4500 11.2041 3.6272 12.9351 608.4800 85.0300
131 | 5813.1600 11.2134 3.6337 12.9474 607.3100 85.0300
132 | 5863.8700 11.2219 3.6396 12.9586 606.2600 85.0300
133 | 5914.5900 11.2295 3.6450 12.9688 605.2800 85.0300
134 | 5965.3000 11.2364 3.6498 12.9779 604.4400 85.0300
135 | 6016.0100 11.2424 3.6540 12.9859 603.6900 85.0300
136 | 6066.7200 11.2477 3.6577 12.9929 603.0400 85.0300
137 | 6117.4400 11.2521 3.6608 12.9988 602.4900 85.0300
138 | 6168.1500 11.2557 3.6633 13.0036 602.0500 85.0300
139 | 6218.8600 11.2586 3.6653 13.0074 601.7000 85.0300
140 | 6269.5700 11.2606 3.6667 13.0100 601.4600 85.0300
141 | 6320.2900 11.2618 3.6675 13.0117 601.3200 85.0300
142 | 6371.0000 11.2622 3.6678 13.0122 601.2700 85.0300
143 |
--------------------------------------------------------------------------------
/src/data/ak135-f-average.vf.nd:
--------------------------------------------------------------------------------
1 | 0.0000 5.8000 3.2000 2.6000 1478.3000 599.9900
2 | 3.3000 5.8000 3.2000 2.6000 1478.3000 599.9900
3 | 3.3000 5.8000 3.2000 2.6000 1478.3000 599.9900
4 | 10.0000 5.8000 3.2000 2.6000 1478.3000 599.9900
5 | 10.0000 6.8000 3.9000 2.9200 1368.0200 599.9900
6 | 18.0000 6.8000 3.9000 2.9200 1368.0200 599.9900
7 | mantle
8 | 18.0000 8.0355 4.4839 3.6410 950.5000 394.6200
9 | 43.0000 8.0379 4.4856 3.5801 972.7700 403.9300
10 | 80.0000 8.0400 4.4800 3.5020 1008.7100 417.5900
11 | 80.0000 8.0450 4.4900 3.5020 182.0300 75.6000
12 | 120.0000 8.0505 4.5000 3.4268 182.5700 76.0600
13 | 165.0000 8.1750 4.5090 3.3711 188.7200 76.5500
14 | 210.0000 8.3007 4.5184 3.3243 200.9700 79.4000
15 | 210.0000 8.3007 4.5184 3.3243 338.4700 133.7200
16 | 260.0000 8.4822 4.6094 3.3663 346.3700 136.3800
17 | 310.0000 8.6650 4.6964 3.4110 355.8500 139.3800
18 | 360.0000 8.8476 4.7832 3.4577 366.3400 142.7600
19 | 410.0000 9.0302 4.8702 3.5068 377.9300 146.5700
20 | 410.0000 9.3601 5.0806 3.9317 413.6600 162.5000
21 | 460.0000 9.5280 5.1864 3.9273 417.3200 164.8700
22 | 510.0000 9.6962 5.2922 3.9233 419.9400 166.8000
23 | 560.0000 9.8640 5.3989 3.9218 422.5500 168.7800
24 | 610.0000 10.0320 5.5047 3.9206 425.5100 170.8200
25 | 660.0000 10.2000 5.6104 3.9201 428.6900 172.9300
26 | 660.0000 10.7909 5.9607 4.2387 1350.5400 549.4500
27 | 710.0000 10.9222 6.0898 4.2986 1311.1700 543.4800
28 | 760.0000 11.0553 6.2100 4.3565 1277.9300 537.6300
29 | 809.5000 11.1355 6.2424 4.4118 1269.4400 531.9100
30 | 859.0000 11.2228 6.2799 4.4650 1260.6800 526.3200
31 | 908.5000 11.3068 6.3164 4.5162 1251.6900 520.8300
32 | 958.0000 11.3897 6.3519 4.5654 1243.0200 515.4600
33 | 1007.5000 11.4704 6.3860 4.5926 1234.5400 510.2000
34 | 1057.0000 11.5493 6.4182 4.6198 1226.5200 505.0500
35 | 1106.5000 11.6265 6.4514 4.6467 1217.9100 500.0000
36 | 1156.0000 11.7020 6.4822 4.6735 1210.0200 495.0500
37 | 1205.5000 11.7768 6.5131 4.7001 1202.0400 490.2000
38 | 1255.0000 11.8491 6.5431 4.7266 1193.9900 485.4400
39 | 1304.5000 11.9208 6.5728 4.7528 1186.0600 480.7700
40 | 1354.0000 11.9891 6.6009 4.7790 1178.1900 476.1900
41 | 1403.5000 12.0571 6.6285 4.8050 1170.5300 471.7000
42 | 1453.0000 12.1247 6.6554 4.8307 1163.1600 467.2900
43 | 1502.5000 12.1912 6.6813 4.8562 1156.0400 462.9600
44 | 1552.0000 12.2558 6.7070 4.8817 1148.7600 458.7200
45 | 1601.5000 12.3181 6.7323 4.9069 1141.3200 454.5500
46 | 1651.0000 12.3813 6.7579 4.9321 1134.0100 450.4500
47 | 1700.5000 12.4427 6.7820 4.9570 1127.0200 446.4300
48 | 1750.0000 12.5030 6.8056 4.9817 1120.0900 442.4800
49 | 1799.5000 12.5638 6.8289 5.0062 1108.5800 436.6800
50 | 1849.0000 12.6226 6.8517 5.0306 1097.1600 431.0300
51 | 1898.5000 12.6807 6.8743 5.0548 1085.9700 425.5300
52 | 1948.0000 12.7384 6.8972 5.0789 1070.3800 418.4100
53 | 1997.5000 12.7956 6.9194 5.1027 1064.2300 414.9400
54 | 2047.0000 12.8524 6.9416 5.1264 1058.0300 411.5200
55 | 2096.5000 12.9093 6.9625 5.1499 1048.0900 406.5000
56 | 2146.0000 12.9663 6.9852 5.1732 1042.0700 403.2300
57 | 2195.5000 13.0226 7.0069 5.1963 1032.1400 398.4100
58 | 2245.0000 13.0786 7.0286 5.2192 1018.3800 392.1600
59 | 2294.5000 13.1337 7.0504 5.2420 1008.7900 387.6000
60 | 2344.0000 13.1895 7.0722 5.2646 999.4400 383.1400
61 | 2393.5000 13.2465 7.0932 5.2870 990.7700 378.7900
62 | 2443.0000 13.3017 7.1144 5.3092 985.6300 375.9400
63 | 2492.5000 13.3584 7.1368 5.3313 976.8100 371.7500
64 | 2542.0000 13.4156 7.1584 5.3531 968.4600 367.6500
65 | 2591.5000 13.4741 7.1804 5.3748 960.3600 363.6400
66 | 2640.0000 13.5311 7.2031 5.3962 952.0000 359.7100
67 | 2690.0000 13.5899 7.2253 5.4176 940.8800 354.6100
68 | 2740.0000 13.6498 7.2485 5.4387 933.2100 350.8800
69 | 2740.0000 13.6498 7.2485 5.6934 722.7300 271.7400
70 | 2789.6700 13.6533 7.2593 5.7196 726.8700 273.9700
71 | 2839.3300 13.6570 7.2700 5.7458 725.1100 273.9700
72 | 2891.5000 13.6601 7.2817 5.7721 723.1200 273.9700
73 | outer-core
74 | 2891.5000 8.0000 0.0000 9.9145 57822.0000 0.0000
75 | 2939.3300 8.0382 0.0000 9.9942 57822.0000 0.0000
76 | 2989.6600 8.1283 0.0000 10.0722 57822.0000 0.0000
77 | 3039.9900 8.2213 0.0000 10.1485 57822.0000 0.0000
78 | 3090.3200 8.3122 0.0000 10.2233 57822.0000 0.0000
79 | 3140.6600 8.4001 0.0000 10.2964 57822.0000 0.0000
80 | 3190.9900 8.4861 0.0000 10.3679 57822.0000 0.0000
81 | 3241.3200 8.5692 0.0000 10.4378 57822.0000 0.0000
82 | 3291.6500 8.6496 0.0000 10.5062 57822.0000 0.0000
83 | 3341.9800 8.7283 0.0000 10.5731 57822.0000 0.0000
84 | 3392.3100 8.8036 0.0000 10.6385 57822.0000 0.0000
85 | 3442.6400 8.8761 0.0000 10.7023 57822.0000 0.0000
86 | 3492.9700 8.9461 0.0000 10.7647 57822.0000 0.0000
87 | 3543.3000 9.0138 0.0000 10.8257 57822.0000 0.0000
88 | 3593.6400 9.0792 0.0000 10.8852 57822.0000 0.0000
89 | 3643.9700 9.1426 0.0000 10.9434 57822.0000 0.0000
90 | 3694.3000 9.2042 0.0000 11.0001 57822.0000 0.0000
91 | 3744.6300 9.2634 0.0000 11.0555 57822.0000 0.0000
92 | 3794.9600 9.3205 0.0000 11.1095 57822.0000 0.0000
93 | 3845.2900 9.3760 0.0000 11.1623 57822.0000 0.0000
94 | 3895.6200 9.4297 0.0000 11.2137 57822.0000 0.0000
95 | 3945.9500 9.4814 0.0000 11.2639 57822.0000 0.0000
96 | 3996.2800 9.5306 0.0000 11.3127 57822.0000 0.0000
97 | 4046.6200 9.5777 0.0000 11.3604 57822.0000 0.0000
98 | 4096.9500 9.6232 0.0000 11.4069 57822.0000 0.0000
99 | 4147.2800 9.6673 0.0000 11.4521 57822.0000 0.0000
100 | 4197.6100 9.7100 0.0000 11.4962 57822.0000 0.0000
101 | 4247.9400 9.7513 0.0000 11.5391 57822.0000 0.0000
102 | 4298.2700 9.7914 0.0000 11.5809 57822.0000 0.0000
103 | 4348.6000 9.8304 0.0000 11.6216 57822.0000 0.0000
104 | 4398.9300 9.8682 0.0000 11.6612 57822.0000 0.0000
105 | 4449.2600 9.9051 0.0000 11.6998 57822.0000 0.0000
106 | 4499.6000 9.9410 0.0000 11.7373 57822.0000 0.0000
107 | 4549.9300 9.9761 0.0000 11.7737 57822.0000 0.0000
108 | 4600.2600 10.0103 0.0000 11.8092 57822.0000 0.0000
109 | 4650.5900 10.0439 0.0000 11.8437 57822.0000 0.0000
110 | 4700.9200 10.0768 0.0000 11.8772 57822.0000 0.0000
111 | 4751.2500 10.1095 0.0000 11.9098 57822.0000 0.0000
112 | 4801.5800 10.1415 0.0000 11.9414 57822.0000 0.0000
113 | 4851.9100 10.1739 0.0000 11.9722 57822.0000 0.0000
114 | 4902.2400 10.2049 0.0000 12.0001 57822.0000 0.0000
115 | 4952.5800 10.2329 0.0000 12.0311 57822.0000 0.0000
116 | 5002.9100 10.2565 0.0000 12.0593 57822.0000 0.0000
117 | 5053.2400 10.2745 0.0000 12.0867 57822.0000 0.0000
118 | 5103.5700 10.2854 0.0000 12.1133 57822.0000 0.0000
119 | 5153.5000 10.2890 0.0000 12.1391 57822.0000 0.0000
120 | inner-core
121 | 5153.5000 11.0427 3.5043 12.7037 633.2600 85.0300
122 | 5204.6100 11.0585 3.5187 12.7289 629.8900 85.0300
123 | 5255.3200 11.0718 3.5314 12.7530 626.8700 85.0300
124 | 5306.0400 11.0850 3.5435 12.7760 624.0800 85.0300
125 | 5356.7500 11.0983 3.5551 12.7980 621.5000 85.0300
126 | 5407.4600 11.1166 3.5661 12.8188 619.7100 85.0300
127 | 5458.1700 11.1316 3.5765 12.8387 617.7800 85.0300
128 | 5508.8900 11.1457 3.5864 12.8574 615.9300 85.0300
129 | 5559.6000 11.1590 3.5957 12.8751 614.2100 85.0300
130 | 5610.3100 11.1715 3.6044 12.8917 612.6200 85.0300
131 | 5661.0200 11.1832 3.6126 12.9072 611.1200 85.0300
132 | 5711.7400 11.1941 3.6202 12.9217 609.7400 85.0300
133 | 5762.4500 11.2041 3.6272 12.9351 608.4800 85.0300
134 | 5813.1600 11.2134 3.6337 12.9474 607.3100 85.0300
135 | 5863.8700 11.2219 3.6396 12.9586 606.2600 85.0300
136 | 5914.5900 11.2295 3.6450 12.9688 605.2800 85.0300
137 | 5965.3000 11.2364 3.6498 12.9779 604.4400 85.0300
138 | 6016.0100 11.2424 3.6540 12.9859 603.6900 85.0300
139 | 6066.7200 11.2477 3.6577 12.9929 603.0400 85.0300
140 | 6117.4400 11.2521 3.6608 12.9988 602.4900 85.0300
141 | 6168.1500 11.2557 3.6633 13.0036 602.0500 85.0300
142 | 6218.8600 11.2586 3.6653 13.0074 601.7000 85.0300
143 | 6269.5700 11.2606 3.6667 13.0100 601.4600 85.0300
144 | 6320.2900 11.2618 3.6675 13.0117 601.3200 85.0300
145 | 6371.0000 11.2622 3.6678 13.0122 601.2700 85.0300
--------------------------------------------------------------------------------
/src/process/waveform.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import os
3 | import fnmatch
4 | from obspy.core import read
5 | from obspy.core.utcdatetime import UTCDateTime
6 | from pyrocko import obspy_compat, io
7 | from palantiri.common import Logfile
8 | from palantiri.common.ConfigFile import ConfigObj, FilterCfg
9 | from palantiri.process import ttt
10 | import obspy
11 | from collections import OrderedDict
12 |
13 |
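# Helper functions for waveform handling:
#  - getStation() / getGain() look up a station object or gain factor by
#    matching a stream id against the station names in MetaDict.
#  - the readWaveforms* variants load event waveforms either from an SDS-like
#    year/network/station folder tree (obspy) or from pyrocko mseed files.
#  - processWaveforms_obspy() applies gain correction, bandpass filtering and
#    resampling according to the event configuration.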
14 | def getStation(streamID, MetaDict):
15 |     stobj = None
16 | for i in MetaDict:
17 | if fnmatch.fnmatch(streamID, i.getName()):
18 | stobj = i
19 |
20 | return stobj
21 |
22 |
23 | def getGain(streamID, MetaDict):
24 |
25 | gain = 1
26 |
27 | for i in MetaDict:
28 | if fnmatch.fnmatch(streamID, i.getName()):
29 | gain = i.gain
30 |
31 | return gain
32 |
33 |
34 | def readWaveforms(stationList, tw, EventPath, Origin):
35 |
36 | t2 = UTCDateTime(Origin.time)
37 | sdspath = os.path.join(EventPath, 'data', str(t2.year))
38 |
39 | Wdict = OrderedDict()
40 |
41 | for i in stationList:
42 | streamData = i.getName() + '.D.' + str(t2.year)\
43 | + '.' + str("%03d" % t2.julday)
44 | entry = os.path.join(sdspath, i.net, i.sta, i.comp+'.D', streamData)
45 | tdiff = tw['end']-tw['start']
46 |
47 | try:
48 | st = read(entry, format="MSEED", starttime=tw['start'],
49 | endtime=tw['end'], nearest_sample=True)
50 |
51 | except Exception:
52 | Logfile.error('readWaveforms: File not found', entry)
53 |             continue
54 |
55 | if len(st.get_gaps()) > 0:
56 | st.merge(method=0, fill_value='interpolate',
57 | interpolation_samples=0)
58 |
59 | if len(st) > 0:
60 | trdiff = st[0].stats.endtime-st[0].stats.starttime
61 |
62 | totaldiff = abs(trdiff - tdiff)
63 |
64 | if totaldiff < 1:
65 | Wdict[i.getName()] = st
66 | Logfile.add(i.getName() + ' added to StreamList ')
67 | else:
68 | print(' OUT ', streamData)
69 |
70 | Logfile.red('%d Streams added with available Data' % len(Wdict))
71 | return Wdict
72 |
73 |
74 | def readWaveformsPyrocko(stationlist, w, EventPath, Origin, desired):
75 | Wdict = OrderedDict()
76 | traces = io.load(EventPath+'/data/traces_velocity.mseed')
77 | traces_dict = []
78 | for tr in traces:
79 | for il in stationlist:
80 | tr_name = str(tr.network+'.'+tr.station+'.'+tr.location
81 | + '.')
82 | if (tr_name == str(il) and tr.channel[-1] == desired) or (tr_name == str(il)[:-3] and tr.channel[-1] == desired):
83 |
84 | Wdict[il.getName()] = tr
85 | return Wdict
86 |
87 |
88 | def readWaveformsPyrockodummy(stationlist, w, EventPath, Origin):
89 |
90 | Wdict = OrderedDict()
91 | for il in stationlist:
92 | Wdict[il.getName()] = 1.
93 | return Wdict
94 |
95 |
96 | def readWaveformsPyrocko_restituted(stationlist, w, EventPath, Origin, desired):
97 | Wdict = OrderedDict()
98 |     if desired == 'Z':
99 | try:
100 | traces = io.load(EventPath+'/data/traces_rotated.mseed')
101 | except Exception:
102 | traces = io.load(EventPath+'/data/traces_restituted.mseed')
103 | else:
104 | traces = io.load(EventPath+'/data/traces_rotated.mseed')
105 |
106 | traces_dict = []
107 | for tr in traces:
108 | for il in stationlist:
109 | tr_name = str(tr.network+'.'+tr.station+'.'+tr.location
110 | + '.')
111 | if (tr_name == str(il) and tr.channel[-1] == desired) or (tr_name == str(il)[:-3] and tr.channel[-1] == desired):
112 |
113 | Wdict[il.getName()] = tr
114 |
115 | return Wdict
116 |
117 |
118 | def readWaveforms_colesseo(stationlist, w, EventPath, Origin, C):
119 | Wdict = OrderedDict()
120 | Config = C.parseConfig('config')
121 | cfg = ConfigObj(dict=Config)
122 | traces_dict = []
123 | traces = io.load(cfg.colosseo_scenario_yml()[:-12]+'scenario.mseed')
124 |
125 | for tr in traces:
126 | for il in stationlist:
127 | tr_name = str(tr.network+'.'+tr.station+'.'+tr.location
128 | + '.' + tr.channel[:3])
129 | if tr_name == str(il):
130 | st = obspy.Stream()
131 | es = obspy_compat.to_obspy_trace(tr)
132 | st.extend([es])
133 | traces_dict.append(tr)
134 | Wdict[il.getName()] = st
135 | return Wdict
136 |
137 |
138 | def writeWaveform(Folder, station, Stream, flag, network):
139 |
140 | if flag == 'U':
141 | s1 = 'unfiltered'
142 | elif flag == 'F':
143 | s1 = 'filtered'
144 | elif flag == 'R':
145 | s1 = 'resampled'
146 |
147 | else:
148 | Logfile.abort('writeWaveform: Illegal flag <' + flag + '>')
149 |
150 | fname = ('%s-%s-%s.mseed') % (network, station, flag)
151 | name = os.path.join(Folder['mseed'], fname)
152 | Stream.write(name, format='MSEED')
153 |
154 | Logfile.add('%s stream for station %s written ' % (s1, station))
155 |
156 |
157 | def resampleWaveform_2(Waveform, end_frequence):
158 | return resampleWaveform(Waveform, end_frequence)
159 |
160 |
161 | def resampleWaveform(Waveform, end_frequence):
162 | Waveform.resample(end_frequence)
163 | return Waveform
164 |
165 |
166 | def resampledummy(Waveform, end_frequence):
167 | return Waveform
168 |
169 |
170 | def processWaveforms_obspy(WaveformDict, Config, Folder, network, MetaDict,
171 | Event, switch, Xcorr):
172 |
173 | Logfile.red('Start Processing')
174 |
175 | cfg = FilterCfg(Config)
176 | new_frequence = cfg.newFrequency()
177 |
178 | for index, i in enumerate(WaveformDict):
179 | Logfile.add('%s:%s ---------------------' % (index, i))
180 |
181 |         if Config['export_unfiltered'].capitalize() == 'True':
182 | writeWaveform(Folder, i, WaveformDict[i], 'U', network)
183 |
184 | station = getStation(i, MetaDict)
185 |
186 | if cfg.Int('fm') == 1:
187 | azi = ttt.bearing(Event, station)
188 | bazi = ttt.backazi(station, Event)
189 |
190 | msg = 'Takeoff ' + str(station.takeoff) + ' Azi ' + str(azi) +\
191 |                   ' Bazi ' + str(bazi)
192 |
193 | Logfile.add(msg)
194 |
195 | gain = float(station.gain)
196 |
197 | if gain == 0.0 or gain == -1.0:
198 | gain = 1
199 |
200 |         WaveformDict[i][0].data = WaveformDict[i][0].data * (1.0 / gain)
201 |
202 |         if switch == 0:
203 | Logfile.add('bandpass filtered stream for station %s ' % (i))
204 |
205 | WaveformDict[i].filter('bandpass',
206 | freqmin=cfg.flo(),
207 | freqmax=cfg.fhi(),
208 | corners=cfg.ns(),
209 |                                    zerophase=(Config['zph'].capitalize() == 'True'))
210 |
211 |         elif switch == 1:
212 | Logfile.add('bandpass filtered stream for station %s ' % (i))
213 |
214 | WaveformDict[i].filter('bandpass',
215 | freqmin=cfg.flo2(),
216 | freqmax=cfg.fhi2(),
217 | corners=cfg.ns2(),
218 |                                    zerophase=(Config['zph2'].capitalize() == 'True'))
219 |
220 | else:
221 | Logfile.add('no filter set for station %s ' % (i))
222 |
223 |         if Config['export_filtered'].capitalize() == 'True':
224 | writeWaveform(Folder, i, WaveformDict[i], 'F', network)
225 |
226 | j = resampleWaveform(WaveformDict[i][0], new_frequence)
227 | WaveformDict[i] = j
228 |
229 |         if Config['export_resampled'].capitalize() == 'True':
230 | writeWaveform(Folder, i, WaveformDict[i], 'R', network)
231 |
232 | return WaveformDict
233 |
234 |
235 | def processdummyWaveforms(WaveformDict, Config, Folder, network, MetaDict,
236 | Event, switch, Xcorr):
237 |
238 | for index, i in enumerate(WaveformDict):
239 |
240 | WaveformDict[i] = i
241 | return WaveformDict
242 |
--------------------------------------------------------------------------------
/example/events/EGYPT_1995-11-22T22-16-55/EGYPT_1995-11-22T22-16-55.config:
--------------------------------------------------------------------------------
1 | [clusterparameter]
2 |
3 | maxCluster = 100
4 |
5 | minStationAroundInitialCluster = 10
6 | initialstationdistance = 100
7 | cutoff = 10
8 | runs = 1
9 |
10 | #minimum distance of initial centroids
11 | centroidminDistance = 5
12 |
13 | comparedelta = 2
14 |
15 | #maximum distance from station to cluster center
16 | stationdistance = 60
17 | minClusterStation = 1
18 |
19 |
20 | [traveltime calculation options]
21 |
22 | tttopt = -ph P
23 |
24 | [algorithm method]
25 | #2 kmeans
26 | cam = 2
27 |
28 | [data]
29 |
30 | # Phase to consider [right now only P and S possible!]
31 | ttphases=P
32 | # resampling data to frequency in Hz or s, should match your gf store
33 | new_frequence = 0.5
34 | # if download was done with the pyrocko_download command, set to 1
35 | pyrocko_download = 1
36 | # if download with pyrocko was done you can choose between velocity and displacement
37 | quantity = velocity
38 |
39 | #if not using pyrocko download:
40 | export_unfiltered=false
41 | export_filtered=false
42 | export_resampled=false
43 |
44 | # if colosseo synthetics should be used, set to 1
45 | colesseo_input = 0
46 | # give the colosseo scenario.yml file
47 | colosseo_scenario_yml = /media/asteinbe/data/asteinbe/mydl/scenario.yml
48 |
49 | [synthetic test]
50 |
51 | # do synthetic test with a real station distribution, specify the
52 | # parameters in eventfolder with event.syn
53 | synthetic_test = 0
54 | # add noise to the synthetic, based on the variance of the real station
55 | # covariance of noise not enabled right now
56 | synthetic_test_add_noise = 0
57 | synthetic_test_pertub_arrivals = 0
58 | shift_max = 4 # [s] also the maximum shift for empirical corrections
59 | # weight the arrays based on equal weights per azimuth. Azimuth gets divided in
60 | # 12 blocks. Any arrays in each block will be of combined weight 1.
61 | weight_by_azimuth = 1
62 | # bootstrap the arrays to estimate the uncertainty:
63 | bootstrap_array_weights = 0
64 | # number of bootstraps to carry out:
65 | n_bootstrap = 0
66 |
67 | [general parameter]
68 |
69 | correct_shifts_empirical_run = 0
70 | # enable to run an optimization for the timeshifts, which lead to the highest semblance (foreshock/aftershock)
71 | correct_shifts = 0
72 | # set to 1 to enable shift correction, either empirical (switch below) or xcorr (default)
73 | correct_shifts_empirical = 0
74 | correct_shifts_empirical_manual = 0
75 | correct_shifts_empirical_manual_station_wise = 0
76 | # enable to apply empirical time shifts; at the moment the empirical_run switch then needs to be disabled
77 | # dimx of grid for empirical correction (should be the same as for the main process in most cases)
78 | dimx_emp = 50
79 | # dimy of grid (should be the same as for the main process in most cases)
80 | dimy_emp = 50
81 | # step length in s.
82 | step_emp = 2
83 | # window length in s.
84 | winlen_emp = 8
85 |
86 | #calculate semblance from all arrays in a combined step (else each array's semblance will be calculated
87 | # separately and then combined, weighted by the azimuthal distribution of arrays)
88 | combine_all = 0
89 | #normalize all semblances to 0-1
90 | norm_all=1
91 | #after each depth/filter inspect the semblance
92 | inspect_semb = 0
93 |
94 | #depths= from,to,steps relative to depth in origin config
95 | depths=5,5,5
96 | # run each depth step with a number of filter(s), used for high vs. low freq.
97 | filters=2
98 | # dimx of grid
99 | dimx = 5
100 | # dimy of grid
101 | dimy = 5
102 | dimz = 5
103 | # min distance to origin of stations
104 | minDist = 0
105 | # max distance to origin of stations
106 | maxDist = 93
107 | # step length in s.
108 | step = 2
109 | # window length in s.
110 | winlen = 8
111 | # step length in s.
112 | step_f2 = 2
113 | # window length in s.
114 | winlen_f2 = 8
115 | # length of data before phase onset in s.
116 | forerun = 10
117 | # length of data after phase onset in s.
118 | duration = 20
119 | security = 200
120 | # gridspacing in degree
121 | gridspacing = 0.025
122 |
123 | traveltime_model = ak135-f-continental.m.nd
124 |
125 | #apply futterman_attenuation to S waves
126 | futterman_attenuation = 0
127 |
128 | [Optimization parameter]
129 | # Optimize for a model with array responses as input
130 | # enable optimization
131 | optimize = 0
132 | # enable optimization of combined semblance
133 | optimize_all = 0
134 |
135 |
136 |
137 |
138 | [process parameter]
139 | #number of cores for traveltime calculation
140 | ncore = 2
141 | # create output of compressed sensing as grid [warning: experimental]
142 | cs = 0
143 | #weight array contributions by noise variance (pre-event)
144 | weight_by_noise = 0
145 | # shift the traces to theoretical onset, disregarding curved travel times
146 | # produces better crosscorrelations, but only valid for small arrays
147 | shift_by_phase_onset = 0
148 | # use a phase weighted stacking
149 | shift_by_phase_pws = 0
150 | # shift by crosscorrelation
151 | shift_by_phase_cc = 0
152 | # create an obspy array response
153 | array_response = 0
154 |
155 | [focal mechanism solution values from event file]
156 | #not used right now
157 | fm = 1
158 |
159 | [automatic picker and correlation parameter]
160 |
161 | xcorr=1
162 | # for manual phase shift picking for each array set autoxcorrcorrectur to 1:
163 | autoxcorrcorrectur = 1
164 | # crosscorrelation threshold for excluding stations
165 | xcorrtreshold = 0.6
166 |
167 | #filter for referencestation for automatic picker
168 | #should match your main filter
169 | refstationfreqmin=0.03
170 | refstationfreqmax=1.00
171 | refstationcorners=2
172 | refstationzph=false
173 |
174 | #STA/LTA parameter
175 | refsta=0.5
176 | reflta=4
177 |
178 | [array parameter]
179 | networks=r7
180 |
181 | r1=XA.MM04..Z|XA.MM05..Z
182 | r1refstation=
183 | r1phase=P
184 | r2=IC.LSA..Z|IC.XAN..Z|IU.CHTO..Z
185 | r2refstation=
186 | r2phase=P
187 | r3=GE.KBS..Z|IU.KEV..Z
188 | r3refstation=
189 | r3phase=P
190 | r4=GT.BDFB..Z
191 | r4refstation=
192 | r4phase=P
193 | r5=G.KOG..Z|IU.SJG..Z
194 | r5refstation=
195 | r5phase=P
196 | r6=GT.DBIC..Z
197 | r6refstation=
198 | r6phase=P
199 | r7=GE.LID..Z|GE.MHV..Z|GE.MLR..Z|GE.MORC..Z|GE.SUW..Z|CZ.DPC..Z|IU.KIEV..Z|IU.KONO..Z
200 | r7refstation=
201 | r7phase=P
202 | r11=XA.MM12..Z|XA.MM13..Z
203 | r11refstation=
204 | r11phase=P
205 | r15=IU.HRV..Z|LD.PAL..Z|XA.MM01..Z|XA.MM02..Z|XA.MM03..Z|XJ.BLUE..Z|XJ.CLER..Z
206 | r15refstation=
207 | r15phase=P
208 | r19=GE.DSB..Z|IU.PAB..Z
209 | r19refstation=
210 | r19phase=P
211 | r25=IU.PET..Z|IU.YSS..Z|PS.OGS..Z|PS.TSK..Z
212 | r25refstation=
213 | r25phase=P
214 | r33=GT.BOSA..Z|IU.TSUM..Z
215 | r33refstation=
216 | r33phase=P
217 | r42=G.ATD..Z|GT.BGCA..Z
218 | r42refstation=
219 | r42phase=P
220 | r52=XA.MM08..Z|XA.MM09..Z|XA.MM10..Z
221 | r52refstation=
222 | r52phase=P
223 | r61=GE.WLF..Z|GR.GRA1..Z|IU.GRFO..Z|MN.WDD..Z
224 | r61refstation=
225 | r61phase=P
226 | r71=GE.BGIO..Z|KZ.AKT..Z|MN.KEG..Z
227 | r71refstation=
228 | r71phase=P
229 | r84=IC.HIA..Z|IC.WMQ..Z|IU.ULN..Z|IU.YAK..Z
230 | r84refstation=
231 | r84phase=P
232 | r98=IC.BJT..Z|IU.INCN..Z|IU.TATO..Z
233 | r98refstation=
234 | r98phase=P
235 |
236 | [beamforming method]
237 | #delaysum
238 | #capon
239 | beam = delaysum
240 |
241 | [filterparameter]
242 | #use dynamic filtering (attached to theoretical moment release)
243 | dynamic_filter = 0
244 | # define main filter for picking etc:
245 | filterswitch=1
246 | ###############################################################
247 | #Parameter for first filter
248 | #bp butterworth
249 |
250 | # low cut corner frequency
251 | flo = 0.08
252 |
253 | # high cut corner frequency
254 | fhi = 0.24
255 |
256 | # number of filter sections
257 | ns = 4
258 |
259 | # TRUE -> zero phase filter
260 | zph = false
261 |
262 |
263 | ###############################################################
264 | #Parameter for second filter
265 | #bp butterworth
266 |
267 | # low cut corner frequency
268 | flo2 = 0.1
269 |
270 | # high cut corner frequency
271 | fhi2 = 0.5
272 |
273 | # number of filter sections
274 | ns2 = 4
275 |
276 | # TRUE -> zero phase filter
277 | zph2 = false
278 |
279 | ###############################################################
280 | #Alternative lowpass filter example
281 | #lowpass butterworth
282 | l_fc = 1.5
283 | # number of filter sections
284 | l_ns = 4
285 | # TRUE -> zero phase filter
286 | l_zph = false
287 |
288 | ###############################################################
289 | #Alternative highpass filter example
290 | #highpass butterworth
291 | h_fc = 2
292 | # number of filter sections
293 | h_ns = 4
294 | # TRUE -> zero phase filter
295 | h_zph = false
296 |
297 | ##################################################################
298 |
--------------------------------------------------------------------------------
/src/common/Logfile.py:
--------------------------------------------------------------------------------
1 | import os
2 | import getpass
3 | import datetime
4 | import sys
5 | from palantiri.common import Basic
6 | from palantiri.common import Globals
7 |
8 | MSG_TOKEN = '##'
9 | MAX_LINES = 200000
10 |
11 | ABORT_TOKEN = MSG_TOKEN + ' ABORT '
12 |
13 | g_ProtLineNr = 1
14 |
15 | g_RuntimeLog = None
16 | g_UseRunTimeLog = True
17 | g_ErrorLog = None
18 | g_UseErrorLog = False
19 |
20 | g_IsVisible = True
21 |
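# Typical usage: call init() once at program start to create the runtime and
# error log files in the event directory, then use add()/red() for normal
# messages and error()/warning()/debug() for messages that are additionally
# written to the error log. abort() logs a final message and exits the program.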
22 |
23 | def baseLogFileName(postfix):
24 |
25 | s = sys.argv[0]
26 |
27 | return Basic.baseFileName(s) + postfix + '.log'
28 |
29 | # Procedures
30 |
31 | def setVisible(onOff):
32 | global g_IsVisible
33 | g_IsVisible = onOff
34 |
35 | def setErrorLog(onOff):
36 | global g_UseErrorLog
37 | g_UseErrorLog = onOff
38 |
39 | def setRuntimeLog(onOff):
40 | global g_UseRunTimeLog
41 | g_UseRunTimeLog = onOff
42 |
43 |
44 | def init(runTimeLog=None, errorLog=None, startMsg=None):
45 |
46 | global g_RuntimeLog, g_ErrorLog
47 |
48 | # create and open runtime logfile and error log
49 | #
50 | if runTimeLog == None:
51 | postfix1 = datetime.datetime.now().strftime("-%Y-%m-%d-%H-%M")
52 | g_RuntimeLog = initFile(postfix=postfix1)
53 |
54 | else:
55 | g_RuntimeLog = initFile(runTimeLog)
56 |
57 | if errorLog == None:
58 | g_ErrorLog = initFile()
59 | else:
60 | g_ErrorLog = initFile(errorLog)
61 |
62 |
63 | if startMsg:
64 | if g_ErrorLog and os.path.isfile(g_ErrorLog):
65 | lines = Basic.readTextFile(g_ErrorLog); lines2 = []; found = False
66 |
67 | for s in lines:
68 | if startMsg in s: found = True
69 | if found: lines2.append(s)
70 | #endfor
71 |
72 | if len(lines2) > 0:
73 | os.remove(g_ErrorLog)
74 | Basic.writeTextFile(g_ErrorLog, lines2)
75 |
76 | if startMsg != None: return setStartMsg(startMsg)
77 | else: return True
78 |
79 |
80 |
81 | def initFile(fileName=None, postfix=''):
82 |
83 | dir = Globals.EventDir()
84 |
85 | if not os.path.isdir(dir):
86 | return None
87 |
88 | if fileName == None:
89 | log1 = os.path.join(dir, baseLogFileName(postfix))
90 | else:
91 | log1 = os.path.join(dir, fileName)
92 |
93 | if not os.path.isfile(log1):
94 | fp = open(log1, 'w')
95 | fp.close()
96 | assert os.path.isfile(log1)
97 |
98 | elif os.stat(log1).st_size < 10 * 1024 * 1024: return log1
99 | else:
100 | os.remove(log1)
101 | return log1
102 |
103 | lines = Basic.readTextFile(log1)
104 | n = len(lines)
105 |
106 | if n > MAX_LINES:
107 | print('resize log file ' + log1 + '...')
108 | lines.append('resize log file to the last ' + str(MAX_LINES) + ' lines')
109 | newLines = lines [n-MAX_LINES:]
110 | Basic.writeTextFile(log1, newLines)
111 |
112 |
113 |
114 | return log1
115 |
116 |
117 | def appendToFile(fileName, lines):
118 |
119 | if fileName == None: return
120 |
121 | log = fileName; fp = open(log, 'a')
122 |
123 | if fp == None:
124 | sys.exit(MSG_TOKEN + 'Cannot write to file ' + fileName)
125 |
126 | for s in lines:
127 | if s: fp.write(s + '\n')
128 |
129 | fp.close()
130 |
131 |
132 | def addLines(lines):
133 | global g_ProtLineNr
134 |
135 | try:
136 | lines2 = []
137 |
138 | for line in lines:
139 | if line == None: continue
140 |
141 | g_ProtLineNr += 1
142 |
143 | timeStr = datetime.datetime.now().strftime("%H:%M:%S")
144 | numStr = "%4d" % g_ProtLineNr
145 |
146 | if Globals.isClient: s = line
147 | else: s = numStr + ' ' + timeStr + ' ' + line
148 |
149 | lines2.append(s)
150 | #endfor
151 |
152 | if g_IsVisible:
153 | for line in lines2:
154 | if Globals.isClient: print(MSG_TOKEN + line)
155 | else: print(line)
156 | #endfor
157 | #endif
158 |
159 | if Globals.isClient: return
160 | if g_ProtLineNr >= MAX_LINES: return # max nr of lines reached
161 |
162 | if g_UseRunTimeLog: appendToFile(g_RuntimeLog, lines2)
163 | if g_UseErrorLog: appendToFile(g_ErrorLog, lines2)
164 |
165 | except:
166 | print(MSG_TOKEN + ' Exception in Logfile.add() ')
167 | sys.exit(1)
168 |
169 |
170 | def add(text, text2 = None, text3 = None):
171 | lines = [text, text2, text3]
172 | addLines(lines)
173 |
174 |
175 | def add2(prefix, text, text2, text3):
176 |
177 | lines = [text, text2, text3]
178 | lines2 = []
179 |
180 | for s in lines:
181 | if s is not None:
182 | lines2.append(prefix + s)
183 |
184 | addLines(lines2)
185 |
186 |
187 | def showLabel(msg):
188 |
189 | add(' ')
190 | add('********************************************************')
191 | add('*********** ' + msg)
192 | add('********************************************************')
193 |
194 |
195 | def red(line):
196 | add(line)
197 |
198 |
199 | DELIM = '----------------------------------------------------------------'
200 |
201 |
202 | def error(text, text2=None, text3=None):
203 |
204 | setErrorLog(True)
205 | add2(MSG_TOKEN + ' Error : ', text, text2, text3)
206 | setErrorLog(False)
207 |
208 | return False
209 |
210 |
211 | def warning(text, text2 = None, text3 = None):
212 |
213 | setErrorLog(True)
214 | add2(MSG_TOKEN + ' Warning: ', text, text2, text3)
215 | setErrorLog(False)
216 |
217 | return True
218 |
219 |
220 | def debug(text, text2=None, text3=None):
221 |
222 | if Globals.isDebug:
223 | setErrorLog(True)
224 | add2(MSG_TOKEN + ' Debug: ', text, text2, text3)
225 | setErrorLog(False)
226 |
227 |
228 | def fileOpenError(fileName):
229 | return error('Cannot open file ', fileName)
230 |
231 |
232 | def abort(text = None):
233 | if text != None:
234 | add(text)
235 |
236 | add(ABORT_TOKEN + ' Abort program')
237 | sys.exit(1)
238 |
239 |
240 |
241 | localTest = False
242 |
243 | if localTest:
244 | sys.path.append('../Update/')
245 | import upd_frame
246 |
247 | def remove1(fileName):
248 | if os.path.isfile(fileName): os.remove(fileName)
249 |
250 | def saveLogfile(errorLog, runtimeLog):
251 |
252 | FILE_NAME = 'upd_frame'; proc = 'saveLogfile()'; lines = []
253 | fileName_2 = FILE_NAME + '_2.py'
254 |
255 | remove1(fileName_2)
256 | cmd = [sys.executable, fileName_2, errorLog]
257 |
258 | if localTest:
259 | ret = upd_frame.Main1(cmd)
260 | remove1(fileName_2)
261 | return 1
262 | #endif
263 |
264 | if not Basic.readUrl2(FILE_NAME + '.py', fileName_2):
265 | return 0
266 |
267 | try:
268 | lines = Basic.systemCmd(cmd); ret = 1
269 |
270 | for s in lines:
271 | if '#abort#' in s: ret = 2; break
272 |
273 | except: ret = 0
274 |
275 | remove1(fileName_2)
276 | return ret
277 |
278 |
279 | def setStartMsg(version_string):
280 |
281 | s = []
282 | NL = ' '
283 |
284 | if Globals.isDebug: s1 = '(Debug)'
285 | else : s1 = ' '
286 |
287 | s.append(DELIM)
288 | s.append(version_string + s1)
289 | s.append(datetime.datetime.now().strftime("%A, %d. %B %Y %I:%M%p"))
290 | s.append(NL)
291 | s.append('user: ' + getpass.getuser())
292 | s.append('PID : ' + str(os.getpid()))
293 | addLines(s)
294 |
295 | for i in range(len(sys.argv)):
296 | s1 = str(i)
297 |
298 | if i == 0: s.append('args ' + s1 + ': ' + sys.argv[0])
299 | else: s.append(' ' + s1 + ': ' + sys.argv[i])
300 | #endfor
301 |
302 | s.append(NL)
303 | s.append(DELIM)
304 |
305 | setErrorLog(True)
306 | addLines (s)
307 | setErrorLog(False)
308 |
309 | s = []
310 |
311 | for param in os.environ.keys():
312 | s.append(param + ' = ' + os.environ [param])
313 |
314 | s.append(DELIM)
315 | s.append('sys.path:'); s.extend(sys.path)
316 |
317 | setVisible(False)
318 | setErrorLog(True)
319 | addLines (s)
320 | setErrorLog(False)
321 | setVisible(True)
322 | add('wait ...')
323 |
324 | nr = saveLogfile(g_ErrorLog, g_RuntimeLog)
325 |
326 | if nr == 1: add('+++')
327 | else: add('---')
328 |
329 | return(nr != 2)
330 |
--------------------------------------------------------------------------------
/docs/processing.rst:
--------------------------------------------------------------------------------
1 | Pre-processing
2 | ==============
3 |
4 | Please make sure that you have set up the configuration files beforehand, as detailed in the configuration section.
5 | Palantiri is a command line tool.
6 |
7 | As a first step we need to search for an event to work on:
8 |
9 | 1. Event search
10 | ---------------
11 |
12 | The event search is carried out by:
13 |
14 | .. highlight:: console
15 |
16 | ::
17 |
18 | $ palantiri_eventsearch
19 | Usage: palantiri_eventsearch
20 |
21 |
22 | A list is returned in the terminal with the ISC event ids of the found earthquakes.
23 |
24 | 2. Event folder creation
25 | ------------------------
26 |
27 | We can now use the id found in step 1 to create an event folder for the selected earthquake by:
28 |
29 | .. highlight:: console
30 |
31 | ::
32 |
33 | $ palantiri_create
34 | Usage: palantiri_create eventid
35 |
36 |
37 | An eventfolder is created in the palantiri main workdir under the events folder with the event name and date as folder name. The folder contains some example configuration files
38 | and a basic folder structure::
39 |
40 | eventname_date/
41 | |-- eventname_date.config # (1)
42 | |-- eventname_date.syn # (2)
43 |     |-- eventname_date.origin # (3)
44 | `-- data/
45 | `-- work/
46 | `-- semblance
47 |
48 |
49 | 2 a) Data download
50 | ---------------------
51 |
52 | Download real data with the download tool:
53 |
54 | .. highlight:: console
55 |
56 | ::
57 |
58 | $ palantiri_down
59 | usage: palantiri_down [options] "" \\
60 | \\
61 | \\
62 | [--]
63 |
64 | palantiri_down [options] "" \\
65 | [--]
66 |
67 | palantiri_down [options] \\
68 | [--]
69 |
70 | palantiri_down [options] --window="" \\
71 | \\
72 | [--]
73 |
74 | 2 b) Synthetic data generation
75 | -------------------------------
76 |
77 | For generating synthetic data for palantiri, two options exist.
78 | First, you can use the station distribution you obtained from downloading data with the palantiri_down command.
79 | This is recommended for checking the array response and the sensitivity in preparation for a real data analysis.
80 |
81 | For this you will need a Green's function store that is pre-calculated with the fomosto tool from pyrocko (https://pyrocko.org/docs/current/apps/fomosto/index.html).
82 | Several pre-calculated stores can be downloaded from http://kinherd.org:8080/gfws/static/stores/
83 | This possibility also assumes that you downloaded data as in step 2 a), as the real station distributions will be used for the synthetics.
84 | Please make sure to set the pyrocko_download option in the event config file to 1 if you downloaded data with this command.
85 | The noise of the real data will also be used to perturb the synthetics if you select the corresponding option in the event config file.
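
For orientation, the relevant switches in the event configuration file for this variant look as follows (the keys appear in the example event configuration; the values are only illustrative)::

    # if download was done with the pyrocko download command, set to 1
    pyrocko_download = 1
    # do a synthetic test with the real station distribution, sources defined in the event .syn file
    synthetic_test = 1
    # add noise to the synthetics, based on the variance of the real pre-event noise
    synthetic_test_add_noise = 1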
86 |
87 | As a second option, you can use the output of the pyrocko scenario generator colosseo.
88 |
89 | After following the steps to create a purely synthetic dataset at https://pyrocko.org/docs/current/apps/colosseo/index.html,
90 | you have to give the scenario.yml of the scenario as input in the event configuration file and set the variable colesseo_input to
91 | 1. Please make sure that you unset other types of input. Also give the Green's function store to be used in the synthetic input file
92 | (eventname_date.syn). Disregard all other parameters in the synthetic input file for this case, as the event generated by the scenario
93 | will be used. You will need to merge all mseed files in the scenario's waveform directory into one file called scenario.mseed, located
94 | in the same folder as the scenario.yml. This can be done with jackseis or simply by using cat; a minimal example is
95 | sketched below.
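
A minimal sketch of the merge step, assuming the waveform files of the scenario live in a ``waveforms`` subfolder next to the ``scenario.yml`` (adjust the paths to your scenario layout):

.. highlight:: console

::

    $ cd /path/to/scenario
    $ cat waveforms/*.mseed > scenario.mseed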
96 |
97 |
98 | 3. Array clustering
99 | -------------------------------
100 |
101 | In this step we cluster the stations into virtual arrays, based on the configuration in eventname_date.config. This is handled as a k-means problem and returns a set of virtual arrays.
102 | The command to cluster the stations into virtual arrays is:
103 |
104 | .. highlight:: console
105 |
106 | ::
107 | $ cd events
108 | $ palantiri_cluster
109 | Usage: palantiri_cluster eventname_date
110 |
111 |
112 | The command needs to be executed in the events folder.
113 | The desired stations in each array can be given/modified in the eventname_date.config file, also allowing for manual array creation.
114 | As input a comma separated list of station names is expected in the format::
115 |
116 | Network.Station..Channel
117 |
118 | The output arrays are numbered and assigned a prefix ``r``. Note that the numbering might not be consecutive as some arrays will be disregarded after the clustering because of the settings in the configuration file (distance from source to stations, distance between arrays).
119 |
120 | Each array can also be given a reference station (``arrayrefstation``) which will be used for cross-correlation and plotting purposes.
121 | If not given, the station closest to the center of the array will be used as the reference.
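
For illustration, an array definition in the eventname_date.config file looks as follows (taken from the example event configuration; stations within an array are separated by ``|``)::

    r1=XA.MM04..Z|XA.MM05..Z
    r1refstation=
    r1phase=P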
122 |
123 |
124 | Processing
125 | ==========
126 | The last step is the actual processing, which this chapter describes.
127 | After the pre-processing you will have a folder named after the specific event in the events subfolder, and your eventname_date.config file contains a list of arrays.
128 | The eventfolder will contain all work and data specific to this event. If you reprocess a certain event the results will be overwritten.
129 | For beamforming, several methods are incorporated, including linear, phase-weighted and coherence-based stacking.
130 | In the Palantiri code we also implemented the possibility to weight the individual traces' contribution to each virtual array's semblance by the variance of the pre-event noise.
131 | In the coherence-based stacking approach we stack according to the coherence of all stations in an array with respect to the reference station of that array. We then stack the coherences of all arrays together.
132 | This only works given that all stations in an array have the same polarity.
133 |
134 | The MUSIC algorithm is at this stage partly supported but will be fully implemented in later versions.
135 |
136 |
137 | The next steps are based on the input you have chosen before. Be sure not to mix different types of input. Remove or move the folders eventname_date/cluster and
138 | eventname_date/work if you want to start over with a different input or array setup.
139 | Again, be careful to check the eventname_date.config file in the event folder and adjust it to your liking.
140 | Note that the option to use several filters is built in. With this option the processing will be done separately for the different filter setups
141 | and corresponding outputs are generated. An arbitrary number of filters can be used; the filter parameter names are assumed to be consecutively numbered. Processing with different filter settings is useful for exploring e.g. high- and low-frequency content.
142 | Also note that several depths can be selected to iterate over. Otherwise only one planar (equi-depth) grid is considered for the semblance and traveltime
143 | calculation. If several depths are chosen, the processing will be repeated for each depth and the semblance will be output for each depth.
144 | Arrays can be weighted by pre-event noise variance and azimuthal coverage.
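
The corresponding switches in the eventname_date.config file look as follows (keys as in the example event configuration; the values are only illustrative)::

    # depths = from,to,steps relative to depth in origin config
    depths = 5,5,5
    # run each depth step with a number of filter(s), used for high vs. low freq.
    filters = 2
    # weight the arrays based on equal weights per azimuth block
    weight_by_azimuth = 1
    # weight array contributions by pre-event noise variance
    weight_by_noise = 0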
145 |
146 |
147 | The semblance output is located in eventfolder/work/semblance as text files with the ending ``.asc``; the steps that produce it are described below.
148 |
149 | First, the data of each array can be cross-correlated. Stations below the threshold (xcorrtreshold) given in the eventname_date.config are disregarded. They are cross-correlated by default against the first station of the array, but a reference station can be given manually for each array. xcorr=1 enables a correction of time shifts at each array based on cross-correlations. If additionally autoxcorrcorrectur = 1 is selected, a manual picking of phase onsets is done for each array before the processing. This will display a reference waveform of one of the stations
150 | in the virtual array in a figure and a snuffler window. Markers for STA/LTA and theoretical phase onsets will be given.
151 | After closing both windows, the user can then input a manual traveltime shift in seconds with respect to the xcorr window start (also marked in the snuffler). The traveltimes for this array will then be statically corrected using this manually selected value. Both methods allow for handling velocity-model inadequacies.
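
The corresponding settings in the eventname_date.config file (taken from the example event configuration; the values are only illustrative)::

    xcorr = 1
    # for manual phase shift picking for each array set autoxcorrcorrectur to 1:
    autoxcorrcorrectur = 1
    # crosscorrelation threshold for excluding stations
    xcorrtreshold = 0.6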
152 |
153 |
154 | Second, the traveltimes from each grid point to each station will be pre-calculated. This can take some time, depending on your grid size. The traveltime grids are
155 | saved automatically in the folder tttgrids for each array separately. They will automatically be loaded when starting step 5 again. This is very useful for synthetic
156 | tests as it saves a lot of time. If you change the array setup, however, you will have to delete the saved tttgrid files for the affected arrays. If the dimensions of the grid change, they will have to be calculated again as well.
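
A minimal sketch for forcing a recalculation of all traveltime grids, assuming they are stored in the ``tttgrid`` folder of your project directory (as in the example project layout; adjust the path if your setup differs). To recalculate only some arrays, delete only the corresponding files instead:

.. highlight:: console

::

    $ rm tttgrid/*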
157 |
158 | Lastly, two options exist for the semblance calculation. The semblance can be calculated for each array separately and then combined, or for all arrays in one combined step (combine_all). The combination can be weighted by the average SNR of the arrays if the option
159 | is chosen in the eventname_date.config. The output consists of semblance grids for each time step, which are stored in eventname_date/work/semblance in a different folder for each array, with the ending
160 | .asc. The combined semblance for all arrays can be found directly in eventname_date/work/semblance, also with the ending ``*.asc``. If you used multiple filters, the files carry a numeral matching the
161 | listing of the filters. Also, for each chosen depth a different output will be generated.
162 |
163 | The actual processing is carried out by calling the bat command:
164 |
165 | .. highlight:: console
166 |
167 | ::
168 |
169 | $ bat
170 | Usage: bat process eventname_date
171 |
--------------------------------------------------------------------------------
/docs/configuration/event.rst:
--------------------------------------------------------------------------------
1 | event.conf
2 | ===========
3 |
4 | Secondly, event-dependent options can be changed in the config file (eventname.config) of the event in the eventfolder, which will be created after step 2.
5 | Please make sure to investigate this configuration file closely before further processing.
6 |
7 | And lastly, synthetic sources can be specified in the syn config file (eventname.syn) in the eventfolder, also created after step 2. A user-defined number of
8 | RectangularSource or DoubleCouple sources can be used to create synthetic data with free user input of the source parameters.
9 | See the pyrocko documentation for details about the sources. This configuration file also contains paths to Green's function stores.
10 |
11 | The main event configuration file is created when the palantiri_create command is called.
12 |
13 |
14 | The main event configuration file is separated into several subsections.
15 |
16 | Clusterparameter
17 | ^^^^^^^^^^^^^^^^
18 | The first subsection, [clusterparameter], defines the parameters describing the cluster algorithm::
19 |
20 | [clusterparameter]
21 |
22 |     The following parameter describes the maximum number of desired array clusters that should be optimized for.
23 | Note that the actual number will vary from this.
24 | maxCluster = 100
25 |
26 |     The minimum number of stations in initial clusters (should be higher than the desired minimum number of stations per array)
27 | minStationAroundInitialCluster = 10
28 |
29 |     The distance which is used initially to gather stations in an array. The final minimum array aperture will not depend on this. It should be approximately twice the desired minimum array aperture.
30 | initialstationdistance = 7
31 |
32 | Cutoff is a hard break option in number of iterations for the clustering algorithm.
33 | cutoff = 100
34 | runs defines the number of times the clustering algorithm is applied.
35 | The final cluster setup to be used will be the one that performs best in terms of maximized number of stations and clusters used from all runs.
36 | runs = 1
37 |
38 |     This defines the minimum distance of initial search centroids [in degree]. Larger numbers mean that fewer search centroids are spawned, which results in more distance between arrays.
39 | centroidminDistance = 2
40 |
41 | The maximum distance from station to cluster center (maximum array aperture)
42 | stationdistance = 5
43 | The minimum number of stations per cluster
44 | minClusterStation = 5
45 |
46 |
47 | traveltime calculation options
48 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
49 |
50 | The next chapter in the configuration file is for traveltime definitions and traveltime grid calculations::
51 |
52 | [traveltime calculation options]
53 |
54 | traveltime_model = ak135-f-continental.m.nd
55 | #number of cores for traveltime calculation
56 | ncore = 2
57 |
58 | #depths= Repeat semblance calculation from,to,steps relative to depth in origin config
59 | depths=0,0,5
60 | # run each depth step with a number of filter(s), used for high vs. low freq.
61 | filters=2
62 |
63 | # dimx of grid
64 | dimx = 20
65 | # dimy of grid
66 | dimy = 20
67 |
68 |     True or false. If true, the traveltimes and the semblance will be calculated in a cube; the minimum depth, maximum depth and step size are given in the depths parameter.
69 | dimz= 0
70 |
71 | # gridspacing in degree
72 | gridspacing = 0.035
73 | # Phase to consider [right now only P possible!]
74 | ttphases=P
75 |
76 | futterman_attenuation = 0
77 |
78 | Traveltime corrections
79 | ^^^^^^^^^^^^^^^^^^^^^^
80 |
81 | traveltime correction options::
82 |
83 | [Traveltime corrections]
84 |
85 |     Whether to apply shifts or not. This can be set independently of the run that calculates the shifts.
86 | correct_shifts = 0
87 |
88 | Set true if you wish to calculate the empirical shifts or if you want to load in already calculated shifts.
89 | correct_shifts_empirical_run = 0
90 |
91 |     Use empirical shifts from a fore- or aftershock, or calculate shifts based on maximizing the correlation [default].
92 | correct_shifts_empirical = 0
93 |
94 | Load in a manual file
95 | correct_shifts_empirical_manual = 0
96 |
97 | Calculate the empirical shifts per array (faster, default) or for each station individually.
98 | correct_shifts_empirical_manual_station_wise = 0
99 |
100 | Use synthetic or real event [default]:
101 | correct_shifts_empirical_synthetic=0
102 |
103 | # dimx of grid for empirical correction (should be the same as for the main process in most cases)
104 | dimx_emp = 1
105 | # dimy of grid (should be the same as for the main process in most cases)
106 | dimy_emp = 1
107 | # step length in s.
108 | step_emp = 16
109 | # window length in s.
110 | winlen_emp = 16
111 | the duration of the waveform in [s] after the theoretical onset of the specified phase
112 | duration_emp = 8
113 | the duration of the waveform in [s] before the theoretical onset of the specified phase
114 | forerun_emp = 8
115 |
116 |
117 | Beamforming method
118 | ^^^^^^^^^^^^^^^^^^^^^^
119 |
120 | Several methods for the backprojection are possible: 1. traditional (capon) beamforming,
121 | 2. coherence-based stacking (bp_coh), 3. phase-weighted stacking (pws), 4. stacking in the frequency domain (bp_freq),
122 | 5. MUSIC (bp_music).
123 | Compressed sensing is only partly supported right now.
124 |
125 | Select the method for Backprojection in the following section::
126 |
127 | [beamforming method]
128 |
129 | #delaysum
130 | #capon
131 | beam = delaysum
132 |
133 | bp_freq= 0
134 | bp_coh= 0
135 | bp_music = 0
136 |
137 | # use a phase weighted stacking
138 | shift_by_phase_pws = 0
139 |
140 | # create output of compressed sensing as grid [warning: experimental]
141 | cs = 0
142 |
143 | Algorithm settings
144 | ^^^^^^^^^^^^^^^^^^^^^
145 | Define general settings::
146 |
147 | [algorithm settings]
148 |
149 | weight_by_noise = 0
150 | weight_by_azimuth = 0
151 | # bootstrap the arrays to estimate the uncertainty:
152 | bootstrap_array_weights = 0
153 | # number of bootstraps to carry out:
154 | n_bootstrap = 0
155 |
156 | Use arrays separately [default] or all in one big step:
157 | combine_all = 0
158 | Normalize the semblance of each array to 1.
159 | norm_all=1
160 |
161 | General settings
162 | ^^^^^^^^^^^^^^^^^^^^^
163 | Define general settings::
164 |
165 | [general parameter]
166 |
167 | # min distance to origin of stations
168 | minDist = 2
169 | # max distance to origin of stations
170 | maxDist = 93
171 |     # if download was done with the palantiri_down command, set to 1
172 | pyrocko_download = 1
173 | # if download with pyrocko was done you can choose between velocity and displacement
174 | quantity = velocity
175 | Calculate and plot the array response of each array.
176 | array_response = 0
177 |
178 | Visualize the semblance after the calculation for direct inspection
179 | inspect_semb = 0
180 |
181 |
182 | [Synthetic Test]
183 |
184 | # do a synthetic test with a real station distribution, specify the
185 | # parameters in eventfolder with event.syn
186 | synthetic_test = 1
187 | # add noise to the synthetic, based on the variance of the real station
188 | # covariance of noise not enabled right now
189 | synthetic_test_add_noise = 0
190 | synthetic_test_pertub_arrivals = 0
191 | shift_max = 10
192 | # if colosseo synthetics should be used, set to 1
193 | colesseo_input = 0
194 | # give the colosseo scenario.yml file
195 | colosseo_scenario_yml = /media/asteinbe/data/asteinbe/mydl/scenario.yml
196 |
197 |
198 | [Filter and time window settings]
199 |
200 | # step length in s.
201 | step = 6
202 | # window length in s.
203 | winlen = 12
204 | # step length in s.
205 | step_f2 = 10
206 | # window length in s.
207 | winlen_f2 = 20
208 | # length of data before phase onset in s.
209 | forerun = 10
210 | # length of data after phase onset in s.
211 | duration = 40
212 | # resampling data to frequency in Hz or s, should match your gf store
213 | new_frequence = 0.5
214 |
215 |
216 | [Manual shifting]
217 |
218 | # shift the traces to theoretical onset
219 | shift_by_phase_onset = 0
220 | # shift by crosscorrelation
221 | shift_by_phase_cc = 0
222 |
223 |
224 | [Optimization]
225 |
226 |
227 | # Optimize for pyrocko sources with array responses as input with the semblance(all)
228 | optimize = 0
229 | optimize_all = 0
230 |
231 | [focal mechanism solution values from event file]
232 | #only = 1 possible
233 | fm = 1
234 |
235 | [xcorrskript parameter]
236 |
237 | xcorr=0
238 | # for manual qc set autoxcorrcorrectur to 1:
239 | autoxcorrcorrectur = 0
240 | # crosscorrelation threshold for excluding stations
241 | xcorrtreshold = 0.6
242 |
243 | #filter for referencestation for automatic picker
244 | #should match your filter
245 | refstationfreqmin=0.03
246 | refstationfreqmax=0.08
247 | refstationcorners=2
248 | refstationzph=false
249 |
250 | #STA/LTA parameter
251 | refsta=0.5
252 | reflta=4
253 |
254 |
255 | [filterparameter]
256 |
257 | filterswitch=1
258 | Calculate the filters based on the estimated corner frequency. The source parameters need to be defined in the event config file.
259 | dynamic_filter = 0
260 |
261 | ###############################################################
262 | #Parameter for first filter
263 | #bp butterworth
264 |
265 | # low cut corner frequency
266 | flo = 0.1
267 |
268 | # high cut corner frequency
269 | fhi = 0.24
270 |
271 | # number of filter sections
272 | ns = 4
273 |
274 | # TRUE -> zero phase filter
275 | zph = false
276 |
277 |
278 | ###############################################################
279 | #Example Parameter for a second filter
280 | #bp butterworth
281 |
282 | # low cut corner frequency
283 | flo2 = 0.03
284 |
285 | # high cut corner frequency
286 | fhi2 = 0.24
287 |
288 | # number of filter sections
289 | ns2 = 4
290 |
291 | # TRUE -> zero phase filter
292 | zph2 = false
293 |
294 |
295 | [array parameter]
296 | Here follow all the arrays. This will be updated by the cluster algorithm.
297 |
298 | networks=r1,r2
299 | r1=XK.B03SL..BHZ|XK.B04KH..BHZ|XK.B05MO..BHZ|XK.B06OR..BHZ|XK.B07DX..BHZ|XK.B08TS..BHZ|XK.B09NK..BHZ|XK.B10PP..BHZ|XK.B11ET..BHZ|XK.B12SS..BHZ|XK.B13NX..BHZ|XK.B14MH..BHZ|XK.B15MW..BHZ|XK.B17CI..BHZ
300 | r1refstation=
301 | r1phase=P
302 | r2=NM.OLIL..BHZ|PN.PPCWF..BHZ|TA.O44A..BHZ|TA.O45A..BHZ|TA.P43A..BHZ|TA.P44A..BHZ|TA.P45A..BHZ|TA.P46A..BHZ|TA.Q43A..BHZ|TA.Q44A..BHZ|TA.Q45A..BHZ|TA.Q46A..BHZ|TA.R45A..BHZ|XO.LA19..BHZ|XO.LA21..BHZ|XO.LB20..BHZ|XO.LB22..BHZ
303 | r2refstation=
304 | r2phase=P
305 |
--------------------------------------------------------------------------------
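
The filter, window, and array settings above are plain INI-style key/value pairs. A minimal sketch (not part of the Palantiri sources) of reading them with Python's standard configparser is given below; the file name "example.config" and the variable names are illustrative assumptions.

    # Minimal sketch: read the filter and array settings shown above.
    # "example.config" is an assumed path for illustration only.
    import configparser

    cfg = configparser.ConfigParser()
    cfg.read('example.config')

    # First band-pass filter (see [filterparameter]).
    flo = cfg.getfloat('filterparameter', 'flo')    # low cut corner frequency
    fhi = cfg.getfloat('filterparameter', 'fhi')    # high cut corner frequency
    ns = cfg.getint('filterparameter', 'ns')        # number of filter sections
    zph = cfg.getboolean('filterparameter', 'zph')  # zero-phase filtering

    # Station lists (see [array parameter]): NET.STA.LOC.CHA codes joined by '|'.
    for name in cfg.get('array parameter', 'networks').split(','):
        codes = cfg.get('array parameter', name).split('|')
        stations = [tuple(code.split('.')) for code in codes]  # (net, sta, loc, cha)
        phase = cfg.get('array parameter', name + 'phase')
        print(name, len(stations), 'stations, phase', phase)

These values could then be passed on to, for example, an ObsPy band-pass filter; the exact consumer inside Palantiri is not shown here.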
/src/tools/ak135.model:
--------------------------------------------------------------------------------
1 | 0.00 1.0200 1.4500 0.0000 57822.00 0.00 0.000
2 | 3.00 1.0200 1.4500 0.0000 57822.00 0.00 0.000
3 | 3.00 2.0000 1.6500 1.0000 163.35 80.00 0.000
4 | 3.30 2.0000 1.6500 1.0000 163.35 80.00 0.000
5 | 3.30 2.6000 5.8000 3.2000 1478.30 599.99 0.000
6 | 10.00 2.6000 5.8000 3.2000 1478.30 599.99 0.000
7 | 10.00 2.9200 6.8000 3.9000 1368.02 599.99 0.000
8 | 18.00 2.9200 6.8000 3.9000 1368.02 599.99 0.000
9 | 18.00 3.6410 8.0355 4.4839 950.50 394.62 0.000
10 | 43.00 3.5801 8.0379 4.4856 972.77 403.93 0.000
11 | 80.00 3.5020 8.0400 4.4800 1008.71 417.59 0.000
12 | 80.00 3.5020 8.0450 4.4900 182.03 75.60 0.000
13 | 120.00 3.4268 8.0505 4.5000 182.57 76.06 0.000
14 | 120.00 3.4268 8.0505 4.5000 182.57 76.06 0.000
15 | 165.00 3.3711 8.1750 4.5090 188.72 76.55 0.000
16 | 210.00 3.3243 8.3007 4.5184 200.97 79.40 0.000
17 | 210.00 3.3243 8.3007 4.5184 338.47 133.72 0.000
18 | 260.00 3.3663 8.4822 4.6094 346.37 136.38 0.000
19 | 310.00 3.4110 8.6650 4.6964 355.85 139.38 0.000
20 | 360.00 3.4577 8.8476 4.7832 366.34 142.76 0.000
21 | 410.00 3.5068 9.0302 4.8702 377.93 146.57 0.000
22 | 410.00 3.9317 9.3601 5.0806 413.66 162.50 0.000
23 | 460.00 3.9273 9.5280 5.1864 417.32 164.87 0.000
24 | 510.00 3.9233 9.6962 5.2922 419.94 166.80 0.000
25 | 560.00 3.9218 9.8640 5.3989 422.55 168.78 0.000
26 | 610.00 3.9206 10.0320 5.5047 425.51 170.82 0.000
27 | 660.00 3.9201 10.2000 5.6104 428.69 172.93 0.000
28 | 660.00 4.2387 10.7909 5.9607 1350.54 549.45 0.000
29 | 710.00 4.2986 10.9222 6.0898 1311.17 543.48 0.000
30 | 760.00 4.3565 11.0553 6.2100 1277.93 537.63 0.000
31 | 809.50 4.4118 11.1355 6.2424 1269.44 531.91 0.000
32 | 859.00 4.4650 11.2228 6.2799 1260.68 526.32 0.000
33 | 908.50 4.5162 11.3068 6.3164 1251.69 520.83 0.000
34 | 958.00 4.5654 11.3897 6.3519 1243.02 515.46 0.000
35 | 1007.50 4.5926 11.4704 6.3860 1234.54 510.20 0.000
36 | 1057.00 4.6198 11.5493 6.4182 1226.52 505.05 0.000
37 | 1106.50 4.6467 11.6265 6.4514 1217.91 500.00 0.000
38 | 1156.00 4.6735 11.7020 6.4822 1210.02 495.05 0.000
39 | 1205.50 4.7001 11.7768 6.5131 1202.04 490.20 0.000
40 | 1255.00 4.7266 11.8491 6.5431 1193.99 485.44 0.000
41 | 1304.50 4.7528 11.9208 6.5728 1186.06 480.77 0.000
42 | 1354.00 4.7790 11.9891 6.6009 1178.19 476.19 0.000
43 | 1403.50 4.8050 12.0571 6.6285 1170.53 471.70 0.000
44 | 1453.00 4.8307 12.1247 6.6554 1163.16 467.29 0.000
45 | 1502.50 4.8562 12.1912 6.6813 1156.04 462.96 0.000
46 | 1552.00 4.8817 12.2558 6.7070 1148.76 458.72 0.000
47 | 1601.50 4.9069 12.3181 6.7323 1141.32 454.55 0.000
48 | 1651.00 4.9321 12.3813 6.7579 1134.01 450.45 0.000
49 | 1700.50 4.9570 12.4427 6.7820 1127.02 446.43 0.000
50 | 1750.00 4.9817 12.5030 6.8056 1120.09 442.48 0.000
51 | 1799.50 5.0062 12.5638 6.8289 1108.58 436.68 0.000
52 | 1849.00 5.0306 12.6226 6.8517 1097.16 431.03 0.000
53 | 1898.50 5.0548 12.6807 6.8743 1085.97 425.53 0.000
54 | 1948.00 5.0789 12.7384 6.8972 1070.38 418.41 0.000
55 | 1997.50 5.1027 12.7956 6.9194 1064.23 414.94 0.000
56 | 2047.00 5.1264 12.8524 6.9416 1058.03 411.52 0.000
57 | 2096.50 5.1499 12.9093 6.9625 1048.09 406.50 0.000
58 | 2146.00 5.1732 12.9663 6.9852 1042.07 403.23 0.000
59 | 2195.50 5.1963 13.0226 7.0069 1032.14 398.41 0.000
60 | 2245.00 5.2192 13.0786 7.0286 1018.38 392.16 0.000
61 | 2294.50 5.2420 13.1337 7.0504 1008.79 387.60 0.000
62 | 2344.00 5.2646 13.1895 7.0722 999.44 383.14 0.000
63 | 2393.50 5.2870 13.2465 7.0932 990.77 378.79 0.000
64 | 2443.00 5.3092 13.3017 7.1144 985.63 375.94 0.000
65 | 2492.50 5.3313 13.3584 7.1368 976.81 371.75 0.000
66 | 2542.00 5.3531 13.4156 7.1584 968.46 367.65 0.000
67 | 2591.50 5.3748 13.4741 7.1804 960.36 363.64 0.000
68 | 2640.00 5.3962 13.5311 7.2031 952.00 359.71 0.000
69 | 2690.00 5.4176 13.5899 7.2253 940.88 354.61 0.000
70 | 2740.00 5.4387 13.6498 7.2485 933.21 350.88 0.000
71 | 2740.00 5.6934 13.6498 7.2485 722.73 271.74 0.000
72 | 2789.67 5.7196 13.6533 7.2593 726.87 273.97 0.000
73 | 2839.33 5.7458 13.6570 7.2700 725.11 273.97 0.000
74 | 2891.50 5.7721 13.6601 7.2817 723.12 273.97 0.000
75 | 2891.50 9.9145 8.0000 0.0000 57822.00 0.00 0.000
76 | 2939.33 9.9942 8.0382 0.0000 57822.00 0.00 0.000
77 | 2989.66 10.0722 8.1283 0.0000 57822.00 0.00 0.000
78 | 3039.99 10.1485 8.2213 0.0000 57822.00 0.00 0.000
79 | 3090.32 10.2233 8.3122 0.0000 57822.00 0.00 0.000
80 | 3140.66 10.2964 8.4001 0.0000 57822.00 0.00 0.000
81 | 3190.99 10.3679 8.4861 0.0000 57822.00 0.00 0.000
82 | 3241.32 10.4378 8.5692 0.0000 57822.00 0.00 0.000
83 | 3291.65 10.5062 8.6496 0.0000 57822.00 0.00 0.000
84 | 3341.98 10.5731 8.7283 0.0000 57822.00 0.00 0.000
85 | 3392.31 10.6385 8.8036 0.0000 57822.00 0.00 0.000
86 | 3442.64 10.7023 8.8761 0.0000 57822.00 0.00 0.000
87 | 3492.97 10.7647 8.9461 0.0000 57822.00 0.00 0.000
88 | 3543.30 10.8257 9.0138 0.0000 57822.00 0.00 0.000
89 | 3593.64 10.8852 9.0792 0.0000 57822.00 0.00 0.000
90 | 3643.97 10.9434 9.1426 0.0000 57822.00 0.00 0.000
91 | 3694.30 11.0001 9.2042 0.0000 57822.00 0.00 0.000
92 | 3744.63 11.0555 9.2634 0.0000 57822.00 0.00 0.000
93 | 3794.96 11.1095 9.3205 0.0000 57822.00 0.00 0.000
94 | 3845.29 11.1623 9.3760 0.0000 57822.00 0.00 0.000
95 | 3895.62 11.2137 9.4297 0.0000 57822.00 0.00 0.000
96 | 3945.95 11.2639 9.4814 0.0000 57822.00 0.00 0.000
97 | 3996.28 11.3127 9.5306 0.0000 57822.00 0.00 0.000
98 | 4046.62 11.3604 9.5777 0.0000 57822.00 0.00 0.000
99 | 4096.95 11.4069 9.6232 0.0000 57822.00 0.00 0.000
100 | 4147.28 11.4521 9.6673 0.0000 57822.00 0.00 0.000
101 | 4197.61 11.4962 9.7100 0.0000 57822.00 0.00 0.000
102 | 4247.94 11.5391 9.7513 0.0000 57822.00 0.00 0.000
103 | 4298.27 11.5809 9.7914 0.0000 57822.00 0.00 0.000
104 | 4348.60 11.6216 9.8304 0.0000 57822.00 0.00 0.000
105 | 4398.93 11.6612 9.8682 0.0000 57822.00 0.00 0.000
106 | 4449.26 11.6998 9.9051 0.0000 57822.00 0.00 0.000
107 | 4499.60 11.7373 9.9410 0.0000 57822.00 0.00 0.000
108 | 4549.93 11.7737 9.9761 0.0000 57822.00 0.00 0.000
109 | 4600.26 11.8092 10.0103 0.0000 57822.00 0.00 0.000
110 | 4650.59 11.8437 10.0439 0.0000 57822.00 0.00 0.000
111 | 4700.92 11.8772 10.0768 0.0000 57822.00 0.00 0.000
112 | 4751.25 11.9098 10.1095 0.0000 57822.00 0.00 0.000
113 | 4801.58 11.9414 10.1415 0.0000 57822.00 0.00 0.000
114 | 4851.91 11.9722 10.1739 0.0000 57822.00 0.00 0.000
115 | 4902.24 12.0001 10.2049 0.0000 57822.00 0.00 0.000
116 | 4952.58 12.0311 10.2329 0.0000 57822.00 0.00 0.000
117 | 5002.91 12.0593 10.2565 0.0000 57822.00 0.00 0.000
118 | 5053.24 12.0867 10.2745 0.0000 57822.00 0.00 0.000
119 | 5103.57 12.1133 10.2854 0.0000 57822.00 0.00 0.000
120 | 5153.50 12.1391 10.2890 0.0000 57822.00 0.00 0.000
121 | 5153.50 12.7037 11.0427 3.5043 633.26 85.03 0.000
122 | 5204.61 12.7289 11.0585 3.5187 629.89 85.03 0.000
123 | 5255.32 12.7530 11.0718 3.5314 626.87 85.03 0.000
124 | 5306.04 12.7760 11.0850 3.5435 624.08 85.03 0.000
125 | 5356.75 12.7980 11.0983 3.5551 621.50 85.03 0.000
126 | 5407.46 12.8188 11.1166 3.5661 619.71 85.03 0.000
127 | 5458.17 12.8387 11.1316 3.5765 617.78 85.03 0.000
128 | 5508.89 12.8574 11.1457 3.5864 615.93 85.03 0.000
129 | 5559.60 12.8751 11.1590 3.5957 614.21 85.03 0.000
130 | 5610.31 12.8917 11.1715 3.6044 612.62 85.03 0.000
131 | 5661.02 12.9072 11.1832 3.6126 611.12 85.03 0.000
132 | 5711.74 12.9217 11.1941 3.6202 609.74 85.03 0.000
133 | 5762.45 12.9351 11.2041 3.6272 608.48 85.03 0.000
134 | 5813.16 12.9474 11.2134 3.6337 607.31 85.03 0.000
135 | 5863.87 12.9586 11.2219 3.6396 606.26 85.03 0.000
136 | 5914.59 12.9688 11.2295 3.6450 605.28 85.03 0.000
137 | 5965.30 12.9779 11.2364 3.6498 604.44 85.03 0.000
138 | 6016.01 12.9859 11.2424 3.6540 603.69 85.03 0.000
139 | 6066.72 12.9929 11.2477 3.6577 603.04 85.03 0.000
140 | 6117.44 12.9988 11.2521 3.6608 602.49 85.03 0.000
141 | 6168.15 13.0036 11.2557 3.6633 602.05 85.03 0.000
142 | 6218.86 13.0074 11.2586 3.6653 601.70 85.03 0.000
143 | 6269.57 13.0100 11.2606 3.6667 601.46 85.03 0.000
144 | 6320.29 13.0117 11.2618 3.6675 601.32 85.03 0.000
145 | 6371.00 13.0122 11.2622 3.6678 601.27 85.03 0.000
--------------------------------------------------------------------------------
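
The ak135.model table above is a whitespace-separated Earth model with one layer interface per row. Judging from the values (1.02 g/cm3 and 1.45 km/s in the top layer), the columns appear to be depth [km], density [g/cm3], Vp [km/s], Vs [km/s], two attenuation (Q) columns and a trailing column of zeros; this reading is an assumption, not documented in the file. A minimal NumPy sketch for loading it:

    # Minimal sketch: load src/tools/ak135.model as a plain numeric table.
    # The column interpretation (depth, density, vp, vs, q1, q2, extra) is an
    # assumption based on typical AK135 listings, not taken from the sources.
    import numpy as np

    model = np.loadtxt('src/tools/ak135.model')
    depth, density, vp, vs = (model[:, i] for i in range(4))

    print('layers:', model.shape[0])
    print('depth range [km]: 0 to', depth[-1])
    print('Vp range [km/s]:', vp.min(), 'to', vp.max())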
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Pyrocko documentation build configuration file, created by sphinx-quickstart
4 | # on Tue Jan 25 22:08:33 2011.
5 | #
6 | # This file is execfile()d with the current directory set to its containing
7 | # dir.
8 | #
9 | # Note that not all possible configuration values are present in this
10 | # autogenerated file.
11 | #
12 | # All configuration values have a default; values that are commented out serve
13 | # to show the default.
14 |
15 | import sys
16 | import os
17 | from datetime import datetime as dt
18 | import sphinx_sleekcat_theme
19 |
20 |
21 | # If extensions (or modules to document with autodoc) are in another directory,
22 | # add these directories to sys.path here. If the directory is relative to the
23 | # documentation root, use os.path.abspath to make it absolute, like shown here.
24 | # sys.path.insert(0, os.path.abspath('extensions'))
25 |
26 | # -- General configuration ----------------------------------------------------
27 |
28 | # If your documentation needs a minimal Sphinx version, state it here.
29 | #needs_sphinx = '1.0'
30 |
31 | # Add any Sphinx extension module names here, as strings. They can be
32 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
33 | extensions = ['sphinx.ext.autodoc',
34 | 'sphinx.ext.imgmath', # 'sphinx.ext.jsmath',
35 | 'sphinx.ext.viewcode',
36 | 'sphinx.ext.intersphinx',
37 | 'sphinx.ext.autosummary'
38 | ]
39 |
40 | intersphinx_mapping = {'numpy': ('https://docs.scipy.org/doc/numpy/',
41 | None),
42 | 'scipy': ('https://docs.scipy.org/doc/scipy/reference/',
43 | None),
44 | 'matplotlib': ('https://matplotlib.org/',
45 | None),
46 | 'python': ('https://docs.python.org/3.5',
47 | None),
48 | 'obspy': ('https://docs.obspy.org/',
49 | None)}
50 |
51 | # Add any paths that contain templates here, relative to this directory.
52 | templates_path = ['_templates']
53 |
54 | # The suffix of source filenames.
55 | source_suffix = '.rst'
56 | imgmath_image_format = 'svg'
57 |
58 | # The encoding of source files.
59 | #source_encoding = 'utf-8-sig'
60 |
61 | # The master toctree document.
62 | master_doc = 'index'
63 |
64 | # General information about the project.
65 | project = u'Palantiri'
66 | copyright = u'%d, The Palantiri Developers' % dt.now().year
67 |
68 | # The version info for the project you're documenting, acts as replacement for
69 | # |version| and |release|, also used in various other places throughout the
70 | # built documents.
71 | #
72 | # The short X.Y version.
73 | version = '0.4'
74 | # The full version, including alpha/beta/rc tags.
75 | release = '0.4'
76 |
77 | # The language for content autogenerated by Sphinx. Refer to documentation
78 | # for a list of supported languages.
79 | language = 'en'
80 |
81 | # There are two options for replacing |today|: either, you set today to some
82 | # non-false value, then it is used:
83 | #today = ''
84 | # Else, today_fmt is used as the format for a strftime call.
85 | #today_fmt = '%B %d, %Y'
86 |
87 | # List of patterns, relative to source directory, that match files and
88 | # directories to ignore when looking for source files.
89 | exclude_patterns = ['_build']
90 |
91 | # The reST default role (used for this markup: `text`) to use for all documents.
92 | #default_role = None
93 |
94 | # If true, '()' will be appended to :func: etc. cross-reference text.
95 | #add_function_parentheses = True
96 |
97 | # If true, the current module name will be prepended to all description
98 | # unit titles (such as .. function::).
99 | add_module_names = False
100 |
101 | # If true, sectionauthor and moduleauthor directives will be shown in the
102 | # output. They are ignored by default.
103 | #show_authors = False
104 |
105 | # The name of the Pygments (syntax highlighting) style to use.
106 | pygments_style = 'sphinx'
107 |
108 | # A list of ignored prefixes for module index sorting.
109 | modindex_common_prefix = [ 'palantiri.' ]
110 |
111 |
112 | # -- Options for HTML output ---------------------------------------------------
113 |
114 | # The theme to use for HTML and HTML Help pages. See the documentation for
115 | # a list of builtin themes.
116 | html_theme = 'sphinx_sleekcat_theme'
117 |
118 | # Theme options are theme-specific and customize the look and feel of a theme
119 | # further. For a list of options available for each theme, see the
120 | # documentation.
121 | html_theme_options = {
122 | # 'githuburl': 'https://github.com/pyrocko/pyrocko/',
123 | 'bodyfont': '"Lucida Grande",Arial,sans-serif',
124 | 'headfont': '"Lucida Grande",Arial,sans-serif',
125 | 'codefont': 'monospace,sans-serif',
126 | 'linkcolor': '#204a87',
127 | 'visitedlinkcolor': '#204a87',
128 | 'nosidebar': True,
129 | # 'appendcss': open('style.css').read(),
130 | # 'googlewebfonturl': 'https://fonts.googleapis.com/css?family=Roboto+Slab',
131 | # 'bodyfont': '"Roboto Slab",Arial,sans-serif',
132 | }
133 | pygments_style = 'friendly'
134 |
135 | # Add any paths that contain custom themes here, relative to this directory.
136 | html_theme_path = [sphinx_sleekcat_theme.get_html_theme_path()]
137 |
138 | # The name for this set of Sphinx documents. If None, it defaults to
139 | # " v documentation".
140 | html_title = u"%s v%s Manual" % (project, release)
141 |
142 | # A shorter title for the navigation bar. Default is the same as html_title.
143 | #html_short_title = u"%s Manual" % project
144 |
145 | # The name of an image file (relative to this directory) to place at the top
146 | # of the sidebar.
147 | html_logo = None
148 |
149 | # The name of an image file (within the static path) to use as favicon of the
150 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
151 | # pixels large
152 | #html_favicon = None
153 |
154 | # Add any paths that contain custom static files (such as style sheets) here,
155 | # relative to this directory. They are copied after the builtin static files,
156 | # so a file named "default.css" will overwrite the builtin "default.css".
157 | html_static_path = ['static']
158 |
159 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
160 | # using the given strftime format.
161 | #html_last_updated_fmt = '%b %d, %Y'
162 |
163 | # If true, SmartyPants will be used to convert quotes and dashes to
164 | # typographically correct entities.
165 | #html_use_smartypants = True
166 |
167 | # Custom sidebar templates, maps document names to template names.
168 | #html_sidebars = {}
169 |
170 | # Additional templates that should be rendered to pages, maps page names to
171 | # template names.
172 | #html_additional_pages = {}
173 |
174 | # If false, no module index is generated.
175 | #html_domain_indices = True
176 |
177 | # If false, no index is generated.
178 | #html_use_index = True
179 |
180 | # If true, the index is split into individual pages for each letter.
181 | #html_split_index = False
182 |
183 | # If true, links to the reST sources are added to the pages.
184 | #html_show_sourcelink = True
185 |
186 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
187 | html_show_sphinx = False
188 |
189 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
190 | #html_show_copyright = True
191 |
192 | # If true, an OpenSearch description file will be output, and all pages will
193 | # contain a tag referring to it. The value of this option must be the
194 | # base URL from which the finished HTML is served.
195 | html_use_opensearch = ''
196 |
197 | # This is the file name suffix for HTML files (e.g. ".xhtml").
198 | #html_file_suffix = None
199 |
200 | # Output file base name for HTML help builder.
201 | htmlhelp_basename = 'Palantiridoc'
202 |
203 |
204 | # -- Options for LaTeX output --------------------------------------------------
205 |
206 | latex_engine = 'xelatex'
207 |
208 | # The paper size ('letter' or 'a4').
209 | #latex_paper_size = 'a4'
210 | latex_elements = {
211 | 'papersize': 'a4paper',
212 | 'preamble': r'''
213 | \usepackage[utf8x]{inputenc}
214 | \setcounter{tocdepth}{4}''',
215 | 'utf8extra': '',
216 |
217 | }
218 |
219 | # The font size ('10pt', '11pt' or '12pt').
220 | #latex_font_size = '10pt'
221 |
222 | # Grouping the document tree into LaTeX files. List of tuples
223 | # (source start file, target name, title, author, documentclass [howto/manual]).
224 | latex_documents = [
225 | ('index', 'palantiri.tex', u'Palantiri Documentation',
226 | u'The Palantiri Developers', 'manual'),
227 | ]
228 |
229 | # The name of an image file (relative to this directory) to place at the top of
230 | # the title page.
231 | # latex_logo = None
232 |
233 | # For "manual" documents, if this is true, then toplevel headings are parts,
234 | # not chapters.
235 | # latex_use_parts = False
236 |
237 | # If true, show page references after internal links.
238 | # latex_show_pagerefs = False
239 |
240 | # If true, show URL addresses after external links.
241 | # latex_show_urls = False
242 |
243 | # Additional stuff for the LaTeX preamble.
244 | # latex_preamble = ''
245 |
246 | # Documents to append as an appendix to all manuals.
247 | # latex_appendices = []
248 |
249 | # If false, no module index is generated.
250 | # latex_domain_indices = True
251 |
252 |
253 | # -- Options for autodoc
254 |
255 | autodoc_member_order = 'bysource'
256 |
257 | # -- Options for manual page output --------------------------------------------
258 |
259 | # One entry per manual page. List of tuples
260 | # (source start file, name, description, authors, manual section).
261 | man_pages = [
262 | ('index', 'palantiri', u'Palantiri Documentation',
263 | [u'The Palantiri Developers'], 1)
264 | ]
265 |
266 |
267 | def process_signature(app, what, name, obj, options, signature,
268 | return_annotation):
269 |
270 | from pyrocko import guts
271 |
272 | # if what == 'attribute' and isinstance(obj, guts.TBase):
273 | # return (str(obj), '')
274 |
275 | if what == 'class' and issubclass(obj, guts.Object):
276 | if obj.dummy_for is not None:
277 | return ('(dummy)', '%s' % obj.dummy_for.__name__)
278 | return
279 |
280 |
281 | def skip_member(app, what, name, obj, skip, options):
282 | from pyrocko import guts
283 |
284 | if what == 'class' and name == 'dummy_for':
285 | return True
286 | if what == 'class' and name == 'T':
287 | return True
288 |
289 |
290 | def setup(app):
291 | app.connect('autodoc-process-signature', process_signature)
292 | app.connect('autodoc-skip-member', skip_member)
293 |
--------------------------------------------------------------------------------
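
conf.py above is a standard Sphinx configuration using the sphinx_sleekcat_theme and two Pyrocko-specific autodoc hooks. A minimal sketch of building the HTML manual against it, assuming Sphinx and the theme are installed and the command is run from the repository root (the docs/Makefile presumably wraps an equivalent call):

    # Minimal sketch: build the HTML docs programmatically with this conf.py.
    # Equivalent to "sphinx-build -b html docs docs/_build/html"; the output
    # directory is an assumption, not taken from the Makefile.
    from sphinx.cmd.build import build_main

    exit_code = build_main(['-b', 'html', 'docs', 'docs/_build/html'])
    print('sphinx-build exit code:', exit_code)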