├── .gitignore
├── LICENSE.txt
├── README.md
├── annotation.csv
├── backfill.py
├── catfill.py
├── clearJunk.py
├── clusterLocs.py
├── compareCatalog.py
├── createPDFFamily.py
├── createPDFOverview.py
├── createReport.py
├── distantFamilies.py
├── extendTable.py
├── forcePlot.py
├── img
│   ├── annotation.png
│   ├── bokeh.png
│   ├── cluster.png
│   ├── flowchart.png
│   └── logo.png
├── initialize.py
├── lapine.cfg
├── makeMeta.py
├── mshcat.csv
├── plotJunk.py
├── redpy
│   ├── __init__.py
│   ├── cluster.py
│   ├── config.py
│   ├── correlation.py
│   ├── optics.py
│   ├── plotting.py
│   ├── printing.py
│   ├── table.py
│   └── trigger.py
├── redpy37.yml
├── removeFamily.py
├── removeFamilyGUI.py
├── removeSmallFamily.py
├── settings.cfg
└── spec-file.txt
/.gitignore:
--------------------------------------------------------------------------------
1 | */
2 | !redpy/
3 | *.pyc
4 | *.csv
5 | *.cfg
6 | *.h5
7 | *.out
8 | *.sh
9 | *.txt
10 | *.html
11 | *.new
12 | .DS_Store
13 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # REDPy has moved!
2 |
3 | After over a year of concentrated development work and many hurdles overcome, REDPy has moved to its new home as official U.S. Geological Survey software at [**https://code.usgs.gov/vsc/REDPy/**](https://code.usgs.gov/vsc/REDPy/).
4 |
5 | I did a lot of work under the hood to make it more efficient, and I have many plans to add more features in the coming years.
6 |
7 | _**All future development will be hosted in the new repository and this repository will no longer be updated.**_
8 |
9 | New users should download the [latest version](https://code.usgs.gov/vsc/REDPy/-/releases), and read the [updated Wiki](https://code.usgs.gov/vsc/REDPy/-/wikis/home) for documentation.
10 |
11 | A snapshot of the last commit in this repository with an intact `README.md` file is available [here](https://github.com/ahotovec/REDPy/releases/) for posterity as "Version 0" of the code. I will no longer be supporting users of Version 0, but I am happy to help you migrate to the new one.
12 |
13 | ~ Alicia Hotovec-Ellis ([ahotovec-ellis@usgs.gov](mailto:ahotovec-ellis@usgs.gov))
14 |
--------------------------------------------------------------------------------
/annotation.csv:
--------------------------------------------------------------------------------
1 | Time,Color,Weight,Line Type,Alpha,Comment
2 | 2004-10-01T19:02:00,orange,1,solid,0.5,Explosion
--------------------------------------------------------------------------------
/backfill.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import argparse
6 | import redpy
7 | import numpy as np
8 | import obspy
9 | from obspy import UTCDateTime
10 | import time
11 |
12 | """
13 | Run this script to fill the table with data from the past. If a start time is not
14 | specified, it will check the attributes of the repeater table to pick up where it left
15 | off. Additionally, if this is the first run and a start time is not specified, it will
16 | assume one time chunk prior to the end time. If an end time is not specified, "now" is
17 | assumed. The end time updates at the end of each time chunk processed (default: by hour,
18 | set in configuration). This script can be run as a cron job that will pick up where it
19 | left off if a chunk is missed. Use -n if you are backfilling a large span of time:
20 | downloading the data in larger chunks (an NSEC of an hour or a day instead of a few
21 | minutes) takes less total time, but at the cost of keeping orphans around longer.
22 |
23 | usage: backfill.py [-h] [-v] [-t] [-s STARTTIME] [-e ENDTIME] [-c CONFIGFILE] [-n NSEC]
24 |
25 | optional arguments:
26 | -h, --help show this help message and exit
27 | -v, --verbose increase written print statements
28 | -t, --troubleshoot run in troubleshoot mode (without try/except)
29 | -s STARTTIME, --starttime STARTTIME
30 | optional start time to begin filling (YYYY-MM-DDTHH:MM:SS)
31 | -e ENDTIME, --endtime ENDTIME
32 | optional end time to end filling (YYYY-MM-DDTHH:MM:SS)
33 | -c CONFIGFILE, --configfile CONFIGFILE
34 | use configuration file named CONFIGFILE instead of
35 | default settings.cfg
36 | -n NSEC, --nsec NSEC overwrite opt.nsec from configuration file with NSEC this run only
37 | """
38 |
39 | t = time.time()
40 |
41 | parser = argparse.ArgumentParser(description=
42 | "Backfills table with data from the past")
43 | parser.add_argument("-v", "--verbose", action="count", default=0,
44 | help="increase written print statements")
45 | parser.add_argument("-t", "--troubleshoot", action="count", default=0,
46 | help="run in troubleshoot mode (without try/except)")
47 | parser.add_argument("-s", "--starttime",
48 | help="optional start time to begin filling (YYYY-MM-DDTHH:MM:SS)")
49 | parser.add_argument("-e", "--endtime",
50 | help="optional end time to end filling (YYYY-MM-DDTHH:MM:SS)")
51 | parser.add_argument("-c", "--configfile",
52 | help="use configuration file named CONFIGFILE instead of default settings.cfg")
53 | parser.add_argument("-n", "--nsec", type=int,
54 | help="overwrite opt.nsec from configuration file with NSEC this run only")
55 | args = parser.parse_args()
56 |
57 | if args.configfile:
58 | opt = redpy.config.Options(args.configfile)
59 | if args.verbose: print("Using config file: {0}".format(args.configfile))
60 | else:
61 | opt = redpy.config.Options("settings.cfg")
62 | if args.verbose: print("Using config file: settings.cfg")
63 |
64 | if args.nsec:
65 | opt.nsec = args.nsec
66 |
67 | if args.verbose: print("Opening hdf5 table: {0}".format(opt.filename))
68 | h5file, rtable, otable, ttable, ctable, jtable, dtable, ftable = redpy.table.openTable(opt)
69 |
70 | # Check for MPL version mismatch
71 | redpy.table.checkMPL(rtable, ftable, ttable, otable, dtable, opt)
72 |
73 | if args.endtime:
74 | tend = UTCDateTime(args.endtime)
75 | else:
76 | tend = UTCDateTime()
77 |
78 | if args.starttime:
79 | tstart = UTCDateTime(args.starttime)
80 | if rtable.attrs.ptime:
81 | rtable.attrs.ptime = UTCDateTime(tstart)
82 | else:
83 | if rtable.attrs.ptime:
84 | tstart = UTCDateTime(rtable.attrs.ptime)
85 | else:
86 | tstart = tend-opt.nsec
87 |
88 | if len(ttable) > 0:
89 | ttimes = ttable.cols.startTimeMPL[:]
90 | else:
91 | ttimes = 0
92 |
93 | n = 0
94 | rlen = len(rtable)
95 | while tstart+n*opt.nsec < tend:
96 |
97 | ti = time.time()
98 | print(tstart+n*opt.nsec)
99 |
100 | # Download and trigger
101 | if args.troubleshoot:
102 | endtime = tstart+(n+1)*opt.nsec+opt.atrig
103 | if endtime > tend:
104 | endtime = tend
105 | st, stC = redpy.trigger.getData(tstart+n*opt.nsec-opt.atrig, endtime, opt)
106 | alltrigs = redpy.trigger.trigger(st, stC, rtable, opt)
107 | else:
108 | try:
109 | endtime = tstart+(n+1)*opt.nsec+opt.atrig
110 | if endtime > tend:
111 | endtime = tend
112 | st, stC = redpy.trigger.getData(tstart+n*opt.nsec-opt.atrig, endtime, opt)
113 | alltrigs = redpy.trigger.trigger(st, stC, rtable, opt)
114 | except (TypeError, obspy.clients.fdsn.header.FDSNException, Exception):
115 | print('Could not download or trigger data... moving on')
116 | alltrigs = []
117 |
118 | # Clean out data spikes etc.
119 | trigs, junk, junkFI, junkKurt = redpy.trigger.dataClean(alltrigs, opt, flag=1)
120 |
121 | # Save junk triggers in separate table for quality checking purposes
122 | for i in range(len(junk)):
123 | redpy.table.populateJunk(jtable, junk[i], 2, opt) # Both types of junk
124 | for i in range(len(junkKurt)):
125 | redpy.table.populateJunk(jtable, junkKurt[i], 1, opt) # Just kurtosis junk
126 | for i in range(len(junkFI)):
127 | redpy.table.populateJunk(jtable, junkFI[i], 0, opt) # Just 'teleseisms'
128 |
129 | # Append times of triggers to ttable to compare total seismicity later
130 | redpy.table.populateTriggers(ttable, trigs, ttimes, opt)
131 |
132 | # Check triggers against deleted events
133 | if len(dtable) > 0:
134 | trigs = redpy.correlation.compareDeleted(trigs, dtable, opt)
135 |
136 | if len(trigs) > 0:
137 | id = rtable.attrs.previd
138 | if len(trigs) == 1:
139 | ostart = 0
140 | if len(otable) == 0:
141 | # First trigger goes to orphans table
142 | redpy.table.populateOrphan(otable, 0, trigs[0], opt)
143 | ostart = 1
144 | else:
145 | id = id + 1
146 | redpy.correlation.runCorrelation(rtable, otable, ctable, ftable, ttimes,
147 | trigs[0], id, opt)
148 | else:
149 | ostart = 0
150 | if len(otable) == 0:
151 | # First trigger goes to orphans table
152 | redpy.table.populateOrphan(otable, 0, trigs[0], opt)
153 | ostart = 1
154 | # Loop through remaining triggers
155 | for i in range(ostart,len(trigs)):
156 | id = id + 1
157 | redpy.correlation.runCorrelation(rtable, otable, ctable, ftable, ttimes,
158 | trigs[i], id, opt)
159 | rtable.attrs.previd = id
160 |
161 | redpy.table.clearExpiredOrphans(otable, opt, tstart+(n+1)*opt.nsec)
162 |
163 | # Print some stats
164 | if args.verbose:
165 | print("Length of Orphan table: {}".format(len(otable)))
166 | if len(rtable) > 1:
167 | print("Number of repeaters: {}".format(len(rtable)))
168 | print("Number of clusters: {}".format(ftable.attrs.nClust))
169 |
170 | # Update tend if an end date is not specified so this will run until it is fully
171 | # caught up, instead of running to when the script was originally run.
172 | if not args.endtime:
173 | tend = UTCDateTime()
174 |
175 | n = n+1
176 |
177 | if args.verbose: print("Time spent this iteration: {} minutes".format(
178 | (time.time()-ti)/60))
179 |
180 | print("Caught up to: {}".format(endtime-opt.atrig))
181 |
182 | if args.verbose: print("Updating plots...")
183 | redpy.plotting.createPlots(rtable, ftable, ttable, ctable, otable, opt)
184 |
185 | if args.verbose: print("Closing table...")
186 | h5file.close()
187 |
188 | print("Total time spent: {} minutes".format((time.time()-t)/60))
189 | if args.verbose: print("Done")
190 |
--------------------------------------------------------------------------------
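A minimal sketch of the chunking arithmetic in backfill.py's main loop, with hypothetical stand-ins for opt.nsec (3600 s chunks) and opt.atrig (35 s of padding); only the time bookkeeping is shown, not the download or trigger calls.

```python
# Sketch of backfill.py's chunk boundaries; nsec and atrig are hypothetical.
from obspy import UTCDateTime

nsec = 3600   # stand-in for opt.nsec: chunk length in seconds
atrig = 35    # stand-in for opt.atrig: padding so boundary-straddling triggers survive

tstart = UTCDateTime("2004-10-01T00:00:00")
tend = UTCDateTime("2004-10-01T03:00:00")

n = 0
while tstart + n * nsec < tend:
    chunk_start = tstart + n * nsec - atrig                  # pad backwards
    chunk_end = min(tstart + (n + 1) * nsec + atrig, tend)   # pad forwards, clip at tend
    print(chunk_start, "->", chunk_end)
    n += 1
```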
/catfill.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import argparse
6 | import redpy
7 | import numpy as np
8 | import obspy
9 | from obspy import UTCDateTime
10 | import time
11 | import pandas as pd
12 |
13 | """
14 | Run this script to fill the table with data from the past using a catalog of events.
15 |
16 | usage: catfill.py [-h] [-v] [-c CONFIGFILE] csvfile
17 |
18 | positional arguments:
19 | csvfile catalog csv file with a 'Time UTC' column of event times
20 |
21 | optional arguments:
22 | -h, --help show this help message and exit
23 | -v, --verbose increase written print statements
24 | -c CONFIGFILE, --configfile CONFIGFILE
25 | use configuration file named CONFIGFILE instead of
26 | default settings.cfg
27 | """
28 |
29 | t = time.time()
30 |
31 | parser = argparse.ArgumentParser(description=
32 | "Backfills table with data from the past")
33 | parser.add_argument("csvfile",
34 | help="catalog csv file with a 'Time UTC' column of event times")
35 | parser.add_argument("-v", "--verbose", action="count", default=0,
36 | help="increase written print statements")
37 | parser.add_argument("-c", "--configfile",
38 | help="use configuration file named CONFIGFILE instead of default settings.cfg")
39 | args = parser.parse_args()
40 |
41 | if args.configfile:
42 | opt = redpy.config.Options(args.configfile)
43 | if args.verbose: print("Using config file: {0}".format(args.configfile))
44 | else:
45 | opt = redpy.config.Options("settings.cfg")
46 | if args.verbose: print("Using config file: settings.cfg")
47 |
48 | if args.verbose: print("Opening hdf5 table: {0}".format(opt.filename))
49 | h5file, rtable, otable, ttable, ctable, jtable, dtable, ftable = redpy.table.openTable(opt)
50 |
51 | # Check for MPL version mismatch
52 | redpy.table.checkMPL(rtable, ftable, ttable, otable, dtable, opt)
53 |
54 | # Read in csv file using pandas
55 | df = pd.read_csv(args.csvfile)
56 | # Grab event times from 'Time UTC' column, convert to datetimes also
57 | eventlist = pd.to_datetime(df['Time UTC']).tolist()
58 | # Sort so events are processed in order of occurrence
59 | eventlist.sort()
60 |
61 | for event in eventlist:
62 |
63 | etime = UTCDateTime(event)
64 | if len(ttable) > 0:
65 | ttimes = ttable.cols.startTimeMPL[:]
66 | else:
67 | ttimes = 0
68 |
69 | if args.verbose: print(etime)
70 |
71 | # Download and trigger
72 | try:
73 | st, stC = redpy.trigger.getData(etime-5*opt.atrig, etime+5*opt.atrig, opt)
74 | alltrigs = redpy.trigger.trigger(st, stC, rtable, opt)
75 | # Reset ptime for refilling later
76 | rtable.attrs.ptime = []
77 | except (TypeError, obspy.clients.fdsn.header.FDSNException, Exception):
78 | print('Could not download or trigger data... moving on')
79 | alltrigs = []
80 |
81 | # Clean out data spikes etc.
82 | trigs, junk, junkFI, junkKurt = redpy.trigger.dataClean(alltrigs, opt, flag=1)
83 |
84 | # Save junk triggers in separate table for quality checking purposes
85 | for i in range(len(junk)):
86 | redpy.table.populateJunk(jtable, junk[i], 2, opt)
87 | for i in range(len(junkKurt)):
88 | redpy.table.populateJunk(jtable, junkKurt[i], 1, opt)
89 | for i in range(len(junkFI)):
90 | redpy.table.populateJunk(jtable, junkFI[i], 0, opt)
91 |
92 | # Append times of triggers to ttable to compare total seismicity later
93 | redpy.table.populateTriggers(ttable, trigs, ttimes, opt)
94 |
95 | # Check triggers against deleted events
96 | if len(dtable) > 0:
97 | trigs = redpy.correlation.compareDeleted(trigs, dtable, opt)
98 |
99 | if len(trigs) > 0:
100 | id = rtable.attrs.previd
101 | if len(trigs) == 1:
102 | ostart = 0
103 | if len(otable) == 0:
104 | # First trigger goes to orphans table
105 | redpy.table.populateOrphan(otable, 0, trigs[0], opt)
106 | ostart = 1
107 | else:
108 | id = id + 1
109 | redpy.correlation.runCorrelation(rtable, otable, ctable, ftable,
110 | ttimes, trigs[0], id, opt)
111 | else:
112 | ostart = 0
113 | if len(otable) == 0:
114 | # First trigger goes to orphans table
115 | redpy.table.populateOrphan(otable, 0, trigs[0], opt)
116 | ostart = 1
117 | # Loop through remaining triggers
118 | for i in range(ostart,len(trigs)):
119 | id = id + 1
120 | redpy.correlation.runCorrelation(rtable, otable, ctable, ftable,
121 | ttimes, trigs[i], id, opt)
122 | rtable.attrs.previd = id
123 |
124 | # Don't expire orphans in the catalog?
125 | # redpy.table.clearExpiredOrphans(otable, opt, tstart+(n+1)*opt.nsec)
126 |
127 | # Print some stats
128 | if args.verbose:
129 | print("Length of Orphan table: {}".format(len(otable)))
130 | if len(rtable) > 1:
131 | print("Number of repeaters: {}".format(len(rtable)))
132 | print("Number of clusters: {}".format(ftable.attrs.nClust))
133 |
134 | if len(rtable) > 1:
135 | if args.verbose: print("Creating plots...")
136 | redpy.plotting.createPlots(rtable, ftable, ttable, ctable, otable, opt)
137 | else:
138 | print("No repeaters to plot.")
139 |
140 | if args.verbose: print("Closing table...")
141 | h5file.close()
142 |
143 | if args.verbose: print("Total time spent: {} minutes".format((time.time()-t)/60))
144 | if args.verbose: print("Done")
--------------------------------------------------------------------------------
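catfill.py only requires that the input CSV have a 'Time UTC' column; a minimal sketch of that input handling, using an in-memory CSV with hypothetical event times:

```python
# Sketch of catfill.py's catalog input; the events below are hypothetical.
import io
import pandas as pd
from obspy import UTCDateTime

csv = io.StringIO("Time UTC,Magnitude\n"
                  "2004-10-02T12:00:00,1.2\n"
                  "2004-10-01T19:02:00,2.0\n")
df = pd.read_csv(csv)
eventlist = pd.to_datetime(df['Time UTC']).tolist()
eventlist.sort()                  # process in order of occurrence
for event in eventlist:
    print(UTCDateTime(event))     # earliest event prints first
```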
/clearJunk.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import redpy.config
6 | import redpy.table
7 | import redpy.plotting
8 | import argparse
9 | import numpy as np
10 | import os
11 |
12 | """
13 | Run this script to clear the contents of the junk table.
14 |
15 | usage: clearJunk.py [-h] [-v] [-c CONFIGFILE]
16 |
17 | optional arguments:
18 | -h, --help show this help message and exit
19 | -v, --verbose increase written print statements
20 | -c CONFIGFILE, --configfile CONFIGFILE
21 | use configuration file named CONFIGFILE instead of
22 | default settings.cfg
23 | """
24 |
25 | parser = argparse.ArgumentParser(description=
26 | "Run this script to clear the contents of the junk table.")
27 | parser.add_argument("-v", "--verbose", action="count", default=0,
28 | help="increase written print statements")
29 | parser.add_argument("-c", "--configfile",
30 | help="use configuration file named CONFIGFILE instead of default settings.cfg")
31 | args = parser.parse_args()
32 |
33 | if args.configfile:
34 | opt = redpy.config.Options(args.configfile)
35 | if args.verbose: print("Using config file: {0}".format(args.configfile))
36 | else:
37 | opt = redpy.config.Options("settings.cfg")
38 | if args.verbose: print("Using config file: settings.cfg")
39 |
40 | if args.verbose: print("Opening hdf5 table: {0}".format(opt.filename))
41 | h5file, rtable, otable, ttable, ctable, jtable, dtable, ftable = redpy.table.openTable(opt)
42 |
43 | if args.verbose: print("Removing junk...")
44 |
45 | if len(jtable) > 1:
46 | # This will remove all but the last row (have to leave one)
47 | for n in range(len(jtable)-1,0,-1):
48 | jtable.remove_row(n)
49 | jtable.flush()
50 | else:
51 | if args.verbose: print("No junk to remove!")
52 |
53 | if args.verbose: print("Closing table...")
54 | h5file.close()
55 | if args.verbose: print("Done")
--------------------------------------------------------------------------------
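Why clearJunk.py walks the table backwards: removing rows from the end keeps the indices of rows not yet visited valid. A plain-list analogue of the same pattern:

```python
# List analogue of clearJunk.py's reverse-order removal (hypothetical rows).
rows = ['junk0', 'junk1', 'junk2', 'junk3']
for n in range(len(rows) - 1, 0, -1):   # leave row 0 in place
    del rows[n]
print(rows)                             # ['junk0']
```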
/clusterLocs.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import argparse
6 | import redpy.config
7 | import glob, os, itertools
8 | import numpy as np
9 |
10 | """
11 | Run this script to print out the median location of each cluster. This currently only
12 | parses the existing .html files rather than querying ComCat directly! Default behavior
13 | only uses the locations of matched local earthquakes.
14 |
15 | usage: clusterLocs.py [-h] [-v] [-c CONFIGFILE] [-d] [-r]
16 |
17 | optional arguments:
18 | -h, --help show this help message and exit
19 | -v, --verbose increase written print statements, including table of matches
20 | -c CONFIGFILE, --configfile CONFIGFILE
21 | use configuration file named CONFIGFILE instead of
22 | default settings.cfg
23 | -d, --distant include distant (regional, teleseismic) matches in addition to
24 | local seismicity
25 | -r, --regional include regional matches in addition to local seismicity
26 | """
27 |
28 | parser = argparse.ArgumentParser(description=
29 | "Finds families with regional/teleseismic matches by parsing their .html files")
30 | parser.add_argument("-v", "--verbose", action="count", default=0,
31 | help="increase written print statements, including table of matches")
32 | parser.add_argument("-c", "--configfile",
33 | help="use configuration file named CONFIGFILE instead of default settings.cfg")
34 | parser.add_argument("-d", "--distant", action="count", default=0,
35 | help="include distant (regional, teleseismic) matches in addition to local seismicity")
36 | parser.add_argument("-r", "--regional", action="count", default=0,
37 | help="include regional matches in addition to local seismicity")
38 | args = parser.parse_args()
39 |
40 | if args.configfile:
41 | opt = redpy.config.Options(args.configfile)
42 | if args.verbose: print("Using config file: {0}".format(args.configfile))
43 | else:
44 | opt = redpy.config.Options("settings.cfg")
45 | if args.verbose: print("Using config file: settings.cfg")
46 |
47 | flist = np.array(list(itertools.chain.from_iterable(glob.iglob(os.path.join(
48 | root,'*.html')) for root, dirs, files in os.walk(
49 | '{}{}/clusters/'.format(opt.outputPath,opt.groupName)))))
50 |
51 |
52 | ### Open output file to write to here
53 | with open('{}{}/clusterlocs.txt'.format(opt.outputPath,opt.groupName), 'w') as outfile:
54 |
55 | outfile.write('cnum latitude longitude depth\n')
56 |
57 | # Sort by family number (the list is in a strange order)
58 | fnums = []
59 | for f in flist:
60 | fnum = int(f.split("/")[-1][:-5])
61 | fnums.append(fnum)
62 |
63 | # Parse each file, counting the number of times a word/phrase is matched
64 | for f in flist[np.argsort(fnums)]:
65 |
66 | file = open(f, "r")
67 | fnum = f.split("/")[-1][:-5]
68 | data = file.readlines()
69 |
70 | lats = np.array([])
71 | lons = np.array([])
72 | deps = np.array([])
73 |
74 | lines = data[20].split('>')
75 | for line in lines:
76 | if args.distant:
77 | if line.count("teleseismic") or line.count("regional") or line.count(
78 | "Potential local match:"):
79 | lats = np.append(lats,float(line.split(' ')[4].strip('(,')))
80 | lons = np.append(lons,float(line.split(' ')[5].strip(')')))
81 | deps = np.append(deps,float(line.split(' ')[6].strip('km')))
82 | elif args.regional:
83 | if line.count("regional") or line.count("Potential local match:"):
84 | lats = np.append(lats,float(line.split(' ')[4].strip('(,')))
85 | lons = np.append(lons,float(line.split(' ')[5].strip(')')))
86 | deps = np.append(deps,float(line.split(' ')[6].strip('km')))
87 | else: # Default behavior
88 | if line.count("Potential local match:"):
89 | lats = np.append(lats,float(line.split(' ')[4].strip('(,')))
90 | lons = np.append(lons,float(line.split(' ')[5].strip(')')))
91 | deps = np.append(deps,float(line.split(' ')[6].strip('km')))
92 |
93 | if len(lats)>0:
94 | outfile.write('{} {:6.4f} {:7.4f} {:3.2f}\n'.format(
95 | fnum,np.median(lats),np.median(lons),np.median(deps)))
96 | else:
97 | outfile.write('{} \n'.format(fnum))
98 |
99 | # outfile is closed automatically when the 'with' block above exits
100 | if args.verbose: print('Done writing to {}{}/clusterlocs.txt'.format(
101 | opt.outputPath,opt.groupName))
102 |
--------------------------------------------------------------------------------
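A minimal sketch of the token parsing clusterLocs.py applies to each matched line. The line text below is a hypothetical reconstruction, chosen only so that tokens 4 through 6 carry (latitude, longitude, depth) the way the script assumes; the real format lives in the family .html files.

```python
# Hypothetical match line; clusterLocs.py assumes tokens 4-6 hold the location.
line = "Potential local match: M1.2 (46.1912, -122.1944) 1.5km deep"
lat = float(line.split(' ')[4].strip('(,'))   # 46.1912
lon = float(line.split(' ')[5].strip(')'))    # -122.1944
dep = float(line.split(' ')[6].strip('km'))   # 1.5
print(lat, lon, dep)
```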
/compareCatalog.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import argparse
6 | import redpy
7 | import numpy as np
8 | import obspy
9 | from obspy import UTCDateTime
10 | import time
11 | import datetime as dt
12 | import pandas as pd
13 | from matplotlib.dates import num2date, date2num
14 |
15 | """
16 | Run this script to compare table with a specific catalog of events for agreement.
17 |
18 | usage: compareCatalog.py [-h] [-v] [-c CONFIGFILE] csvfile
19 |
20 | positional arguments:
21 | csvfile catalog csv file with required column 'Time UTC'
22 |
23 | optional arguments:
24 | -h, --help show this help message and exit
25 | -v, --verbose increase written print statements
26 | -c CONFIGFILE, --configfile CONFIGFILE
27 | use configuration file named CONFIGFILE instead of
28 | default settings.cfg
29 | """
30 |
31 | parser = argparse.ArgumentParser(description=
32 | "Compares REDPy catalog with csv catalog")
33 | parser.add_argument("csvfile",
34 | help="catalog csv file with required column 'Time UTC'")
35 | parser.add_argument("-v", "--verbose", action="count", default=0,
36 | help="increase written print statements")
37 | parser.add_argument("-c", "--configfile",
38 | help="use configuration file named CONFIGFILE instead of default settings.cfg")
39 | args = parser.parse_args()
40 |
41 | if args.configfile:
42 | opt = redpy.config.Options(args.configfile)
43 | if args.verbose: print("Using config file: {0}".format(args.configfile))
44 | else:
45 | opt = redpy.config.Options("settings.cfg")
46 | if args.verbose: print("Using config file: settings.cfg")
47 |
48 | if args.verbose: print("Opening hdf5 table: {0}".format(opt.filename))
49 | h5file, rtable, otable, ttable, ctable, jtable, dtable, ftable = redpy.table.openTable(opt)
50 |
51 | # Check for MPL version mismatch
52 | redpy.table.checkMPL(rtable, ftable, ttable, otable, dtable, opt)
53 |
54 | # Read in csv file using pandas
55 | df = pd.read_csv(args.csvfile)
56 |
57 | # First off, a huge assumption here is that the event times are simply within some number
58 | # of seconds of the trigger times. Unless otherwise necessary, I'm going to start off with
59 | # the assumption that they're within the length of time used for the correlation window.
60 | terr = opt.winlen/opt.samprate
61 |
62 | # I'll append the best candidate family and the number of seconds the event times differ,
63 | # so that the user can identify questionable matches. I may add the ability to assume a
64 | # location and do some simple ray-tracing like in checkComCat(), but at this point I'm
65 | # not convinced it's necessary. Column dt here is the REDPy trigger time - csv catalog
66 | # time; if it is negative, REDPy either triggered early or it may not be a match.
67 | # Amplitude is amplitude on printed station.
68 | df['Cluster'] = ''
69 | df['dt'] = ''
70 | df['Amplitude'] = ''
71 | df['FI'] = ''
72 |
73 | # Set up times to compare
74 | evtimes = date2num(np.array(pd.to_datetime(df['Time UTC']).tolist()))
75 | rtimes = rtable.cols.startTimeMPL[:]+rtable.cols.windowStart[:]/86400.0/opt.samprate
76 | ramps = rtable.cols.windowAmp[:][:,opt.printsta]
77 | fi = np.nanmean(rtable.cols.FI[:], axis=1)
78 | otimes = otable.cols.startTimeMPL[:]+otable.cols.windowStart[:]/86400.0/opt.samprate
79 | oamps = otable.cols.windowAmp[:][:,opt.printsta]
80 | ofi = np.nanmean(otable.cols.FI[:], axis=1)
81 | jtimes = np.zeros(len(jtable),)
82 | for i in range(len(jtable)):
83 | try:
84 | jtimes[i] = date2num(dt.datetime.strptime(jtable.cols.startTime[i].decode('utf-8'),
85 | '%Y-%m-%dT%H:%M:%S.%f')+dt.timedelta(
86 | seconds=jtable.cols.windowStart[i]/opt.samprate))
87 | except ValueError:  # startTime has no fractional seconds
88 | jtimes[i] = date2num(dt.datetime.strptime(jtable.cols.startTime[i].decode('utf-8'),
89 | '%Y-%m-%dT%H:%M:%S')+dt.timedelta(
90 | seconds=jtable.cols.windowStart[i]/opt.samprate))
91 | ttimes = ttable.cols.startTimeMPL[:]
92 |
93 | # Flatten families to list
94 | famlist = np.zeros((len(rtimes),)).astype(int)
95 | for fnum in range(len(ftable)):
96 | members = np.fromstring(ftable[fnum]['members'], dtype=int, sep=' ')
97 | famlist[members] = fnum
98 |
99 | for i in range(len(df)):
100 |
101 | if i%1000 == 0 and i>0:
102 | print('{:3.2f}% complete'.format(100.0*i/len(df)))
103 |
104 | # See if there's junk that matches
105 | if len(jtimes) > 0:
106 | dtimesj = jtimes-evtimes[i]
107 | bestjunk = dtimesj[np.argmin(np.abs(dtimesj))]
108 | if np.abs(bestjunk) < terr/86400:
109 | df['Cluster'][i] = 'junk'
110 | df['dt'][i] = bestjunk*86400
111 | df['Amplitude'][i] = 'NaN'
112 | df['FI'][i] = 'NaN'
113 |
114 | # See if there are any expired orphans that match
115 | if len(ttimes) > 0:
116 | dtimest = np.array(ttimes)-evtimes[i]
117 | besttrig = dtimest[np.argmin(np.abs(dtimest))]
118 | if np.abs(besttrig) < terr/86400:
119 | df['Cluster'][i] = 'expired'
120 | df['dt'][i] = besttrig*86400
121 | df['Amplitude'][i] = 'NaN'
122 | df['FI'][i] = 'NaN'
123 |
124 | # See if there's an orphan that matches
125 | if len(otimes) > 0:
126 | dtimeso = otimes-evtimes[i]
127 | bestorph = dtimeso[np.argmin(np.abs(dtimeso))]
128 | if np.abs(bestorph) < terr/86400:
129 | df['Cluster'][i] = 'orphan'
130 | df['dt'][i] = bestorph*86400
131 | df['Amplitude'][i] = oamps[np.argmin(np.abs(dtimeso))]
132 | df['FI'][i] = ofi[np.argmin(np.abs(dtimeso))]
133 |
134 | # See if there's a repeater that matches
135 | if len(rtimes) > 0:
136 | dtimesr = rtimes-evtimes[i]
137 | bestr = dtimesr[np.argmin(np.abs(dtimesr))]
138 | if np.abs(bestr) < terr/86400:
139 | df['Cluster'][i] = famlist[np.argmin(np.abs(dtimesr))]
140 | df['dt'][i] = bestr*86400
141 | df['Amplitude'][i] = ramps[np.argmin(np.abs(dtimesr))]
142 | df['FI'][i] = fi[np.argmin(np.abs(dtimesr))]
143 |
144 | # Write to matches.csv
145 | if args.verbose: print("Saving to matches_{}.csv".format(opt.groupName))
146 | df.to_csv('matches_{}.csv'.format(opt.groupName), index=False)
147 |
148 |
149 | if args.verbose: print("Closing table...")
150 | h5file.close()
151 |
152 | if args.verbose: print("Done")
--------------------------------------------------------------------------------
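A worked miniature of the nearest-time matching in compareCatalog.py, with hypothetical numbers: all times are matplotlib date numbers (days), so the tolerance terr in seconds is divided by 86400 before comparison.

```python
# Miniature of compareCatalog.py's matching; all values are hypothetical.
import numpy as np

winlen, samprate = 1024, 100.0           # stand-ins for opt.winlen, opt.samprate
terr = winlen / samprate                 # tolerance in seconds (10.24 s)

rtimes = np.array([100.0000, 100.5000])  # repeater times (days)
evtime = 100.50005                       # catalog event time (days)

dtimes = rtimes - evtime
best = dtimes[np.argmin(np.abs(dtimes))]
if np.abs(best) < terr / 86400:
    # Negative dt means REDPy triggered before the catalog time
    print("match: dt = {:.2f} s".format(best * 86400))   # dt = -4.32 s
```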
/createPDFFamily.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import redpy.config
6 | import redpy.table
7 | import argparse
8 | import os
9 | import matplotlib.dates
10 | import numpy as np
11 |
12 | """
13 | Run this script to manually produce editable PDF versions of family pages in the clusters
14 | directory (same location as fam*.png) with a custom time span.
15 |
16 | usage: createPDFFamily.py [-h] [-v] [-c CONFIGFILE] [-s STARTTIME] [-e ENDTIME] N [N ...]
17 |
18 | positional arguments:
19 | N family number(s) to be plotted
20 |
21 | optional arguments:
22 | -h, --help show this help message and exit
23 | -v, --verbose increase written print statements
24 | -c CONFIGFILE, --configfile CONFIGFILE
25 | use configuration file named CONFIGFILE instead of
26 | default settings.cfg
27 | -s STARTTIME, --starttime STARTTIME
28 | earliest time to plot, defaults to first trigger
29 | -e ENDTIME, --endtime ENDTIME
30 | latest time to plot, defaults to last trigger
31 | """
32 |
33 | parser = argparse.ArgumentParser(description=
34 | "Run this script to manually produce editable PDF versions of family pages in the "+
35 | "clusters directory (same location as fam*.png)")
36 | parser.add_argument('famnum', metavar='N', type=int, nargs='+',
37 | help="family number(s) to be plotted")
38 | parser.add_argument("-v", "--verbose", action="count", default=0,
39 | help="increase written print statements")
40 | parser.add_argument("-c", "--configfile",
41 | help="use configuration file named CONFIGFILE instead of default settings.cfg")
42 | parser.add_argument("-s", "--starttime",
43 | help="earliest time to plot, defaults to first trigger")
44 | parser.add_argument("-e", "--endtime",
45 | help="latest time to plot, defaults to last trigger")
46 | args = parser.parse_args()
47 |
48 | if args.configfile:
49 | opt = redpy.config.Options(args.configfile)
50 | if args.verbose: print("Using config file: {0}".format(args.configfile))
51 | else:
52 | opt = redpy.config.Options("settings.cfg")
53 | if args.verbose: print("Using config file: settings.cfg")
54 |
55 | if args.verbose: print("Opening hdf5 table: {0}".format(opt.filename))
56 | h5file, rtable, otable, ttable, ctable, jtable, dtable, ftable = redpy.table.openTable(opt)
57 |
58 | # Check for MPL version mismatch
59 | redpy.table.checkMPL(rtable, ftable, ttable, otable, dtable, opt)
60 |
61 | # Load into memory
62 | startTimeMPL = rtable.cols.startTimeMPL[:]
63 | windowAmp = rtable.cols.windowAmp[:][:,opt.printsta]
64 | windowStart = rtable.cols.windowStart[:]
65 | fi = rtable.cols.FI[:]
66 | ids = rtable.cols.id[:]
67 | id1 = ctable.cols.id1[:]
68 | id2 = ctable.cols.id2[:]
69 | ccc = ctable.cols.ccc[:]
70 |
71 | # Process arguments
72 | if args.starttime:
73 | tmin = matplotlib.dates.date2num(np.datetime64(args.starttime))
74 | else:
75 | tmin = 0
76 |
77 | if args.endtime:
78 | tmax = matplotlib.dates.date2num(np.datetime64(args.endtime))
79 | else:
80 | tmax = 0
81 |
82 | for fnum in args.famnum:
83 | if args.verbose: print("Creating PDF for family {}...".format(fnum))
84 | redpy.plotting.plotSingleFamily(rtable, ftable, ctable, startTimeMPL, windowAmp,
85 | windowStart, fi, ids, id1, id2, ccc, 'pdf', 100, fnum, tmin, tmax, opt)
86 |
87 | if args.verbose: print("Closing table...")
88 | h5file.close()
89 | if args.verbose: print("Done")
90 |
--------------------------------------------------------------------------------
/createPDFOverview.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import redpy.config
6 | import redpy.table
7 | import argparse
8 | import os
9 | import numpy as np
10 | import matplotlib.dates
11 |
12 | """
13 | Run this script to manually produce an editable PDF version of the overview page
14 |
15 | usage: createPDFOverview.py [-h] [-v] [-c CONFIGFILE] [-s STARTTIME] [-e ENDTIME]
16 | [-b BINSIZE] [-u] [-m MINMEMBERS] [-o OCCURHEIGHT] [-f FORMAT]
17 |
18 | optional arguments:
19 | -h, --help show this help message and exit
20 | -v, --verbose increase written print statements
21 | -c CONFIGFILE, --configfile CONFIGFILE
22 | use configuration file named CONFIGFILE instead of
23 | default settings.cfg
24 | -s STARTTIME, --starttime STARTTIME
25 | earliest time to plot, defaults to first trigger
26 | -e ENDTIME, --endtime ENDTIME
27 | latest time to plot, defaults to last trigger
28 | -b BINSIZE, --binsize BINSIZE
29 | custom time bin size in days, defaults to overview.html's
30 | binsize
31 | -u, --usehrs use hours instead of days for definition of binsize
32 | -m MINMEMBERS, --minmembers MINMEMBERS
33 | minimum number of members to plot, defaults to
34 | overview.html's minmembers
35 | -o OCCURHEIGHT, --occurheight OCCURHEIGHT
36 | integer multiplier for how much taller the occurrence
37 | plot should be compared to other plots, defaults to 3
38 | -f FORMAT, --format FORMAT
39 | comma separated list of plots to be rendered
40 | """
41 |
42 | parser = argparse.ArgumentParser(description=
43 | "Run this script to manually produce an editable PDF version of the overview page")
44 | parser.add_argument("-v", "--verbose", action="count", default=0,
45 | help="increase written print statements")
46 | parser.add_argument("-c", "--configfile",
47 | help="use configuration file named CONFIGFILE instead of default settings.cfg")
48 | parser.add_argument("-s", "--starttime",
49 | help="earliest time to plot, defaults to first trigger")
50 | parser.add_argument("-e", "--endtime",
51 | help="latest time to plot, defaults to last trigger")
52 | parser.add_argument("-b", "--binsize",
53 | help="custom time bin size, defaults to overview.html's binsize")
54 | parser.add_argument("-u", "--usehrs", action="count", default=0,
55 | help="use hours instead of days for definition of binsize")
56 | parser.add_argument("-m", "--minmembers",
57 | help="minimum number of members to plot, defaults to overview.html's minmembers")
58 | parser.add_argument("-o", "--occurheight",
59 | help="integer multiplier for how much taller the occurrence plot should be compared" +
60 | " to other plots, defaults to 3")
61 | parser.add_argument("-f", "--format",
62 | help="comma separated list of plots to be rendered")
63 | args = parser.parse_args()
64 |
65 | if args.configfile:
66 | opt = redpy.config.Options(args.configfile)
67 | if args.verbose: print("Using config file: {0}".format(args.configfile))
68 | else:
69 | opt = redpy.config.Options("settings.cfg")
70 | if args.verbose: print("Using config file: settings.cfg")
71 |
72 | if args.verbose: print("Opening hdf5 table: {0}".format(opt.filename))
73 | h5file, rtable, otable, ttable, ctable, jtable, dtable, ftable = redpy.table.openTable(opt)
74 |
75 | # Check for MPL version mismatch
76 | redpy.table.checkMPL(rtable, ftable, ttable, otable, dtable, opt)
77 |
78 | # Process arguments
79 | if args.starttime:
80 | tmin = matplotlib.dates.date2num(np.datetime64(args.starttime))
81 | else:
82 | tmin = 0
83 |
84 | if args.endtime:
85 | tmax = matplotlib.dates.date2num(np.datetime64(args.endtime))
86 | else:
87 | tmax = 0
88 |
89 | if args.binsize:
90 | if args.usehrs:
91 | binsize = float(args.binsize)/24
92 | else:
93 | binsize = float(args.binsize)
94 | else:
95 | binsize = opt.dybin
96 |
97 | if args.minmembers:
98 | minmembers = int(args.minmembers)
99 | else:
100 | minmembers = opt.minplot
101 |
102 | if args.occurheight:
103 | occurheight = int(args.occurheight)
104 | else:
105 | occurheight = 3
106 |
107 | if args.format:
108 | plotformat = args.format
109 | else:
110 | plotformat = 'eqrate,fi,occurrence,longevity'
111 |
112 | if args.verbose: print("Creating overview.pdf in main output directory...")
113 | redpy.plotting.customPDFoverview(rtable, ftable, ttable, tmin, tmax, binsize, minmembers,
114 | occurheight, plotformat, opt)
115 |
116 | if args.verbose: print("Closing table...")
117 | h5file.close()
118 | if args.verbose: print("Done")
119 |
--------------------------------------------------------------------------------
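A one-line sketch of the -b/-u interaction in createPDFOverview.py: with -u, BINSIZE is read in hours and converted to days (values hypothetical).

```python
# Hypothetical invocation: -b 6 -u gives quarter-day bins.
binsize_arg, usehrs = "6", True
binsize = float(binsize_arg) / 24 if usehrs else float(binsize_arg)
print(binsize)   # 0.25 (days)
```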
/createReport.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import redpy.config
6 | import redpy.table
7 | import argparse
8 | import os
9 |
10 | """
11 | Run this script to manually produce a more detailed 'report' page for a given family
12 | (or families)
13 |
14 | usage: createReport.py [-h] [-v] [-o] [-m] [-c CONFIGFILE] N [N ...]
15 |
16 | positional arguments:
17 | N family number(s) to be reported on
18 |
19 | optional arguments:
20 | -h, --help show this help message and exit
21 | -v, --verbose increase written print statements
22 | -o, --ordered order plots by OPTICS
23 | -m, --matrixtofile save correlation matrix to file
24 | -c CONFIGFILE, --configfile CONFIGFILE
25 | use configuration file named CONFIGFILE instead of
26 | default settings.cfg
27 | """
28 |
29 | parser = argparse.ArgumentParser(description=
30 | "Run this script to manually produce a more detailed 'report' page for a given " +
31 | "family (or families)")
32 | parser.add_argument('famnum', metavar='N', type=int, nargs='+',
33 | help="family number(s) to be reported on")
34 | parser.add_argument("-v", "--verbose", action="count", default=0,
35 | help="increase written print statements")
36 | parser.add_argument("-o", "--ordered", action="count", default=0,
37 | help="order plots by OPTICS")
38 | parser.add_argument("-m", "--matrixtofile", action="count", default=0,
39 | help="save correlation matrix to file")
40 | parser.add_argument("-c", "--configfile",
41 | help="use configuration file named CONFIGFILE instead of default settings.cfg")
42 | args = parser.parse_args()
43 |
44 | if args.configfile:
45 | opt = redpy.config.Options(args.configfile)
46 | if args.verbose: print("Using config file: {0}".format(args.configfile))
47 | else:
48 | opt = redpy.config.Options("settings.cfg")
49 | if args.verbose: print("Using config file: settings.cfg")
50 |
51 | if args.verbose: print("Opening hdf5 table: {0}".format(opt.filename))
52 | h5file, rtable, otable, ttable, ctable, jtable, dtable, ftable = redpy.table.openTable(opt)
53 |
54 | # Check for MPL version mismatch
55 | redpy.table.checkMPL(rtable, ftable, ttable, otable, dtable, opt)
56 |
57 |
58 | if args.verbose: print("Creating folder to store files '{}{}/reports'".format(
59 | opt.outputPath, opt.groupName))
60 |
61 | try:
62 | os.mkdir('{}{}/reports'.format(opt.outputPath,opt.groupName))
63 | except OSError:
64 | print("Folder exists.")
65 |
66 | for fnum in args.famnum:
67 | if args.verbose: print("Creating report for family {}...".format(fnum))
68 | redpy.plotting.plotReport(rtable, ftable, ctable, fnum, args.ordered,
69 | args.matrixtofile, opt)
70 |
71 | if args.verbose: print("Closing table...")
72 | h5file.close()
73 | if args.verbose: print("Done")
74 |
--------------------------------------------------------------------------------
/distantFamilies.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import argparse
6 | import redpy.config
7 | import glob, os, itertools
8 | import numpy as np
9 |
10 | """
11 | Run this script to print out the families with a minimum percentage of regional and/or
12 | teleseismic matches from ComCat that can then be copy/pasted into removeFamily.py. An
13 | optional table is printed that summarizes matches of each type. This only parses the
14 | existing .html files rather than querying ComCat directly!
15 |
16 | usage: distantFamilies.py [-h] [-v] [-c CONFIGFILE] [-e ETC] [-p PERCENT]
17 |
18 | optional arguments:
19 | -h, --help show this help message and exit
20 | -v, --verbose increase written print statements, including table of matches
21 | -c CONFIGFILE, --configfile CONFIGFILE
22 | use configuration file named CONFIGFILE instead of
23 | default settings.cfg
24 | -e ETC, --etc ETC phrase to explicitly search for, e.g., name of a specific area
25 | -p PERCENT, --percent PERCENT
26 | minimum percentage of regional/teleseismic/etc matches, default 90
27 | """
28 |
29 | parser = argparse.ArgumentParser(description=
30 | "Finds families with regional/teleseismic matches by parsing their .html files")
31 | parser.add_argument("-v", "--verbose", action="count", default=0,
32 | help="increase written print statements, including table of matches")
33 | parser.add_argument("-c", "--configfile",
34 | help="use configuration file named CONFIGFILE instead of default settings.cfg")
35 | parser.add_argument("-e", "--etc",
36 | help="phrase to explicitly search for, e.g., name of a specific area")
37 | parser.add_argument("-p", "--percent", type=float,
38 | help="minimum percentage of regional/teleseismic matches, default 90", default=90.0)
39 | args = parser.parse_args()
40 |
41 | if args.configfile:
42 | opt = redpy.config.Options(args.configfile)
43 | if args.verbose: print("Using config file: {0}".format(args.configfile))
44 | else:
45 | opt = redpy.config.Options("settings.cfg")
46 | if args.verbose: print("Using config file: settings.cfg")
47 |
48 | flist = np.array(list(itertools.chain.from_iterable(glob.iglob(os.path.join(
49 | root,'*.html')) for root, dirs, files in os.walk(
50 | '{}{}/clusters/'.format(opt.outputPath,opt.groupName)))))
51 |
52 | fnums = []
53 | removeNums = ''
54 | removeNumsReg = ''
55 | removeNumsRegiTele = ''
56 | removeNumsTele = ''
57 | removeNumsETC = ''
58 | removeNumsReg3 = ''
59 |
60 | # Sort by family number (the list is in a strange order)
61 | for f in flist:
62 | fnum = int(f.split("/")[-1][:-5])
63 | fnums.append(fnum)
64 |
65 | # Parse each file, counting the number of times a word/phrase is matched
66 | for f in flist[np.argsort(fnums)]:
67 |
68 | file = open(f, "r")
69 | fnum = f.split("/")[-1][:-5]
70 | data = file.read()
71 |
72 | reg = data.count("regional")
73 | tele = data.count("teleseismic")
74 | local = data.count("Potential local match:") # Excludes the last two lines!
75 |
76 | if args.etc:
77 | etc = data.count(args.etc)
78 | local = local-etc
79 | else:
80 | etc = 0
81 |
82 | if reg+tele+etc > 0:
83 |
84 | if args.verbose:
85 | if args.etc:
86 | print("Fam {:4} : L {:2} | R {:2} | T {:2} | E {:2} | Distant {:5.1f}% | \
87 | Etc {:5.1f}%".format(
88 | fnum, local, reg, tele, etc, 100*(reg+tele+etc)/(reg+tele+local+etc),
89 | 100*(etc)/(reg+tele+local+etc)))
90 | else:
91 | print("Fam {:4} : L {:2} | R {:2} | T {:2} | Distant {:5.1f}%".format(
92 | fnum, local, reg, tele, 100*(reg+tele)/(reg+tele+local)))
93 |
94 | if 100*(reg+tele)/(reg+tele+local+etc) >= args.percent:
95 | removeNums+=' {}'.format(fnum)
96 |
97 | if 100*(reg)/(reg+tele+local+etc) >= args.percent:
98 | removeNumsReg+=' {}'.format(fnum)
99 |
100 | if reg > 0:
101 | if 100*(reg)/(reg+local+etc) >= args.percent:
102 | removeNumsRegiTele+=' {}'.format(fnum)
103 |
104 | if 100*(tele)/(reg+tele+local+etc) >= args.percent:
105 | removeNumsTele+=' {}'.format(fnum)
106 |
107 | if args.etc:
108 | if 100*(etc)/(reg+tele+local+etc) >= args.percent:
109 | removeNumsETC+=' {}'.format(fnum)
110 |
111 | if reg >= 3: # If at least 3 located regionals regardless of %
112 | removeNumsReg3+=' {}'.format(fnum)
113 |
114 |
115 | print('\n{}%+ Teleseismic:\n{}\n'.format(args.percent,removeNumsTele))
116 |
117 | print('\n{}%+ Regional+Teleseismic:\n{}\n'.format(args.percent,removeNums))
118 |
119 | print('\n{}%+ Regional:\n{}\n'.format(args.percent,removeNumsReg))
120 |
121 | print('\n{}%+ Regional (ignore Teleseisms):\n{}\n'.format(args.percent,removeNumsRegiTele))
122 |
123 | print('\n3+ Regional Matches: \n{}\n'.format(removeNumsReg3))
124 |
125 | if args.etc:
126 | print('{}%+ containing {}:\n{}\n'.format(args.percent,args.etc,removeNumsETC))
127 |
--------------------------------------------------------------------------------
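A worked miniature of the percentage test distantFamilies.py applies per family, with hypothetical match counts:

```python
# Hypothetical counts parsed from one family's .html page.
reg, tele, local, etc = 8, 1, 1, 0
percent = 90.0
distant = 100 * (reg + tele) / (reg + tele + local + etc)
print("{:.1f}% distant".format(distant))    # 90.0% distant
if distant >= percent:
    print("family would be listed for removal")
```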
/extendTable.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import redpy.config
6 | import redpy.table
7 | import argparse
8 | import shutil
9 | import os
10 | import numpy as np
11 |
12 | """
13 | Run this script to create space for additional stations while preserving data in an
14 | existing table or to change the directory name for a run. Additional stations should
15 | always be included at the end of the station list; reordering that list is currently not
16 | supported. Running this script will overwrite any existing table with the same name
17 | defined by filename in the new .cfg file. If the table names in both .cfg files are the
18 | same, the original table will be renamed and then deleted. All output files are also
19 | remade to reflect the additional station, unless flagged otherwise.
20 |
21 | usage: extendTable.py [-h] [-v] [-n] CONFIGFILE_FROM CONFIGFILE_TO
22 |
23 | positional arguments:
24 | CONFIGFILE_FROM old .cfg file corresponding to table to be copied from
25 | CONFIGFILE_TO new .cfg file corresponding to table to be copied to
26 |
27 | optional arguments:
28 | -h, --help show this help message and exit
29 | -v, --verbose increase written print statements
30 | -n, --noplot do not re-render plots after extending
31 | """
32 |
33 | parser = argparse.ArgumentParser(description=
34 | "Create space for additional stations based on an existing table")
35 | parser.add_argument("-v", "--verbose", action="count", default=0,
36 | help="increase written print statements")
37 | parser.add_argument("-n", "--noplot", action="count", default=0,
38 | help="do not re-render plots after extending")
39 | parser.add_argument('cfgfrom', metavar='CONFIGFILE_FROM', type=str, nargs=1,
40 | help="old .cfg file corresponding to table to be copied from")
41 | parser.add_argument('cfgto', metavar='CONFIGFILE_TO', type=str, nargs=1,
42 | help="new .cfg file corresponding to table to be copied to")
43 |
44 | args = parser.parse_args()
45 |
46 |
47 | if args.verbose: print("Using old config file: {0}".format(args.cfgfrom[0]))
48 | optfrom = redpy.config.Options(args.cfgfrom[0])
49 |
50 | if args.verbose: print("Using new config file: {0}".format(args.cfgto[0]))
51 | optto = redpy.config.Options(args.cfgto[0])
52 |
53 | if args.verbose: print("Making working copy of old hdf5 table...")
54 | shutil.copy(optfrom.filename,'{}.old'.format(optfrom.filename))
55 |
56 | # Change filename in optfrom to point to the .old version
57 | optfrom.filename = '{}.old'.format(optfrom.filename)
58 |
59 | if args.verbose: print("Creating empty hdf5 table: {0}".format(optto.filename))
60 | redpy.table.initializeTable(optto)
61 |
62 | if args.verbose: print("Opening hdf5 table: {0}".format(optfrom.filename))
63 | h5filefrom, rtablefrom, otablefrom, ttablefrom, ctablefrom, jtablefrom, dtablefrom, ftablefrom = redpy.table.openTable(optfrom)
64 |
65 | # Check for MPL version mismatch
66 | redpy.table.checkMPL(rtablefrom, ftablefrom, ttablefrom, otablefrom, dtablefrom, optfrom)
67 |
68 | if args.verbose: print("Opening hdf5 table: {0}".format(optto.filename))
69 | h5fileto, rtableto, otableto, ttableto, ctableto, jtableto, dtableto, ftableto = redpy.table.openTable(optto)
70 |
71 | # Define how many stations need to be added
72 | dsta = optto.nsta - optfrom.nsta
73 |
74 | # DO ALL THE COPYING
75 | for rfrom in rtablefrom.iterrows():
76 | rto = rtableto.row
77 | # These stay the same
78 | rto['id'] = rfrom['id']
79 | rto['startTime'] = rfrom['startTime']
80 | rto['startTimeMPL'] = rfrom['startTimeMPL']
81 | rto['windowStart'] = rfrom['windowStart']
82 | # These must be extended
83 | rto['windowAmp'] = np.append(rfrom['windowAmp'],np.zeros(dsta))
84 | rto['windowCoeff'] = np.append(rfrom['windowCoeff'],np.zeros(dsta))
85 | rto['FI'] = np.append(rfrom['FI'],np.empty(dsta)*np.nan)
86 | rto['waveform'] = np.append(rfrom['waveform'],np.zeros(dsta*optto.wshape))
87 | rto['windowFFT'] = np.append(rfrom['windowFFT'],np.zeros(dsta*optto.winlen))
88 | rto.append()
89 | rtableto.attrs.ptime = rtablefrom.attrs.ptime
90 | rtableto.attrs.previd = rtablefrom.attrs.previd
91 | rtableto.flush()
92 |
93 | for ofrom in otablefrom.iterrows():
94 | oto = otableto.row
95 | # These stay the same
96 | oto['id'] = ofrom['id']
97 | oto['startTime'] = ofrom['startTime']
98 | oto['startTimeMPL'] = ofrom['startTimeMPL']
99 | oto['windowStart'] = ofrom['windowStart']
100 | oto['expires'] = ofrom['expires']
101 | # These must be extended
102 | oto['windowAmp'] = np.append(ofrom['windowAmp'],np.zeros(dsta))
103 | oto['windowCoeff'] = np.append(ofrom['windowCoeff'],np.zeros(dsta))
104 | oto['FI'] = np.append(ofrom['FI'],np.empty(dsta)*np.nan)
105 | oto['waveform'] = np.append(ofrom['waveform'],np.zeros(dsta*optto.wshape))
106 | oto['windowFFT'] = np.append(ofrom['windowFFT'],np.zeros(dsta*optto.winlen))
107 | oto.append()
108 | otableto.flush()
109 |
110 | for tfrom in ttablefrom.iterrows():
111 | tto = ttableto.row
112 | # This stays the same
113 | tto['startTimeMPL'] = tfrom['startTimeMPL']
114 | tto.append()
115 | ttableto.flush()
116 |
117 | for dfrom in dtablefrom.iterrows():
118 | dto = dtableto.row
119 | # These stay the same
120 | dto['id'] = dfrom['id']
121 | dto['startTime'] = dfrom['startTime']
122 | dto['startTimeMPL'] = dfrom['startTimeMPL']
123 | dto['windowStart'] = dfrom['windowStart']
124 | # These must be extended
125 | dto['windowAmp'] = np.append(dfrom['windowAmp'],np.zeros(dsta))
126 | dto['windowCoeff'] = np.append(dfrom['windowCoeff'],np.zeros(dsta))
127 | dto['FI'] = np.append(dfrom['FI'],np.empty(dsta)*np.nan)
128 | dto['waveform'] = np.append(dfrom['waveform'],np.zeros(dsta*optto.wshape))
129 | dto['windowFFT'] = np.append(dfrom['windowFFT'],np.zeros(dsta*optto.winlen))
130 | dto.append()
131 | dtableto.flush()
132 |
133 | for jfrom in jtablefrom.iterrows():
134 | jto = jtableto.row
135 | # These stay the same
136 | jto['startTime'] = jfrom['startTime']
137 | jto['windowStart'] = jfrom['windowStart']
138 | jto['isjunk'] = jfrom['isjunk']
139 | # This must be extended
140 | jto['waveform'] = np.append(jfrom['waveform'],np.zeros(dsta*optto.wshape))
141 | jto.append()
142 | jtableto.flush()
143 |
144 | for cfrom in ctablefrom.iterrows():
145 | cto = ctableto.row
146 | # All stay the same
147 | cto['id1'] = cfrom['id1']
148 | cto['id2'] = cfrom['id2']
149 | cto['ccc'] = cfrom['ccc']
150 | cto.append()
151 | ctableto.flush()
152 |
153 | for ffrom in ftablefrom.iterrows():
154 | fto = ftableto.row
155 | # All stay the same, but printme == 1
156 | fto['members'] = ffrom['members']
157 | fto['core'] = ffrom['core']
158 | fto['startTime'] = ffrom['startTime']
159 | fto['longevity'] = ffrom['longevity']
160 | fto['lastprint'] = ffrom['lastprint']
161 | if args.noplot:
162 | fto['printme'] = ffrom['printme']
163 | else:
164 | fto['printme'] = 1
165 | fto.append()
166 | ftableto.attrs.nClust = ftablefrom.attrs.nClust
167 | ftableto.flush()
168 |
169 | if args.verbose: print("Creating plots...")
170 | redpy.plotting.createPlots(rtableto, ftableto, ttableto, ctableto, otableto, optto)
171 |
172 | if args.verbose: print("Closing tables...")
173 | h5filefrom.close()
174 | h5fileto.close()
175 |
176 | if args.verbose: print("Deleting working copy of old hdf5 table...")
177 | os.remove(optfrom.filename)
178 |
179 | if args.verbose: print("Done")
--------------------------------------------------------------------------------
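A minimal sketch of the per-station padding in extendTable.py's copy loops, for a hypothetical dsta = 1: amplitude-like columns are padded with zeros, while FI gets NaN so the new station contributes no data until it is filled.

```python
# Padding for one added station (dsta = 1); the values are hypothetical.
import numpy as np

dsta = 1
windowAmp = np.array([3.2e-6, 1.1e-6])            # two existing stations
fi = np.array([-0.42, -0.51])

windowAmp = np.append(windowAmp, np.zeros(dsta))  # [3.2e-06 1.1e-06 0.0]
fi = np.append(fi, np.empty(dsta) * np.nan)       # [-0.42 -0.51 nan]
print(windowAmp, fi)
```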
/forcePlot.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import redpy.config
6 | import redpy.table
7 | import redpy.plotting
8 | import argparse
9 | import numpy as np
10 | import os
11 |
12 | """
13 | Run this script to force plotting. Can be used after killing mid-run or updating settings.
14 | Note that the -s and -e settings follow Python convention for arrays, i.e., start at 0
15 | and do not include the ending number. For example, -s 1 -e 5 would replot 1, 2, 3, and 4.
16 |
17 | usage: forcePlot.py [-h] [-v] [-a] [-r] [-s STARTFAM] [-e ENDFAM] [-f] [-l] [-c CONFIGFILE]
18 |
19 | optional arguments:
20 | -h, --help show this help message and exit
21 | -v, --verbose increase written print statements
22 | -a, --all replot everything, not just updated families
23 | -r, --resetlp reset the 'last print' column (use for 'missing file' errors)
24 | -s STARTFAM, --startfam STARTFAM
25 | manual starting family to replot (assumes ENDFAM is -1 if not set)
26 | -e ENDFAM, --endfam ENDFAM
27 | manual (noninclusive) ending family to replot (assumes STARTFAM is
28 | 0 if not set)
29 | -f, --famplot only replot the family plots, not html files
30 | -l, --html only render the html, not any images
31 | -c CONFIGFILE, --configfile CONFIGFILE
32 | use configuration file named CONFIGFILE instead of
33 | default settings.cfg
34 | """
35 |
36 | parser = argparse.ArgumentParser(description=
37 | "Run this script to force plotting. Can be used after killing mid-run or updating settings.")
38 | parser.add_argument("-v", "--verbose", action="count", default=0,
39 | help="increase written print statements")
40 | parser.add_argument("-a", "--all", action="count", default=0,
41 | help="replot everything, not just updated families")
42 | parser.add_argument("-r", "--resetlp", action="count", default=0,
43 | help="reset the 'last print' column (use for 'missing file' errors)")
44 | parser.add_argument("-s", "--startfam", type=int, default=0,
45 | help="manual starting family to replot (assumes ENDFAM is -1 if not set)")
46 | parser.add_argument("-e", "--endfam", type=int, default=0,
47 | help="manual (noninclusive) ending family to replot (assumes STARTFAM is 0 if not set)")
48 | parser.add_argument("-f", "--famplot", action="count", default=0,
49 | help="only replot the family plots, not html files")
50 | parser.add_argument("-l", "--html", action="count", default=0,
51 | help="only render the html, not any images")
52 | parser.add_argument("-c", "--configfile",
53 | help="use configuration file named CONFIGFILE instead of default settings.cfg")
54 | args = parser.parse_args()
55 |
56 | if args.configfile:
57 | opt = redpy.config.Options(args.configfile)
58 | if args.verbose: print("Using config file: {0}".format(args.configfile))
59 | else:
60 | opt = redpy.config.Options("settings.cfg")
61 | if args.verbose: print("Using config file: settings.cfg")
62 |
63 | if args.verbose: print("Opening hdf5 table: {0}".format(opt.filename))
64 | h5file, rtable, otable, ttable, ctable, jtable, dtable, ftable = redpy.table.openTable(opt)
65 |
66 | # Check for MPL version mismatch
67 | redpy.table.checkMPL(rtable, ftable, ttable, otable, dtable, opt)
68 |
69 | if args.all:
70 | if args.verbose: print("Resetting plotting column...")
71 | ftable.cols.printme[0:ftable.attrs.nClust] = np.ones((ftable.attrs.nClust,))
72 |
73 | if args.resetlp:
74 | if args.verbose: print("Resetting last print column...")
75 | ftable.cols.lastprint[:] = np.arange(len(ftable))
76 |
77 | if args.startfam or args.endfam:
78 | ftable.cols.printme[:] = np.zeros((len(ftable),))
79 | if args.startfam and not args.endfam:
80 | ftable.cols.printme[args.startfam:ftable.attrs.nClust] = np.ones(
81 | (ftable.attrs.nClust-args.startfam,))
82 | elif args.endfam and not args.startfam:
83 | ftable.cols.printme[0:args.endfam] = np.ones((args.endfam,))
84 | else:
85 | ftable.cols.printme[args.startfam:args.endfam] = np.ones(
86 | (args.endfam-args.startfam,))
87 |
88 | if args.verbose: print("Creating requested plots...")
89 |
90 | if args.famplot:
91 | redpy.plotting.plotFamilies(rtable, ftable, ctable, opt)
92 |
93 | if args.html:
94 | redpy.plotting.plotFamilyHTML(rtable, ftable, opt)
95 |
96 | if args.html or args.famplot:
97 | ftable.cols.printme[:] = np.zeros((len(ftable),))
98 | ftable.cols.lastprint[:] = np.arange(len(ftable))
99 | else:
100 | redpy.plotting.createPlots(rtable, ftable, ttable, ctable, otable, opt)
101 |
102 |
103 | if args.verbose: print("Closing table...")
104 | h5file.close()
105 | if args.verbose: print("Done")
--------------------------------------------------------------------------------
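Example invocations (the run range and config name here are hypothetical), consistent with the arguments parsed above:

    # Replot families 10 through 19 (ENDFAM is noninclusive) with a custom config:
    python forcePlot.py -v -s 10 -e 20 -c lapine.cfg

    # Re-render only the html overviews, leaving existing images untouched:
    python forcePlot.py -v -l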
/img/annotation.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahotovec/REDPy/c38ddf5e34e9f2ca82e0e6a2172e88e21b387398/img/annotation.png
--------------------------------------------------------------------------------
/img/bokeh.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahotovec/REDPy/c38ddf5e34e9f2ca82e0e6a2172e88e21b387398/img/bokeh.png
--------------------------------------------------------------------------------
/img/cluster.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahotovec/REDPy/c38ddf5e34e9f2ca82e0e6a2172e88e21b387398/img/cluster.png
--------------------------------------------------------------------------------
/img/flowchart.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahotovec/REDPy/c38ddf5e34e9f2ca82e0e6a2172e88e21b387398/img/flowchart.png
--------------------------------------------------------------------------------
/img/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahotovec/REDPy/c38ddf5e34e9f2ca82e0e6a2172e88e21b387398/img/logo.png
--------------------------------------------------------------------------------
/initialize.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2018 Alicia Hotovec-Ellis (ahotovec@gmail.com)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import redpy.config
6 | import redpy.table
7 | import argparse
8 | import os
9 |
10 | """
11 | Run this script first to initialize the hdf5 table where everything will be stored.
12 | Warning: Running this script will overwrite an existing table with the same name defined
13 | by filename in the .cfg file.
14 |
15 | usage: initialize.py [-h] [-v] [-c CONFIGFILE]
16 |
17 | optional arguments:
18 | -h, --help show this help message and exit
19 | -v, --verbose increase written print statements
20 | -c CONFIGFILE, --configfile CONFIGFILE
21 | use configuration file named CONFIGFILE instead of
22 | default settings.cfg
23 | """
24 |
25 | parser = argparse.ArgumentParser(description=
26 | "Initialize hdf5 table using configuration, overwrites existing table defined in config")
27 | parser.add_argument("-v", "--verbose", action="count", default=0,
28 | help="increase written print statements")
29 | parser.add_argument("-c", "--configfile",
30 | help="use configuration file named CONFIGFILE instead of default settings.cfg")
31 | args = parser.parse_args()
32 |
33 | if args.configfile:
34 | opt = redpy.config.Options(args.configfile)
35 | if args.verbose: print("Using config file: {}".format(args.configfile))
36 | else:
37 | opt = redpy.config.Options("settings.cfg")
38 | if args.verbose: print("Using config file: settings.cfg")
39 |
40 | if args.verbose: print("Writing hdf5 table: {}".format(opt.filename))
41 |
42 | redpy.table.initializeTable(opt)
43 |
44 | if args.verbose: print("Creating folder to store images '{}{}'".format(opt.outputPath,
45 | opt.groupName))
46 | try:
47 | os.mkdir('{}{}'.format(opt.outputPath,opt.groupName))
48 | except OSError:
49 | print("Folder exists.")
50 |
51 | if args.verbose: print("Creating folder to store core images '{}{}/clusters'".format(
52 | opt.outputPath,opt.groupName))
53 | try:
54 | os.mkdir('{}{}/clusters'.format(opt.outputPath,opt.groupName))
55 | except OSError:
56 | print("Folder exists.")
57 |
58 | if args.verbose: print("Done")
59 |
--------------------------------------------------------------------------------
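A typical first-run sequence (assuming the default settings.cfg is in the working directory) pairs this script with backfill.py:

    python initialize.py -v   # creates the (empty) hdf5 table and image folders
    python backfill.py -v     # begins filling the table with triggered events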
/lapine.cfg:
--------------------------------------------------------------------------------
1 | [Settings]
2 | title=La Pine Test
3 | filename=LPtest.h5
4 | groupName=lapine
5 | groupDesc=La Pine Swarm 2015
6 | station=SVIC
7 | network=CC
8 | channel=EHZ
9 | winlen=2048
10 | cmin=0.65
11 | minplot=2
12 | dybin=0.125
13 | nstaC=1
14 | nsta=1
15 | ncor=1
16 | printsta=0
17 | # Testing seems to indicate 80 is way too high for this run
18 | kurtmax=10.
19 | # Check ComCat for matches near SVIC
20 | checkComCat=True
21 | stalat=43.643080
22 | stalon=-121.253270
23 |
24 | ### CRBU settings to test waveserver source ###
25 | #station=CRBU
26 | #winlen=512
27 | #samprate=50.
28 | #server=mazama.ess.washington.edu
29 | #port=16017
30 |
--------------------------------------------------------------------------------
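A minimal sketch (not part of the repository) of how a file like this is consumed: redpy.config.Options reads the [Settings] section and falls back to built-in defaults for any key that is absent (see redpy/config.py below):

    import redpy.config

    opt = redpy.config.Options('lapine.cfg')
    print(opt.station, opt.winlen, opt.cmin)   # SVIC 2048 0.65
    print(opt.nstaC, opt.checkComCat)          # 1 True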
/makeMeta.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import argparse
6 | import numpy as np
7 | import os
8 |
9 | """
10 | Run this script to generate 'meta.html' in a specified directory and with a list of runs.
11 | This page gathers the 'meta_recent.html' tabbed overviews within the output directories
12 | into a single page.
13 |
14 | usage: makeMeta.py [-h] [-v] [-p PATH] [-r RUNS] [-t TOPATH]
15 |
16 | optional arguments:
17 | -h, --help show this help message and exit
18 | -v, --verbose increase written print statements
19 | -p PATH, --path PATH relative path to where meta.html should be created (e.g., ./)
20 | and ending in /
21 | -r RUNS, --runs RUNS comma-separated list of runs to include, which should match their
22 | groupName
23 | -t TOPATH, --topath TOPATH
24 | relative path from meta.html to the runs, ending in /
25 | """
26 |
27 | parser = argparse.ArgumentParser(description=
28 | """Run this script to generate 'meta.html' in a specified directory and with a list
29 | of runs. This page gathers the 'meta_recent.html' tabbed overviews within the
30 | output directories into a single page.""")
31 | parser.add_argument("-v", "--verbose", action="count", default=0,
32 | help="increase written print statements")
33 | parser.add_argument("-p", "--path",
34 | help="relative path to where meta.html should be created (e.g., ./) and ending in /",
35 | default='./')
36 | parser.add_argument("-r", "--runs",
37 | help="comma-separated list of runs to include, which should match their groupName")
38 | parser.add_argument("-t", "--topath",
39 | help="relative path from meta.html to the runs, ending in /", default='./')
40 |
41 | args = parser.parse_args()
42 |
43 | filename = '{}meta.html'.format(args.path)
44 | if args.verbose: print("Creating {}...".format(filename))
45 |
46 | if args.runs:
47 | if args.verbose: print("Looping over runs: {}".format(args.runs))
48 | runs = args.runs
49 | else:
50 | print("No runs supplied, assuming 'default' only")
51 | runs = 'default'
52 |
53 | with open(filename, 'w') as f:
54 |     f.write('<html><head><title>REDPy Meta Overview</title></head>')
55 |     f.write('<body>')
56 | 
57 |     for run in runs.split(','):
58 |         f.write("""
59 | 
60 |         <iframe src="{}{}/meta_recent.html"></iframe>
61 |         """.format(args.topath,run))
62 |     f.write('</body></html>')
63 |
64 |
65 | if args.verbose: print("Done")
--------------------------------------------------------------------------------
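An example call (the run names are hypothetical) that gathers two runs whose output folders sit alongside meta.html:

    python makeMeta.py -v -p ./ -r lapine,default -t ./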
/mshcat.csv:
--------------------------------------------------------------------------------
1 | Evid,Magnitude,Epoch(UTC),Time UTC,Time Local,Distance From,Lat,Lon,Depth Km,Depth Mi
2 | 10627613,-1.3,1095983010.79,2004/09/23 23:43:30,2004/09/23 16:43:30 PDT,"40.4 km ( 25.1 mi) S ( 170. azimuth) from Morton, WA",46.2005,-122.1865,2.1,1.3
3 | 10627598,-0.2,1095982940.94,2004/09/23 23:42:20,2004/09/23 16:42:20 PDT,"40.4 km ( 25.1 mi) S ( 171. azimuth) from Morton, WA",46.1993,-122.1918,1.8,1.1
4 | 10627578,-1.1,1095982612.1,2004/09/23 23:36:52,2004/09/23 16:36:52 PDT,"41.0 km ( 25.5 mi) S ( 171. azimuth) from Morton, WA",46.194,-122.1953,0.0,0.0
5 | 10627553,-1.6,1095982074.05,2004/09/23 23:27:54,2004/09/23 16:27:54 PDT,"41.6 km ( 25.9 mi) S ( 172. azimuth) from Morton, WA",46.1877,-122.1977,0.0,0.0
6 | 10627533,-0.8,1095981765.32,2004/09/23 23:22:45,2004/09/23 16:22:45 PDT,"40.2 km ( 25.0 mi) S ( 170. azimuth) from Morton, WA",46.2017,-122.1870,1.5,0.9
7 | 10627493,-0.6,1095981075.97,2004/09/23 23:11:15,2004/09/23 16:11:15 PDT,"40.4 km ( 25.1 mi) S ( 171. azimuth) from Morton, WA",46.1998,-122.1900,1.5,0.9
8 | 10627483,-0.8,1095980385.88,2004/09/23 22:59:45,2004/09/23 15:59:45 PDT,"40.0 km ( 24.9 mi) S ( 170. azimuth) from Morton, WA",46.2037,-122.1872,2.3,1.4
9 | 10627468,0.4,1095980178.93,2004/09/23 22:56:18,2004/09/23 15:56:18 PDT,"40.3 km ( 25.0 mi) S ( 170. azimuth) from Morton, WA",46.2013,-122.1843,2.1,1.3
10 | 10627448,-0.8,1095979975.26,2004/09/23 22:52:55,2004/09/23 15:52:55 PDT,"40.2 km ( 25.0 mi) S ( 171. azimuth) from Morton, WA",46.2012,-122.1957,1.3,0.8
11 | 10627438,-0.9,1095979760,2004/09/23 22:49:20,2004/09/23 15:49:20 PDT,"40.3 km ( 25.0 mi) S ( 171. azimuth) from Morton, WA",46.2008,-122.1898,1.6,1.0
12 | 10627408,-0.3,1095978908.61,2004/09/23 22:35:08,2004/09/23 15:35:08 PDT,"40.2 km ( 25.0 mi) S ( 171. azimuth) from Morton, WA",46.2015,-122.1942,1.9,1.2
13 | 10627388,-1.1,1095978682.29,2004/09/23 22:31:22,2004/09/23 15:31:22 PDT,"40.6 km ( 25.2 mi) S ( 170. azimuth) from Morton, WA",46.1985,-122.1872,2.0,1.2
14 | 10627373,-1.1,1095978498.57,2004/09/23 22:28:18,2004/09/23 15:28:18 PDT,"40.3 km ( 25.0 mi) S ( 170. azimuth) from Morton, WA",46.2015,-122.1850,1.6,1.0
15 | 10627363,-0.1,1095978306.59,2004/09/23 22:25:06,2004/09/23 15:25:06 PDT,"40.4 km ( 25.1 mi) S ( 171. azimuth) from Morton, WA",46.1988,-122.1950,0.0,0.0
16 | 10627343,-0.3,1095978090.57,2004/09/23 22:21:30,2004/09/23 15:21:30 PDT,"40.6 km ( 25.2 mi) S ( 171. azimuth) from Morton, WA",46.1975,-122.1943,1.0,0.6
17 | 10627333,-1.1,1095978015.25,2004/09/23 22:20:15,2004/09/23 15:20:15 PDT,"40.3 km ( 25.1 mi) S ( 170. azimuth) from Morton, WA",46.2007,-122.1860,1.7,1.0
18 | 10627318,0.0,1095977946.89,2004/09/23 22:19:06,2004/09/23 15:19:06 PDT,"40.3 km ( 25.1 mi) S ( 170. azimuth) from Morton, WA",46.2005,-122.1882,1.9,1.2
19 | 10627303,-0.1,1095977832.74,2004/09/23 22:17:12,2004/09/23 15:17:12 PDT,"40.6 km ( 25.2 mi) S ( 171. azimuth) from Morton, WA",46.1975,-122.1950,1.2,0.8
20 | 10627293,-0.2,1095977674.65,2004/09/23 22:14:34,2004/09/23 15:14:34 PDT,"40.2 km ( 25.0 mi) S ( 171. azimuth) from Morton, WA",46.2018,-122.1903,1.4,0.8
21 | 10627278,-1.1,1095977402.53,2004/09/23 22:10:02,2004/09/23 15:10:02 PDT,"40.3 km ( 25.0 mi) S ( 171. azimuth) from Morton, WA",46.2008,-122.1888,1.9,1.2
22 | 10627268,-0.3,1095977203.19,2004/09/23 22:06:43,2004/09/23 15:06:43 PDT,"40.4 km ( 25.1 mi) S ( 171. azimuth) from Morton, WA",46.1995,-122.1887,1.4,0.9
23 | 10627258,0.0,1095977152.68,2004/09/23 22:05:52,2004/09/23 15:05:52 PDT,"40.4 km ( 25.1 mi) S ( 170. azimuth) from Morton, WA",46.2003,-122.1872,1.6,1.0
24 | 10627243,-0.4,1095976866.99,2004/09/23 22:01:06,2004/09/23 15:01:06 PDT,"40.2 km ( 25.0 mi) S ( 171. azimuth) from Morton, WA",46.2013,-122.1893,1.5,0.9
25 | 10627233,-1.1,1095976768.43,2004/09/23 21:59:28,2004/09/23 14:59:28 PDT,"40.8 km ( 25.3 mi) S ( 171. azimuth) from Morton, WA",46.1957,-122.1943,0.2,0.1
26 | 10627213,0.2,1095976644.78,2004/09/23 21:57:24,2004/09/23 14:57:24 PDT,"40.4 km ( 25.1 mi) S ( 170. azimuth) from Morton, WA",46.1997,-122.1878,1.4,0.9
27 | 10627203,-0.3,1095976542.29,2004/09/23 21:55:42,2004/09/23 14:55:42 PDT,"40.5 km ( 25.2 mi) S ( 171. azimuth) from Morton, WA",46.198,-122.1938,0.0,0.0
28 | 10627183,-0.1,1095976319.72,2004/09/23 21:51:59,2004/09/23 14:51:59 PDT,"41.2 km ( 25.6 mi) S ( 171. azimuth) from Morton, WA",46.192,-122.1943,0.0,0.0
29 | 10627163,-1.6,1095976253.17,2004/09/23 21:50:53,2004/09/23 14:50:53 PDT,"41.4 km ( 25.7 mi) S ( 172. azimuth) from Morton, WA",46.1898,-122.1962,0.0,0.0
30 | 10627133,-0.6,1095975963.78,2004/09/23 21:46:03,2004/09/23 14:46:03 PDT,"40.4 km ( 25.1 mi) S ( 171. azimuth) from Morton, WA",46.1998,-122.1883,1.6,1.0
31 | 10627073,-0.9,1095975554.86,2004/09/23 21:39:14,2004/09/23 14:39:14 PDT,"40.4 km ( 25.1 mi) S ( 170. azimuth) from Morton, WA",46.1998,-122.1877,1.7,1.0
32 | 10627028,-0.5,1095974879.63,2004/09/23 21:27:59,2004/09/23 14:27:59 PDT,"40.1 km ( 24.9 mi) S ( 171. azimuth) from Morton, WA",46.2022,-122.1940,1.6,1.0
33 | 10627008,-0.8,1095974777.23,2004/09/23 21:26:17,2004/09/23 14:26:17 PDT,"40.3 km ( 25.0 mi) S ( 170. azimuth) from Morton, WA",46.2008,-122.1877,1.9,1.2
34 | 10626998,-1.6,1095974400.29,2004/09/23 21:20:00,2004/09/23 14:20:00 PDT,"40.4 km ( 25.1 mi) S ( 172. azimuth) from Morton, WA",46.1985,-122.1985,0.9,0.6
35 | 10626978,-0.6,1095974172.77,2004/09/23 21:16:12,2004/09/23 14:16:12 PDT,"40.2 km ( 25.0 mi) S ( 170. azimuth) from Morton, WA",46.2023,-122.1852,2.2,1.4
36 | 10626923,-0.2,1095973502.74,2004/09/23 21:05:02,2004/09/23 14:05:02 PDT,"40.3 km ( 25.1 mi) S ( 170. azimuth) from Morton, WA",46.2007,-122.1868,1.7,1.1
37 | 10626898,0.2,1095973182.07,2004/09/23 20:59:42,2004/09/23 13:59:42 PDT,"40.3 km ( 25.1 mi) S ( 171. azimuth) from Morton, WA",46.2002,-122.1930,1.3,0.8
38 | 10626888,-0.8,1095973076.9,2004/09/23 20:57:56,2004/09/23 13:57:56 PDT,"40.3 km ( 25.1 mi) S ( 170. azimuth) from Morton, WA",46.2008,-122.1868,2.3,1.4
39 | 10626878,-0.9,1095973027.21,2004/09/23 20:57:07,2004/09/23 13:57:07 PDT,"40.6 km ( 25.2 mi) S ( 172. azimuth) from Morton, WA",46.197,-122.2008,0.0,0.0
40 | 10626873,-0.8,1095972919.73,2004/09/23 20:55:19,2004/09/23 13:55:19 PDT,"40.3 km ( 25.1 mi) S ( 171. azimuth) from Morton, WA",46.2002,-122.1923,1.3,0.8
41 | 10626853,-0.8,1095972778.2,2004/09/23 20:52:58,2004/09/23 13:52:58 PDT,"40.2 km ( 25.0 mi) S ( 170. azimuth) from Morton, WA",46.2015,-122.1868,1.6,1.0
42 | 10626838,-0.7,1095972635.44,2004/09/23 20:50:35,2004/09/23 13:50:35 PDT,"40.4 km ( 25.1 mi) S ( 170. azimuth) from Morton, WA",46.2003,-122.1850,1.1,0.7
43 | 10626823,-0.6,1095972514,2004/09/23 20:48:34,2004/09/23 13:48:34 PDT,"40.0 km ( 24.9 mi) S ( 170. azimuth) from Morton, WA",46.2032,-122.1883,0.0,0.0
44 | 10626813,-1.1,1095972178.2,2004/09/23 20:42:58,2004/09/23 13:42:58 PDT,"39.8 km ( 24.7 mi) S ( 171. azimuth) from Morton, WA",46.2048,-122.1955,0.0,0.0
45 | 10626778,-0.8,1095971720.04,2004/09/23 20:35:20,2004/09/23 13:35:20 PDT,"39.5 km ( 24.6 mi) S ( 170. azimuth) from Morton, WA",46.2083,-122.1830,1.8,1.1
46 | 10626763,-0.5,1095971567.62,2004/09/23 20:32:47,2004/09/23 13:32:47 PDT,"41.2 km ( 25.6 mi) S ( 171. azimuth) from Morton, WA",46.1928,-122.1888,0.0,0.0
47 | 10626748,-0.8,1095970733.04,2004/09/23 20:18:53,2004/09/23 13:18:53 PDT,"40.0 km ( 24.8 mi) S ( 171. azimuth) from Morton, WA",46.2032,-122.1927,1.3,0.8
48 | 10626738,-0.1,1095970453.53,2004/09/23 20:14:13,2004/09/23 13:14:13 PDT,"40.4 km ( 25.1 mi) S ( 171. azimuth) from Morton, WA",46.2,-122.1913,1.7,1.1
49 | 10626723,0.6,1095970359.24,2004/09/23 20:12:39,2004/09/23 13:12:39 PDT,"40.8 km ( 25.4 mi) S ( 173. azimuth) from Morton, WA",46.1938,-122.2130,1.7,1.1
50 | 10626718,-0.1,1095970227.47,2004/09/23 20:10:27,2004/09/23 13:10:27 PDT,"40.4 km ( 25.1 mi) S ( 172. azimuth) from Morton, WA",46.199,-122.1978,0.5,0.3
51 | 10626698,0.5,1095970208.09,2004/09/23 20:10:08,2004/09/23 13:10:08 PDT,"40.0 km ( 24.8 mi) S ( 170. azimuth) from Morton, WA",46.2043,-122.1838,2.1,1.3
52 | 10626688,-1.1,1095969962.23,2004/09/23 20:06:02,2004/09/23 13:06:02 PDT,"40.0 km ( 24.9 mi) S ( 171. azimuth) from Morton, WA",46.2032,-122.1903,1.9,1.2
53 | 10626648,-0.1,1095969547.68,2004/09/23 19:59:07,2004/09/23 12:59:07 PDT,"40.3 km ( 25.0 mi) S ( 170. azimuth) from Morton, WA",46.2008,-122.1882,1.8,1.1
54 | 10626528,0.2,1095967292.66,2004/09/23 19:21:32,2004/09/23 12:21:32 PDT,"39.8 km ( 24.7 mi) S ( 171. azimuth) from Morton, WA",46.2045,-122.1980,2.8,1.7
55 | 10626523,-0.6,1095967235.26,2004/09/23 19:20:35,2004/09/23 12:20:35 PDT,"40.0 km ( 24.8 mi) S ( 170. azimuth) from Morton, WA",46.2043,-122.1833,0.7,0.4
56 | 10626508,-0.9,1095967206.06,2004/09/23 19:20:06,2004/09/23 12:20:06 PDT,"40.3 km ( 25.0 mi) S ( 171. azimuth) from Morton, WA",46.2002,-122.1942,1.1,0.7
57 | 10626498,-0.2,1095967181.54,2004/09/23 19:19:41,2004/09/23 12:19:41 PDT,"40.4 km ( 25.1 mi) S ( 171. azimuth) from Morton, WA",46.1997,-122.1902,1.9,1.2
58 | 10626478,0.4,1095967163.97,2004/09/23 19:19:23,2004/09/23 12:19:23 PDT,"40.4 km ( 25.1 mi) S ( 170. azimuth) from Morton, WA",46.2002,-122.1873,1.4,0.9
59 | 10626458,0.3,1095967130.58,2004/09/23 19:18:50,2004/09/23 12:18:50 PDT,"40.6 km ( 25.2 mi) S ( 171. azimuth) from Morton, WA",46.1973,-122.1963,0.1,0.0
60 | 10626448,-0.7,1095967010.17,2004/09/23 19:16:50,2004/09/23 12:16:50 PDT,"40.2 km ( 25.0 mi) S ( 170. azimuth) from Morton, WA",46.2015,-122.1882,1.7,1.0
61 | 10626438,-0.8,1095966709.24,2004/09/23 19:11:49,2004/09/23 12:11:49 PDT,"40.6 km ( 25.2 mi) S ( 171. azimuth) from Morton, WA",46.1977,-122.1900,1.5,0.9
62 | 10626428,-0.6,1095966599.46,2004/09/23 19:09:59,2004/09/23 12:09:59 PDT,"40.3 km ( 25.0 mi) S ( 170. azimuth) from Morton, WA",46.201,-122.1873,1.7,1.1
63 | 10626303,-0.2,1095966038.61,2004/09/23 19:00:38,2004/09/23 12:00:38 PDT,"40.2 km ( 25.0 mi) S ( 171. azimuth) from Morton, WA",46.2017,-122.1887,1.6,1.0
64 | 10626298,0.2,1095965988.17,2004/09/23 18:59:48,2004/09/23 11:59:48 PDT,"37.2 km ( 23.1 mi) S ( 174. azimuth) from Morton, WA",46.226,-122.2202,8.4,5.2
65 | 10626278,0.1,1095965936.15,2004/09/23 18:58:56,2004/09/23 11:58:56 PDT,"40.1 km ( 24.9 mi) S ( 171. azimuth) from Morton, WA",46.2027,-122.1903,1.7,1.1
66 | 10626258,0.2,1095965761.63,2004/09/23 18:56:01,2004/09/23 11:56:01 PDT,"40.6 km ( 25.2 mi) S ( 171. azimuth) from Morton, WA",46.1983,-122.1902,1.1,0.7
67 | 10626238,0.1,1095964969.85,2004/09/23 18:42:49,2004/09/23 11:42:49 PDT,"40.4 km ( 25.1 mi) S ( 170. azimuth) from Morton, WA",46.2002,-122.1872,1.6,1.0
68 | 10626223,-0.9,1095964636.44,2004/09/23 18:37:16,2004/09/23 11:37:16 PDT,"40.3 km ( 25.0 mi) S ( 171. azimuth) from Morton, WA",46.2002,-122.1958,0.8,0.5
69 | 10626208,-0.6,1095964444.59,2004/09/23 18:34:04,2004/09/23 11:34:04 PDT,"40.3 km ( 25.1 mi) S ( 170. azimuth) from Morton, WA",46.2005,-122.1882,1.2,0.8
70 | 10626198,-0.6,1095964353.88,2004/09/23 18:32:33,2004/09/23 11:32:33 PDT,"40.9 km ( 25.4 mi) S ( 170. azimuth) from Morton, WA",46.1957,-122.1845,2.6,1.6
71 | 10626183,0.2,1095964034.3,2004/09/23 18:27:14,2004/09/23 11:27:14 PDT,"40.3 km ( 25.0 mi) S ( 171. azimuth) from Morton, WA",46.2005,-122.1905,1.8,1.1
72 | 10626168,-0.5,1095963920.78,2004/09/23 18:25:20,2004/09/23 11:25:20 PDT,"40.9 km ( 25.4 mi) S ( 171. azimuth) from Morton, WA",46.195,-122.1928,0.9,0.6
73 | 10626158,-0.1,1095963801.21,2004/09/23 18:23:21,2004/09/23 11:23:21 PDT,"40.9 km ( 25.4 mi) S ( 172. azimuth) from Morton, WA",46.1937,-122.2018,0.0,0.0
74 | 10626133,-0.2,1095963647.79,2004/09/23 18:20:47,2004/09/23 11:20:47 PDT,"40.3 km ( 25.1 mi) S ( 171. azimuth) from Morton, WA",46.2005,-122.1887,1.8,1.1
75 | 10626113,-0.1,1095963492.34,2004/09/23 18:18:12,2004/09/23 11:18:12 PDT,"40.5 km ( 25.1 mi) S ( 171. azimuth) from Morton, WA",46.199,-122.1922,1.5,0.9
76 | 10626088,0.1,1095963296.98,2004/09/23 18:14:56,2004/09/23 11:14:56 PDT,"40.2 km ( 25.0 mi) S ( 171. azimuth) from Morton, WA",46.2015,-122.1917,1.2,0.8
77 | 10626053,0.0,1095963193.81,2004/09/23 18:13:13,2004/09/23 11:13:13 PDT,"40.0 km ( 24.9 mi) S ( 170. azimuth) from Morton, WA",46.2035,-122.1883,1.9,1.2
78 | 10626078,-0.1,1095963163.55,2004/09/23 18:12:43,2004/09/23 11:12:43 PDT,"39.6 km ( 24.6 mi) S ( 170. azimuth) from Morton, WA",46.2068,-122.1880,1.7,1.0
79 | 10626033,-0.2,1095962990,2004/09/23 18:09:50,2004/09/23 11:09:50 PDT,"39.7 km ( 24.7 mi) S ( 171. azimuth) from Morton, WA",46.2057,-122.1902,1.5,0.9
80 | 10626008,0.8,1095962358.56,2004/09/23 17:59:18,2004/09/23 10:59:18 PDT,"40.3 km ( 25.0 mi) S ( 171. azimuth) from Morton, WA",46.2007,-122.1918,1.6,1.0
81 | 10625998,-0.5,1095962252.23,2004/09/23 17:57:32,2004/09/23 10:57:32 PDT,"39.8 km ( 24.8 mi) S ( 171. azimuth) from Morton, WA",46.2047,-122.1915,2.0,1.3
82 | 10625983,-0.8,1095962220.68,2004/09/23 17:57:00,2004/09/23 10:57:00 PDT,"40.5 km ( 25.2 mi) S ( 172. azimuth) from Morton, WA",46.1977,-122.2030,0.1,0.0
83 | 10625968,-0.3,1095962191.19,2004/09/23 17:56:31,2004/09/23 10:56:31 PDT,"41.5 km ( 25.8 mi) S ( 172. azimuth) from Morton, WA",46.1885,-122.2027,0.0,0.0
84 | 10625928,0.7,1095961241.65,2004/09/23 17:40:41,2004/09/23 10:40:41 PDT,"40.3 km ( 25.0 mi) S ( 170. azimuth) from Morton, WA",46.2013,-122.1878,1.3,0.8
85 | 10625898,-0.5,1095960760.35,2004/09/23 17:32:40,2004/09/23 10:32:40 PDT,"40.1 km ( 24.9 mi) S ( 172. azimuth) from Morton, WA",46.2015,-122.1995,1.8,1.1
86 | 10625883,-0.5,1095960567.6,2004/09/23 17:29:27,2004/09/23 10:29:27 PDT,"40.4 km ( 25.1 mi) S ( 171. azimuth) from Morton, WA",46.1998,-122.1932,0.0,0.0
87 | 10625878,0.0,1095960432.88,2004/09/23 17:27:12,2004/09/23 10:27:12 PDT,"40.1 km ( 24.9 mi) S ( 171. azimuth) from Morton, WA",46.2023,-122.1892,1.4,0.9
88 | 10625863,0.5,1095960196.84,2004/09/23 17:23:16,2004/09/23 10:23:16 PDT,"39.9 km ( 24.8 mi) S ( 170. azimuth) from Morton, WA",46.2048,-122.1883,0.9,0.6
89 | 10625838,0.1,1095959942,2004/09/23 17:19:02,2004/09/23 10:19:02 PDT,"40.3 km ( 25.0 mi) S ( 170. azimuth) from Morton, WA",46.2012,-122.1880,1.7,1.1
90 | 10625798,-0.6,1095959250.3,2004/09/23 17:07:30,2004/09/23 10:07:30 PDT,"38.9 km ( 24.2 mi) SSE ( 168. azimuth) from Morton, WA",46.2158,-122.1688,3.8,2.4
91 | 10625763,0.2,1095958659.78,2004/09/23 16:57:39,2004/09/23 09:57:39 PDT,"40.5 km ( 25.2 mi) S ( 171. azimuth) from Morton, WA",46.1983,-122.1928,0.8,0.5
92 | 10625743,0.4,1095957194.45,2004/09/23 16:33:14,2004/09/23 09:33:14 PDT,"39.9 km ( 24.8 mi) S ( 171. azimuth) from Morton, WA",46.2045,-122.1915,1.7,1.1
93 | 10625728,0.4,1095957056.05,2004/09/23 16:30:56,2004/09/23 09:30:56 PDT,"40.3 km ( 25.1 mi) S ( 171. azimuth) from Morton, WA",46.2003,-122.1897,1.5,0.9
94 | 10625708,-0.5,1095955552.97,2004/09/23 16:05:52,2004/09/23 09:05:52 PDT,"36.8 km ( 22.9 mi) S ( 169. azimuth) from Morton, WA",46.2338,-122.1818,3.1,1.9
95 | 10625663,0.4,1095954597.19,2004/09/23 15:49:57,2004/09/23 08:49:57 PDT,"40.3 km ( 25.0 mi) S ( 170. azimuth) from Morton, WA",46.2012,-122.1875,1.2,0.8
96 | 10625658,-0.6,1095954563.26,2004/09/23 15:49:23,2004/09/23 08:49:23 PDT,"40.3 km ( 25.1 mi) S ( 171. azimuth) from Morton, WA",46.2002,-122.1910,1.6,1.0
97 | 10625643,-0.6,1095954514.05,2004/09/23 15:48:34,2004/09/23 08:48:34 PDT,"39.7 km ( 24.7 mi) S ( 171. azimuth) from Morton, WA",46.2053,-122.1965,1.4,0.8
98 | 10625618,-0.6,1095954479.56,2004/09/23 15:47:59,2004/09/23 08:47:59 PDT,"40.2 km ( 25.0 mi) S ( 171. azimuth) from Morton, WA",46.201,-122.1952,1.5,1.0
99 | 10625608,-0.2,1095954468.62,2004/09/23 15:47:48,2004/09/23 08:47:48 PDT,"41.6 km ( 25.8 mi) S ( 173. azimuth) from Morton, WA",46.1873,-122.2052,3.4,2.1
100 | 10625588,-0.5,1095954300.29,2004/09/23 15:45:00,2004/09/23 08:45:00 PDT,"39.7 km ( 24.7 mi) S ( 171. azimuth) from Morton, WA",46.206,-122.1908,3.6,2.2
101 | 10625578,0.7,1095953886.4,2004/09/23 15:38:06,2004/09/23 08:38:06 PDT,"39.6 km ( 24.6 mi) S ( 171. azimuth) from Morton, WA",46.2068,-122.1918,3.1,1.9
102 | 10625523,0.7,1095951959.63,2004/09/23 15:05:59,2004/09/23 08:05:59 PDT,"40.2 km ( 25.0 mi) S ( 170. azimuth) from Morton, WA",46.2022,-122.1873,1.6,1.0
103 | 10625513,0.3,1095951917.18,2004/09/23 15:05:17,2004/09/23 08:05:17 PDT,"40.1 km ( 24.9 mi) S ( 170. azimuth) from Morton, WA",46.2032,-122.1877,0.1,0.0
104 | 10625498,0.0,1095951907.4,2004/09/23 15:05:07,2004/09/23 08:05:07 PDT,"40.0 km ( 24.9 mi) S ( 171. azimuth) from Morton, WA",46.2033,-122.1895,1.4,0.9
105 | 10625488,0.2,1095951780.71,2004/09/23 15:03:00,2004/09/23 08:03:00 PDT,"40.1 km ( 24.9 mi) S ( 170. azimuth) from Morton, WA",46.2025,-122.1883,0.9,0.6
106 | 10625478,-0.2,1095951763.37,2004/09/23 15:02:43,2004/09/23 08:02:43 PDT,"39.9 km ( 24.8 mi) S ( 176. azimuth) from Morton, WA",46.2003,-122.2365,3.6,2.2
107 | 10625468,-0.1,1095951433.93,2004/09/23 14:57:13,2004/09/23 07:57:13 PDT,"41.0 km ( 25.5 mi) S ( 171. azimuth) from Morton, WA",46.1947,-122.1878,0.0,0.0
108 | 10625458,-0.3,1095951009.4,2004/09/23 14:50:09,2004/09/23 07:50:09 PDT,"38.7 km ( 24.1 mi) S ( 171. azimuth) from Morton, WA",46.2147,-122.1937,0.0,0.0
109 | 10625423,-0.3,1095950146.05,2004/09/23 14:35:46,2004/09/23 07:35:46 PDT,"40.5 km ( 25.2 mi) S ( 170. azimuth) from Morton, WA",46.1998,-122.1810,1.2,0.7
110 | 10625398,0.4,1095947322.38,2004/09/23 13:48:42,2004/09/23 06:48:42 PDT,"40.2 km ( 25.0 mi) S ( 170. azimuth) from Morton, WA",46.202,-122.1858,1.3,0.8
111 | 10625383,-0.5,1095947295.65,2004/09/23 13:48:15,2004/09/23 06:48:15 PDT,"40.1 km ( 24.9 mi) S ( 171. azimuth) from Morton, WA",46.202,-122.1918,1.3,0.8
112 | 10625373,-0.9,1095947263.4,2004/09/23 13:47:43,2004/09/23 06:47:43 PDT,"40.1 km ( 24.9 mi) S ( 170. azimuth) from Morton, WA",46.2035,-122.1843,1.7,1.0
113 | 10625358,-0.5,1095947245.27,2004/09/23 13:47:25,2004/09/23 06:47:25 PDT,"40.2 km ( 25.0 mi) S ( 171. azimuth) from Morton, WA",46.2013,-122.1937,1.5,0.9
114 | 10625343,-0.8,1095947235.21,2004/09/23 13:47:15,2004/09/23 06:47:15 PDT,"41.1 km ( 25.5 mi) S ( 171. azimuth) from Morton, WA",46.1932,-122.1910,0.4,0.3
115 | 10625333,-0.1,1095946772.24,2004/09/23 13:39:32,2004/09/23 06:39:32 PDT,"40.2 km ( 25.0 mi) S ( 171. azimuth) from Morton, WA",46.2013,-122.1923,0.7,0.4
116 | 10625228,0.6,1095943887.44,2004/09/23 12:51:27,2004/09/23 05:51:27 PDT,"40.3 km ( 25.0 mi) S ( 172. azimuth) from Morton, WA",46.1998,-122.1995,1.2,0.7
117 | 10625208,0.2,1095943522.78,2004/09/23 12:45:22,2004/09/23 05:45:22 PDT,"40.5 km ( 25.2 mi) S ( 172. azimuth) from Morton, WA",46.1977,-122.2023,2.3,1.4
118 | 10625193,-0.3,1095943494.1,2004/09/23 12:44:54,2004/09/23 05:44:54 PDT,"40.6 km ( 25.2 mi) S ( 172. azimuth) from Morton, WA",46.1967,-122.2042,0.0,0.0
119 | 10625178,-0.3,1095942790.55,2004/09/23 12:33:10,2004/09/23 05:33:10 PDT,"40.2 km ( 24.9 mi) S ( 171. azimuth) from Morton, WA",46.2013,-122.1960,0.8,0.5
120 | 10625148,0.7,1095940886.05,2004/09/23 12:01:26,2004/09/23 05:01:26 PDT,"40.7 km ( 25.3 mi) S ( 171. azimuth) from Morton, WA",46.1968,-122.1952,0.1,0.0
121 | 10625138,-1.1,1095939590.3,2004/09/23 11:39:50,2004/09/23 04:39:50 PDT,"41.1 km ( 25.6 mi) S ( 170. azimuth) from Morton, WA",46.1937,-122.1848,3.5,2.2
122 | 10625123,-0.5,1095939458.02,2004/09/23 11:37:38,2004/09/23 04:37:38 PDT,"40.8 km ( 25.4 mi) S ( 171. azimuth) from Morton, WA",46.1957,-122.1920,0.3,0.2
123 | 10625088,-0.8,1095938096.85,2004/09/23 11:14:56,2004/09/23 04:14:56 PDT,"40.4 km ( 25.1 mi) S ( 171. azimuth) from Morton, WA",46.1997,-122.1922,1.6,1.0
124 | 10625043,-0.1,1095935358.07,2004/09/23 10:29:18,2004/09/23 03:29:18 PDT,"38.6 km ( 24.0 mi) S ( 169. azimuth) from Morton, WA",46.2175,-122.1770,3.1,1.9
125 | 10624933,-0.5,1095934280.02,2004/09/23 10:11:20,2004/09/23 03:11:20 PDT,"39.5 km ( 24.5 mi) S ( 171. azimuth) from Morton, WA",46.2078,-122.1925,2.8,1.8
126 | 10624918,0.1,1095934267.44,2004/09/23 10:11:07,2004/09/23 03:11:07 PDT,"40.2 km ( 25.0 mi) S ( 171. azimuth) from Morton, WA",46.2018,-122.1888,2.0,1.2
127 | 10624873,-0.1,1095932377.85,2004/09/23 09:39:37,2004/09/23 02:39:37 PDT,"40.1 km ( 24.9 mi) S ( 172. azimuth) from Morton, WA",46.201,-122.2003,0.0,0.0
128 | 10624853,-0.5,1095930802.64,2004/09/23 09:13:22,2004/09/23 02:13:22 PDT,"39.7 km ( 24.7 mi) S ( 171. azimuth) from Morton, WA",46.2055,-122.1980,2.2,1.4
129 | 10624818,0.2,1095930158.48,2004/09/23 09:02:38,2004/09/23 02:02:38 PDT,"40.8 km ( 25.4 mi) S ( 172. azimuth) from Morton, WA",46.1948,-122.1977,0.0,0.0
130 | 10624728,-0.8,1095904989,2004/09/23 02:03:09,2004/09/22 19:03:09 PDT,"38.6 km ( 24.0 mi) SSE ( 164. azimuth) from Morton, WA",46.224,-122.1400,6.4,4.0
131 | 10624638,0.5,1095897252.12,2004/09/22 23:54:12,2004/09/22 16:54:12 PDT,"40.3 km ( 25.0 mi) S ( 170. azimuth) from Morton, WA",46.2013,-122.1850,1.8,1.1
132 | 10624608,-0.8,1095883063.98,2004/09/22 19:57:43,2004/09/22 12:57:43 PDT,"40.3 km ( 25.0 mi) S ( 170. azimuth) from Morton, WA",46.201,-122.1870,3.7,2.3
133 | 10624598,0.2,1095882248.48,2004/09/22 19:44:08,2004/09/22 12:44:08 PDT,"40.2 km ( 25.0 mi) S ( 170. azimuth) from Morton, WA",46.2022,-122.1843,3.2,2.0
134 | 10624503,-0.8,1095865662.06,2004/09/22 15:07:42,2004/09/22 08:07:42 PDT,"40.5 km ( 25.1 mi) SSE ( 168. azimuth) from Morton, WA",46.202,-122.1678,4.6,2.9
135 | 10624353,0.3,1095825126.72,2004/09/22 03:52:06,2004/09/21 20:52:06 PDT,"40.2 km ( 25.0 mi) S ( 178. azimuth) from Morton, WA",46.1968,-122.2527,8.6,5.3
136 | 10624228,0.1,1095798047.54,2004/09/21 20:20:47,2004/09/21 13:20:47 PDT,"40.4 km ( 25.1 mi) S ( 171. azimuth) from Morton, WA",46.2002,-122.1890,3.5,2.2
137 | 10623833,-1.0,1095696923.88,2004/09/20 16:15:23,2004/09/20 09:15:23 PDT,"40.5 km ( 25.2 mi) S ( 171. azimuth) from Morton, WA",46.1988,-122.1913,3.7,2.3
138 | 10623423,0.5,1095569301.33,2004/09/19 04:48:21,2004/09/18 21:48:21 PDT,"40.1 km ( 24.9 mi) S ( 170. azimuth) from Morton, WA",46.2032,-122.1835,3.8,2.4
139 | 10622128,-0.3,1095441975.45,2004/09/17 17:26:15,2004/09/17 10:26:15 PDT,"40.7 km ( 25.3 mi) S ( 171. azimuth) from Morton, WA",46.197,-122.1895,3.0,1.9
140 | 10621883,0.9,1095354142.09,2004/09/16 17:02:22,2004/09/16 10:02:22 PDT,"40.3 km ( 25.0 mi) S ( 170. azimuth) from Morton, WA",46.201,-122.1878,1.9,1.2
141 | 10621783,0.5,1095312133.98,2004/09/16 05:22:13,2004/09/15 22:22:13 PDT,"40.3 km ( 25.0 mi) S ( 170. azimuth) from Morton, WA",46.2012,-122.1875,2.0,1.2
142 | 10621738,0.9,1095307096.97,2004/09/16 03:58:16,2004/09/15 20:58:16 PDT,"40.2 km ( 25.0 mi) S ( 170. azimuth) from Morton, WA",46.2018,-122.1865,1.7,1.1
143 | 10620898,0.0,1095210595.35,2004/09/15 01:09:55,2004/09/14 18:09:55 PDT,"40.3 km ( 25.0 mi) S ( 171. azimuth) from Morton, WA",46.2007,-122.1912,1.9,1.2
144 | 10620888,1.1,1095210461.28,2004/09/15 01:07:41,2004/09/14 18:07:41 PDT,"39.6 km ( 24.6 mi) S ( 171. azimuth) from Morton, WA",46.2062,-122.1982,0.0,0.0
145 | 10620868,0.3,1095207875.44,2004/09/15 00:24:35,2004/09/14 17:24:35 PDT,"39.9 km ( 24.8 mi) S ( 171. azimuth) from Morton, WA",46.204,-122.1940,4.2,2.6
146 |
--------------------------------------------------------------------------------
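A short sketch (assuming obspy is installed, as elsewhere in this repository) for loading this catalog, e.g. to compare located events against REDPy triggers:

    import csv
    from obspy import UTCDateTime

    with open('mshcat.csv') as f:
        rows = list(csv.DictReader(f))

    # Epoch(UTC) is seconds since 1970; UTCDateTime accepts it directly
    times = [UTCDateTime(float(r['Epoch(UTC)'])) for r in rows]
    mags  = [float(r['Magnitude']) for r in rows]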
/plotJunk.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import redpy.config
6 | import redpy.table
7 | import redpy.plotting
8 | import argparse
9 | import numpy as np
10 | import os
11 |
12 | """
13 | Run this script to output the contents of the junk table for troubleshooting.
14 |
15 | usage: plotJunk.py [-h] [-v] [-c CONFIGFILE]
16 |
17 | optional arguments:
18 | -h, --help show this help message and exit
19 | -v, --verbose increase written print statements
20 | -c CONFIGFILE, --configfile CONFIGFILE
21 | use configuration file named CONFIGFILE instead of
22 | default settings.cfg
23 | """
24 |
25 | parser = argparse.ArgumentParser(description=
26 | "Run this script to output the contents of the junk table for troubleshooting.")
27 | parser.add_argument("-v", "--verbose", action="count", default=0,
28 | help="increase written print statements")
29 | parser.add_argument("-c", "--configfile",
30 | help="use configuration file named CONFIGFILE instead of default settings.cfg")
31 | args = parser.parse_args()
32 |
33 | if args.configfile:
34 | opt = redpy.config.Options(args.configfile)
35 | if args.verbose: print("Using config file: {}".format(args.configfile))
36 | else:
37 | opt = redpy.config.Options("settings.cfg")
38 | if args.verbose: print("Using config file: settings.cfg")
39 |
40 | if args.verbose: print("Creating folder to store junk images named '{}{}/junk'".format(
41 | opt.outputPath,opt.groupName))
42 | try:
43 | os.mkdir('{}{}/junk'.format(opt.outputPath,opt.groupName))
44 | except OSError:
45 | print("Folder exists.")
46 |
47 | if args.verbose: print("Opening hdf5 table: {}".format(opt.filename))
48 | h5file, rtable, otable, ttable, ctable, jtable, dtable, ftable = redpy.table.openTable(opt)
49 |
50 | if args.verbose: print("Creating junk plots...")
51 | redpy.plotting.createJunkPlots(jtable, opt)
52 |
53 | if args.verbose: print("Closing table...")
54 | h5file.close()
55 | if args.verbose: print("Done")
--------------------------------------------------------------------------------
/redpy/__init__.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | from obspy.core.trace import Trace
6 | import redpy.table
7 | import redpy.trigger
8 | import redpy.correlation
9 | import redpy.cluster
10 | import redpy.optics
11 | import redpy.config
12 | import redpy.plotting
13 | import redpy.printing
--------------------------------------------------------------------------------
/redpy/cluster.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import numpy as np
6 | import sys
7 | from tables import *
8 | from redpy.optics import *
9 |
10 | def runFamOPTICS(rtable, ctable, ftable, fnum, opt):
11 |
12 | """
13 | Runs OPTICS ordering within a single family
14 |
15 | rtable: Repeater table
16 | ctable: Correlation matrix table
17 | ftable: Families table
18 | fnum: Family number to run
19 | opt: Options object describing station/run parameters
20 |
21 | Returns a slightly different ordering than the full version, but probably better
22 | """
23 |
24 | fam = np.fromstring(ftable[fnum]['members'], dtype=int, sep=' ')
25 |
26 | if len(fam) in (3, 4, 5, 6, 10, 15, 25, 50, 100, 250, 500, 1000, 2500, 5000, 10000,
27 | 25000, 50000, 100000, 250000, 500000):
28 |
29 | # Could be sped up if these three don't have to be called every time
30 | id1 = ctable.cols.id1[:]
31 | id2 = ctable.cols.id2[:]
32 | ccc = 1-ctable.cols.ccc[:]
33 |
34 | # Create distance matrix
35 | ids = rtable[fam]['id']
36 | ix = np.where(np.in1d(id2,ids))
37 | r = np.zeros((max(ids)+1,)).astype('int')
38 | r[ids] = range(len(ids))
39 | D = np.ones((len(ids),len(ids)))
40 | D[r[id2[ix]],r[id1[ix]]] = ccc[ix]
41 | D[r[id1[ix]],r[id2[ix]]] = ccc[ix]
42 | D[range(len(ids)),range(len(ids))] = 0
43 |
44 | # Sort so most connected event is always considered for core
45 | s = np.argsort(sum(D))[::-1]
46 | D = D[s,:]
47 | D = D[:,s]
48 | fam = fam[s]
49 |
50 | # Run OPTICS
51 | ttree = setOfObjects(D)
52 | prep_optics(ttree,1)
53 | build_optics(ttree,1)
54 | order = np.array(ttree._ordered_list)
55 | core = fam[np.argmin(ttree._reachability)]
56 |
57 | # Write to ftable
58 | np.set_printoptions(threshold=sys.maxsize)
59 | np.set_printoptions(linewidth=sys.maxsize)
60 | ftable.cols.members[fnum] = np.array2string(fam[order])[1:-1]
61 | ftable.cols.core[fnum] = core
62 |
63 | ftable.cols.startTime[fnum] = np.min(rtable[fam]['startTimeMPL'])
64 | ftable.cols.longevity[fnum] = np.max(rtable[fam]['startTimeMPL']) - np.min(
65 | rtable[fam]['startTimeMPL'])
66 | ftable.cols.printme[fnum] = 1
67 | ftable.cols.printme[-1] = 1
68 | ftable.flush()
69 |
--------------------------------------------------------------------------------
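For reference, a toy recreation (made-up ids and correlation values) of the distance-matrix step above: pairwise correlations stored as (id1, id2, ccc) triplets become a symmetric dissimilarity matrix with zeros on the diagonal, which is what OPTICS then orders:

    import numpy as np

    ids = np.array([4, 7, 9])                   # event ids in one family
    id1 = np.array([4, 4, 7])
    id2 = np.array([7, 9, 9])
    dis = 1 - np.array([0.92, 0.81, 0.88])      # dissimilarity = 1 - ccc

    r = np.zeros((ids.max()+1,), dtype=int)     # map id -> matrix row
    r[ids] = np.arange(len(ids))
    D = np.ones((len(ids), len(ids)))
    D[r[id1], r[id2]] = dis                     # fill both triangles
    D[r[id2], r[id1]] = dis
    D[np.arange(len(ids)), np.arange(len(ids))] = 0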
/redpy/config.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import numpy as np
6 | import configparser
7 |
8 | class Options(object):
9 |
10 | def __init__(self, configfile='settings.cfg'):
11 |
12 | """
13 | Defines the settings that are often passed to routines and that define the table.
14 | These are also written to the attributes of the table for posterity.
15 |
16 | Requires a configuration file with section header [Settings] on the first line and
17 | any of the following configurations below it. Passing a configuration file with
18 | only the header defaults to a test run laid out with the settings below. Format of
19 | the file below the header is simply:
20 |
21 | name=value
22 |
23 | where name is the name of the parameter, and value is either a string (no quotes)
24 | or number. Comments are allowed on separate lines beginning with a #, and the
25 | parameters may be in any order desired. An example configuration file called
26 | 'settings.cfg' is included in the distribution that contains all of the default
27 | settings and may be edited. The name of the configfile used is also stored.
28 |
29 | TABLE DEFINITIONS:
30 | title: Name of the table, used also in plotting titles (default 'REDPy Catalog')
31 | filename: Filename/path for the table, should end in .h5 (default 'redpytable.h5')
32 | outputPath: Absolute or relative path to outputs (defaults to current directory)
33 | groupName: Short string describing the name of the station, may not contain spaces
34 | (default 'default')
35 |
36 | STATION PARAMETERS:
37 | nsta: Number of stations (default 8)
38 | station: String of ordered station names
39 | (default 'SEP,YEL,HSR,SHW,EDM,STD,JUN,SOS')
40 | channel: String of channels of interest, no wildcards
41 | (default 'EHZ,EHZ,EHZ,EHZ,EHZ,EHZ,EHZ,EHZ')
42 | network: String of network code (default 'UW,UW,UW,UW,UW,UW,UW,UW')
43 | location: String of location code (default '--,--,--,--,--,--,--,--')
44 | samprate: Sampling rate of that station (default 100.0 Hz)
45 | server: Source of data (fdsnws://server, waveserver://ws_name:ws_port,
46 | seedlink://sl_IP,sl_port; default "IRIS", otherwise "file")
47 | port: Port number for server (deprecated, default 16017, not used if using IRIS)
48 | searchdir: Path to directory with local files ending in / (default './', not used
49 | if using IRIS or waveserver)
50 | filepattern: Wildcard for selecting subset of files based on their name
51 | (default "*")
52 | nsec: Number of seconds to download from server at a time (default 3600 s)
53 |
54 | WINDOWING PARAMETERS:
55 | winlen: Length of window for cross-correlation (default 1024 samples, 2^n is best)
56 | ptrig: Length of time cut prior to trigger (default 10.0 s)
57 | atrig: Length of time cut after trigger (default 20.0 s)
58 | wshape: A derived value (cannot be explicitly defined) corresponding to the number
59 | of samples that will be cut based on ptrig and atrig
60 |
61 | TRIGGERING PARAMETERS:
62 | trigalg: Trigger algorithm to be used for STALTA (default 'classicstalta')
63 | lwin: Length of long window for STALTA (default 7.0 s)
64 | swin: Length of short window for STALTA (default 0.8 s)
65 | trigon: Cutoff ratio for triggering STALTA (default 3.0)
66 | trigoff: Cutoff ratio for ending STALTA trigger (default 2.0)
67 | mintrig: A derived value (set to 75% of winlen) for the minimum spacing between
68 | subsequent triggers
69 | nstaC: Minimum number of stations a trigger must show up on (default 5)
70 | offset: Optional time offset to advance waveforms as a list of positive floats
71 | (default 0.0)
72 | kurtmax: Maximum kurtosis allowed for event window, to eliminate spikes; ~80-100
73 | is appropriate for 5 s window, ~130 for 15 s, ~200 for 25 s (default 80.0)
74 | kurtfmax: Maximum kurtosis of frequency amplitude spectrum to eliminate
75 | calibration pulses with unnaturally harmonic signals; be careful not
76 | to set too low or you could eliminate real harmonic events (default 150.0)
77 | kurtwin: Length of window to use for kurtosis, in seconds, around the trigger
78 | time, will be centered on the trigger time (default 5 s)
79 | oratiomax: Maximum ratio of outliers to total number of datapoints in trace
80 | (default 0.15 (15%))
81 |
82 | FILTERING PARAMETERS:
83 | fmin: Lower band of bandpass filter (default 1.0 Hz)
84 | fmax: Upper band of bandpass filter (default 10.0 Hz)
85 |
86 | FREQUENCY INDEX WINDOWS:
87 | filomin: Lower bound on low window (default 1.0 Hz)
88 | filomax: Upper bound on low window (default 2.5 Hz)
89 | fiupmin: Lower bound on upper window (default 5.0 Hz)
90 | fiupmax: Upper bound on upper window (default 10.0 Hz)
91 | fispanlow: Lower bound of frequency index for occurrencefi plot (default -0.5)
92 | fispanhigh: Upper bound of frequency index for occurrencefi plot (default 0.5)
93 |
94 | CLUSTERING PARAMETERS:
95 | cmin: Minimum correlation to be considered a repeater (default 0.7)
96 | ncor: Number of stations correlation must be exceeded on (default 4)
97 |
98 | ORPHAN EXPIRATION PARAMETERS:
99 | minorph: Minimum amount of time (days) to keep the smaller orphans alive
100 | (corresponds to trigon) (default 0.05 days)
101 | maxorph: Maximum amount of time (days) to keep the largest orphans alive
102 | (corresponds to trigon+7) (default 7 days)
103 |
104 | PLOTTING PARAMETERS:
105 | plotformat: List and order of plots to be included on the timeline, separated by
106 | either , (new row) or + (group into tabs) without spaces. List of currently
107 | supported plot types are: eqrate, fi, occurrence, occurrencefi, and longevity
108 | (default 'eqrate,fi,occurrence+occurrencefi,longevity')
109 | minplot: Minimum number of members required in order to be plotted to full
110 | overview timeline (default 5)
111 | mminplot: Minimum number of members required in order to be plotted to meta
112 | timeline (default 0 (all members plotted))
113 | dybin: Width of bin in days for full histogram (default 1 day)
114 | hrbin: Width of bin in hours for recent histogram (default 1 hour)
115 | mhrbin: Width of bin in hours for meta histogram (default 1 hour)
116 | occurbin: Width of bin for occurrence plot; specified in .cfg as hours,
117 | converted to days in redpy/config (default 1 hr -> 1/24 day)
118 | recbin: Width of bin for recent occurrence; specified in .cfg as hours,
119 | converted to days in redpy/config (default 1 hr -> 1/24 day)
120 | mrecbin: Width of bin for recent occurrence in meta plot; specified in .cfg as
121 | hours, converted to days in redpy/config (default 1 hr -> 1/24 day)
122 | fixedheight: Whether the occurrence plot should have the same height as the other
123 | subplots or expand in height as more families are plotted (default False)
124 | recplot: Number of days for 'recent' plot (default 14 days)
125 | mrecplot: Number of days for meta plot (default 30 days)
126 | printsta: Station index in station list to be plotted (default 2)
127 | verbosecatalog: Add additional columns to the catalog file (default False)
128 | amplims: Use 'global' or 'family' to define amplitude plot limits (default global)
129 |
130 | COMCAT PARAMETERS:
131 | checkComCat: Use ComCat to find located seismicity that might match repeaters
132 | (default False)
133 | stalats: List of station latitudes (defaults to MSH network:
134 | '46.200210,46.209550,46.174280,46.193470,46.197170,46.237610,46.147060,
135 | 46.243860')
136 | stalons: List of station longitudes (defaults to MSH network:
137 | '-122.190600,-122.188990,-122.180650,-122.236350,-122.151210,-122.223960,
138 | -122.152430,-122.137870')
139 | serr: Seconds of allowable difference in trigger and projected arrival time
140 | (default 5.0 s)
141 | locdeg: Degrees of distance to be considered a local event (default 0.5 degrees)
142 | regdeg: Degrees of distance to be considered a regional event (default 2.0 degrees)
143 | regmag: Minimum magnitude for regional events (default M2.5)
144 | telemag: Minimum magnitude for teleseismic events (default M4.5)
145 | matchMax: Number of largest events to match (default 0 (all))
146 |
147 | This list will likely expand.
148 | """
149 |
150 | self.configfile = configfile
151 |
152 | # Load parameters from config file
153 | config = configparser.ConfigParser()
154 | config.read(self.configfile)
155 |
156 | # Set parameters to default if not in config file
157 | self.title=config.get('Settings','title') if config.has_option(
158 | 'Settings','title') else 'REDPy Catalog'
159 | self.filename=config.get('Settings','filename') if config.has_option(
160 | 'Settings','filename') else 'redpytable.h5'
161 | self.outputPath=config.get('Settings','outputPath') if config.has_option(
162 | 'Settings','outputPath') else ''
163 | self.groupName=config.get('Settings','groupName') if config.has_option(
164 | 'Settings','groupName') else 'default'
165 | self.groupDesc=config.get('Settings','groupDesc') if config.has_option(
166 | 'Settings','groupDesc') else 'Default Test Run'
167 | self.nsta=config.getint('Settings','nsta') if config.has_option(
168 | 'Settings','nsta') else 8
169 | self.station=config.get('Settings','station') if config.has_option(
170 | 'Settings','station') else 'SEP,YEL,HSR,SHW,EDM,STD,JUN,SOS'
171 | self.channel=config.get('Settings','channel') if config.has_option(
172 | 'Settings','channel') else 'EHZ,EHZ,EHZ,EHZ,EHZ,EHZ,EHZ,EHZ'
173 | self.network=config.get('Settings','network') if config.has_option(
174 | 'Settings','network') else 'UW,UW,UW,UW,UW,UW,UW,UW'
175 | self.location=config.get('Settings','location') if config.has_option(
176 | 'Settings','location') else '--,--,--,--,--,--,--,--'
177 | self.samprate=config.getfloat('Settings','samprate') if config.has_option(
178 | 'Settings','samprate') else 100.
179 | self.nstaC=config.getint('Settings','nstaC') if config.has_option(
180 | 'Settings','nstaC') else 5
181 | self.printsta=config.getint('Settings','printsta') if config.has_option(
182 | 'Settings','printsta') else 2
183 | self.server=config.get('Settings','server') if config.has_option(
184 | 'Settings','server') else 'IRIS'
185 | self.port=config.getint('Settings','port') if config.has_option(
186 | 'Settings','port') else 16017
187 | self.searchdir=config.get('Settings','searchdir') if config.has_option(
188 | 'Settings','searchdir') else './'
189 | self.filepattern=config.get('Settings','filepattern') if config.has_option(
190 | 'Settings','filepattern') else '*'
191 | self.nsec=config.getint('Settings','nsec') if config.has_option(
192 | 'Settings','nsec') else 3600
193 | self.trigalg=config.get('Settings','trigalg') if config.has_option(
194 | 'Settings','trigalg') else 'classicstalta'
195 | self.lwin=config.getfloat('Settings','lwin') if config.has_option(
196 | 'Settings','lwin') else 7.
197 | self.swin=config.getfloat('Settings','swin') if config.has_option(
198 | 'Settings','swin') else 0.8
199 | self.trigon=config.getfloat('Settings','trigon') if config.has_option(
200 | 'Settings','trigon') else 3.
201 | self.trigoff=config.getfloat('Settings','trigoff') if config.has_option(
202 | 'Settings','trigoff') else 2.
203 | self.offset=config.get('Settings','offset') if config.has_option(
204 | 'Settings','offset') else '0'
205 | self.kurtmax=config.getfloat('Settings','kurtmax') if config.has_option(
206 | 'Settings','kurtmax') else 80.
207 | self.kurtfmax=config.getfloat('Settings','kurtfmax') if config.has_option(
208 | 'Settings','kurtfmax') else 150.
209 | self.oratiomax=config.getfloat('Settings','oratiomax') if config.has_option(
210 | 'Settings','oratiomax') else 0.15
211 | self.kurtwin=config.getfloat('Settings','kurtwin') if config.has_option(
212 | 'Settings','kurtwin') else 5.
213 | self.winlen=config.getint('Settings','winlen') if config.has_option(
214 | 'Settings','winlen') else 1024
215 | self.fmin=config.getfloat('Settings','fmin') if config.has_option(
216 | 'Settings','fmin') else 1.
217 | self.fmax=config.getfloat('Settings','fmax') if config.has_option(
218 | 'Settings','fmax') else 10.
219 | self.filomin=config.getfloat('Settings','filomin') if config.has_option(
220 | 'Settings','filomin') else 1.
221 | self.filomax=config.getfloat('Settings','filomax') if config.has_option(
222 | 'Settings','filomax') else 2.5
223 | self.fiupmin=config.getfloat('Settings','fiupmin') if config.has_option(
224 | 'Settings','fiupmin') else 5.
225 | self.fiupmax=config.getfloat('Settings','fiupmax') if config.has_option(
226 | 'Settings','fiupmax') else 10.
227 | self.fispanlow=config.getfloat('Settings','fispanlow') if config.has_option(
228 | 'Settings','fispanlow') else -0.5
229 | self.fispanhigh=config.getfloat('Settings','fispanhigh') if config.has_option(
230 | 'Settings','fispanhigh') else 0.5
231 | self.telefi=config.getfloat('Settings','telefi') if config.has_option(
232 | 'Settings','telefi') else -1.
233 | self.teleok=config.getint('Settings','teleok') if config.has_option(
234 | 'Settings','teleok') else 1
235 | self.cmin=config.getfloat('Settings','cmin') if config.has_option(
236 | 'Settings','cmin') else 0.7
237 | self.ncor=config.getint('Settings','ncor') if config.has_option(
238 | 'Settings','ncor') else 4
239 | self.minorph=config.getfloat('Settings','minorph') if config.has_option(
240 | 'Settings','minorph') else 0.05
241 | self.maxorph=config.getfloat('Settings','maxorph') if config.has_option(
242 | 'Settings','maxorph') else 7.
243 | self.plotformat=config.get('Settings','plotformat') if config.has_option(
244 | 'Settings','plotformat') else 'eqrate,fi,occurrence+occurrencefi,longevity'
245 | self.minplot=config.getint('Settings','minplot') if config.has_option(
246 | 'Settings','minplot') else 5
247 | self.dybin=config.getfloat('Settings','dybin') if config.has_option(
248 | 'Settings','dybin') else 1.
249 | self.hrbin=config.getfloat('Settings','hrbin') if config.has_option(
250 | 'Settings','hrbin') else 1.
251 | # settings.cfg (hours) immediately converted to days
252 | self.occurbin=config.getfloat('Settings','occurbin')/24 if config.has_option(
253 | 'Settings','occurbin') else 1/24
254 | # settings.cfg (hours) immediately converted to days
255 | self.recbin=config.getfloat('Settings','recbin')/24 if config.has_option(
256 | 'Settings','recbin') else 1/24
257 | self.recplot=config.getfloat('Settings','recplot') if config.has_option(
258 | 'Settings','recplot') else 14.
259 | self.mminplot=config.getint('Settings','mminplot') if config.has_option(
260 | 'Settings','mminplot') else 0
261 | self.mhrbin=config.getfloat('Settings','mhrbin') if config.has_option(
262 | 'Settings','mhrbin') else 1.
263 | self.mrecbin=config.getfloat('Settings','mrecbin')/24 if config.has_option(
264 | 'Settings','mrecbin') else 1/24
265 | self.mrecplot=config.getfloat('Settings','mrecplot') if config.has_option(
266 | 'Settings','mrecplot') else 30. # Default to last month instead of 2 weeks
267 | self.fixedheight=config.getboolean('Settings','fixedheight') if config.has_option(
268 | 'Settings','fixedheight') else False
269 | self.printVerboseCat=config.getboolean('Settings','verbosecatalog') if config.has_option(
270 | 'Settings','verbosecatalog') else False
271 | self.amplims=config.get('Settings','amplims') if config.has_option(
272 | 'Settings','amplims') else 'global'
273 | self.anotfile=config.get('Settings','anotfile') if config.has_option(
274 | 'Settings','anotfile') else ''
275 | self.checkComCat=config.getboolean('Settings','checkComCat') if config.has_option(
276 | 'Settings','checkComCat') else False
277 | self.matchMax=config.getint('Settings','matchMax') if config.has_option(
278 | 'Settings','matchMax') else 0
279 | self.stalats=config.get('Settings','stalats') if config.has_option(
280 | 'Settings','stalats') else ('46.200210,46.209550,46.174280,46.193470,'
281 | '46.197170,46.237610,46.147060,46.243860')
282 | self.stalons=config.get('Settings','stalons') if config.has_option(
283 | 'Settings','stalons') else ('-122.190600,-122.188990,-122.180650,-122.236350,'
284 | '-122.151210,-122.223960,-122.152430,-122.137870')
285 | self.serr=config.getfloat('Settings','serr') if config.has_option(
286 | 'Settings','serr') else 5.
287 | self.locdeg=config.getfloat('Settings','locdeg') if config.has_option(
288 | 'Settings','locdeg') else 0.5
289 | self.regdeg=config.getfloat('Settings','regdeg') if config.has_option(
290 | 'Settings','regdeg') else 2.
291 | self.regmag=config.getfloat('Settings','regmag') if config.has_option(
292 | 'Settings','regmag') else 2.5
293 | self.telemag=config.getfloat('Settings','telemag') if config.has_option(
294 | 'Settings','telemag') else 4.5
295 |
296 | # Derived Settings
297 | self.ptrig=1.5*self.winlen/self.samprate
298 | self.atrig=3*self.winlen/self.samprate
299 | self.mintrig=0.75*self.winlen/self.samprate
300 | self.wshape = int((self.ptrig + self.atrig)*self.samprate) + 1
301 | self.maxdt = np.max(np.fromstring(self.offset, sep=','))
302 |
--------------------------------------------------------------------------------
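As a worked example of the derived settings at the end of __init__, the default winlen=1024 and samprate=100.0 yield:

    ptrig   = 1.5  * 1024/100.   # 15.36 s cut before each trigger
    atrig   = 3    * 1024/100.   # 30.72 s cut after each trigger
    mintrig = 0.75 * 1024/100.   # 7.68 s minimum spacing between triggers
    wshape  = int((15.36 + 30.72) * 100.) + 1   # 4609 samples stored per station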
/redpy/correlation.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import numpy as np
6 | import obspy.core.trace as trace
7 | import redpy.table
8 | import redpy.cluster
9 | import datetime
10 | import matplotlib
11 | from scipy.fftpack import fft, ifft
12 |
13 | def calcWindow(waveform, windowStart, opt, winlen=1):
14 |
15 | """
16 | Calculates the amplitude coefficient, FFT, and frequency index for a window of data.
17 |
18 | waveform: numpy array of waveform data
19 | windowStart: starting sample of window
20 | opt: Options object describing station/run parameters
21 | winlen: Fraction of window to use (optional)
22 |
23 | Returns windowCoeff, windowFFT, and windowFI
24 | """
25 |
26 | # Shift window left by 10% of winlen
27 | windowStart = windowStart - opt.winlen/10
28 | windowCoeff = []
29 | windowFFT = np.zeros(opt.winlen*opt.nsta,).astype(np.complex64)
30 | windowFI = []
31 |
32 | for n in range(opt.nsta):
33 | winstart = int(n*opt.wshape + windowStart)
34 | winend = int(n*opt.wshape + windowStart + opt.winlen*winlen)
35 | fftwin = np.reshape(fft(waveform[winstart:winend]),(opt.winlen*winlen,))
36 | if np.median(np.abs(waveform[winstart:winend]))==0:
37 | windowCoeff.append(0)
38 | windowFI.append(np.nan) #?
39 | else:
40 | windowCoeff.append(1/np.sqrt(sum(
41 | waveform[winstart:winend] * waveform[winstart:winend])))
42 | windowFI.append(np.log10(np.mean(np.abs(np.real(
43 | fftwin[int(opt.fiupmin*opt.winlen*winlen/opt.samprate):int(
44 | opt.fiupmax*opt.winlen*winlen/opt.samprate)])))/np.mean(np.abs(np.real(
45 | fftwin[int(opt.filomin*opt.winlen*winlen/opt.samprate):int(
46 | opt.filomax*opt.winlen*winlen/opt.samprate)])))))
47 |
48 | windowFFT[n*opt.winlen:(n+1)*opt.winlen] = fftwin
49 |
50 | return windowCoeff, windowFFT, windowFI
51 |
52 |
53 | def xcorr1x1(windowFFT1, windowFFT2, windowCoeff1, windowCoeff2, opt):
54 |
55 | """
56 | Calculates the cross-correlation coefficient and lag for two windows.
57 |
58 | windowFFT1: FFT of first window
59 | windowFFT2: FFT of second window
60 | windowCoeff1: amplitude coefficient of first window
61 | windowCoeff2: amplitude coefficient of second window
62 | Order matters for sign of lag, but not CCC.
63 |
64 | Returns maximum cross-correlation, optimal lag (in samples), and nth highest correlation
65 | """
66 |
67 | M = opt.winlen
68 |
69 | cor = []
70 | lag = []
71 | for n in range(opt.nsta):
72 | coeff = windowCoeff1[n] * windowCoeff2[n]
73 | cors = np.real(ifft(windowFFT1[n*M:(n+1)*M] * np.conj(
74 | windowFFT2[n*M:(n+1)*M]))) * coeff
75 |
76 | indx = np.argmax(cors)
77 | cor.append(cors[indx])
78 |
79 | if indx <= M/2:
80 | lag.append(indx)
81 | else:
82 | lag.append(indx-M)
83 |
84 | nthcor = np.sort(np.array(cor))[::-1][opt.ncor-1]
85 | maxcor = np.amax(cor)
86 |
87 | if nthcor >= opt.cmin:
88 | maxlag = np.median(np.array(lag)[np.argsort(cor)[::-1][0:opt.ncor]])
89 | else:
90 | maxlag = lag[np.argmax(cor)]
91 |
92 | return maxcor, maxlag, nthcor
93 |
94 |
95 | def xcorr1xtable(coeffi, ffti, subtable, opt):
96 |
97 | """
98 | Correlates a new event with all events in a subtable.
99 |
100 | coeffi: amplitude coefficient of new event
101 | ffti: FFT of new event
102 | subtable: a table of either repeaters or orphans to compare to the new event
103 | opt: Options object describing station/run parameters
104 |
105 | Returns correlation and lag arrays
106 |
107 | The 'subtable' can be a full table (the full orphan table) or a selection of
108 | rows (cluster centers from repeaters, or a full family)
109 |
110 | Contemplating figuring out how to run this in parallel...
111 | """
112 |
113 | cor = np.zeros((len(subtable),))
114 | lag = np.zeros((len(subtable),))
115 | nthcor = np.zeros((len(subtable),))
116 |
117 | j = -1
118 | for rj in subtable:
119 | j = j+1
120 | cor[j], lag[j], nthcor[j] = xcorr1x1(
121 | ffti, rj['windowFFT'], coeffi, rj['windowCoeff'], opt)
122 |
123 | return cor, lag, nthcor
124 |
125 |
126 | def compare2Family(rtable, ctable, ftable, rnumber, cnum, opt):
127 |
128 | """
129 | Correlates a known repeater with all events in a family except the core.
130 |
131 | rtable: Repeater table
132 |     ctable: Correlation matrix table, ftable: Families table
133 | rnumber: Row of repeater in rtable
134 | cnum: Cluster or family number
135 | opt: Options object describing station/run parameters
136 |
137 | Writes correlations to ctable
138 | """
139 |
140 | members = np.fromstring(ftable[cnum]['members'], dtype=int, sep=' ')
141 | core = ftable[cnum]['core']
142 |
143 | famtable = rtable[np.setdiff1d(members, core)]
144 | ids = famtable['id']
145 | rid = rtable[rnumber]['id']
146 |
147 | cor, lag, nthcor = xcorr1xtable(rtable[rnumber]['windowCoeff'],
148 | rtable[rnumber]['windowFFT'], famtable, opt)
149 |
150 | if len(np.where(nthcor>=opt.cmin)[0]) > 0:
151 | for j in range(len(cor)):
152 | if (nthcor[j] >= opt.cmin):
153 | redpy.table.appendCorrelation(ctable, rid, ids[j], cor[j], opt)
154 |
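A standalone sketch (not part of the file) of how family membership is stored and
parsed: the 'members' column is a space-separated string of rtable row indices, read
back with np.fromstring as in compare2Family above (the values are illustrative):

    import numpy as np

    members = np.fromstring('0 3 17 42', dtype=int, sep=' ')
    core = 17
    print(members)                        # [ 0  3 17 42]
    print(np.setdiff1d(members, core))    # [ 0  3 42]: the family minus its core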
155 |
156 | def compareDeleted(trigs, dtable, opt):
157 |
158 | """
159 | Compares trigger against deleted events
160 |
161 | trigs: Triggers to be checked
162 | dtable: Deleted table (manually removed from rtable)
163 | opt: Options object describing station/run parameters
164 |
165 | Returns trigs that do not match deleted events
166 | """
167 |
168 | for t in trigs:
169 |
170 | coeffi, ffti, fii = calcWindow(t.data, int(opt.ptrig*opt.samprate), opt)
171 | cor, lag, nthcor = xcorr1xtable(coeffi, ffti, dtable, opt)
172 |
173 |         if (cor >= opt.cmin - 0.05).any():
174 | trigs.remove(t)
175 |
176 | return trigs
177 |
178 |
179 | def compareGoodOrphans(rtable, otable, ctable, ftable, trig, id, coeffi, ffti, cor, lag,
180 | nthcor, opt):
181 |
182 | """
183 |     Finds the matches of the new event in the orphan table, appends them to the
184 |     repeater table, and then compares them to the cluster cores
185 |
186 | rtable: Repeater table
187 | otable: Orphan table
188 | ctable: Correlation matrix table
189 | trig: New trigger to compare
190 | id: Unique ID of new trigger
191 | coeffi: Scaling coefficient for trigger
192 | ffti: FFT of trigger
193 | cor: Correlation of trigger to orphans
194 |     lag: Lag of trigger to orphans
195 | opt: Options object describing station/run parameters
196 |
197 | """
198 |
199 | # Loop through potential matches
200 | written = 0
201 | while len(cor[cor >= opt.cmin - 0.05]) > 0:
202 |
203 | # If not written to rtable yet, realign new event
204 | if written == 0:
205 | lagmax = lag[np.argmax(cor)]
206 | coeffi2, ffti2, fii2 = calcWindow(trig.data, int(
207 | opt.ptrig*opt.samprate + lagmax), opt)
208 | coeffj2 = otable[np.argmax(cor)]['windowCoeff']
209 | fftj2 = otable[np.argmax(cor)]['windowFFT']
210 | fij2 = otable[np.argmax(cor)]['FI']
211 | # If written already, realign older orphan to new event
212 | else:
213 | coeffj2, fftj2, fij2 = calcWindow(otable[np.argmax(cor)]['waveform'],
214 | int(opt.ptrig*opt.samprate + lagmax - lag[np.argmax(cor)]), opt)
215 |
216 | cor2, lag2, nthcor2 = xcorr1x1(ffti2, fftj2, coeffi2, coeffj2, opt)
217 |
218 | # If actually matches...
219 | if nthcor2 >= opt.cmin:
220 | # Move both the orphans to the repeater table
221 | if written == 0:
222 | redpy.table.populateRepeater(rtable, ftable, id, trig, opt,
223 | int(opt.ptrig*opt.samprate + lagmax))
224 | redpy.table.moveOrphan(rtable, otable, ftable, np.argmax(cor), opt)
225 | redpy.table.appendCorrelation(ctable, id, rtable.cols.id[-1], cor2, opt)
226 | written = 2
227 | # Update the table to reflect the new window, then move it
228 | else:
229 | otable.cols.windowFFT[np.argmax(cor)] = fftj2
230 | otable.cols.windowCoeff[np.argmax(cor)] = coeffj2
231 | otable.cols.FI[np.argmax(cor)] = fij2
232 | otable.cols.windowStart[np.argmax(cor)] = int(opt.ptrig*opt.samprate +
233 | lagmax - lag[np.argmax(cor)])
234 | redpy.table.moveOrphan(rtable, otable, ftable, np.argmax(cor), opt)
235 | redpy.table.appendCorrelation(ctable, id, rtable.cols.id[-1], cor2, opt)
236 | written = written+1
237 |
238 | lag = np.delete(lag, np.argmax(cor))
239 | nthcor = np.delete(nthcor, np.argmax(cor))
240 | cor = np.delete(cor, np.argmax(cor))
241 |
242 | # If there are no actual matches in the orphans, check new event with cores
243 | if written == 0:
244 | if len(rtable) > 0:
245 | compareSingleOrphan2Cores(rtable, otable, ctable, ftable, trig, id, coeffi,
246 | ffti, opt)
247 | else:
248 | redpy.table.populateOrphan(otable, id, trig, opt)
249 | # If there is a match, check new event and its matches with cores
250 | else:
251 | compareMultipleOrphans2Cores(rtable, ctable, ftable, written, opt)
252 |
253 |
254 | def compareMultipleOrphans2Cores(rtable, ctable, ftable, written, opt):
255 |
256 | """
257 | Compares multiple orphans that have already been written to the end of the repeater
258 | table to the other repeaters
259 |
260 | rtable: Repeater table
261 | ctable: Correlation matrix table
262 | written: Number of new repeaters written to rtable
263 | opt: Options object describing station/run parameters
264 |
265 | Note: Currently only runs clustering if there are no matches to cores, and this
266 | is the ONLY case where full clustering is run
267 | """
268 |
269 | wfam = []
270 | wlag = []
271 |
272 | found = 0
273 | if len(ftable) >= 1:
274 | cores = rtable[ftable.cols.core[:]]
275 | fftjs = cores['windowFFT']
276 | coeffjs = cores['windowCoeff']
277 | ids = cores['id']
278 |         coresNum = np.arange(ftable.attrs.nClust)
279 |
280 | coeffi = rtable.cols.windowCoeff[-written]
281 | ffti = rtable.cols.windowFFT[-written]
282 | cor, lag, nthcor = xcorr1xtable(coeffi, ffti, cores, opt)
283 |
284 | # Loop through families that match
285 | while len(cor[cor >= opt.cmin - 0.05]) > 0:
286 |
287 | if found == 0:
288 | lagmax2 = lag[np.argmax(cor)]
289 | coeffi2, ffti2, fii2 = calcWindow(rtable[-written]['waveform'],
290 | int(rtable[-written]['windowStart'] + lagmax2), opt)
291 |
292 | cor2, lag2, nthcor2 = xcorr1x1(ffti2, fftjs[np.argmax(cor)], coeffi2,
293 | coeffjs[np.argmax(cor)], opt)
294 |
295 | if nthcor2 >= opt.cmin:
296 | if found == 0:
297 | found = 1
298 | wlag.append(0)
299 | # Realign all new events in the repeater catalog to the matched family
300 | for i in range(-written,0):
301 | rtable.cols.windowCoeff[i], rtable.cols.windowFFT[i], rtable.cols.FI[i] = calcWindow(
302 | rtable.cols.waveform[i], int(rtable.cols.windowStart[i] +
303 | lagmax2), opt)
304 | rtable.cols.windowStart[i] = int(rtable.cols.windowStart[i] + lagmax2)
305 | rtable.flush()
306 | ftable.cols.members[coresNum[np.argmax(cor)]] = ftable.cols.members[
307 | coresNum[np.argmax(cor)]].decode('utf-8')+' {}'.format(
308 | len(rtable)+i)
309 | ftable.cols.printme[coresNum[np.argmax(cor)]] = 1
310 | ftable.flush()
311 | else:
312 | wlag.append(lag2)
313 |
314 | wfam.append(coresNum[np.argmax(cor)])
315 |
316 | # Compare to full family, write to correlation table
317 | for i in range(-written,0):
318 | cor3, lag3, nthcor3 = xcorr1x1(rtable[i]['windowFFT'],
319 | fftjs[np.argmax(cor)], rtable[i]['windowCoeff'],
320 | coeffjs[np.argmax(cor)], opt)
321 | if nthcor3 >= opt.cmin:
322 | redpy.table.appendCorrelation(ctable, rtable[i]['id'],
323 | ids[np.argmax(cor)], cor3, opt)
324 | compare2Family(rtable, ctable, ftable, i,
325 | coresNum[np.argmax(cor)], opt)
326 |
327 | else:
328 | members = np.fromstring(ftable[coresNum[np.argmax(cor)]]['members'],
329 | dtype=int, sep=' ')
330 | famtable = rtable[members]
331 | corx, lagx, nthcorx = xcorr1xtable(coeffi2, ffti2, famtable, opt)
332 | if max(nthcorx) >= opt.cmin:
333 | if found == 0:
334 | found = 1
335 | wlag.append(0)
336 | # Realign all new events in the repeater catalog to the matched family
337 | for i in range(-written,0):
338 | rtable.cols.windowCoeff[i], rtable.cols.windowFFT[i], rtable.cols.FI[i] = calcWindow(
339 | rtable.cols.waveform[i], int(rtable.cols.windowStart[i] +
340 | lagmax2 + lagx[np.argmax(corx)]), opt)
341 | rtable.cols.windowStart[i] = int(
342 | rtable.cols.windowStart[i] + lagmax2 + lagx[np.argmax(corx)])
343 | rtable.flush()
344 | ftable.cols.members[coresNum[np.argmax(cor)]] = ftable.cols.members[
345 | coresNum[np.argmax(cor)]].decode('utf-8')+' {}'.format(
346 | len(rtable)+i)
347 | ftable.cols.printme[coresNum[np.argmax(cor)]] = 1
348 | ftable.flush()
349 | found = 1
350 | else:
351 | wlag.append(lagx[np.argmax(corx)])
352 |
353 | wfam.append(coresNum[np.argmax(cor)])
354 |
355 | for x in range(len(corx)):
356 | if nthcorx[x] >= opt.cmin:
357 | redpy.table.appendCorrelation(ctable, rtable[-written]['id'],
358 | famtable[x]['id'], corx[x], opt)
359 |
360 | coresNum = np.delete(coresNum, np.argmax(cor))
361 | fftjs = np.delete(fftjs, np.argmax(cor), axis=0)
362 | coeffjs = np.delete(coeffjs, np.argmax(cor), axis=0)
363 | ids = np.delete(ids, np.argmax(cor))
364 | lag = np.delete(lag, np.argmax(cor))
365 | cor = np.delete(cor, np.argmax(cor))
366 |
367 | # Make sure to save correlation of new events with each other
368 | for i in range(-written+1,0):
369 | cor4, lag4, nthcor4 = xcorr1x1(rtable[i]['windowFFT'],
370 | rtable[-written]['windowFFT'], rtable[i]['windowCoeff'],
371 | rtable[-written]['windowCoeff'], opt)
372 | if nthcor4 >= opt.cmin:
373 | redpy.table.appendCorrelation(ctable, rtable[-written]['id'], rtable[i]['id'],
374 | cor4, opt)
375 |
376 | if found == 0:
377 | members = np.arange(len(rtable)-written,len(rtable)).astype(int)
378 | core = len(rtable)-written
379 | redpy.table.createNewFamily(rtable, ftable, members, core, opt)
380 | else:
381 | if len(wfam) == 1:
382 | redpy.cluster.runFamOPTICS(rtable, ctable, ftable, wfam[0], opt)
383 | else:
384 | redpy.table.mergeFamilies(rtable, ctable, ftable, wfam, wlag, opt)
385 |
386 |
387 | def compareSingleOrphan2Cores(rtable, otable, ctable, ftable, trig, id, coeffi, ffti, opt):
388 |
389 | """
390 | Compares a single orphan to the cluster cores, adds the orphan to the best cluster
391 | if it matches, else appends to the orphan table
392 |
393 | rtable: Repeater table
394 | otable: Orphan table
395 | ctable: Correlation matrix table
396 | trig: New trigger to compare
397 | id: Unique ID of new trigger
398 | coeffi: Scaling coefficient for trigger
399 | ffti: FFT of trigger
400 | opt: Options object describing station/run parameters
401 |
402 | """
403 |
404 | cores = rtable[ftable.cols.core[:]]
405 | fftjs = cores['windowFFT']
406 | coeffjs = cores['windowCoeff']
407 | ids = cores['id']
408 | coresNum = np.arange(ftable.attrs.nClust)
409 |
410 | cor, lag, nthcor = xcorr1xtable(coeffi, ffti, cores, opt)
411 | wfam = []
412 | wlag = []
413 |
414 | written = 0
415 | # Loop through potential matching families
416 | while len(cor[cor >= opt.cmin - 0.05]) > 0:
417 |
418 | if written == 0:
419 | lagmax = lag[np.argmax(cor)]
420 | coeffi2, ffti2, fii2 = calcWindow(trig.data, int(opt.ptrig*opt.samprate + lagmax),
421 | opt)
422 |
423 | cor2, lag2, nthcor2 = xcorr1x1(ffti2, fftjs[np.argmax(cor)], coeffi2,
424 | coeffjs[np.argmax(cor)], opt)
425 |
426 | # If it definitely matches a family...
427 | if nthcor2 >= opt.cmin:
428 | if written == 0:
429 | # Move the orphan to the repeater table
430 | redpy.table.populateRepeater(rtable, ftable, id, trig, opt,
431 | int(opt.ptrig*opt.samprate + lagmax))
432 | ftable.cols.members[coresNum[np.argmax(cor)]] = ftable.cols.members[
433 | coresNum[np.argmax(cor)]].decode('utf-8')+' {}'.format(
434 | len(rtable)-1)
435 | ftable.flush()
436 | wlag.append(0)
437 | written = 1
438 | else:
439 | wlag.append(lag2)
440 |
441 | # Append to family list that needs to be ordered/merged
442 | wfam.append(coresNum[np.argmax(cor)])
443 |
444 | # Correlate with other members of the family
445 | redpy.table.appendCorrelation(ctable, id, ids[np.argmax(cor)], cor2, opt)
446 | compare2Family(rtable, ctable, ftable, -1, coresNum[np.argmax(cor)],
447 | opt)
448 |
449 | # Check to make sure...
450 | else:
451 | members = np.fromstring(ftable[coresNum[np.argmax(cor)]]['members'],
452 | dtype=int, sep=' ')
453 | famtable = rtable[members]
454 | idf = famtable['id']
455 | corx, lagx, nthcorx = xcorr1xtable(coeffi2, ffti2, famtable, opt)
456 | if max(nthcorx) >= opt.cmin:
457 | if written == 0:
458 | # Move the orphan to the repeater table
459 | redpy.table.populateRepeater(rtable, ftable, id, trig, opt,
460 | int(opt.ptrig*opt.samprate + lagmax + lagx[np.argmax(corx)]))
461 | ftable.cols.members[coresNum[np.argmax(cor)]] = ftable.cols.members[
462 | coresNum[np.argmax(cor)]].decode('utf-8')+' {}'.format(
463 | len(rtable)-1)
464 | ftable.flush()
465 | wlag.append(0)
466 | written = 1
467 | else:
468 | wlag.append(lagx[np.argmax(corx)])
469 | wfam.append(coresNum[np.argmax(cor)])
470 | for x in range(len(corx)):
471 | if nthcorx[x] >= opt.cmin:
472 | redpy.table.appendCorrelation(ctable, id, idf[x], corx[x], opt)
473 |
474 | coresNum = np.delete(coresNum, np.argmax(cor))
475 | fftjs = np.delete(fftjs, np.argmax(cor), axis=0)
476 | coeffjs = np.delete(coeffjs, np.argmax(cor), axis=0)
477 | ids = np.delete(ids, np.argmax(cor))
478 | lag = np.delete(lag, np.argmax(cor))
479 | cor = np.delete(cor, np.argmax(cor))
480 |
481 | # If doesn't match anything, append as orphan
482 | if written == 0:
483 | redpy.table.populateOrphan(otable, id, trig, opt)
484 | else:
485 | if len(wfam) == 1:
486 | redpy.cluster.runFamOPTICS(rtable, ctable, ftable, wfam[0], opt)
487 | else:
488 | redpy.table.mergeFamilies(rtable, ctable, ftable, wfam, wlag, opt)
489 |
490 |
491 | def runCorrelation(rtable, otable, ctable, ftable, ttimes, trig, id, opt):
492 |
493 | """
494 | Adds a new trigger to the correct table, runs the correlations and clustering
495 |
496 | rtable: Repeater table
497 | otable: Orphan table
498 | ctable: Correlation matrix table
499 | ftable: Families table
500 | ttimes: Trigger times
501 | trig: New trigger to compare
502 | id: Unique ID of new trigger
503 | opt: Options object describing station/run parameters
504 |
505 | This is the top-level logic for processing; detailed logic is within the two compare
506 | functions.
507 | """
508 |
509 | # Check to ensure this isn't a duplicate in either rtable or otable
510 | try:
511 | stime = matplotlib.dates.date2num(datetime.datetime.strptime(
512 | trig.stats.starttime.isoformat(), '%Y-%m-%dT%H:%M:%S.%f'))
513 | except ValueError:
514 | stime = matplotlib.dates.date2num(datetime.datetime.strptime(
515 | trig.stats.starttime.isoformat(), '%Y-%m-%dT%H:%M:%S'))
516 |
517 | if not len(np.intersect1d(np.where(ttimes > stime - opt.mintrig/86400), np.where(
518 | ttimes < stime + opt.mintrig/86400))):
519 |
520 | coeffi, ffti, fii = calcWindow(trig.data, int(opt.ptrig*opt.samprate), opt)
521 |
522 |         # Correlate the new event with all of the orphans
523 | cor, lag, nthcor = xcorr1xtable(coeffi, ffti, otable, opt)
524 |
525 | try:
526 | # If there's a match, run the most complex function
527 | if max(cor) >= opt.cmin - 0.05:
528 | compareGoodOrphans(rtable, otable, ctable, ftable, trig, id, coeffi, ffti,
529 | cor, lag, nthcor, opt)
530 | else:
531 | # Compare that orphan to the cores in the repeater table
532 | if len(rtable) > 0:
533 | compareSingleOrphan2Cores(rtable, otable, ctable, ftable, trig, id,
534 | coeffi, ffti, opt)
535 | # Populate as an orphan if there are no repeaters yet
536 | else:
537 | redpy.table.populateOrphan(otable, id, trig, opt)
538 | except ValueError:
539 | print('Could not properly correlate, moving on...')
540 | redpy.table.populateOrphan(otable, id, trig, opt)
541 |
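
A standalone sketch (not part of the file) of the duplicate-trigger check at the top of
runCorrelation: trigger times are matplotlib datenums in days, so a minimum separation
given in seconds is divided by 86400. The values below are illustrative:

    import numpy as np
    import matplotlib.dates, datetime

    mintrig = 10.0                                     # seconds between triggers
    ttimes = np.array([matplotlib.dates.date2num(
        datetime.datetime(2004, 10, 1, 19, 2, s)) for s in (0, 30)])
    stime = matplotlib.dates.date2num(datetime.datetime(2004, 10, 1, 19, 2, 5))
    dup = len(np.intersect1d(np.where(ttimes > stime - mintrig/86400),
                             np.where(ttimes < stime + mintrig/86400))) > 0
    print(dup)  # True: within 10 s of the 19:02:00 trigger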
--------------------------------------------------------------------------------
/redpy/optics.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import numpy as np
6 | 
7 |
8 | # Based on https://github.com/espg/OPTICS
9 |
10 | class setOfObjects(object):
11 |
12 | """
13 | Build data structure with processing index from given data
14 | in preparation for OPTICS Algorithm
15 |
16 | distance_pairs: Distance matrix (array [n_samples, n_samples])
17 |
18 | """
19 |
20 | def __init__(self, distance_pairs):
21 |
22 | self.data = distance_pairs
23 | self._n = len(self.data)
24 |         self._processed = np.zeros((self._n, 1), dtype=bool)
25 |         self._reachability = np.ones(self._n) * np.inf
26 |         self._core_dist = np.ones(self._n) * np.nan
27 |         self._index = np.arange(self._n)
28 |         self._nneighbors = np.ones(self._n, dtype=int)*self._n
29 |         self._cluster_id = -np.ones(self._n, dtype=int)
30 |         self._is_core = np.ones(self._n, dtype=bool)
31 | self._ordered_list = []
32 |
33 |
34 | def prep_optics(SetofObjects, epsilon):
35 |
36 | """
37 | Prep data set for main OPTICS loop
38 |
39 | SetofObjects: Instantiated instance of 'setOfObjects' class
40 | epsilon: Determines maximum object size that can be extracted. Smaller epsilons
41 | reduce run time.
42 |
43 | Returns modified setOfObjects tree structure
44 |
45 | """
46 |
47 | for j in SetofObjects._index:
48 | # Find smallest nonzero distance
49 | SetofObjects._core_dist[j] = np.sort(SetofObjects.data[j,:])[1]
50 |
51 |
52 | def build_optics(SetOfObjects, epsilon):
53 |
54 | """
55 | Builds OPTICS ordered list of clustering structure
56 |
57 | SetofObjects: Instantiated and prepped instance of 'setOfObjects' class
58 | epsilon: Determines maximum object size that can be extracted. Smaller epsilons
59 | reduce run time.
60 |
61 | """
62 |
63 | for point in SetOfObjects._index:
64 | if not SetOfObjects._processed[point]:
65 | expandClusterOrder(SetOfObjects, point, epsilon)
66 |
67 |
68 | def expandClusterOrder(SetOfObjects, point, epsilon):
69 |
70 | """
71 | Expands OPTICS ordered list of clustering structure
72 |
73 | SetofObjects: Instantiated and prepped instance of 'setOfObjects' class
74 | epsilon: Determines maximum object size that can be extracted. Smaller epsilons
75 | reduce run time.
76 |
77 | """
78 |
79 | if SetOfObjects._core_dist[point] <= epsilon:
80 | while not SetOfObjects._processed[point]:
81 | SetOfObjects._processed[point] = True
82 | SetOfObjects._ordered_list.append(point)
83 | point = set_reach_dist(SetOfObjects, point, epsilon)
84 | else:
85 | SetOfObjects._processed[point] = True
86 |
87 |
88 | def set_reach_dist(SetOfObjects, point_index, epsilon):
89 |
90 | """
91 | Sets reachability distance and ordering. This function is the primary workhorse of
92 | the OPTICS algorithm.
93 |
94 | SetofObjects: Instantiated and prepped instance of 'setOfObjects' class
95 | epsilon: Determines maximum object size that can be extracted. Smaller epsilons
96 | reduce run time. (float)
97 |
98 | """
99 |
100 | row = [SetOfObjects.data[point_index,:]]
101 | indices = np.argsort(row)
102 | distances = np.sort(row)
103 |
104 |     if np.iterable(distances):
105 |
106 | unprocessed = indices[(SetOfObjects._processed[indices] < 1)[0].T]
107 |         rdistances = np.maximum(distances[(SetOfObjects._processed[indices] < 1)[0].T],
108 |             SetOfObjects._core_dist[point_index])
109 |         SetOfObjects._reachability[unprocessed] = np.minimum(
110 |             SetOfObjects._reachability[unprocessed], rdistances)
111 |
112 | if unprocessed.size > 0:
113 | return unprocessed[np.argsort(np.array(SetOfObjects._reachability[
114 | unprocessed]))[0]]
115 | else:
116 | return point_index
117 | else:
118 | return point_index
119 |
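A standalone sketch (not part of the file) driving the functions above with a toy
distance matrix, assuming they are in scope (e.g., from redpy.optics import *). Two
tight pairs of points, far from each other:

    import numpy as np

    D = np.array([[0.0 , 0.1 , 0.9 , 0.8 ],
                  [0.1 , 0.0 , 0.85, 0.9 ],
                  [0.9 , 0.85, 0.0 , 0.05],
                  [0.8 , 0.9 , 0.05, 0.0 ]])
    obj = setOfObjects(D)
    prep_optics(obj, epsilon=1.0)
    build_optics(obj, epsilon=1.0)
    print(obj._ordered_list)    # processing order: [0, 1, 3, 2]
    print(obj._reachability)    # [inf, 0.1, 0.05, 0.8]: small within pairs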
--------------------------------------------------------------------------------
/redpy/printing.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import numpy as np
6 | import matplotlib.dates
7 | from obspy import UTCDateTime
8 |
9 | def printCatalog(rtable, ftable, opt):
10 | """
11 | Prints flat catalog to text file
12 |
13 | rtable: Repeater table
14 | ftable: Families table
15 | opt: Options object describing station/run parameters
16 |
17 | Note: Time in text file corresponds to current trigger time by alignment
18 | """
19 |
20 | with open('{}{}/catalog.txt'.format(opt.outputPath, opt.groupName), 'w') as f:
21 |
22 | startTimes = rtable.cols.startTime[:]
23 | windowStarts = rtable.cols.windowStart[:]
24 |
25 | for cnum in range(ftable.attrs.nClust):
26 | fam = np.fromstring(ftable[cnum]['members'], dtype=int, sep=' ')
27 | for i in np.argsort(startTimes[fam]):
28 | f.write("{0} {1}\n".format(cnum,(UTCDateTime(startTimes[fam][i]) +
29 | windowStarts[fam][i]/opt.samprate).isoformat()))
30 |
31 |
32 | def printTriggerCatalog(ttable, opt):
33 | """
34 | Prints flat catalog of all triggers to text file
35 |
36 | ttable: Triggers table
37 | opt: Options object describing station/run parameters
38 |
39 | Note: Time in text file corresponds to original STA/LTA trigger time
40 | """
41 |
42 | with open('{}{}/triggers.txt'.format(opt.outputPath, opt.groupName), 'w') as f:
43 |
44 | startTimes = ttable.cols.startTimeMPL[:]
45 |
46 | for i in np.argsort(startTimes):
47 | f.write("{0}\n".format((UTCDateTime(matplotlib.dates.num2date(
48 | startTimes[i]))+opt.ptrig).isoformat()))
49 |
50 |
51 | def printOrphanCatalog(otable, opt):
52 | """
53 | Prints flat catalog of current orphans to text file
54 |
55 | otable: Orphans table
56 | opt: Options object describing station/run parameters
57 |
58 | Note: Time in text file corresponds to original STA/LTA trigger time
59 | """
60 |
61 | with open('{}{}/orphancatalog.txt'.format(opt.outputPath, opt.groupName), 'w') as f:
62 |
63 | startTimes = otable.cols.startTime[:]
64 |
65 | for i in np.argsort(startTimes):
66 | f.write("{0}\n".format((UTCDateTime(startTimes[i])+opt.ptrig).isoformat()))
67 |
68 |
69 | def printJunk(jtable, opt):
70 | """
71 | Prints flat catalog of contents of junk table to text file for debugging
72 |
73 | jtable: Junk table
74 | opt: Options object describing station/run parameters
75 |
76 | Note: Time in text file corresponds to original STA/LTA trigger time
77 | """
78 |
79 | with open('{}{}/junk.txt'.format(opt.outputPath, opt.groupName), 'w') as f:
80 |
81 | startTimes = jtable.cols.startTime[:]
82 | jtype = jtable.cols.isjunk[:]
83 |
84 | for i in np.argsort(startTimes):
85 | f.write("{0} - {1}\n".format((
86 | UTCDateTime(startTimes[i])+opt.ptrig).isoformat(),jtype[i]))
87 |
88 |
89 | def printCoresCatalog(rtable, ftable, opt):
90 | """
91 | Prints flat catalog of only core events to text file
92 |
93 | rtable: Repeater table
94 | ftable: Families table
95 | opt: Options object describing station/run parameters
96 |
97 | Note: Time in text file corresponds to current trigger time by alignment
98 | """
99 |
100 | with open('{}{}/cores.txt'.format(opt.outputPath, opt.groupName), 'w') as f:
101 |
102 | startTimes = rtable.cols.startTime[:]
103 | windowStarts = rtable.cols.windowStart[:]
104 |
105 | for cnum in range(ftable.attrs.nClust):
106 | fam = np.fromstring(ftable[cnum]['members'], dtype=int, sep=' ')
107 | core = ftable[cnum]['core']
108 | f.write("{0} {1}\n".format(cnum,(UTCDateTime(startTimes[core]) +
109 | windowStarts[core]/opt.samprate).isoformat()))
110 |
111 |
112 | def printEventsperDay(rtable, ftable, opt):
113 | """
114 |     Prints daily counts of each family in a tabulated text file
115 |
116 | rtable: Repeater table
117 | ftable: Families table
118 | opt: Options object describing station/run parameters
119 |
120 |     Each column (with the exception of the first and last) corresponds to an individual
121 |     family; the first column is date and the last column is the total across all families.
122 | """
123 |
124 | with open('{}{}/dailycounts.txt'.format(opt.outputPath, opt.groupName), 'w') as f:
125 |
126 | startTimes = rtable.cols.startTimeMPL[:]
127 | firstDay = np.floor(np.min(startTimes)).astype(int)
128 | lastDay = np.ceil(np.max(startTimes)).astype(int)
129 | hists = np.zeros((ftable.attrs.nClust,lastDay-firstDay))
130 |
131 | # Calculate histograms
132 | for cnum in range(ftable.attrs.nClust):
133 | fam = np.fromstring(ftable[cnum]['members'], dtype=int, sep=' ')
134 | hists[cnum,:], edges = np.histogram(startTimes[fam], bins=np.arange(
135 | firstDay,lastDay+1,1))
136 |
137 | # Header
138 | f.write(" Date\t")
139 | for cnum in range(ftable.attrs.nClust):
140 | f.write("{}\t".format(cnum))
141 | f.write("Total\n")
142 |
143 | # Write daily counts
144 | for day in range(firstDay,lastDay):
145 | f.write("{}\t".format(matplotlib.dates.num2date(day).strftime('%Y/%m/%d')))
146 | for cnum in range(ftable.attrs.nClust):
147 | f.write("{}\t".format(hists[cnum,day-firstDay].astype(int)))
148 | f.write("{}\n".format(np.sum(hists[:,day-firstDay].astype(int))))
149 |
150 |
151 | def printVerboseCatalog(rtable, ftable, ctable, opt):
152 | """
153 | Prints flat catalog to text file with additional columns
154 |
155 | rtable: Repeater table
156 | ftable: Families table
157 | ctable: Correlation table
158 | opt: Options object describing station/run parameters
159 |
160 | Columns correspond to cluster number, event time, frequency index, amplitude, time
161 | since last event in hours, correlation coefficient with respect to the best
162 | correlated event, and correlation coefficient with respect to the core event.
163 | """
164 |
165 | with open('{}{}/catalog.txt'.format(opt.outputPath, opt.groupName), 'w') as f:
166 |
167 | startTimes = rtable.cols.startTime[:]
168 | startTimeMPL = rtable.cols.startTimeMPL[:]
169 | windowStarts = rtable.cols.windowStart[:]
170 | windowAmps = rtable.cols.windowAmp[:]
171 | ids = rtable.cols.id[:]
172 | id1 = ctable.cols.id1[:]
173 | id2 = ctable.cols.id2[:]
174 | ccc = ctable.cols.ccc[:]
175 | fi = np.nanmean(rtable.cols.FI[:], axis=1)
176 |
177 | f.write("cnum\tevTime \tfi\txcormax\txcorcore\tdt(hr)\tamps\n")
178 | for cnum in range(ftable.attrs.nClust):
179 | fam = np.fromstring(ftable[cnum]['members'], dtype=int, sep=' ')
180 |
181 | catalogind = np.argsort(startTimeMPL[fam])
182 | catalog = startTimeMPL[fam][catalogind]
183 | spacing = np.diff(catalog)*24
184 | idf = ids[fam]
185 | ix = np.where(np.in1d(id2,idf))
186 | C = np.eye(len(idf))
187 | r1 = [np.where(idf==xx)[0][0] for xx in id1[ix]]
188 | r2 = [np.where(idf==xx)[0][0] for xx in id2[ix]]
189 | C[r1,r2] = ccc[ix]
190 | C[r2,r1] = ccc[ix]
191 | xcorrmax = C[np.argmax(np.sum(C,0)),:]
192 | core = ftable[cnum]['core']
193 | xcorrcore = C[np.where(fam==core)[0][0],:]
194 |
195 | j = -1
196 | for i in catalogind:
197 | evTime = (UTCDateTime(startTimes[fam][i]) +
198 | windowStarts[fam][i]/opt.samprate)
199 | amp = windowAmps[fam[i],:]
200 | if j == -1:
201 | dt = np.nan
202 | else:
203 | dt = spacing[j]
204 | j += 1
205 |
206 | f.write("{0}\t{1}\t{2: 4.3f}\t{4:3.2f}\t{5:3.2f}\t{3:12.6f}\t[".format(
207 | cnum,evTime.isoformat(),fi[fam][i],dt,xcorrmax[i],xcorrcore[i]))
208 | for a in amp:
209 | f.write(" {:10.2f} ".format(a))
210 | f.write("]\n")
211 |
212 |
213 | def printSwarmCatalog(rtable, ftable, ttable, opt):
214 |
215 | """
216 | Writes a .csv file for use in annotating repeating events in Swarm v2.8.5+
217 |
218 | rtable: Repeater table
219 | ftable: Families table
220 | opt: Options object describing station/run parameters
221 |
222 | """
223 |
224 | nets = opt.network.split(',')
225 | stas = opt.station.split(',')
226 | locs = opt.location.split(',')
227 | chas = opt.channel.split(',')
228 |
229 | with open('{}{}/swarm.csv'.format(opt.outputPath, opt.groupName), 'w') as f:
230 |
231 | startTimes = rtable.cols.startTime[:]
232 | windowStarts = rtable.cols.windowStart[:]
233 |
234 | for cnum in range(ftable.attrs.nClust):
235 | fam = np.fromstring(ftable[cnum]['members'], dtype=int, sep=' ')
236 | for i in np.argsort(startTimes[fam]):
237 | # Format for Swarm is 'Date Time, STA CHA NET LOC, label'
238 | # The SCNL defaults to whichever station was chosen for the preview,
239 | # which can be changed by a global search/replace in a text editor.
240 | # The label name is the same as the folder name (groupName) followed by
241 | # the family number. Highlighting families of interest in a different
242 | # color can be done by editing the EventClassifications.config file in
243 | # the Swarm folder, and adding a line for each cluster of interest
244 | # followed by a hex code for color, such as:
245 | # default1, #ffff00
246 | # to highlight family 1 from the default run in yellow compared to other
247 | # repeaters in red/orange.
248 | f.write("{}, {} {} {} {}, {}{}\n".format((UTCDateTime(startTimes[fam][i])+
249 | windowStarts[fam][i]/opt.samprate).isoformat(sep=' '),
250 | stas[opt.printsta],chas[opt.printsta],nets[opt.printsta],
251 | locs[opt.printsta],opt.groupName,cnum))
252 |
253 | with open('{}{}/triggerswarm.csv'.format(opt.outputPath, opt.groupName), 'w') as f:
254 |
255 | startTimes = ttable.cols.startTimeMPL[:]
256 |
257 | for i in np.argsort(startTimes):
258 | f.write("{}, {} {} {} {}, trigger\n".format((UTCDateTime(
259 | matplotlib.dates.num2date(startTimes[i]))+opt.ptrig).isoformat(sep=' '),
260 | stas[opt.printsta],chas[opt.printsta],nets[opt.printsta],
261 | locs[opt.printsta]))
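
A standalone sketch (not part of the file) for reading the flat catalog written by
printCatalog above; the path assumes outputPath '' and groupName 'default', purely for
illustration:

    import numpy as np
    from obspy import UTCDateTime

    with open('default/catalog.txt') as f:
        rows = [line.split() for line in f]
    cnums = np.array([int(r[0]) for r in rows])
    times = [UTCDateTime(r[1]) for r in rows]
    print('{} families, {} events'.format(len(np.unique(cnums)), len(times)))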
--------------------------------------------------------------------------------
/redpy/trigger.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | from obspy import UTCDateTime
6 | import obspy
7 | from obspy.clients.fdsn import Client
8 | from obspy.clients.earthworm import Client as EWClient
9 | from obspy.clients.seedlink import Client as SeedLinkClient
10 | from obspy.core.trace import Trace
11 | from obspy.core.stream import Stream
12 | from obspy.signal.trigger import coincidence_trigger
13 | import numpy as np
14 | from scipy import stats
15 | from scipy.fftpack import fft
16 | import glob, os, itertools
17 |
18 | import warnings
19 | warnings.filterwarnings("ignore")
20 |
21 | def getData(tstart, tend, opt):
22 |
23 | """
24 |     Download data from files in a folder, from IRIS, or an Earthworm waveserver
25 |
26 | A note on SAC/miniSEED files: as this makes no assumptions about the naming scheme of
27 | your data files, please ensure that your headers contain the correct SCNL information!
28 |
29 | tstart: UTCDateTime of beginning of period of interest
30 | tend: UTCDateTime of end of period of interest
31 | opt: Options object describing station/run parameters
32 |
33 | Returns ObsPy stream objects, one for cutting and the other for triggering
34 | """
35 |
36 | nets = opt.network.split(',')
37 | stas = opt.station.split(',')
38 | locs = opt.location.split(',')
39 | chas = opt.channel.split(',')
40 |
41 | st = Stream()
42 |
43 | if opt.server == 'file':
44 |
45 |         # Generate list of files
46 |         flist = list(itertools.chain.from_iterable(glob.iglob(os.path.join(
47 |             root,opt.filepattern)) for root, dirs, files in os.walk(opt.searchdir)))
48 | 
49 |
50 | # Determine which subset of files to load based on start and end times and
51 | # station name; we'll fully deal with stations below
52 | flist_sub = []
53 | for f in flist:
54 | # Load header only
55 | stmp = obspy.read(f, headonly=True)
56 | # Check if station is contained in the stas list
57 | if stmp[0].stats.station in stas:
58 | # Check if contains either start or end time
59 | ststart = stmp[0].stats.starttime
60 | stend = stmp[-1].stats.endtime
61 | if (ststart<=tstart and tstart<=stend) or (ststart<=tend and
62 | tend<=stend) or (tstart<=stend and ststart<=tend):
63 | flist_sub.append(f)
64 |
65 | # Fully load data from file
66 | stmp = Stream()
67 | for f in flist_sub:
68 | tmp = obspy.read(f, starttime=tstart, endtime=tend+opt.maxdt)
69 | if len(tmp) > 0:
70 | stmp = stmp.extend(tmp)
71 |
72 | # Filter and merge
73 | stmp = stmp.filter('bandpass', freqmin=opt.fmin, freqmax=opt.fmax, corners=2,
74 | zerophase=True)
75 | stmp = stmp.taper(0.05,type='hann',max_length=opt.mintrig)
76 | for m in range(len(stmp)):
77 | if stmp[m].stats.sampling_rate != opt.samprate:
78 | stmp[m] = stmp[m].resample(opt.samprate)
79 | stmp = stmp.merge(method=1, fill_value=0)
80 |
81 | # Only grab stations/channels that we want and in order
82 | netlist = []
83 | stalist = []
84 | chalist = []
85 | loclist = []
86 | for s in stmp:
87 | stalist.append(s.stats.station)
88 | chalist.append(s.stats.channel)
89 | netlist.append(s.stats.network)
90 | loclist.append(s.stats.location)
91 |
92 | # Find match of SCNL in header or fill empty
93 | for n in range(len(stas)):
94 | for m in range(len(stalist)):
95 | if (stas[n] in stalist[m] and chas[n] in chalist[m] and nets[n] in
96 | netlist[m] and locs[n] in loclist[m]):
97 | st = st.append(stmp[m])
98 | if len(st) == n:
99 | print("Couldn't find "+stas[n]+'.'+chas[n]+'.'+nets[n]+'.'+locs[n])
100 | trtmp = Trace()
101 | trtmp.stats.sampling_rate = opt.samprate
102 | trtmp.stats.station = stas[n]
103 | st = st.append(trtmp.copy())
104 |
105 | else:
106 |
107 | if '://' not in opt.server:
108 | # Backward compatibility with previous setting files
109 | if '.' not in opt.server:
110 | client = Client(opt.server)
111 | else:
112 | client = EWClient(opt.server, opt.port)
113 | # New server syntax (more options and server and port on same variable)
114 | elif 'fdsnws://' in opt.server:
115 | server = opt.server.split('fdsnws://',1)[1]
116 | client = Client(server)
117 | elif 'waveserver://' in opt.server:
118 | server_str = opt.server.split('waveserver://',1)[1]
119 | try:
120 | server = server_str.split(':',1)[0]
121 | port = server_str.split(':',1)[1]
122 |             except IndexError:
123 | server = server_str
124 | port = '16017'
125 | client = EWClient(server, int(port))
126 | elif 'seedlink://' in opt.server:
127 | server_str = opt.server.split('seedlink://',1)[1]
128 | try:
129 | server = server_str.split(':',1)[0]
130 | port = server_str.split(':',1)[1]
131 |             except IndexError:
132 | server = server_str
133 | port = '18000'
134 | client = SeedLinkClient(server, port=int(port), timeout=1)
135 |
136 | for n in range(len(stas)):
137 | try:
138 | stmp = client.get_waveforms(nets[n], stas[n], locs[n], chas[n],
139 | tstart, tend+opt.maxdt)
140 | for m in range(len(stmp)):
141 | stmp[m].data = np.where(stmp[m].data == -2**31, 0, stmp[m].data) # replace -2**31 (Winston NaN token) w 0
142 | stmp = stmp.filter('bandpass', freqmin=opt.fmin, freqmax=opt.fmax,
143 | corners=2, zerophase=True)
144 | stmp = stmp.taper(0.05,type='hann',max_length=opt.mintrig)
145 | for m in range(len(stmp)):
146 | if stmp[m].stats.sampling_rate != opt.samprate:
147 | stmp[m] = stmp[m].resample(opt.samprate)
148 | stmp = stmp.merge(method=1, fill_value=0)
149 | except (obspy.clients.fdsn.header.FDSNException):
150 | try: # try again
151 | stmp = client.get_waveforms(nets[n], stas[n], locs[n], chas[n],
152 | tstart, tend+opt.maxdt)
153 | for m in range(len(stmp)):
154 | stmp[m].data = np.where(stmp[m].data == -2**31, 0, stmp[m].data) # replace -2**31 (Winston NaN token) w 0
155 | stmp = stmp.filter('bandpass', freqmin=opt.fmin, freqmax=opt.fmax,
156 | corners=2, zerophase=True)
157 | stmp = stmp.taper(0.05,type='hann',max_length=opt.mintrig)
158 | for m in range(len(stmp)):
159 | if stmp[m].stats.sampling_rate != opt.samprate:
160 | stmp[m] = stmp[m].resample(opt.samprate)
161 | stmp = stmp.merge(method=1, fill_value=0)
162 | except (obspy.clients.fdsn.header.FDSNException):
163 | print('No data found for {0}.{1}'.format(stas[n],nets[n]))
164 | trtmp = Trace()
165 | trtmp.stats.sampling_rate = opt.samprate
166 | trtmp.stats.station = stas[n]
167 | stmp = Stream().extend([trtmp.copy()])
168 |
169 | # Last check for length; catches problem with empty waveserver
170 | if len(stmp) != 1:
171 | print('No data found for {0}.{1}'.format(stas[n],nets[n]))
172 | trtmp = Trace()
173 | trtmp.stats.sampling_rate = opt.samprate
174 | trtmp.stats.station = stas[n]
175 | stmp = Stream().extend([trtmp.copy()])
176 |
177 | st.extend(stmp.copy())
178 |
179 | # Edit 'start' time if using offset option
180 | if opt.maxdt:
181 | dts = np.fromstring(opt.offset, sep=',')
182 | for n, tr in enumerate(st):
183 | tr.stats.starttime = tr.stats.starttime-dts[n]
184 |
185 | st = st.trim(starttime=tstart, endtime=tend, pad=True, fill_value=0)
186 | stC = st.copy()
187 |
188 | return st, stC
189 |
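A standalone sketch (not part of the file) of the server-string convention handled
above; the hostnames and ports are made up for illustration:

    for server in ('fdsnws://IRIS', 'waveserver://host.example:16017',
                   'seedlink://host.example'):
        scheme, rest = server.split('://', 1)
        host, _, port = rest.partition(':')
        print(scheme, host, port if port else '(default)')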
190 |
191 | def trigger(st, stC, rtable, opt):
192 |
193 | """
194 | Run triggering algorithm on a stream of data.
195 |
196 |     st: OBSPy stream of data; stC: copy of st used for STA/LTA triggering
197 | rtable: Repeater table contains reference time of previous trigger in samples
198 | opt: Options object describing station/run parameters
199 |
200 |     Returns triggered traces as an OBSPy Stream object and updates ptime for next run
201 | """
202 |
203 | tr = st[0]
204 | t = tr.stats.starttime
205 |
206 | cft = coincidence_trigger(opt.trigalg, opt.trigon, opt.trigoff, stC, opt.nstaC,
207 | sta=opt.swin, lta=opt.lwin, details=True)
208 |
209 | if len(cft) > 0:
210 |
211 | ind = 0
212 |
213 | # Slice out the data from st and save the maximum STA/LTA ratio value for
214 | # use in orphan expiration
215 |
216 | # Convert ptime from time of last trigger to seconds before start time
217 | if rtable.attrs.ptime:
218 | ptime = (UTCDateTime(rtable.attrs.ptime) - t)
219 | else:
220 | ptime = -opt.mintrig
221 |
222 | for n in range(len(cft)):
223 |
224 | ttime = cft[n]['time'] # This is a UTCDateTime, not samples
225 |
226 | if (ttime >= t + opt.atrig) and (ttime >= t + ptime +
227 | opt.mintrig) and (ttime < t + len(tr.data)/opt.samprate -
228 | 2*opt.atrig):
229 |
230 | ptime = ttime - t
231 |
232 | # Cut out and append all data to first trace
233 | tmp = st.slice(ttime - opt.ptrig, ttime + opt.atrig)
234 | ttmp = tmp.copy()
235 | ttmp = ttmp.trim(ttime - opt.ptrig, ttime + opt.atrig + 0.05, pad=True,
236 | fill_value=0)
237 | ttmp[0].data = ttmp[0].data[0:opt.wshape] - np.mean(
238 | ttmp[0].data[0:opt.wshape])
239 | for s in range(1,len(ttmp)):
240 | ttmp[0].data = np.append(ttmp[0].data, ttmp[s].data[
241 | 0:opt.wshape] - np.mean(ttmp[s].data[0:opt.wshape]))
242 | ttmp[0].stats.maxratio = np.max(cft[n]['cft_peaks'])
243 |                     if ind == 0:
244 | trigs = Stream(ttmp[0])
245 | ind = ind+1
246 | else:
247 | trigs = trigs.append(ttmp[0])
248 |
249 |         if ind == 0:
250 | return []
251 | else:
252 | rtable.attrs.ptime = (t + ptime).isoformat()
253 | return trigs
254 | else:
255 | return []
256 |
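A standalone sketch (not part of the file) of the ObsPy coincidence_trigger call used
above, on a synthetic single-station stream; thresholds and STA/LTA windows here are
illustrative, not REDPy defaults:

    import numpy as np
    from obspy import Trace, Stream
    from obspy.signal.trigger import coincidence_trigger

    tr = Trace(data=np.random.randn(6000))
    tr.stats.sampling_rate = 100.0
    tr.data[3000:3200] += 20*np.random.randn(200)    # noisy 'event' at 30 s
    cft = coincidence_trigger('classicstalta', 3.0, 2.0, Stream([tr]), 1,
                              sta=0.7, lta=8.0, details=True)
    print(len(cft), cft[0]['time'], np.max(cft[0]['cft_peaks']))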
257 |
258 | def dataClean(alltrigs, opt, flag=1):
259 |
260 | """
261 | Examine triggers and weed out spikes and calibration pulses using kurtosis and
262 | outlier ratios
263 |
264 | alltrigs: triggers output from triggering
265 | opt: opt from config
266 | flag: 1 if defining window to check, 0 if want to check whole waveform for spikes
267 | (note that different threshold values should be used for different window lengths)
268 |
269 | Returns good trigs (trigs) and several junk types (junk, junkFI, junkKurt)
270 | """
271 |
272 | trigs=Stream()
273 | junkFI=Stream()
274 | junkKurt=Stream()
275 | junk=Stream()
276 | for i in range(len(alltrigs)):
277 |
278 | njunk = 0
279 | ntele = 0
280 |
281 | for n in range(opt.nsta):
282 |
283 | dat = alltrigs[i].data[n*opt.wshape:(n+1)*opt.wshape]
284 | if flag == 1:
285 | datcut=dat[range(int((opt.ptrig-opt.kurtwin/2)*opt.samprate),
286 | int((opt.ptrig+opt.kurtwin/2)*opt.samprate))]
287 | else:
288 | datcut=dat
289 |
290 | if np.sum(np.abs(dat))!=0.0:
291 | # Calculate kurtosis in window
292 | k = stats.kurtosis(datcut)
293 | # Compute kurtosis of frequency amplitude spectrum next
294 | datf = np.absolute(fft(dat))
295 | kf = stats.kurtosis(datf)
296 | # Calculate outlier ratio using z ((data-median)/mad)
297 | mad = np.nanmedian(np.absolute(dat - np.nanmedian(dat)))
298 | z = (dat-np.median(dat))/mad
299 | # Outliers have z > 4.45
300 | orm = len(z[z>4.45])/np.array(len(z)).astype(float)
301 |
302 | if k >= opt.kurtmax or orm >= opt.oratiomax or kf >= opt.kurtfmax:
303 | njunk+=1
304 |
305 | winstart = int(opt.ptrig*opt.samprate - opt.winlen/10)
306 | winend = int(opt.ptrig*opt.samprate - opt.winlen/10 + opt.winlen)
307 | fftwin = np.reshape(fft(dat[winstart:winend]),(opt.winlen,))
308 | if np.median(np.abs(dat[winstart:winend]))!=0:
309 | fi = np.log10(np.mean(np.abs(np.real(
310 | fftwin[int(opt.fiupmin*opt.winlen/opt.samprate):int(
311 | opt.fiupmax*opt.winlen/opt.samprate)])))/np.mean(np.abs(np.real(
312 | fftwin[int(opt.filomin*opt.winlen/opt.samprate):int(
313 | opt.filomax*opt.winlen/opt.samprate)]))))
314 |                     if fi < opt.telefi:
315 |                         ntele+=1
316 | 
317 | 
318 |         # Allow if there are enough good stations to correlate
319 |         if njunk <= (opt.nsta-opt.ncor) and ntele <= opt.teleok:
320 |             trigs.append(alltrigs[i])
321 |         elif njunk > 0:
322 | if ntele > 0:
323 | junk.append(alltrigs[i])
324 | else:
325 | junkKurt.append(alltrigs[i])
326 | else:
327 | junkFI.append(alltrigs[i])
328 |
329 | return trigs, junk, junkFI, junkKurt
330 |
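A standalone sketch (not part of the file) of the kurtosis and outlier-ratio spike
test used in dataClean, on synthetic traces:

    import numpy as np
    from scipy import stats

    noise = np.random.randn(2000)
    spike = noise.copy()
    spike[1000] += 500.0                    # single large spike

    for dat in (noise, spike):
        k = stats.kurtosis(dat)
        mad = np.nanmedian(np.absolute(dat - np.nanmedian(dat)))
        z = (dat - np.median(dat))/mad
        orm = len(z[z > 4.45])/float(len(z))
        print('kurtosis {:12.1f}   outlier ratio {:.4f}'.format(k, orm))
    # the spiky trace has kurtosis orders of magnitude above clean noise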
331 |
332 | def aicpick(st, initialTrigger, opt):
333 |
334 | """
335 | An autopicker to (hopefully) improve consistency in triggering
336 |
337 | st: OBSPy stream of data containing trigger
338 | initialTrigger: initial guess at trigger time (in number of samples into stream)
339 | opt: Options object describing station/run parameters
340 |
341 | Returns updated trigger time
342 |
343 | AIC stands for Akaike Information Criterion. This code is based on the formula in
344 | Zhang, Thurber, and Rowe [2003] (originally from Maeda [1985]) to calculate AIC
345 | directly from the waveform. It is a purely statistical picker, and the minimum of
346 | the AIC corresponds to where one can divide the signal into two different parts (in
347 | this case, noise followed by signal).
348 | """
349 |
350 | t = st[0].stats.starttime
351 | x0 = st.slice(t - opt.ptrig/2 + initialTrigger/opt.samprate,
352 | t + opt.ptrig/2 + initialTrigger/opt.samprate)
353 | x = x0[0].data
354 | nsamp = int(opt.ptrig*opt.samprate)
355 |
356 | AIC = np.zeros([nsamp,1])
357 |
358 | for k in range(nsamp):
359 |
360 | # Calculate the Akaike Information Criteria
361 | var1 = np.var(x[0:k+1])
362 | var2 = np.var(x[k:nsamp+1])
363 |
364 | if var1 == 0 or var2 == 0:
365 |             AIC[k] = np.nan
366 | else:
367 | AIC[k] = (k+1)*np.log10(var1) + (nsamp-k)*np.log10(var2)
368 |
369 | # Pad 10 samples on either end to prevent encountering edge effects
370 |     picksamp = np.argmin(AIC[10:nsamp-10]) + 10 + initialTrigger - nsamp/2
371 |
372 | return picksamp
373 |
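A standalone sketch (not part of the file) of the Maeda-style AIC computation used in
aicpick, on a synthetic noise-then-signal trace; the AIC minimum falls at the onset:

    import numpy as np

    nsamp = 400
    x = np.concatenate([0.1*np.random.randn(200), np.random.randn(200)])
    AIC = np.full(nsamp, np.nan)
    for k in range(1, nsamp-1):
        var1, var2 = np.var(x[:k+1]), np.var(x[k:])
        if var1 > 0 and var2 > 0:
            AIC[k] = (k+1)*np.log10(var1) + (nsamp-k)*np.log10(var2)
    print(np.nanargmin(AIC[10:nsamp-10]) + 10)   # ~200, the noise/signal onset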
--------------------------------------------------------------------------------
/redpy37.yml:
--------------------------------------------------------------------------------
1 | name: redpy
2 | channels:
3 | - conda-forge
4 | - defaults
5 | dependencies:
6 | - blosc=1.21.1=h97e831e_2
7 | - bokeh=2.4.2=py37hf985489_1
8 | - brotli=1.0.9=h5eb16cf_7
9 | - brotli-bin=1.0.9=h5eb16cf_7
10 | - brotlipy=0.7.0=py37h69ee0a8_1004
11 | - bzip2=1.0.8=h0d85af4_4
12 | - c-ares=1.18.1=h0d85af4_0
13 | - ca-certificates=2021.10.8=h033912b_0
14 | - cartopy=0.18.0=py37hf1ba7ce_1
15 | - certifi=2021.10.8=py37hf985489_2
16 | - cffi=1.15.0=py37h446072c_0
17 | - charset-normalizer=2.0.12=pyhd8ed1ab_0
18 | - cryptography=36.0.2=py37h20b3391_1
19 | - cycler=0.11.0=pyhd8ed1ab_0
20 | - decorator=5.1.1=pyhd8ed1ab_0
21 | - fonttools=4.33.3=py37h994c40b_0
22 | - freetype=2.10.4=h4cff582_1
23 | - geos=3.8.0=hb1e8313_0
24 | - giflib=5.2.1=hbcb3906_2
25 | - greenlet=1.1.2=py37h0582d14_2
26 | - hdf5=1.12.1=nompi_ha60fbc9_104
27 | - icu=70.1=h96cf925_0
28 | - idna=3.3=pyhd8ed1ab_0
29 | - importlib-metadata=4.11.3=py37hf985489_1
30 | - jbig=2.1=h0d85af4_2003
31 | - jinja2=3.1.2=pyhd8ed1ab_0
32 | - jpeg=9e=h5eb16cf_1
33 | - kiwisolver=1.4.2=py37h18621fa_1
34 | - krb5=1.19.3=hb49756b_0
35 | - lcms2=2.12=h577c468_0
36 | - lerc=3.0=he49afe7_0
37 | - libblas=3.9.0=14_osx64_openblas
38 | - libbrotlicommon=1.0.9=h5eb16cf_7
39 | - libbrotlidec=1.0.9=h5eb16cf_7
40 | - libbrotlienc=1.0.9=h5eb16cf_7
41 | - libcblas=3.9.0=14_osx64_openblas
42 | - libcurl=7.83.1=h372c54d_0
43 | - libcxx=14.0.3=hc203e6f_0
44 | - libdeflate=1.10=h0d85af4_0
45 | - libedit=3.1.20191231=h0678c8f_2
46 | - libev=4.33=haf1e3a3_1
47 | - libffi=3.4.2=h0d85af4_5
48 | - libgfortran=5.0.0=9_3_0_h6c81a4c_23
49 | - libgfortran5=9.3.0=h6c81a4c_23
50 | - libiconv=1.16=haf1e3a3_0
51 | - liblapack=3.9.0=14_osx64_openblas
52 | - libnghttp2=1.47.0=h942079c_0
53 | - libopenblas=0.3.20=openmp_hb3cd9ec_0
54 | - libpng=1.6.37=h7cec526_2
55 | - libssh2=1.10.0=h52ee1ee_2
56 | - libtiff=4.3.0=h17f2ce3_3
57 | - libwebp=1.2.2=h28dabe5_0
58 | - libwebp-base=1.2.2=h0d85af4_1
59 | - libxcb=1.13=h0d85af4_1004
60 | - libxml2=2.9.14=h08a9926_0
61 | - libxslt=1.1.33=h5bff336_4
62 | - libzlib=1.2.11=h6c3fc93_1014
63 | - llvm-openmp=14.0.3=ha654fa7_0
64 | - lxml=4.8.0=py37h69ee0a8_3
65 | - lz4-c=1.9.3=he49afe7_1
66 | - markupsafe=2.1.1=py37h69ee0a8_1
67 | - matplotlib-base=3.5.2=py37h80cb303_0
68 | - munkres=1.1.4=pyh9f0ad1d_0
69 | - ncurses=6.3=h96cf925_1
70 | - numexpr=2.8.0=py37hb276a58_2
71 | - numpy=1.21.6=py37h345d48f_0
72 | - obspy=1.3.0=py37h4105427_0
73 | - openjpeg=2.4.0=h6e7aa92_1
74 | - openssl=1.1.1o=hfe4f2af_0
75 | - packaging=21.3=pyhd8ed1ab_0
76 | - pandas=1.3.4=py37h5b83a90_1
77 | - pillow=9.1.0=py37h2540ef4_2
78 | - pip=22.1=pyhd8ed1ab_0
79 | - proj=6.2.1=h773a61f_0
80 | - pthread-stubs=0.4=hc929b4f_1001
81 | - pycparser=2.21=pyhd8ed1ab_0
82 | - pyopenssl=22.0.0=pyhd8ed1ab_0
83 | - pyparsing=3.0.9=pyhd8ed1ab_0
84 | - pyshp=2.3.0=pyhd8ed1ab_0
85 | - pysocks=1.7.1=py37hf985489_5
86 | - pytables=3.7.0=py37hc0663ee_0
87 | - python=3.7.12=haf480d7_100_cpython
88 | - python-dateutil=2.8.2=pyhd8ed1ab_0
89 | - python_abi=3.7=2_cp37m
90 | - pytz=2022.1=pyhd8ed1ab_0
91 | - pyyaml=6.0=py37h69ee0a8_4
92 | - readline=8.1=h05e3726_0
93 | - requests=2.27.1=pyhd8ed1ab_0
94 | - scipy=1.7.3=py37h4e3cf02_0
95 | - setuptools=59.8.0=py37hf985489_1
96 | - shapely=1.7.1=py37h9250791_0
97 | - six=1.16.0=pyh6c4a22f_0
98 | - snappy=1.1.9=h6e38e02_0
99 | - sqlalchemy=1.4.36=py37h994c40b_0
100 | - sqlite=3.38.5=hd9f0692_0
101 | - tk=8.6.12=h5dbffcc_0
102 | - tornado=6.1=py37h69ee0a8_3
103 | - typing-extensions=4.2.0=hd8ed1ab_1
104 | - typing_extensions=4.2.0=pyha770c72_1
105 | - unicodedata2=14.0.0=py37h69ee0a8_1
106 | - urllib3=1.26.9=pyhd8ed1ab_0
107 | - wheel=0.37.1=pyhd8ed1ab_0
108 | - xorg-libxau=1.0.9=h35c211d_0
109 | - xorg-libxdmcp=1.1.3=h35c211d_0
110 | - xz=5.2.5=haf1e3a3_1
111 | - yaml=0.2.5=h0d85af4_2
112 | - zipp=3.8.0=pyhd8ed1ab_0
113 | - zlib=1.2.11=h6c3fc93_1014
114 | - zstd=1.5.2=h582d3a0_0
115 | prefix: /Users/ahotovecellis/opt/anaconda3/envs/redpy
116 |
--------------------------------------------------------------------------------
/removeFamily.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import redpy.config
6 | import redpy.table
7 | import argparse
8 |
9 | """
10 | Run this script to manually remove families/clusters (e.g., correlated noise that made it
11 | past the 'junk' detector). Reclusters and remakes images when done.
12 |
13 | usage: removeFamily.py [-h] [-v] [-c CONFIGFILE] N [N ...]
14 |
15 | positional arguments:
16 | N family number(s) to be moved and deleted
17 |
18 | optional arguments:
19 | -h, --help show this help message and exit
20 | -v, --verbose increase written print statements
21 | -c CONFIGFILE, --configfile CONFIGFILE
22 | use configuration file named CONFIGFILE instead of
23 | default settings.cfg
24 | """
25 |
26 | parser = argparse.ArgumentParser(description=
27 | "Run this script to manually remove families/clusters")
28 | parser.add_argument('famnum', metavar='N', type=int, nargs='+',
29 | help="family number(s) to be moved and deleted")
30 | parser.add_argument("-v", "--verbose", action="count", default=0,
31 | help="increase written print statements")
32 | parser.add_argument("-c", "--configfile",
33 | help="use configuration file named CONFIGFILE instead of default settings.cfg")
34 | args = parser.parse_args()
35 |
36 | if args.configfile:
37 | opt = redpy.config.Options(args.configfile)
38 | if args.verbose: print("Using config file: {0}".format(args.configfile))
39 | else:
40 | opt = redpy.config.Options("settings.cfg")
41 | if args.verbose: print("Using config file: settings.cfg")
42 |
43 | if args.verbose: print("Opening hdf5 table: {0}".format(opt.filename))
44 | h5file, rtable, otable, ttable, ctable, jtable, dtable, ftable = redpy.table.openTable(opt)
45 |
46 | # Check for MPL version mismatch
47 | redpy.table.checkMPL(rtable, ftable, ttable, otable, dtable, opt)
48 |
49 |
50 | oldnClust = ftable.attrs.nClust
51 |
52 | redpy.table.removeFamilies(rtable, ctable, dtable, ftable, args.famnum, opt)
53 |
54 | if args.verbose: print("Creating plots...")
55 | redpy.plotting.createPlots(rtable, ftable, ttable, ctable, otable, opt)
56 |
57 | if args.verbose: print("Cleaning up .html files...")
58 | redpy.plotting.cleanHTML(oldnClust, ftable.attrs.nClust, opt)
59 |
60 | if args.verbose: print("Closing table...")
61 | h5file.close()
62 | if args.verbose: print("Done")
--------------------------------------------------------------------------------
/removeFamilyGUI.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import matplotlib
6 | matplotlib.use('TkAgg')
7 | # ^ Not sure why this needs to be here, but apparently Tk hates me otherwise...
8 |
9 | import tkinter as tk
10 | import redpy.config
11 | import redpy.table
12 | import argparse
13 | from PIL import Image
14 | import os
15 | import glob
16 |
17 | """
18 | Run this script to manually remove families/clusters (e.g., correlated noise that made it
19 | past the 'junk' detector) using a GUI interface. Reclusters and remakes images when done.
20 | Note: using large NCOLS may make the window too wide for your monitor, and the GUI does
21 | not currently support side scrolling...
22 |
23 | usage: removeFamilyGUI.py [-h] [-v] [-c CONFIGFILE] [-n NCOLS] [-m MINCLUST]
24 |
25 | optional arguments:
26 | -h, --help show this help message and exit
27 | -v, --verbose increase written print statements
28 | -c CONFIGFILE, --configfile CONFIGFILE
29 | use configuration file named CONFIGFILE instead of
30 | default settings.cfg
31 | -n NCOLS, --ncols NCOLS
32 | adjust number of columns in layout (default 3)
33 | -m MINCLUST, --minclust MINCLUST
34 | only look at clusters with numbers at or above MINCLUST
35 | """
36 |
37 |
38 | # Define some functions specific to this GUI
39 | def remove(*args):
40 | """
41 | Run the removal script using checked boxes
42 | """
43 | print('\nYou have selected the following families to remove:')
44 | removethese = []
45 | for n in range(len(var)):
46 | if var[n].get() > 0:
47 | removethese.append(n+m)
48 | print(removethese)
49 | root.destroy() # Close the window
50 |
51 | redpy.table.removeFamilies(rtable, ctable, dtable, ftable, removethese, opt)
52 |
53 | if len(removethese) > 0:
54 | print("Creating plots...")
55 | redpy.plotting.createPlots(rtable, ftable, ttable, ctable, otable, opt)
56 |
57 | def close(*args):
58 | """
59 | Close the window and the table
60 | """
61 | root.destroy()
62 |
63 | def onFrameConfigure(canvas):
64 | """
65 | Reset the scroll region to encompass the inner frame
66 | """
67 | canvas.configure(scrollregion=canvas.bbox("all"))
68 |
69 | def mouse_wheel(event):
70 | """
71 | Mousewheel scrolling is a bit squiffy
72 | (only scrolls down, but better than nothing?)
73 | """
74 |     canvas.yview_scroll(int(-1*(event.delta/120)), "units")
75 |
76 |
77 | parser = argparse.ArgumentParser(description=
78 | "Run this script to manually remove families/clusters using a GUI")
79 | parser.add_argument("-v", "--verbose", action="count", default=0,
80 | help="increase written print statements")
81 | parser.add_argument("-c", "--configfile",
82 | help="use configuration file named CONFIGFILE instead of default settings.cfg")
83 | parser.add_argument("-n", "--ncols", default=3, type=int,
84 | help="adjust number of columns in layout (default 3)")
85 | parser.add_argument("-m", "--minclust", default=0, type=int,
86 | help="only look at clusters with numbers at or above MINCLUST")
87 | args = parser.parse_args()
88 |
89 | if args.configfile:
90 | opt = redpy.config.Options(args.configfile)
91 | if args.verbose: print("Using config file: {0}".format(args.configfile))
92 | else:
93 | opt = redpy.config.Options("settings.cfg")
94 | if args.verbose: print("Using config file: settings.cfg")
95 |
96 | if args.verbose: print("Opening hdf5 table: {0}".format(opt.filename))
97 | h5file, rtable, otable, ttable, ctable, jtable, dtable, ftable = redpy.table.openTable(opt)
98 |
99 | # Check for MPL version mismatch
100 | redpy.table.checkMPL(rtable, ftable, ttable, otable, dtable, opt)
101 |
102 |
103 | oldnClust = ftable.attrs.nClust
104 |
105 | if args.minclust:
106 | m = args.minclust
107 | else:
108 | m = 0
109 |
110 | # Create GUI window
111 | root = tk.Tk()
112 | root.title("REDPy - Check Families to Permanently Remove")
113 | canvas = tk.Canvas(root, borderwidth=0, width=560*args.ncols, height=1500, background="#ffffff")
114 | frame = tk.Frame(canvas, background="#ffffff")
115 | vsb = tk.Scrollbar(root, orient="vertical", command=canvas.yview)
116 | canvas.configure(yscrollcommand=vsb.set)
117 | vsb.pack(side="right", fill="y")
118 | canvas.pack(side="left", fill="both", expand=True)
119 | canvas.create_window((4,4), window=frame, anchor="nw")
120 | frame.bind("<Configure>", lambda event, canvas=canvas: onFrameConfigure(canvas))
121 |
122 | # Build grid of families
123 | fams = range(len(ftable))
124 | r = 1
125 | c = 1
126 | imgobj = []
127 | invimgobj = []
128 | check = []
129 | var = []
130 | for n in fams:
131 | if n >= args.minclust:
132 | im = Image.open('{}{}/clusters/{}.png'.format(opt.outputPath,
133 | opt.groupName,n)).convert('RGB')
134 | im.save('{}{}/clusters/{}.gif'.format(opt.outputPath,opt.groupName,n))
135 |
136 | # Create 'inverted' selection image
137 | source = im.split()
138 | blk = source[1].point(lambda i: i*0)
139 | source[1].paste(blk)
140 | source[2].paste(blk)
141 | invim = Image.merge('RGB', source)
142 |
143 | invim.save('{}{}/clusters/{}_inv.gif'.format(opt.outputPath,opt.groupName,n))
144 | imgobj.append(tk.PhotoImage(file='{}{}/clusters/{}.gif'.format(
145 | opt.outputPath,opt.groupName,n)))
146 | invimgobj.append(tk.PhotoImage(file='{}{}/clusters/{}_inv.gif'.format(
147 | opt.outputPath,opt.groupName,n)))
148 | var.append(tk.IntVar())
149 | check.append(tk.Checkbutton(frame, image=imgobj[n-m],
150 | variable = var[n-m], selectimage=invimgobj[n-m]).grid(
151 | column=c, row=r, sticky='N'))
152 | c = c+1
153 | if c == args.ncols+1:
154 | c = 1
155 | r = r+1
156 | if r > 255:
157 | print("Ran out of rows. Use -n or -m flags to view more...")
158 |
159 | # Add buttons
160 | tk.Button(frame, text="Remove Checked", background="#ffffff", command=remove).grid(
161 | column=1, row=r+1, columnspan=args.ncols, sticky='N')
162 | tk.Button(frame, text="Cancel", background="#ffffff", command=close).grid(
163 | column=1, row=r+2, columnspan=args.ncols, sticky='S')
164 |
165 | # Bind MouseWheel, Return, Escape keys to be more useful
166 | root.bind_all("<MouseWheel>", mouse_wheel)
167 | root.bind('<Return>', remove)
168 | root.bind('<Escape>', close)
169 |
170 | # Add some padding
171 | for child in frame.winfo_children(): child.grid_configure(padx=15, pady=15)
172 |
173 | # Go!
174 | root.mainloop()
175 |
176 | # Clean up
177 | print("\nCleaning up .gif files...")
178 | dlist = glob.glob('{}{}/clusters/*.gif'.format(opt.outputPath,opt.groupName))
179 |
180 | for tmp in dlist:
181 | os.remove(tmp)
182 |
183 | if args.verbose: print("Cleaning up .html files...")
184 | redpy.plotting.cleanHTML(oldnClust, ftable.attrs.nClust, opt)
185 |
186 | print("Closing table...")
187 | h5file.close()
188 | print("Done")
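
A standalone sketch (not part of the file) of the channel-zeroing trick used above to
build the red 'inverted' selection images with Pillow:

    from PIL import Image

    im = Image.new('RGB', (8, 8), (120, 180, 240))   # stand-in for a cluster .png
    r, g, b = im.split()
    zero = g.point(lambda i: 0)
    inv = Image.merge('RGB', (r, zero, zero))
    print(inv.getpixel((0, 0)))   # (120, 0, 0): only the red channel survives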
--------------------------------------------------------------------------------
/removeSmallFamily.py:
--------------------------------------------------------------------------------
1 | # REDPy - Repeating Earthquake Detector in Python
2 | # Copyright (C) 2016-2020 Alicia Hotovec-Ellis (ahotovec-ellis@usgs.gov)
3 | # Licensed under GNU GPLv3 (see LICENSE.txt)
4 |
5 | import redpy.config
6 | import redpy.table
7 | import argparse
8 |
9 | """
10 | Run this script to remove small families/clusters (i.e., families that have fewer than M members and are more than
11 | D days old). Reclusters and remakes images when done. This script determines which families need to be removed and
12 | then passes them to the same removal routine used by removeFamily.py. Note: Removing families from datasets with
13 | many families and repeaters may take a significant amount of time.
14 |
15 | usage: removeSmallFamily.py [-h] [-v] [-c CONFIGFILE] [-m MINMEMBERS] [-d MAXDAYS] [-t SEEDTIME]
16 |
17 | optional arguments:
18 | -h, --help show this help message and exit
19 | -v, --verbose increase written print statements
20 | -m, --MINMEMBERS minimum family size to keep (default: 5)
21 | -d, --MAXDAYS maximum age of a family (days) to keep; measured from first member in family
22 | in other words: keep families less than or equal to d days old
23 | (default: 0; i.e., removes all small families)
24 |   -t, --SEEDTIME time from which to compute families' ages (default: last trigger time UTC);
25 | if a family started after the seedtime, it will be kept
26 |   -l, --LIST lists families to keep and remove, but does not actually modify anything; automatically uses
27 | verbose mode.
28 | -c CONFIGFILE, --configfile CONFIGFILE
29 | use configuration file named CONFIGFILE instead of
30 | default settings.cfg
31 | """
32 |
33 |
34 | parser = argparse.ArgumentParser(description=
35 | "Run this script to manually remove small families/clusters")
36 | parser.add_argument("-v", "--verbose", action="count", default=0,
37 | help="increase written print statements")
38 | parser.add_argument("-c", "--configfile",
39 | help="use configuration file named CONFIGFILE instead of default settings.cfg")
40 | parser.add_argument("-m", "--minmembers", type=int, default=5,
41 | help="minimum family size to keep")
42 | parser.add_argument("-d", "--maxdays", type=int, default=0,
43 | help="maximum age of a family to keep (default: 0, i.e., removes every small family regardless of age)")
44 | parser.add_argument("-t", "--seedtime", default=False,
45 | help="time from which to compute families' ages (YYYY-MM-DDTHH:MM:SS) (default: last trigger time UTC)")
46 | parser.add_argument("-l", "--list", action="store_true", default=False,
47 | help="list families to keep and remove, but do not execute")
48 | args = parser.parse_args()
49 |
50 |
51 | def main(args):
52 |
53 | if args.configfile:
54 | opt = redpy.config.Options(args.configfile)
55 | if args.verbose: print("Using config file: {0}".format(args.configfile))
56 | else:
57 | opt = redpy.config.Options("settings.cfg")
58 | if args.verbose: print("Using config file: settings.cfg")
59 |
60 | if args.verbose: print("Opening hdf5 table: {0}".format(opt.filename))
61 | h5file, rtable, otable, ttable, ctable, jtable, dtable, ftable = redpy.table.openTable(opt)
62 |
63 | # Check for MPL version mismatch
64 | redpy.table.checkMPL(rtable, ftable, ttable, otable, dtable, opt)
65 |
66 | oldnClust = ftable.attrs.nClust
67 |
68 | # Determine which families to remove, remove them via redpy.table.removeSmallFamilies(), and get back the removed cluster numbers
69 | cnums = redpy.table.removeSmallFamilies(rtable, ctable, dtable, ftable, ttable, args.minmembers, args.maxdays,
70 | args.seedtime, opt, list_only=args.list, verbose=args.verbose)
71 |
72 | if len(cnums) > 0:
73 | # Only update plots if there are families removed
74 | if args.verbose: print("Creating plots...")
75 | redpy.plotting.createPlots(rtable, ftable, ttable, ctable, otable, opt)
76 |
77 | if args.verbose: print("Cleaning up old .html & .png files...")
78 | redpy.plotting.cleanHTML(oldnClust, ftable.attrs.nClust, opt)
79 | else:
80 | if args.verbose: print("No families removed. No plots to update...")
81 |
82 | if args.verbose: print("Closing table...")
83 | h5file.close()
84 | if args.verbose: print("Done")
85 |
86 |
87 | if __name__ == "__main__":
88 | main(args)
89 |
--------------------------------------------------------------------------------
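The docstring above describes the selection rule that `redpy.table.removeSmallFamilies()` applies: a family is removed when it has fewer than `minmembers` members and its first event is more than `maxdays` days before the seed time (with `maxdays=0` removing every small family, and families that begin after the seed time always kept). A minimal standalone sketch of that rule, using hypothetical inputs rather than REDPy's actual table interface:

```python
from obspy import UTCDateTime

def families_to_remove(sizes, starttimes, minmembers=5, maxdays=0, seedtime=None):
    """Hypothetical sketch of the keep/remove rule; the real logic lives in
    redpy.table.removeSmallFamilies() and may differ in detail.
    sizes: {family number: member count}
    starttimes: {family number: UTCDateTime of first member}"""
    if seedtime is None:
        seedtime = UTCDateTime()  # the script defaults to the last trigger time
    remove = []
    for fam, nmembers in sizes.items():
        age_days = (seedtime - starttimes[fam]) / 86400.0
        # Families that began after the seed time have negative age and are kept;
        # maxdays=0 removes every small family regardless of age
        if nmembers < minmembers and age_days > maxdays:
            remove.append(fam)
    return remove

# Example: family 3 is small and old, family 7 is small but recent, 9 is large
now = UTCDateTime("2020-01-31T00:00:00")
print(families_to_remove({3: 2, 7: 2, 9: 12},
                         {3: now - 40*86400, 7: now - 1*86400, 9: now - 100*86400},
                         minmembers=5, maxdays=10, seedtime=now))  # -> [3]
```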
/settings.cfg:
--------------------------------------------------------------------------------
1 | [Settings]
2 | # Above line is REQUIRED, do not edit!
3 |
4 | ############################# Configuration file for REDPy #############################
5 | # Settings not explicitly defined will be assigned a default value, so feel free to #
6 | # delete all of the comment lines and any parameters you wish to keep at default for #
7 | # a cleaner configuration file. #
8 | ########################################################################################
9 |
10 |
11 | ###### RUN PARAMETERS ######
12 |
13 | # Describe the run - this will be used in the title of the webpages and master plot title
14 | title=REDPy Catalog
15 | # Path to the folder that will contain outputs, not including groupName (below); leave
16 | # empty to use the current directory; ensure it ends with / or \ as appropriate for your
17 | # operating system
18 | outputPath=
19 | # groupName may not have spaces, corresponds to folder where outputs are stored
20 | groupName=default
21 | # filename must end in .h5, and can point to a relative or absolute path to the file
22 | filename=redpytable.h5
23 | # Number of days to keep orphans in the queue based on size before permanent deletion
24 | minorph=0.05
25 | maxorph=7.
26 | # nsec should be the maximum amount of time you want to process at once (in seconds)
27 | # This depends on how much data your computer can hold in memory
28 | nsec=3600
29 |
30 |
31 | ###### STATION PARAMETERS ######
32 |
33 | # Number of stations
34 | nsta=8
35 | # List of stations in order of storage preference, NO SPACES
36 | station=SEP,YEL,HSR,SHW,EDM,STD,JUN,SOS
37 | # List of channels, etc., one per station even if they're all the same
38 | channel=EHZ,EHZ,EHZ,EHZ,EHZ,EHZ,EHZ,EHZ
39 | network=UW,UW,UW,UW,UW,UW,UW,UW
40 | location=--,--,--,--,--,--,--,--
41 | # Sampling rate to store all waveforms at (will resample if data do not match)
42 | samprate=100.
43 | # Bandpass filter (Hz)
44 | fmin=1.
45 | fmax=10.
46 | # Frequency Index windows (should be within bounds of filtering above)
47 | # Please note that these settings are ALSO used for teleseism filtering (below); if you
48 | # change these from the default you will likely need to change 'telefi' or change
49 | # 'teleok' to the same value as 'nsta' if you do not want to exclude teleseisms.
50 | # FI lower bounds
51 | filomin=1.
52 | filomax=2.5
53 | # FI upper bounds
54 | fiupmin=5.
55 | fiupmax=10.
56 | # FI span for occurrencefi plot colorbar
57 | fispanlow=-0.5
58 | fispanhigh=0.5
59 |
60 |
61 | ###### DATA SOURCE ######
62 |
63 | # Server accepts any FDSN web service that has waveforms (see obspy.clients.fdsn for full
64 | # list or specify a server, e.g., server=fdsnws://http://service.iris.edu).
65 | # It may also be a waveserver (e.g., server=waveserver://mazama.ess.washington.edu:16024),
66 | # or a seedlink server (e.g., server=seedlink://rtserve.iris.washington.edu:18000) or
67 | # server=file for local files in any format ObsPy can read. If using local files like SAC
68 | # or miniSEED, please be sure your headers EXACTLY match what you use for the SCNL above,
69 | # as this is what is used to associate station information and not the filename.
70 | # Seedlink default port : 18000
71 | # WaveServer default port : 16017
72 | server=IRIS
73 | # Port is only necessary if using a waveserver with legacy format
74 | # Now deprecated, place port in server value, see above!
75 | port=16017
76 | # If using local files, define the path to the top directory where they exist,
77 | # ending in / or \ as appropriate for your operating system. If there are files in any
78 | # subdirectories within this directory, they will be found
79 | searchdir=./
80 | # You can specify a pattern for your files to reduce the files within the directory
81 | # searched. For example, filepattern=2019.06.*.mseed if your files are miniSEED files
82 | # named by date and you only want those from June 2019. Simple wildcarding is supported
83 | # (i.e., * and ?, [] for ranges of values or lists) but not full regular expressions.
84 | filepattern=*
85 |
86 |
87 | ###### TRIGGERING SETTINGS ######
88 |
89 | # Choose the STALTA algorithm. Options include a selection of trigger algorithms
90 | # supported by ObsPy's network coincidence trigger (suggested values for 'lwin' and 'swin'
91 | # are in parentheses, followed by suggested values for 'trigon' and 'trigoff'):
92 | # * 'classicstalta' : Classic STA/LTA (8., 0.7), (3., 2.)
93 | # * 'recstalta' : Recursive STA/LTA (3., 8.), (2., 1.5)
94 | # * 'delayedstalta' : Delayed STA/LTA (5., 10.), (5., 10.)
95 | # For more details, please visit:
96 | # https://docs.obspy.org/packages/autogen/obspy.core.trace.Trace.trigger.html#obspy.core.trace.Trace.trigger
97 | # and https://docs.obspy.org/tutorial/code_snippets/trigger_tutorial.html
98 | trigalg=classicstalta
99 | # How many stations need to trigger for an event to be considered, using a coincidence
100 | # trigger? I like using just over half of nsta. Using fewer allows more triggers through,
101 | # but those may not be real events or may be of dubious quality. Requiring all stations
102 | # to trigger is not advised unless you have 100% uptime on all stations.
103 | nstaC=5
104 | # STALTA trigger settings for long and short windows
105 | lwin=8.
106 | swin=0.7
107 | # Higher trigon will reduce number of triggers, but may miss small events
108 | trigon=3.
109 | # trigoff should be slightly less than trigon
110 | trigoff=2.
111 | # Optional time offset
112 | # Accepts a list (in same order as stations) of positive numbers corresponding to number
113 | # of seconds to move the trace to the left. The way I think of this is as a list of
114 | # arrival times of waves from an expected location. Can be used to quickly assess whether
115 | # an event of interest is located near a known point (e.g., beneath the summit of a
116 | # volcano or near the main shock in an aftershock sequence), or to bring arrivals from
117 | # distant stations to be within a short correlation window. Be aware that the trigger time
118 | # listed in the catalog will also be offset by the smallest value given. If used, should
119 | # have values for each station. If not used, can just be 0.
120 | # Example for default Mount St. Helens network might be:
121 | # offset=0.00,0.17,0.49,0.60,0.50,0.82,1.09,1.05
122 | offset=0
123 |
124 |
125 | ### CROSS-CORRELATION PARAMETERS ###
126 |
127 | # Cross-correlation window length (in samples, 2^n is best)
128 | winlen=1024
129 | # Minimum correlation coefficient to be considered a repeater
130 | cmin=0.7
131 | # Number of stations this value must be exceeded on to be counted as a repeater. Like
132 | # nstaC, I like to use about half.
133 | ncor=4
134 |
135 |
136 | ### PLOTTING PARAMETERS ###
137 | # List and order of plots to be included on the timeline, separated by either , (new row)
138 | # or + (group into tabs) without spaces. List of currently supported plot types are:
139 | # eqrate, fi, occurrence, and longevity. Try out tabs by setting this to:
140 | # plotformat=eqrate+fi+longevity,occurrence
141 | plotformat=eqrate,fi,occurrence+occurrencefi,longevity
142 | # Station number to use for plotting cores & amplitudes, beginning at 0, in station list
143 | printsta=2
144 | # Minimum number of members within a family needed to appear on the timeline plot
145 | minplot=5
146 | # Width (in days) of bins for histogram subplot (default is 1 day)
147 | dybin=1
148 | # Width (in hours) of bins for histogram subplot on 'recent' timeline (default is 1 hour)
149 | hrbin=1.
150 | # Width (in hours) of bars in occurrence plot (default is 1 hour) - if this is set for
151 | # a day or longer, the color bar will be scaled automatically up to a maximum of 1000
152 | # events per bin; recommend customizing this setting if your run extends >1 year
153 | occurbin=1
154 | # Width (in hours) of bars in recent occurrence plot (default is 1 hour)
155 | recbin=1
156 | # Whether the occurrence plot should have the same height as the other subplots or expand
157 | # in height as more families are plotted. This can be useful if you plan to group
158 | # occurrence plots with other plots in tabs, though may create cramped plots if you have
159 | # many families plotted (default False)
160 | fixedheight=False
161 | # Number of days prior to last repeater to show on 'recent' timeline (default is 2 weeks)
162 | recplot=14.
163 | # The meta overview puts all plots in tabs. The following settings allow for separate view
164 | # customizations for this plot, but should resemble the 'recent' plots
165 | mminplot=0
166 | mhrbin=1.
167 | mrecbin=1.
168 | mrecplot=30.
169 | # Print a more verbose version of the catalog
170 | verbosecatalog=False
171 | # Annotation file (see included annotation.csv for format)
172 | anotfile=annotation.csv
173 | # Use 'global' or 'family' to define amplitude plot limits
174 | amplims=global
175 |
176 | ### CHECK COMCAT FOR MATCHES ###
177 | # If used, will search the ANSS Comprehensive Catalog for potential located seismicity
178 | # that has arrival times consistent with the trigger time of repeaters, and if found,
179 | # will write those matches to the end of the family HTML file with a map if at least one
180 | # match is local to the study area. This can be time consuming for very large families!
181 | # If the location of interest is in Northern California, it will also check NCEDC for
182 | # located seismicity that may not have made it to ComCat.
183 | checkComCat=False
184 | # Station latitudes and longitudes (can also be a single value near the center of your
185 | # network, here these match the stations above)
186 | stalats=46.200210,46.209550,46.174280,46.193470,46.197170,46.237610,46.147060,46.243860
187 | stalons=-122.190600,-122.188990,-122.180650,-122.236350,-122.151210,-122.223960,-122.152430,-122.137870
188 | # Seconds of error allowable to be considered a match
189 | serr = 5.
190 | # Distance in degrees to search for local matches
191 | locdeg = 0.5
192 | # Distance in degrees to search for regional matches
193 | regdeg = 2.
194 | # Minimum magnitude to allow for regional matches
195 | regmag = 2.5
196 | # Minimum magnitude to allow for teleseismic matches
197 | telemag = 4.5
198 | # Maximum number of events to try to match, chosen in descending order by amplitude on
199 | # 'printsta' station. Defaults to 0, which matches all. I like 50 as a tradeoff between
200 | # matching everything in most clusters and limiting the amount of time spent trying to
201 | # match everything in extremely large clusters with 1000+ members
202 | matchMax = 0
203 |
204 |
205 | ### AUTOMATED SPIKE AND TELESEISM REMOVAL ###
206 | # Spikes, harmonic noise, and teleseisms are automatically put into the 'junk' table and
207 | # never considered as a repeater if they exceed the following thresholds. You can view the
208 | # contents of this table with the plotJunk.py helper script and experiment with settings
209 | # that might be more appropriate for your data. So far these settings have done well at
210 | # not throwing away too many real events and not letting too many bad triggers through.
211 | # Kurtosis of waveform in a window: ~80-100 is appropriate for 5 s window, ~130 for 15 s,
212 | # and ~200 for 25 s
213 | kurtwin=5.
214 | kurtmax=80.
215 | # Kurtosis of amplitude spectrum to eliminate calibration pulses with unnaturally harmonic
216 | # signals--be careful not to set too low or you could eliminate real harmonic events
217 | kurtfmax=150.
218 | # Maximum ratio of outliers to total number of datapoints in trace
219 | oratiomax=0.15
220 | # Teleseisms are filtered based on frequency index (uses above settings)
221 | # Some tests with the default windows suggest that using a cutoff of -1.0 on
222 | # multiple stations should catch a decent number of teleseisms without throwing
223 | # out too many local LF events
224 | # Frequency index minimum threshold (above this is OK)
225 | telefi = -1.0
226 | # Number of stations allowed to pass that exceed threshold
227 | teleok = 2
228 |
--------------------------------------------------------------------------------
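As the header of `settings.cfg` notes, any setting left out falls back to a default; the real parsing lives in `redpy.config.Options`. Purely as an illustration of the pattern (the names and defaults below are taken from the comments in the file, not from REDPy's code), the standard-library `configparser` handles this as:

```python
import configparser

config = configparser.ConfigParser()
config.read("settings.cfg")        # the [Settings] header on line 1 is required
s = config["Settings"]

# Each lookup supplies the documented default as a fallback
title    = s.get("title", fallback="REDPy Catalog")
nsec     = s.getint("nsec", fallback=3600)
samprate = s.getfloat("samprate", fallback=100.0)
stations = s.get("station", fallback="").split(",")   # comma-separated, no spaces

print(title, nsec, samprate, stations)
```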
/spec-file.txt:
--------------------------------------------------------------------------------
1 | # This file may be used to create an environment using:
2 | # $ conda create --name <env> --file <this file>
3 | # platform: osx-64
4 | @EXPLICIT
5 | https://conda.anaconda.org/conda-forge/osx-64/bzip2-1.0.8-h0d85af4_4.tar.bz2
6 | https://conda.anaconda.org/conda-forge/osx-64/c-ares-1.18.1-h0d85af4_0.tar.bz2
7 | https://conda.anaconda.org/conda-forge/osx-64/ca-certificates-2021.10.8-h033912b_0.tar.bz2
8 | https://conda.anaconda.org/conda-forge/osx-64/giflib-5.2.1-hbcb3906_2.tar.bz2
9 | https://conda.anaconda.org/conda-forge/osx-64/jbig-2.1-h0d85af4_2003.tar.bz2
10 | https://conda.anaconda.org/conda-forge/osx-64/jpeg-9e-h5eb16cf_1.tar.bz2
11 | https://conda.anaconda.org/conda-forge/osx-64/libbrotlicommon-1.0.9-h5eb16cf_7.tar.bz2
12 | https://conda.anaconda.org/conda-forge/osx-64/libcxx-14.0.3-hc203e6f_0.tar.bz2
13 | https://conda.anaconda.org/conda-forge/osx-64/libdeflate-1.10-h0d85af4_0.tar.bz2
14 | https://conda.anaconda.org/conda-forge/osx-64/libev-4.33-haf1e3a3_1.tar.bz2
15 | https://conda.anaconda.org/conda-forge/osx-64/libffi-3.4.2-h0d85af4_5.tar.bz2
16 | https://conda.anaconda.org/conda-forge/osx-64/libiconv-1.16-haf1e3a3_0.tar.bz2
17 | https://conda.anaconda.org/conda-forge/osx-64/libwebp-base-1.2.2-h0d85af4_1.tar.bz2
18 | https://conda.anaconda.org/conda-forge/osx-64/libzlib-1.2.11-h6c3fc93_1014.tar.bz2
19 | https://conda.anaconda.org/conda-forge/osx-64/llvm-openmp-14.0.3-ha654fa7_0.tar.bz2
20 | https://conda.anaconda.org/conda-forge/osx-64/ncurses-6.3-h96cf925_1.tar.bz2
21 | https://conda.anaconda.org/conda-forge/osx-64/pthread-stubs-0.4-hc929b4f_1001.tar.bz2
22 | https://conda.anaconda.org/conda-forge/osx-64/xorg-libxau-1.0.9-h35c211d_0.tar.bz2
23 | https://conda.anaconda.org/conda-forge/osx-64/xorg-libxdmcp-1.1.3-h35c211d_0.tar.bz2
24 | https://conda.anaconda.org/conda-forge/osx-64/xz-5.2.5-haf1e3a3_1.tar.bz2
25 | https://conda.anaconda.org/conda-forge/osx-64/yaml-0.2.5-h0d85af4_2.tar.bz2
26 | https://repo.anaconda.com/pkgs/main/osx-64/geos-3.8.0-hb1e8313_0.conda
27 | https://conda.anaconda.org/conda-forge/osx-64/icu-70.1-h96cf925_0.tar.bz2
28 | https://conda.anaconda.org/conda-forge/osx-64/lerc-3.0-he49afe7_0.tar.bz2
29 | https://conda.anaconda.org/conda-forge/osx-64/libbrotlidec-1.0.9-h5eb16cf_7.tar.bz2
30 | https://conda.anaconda.org/conda-forge/osx-64/libbrotlienc-1.0.9-h5eb16cf_7.tar.bz2
31 | https://conda.anaconda.org/conda-forge/osx-64/libedit-3.1.20191231-h0678c8f_2.tar.bz2
32 | https://conda.anaconda.org/conda-forge/osx-64/libgfortran5-9.3.0-h6c81a4c_23.tar.bz2
33 | https://conda.anaconda.org/conda-forge/osx-64/libxcb-1.13-h0d85af4_1004.tar.bz2
34 | https://conda.anaconda.org/conda-forge/osx-64/lz4-c-1.9.3-he49afe7_1.tar.bz2
35 | https://conda.anaconda.org/conda-forge/osx-64/openssl-1.1.1o-hfe4f2af_0.tar.bz2
36 | https://conda.anaconda.org/conda-forge/osx-64/readline-8.1-h05e3726_0.tar.bz2
37 | https://conda.anaconda.org/conda-forge/osx-64/snappy-1.1.9-h6e38e02_0.tar.bz2
38 | https://conda.anaconda.org/conda-forge/osx-64/tk-8.6.12-h5dbffcc_0.tar.bz2
39 | https://conda.anaconda.org/conda-forge/osx-64/zlib-1.2.11-h6c3fc93_1014.tar.bz2
40 | https://conda.anaconda.org/conda-forge/osx-64/brotli-bin-1.0.9-h5eb16cf_7.tar.bz2
41 | https://conda.anaconda.org/conda-forge/osx-64/krb5-1.19.3-hb49756b_0.tar.bz2
42 | https://conda.anaconda.org/conda-forge/osx-64/libgfortran-5.0.0-9_3_0_h6c81a4c_23.tar.bz2
43 | https://conda.anaconda.org/conda-forge/osx-64/libnghttp2-1.47.0-h942079c_0.tar.bz2
44 | https://conda.anaconda.org/conda-forge/osx-64/libpng-1.6.37-h7cec526_2.tar.bz2
45 | https://conda.anaconda.org/conda-forge/osx-64/libssh2-1.10.0-h52ee1ee_2.tar.bz2
46 | https://conda.anaconda.org/conda-forge/osx-64/libxml2-2.9.14-h08a9926_0.tar.bz2
47 | https://conda.anaconda.org/conda-forge/osx-64/sqlite-3.38.5-hd9f0692_0.tar.bz2
48 | https://conda.anaconda.org/conda-forge/osx-64/zstd-1.5.2-h582d3a0_0.tar.bz2
49 | https://conda.anaconda.org/conda-forge/osx-64/blosc-1.21.1-h97e831e_2.tar.bz2
50 | https://conda.anaconda.org/conda-forge/osx-64/brotli-1.0.9-h5eb16cf_7.tar.bz2
51 | https://conda.anaconda.org/conda-forge/osx-64/freetype-2.10.4-h4cff582_1.tar.bz2
52 | https://conda.anaconda.org/conda-forge/osx-64/libcurl-7.83.1-h372c54d_0.tar.bz2
53 | https://conda.anaconda.org/conda-forge/osx-64/libopenblas-0.3.20-openmp_hb3cd9ec_0.tar.bz2
54 | https://conda.anaconda.org/conda-forge/osx-64/libtiff-4.3.0-h17f2ce3_3.tar.bz2
55 | https://conda.anaconda.org/conda-forge/osx-64/libxslt-1.1.33-h5bff336_4.tar.bz2
56 | https://conda.anaconda.org/conda-forge/osx-64/proj-6.2.1-h773a61f_0.tar.bz2
57 | https://conda.anaconda.org/conda-forge/osx-64/python-3.7.12-haf480d7_100_cpython.tar.bz2
58 | https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.0.12-pyhd8ed1ab_0.tar.bz2
59 | https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2
60 | https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2
61 | https://conda.anaconda.org/conda-forge/osx-64/hdf5-1.12.1-nompi_ha60fbc9_104.tar.bz2
62 | https://conda.anaconda.org/conda-forge/noarch/idna-3.3-pyhd8ed1ab_0.tar.bz2
63 | https://conda.anaconda.org/conda-forge/osx-64/lcms2-2.12-h577c468_0.tar.bz2
64 | https://conda.anaconda.org/conda-forge/osx-64/libblas-3.9.0-14_osx64_openblas.tar.bz2
65 | https://conda.anaconda.org/conda-forge/osx-64/libwebp-1.2.2-h28dabe5_0.tar.bz2
66 | https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2
67 | https://conda.anaconda.org/conda-forge/osx-64/openjpeg-2.4.0-h6e7aa92_1.tar.bz2
68 | https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2
69 | https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2
70 | https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.0-pyhd8ed1ab_0.tar.bz2
71 | https://conda.anaconda.org/conda-forge/osx-64/python_abi-3.7-2_cp37m.tar.bz2
72 | https://conda.anaconda.org/conda-forge/noarch/pytz-2022.1-pyhd8ed1ab_0.tar.bz2
73 | https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2
74 | https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.2.0-pyha770c72_1.tar.bz2
75 | https://conda.anaconda.org/conda-forge/noarch/wheel-0.37.1-pyhd8ed1ab_0.tar.bz2
76 | https://conda.anaconda.org/conda-forge/noarch/zipp-3.8.0-pyhd8ed1ab_0.tar.bz2
77 | https://conda.anaconda.org/conda-forge/osx-64/certifi-2021.10.8-py37hf985489_2.tar.bz2
78 | https://conda.anaconda.org/conda-forge/osx-64/cffi-1.15.0-py37h446072c_0.tar.bz2
79 | https://conda.anaconda.org/conda-forge/osx-64/greenlet-1.1.2-py37h0582d14_2.tar.bz2
80 | https://conda.anaconda.org/conda-forge/osx-64/importlib-metadata-4.11.3-py37hf985489_1.tar.bz2
81 | https://conda.anaconda.org/conda-forge/osx-64/libcblas-3.9.0-14_osx64_openblas.tar.bz2
82 | https://conda.anaconda.org/conda-forge/osx-64/liblapack-3.9.0-14_osx64_openblas.tar.bz2
83 | https://conda.anaconda.org/conda-forge/osx-64/lxml-4.8.0-py37h69ee0a8_3.tar.bz2
84 | https://conda.anaconda.org/conda-forge/osx-64/markupsafe-2.1.1-py37h69ee0a8_1.tar.bz2
85 | https://conda.anaconda.org/conda-forge/noarch/packaging-21.3-pyhd8ed1ab_0.tar.bz2
86 | https://conda.anaconda.org/conda-forge/osx-64/pillow-9.1.0-py37h2540ef4_2.tar.bz2
87 | https://conda.anaconda.org/conda-forge/osx-64/pysocks-1.7.1-py37hf985489_5.tar.bz2
88 | https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2
89 | https://conda.anaconda.org/conda-forge/osx-64/pyyaml-6.0-py37h69ee0a8_4.tar.bz2
90 | https://conda.anaconda.org/conda-forge/osx-64/setuptools-59.8.0-py37hf985489_1.tar.bz2
91 | https://conda.anaconda.org/conda-forge/osx-64/tornado-6.1-py37h69ee0a8_3.tar.bz2
92 | https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.2.0-hd8ed1ab_1.tar.bz2
93 | https://conda.anaconda.org/conda-forge/osx-64/unicodedata2-14.0.0-py37h69ee0a8_1.tar.bz2
94 | https://conda.anaconda.org/conda-forge/osx-64/brotlipy-0.7.0-py37h69ee0a8_1004.tar.bz2
95 | https://conda.anaconda.org/conda-forge/osx-64/cryptography-36.0.2-py37h20b3391_1.tar.bz2
96 | https://conda.anaconda.org/conda-forge/osx-64/fonttools-4.33.3-py37h994c40b_0.tar.bz2
97 | https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_0.tar.bz2
98 | https://conda.anaconda.org/conda-forge/osx-64/kiwisolver-1.4.2-py37h18621fa_1.tar.bz2
99 | https://conda.anaconda.org/conda-forge/osx-64/numpy-1.21.6-py37h345d48f_0.tar.bz2
100 | https://conda.anaconda.org/conda-forge/noarch/pip-22.1-pyhd8ed1ab_0.tar.bz2
101 | https://conda.anaconda.org/conda-forge/osx-64/sqlalchemy-1.4.36-py37h994c40b_0.tar.bz2
102 | https://conda.anaconda.org/conda-forge/osx-64/bokeh-2.4.2-py37hf985489_1.tar.bz2
103 | https://conda.anaconda.org/conda-forge/osx-64/matplotlib-base-3.5.2-py37h80cb303_0.tar.bz2
104 | https://conda.anaconda.org/conda-forge/osx-64/numexpr-2.8.0-py37hb276a58_2.tar.bz2
105 | https://conda.anaconda.org/conda-forge/osx-64/pandas-1.3.4-py37h5b83a90_1.tar.bz2
106 | https://conda.anaconda.org/conda-forge/noarch/pyopenssl-22.0.0-pyhd8ed1ab_0.tar.bz2
107 | https://conda.anaconda.org/conda-forge/osx-64/scipy-1.7.3-py37h4e3cf02_0.tar.bz2
108 | https://repo.anaconda.com/pkgs/main/osx-64/shapely-1.7.1-py37h9250791_0.conda
109 | https://repo.anaconda.com/pkgs/main/osx-64/cartopy-0.18.0-py37hf1ba7ce_1.conda
110 | https://conda.anaconda.org/conda-forge/osx-64/pytables-3.7.0-py37hc0663ee_0.tar.bz2
111 | https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.9-pyhd8ed1ab_0.tar.bz2
112 | https://conda.anaconda.org/conda-forge/noarch/requests-2.27.1-pyhd8ed1ab_0.tar.bz2
113 | https://conda.anaconda.org/conda-forge/osx-64/obspy-1.3.0-py37h4105427_0.tar.bz2
114 |
--------------------------------------------------------------------------------