├── __init__.py ├── dwseis ├── __init__.py ├── sta.lst ├── __pycache__ │ ├── shit.cpython-36.pyc │ ├── dwseis.cpython-36.pyc │ └── __init__.cpython-36.pyc ├── config ├── readme ├── ex_seed.py ├── dwseis_conti.py ├── dwseis.py ├── station_us └── mail ├── TwoStation ├── __init__.py ├── __pycache__ │ ├── __init__.cpython-36.pyc │ └── two_station.cpython-36.pyc ├── reject_outliers.py ├── two_station.py └── Station.py ├── ZHratio ├── __init__.py ├── check_sacheader_b_filename.py ├── pre_process.py └── extract.py ├── README.md ├── sac.py ├── Tomo ├── util.py ├── form_test_data.py └── barmin.py ├── pre_process.py ├── Event.py ├── FastTS ├── select.py └── meta.py ├── metadata.py ├── Plot └── gmt.py └── cps.py /__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /dwseis/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /TwoStation/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /ZHratio/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /dwseis/sta.lst: -------------------------------------------------------------------------------- 1 | M05C TA 2 | P08A TA 3 | -------------------------------------------------------------------------------- /dwseis/__pycache__/shit.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HouseJaay/Geopy/HEAD/dwseis/__pycache__/shit.cpython-36.pyc -------------------------------------------------------------------------------- 
/dwseis/__pycache__/dwseis.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HouseJaay/Geopy/HEAD/dwseis/__pycache__/dwseis.cpython-36.pyc -------------------------------------------------------------------------------- /dwseis/__pycache__/__init__.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HouseJaay/Geopy/HEAD/dwseis/__pycache__/__init__.cpython-36.pyc -------------------------------------------------------------------------------- /TwoStation/__pycache__/__init__.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HouseJaay/Geopy/HEAD/TwoStation/__pycache__/__init__.cpython-36.pyc -------------------------------------------------------------------------------- /TwoStation/__pycache__/two_station.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HouseJaay/Geopy/HEAD/TwoStation/__pycache__/two_station.cpython-36.pyc -------------------------------------------------------------------------------- /dwseis/config: -------------------------------------------------------------------------------- 1 | .NAME newhao 2 | .INST nju 3 | .MAIL nju 4 | .EMAIL seishao@126.com 5 | .PHONE 88888 6 | .FAX 66666 7 | .MEDIA FTP 8 | .ALTERNATE MEDIA DLT 9 | .ALTERNATE MEDIA DVD-R 10 | -------------------------------------------------------------------------------- /dwseis/readme: -------------------------------------------------------------------------------- 1 | download seismic data 2 | by ShiJie Hao 3 | 4 | 5 | ---------dwseis_conti.py < sta.lst config 6 | download continuous data ,time range is set up in code 7 | station list is set up in sta.lst 8 | meta information is set up in config 9 | 
-------------------------------------------------------------------------------- /dwseis/ex_seed.py: -------------------------------------------------------------------------------- 1 | from glob import glob 2 | import os 3 | 4 | seed_list = glob('*.seed') 5 | for seed_file in seed_list: 6 | out_dir = "../" + seed_file.split('.')[0] 7 | command = "rdseed -R -d -z 1 -q %s -f %s" %(out_dir,seed_file) 8 | os.mkdir(out_dir) 9 | os.system(command) 10 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Geopy 2 | ## 介绍 3 | 地震学科研实用程序,包括:(1)振幅比计算程序,(2)双台法计算相速度程序,(3)调用其它地震研究程序的接口等。 4 | ## 功能介绍 5 | ### 用双台法计算面波相速度-FastTS 6 | 基于TwoStation改进,通过把常用结果保存在内存,提高计算效率。 7 | ### 计算振幅比-ZHratio 8 | 计算单台的振幅比。 9 | ### 发送数据申请-dwseis 10 | 利用breq_fast服务申请地震数据。 11 | ### cps接口-cps.py 12 | 调用cps面波相关程序的接口,包括计算理论相速度、群速度频散曲线等。 13 | ### 画图辅助-Plot 14 | 用gmt将多张图画在一起需要比较繁琐的调整,本程序提供了拼贴功能,把不同深度或周期的成像结果拼成两列或三列,并自动裁去gmt输出中的白边。 15 | -------------------------------------------------------------------------------- /ZHratio/check_sacheader_b_filename.py: -------------------------------------------------------------------------------- 1 | import obspy 2 | from glob import glob 3 | 4 | dirs = glob('*') 5 | for dirname in dirs: 6 | filenames = glob(dirname + '/' + '*.BHZ.SAC') 7 | for filename in filenames: 8 | st = obspy.read(filename,headonly=True) 9 | start = st[0].stats.starttime 10 | common = "%d.%03d.%02d.%02d.%02d.00"%( 11 | start.year,start.julday,start.hour,start.minute,start.second) 12 | if common != filename[15:-8]: 13 | print(common) 14 | print(filename) 15 | -------------------------------------------------------------------------------- /dwseis/dwseis_conti.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import os 3 | 4 | sta_file="sta.lst" #station list 5 | start_time=datetime.date(2006,2,1) #time range 6 | 
end_time=datetime.date(2007,11,1) 7 | time_delta=datetime.timedelta(days=1) #file length 8 | 9 | with open(sta_file,'r') as f: 10 | stas=f.read().splitlines() 11 | for sta in stas: 12 | with open('config','r') as f: 13 | text=f.read() 14 | label=sta.split(' ')[1]+'_'+sta.split(' ')[0] 15 | text=text+'.LABEL '+label+'\n.QUALITY B\n.END\n' 16 | day=start_time 17 | while(day threshold: 47 | density.add(str(rnodes[i][0]) + '_' + str(rnodes[i][1])) 48 | return density 49 | 50 | 51 | if __name__ == '__main__': 52 | number_of_path('/home/haosj/data/last2/result2_rejected2/', (10, 80)) 53 | rays = read_ray_from_into('/home/haosj/data/last2/tomo/tibet_into') 54 | -------------------------------------------------------------------------------- /ZHratio/pre_process.py: -------------------------------------------------------------------------------- 1 | from subprocess import Popen,PIPE 2 | from glob import glob 3 | import obspy 4 | import os 5 | from os.path import isfile 6 | 7 | os.putenv("SAC_DISPLAY_COPYRIGHT",'0') 8 | 9 | def trans(filename,respname,writename,f,d): 10 | p = Popen(['sac'], stdin=PIPE, stdout=PIPE) 11 | s = "" 12 | s += "r %s\n" % filename 13 | s += "decimate %d;decimate %d\n" % (d[0],d[1]) 14 | s += "rmean;rtrend\n" 15 | s += "transfer from polezero subtype %s to none freq %f %f %f %f\n" % ( 16 | respname,f[0],f[1],f[2],f[3]) 17 | s += "w %s\n" % writename 18 | s += "q\n" 19 | r = p.communicate(s.encode()) 20 | #print(r) 21 | 22 | def do_trans(sacdir,respdir,writedir): 23 | saclist = glob(sacdir+'*.SAC') 24 | print('processing %s' %sacdir) 25 | for sacpath in saclist: 26 | sacfile = sacpath.split('/')[-1] 27 | sta,ch = sacfile.split('.')[1],sacfile.split('.')[-2] 28 | respname = respdir + "SAC_PZs_X2_%s_%s_00" %(sta,ch) 29 | writename = writedir + '.'.join(sacfile.split('.')[:-2]) +\ 30 | '.' 
+ ch[-1] 31 | trans(sacpath,respname,writename,(0.008,0.012,3,4),(5,2)) 32 | 33 | def do_rotate(peddir): 34 | zfiles = glob(peddir+'*.Z') 35 | for zfile in zfiles: 36 | common = '.'.join(zfile.split('.')[:-1]) 37 | efile,nfile = common + '.E',common + '.N' 38 | rfile,tfile = common + '.R',common + '.T' 39 | if isfile(efile) and isfile(nfile): 40 | p = Popen(['sac'],stdin=PIPE,stdout=PIPE) 41 | s = "" 42 | s += "r %s\n" %efile 43 | s += "ch cmpinc 90 cmpaz 90;wh\n" 44 | s += "r %s\n" %nfile 45 | s += "ch cmpinc 90 cmpaz 0;wh\n" 46 | s += "r %s %s\n" %(efile,nfile) 47 | s += "rotate to gcp\n" 48 | s += "w %s %s\n" %(rfile,tfile) 49 | s += "q\n" 50 | r = p.communicate(s.encode()) 51 | #print(r) 52 | else: 53 | print('file error %s' %zfile) 54 | 55 | if __name__ == '__main__': 56 | root = '/home/haosj/data/tibet/' 57 | dirs = os.listdir(root+'ordos/') 58 | for sta in dirs: 59 | writedir = root + 'ped/' + sta + '/' 60 | os.mkdir(writedir) 61 | do_trans(root+'ordos/'+sta+'/',root+'RESP/RESP/',writedir) 62 | do_rotate(writedir) 63 | -------------------------------------------------------------------------------- /pre_process.py: -------------------------------------------------------------------------------- 1 | from subprocess import Popen,PIPE 2 | from glob import glob 3 | import obspy 4 | import os 5 | from os.path import isfile 6 | 7 | os.putenv("SAC_DISPLAY_COPYRIGHT", '0') 8 | 9 | 10 | def trans(filename, respname, writename, f, d): 11 | p = Popen(['sac'], stdin=PIPE, stdout=PIPE) 12 | s = "" 13 | s += "r %s\n" % filename 14 | s += "decimate %d;decimate %d\n" % (d[0], d[1]) 15 | s += "rmean;rtrend\n" 16 | s += "transfer from polezero subtype %s to none freq %f %f %f %f\n" % ( 17 | respname, f[0], f[1], f[2], f[3]) 18 | s += "w %s\n" % writename 19 | s += "q\n" 20 | r = p.communicate(s.encode()) 21 | #print(r) 22 | 23 | 24 | def do_trans(sacdir, respdir, writedir): 25 | saclist = glob(sacdir+'*.BH?') 26 | print('processing %s' %sacdir) 27 | for sacpath in saclist: 28 
| sacfile = sacpath.split('/')[-1] 29 | sta, ch = sacfile.split('.')[1], sacfile.split('.')[-1] 30 | respname = respdir + "SAC_PZs_X2_%s_%s_00" % (sta, ch) 31 | writename = writedir + '.'.join(sacfile.split('.')[:-1]) +\ 32 | '.' + ch[-1] 33 | trans(sacpath, respname, writename, (0.008, 0.01, 3, 4), (5, 2)) 34 | 35 | 36 | def do_rotate(peddir): 37 | zfiles = glob(peddir+'*.Z') 38 | for zfile in zfiles: 39 | common = '.'.join(zfile.split('.')[:-1]) 40 | efile,nfile = common + '.E',common + '.N' 41 | rfile,tfile = common + '.R',common + '.T' 42 | if isfile(efile) and isfile(nfile): 43 | p = Popen(['sac'], stdin=PIPE,stdout=PIPE) 44 | s = "" 45 | s += "r %s\n" % efile 46 | s += "ch cmpinc 90 cmpaz 90;wh\n" 47 | s += "r %s\n" %nfile 48 | s += "ch cmpinc 90 cmpaz 0;wh\n" 49 | s += "r %s %s\n" %(efile, nfile) 50 | s += "rotate to gcp\n" 51 | s += "w %s %s\n" % (rfile, tfile) 52 | s += "q\n" 53 | r = p.communicate(s.encode()) 54 | #print(r) 55 | else: 56 | print('file error %s' %zfile) 57 | 58 | 59 | if __name__ == '__main__': 60 | root = '/home/haosj/data/tibet/' 61 | dirs = os.listdir(root+'ordos/') 62 | for sta in dirs: 63 | writedir = root + 'ped/' + sta + '/' 64 | os.mkdir(writedir) 65 | do_trans(root+'ordos/'+sta+'/',root+'RESP/RESP/',writedir) 66 | do_rotate(writedir) -------------------------------------------------------------------------------- /dwseis/dwseis.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import os 3 | from email.mime.text import MIMEText 4 | import smtplib 5 | from time import sleep 6 | import random 7 | import sys 8 | import Geopy.metadata as mt 9 | 10 | sys.path.insert(0,'/home/hao_shijie/work/two_station') 11 | 12 | import cal_station as cs 13 | 14 | def def_mail(row_pair,evt): 15 | with open('config','r') as f: 16 | header = f.read() 17 | label = row_pair['station1'] +'_'+ row_pair['station2'] 18 | header = header + '.LABEL ' + label + '\n.QUALITY B\n.END\n' 19 | def 
def_window(evtime): 20 | begin = evtime 21 | end = evtime + 5000 22 | data = " %d %02d %02d %02d %02d 00.0 %d %02d %02d %02d %02d 00.0 1 BHZ\n" \ 23 | % (begin.year,begin.month,begin.day,begin.hour,begin.minute,\ 24 | end.year,end.month,end.day,end.hour,end.minute) 25 | return data 26 | 27 | body = '' 28 | sta1 = row_pair['station1'] +' '+ row_pair['net1'] 29 | sta2 = row_pair['station2'] +' '+ row_pair['net2'] 30 | for i in evt.index: 31 | evtime = evt.loc[i]['time'] 32 | req = def_window(evtime) 33 | body += sta1 + req 34 | body += sta2 + req 35 | return header+body 36 | 37 | def send_mail(text): 38 | msg = MIMEText(text,'plain','utf-8') 39 | from_addr = "*********" 40 | password = "*********" 41 | to_addr = "breq_fast@iris.washington.edu" 42 | smtp_server = "smtp.126.com" 43 | msg['From'] = from_addr 44 | msg['To'] = to_addr 45 | msg['Subject'] = "Thesis task" 46 | server = smtplib.SMTP(smtp_server, 25) 47 | server.set_debuglevel(1) 48 | server.login(from_addr, password) 49 | server.sendmail(from_addr, [to_addr], msg.as_string()) 50 | server.quit() 51 | 52 | def do_send(pair,evt_full): 53 | for i in pair.index: 54 | row_pair = pair.loc[i] 55 | evt = cs.get_event(row_pair,evt_full,dep_max,dist_min,dist_max,mag_min) 56 | print(len(evt)) 57 | text = def_mail(row_pair,evt) 58 | send_mail(text) 59 | print('send mail for data %s %s' %(row_pair['station1'],row_pair['station2'])) 60 | sleep(random.randrange(30,90)) 61 | 62 | dep_max = 30 63 | dist_min = 2000 64 | dist_max = 9000 65 | mag_min = 5.8 66 | 67 | if __name__=="__main__": 68 | evt_full = mt.read_event('evt_2004_2016') 69 | sta = mt.read_station('station_us') 70 | pair = mt.mk_sta_pairs(sta) 71 | pair.loc[:,'event'] = pair.apply(cs.do_check,axis='columns', 72 | args=(evt_full,dep_max,dist_min,dist_max,mag_min)) 73 | pair_temp = pair[pair['event']>5] 74 | #do_send(pair_temp,evt_full) 75 | -------------------------------------------------------------------------------- /Event.py: 
-------------------------------------------------------------------------------- 1 | import obspy 2 | from glob import glob 3 | from collections import namedtuple 4 | import matplotlib.pyplot as plt 5 | import cartopy.crs as ccrs 6 | 7 | 8 | class Event(object): 9 | def __init__(self, tr, filename): 10 | Stats = namedtuple('Stats', ['time', 'lat', 'lon', 'depth']) 11 | time = tr.stats.starttime + tr.stats.sac['o'] 12 | self.evtinfo = Stats(time=time, lat=tr.stats.sac['evla'], 13 | lon=tr.stats.sac['evlo'], depth=tr.stats.sac['evdp']) 14 | self.sta = {} 15 | if tr.stats.station in self.sta: 16 | raise NameError("duplicate event in %s" % tr.stats.station) 17 | else: 18 | self.sta[tr.stats.station] = filename 19 | 20 | def addstation(self, filename): 21 | station = (filename.split('/')[-1]).split('.')[1] 22 | self.sta[station] = filename 23 | 24 | def getfile(self, staname): 25 | return self.sta[staname] 26 | 27 | def getlatlon(self): 28 | return self.evtinfo.lat, self.evtinfo.lon 29 | 30 | def gettime(self): 31 | return self.evtinfo.time 32 | 33 | def __str__(self): 34 | temp = "%.2f %.2f %.2f %d " % ( 35 | self.evtinfo.lat, self.evtinfo.lon, self.evtinfo.depth, len(self.sta)) 36 | return temp + str(self.evtinfo.time) 37 | 38 | 39 | class Events(object): 40 | def __init__(self): 41 | self.evts = {} 42 | 43 | def addfromdir(self, directory, timewild, filewild='*.Z'): 44 | times = map(lambda x: x.split('/')[-1], glob(directory + timewild)) 45 | for time in times: 46 | files = glob(directory + time + '/' + filewild) 47 | if len(files) > 0: 48 | filename = files[0] 49 | tr = obspy.read(filename)[0] 50 | self.evts[time] = Event(tr, filename) 51 | for file in files: 52 | self.evts[time].addstation(file) 53 | 54 | def __iter__(self): 55 | return iter(self.evts.values()) 56 | 57 | def __len__(self): 58 | return len(self.evts) 59 | 60 | def plotlocation(self): 61 | fig = plt.figure(figsize=(10, 5)) 62 | ax = fig.add_subplot( 63 | 1, 1, 1, 
projection=ccrs.AzimuthalEquidistant(105, 33)) 64 | 65 | ax.set_global() 66 | ax.stock_img() 67 | ax.coastlines() 68 | 69 | for evt in self.evts.values(): 70 | ax.plot( 71 | *evt.getlatlon()[::-1], marker='*', color='red', 72 | markersize=5, transform=ccrs.Geodetic()) 73 | 74 | plt.show() 75 | 76 | 77 | if __name__ == '__main__': 78 | directory = '/home/haosj/data/neTibet/data/' 79 | evts = Events() 80 | evts.addfromdir(directory, '2013*') 81 | for evt in evts: 82 | print(evt) 83 | -------------------------------------------------------------------------------- /FastTS/select.py: -------------------------------------------------------------------------------- 1 | from glob import glob 2 | import numpy as np 3 | from scipy import stats 4 | import matplotlib.pyplot as plt 5 | import os 6 | 7 | 8 | def max_conti_true(arr, thr): 9 | """ 10 | find longest continuous true subarray which length > thr 11 | :param arr: boolean array 12 | :param thr: minimum length, suppose thr > len(arr)/2 13 | :return: subarray index boundary [l, r] 14 | if no such subarray, return [0, 0] 15 | """ 16 | j = 0 17 | while j < len(arr): 18 | while j < len(arr) and (not arr[j]): 19 | j += 1 20 | k = j 21 | while j < len(arr) and arr[j]: 22 | j += 1 23 | if j - k > thr: 24 | return [k, j] 25 | return [0, 0] 26 | 27 | 28 | def window(arr, win): 29 | arr[:win[0]] = np.NaN 30 | arr[win[1]:] = np.NaN 31 | 32 | 33 | def reject_outliers(prev_dir, out_dir, thr_num=5, thr_len=60, thr_std=0.03, 34 | x_value=np.arange(10, 80), n=1.5, verbose=False): 35 | """ 36 | statistically reject outliers 37 | :param prev_dir: directory of dispersion curve 38 | :param out_dir: directory to save selected dispersion, 39 | result[0] mean vel, result[1] std 40 | :param thr_num: threshold quantities of result 41 | :param thr_len: threshold quantities of continuous data point 42 | :param thr_std: threshold of std/mean 43 | :param x_value: frequency of peroids corresponding to result data, used to plot 44 | :param n: control 
outlier threshold 45 | :param verbose: display detailed plot 46 | :return: None 47 | """ 48 | if not os.path.exists(out_dir): 49 | os.mkdir(out_dir) 50 | prev_dirs = glob(prev_dir + '*') 51 | for dire in prev_dirs: 52 | print(dire) 53 | pair = dire.split('/')[-1] 54 | results = [] 55 | disp_files = glob(dire + '/*') 56 | if len(disp_files) < thr_num: 57 | print('1') 58 | continue 59 | for disp_file in disp_files: 60 | results.append(np.loadtxt(disp_file)) 61 | results = np.array(results) 62 | mean = np.nanmean(results, axis=0) 63 | std = np.nanstd(results, axis=0) 64 | # reject outliers 65 | selected = np.zeros(results.shape) 66 | selected[:, :] = np.NaN 67 | for i in range(len(disp_files)): 68 | mask = abs(results[i] - mean) < n * std 69 | # gradient 70 | mask = (np.gradient(results[i]) > 0) & mask 71 | l, r = max_conti_true(mask, thr_len) 72 | selected[i, l:r] = results[i, l:r] 73 | print(pair) 74 | # calculate mean and std of selected data, and window it 75 | smean = np.nanmean(selected, axis=0) 76 | # sstd = np.nanstd(selected, axis=0) 77 | sstd = stats.sem(selected, axis=0, nan_policy='omit') 78 | mask = (sstd / smean) < thr_std 79 | mask = mask & (np.sum(~np.isnan(selected), axis=0) > thr_num) 80 | out_range = max_conti_true(mask, thr_len) 81 | window(smean, out_range) 82 | window(sstd, out_range) 83 | # write 84 | np.savetxt(out_dir+pair, np.vstack([smean, sstd])) 85 | # verbose mode 86 | if verbose: 87 | # ylim = (1.0, 1.7) 88 | ylim = (2.9, 4.3) 89 | fig, axes = plt.subplots(1, 2, sharex=True, sharey=True) 90 | fig.set_size_inches(10, 5) 91 | for i in range(len(results)): 92 | axes[0].plot(x_value, results[i], color='orange') 93 | axes[0].plot(x_value, selected[i], color='black') 94 | axes[1].plot(x_value, mean, color='red', label='raw') 95 | axes[1].plot(x_value, smean, color='blue', label='selected') 96 | axes[1].grid(color='grey', linestyle='dashed') 97 | axes[0].grid(color='grey', linestyle='dashed') 98 | axes[1].legend() 99 | axes[0].set_ylim(*ylim) 
100 | axes[0].set_xlim(min(x_value), max(x_value)) 101 | plt.show(block=False) 102 | _ = input('>') 103 | plt.close() 104 | -------------------------------------------------------------------------------- /metadata.py: -------------------------------------------------------------------------------- 1 | import distaz 2 | import matplotlib.pyplot as plt 3 | import numpy as np 4 | import pandas 5 | from obspy.core import UTCDateTime 6 | 7 | 8 | # cut every seismogram in a stream 9 | def cut(st, start, end): 10 | delta = st[0].stats.delta 11 | width = end - start 12 | npts = int(width/delta) 13 | # check 14 | if(start > end): 15 | raise ValueError('starttime must be smaller than endtime') 16 | for i in range(len(st)): 17 | if(st[i].stats.delta != delta): 18 | raise ValueError('all data must have same delta') 19 | if(st[i].stats.starttime > start or st[i].stats.endtime < end): 20 | raise ValueError('invalid time range') 21 | for i in range(len(st)): 22 | n = int((start - st[i].stats.starttime)/delta) 23 | st[i].data = st[i].data[n:n+npts] 24 | st[i].stats.starttime = start 25 | 26 | # plot seismogram in a stream 27 | def plot(st): 28 | n = st[0].stats.npts 29 | delta = st[0].stats.delta 30 | t = np.arange(n)*delta 31 | rows = len(st) 32 | fig,axes = plt.subplots(nrows=rows,ncols=1) 33 | for i in range(rows): 34 | axes[i].plot(t,st[i].data,color='black') 35 | plt.show() 36 | 37 | # input file is bqmail output format 38 | def read_station(filename): 39 | """ 40 | input file format: 41 | net station lat lon start end 42 | """ 43 | col_name = ['net','station','lat','lon','start','end'] 44 | sta = pandas.read_table(filename,sep='\s+',names=col_name) 45 | sta['start'] = sta['start'].map(read_date) 46 | sta['end'] = sta['end'].map(read_date) 47 | return sta 48 | 49 | # read date in year/month/day to UTCDateTime format 50 | def read_date(date): 51 | """ 52 | read date year/month/day 53 | return UTCDateTime 54 | """ 55 | temp = date.split('/') 56 | year = int(temp[0]) 57 | month 
= int(temp[1]) 58 | day = int(temp[2]) 59 | return UTCDateTime(year,month,day) 60 | 61 | def merge_date(start1,end1,start2,end2): 62 | if(start1 > start2): 63 | start = start1 64 | else: 65 | start = start2 66 | if(end1 > end2): 67 | end = end2 68 | else: 69 | end = end1 70 | if(end < start): 71 | raise ValueError('station pair must have common work date') 72 | else: 73 | return start,end 74 | 75 | # input value is output of read_table 76 | def mk_sta_pairs(sta): 77 | col_name = ['net1','station1','lat1','lon1','net2','station2','lat2','lon2','start','end'] 78 | pair = pandas.DataFrame(columns=col_name) 79 | row = 0 80 | for i in range(len(sta)-1): 81 | for j in range(i+1,len(sta)): 82 | temp1 = [sta['net'][i],sta['station'][i],sta['lat'][i],sta['lon'][i]] 83 | temp2 = [sta['net'][j],sta['station'][j],sta['lat'][j],sta['lon'][j]] 84 | try: 85 | start,end = merge_date(sta['start'][i],sta['end'][i],sta['start'][j],sta['end'][j]) 86 | except ValueError: 87 | print("date error:%s %s and %s %s" % (sta['net'][i],sta['station'][i],sta['net'][j],sta['station'][j])) 88 | continue 89 | pair.loc[row] = temp1 + temp2 + [start,end] 90 | row += 1 91 | return pair 92 | 93 | def cal_dist(row): 94 | d = distaz.distaz(row['lat1'],row['lon1'],row['lat2'],row['lon2']).getDelta() 95 | return d 96 | 97 | # input file in bqmail output format 98 | def read_event(filename): 99 | """ 100 | input file format: 101 | year month day jday hour min sec lat lon dep mw 102 | """ 103 | col_name = ['year','month','day','jday','hour','min','sec','lat','lon','dep','mw'] 104 | evt = pandas.read_table(filename, sep='\s+', names=col_name) 105 | time = lambda x : UTCDateTime(int(x['year']),int(x['month']),int(x['day']),int(x['hour']),int(x['min']),int(x['sec'])) 106 | evt['time'] = evt.apply(time,axis='columns') 107 | del evt['year'], evt['month'], evt['day'], evt['jday'], evt['hour'], evt['min'], evt['sec'] 108 | return evt 109 | 110 | # input list of filenames 111 | def read_data(filelist): 112 | pass 
113 | def plot_data(data): 114 | pass 115 | -------------------------------------------------------------------------------- /TwoStation/reject_outliers.py: -------------------------------------------------------------------------------- 1 | from glob import glob 2 | import numpy as np 3 | from scipy import stats 4 | import matplotlib.pyplot as plt 5 | import os 6 | 7 | 8 | def max_conti_true(arr, thr): 9 | """ 10 | find longest continuous true subarray which length > thr 11 | :param arr: boolean array 12 | :param thr: minimum length, suppose thr > len(arr)/2 13 | :return: subarray index boundary [l, r] 14 | if no such subarray, return [0, 0] 15 | """ 16 | j = 0 17 | while j < len(arr): 18 | while j < len(arr) and (not arr[j]): 19 | j += 1 20 | k = j 21 | while j < len(arr) and arr[j]: 22 | j += 1 23 | if j - k > thr: 24 | return [k, j] 25 | return [0, 0] 26 | 27 | 28 | def window(arr, win): 29 | arr[:win[0]] = np.NaN 30 | arr[win[1]:] = np.NaN 31 | 32 | 33 | def reject_outliers(prev_dir, out_dir, thr_num=5, thr_len=40, thr_std=0.03, 34 | x_value=np.arange(10, 80), n=1.5, verbose=False): 35 | """ 36 | statistically reject outliers 37 | :param prev_dir: directory of dispersion curve 38 | :param out_dir: directory to save selected dispersion, 39 | result[0] mean vel, result[1] std 40 | :param thr_num: threshold quantities of result 41 | :param thr_len: threshold quantities of continuous data point 42 | :param thr_std: threshold of std/mean 43 | :param x_value: frequency of peroids corresponding to result data, used to plot 44 | :param n: control outlier threshold 45 | :param verbose: display detailed plot 46 | :return: None 47 | """ 48 | if not os.path.exists(out_dir): 49 | os.mkdir(out_dir) 50 | prev_dirs = glob(prev_dir + '*') 51 | for dire in prev_dirs: 52 | print(dire) 53 | pair = dire.split('/')[-1] 54 | results = [] 55 | disp_files = glob(dire + '/*') 56 | if len(disp_files) < thr_num: 57 | print('1') 58 | continue 59 | for disp_file in disp_files: 60 | 
results.append(np.loadtxt(disp_file)) 61 | results = np.array(results) 62 | mean = np.nanmean(results, axis=0) 63 | std = np.nanstd(results, axis=0) 64 | # quantity and quality(std) of data 65 | print(std/mean) 66 | mask = (std / mean) < thr_std 67 | mask = mask & (np.sum(~np.isnan(results), axis=0) > thr_len) 68 | out_range = max_conti_true(mask, thr_len) 69 | if out_range[1] - out_range[0] == 0: 70 | print('2') 71 | continue 72 | # reject outliers 73 | selected = np.zeros(results.shape) 74 | selected[:, :] = np.NaN 75 | for i in range(len(disp_files)): 76 | mask = abs(results[i] - mean) < n * std 77 | l, r = max_conti_true(mask, thr_len) 78 | selected[i, l:r] = results[i, l:r] 79 | print(pair) 80 | # calculate mean and std of selected data, and window it 81 | smean = np.nanmean(selected, axis=0) 82 | # sstd = np.nanstd(selected, axis=0) 83 | sstd = stats.sem(selected, axis=0, nan_policy='omit') 84 | window(smean, out_range) 85 | window(sstd, out_range) 86 | # write 87 | np.savetxt(out_dir+pair, np.vstack([smean, sstd])) 88 | # verbose mode 89 | if verbose: 90 | ylim = (1.0, 1.7) 91 | # ylim = (2.9, 4.3) 92 | fig, axes = plt.subplots(1, 2, sharex=True, sharey=True) 93 | fig.set_size_inches(10, 5) 94 | for i in range(len(results)): 95 | axes[0].plot(x_value, results[i], color='orange') 96 | axes[0].plot(x_value, selected[i], color='black') 97 | axes[1].plot(x_value, mean, color='red', label='raw') 98 | axes[1].plot(x_value, smean, color='blue', label='selected') 99 | axes[1].grid(color='grey', linestyle='dashed') 100 | axes[0].grid(color='grey', linestyle='dashed') 101 | axes[1].legend() 102 | axes[0].set_ylim(*ylim) 103 | axes[0].set_xlim(min(x_value), max(x_value)) 104 | plt.show(block=False) 105 | _ = input('>') 106 | plt.close() 107 | 108 | 109 | if __name__ == '__main__': 110 | root = '/home/haosj/data/neTibet/' 111 | reject_outliers(root+'result/', root+'result_new/') 112 | -------------------------------------------------------------------------------- 
/dwseis/station_us: -------------------------------------------------------------------------------- 1 | TA 109C 32.89 -117.11 2004/05/04 2599/12/31 2 | TA 112A 32.54 -114.58 2007/05/06 2008/10/22 3 | TA BNLO 37.13 -122.17 2005/04/20 2007/09/02 4 | TA ELFS 40.62 -120.73 2005/05/20 2007/10/09 5 | TA HAST 36.39 -121.55 2004/07/19 2007/09/02 6 | TA HATC 40.82 -121.46 2005/05/19 2007/09/07 7 | TA HELL 36.68 -119.02 2005/02/11 2007/10/14 8 | TA LAVA 38.76 -120.74 2004/12/09 2007/12/08 9 | TA M02C 41.39 -122.85 2005/06/30 2016/05/01 10 | TA M03C 41.27 -122.12 2005/07/13 2007/11/13 11 | TA M04C 41.78 -121.84 2005/06/30 2016/04/25 12 | TA M05C 41.36 -121.15 2005/06/29 2007/10/10 13 | TA M06C 41.20 -120.48 2005/06/28 2007/10/21 14 | TA M07A 41.39 -119.17 2006/02/17 2008/04/15 15 | TA M08A 41.45 -118.38 2006/02/14 2008/04/02 16 | TA M09A 41.42 -117.45 2006/04/26 2008/04/01 17 | TA M10A 41.52 -116.54 2006/05/05 2008/08/18 18 | TA M11A 41.43 -115.79 2006/05/06 2008/08/11 19 | TA M12A 41.42 -114.92 2006/05/05 2008/08/09 20 | TA M13A 41.36 -114.17 2006/05/04 2008/08/19 21 | TA N02C 40.82 -123.31 2005/07/01 2007/11/19 22 | TA N02D 40.97 -122.70 2009/11/05 2016/04/28 23 | TA N06A 40.75 -119.83 2006/07/13 2008/04/14 24 | TA N07A 40.77 -118.97 2006/02/21 2006/04/27 25 | TA N07B 40.78 -118.97 2006/04/27 2008/03/18 26 | TA N08A 40.78 -118.13 2006/02/15 2008/03/20 27 | TA N09A 40.85 -117.52 2006/04/20 2008/03/07 28 | TA N10A 40.72 -116.51 2006/05/02 2008/08/15 29 | TA N11A 40.82 -115.74 2006/05/03 2008/08/13 30 | TA N12A 40.85 -115.04 2006/05/10 2008/08/09 31 | TA N13A 40.86 -114.20 2006/05/19 2008/08/20 32 | TA O01C 40.14 -123.82 2005/08/25 2007/11/17 33 | TA O02C 40.18 -122.79 2005/07/20 2007/10/11 34 | TA O02D 40.18 -122.79 2010/08/01 2016/05/02 35 | TA O03C 40.00 -122.03 2005/04/19 2007/09/06 36 | TA O03D 40.29 -121.80 2010/08/02 2012/10/01 37 | TA O03E 40.29 -121.80 2012/10/08 2016/05/02 38 | TA O04C 40.32 -121.09 2005/05/25 2007/11/27 39 | TA O05C 39.96 -120.92 2005/04/20 
2007/11/20 40 | TA O06A 40.16 -119.83 2006/02/24 2008/03/05 41 | TA O07A 40.16 -118.88 2006/03/01 2008/03/19 42 | TA O08A 40.29 -118.16 2006/02/24 2008/03/06 43 | TA O09A 40.17 -117.19 2006/04/22 2008/03/08 44 | TA O10A 40.29 -116.50 2006/05/16 2008/08/15 45 | TA O11A 40.13 -115.66 2006/05/17 2008/08/12 46 | TA O12A 40.27 -114.75 2006/05/18 2008/09/03 47 | TA P01C 39.47 -123.34 2005/08/24 2007/09/04 48 | TA P05C 39.30 -120.61 2005/06/07 2007/10/13 49 | TA P06A 39.68 -119.90 2006/04/28 2008/03/04 50 | TA P07A 39.54 -118.89 2006/02/25 2008/03/10 51 | TA P08A 39.69 -118.08 2006/02/26 2008/03/11 52 | TA P09A 39.55 -117.14 2006/04/14 2008/03/08 53 | TA P10A 39.62 -116.46 2006/06/07 2008/09/08 54 | TA P11A 39.55 -115.75 2006/05/28 2008/09/09 55 | TA P12A 39.47 -114.91 2006/05/30 2008/09/02 56 | TA P13A 39.45 -114.02 2007/03/01 2008/09/05 57 | TA Q03C 38.63 -122.01 2005/06/28 2007/12/04 58 | TA Q04C 38.84 -121.38 2004/12/10 2007/10/12 59 | TA Q07A 38.94 -118.81 2006/02/27 2008/03/13 60 | TA Q08A 38.86 -117.93 2006/03/02 2008/03/14 61 | TA Q09A 38.83 -117.18 2006/04/13 2008/03/22 62 | TA Q10A 38.82 -116.40 2007/02/05 2008/09/13 63 | TA Q11A 38.85 -115.65 2006/06/08 2008/09/11 64 | TA Q12A 39.04 -114.83 2006/05/26 2008/09/04 65 | TA Q13A 38.96 -114.02 2007/03/02 2008/09/04 66 | TA R04C 38.26 -120.94 2004/12/08 2007/12/04 67 | TA R05C 38.70 -120.08 2005/08/05 2007/10/05 68 | TA R06C 38.52 -119.45 2005/08/04 2008/03/15 69 | TA R07C 38.09 -119.05 2005/08/03 2007/10/02 70 | TA R08A 38.35 -118.11 2006/03/03 2008/03/24 71 | TA R09A 38.24 -117.07 2006/06/10 2008/03/22 72 | TA R10A 38.29 -116.30 2006/06/09 2008/09/11 73 | TA R11A 38.35 -115.59 2007/02/09 2017/03/16 74 | TA R12A 38.33 -114.61 2007/01/25 2008/09/10 75 | TA S04C 37.50 -121.33 2004/12/15 2007/09/03 76 | TA S05C 37.35 -120.33 2005/04/18 2007/12/08 77 | TA S06C 37.88 -119.85 2005/10/08 2007/10/04 78 | TA S08C 37.50 -118.17 2005/08/02 2007/10/03 79 | TA S09A 37.72 -117.22 2006/04/12 2008/03/24 80 | TA S10A 37.92 -116.59 
# TwoStation/two_station.py -- reconstructed from a line-mangled repository dump.
from scipy import signal
import numpy as np

VRANGE = (3, 5)   # default velocity search window (km/s) used by do_ts
CH = 'BHZ'        # channel code this module is meant for


def pick_global(array):
    """Return the sample index of the envelope maximum of *array*."""
    # Deferred import: keeps the pure-numpy helpers usable without obspy.
    import obspy.signal.filter
    arr_env = obspy.signal.filter.envelope(array)
    return arr_env.argmax()


def window(array, n0, width):
    """Zero *array* outside [n0-width, n0+width); bounds are clamped."""
    l = max(n0 - width, 0)
    r = min(n0 + width, len(array) - 1)
    w = np.zeros(len(array))
    w[l:r] = 1
    return w * array


def norm(array):
    """Scale *array* so its maximum absolute value is 1."""
    m = max(abs(array.max()), abs(array.min()))
    return array / m


def pick(cor, uini, u):
    """Track a dispersion ridge: from the first index where u[i] <= uini,
    climb *cor* to the nearest local maximum and return its velocity.

    :param cor: correlation amplitudes, same length as u
    :param uini: velocity picked at the previous period
    :param u: candidate velocities, sorted descending (dist/t)
    :return: picked velocity, or -1 when the start lands on either end of u
    """
    j = 0  # fix: j was unbound (NameError) when no u[i] <= uini
    for i in range(len(u)):
        if u[i] <= uini:
            j = i
            break
    if j == 0 or j == (len(u) - 1):
        return -1
    if cor[j+1] > cor[j]:
        while j < (len(u) - 1) and cor[j+1] > cor[j]:
            j += 1
        i = j
    elif cor[j-1] > cor[j]:
        while j > 0 and cor[j-1] > cor[j]:
            j -= 1
        i = j
    return u[i]


def onclick(event):
    """matplotlib callback: remember the last clicked data coordinates."""
    global click_x, click_y
    click_x, click_y = event.xdata, event.ydata


def two_station(st, dist, vrange, prange, plotarg=None):
    """Measure inter-station phase velocity by narrow-band cross-correlation.

    :param st: two-trace sequence; the trace with larger epicentral distance
        (stats.sac.dist) is correlated first
    :param dist: inter-station distance in km
    :param vrange: (vmin, vmax) velocity search window, km/s
    :param prange: (pmin, pmax) period range, s
    :param plotarg: optional (axes, ax2) matplotlib handles; plotting is
        skipped when None (fix: __main__ called this without a plotarg)
    :return: picked velocity per period, NaN where the pick failed
    """
    if plotarg is not None:
        axes, ax2 = plotarg[0], plotarg[1]

    delta = st[0].stats.delta
    npts = st[0].stats.npts

    len_cor = 2*npts - 1
    t = np.arange(1, int((len_cor+1)/2)) * delta
    v = dist / t
    mask = (v > vrange[0]) * (v < vrange[1])
    v = v[mask]
    # NOTE(review): the scrape swallowed the original lines between the two
    # comparisons; COR/P/V below are restored from how they are used later
    # (COR[row] = cor, ax2.contourf(P, V, COR)) -- confirm against the repo.
    COR = np.empty([prange[1] - prange[0], len(v)])
    P, V = np.meshgrid(np.arange(prange[0], prange[1]), v, indexing='ij')
    if st[0].stats.sac.dist > st[1].stats.sac.dist:
        tr1 = st[0].copy()
        tr2 = st[1].copy()
    else:
        tr1 = st[1].copy()
        tr2 = st[0].copy()
    # plot prepare
    nrows = (prange[1] - prange[0]) // 10 + 1
    ax_t = np.arange(len(tr1.data)) * delta
    if plotarg is not None:
        axes[0][0].plot(ax_t, tr1.data, 'b-')
        axes[0][0].set_yticklabels([])
        axes[0][0].set_xticklabels([])
        axes[0][0].set_title(tr1.stats.station)
        axes[0][1].plot(ax_t, tr2.data, 'b-')
        axes[0][1].set_yticklabels([])
        axes[0][1].set_xticklabels([])
        axes[0][1].set_title(tr2.stats.station)

    row = 0
    for period in range(prange[0], prange[1]):
        b = signal.firwin(1001, [1.0/(period+0.2), 1.0/(period-0.2)],
                          window=('kaiser', 9), nyq=1/delta/2, pass_zero=False)
        # narrow-band filter around the current period
        array1 = signal.lfilter(b, 1, tr1.data)
        array2 = signal.lfilter(b, 1, tr2.data)
        # normalize
        array1 = norm(array1)
        array1 = signal.detrend(array1)
        array2 = norm(array2)
        array2 = signal.detrend(array2)
        # window and plot
        if plotarg is not None and row % 10 == 0:
            nrow = int(row/10) + 1
            axes[nrow][0].plot(ax_t, array1, 'b-')
            axes[nrow][0].set_yticklabels([])
            axes[nrow][1].plot(ax_t, array2, 'b-')
            axes[nrow][1].set_yticklabels([])
            if nrow < nrows - 1:
                axes[nrow][0].set_xticklabels([])
                axes[nrow][1].set_xticklabels([])
        # correlate, first input signal has larger epicenter distance
        corr = signal.correlate(array1, array2, mode='full')
        # keep positive lags inside the velocity window
        cor = corr[int((len_cor+1)/2):len_cor]
        cor = cor[mask]
        cor = norm(cor)
        COR[row] = cor
        row += 1
    # pick
    if plotarg is not None:
        ax2.contourf(P, V, COR)
    result = np.empty(prange[1] - prange[0])
    result[:] = np.NaN
    # fix: pini was hard-coded to 60, which indexes out of range for narrow
    # period bands (e.g. do_ts' default (20, 60)); fall back to the midpoint.
    pini = 60 if prange[0] < 60 < prange[1] else (prange[0] + prange[1]) // 2
    uini = pick(COR[pini - prange[0]], 4.0, v)

    result[pini - prange[0]] = uini
    utemp = uini
    for period in range(pini + 1, prange[1], 1):
        utemp = pick(COR[period - prange[0]], utemp, v)
        if utemp > 0:
            result[period - prange[0]] = utemp
        else:
            break
    utemp = uini
    for period in range(pini - 1, prange[0] - 1, -1):
        utemp = pick(COR[period - prange[0]], utemp, v)
        if utemp > 0:
            result[period - prange[0]] = utemp
        else:
            break
    return result


def do_ts(Disp, PRANGE=(20, 60)):
    """Run two_station over every event row of *Disp*."""
    for index, e in Disp.evt.iterrows():
        dist = abs(e['dist'][0] - e['dist'][1])
        # fix: the original passed data1/data2 as two positional arguments,
        # which does not match two_station's five-parameter signature; pack
        # them as the two-trace sequence the function indexes.
        # NOTE(review): assumes e['data1']/e['data2'] are single traces --
        # confirm with the producer of Disp.
        two_station([e['data1'], e['data2']], dist, VRANGE, PRANGE, e['disp'])


if __name__ == '__main__':
    import obspy
    import matplotlib.pyplot as plt  # fix: plt was used but never imported
    testdir = '/home/haosj/work/Geopy/testdata/twostation/'
    st = obspy.read(testdir + '*.z')
    print(st)
    result = two_station(st, 1111.95, (2, 6), (10, 100))
    forward = np.loadtxt(testdir + 'forward').transpose()
    fig, ax = plt.subplots()
    ax.set_xlim(10, 100)
    ax.set_ylim(3, 4)
    ax.plot(np.arange(10, 100), result)
    ax.plot(forward[0], forward[1])
    plt.show()


# ----------------------------------------------------------------------
# Tomo/form_test_data.py -- file header and model1, restored here because
# the dump splits this file mid-definition (its remaining functions
# continue in the next section of the dump).
try:
    from pyproj import Geod
    from Geopy.Tomo.barmin import sphere2cartesian
except ImportError:  # keep this reconstruction importable without pyproj/Geopy
    Geod = sphere2cartesian = None
from math import *
import random


def model1(lat, lon):
    """Checkerboard: alternate 1-degree cells between 3.2 and 3.7 km/s.

    :return: (cI, A1, A2, A3, A4) -- isotropic velocity plus 2-theta /
        4-theta anisotropy coefficients (all zero here)
    """
    lat, lon = int(lat), int(lon)
    cI = ((lat % 2 and lon % 2) or (not lat % 2 and not lon % 2)) * 0.5 + 3.2
    return cI, 0, 0, 0, 0
int(lat), int(lon) 15 | cI = ((lat % 2 and lon % 2) or (not lat % 2 and not lon % 2)) * 0.5 + 3.2 16 | return cI, 0, 0, 0, 0 17 | 18 | 19 | def model_fake_anis(lat, lon): 20 | lat, lon = int(lat), int(lon) 21 | if lon < 105: 22 | cI = 3.2 23 | else: 24 | cI = 3.7 25 | return cI, 0, 0, 0, 0 26 | 27 | 28 | def model2(lat, lon): 29 | """ 30 | :param lat: 31 | :param lon: 32 | :return: [cI, A1, A2, A3, A4] 33 | """ 34 | lat, lon = int(lat), int(lon) 35 | block = ((lat % 2 and lon % 2) or (not lat % 2 and not lon % 2)) 36 | cI = block * 0.5 + 3.2 37 | la, lo = lat//2, lon//2 38 | block2 = ((la % 2 and lo % 2) or (not la % 2 and not lo % 2)) 39 | A1 = block2 * 0.02 * cI 40 | A2 = (1 - block2) * 0.02 * cI 41 | return cI, A1, A2, 0, 0 42 | 43 | 44 | def model3(lat, lon): 45 | """ 46 | :param lat: 47 | :param lon: 48 | :return: [cI, A1, A2, A3, A4] 49 | """ 50 | lat, lon = int(lat), int(lon) 51 | block = ((lat % 2 and lon % 2) or (not lat % 2 and not lon % 2)) 52 | cI = block * 0.5 + 3.2 53 | la, lo = lat//2, lon//2 54 | block2 = ((la % 2 and lo % 2) or (not la % 2 and not lo % 2)) 55 | A1 = 0.01 * cI * (-1)**(block2+1) 56 | A2 = 0 57 | return cI, A1, A2, 0, 0 58 | 59 | 60 | def model_test(lat, lon): 61 | """ 62 | :param lat: 63 | :param lon: 64 | :return: [cI, A1, A2, A3, A4] 65 | """ 66 | lat, lon = int(lat), int(lon) 67 | block = ((lat % 2 and lon % 2) or (not lat % 2 and not lon % 2)) 68 | cI = block * 0.5 + 3.2 69 | la, lo = lat//2, lon//2 70 | block2 = ((la % 2 and lo % 2) or (not la % 2 and not lo % 2)) 71 | A2 = -0.01 * cI 72 | A1 = 0 73 | return cI, A1, A2, 0, 0 74 | 75 | 76 | def write_model(model, ins, anis=None): 77 | f = open(ins, 'w') 78 | for lon in np.arange(-180, 180, 0.1): 79 | for lat in np.arange(-90, 90, 0.1): 80 | f.write("%f %f %f\n" % (lon, lat, model(lat, lon)[0])) 81 | f.close() 82 | if anis: 83 | f = open(anis, 'w') 84 | for lon in np.arange(-180, 180, 0.5): 85 | for lat in np.arange(-90, 90, 0.5): 86 | a1, a2 = model(lat, lon)[1:3] 87 | theta = 
atan2(a1, a2) 88 | theta = theta * 180 / pi 89 | azfast = (90 - theta) / 2 90 | gmt_azfast = 90 - azfast 91 | amp = sqrt(a1 ** 2 + a2 ** 2) 92 | gmt_amp = amp / (0.01*4) 93 | f.write("%f %f %f %fc 0.1c\n" % (lon, lat, gmt_azfast, gmt_amp)) 94 | 95 | 96 | def test_integrate(lat1, lon1, lat2, lon2): 97 | g = Geod(ellps='sphere') 98 | distance1 = g.inv(lon1, lat1, lon2, lat2)[2] / 1000.0 99 | points = g.npts(lon1, lat1, lon2, lat2, int(distance1)//1) 100 | distance2 = 0 101 | distance3 = 0 102 | res = 0 103 | for i in range(len(points)-1): 104 | lo1, la1, lo2, la2 = points[i] + points[i+1] 105 | p1 = sphere2cartesian(90-la1, 180-lo1) * 6371 106 | p2 = sphere2cartesian(90-la2, 180-lo2) * 6371 107 | distance2 += np.linalg.norm(p1-p2) 108 | az, baz, d = g.inv(lo1, la1, lo2, la2) 109 | distance3 += d 110 | res += abs(abs(az - baz) - 180) 111 | distance3 /= 1000.0 112 | res /= len(points) 113 | print(distance1, distance2, distance3) 114 | print(res) 115 | 116 | 117 | def syn_ray(ray, model): 118 | """ 119 | compute travel time along given ray 120 | :param ray: (lat1, lon1, lat2, lon2) 121 | :param model: function(lat, lon), return model 122 | :return: travel time, velocity 123 | """ 124 | g = Geod(ellps='sphere') 125 | distance = g.inv(ray[1], ray[0], ray[3], ray[2])[2] / 1000.0 126 | points = g.npts(ray[1], ray[0], ray[3], ray[2], int(distance)//1) 127 | time = 0 128 | for i in range(len(points)-1): 129 | lo1, la1, lo2, la2 = points[i] + points[i+1] 130 | az, baz, d = g.inv(lo1, la1, lo2, la2) 131 | d /= 1000.0 132 | if az < 0: 133 | az += 180 134 | phi = az * pi / 180 135 | m = model(la1, lo1) 136 | c = m[0] 137 | c += m[1] * cos(2*phi) + m[2] * sin(2*phi) +\ 138 | m[3] * cos(4*phi) + m[4] * sin(4*phi) 139 | time += d / c 140 | return time, distance / time 141 | 142 | 143 | def checkboard_ds2004(into_file, model, out, gauss_std=0): 144 | """ 145 | form intomodesVs type file for DS2004 146 | :param into_file: intomodesVs type file contain rays 147 | :param model: 
# Plot/gmt.py -- GMT plot-image helpers, reconstructed from the dump.
# fix: matplotlib is imported lazily inside the functions that need it, so
# the pure-numpy helpers remain importable in headless environments.
import numpy as np
from glob import glob


def white_edge_cutter(img):
    """
    cut white edge of image
    :param img: array, rgb value of a image
    :return: function to cut white edge
    """
    # assumes white pixels have all three RGB channels == 1.0 (sum == 3)
    # and that the image border is white -- TODO confirm for RGBA input
    left = 0
    while all(img[:, left, :].sum(axis=1) == 3):
        left += 1
    right = img.shape[1] - 1
    while all(img[:, right, :].sum(axis=1) == 3):
        right -= 1
    top = 0
    while all(img[top, :, :].sum(axis=1) == 3):
        top += 1
    bottom = img.shape[0] - 1
    while all(img[bottom, :, :].sum(axis=1) == 3):
        bottom -= 1

    def cutter(img):
        # fix: the original sliced [top-1:bottom, left-1:right], dropping the
        # bottom/right content row/column and wrapping to index -1 when the
        # content touches the image edge; clamp the bounds and keep a
        # one-pixel white margin on every side instead.
        r0, r1 = max(top - 1, 0), min(bottom + 2, img.shape[0])
        c0, c1 = max(left - 1, 0), min(right + 2, img.shape[1])
        return img[r0:r1, c0:c1, :]
    return cutter


def merge_img(imgs, ncol):
    """
    merge images
    :param ncol: number of images each row
    :param imgs: images. iterable
    :return: merged big image
    """
    BLANK_WIDTH = 0.05   # horizontal gap, fraction of image width
    BLANK_HEIGHT = 0.01  # vertical gap, fraction of image height
    w, h = imgs[0].shape[1], imgs[0].shape[0]
    blankw = int(w * BLANK_WIDTH)
    blankh = int(h * BLANK_HEIGHT)
    rows = []
    for i in range(0, len(imgs), ncol):
        temp = imgs[i]
        for j in range(1, ncol):
            if i+j < len(imgs):
                temp = np.hstack([temp, np.ones([h, blankw, 3]), imgs[i+j]])
        rows.append(temp)
    merged_width = rows[0].shape[1]
    # center-pad a short last row with white so vstack widths match
    if rows[-1].shape[1] != merged_width:
        diff = merged_width - rows[-1].shape[1]
        conwid1 = diff//2
        conwid2 = diff - conwid1
        rows[-1] = np.hstack([np.ones([h, conwid1, 3]), rows[-1], np.ones([h, conwid2, 3])])
    result = rows[0]
    for i in range(1, len(rows)):
        result = np.vstack((result, np.ones([blankh, merged_width, 3]), rows[i]))
    return result


def quick_view(name, save=None, column=2):
    """
    quick view multiple plots
    :param name: png file name, support wild card, or list of filename
    :param save: save path for merged image
    :param column: columns of plots
    :return: merged image
    """
    import matplotlib.pyplot as plt
    import matplotlib.image as mpimg
    if isinstance(name, list):
        files = name
    elif isinstance(name, str):
        files = glob(name)
    else:
        raise TypeError("name should be string or list")
    imgs = list(map(mpimg.imread, files))
    cutter = white_edge_cutter(imgs[0])
    imgs_cut = list(map(cutter, imgs))
    merged_img = merge_img(imgs_cut, column)
    plt.imshow(merged_img)
    if save:
        mpimg.imsave(save, merged_img)
    return merged_img


def _view_row(row):
    """Show a single color row stretched to 20 pixels tall (debug helper)."""
    import matplotlib.pyplot as plt
    show = []
    for _ in range(20):
        show.append(row)
    show = np.array(show)
    plt.imshow(show)


def get_cpt(image, cptout):
    """
    extract cptfile from a colorbar image file
    assumption: this image should have white background and black rectangle border
    :param image: colorbar image file
    :param cptout: output cpt file path
    :return:
    """
    import matplotlib.image as mpimg
    head = """# COLOR_MODEL = RGB
"""  # NOTE(review): unused until cpt writing is implemented (see TODO below)
    border_color = np.array([0, 0, 0])  # black border

    def check_border(line):
        # longest run of continuous near-black pixels in one row/column
        diff = abs(line[:, :3] - border_color)
        is_black = np.mean(diff, axis=1) < 0.5
        is_continuous = np.array([False for _ in range(len(is_black))])
        for j in range(len(line)-1):
            if np.mean(line[j] - line[j+1]) < 0.1:
                is_continuous[j] = True
        is_border = is_black & is_continuous
        max_len, beg, end = 0, 0, 0
        cur_beg = None
        for j in range(len(is_border)):
            if is_border[j]:
                if cur_beg is None:
                    cur_beg = j
                if (j == len(is_border) - 1) or (j < len(is_border) - 1 and (not is_border[j+1])):
                    if j - cur_beg > max_len:
                        beg, end = cur_beg, j
                        max_len = end - beg
                    cur_beg = None
        return [max_len, beg, end]

    img = mpimg.imread(image)
    # diagnostic border scan; fix: the original had a debug `return result`
    # here that made the entire colorbar extraction below unreachable.
    result = []
    for nrow in range(img.shape[0]):
        result.append(check_border(img[nrow, :, :]))
    is_vertical = True
    colorbar = None
    # a row/column crossing the colorbar has many distinct color sums
    for nrow in range(img.shape[0]):
        if len(set(np.sum(img[nrow, :, :], axis=1))) > 0.3 * img.shape[1]:
            print(nrow)
            is_vertical = False
            colorbar = img[nrow+5, :, :]
            break
    if is_vertical:
        for ncol in range(img.shape[1]):
            if len(set(np.sum(img[:, ncol, :], axis=1))) > 0.3 * img.shape[0]:
                colorbar = img[:, ncol+5, :]
                break
    if colorbar is None:
        raise ValueError('colorbar must account for most of the input image')
    prev, cur = None, None
    # NOTE(review): the nesting of the `cur - prev` check was inferred from
    # the mangled dump (it must run only after `cur` is set) -- confirm.
    for i in range(1, len(colorbar)):  # find edge of colorbar
        if abs(sum(colorbar[i, :3]) - sum(colorbar[i-1, :3])) > 2.5:
            if prev is None:
                prev = i
            else:
                if cur is None:
                    cur = i
                else:
                    prev = cur
                    cur = i
                if cur - prev > 0.3 * len(colorbar):
                    print(prev, cur)
                    colorbar = colorbar[prev+1:cur-1]
                    break
    # TODO need more test and output cpt
    _view_row(colorbar)
    return colorbar
100 | :return: 101 | """ 102 | head = """# COLOR_MODEL = RGB 103 | """ 104 | border_color = np.array([0, 0, 0]) # black border 105 | 106 | def check_border(line): 107 | diff = abs(line[:, :3] - border_color) 108 | is_black = np.mean(diff, axis=1) < 0.5 109 | is_continuous = np.array([False for _ in range(len(is_black))]) 110 | for j in range(len(line)-1): 111 | if np.mean(line[j] - line[j+1]) < 0.1: 112 | is_continuous[j] = True 113 | is_border = is_black & is_continuous 114 | max_len, beg, end = 0, 0, 0 115 | cur_beg = None 116 | for j in range(len(is_border)): 117 | if is_border[j]: 118 | if cur_beg is None: 119 | cur_beg = j 120 | if (j == len(is_border) - 1) or (j < len(is_border) - 1 and (not is_border[j+1])): 121 | if j - cur_beg > max_len: 122 | beg, end = cur_beg, j 123 | max_len = end - beg 124 | cur_beg = None 125 | return [max_len, beg, end] 126 | 127 | img = mpimg.imread(image) 128 | result = [] 129 | for nrow in range(img.shape[0]): 130 | result.append(check_border(img[nrow, :, :])) 131 | return result 132 | is_vertical = True 133 | colorbar = None 134 | for nrow in range(img.shape[0]): 135 | if len(set(np.sum(img[nrow, :, :], axis=1))) > 0.3 * img.shape[1]: 136 | print(nrow) 137 | is_vertical = False 138 | colorbar = img[nrow+5, :, :] 139 | break 140 | if is_vertical: 141 | for ncol in range(img.shape[1]): 142 | if len(set(np.sum(img[:, ncol, :], axis=1))) > 0.3 * img.shape[0]: 143 | colorbar = img[:, ncol+5, :] 144 | break 145 | if colorbar is None: 146 | raise ValueError('colorbar must account for most of the input image') 147 | prev, cur = None, None 148 | for i in range(1, len(colorbar)): # find edge of colorbar 149 | if abs(sum(colorbar[i, :3]) - sum(colorbar[i-1, :3])) > 2.5: 150 | if prev is None: 151 | prev = i 152 | else: 153 | if cur is None: 154 | cur = i 155 | else: 156 | prev = cur 157 | cur = i 158 | if cur - prev > 0.3 * len(colorbar): 159 | print(prev, cur) 160 | colorbar = colorbar[prev+1:cur-1] 161 | break 162 | # TODO need more test 
and output cpt 163 | _view_row(colorbar) 164 | return colorbar 165 | 166 | 167 | if __name__ == '__main__': 168 | colorbar = get_cpt('./testdata/colorbar.png', 'temp') 169 | -------------------------------------------------------------------------------- /Tomo/barmin.py: -------------------------------------------------------------------------------- 1 | import os 2 | import numpy as np 3 | from distaz import distaz 4 | import math 5 | import matplotlib.pyplot as plt 6 | import cartopy.crs as ccrs 7 | 8 | 9 | def read_dispersion(disp_dir, stationlst, index): 10 | """ 11 | read dispersion file, return start and end position of ray path, 12 | travel time and corresponding misfit 13 | :param disp_dir: directory of dispersion file 14 | :param stationlst: station info file 15 | :param index: correspond to certain peroid 16 | :return: (ray, time, mis) 17 | ray: lat1, lon1, lat2, lon2 18 | time: travel time 19 | mis: misfit 20 | """ 21 | ray, time, mis = [], [], [] 22 | sta = {} 23 | with open(stationlst, 'r') as f: 24 | for line in f: 25 | l = line[:-1].split(' ') 26 | sta[l[0]] = (float(l[1]), float(l[2])) 27 | disp_files = os.listdir(disp_dir) 28 | for disp_file in disp_files: 29 | disp = np.loadtxt(disp_dir+disp_file) 30 | if sum(np.isnan(disp[:, index])) == 0: 31 | vel, mis_v = disp[0, index], disp[1, index] 32 | sta1, sta2 = disp_file.split('_') 33 | latlon1, latlon2 = sta[sta1], sta[sta2] 34 | distance = distaz(*latlon2, *latlon1).degreesToKilometers() 35 | time.append(vel*distance) 36 | mis.append(mis_v*distance) 37 | ray.append(latlon1+latlon2) 38 | return ray, time, mis 39 | 40 | 41 | def define_F(alpha, delta, rnodes): 42 | """ 43 | define smooth matrix F 44 | :param alpha: smooth weight 45 | :param delta: smooth spatial width, 46 | len(delta)==len(alpha)==n+1 47 | :param rnodes: position of nodes 48 | :return: F matrix, (n+1)M x (n+1)M 49 | """ 50 | def S(r1, r2, delta): 51 | return math.exp(-((r1[0]-r2[0])**2 + (r1[1]-r2[1])**2) / (2*delta**2)) 52 | n, M = 
len(alpha)-1, len(rnodes) 53 | F = np.zeros([(n+1)*M, (n+1)*M], dtype='float') 54 | for k in range(len(alpha)): 55 | for j1 in range(k*M, (k+1)*M): 56 | for j2 in range(j1+1, (k+1)*M): 57 | s = S(rnodes[j1-k*M], rnodes[j2-k*M], delta[k]) 58 | F[j1, j2] = s 59 | F[j2, j1] = s 60 | F[j1] /= np.sum(F[j1]) 61 | for j in range(k*M, (k+1)*M): 62 | F[j, j] = -1 63 | F[k*M:(k+1)*M, k*M:(k+1)*M] *= -alpha[k] 64 | return F 65 | 66 | 67 | def sphere2cartesian(colat, lon): 68 | theta = colat * math.pi / 180.0 69 | phi = lon * math.pi / 180.0 70 | z = math.cos(theta) 71 | x = math.sin(theta) * math.cos(phi) 72 | y = math.sin(theta) * math.sin(phi) 73 | return np.array([x, y, z]) 74 | 75 | 76 | def ray_dist_to_node(ray, rnode): 77 | """ 78 | compute distance from point to great circle 79 | :param ray: start,end location, (colat1,lon1), (colat2,lon2) 80 | :param rnode: location of node 81 | :return: (bool, dist) 82 | if node is in range of arc, True 83 | distance in degrees 84 | """ 85 | ray = [(90-ray[0][0], 180-ray[0][1]), (90-ray[1][0], 180-ray[1][1])] 86 | rnode = [90-rnode[0], 180-rnode[1]] 87 | a, b = sphere2cartesian(*ray[0]), sphere2cartesian(*ray[1]) 88 | n = np.cross(a, b) / np.linalg.norm(np.cross(a, b)) 89 | r = sphere2cartesian(*rnode) 90 | distance = abs(math.asin(np.dot(n, r))) * 180/math.pi 91 | rplane = r - np.dot(r, n) * n 92 | rplane = rplane / np.linalg.norm(rplane) 93 | in_arc = ( 94 | abs(math.acos(np.dot(a, b)) - 95 | (math.acos(np.dot(a, rplane)) + math.acos(np.dot(b, rplane)))) 96 | < 0.01) 97 | return in_arc, distance 98 | 99 | 100 | def test_ray_dist_to_node(): 101 | from random import random 102 | from distaz import distaz 103 | ray = [ 104 | ((random()-0.5)*180, (random()-0.5)*360), ((random()-0.5)*180, (random()-0.5)*360)] 105 | ax = plt.axes(projection=ccrs.Robinson()) 106 | ax.set_global() 107 | ax.coastlines() 108 | plt.plot([ray[0][1], ray[1][1]], [ray[0][0], ray[1][0]], color='red', transform=ccrs.Geodetic()) 109 | for lon in range(-180, 180, 
10): 110 | for lat in range(-90, 90, 10): 111 | color = 'grey' 112 | inarc, dist = ray_dist_to_node(ray, (lat, lon)) 113 | 114 | if dist < 5 and inarc: 115 | color = 'red' 116 | elif dist < 5: 117 | d1 = distaz(lat, lon, *ray[1]).getDelta() 118 | d2 = distaz(lat, lon, *ray[0]).getDelta() 119 | if d1 < 5 or d2 < 5: 120 | color = 'red' 121 | 122 | plt.plot([lon], [lat], color=color, marker='.', transform=ccrs.Geodetic()) 123 | plt.show() 124 | 125 | 126 | def compute_ray_density(rays, rnodes, radius): 127 | """ 128 | compute ray density to determine H 129 | :param rays: location of station pairs 130 | :param rnodes: location of nodes 131 | :param radius: radius of node 132 | :return: rho and chi, ray density and azimuthal distribution 133 | """ 134 | rho = [0 for _ in range(len(rnodes))] 135 | azs = [[] for _ in range(len(rnodes))] 136 | for ray in rays: 137 | ray = [(ray[0], ray[1]), (ray[2], ray[3])] 138 | az = distaz(*ray[0], *ray[1]).getAz() 139 | if az >= 180: 140 | az -= 180 141 | for i in range(len(rnodes)): 142 | inarc, dist = ray_dist_to_node(ray, rnodes[i]) 143 | if dist < radius: 144 | d1 = distaz(*rnodes[i], *ray[0]).getDelta() 145 | d2 = distaz(*rnodes[i], *ray[1]).getDelta() 146 | if inarc or (d1 < radius or d2 < radius): 147 | rho[i] += 1 148 | azs[i].append(az) 149 | # TODO fix bug. 
azimuths is not constant on one ray 150 | chi = [0 for _ in range(len(rnodes))] 151 | for i in range(len(chi)): 152 | if len(azs[i]) == 0: 153 | chi[i] = 0 154 | else: 155 | hist = [0 for _ in range(10)] 156 | for az in azs[i]: 157 | hist[int(az)//18] += 1 158 | chi[i] = sum(hist) / (10 * max(hist)) 159 | return rho, chi 160 | 161 | 162 | def define_H(rho, chi, beta, lambda0, thres_chi): 163 | """ 164 | define regularization matrix H 165 | :param rho: ray path density 166 | :param chi: ray azimuth density 167 | :param beta: damp parameter 168 | :param lambda0: parameter define function from rho to damp coefficient 169 | :param thres_chi: if chi\n%f %f\n%f %f\n" % (ray[1], ray[0], ray[3], ray[2])) 219 | f.close() 220 | 221 | 222 | # project on a single plane, because current research area is small 223 | # change later 224 | def proj_to_cube(lat, lon, clon=105): 225 | """ 226 | project sphere point on square, edge length = 2 227 | :param lat: latitude (-45 ~ 45) 228 | :param lon: longitude (clon-45 ~ clon+45) 229 | :param clon: central longitude 230 | :return: coordinate on plane 231 | """ 232 | lat = lat * math.pi / 180.0 233 | lon = lon * math.pi / 180.0 234 | clon = clon * math.pi / 180.0 235 | y = math.tan(lat) 236 | x = math.tan(lon-clon) 237 | return x, y 238 | 239 | 240 | def proj_to_sphere(x, y, clon=105): 241 | lat = math.atan(y) * 180 / math.pi 242 | lon = math.atan(x) * 180 / math.pi + clon 243 | return lat, lon 244 | 245 | 246 | if __name__ == '__main__': 247 | data = '/home/haosj/data/neTibet/' 248 | laran, loran = (30, 37), (101, 109) 249 | radius = 0.5 250 | ray, time, mis = read_dispersion( 251 | data+'result3/', data+'metadata/sta_36_south.lst', 10) 252 | rnodes = [ 253 | (x, y) for x in np.arange(laran[0], laran[1]+radius, radius) 254 | for y in np.arange(loran[0], loran[1]+radius, radius)] 255 | # F = define_F([1, 2], [2, 3], rnodes) 256 | # plt.matshow(F) 257 | # plt.colorbar() 258 | # plt.show(block=True) 259 | # test_ray_dist_to_node() 260 | # 
plot_ray(ray, loran+laran) 261 | # rho, chi = compute_ray_density(ray, rnodes, radius/2.0) 262 | # gmt_density(rho, rnodes, './plot/rho') 263 | # gmt_density(chi, rnodes, './plot/chi') 264 | # plot_density(rho, rnodes, loran+laran) 265 | gmt_ray(ray, './plot/ray') 266 | -------------------------------------------------------------------------------- /FastTS/meta.py: -------------------------------------------------------------------------------- 1 | import obspy 2 | from glob import glob 3 | from collections import namedtuple 4 | from distaz import distaz 5 | from scipy import signal 6 | import numpy as np 7 | import os 8 | 9 | 10 | class Event(object): 11 | def __init__(self, tr, filename, name): 12 | self.PRANGE = (10, 80) 13 | self.pairs = {} 14 | self.name = name 15 | Stats = namedtuple('Stats', ['time', 'lat', 'lon', 'depth', 'mag']) 16 | time = tr.stats.starttime + tr.stats.sac['o'] 17 | self.evtinfo = Stats(time=time, lat=tr.stats.sac['evla'], 18 | lon=tr.stats.sac['evlo'], depth=tr.stats.sac['evdp'], 19 | mag=tr.stats.sac['mag']) 20 | self.sta = {} 21 | if tr.stats.station in self.sta: 22 | raise NameError("duplicate event in %s" % tr.stats.station) 23 | else: 24 | self.sta[tr.stats.station] = filename 25 | 26 | def add_pair(self, pairname, dist): 27 | self.pairs[pairname] = dist 28 | 29 | def do(self, outdir, delta=1.0): 30 | if not self.pairs: 31 | return 32 | filtered = {} 33 | epic_center_dist = {} 34 | for s in self.sta: 35 | epic_center_dist[s] = obspy.read( 36 | self.sta[s], headonly=True)[0].stats.sac['dist'] 37 | 38 | def def_b(x): 39 | return signal.firwin( 40 | 1001, [1.0/(x+0.2), 1.0/(x-0.2)], 41 | window=('kaiser', 9), nyq=1/delta/2, pass_zero=False) 42 | B = list(map(def_b, range(*self.PRANGE))) 43 | epic_dist_max = max(epic_center_dist.values()) 44 | epic_dist_min = min(epic_center_dist.values()) 45 | starttime = self.gettime() + epic_dist_min/6.0 46 | endtime = self.gettime() + epic_dist_max/2.0 47 | npts = int((endtime - starttime) / delta) 
48 | for s in self.sta: 49 | # TODO optimize space consume 50 | st = obspy.read(self.sta[s]) 51 | if all(np.isnan(st[0].data)): 52 | continue 53 | try: 54 | data_cut = cut(st, starttime, npts) 55 | except IndexError: 56 | print(self.name+" "+s+" cut error") 57 | continue 58 | filtered[s] = norm2d(multi_filter(B, data_cut)) 59 | for pair in self.pairs: 60 | outpath = outdir + pair + '/' 61 | if not os.path.exists(outpath): 62 | os.mkdir(outpath) 63 | sta1, sta2 = pair.split('_') 64 | if not(sta1 in filtered and sta2 in filtered): 65 | continue 66 | if epic_center_dist[sta2] > epic_center_dist[sta1]: 67 | sta1, sta2 = sta2, sta1 68 | result = two_station( 69 | filtered[sta1], filtered[sta2], self.pairs[pair], delta, (2.5, 4.5)) 70 | np.savetxt(outpath+self.name, result) 71 | 72 | def addstation(self, filename): 73 | station = (filename.split('/')[-1]).split('.')[1] 74 | self.sta[station] = filename 75 | 76 | def getfile(self, staname): 77 | return self.sta[staname] 78 | 79 | def getlatlon(self): 80 | return self.evtinfo.lat, self.evtinfo.lon 81 | 82 | def gettime(self): 83 | return self.evtinfo.time 84 | 85 | def getmag(self): 86 | return self.evtinfo.mag 87 | 88 | def getdepth(self): 89 | return self.evtinfo.depth 90 | 91 | def __str__(self): 92 | temp = "%.2f %.2f %.2f %d " % ( 93 | self.evtinfo.lat, self.evtinfo.lon, self.evtinfo.depth, len(self.sta)) 94 | return temp + str(self.evtinfo.time) 95 | 96 | 97 | class Events(object): 98 | def __init__(self): 99 | self.evts = {} 100 | 101 | def addfromdir(self, directory, timewild, filewild='*.Z'): 102 | times = map(lambda x: x.split('/')[-1], glob(directory + timewild)) 103 | for time in times: 104 | files = glob(directory + time + '/' + filewild) 105 | if len(files) > 0: 106 | filename = files[0] 107 | tr = obspy.read(filename)[0] 108 | self.evts[time] = Event(tr, filename, time) 109 | for file in files: 110 | self.evts[time].addstation(file) 111 | 112 | def prep_pairs(self, pairs): 113 | for pair in pairs: 114 | 
pair.setevents(self) 115 | 116 | def __iter__(self): 117 | return iter(self.evts.values()) 118 | 119 | def __len__(self): 120 | return len(self.evts) 121 | 122 | 123 | class Pairs(object): 124 | def __init__(self, stafile): 125 | self.pairs = {} 126 | statemp = [] 127 | with open(stafile, 'r') as f: 128 | for line in f: 129 | l = line[:-1].split(' ') 130 | statemp.append([l[0], float(l[1]), float(l[2])]) 131 | for i in range(len(statemp)-1): 132 | for j in range(i+1, len(statemp)): 133 | key = self.getkey(statemp[i][0], statemp[j][0]) 134 | self.pairs[key] = Pair(*statemp[i], *statemp[j]) 135 | 136 | @staticmethod 137 | def getkey(name1, name2): 138 | if name1 < name2: 139 | key = name1 + '_' + name2 140 | else: 141 | key = name2 + '_' + name1 142 | return key 143 | 144 | def __iter__(self): 145 | return iter(self.pairs.values()) 146 | 147 | 148 | class Pair(object): 149 | """ 150 | usage: 151 | pair.setevents -> pair.do_ts 152 | or pair.setevents -> pair.load 153 | """ 154 | def __init__(self, sta1, lat1, lon1, sta2, lat2, lon2): 155 | self.name = Pairs.getkey(sta1, sta2) 156 | self.sta1 = sta1 157 | self.sta2 = sta2 158 | self.latlon1 = (lat1, lon1) 159 | self.latlon2 = (lat2, lon2) 160 | self.staAz = distaz(*self.latlon2, *self.latlon1).getAz() 161 | self.staDist = distaz(*self.latlon2, *self.latlon1).degreesToKilometers() 162 | self.evts = [] 163 | self.dispfile = [] 164 | self.disp = [] 165 | self.PRANGE = (10, 80) 166 | 167 | def getdiffaz(self, event): 168 | return abs(self.staAz - distaz(*event.getlatlon(), *self.latlon1).getAz()) 169 | 170 | def setevents(self, events): 171 | for evt in events: 172 | diff = self.getdiffaz(evt) 173 | if evt.getdepth() > 50: 174 | continue 175 | if distaz(*evt.getlatlon(), 36, 105).getDelta() > 120: 176 | continue 177 | if not(diff < 2.0 or abs(diff - 180) < 2.0): 178 | continue 179 | if evt.getmag() <= 5.5: 180 | continue 181 | dist = min(distaz(*evt.getlatlon(), *self.latlon1).getDelta(), 182 | distaz(*evt.getlatlon(), 
*self.latlon2).getDelta()) 183 | if dist < 10: 184 | continue 185 | try: 186 | evt.getfile(self.sta1) 187 | evt.getfile(self.sta2) 188 | except KeyError: 189 | continue 190 | self.evts.append(evt) 191 | evt.add_pair(self.name, self.staDist) 192 | return 193 | 194 | 195 | def cut(st, start, npts): 196 | delta = st[0].stats.delta 197 | n0 = int((start - st[0].stats.starttime) / delta) 198 | if n0 < 0 or n0+npts >= len(st[0].data): 199 | raise IndexError 200 | return st[0].data[n0:n0+npts] 201 | 202 | 203 | def multi_filter(B, data): 204 | mat = [] 205 | for b in B: 206 | mat.append(signal.lfilter(b, 1, data)) 207 | return np.array(mat) 208 | 209 | 210 | def norm2d(mat): 211 | ma, mi = mat.max(axis=1), mat.min(axis=1) 212 | m = np.c_[abs(ma), abs(mi)].max(axis=1) 213 | return mat / m.reshape(len(m), 1) 214 | 215 | 216 | def pick(cor, uini, u): 217 | j = 0 218 | for i in range(len(u)): 219 | if u[i] <= uini: 220 | j = i 221 | break 222 | if j == 0 or j == (len(u)-1): 223 | return -1 224 | if cor[j+1] > cor[j]: 225 | while j < (len(u)-1) and cor[j+1] > cor[j]: 226 | j += 1 227 | i = j 228 | elif cor[j-1] > cor[j]: 229 | while j > 0 and cor[j-1] > cor[j]: 230 | j -= 1 231 | i = j 232 | return u[i] 233 | 234 | 235 | def two_station(data1, data2, dist, delta, vrange): 236 | npts = len(data1[0]) 237 | len_cor = 2*npts - 1 238 | t = np.arange(1, int((len_cor + 1) / 2)) * delta 239 | v = dist / t 240 | mask = (v > vrange[0]) * (v < vrange[1]) 241 | v = v[mask] 242 | COR = [] 243 | for i in range(len(data1)): 244 | COR.append(signal.correlate(data1[i], data2[i], mode='full')) 245 | COR = np.array(COR) 246 | COR = COR[:, int((len_cor+1)/2):len_cor] 247 | COR = COR[:, mask] 248 | result = np.empty(len(data1)) 249 | result[:] = np.nan 250 | index = 50 251 | uini = pick(COR[index], 4.0, v) 252 | result[index] = uini 253 | utemp = uini 254 | for i in range(index+1, len(COR)): 255 | utemp = pick(COR[i], utemp, v) 256 | if utemp > 0: 257 | result[i] = utemp 258 | else: 259 | break 
260 | utemp = uini 261 | for i in range(index-1, -1, -1): 262 | utemp = pick(COR[i], utemp, v) 263 | if utemp > 0: 264 | result[i] = utemp 265 | else: 266 | break 267 | return result 268 | 269 | 270 | def test_ts(f1, f2, refdisp): 271 | # dist f1 > dist f2 272 | import matplotlib.pyplot as plt 273 | dire = '/home/haosj/data/fk_data2/' 274 | # dire = '/home/haosj/work/Geopy/testdata/twostation/' 275 | st = obspy.read(dire+f1) 276 | st += obspy.read(dire+f2) 277 | delta = st[0].stats.delta 278 | 279 | def cut2(st, start, end): 280 | delta = st[0].stats.delta 281 | width = end - start 282 | npts = int(width / delta) 283 | n0 = int((start - st[0].stats.starttime) / delta) 284 | n1 = int((start - st[1].stats.starttime) / delta) 285 | if n0 < 0 or n1 < 0 or n0+npts >= len(st[0].data) or n1+npts >= len(st[1].data): 286 | raise IndexError 287 | st[0].data = st[0].data[n0:n0+npts] 288 | st[1].data = st[1].data[n1:n1+npts] 289 | st[0].stats.starttime = start 290 | st[1].stats.starttime = start 291 | 292 | cut2(st, max(st[0].stats.starttime, st[1].stats.starttime), 293 | min(st[0].stats.endtime, st[1].stats.endtime)) 294 | 295 | def def_b(x): 296 | return signal.firwin( 297 | 1001, [1.0 / (x + 0.2), 1.0 / (x - 0.2)], 298 | window=('kaiser', 9), nyq=1 / delta / 2, pass_zero=False) 299 | B = list(map(def_b, range(10, 80))) 300 | mat1 = norm2d(multi_filter(B, st[0].data)) 301 | mat2 = norm2d(multi_filter(B, st[1].data)) 302 | result = two_station( 303 | mat1, mat2, st[0].stats.sac['dist']-st[1].stats.sac['dist'], delta, (2, 6)) 304 | forward = np.loadtxt(dire + refdisp).transpose() 305 | fig, ax = plt.subplots() 306 | ax.set_xlim(10, 80) 307 | ax.set_ylim(3, 4.5) 308 | ax.plot(np.arange(10, 80), result, '.', color='blue') 309 | ax.plot(forward[0], forward[1], color='orange') 310 | plt.show() 311 | return result 312 | 313 | 314 | if __name__ == '__main__': 315 | dire = '/home/haosj/data/neTibet/' 316 | pairs = Pairs(dire + 'sta_II.lst') 317 | evts = Events() 318 | 
evts.addfromdir(dire+'data/', '2015*') 319 | evts.prep_pairs(pairs) 320 | evt = evts.evts['20150102.002.082155.900'] 321 | # result = test_ts('g70.z', 'g60.z', 'forward_rayleigh') 322 | -------------------------------------------------------------------------------- /ZHratio/extract.py: -------------------------------------------------------------------------------- 1 | from scipy import signal 2 | from scipy.fftpack import hilbert 3 | import obspy 4 | import matplotlib.pyplot as plt 5 | import numpy as np 6 | from Geopy import cps 7 | from obspy.signal.filter import envelope 8 | import os 9 | from glob import glob 10 | 11 | 12 | class station: 13 | def __init__(self, station_info, event_dir): 14 | self.station_files = {} 15 | event_files = {} 16 | for event in glob(event_dir+'*'): 17 | event_files[event] = glob(event+'/*') 18 | with open(station_info, 'r') as f: 19 | station_lines = f.readlines() 20 | for station_line in station_lines: 21 | station_name = station_line.split()[0] 22 | self.station_files[station_name] = [] 23 | for event in event_files: 24 | files = event_files[event] 25 | for filename in files: 26 | if filename[-1] == 'Z': 27 | staname = filename.split('/')[-1].split('.')[1] 28 | if staname in self.station_files: 29 | self.station_files[staname].append( 30 | '.'.join(filename.split('.')[:-1])) 31 | 32 | def do(self, freqs, out): 33 | for sta in self.station_files: 34 | print(sta) 35 | result = [] 36 | outdir = out + sta + '/' 37 | if not os.path.exists(outdir): 38 | os.makedirs(outdir) 39 | for filename in self.station_files[sta]: 40 | if os.path.exists(filename+'.Z') and os.path.exists(filename+'.R'): 41 | temp = cal_zhratio(filename+'.Z', filename+'.R', freqs) 42 | if isinstance(temp, np.ndarray): 43 | result.append(temp) 44 | np.savetxt(outdir+filename.split('/')[-1], temp) 45 | 46 | 47 | def plot_progress(zdata, hdata, Z, Z_env, H, H_env, freqs, Win): 48 | fig, axes = plt.subplots(len(range(0, len(freqs), 2))+1, sharex=True) 49 | zcolor, hcolor 
def plot_progress(zdata, hdata, Z, Z_env, H, H_env, freqs, Win):
    """Diagnostic plot: raw Z/H traces on top, then every second
    narrow-band-filtered Z/H pair with its envelopes and the group-velocity
    window scaled to the Z amplitude.
    """
    fig, axes = plt.subplots(len(range(0, len(freqs), 2)) + 1, sharex=True)
    zcolor, hcolor = 'red', 'blue'
    axes[0].plot(zdata, color=zcolor)
    axes[0].plot(hdata, color=hcolor)
    for i in range(0, len(freqs), 2):
        j = i // 2 + 1
        axes[j].plot(Z[i], color=zcolor)
        axes[j].plot(Z_env[i], color=zcolor)
        axes[j].plot(H[i], color=hcolor)
        axes[j].plot(H_env[i], color=hcolor)
        axes[j].set_ylabel('freq_' + str(freqs[i]))
        axes[j].plot(Win[i] * Z[i].max())
    plt.show()


def group_vel_win(filename, stats, freqs, n):
    """Rectangular time windows around the Rayleigh group arrival.

    Group velocities are measured with CPS sacmft96 (via cps.do_mft) and
    interpolated onto *freqs*; for each frequency the window spans the
    arrival times dist/(U+n) .. dist/(U-n), i.e. +-n km/s around the
    measured group velocity U.

    :param filename: SAC file passed to sacmft96
    :param stats: obspy stats of the same trace (needs sac dist/b/o)
    :param freqs: target frequencies (Hz)
    :param n: half-width of the velocity window (km/s)
    :return: list of 0/1 ndarrays of length stats.npts, one per frequency
    """
    dist = stats.sac['dist']
    delta = stats.delta
    # time of first sample relative to origin time
    timediff = stats.sac['b'] - stats.sac['o']
    npts = stats.npts
    gvdisp = cps.do_mft(filename, 'R', dist)
    gvinterp = np.interp(freqs, gvdisp[0], gvdisp[1])
    left = ((dist / (gvinterp + n) - timediff) / delta).astype('int')
    right = ((dist / (gvinterp - n) - timediff) / delta).astype('int')

    def cut(bound):
        win = np.zeros(npts)
        lo = max(bound[0], 0)
        hi = min(bound[1], npts - 1)
        win[lo:hi] = 1
        return win
    return list(map(cut, zip(left, right)))


def sel_evt(st):
    """Select usable events from a stream of headers.

    Keeps traces with magnitude > 5, depth <= 40 km and epicentral
    distance between 1500 and 8000 km, and returns the corresponding
    event-file basenames "X2.<sta>.<year>.<jday>.<h>.<m>.<s>.00".
    """
    def helper(tr):
        sac = tr.stats.sac
        if sac['mag'] > 5 and sac['evdp'] <= 40 and 1500 < sac['dist'] < 8000:
            t = tr.stats.starttime
            return "X2.%s.%d.%03d.%02d.%02d.%02d.00" % (
                tr.stats.station, t.year, t.julday, t.hour, t.minute, t.second)
        return None
    # filter(None, ...) drops the None entries returned for rejected traces
    return list(filter(None, map(helper, st)))


def check_corr2(Z, H, freqs, delta):
    """Normalized Z-H cross-correlation evaluated at a quarter-period lag.

    :param Z, H: lists of narrow-band-filtered arrays, one pair per frequency
    :param freqs: center frequencies (Hz) matching Z/H
    :param delta: sample interval (s)
    :return: ndarray of correlation values, one per frequency
    """
    norm = [(np.dot(z, z) * np.dot(h, h)) ** 0.5 for z, h in zip(Z, H)]
    COR = [signal.correlate(z, h) / c for z, h, c in zip(Z, H, norm)]

    def at_quarter_lag(cor, freq):
        # zero lag sits at the middle of the 'full' correlation output
        n = int((len(cor) - 1) / 2) + int(1 / freq / 4 / delta)
        return cor[n]
    return np.array([at_quarter_lag(c, f) for c, f in zip(COR, freqs)])
def cal_zhratio(zfile, rfile, freqs, bpwidth=0.002, outname=None, plot=None,
                threshold=0.8, minpoint=3):
    """Measure the Rayleigh-wave Z/H (ellipticity) ratio of one event.

    Both components are detrended, the vertical is Hilbert-transformed
    (so Z and R become in phase for Rayleigh motion), each is narrow-band
    filtered around every frequency in *freqs*, windowed around the group
    arrival, and the envelope-maximum ratio is taken per frequency.

    :param zfile, rfile: vertical / radial SAC file names
    :param freqs: center frequencies (Hz)
    :param bpwidth: half bandwidth of each FIR band-pass (Hz)
    :param outname: if given, save (freq, ratio) pairs to this file
    :param plot: 1 -> ratio scatter, 2 -> per-frequency diagnostics
    :param threshold: minimum Z-H correlation to accept a frequency
    :param minpoint: minimum number of accepted frequencies
    :return: ndarray of ratios (np.nan where correlation <= threshold);
             0 on read/length/NaN/group-velocity errors; False when fewer
             than minpoint frequencies pass the correlation test
    """
    try:
        st = obspy.read(zfile)
        st += obspy.read(rfile)
    except TypeError:
        print("file read error %s %s" % (zfile, rfile))
        return 0
    if len(st[0].data) != len(st[1].data):
        print('different length error:')
        print(zfile)
        return 0
    if any(np.isnan(st[0].data)) or any(np.isnan(st[1].data)):
        print('data have nan value error:')
        print(zfile)
        return 0
    st[0].data = signal.detrend(st[0].data)
    # hilbert transform z component: aligns Z with R for Rayleigh waves
    st[0].data = hilbert(st[0].data)
    st[1].data = signal.detrend(st[1].data)
    delta = st[0].stats.delta
    def_b = lambda f: signal.firwin(1001, [f - bpwidth, f + bpwidth],
                                    window=('kaiser', 9), nyq=1 / delta / 2,
                                    pass_zero=False)
    B = list(map(def_b, freqs))
    try:
        Win = group_vel_win(zfile, st[0].stats, freqs, 1)
    except TypeError:
        print('calculate group velocity error:')
        print(zfile)
        return 0
    Z = [signal.lfilter(b, 1, st[0].data) for b in B]
    Z = [w * z for w, z in zip(Win, Z)]
    H = [signal.lfilter(b, 1, st[1].data) for b in B]
    H = [w * h for w, h in zip(Win, H)]
    Z_env = list(map(envelope, Z))
    H_env = list(map(envelope, H))

    cor_eff = np.array([np.corrcoef(h, z)[1, 0] for h, z in zip(H, Z)])
    if len(cor_eff[cor_eff > threshold]) < minpoint:
        return False

    ratio = np.array([max(ze) / max(he) for ze, he in zip(Z_env, H_env)])
    result = np.vstack((freqs, ratio)).transpose()
    if outname:
        np.savetxt(outname, result, fmt='%.2f')
    if plot == 1:
        plt.plot(freqs, ratio, '.')
        plt.show()
    if plot == 2:
        plot_progress(st[0].data, st[1].data, Z, Z_env, H, H_env, freqs, Win)
    # np.float was removed in NumPy 1.24; plain float keeps dtype float64
    return np.array([r if c > threshold else np.nan
                     for r, c in zip(ratio, cor_eff)], dtype=float)


def do_single_sta(sta_dir, freqs):
    """Z/H ratios of one station binned by back-azimuth (30-degree bins).

    :param sta_dir: directory holding the station's event files (with '/')
    :param freqs: frequencies passed to cal_zhratio
    :return: (all accepted results, their back-azimuths, bin-center
              azimuths, per-bin nan-mean curves, per-bin nan-std curves)
    """
    st = obspy.read(sta_dir + '*.Z', headonly=True)
    commons = sel_evt(st)
    print(len(commons))
    baz, results = [], []
    for common in commons:
        fname = sta_dir + common
        try:
            temp = cal_zhratio(fname + '.Z', fname + '.R', freqs,
                               plot=0, threshold=0.8)
        except Exception as e:
            # best effort: a single bad event must not stop the station
            print(fname + ':')
            print(e)
            temp = False
        if isinstance(temp, np.ndarray):
            results.append(temp)
            st = obspy.read(fname + '.Z', headonly=True)
            baz.append(st[0].stats.sac['baz'])
    # sort descending so pop() yields ascending back-azimuth
    ratio_baz = sorted(zip(results, baz), key=lambda x: x[1], reverse=True)
    deg_bin = 30
    cent_bazs, mean_results, std_results = [], [], []
    for rbound in range(deg_bin, 360, deg_bin):
        cur_results = []
        while ratio_baz and ratio_baz[-1][1] < rbound:
            cur_results.append(ratio_baz.pop()[0])
        if len(cur_results) > 1:
            cent_bazs.append(rbound - deg_bin // 2)
            mean_results.append(np.nanmean(cur_results, axis=0))
            std_results.append(np.nanstd(cur_results, axis=0))
    return results, baz, cent_bazs, mean_results, std_results


if __name__ == '__main__':
    if False:  # plot synthetic test
        dists = ['20', '30', '40', '50', '60', '70', '80']
        rows, cols = len(dists) // 2 + 1, 2
        fig = plt.figure()
        freqs = np.arange(0.01, 0.21, 0.01)
        for i, dist in enumerate(dists):
            ax = fig.add_subplot(rows, cols, i + 1)
            zfile = 'g%s.z' % dist
            rfile = 'g%s.r' % dist
            ex = cal_zhratio(zfile, rfile, freqs, threshold=0.8)
            forward = np.loadtxt('zhr_forward')

            ax.plot(forward[0], forward[1])
            ax.plot(freqs, ex, '.', color='red')
            ax.set_ylabel('Z/Hratio ' + dist)
            ax.set_xlim(0.005, 0.21)
            ax.set_ylim(1, 1.6)
            ax.set_xlabel('frequency(Hz)')
        plt.show()
        # plt.savefig('synthetic.png')
218 | if False: 219 | freqs = np.arange(0.01,0.21,0.01) 220 | cal_zhratio('g30_cut.z','g30_cut.r',freqs,plot=2) 221 | if False: 222 | root = '/home/haosj/data/tibet/' 223 | sta = pandas.read_table(root+'metadata/ordos_sta.lst', 224 | names=['name','lat','lon','height'],sep='\s+', 225 | dtype={'name':str,'lat':np.float,'lon':np.float64}) 226 | Mean,Std = [],[] 227 | mask = [True for i in range(len(sta))] 228 | for i in range(sta.shape[0]): 229 | filename = '%sped/%s/*.Z' %(root,sta['name'][i]) 230 | st = obspy.read(filename,headonly=True) 231 | commons = sel_evt(st) 232 | freqs = np.arange(0.01,0.11,0.01) 233 | results = [] 234 | for common in commons: 235 | fname = '%sped/%s/%s' %(root,sta['name'][i],common) 236 | temp = cal_zhratio(fname+'.Z',fname+'.R',freqs,plot=0, 237 | threshold=0.8) 238 | if isinstance(temp,np.ndarray): 239 | results.append(temp) 240 | if len(results)>1: 241 | results = np.array(results) 242 | for i in range(len(freqs)): 243 | if np.count_nonzero(~np.isnan(results[:,i]))<3: 244 | results[:,i] = np.nan 245 | Mean.append(np.nanmean(results,axis=0)) 246 | Std.append(np.nanstd(results,axis=0)) 247 | else: 248 | mask[i] = False 249 | sta = sta[mask] 250 | Mean = np.array(Mean) 251 | Std = np.array(Std) 252 | lat = np.array(sta['lat']) 253 | lon = np.array(sta['lon']) 254 | zh = Mean[:,5] 255 | if True: 256 | freqs = np.arange(0.02,0.11,0.01) 257 | out = '/home/haosj/seis/zhratio/anis/' 258 | #stas = os.listdir('/home/haosj/data/tibet/ped/') 259 | stas = ['64046','64050','64053'] 260 | for sta in stas: 261 | results,baz,cent_bazs,results_mean,results_std = do_single_sta('/home/haosj/data/tibet/ped/%s/' %sta,freqs) 262 | mean,std = np.array(results_mean),np.array(results_std) 263 | results = np.array(results) 264 | outdir = out+sta+'/' 265 | os.makedirs(outdir,exist_ok=True) 266 | for i,freq in enumerate(freqs): 267 | fig, ax = plt.subplots(1) 268 | ax.plot(cent_bazs, mean[:,i], '.') 269 | ax.errorbar(cent_bazs, mean[:,i], yerr=std[:,i], fmt="none") 
270 | ax.set_xlim(0,360) 271 | fmean = 'mean_' + str(freq) + '.png' 272 | plt.savefig(outdir+fmean) 273 | 274 | fig, ax = plt.subplots(1) 275 | ax.plot(baz,results[:,i],'.') 276 | ax.set_xlim(0,360) 277 | fscat = 'all_' + str(freq) +'.png' 278 | plt.savefig(outdir+fscat) 279 | 280 | 281 | -------------------------------------------------------------------------------- /TwoStation/Station.py: -------------------------------------------------------------------------------- 1 | from distaz import distaz 2 | from Geopy.Event import Events 3 | import cartopy.crs as ccrs 4 | import matplotlib.pyplot as plt 5 | import matplotlib.gridspec as gridspec 6 | from Geopy.TwoStation import two_station as ts 7 | from Geopy import cps 8 | import obspy 9 | import numpy as np 10 | from scipy import signal 11 | import os 12 | 13 | 14 | class Pair(object): 15 | """ 16 | usage: 17 | pair.setevents -> pair.do_ts 18 | or pair.setevents -> pair.load 19 | """ 20 | def __init__(self, sta1, lat1, lon1, sta2, lat2, lon2): 21 | self.sta1 = sta1 22 | self.sta2 = sta2 23 | self.latlon1 = (lat1, lon1) 24 | self.latlon2 = (lat2, lon2) 25 | self.staAz = distaz(*self.latlon2, *self.latlon1).getAz() 26 | self.staDist = distaz(*self.latlon2, *self.latlon1).degreesToKilometers() 27 | self.evts = [] 28 | self.dispfile = [] 29 | self.disp = [] 30 | self.PRANGE = (10, 80) 31 | 32 | def getdiffaz(self, event): 33 | return abs(self.staAz - distaz(*event.getlatlon(), *self.latlon1).getAz()) 34 | 35 | def setevents(self, events): 36 | for evt in events: 37 | flag = True 38 | diff = self.getdiffaz(evt) 39 | if not(diff < 2.0 or abs(diff - 180) < 2.0): 40 | flag = False 41 | try: 42 | evt.getfile(self.sta1) 43 | evt.getfile(self.sta2) 44 | except KeyError: 45 | flag = False 46 | if flag: 47 | self.evts.append(evt) 48 | return 49 | 50 | def plotlocation(self): 51 | fig = plt.figure(figsize=(10, 5)) 52 | ax = fig.add_subplot( 53 | 1, 1, 1, projection=ccrs.AzimuthalEquidistant(*self.latlon1[::-1])) 54 | 
ax.set_global() 55 | ax.stock_img() 56 | ax.coastlines() 57 | ax.plot(*self.latlon1[::-1], marker='^', markersize=10, transform=ccrs.Geodetic()) 58 | for evt in self.evts: 59 | ax.plot( 60 | *evt.getlatlon()[::-1], marker='*', color='red', 61 | markersize=5, transform=ccrs.Geodetic()) 62 | plt.show() 63 | 64 | @staticmethod 65 | def cut(st, start, end): 66 | delta = st[0].stats.delta 67 | width = end - start 68 | npts = int(width / delta) 69 | n0 = int((start - st[0].stats.starttime) / delta) 70 | n1 = int((start - st[1].stats.starttime) / delta) 71 | if n0 < 0 or n1 < 0 or n0+npts >= len(st[0].data) or n1+npts >= len(st[1].data): 72 | raise IndexError 73 | st[0].data = st[0].data[n0:n0+npts] 74 | st[1].data = st[1].data[n1:n1+npts] 75 | st[0].stats.starttime = start 76 | st[1].stats.starttime = start 77 | 78 | def do_ts(self, out_path='/home/haosj/data/neTibet/result/', wavetype='R', manual=False): 79 | snrthreshold = 5 80 | self._set_reference_disp(wavetype) 81 | out_path += self.sta1 + '_' + self.sta2 + '/' 82 | if not os.path.exists(out_path): 83 | os.mkdir(out_path) 84 | evt_temp = [] 85 | for evt in self.evts: 86 | out_file = out_path + str(evt.gettime()) 87 | # if os.path.exists(out_file): 88 | # phvel_read = np.loadtxt(out_file) 89 | # self.disp.append(phvel_read) 90 | # print('pass', str(evt.gettime())) 91 | # continue 92 | st = obspy.read(evt.getfile(self.sta1)) 93 | st += obspy.read(evt.getfile(self.sta2)) 94 | dist = (st[0].stats.sac.dist + st[1].stats.sac.dist) / 2.0 95 | starttime = evt.gettime() + dist/6 96 | endtime = evt.gettime() + dist/2 97 | try: 98 | Pair.cut(st, starttime, endtime) 99 | except IndexError: 100 | continue 101 | if all(np.isnan(st[0].data)) or all(np.isnan(st[1].data)): 102 | continue 103 | if snr(st[0]) < snrthreshold or snr(st[1]) < snrthreshold: 104 | continue 105 | print(st) 106 | print(snr(st[0]), snr(st[1])) 107 | # prepare plot 108 | fig = plt.gcf() 109 | fig.set_size_inches(15, 5) 110 | nrows = 1 + (self.PRANGE[1] - 
self.PRANGE[0]) // 10 111 | gs = gridspec.GridSpec(nrows, 21) 112 | axes = [] 113 | for i in range(nrows): 114 | axes.append([plt.subplot(gs[i, :7]), plt.subplot(gs[i, 7:14])]) 115 | ax2 = plt.subplot(gs[:, 15:]) 116 | # compute and select 117 | phvel = ts.two_station(st, self.staDist, (2.9, 4.3), self.PRANGE, (axes, ax2)) 118 | hand = self._selectvelocity(phvel, ax2, manual=manual) 119 | if hand: 120 | self.disp.append(phvel) 121 | np.savetxt(out_file, phvel) 122 | self.dispfile.append(out_file) 123 | evt_temp.append(evt) 124 | self.disp = np.array(self.disp) 125 | self.evts = evt_temp 126 | # self.plot() 127 | 128 | def load(self, directory): 129 | """ 130 | load from disp files 131 | :param directory: result directory 132 | :return: None 133 | """ 134 | result_path = directory + self.sta1 + '_' + self.sta2 + '/' 135 | evt_temp = [] 136 | for evt in self.evts: 137 | target = result_path + str(evt.gettime()) 138 | if os.path.exists(target): 139 | phvel_read = np.loadtxt(target) 140 | self.disp.append(phvel_read) 141 | evt_temp.append(evt) 142 | self.evts = evt_temp 143 | 144 | def two_side_plot(self): 145 | """ 146 | analyze branch phenomenon 147 | plot dispersion curve from both side of station pairs in different color 148 | :return: None 149 | """ 150 | fig = plt.figure(figsize=(10, 5)) 151 | ax1 = fig.add_subplot( 152 | 1, 2, 1, projection=ccrs.AzimuthalEquidistant(*self.latlon1[::-1])) 153 | ax2 = fig.add_subplot(1, 2, 2) 154 | ax1.set_global() 155 | ax1.stock_img() 156 | ax1.coastlines() 157 | colors = ['red', 'blue'] 158 | stla = (self.latlon1[0] + self.latlon2[0]) / 2.0 159 | for i in range(len(self.evts)): 160 | evla = self.evts[i].getlatlon()[0] 161 | marker = 0 if stla > evla else 1 162 | plotdispax(self.disp[i], np.arange(*self.PRANGE), ax2, color=colors[marker]) 163 | ax1.plot( 164 | *self.evts[i].getlatlon()[::-1], marker='*', markersize=5, 165 | transform=ccrs.Geodetic(), color=colors[marker]) 166 | plt.show() 167 | 168 | def _selectvelocity(self, 
velocity, ax, manual=True): 169 | """ 170 | select phase velocity,set unwanted data to np.NaN 171 | :param velocity: ndarray,phase velocity 172 | :return: if want to keep this data return True else False 173 | """ 174 | th1, th2 = 0.15, 0.008 175 | th_minlen = 50 176 | 177 | mask = (abs(velocity - self.refdisp)/self.refdisp < th1) 178 | mask = mask & (abs( 179 | np.gradient(velocity) - np.gradient(self.refdisp))/self.refdisp < th2) 180 | i = 0 181 | while i < len(mask): 182 | while i < len(mask) and (not mask[i]): 183 | i += 1 184 | j = i 185 | while i < len(mask) and mask[i]: 186 | i += 1 187 | if i-j < th_minlen: 188 | mask[j:i] = False 189 | if manual: # if manual==True, manually selected data 190 | plotdispax(velocity, np.arange(*self.PRANGE), ax) 191 | plt.show(block=False) 192 | hand = input('>') 193 | pmin, pmax = self.PRANGE 194 | if hand == 'd': 195 | return False 196 | elif not hand: 197 | pass 198 | elif hand[0] == 's': 199 | temp = hand.split(' ') 200 | start, end = int(temp[1]) - pmin, int(temp[2]) - pmin 201 | mask = np.array([False for _ in range(*self.PRANGE)], dtype='bool') 202 | mask[start:end] = True 203 | plt.close() 204 | if all(~mask): 205 | return False 206 | for i in range(len(mask)): 207 | if not mask[i]: 208 | velocity[i] = np.NaN 209 | return True 210 | 211 | def _set_reference_disp(self, wavetype): 212 | """ 213 | set reference disp curve, in order to auto select results 214 | :return: None 215 | """ 216 | # TODO add love wave support 217 | meanlat = (self.latlon1[0] + self.latlon2[0]) / 2.0 218 | meanlon = (self.latlon1[1] + self.latlon2[1]) / 2.0 219 | cps.litho_to_mod96(meanlat, meanlon, 'litho.m') 220 | if wavetype == 'R': 221 | result = cps.forward_rayleigh('litho.m') 222 | elif wavetype == 'L': 223 | result = cps.forward_love('litho.m') 224 | os.remove('litho.m') 225 | self.refdisp = np.interp(range(*self.PRANGE), 1.0/result[0][::-1], result[1][::-1]) 226 | 227 | def plot(self): 228 | if len(self.disp) == 0: 229 | print('no 
class Pairs(object):
    """All station pairs built from a station-list file.

    The file has one "name lat lon ..." record per line (space separated);
    a Pair object is created for every 2-combination of stations and keyed
    by the order-independent name pair.
    """

    def __init__(self, stafile, events):
        self.events = events
        self.pairs = {}
        stations = []
        with open(stafile, 'r') as f:
            for line in f:
                fields = line[:-1].split(' ')
                stations.append([fields[0], float(fields[1]), float(fields[2])])
        for i in range(len(stations) - 1):
            for j in range(i + 1, len(stations)):
                key = self._getkey(stations[i][0], stations[j][0])
                self.pairs[key] = Pair(*stations[i], *stations[j])

    def getpair(self, sta1, sta2):
        """Return the Pair for two stations, running the two-station
        analysis on first access."""
        pair = self.pairs[self._getkey(sta1, sta2)]
        # len() works both for the initial list and for the ndarray that
        # do_ts leaves behind; the original `pair.disp == []` is ambiguous
        # (and raises) once disp is an ndarray
        if len(pair.disp) == 0:
            pair.setevents(self.events)
            pair.plotlocation()
            pair.do_ts()
        return pair

    def _getkey(self, name1, name2):
        """Order-independent dict key 'A_B' with A < B."""
        if name1 < name2:
            return name1 + '_' + name2
        return name2 + '_' + name1

    def plotdisp(self, *stanames):
        """Plot dispersion curves of several pairs, one color per pair.

        :param stanames: (sta1, sta2) tuples
        """
        colors = ['b', 'r', 'g', 'c', 'm', 'y', 'k', 'w']
        PRANGE = (20, 80)
        if len(stanames) > len(colors):
            print('too many stations')
            return
        colors = iter(colors)
        fig, ax = plt.subplots()
        ax.set_xlabel('period(s)')
        ax.set_ylabel('phase velocity(km/s)')
        ax.set_xlim(*PRANGE)
        ax.set_ylim(2.9, 4.2)
        handle = []
        for staname in stanames:
            color = next(colors)
            key = self._getkey(*staname)
            pair = self.getpair(*staname)
            for i in range(len(pair.disp)):
                temp, = ax.plot(np.arange(*PRANGE), pair.disp[i],
                                '-', color=color, label=key)
                if i == 0:  # one legend entry per pair
                    handle.append(temp)
        ax.legend(handles=handle)
        plt.show()
'-', color=color, label=key) 294 | if i == 0: 295 | handle.append(temp) 296 | ax.legend(handles=handle) 297 | plt.show() 298 | 299 | 300 | def plotdispfile(filelist, pmin, pmax): 301 | fig, ax = plt.subplots() 302 | ax.set_xlabel('period(s)') 303 | ax.set_ylabel('phase velocity') 304 | ax.set_ylim(3, 5) 305 | ax.set_xlim(pmin, pmax) 306 | for f in filelist: 307 | disp = np.loadtxt(f) 308 | disp = disp.transpose() 309 | indexer = disp[0].argsort() 310 | ax.plot(disp[0][indexer], disp[1][indexer], '-') 311 | plt.show() 312 | 313 | 314 | def snr(tr): 315 | data = abs(tr.data) 316 | mid = len(data)//2 317 | return data.max()/data[mid:].mean() 318 | 319 | 320 | def plotdispax(disp, peroid, ax, color='black'): 321 | ax.plot(peroid, disp, color=color) 322 | ax.set_xlabel('period(s)') 323 | ax.set_ylabel('phase velocity') 324 | ax.set_ylim(2.9, 4.3) 325 | ax.set_xlim(peroid[0], peroid[-1]) 326 | ax.grid(color='grey', linestyle='dashed') 327 | 328 | 329 | if __name__ == '__main__': 330 | directory = '/home/haosj/data/neTibet/data/' 331 | evts = Events() 332 | evts.addfromdir(directory, '2015*') 333 | # pair = Pair('15639', 38.681, 104.352, '61061', 36.526, 108.772) 334 | pairs = Pairs('/home/haosj/data/neTibet/sta_36_south.lst', evts) 335 | # pair = pairs.getpair('51535', '62315') 336 | -------------------------------------------------------------------------------- /dwseis/mail: -------------------------------------------------------------------------------- 1 | .NAME code6hao 2 | .INST nju 3 | .MAIL nju 4 | .EMAIL seishao@126.com 5 | .PHONE 88888 6 | .FAX 66666 7 | .MEDIA FTP 8 | .ALTERNATE MEDIA DLT 9 | .ALTERNATE MEDIA DVD-R 10 | .LABEL HELLQ03C 11 | .QUALITY B 12 | .END 13 | HELL TA 2005 08 01 04 40 00.0 2005 08 01 06 03 00.0 1 BHZ 14 | Q03C TA 2005 08 01 04 40 00.0 2005 08 01 06 03 00.0 1 BHZ 15 | HELL TA 2005 08 03 01 23 00.0 2005 08 03 02 46 00.0 1 BHZ 16 | Q03C TA 2005 08 03 01 23 00.0 2005 08 03 02 46 00.0 1 BHZ 17 | HELL TA 2005 08 05 00 56 00.0 2005 08 05 02 20 
00.0 1 BHZ 18 | Q03C TA 2005 08 05 00 56 00.0 2005 08 05 02 20 00.0 1 BHZ 19 | HELL TA 2005 08 13 06 25 00.0 2005 08 13 07 49 00.0 1 BHZ 20 | Q03C TA 2005 08 13 06 25 00.0 2005 08 13 07 49 00.0 1 BHZ 21 | HELL TA 2005 08 20 05 05 00.0 2005 08 20 06 28 00.0 1 BHZ 22 | Q03C TA 2005 08 20 05 05 00.0 2005 08 20 06 28 00.0 1 BHZ 23 | HELL TA 2005 10 02 03 15 00.0 2005 10 02 04 39 00.0 1 BHZ 24 | Q03C TA 2005 10 02 03 15 00.0 2005 10 02 04 39 00.0 1 BHZ 25 | HELL TA 2005 11 16 16 51 00.0 2005 11 16 18 14 00.0 1 BHZ 26 | Q03C TA 2005 11 16 16 51 00.0 2005 11 16 18 14 00.0 1 BHZ 27 | HELL TA 2005 12 12 21 01 00.0 2005 12 12 22 25 00.0 1 BHZ 28 | Q03C TA 2005 12 12 21 01 00.0 2005 12 12 22 25 00.0 1 BHZ 29 | HELL TA 2006 01 03 10 53 00.0 2006 01 03 12 16 00.0 1 BHZ 30 | Q03C TA 2006 01 03 10 53 00.0 2006 01 03 12 16 00.0 1 BHZ 31 | HELL TA 2006 02 21 21 00 00.0 2006 02 21 22 23 00.0 1 BHZ 32 | Q03C TA 2006 02 21 21 00 00.0 2006 02 21 22 23 00.0 1 BHZ 33 | HELL TA 2006 02 23 18 29 00.0 2006 02 23 19 52 00.0 1 BHZ 34 | Q03C TA 2006 02 23 18 29 00.0 2006 02 23 19 52 00.0 1 BHZ 35 | HELL TA 2006 04 22 02 04 00.0 2006 04 22 03 28 00.0 1 BHZ 36 | Q03C TA 2006 04 22 02 04 00.0 2006 04 22 03 28 00.0 1 BHZ 37 | HELL TA 2006 05 21 02 07 00.0 2006 05 21 03 30 00.0 1 BHZ 38 | Q03C TA 2006 05 21 02 07 00.0 2006 05 21 03 30 00.0 1 BHZ 39 | HELL TA 2006 06 05 18 02 00.0 2006 06 05 19 25 00.0 1 BHZ 40 | Q03C TA 2006 06 05 18 02 00.0 2006 06 05 19 25 00.0 1 BHZ 41 | HELL TA 2006 06 14 04 18 00.0 2006 06 14 05 42 00.0 1 BHZ 42 | Q03C TA 2006 06 14 04 18 00.0 2006 06 14 05 42 00.0 1 BHZ 43 | HELL TA 2006 06 14 04 46 00.0 2006 06 14 06 10 00.0 1 BHZ 44 | Q03C TA 2006 06 14 04 46 00.0 2006 06 14 06 10 00.0 1 BHZ 45 | HELL TA 2006 06 17 10 00 00.0 2006 06 17 11 23 00.0 1 BHZ 46 | Q03C TA 2006 06 17 10 00 00.0 2006 06 17 11 23 00.0 1 BHZ 47 | HELL TA 2006 06 25 01 46 00.0 2006 06 25 03 10 00.0 1 BHZ 48 | Q03C TA 2006 06 25 01 46 00.0 2006 06 25 03 10 00.0 1 BHZ 49 | HELL TA 2006 06 27 17 16 00.0 
2006 06 27 18 40 00.0 1 BHZ 50 | Q03C TA 2006 06 27 17 16 00.0 2006 06 27 18 40 00.0 1 BHZ 51 | HELL TA 2006 07 02 03 53 00.0 2006 07 02 05 17 00.0 1 BHZ 52 | Q03C TA 2006 07 02 03 53 00.0 2006 07 02 05 17 00.0 1 BHZ 53 | HELL TA 2006 07 07 17 43 00.0 2006 07 07 19 06 00.0 1 BHZ 54 | Q03C TA 2006 07 07 17 43 00.0 2006 07 07 19 06 00.0 1 BHZ 55 | HELL TA 2006 07 09 04 16 00.0 2006 07 09 05 39 00.0 1 BHZ 56 | Q03C TA 2006 07 09 04 16 00.0 2006 07 09 05 39 00.0 1 BHZ 57 | HELL TA 2006 07 09 05 46 00.0 2006 07 09 07 10 00.0 1 BHZ 58 | Q03C TA 2006 07 09 05 46 00.0 2006 07 09 07 10 00.0 1 BHZ 59 | HELL TA 2006 08 04 07 45 00.0 2006 08 04 09 09 00.0 1 BHZ 60 | Q03C TA 2006 08 04 07 45 00.0 2006 08 04 09 09 00.0 1 BHZ 61 | HELL TA 2006 08 10 07 53 00.0 2006 08 10 09 16 00.0 1 BHZ 62 | Q03C TA 2006 08 10 07 53 00.0 2006 08 10 09 16 00.0 1 BHZ 63 | HELL TA 2006 08 14 14 15 00.0 2006 08 14 15 39 00.0 1 BHZ 64 | Q03C TA 2006 08 14 14 15 00.0 2006 08 14 15 39 00.0 1 BHZ 65 | HELL TA 2006 08 15 12 26 00.0 2006 08 15 13 49 00.0 1 BHZ 66 | Q03C TA 2006 08 15 12 26 00.0 2006 08 15 13 49 00.0 1 BHZ 67 | HELL TA 2006 08 22 16 51 00.0 2006 08 22 18 14 00.0 1 BHZ 68 | Q03C TA 2006 08 22 16 51 00.0 2006 08 22 18 14 00.0 1 BHZ 69 | HELL TA 2006 09 18 03 45 00.0 2006 09 18 05 09 00.0 1 BHZ 70 | Q03C TA 2006 09 18 03 45 00.0 2006 09 18 05 09 00.0 1 BHZ 71 | HELL TA 2006 09 26 02 25 00.0 2006 09 26 03 48 00.0 1 BHZ 72 | Q03C TA 2006 09 26 02 25 00.0 2006 09 26 03 48 00.0 1 BHZ 73 | HELL TA 2006 09 26 23 51 00.0 2006 09 27 01 15 00.0 1 BHZ 74 | Q03C TA 2006 09 26 23 51 00.0 2006 09 27 01 15 00.0 1 BHZ 75 | HELL TA 2006 09 27 08 04 00.0 2006 09 27 09 27 00.0 1 BHZ 76 | Q03C TA 2006 09 27 08 04 00.0 2006 09 27 09 27 00.0 1 BHZ 77 | HELL TA 2006 09 28 01 36 00.0 2006 09 28 03 00 00.0 1 BHZ 78 | Q03C TA 2006 09 28 01 36 00.0 2006 09 28 03 00 00.0 1 BHZ 79 | HELL TA 2006 09 28 23 56 00.0 2006 09 29 01 19 00.0 1 BHZ 80 | Q03C TA 2006 09 28 23 56 00.0 2006 09 29 01 19 00.0 1 BHZ 81 | HELL TA 2006 
09 30 18 33 00.0 2006 09 30 19 56 00.0 1 BHZ 82 | Q03C TA 2006 09 30 18 33 00.0 2006 09 30 19 56 00.0 1 BHZ 83 | HELL TA 2006 10 01 00 04 00.0 2006 10 01 01 27 00.0 1 BHZ 84 | Q03C TA 2006 10 01 00 04 00.0 2006 10 01 01 27 00.0 1 BHZ 85 | HELL TA 2006 10 01 02 23 00.0 2006 10 01 03 47 00.0 1 BHZ 86 | Q03C TA 2006 10 01 02 23 00.0 2006 10 01 03 47 00.0 1 BHZ 87 | HELL TA 2006 10 01 09 06 00.0 2006 10 01 10 29 00.0 1 BHZ 88 | Q03C TA 2006 10 01 09 06 00.0 2006 10 01 10 29 00.0 1 BHZ 89 | HELL TA 2006 10 01 13 07 00.0 2006 10 01 14 30 00.0 1 BHZ 90 | Q03C TA 2006 10 01 13 07 00.0 2006 10 01 14 30 00.0 1 BHZ 91 | HELL TA 2006 10 02 07 09 00.0 2006 10 02 08 32 00.0 1 BHZ 92 | Q03C TA 2006 10 02 07 09 00.0 2006 10 02 08 32 00.0 1 BHZ 93 | HELL TA 2006 10 02 17 25 00.0 2006 10 02 18 49 00.0 1 BHZ 94 | Q03C TA 2006 10 02 17 25 00.0 2006 10 02 18 49 00.0 1 BHZ 95 | HELL TA 2006 10 03 17 10 00.0 2006 10 03 18 33 00.0 1 BHZ 96 | Q03C TA 2006 10 03 17 10 00.0 2006 10 03 18 33 00.0 1 BHZ 97 | HELL TA 2006 10 04 19 44 00.0 2006 10 04 21 07 00.0 1 BHZ 98 | Q03C TA 2006 10 04 19 44 00.0 2006 10 04 21 07 00.0 1 BHZ 99 | HELL TA 2006 10 13 05 21 00.0 2006 10 13 06 44 00.0 1 BHZ 100 | Q03C TA 2006 10 13 05 21 00.0 2006 10 13 06 44 00.0 1 BHZ 101 | HELL TA 2006 10 13 13 47 00.0 2006 10 13 15 10 00.0 1 BHZ 102 | Q03C TA 2006 10 13 13 47 00.0 2006 10 13 15 10 00.0 1 BHZ 103 | HELL TA 2006 10 19 08 52 00.0 2006 10 19 10 16 00.0 1 BHZ 104 | Q03C TA 2006 10 19 08 52 00.0 2006 10 19 10 16 00.0 1 BHZ 105 | HELL TA 2006 11 08 14 56 00.0 2006 11 08 16 20 00.0 1 BHZ 106 | Q03C TA 2006 11 08 14 56 00.0 2006 11 08 16 20 00.0 1 BHZ 107 | HELL TA 2006 11 08 20 47 00.0 2006 11 08 22 10 00.0 1 BHZ 108 | Q03C TA 2006 11 08 20 47 00.0 2006 11 08 22 10 00.0 1 BHZ 109 | HELL TA 2006 11 09 02 26 00.0 2006 11 09 03 49 00.0 1 BHZ 110 | Q03C TA 2006 11 09 02 26 00.0 2006 11 09 03 49 00.0 1 BHZ 111 | HELL TA 2006 11 10 06 33 00.0 2006 11 10 07 56 00.0 1 BHZ 112 | Q03C TA 2006 11 10 06 33 00.0 2006 11 10 07 56 
00.0 1 BHZ 113 | HELL TA 2006 11 15 19 25 00.0 2006 11 15 20 48 00.0 1 BHZ 114 | Q03C TA 2006 11 15 19 25 00.0 2006 11 15 20 48 00.0 1 BHZ 115 | HELL TA 2006 11 15 21 22 00.0 2006 11 15 22 45 00.0 1 BHZ 116 | Q03C TA 2006 11 15 21 22 00.0 2006 11 15 22 45 00.0 1 BHZ 117 | HELL TA 2006 11 16 04 57 00.0 2006 11 16 06 20 00.0 1 BHZ 118 | Q03C TA 2006 11 16 04 57 00.0 2006 11 16 06 20 00.0 1 BHZ 119 | HELL TA 2006 11 16 05 34 00.0 2006 11 16 06 57 00.0 1 BHZ 120 | Q03C TA 2006 11 16 05 34 00.0 2006 11 16 06 57 00.0 1 BHZ 121 | HELL TA 2006 11 16 08 37 00.0 2006 11 16 10 00 00.0 1 BHZ 122 | Q03C TA 2006 11 16 08 37 00.0 2006 11 16 10 00 00.0 1 BHZ 123 | HELL TA 2006 11 17 04 09 00.0 2006 11 17 05 33 00.0 1 BHZ 124 | Q03C TA 2006 11 17 04 09 00.0 2006 11 17 05 33 00.0 1 BHZ 125 | HELL TA 2006 11 17 06 33 00.0 2006 11 17 07 57 00.0 1 BHZ 126 | Q03C TA 2006 11 17 06 33 00.0 2006 11 17 07 57 00.0 1 BHZ 127 | HELL TA 2006 11 17 08 32 00.0 2006 11 17 09 56 00.0 1 BHZ 128 | Q03C TA 2006 11 17 08 32 00.0 2006 11 17 09 56 00.0 1 BHZ 129 | HELL TA 2006 11 17 13 48 00.0 2006 11 17 15 11 00.0 1 BHZ 130 | Q03C TA 2006 11 17 13 48 00.0 2006 11 17 15 11 00.0 1 BHZ 131 | HELL TA 2006 11 17 17 12 00.0 2006 11 17 18 36 00.0 1 BHZ 132 | Q03C TA 2006 11 17 17 12 00.0 2006 11 17 18 36 00.0 1 BHZ 133 | HELL TA 2006 11 18 00 57 00.0 2006 11 18 02 20 00.0 1 BHZ 134 | Q03C TA 2006 11 18 00 57 00.0 2006 11 18 02 20 00.0 1 BHZ 135 | HELL TA 2006 11 18 02 35 00.0 2006 11 18 03 58 00.0 1 BHZ 136 | Q03C TA 2006 11 18 02 35 00.0 2006 11 18 03 58 00.0 1 BHZ 137 | HELL TA 2006 11 18 08 50 00.0 2006 11 18 10 14 00.0 1 BHZ 138 | Q03C TA 2006 11 18 08 50 00.0 2006 11 18 10 14 00.0 1 BHZ 139 | HELL TA 2006 11 18 18 54 00.0 2006 11 18 20 17 00.0 1 BHZ 140 | Q03C TA 2006 11 18 18 54 00.0 2006 11 18 20 17 00.0 1 BHZ 141 | HELL TA 2006 11 19 15 16 00.0 2006 11 19 16 40 00.0 1 BHZ 142 | Q03C TA 2006 11 19 15 16 00.0 2006 11 19 16 40 00.0 1 BHZ 143 | HELL TA 2006 11 20 23 05 00.0 2006 11 21 00 29 00.0 1 BHZ 144 
| Q03C TA 2006 11 20 23 05 00.0 2006 11 21 00 29 00.0 1 BHZ 145 | HELL TA 2006 11 21 15 47 00.0 2006 11 21 17 10 00.0 1 BHZ 146 | Q03C TA 2006 11 21 15 47 00.0 2006 11 21 17 10 00.0 1 BHZ 147 | HELL TA 2006 11 21 23 30 00.0 2006 11 22 00 53 00.0 1 BHZ 148 | Q03C TA 2006 11 21 23 30 00.0 2006 11 22 00 53 00.0 1 BHZ 149 | HELL TA 2006 11 22 02 41 00.0 2006 11 22 04 04 00.0 1 BHZ 150 | Q03C TA 2006 11 22 02 41 00.0 2006 11 22 04 04 00.0 1 BHZ 151 | HELL TA 2006 11 22 09 58 00.0 2006 11 22 11 22 00.0 1 BHZ 152 | Q03C TA 2006 11 22 09 58 00.0 2006 11 22 11 22 00.0 1 BHZ 153 | HELL TA 2006 11 22 10 37 00.0 2006 11 22 12 00 00.0 1 BHZ 154 | Q03C TA 2006 11 22 10 37 00.0 2006 11 22 12 00 00.0 1 BHZ 155 | HELL TA 2006 11 22 23 09 00.0 2006 11 23 00 33 00.0 1 BHZ 156 | Q03C TA 2006 11 22 23 09 00.0 2006 11 23 00 33 00.0 1 BHZ 157 | HELL TA 2006 11 23 20 04 00.0 2006 11 23 21 28 00.0 1 BHZ 158 | Q03C TA 2006 11 23 20 04 00.0 2006 11 23 21 28 00.0 1 BHZ 159 | HELL TA 2006 11 23 22 59 00.0 2006 11 24 00 22 00.0 1 BHZ 160 | Q03C TA 2006 11 23 22 59 00.0 2006 11 24 00 22 00.0 1 BHZ 161 | HELL TA 2006 11 24 15 34 00.0 2006 11 24 16 57 00.0 1 BHZ 162 | Q03C TA 2006 11 24 15 34 00.0 2006 11 24 16 57 00.0 1 BHZ 163 | HELL TA 2006 11 24 21 02 00.0 2006 11 24 22 25 00.0 1 BHZ 164 | Q03C TA 2006 11 24 21 02 00.0 2006 11 24 22 25 00.0 1 BHZ 165 | HELL TA 2006 11 26 12 58 00.0 2006 11 26 14 21 00.0 1 BHZ 166 | Q03C TA 2006 11 26 12 58 00.0 2006 11 26 14 21 00.0 1 BHZ 167 | HELL TA 2006 11 28 08 01 00.0 2006 11 28 09 25 00.0 1 BHZ 168 | Q03C TA 2006 11 28 08 01 00.0 2006 11 28 09 25 00.0 1 BHZ 169 | HELL TA 2006 11 30 01 39 00.0 2006 11 30 03 03 00.0 1 BHZ 170 | Q03C TA 2006 11 30 01 39 00.0 2006 11 30 03 03 00.0 1 BHZ 171 | HELL TA 2006 12 02 23 36 00.0 2006 12 03 00 59 00.0 1 BHZ 172 | Q03C TA 2006 12 02 23 36 00.0 2006 12 03 00 59 00.0 1 BHZ 173 | HELL TA 2006 12 07 19 10 00.0 2006 12 07 20 33 00.0 1 BHZ 174 | Q03C TA 2006 12 07 19 10 00.0 2006 12 07 20 33 00.0 1 BHZ 175 | HELL TA 2006 
12 12 22 32 00.0 2006 12 12 23 55 00.0 1 BHZ 176 | Q03C TA 2006 12 12 22 32 00.0 2006 12 12 23 55 00.0 1 BHZ 177 | HELL TA 2006 12 13 11 40 00.0 2006 12 13 13 04 00.0 1 BHZ 178 | Q03C TA 2006 12 13 11 40 00.0 2006 12 13 13 04 00.0 1 BHZ 179 | HELL TA 2006 12 17 08 41 00.0 2006 12 17 10 05 00.0 1 BHZ 180 | Q03C TA 2006 12 17 08 41 00.0 2006 12 17 10 05 00.0 1 BHZ 181 | HELL TA 2006 12 26 15 19 00.0 2006 12 26 16 43 00.0 1 BHZ 182 | Q03C TA 2006 12 26 15 19 00.0 2006 12 26 16 43 00.0 1 BHZ 183 | HELL TA 2006 12 30 22 34 00.0 2006 12 30 23 57 00.0 1 BHZ 184 | Q03C TA 2006 12 30 22 34 00.0 2006 12 30 23 57 00.0 1 BHZ 185 | HELL TA 2007 01 02 13 36 00.0 2007 01 02 15 00 00.0 1 BHZ 186 | Q03C TA 2007 01 02 13 36 00.0 2007 01 02 15 00 00.0 1 BHZ 187 | HELL TA 2007 01 07 08 01 00.0 2007 01 07 09 25 00.0 1 BHZ 188 | Q03C TA 2007 01 07 08 01 00.0 2007 01 07 09 25 00.0 1 BHZ 189 | HELL TA 2007 01 08 20 12 00.0 2007 01 08 21 35 00.0 1 BHZ 190 | Q03C TA 2007 01 08 20 12 00.0 2007 01 08 21 35 00.0 1 BHZ 191 | HELL TA 2007 01 10 18 19 00.0 2007 01 10 19 42 00.0 1 BHZ 192 | Q03C TA 2007 01 10 18 19 00.0 2007 01 10 19 42 00.0 1 BHZ 193 | HELL TA 2007 01 11 20 34 00.0 2007 01 11 21 58 00.0 1 BHZ 194 | Q03C TA 2007 01 11 20 34 00.0 2007 01 11 21 58 00.0 1 BHZ 195 | HELL TA 2007 01 13 04 23 00.0 2007 01 13 05 46 00.0 1 BHZ 196 | Q03C TA 2007 01 13 04 23 00.0 2007 01 13 05 46 00.0 1 BHZ 197 | HELL TA 2007 01 13 17 37 00.0 2007 01 13 19 00 00.0 1 BHZ 198 | Q03C TA 2007 01 13 17 37 00.0 2007 01 13 19 00 00.0 1 BHZ 199 | HELL TA 2007 01 13 19 37 00.0 2007 01 13 21 00 00.0 1 BHZ 200 | Q03C TA 2007 01 13 19 37 00.0 2007 01 13 21 00 00.0 1 BHZ 201 | HELL TA 2007 01 13 23 40 00.0 2007 01 14 01 03 00.0 1 BHZ 202 | Q03C TA 2007 01 13 23 40 00.0 2007 01 14 01 03 00.0 1 BHZ 203 | HELL TA 2007 01 14 05 22 00.0 2007 01 14 06 46 00.0 1 BHZ 204 | Q03C TA 2007 01 14 05 22 00.0 2007 01 14 06 46 00.0 1 BHZ 205 | HELL TA 2007 01 14 13 16 00.0 2007 01 14 14 39 00.0 1 BHZ 206 | Q03C TA 2007 01 14 13 16 
00.0 2007 01 14 14 39 00.0 1 BHZ 207 | HELL TA 2007 01 15 01 18 00.0 2007 01 15 02 41 00.0 1 BHZ 208 | Q03C TA 2007 01 15 01 18 00.0 2007 01 15 02 41 00.0 1 BHZ 209 | HELL TA 2007 01 17 09 45 00.0 2007 01 17 11 09 00.0 1 BHZ 210 | Q03C TA 2007 01 17 09 45 00.0 2007 01 17 11 09 00.0 1 BHZ 211 | HELL TA 2007 01 29 12 37 00.0 2007 01 29 14 00 00.0 1 BHZ 212 | Q03C TA 2007 01 29 12 37 00.0 2007 01 29 14 00 00.0 1 BHZ 213 | HELL TA 2007 02 05 06 42 00.0 2007 02 05 08 06 00.0 1 BHZ 214 | Q03C TA 2007 02 05 06 42 00.0 2007 02 05 08 06 00.0 1 BHZ 215 | HELL TA 2007 02 05 13 35 00.0 2007 02 05 14 58 00.0 1 BHZ 216 | Q03C TA 2007 02 05 13 35 00.0 2007 02 05 14 58 00.0 1 BHZ 217 | HELL TA 2007 02 08 07 15 00.0 2007 02 08 08 38 00.0 1 BHZ 218 | Q03C TA 2007 02 08 07 15 00.0 2007 02 08 08 38 00.0 1 BHZ 219 | HELL TA 2007 02 12 20 33 00.0 2007 02 12 21 56 00.0 1 BHZ 220 | Q03C TA 2007 02 12 20 33 00.0 2007 02 12 21 56 00.0 1 BHZ 221 | HELL TA 2007 02 24 07 33 00.0 2007 02 24 08 56 00.0 1 BHZ 222 | Q03C TA 2007 02 24 07 33 00.0 2007 02 24 08 56 00.0 1 BHZ 223 | HELL TA 2007 02 24 10 31 00.0 2007 02 24 11 54 00.0 1 BHZ 224 | Q03C TA 2007 02 24 10 31 00.0 2007 02 24 11 54 00.0 1 BHZ 225 | HELL TA 2007 03 22 05 45 00.0 2007 03 22 07 09 00.0 1 BHZ 226 | Q03C TA 2007 03 22 05 45 00.0 2007 03 22 07 09 00.0 1 BHZ 227 | HELL TA 2007 03 29 21 07 00.0 2007 03 29 22 30 00.0 1 BHZ 228 | Q03C TA 2007 03 29 21 07 00.0 2007 03 29 22 30 00.0 1 BHZ 229 | HELL TA 2007 04 05 21 27 00.0 2007 04 05 22 50 00.0 1 BHZ 230 | Q03C TA 2007 04 05 21 27 00.0 2007 04 05 22 50 00.0 1 BHZ 231 | HELL TA 2007 04 08 03 21 00.0 2007 04 08 04 45 00.0 1 BHZ 232 | Q03C TA 2007 04 08 03 21 00.0 2007 04 08 04 45 00.0 1 BHZ 233 | HELL TA 2007 04 11 10 30 00.0 2007 04 11 11 54 00.0 1 BHZ 234 | Q03C TA 2007 04 11 10 30 00.0 2007 04 11 11 54 00.0 1 BHZ 235 | HELL TA 2007 05 09 07 50 00.0 2007 05 09 09 13 00.0 1 BHZ 236 | Q03C TA 2007 05 09 07 50 00.0 2007 05 09 09 13 00.0 1 BHZ 237 | HELL TA 2007 05 10 21 54 00.0 2007 05 10 
23 18 00.0 1 BHZ 238 | Q03C TA 2007 05 10 21 54 00.0 2007 05 10 23 18 00.0 1 BHZ 239 | HELL TA 2007 05 11 21 37 00.0 2007 05 11 23 01 00.0 1 BHZ 240 | Q03C TA 2007 05 11 21 37 00.0 2007 05 11 23 01 00.0 1 BHZ 241 | HELL TA 2007 05 15 11 22 00.0 2007 05 15 12 45 00.0 1 BHZ 242 | Q03C TA 2007 05 15 11 22 00.0 2007 05 15 12 45 00.0 1 BHZ 243 | HELL TA 2007 07 08 18 54 00.0 2007 07 08 20 17 00.0 1 BHZ 244 | Q03C TA 2007 07 08 18 54 00.0 2007 07 08 20 17 00.0 1 BHZ 245 | HELL TA 2007 07 14 03 28 00.0 2007 07 14 04 52 00.0 1 BHZ 246 | Q03C TA 2007 07 14 03 28 00.0 2007 07 14 04 52 00.0 1 BHZ 247 | HELL TA 2007 08 02 03 21 00.0 2007 08 02 04 45 00.0 1 BHZ 248 | Q03C TA 2007 08 02 03 21 00.0 2007 08 02 04 45 00.0 1 BHZ 249 | HELL TA 2007 08 02 06 23 00.0 2007 08 02 07 46 00.0 1 BHZ 250 | Q03C TA 2007 08 02 06 23 00.0 2007 08 02 07 46 00.0 1 BHZ 251 | HELL TA 2007 08 10 11 47 00.0 2007 08 10 13 10 00.0 1 BHZ 252 | Q03C TA 2007 08 10 11 47 00.0 2007 08 10 13 10 00.0 1 BHZ 253 | HELL TA 2007 08 24 04 20 00.0 2007 08 24 05 43 00.0 1 BHZ 254 | Q03C TA 2007 08 24 04 20 00.0 2007 08 24 05 43 00.0 1 BHZ 255 | HELL TA 2007 09 16 21 16 00.0 2007 09 16 22 40 00.0 1 BHZ 256 | Q03C TA 2007 09 16 21 16 00.0 2007 09 16 22 40 00.0 1 BHZ 257 | -------------------------------------------------------------------------------- /cps.py: -------------------------------------------------------------------------------- 1 | import os 2 | import subprocess 3 | import pandas as pd 4 | import matplotlib.pyplot as plt 5 | import numpy as np 6 | from glob import glob 7 | import pdb 8 | 9 | 10 | def do_mft(filename, wave, dist): 11 | """ 12 | call sacmft96 from Computer Programs in Seismology 13 | calculate group velocity curve 14 | :param filename: sac file name 15 | :param wave: wave type ,'L' or 'R' 16 | :param dist: epicenter distance(km) 17 | :return: result[0] frequency, result[1] group velocity 18 | """ 19 | chose_alpha = {1000: 25, 2000: 50, 4000: 100, 180000: 200} 20 | command = ['sacmft96', 
'-f', filename] 21 | for distmax in chose_alpha: 22 | if dist <= distmax: 23 | alpha = chose_alpha[distmax] 24 | break 25 | command.extend(['-a0', str(alpha)]) 26 | command.extend(['-PMIN', '5', '-PMAX', '120']) 27 | if wave == 'R': 28 | command.append('-R') 29 | elif wave == 'L': 30 | command.append('-L') 31 | with open(os.devnull, 'w') as devnull: 32 | subprocess.run(command, check=True, stdout=devnull) 33 | subprocess.run("awk '{print $5,$6,$10}' mft96.disp > temp", shell=True) 34 | df = pd.read_table('temp', sep='\s+', names=['per', 'vel', 'amp']) 35 | result = df.groupby('per').apply(lambda x: x['vel'][x['amp'].argmax()]) 36 | #plt.plot(result) 37 | #plt.show() 38 | subprocess.run("rm mft* MFT* temp", shell=True) 39 | f = result.index 40 | f = 1.0/f 41 | c = result.values 42 | return np.vstack([f[::-1], c[::-1]]) 43 | 44 | 45 | def litho_to_mod96(lat, lon, outname, firstrow_name='CRUST1-TOP', lastrow_name=None): 46 | """ 47 | access LITHO1.0 and convert to mod96 format 48 | :param lat: latitude 49 | :param lon: longitude 50 | :param outname: mod96 model file name 51 | :param firstrow_name: select shallowest layer 52 | :param lastrow_name: select deepest layer and set it to 100 53 | :return: 54 | """ 55 | subprocess.run( 56 | "access_litho -p %d %d | awk '{print $1,$2,$3,$4,$5,$6}' > litho.temp" 57 | % (lat, lon), shell=True) 58 | model = np.loadtxt("litho.temp") 59 | 60 | # select rows 61 | subprocess.run( 62 | "access_litho -p %d %d | awk '{print $10}' > layername.temp" 63 | % (lat, lon), shell=True) 64 | first_row = 0 65 | last_row = len(model) 66 | with open('layername.temp', 'r') as f: 67 | layer_name = f.read().split()[::-1] 68 | for i in range(len(layer_name)): 69 | if firstrow_name and firstrow_name == layer_name[i]: 70 | first_row = i 71 | if lastrow_name and lastrow_name == layer_name[i]: 72 | last_row = i+1 73 | 74 | model = model[::-1, :] 75 | model = model[first_row:last_row, :] 76 | model[:, :4] = model[:, :4]/1000 77 | model[:, 0] = model[:, 0] - 
model[0][0] 78 | # convert Qmu,Qkappa to Qp,Qs 79 | L = (4/3) * (model[:, 3]/model[:, 2])**2 80 | Qp = 1/L * model[:, 5] + 1/(1-L) * model[:, 4] 81 | Qs = model[:, 5] 82 | model[:, 4], model[:, 5] = Qp, Qs 83 | # change column sequence 84 | model[:, 1], model[:, 2], model[:, 3] = \ 85 | model[:, 2].copy(), model[:, 3].copy(), model[:, 1].copy() 86 | # convert depth to layer thieckness 87 | cps_model = [] 88 | if len(model) % 2 == 0: 89 | raise ValueError('unexpected model for lat:%d lon:%d' % (lat, lon)) 90 | for i in range(0, len(model)-1, 2): 91 | layer_thickness = model[i+1, 0] - model[i, 0] 92 | if all(model[i, 1:] == model[i+1, 1:]): 93 | model[i, 0] = layer_thickness 94 | cps_model.append(model[i, :]) 95 | else: 96 | raise ValueError('unexpected model for lat:%d lon:%d' % (lat, lon)) 97 | model[-1, 0] = 100 98 | cps_model.append(model[-1, :]) 99 | cps_model = np.array(cps_model) 100 | # add ETAP ETAS FREFP FREFS 101 | comp = np.zeros([len(cps_model), 4]) 102 | comp[:, 2:] = 1 103 | cps_model = np.concatenate((cps_model, comp), axis=1) 104 | 105 | header = "%s\nlitho1.0\n0" % outname 106 | np.savetxt("model.temp", cps_model, header=header, comments="", fmt='%8.4f') 107 | subprocess.run("cat model.temp | mkmod96", shell=True) 108 | subprocess.run("rm litho.temp model.temp layername.temp", shell=True) 109 | return cps_model 110 | 111 | 112 | def vs_to_mod96(vs, depth, outname, ratio=1.75): 113 | """ 114 | use empirical relationship get vp and density 115 | write mod96 116 | :param vs: ndarray vs (km/s) 117 | :param depth: ndarray depth (km) 118 | :param outname: output mod96 file name 119 | :param ratio: vp/vs 120 | :return: 121 | """ 122 | def density(vp): 123 | """ 124 | density empirical function 125 | ref: Tomas M.Brocher 2015 BSSA 126 | """ 127 | return 1.6612*vp - 0.4721*vp**2 + 0.0671*vp**3 -\ 128 | 0.0043*vp**4 + 0.000106*vp**5 129 | vp = vs * ratio 130 | density = density(vp) 131 | qp = np.ones(len(vs)) * 1330 132 | qs = np.ones(len(vs)) * 600 133 | 
prev_depth = np.zeros(len(depth)) 134 | prev_depth[1:] = depth[:-1] 135 | thick = depth - prev_depth 136 | one = np.ones(len(vs)) 137 | zero = np.zeros(len(vs)) 138 | model_mat = np.c_[thick, vp, vs, density, qp, qs, zero, zero, one, one] 139 | header = "%s\nfromvs\n0" % outname 140 | np.savetxt("model.temp", model_mat, header=header, comments="", fmt='%8.4f') 141 | subprocess.run("cat model.temp | mkmod96", shell=True) 142 | subprocess.run("rm model.temp", shell=True) 143 | 144 | 145 | def write_surf96_from_anis_vs( 146 | vsmean, depth, anis_strength, anis_angle, step, name, outdir, prange, is_meangv=False): 147 | """ 148 | write surf96 file 149 | :param vsmean: a in function : a(1+dcos(2(x-e))) 150 | :param depth: depth 151 | :param anis_strength: d in that function 152 | :param anis_angle: e in that function 153 | :param step: step of azimuth 154 | :param name: output file is name_azi 155 | :param outdir: output directory 156 | :param is_meangv: output isotropic group vel 157 | :param prange: period range of dispersion 158 | :return: 159 | """ 160 | if not os.path.exists(outdir): 161 | os.mkdir(outdir) 162 | 163 | # get mean gv 164 | vs_to_mod96(vsmean, depth, 'mean.mod') 165 | mean_vel = forward_rayleigh('mean.mod').transpose()[::-1] 166 | mean_vel[:, 0] = 1.0 / mean_vel[:, 0] 167 | gv_mean = np.interp(prange, mean_vel[:, 0], mean_vel[:, 2]) 168 | # pdb.set_trace() 169 | 170 | def cos_func(x, a, d, e): 171 | return a*(1 + d*np.cos(2*(x-e)*np.pi/180)) 172 | for azi in np.arange(-180, 181, step): 173 | vs = cos_func(azi, vsmean, anis_strength, anis_angle) 174 | vs_to_mod96(vs, depth, 'mod.temp') 175 | freq_vel = forward_rayleigh('mod.temp').transpose()[::-1] 176 | freq_vel[:, 0] = 1.0 / freq_vel[:, 0] 177 | gv = np.interp(prange, freq_vel[:, 0], freq_vel[:, 2]) 178 | pv = np.interp(prange, freq_vel[:, 0], freq_vel[:, 1]) 179 | f = open(outdir+name+"_%.3f" % azi, 'w') 180 | for i in range(len(prange)): 181 | if is_meangv: 182 | f.write("SURF96 R U T 0 %.2f %.2f 
0.003\n" % (prange[i], gv_mean[i])) 183 | else: 184 | f.write("SURF96 R U T 0 %.2f %.2f 0.003\n" % (prange[i], gv[i])) 185 | f.write("SURF96 R C T 0 %.2f %.2f 0.001\n" % (prange[i], pv[i])) 186 | f.close() 187 | subprocess.run("rm mod.temp mean.mod", shell=True) 188 | 189 | 190 | def read_anis_disp(indir, prefix, vel_type, period): 191 | """ 192 | read azimuth dependant dispersion file 193 | :param indir: disp file directory 194 | :param prefix: filename pattern is prefix_azi 195 | :param vel_type: 'C' or 'U' for phase or group velocity 196 | :param period: period of velocity 197 | :return: azi, surfv, std 198 | """ 199 | names = glob(indir+prefix+'_*') 200 | azis, surfvs, stds = [], [], [] 201 | for name in names: 202 | azi = float(name.split('_')[-1]) 203 | with open(name, 'r') as f: 204 | lines = f.readlines() 205 | for line in lines: 206 | temp = line.split() 207 | if temp[2] == vel_type and float(temp[5]) == period: 208 | surfv, std = float(temp[6]), float(temp[7]) 209 | if not(np.isnan(surfv) or np.isnan(std)): 210 | surfvs.append(surfv) 211 | stds.append(std) 212 | azis.append(azi) 213 | surfvs = [x for _, x in sorted(zip(azis, surfvs))] 214 | stds = [x for _, x in sorted(zip(azis, stds))] 215 | return sorted(azis), surfvs, stds 216 | 217 | 218 | def forward_rayleigh(modelname): 219 | """ 220 | using given model, forward compute surface wave phase velocity 221 | group velocity and ZHratio 222 | :param modelname: mod96 format model 223 | :return: result[0] frequency, result[1] phase velocity, 224 | result[2] group velocity, result[3] ZHratio 225 | """ 226 | subprocess.run( 227 | "sprep96 -M %s -HS 5 -HR 0 -DT 0.5 -NPTS 2048 -R -L -NMOD 1" % modelname, 228 | shell=True) 229 | subprocess.run('sdisp96', shell=True) 230 | subprocess.run('sregn96', shell=True) 231 | subprocess.run('slegn96', shell=True) 232 | subprocess.run( 233 | "sdpegn96 -R -C -U -PER -YMIN 2 -YMAX 5 -XMIN 1 -XMAX 80 -ASC", 234 | shell=True) 235 | subprocess.run("awk '{print $4,$5,$6,$9}' 
SREGN.ASC > temp", shell=True) 236 | result = np.loadtxt('temp', skiprows=1) 237 | subprocess.run( 238 | 'rm sdisp* temp slegn96.egn sregn96.egn SREGN*', shell=True) 239 | 240 | result = result.transpose() 241 | result[3] = 1 / result[3] 242 | return result[:, ::-1] # frequency ascending order 243 | 244 | 245 | def forward_love(modelname): 246 | """ 247 | :param modelname: 248 | :return: result[0] frequency, result[1] phase velocity, 249 | result[2] group velocity 250 | """ 251 | subprocess.run( 252 | "sprep96 -M %s -HS 5 -HR 0 -DT 0.5 -NPTS 2048 -R -L -NMOD 1" % modelname, 253 | shell=True) 254 | subprocess.run('sdisp96', shell=True) 255 | subprocess.run('sregn96', shell=True) 256 | subprocess.run('slegn96', shell=True) 257 | subprocess.run( 258 | "sdpegn96 -L -C -U -PER -YMIN 2 -YMAX 5 -XMIN 1 -XMAX 80 -ASC", 259 | shell=True) 260 | subprocess.run("awk '{print $4,$5,$6}' SLEGN.ASC > temp", shell=True) 261 | result = np.loadtxt('temp', skiprows=1) 262 | subprocess.run( 263 | 'rm sdisp* temp slegn96.egn sregn96.egn SLEGN*', shell=True) 264 | 265 | result = result.transpose() 266 | return result[:, ::-1] # frequency ascending order 267 | 268 | 269 | def compute2d(lat, lon, mark, outname, step=1, freqs=(0.1, 0.05, 0.04, 0.025, 0.02, 0.0125)): 270 | """ 271 | compute 2d data 272 | :param lat: latitude range 273 | :param lon: longitude range 274 | :param mark: 1 phase vel, 2 group vel, 3 ZHratio 275 | :param outname: write filename 276 | :param step: step between nodes 277 | :param freqs: frequency of result 278 | :return: 279 | """ 280 | out = "" 281 | for la in np.arange(lat[0], lat[1]+step, step): 282 | for lo in np.arange(lon[0], lon[1]+step, step): 283 | litho_to_mod96(la, lo, 'temp') 284 | forward = forward_rayleigh('temp') 285 | results = np.interp(freqs, forward[0], forward[mark]) 286 | out += str(lo) + ' ' + str(la) + ' ' 287 | out += ' '.join(map(lambda x: '%.4s' % x, results)) + '\n' 288 | with open(outname, 'w') as f: 289 | f.write(out) 290 | 291 | 292 | 
def read_kernel(kernel_file, wavetype): 293 | """ 294 | read kernel file from srfker96 295 | need sobs.d tdisp.d 296 | :param kernel_file: kernel file path 297 | :param wavetype: 'R' or 'L' 298 | :return: 299 | """ 300 | table = {'R': 4, 'L': 2} 301 | index = table[wavetype] 302 | count = 0 303 | with open(kernel_file, 'r') as f: 304 | lines = f.readlines() 305 | i = 0 306 | while count < index: 307 | if lines[i][0] == '_': 308 | count += 1 309 | i += 1 310 | i += 2 311 | result = [] 312 | while lines[i][0] != '_': 313 | result.append(list(map(lambda x: float(x), lines[i].split()))) 314 | i += 1 315 | result = np.array(result) 316 | depth = result[:, 1] 317 | sensi = result[:, 3] 318 | fig, ax = plt.subplots() 319 | fig.set_size_inches(5, 6) 320 | plt.title("Sensitivity(dC/dVs)") 321 | ax.set_ylabel("Depth(km)") 322 | ax.invert_yaxis() 323 | ax.plot(sensi, np.arange(len(sensi))) 324 | plt.show() 325 | return depth, sensi 326 | 327 | 328 | def plot_mod96(filename): 329 | fig, ax = plt.subplots() 330 | with open(filename, 'r') as f: 331 | models = f.readlines() 332 | vp, vs, h = [], [], [] 333 | for layer in models[12:]: 334 | vp.append(float(layer.split()[1])) 335 | h.append(float(layer.split()[0])) 336 | vs.append(float(layer.split()[2])) 337 | if h[-1] == 0: 338 | h[-1] = sum(h)*0.1 # infinite half space 339 | vpd, vsd, depth = [], [], [] 340 | for i in range(len(h)): 341 | if not depth: 342 | depth.append(0) 343 | else: 344 | depth.append(depth[-1]) 345 | vpd.append(vp[i]) 346 | vsd.append(vs[i]) 347 | depth.append(depth[-1]+h[i]) 348 | vpd.append(vp[i]) 349 | vsd.append(vs[i]) 350 | plt.title(models[1][:-1], y=1.08) 351 | ax.set_xlim(min(vs)-1, max(vp)+1) 352 | ax.set_xlabel("velocity (km/s)") 353 | ax.set_ylim(0, sum(h)) 354 | ax.set_ylabel("depth (km)") 355 | ax.invert_yaxis() 356 | ax.xaxis.tick_top() 357 | ax.plot(vsd, depth, 'red', label='vs') 358 | ax.plot(vpd, depth, 'blue', label='vp') 359 | plt.legend() 360 | plt.show() 361 | 362 | 363 | def 
plot_single_mod96_vs(filename, ax, color, label=None): 364 | with open(filename, 'r') as f: 365 | models = f.readlines() 366 | vp, vs, h = [], [], [] 367 | for layer in models[12:]: 368 | vp.append(float(layer.split()[1])) 369 | h.append(float(layer.split()[0])) 370 | vs.append(float(layer.split()[2])) 371 | if h[-1] == 0: 372 | h[-1] = sum(h)*0.1 # infinite half space 373 | vpd, vsd, depth = [], [], [] 374 | for i in range(len(h)): 375 | if not depth: 376 | depth.append(0) 377 | else: 378 | depth.append(depth[-1]) 379 | vpd.append(vp[i]) 380 | vsd.append(vs[i]) 381 | depth.append(depth[-1]+h[i]) 382 | vpd.append(vp[i]) 383 | vsd.append(vs[i]) 384 | ax.plot(vsd, depth, color=color, label=label) 385 | 386 | 387 | def plot_multi_mod96_vs(vlim, hlim, models): 388 | """ 389 | plot multiple models 390 | :param vlim: (vmin, vmax) 391 | :param hlim: (hmin, hmax) 392 | :param models: ([file1, file2, ...], color, label) 393 | :return: None 394 | """ 395 | fig, ax = plt.subplots() 396 | ax.set_xlim(*vlim) 397 | ax.set_ylim(*hlim) 398 | ax.set_xlabel("S-velocity (km/s)") 399 | ax.set_ylabel("depth (km)") 400 | ax.invert_yaxis() 401 | ax.xaxis.tick_top() 402 | for model in models: 403 | plot_single_mod96_vs(model[0][0], ax, model[1], label=model[2]) 404 | for m in model[0][1:]: 405 | plot_single_mod96_vs(m, ax, color=model[1], label=None) 406 | plt.legend() 407 | plt.show() 408 | 409 | 410 | def _plot_disp_from_mod96(modelname, ax, color, label=None): 411 | result = forward_rayleigh(modelname) 412 | per = 1 / result[0] 413 | pha = result[1] 414 | ax.plot(per, pha, color=color, label=label) 415 | 416 | 417 | def _plot_disp_from_file(dispfile, ax, color, label=None): 418 | subprocess.run("awk '{print $6,$7}' %s > temp.d" % dispfile, shell=True) 419 | disp = np.loadtxt('temp.d') 420 | ax.plot(disp[:, 0], disp[:, 1], '.', color=color, label=label) 421 | os.remove('./temp.d') 422 | 423 | 424 | def plot_multi_disp(vlim, plim, disps, models): 425 | fig, ax = plt.subplots() 426 | 
ax.set_xlim(*plim) 427 | ax.set_ylim(*vlim) 428 | ax.set_xlabel("peroid (s)") 429 | ax.set_ylabel("S-velocity (km/s)") 430 | for model in models: 431 | _plot_disp_from_mod96(model[0][0], ax, model[1], label=model[2]) 432 | for m in model[0][1:]: 433 | _plot_disp_from_mod96(m, ax, color=model[1], label=None) 434 | for disp in disps: 435 | _plot_disp_from_file(disp[0][0], ax, disp[1], label=disp[2]) 436 | for d in disp[0][1:]: 437 | _plot_disp_from_file(d, ax, disp[1], label=None) 438 | plt.legend() 439 | plt.show() 440 | 441 | 442 | class Inv: 443 | @staticmethod 444 | def write_sobs(modelname, dispname): 445 | """ 446 | write config file of inversion program 447 | """ 448 | f = open('sobs.d', 'w') 449 | f.write("0.005 0.005 0.0 0.005 0.0\n") 450 | f.write("0 0 0 0 0 1 0 0 1 0\n") 451 | f.write("%s\n" % modelname) 452 | f.write("%s\n" % dispname) 453 | 454 | @staticmethod 455 | def write_surf96(filename, wave, type, flag, mode, peroid_arr, value_arr, err_arr): 456 | """ 457 | write to surf96 format dispersion file 458 | :param :refer to cps doc 459 | :param filename: filename to write 460 | """ 461 | f = open(filename, 'w') 462 | for i in range(len(peroid_arr)): 463 | f.write("SURF96 %s %s %s %d %f %f %f\n" % ( 464 | wave, type, flag, mode, peroid_arr[i], value_arr[i], err_arr[i] 465 | )) 466 | f.close() 467 | 468 | @staticmethod 469 | def read_disp(filename, peroid): 470 | disp = np.loadtxt(filename) 471 | p_disp = np.c_[peroid.T, disp.T][~np.isnan(disp[0])] 472 | Inv.write_surf96('disp.d', 'R', 'C', 'X', 0, p_disp[:, 0], 473 | p_disp[:, 1], p_disp[:, 2]) 474 | 475 | @staticmethod 476 | def do_inv_netibet(datadir, outdir, smooth=True, iter_num=5): 477 | disps = glob(datadir+'*') 478 | for disp in disps: 479 | name = disp.split('/')[-1] 480 | Inv.read_disp(disp, np.arange(10, 80)) 481 | # TODO write model 482 | # TODO change parameter 483 | if not smooth: 484 | subprocess.run("surf96 36 0", shell=True) 485 | subprocess.run("surf96"+" 1 2 6"*iter_num, shell=True) 486 
| subprocess.run("surf96 28 %s" % outdir+name, shell=True) 487 | subprocess.run("surf96 39", shell=True) 488 | # TODO monitor output 489 | 490 | @staticmethod 491 | def conv_to_plain_vsmodel(datadir, outdir): 492 | models = glob(datadir+'*') 493 | for model in models: 494 | name = model.split('/')[-1] 495 | with open(model, 'r') as f: 496 | lines = f.readlines()[12:] 497 | vs = "" 498 | err = "" 499 | for line in lines: 500 | vs += line.split()[2] + ' ' 501 | err += '0.05 ' # TODO change it 502 | with open(outdir+name, 'w') as f: 503 | f.write(vs + '\n') 504 | f.write(err + '\n') 505 | 506 | 507 | if __name__ == '__main__': 508 | model = litho_to_mod96(30, 108, "./testdata/ZHratio/test.d") 509 | groupv = do_mft('./testdata/ZHratio/g30.r', 'R', 3335.8) 510 | forward = forward_rayleigh("./testdata/ZHratio/test.d") 511 | compute2d((32, 42), (96, 108), 1, 'litho1.0_phasevel') 512 | --------------------------------------------------------------------------------