├── ts_process
│   ├── makefile
│   ├── rotd50
│   │   ├── baseline.h
│   │   ├── makefile
│   │   ├── splint.f
│   │   ├── spline.f
│   │   ├── ft_th.f
│   │   ├── sort.f
│   │   ├── fftsub.f
│   │   ├── calcrsp.f
│   │   └── rotd50.f
│   ├── remove_bbp_padding.py
│   ├── plot_timeseries.py
│   ├── create_plot_index.py
│   ├── awp2bbp.py
│   ├── rwgdata2bbp.py
│   ├── rwg2bbp.py
│   ├── edge2bbp.py
│   ├── her2bbp.py
│   ├── compare_timeseries.py
│   ├── process_timeseries.py
│   ├── ts_plot_library.py
│   ├── smc2bbp.py
│   └── file_utilities.py
├── .github
│   ├── scripts
│   │   ├── ts-process-test-ci.sh
│   │   └── ts-process-build-ci.sh
│   └── workflows
│       └── ts-process-ci.yml
├── README.md
├── LICENSE
└── .gitignore
/ts_process/makefile:
--------------------------------------------------------------------------------
1 | all:
2 | cd rotd50; make -f makefile
3 | clean:
4 | cd rotd50; make -f makefile clean;
5 |
--------------------------------------------------------------------------------
/ts_process/rotd50/baseline.h:
--------------------------------------------------------------------------------
1 | integer MAXPTS, MAXPARAM, MAX_MATCH
2 | parameter ( MAXPTS=2000000, MAXPARAM=10, MAX_MATCH=250)
3 |
--------------------------------------------------------------------------------
/ts_process/rotd50/makefile:
--------------------------------------------------------------------------------
1 | FC=gfortran
2 | FFLAGS = -O2 -ffixed-line-length-none
3 | HEADS = baseline.h
4 | COMMON_OBJS = calcrsp.o fftsub.o ft_th.o sort.o spline.o splint.o
5 | ROTD50_OBJS = ${COMMON_OBJS} rotd50.o
6 |
7 | all: rotd50
8 |
9 | rotd50: ${ROTD50_OBJS}
10 | ${FC} ${FFLAGS} -o rotd50 ${ROTD50_OBJS}
11 |
12 | ${ROTD50_OBJS}: ${HEADS}
13 |
14 | clean:
15 | rm -f ${ROTD50_OBJS} rotd50 *~
16 |
--------------------------------------------------------------------------------
/.github/scripts/ts-process-test-ci.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Set basic parameters
4 | VERSION=22.4.0
5 | BASEDIR="${RUNNER_WORKSPACE}"
6 | BBPDIR="${BASEDIR}/bbp/bbp"
7 | SRCDIR="${BBPDIR}/src"
8 |
9 | export BBP_DIR=${BBPDIR}
10 | export BBP_GF_DIR=${BASEDIR}/bbp_gf
11 | export BBP_VAL_DIR=${BASEDIR}/bbp_val
12 | export PYTHONPATH=${BBPDIR}/comps:${BBPDIR}/comps/PySeismoSoil:${PYTHONPATH}
13 | export BBP_DATA_DIR=${BASEDIR}/bbp_data
14 | export PATH=${BBPDIR}/comps:${BBPDIR}/utils/batch:$PATH
15 | ulimit -s unlimited
16 |
17 | echo
18 | echo "===> Running Unit Tests..."
19 |
20 | cd $BBP_DIR/tests
21 | ./UnitTestsCI.py
22 |
--------------------------------------------------------------------------------
/ts_process/rotd50/splint.f:
--------------------------------------------------------------------------------
1 | SUBROUTINE splint(xa,ya,y2a,n,x,y)
2 | INTEGER n
3 | REAL x,y,xa(n),y2a(n),ya(n)
4 | INTEGER k,khi,klo
5 | REAL a,b,h
6 | klo=1
7 | khi=n
8 | 1 if (khi-klo.gt.1) then
9 | k=(khi+klo)/2
10 | if(xa(k).gt.x)then
11 | khi=k
12 | else
13 | klo=k
14 | endif
15 | goto 1
16 | endif
17 | h=xa(khi)-xa(klo)
18 | ! if (h.eq.0.) pause 'bad xa input in splint'
19 | if (h.eq.0.) STOP 'bad xa input in splint'
20 | a=(xa(khi)-x)/h
21 | b=(x-xa(klo))/h
22 | y=a*ya(klo)+b*ya(khi)+((a**3-a)*y2a(klo)+(b**3-b)*y2a(khi))*(h**
23 | *2)/6.
24 | return
25 | END
26 |
--------------------------------------------------------------------------------
/ts_process/rotd50/spline.f:
--------------------------------------------------------------------------------
1 | SUBROUTINE spline(x,y,n,yp1,ypn,y2,u, MAXPTS)
2 | INTEGER n,MAXPTS
3 | REAL yp1,ypn,x(MAXPTS),y(MAXPTS),y2(MAXPTS), u(MAXPTS)
4 | INTEGER i,k
5 | REAL p,qn,sig,un
6 | if (yp1 .gt. .99e30) then
7 | y2(1)=0.
8 | u(1)=0.
9 | else
10 | y2(1)=-0.5
11 | u(1)=(3./(x(2)-x(1)))*((y(2)-y(1))/(x(2)-x(1))-yp1)
12 | endif
13 | do 11 i=2,n-1
14 |
15 | sig=(x(i)-x(i-1))/(x(i+1)-x(i-1))
16 | p=sig*y2(i-1)+2.
17 | y2(i)=(sig-1.)/p
18 | u(i)=(6.*((y(i+1)-y(i))/(x(i+
19 | *1)-x(i))-(y(i)-y(i-1))/(x(i)-x(i-1)))/(x(i+1)-x(i-1))-sig*
20 | *u(i-1))/p
21 | 11 continue
22 | if (ypn.gt..99e30) then
23 | qn=0.
24 | un=0.
25 | else
26 | qn=0.5
27 | un=(3./(x(n)-x(n-1)))*(ypn-(y(n)-y(n-1))/(x(n)-x(n-1)))
28 | endif
29 | y2(n)=(un-qn*u(n-1))/(qn*y2(n-1)+1.)
30 | do 12 k=n-1,1,-1
31 | y2(k)=y2(k)*y2(k+1)+u(k)
32 | 12 continue
33 | return
34 | END
35 |
--------------------------------------------------------------------------------
/.github/workflows/ts-process-ci.yml:
--------------------------------------------------------------------------------
1 | name: ts-process-ci
2 |
3 | on:
4 | push:
5 | branches: [ main ]
6 |
7 | jobs:
8 | ts-process-build-linux:
9 | runs-on: ubuntu-20.04
10 | strategy:
11 | matrix:
12 | python-version: [ "3.7", "3.8", "3.9", "3.10" ]
13 | steps:
14 | - name: Setup Python ${{ matrix.python-version }}
15 | uses: actions/setup-python@v3
16 | with:
17 | python-version: ${{ matrix.python-version }}
18 | - name: configure Python
19 | run: |
20 | sudo apt-get update
21 | sudo apt-get install g++-8 -y
22 | sudo apt-get install gfortran-8 -y
23 | pip install numpy
24 | pip install scipy
25 | pip install matplotlib
26 | - name: checkout ts-process main
27 | uses: actions/checkout@v2
28 | with:
29 | ref: main
30 | - name: build ts-process
31 | run: ./.github/scripts/ts-process-build-ci.sh
32 | shell: bash
33 | # - name: test ts-process
34 | # run: ./.github/scripts/ts-process-test-ci.sh
35 | # shell: bash
36 |
--------------------------------------------------------------------------------
/.github/scripts/ts-process-build-ci.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | echo
4 |
5 | OLD_DIR=`pwd`
6 | mkdir ${RUNNER_WORKSPACE}/bin
7 | cd ${RUNNER_WORKSPACE}/bin
8 | ln -s /usr/bin/gcc-8 gcc
9 | ln -s /usr/bin/gfortran-8 gfortran
10 | cd ${OLD_DIR}
11 | export PATH=${RUNNER_WORKSPACE}/bin:$PATH
12 |
13 | echo "======================GCC===================="
14 | gcc --version
15 |
16 | echo "===================GFORTRAN=================="
17 | gfortran --version
18 |
19 | echo "===================Python 3=================="
20 | python3 --version
21 | python3 -c "import numpy; print('Numpy: ', numpy.__version__)"
22 | python3 -c "import scipy; print('SciPy: ', scipy.__version__)"
23 | python3 -c "import matplotlib; print('Matplotlib: ', matplotlib.__version__)"
24 |
25 | # Set basic parameters
26 | BASEDIR="${RUNNER_WORKSPACE}"
27 | TSPROCESSDIR="${BASEDIR}/ts-process/ts_process"
28 |
29 | # Compile source distribution
30 | echo "=> ts-process source distribution"
31 | echo "==> Compiling..."
32 | OLD_DIR=`pwd`
33 | cd ${TSPROCESSDIR}
34 | make
35 | cd ${OLD_DIR}
36 | # Done with main source distribution
37 | echo "==> Source code compiled!"
38 | echo
39 |
40 | echo "==> Build steps completed!"
41 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ts-process
2 | [Python 3](https://www.python.org)
3 | [License: BSD 3-Clause](https://opensource.org/licenses/BSD-3-Clause)
4 |
5 | [GitHub Actions CI status](https://github.com/SCECcode/ts-process/actions)
6 |
7 | Ground motion time series processing tools
8 |
9 | This is a collection of Python3-based ground motion time series processing utilities designed to integrate 3D ground motion simulation seismograms from AWP-ODC and Hercules with Broadband Platform time series. The ts-process library also provides codes for calculating ROTD50 so that a common implementation is used to process both 3D simulation seismograms and 1D Broadband Platform seismograms.
10 |
11 | These codes have been developed as part of earthquake ground motion research performed by the Southern California Earthquake Center (SCEC) www.scec.org.
12 |
13 | ## Primary Developers of ts-process library:
14 |
15 | * Ricardo Taborda - Universidad EAFIT, Medellín, Colombia
16 | * Christine Goulet - University of Southern California
17 | * Fabio Silva - Southern California Earthquake Center
18 |
19 | ## Software support:
20 | * software @ scec.org
21 |
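22 | ## Building
23 |
24 | The RotD50 code under `ts_process/rotd50` is Fortran and is built with the included makefiles. A minimal sketch, assuming `gfortran` is available on the PATH:
25 |
26 | ```
27 | cd ts_process
28 | make
29 | ```
30 |
31 | The Python utilities in `ts_process` (e.g. `awp2bbp.py`, `plot_timeseries.py`, `create_plot_index.py`) are standalone command-line tools; run any of them with `--help` to see the supported options.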
--------------------------------------------------------------------------------
/ts_process/rotd50/ft_th.f:
--------------------------------------------------------------------------------
1 |
2 | subroutine InterpFreq ( u, npts0, cu1, NN, nTotal, dt )
3 | real u(1)
4 | integer npts, m2, npts2, NN
5 | complex cu1(1)
6 |
7 |
8 | m2 = int( alog(float(npts0))/alog(2.) + 0.9999 )
9 | npts2 = 2**m2
10 |
11 | c PAD TO POWER OF 2 in the time domain
12 | do i=npts0+1,npts2
13 | u(i) = 0.
14 | enddo
15 | npts = npts2
16 |
17 | c FILL COMPLEX ARRAY
18 | do 10 i=1,npts
19 | cu1(i) = cmplx(u(i),0.0)
20 | 10 continue
21 |
22 | c CALCULATE FORWARD FFT
23 | call cool ( -1., m2, cu1 )
24 | df = 1./(npts*dt)
25 | c do i=1,npts
26 | c write (60,'( f10.4, 2f12.4)') (i-1)*df, cu1(i)
27 | c enddo
28 |
29 | c Pad in the frequency domain
30 | iNyq = npts/2 + 1
31 | iNyq2 = NN * (npts/2) + 1
32 | c write (*,'( 2i10)') iNyq, iNyq2
33 | do i=iNyq+1,iNyq2
34 | cu1(i) = cmplx(0., 0.)
35 | enddo
36 |
37 | c Reset Nyquist to half its value (the other half will be added to the neg freq)
38 | cu1(iNyq) = cu1(iNyq)/2.
39 |
40 | c Load the negative frequencies
41 | nTotal = 2 * (iNyq2-1)
42 | do i=iNyq2+1,nTotal
43 | cu1(i) = conjg (cu1(nTotal+2-i))
44 | enddo
45 |
46 | c CALCULATE INVERSE FFT
47 | m3 = m2 + int( alog(float(NN))/alog(2.) + 0.5 )
48 | call cool ( 1., m3, cu1 )
49 |
50 | c Scale
51 | do i=1,nTotal
52 | u(i) = real(cu1(i)) /npts
53 | enddo
54 |
55 | return
56 | end
57 |
58 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2020, The University of Southern California
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | * Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | * Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | * Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 |
49 | # Translations
50 | *.mo
51 | *.pot
52 |
53 | # Django stuff:
54 | *.log
55 | local_settings.py
56 |
57 | # Flask stuff:
58 | instance/
59 | .webassets-cache
60 |
61 | # Scrapy stuff:
62 | .scrapy
63 |
64 | # Sphinx documentation
65 | docs/_build/
66 |
67 | # PyBuilder
68 | target/
69 |
70 | # Jupyter Notebook
71 | .ipynb_checkpoints
72 |
73 | # pyenv
74 | .python-version
75 |
76 | # celery beat schedule file
77 | celerybeat-schedule
78 |
79 | # SageMath parsed files
80 | *.sage.py
81 |
82 | # dotenv
83 | .env
84 |
85 | # virtualenv
86 | .venv
87 | venv/
88 | ENV/
89 |
90 | # Spyder project settings
91 | .spyderproject
92 | .spyproject
93 |
94 | # Rope project settings
95 | .ropeproject
96 |
97 | # mkdocs documentation
98 | /site
99 |
100 | # mypy
101 | .mypy_cache/
102 |
103 | # C and Fortran intermediate files
104 | *.o
105 | *.mod
106 | bin/
107 | *.version
108 | *.a
109 |
110 | # RotD50 binary
111 | *rotd50
112 | !rotd50/
113 |
--------------------------------------------------------------------------------
/ts_process/rotd50/sort.f:
--------------------------------------------------------------------------------
1 | c --------------------------
2 | C
3 | C ________________________________________________________
4 | C | |
5 | C | SORT AN ARRAY IN INCREASING ORDER |
6 | C | |
7 | C | INPUT: |
8 | C | |
9 | C | X --ARRAY OF NUMBERS |
10 | C | |
11 | C | Y --WORKING ARRAY (LENGTH AT LEAST N) |
12 | C | |
13 | C | N --NUMBER OF ARRAY ELEMENTS TO SORT |
14 | C | |
15 | C | OUTPUT: |
16 | C | |
17 | C | X --SORTED ARRAY |
18 | C |________________________________________________________|
19 | C
20 | SUBROUTINE SORT(X,Y,N)
21 | REAL X(1),Y(1),S,T
22 | INTEGER I,J,K,L,M,N
23 | I = 1
24 | 10 K = I
25 | 20 J = I
26 | I = I + 1
27 | IF ( J .EQ. N ) GOTO 30
28 | IF ( X(I) .GE. X(J) ) GOTO 20
29 | Y(K) = I
30 | GOTO 10
31 | 30 IF ( K .EQ. 1 ) RETURN
32 | Y(K) = N + 1
33 | 40 M = 1
34 | L = 1
35 | 50 I = L
36 | IF ( I .GT. N ) GOTO 120
37 | S = X(I)
38 | J = Y(I)
39 | K = J
40 | IF ( J .GT. N ) GOTO 100
41 | T = X(J)
42 | L = Y(J)
43 | X(I) = L
44 | 60 IF ( S .GT. T ) GOTO 70
45 | Y(M) = S
46 | M = M + 1
47 | I = I + 1
48 | IF ( I .EQ. K ) GOTO 80
49 | S = X(I)
50 | GOTO 60
51 | 70 Y(M)= T
52 | M = M + 1
53 | J = J + 1
54 | IF ( J .EQ. L ) GOTO 110
55 | T = X(J)
56 | GOTO 60
57 | 80 Y(M) = T
58 | K = M + L - J
59 | I = J - M
60 | 90 M = M + 1
61 | IF ( M .EQ. K ) GOTO 50
62 | Y(M) = X(M+I)
63 | GOTO 90
64 | 100 X(I) = J
65 | L = J
66 | 110 Y(M) = S
67 | K = M + K - I
68 | I = I - M
69 | GOTO 90
70 | 120 I = 1
71 | 130 K = I
72 | J = X(I)
73 | 140 X(I) = Y(I)
74 | I = I + 1
75 | IF ( I .LT. J ) GOTO 140
76 | Y(K) = I
77 | IF ( I .LE. N ) GOTO 130
78 | IF ( K .EQ. 1 ) RETURN
79 | GOTO 40
80 | END
81 |
82 |
--------------------------------------------------------------------------------
/ts_process/rotd50/fftsub.f:
--------------------------------------------------------------------------------
1 | subroutine rdc (x,npts,iflag)
2 |
3 | c This subroutine removes a dc shift from the data
4 |
5 | c IFLAG = 0 remove the mean
6 | c IFLAG = 1 remove the mean value of the first 10 points
7 | c IFLAG = 2 manual set of DC value to be removed
8 |
9 | real x(1),sum,mean
10 | integer npts,iflag
11 |
12 | if (iflag .eq. 0) then
13 | sum = 0.0
14 | do 10 i=1,npts
15 | sum = x(i) + sum
16 | 10 continue
17 | mean = sum/float(npts)
18 |
19 | elseif (iflag .eq. 1) then
20 | sum = 0.0
21 | do 20 i=1,10
22 | sum = x(i) + sum
23 | 20 continue
24 | mean = sum / float(10)
25 |
26 | else
27 | write (*,1000)
28 | read (*,1001) mean
29 |
30 | endif
31 |
32 | do 100 i=1,npts
33 | x(i) = x(i) - mean
34 | 100 continue
35 |
36 | c write (*,1010) mean
37 | c 1010 format( 2x,'Remove DC of ',f12.8)
38 |
39 | return
40 | 1000 format( 2x,'Enter mean to be removed')
41 | 1001 format( f12.8)
42 | end
43 |
44 | c ---------------------------------------------------------------------------
45 |
46 | subroutine CosTaper (x,npts,tb,te)
47 |
48 | c This subroutine tapers the x array
49 |
50 | real x(1), arg
51 | integer npts,tb,te
52 | pi = 3.1415926
53 |
54 | if (tb .ne. 0.) then
55 | n = (npts*tb)/100
56 | do 10 i=1,n
57 | arg = pi*float(i-1)/float(n) + pi
58 | x(i) = x(i)*(1.+cos(arg))/2.
59 | 10 continue
60 | 1000 format( 2x,'Taper beginning ',i2,' percent')
61 |
62 | endif
63 |
64 | if (te .ne. 0.) then
65 | n = (npts*te)/100
66 | do 20 i=1,n
67 | arg = pi*float(i-1)/float(n) + pi
68 | x(npts-i+1) = x(npts-i+1) * (1.+cos(arg))/2.
69 | 20 continue
70 | 1001 format( 2x,'Taper end ',i2,' percent')
71 |
72 | endif
73 | return
74 | end
75 |
76 | c ---------------------------------------------------------------------------
77 |
78 | subroutine cool ( signi, n, cx )
79 | c FFT subroutine.
80 | c signi = -1. forward transform
81 | c = 1. inverse transform
82 | c n = log base 2 (npts)
83 |
84 | complex cx(1), carg, temp, cw
85 | pi = 4. * atan(1.) * signi
86 | lx = 2**n
87 | j = 1
88 | do 30 i=1,lx
89 | if (i .gt. j) goto 10
90 | temp = cx(j)
91 | cx(j) = cx(i)
92 | cx(i) = temp
93 | 10 m = lx/2
94 | 20 if (j .le. m) goto 25
95 | j = j-m
96 | m = m/2
97 | if (m .ge. 1) goto 20
98 | 25 j = j+m
99 | 30 continue
100 | l = 1
101 | 40 istep = l+l
102 | do 50 m=1,l
103 | carg = cmplx( 0., pi * float(m-1) / float(l) )
104 | cw = cexp(carg)
105 | do 45 i=m,lx,istep
106 | temp = cw * cx(i+l)
107 | cx(i+l) = cx(i) - temp
108 | cx(i) = cx(i) + temp
109 | 45 continue
110 | 50 continue
111 | l = istep
112 | if (l .lt. lx) goto 40
113 | return
114 | end
115 |
--------------------------------------------------------------------------------
/ts_process/remove_bbp_padding.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | Copyright 2010-2020 University Of Southern California
4 |
5 | Licensed under the Apache License, Version 2.0 (the "License");
6 | you may not use this file except in compliance with the License.
7 | You may obtain a copy of the License at
8 |
9 | http://www.apache.org/licenses/LICENSE-2.0
10 |
11 | Unless required by applicable law or agreed to in writing, software
12 | distributed under the License is distributed on an "AS IS" BASIS,
13 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | See the License for the specific language governing permissions and
15 | limitations under the License.
16 |
17 | This module removes the padding added to a BBP file.
18 | """
19 | from __future__ import division, print_function
20 |
21 | # Import Python modules
22 | import os
23 | import sys
24 | import glob
25 | import shutil
26 | import argparse
27 | from file_utilities import get_dt, read_padding_bbp
28 |
29 | def parse_arguments():
30 | """
31 | Parse command-line options
32 | """
33 | parser = argparse.ArgumentParser(description="Remove padding "
34 | "from a set of BBP seismograms.")
35 | parser.add_argument("--input_dir", "-i", dest="input_dir",
36 | required=True, help="input directory")
37 | parser.add_argument("--output_dir", "-o", dest="output_dir",
38 | required=True, help="output directory")
39 | parser.add_argument("--prefix", "-p", dest="prefix",
40 | default="",
41 | help="prefix for input files")
42 | parser.add_argument("--suffix", "-s", dest="suffix",
43 | default="",
44 | help="suffix for input files")
45 | args = parser.parse_args()
46 |
47 | return args
48 |
49 | def remove_padding(input_file, output_file, padding):
50 | """
51 | Remove padding from BBP file
52 | """
53 | # Read DT
54 | dt = get_dt(input_file)
55 | current_time = 0.0
56 | total_points = 0
57 | current_point = 0
58 |
59 | # First need to figure out how many datapoints we have
60 | in_fp = open(input_file, 'r')
61 | for line in in_fp:
62 | line = line.strip()
63 | if not line:
64 | continue
65 | if line.startswith("#") or line.startswith("%"):
66 | continue
67 | total_points = total_points + 1
68 | in_fp.close()
69 |
70 | end_point = total_points - padding
71 | print("total: %d, padding: %d, end: %d" % (total_points, padding, end_point))
72 |
73 | in_fp = open(input_file, 'r')
74 | out_fp = open(output_file, 'w')
75 | for line in in_fp:
76 | line = line.strip()
77 | if not line:
78 | out_fp.write("\n")
79 | continue
80 | if line.startswith("#") or line.startswith("%"):
81 | # Header, copy but skip padding
82 | if line.find("padding=") > 0:
83 | out_fp.write("# padding= 0\n")
84 | continue
85 | out_fp.write("%s\n" % (line))
86 | continue
87 |
88 | # Check if we are done
89 | if current_point == end_point:
90 | break
91 |
92 | # Keep track of points
93 | current_point = current_point + 1
94 |
95 | # Actual data
96 | if padding > 0:
97 | # Skip this point
98 | padding = padding - 1
99 | continue
100 |
101 | # Use this point
102 | tokens = line.split()
103 | tokens = [float(token) for token in tokens]
104 | out_fp.write("%5.7f %5.9e %5.9e %5.9e\n" %
105 | (current_time, tokens[1], tokens[2], tokens[3]))
106 | # Advance time
107 | current_time = current_time + dt
108 |
109 | in_fp.close()
110 | out_fp.close()
111 |
112 | def bbp_remove_padding():
113 | """
114 | Create a set of BBP files without padding
115 | """
116 | # Get all we need from the command-line
117 | args = parse_arguments()
118 |
119 | # Get list of matching input files
120 | files = glob.glob("%s/%s*%s" % (args.input_dir, args.prefix, args.suffix))
121 |
122 | for input_file in sorted(files):
123 | print("[PROCESSING]: %s" % (os.path.basename(input_file)))
124 | input_base = os.path.basename(input_file)
125 | output_file = os.path.join(args.output_dir, input_base)
126 | padding = read_padding_bbp(input_file)
127 | if padding > 0:
128 | # Found padding that needs to be removed
129 | print("[INFO]: Found padding %d..." % (padding))
130 | remove_padding(input_file, output_file, padding)
131 | else:
132 | print("[COPYING]: Found no padding, copying file...")
133 | shutil.copy2(input_file, output_file)
134 |
135 | if __name__ == '__main__':
136 | bbp_remove_padding()
137 |
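138 | # Example invocation (a minimal sketch; directory names and suffix below are
139 | # placeholders for illustration):
140 | #   ./remove_bbp_padding.py -i padded_bbp_dir -o unpadded_bbp_dir -s .bbp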
--------------------------------------------------------------------------------
/ts_process/rotd50/calcrsp.f:
--------------------------------------------------------------------------------
1 | c ----------------------------------------------------------------------
2 | c This subroutine calculates the response spectrum.
3 | subroutine CalcRsp ( acc, npts, dt, w, damping,
4 | 1 nQ, time, SA, polarity, minTime, maxTime)
5 |
6 | real w(1), damping(1), acc(1), dt, SA(1), time(1), minTime(1),
7 | 1 maxTime(1)
8 | integer npts, nQ, i, j , polarity(1), timeIndex
9 |
10 | c LOOP FOR EACH FREQUENCY
11 | do i=1,nQ
12 | if ( w(i) .gt. 628. ) then
13 | call CalcPGA ( acc, npts, sa(i), timeIndex, polarity(i) )
14 | else
15 | call CalcOneRsp ( acc, npts, dt, w(i), damping(i), sa(i),
16 | 1 timeIndex, polarity(i), minTime(i),maxTime(i))
17 | endif
18 | time(i) = (timeIndex-1) * dt
19 | enddo
20 |
21 | return
22 | end
23 |
24 | c ----------------------------------------------------------------------
25 |
26 | subroutine CalcPGA ( acc, npts, sa, timeIndex, polarity )
27 |
28 | real acc(1), sa
29 | integer npts, polarity, timeIndex
30 |
31 | accMax = 0.0
32 | do i=1,npts
33 | if ( abs(acc(i)) .gt. accMax ) then
34 | accMax = abs(acc(i) )
35 | sa = accMax
36 | if ( acc(i) .ge. 0. ) then
37 | polarity = 1
38 | else
39 | polarity = -1
40 | endif
41 | timeIndex = i
42 | endif
43 | enddo
44 | write (*,'( 2x,''pga ='',f10.4)') sa
45 |
46 | return
47 | end
48 |
49 | c ----------------------------------------------------------------------
50 |
51 | subroutine CalcRspTH ( acc, npts, dt, w, damping, rsp )
52 | include 'baseline.h'
53 |
54 | real rsp(1), w, damping, acc(1), dt
55 | integer npts
56 |
57 | c Compute coeff
58 | call coeff ( w, damping, dt )
59 |
60 | c CALCULATE THE RESPONSES
61 | call brs ( acc, w, damping, npts, rsp )
62 |
63 | return
64 | end
65 |
66 | c ----------------------------------------------------------------------
67 |
68 | subroutine CalcOneRsp ( acc, npts, dt, w, damping,
69 | 1 sa, timeIndex, polarity, minTime, maxTime)
70 | include 'baseline.h'
71 |
72 | real rsp(MAXPTS), w, damping, acc(1), dt, SA, minTime, maxTime
73 | integer npts, i, j, polarity, timeIndex, iTime, iTime2
74 |
75 | c Compute coeff
76 | call coeff ( w, damping, dt )
77 |
78 | c CALCULATE THE RESPONSES
79 | call brs ( acc, w, damping, npts, rsp )
80 |
81 | c FIND THE MAXIMUM OF THE RESPONSE
82 | SA = 0.0
83 | iTime = int( minTime/dt) + 1
84 | iTime2 = int( maxTime/dt) -1
85 | do j=iTime,iTime2
86 | if (abs(rsp(j)) .gt. SA) then
87 | SA = abs(rsp(j))
88 | timeIndex = j
89 | if ( rsp(j) .ge. 0. ) then
90 | polarity = 1
91 | else
92 | polarity = -1
93 | endif
94 | endif
95 | enddo
96 |
97 | return
98 | end
99 |
100 | c ----------------------------------------------------------------------
101 |
102 | subroutine coeff ( w, beta1, dt1 )
103 | real beta1, dt1, w
104 | real*8 a11, a12, a21, a22, b11, b12, b21, b22
105 | real*8 beta, dt, t1, t2, t3, t4, s1, s2
106 | common /coef/a11,a12,a21,a22,b11,b12,b21,b22
107 |
108 | beta = dble( beta1 )
109 | dt = dble( dt1 )
110 | c
111 | c Set up repeated terms
112 | t1 = sqrt(1.-beta**2)
113 | t2 = sin (w*t1*dt)
114 | t3 = cos (w*t1*dt)
115 | t4 = exp (-beta*w*dt)
116 | s1 = (2.*beta**2-1.) / (w**2*dt)
117 | s2 = 2.*beta / (w**3*dt)
118 | c write (*,'( 10f10.5)') t1, t2, t3, t4, s1, s2
119 | c
120 | c calculate the a's
121 | a11 = t4*(beta*t2/t1+t3)
122 | a12 = t4*t2 / (w*t1)
123 | a21 = -t4*w*t2 / t1
124 | a22 = t4*(t3-beta*t2/t1)
125 | c
126 | c calculate the b's
127 | b11 = t4*((s1+beta/w)*t2 / (w*t1) + (s2+1./w**2)*t3) - s2
128 | b12 = -t4*(s1*t2/(w*t1)+s2*t3) - 1./w**2 + s2
129 | b21 = (s1+beta/w) * (t3-beta*t2/t1)
130 | b21 = t4*(b21 - (s2+1./w**2)*(w*t1*t2+beta*w*t3)) + 1./(w**2*dt)
131 | b22 = s1*(t3-beta*t2/t1)
132 | b22 = -t4*(b22 - s2*(w*t1*t2+beta*w*t3)) - 1./(w**2*dt)
133 |
134 | return
135 | end
136 |
137 | c ----------------------------------------------------------------------
138 | subroutine brs (x,w,beta,npts,rsp)
139 | real x(1), rsp(1), beta
140 | real w
141 | real*8 d, v, a, z, ap1, dp1, vp1, t1, t2
142 | real*8 a11, a12, a21, a22, b11, b12, b21, b22
143 | common /coef/ a11,a12,a21,a22,b11,b12,b21,b22
144 | c
145 | c initialize
146 | t1 = 2.*beta*w
147 | t2 = w**2
148 | d = 0.
149 | v = 0.
150 | a = 0.
151 | c
152 | c calculate the response
153 | do 10 i=1,npts
154 | ap1 = dble( x(i) )
155 | dp1 = a11*d + a12*v + b11*a + b12*ap1
156 | vp1 = a21*d + a22*v + b21*a + b22*ap1
157 | z = -(t1*vp1 + t2*dp1)
158 |
159 | c absolute acc
160 | c rsp(i) = sngl( z )
161 |
162 | c Pseudo-acceleration
163 | rsp(i) = sngl( dp1 ) * t2
164 | a = ap1
165 | v = vp1
166 | d = dp1
167 | 10 continue
168 |
169 | return
170 | end
171 |
172 | c ----------------------------------------------------------------------
173 |
--------------------------------------------------------------------------------
/ts_process/plot_timeseries.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | BSD 3-Clause License
4 |
5 | Copyright (c) 2018, Southern California Earthquake Center
6 | All rights reserved.
7 |
8 | Redistribution and use in source and binary forms, with or without
9 | modification, are permitted provided that the following conditions are met:
10 |
11 | * Redistributions of source code must retain the above copyright notice, this
12 | list of conditions and the following disclaimer.
13 |
14 | * Redistributions in binary form must reproduce the above copyright notice,
15 | this list of conditions and the following disclaimer in the documentation
16 | and/or other materials provided with the distribution.
17 |
18 | * Neither the name of the copyright holder nor the names of its
19 | contributors may be used to endorse or promote products derived from
20 | this software without specific prior written permission.
21 |
22 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
23 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
24 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
25 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
26 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
27 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
28 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
29 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
30 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 |
33 | This program plots several timeseries together without any processing
34 | """
35 | from __future__ import division, print_function
36 |
37 | # Import Python modules
38 | import os
39 | import sys
40 | import argparse
41 | import matplotlib as mpl
42 | if mpl.get_backend() != 'agg':
43 | mpl.use('Agg') # Disables use of Tk/X11
44 |
45 | # Import seismtools functions
46 | from file_utilities import read_files
47 | from ts_library import calculate_distance
48 | from ts_plot_library import plot_overlay_timeseries
49 |
50 | def parse_arguments():
51 | """
52 | This function takes care of parsing the command-line arguments and
53 | asking the user for any missing parameters that we need
54 | """
55 | parser = argparse.ArgumentParser(description="Creates comparison plots of "
56 | " a number of timeseries files.")
57 | parser.add_argument("-o", "--output", dest="outfile", required=True,
58 | help="output png file")
59 | parser.add_argument("--epicenter-lat", dest="epicenter_lat", type=float,
60 | help="earthquake epicenter latitude")
61 | parser.add_argument("--epicenter-lon", dest="epicenter_lon", type=float,
62 | help="earthquake epicenter longitude")
63 | parser.add_argument("--st-lat", "--station-latitude", dest="st_lat",
64 | type=float, help="station latitude")
65 | parser.add_argument("--st-lon", "--station-longitude", dest="st_lon",
66 | type=float, help="station longitude")
67 | parser.add_argument("-s", "--station-name", "--station", dest="station",
68 | help="station name")
69 | parser.add_argument("--station-list", dest="station_list",
70 | help="station list with latitude and longitude")
71 | parser.add_argument("--xmin", dest="xmin", type=float,
72 | help="xmin to plot")
73 | parser.add_argument("--xmax", dest="xmax", type=float,
74 | help="xmax to plot")
75 | parser.add_argument('input_files', nargs='*')
76 | args = parser.parse_args()
77 |
78 | if args.st_lat is not None and args.st_lon is not None:
79 | args.st_loc = [args.st_lat, args.st_lon]
80 | else:
81 | args.st_loc = None
82 | if args.epicenter_lat is not None and args.epicenter_lon is not None:
83 | args.epicenter = [args.epicenter_lat, args.epicenter_lon]
84 | else:
85 | args.epicenter = None
86 | if args.xmin is None:
87 | args.xmin = 0.0
88 | if args.xmax is None:
89 | args.xmax = 30.0
90 |
91 | return args
92 |
93 | def plot_timeseries_main():
94 | """
95 | Main function for plot_timeseries
96 | """
97 | # Parse command-line options
98 | args = parse_arguments()
99 | # Copy inputs
100 | output_file = args.outfile
101 | filenames = args.input_files
102 |
103 | # Set plot title
104 | plot_title = None
105 | if args.station is not None:
106 | plot_title = "%s" % (args.station)
107 | # Set title if station name provided and epicenter are provided
108 | if args.station is not None and args.epicenter is not None:
109 | # Calculate distance if locations are provided
110 | if args.st_loc is None and args.station_list is not None:
111 | # Find station coordinates from station list
112 | st_file = open(args.station_list, 'r')
113 | for line in st_file:
114 | line = line.strip()
115 | if not line:
116 | # skip blank lines
117 | continue
118 | if line.startswith("#") or line.startswith("%"):
119 | # Skip comments
120 | continue
121 | pieces = line.split()
122 | if len(pieces) < 3:
123 | # Skip line with insufficient tokens
124 | continue
125 | if pieces[2].lower() != args.station.lower():
126 | # Not a match
127 | continue
128 | # Match!
129 | args.st_loc = [float(pieces[1]), float(pieces[0])]
130 | break
131 | # All done processing station file
132 | st_file.close()
133 |
134 | if args.st_loc is not None:
135 | # Calculate distance here
136 | distance = calculate_distance(args.epicenter, args.st_loc)
137 | # Set plot title
138 | plot_title = "%s, Dist: ~%dkm" % (args.station,
139 | distance)
140 |
141 | # Read data
142 | _, stations = read_files(None, filenames)
143 | filenames = [os.path.basename(filename) for filename in filenames]
144 |
145 | # Create plot
146 | plot_overlay_timeseries(args, filenames, stations,
147 | output_file, plot_title=plot_title)
148 |
149 | # ============================ MAIN ==============================
150 | if __name__ == "__main__":
151 | plot_timeseries_main()
152 | # end of main program
153 |
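154 | # Example invocation (a minimal sketch; file names, station name, coordinates,
155 | # and station list below are placeholders for illustration):
156 | #   ./plot_timeseries.py -o STA01-overlay.png --station STA01 \
157 | #       --epicenter-lat 34.0 --epicenter-lon -118.0 --station-list stations.txt \
158 | #       run1/STA01.vel.bbp run2/STA01.vel.bbp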
--------------------------------------------------------------------------------
/ts_process/create_plot_index.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | BSD 3-Clause License
4 |
5 | Copyright (c) 2020, Southern California Earthquake Center
6 | All rights reserved.
7 |
8 | Redistribution and use in source and binary forms, with or without
9 | modification, are permitted provided that the following conditions are met:
10 |
11 | * Redistributions of source code must retain the above copyright notice, this
12 | list of conditions and the following disclaimer.
13 |
14 | * Redistributions in binary form must reproduce the above copyright notice,
15 | this list of conditions and the following disclaimer in the documentation
16 | and/or other materials provided with the distribution.
17 |
18 | * Neither the name of the copyright holder nor the names of its
19 | contributors may be used to endorse or promote products derived from
20 | this software without specific prior written permission.
21 |
22 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
23 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
24 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
25 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
26 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
27 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
28 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
29 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
30 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 |
33 | This program creates an HTML index for a folder of plots
34 | """
35 | from __future__ import division, print_function
36 |
37 | # Import Python modules
38 | import os
39 | import sys
40 | import glob
41 | import argparse
42 | from ts_library import calculate_distance
43 |
44 | def parse_arguments():
45 | """
46 | This function takes care of parsing the command-line arguments and
47 | asking the user for any missing parameters that we need
48 | """
49 | parser = argparse.ArgumentParser(description="Creates a html index for "
50 | " a direcory of plots.")
51 | parser.add_argument("-o", "--output", dest="outfile", required=True,
52 | help="output html file")
53 | parser.add_argument("--plotdir", dest="plotdir", required=True,
54 | help="directory containing plots")
55 | parser.add_argument("--epicenter-lat", dest="epicenter_lat", type=float,
56 | help="earthquake epicenter latitude")
57 | parser.add_argument("--epicenter-lon", dest="epicenter_lon", type=float,
58 | help="earthquake epicenter longitude")
59 | parser.add_argument("--station-list", dest="station_list", required=True,
60 | help="station list with latitude and longitude")
61 | parser.add_argument("--freqs", dest="freqs",
62 | help="frequencies used for the simulation")
63 | parser.add_argument("--alpha", dest="alpha",
64 | default=False, action='store_true',
65 | help="sort output alphabetically")
66 | parser.add_argument("--title", dest="title",
67 | help="title for the html file")
68 | args = parser.parse_args()
69 |
70 | if args.epicenter_lat is not None and args.epicenter_lon is not None:
71 | args.epicenter = [args.epicenter_lat, args.epicenter_lon]
72 | else:
73 | args.epicenter = None
74 | if args.freqs is not None:
75 | args.freqs = args.freqs.strip().split(",")
76 | args.freqs = [freq.strip() for freq in args.freqs]
77 | if args.alpha:
78 | args.order = "alpha"
79 | else:
80 | args.order = "distance"
81 | if args.title is None:
82 | args.title = "Results"
83 |
84 | return args
85 |
86 | def calculate_distances(station_list, epicenter):
87 | """
88 | Calculates distances from the epicenter to all stations
89 | """
90 | distances = {}
91 |
92 | # Find station coordinates from station list
93 | st_file = open(station_list, 'r')
94 | for line in st_file:
95 | line = line.strip()
96 | if not line:
97 | # skip blank lines
98 | continue
99 | if line.startswith("#") or line.startswith("%"):
100 | # Skip comments
101 | continue
102 | pieces = line.split()
103 | if len(pieces) < 3:
104 | # Skip line with insufficient tokens
105 | continue
106 | station_id = pieces[2].upper()
107 | station_loc = [float(pieces[1]), float(pieces[0])]
108 | # Calculate distance here
109 | station_dist = calculate_distance(epicenter, station_loc)
110 | distances[station_id] = station_dist
111 |
112 | st_file.close()
113 |
114 | return distances
115 |
116 | def create_plot_index_main():
117 | """
118 | Main function for create_plot_index
119 | """
120 | # Parse command-line options
121 | args = parse_arguments()
122 | # Copy inputs
123 | output_file = args.outfile
124 | plots_dir = args.plotdir
125 | freqs = args.freqs
126 |
127 | # Calculate distances from epicenter
128 | distances = calculate_distances(args.station_list, args.epicenter)
129 |
130 | # Create ordered lists
131 | alpha_sort = sorted(distances)
132 | dist_sort2 = sorted(distances.items(), key=lambda item: item[1])
133 | dist_sort = [item for item, _ in dist_sort2]
134 |
135 | if args.order == "distance":
136 | station_order = dist_sort
137 | else:
138 | station_order = alpha_sort
139 |
140 | # Open html file
141 | html_output = open(output_file, 'w')
142 |     html_output.write("<html>\n")
143 |     html_output.write("<title>%s</title>\n" % (args.title))
144 |     html_output.write("<body>\n")
145 |     html_output.write("<h1>%s</h1>\n" % (args.title))
146 |     html_output.write("<table>\n")
147 |     html_output.write("<tr>\n")
148 |     html_output.write("<th>Station ID</th>\n")
149 |     html_output.write("<th>Distance (km)</th>\n")
150 |     for freq in args.freqs:
151 |         html_output.write("<th>%sHz</th>\n" % (freq))
152 |     html_output.write("</tr>\n")
153 |
154 |     for station in station_order:
155 |         files = glob.glob("%s/%s*" % (plots_dir, station))
156 |         # Skip stations that don't have any plots
157 |         if not files:
158 |             continue
159 |         html_output.write("<tr>\n")
160 |         html_output.write("<td>%s</td>\n" % (station))
161 |         html_output.write("<td>%f</td>\n" % (distances[station]))
162 |         for freq in args.freqs:
163 |             files = glob.glob("%s/%s-%s*" % (plots_dir, station, freq))
164 |             if len(files) != 1:
165 |                 html_output.write("<td></td>\n")
166 |                 continue
167 |             html_output.write('<td><a href="%s">%s</a></td>\n' %
168 |                               (os.path.basename(files[0]), freq))
169 |         html_output.write("</tr>\n")
170 |
171 |     html_output.write("</table>\n")
172 |     html_output.write("</body>\n")
173 |     html_output.write("</html>\n")
174 | html_output.close()
175 |
176 | # ============================ MAIN ==============================
177 | if __name__ == "__main__":
178 | create_plot_index_main()
179 | # end of main program
180 |
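181 | # Example invocation (a minimal sketch; paths, coordinates, frequencies, and
182 | # title below are placeholders for illustration):
183 | #   ./create_plot_index.py -o index.html --plotdir plots --station-list stations.txt \
184 | #       --epicenter-lat 34.0 --epicenter-lon -118.0 --freqs 0.5,1.0 --title "Results"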
--------------------------------------------------------------------------------
/ts_process/awp2bbp.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | BSD 3-Clause License
4 |
5 | Copyright (c) 2020, Southern California Earthquake Center
6 | All rights reserved.
7 |
8 | Redistribution and use in source and binary forms, with or without
9 | modification, are permitted provided that the following conditions are met:
10 |
11 | * Redistributions of source code must retain the above copyright notice, this
12 | list of conditions and the following disclaimer.
13 |
14 | * Redistributions in binary form must reproduce the above copyright notice,
15 | this list of conditions and the following disclaimer in the documentation
16 | and/or other materials provided with the distribution.
17 |
18 | * Neither the name of the copyright holder nor the names of its
19 | contributors may be used to endorse or promote products derived from
20 | this software without specific prior written permission.
21 |
22 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
23 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
24 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
25 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
26 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
27 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
28 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
29 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
30 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 |
33 | Utility to convert AWP time history files to BBP format
34 | """
35 | from __future__ import division, print_function
36 |
37 | # Import Python modules
38 | import os
39 | import sys
40 | import argparse
41 | import numpy as np
42 | from ts_library import integrate, derivative, TimeseriesComponent, rotate_timeseries
43 |
44 | def get_dt(input_file):
45 | """
46 | Read AWP file and return DT
47 | """
48 | val1 = None
49 | val2 = None
50 | file_dt = None
51 |
52 | # Figure out dt first, we need it later
53 | ifile = open(input_file)
54 | for line in ifile:
55 | # Skip comments
56 | if line.startswith("#") or line.startswith("%"):
57 | continue
58 | pieces = line.split()
59 | pieces = [float(piece) for piece in pieces]
60 | if val1 is None:
61 | val1 = pieces[0]
62 | continue
63 | if val2 is None:
64 | val2 = pieces[0]
65 | break
66 | ifile.close()
67 |
68 | # Quit if cannot figure out dt
69 | if val1 is None or val2 is None:
70 | print("[ERROR]: Cannot determine dt from AWP file! Exiting...")
71 | sys.exit(1)
72 |
73 | # Calculate dt
74 | file_dt = val2 - val1
75 |
76 | return file_dt
77 | # end get_dt
78 |
79 | def read_awp(input_file):
80 | """
81 | Reads the input file in awp format and returns arrays containing
82 | vel_ns, vel_ew, vel_ud components
83 | """
84 | time = []
85 | vel_ns = []
86 | vel_ew = []
87 | vel_ud = []
88 |
89 | # Get AWP file dt
90 | delta_t = get_dt(input_file)
91 | current_time = 0.0
92 |
93 | try:
94 | input_fp = open(input_file, 'r')
95 | for line in input_fp:
96 | line = line.strip()
97 | if line.startswith("#") or line.startswith("%"):
98 | continue
99 | pieces = line.split()
100 | pieces = [float(piece) for piece in pieces]
101 | # Add values to out arrays
102 | # Note that in AWP files, channels are EW/NS/UD instead of NS/EW/UD
103 | time.append(current_time)
104 | current_time = current_time + delta_t
105 | vel_ew.append(pieces[1])
106 | vel_ns.append(pieces[2])
107 | vel_ud.append(pieces[3])
108 | except IOError as e:
109 | print(e)
110 | sys.exit(-1)
111 |
112 | # All done
113 | input_fp.close()
114 |
115 | # Convert to NumPy Arrays
116 | time = np.array(time)
117 | vel_ew = np.array(vel_ew)
118 | vel_ns = np.array(vel_ns)
119 | vel_ud = np.array(vel_ud)
120 |
121 | return delta_t, time, vel_ns, vel_ew, vel_ud
122 |
123 | def write_bbp_header(out_fp, file_type, file_unit, args):
124 | """
125 | This function writes the bbp header
126 | """
127 | orientation = args.orientation.strip()
128 | orientations = orientation.split(",")
129 | orientations = [val.strip() for val in orientations]
130 |
131 | # Write header
132 | out_fp.write("# Station= %s\n" % (args.station_name))
133 | out_fp.write("# time= %s\n" % (args.time))
134 | out_fp.write("# lon= %s\n" % (args.longitude))
135 | out_fp.write("# lat= %s\n" % (args.latitude))
136 | out_fp.write("# units= %s\n" % (file_unit))
137 | out_fp.write("# padding= 0\n")
138 | out_fp.write("# orientation= %s\n" % (args.orientation))
139 | out_fp.write("#\n")
140 | out_fp.write("# Data fields are TAB-separated\n")
141 | out_fp.write("# Column 1: Time (s)\n")
142 | out_fp.write("# Column 2: H1 component ground "
143 | "%s (+ is %s)\n" % (file_type, orientations[0]))
144 | out_fp.write("# Column 3: H2 component ground "
145 | "%s (+ is %s)\n" % (file_type, orientations[1]))
146 | out_fp.write("# Column 4: V component ground "
147 | "%s (+ is %s)\n" % (file_type, orientations[2]))
148 | out_fp.write("#\n")
149 |
150 | def awp2bbp_main():
151 | """
152 | Script to convert AWP files to BBP format
153 | """
154 | parser = argparse.ArgumentParser(description="Converts an AWP "
155 | "file to BBP format, generating "
156 | "displacement, velocity and acceleration "
157 | "BBP files.")
158 | parser.add_argument("-s", "--station-name", dest="station_name",
159 | default="NoName",
160 | help="provides the name for this station")
161 | parser.add_argument("--lat", dest="latitude", type=float, default=0.0,
162 | help="provides the latitude for the station")
163 | parser.add_argument("--lon", dest="longitude", type=float, default=0.0,
164 | help="provides the longitude for the station")
165 | parser.add_argument("-t", "--time", default="00/00/00,0:0:0.0 UTC",
166 | help="provides timing information for this timeseries")
167 | parser.add_argument("-o", "--orientation", default="0,90,UP",
168 | dest="orientation",
169 | help="orientation, default: 0,90,UP")
170 | parser.add_argument("--azimuth", type=float, dest="azimuth",
171 | help="azimuth for rotation (degrees)")
172 | parser.add_argument("input_file", help="AWP input timeseries")
173 | parser.add_argument("output_stem",
174 | help="output BBP filename stem without the "
175 | " .{dis,vel,acc}.bbp extensions")
176 | parser.add_argument("-d", dest="output_dir", default="",
177 | help="output directory for the BBP file")
178 | args = parser.parse_args()
179 |
180 | input_file = args.input_file
181 | output_file_dis = "%s.dis.bbp" % (os.path.join(args.output_dir,
182 | args.output_stem))
183 | output_file_vel = "%s.vel.bbp" % (os.path.join(args.output_dir,
184 | args.output_stem))
185 | output_file_acc = "%s.acc.bbp" % (os.path.join(args.output_dir,
186 | args.output_stem))
187 |
188 | # Check orientation
189 | orientation = args.orientation.split(",")
190 | if len(orientation) != 3:
191 | print("[ERROR]: Need to specify orientation for all 3 components!")
192 | sys.exit(-1)
193 | orientation[0] = float(orientation[0])
194 | orientation[1] = float(orientation[1])
195 | orientation[2] = orientation[2].lower()
196 | if orientation[2] != "up" and orientation[2] != "down":
197 | print("[ERROR]: Vertical orientation must be up or down!")
198 | sys.exit(-1)
199 |
200 | # Read AWP file
201 | print("[INFO]: Reading file %s ..." % (os.path.basename(input_file)))
202 | delta_t, times, vel_h1, vel_h2, vel_ver = read_awp(input_file)
203 |
204 | # Calculate displacement
205 | dis_h1 = integrate(vel_h1, delta_t)
206 | dis_h2 = integrate(vel_h2, delta_t)
207 | dis_ver = integrate(vel_ver, delta_t)
208 |
209 | # Calculate acceleration
210 | acc_h1 = derivative(vel_h1, delta_t)
211 | acc_h2 = derivative(vel_h2, delta_t)
212 | acc_ver = derivative(vel_ver, delta_t)
213 |
214 | # Create station data structures
215 | samples = vel_h1.size
216 |
217 | # samples, dt, orientation, acceleration, velocity, displacement
218 | signal_h1 = TimeseriesComponent(samples, delta_t, orientation[0],
219 | acc_h1, vel_h1, dis_h1)
220 | signal_h2 = TimeseriesComponent(samples, delta_t, orientation[1],
221 | acc_h2, vel_h2, dis_h2)
222 | signal_ver = TimeseriesComponent(samples, delta_t, orientation[2],
223 | acc_ver, vel_ver, dis_ver)
224 |
225 | station = [signal_h1, signal_h2, signal_ver]
226 |
227 | # Rotate timeseries if needed
228 | if args.azimuth is not None:
229 | print("[INFO]: Rotating timeseries - %f degrees" % (args.azimuth))
230 | station = rotate_timeseries(station, args.azimuth)
231 |
232 | # Update orientation after rotation so headers reflect any changes
233 | args.orientation = "%s,%s,%s" % (str(station[0].orientation),
234 | str(station[1].orientation),
235 | str(station[2].orientation))
236 |
237 | # Pull data back
238 | acc_h1 = station[0].acc.tolist()
239 | vel_h1 = station[0].vel.tolist()
240 | dis_h1 = station[0].dis.tolist()
241 | acc_h2 = station[1].acc.tolist()
242 | vel_h2 = station[1].vel.tolist()
243 | dis_h2 = station[1].dis.tolist()
244 | acc_ver = station[2].acc.tolist()
245 | vel_ver = station[2].vel.tolist()
246 | dis_ver = station[2].dis.tolist()
247 |
248 | # Write header
249 | o_dis_file = open(output_file_dis, 'w')
250 | o_vel_file = open(output_file_vel, 'w')
251 | o_acc_file = open(output_file_acc, 'w')
252 | write_bbp_header(o_dis_file, "displacement", 'm', args)
253 | write_bbp_header(o_vel_file, "velocity", 'm/s', args)
254 | write_bbp_header(o_acc_file, "acceleration", 'm/s^2', args)
255 |
256 | # Write files
257 | for (time, disp_h1, disp_h2, disp_ver,
258 | velo_h1, velo_h2, velo_ver,
259 | accel_h1, accel_h2, accel_ver) in zip(times, dis_h1, dis_h2, dis_ver,
260 | vel_h1, vel_h2, vel_ver,
261 | acc_h1, acc_h2, acc_ver):
262 | o_dis_file.write("%1.9E %1.9E %1.9E %1.9E\n" %
263 | (time, disp_h1, disp_h2, disp_ver))
264 | o_vel_file.write("%1.9E %1.9E %1.9E %1.9E\n" %
265 | (time, velo_h1, velo_h2, velo_ver))
266 | o_acc_file.write("%1.9E %1.9E %1.9E %1.9E\n" %
267 | (time, accel_h1, accel_h2, accel_ver))
268 |
269 | # All done
270 | o_dis_file.close()
271 | o_vel_file.close()
272 | o_acc_file.close()
273 |
274 | # ============================ MAIN ==============================
275 | if __name__ == "__main__":
276 | awp2bbp_main()
277 | # end of main program
278 |
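279 | # Example invocation (a minimal sketch; the input file, output stem, station
280 | # name, and coordinates below are placeholders for illustration):
281 | #   ./awp2bbp.py -s STA01 --lat 34.05 --lon -118.25 -d out_dir station.awp STA01
282 | # This writes out_dir/STA01.dis.bbp, out_dir/STA01.vel.bbp, and out_dir/STA01.acc.bbp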
--------------------------------------------------------------------------------
/ts_process/rwgdata2bbp.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | BSD 3-Clause License
4 |
5 | Copyright (c) 2020, Southern California Earthquake Center
6 | All rights reserved.
7 |
8 | Redistribution and use in source and binary forms, with or without
9 | modification, are permitted provided that the following conditions are met:
10 |
11 | * Redistributions of source code must retain the above copyright notice, this
12 | list of conditions and the following disclaimer.
13 |
14 | * Redistributions in binary form must reproduce the above copyright notice,
15 | this list of conditions and the following disclaimer in the documentation
16 | and/or other materials provided with the distribution.
17 |
18 | * Neither the name of the copyright holder nor the names of its
19 | contributors may be used to endorse or promote products derived from
20 | this software without specific prior written permission.
21 |
22 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
23 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
24 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
25 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
26 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
27 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
28 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
29 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
30 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 |
33 | Utility to convert RWG observation data files to BBP format
34 | """
35 | from __future__ import division, print_function
36 |
37 | # Import Python modules
38 | import os
39 | import sys
40 | import argparse
41 | import numpy as np
42 |
43 | # Import tsprocess needed functions
44 | from file_utilities import read_file_bbp2
45 | from ts_library import TimeseriesComponent, baseline_function, \
46 | rotate_timeseries, integrate
47 |
48 | def parse_arguments():
49 | """
50 | This function takes care of parsing the command-line arguments and
51 | asking the user for any missing parameters that we need
52 | """
53 | parser = argparse.ArgumentParser(description="Converts RWG "
54 | " observation files to BBP format.")
55 | parser.add_argument("-o", "--output", dest="outdir", required=True,
56 | help="output directory name")
57 | parser.add_argument("-i", "--input", dest="infile",
58 | help="input file (overrides --dir below)")
59 | parser.add_argument("-d", "--dir", dest="indir",
60 | help="input directory")
61 | args = parser.parse_args()
62 |
63 | if args.infile is None and args.indir is None:
64 | print("ERROR: Please specify either an input file or directory!")
65 | sys.exit(-1)
66 |
67 | if args.infile is not None:
68 | args.indir = None
69 |
70 | return args
71 |
72 | def read_rwg_obs_data(input_file):
73 | """
74 | Reads and processes a RWG observation file
75 | """
76 | record_list = []
77 |
78 | # Read file
79 | print("[READING]: %s..." % (input_file))
80 |
81 | # First, read the headers
82 | headers = []
83 | try:
84 | bbp_file = open(input_file, 'r')
85 | for line in bbp_file:
86 | line = line.strip()
87 | if line.startswith('#') or line.startswith('%'):
88 | headers.append(line)
89 | bbp_file.close()
90 | except IOError:
91 | print("[ERROR]: error reading bbp file: %s" % (input_file))
92 | sys.exit(1)
93 |
94 | # Now read the data
95 | [times, vel_h1, vel_h2, vel_ver] = read_file_bbp2(input_file)
96 |
97 | for data, orientation in zip([vel_h1, vel_h2, vel_ver],
98 | [0.0, 90.0, 'up']):
99 |
100 | # Get network code and station id
101 | basefile = os.path.splitext(os.path.basename(input_file))[0]
102 | tokens = basefile.split("_")
103 | network = tokens[0].upper()
104 | station_id = tokens[1].upper()
105 |
106 | # Get location's latitude and longitude
107 | latitude = "N/A"
108 | longitude = "N/A"
109 | for line in headers:
110 | if "lon=" in line:
111 | longitude = float(line.split()[2])
112 | if "lat=" in line:
113 | latitude = float(line.split()[2])
114 |
115 | # Get filtering information
116 | high_pass = 0.1
117 | low_pass = 5.0
118 |
119 | date = '00/00/00'
120 | hour = '00'
121 | minute = '00'
122 | seconds = '00'
123 | fraction = '0'
124 | tzone = '---'
125 |
126 | # Put it all together
127 | time = "%s:%s:%s.%s %s" % (hour, minute, seconds, fraction, tzone)
128 |
129 | # Get number of samples and dt
130 | samples = data.size
131 | delta_t = times[1] - times[0]
132 |
133 | acc_data = data
134 | vel_data = integrate(acc_data, delta_t)
135 | dis_data = integrate(vel_data, delta_t)
136 |
137 | print("[PROCESSING]: Found component: %s" % (orientation))
138 | record_list.append(TimeseriesComponent(samples, delta_t, orientation,
139 | acc_data, vel_data, dis_data))
140 |
141 | station_metadata = {}
142 | station_metadata['network'] = network
143 | station_metadata['station_id'] = station_id
144 | station_metadata['type'] = "RWGOBS"
145 | station_metadata['date'] = date
146 | station_metadata['time'] = time
147 | station_metadata['longitude'] = longitude
148 | station_metadata['latitude'] = latitude
149 | station_metadata['high_pass'] = high_pass
150 | station_metadata['low_pass'] = low_pass
151 |
152 | return record_list, station_metadata
153 |
154 | def process_observation_data(station):
155 | """
156 | This function processes the observation data
157 | using baseline correction and rotation (if needed)
158 | """
159 | # Validate inputs
160 | if len(station) != 3:
161 | print("[ERROR]: Expecting 3 components!")
162 | return False
163 |
164 | # Reorder components if needed so that vertical is always the last one
165 | if isinstance(station[0].orientation, str):
166 | tmp = station[0]
167 | station[0] = station[2]
168 | station[2] = tmp
169 | elif isinstance(station[1].orientation, str):
170 | tmp = station[1]
171 | station[1] = station[2]
172 | station[2] = tmp
173 |
174 | # First we apply the baseline correction, use 5th order polynomial
175 | order = 5
176 | # Inputs are in cm/sec2, so no scaling
177 | gscale = 1.0
178 |
179 | # Apply baseline correction to all components
180 | for component in station:
181 | _, new_acc, new_vel, new_dis = baseline_function(component.acc,
182 | component.dt,
183 | gscale, order)
184 | component.acc = new_acc
185 | component.vel = new_vel
186 | component.dis = new_dis
187 |
188 | # Now rotate if needed, so that components are 0 and 90 degrees
189 | # Always pick the smaller angle for rotation
190 | rotation_angle = min(station[0].orientation,
191 | station[1].orientation)
192 | return rotate_timeseries(station, rotation_angle)
193 |
194 | def write_bbp(station, station_metadata, destination):
195 | """
196 | This function generates .bbp files for
197 | each of velocity/acceleration/displacement
198 | """
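    # Output naming follows the code below: three files per station,
    # <network>_<station_id>.<type>.{dis,vel,acc}.bbp, written to the
    # destination directory (e.g. a hypothetical CI_STA01.RWGOBS.vel.bbp).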
199 | filename_base = ("%s_%s.%s" %
200 | (station_metadata['network'],
201 | station_metadata['station_id'],
202 | station_metadata['type']))
203 |
204 |     # Collect the data lists for each orientation so they can be written below
205 | for component in station:
206 | if component.orientation in [0, 360, 180, -180]:
207 | dis_ns = component.dis.tolist()
208 | vel_ns = component.vel.tolist()
209 | acc_ns = component.acc.tolist()
210 | elif component.orientation in [90, -270, -90, 270]:
211 | dis_ew = component.dis.tolist()
212 | vel_ew = component.vel.tolist()
213 | acc_ew = component.acc.tolist()
214 | elif (component.orientation.upper() == "UP" or
215 | component.orientation.upper() == "DOWN"):
216 | dis_up = component.dis.tolist()
217 | vel_up = component.vel.tolist()
218 | acc_up = component.acc.tolist()
219 | else:
220 | pass
221 |
222 | # Prepare to output
223 | out_data = [['dis', dis_ns, dis_ew, dis_up, 'displacement', 'cm'],
224 | ['vel', vel_ns, vel_ew, vel_up, 'velocity', 'cm/s'],
225 | ['acc', acc_ns, acc_ew, acc_up, 'acceleration', 'cm/s^2']]
226 |
227 | for data in out_data:
228 | filename = "%s.%s.bbp" % (filename_base, data[0])
229 | try:
230 | out_fp = open(os.path.join(destination, filename), 'w')
231 |         except IOError as e:
232 |             print("[ERROR]: Writing BBP file: %s (%s)" % (filename, e))
233 | return False
234 |
235 | # Start with time = 0.0
236 | time = [0.000]
237 | samples = component.samples
238 | while samples > 1:
239 | time.append(time[len(time)-1] + component.dt)
240 | samples -= 1
241 |
242 | # Write header
243 | out_fp.write("# Station= %s_%s\n" %
244 | (station_metadata['network'],
245 | station_metadata['station_id']))
246 | out_fp.write("# time= %s,%s\n" %
247 | (station_metadata['date'],
248 | station_metadata['time']))
249 | out_fp.write("# lon= %s\n" %
250 | (station_metadata['longitude']))
251 | out_fp.write("# lat= %s\n" %
252 | (station_metadata['latitude']))
253 | out_fp.write("# hp= %s\n" %
254 | (station_metadata['high_pass']))
255 | out_fp.write("# lp= %s\n" %
256 | (station_metadata['low_pass']))
257 | out_fp.write("# units= %s\n" % (data[5]))
258 | # Orientation is always 0,90,UP as we just rotated the timeseries
259 | out_fp.write("# orientation= 0,90,UP\n")
260 | out_fp.write("#\n")
261 | out_fp.write("# Data fields are TAB-separated\n")
262 | out_fp.write("# Column 1: Time (s)\n")
263 | out_fp.write("# Column 2: H1 component ground "
264 | "%s (+ is 000)\n" % (data[4]))
265 | out_fp.write("# Column 3: H2 component ground "
266 | "%s (+ is 090)\n" % (data[4]))
267 | out_fp.write("# Column 4: V component ground "
268 | "%s (+ is upward)\n" % (data[4]))
269 | out_fp.write("#\n")
270 |
271 | # Write timeseries
272 | for val_time, val_ns, val_ew, val_ud in zip(time, data[1],
273 | data[2], data[3]):
274 | out_fp.write("%5.7f %5.9e %5.9e %5.9e\n" %
275 | (val_time, val_ns, val_ew, val_ud))
276 |
277 | # All done, close file
278 | out_fp.close()
279 | print("[WRITING]: Wrote BBP file: %s" % (filename))
280 |
281 | def rwgdata2bbp_process(input_file, output_dir):
282 | """
283 | Converts input_file to bbp format
284 | """
285 | station, station_metadata = read_rwg_obs_data(input_file)
286 |
287 | if station:
288 | station = process_observation_data(station)
289 | # Make sure output is valid
290 | if not station:
291 | print("[ERROR]: Processing input file: %s" % (input_file))
292 | return
293 | else:
294 | print("[ERROR]: Reading input file: %s" % (input_file))
295 | return
296 |
297 | # Write BBP file
298 | write_bbp(station, station_metadata, output_dir)
299 |
300 | def rwgdata2bbp_main():
301 | """
302 | Main function for the rwgdata2bbp conversion utility
303 | """
304 | args = parse_arguments()
305 |
306 | if args.infile is not None:
307 | # Only one file to process
308 | process_list = [args.infile]
309 | else:
310 | # Create list of files to process
311 | process_list = []
312 | for item in os.listdir(args.indir):
313 | if item.upper().endswith(".BBP"):
314 | process_list.append(os.path.join(args.indir,
315 | item))
316 |
317 | # Now process the list of files
318 | for item in process_list:
319 | rwgdata2bbp_process(item, args.outdir)
320 |
321 | # ============================ MAIN ==============================
322 | if __name__ == "__main__":
323 | rwgdata2bbp_main()
324 | # end of main program
325 |
--------------------------------------------------------------------------------
/ts_process/rotd50/rotd50.f:
--------------------------------------------------------------------------------
1 | ! ------------------------------------------------------------------
2 | !
3 | ! rotD50.f
4 | ! Computes RotD50 using pairs of orthogonal horizontal components
5 | ! Code developed by Norm Abrahamson, optimized in September 2012 to run faster
6 | ! QA/QC and some clean-up for distribution by Christine Goulet
7 | ! ------------------------------------------------------------------
8 | ! Input provided in filein:
9 | ! - Interp: mode of interpolation for small dt
10 | ! 1. linear interpolation
11 | ! 2. sine wave interpolation (e.g. frequency domain interpolation)
12 | ! 3. cubic spline interpolation
13 | ! - NPairs: number of pairs to read
14 | ! - NHead: number of header lines in ASCII time series files
15 | ! - Series of file names:
16 | ! file name component 1 (input)
17 | ! file name component 2 (input)
18 | !      file name RotD50 output (this single output file also
19 | !      contains the as-recorded PSa for both components)
20 | ! ** Make sure the input ASCII format is adequate - see how it is read below**
21 | ! ------------------------------------------------------------------
22 |
23 | program Calc_RotD50
24 | parameter (MAXPTS = 2000000)
25 |
26 | character*80 fileacc1, fileacc2, filein, fileout_rd50
27 | integer npts1, npts2, npts, npair, nhead, iFlag
28 | real dt1, dt2, dt, acc1(MAXPTS), acc2(MAXPTS), acc1_0(MAXPTS), acc2_0(MAXPTS)
29 | real x0(MAXPTS), y0(MAXPTS), u(MAXPTS), y2(MAXPTS)
30 | real rspTH1(MAXPTS), rspTH2(MAXPTS), rsp1(MAXPTS), rsp2(MAXPTS)
31 | real x(MAXPTS), y(MAXPTS), rsp_Period(63), famp15(3)
32 | real rotangle, w(200), sa(1000), workArray(1000), rotD50(3,200), psa5E(200), psa5N(200)
33 | real damping
34 | complex cu1(MAXPTS)
35 |
36 | data RSP_Period / 0.010, 0.011, 0.012, 0.013, 0.015, 0.017, 0.020, 0.022, 0.025, 0.029,
37 | 1 0.032, 0.035, 0.040, 0.045, 0.050, 0.055, 0.060, 0.065, 0.075, 0.085,
38 | 2 0.100, 0.110, 0.120, 0.130, 0.150, 0.170, 0.200, 0.220, 0.240, 0.260,
39 | 3 0.280, 0.300, 0.350, 0.400, 0.450, 0.500, 0.550, 0.600, 0.650, 0.750,
40 | 4 0.850, 1.000, 1.100, 1.200, 1.300, 1.500, 1.700, 2.000, 2.200, 2.400,
41 | 5 2.600, 2.800, 3.000, 3.500, 4.000, 4.400, 5.000, 5.500, 6.000, 6.500,
42 | 6 7.500, 8.500, 10.000 /
43 |
44 | nFreq = 63
45 | damping = 0.05
46 | dt_max = 0.001
47 |
48 | ! Convert periods to freq in Radians
49 | do iFreq=1,nFreq
50 | w(iFreq) = 2.0*3.14159 / rsp_period(iFreq)
51 | enddo
52 |
53 | ! Read in the input filename filein
54 | filein = "rotd50_inp.cfg"
55 | ! Uncomment below to make interactive instead
56 | ! write (*,*) 'Enter the input filename.'
57 | ! read (*,*) filein
58 | open (30,file=filein, status='old')
59 |
60 |
61 | ! write (*,'( 2x,''write out interpolations? (0=no, 1=yes)'')')
62 | ! read (*,*) iFlag
63 | iFlag = 0
64 | if ( iFlag .eq. 1 ) then
65 | write (*,'( 2x,'' enter times (sec) of first and last points to write out'')')
66 | read (*,*) time1, time2
67 | endif
68 |
69 | ! Read interpolation method to be used
70 | read (30,*) jInterp
71 | ! Read number of pairs and number of header lines
72 | read (30,*) nPair
73 | read (30,*) nHead
74 |
75 | ! Loop over each pair
76 | do iPair=1,nPair
77 |
78 | ! Uncomment below for screen output
79 | ! write (*,'( 2x,'' set '',i5)') ipair
80 |
81 | ! Read Horiz 1 (x) component
82 | read (30,'(a80)') fileacc1
83 | ! Uncomment below for screen output
84 | ! write (*,'( a70)') fileacc1
85 | open (32,file=fileacc1,status='old')
86 | do i=1,nhead-1
87 | read (32,*)
88 | enddo
89 | read (32,*) npts1, dt1
90 | if (npts1 .gt. MAXPTS) then
91 |          write (*,'( 2x,''NPTS is too large: '',i10)') npts1
92 | stop 99
93 | endif
94 | read (32,*) (acc1_0(i),i=1,npts1)
95 | close (32)
96 |
97 | ! Read Horiz 2 (y) component
98 | read (30,'(a80)') fileacc2
99 | ! Uncomment below for screen output
100 | ! write (*,'( a70)') fileacc2
101 | open (33,file=fileacc2,status='old')
102 | do i=1,nhead-1
103 | read (33,*)
104 | enddo
105 | read (33,*) npts2, dt2
106 | if (npts2 .gt. MAXPTS) then
107 |          write (*,'( 2x,''NPTS is too large: '',i10)') npts2
108 | stop 99
109 | endif
110 | read (33,*) (acc2_0(i),i=1,npts2)
111 | close (33)
112 |
113 | ! Check that the two time series have the same number of points. If not, reset to smaller value
114 | if (npts1 .lt. npts2) then
115 | npts0 = npts1
116 | elseif (npts2 .lt. npts1) then
117 | npts0 = npts2
118 | elseif (npts1 .eq. npts2) then
119 | npts0 = npts1
120 | endif
121 |
122 | ! Check that the two time series have the same dt.
123 | if (dt1 .ne. dt2) then
124 | write (*,*) 'DT values are not equal!!!'
125 | write (*,*) 'DT1 = ', dt1
126 | write (*,*) 'DT2 = ', dt2
127 | else
128 | dt = dt1
129 | endif
130 | dt0 = dt
131 |
132 | ! Copy to new array for interpolating ( from original acc )
133 | do i=1,npts0
134 | acc1(i) = acc1_0(i)
135 | acc2(i) = acc2_0(i)
136 | enddo
137 | npts = npts0
138 | dt = dt0
139 |
140 | ! Interpolate to finer time step for calculating the Spectral acceleration
141 | if ( jInterp .ne. 0 ) then
142 | NN = 2**(int(alog(dt/dt_max)/alog(2.))+1)
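          ! e.g. dt = 0.01 s with dt_max = 0.001 s gives
          ! NN = 2**(int(alog(10.)/alog(2.))+1) = 2**4 = 16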
143 | if ( NN*npts .gt. MAXPTS ) then
144 | write (*,'( 2x,''increase maxpts to '',i10)') nn*npts
145 |            read (30,'( a80)') fileout_rd50
146 | goto 100
147 | endif
148 |
149 | ! Uncomment below for screen output
150 | ! write (*,'( 2x,''jInterp, dt, interpolation factor '',i5,f10.5,i5)') jinterp, dt, NN
151 |
152 | ! Time domain linear interpolation
153 | if ( jINterp .eq. 1 ) then
154 | call InterpTime (acc1, dt, npts, dt10, npts10, NN, MAXPTS )
155 | call InterpTime (acc2, dt, npts, dt10, npts10, NN, MAXPTS )
156 |
157 | ! Freq domain interpolation (sine wave)
158 | elseif (jInterp .eq. 2 ) then
159 | call InterpFreq (acc1, npts, cu1, NN, npts10, dt )
160 | call InterpFreq (acc2, npts, cu1, NN, npts10, dt )
161 | dt10 = dt / NN
162 |
163 | ! Time domain cubic spline interpolation
164 | elseif (jInterp .eq. 3 ) then
165 | call InterpSpline (acc1, dt, npts, dt10, npts10, NN, y2, x0, y0, u, MAXPTS )
166 | call InterpSpline (acc2, dt, npts, dt10, npts10, NN, y2, x0, y0, u, MAXPTS )
167 | dt10 = dt / NN
168 | ! write (*,'(2x,''out of spline'')')
169 | ! write (*,'( 5e15.6)') (acc1(k) ,k=33600,33620)
170 | ! pause
171 |
172 | endif
173 | npts = npts10
174 | dt = dt10
175 | endif
176 | ! write (*,'( i8, f10.6,2x,'' npts, dt'')') npts, dt
177 | ! if ( iFlag .eq. 1 ) then
178 | ! k1 = int(time1 / dt)
179 | ! k2 = int (time2 /dt ) + 1
180 | ! write (10+jInterp,'( f12.6,e15.6 )') (dt*(k-1), acc1(k),k=33600,34600)
181 | ! endif
182 |
183 |
184 | ! Loop over each oscillator frequency
185 | do iFreq=1,nFreq
186 | ! Uncomment below for screen output
187 | ! write (*,'( i5, f10.3)') iFreq, rsp_period(iFreq)
188 |
189 | ! Compute the oscillator time histories for the two components.
190 | call CalcRspTH ( acc1, npts, dt, w(iFreq), damping, rspTH1 )
191 | call CalcRspTH ( acc2, npts, dt, w(iFreq), damping, rspTH2 )
192 |
193 | ! Fill new array with points with amplitude on one component at least SaMin/1.5
194 | ! This sets the points for the rotation to speed up the calculation
195 | call Calc_Sa ( rspTH1, sa1, npts)
196 | call Calc_Sa ( rspTH2, sa2, npts )
197 | test = amin1(sa1, sa2) / 1.5
198 | j = 1
199 | do i=1,npts
200 | amp1 = abs(rspTH1(i))
201 | amp2 = abs(rspTH2(i))
202 | if ( amp2 .gt. amp1 ) amp1 = amp2
203 | if ( amp1 .gt. test .and. iFlag .eq. 0 ) then
204 | rsp1(j) = rspTH1(i)
205 | rsp2(j) = rspTH2(i)
206 | j = j + 1
207 | endif
208 | enddo
209 | npts1 = j -1
210 |
211 | ! Loop over different rotation angles and compute response spectra by rotating the Oscillator TH
212 | do j=1,90
213 | rotangle = real(((j-1)*3.14159)/180.0)
214 | cos1 = cos(rotangle)
215 | sin1 = sin(rotangle)
216 | do i=1,npts1
217 | x(i)=cos1*rsp1(i) - sin1*rsp2(i)
218 | y(i)=sin1*rsp1(i) + cos1*rsp2(i)
219 | enddo
220 |
221 | ! Find the maximum response for X and Y and load into a single Sa array
222 | call Calc_Sa ( x, saX, npts1 )
223 | call Calc_Sa ( y, saY, npts1 )
224 | sa(j) = saX
225 | sa(j+90) = SaY
226 | enddo
227 |
228 | ! Get the as-recorded PSa
229 | psa5E(iFreq) = sa(1)
230 | psa5N(iFreq) = sa(91)
231 |
232 | ! Sort the Sa array to find the median value.
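! (180 values: the rotated maxima from both x and y over 0-89 degrees;
!  RotD50 is the average of the two middle entries of the sorted array)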
233 | n1 = 180
234 | call SORT(Sa,WorkArray,N1)
235 | rotD50(jInterp,iFreq) = ( Sa(90) + Sa(91) ) /2.
236 | enddo
237 |
238 | ! Find the Famp1.5 (assumes frequencies are ordered from high to low)
239 | do iFreq=2,nFreq
240 | shape1 = rotD50(jInterp,iFreq)/rotD50(jInterp,1)
241 | if ( shape1 .ge. 1.5 ) then
242 | famp15(jInterp) = 1./rsp_period(iFreq)
243 | goto 105
244 | endif
245 | enddo
246 | 105 continue
247 |
248 | ! Open output files for writing
249 | read (30,'( a80)') fileout_rd50
250 | open (40,file=fileout_rd50,status='new')
251 |
252 | ! Write RotD50 and Psa5 file
253 |       write (40,'(''#'', 2x, ''Period'', 1x, ''Psa5_N'', 1x, ''Psa5_E'', 1x, ''RotD50'')')
254 | write (40,'(''#'', 2x, a80)') fileacc1
255 | write (40,'(''#'', 2x, a80)') fileacc2
256 | write (40,'(''#'', 2x, i5, f10.4)') nFreq, damping
257 | do iFreq=1,nFreq
258 |         write (40,'(f10.4, 1x, e12.5, 1x, e12.5, 1x, e12.5)') rsp_period(iFreq), psa5N(iFreq), psa5E(iFreq), rotD50(jInterp,iFreq)
259 | enddo
260 | close (40)
261 |
262 | 100 continue
263 | enddo
264 |
265 | close (30)
266 |
267 | stop
268 | end
269 |
270 | ! ---------------------------------------------------------------------
271 |
272 | subroutine Calc_Sa ( x, Sa, npts )
273 | real x(1), Sa
274 |
275 | sa = -1E30
276 | do i=1,npts
277 | x1 = abs(x(i))
278 | if ( x1 .gt. Sa ) Sa = x1
279 | enddo
280 | return
281 | end
282 |
283 | ! ---------------------------------------------------------------------
284 | Subroutine InterpTime (acc1, dt, npts, dt10, npts10, NN, MAXPTS )
285 |
286 | real acc1(1)
287 | real acc2(MAXPTS)
288 |
289 | k = 1
290 | do i=1,npts-1
291 | do j=1,NN
292 | dy = (acc1(i+1)-acc1(i))/NN
293 | acc2(k) = acc1(i) + dy*(j-1)
294 | k = k + 1
295 | enddo
296 | enddo
297 | acc2(k) = acc1(npts)
298 | npts10 = k
299 | dt10 = dt / NN
300 |
301 | do i=1,npts10
302 | acc1(i) = acc2(i)
303 | enddo
304 | return
305 | end
306 |
307 |
308 |
309 | ! ---------------------------------------------------------------------
310 |
311 | subroutine InterpSpline (acc1, dt, npts, dt10, npts10, NN, y2, x0, y0, u, MAXPTS )
312 |
313 | real acc1(MAXPTS)
314 | real y2(MAXPTS), x0(MAXPTS), y0(MAXPTS), u(MAXPTS)
315 |
316 | !     Set x array
317 | do i=1,npts
318 | x0(i) = i*dt
319 | y0(i) = acc1(i)
320 | y2(i) = 0.
321 | enddo
322 | yp1 = 0.
323 | ypn = 0.
324 |
325 | call spline( x0, y0, npts, yp1, ypn, y2, u,MAXPTS)
326 |
327 | k = 1
328 | do i=1,npts-1
329 | do j=1,NN
330 | x_new = x0(i) + dt*float(j-1)/NN
331 | call splint(x0,y0,y2,npts,x_new,y_new)
332 | c if ( k .gt. 33600 .and. k .lt. 33620 ) then
333 | c write (*,'( 3i8, 3f10.4,3e15.6)') k, i,j, x0(i), x0(i+1), x_new, y0(i), y0(i+1), y_new
334 | c endif
335 | acc1(k) = y_new
336 | k = k + 1
337 |            if (k .gt. MAXPTS) then
338 | write (*,'( 4i8)') i, j, k, maxpts
339 | stop 99
340 | endif
341 | enddo
342 | enddo
343 | acc1(k) = y0(npts)
344 | dt10 = dt / NN
345 | npts10 = k
346 |
347 | return
348 | end
349 |
350 |
--------------------------------------------------------------------------------
/ts_process/rwg2bbp.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | BSD 3-Clause License
4 |
5 | Copyright (c) 2020, Southern California Earthquake Center
6 | All rights reserved.
7 |
8 | Redistribution and use in source and binary forms, with or without
9 | modification, are permitted provided that the following conditions are met:
10 |
11 | * Redistributions of source code must retain the above copyright notice, this
12 | list of conditions and the following disclaimer.
13 |
14 | * Redistributions in binary form must reproduce the above copyright notice,
15 | this list of conditions and the following disclaimer in the documentation
16 | and/or other materials provided with the distribution.
17 |
18 | * Neither the name of the copyright holder nor the names of its
19 | contributors may be used to endorse or promote products derived from
20 | this software without specific prior written permission.
21 |
22 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
23 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
24 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
25 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
26 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
27 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
28 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
29 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
30 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 |
33 | Utility to convert RWG time history files to BBP format
34 | """
35 | from __future__ import division, print_function
36 |
37 | # Import Python modules
38 | import os
39 | import sys
40 | import argparse
41 | import numpy as np
42 | from ts_library import integrate, derivative, TimeseriesComponent, rotate_timeseries
43 |
44 | def get_dt(input_file):
45 | """
46 | Read RWG file and return DT
47 | """
48 | val1 = None
49 | val2 = None
50 | file_dt = None
51 |
52 | # Figure out dt first, we need it later
53 | ifile = open(input_file)
54 | for line in ifile:
55 | # Skip comments
56 | if line.startswith("#") or line.startswith("%"):
57 | continue
58 | pieces = line.split()
59 | pieces = [float(piece) for piece in pieces]
60 | # Skip negative data points
61 | if pieces[0] < 0.0:
62 | continue
63 | if val1 is None:
64 | val1 = pieces[0]
65 | continue
66 | if val2 is None:
67 | val2 = pieces[0]
68 | break
69 | ifile.close()
70 |
71 | # Quit if cannot figure out dt
72 | if val1 is None or val2 is None:
73 | print("[ERROR]: Cannot determine dt from RWG file! Exiting...")
74 | sys.exit(1)
75 |
76 | # Calculate dt
77 | file_dt = val2 - val1
78 |
79 | return file_dt
80 | # end get_dt
81 |
82 | def read_rwg(input_file):
83 | """
84 | Reads the input file in rwg format and returns arrays containing
85 | vel_ns, vel_ew, vel_ud components
86 | """
87 |
88 | original_header = []
89 | time = []
90 | vel_ns = []
91 | vel_ew = []
92 | vel_ud = []
93 |
94 | # Get RWG file dt
95 | delta_t = get_dt(input_file)
96 |
97 | try:
98 | input_fp = open(input_file, 'r')
99 | for line in input_fp:
100 | line = line.strip()
101 | if line.startswith("#") or line.startswith("%"):
102 | # keep original header
103 | original_header.append(line)
104 | continue
105 | pieces = line.split()
106 | pieces = [float(piece) for piece in pieces]
107 | # Skip negative data points
108 | if pieces[0] < 0.0:
109 | continue
110 | # Add values to out arrays
111 | time.append(pieces[0])
112 | vel_ns.append(pieces[1])
113 | vel_ew.append(pieces[2])
114 | vel_ud.append(pieces[3])
115 | except IOError as e:
116 | print(e)
117 | sys.exit(1)
118 |
119 | # All done
120 | input_fp.close()
121 |
122 | # Convert to NumPy Arrays
123 | time = np.array(time)
124 | vel_ns = np.array(vel_ns)
125 | vel_ew = np.array(vel_ew)
126 | vel_ud = np.array(vel_ud)
127 |
128 | return original_header, delta_t, time, vel_ns, vel_ew, vel_ud
129 |
130 | def parse_rwg_header(header):
131 | """
132 | Parse the header found in the rwg file to extract useful metadata
133 | """
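    # Illustrative header lines this parser looks for (the exact text in
    # real RWG files may differ):
    #   "# Station: STA01"
    #   "# lon= -118.2500"
    #   "# lat= 34.0500"
    #   "# Column 2: N/S velocity (cm/s)"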
134 | params = {}
135 |
136 | for line in header:
137 | pieces = line.split()
138 | if line.find("Station:") > 0:
139 | params["station"] = pieces[2]
140 | continue
141 | if line.find("lon=") > 0:
142 | params["lon"] = float(pieces[2])
143 | continue
144 | if line.find("lat=") > 0:
145 | params["lat"] = float(pieces[2])
146 | continue
147 | if line.find("Column 2:") > 0:
148 | if line.find("(m/s)") > 0:
149 | params["unit"] = "m"
150 | elif line.find("(cm/s)") > 0:
151 | params["unit"] = "cm"
152 | continue
153 |
154 | # Return what we found
155 | return params
156 |
157 | def write_bbp_header(out_fp, file_type, file_unit, args, header):
158 | """
159 | This function writes the bbp header
160 | """
161 | orientation = args.orientation.strip()
162 | orientations = orientation.split(",")
163 | orientations = [val.strip() for val in orientations]
164 |
165 | # Write header
166 | out_fp.write("# Station= %s\n" % (args.station_name))
167 | out_fp.write("# time= %s\n" % (args.time))
168 | out_fp.write("# lon= %s\n" % (args.longitude))
169 | out_fp.write("# lat= %s\n" % (args.latitude))
170 | out_fp.write("# units= %s\n" % (file_unit))
171 | out_fp.write("# padding= 0\n")
172 | out_fp.write("# orientation= %s\n" % (orientation))
173 | out_fp.write("#\n")
174 | out_fp.write("# Data fields are TAB-separated\n")
175 | out_fp.write("# Column 1: Time (s)\n")
176 | out_fp.write("# Column 2: H1 component ground "
177 | "%s (+ is %s)\n" % (file_type, orientations[0]))
178 | out_fp.write("# Column 3: H2 component ground "
179 | "%s (+ is %s)\n" % (file_type, orientations[1]))
180 | out_fp.write("# Column 4: V component ground "
181 | "%s (+ is %s)\n" % (file_type, orientations[2]))
182 | out_fp.write("#\n")
183 |     # Now copy the original header lines
184 | for line in header:
185 | out_fp.write("#%s\n" % (line))
186 |
187 | def rwg2bbp_main():
188 | """
189 | Script to convert RWG files to BBP format
190 | """
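    # Example invocation (illustrative file names):
    #   rwg2bbp.py --azimuth 30.0 -d out_dir station01.rwg station01
    # which writes out_dir/station01.{dis,vel,acc}.bbp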
191 | parser = argparse.ArgumentParser(description="Converts an RWG "
192 | "file to BBP format, generating "
193 | "displacement, velocity and acceleration "
194 | "BBP files.")
195 | parser.add_argument("-s", "--station-name", dest="station_name",
196 | default="NoName",
197 | help="provides the name for this station")
198 | parser.add_argument("--lat", dest="latitude", type=float, default=0.0,
199 | help="provides the latitude for the station")
200 | parser.add_argument("--lon", dest="longitude", type=float, default=0.0,
201 | help="provides the longitude for the station")
202 | parser.add_argument("-t", "--time", default="00/00/00,0:0:0.0 UTC",
203 | help="provides timing information for this timeseries")
204 | parser.add_argument("-o", "--orientation", default="0,90,UP",
205 | dest="orientation",
206 | help="orientation, default: 0,90,UP")
207 | parser.add_argument("--azimuth", type=float, dest="azimuth",
208 | help="azimuth for rotation (degrees)")
209 |     parser.add_argument("input_file", help="RWG input timeseries")
210 | parser.add_argument("output_stem",
211 | help="output BBP filename stem without the "
212 | " .{dis,vel,acc}.bbp extensions")
213 | parser.add_argument("-d", dest="output_dir", default="",
214 | help="output directory for the BBP file")
215 | args = parser.parse_args()
216 |
217 | input_file = args.input_file
218 | output_file_dis = "%s.dis.bbp" % (os.path.join(args.output_dir,
219 | args.output_stem))
220 | output_file_vel = "%s.vel.bbp" % (os.path.join(args.output_dir,
221 | args.output_stem))
222 | output_file_acc = "%s.acc.bbp" % (os.path.join(args.output_dir,
223 | args.output_stem))
224 |
225 | # Check orientation
226 | orientation = args.orientation.split(",")
227 | if len(orientation) != 3:
228 | print("[ERROR]: Need to specify orientation for all 3 components!")
229 | sys.exit(-1)
230 | orientation[0] = float(orientation[0])
231 | orientation[1] = float(orientation[1])
232 | orientation[2] = orientation[2].lower()
233 | if orientation[2] != "up" and orientation[2] != "down":
234 | print("[ERROR]: Vertical orientation must be up or down!")
235 | sys.exit(-1)
236 |
237 | # Read RWG file
238 | print("[INFO]: Reading file %s ..." % (os.path.basename(input_file)))
239 | header, delta_t, times, vel_h1, vel_h2, vel_ver = read_rwg(input_file)
240 | rwg_params = parse_rwg_header(header)
241 |
242 | # Figure out what unit to use
243 | units = {"m": ["m", "m/s", "m/s^2"],
244 | "cm": ["cm", "cm/s", "cm/s^2"]}
245 | if "unit" in rwg_params:
246 | unit = rwg_params["unit"]
247 | else:
248 | # Defaults to meters
249 | unit = "m"
250 |
251 | # Override command-line defaults
252 | if "lat" in rwg_params:
253 | if args.latitude == 0.0:
254 | args.latitude = rwg_params["lat"]
255 | if "lon" in rwg_params:
256 | if args.longitude == 0.0:
257 | args.longitude = rwg_params["lon"]
258 | if "station" in rwg_params:
259 | if args.station_name == "NoName":
260 | args.station_name = rwg_params["station"]
261 |
262 | # Calculate displacement
263 | dis_h1 = integrate(vel_h1, delta_t)
264 | dis_h2 = integrate(vel_h2, delta_t)
265 | dis_ver = integrate(vel_ver, delta_t)
266 |
267 | # Calculate acceleration
268 | acc_h1 = derivative(vel_h1, delta_t)
269 | acc_h2 = derivative(vel_h2, delta_t)
270 | acc_ver = derivative(vel_ver, delta_t)
271 |
272 | # Create station data structures
273 | samples = vel_h1.size
274 |
275 | # samples, dt, data, acceleration, velocity, displacement
276 | signal_h1 = TimeseriesComponent(samples, delta_t, orientation[0],
277 | acc_h1, vel_h1, dis_h1)
278 | signal_h2 = TimeseriesComponent(samples, delta_t, orientation[1],
279 | acc_h2, vel_h2, dis_h2)
280 | signal_ver = TimeseriesComponent(samples, delta_t, orientation[2],
281 | acc_ver, vel_ver, dis_ver)
282 |
283 | station = [signal_h1, signal_h2, signal_ver]
284 |
285 | # Rotate timeseries if needed
286 | if args.azimuth is not None:
287 | print("[INFO]: Rotating timeseries - %f degrees" % (args.azimuth))
288 | station = rotate_timeseries(station, args.azimuth)
289 |
290 | # Update orientation after rotation so headers reflect any changes
291 | args.orientation = "%s,%s,%s" % (str(station[0].orientation),
292 | str(station[1].orientation),
293 | str(station[2].orientation))
294 |
295 | # Pull data back
296 | acc_h1 = station[0].acc.tolist()
297 | vel_h1 = station[0].vel.tolist()
298 | dis_h1 = station[0].dis.tolist()
299 | acc_h2 = station[1].acc.tolist()
300 | vel_h2 = station[1].vel.tolist()
301 | dis_h2 = station[1].dis.tolist()
302 | acc_ver = station[2].acc.tolist()
303 | vel_ver = station[2].vel.tolist()
304 | dis_ver = station[2].dis.tolist()
305 |
306 | # Write header
307 | o_dis_file = open(output_file_dis, 'w')
308 | o_vel_file = open(output_file_vel, 'w')
309 | o_acc_file = open(output_file_acc, 'w')
310 | write_bbp_header(o_dis_file, "displacement",
311 | units[unit][0], args, header)
312 | write_bbp_header(o_vel_file, "velocity",
313 | units[unit][1], args, header)
314 | write_bbp_header(o_acc_file, "acceleration",
315 | units[unit][2], args, header)
316 |
317 | # Write files
318 | for (time, disp_h1, disp_h2, disp_ver,
319 | velo_h1, velo_h2, velo_ver,
320 | accel_h1, accel_h2, accel_ver) in zip(times, dis_h1, dis_h2, dis_ver,
321 | vel_h1, vel_h2, vel_ver,
322 | acc_h1, acc_h2, acc_ver):
323 | o_dis_file.write("%1.9E %1.9E %1.9E %1.9E\n" %
324 | (time, disp_h1, disp_h2, disp_ver))
325 | o_vel_file.write("%1.9E %1.9E %1.9E %1.9E\n" %
326 | (time, velo_h1, velo_h2, velo_ver))
327 | o_acc_file.write("%1.9E %1.9E %1.9E %1.9E\n" %
328 | (time, accel_h1, accel_h2, accel_ver))
329 |
330 | # All done
331 | o_dis_file.close()
332 | o_vel_file.close()
333 | o_acc_file.close()
334 |
335 | # ============================ MAIN ==============================
336 | if __name__ == "__main__":
337 | rwg2bbp_main()
338 | # end of main program
339 |
--------------------------------------------------------------------------------
/ts_process/edge2bbp.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | BSD 3-Clause License
4 |
5 | Copyright (c) 2020, Southern California Earthquake Center
6 | All rights reserved.
7 |
8 | Redistribution and use in source and binary forms, with or without
9 | modification, are permitted provided that the following conditions are met:
10 |
11 | * Redistributions of source code must retain the above copyright notice, this
12 | list of conditions and the following disclaimer.
13 |
14 | * Redistributions in binary form must reproduce the above copyright notice,
15 | this list of conditions and the following disclaimer in the documentation
16 | and/or other materials provided with the distribution.
17 |
18 | * Neither the name of the copyright holder nor the names of its
19 | contributors may be used to endorse or promote products derived from
20 | this software without specific prior written permission.
21 |
22 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
23 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
24 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
25 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
26 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
27 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
28 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
29 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
30 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 |
33 | Utility to convert EDGE time history files to BBP format
34 | """
35 | from __future__ import division, print_function
36 |
37 | # Import Python modules
38 | import os
39 | import sys
40 | import argparse
41 | import numpy as np
42 | from ts_library import integrate, derivative, TimeseriesComponent, rotate_timeseries
43 |
44 | def get_dt(input_file):
45 | """
46 | Read EDGE file and return DT
47 | """
48 | val1 = None
49 | val2 = None
50 | file_dt = None
51 |
52 | # Figure out dt first, we need it later
53 | ifile = open(input_file)
54 | for line in ifile:
55 | # Skip comments
56 | if line.startswith("#") or line.startswith("%"):
57 | continue
58 | pieces = line.split()
59 | pieces = [float(piece) for piece in pieces]
60 | # Skip negative data points
61 | if pieces[0] < 0.0:
62 | continue
63 | if val1 is None:
64 | val1 = pieces[0]
65 | continue
66 | if val2 is None:
67 | val2 = pieces[0]
68 | break
69 | ifile.close()
70 |
71 | # Quit if cannot figure out dt
72 | if val1 is None or val2 is None:
73 | print("[ERROR]: Cannot determine dt from EDGE file! Exiting...")
74 | sys.exit(1)
75 |
76 | # Calculate dt
77 | file_dt = val2 - val1
78 |
79 | return file_dt
80 | # end get_dt
81 |
82 | def read_edge(input_file):
83 | """
84 | Reads the input file in edge format and returns arrays containing
85 | vel_ns, vel_ew, vel_ud components
86 | """
87 |
88 | original_header = []
89 | time = []
90 | vel_h1 = []
91 | vel_h2 = []
92 | vel_ud = []
93 |
94 |     # Get EDGE file dt
95 | delta_t = get_dt(input_file)
96 |
97 | try:
98 | input_fp = open(input_file, 'r')
99 | for line in input_fp:
100 | line = line.strip()
101 | if line.startswith("#") or line.startswith("%"):
102 | # keep original header
103 | original_header.append(line)
104 | continue
105 | pieces = line.split()
106 | pieces = [float(piece) for piece in pieces]
107 | # Skip negative data points
108 | if pieces[0] < 0.0:
109 | continue
110 | # Add values to out arrays
111 | time.append(pieces[0])
112 | vel_h1.append(pieces[1])
113 | vel_h2.append(pieces[2])
114 | vel_ud.append(pieces[3])
115 | except IOError as e:
116 | print(e)
117 | sys.exit(1)
118 |
119 | # All done
120 | input_fp.close()
121 |
122 | # Convert to NumPy Arrays
123 | time = np.array(time)
124 | vel_h1 = np.array(vel_h1)
125 | vel_h2 = np.array(vel_h2)
126 | vel_ud = np.array(vel_ud)
127 |
128 | return original_header, delta_t, time, vel_h1, vel_h2, vel_ud
129 |
130 | def parse_edge_header(header):
131 | """
132 |     Parse the header found in the edge file to extract useful metadata
133 | """
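    # EDGE headers appear to use the same "# key= value" layout this tool
    # writes out, e.g. (illustrative):
    #   "# Station= STA01"
    #   "# lon= -118.2500"
    #   "# units= cm/s"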
134 | params = {}
135 |
136 | for line in header:
137 | pieces = line.split()
138 | if line.find("Station=") > 0:
139 | params["station"] = pieces[2]
140 | continue
141 | if line.find("lon=") > 0:
142 | params["lon"] = float(pieces[2])
143 | continue
144 | if line.find("lat=") > 0:
145 | params["lat"] = float(pieces[2])
146 | continue
147 | if line.find("units=") > 0:
148 | if line.find("cm/s") > 0:
149 | params["unit"] = "cm"
150 | elif line.find("m/s") > 0:
151 | params["unit"] = "m"
152 | continue
153 |
154 | # Return what we found
155 | return params
156 |
157 | def write_bbp_header(out_fp, file_type, file_unit, args, header):
158 | """
159 | This function writes the bbp header
160 | """
161 | orientation = args.orientation.strip()
162 | orientations = orientation.split(",")
163 | orientations = [val.strip() for val in orientations]
164 |
165 | # Write header
166 | out_fp.write("# Station= %s\n" % (args.station_name))
167 | out_fp.write("# time= %s\n" % (args.time))
168 | out_fp.write("# lon= %s\n" % (args.longitude))
169 | out_fp.write("# lat= %s\n" % (args.latitude))
170 | out_fp.write("# units= %s\n" % (file_unit))
171 | out_fp.write("# padding= 0\n")
172 | out_fp.write("# orientation= %s\n" % (orientation))
173 | out_fp.write("#\n")
174 | out_fp.write("# Data fields are TAB-separated\n")
175 | out_fp.write("# Column 1: Time (s)\n")
176 | out_fp.write("# Column 2: H1 component ground "
177 | "%s (+ is %s)\n" % (file_type, orientations[0]))
178 | out_fp.write("# Column 3: H2 component ground "
179 | "%s (+ is %s)\n" % (file_type, orientations[1]))
180 | out_fp.write("# Column 4: V component ground "
181 | "%s (+ is %s)\n" % (file_type, orientations[2]))
182 | out_fp.write("#\n")
183 |     # Now copy the original header lines
184 | for line in header:
185 | out_fp.write("#%s\n" % (line))
186 |
187 | def edge2bbp_main():
188 | """
189 |     Script to convert EDGE files to BBP format
190 | """
191 | parser = argparse.ArgumentParser(description="Converts an EDGE "
192 | "file to BBP format, generating "
193 | "displacement, velocity and acceleration "
194 | "BBP files.")
195 | parser.add_argument("-s", "--station-name", dest="station_name",
196 | default="NoName",
197 | help="provides the name for this station")
198 | parser.add_argument("--lat", dest="latitude", type=float, default=0.0,
199 | help="provides the latitude for the station")
200 | parser.add_argument("--lon", dest="longitude", type=float, default=0.0,
201 | help="provides the longitude for the station")
202 | parser.add_argument("-t", "--time", default="00/00/00,0:0:0.0 UTC",
203 | help="provides timing information for this timeseries")
204 | parser.add_argument("-o", "--orientation", default="0,90,UP",
205 | dest="orientation",
206 | help="orientation, default: 0,90,UP")
207 | parser.add_argument("--azimuth", type=float, dest="azimuth",
208 | help="azimuth for rotation (degrees)")
209 |     parser.add_argument("input_file", help="EDGE input timeseries")
210 | parser.add_argument("output_stem",
211 | help="output BBP filename stem without the "
212 | " .{dis,vel,acc}.bbp extensions")
213 | parser.add_argument("-d", dest="output_dir", default="",
214 | help="output directory for the BBP file")
215 | args = parser.parse_args()
216 |
217 | input_file = args.input_file
218 | output_file_dis = "%s.dis.bbp" % (os.path.join(args.output_dir,
219 | args.output_stem))
220 | output_file_vel = "%s.vel.bbp" % (os.path.join(args.output_dir,
221 | args.output_stem))
222 | output_file_acc = "%s.acc.bbp" % (os.path.join(args.output_dir,
223 | args.output_stem))
224 |
225 | # Check orientation
226 | orientation = args.orientation.split(",")
227 | if len(orientation) != 3:
228 | print("[ERROR]: Need to specify orientation for all 3 components!")
229 | sys.exit(-1)
230 | orientation[0] = float(orientation[0])
231 | orientation[1] = float(orientation[1])
232 | orientation[2] = orientation[2].lower()
233 | if orientation[2] != "up" and orientation[2] != "down":
234 | print("[ERROR]: Vertical orientation must be up or down!")
235 | sys.exit(-1)
236 |
237 |     # Read EDGE file
238 | print("[INFO]: Reading file %s ..." % (os.path.basename(input_file)))
239 | header, delta_t, times, vel_h1, vel_h2, vel_ver = read_edge(input_file)
240 | edge_params = parse_edge_header(header)
241 |
242 | # Figure out what unit to use
243 | units = {"m": ["m", "m/s", "m/s^2"],
244 | "cm": ["cm", "cm/s", "cm/s^2"]}
245 | if "unit" in edge_params:
246 | unit = edge_params["unit"]
247 | else:
248 | # Defaults to meters
249 | unit = "m"
250 |
251 | # Override command-line defaults
252 | if "lat" in edge_params:
253 | if args.latitude == 0.0:
254 | args.latitude = edge_params["lat"]
255 | if "lon" in edge_params:
256 | if args.longitude == 0.0:
257 | args.longitude = edge_params["lon"]
258 | if "station" in edge_params:
259 | if args.station_name == "NoName":
260 | args.station_name = edge_params["station"]
261 |
262 | # Calculate displacement
263 | dis_h1 = integrate(vel_h1, delta_t)
264 | dis_h2 = integrate(vel_h2, delta_t)
265 | dis_ver = integrate(vel_ver, delta_t)
266 |
267 | # Calculate acceleration
268 | acc_h1 = derivative(vel_h1, delta_t)
269 | acc_h2 = derivative(vel_h2, delta_t)
270 | acc_ver = derivative(vel_ver, delta_t)
271 |
272 | # Create station data structures
273 | samples = vel_h1.size
274 |
275 | # samples, dt, data, acceleration, velocity, displacement
276 | signal_h1 = TimeseriesComponent(samples, delta_t, orientation[0],
277 | acc_h1, vel_h1, dis_h1)
278 | signal_h2 = TimeseriesComponent(samples, delta_t, orientation[1],
279 | acc_h2, vel_h2, dis_h2)
280 | signal_ver = TimeseriesComponent(samples, delta_t, orientation[2],
281 | acc_ver, vel_ver, dis_ver)
282 |
283 | station = [signal_h1, signal_h2, signal_ver]
284 |
285 | # Rotate timeseries if needed
286 | if args.azimuth is not None:
287 | print("[INFO]: Rotating timeseries - %f degrees" % (args.azimuth))
288 | station = rotate_timeseries(station, args.azimuth)
289 |
290 | # Update orientation after rotation so headers reflect any changes
291 | args.orientation = "%s,%s,%s" % (str(station[0].orientation),
292 | str(station[1].orientation),
293 | str(station[2].orientation))
294 |
295 | # Pull data back
296 | acc_h1 = station[0].acc.tolist()
297 | vel_h1 = station[0].vel.tolist()
298 | dis_h1 = station[0].dis.tolist()
299 | acc_h2 = station[1].acc.tolist()
300 | vel_h2 = station[1].vel.tolist()
301 | dis_h2 = station[1].dis.tolist()
302 | acc_ver = station[2].acc.tolist()
303 | vel_ver = station[2].vel.tolist()
304 | dis_ver = station[2].dis.tolist()
305 |
306 | # Write header
307 | o_dis_file = open(output_file_dis, 'w')
308 | o_vel_file = open(output_file_vel, 'w')
309 | o_acc_file = open(output_file_acc, 'w')
310 | write_bbp_header(o_dis_file, "displacement",
311 | units[unit][0], args, header)
312 | write_bbp_header(o_vel_file, "velocity",
313 | units[unit][1], args, header)
314 | write_bbp_header(o_acc_file, "acceleration",
315 | units[unit][2], args, header)
316 |
317 | # Write files
318 | for (time, disp_h1, disp_h2, disp_ver,
319 | velo_h1, velo_h2, velo_ver,
320 | accel_h1, accel_h2, accel_ver) in zip(times, dis_h1, dis_h2, dis_ver,
321 | vel_h1, vel_h2, vel_ver,
322 | acc_h1, acc_h2, acc_ver):
323 | o_dis_file.write("%1.9E %1.9E %1.9E %1.9E\n" %
324 | (time, disp_h1, disp_h2, disp_ver))
325 | o_vel_file.write("%1.9E %1.9E %1.9E %1.9E\n" %
326 | (time, velo_h1, velo_h2, velo_ver))
327 | o_acc_file.write("%1.9E %1.9E %1.9E %1.9E\n" %
328 | (time, accel_h1, accel_h2, accel_ver))
329 |
330 | # All done
331 | o_dis_file.close()
332 | o_vel_file.close()
333 | o_acc_file.close()
334 |
335 | # ============================ MAIN ==============================
336 | if __name__ == "__main__":
337 | edge2bbp_main()
338 | # end of main program
339 |
--------------------------------------------------------------------------------
/ts_process/her2bbp.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | BSD 3-Clause License
4 |
5 | Copyright (c) 2020, Southern California Earthquake Center
6 | All rights reserved.
7 |
8 | Redistribution and use in source and binary forms, with or without
9 | modification, are permitted provided that the following conditions are met:
10 |
11 | * Redistributions of source code must retain the above copyright notice, this
12 | list of conditions and the following disclaimer.
13 |
14 | * Redistributions in binary form must reproduce the above copyright notice,
15 | this list of conditions and the following disclaimer in the documentation
16 | and/or other materials provided with the distribution.
17 |
18 | * Neither the name of the copyright holder nor the names of its
19 | contributors may be used to endorse or promote products derived from
20 | this software without specific prior written permission.
21 |
22 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
23 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
24 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
25 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
26 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
27 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
28 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
29 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
30 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 |
33 | Utility to convert Hercules .her time history files to BBP format
34 | """
35 | from __future__ import division, print_function
36 |
37 | # Import Python modules
38 | import os
39 | import sys
40 | import argparse
41 | import numpy as np
42 | from ts_library import TimeseriesComponent, rotate_timeseries
43 |
44 | def parse_her_header(filename):
45 | """
46 | This function parses the her file header
47 | to try to figure out what units to use
48 | """
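    # The unit is read from the 3rd whitespace-separated token of an
    # 11-token "#" header line (the first displacement column label),
    # which is expected to contain "(m)" or "(cm)".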
49 | # Default unit is meters
50 | unit = "m"
51 |
52 | try:
53 | input_file = open(filename, 'r')
54 | for line in input_file:
55 | line = line.strip()
56 | if line.startswith("#"):
57 | # Header line, look into it
58 | pieces = line.split()
59 | if len(pieces) != 11:
60 | # Not the line we are looking for
61 | continue
62 | if pieces[2].find("(m)") > 0:
63 | # It's meters!
64 | unit = "m"
65 | break
66 | if pieces[2].find("(cm)") > 0:
67 | # It's cm!
68 | unit = "cm"
69 | break
70 | continue
71 | except IOError:
72 | print("[ERROR]: Unable to read file: %s" % (filename))
73 | sys.exit(1)
74 | input_file.close()
75 |
76 | # Return units
77 | return unit
78 |
79 | def write_bbp_header(out_fp, file_type, file_unit, args):
80 | """
81 | This function writes the bbp header
82 | """
83 | orientation = args.orientation.strip()
84 | orientations = orientation.split(",")
85 | orientations = [val.strip() for val in orientations]
86 |
87 | # Write header
88 | out_fp.write("# Station= %s\n" % (args.station_name))
89 | out_fp.write("# time= %s\n" % (args.time))
90 | out_fp.write("# lon= %s\n" % (args.longitude))
91 | out_fp.write("# lat= %s\n" % (args.latitude))
92 | out_fp.write("# units= %s\n" % (file_unit))
93 | out_fp.write("# padding= 0\n")
94 | out_fp.write("# orientation= %s\n" % (orientation))
95 | out_fp.write("#\n")
96 | out_fp.write("# Data fields are TAB-separated\n")
97 | out_fp.write("# Column 1: Time (s)\n")
98 | out_fp.write("# Column 2: H1 component ground "
99 | "%s (+ is %s)\n" % (file_type, orientations[0]))
100 | out_fp.write("# Column 3: H2 component ground "
101 | "%s (+ is %s)\n" % (file_type, orientations[1]))
102 | out_fp.write("# Column 4: V component ground "
103 | "%s (+ is %s)\n" % (file_type, orientations[2]))
104 | out_fp.write("#\n")
105 |
106 | def read_hercules(input_file):
107 | """
108 | Reads the input hercules file and returns the
109 | data along with parsed header lines
110 | """
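    # Each data line holds 10 columns: time, then displacement, velocity,
    # and acceleration for the two horizontal components and the vertical
    # (see the column indices used below).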
111 | times = []
112 | acc_h1 = []
113 | vel_h1 = []
114 | dis_h1 = []
115 | acc_h2 = []
116 | vel_h2 = []
117 | dis_h2 = []
118 | acc_ver = []
119 | vel_ver = []
120 | dis_ver = []
121 | dis_header = []
122 | vel_header = []
123 | acc_header = []
124 |
125 | try:
126 | input_fp = open(input_file, 'r')
127 | for line in input_fp:
128 | line = line.strip()
129 | # Skip comments
130 | if line.startswith("#") or line.startswith("%"):
131 | pieces = line.split()[1:]
132 | # Write header
133 | if len(pieces) >= 10:
134 | dis_header.append("# her header: # %s %s %s %s\n" %
135 | (pieces[0], pieces[1], pieces[2], pieces[3]))
136 | vel_header.append("# her header: # %s %s %s %s\n" %
137 | (pieces[0], pieces[4], pieces[5], pieces[6]))
138 | acc_header.append("# her header: # %s %s %s %s\n" %
139 | (pieces[0], pieces[7], pieces[8], pieces[9]))
140 | else:
141 | dis_header.append("# her header: %s\n" % (line))
142 | continue
143 | pieces = line.split()
144 | pieces = [float(piece) for piece in pieces]
145 |             # Parse the timeseries values. Please note that Hercules files have
146 |             # the vertical component positive pointing down, so we have to flip it
147 |             # here to match the BBP format, in which the vertical component points up
148 | times.append(pieces[0])
149 | dis_h1.append(pieces[1])
150 | dis_h2.append(pieces[2])
151 | dis_ver.append(-1 * pieces[3])
152 | vel_h1.append(pieces[4])
153 | vel_h2.append(pieces[5])
154 | vel_ver.append(-1 * pieces[6])
155 | acc_h1.append(pieces[7])
156 | acc_h2.append(pieces[8])
157 | acc_ver.append(-1 * pieces[9])
158 | except IOError as e:
159 | print(e)
160 | sys.exit(-1)
161 |
162 | # All done
163 | input_fp.close()
164 |
165 | # Convert to NumPy Arrays
166 | times = np.array(times)
167 | vel_h1 = np.array(vel_h1)
168 | vel_h2 = np.array(vel_h2)
169 | vel_ver = np.array(vel_ver)
170 | acc_h1 = np.array(acc_h1)
171 | acc_h2 = np.array(acc_h2)
172 | acc_ver = np.array(acc_ver)
173 | dis_h1 = np.array(dis_h1)
174 | dis_h2 = np.array(dis_h2)
175 | dis_ver = np.array(dis_ver)
176 |
177 | delta_t = times[1] - times[0]
178 |
179 | # Group headers
180 | headers = [dis_header, vel_header, acc_header]
181 |
182 | return (headers, delta_t, times,
183 | acc_h1, acc_h2, acc_ver,
184 | vel_h1, vel_h2, vel_ver,
185 | dis_h1, dis_h2, dis_ver)
186 |
187 | def her2bbp_main():
188 | """
189 | Main function for her to bbp converter
190 | """
191 |     parser = argparse.ArgumentParser(description="Converts a Hercules .her "
192 | "file to BBP format, generating "
193 | "displacement, velocity and acceleration "
194 | "BBP files.")
195 | parser.add_argument("-s", "--station-name", dest="station_name",
196 | default="NoName",
197 | help="provides the name for this station")
198 | parser.add_argument("--lat", dest="latitude", type=float, default=0.0,
199 | help="provides the latitude for the station")
200 | parser.add_argument("--lon", dest="longitude", type=float, default=0.0,
201 | help="provides the longitude for the station")
202 | parser.add_argument("-t", "--time", default="00/00/00,0:0:0.0 UTC",
203 | help="provides timing information for this timeseries")
204 | parser.add_argument("-o", "--orientation", default="0,90,UP",
205 | dest="orientation",
206 | help="orientation, default: 0,90,UP")
207 | parser.add_argument("--azimuth", type=float, dest="azimuth",
208 | help="azimuth for rotation (degrees)")
209 | parser.add_argument("input_file", help="Hercules input timeseries")
210 | parser.add_argument("output_stem",
211 | help="output BBP filename stem without the "
212 | " .{dis,vel,acc}.bbp extensions")
213 | parser.add_argument("-d", dest="output_dir", default="",
214 | help="output directory for the BBP file")
215 | args = parser.parse_args()
216 |
217 | # Check orientation
218 | orientation = args.orientation.split(",")
219 | if len(orientation) != 3:
220 | print("[ERROR]: Need to specify orientation for all 3 components!")
221 | sys.exit(-1)
222 | orientation[0] = float(orientation[0])
223 | orientation[1] = float(orientation[1])
224 | orientation[2] = orientation[2].lower()
225 | if orientation[2] != "up" and orientation[2] != "down":
226 | print("[ERROR]: Vertical orientation must be up or down!")
227 | sys.exit(-1)
228 |
229 | input_file = args.input_file
230 | output_file_dis = "%s.dis.bbp" % (os.path.join(args.output_dir,
231 | args.output_stem))
232 | output_file_vel = "%s.vel.bbp" % (os.path.join(args.output_dir,
233 | args.output_stem))
234 | output_file_acc = "%s.acc.bbp" % (os.path.join(args.output_dir,
235 | args.output_stem))
236 |
237 | # Try to get the units used in the her file
238 | units = {"m": ["m", "m/s", "m/s^2"],
239 | "cm": ["cm", "cm/s", "cm/s^2"]}
240 | unit = parse_her_header(input_file)
241 |
242 |     # Convert from her to BBP format
243 | print("[INFO]: Reading file %s ..." % (os.path.basename(input_file)))
244 |
245 | (headers, delta_t, times,
246 | acc_h1, acc_h2, acc_ver,
247 | vel_h1, vel_h2, vel_ver,
248 | dis_h1, dis_h2, dis_ver) = read_hercules(input_file)
249 |
250 | # Create station data structures
251 | samples = vel_h1.size
252 |
253 | # samples, dt, data, acceleration, velocity, displacement
254 | signal_h1 = TimeseriesComponent(samples, delta_t, orientation[0],
255 | acc_h1, vel_h1, dis_h1)
256 | signal_h2 = TimeseriesComponent(samples, delta_t, orientation[1],
257 | acc_h2, vel_h2, dis_h2)
258 | signal_ver = TimeseriesComponent(samples, delta_t, orientation[2],
259 | acc_ver, vel_ver, dis_ver)
260 | station = [signal_h1, signal_h2, signal_ver]
261 |
262 | # Rotate timeseries if needed
263 | if args.azimuth is not None:
264 | print("[INFO]: Rotating timeseries - %f degrees" % (args.azimuth))
265 | station = rotate_timeseries(station, args.azimuth)
266 |
267 | # Update orientation after rotation so headers reflect any changes
268 | args.orientation = "%s,%s,%s" % (str(station[0].orientation),
269 | str(station[1].orientation),
270 | str(station[2].orientation))
271 |
272 | # Pull data back
273 | acc_h1 = station[0].acc.tolist()
274 | vel_h1 = station[0].vel.tolist()
275 | dis_h1 = station[0].dis.tolist()
276 | acc_h2 = station[1].acc.tolist()
277 | vel_h2 = station[1].vel.tolist()
278 | dis_h2 = station[1].dis.tolist()
279 | acc_ver = station[2].acc.tolist()
280 | vel_ver = station[2].vel.tolist()
281 | dis_ver = station[2].dis.tolist()
282 |
283 | o_dis_file = open(output_file_dis, 'w')
284 | o_vel_file = open(output_file_vel, 'w')
285 | o_acc_file = open(output_file_acc, 'w')
286 | write_bbp_header(o_dis_file, "displacement", units[unit][0], args)
287 | write_bbp_header(o_vel_file, "velocity", units[unit][1], args)
288 | write_bbp_header(o_acc_file, "acceleration", units[unit][2], args)
289 |
290 | # Write headers from original Hercules file
291 | dis_header = headers[0]
292 | vel_header = headers[1]
293 | acc_header = headers[2]
294 |
295 | for line in dis_header:
296 | o_dis_file.write(line)
297 | for line in vel_header:
298 | o_vel_file.write(line)
299 | for line in acc_header:
300 | o_acc_file.write(line)
301 |
302 | # Write files
303 | for (time, disp_h1, disp_h2, disp_ver,
304 | velo_h1, velo_h2, velo_ver,
305 | accel_h1, accel_h2, accel_ver) in zip(times, dis_h1, dis_h2, dis_ver,
306 | vel_h1, vel_h2, vel_ver,
307 | acc_h1, acc_h2, acc_ver):
308 | o_dis_file.write("%1.9E %1.9E %1.9E %1.9E\n" %
309 | (time, disp_h1, disp_h2, disp_ver))
310 | o_vel_file.write("%1.9E %1.9E %1.9E %1.9E\n" %
311 | (time, velo_h1, velo_h2, velo_ver))
312 | o_acc_file.write("%1.9E %1.9E %1.9E %1.9E\n" %
313 | (time, accel_h1, accel_h2, accel_ver))
314 |
315 | # All done
316 | o_dis_file.close()
317 | o_vel_file.close()
318 | o_acc_file.close()
319 |
320 | # ============================ MAIN ==============================
321 | if __name__ == "__main__":
322 | her2bbp_main()
323 | # end of main program
324 |
--------------------------------------------------------------------------------
/ts_process/compare_timeseries.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | BSD 3-Clause License
4 |
5 | Copyright (c) 2020, Southern California Earthquake Center
6 | All rights reserved.
7 |
8 | Redistribution and use in source and binary forms, with or without
9 | modification, are permitted provided that the following conditions are met:
10 |
11 | * Redistributions of source code must retain the above copyright notice, this
12 | list of conditions and the following disclaimer.
13 |
14 | * Redistributions in binary form must reproduce the above copyright notice,
15 | this list of conditions and the following disclaimer in the documentation
16 | and/or other materials provided with the distribution.
17 |
18 | * Neither the name of the copyright holder nor the names of its
19 | contributors may be used to endorse or promote products derived from
20 | this software without specific prior written permission.
21 |
22 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
23 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
24 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
25 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
26 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
27 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
28 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
29 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
30 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 |
33 | This program plots a simple comparison of multiple timeseries files
34 | """
35 | from __future__ import division, print_function
36 |
37 | # Import Python modules
38 | import os
39 | import sys
40 | import argparse
41 | import matplotlib as mpl
42 | if mpl.get_backend() != 'agg':
43 | mpl.use('Agg') # Disables use of Tk/X11
44 | from file_utilities import read_file
45 | from ts_library import calculate_distance, filter_timeseries
46 | from file_utilities import write_bbp
47 | from ts_plot_library import comparison_plot
48 |
49 | def parse_arguments():
50 | """
51 | This function takes care of parsing the command-line arguments and
52 | asking the user for any missing parameters that we need
53 | """
54 | parser = argparse.ArgumentParser(description="Creates comparison plots of "
55 |                                      "a number of timeseries files.")
56 | parser.add_argument("-o", "--output", dest="outfile", required=True,
57 | help="output png file")
58 | parser.add_argument("--outdir", dest="outdir", required=False,
59 | help="output directory for processed timeseries")
60 | parser.add_argument("--prefix", dest="prefix", required=False,
61 | help="prefix for processed timeseries output files")
62 | parser.add_argument("--epicenter-lat", dest="epicenter_lat", type=float,
63 | help="earthquake epicenter latitude")
64 | parser.add_argument("--epicenter-lon", dest="epicenter_lon", type=float,
65 | help="earthquake epicenter longitude")
66 | parser.add_argument("--st-lat", "--station-latitude", dest="st_lat",
67 | type=float, help="station latitude")
68 | parser.add_argument("--st-lon", "--station-longitude", dest="st_lon",
69 | type=float, help="station longitude")
70 | parser.add_argument("-s", "--station-name", "--station", dest="station",
71 | help="station name")
72 | parser.add_argument("--station-list", dest="station_list",
73 | help="station list with latitude and longitude")
74 | parser.add_argument("--xmin", dest="xmin", type=float,
75 | help="xmin for plotting timeseries")
76 | parser.add_argument("--xmax", dest="xmax", type=float,
77 | help="xmax for plotting timeseries")
78 | parser.add_argument("--xfmin", dest="xfmin", type=float,
79 | help="F-min for plotting FAS")
80 | parser.add_argument("--xfmax", dest="xfmax", type=float,
81 | help="F-max for plotting FAS")
82 | parser.add_argument("--tmin", dest="tmin", type=float,
83 | help="T-min for plotting response spectra")
84 | parser.add_argument("--tmax", dest="tmax", type=float,
85 | help="T-max for plotting response spectra")
86 | parser.add_argument("--lowf", dest="lowf", type=float,
87 | help="lowest frequency for filtering")
88 | parser.add_argument("--highf", dest="highf", type=float,
89 | help="highest frequency for filtering")
90 | parser.add_argument("-c", "--cut", dest="cut",
91 | default=False, action='store_true',
92 | help="Cut seismogram for plotting")
93 | parser.add_argument("--acc", dest="acc_plots",
94 | default=False, action='store_true',
95 | help="Generate acceleration plots instead of velocity")
96 | parser.add_argument("--obs", dest="obs_file",
97 | help="input file containing recorded data")
98 | parser.add_argument('input_files', nargs='*')
99 | args = parser.parse_args()
100 |
101 | if args.st_lat is not None and args.st_lon is not None:
102 | args.st_loc = [args.st_lat, args.st_lon]
103 | else:
104 | args.st_loc = None
105 | if args.epicenter_lat is not None and args.epicenter_lon is not None:
106 | args.epicenter = [args.epicenter_lat, args.epicenter_lon]
107 | else:
108 | args.epicenter = None
109 | if args.xmin is None:
110 | args.xmin = 0.0
111 | if args.xmax is None:
112 | args.xmax = 30.0
113 | if args.xfmin is None:
114 | args.xfmin = 0.1
115 | if args.xfmax is None:
116 | args.xfmax = 5.0
117 | if args.tmin is None:
118 | args.tmin = 0.1
119 | if args.tmax is None:
120 | args.tmax = 10
121 | if args.xmin >= args.xmax and args.xmax != 0:
122 | print("[ERROR]: xmin must be smaller than xmax!")
123 | sys.exit(-1)
124 | if args.xfmin >= args.xfmax:
125 | print("[ERROR]: xfmin must be smaller than xfmax!")
126 | sys.exit(-1)
127 | if args.tmin >= args.tmax:
128 | print("[ERROR]: tmin must be smaller than tmax!")
129 | sys.exit(-1)
130 | if args.lowf is not None and args.highf is not None:
131 | if args.lowf >= args.highf:
132 | print("[ERROR]: low-f: %f - high-f: %f" % (args.lowf, args.highf))
133 | print("[ERROR]: low-f must be smaller than high-f!")
134 | sys.exit(-1)
135 | if args.prefix is not None and args.outdir is not None:
136 | args.save_timeseries = True
137 | else:
138 | args.save_timeseries = False
139 |
140 | return args
141 |
142 | def process_for_plotting(obs_filename, obs_station,
143 | filenames, stations, args):
144 | """
145 | Process stations before plotting as indicated by the user
146 |
147 | Basically applies a low/high/band pass butterworth filter
148 |     to all timeseries. The obs_station, if present, is filtered
149 |     separately from the simulated timeseries, using the same
150 |     corner frequencies, and is optionally written to disk.
151 | """
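    # lowf/highf select the Butterworth filter type applied below: both set
    # gives a bandpass, only highf gives a lowpass, only lowf gives a highpass.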
152 | # Filtering data
153 | lowf = args.lowf
154 | highf = args.highf
155 |
156 | if lowf is None and highf is None:
157 | # Only if needed!
158 | return obs_station, stations
159 |
160 | # Handle observation data first
161 | if obs_station is not None:
162 | params = {}
163 | if lowf is not None and highf is not None:
164 | btype = 'bandpass'
165 | params['hp'] = lowf
166 | params['lp'] = highf
167 | elif lowf is None and highf is not None:
168 | btype = 'lowpass'
169 | params['lp'] = highf
170 | lowf = 0.0
171 | else:
172 | btype = "highpass"
173 | params['hp'] = lowf
174 | highf = 0.0
175 | print("[PROCESSING]: Filter Obs: butter %s %1.1f %1.1f" % (btype,
176 | lowf, highf))
177 | for i in range(0, 3):
178 | obs_station[i] = filter_timeseries(obs_station[i],
179 | family='butter', btype=btype,
180 | fmin=lowf, fmax=highf, N=4)
181 |         # Save timeseries if needed
182 | if args.save_timeseries:
183 | # Write processed files
184 | obs_file_out = os.path.join(args.outdir,
185 | "%s%s" %
186 | (args.prefix,
187 | os.path.basename(obs_filename)))
188 | write_bbp(obs_filename, obs_file_out, obs_station, params)
189 |
190 | # Now filter simulated data
191 | lowf = args.lowf
192 | highf = args.highf
193 | params = {}
194 | if lowf is not None and highf is not None:
195 | btype = 'bandpass'
196 | params['hp'] = lowf
197 | params['lp'] = highf
198 | elif lowf is None and highf is not None:
199 | btype = "lowpass"
200 | params['lp'] = highf
201 | lowf = 0.0
202 | else:
203 | btype = "highpass"
204 | params['hp'] = lowf
205 | highf = 0.0
206 |
207 | print("[PROCESSING]: Filter: butter %s %1.1f %1.1f" % (btype,
208 | lowf, highf))
209 | for input_file, station in zip(filenames, stations):
210 | for i in range(0, 3):
211 | station[i] = filter_timeseries(station[i],
212 | family='butter', btype=btype,
213 | fmin=lowf, fmax=highf, N=4)
214 |         # Save timeseries if needed
215 | if args.save_timeseries:
216 | # Write processed files
217 | out_file = os.path.join(args.outdir,
218 | "%s%s" %
219 | (args.prefix,
220 | os.path.basename(input_file)))
221 | write_bbp(input_file, out_file, station, params)
222 |
223 | return obs_station, stations
224 |
225 | def compare_timeseries_main():
226 | """
227 | Main function for compare_timeseries
228 | """
229 | # Parse command-line options
230 | args = parse_arguments()
231 | # Copy inputs
232 | output_file = args.outfile
233 | obs_filename = args.obs_file
234 | filenames = args.input_files
235 |
236 | # Figure out filtering frequencies, if any
237 | if args.lowf is None and args.highf is None:
238 | freqs = "All"
239 | else:
240 | if args.lowf is None:
241 | freqs = "0.0-%1.1fHz" % (args.highf)
242 | elif args.highf is None:
243 | freqs = "%1.1fHz-" % (args.lowf)
244 | else:
245 | freqs = "%1.1f-%1.1fHz" % (args.lowf, args.highf)
246 |
247 | # Set plot title
248 | plot_title = None
249 | if args.station is not None:
250 | plot_title = "%s, Freq: %s" % (args.station, freqs)
251 |
252 | # Set title if station name provided and epicenter are provided
253 | if args.station is not None and args.epicenter is not None:
254 | # Calculate distance if locations are provided
255 | if args.st_loc is None and args.station_list is not None:
256 | # Find station coordinates from station list
257 | st_file = open(args.station_list, 'r')
258 | for line in st_file:
259 | line = line.strip()
260 | if not line:
261 | # skip blank lines
262 | continue
263 | if line.startswith("#") or line.startswith("%"):
264 | # Skip comments
265 | continue
266 | pieces = line.split()
267 | if len(pieces) < 3:
268 | # Skip line with insufficient tokens
269 | continue
270 | if pieces[2].lower() != args.station.lower():
271 | # Not a match
272 | continue
273 | # Match!
274 | args.st_loc = [float(pieces[1]), float(pieces[0])]
275 | break
276 | # All done processing station file
277 | st_file.close()
278 |
279 | if args.st_loc is not None:
280 | # Calculate distance here
281 | distance = calculate_distance(args.epicenter, args.st_loc)
282 | # Set plot title
283 | plot_title = "%s, Dist: ~%dkm, Freq: %s" % (args.station,
284 | distance, freqs)
285 |
286 | # Read observation data, if provided
287 | if obs_filename is not None:
288 | obs_station = read_file(obs_filename)
289 | obs_basename = os.path.basename(obs_filename)
290 | else:
291 | obs_station = None
292 | obs_basename = None
293 |
294 | # Read data
295 | stations = [read_file(filename) for filename in filenames]
296 | basenames = [os.path.basename(filename) for filename in filenames]
297 |
298 | # Perform any processing requested by the user
299 | obs_station, stations = process_for_plotting(obs_filename, obs_station,
300 | filenames, stations, args)
301 |
302 | # Combine observations and simulations in a single list
303 | if obs_station is not None:
304 | all_stations = [obs_station]
305 | all_stations.extend(stations)
306 | all_filenames = [obs_basename]
307 | all_filenames.extend(basenames)
308 | else:
309 | all_stations = stations
310 | all_filenames = basenames
311 |
312 | # Create plot
313 | comparison_plot(args, all_filenames, all_stations,
314 | output_file, plot_title=plot_title)
315 |
316 | # ============================ MAIN ==============================
317 | if __name__ == "__main__":
318 | compare_timeseries_main()
319 | # end of main program
320 |
--------------------------------------------------------------------------------
/ts_process/process_timeseries.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | BSD 3-Clause License
4 |
5 | Copyright (c) 2020, Southern California Earthquake Center
6 | All rights reserved.
7 |
8 | Redistribution and use in source and binary forms, with or without
9 | modification, are permitted provided that the following conditions are met:
10 |
11 | * Redistributions of source code must retain the above copyright notice, this
12 | list of conditions and the following disclaimer.
13 |
14 | * Redistributions in binary form must reproduce the above copyright notice,
15 | this list of conditions and the following disclaimer in the documentation
16 | and/or other materials provided with the distribution.
17 |
18 | * Neither the name of the copyright holder nor the names of its
19 | contributors may be used to endorse or promote products derived from
20 | this software without specific prior written permission.
21 |
22 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
23 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
24 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
25 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
26 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
27 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
28 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
29 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
30 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 |
33 | This program reads input seismograms and processes their signals.
34 | """
35 | from __future__ import division, print_function
36 |
37 | # Import Python modules
38 | import os
39 | import sys
40 | import argparse
41 |
42 | from file_utilities import write_bbp, read_stamp, read_files
43 | from ts_library import process_station_dt, check_station_data, seism_cutting, seism_appendzeros
44 |
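# Illustrative invocation (hypothetical file names, only to sketch how the
# command-line options defined in parse_arguments() are typically combined):
#   process_timeseries.py --output-dir processed --dt 0.025 \
#       --eq-time 13:26:04.0 --leading 10.0 \
#       --obs obs.vel.bbp sim1.vel.bbp sim2.vel.bbp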
45 | def synchronize_all_stations(obs_data, stations, stamp,
46 | eqtimestamp, leading, sync_end_time):
47 | """
48 |     synchronize the starting time and ending time of data arrays
49 | obs_data = recorded data (optional); stations = simulation signal(s)
50 | """
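    # Conventions used below: 'stamp' and 'eqtimestamp' are [hour, minute,
    # second] lists converted to seconds since midnight, and the simulation
    # is assumed to start 'leading' seconds before the earthquake origin
    # time (sim_start = eq_time - leading).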
51 | # If we have a recorded data time stamp
52 | if stamp is not None and obs_data is not None:
53 | start = stamp[0]*3600 + stamp[1]*60 + stamp[2]
54 | eq_time = eqtimestamp[0]*3600 + eqtimestamp[1]*60 + eqtimestamp[2]
55 | sim_start = eq_time - leading
56 |
57 | for i in range(0, 3):
58 | # synchronize the start time
59 | if start < sim_start:
60 | # data time < sim time < earthquake time; cutting data array
61 | obs_data[i] = seism_cutting('front', (sim_start - start),
62 | 20, obs_data[i])
63 | elif start > eq_time:
64 | # sim time < earthquake time < data time; adding zeros in front
65 | obs_data[i] = seism_appendzeros('front', (start - eq_time),
66 | 20, obs_data[i])
67 | for station in stations:
68 | station[i] = seism_cutting('front', (eq_time - sim_start),
69 | 20, station[i])
70 | else:
71 | # sim time < data time < earthquake time; adding zeros
72 | obs_data[i] = seism_appendzeros('front', (start - sim_start),
73 | 20, obs_data[i])
74 |
75 | # synchronize the ending time
76 | if obs_data is not None:
77 | obs_dt = obs_data[0].dt
78 | obs_samples = obs_data[0].samples
79 | obs_time = obs_dt * obs_samples
80 | else:
81 | obs_time = None
82 |
83 | # Find target timeseries duration
84 | target_time = None
85 | if obs_time is not None:
86 | target_time = obs_time
87 | for station in stations:
88 | station_dt = station[0].dt
89 | station_samples = station[0].samples
90 | station_time = station_dt * station_samples
91 | if target_time is None:
92 | target_time = station_time
93 | continue
94 | target_time = min(target_time, station_time)
95 |
96 | # Work on obs_data
97 | if obs_data is not None and sync_end_time:
98 | for i in range(0, 3):
99 | if obs_time > target_time:
100 | obs_data[i] = seism_cutting('end', (obs_time - target_time),
101 | 20, obs_data[i])
102 | obs_samples = obs_data[0].samples
103 | obs_time = obs_dt * obs_samples
104 |
105 | # Work on simulated data
106 | if sync_end_time:
107 | for station in stations:
108 | for i in range(0, 3):
109 | sim_dt = station[i].dt
110 | sim_samples = station[i].samples
111 | sim_time = sim_dt * sim_samples
112 | if sim_time > target_time:
113 | station[i] = seism_cutting('end', (sim_time - target_time),
114 | 20, station[i])
115 |
116 |     # pad the data if they are one sample short after synchronizing
117 | total_samples = None
118 | if obs_data is not None:
119 | total_samples = obs_samples
120 | for station in stations:
121 | sim_samples = station[0].samples
122 | if total_samples is None:
123 | total_samples = sim_samples
124 | continue
125 | total_samples = max(sim_samples, total_samples)
126 |
127 | # For obs_data
128 | if obs_data is not None:
129 | for i in range(0, 3):
130 | if obs_data[i].samples == total_samples - 1:
131 | obs_data[i] = seism_appendzeros('end', obs_data[i].dt,
132 | 20, obs_data[i])
133 | # For simulated data
134 | for station in stations:
135 | for i in range(0, 3):
136 | if station[i].samples == total_samples - 1:
137 | station[i] = seism_appendzeros('end', station[i].dt,
138 | 20, station[i])
139 |
140 | return obs_data, stations
141 | # end of synchronize_all_stations
142 |
143 | def process(obs_file, obs_data, input_files, stations, params):
144 | """
145 | This method processes the signals in each pair of stations.
146 |     Processing consists of scaling, low-pass filtering, alignment
147 | and other things to make both signals compatible to apply GOF method.
148 | obs_data: recorded data
149 | stations: simulation
150 | """
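    # The params dictionary is expected to carry the keys populated by
    # parse_arguments() below: outdir, targetdt, lp, taper, debug, eq_time,
    # leading, and sync_end_time.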
151 | # Process signals to have the same dt
152 | if obs_data is not None:
153 | debug_plots_base = os.path.join(params['outdir'],
154 | os.path.basename(obs_file).split('.')[0])
155 | obs_data = process_station_dt(obs_data,
156 | params['targetdt'],
157 | params['lp'],
158 | taper=params['taper'],
159 | debug=params['debug'],
160 | debug_plots_base=debug_plots_base)
161 | new_stations = []
162 | for station, input_file in zip(stations, input_files):
163 | debug_plots_base = os.path.join(params['outdir'],
164 | os.path.basename(input_file).split('.')[0])
165 | new_station = process_station_dt(station,
166 | params['targetdt'],
167 | params['lp'],
168 | taper=params['taper'],
169 | debug=params['debug'],
170 | debug_plots_base=debug_plots_base)
171 | new_stations.append(new_station)
172 | stations = new_stations
173 |
174 | # Read obs_file timestamp if needed
175 | stamp = None
176 | if obs_data is not None:
177 | stamp = read_stamp(obs_file)
178 |
179 | # Synchronize starting and ending time of data arrays
180 | obs_data, stations = synchronize_all_stations(obs_data,
181 | stations,
182 | stamp,
183 | params['eq_time'],
184 | params['leading'],
185 | params['sync_end_time'])
186 |
187 | if params['sync_end_time']:
188 | # Check number of samples
189 | if obs_data is not None:
190 | num_samples = obs_data[0].samples
191 | else:
192 | num_samples = stations[0][0].samples
193 |
194 | for station in stations:
195 | if station[0].samples != num_samples:
196 |                 print("[ERROR]: two timeseries do not have the same number"
197 | " of samples after processing.")
198 | sys.exit(-1)
199 |
200 | # Check the data
201 | if obs_data is not None:
202 | if not check_station_data(obs_data):
203 | print("[ERROR]: processed recorded data contains errors!")
204 | sys.exit(-1)
205 | for station in stations:
206 | if not check_station_data(station):
207 | print("[ERROR]: processed simulated data contains errors!")
208 | sys.exit(-1)
209 |
210 | # All done
211 | return obs_data, stations
212 | # end of process
213 |
214 | def parse_arguments():
215 | """
216 | This function takes care of parsing the command-line arguments and
217 | asking the user for any missing parameters that we need
218 | """
219 | parser = argparse.ArgumentParser(description="Processes a number of "
220 | "timeseries files and prepares them "
221 | "for plotting.")
222 | parser.add_argument("--obs", dest="obs_file",
223 | help="input file containing recorded data")
224 | parser.add_argument("--leading", type=float, dest="leading",
225 | help="leading time for the simulation (seconds)")
226 | parser.add_argument("--eq-time", dest="eq_time",
227 | help="earthquake start time (HH:MM:SS.CCC)")
228 | parser.add_argument("--dt", type=float, dest="targetdt",
229 | help="target dt for all processed signals")
230 | parser.add_argument("--lp-freq", type=float, dest="lp",
231 | help="frequency for low-pass filter")
232 | parser.add_argument("--taper", type=int, dest="taper",
233 | help="taper window length, default is 8")
234 | parser.add_argument("--output-dir", dest="outdir",
235 | help="output directory for the outputs")
236 | parser.add_argument("--debug", dest="debug", action="store_true",
237 | help="produces debug plots and outputs steps in detail")
238 | parser.add_argument("--disable-sync-end-time", dest="disable_sync_end_time",
239 | default=False, action='store_true',
240 | help="Disables automatic timeseries end time sync")
241 | parser.add_argument('input_files', nargs='*')
242 | args = parser.parse_args()
243 |
244 | # Input files
245 | files = args.input_files
246 | obs_file = args.obs_file
247 |
248 | if files is None:
249 | files = []
250 |
251 | # Check for missing input parameters
252 | params = {}
253 |
254 | if args.outdir is None:
255 | print("[ERROR]: Please provide output directory!")
256 | sys.exit(-1)
257 | else:
258 | params['outdir'] = args.outdir
259 |
260 | # Check if we should cut seismograms to make them all equal
261 | if args.disable_sync_end_time:
262 | params['sync_end_time'] = False
263 | else:
264 | params['sync_end_time'] = True
265 |
266 | # Check for user-provided taper window length
267 | if args.taper is None:
268 | params['taper'] = 8
269 | else:
270 | params['taper'] = args.taper
271 |
272 | # None means no low-pass filtering after adjusting dt
273 | params['lp'] = args.lp
274 |
275 | if args.targetdt is None:
276 | print("[ERROR]: Please provide a target DT to be used in all signals!")
277 | sys.exit(-1)
278 | else:
279 | params['targetdt'] = args.targetdt
280 |
281 | if args.eq_time is None:
282 | print("[ERROR]: Please provide earthquake time!")
283 | sys.exit(-1)
284 | else:
285 | tokens = args.eq_time.split(':')
286 | if len(tokens) < 3:
287 | print("[ERROR]: Invalid time format!")
288 | sys.exit(-1)
289 | try:
290 | params['eq_time'] = [float(token) for token in tokens]
291 | except ValueError:
292 | print("[ERROR]: Invalid time format!")
293 | sys.exit(-1)
294 |
295 | if args.leading is None:
296 | print("[ERROR]: Please enter the simulation leading time!")
297 | sys.exit(-1)
298 | else:
299 | params['leading'] = args.leading
300 |
301 |     params['debug'] = args.debug
302 |
303 | return obs_file, files, params
304 |
305 | def process_main():
306 | """
307 | Main function for processing seismograms
308 | """
309 |     # First let's get all arguments that we need
310 | obs_file, input_files, params = parse_arguments()
311 |
312 | # Read input files
313 | obs_data, stations = read_files(obs_file, input_files)
314 |
315 | # Process signals
316 | obs_data, stations = process(obs_file, obs_data,
317 | input_files, stations,
318 | params)
319 |
320 | # Write processed files
321 | if obs_data is not None:
322 | obs_file_out = os.path.join(params['outdir'],
323 | "p-%s" % os.path.basename(obs_file))
324 | write_bbp(obs_file, obs_file_out, obs_data, params)
325 |
326 | for input_file, station in zip(input_files, stations):
327 | out_file = os.path.join(params['outdir'],
328 | "p-%s" % os.path.basename(input_file))
329 | write_bbp(input_file, out_file, station, params)
330 | # end of process_main
331 |
332 | # ============================ MAIN ==============================
333 | if __name__ == "__main__":
334 | process_main()
335 | # end of main program
336 |
--------------------------------------------------------------------------------
/ts_process/ts_plot_library.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | BSD 3-Clause License
4 |
5 | Copyright (c) 2020, Southern California Earthquake Center
6 | All rights reserved.
7 |
8 | Redistribution and use in source and binary forms, with or without
9 | modification, are permitted provided that the following conditions are met:
10 |
11 | * Redistributions of source code must retain the above copyright notice, this
12 | list of conditions and the following disclaimer.
13 |
14 | * Redistributions in binary form must reproduce the above copyright notice,
15 | this list of conditions and the following disclaimer in the documentation
16 | and/or other materials provided with the distribution.
17 |
18 | * Neither the name of the copyright holder nor the names of its
19 | contributors may be used to endorse or promote products derived from
20 | this software without specific prior written permission.
21 |
22 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
23 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
24 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
25 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
26 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
27 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
28 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
29 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
30 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 |
33 | Library of functions to plot timeseries
34 | """
35 | from __future__ import division, print_function
36 |
37 | # Import Python modules
38 | import os
39 | import sys
40 | import numpy as np
41 | import matplotlib.pyplot as plt
42 | from ts_library import get_points, FAS, calculate_rd50
43 |
44 | def plot_overlay_timeseries(args, filenames, stations,
45 | output_file, plot_title=None):
46 | """
47 |     Plots a comparison of multiple timeseries; supports
48 | a maximum of 12 timeseries
49 |
50 | Inputs:
51 | args.xmin - min x for timeseries plot (s)
52 | args.xmax - max x for timeseries plot (s)
53 | filenames - array of filenames to use for the legend
54 | (in same order as data in stations array)
55 | stations - array of stations with data to plot
56 | output_file - filename to use for the output plot
57 | plot_title - title of the plot, default no title
58 | Outputs:
59 | Plot generated as output_file
60 | """
61 | all_styles = ['k', 'r', 'b', 'm', 'g', 'c', 'y', 'brown',
62 | 'gold', 'blueviolet', 'grey', 'pink']
63 |
64 | # Check number of input timeseries
65 | if len(stations) > len(all_styles):
66 | print("[ERROR]: Too many timeseries to plot!")
67 | sys.exit(-1)
68 |
69 | delta_ts = [station[0].dt for station in stations]
70 | xtmin = args.xmin
71 | xtmax = args.xmax
72 | min_is = [int(xtmin/delta_t) for delta_t in delta_ts]
73 | max_is = [int(xtmax/delta_t) for delta_t in delta_ts]
74 |
75 | # Create plot
76 | f, axarr = plt.subplots(nrows=3, ncols=3, figsize=(14, 9))
77 |
78 | # For each component: N/S, E/W, U/D
79 | for i in range(0, 3):
80 |
81 | signals = [station[i] for station in stations]
82 | samples = [signal.samples for signal in signals]
83 | displs = [signal.dis for signal in signals]
84 | vels = [signal.vel for signal in signals]
85 | accs = [signal.acc for signal in signals]
86 |
87 | # Get orientation
88 | orientation = signals[0].orientation
89 | if type(orientation) is not str:
90 | orientation = str(int(orientation))
91 |
92 | # Set up titles
93 | title_acc = "Acceleration : %s" % (orientation)
94 | title_vel = "Velocity : %s" % (orientation)
95 | title_dis = "Displacement : %s" % (orientation)
96 |
97 | # cutting signal by bounds
98 | c_displs = [dis[min_i:max_i] for dis, min_i, max_i in zip(displs,
99 | min_is,
100 | max_is)]
101 | c_vels = [vel[min_i:max_i] for vel, min_i, max_i in zip(vels,
102 | min_is,
103 | max_is)]
104 | c_accs = [acc[min_i:max_i] for acc, min_i, max_i in zip(accs,
105 | min_is,
106 | max_is)]
107 | times = [np.arange(xtmin,
108 | min(xtmax, (delta_t * sample)),
109 | delta_t) for delta_t, sample in zip(delta_ts,
110 | samples)]
111 |
112 | axarr[i][0] = plt.subplot2grid((3, 3), (i, 0))
113 | axarr[i][0].set_title(title_dis)
114 | axarr[i][0].grid(True)
115 | styles = all_styles[0:len(times)]
116 | for timeseries, c_dis, style in zip(times, c_displs, styles):
117 | axarr[i][0].plot(timeseries, c_dis, style)
118 | plt.xlim(xtmin, xtmax)
119 |
120 | axarr[i][1] = plt.subplot2grid((3, 3), (i, 1))
121 | axarr[i][1].set_title(title_vel)
122 | axarr[i][1].grid(True)
123 | styles = all_styles[0:len(times)]
124 | for timeseries, c_vel, style in zip(times, c_vels, styles):
125 | axarr[i][1].plot(timeseries, c_vel, style)
126 | plt.xlim(xtmin, xtmax)
127 |
128 | axarr[i][2] = plt.subplot2grid((3, 3), (i, 2))
129 | axarr[i][2].set_title(title_acc)
130 | axarr[i][2].grid(True)
131 | styles = all_styles[0:len(times)]
132 | for timeseries, c_acc, style in zip(times, c_accs, styles):
133 | axarr[i][2].plot(timeseries, c_acc, style)
134 | # Add labels to first plot
135 | if i == 0:
136 | plt.legend(filenames, prop={'size':6})
137 | plt.xlim(xtmin, xtmax)
138 |
139 | # Make nice plots with tight_layout
140 | f.tight_layout()
141 |
142 | # Add overall title if provided
143 | if plot_title is not None:
144 | st = plt.suptitle(plot_title, fontsize=16)
145 | # shift subplots down:
146 | #st.set_y(0.95)
147 | f.subplots_adjust(top=0.92)
148 |
149 | # All done, save plot
150 | if output_file.lower().endswith(".png"):
151 | fmt = 'png'
152 | elif output_file.lower().endswith(".pdf"):
153 | fmt = 'pdf'
154 | else:
155 | print("[ERROR]: Unknown format!")
156 | sys.exit(-1)
157 |
158 | plt.savefig(output_file, format=fmt,
159 | transparent=False, dpi=300)
160 |
161 | def comparison_plot(args, filenames, stations,
162 | output_file, plot_title=None):
163 | """
164 |     Plots timeseries (velocity or acceleration), FAS, and response
165 |     spectra; supports up to 12 timeseries
166 |
167 | Inputs:
168 | args.xmin - min x value for timeseries plot (s)
169 | args.xmax - max x value for timeseries plot (s)
170 | args.xfmin - min frequency for FAS plot (Hz)
171 | args.xfmax - max frequency for FAS plot (Hz)
172 | args.tmin - min period for response plot (s)
173 | args.tmax - max period for response plot (s)
174 | args.acc_plots - flag to create acceleration plots instead of velocity
175 | filenames - array of filenames to use for the legend
176 | (in same order as data in stations array)
177 | stations - array of stations with data to plot
178 | output_file - filename to use for the output plot
179 | plot_title - title of the plot, default no title
180 | Outputs:
181 | Plot generated as output_file
182 | """
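    # Figure layout (built below with subplot2grid on a 3x4 grid): one row per
    # component, with the timeseries spanning the first two columns, followed
    # by one column for the FAS and one for the PSA response spectrum.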
183 | all_styles = ['k', 'r', 'b', 'm', 'g', 'c', 'y', 'brown',
184 | 'gold', 'blueviolet', 'grey', 'pink']
185 |
186 | # Check number of input timeseries
187 | if len(stations) > len(all_styles):
188 | print("[ERROR]: Too many timeseries to plot!")
189 | sys.exit(-1)
190 |
191 | delta_ts = [station[0].dt for station in stations]
192 | paddings = [station[0].padding for station in stations]
193 | num_points = [station[0].samples for station in stations]
194 | files_vel = [os.path.basename(filename) for filename in filenames]
195 | files_acc = [filename.replace(".vel.", ".acc.") for filename in files_vel]
196 |
197 | xtmins = [args.xmin for station in stations]
198 | if args.xmax == 0:
199 | xtmaxs = [delta_t * (n_points - 2 * padding - 1) for delta_t, n_points, padding in zip(delta_ts,
200 | num_points,
201 | paddings)]
202 | else:
203 | xtmaxs = [args.xmax for station in stations]
204 | xfmin = args.xfmin
205 | xfmax = args.xfmax
206 | tmin = args.tmin
207 | tmax = args.tmax
208 | acc_flag = args.acc_plots
209 |
210 | min_is = [int(xtmin/delta_t) + padding for xtmin, delta_t, padding in zip(xtmins, delta_ts, paddings)]
211 | max_is = [int(xtmax/delta_t) + padding for xtmax, delta_t, padding in zip(xtmaxs, delta_ts, paddings)]
212 |
213 | rd50s = [calculate_rd50(station, tmin, tmax) for station in stations]
214 |
215 | f, axarr = plt.subplots(nrows=3, ncols=3, figsize=(14, 9))
216 | for i in range(0, 3):
217 | signals = [station[i] for station in stations]
218 | samples = [signal.samples for signal in signals]
219 | vels = [signal.vel for signal in signals]
220 | accs = [signal.acc for signal in signals]
221 | psas = [psa[i+1] for psa in rd50s]
222 | periods = [psa[0] for psa in rd50s]
223 | # Get title
224 | if type(signals[0].orientation) is not str:
225 | suffix = "%s Deg." % (signals[0].orientation)
226 | else:
227 | suffix = "%s" % (signals[0].orientation)
228 | if acc_flag:
229 | title = "Acc. (cm/s/s), %s" % (suffix)
230 | else:
231 | title = "Vel. (cm/s), %s" % (suffix)
232 | if type(title) is not str:
233 | title = str(int(title))
234 |
235 | for sample, padding, max_i, delta_t in zip(samples, paddings,
236 | max_is, delta_ts):
237 | if sample - padding - 1 < max_i:
238 | print("sample=%f, padding=%f, max_i=%f" % (sample, padding, max_i))
239 | print("[ERROR]: t_max has to be under %f" %
240 | ((sample - (2 * padding) - 1) * delta_t))
241 | sys.exit(1)
242 |
243 | # cutting signal by bounds
244 | c_vels = [vel[min_i:max_i] for vel, min_i, max_i in zip(vels,
245 | min_is,
246 | max_is)]
247 | c_accs = [acc[min_i:max_i] for acc, min_i, max_i in zip(accs,
248 | min_is,
249 | max_is)]
250 | times = [np.arange(xtmin, xtmax, delta_t) for xtmin, xtmax, delta_t in zip(xtmins, xtmaxs, delta_ts)]
251 | points = get_points(samples)
252 |
253 | if acc_flag:
254 | freqs, fas_s = zip(*[FAS(acc,
255 | delta_t,
256 | points,
257 | xfmin,
258 | xfmax,
259 | 3) for acc, delta_t in zip(accs,
260 | delta_ts)])
261 | else:
262 | freqs, fas_s = zip(*[FAS(vel,
263 | delta_t,
264 | points,
265 | xfmin,
266 | xfmax,
267 | 3) for vel, delta_t in zip(vels,
268 | delta_ts)])
269 |
270 | axarr[i][0] = plt.subplot2grid((3, 4), (i, 0), colspan=2, rowspan=1)
271 | axarr[i][0].set_title(title)
272 | axarr[i][0].grid(True)
273 | styles = all_styles[0:len(times)]
274 | if acc_flag:
275 | for timeseries, c_acc, style in zip(times, c_accs, styles):
276 | axarr[i][0].plot(timeseries, c_acc, style, lw=0.5)
277 | else:
278 | for timeseries, c_vel, style in zip(times, c_vels, styles):
279 | axarr[i][0].plot(timeseries, c_vel, style, lw=0.5)
280 |
281 | if i == 0:
282 | if acc_flag:
283 | plt.legend(files_acc, prop={'size':8})
284 | else:
285 | plt.legend(files_vel, prop={'size':8})
286 | plt.xlim(min(xtmins), max(xtmaxs))
287 |
288 | if i == 2:
289 | axarr[i][0].set_xlabel("Time (s)")
290 |
291 | axarr[i][1] = plt.subplot2grid((3, 4), (i, 2), rowspan=1, colspan=1)
292 | if acc_flag:
293 | axarr[i][1].set_title('Acc. FAS (cm/s), %s' % (suffix))
294 | else:
295 | axarr[i][1].set_title('Vel. FAS (cm), %s' % (suffix))
296 | axarr[i][1].grid(True, which='both')
297 | axarr[i][1].set_xscale('log')
298 | axarr[i][1].set_yscale('log')
299 | for freq, fas, style in zip(freqs, fas_s, styles):
300 | axarr[i][1].plot(freq, fas, style, lw=0.5)
301 |
302 | tmp_xfmin = xfmin
303 | if tmp_xfmin < 0.005:
304 | tmp_xfmin = 0.01
305 | plt.xlim(tmp_xfmin, xfmax)
306 |
307 | if i == 2:
308 | axarr[i][1].set_xlabel("Freq (Hz)")
309 |
310 | axarr[i][2] = plt.subplot2grid((3, 4), (i, 3), rowspan=1, colspan=1)
311 | axarr[i][2].set_title("PSA (g), %s" % (suffix))
312 | axarr[i][2].set_xscale('log')
313 | axarr[i][2].grid(True)
314 | for psa, period, style in zip(psas, periods, styles):
315 | axarr[i][2].plot(period, psa, style, lw=0.5)
316 |
317 | plt.xlim(tmin, tmax)
318 |
319 | if i == 2:
320 | axarr[i][2].set_xlabel("Period (s)")
321 |
322 | # Make nice plots with tight_layout
323 | f.tight_layout()
324 |
325 | # Add overall title if provided
326 | if plot_title is not None:
327 | st = plt.suptitle(plot_title, fontsize=16)
328 | # shift subplots down:
329 | #st.set_y(0.95)
330 | f.subplots_adjust(top=0.92)
331 |
332 | # All done, save plot
333 | if output_file.lower().endswith(".png"):
334 | fmt = 'png'
335 | elif output_file.lower().endswith(".pdf"):
336 | fmt = 'pdf'
337 | else:
338 | print("[ERROR]: Unknown format!")
339 | sys.exit(-1)
340 |
341 | plt.savefig(output_file, format=fmt,
342 | transparent=False, dpi=300)
343 |
--------------------------------------------------------------------------------
/ts_process/smc2bbp.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | BSD 3-Clause License
4 |
5 | Copyright (c) 2020, Southern California Earthquake Center
6 | All rights reserved.
7 |
8 | Redistribution and use in source and binary forms, with or without
9 | modification, are permitted provided that the following conditions are met:
10 |
11 | * Redistributions of source code must retain the above copyright notice, this
12 | list of conditions and the following disclaimer.
13 |
14 | * Redistributions in binary form must reproduce the above copyright notice,
15 | this list of conditions and the following disclaimer in the documentation
16 | and/or other materials provided with the distribution.
17 |
18 | * Neither the name of the copyright holder nor the names of its
19 | contributors may be used to endorse or promote products derived from
20 | this software without specific prior written permission.
21 |
22 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
23 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
24 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
25 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
26 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
27 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
28 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
29 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
30 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 |
33 | Utility to convert SMC observation files to BBP format
34 | """
35 | from __future__ import division, print_function
36 |
37 | # Import Python modules
38 | import os
39 | import sys
40 | import argparse
41 | import numpy as np
42 |
43 | # Import seismtools needed classes
44 | from ts_library import TimeseriesComponent, baseline_function, \
45 | rotate_timeseries, check_station_data, integrate, G2CMSS
46 |
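# Illustrative invocation (hypothetical paths, for example only):
#   smc2bbp.py -d raw_smc_files -o bbp_output     # convert a whole directory
#   smc2bbp.py -i CE12345.V2 -o bbp_output        # convert a single file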
47 | def parse_arguments():
48 | """
49 | This function takes care of parsing the command-line arguments and
50 | asking the user for any missing parameters that we need
51 | """
52 | parser = argparse.ArgumentParser(description="Converts V1/V2 "
53 | " observation files to BBP format.")
54 | parser.add_argument("-o", "--output", dest="outdir", required=True,
55 | help="output directory name")
56 | parser.add_argument("-i", "--input", dest="infile",
57 | help="input file (overrides --dir below)")
58 | parser.add_argument("-d", "--dir", dest="indir",
59 | help="input directory")
60 | args = parser.parse_args()
61 |
62 | if args.infile is None and args.indir is None:
63 | print("ERROR: Please specify either an input file or directory!")
64 | sys.exit(-1)
65 |
66 | if args.infile is not None:
67 | args.indir = None
68 |
69 | return args
70 |
71 | def read_data(signal):
72 | """
73 |     Converts signal data into a NumPy array of floats
74 | """
75 | # avoid negative number being stuck
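    # e.g. the packed string '0.0123-0.0456' becomes '0.0123 -0.0456', so
    # split() below can separate the individual values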
76 | signal = signal.replace('-', ' -')
77 | signal = signal.split()
78 |
79 | data = []
80 | for s in signal:
81 | data.append(float(s))
82 | data = np.array(data)
83 | return data
84 |
85 | def read_smc_v1(input_file):
86 | """
87 | Reads and processes a V1 file
88 | """
89 | record_list = []
90 |
91 | # Loads station into a string
92 | try:
93 | fp = open(input_file, 'r')
94 | except IOError as e:
95 | print("[ERROR]: opening input file %s" % (input_file))
96 | return False
97 |
98 | # Print status message
99 | print("[READING]: %s..." % (input_file))
100 |
101 | # Read data
102 | channels = fp.read()
103 | fp.close()
104 |
105 | # Splits the string by channels
106 | channels = channels.split('/&')
107 |     del channels[-1]
108 |
109 | # Splits the channels
110 | for i in range(len(channels)):
111 | channels[i] = channels[i].split('\n')
112 |
113 | # Clean the first row in all but the first channel
114 | for i in range(1, len(channels)):
115 | del channels[i][0]
116 |
117 | for i in range(len(channels)):
118 | # Check this is the uncorrected acceleration data
119 | ctype = channels[i][0][0:24].lower()
120 | if ctype != "uncorrected accelerogram":
121 | print("[ERROR]: processing uncorrected accelerogram ONLY.")
122 | return False
123 | else:
124 | dtype = 'a'
125 |
126 | network = input_file.split('/')[-1].split('.')[0][0:2].upper()
127 | station_id = input_file.split('/')[-1].split('.')[0][2:].upper()
128 |
129 | # Get location's latitude and longitude
130 | tmp = channels[i][4].split()
131 | latitude = tmp[3][:-1]
132 | longitude = tmp[4]
133 |
134 | # Get station name
135 | station_name = channels[i][5][0:40].strip()
136 |
137 | # Get orientation, convert to int if it's digit
138 | tmp = channels[i][6].split()
139 | orientation = tmp[2]
140 | if orientation.isdigit():
141 | orientation = float(int(orientation))
142 | if orientation == 360:
143 | orientation = 0.0
144 | else:
145 | orientation = orientation.lower()
146 |
147 | # Get date and time; set to fixed format
148 | start_time = channels[i][3][37:80].split()
149 | date = start_time[2][:-1]
150 |
151 | tmp = channels[i][14].split()
152 | hour = tmp[0]
153 | minute = tmp[1]
154 | seconds = tmp[2]
155 | fraction = tmp[3]
156 | tzone = channels[i][3].split()[-2]
157 | time = "%s:%s:%s.%s %s" % (hour, minute, seconds, fraction, tzone)
158 |
159 | # Get number of samples and dt
160 | tmp = channels[i][27].split()
161 | samples = int(tmp[0])
162 | delta_t = 1.0 / int(tmp[4])
163 |
164 | # Get signals' data
165 | tmp = channels[i][28:]
166 | signal = str()
167 | for s in tmp:
168 | signal += s
169 | acc_data_g = read_data(signal)
170 | # Convert from g to cm/s/s
171 | acc_data = acc_data_g * G2CMSS
172 | # Now integrate to get velocity and displacement
173 | vel_data = integrate(acc_data, delta_t)
174 | dis_data = integrate(vel_data, delta_t)
175 |
176 | print("[PROCESSING]: Found component: %s" % (orientation))
177 | record_list.append(TimeseriesComponent(samples, delta_t, orientation,
178 | acc_data, vel_data, dis_data))
179 |
180 | station_metadata = {}
181 | station_metadata['network'] = network
182 | station_metadata['station_id'] = station_id
183 | station_metadata['type'] = "V1"
184 | station_metadata['date'] = date
185 | station_metadata['time'] = time
186 | station_metadata['longitude'] = longitude
187 | station_metadata['latitude'] = latitude
188 | station_metadata['high_pass'] = -1
189 | station_metadata['low_pass'] = -1
190 |
191 | return record_list, station_metadata
192 |
193 | def read_smc_v2(input_file):
194 | """
195 | Reads and processes a V2 file
196 | """
197 | record_list = []
198 |
199 | # Loads station into a string
200 | try:
201 | fp = open(input_file, 'r')
202 | except IOError as e:
203 | print("[ERROR]: opening input file %s" % (input_file))
204 | return False
205 |
206 | # Print status message
207 | print("[READING]: %s..." % (input_file))
208 |
209 | # Read data
210 | channels = fp.read()
211 | fp.close()
212 |
213 | # Splits the string by channels
214 | channels = channels.split('/&')
215 |     del channels[-1]
216 |
217 | # Splits the channels
218 | for i in range(len(channels)):
219 | channels[i] = channels[i].split('\n')
220 |
221 | # Clean the first row in all but the first channel
222 | for i in range(1, len(channels)):
223 | del channels[i][0]
224 |
225 | for i in range(len(channels)):
226 | tmp = channels[i][0].split()
227 | # Check this is the corrected acceleration data
228 | ctype = (tmp[0] + " " + tmp[1]).lower()
229 | if ctype != "corrected accelerogram":
230 | print("[ERROR]: processing corrected accelerogram ONLY.")
231 | return False
232 |
233 | # Get network code and station id
234 | network = input_file.split('/')[-1].split('.')[0][0:2].upper()
235 | station_id = input_file.split('/')[-1].split('.')[0][2:].upper()
236 |
237 | # Get location's latitude and longitude
238 | tmp = channels[i][5].split()
239 | latitude = tmp[3][:-1]
240 | longitude = tmp[4]
241 |
242 | # Make sure we captured the right values
243 |         if latitude[-1].upper() != "N" and latitude[-1].upper() != "S":
244 | # Maybe it is an old file, let's try to get the values again...
245 | latitude = (float(tmp[3]) +
246 | (float(tmp[4]) / 60.0) +
247 | (float(tmp[5][:-2]) / 3600.0))
248 | latitude = "%s%s" % (str(latitude), tmp[5][-2])
249 | longitude = (float(tmp[6]) +
250 | (float(tmp[7]) / 60.0) +
251 | (float(tmp[8][:-1]) / 3600.0))
252 | longitude = "%s%s" % (str(longitude), tmp[8][-1])
253 |
254 | # Get orientation from integer header
255 | orientation = float(int(channels[i][26][50:55]))
256 | if orientation == 360:
257 | orientation = 0.0
258 | elif orientation == 500:
259 | orientation = "up"
260 | elif orientation == 600:
261 | orientation = "down"
262 |
263 | # Get filtering information
264 | tmp = channels[i][14].split()
265 | high_pass = float(tmp[8])
266 | low_pass = float(tmp[10])
267 |
268 | # Get station name
269 | station_name = channels[i][6][0:40].strip()
270 |
271 | # Get date and time; set to fixed format
272 | start_time = channels[i][4][37:80].split()
273 | try:
274 | date = start_time[2][:-1]
275 | tmp = start_time[3].split(':')
276 | hour = tmp[0]
277 | minute = tmp[1]
278 | seconds, fraction = tmp[2].split('.')
279 | # Works for both newer and older V2 files
280 | tzone = channels[i][4].split()[5]
281 | except IndexError:
282 | date = '00/00/00'
283 | hour = '00'
284 | minute = '00'
285 | seconds = '00'
286 | fraction = '0'
287 | tzone = '---'
288 |
289 | # Put it all together
290 | time = "%s:%s:%s.%s %s" % (hour, minute, seconds, fraction, tzone)
291 |
292 | # Get number of samples and dt
293 | tmp = channels[i][45].split()
294 | samples = int(tmp[0])
295 | delta_t = float(tmp[8])
296 |
297 | # Get signals' data
298 | tmp = channels[i][45:]
299 | a_signal = str()
300 | v_signal = str()
301 | d_signal = str()
302 |
303 | for s in tmp:
304 | # Detecting separate line and get data type
305 | if "points" in s.lower():
306 | line = s.split()
307 | if line[3].lower() == "accel" or line[3].lower() == "acc":
308 | dtype = 'a'
309 | elif line[3].lower() == "veloc" or line[3].lower() == "vel":
310 | dtype = 'v'
311 | elif line[3].lower() == "displ" or line[3].lower() == "dis":
312 | dtype = 'd'
313 | else:
314 | dtype = "unknown"
315 |
316 | # Processing data
317 | else:
318 | if dtype == 'a':
319 | a_signal += s
320 | elif dtype == 'v':
321 | v_signal += s
322 | elif dtype == 'd':
323 | d_signal += s
324 |
325 | acc_data = read_data(a_signal)
326 | vel_data = read_data(v_signal)
327 | dis_data = read_data(d_signal)
328 |
329 | print("[PROCESSING]: Found component: %s" % (orientation))
330 | record_list.append(TimeseriesComponent(samples, delta_t, orientation,
331 | acc_data, vel_data, dis_data))
332 |
333 | station_metadata = {}
334 | station_metadata['network'] = network
335 | station_metadata['station_id'] = station_id
336 | station_metadata['type'] = "V2"
337 | station_metadata['date'] = date
338 | station_metadata['time'] = time
339 | station_metadata['longitude'] = longitude
340 | station_metadata['latitude'] = latitude
341 | station_metadata['high_pass'] = high_pass
342 | station_metadata['low_pass'] = low_pass
343 |
344 | return record_list, station_metadata
345 |
346 | def process_observation_data(station):
347 | """
348 | This function processes the observation data
349 | using baseline correction and rotation (if needed)
350 | """
351 | # Validate inputs
352 | if len(station) != 3:
353 | print("[ERROR]: Expecting 3 components!")
354 | return False
355 |
356 | # Reorder components if needed so that vertical is always the last one
357 | if isinstance(station[0].orientation, str):
358 | tmp = station[0]
359 | station[0] = station[2]
360 | station[2] = tmp
361 | elif isinstance(station[1].orientation, str):
362 | tmp = station[1]
363 | station[1] = station[2]
364 | station[2] = tmp
365 |
366 | # First we apply the baseline correction, use 5th order polynomial
367 | order = 5
368 | # Inputs are in cm/sec2, so no scaling
369 | gscale = 1.0
370 |
371 | # Apply baseline correction to all components
372 | for component in station:
373 | _, new_acc, new_vel, new_dis = baseline_function(component.acc,
374 | component.dt,
375 | gscale, order)
376 | component.acc = new_acc
377 | component.vel = new_vel
378 | component.dis = new_dis
379 |
380 | # Now rotate if needed, so that components are 0 and 90 degrees
381 | # Always pick the smaller angle for rotation
382 | rotation_angle = min(station[0].orientation,
383 | station[1].orientation)
384 | return rotate_timeseries(station, rotation_angle)
385 |
386 | def write_bbp(station, station_metadata, destination):
387 | """
388 | This function generates .bbp files for
389 | each of velocity/acceleration/displacement
390 | """
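    # Produces three files named <network>_<station_id>.<type>.{dis,vel,acc}.bbp
    # in the destination directory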
391 | filename_base = ("%s_%s.%s" %
392 | (station_metadata['network'],
393 | station_metadata['station_id'],
394 | station_metadata['type']))
395 |
396 | # round data to 7 decimals in order to print properly
397 | for component in station:
398 | if component.orientation in [0, 360, 180, -180]:
399 | dis_ns = component.dis.tolist()
400 | vel_ns = component.vel.tolist()
401 | acc_ns = component.acc.tolist()
402 | elif component.orientation in [90, -270, -90, 270]:
403 | dis_ew = component.dis.tolist()
404 | vel_ew = component.vel.tolist()
405 | acc_ew = component.acc.tolist()
406 | elif (component.orientation.upper() == "UP" or
407 | component.orientation.upper() == "DOWN"):
408 | dis_up = component.dis.tolist()
409 | vel_up = component.vel.tolist()
410 | acc_up = component.acc.tolist()
411 | else:
412 | pass
413 |
414 | # Prepare to output
415 | out_data = [['dis', dis_ns, dis_ew, dis_up, 'displacement', 'cm'],
416 | ['vel', vel_ns, vel_ew, vel_up, 'velocity', 'cm/s'],
417 | ['acc', acc_ns, acc_ew, acc_up, 'acceleration', 'cm/s^2']]
418 |
419 | for data in out_data:
420 | filename = "%s.%s.bbp" % (filename_base, data[0])
421 | try:
422 | out_fp = open(os.path.join(destination, filename), 'w')
423 | except IOError as e:
424 | print("[ERROR]: Writing BBP file: %s" % (filename))
425 | return False
426 |
427 | # Start with time = 0.0
428 | time = [0.000]
429 | samples = component.samples
430 | while samples > 1:
431 | time.append(time[len(time)-1] + component.dt)
432 | samples -= 1
433 |
434 | # Write header
435 | out_fp.write("# Station= %s_%s\n" %
436 | (station_metadata['network'],
437 | station_metadata['station_id']))
438 | out_fp.write("# time= %s,%s\n" %
439 | (station_metadata['date'],
440 | station_metadata['time']))
441 | out_fp.write("# lon= %s\n" %
442 | (station_metadata['longitude']))
443 | out_fp.write("# lat= %s\n" %
444 | (station_metadata['latitude']))
445 | out_fp.write("# hp= %s\n" %
446 | (station_metadata['high_pass']))
447 | out_fp.write("# lp= %s\n" %
448 | (station_metadata['low_pass']))
449 | out_fp.write("# units= %s\n" % (data[5]))
450 | # We haven't added any padding to the timeseries yet
451 | out_fp.write("# padding= 0\n")
452 | # Orientation is always 0,90,UP as we just rotated the timeseries
453 | out_fp.write("# orientation= 0,90,UP\n")
454 | out_fp.write("#\n")
455 | out_fp.write("# Data fields are TAB-separated\n")
456 | out_fp.write("# Column 1: Time (s)\n")
457 | out_fp.write("# Column 2: H1 component ground "
458 | "%s (+ is 000)\n" % (data[4]))
459 | out_fp.write("# Column 3: H2 component ground "
460 | "%s (+ is 090)\n" % (data[4]))
461 | out_fp.write("# Column 4: V component ground "
462 | "%s (+ is upward)\n" % (data[4]))
463 | out_fp.write("#\n")
464 |
465 | # Write timeseries
466 | for val_time, val_ns, val_ew, val_ud in zip(time, data[1],
467 | data[2], data[3]):
468 | out_fp.write("%5.7f %5.9e %5.9e %5.9e\n" %
469 | (val_time, val_ns, val_ew, val_ud))
470 |
471 | # All done, close file
472 | out_fp.close()
473 | print("[WRITING]: Wrote BBP file: %s" % (filename))
474 |
475 | def smc2bbp_process(input_file, output_dir):
476 | """
477 | Converts input_file to bbp format
478 | """
479 | if (input_file.upper().endswith(".RAW") or
480 | input_file.upper().endswith(".V1")):
481 | station, station_metadata = read_smc_v1(input_file)
482 | else:
483 | # Must be a ".V2" file!
484 | station, station_metadata = read_smc_v2(input_file)
485 |
486 | if station:
487 | station = process_observation_data(station)
488 | # Make sure output is valid
489 | if not station:
490 | print("[ERROR]: Processing input file: %s" % (input_file))
491 | return
492 | else:
493 | print("[ERROR]: Reading input file: %s" % (input_file))
494 | return
495 |
496 | # Write BBP file
497 | write_bbp(station, station_metadata, output_dir)
498 |
499 | def smc2bbp_main():
500 | """
501 | Main function for the smc2bbp conversion utility
502 | """
503 | args = parse_arguments()
504 |
505 | if args.infile is not None:
506 | # Only one file to process
507 | process_list = [args.infile]
508 | else:
509 | # Create list of files to process
510 | process_list = []
511 | for item in os.listdir(args.indir):
512 | if (item.upper().endswith(".V1") or
513 | item.upper().endswith(".RAW") or
514 | item.upper().endswith(".V2")):
515 | process_list.append(os.path.join(args.indir,
516 | item))
517 |
518 | # Now process the list of files
519 | for item in process_list:
520 | smc2bbp_process(item, args.outdir)
521 |
522 | # ============================ MAIN ==============================
523 | if __name__ == "__main__":
524 | smc2bbp_main()
525 | # end of main program
526 |
--------------------------------------------------------------------------------
/ts_process/file_utilities.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | BSD 3-Clause License
4 |
5 | Copyright (c) 2020, Southern California Earthquake Center
6 | All rights reserved.
7 |
8 | Redistribution and use in source and binary forms, with or without
9 | modification, are permitted provided that the following conditions are met:
10 |
11 | * Redistributions of source code must retain the above copyright notice, this
12 | list of conditions and the following disclaimer.
13 |
14 | * Redistributions in binary form must reproduce the above copyright notice,
15 | this list of conditions and the following disclaimer in the documentation
16 | and/or other materials provided with the distribution.
17 |
18 | * Neither the name of the copyright holder nor the names of its
19 | contributors may be used to endorse or promote products derived from
20 | this software without specific prior written permission.
21 |
22 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
23 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
24 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
25 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
26 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
27 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
28 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
29 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
30 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
31 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 |
33 | This module contains several input/output utility
34 | functions used by other modules.
35 | """
36 | from __future__ import division, print_function, absolute_import
37 |
38 | # Import Python modules
39 | import os
40 | import sys
41 | import numpy as np
42 |
43 | # Import seismtools needed classes
44 | from ts_library import TimeseriesComponent
45 |
46 | def reverse_up_down(station):
47 | """
48 | reverse up down component
49 | """
50 | # station has 3 components [ns, ew, ud]
51 | # only need to flip the 3rd one
52 | station[2].acc *= -1
53 | station[2].vel *= -1
54 | station[2].dis *= -1
55 |
56 | return station
57 | # end of reverse_up_down
58 |
59 | def scale_from_m_to_cm(station):
60 | # scales timeseries from meters to centimeters
61 | for i in range(0, len(station)):
62 | station[i].acc *= 100
63 | station[i].vel *= 100
64 | station[i].dis *= 100
65 |
66 | return station
67 | # end of scale_from_m_to_cm
68 |
69 | def get_dt(input_file):
70 | """
71 | Read timeseries file and return dt
72 | """
73 | val1 = None
74 | val2 = None
75 | file_dt = None
76 |
77 | # Figure out dt first, we need it later
78 | ifile = open(input_file)
79 | for line in ifile:
80 | # Skip comments
81 | if line.startswith("#") or line.startswith("%"):
82 | continue
83 | pieces = line.split()
84 | pieces = [float(piece) for piece in pieces]
85 | if val1 is None:
86 | val1 = pieces[0]
87 | continue
88 | if val2 is None:
89 | val2 = pieces[0]
90 | break
91 | ifile.close()
92 |
93 | # Quit if cannot figure out dt
94 | if val1 is None or val2 is None:
95 | print("[ERROR]: Cannot determine dt from file! Exiting...")
96 | sys.exit(1)
97 |
98 | # Return dt
99 | return val2 - val1
100 | # end get_dt
101 |
102 | def read_files(obs_file, input_files):
103 | """
104 | Reads all input files
105 | """
106 | # read obs data
107 | obs_data = None
108 | if obs_file is not None:
109 | obs_data = read_file(obs_file)
110 | # Make sure we got it
111 | if not obs_data:
112 | print("[ERROR]: Reading obs file: %s!" % (obs_file))
113 | sys.exit(-1)
114 | # Fix units if needed
115 | if obs_file.lower().endswith(".bbp"):
116 | units = read_unit_bbp(obs_file)
117 | # If in meters, scale to cm
118 | if units == "m":
119 | obs_data = scale_from_m_to_cm(obs_data)
120 | else:
121 | print("[ERROR]: Unknown file format: %s!" % (obs_file))
122 | sys.exit(-1)
123 |
124 | # reads signals
125 | stations = []
126 | for input_file in input_files:
127 | station = read_file(input_file)
128 | # Make sure we got it
129 | if not station:
130 | print("[ERROR]: Reading input file: %s!" % (input_file))
131 | sys.exit(-1)
132 | # Fix units if needed
133 | if input_file.lower().endswith(".bbp"):
134 | units = read_unit_bbp(input_file)
135 | # If in meters, scale to cm
136 | if units == "m":
137 | station = scale_from_m_to_cm(station)
138 | else:
139 |             print("[ERROR]: Unknown file format: %s!" % (input_file))
140 | sys.exit(-1)
141 |
142 | # Done with this station
143 | stations.append(station)
144 |
145 | # all done
146 | return obs_data, stations
147 |
148 | def read_filelist(filelist):
149 | """
150 | This function reads the filelist provided by the user
151 | """
152 | station_list = []
153 | coor_x = []
154 | coor_y = []
155 |
156 | try:
157 | input_file = open(filelist, 'r')
158 | except IOError:
159 | print("[ERROR]: error loading filelist.")
160 | sys.exit(-1)
161 |
162 | for line in input_file:
163 |         if '#' not in line:
164 | line = line.split()
165 | # Get station name and make substitution
166 | station_name = line[0]
167 | station_name = station_name.replace(".", "_")
168 |
169 | if len(line) == 1:
170 | # not containing coordinates
171 | station_list.append(station_name)
172 | coor_x.append(0.0)
173 | coor_y.append(0.0)
174 | elif len(line) == 3:
175 | # containing coordinates
176 | station_list.append(station_name)
177 | try:
178 | coor_x.append(float(line[1]))
179 | coor_y.append(float(line[2]))
180 | except ValueError:
181 | coor_x.append(0.0)
182 | coor_y.append(0.0)
183 |
184 | # Close the input file
185 | input_file.close()
186 |
187 | return station_list, coor_x, coor_y
188 | # end of read_filelist
189 |
190 | # ================================ READING ================================
191 | def read_file(filename):
192 | """
193 | This function reads a timeseries file in bbp format
194 | """
195 | if filename.lower().endswith(".bbp"):
196 | # Filename in bbp format
197 | print("[READING]: %s" % (filename))
198 | return read_file_bbp(filename)
199 | # Unknown file format
200 | print("[ERROR]: Unknown file format: %s!" % (filename))
201 | sys.exit(-1)
202 | # end of read_file
203 |
204 | def read_file_bbp2(filename):
205 | """
206 |     This function reads a single bbp file and returns the timeseries
207 |     as a (time, h1, h2, vertical) tuple of NumPy arrays
208 | """
209 | time = []
210 | h1_comp = []
211 | h2_comp = []
212 | ud_comp = []
213 |
214 | try:
215 | input_file = open(filename, 'r')
216 | for line in input_file:
217 | line = line.strip()
218 | if line.startswith('#') or line.startswith('%'):
219 | # Skip comments
220 | continue
221 | # Trim in-line comments
222 | if line.find('#') > 0:
223 | line = line[:line.find('#')]
224 | if line.find('%') > 0:
225 | line = line[:line.find('%')]
226 | # Make them float
227 | pieces = line.split()
228 | pieces = [float(piece) for piece in pieces]
229 | time.append(pieces[0])
230 | h1_comp.append(pieces[1])
231 | h2_comp.append(pieces[2])
232 | ud_comp.append(pieces[3])
233 | except IOError:
234 | print("[ERROR]: error reading bbp file: %s" % (filename))
235 | sys.exit(1)
236 |
237 | # Convert to NumPy Arrays
238 | time = np.array(time)
239 | h1_comp = np.array(h1_comp)
240 | h2_comp = np.array(h2_comp)
241 | ud_comp = np.array(ud_comp)
242 |
243 | # All done!
244 | return time, h1_comp, h2_comp, ud_comp
245 | # end of read_file_bbp2
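# read_file_bbp2() expects four whitespace-separated columns per data line
# (time, H1, H2, vertical) and returns them as NumPy arrays; the file name
# below is illustrative only:
#
#   time, h1, h2, ver = read_file_bbp2("station.vel.bbp")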
246 |
247 | def read_file_bbp(filename):
248 | """
249 | This function reads timeseries data from a set of BBP files
250 | """
251 | # Get filenames for displacement, velocity and acceleration bbp files
252 | work_dir = os.path.dirname(filename)
253 | base_file = os.path.basename(filename)
254 |
255 | base_tokens = base_file.split('.')[0:-2]
256 | if not base_tokens:
257 | print("[ERROR]: Invalid BBP filename: %s" % (filename))
258 | sys.exit(1)
259 | dis_tokens = list(base_tokens)
260 | vel_tokens = list(base_tokens)
261 | acc_tokens = list(base_tokens)
262 |
263 | dis_tokens.append('dis')
264 | vel_tokens.append('vel')
265 | acc_tokens.append('acc')
266 |
267 | dis_tokens.append('bbp')
268 | vel_tokens.append('bbp')
269 | acc_tokens.append('bbp')
270 |
271 | dis_file = os.path.join(work_dir, '.'.join(dis_tokens))
272 | vel_file = os.path.join(work_dir, '.'.join(vel_tokens))
273 | acc_file = os.path.join(work_dir, '.'.join(acc_tokens))
274 |
275 | # Read 3 bbp files
276 | [time, dis_h1, dis_h2, dis_ver] = read_file_bbp2(dis_file)
277 | [_, vel_h1, vel_h2, vel_ver] = read_file_bbp2(vel_file)
278 | [_, acc_h1, acc_h2, acc_ver] = read_file_bbp2(acc_file)
279 |
280 | # Read orientation from one of the files
281 | orientation = read_orientation_bbp(vel_file)
282 |
283 | # Read padding information from one of the files
284 | padding = read_padding_bbp(vel_file)
285 |
286 | samples = dis_h1.size
287 |     delta_t = time[1] - time[0]
288 |
289 |     # samples, dt, orientation, acceleration, velocity, displacement
290 | signal_h1 = TimeseriesComponent(samples, delta_t, orientation[0],
291 | acc_h1, vel_h1, dis_h1, padding=padding)
292 | signal_h2 = TimeseriesComponent(samples, delta_t, orientation[1],
293 | acc_h2, vel_h2, dis_h2, padding=padding)
294 | signal_ver = TimeseriesComponent(samples, delta_t, orientation[2],
295 | acc_ver, vel_ver, dis_ver, padding=padding)
296 |
297 | station = [signal_h1, signal_h2, signal_ver]
298 | return station
299 | # end of read_file_bbp
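# read_file_bbp() expects the three companion BBP files to live side by side
# and to share a common prefix, e.g. (hypothetical names):
#
#   station.dis.bbp, station.vel.bbp, station.acc.bbp
#
# Passing any one of them reads all three; orientation and padding are taken
# from the velocity file's header, and the result unpacks into the three
# TimeseriesComponent objects:
#
#   h1, h2, ver = read_file_bbp("station.acc.bbp")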
300 |
301 | def read_file_her(filename):
302 | """
303 |     This function reads a 10-column .her file.
304 |     Returns a list of signals, one for each orientation.
305 | """
306 | time, dis_ns, dis_ew, dis_up = [np.array([], float) for _ in range(4)]
307 | vel_ns, vel_ew, vel_up = [np.array([], float) for _ in range(3)]
308 | acc_ns, acc_ew, acc_up = [np.array([], float) for _ in range(3)]
309 |
310 | try:
311 | (time, dis_ns, dis_ew, dis_up, vel_ns, vel_ew,
312 | vel_up, acc_ns, acc_ew, acc_up) = np.loadtxt(filename,
313 | comments='#',
314 | unpack=True)
315 | except IOError:
316 | print("[ERROR]: error loading her file.")
317 | return False
318 |
319 | samples = dis_ns.size
320 |     delta_t = time[1] - time[0]
321 |
322 | # samples, dt, orientation, acceleration, velocity, displacement
323 |     # the orientation values for .her files are hardcoded here (NS=0.0, EW=90.0, UP)
324 | signal_ns = TimeseriesComponent(samples, delta_t, 0.0,
325 | acc_ns, vel_ns, dis_ns)
326 | signal_ew = TimeseriesComponent(samples, delta_t, 90.0,
327 | acc_ew, vel_ew, dis_ew)
328 | signal_up = TimeseriesComponent(samples, delta_t, "UP",
329 | acc_up, vel_up, dis_up)
330 |
331 | station = [signal_ns, signal_ew, signal_up]
332 | return station
333 | # end of read_file_her
334 |
335 | def read_unit_bbp(filename):
336 | """
337 | Get the units from the file's header
338 | Returns either "m" or "cm"
339 | """
340 | units = None
341 |
342 | try:
343 | input_file = open(filename, 'r')
344 | for line in input_file:
345 | if line.find("units=") > 0:
346 | units = line.split()[2]
347 | break
348 | input_file.close()
349 | except IOError:
350 | print("[ERROR]: No such file.")
351 | sys.exit(-1)
352 |
353 | # Make sure we got something
354 | if units is None:
355 | print("[ERROR]: Cannot find units in bbp file!")
356 | sys.exit(-1)
357 |
358 | # Figure out if we have meters or centimeters
359 | if units == "cm" or units == "cm/s" or units == "cm/s^2":
360 | return "cm"
361 | elif units == "m" or units == "m/s" or units == "m/s^2":
362 | return "m"
363 |
364 | # Invalid units in this file
365 | print("[ERROR]: Cannot parse units in bbp file!")
366 | sys.exit(-1)
367 | # end of read_unit_bbp
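# read_unit_bbp() looks for a header line of the form written elsewhere in
# this module, e.g.:
#
#   # units= cm/s
#
# and collapses cm, cm/s and cm/s^2 to "cm" (and the metric equivalents to
# "m"); anything else is treated as an error.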
368 |
369 | def read_padding_bbp(filename):
370 | """
371 | Get the padding information from a BBP file's header
372 | """
373 | padding = 0
374 |
375 | try:
376 | input_file = open(filename, 'r')
377 | for line in input_file:
378 | if line.find("padding=") > 0:
379 | line = line.strip()
380 | padding = line[(line.find("=") + 1):]
381 | padding = int(float(padding))
382 | break
383 | input_file.close()
384 | except IOError:
385 | print("[ERROR]: No such file.")
386 | sys.exit(-1)
387 |
388 | # All done!
389 | return padding
390 | # end of read_padding_bbp
391 |
392 | def read_orientation_bbp(filename):
393 | """
394 | Get the orientation from the file's header
395 | """
396 | orientation = None
397 |
398 | try:
399 | input_file = open(filename, 'r')
400 | for line in input_file:
401 | if line.find("orientation=") > 0:
402 | line = line.strip()
403 | orientation = line[(line.find("=") + 1):]
404 | orientation = orientation.strip().split(",")
405 | orientation = [val.strip() for val in orientation]
406 | orientation[0] = float(orientation[0])
407 | orientation[1] = float(orientation[1])
408 | orientation[2] = orientation[2].lower()
409 | if orientation[2] != "up" and orientation[2] != "down":
410 | print("[ERROR]: Vertical orientation must be up or down!")
411 | sys.exit(-1)
412 | break
413 | input_file.close()
414 | except IOError:
415 | print("[ERROR]: No such file.")
416 | sys.exit(-1)
417 |
418 | # Make sure we got something
419 | if orientation is None:
420 | print("[ERROR]: Cannot find orientation in bbp file: %s!" % (filename))
421 | sys.exit(-1)
422 |
423 | # All done!
424 | return orientation
425 | # end of read_orientation_bbp
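# read_orientation_bbp() parses a header line such as the one write_bbp()
# emits, e.g.:
#
#   # orientation= 0,90,up
#
# returning [0.0, 90.0, 'up']; the vertical entry must be "up" or "down".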
426 |
427 | def read_stamp(filename):
428 | """
429 |     Get the time stamp from the file's header
430 | """
431 |     if filename.lower().endswith(".bbp"):
432 | # File in bbp format
433 | return read_stamp_bbp(filename)
434 | # Otherwise use hercules format
435 | return read_stamp_her(filename)
436 | # end of read_stamp
437 |
438 | def read_stamp_bbp(filename):
439 | """
440 | Get the time stamp from the bbp file's header
441 | """
442 |     stamp = []
443 |     try:
444 |         input_file = open(filename, 'r')
445 |         for line in input_file:
446 |             if line.find("time=") > 0:
447 |                 stamp = line.split()[2].split(',')[-1].split(':')
448 | input_file.close()
449 | except IOError:
450 | print("[ERROR]: No such file.")
451 | return []
452 |
453 | # Converting time stamps to floats
454 | stamp = [float(i) for i in stamp]
455 | return stamp
456 | # end of read_stamp_bbp
457 |
458 | def read_stamp_her(filename):
459 | """
460 | Get the time stamp from the her file's header
461 | """
462 | try:
463 | with open(filename) as input_file:
464 | try:
465 | header = input_file.readline().split()
466 | stamp = header[4].split(',')[-1].split(':')
467 | input_file.close()
468 | except IndexError:
469 | print("[ERROR]: missing time stamp.")
470 | return []
471 | except IOError:
472 | print("[ERROR]: No such file.")
473 | return []
474 |
475 | # converting time stamps to floats
476 | for i in range(0, len(stamp)):
477 | stamp[i] = float(stamp[i])
478 | return stamp
479 | # end of read_stamp_her
480 |
481 | # ================================ WRITING ==================================
482 | def write_hercules(filename, station):
483 |     """Writes a station's timeseries to a 10-column Hercules (.her) text file"""
484 | try:
485 | out_f = open(filename, 'w')
486 | except IOError as e:
487 |         print(e); return
488 | dis_ns = station[0].dis.tolist()
489 | vel_ns = station[0].vel.tolist()
490 | acc_ns = station[0].acc.tolist()
491 | dis_ew = station[1].dis.tolist()
492 | vel_ew = station[1].vel.tolist()
493 | acc_ew = station[1].acc.tolist()
494 | dis_up = station[2].dis.tolist()
495 | vel_up = station[2].vel.tolist()
496 | acc_up = station[2].acc.tolist()
497 |
498 | # get a list of time incremented by dt
499 | time = [0.000]
500 | samples = station[0].samples
501 | dt = station[0].dt
502 | tmp = samples
503 |
504 | while tmp > 1:
505 | time.append(time[len(time)-1] + dt)
506 | tmp -= 1
507 |
508 | out_f.write('# missing header \n')
509 |
510 | descriptor = '{:>12}' + ' {:>12}'*9 + '\n'
511 | out_f.write(descriptor.format("# time",
512 | "dis_ns", "dis_ew", "dis_up",
513 | "vel_ns", "vel_ew", "vel_up",
514 | "acc_ns", "acc_ew", "acc_up")) # header
515 |
516 | descriptor = '{:>12.3f}' + ' {:>12.7f}'*9 + '\n'
517 | for c0, c1, c2, c3, c4, c5, c6, c7, c8, c9 in zip(time,
518 | dis_ns, dis_ew, dis_up,
519 | vel_ns, vel_ew, vel_up,
520 | acc_ns, acc_ew, acc_up):
521 | out_f.write(descriptor.format(c0, c1, c2, c3, c4, c5, c6, c7, c8, c9))
522 | out_f.close()
523 | # end of write_hercules
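# write_hercules() produces the 10-column layout read back by read_file_her():
# time, dis_ns, dis_ew, dis_up, vel_ns, vel_ew, vel_up, acc_ns, acc_ew, acc_up.
# A hypothetical round trip (file names are illustrative only):
#
#   station = read_file_her("station.her")
#   write_hercules("station-copy.her", station)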
524 |
525 | def write_bbp(input_file, output_file, station, params={}):
526 | """
527 | This function generates processed .bbp files for
528 | each of velocity/acceleration/displacement
529 | and copies the header of the input bbp file
530 | """
531 | output_dir = os.path.dirname(output_file)
532 | output_basename = os.path.basename(output_file)
533 |
534 | # Prepare data for output
535 | acc_h1 = station[0].acc.tolist()
536 | vel_h1 = station[0].vel.tolist()
537 | dis_h1 = station[0].dis.tolist()
538 | acc_h2 = station[1].acc.tolist()
539 | vel_h2 = station[1].vel.tolist()
540 | dis_h2 = station[1].dis.tolist()
541 | acc_ver = station[2].acc.tolist()
542 | vel_ver = station[2].vel.tolist()
543 | dis_ver = station[2].dis.tolist()
544 |
545 | # Start with time = 0.0
546 | time = [0.000]
547 | samples = station[0].samples
548 | while samples > 1:
549 | time.append(time[len(time)-1] + station[0].dt)
550 | samples -= 1
551 |
552 | # Prepare to output
553 | out_data = [['dis', dis_h1, dis_h2, dis_ver, 'displacement', 'cm'],
554 | ['vel', vel_h1, vel_h2, vel_ver, 'velocity', 'cm/s'],
555 | ['acc', acc_h1, acc_h2, acc_ver, 'acceleration', 'cm/s^2']]
556 |
557 | for data in out_data:
558 | if not output_basename.endswith('.bbp'):
559 | # Remove extension
560 | bbp_output_basename = os.path.splitext(output_basename)[0]
561 | bbp_output_filename = os.path.join(output_dir,
562 | "%s.%s.bbp" %
563 | (bbp_output_basename,
564 | data[0]))
565 | output_header = ["# Station= NoName",
566 | "# time= 00/00/00,00:00:00.00 UTC",
567 | "# lon= 0.00",
568 | "# lat= 0.00",
569 | "# units= %s" % (data[5]),
570 | "# padding= %d" % (station[0].padding),
571 | "# orientation= %s" % (",".join([str(int(station[0].orientation)),
572 | str(int(station[1].orientation)),
573 | station[2].orientation])),
574 | "#",
575 | "# Data fields are TAB-separated",
576 | "# Column 1: Time (s)",
577 | "# Column 2: H1 component ground "
578 | "%s (+ is %s)" % (data[4],
579 | str(int(station[0].orientation))),
580 | "# Column 3: H2 component ground "
581 | "%s (+ is %s)" % (data[4],
582 | str(int(station[1].orientation))),
583 | "# Column 4: V component ground "
584 | "%s (+ is %s)" % (data[4], station[2].orientation),
585 | "#"]
586 | else:
587 | # Read header of input file
588 | input_dirname = os.path.dirname(input_file)
589 | input_basename = os.path.basename(input_file)
590 | pieces = input_basename.split('.')
591 | pieces = pieces[0:-2]
592 | bbp_input_file = os.path.join(input_dirname,
593 | "%s.%s.bbp" %
594 | ('.'.join(pieces),
595 | data[0]))
596 | input_header = []
597 | in_fp = open(bbp_input_file, 'r')
598 | for line in in_fp:
599 | line = line.strip()
600 | if line.startswith("#"):
601 | input_header.append(line)
602 | in_fp.close()
603 |
604 | # Compose new header
605 | output_header = []
606 | for item in input_header:
607 | if item.find("units=") > 0:
608 | output_header.append("# units= %s" % (data[5]))
609 | elif item.find("orientation=") > 0:
610 | output_header.append("# orientation= %s" % (",".join([str(int(station[0].orientation)),
611 | str(int(station[1].orientation)),
612 | station[2].orientation])))
613 | elif item.find("lp=") > 0:
614 | if 'lp' in params and params['lp'] is not None:
615 | output_header.append("# lp= %.2f" % (params['lp']))
616 | else:
617 | output_header.append(item)
618 | elif item.find("hp=") > 0:
619 | if 'hp' in params and params['hp'] is not None:
620 | output_header.append("# hp= %.2f" % (params['hp']))
621 | else:
622 | output_header.append(item)
623 | elif item.find("padding=") > 0:
624 | output_header.append("# padding= %d" % (station[0].padding))
625 | elif item.find("Column 2") > 0:
626 | output_header.append("# Column 2: H1 component ground "
627 | "%s (+ is %s)" % (data[4],
628 | str(int(station[0].orientation))))
629 | elif item.find("Column 3") > 0:
630 | output_header.append("# Column 3: H2 component ground "
631 | "%s (+ is %s)" % (data[4],
632 | str(int(station[1].orientation))))
633 | elif item.find("Column 4") > 0:
634 | output_header.append("# Column 4: V component ground "
635 | "%s (+ is %s)" % (data[4], station[2].orientation))
636 | else:
637 | output_header.append(item)
638 |
639 | pieces = output_basename.split('.')
640 | pieces = pieces[0:-2]
641 | bbp_output_filename = os.path.join(output_dir,
642 | "%s.%s.bbp" %
643 | ('.'.join(pieces),
644 | data[0]))
645 | # Write output file
646 | try:
647 | out_fp = open(bbp_output_filename, 'w')
648 | except IOError as e:
649 | print(e)
650 | continue
651 |
652 | # Write header
653 | for item in output_header:
654 | out_fp.write("%s\n" % (item))
655 |
656 | # Write timeseries
657 | for val_time, val_ns, val_ew, val_ud in zip(time, data[1],
658 | data[2], data[3]):
659 | out_fp.write("%5.7f %5.9e %5.9e %5.9e\n" %
660 | (val_time, val_ns, val_ew, val_ud))
661 |
662 | # All done, close file
663 | out_fp.close()
664 | print("[WRITING]: %s" % (bbp_output_filename))
665 | # end of write_bbp
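# A hypothetical end-to-end sketch using the readers and writer above; the
# file names and the lp/hp values are illustrative only:
#
#   station = read_file("input/station.acc.bbp")
#   write_bbp("input/station.acc.bbp", "output/station.acc.bbp",
#             station, params={'lp': 0.1, 'hp': 25.0})
#
# This writes output/station.dis.bbp, .vel.bbp and .acc.bbp, copying the
# input headers and updating the units, orientation, padding and column
# descriptions (the lp=/hp= lines are rewritten only if they already appear
# in the input header and the corresponding values are passed in params).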
666 |
--------------------------------------------------------------------------------