├── .gitignore ├── MANIFEST.in ├── doc ├── output_7_1.png └── syntax.rev00.png ├── alien ├── getName.py ├── nearest_idx.py ├── unique_counts.py ├── TextArt.py ├── __init__.py ├── dtrange.py ├── AnsiFormatter.py ├── TimeSeries.py ├── read_hdf5.py ├── read_hdf4.py ├── upscale.py ├── collection.py ├── LOGGER.py └── GridCoordinates.py ├── config ├── __init__.py ├── parse_fname_trmm.py ├── parse_fname_gpm.py ├── get_cache_dir.py ├── LICENSE.md ├── gpm_data.py ├── get_location_trmm.py ├── setup.py ├── granule2map.py ├── get_location_gpm.py ├── get_dtime_trmm.py ├── get_dtime_gpm.py ├── get_path.py ├── search_granules.py ├── get_gtrack_dim.py ├── write_to_nc.py ├── __main__.py ├── gpm.py └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include config 2 | -------------------------------------------------------------------------------- /doc/output_7_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kimlab/GPyM/HEAD/doc/output_7_1.png -------------------------------------------------------------------------------- /doc/syntax.rev00.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kimlab/GPyM/HEAD/doc/syntax.rev00.png -------------------------------------------------------------------------------- /alien/getName.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | def getFuncName(): 4 | return sys._getframe(1).f_code.co_name 5 | 6 | def getCallerName(): 7 | return sys._getframe(2).f_code.co_name 8 | 9 | -------------------------------------------------------------------------------- /alien/nearest_idx.py: 
-------------------------------------------------------------------------------- 1 | def nearest_idx(aSrc,val): 2 | ''' return nearest index ''' 3 | if hasattr(val,'__iter__'): return [abs(aSrc-v).argmin() for v in val] 4 | else: return abs(aSrc-val).argmin() 5 | -------------------------------------------------------------------------------- /alien/unique_counts.py: -------------------------------------------------------------------------------- 1 | from numpy import bincount, nonzero, array 2 | 3 | def unique_counts(aSrc): 4 | ''' 5 | aSrc : 1d-array 6 | 7 | ### numpy v1.9 included faster implimentation @ np.unique 8 | ''' 9 | print(aSrc) 10 | 11 | bincnt = bincount(aSrc) 12 | elements = nonzero(bincnt)[0] 13 | 14 | return array( zip( bincnt, elements ) ).T 15 | -------------------------------------------------------------------------------- /config: -------------------------------------------------------------------------------- 1 | [Defaults] 2 | dataroot : 3 | 4 | hdf4_module : GPyM.alien.read_hdf4.read_hdf4 ; absolute path of function 5 | hdf5_module : GPyM.alien.read_hdf5.read_hdf5 ; absolute path of function 6 | 7 | cached : cached ; ['cached', 'cached-verbose', 'skip', 'update'] 8 | cache_dir : %(dataroot)s/cache.dim 9 | 10 | compression : False ; [False, 'lz4'] 11 | 12 | #product_ver : 02 13 | 14 | gportal_id : 15 | 16 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/python 2 | #-------------------------------------------------------------------- 3 | # PROGRAM : __init__.py 4 | # CREATED BY : hjkim @IIS.2015-07-12 10:08:32.133686 5 | # MODIFED BY : 6 | # 7 | # USAGE : $ ./__init__.py 8 | # 9 | # DESCRIPTION: 10 | #------------------------------------------------------cf0.2@20120401 11 | 12 | 13 | import os,sys 14 | from optparse import OptionParser 15 | 16 | from .gpm import GPM 17 | from granule2map import granule2map 18 | 19 | 20 | -------------------------------------------------------------------------------- /alien/TextArt.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from .AnsiFormatter import AnsiFormatter 3 | 4 | 5 | class TextArt(object): 6 | LN = '%s\n'%('='*80) 7 | Ln = '%s\n'%('-'*80) 8 | ln = '%s\n'%('.'*80) 9 | 10 | def __init__(self): 11 | pass 12 | 13 | def __getattr__(self,name): 14 | 15 | if name in AnsiFormatter.FOREGROUND.keys(): 16 | return AnsiFormatter(name) 17 | 18 | else: 19 | raise AttributeError 20 | 21 | def cprint(self,sOut,color): 22 | sys.stdout.write( AnsiFormatter(color)+sOut ) 23 | sys.stdout.write( '\n' ) 24 | 25 | -------------------------------------------------------------------------------- /parse_fname_trmm.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/python 2 | #-------------------------------------------------------------------- 3 | # PROGRAM : parse_fname.py 4 | # CREATED BY : hjkim @IIS.2015-07-13 13:04:18.212930 5 | # MODIFED BY : 6 | # 7 | # USAGE : $ ./parse_fname.py 8 | # 9 | # DESCRIPTION: 10 | #------------------------------------------------------cf0.2@20120401 11 | 12 | 13 | import os, sys, re 14 | from optparse import OptionParser 15 | 16 | from datetime import datetime, timedelta 17 | 18 | 19 | def parse_fname_trmm(fName, ATTR): 20 | ''' 21 | fName : TRMM HDF filename 22 | ATTR : list of attributes (i.e., 'sDTime' and/or 'eDTime') 23 | ''' 24 | 25 | sDTime = datetime.strptime( re.findall(r'\d{8}', fName)[0], '%Y%m%d' ) 26 | 27 | offset = timedelta( seconds=86400 ) 28 | 29 | dictFunc= {'sDTime': sDTime, 30 | 'eDTime': sDTime+offset, 31 | } 32 | 33 | return [dictFunc[attr] for attr in ATTR] 34 | 35 | 36 | -------------------------------------------------------------------------------- /parse_fname_gpm.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/python 2 | #-------------------------------------------------------------------- 3 | # PROGRAM : parse_fname.py 4 | # CREATED BY : hjkim @IIS.2015-07-13 13:04:18.212930 5 | # MODIFED BY : 6 | # 7 | # USAGE : $ ./parse_fname.py 8 | # 9 | # DESCRIPTION: 10 | #------------------------------------------------------cf0.2@20120401 11 | 12 | 13 | import os,sys 14 | from optparse import OptionParser 15 | 16 | from datetime import datetime, timedelta 17 | 18 | 19 | def parse_fname_gpm(fName, ATTR): 20 | ''' 21 | fName : GPM HDF path 22 | ATTR : list of attributes (i.e., 'sDTime' and/or 'eDTime') 23 | ''' 24 | 25 | fName = fName.split('_') 26 | 27 | dictFunc= {'sDTime': datetime.strptime(fName[2], '%y%m%d%H%M'), 28 | 'eDTime': datetime.strptime(fName[2][:6]+fName[3], '%y%m%d%H%M') 29 | } 30 | 31 | if dictFunc['eDTime'] < dictFunc['sDTime']: 32 | dictFunc['eDTime'] += timedelta( days=1 ) 33 | 34 | return [dictFunc[attr] for attr in ATTR] 35 | 36 | -------------------------------------------------------------------------------- /get_cache_dir.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/python 2 | #-------------------------------------------------------------------- 3 | # PROGRAM : get_cache_dir.py 4 | # CREATED BY : hjkim @IIS.2015-07-13 13:09:03.418307 5 | # MODIFED BY : 6 | # 7 | # USAGE : $ ./get_cache_dir.py 8 | # 9 | # DESCRIPTION: 10 | #------------------------------------------------------cf0.2@20120401 11 | 12 | 13 | import os,sys 14 | from optparse import OptionParser 15 | 16 | 17 | def main(args,opts): 18 | print(args) 19 | print(opts) 20 | 21 | return 22 | 23 | 24 | if __name__=='__main__': 25 | usage = 'usage: %prog [options] arg' 26 | version = '%prog 1.0' 27 | 28 | parser = OptionParser(usage=usage,version=version) 29 | 30 | # parser.add_option('-r','--rescan',action='store_true',dest='rescan', 31 | # help='rescan all directory to find missing file') 32 | 33 | (options,args) = parser.parse_args() 34 | 35 | # if len(args) == 0: 36 | # parser.print_help() 37 | # else: 38 | # main(args,options) 39 | 40 | # LOG = LOGGER() 41 | main(args,options) 42 | 43 | 44 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | MIT License (MIT) 2 | 3 | Copyright (c) 2016 Hyungjun Kim 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /alien/__init__.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python 2 | #-------------------------------------------------------------------- 3 | # PROGRAM : __init__.py 4 | # CREATED BY : hjkim @IIS.2016-06-01 10:44:37.888292 5 | # MODIFED BY : 6 | # 7 | # USAGE : $ ./__init__.py 8 | # 9 | # DESCRIPTION: 10 | #------------------------------------------------------cf0.2@20120401 11 | 12 | 13 | import os,sys 14 | from optparse import OptionParser 15 | from .LOGGER import * 16 | 17 | 18 | @ETA 19 | def main(args,opts): 20 | print (args) 21 | print (opts) 22 | 23 | return 24 | 25 | 26 | if __name__=='__main__': 27 | usage = 'usage: %prog [options] arg' 28 | version = '%prog 1.0' 29 | 30 | parser = OptionParser(usage=usage,version=version) 31 | 32 | # parser.add_option('-r','--rescan',action='store_true',dest='rescan', 33 | # help='rescan all directory to find missing file') 34 | 35 | (options,args) = parser.parse_args() 36 | 37 | # if len(args) == 0: 38 | # parser.print_help() 39 | # else: 40 | # main(args,options) 41 | 42 | # LOG = LOGGER() 43 | main(args,options) 44 | 45 | 46 | -------------------------------------------------------------------------------- /alien/dtrange.py: -------------------------------------------------------------------------------- 1 | #from pylab import * 2 | from datetime import datetime,timedelta 3 | 4 | 5 | 
def dtrange(sDTime,eDTime,delTime): 6 | ''' 7 | TODO: add dtxrange 8 | 9 | delTime : object or 10 | 'XXw : XX of unitTime, 'w' means 'week' ['y','m','w','d','h'] 11 | ''' 12 | if type(delTime) == str: 13 | tCnt, tType = int(delTime[:-1]), delTime[-1] 14 | 15 | 16 | if tType in ['y','m']: 17 | if tType == 'y': tCnt *=12 18 | 19 | DTime = [] 20 | nMon = sDTime.month-1 21 | cDTime = sDTime 22 | while cDTime < eDTime: 23 | DTime.append(cDTime) 24 | nMon += 1 25 | 26 | cDTime = datetime.combine( date(sDTime.year+nMon//12, 27 | nMon%12+1, 28 | sDTime.day), 29 | sDTime.time() ) 30 | 31 | return DTime[::tCnt] 32 | 33 | else: 34 | delTime = timedelta( seconds=tCnt*{'w':86400*7, 'd':86400, 'h':3600}[tType] ) 35 | 36 | return [sDTime+delTime*i 37 | for i in range(int((eDTime-sDTime).total_seconds()/delTime.total_seconds()))] 38 | 39 | -------------------------------------------------------------------------------- /alien/AnsiFormatter.py: -------------------------------------------------------------------------------- 1 | class AnsiFormatter( object ): 2 | """ 3 | c = AnsiFormatter( 'cyan' ) 4 | r = AnsiFormatter( 'red' ) 5 | y = AnsiFormatter( 'yellow' ) 6 | m = AnsiFormatter( 'magenta' ) 7 | c + 'hello ?' 8 | prints 'hello ?' with cyan color 9 | 10 | contribution by H.T. 
Kim @ 20120610 11 | """ 12 | 13 | FOREGROUND = dict( 14 | black = 30, k = 30, 15 | red = 31, r = 31, 16 | green = 32, g = 32, 17 | yellow = 33, y = 33, 18 | blue = 34, b = 34, 19 | magenta = 35, m = 35, 20 | cyan = 36, c = 36, 21 | white = 37, w = 37, 22 | reset = 39, 23 | ) 24 | 25 | BACKGROUND = dict( 26 | black = 40, 27 | red = 41, 28 | green = 42, 29 | yellow = 43, 30 | blue = 44, 31 | magenta = 45, 32 | cyan = 46, 33 | white = 47, 34 | reset = 49, 35 | ) 36 | 37 | def __init__( self, foreground ): 38 | self.fore = self.FOREGROUND.get( foreground ) 39 | self.back = self.BACKGROUND.get( foreground ) 40 | if not self.fore: raise Exception( "Couldn't understand the name", foreground ) 41 | 42 | def __add__( self, other ): 43 | return '\033[%sm%s\033[0m' % ( self.fore, other ) 44 | -------------------------------------------------------------------------------- /gpm_data.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python 2 | #-------------------------------------------------------------------- 3 | # PROGRAM : gpm_data.py 4 | # CREATED BY : hjkim @IIS.2015-07-13 11:55:57.445607 5 | # MODIFED BY : 6 | # 7 | # USAGE : $ ./gpm_data.py 8 | # 9 | # DESCRIPTION: 10 | #------------------------------------------------------cf0.2@20120401 11 | 12 | 13 | import os,sys 14 | from optparse import OptionParser 15 | 16 | from datetime import datetime 17 | 18 | from write_to_nc import WriteNC 19 | 20 | 21 | class GPM_data( WriteNC ): 22 | 23 | def __init__(self): 24 | self.srcPath = [] 25 | self.recLen = [] 26 | self.lat = [] 27 | self.lon = [] 28 | self.dtime = [] 29 | self.tbound = [] 30 | self.data = [] 31 | self.griddata = [] 32 | self.grid = [] 33 | 34 | self.torigin = datetime( 1901,1,1) 35 | self.missing_value = -9999.9 36 | 37 | 38 | def tofile(self, outpath, filetype='nc'): 39 | 40 | iofunc = { 'nc': self.toncdf, 41 | } 42 | 43 | if filetype not in iofunc: 44 | raise TypeError('%s is not supported yet.'%filetype) 45 
| 46 | iofunc[ filetype ]( outpath ) 47 | 48 | 49 | 50 | -------------------------------------------------------------------------------- /get_location_trmm.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python 2 | #-------------------------------------------------------------------- 3 | # PROGRAM : get_location.py 4 | # CREATED BY : hjkim @IIS.2015-07-13 13:08:32.265898 5 | # MODIFED BY : 6 | # 7 | # USAGE : $ ./get_location.py 8 | # 9 | # DESCRIPTION: 10 | #------------------------------------------------------cf0.2@20120401 11 | 12 | 13 | import os,sys 14 | from optparse import OptionParser 15 | 16 | from numpy import array 17 | 18 | 19 | def get_location_trmm(srcPath, fn_read): 20 | 21 | Lat = fn_read( srcPath, 'Latitude' ) 22 | Lon = fn_read( srcPath, 'Longitude' ) 23 | 24 | return array( [Lat, Lon] ) 25 | 26 | 27 | 28 | def main(args,opts): 29 | print (args) 30 | print (opts) 31 | 32 | return 33 | 34 | 35 | if __name__=='__main__': 36 | usage = 'usage: %prog [options] arg' 37 | version = '%prog 1.0' 38 | 39 | parser = OptionParser(usage=usage,version=version) 40 | 41 | # parser.add_option('-r','--rescan',action='store_true',dest='rescan', 42 | # help='rescan all directory to find missing file') 43 | 44 | (options,args) = parser.parse_args() 45 | 46 | # if len(args) == 0: 47 | # parser.print_help() 48 | # else: 49 | # main(args,options) 50 | 51 | # LOG = LOGGER() 52 | main(args,options) 53 | 54 | 55 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from distutils.core import setup 2 | 3 | ''' 4 | gpmDir = '' 5 | 6 | if gpmDir == '': 7 | print gpmDir 8 | raise ValueError('gpmDir should be specificed') 9 | 10 | setupFile = open('settings.py','w') 11 | setupFile.write('baseDir = "%s"\n'%gpmDir) 12 | setupFile.close() 13 | ''' 14 | 15 | setup( name = 'GPyM', 16 | version = '0.60b', 17 
| description = 'GPM Python Module', 18 | long_description = ''' long_description to be written. ''', 19 | 20 | classifiers = [ 21 | 'Development Status :: 4 - Beta', 22 | 'License :: OSI Approved :: MIT License', 23 | 'Programming Language :: Python :: 2.7', 24 | 'Topic :: Scientific/Engineering :: Atmospheric Science', 25 | ], 26 | keywords = 'precipitation satellite gpm trmm jaxa', 27 | url = 'https://github.com/kimlab/GPyM', 28 | author = 'Hyungjun Kim', 29 | author_email = 'hyungjun@gmail.com', 30 | license = 'MIT', 31 | 32 | package_dir = {'GPyM':''}, 33 | packages = ['GPyM','GPyM.alien'], 34 | package_data = {'': ['config'], 35 | }, 36 | install_requires = ['numpy'], 37 | include_package_data = True, 38 | zip_safe = True, 39 | ) 40 | -------------------------------------------------------------------------------- /granule2map.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python 2 | #-------------------------------------------------------------------- 3 | # PROGRAM : granule2map.py 4 | # CREATED BY : hjkim @IIS.2015-07-13 11:56:07.989735 5 | # MODIFED BY : 6 | # 7 | # USAGE : $ ./granule2map.py 8 | # 9 | # DESCRIPTION: 10 | #------------------------------------------------------cf0.2@20120401 11 | 12 | 13 | import os,sys 14 | from optparse import OptionParser 15 | 16 | from numpy import zeros, ma 17 | 18 | from alien.upscale import upscale 19 | from alien.nearest_idx import nearest_idx 20 | from alien.GridCoordinates import GridCoordinates 21 | 22 | 23 | def granule2map(lat, lon, aSrc, BBox=None, res=0.1, verbose=True): 24 | ''' 25 | res : out resolution only support n-fold of 0.01 deg 26 | ''' 27 | 28 | Grid = GridCoordinates('^001',BBox=BBox) # default mapCode:^001 29 | 30 | aOut = zeros( (Grid.lat.size,Grid.lon.size), 'float32' )-9999.9 31 | 32 | yIdx = nearest_idx(Grid.lat, lat.flatten()) 33 | xIdx = nearest_idx(Grid.lon, lon.flatten()) 34 | 35 | aOut[yIdx, xIdx] = aSrc.flatten() 36 | 37 | nFold = 
int( res/Grid.res ) 38 | 39 | aOut = upscale(aOut, (Grid.lat.size/nFold, Grid.lon.size/nFold), mode='m', missing=-9999.9) 40 | #aOut = upscale(aOut, (Grid.lat.size/nFold, Grid.lon.size/nFold), mode='s', missing=-9999.9) 41 | 42 | if verbose: 43 | print('\t[GRANULE2MAP] Domain:%s %s -> %s'%( BBox, aSrc.shape, aOut.shape)) 44 | 45 | return aOut 46 | 47 | 48 | -------------------------------------------------------------------------------- /get_location_gpm.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python 2 | #-------------------------------------------------------------------- 3 | # PROGRAM : get_location.py 4 | # CREATED BY : hjkim @IIS.2015-07-13 13:08:32.265898 5 | # MODIFED BY : 6 | # 7 | # USAGE : $ ./get_location.py 8 | # 9 | # DESCRIPTION: 10 | #------------------------------------------------------cf0.2@20120401 11 | 12 | 13 | import os,sys 14 | from optparse import OptionParser 15 | 16 | from numpy import array 17 | 18 | 19 | def get_location_gpm(srcPath, fn_read): 20 | 21 | if 'GMI' in srcPath : h5Grp = 'S1' 22 | elif 'DPR' in srcPath : h5Grp = 'NS' 23 | elif 'KuPR' in srcPath : h5Grp = 'NS' 24 | elif 'KaPR' in srcPath : h5Grp = 'MS' 25 | else: 26 | raise ValueError('unknown hdf5 group [%s] for %s'%(h5Grp, srcPath)) 27 | 28 | Lat = fn_read( srcPath, '%s/Latitude'%h5Grp ) 29 | Lon = fn_read( srcPath, '%s/Longitude'%h5Grp ) 30 | 31 | return array( [Lat, Lon] ) 32 | 33 | 34 | 35 | def main(args,opts): 36 | print (args) 37 | print (opts) 38 | 39 | return 40 | 41 | 42 | if __name__=='__main__': 43 | usage = 'usage: %prog [options] arg' 44 | version = '%prog 1.0' 45 | 46 | parser = OptionParser(usage=usage,version=version) 47 | 48 | # parser.add_option('-r','--rescan',action='store_true',dest='rescan', 49 | # help='rescan all directory to find missing file') 50 | 51 | (options,args) = parser.parse_args() 52 | 53 | # if len(args) == 0: 54 | # parser.print_help() 55 | # else: 56 | # main(args,options) 57 | 
58 | # LOG = LOGGER() 59 | main(args,options) 60 | 61 | 62 | -------------------------------------------------------------------------------- /alien/TimeSeries.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python 2 | #-------------------------------------------------------------------- 3 | # PROGRAM : bin_bytbound.py 4 | # CREATED BY : hjkim @IIS.2015-07-13 09:05:31.202880 5 | # MODIFED BY : 6 | # 7 | # USAGE : $ ./bin_bytbound.py 8 | # 9 | # DESCRIPTION: 10 | #------------------------------------------------------cf0.2@20120401 11 | 12 | 13 | import os,sys 14 | from optparse import OptionParser 15 | from .LOGGER import * 16 | 17 | import bisect 18 | 19 | def bin_bytbound( DTime, dtBnd, aSrc=None ): 20 | ''' 21 | return Indexer if aSrc == None 22 | else binned aSrc 23 | ''' 24 | 25 | searchidx = bisect.bisect_left 26 | Idx = (searchidx( DTime, bnd ) for bnd in dtBnd) 27 | 28 | if aSrc == None: 29 | Idx = list(Idx) 30 | return map(None, Idx[:-1], Idx[1:]) 31 | 32 | else: 33 | sIdx = Idx.next() 34 | 35 | aOut = [] 36 | for eIdx in Idx: 37 | if sIdx == eIdx : continue 38 | 39 | aOut.append( aSrc[sIdx:eIdx] ) 40 | sIdx = eIdx 41 | return aOut 42 | 43 | 44 | @ETA 45 | def main(args,opts): 46 | print (args) 47 | print (opts) 48 | 49 | return 50 | 51 | 52 | if __name__=='__main__': 53 | usage = 'usage: %prog [options] arg' 54 | version = '%prog 1.0' 55 | 56 | parser = OptionParser(usage=usage,version=version) 57 | 58 | # parser.add_option('-r','--rescan',action='store_true',dest='rescan', 59 | # help='rescan all directory to find missing file') 60 | 61 | (options,args) = parser.parse_args() 62 | 63 | # if len(args) == 0: 64 | # parser.print_help() 65 | # else: 66 | # main(args,options) 67 | 68 | # LOG = LOGGER() 69 | main(args,options) 70 | 71 | 72 | -------------------------------------------------------------------------------- /alien/read_hdf5.py: 
-------------------------------------------------------------------------------- 1 | #! /usr/bin/python 2 | #-------------------------------------------------------------------- 3 | # PROGRAM : read_hdf5.py 4 | # CREATED BY : hjkim @IIS.2015-07-13 11:52:15.012270 5 | # MODIFED BY : 6 | # 7 | # USAGE : $ ./read_hdf5.py 8 | # 9 | # DESCRIPTION: 10 | #------------------------------------------------------cf0.2@20120401 11 | 12 | 13 | import os,sys 14 | from optparse import OptionParser 15 | 16 | import h5py 17 | 18 | 19 | def read_hdf5(srcPath, varName, Slice=None, verbose=True): 20 | h5 = h5py.File(srcPath, 'r') 21 | 22 | if Slice == None: Slice = slice(None,None,None) 23 | 24 | try: 25 | h5Var = h5[varName] 26 | aOut = h5Var[Slice] 27 | 28 | except: 29 | print('!'*80) 30 | print('I/O Error') 31 | print('Blank File? %s'%srcPath) 32 | print('Blank array will be returned [ %s ]'%varName) 33 | print(h5Var.shape) 34 | print(Slice) 35 | print('!'*80) 36 | 37 | raise ValueError 38 | 39 | if verbose == True: 40 | print('\t[READ_HDF5] %s [%s] -> %s'%( srcPath, varName, aOut.shape)) 41 | 42 | h5.close() 43 | 44 | return aOut 45 | 46 | 47 | def main(args,opts): 48 | print (args) 49 | print (opts) 50 | 51 | return 52 | 53 | 54 | if __name__=='__main__': 55 | usage = 'usage: %prog [options] arg' 56 | version = '%prog 1.0' 57 | 58 | parser = OptionParser(usage=usage,version=version) 59 | 60 | # parser.add_option('-r','--rescan',action='store_true',dest='rescan', 61 | # help='rescan all directory to find missing file') 62 | 63 | (options,args) = parser.parse_args() 64 | 65 | # if len(args) == 0: 66 | # parser.print_help() 67 | # else: 68 | # main(args,options) 69 | 70 | # LOG = LOGGER() 71 | main(args,options) 72 | 73 | 74 | -------------------------------------------------------------------------------- /alien/read_hdf4.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/python 2 | #-------------------------------------------------------------------- 3 | # PROGRAM : read_hdf4.py 4 | # CREATED BY : hjkim @IIS.2015-07-15 15:21:18.949532 5 | # MODIFED BY : 6 | # 7 | # USAGE : $ ./read_hdf4.py 8 | # 9 | # DESCRIPTION: 10 | #------------------------------------------------------cf0.2@20120401 11 | 12 | 13 | import os,sys 14 | from optparse import OptionParser 15 | 16 | from pyhdf import SD 17 | 18 | 19 | def read_hdf4(srcPath, varName, Slice=None, verbose=True): 20 | 21 | h4 = SD.SD(srcPath)#, SD.SDC.READ) 22 | 23 | if Slice == None: Slice = slice(None,None,None) 24 | 25 | ''' 26 | h4Var = h4.select(varName) 27 | print dir(h4Var) 28 | print h4Var.dimensions() 29 | 30 | sys.exit() 31 | ''' 32 | 33 | try: 34 | h4Var = h4.select(varName) 35 | aOut = h4Var[:][Slice] 36 | 37 | except: 38 | print('!'*80) 39 | print('I/O Error') 40 | print('Blank File? %s'%srcPath) 41 | print('Blank array will be returned [ %s ]'%varName) 42 | print(h4Var.dimensions()) 43 | print(Slice) 44 | print('!'*80) 45 | 46 | #raise ValueError 47 | 48 | 49 | if verbose == True: 50 | print('\t[READ_HDF4] %s [%s] -> %s'%( srcPath, varName, aOut.shape)) 51 | # print '\t[READ_HDF4] %s %s -> %s'%( srcPath, h4Var.dimensions(), aOut.shape) 52 | 53 | #h4.close() 54 | 55 | return aOut 56 | 57 | 58 | 59 | def main(args,opts): 60 | print (args) 61 | print (opts) 62 | 63 | return 64 | 65 | 66 | if __name__=='__main__': 67 | usage = 'usage: %prog [options] arg' 68 | version = '%prog 1.0' 69 | 70 | parser = OptionParser(usage=usage,version=version) 71 | 72 | # parser.add_option('-r','--rescan',action='store_true',dest='rescan', 73 | # help='rescan all directory to find missing file') 74 | 75 | (options,args) = parser.parse_args() 76 | 77 | # if len(args) == 0: 78 | # parser.print_help() 79 | # else: 80 | # main(args,options) 81 | 82 | # LOG = LOGGER() 83 | main(args,options) 84 | 85 | 86 | -------------------------------------------------------------------------------- 
/get_dtime_trmm.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python 2 | #-------------------------------------------------------------------- 3 | # PROGRAM : get_dtime.py 4 | # CREATED BY : hjkim @IIS.2015-07-13 13:08:39.020805 5 | # MODIFED BY : 6 | # 7 | # USAGE : $ ./get_dtime.py 8 | # 9 | # DESCRIPTION: 10 | #------------------------------------------------------cf0.2@20120401 11 | 12 | 13 | import os,sys 14 | from optparse import OptionParser 15 | 16 | from numpy import array 17 | #from datetime import datetime, timedelta 18 | 19 | 20 | def get_dtime_trmm(srcPath, func_read): 21 | 22 | Year = func_read( srcPath, 'Year' ).astype('int') 23 | Month = func_read( srcPath, 'Month' ).astype('int') 24 | Day = func_read( srcPath, 'DayOfMonth' ).astype('int') 25 | Hour = func_read( srcPath, 'Hour' ).astype('int') 26 | Minute = func_read( srcPath, 'Minute' ).astype('int') 27 | Second = func_read( srcPath, 'Second' ).astype('int') 28 | MicSec = func_read( srcPath, 'MilliSecond' ).astype('int')*1000 29 | 30 | return array( [Year, Month, Day, Hour, Minute, Second, MicSec] ).T 31 | 32 | ''' 33 | DTime = [] 34 | for y,m,d,H,M,S,uS in map(None,Year,Month,Day,Hour,Minute,Second,MicSec): 35 | 36 | if uS == 1000000: 37 | DTime.append( datetime(y,m,d,H,M,S,0)+timedelta(seconds=1) ) 38 | print 'Warning [Millisecond] == 1000 : %i %i %i %i %i %i %i'%(y,m,d,H,M,S,uS/1000) 39 | 40 | else: 41 | DTime.append( datetime(y,m,d,H,M,S,uS) ) 42 | 43 | return array( DTime ) 44 | ''' 45 | 46 | 47 | 48 | def main(args,opts): 49 | print (args) 50 | print (opts) 51 | 52 | return 53 | 54 | 55 | if __name__=='__main__': 56 | usage = 'usage: %prog [options] arg' 57 | version = '%prog 1.0' 58 | 59 | parser = OptionParser(usage=usage,version=version) 60 | 61 | # parser.add_option('-r','--rescan',action='store_true',dest='rescan', 62 | # help='rescan all directory to find missing file') 63 | 64 | (options,args) = parser.parse_args() 65 | 66 | # if 
len(args) == 0: 67 | # parser.print_help() 68 | # else: 69 | # main(args,options) 70 | 71 | # LOG = LOGGER() 72 | main(args,options) 73 | 74 | 75 | -------------------------------------------------------------------------------- /alien/upscale.py: -------------------------------------------------------------------------------- 1 | from numpy import array, ma 2 | 3 | 4 | def upscale(aSrc,newShape,mode='s',weight=None,post_weight=None,missing=None): 5 | ''' 6 | aSrc[y,x] => aSrc[*newshape] 7 | 8 | mode = [ 9 | 's', # aggregate 10 | 'ws', # weighted aggregation 11 | 'm' # mean 12 | ] 13 | ''' 14 | 15 | if weight != None: 16 | aSrc = aSrc.copy()* weight 17 | 18 | 19 | ''' 20 | modeFunc = {'s':sum, 21 | 'm':mean, 22 | }[mode] 23 | ''' 24 | 25 | if len(aSrc.shape)==3 and aSrc.shape[0]==1: 26 | aSrc.shape = aSrc.shape[1:] 27 | 28 | 29 | if all( array(newShape) > array(aSrc.shape) ): 30 | nFOLD = newShape/array(aSrc.shape) 31 | 32 | aRe = empty(newShape, dtype=aSrc.dtype) 33 | 34 | for i in range(nFOLD[0]): 35 | for j in range(nFOLD[1]): 36 | aRe[i::nFOLD[0], j::nFOLD[1]] = aSrc 37 | 38 | else: 39 | nFOLD = array(aSrc.shape)/newShape 40 | 41 | if missing == None: 42 | aRe = array([ 43 | aSrc[..., i::nFOLD[-2], j::nFOLD[-1]] 44 | for i in range(nFOLD[-2]) 45 | for j in range(nFOLD[-1]) 46 | ]) 47 | 48 | else: 49 | aSrc = ma.masked_equal(aSrc,missing) 50 | 51 | aRe = array([ 52 | aSrc.data[..., i::nFOLD[-2], j::nFOLD[-1]] 53 | for i in range(nFOLD[-2]) 54 | for j in range(nFOLD[-1]) 55 | ]) 56 | 57 | Mask= array([ 58 | aSrc.mask[..., i::nFOLD[-2], j::nFOLD[-1]] 59 | for i in range(nFOLD[-2]) 60 | for j in range(nFOLD[-1]) 61 | ]) 62 | 63 | aRe = ma.array(aRe,mask=Mask) 64 | 65 | 66 | if mode == 's': 67 | aRe = aRe.sum(0) 68 | 69 | elif mode == 'ws': 70 | weight = len(aRe)/(len(aRe)-Mask.astype('float64').sum(0)) 71 | 72 | aRe = aRe.sum(0)*weight 73 | 74 | elif mode == 'm': 75 | aRe = aRe.mean(0) 76 | 77 | else: 78 | raise IOError 79 | 80 | if missing != None: 81 | aRe = 
aRe.filled(missing) 82 | 83 | if post_weight != None: 84 | aRe *= post_weight 85 | 86 | return aRe 87 | 88 | -------------------------------------------------------------------------------- /get_dtime_gpm.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python 2 | #-------------------------------------------------------------------- 3 | # PROGRAM : get_dtime.py 4 | # CREATED BY : hjkim @IIS.2015-07-13 13:08:39.020805 5 | # MODIFED BY : 6 | # 7 | # USAGE : $ ./get_dtime.py 8 | # 9 | # DESCRIPTION: 10 | #------------------------------------------------------cf0.2@20120401 11 | 12 | 13 | import os,sys 14 | from optparse import OptionParser 15 | 16 | from numpy import array 17 | #from datetime import datetime, timedelta 18 | 19 | 20 | def get_dtime_gpm(srcPath, fn_read): 21 | 22 | if 'GMI' in srcPath : h5Grp = 'S1' 23 | elif 'DPR' in srcPath : h5Grp = 'NS' 24 | elif 'KuPR' in srcPath : h5Grp = 'NS' 25 | elif 'KaPR' in srcPath : h5Grp = 'MS' 26 | else: 27 | raise ValueError('unknown hdf5 group [%s] for %s'%(h5Grp, srcPath)) 28 | 29 | Year = fn_read( srcPath,'%s/ScanTime/Year'%h5Grp ).astype('int') 30 | Month = fn_read( srcPath,'%s/ScanTime/Month'%h5Grp ).astype('int') 31 | Day = fn_read( srcPath,'%s/ScanTime/DayOfMonth'%h5Grp ).astype('int') 32 | Hour = fn_read( srcPath,'%s/ScanTime/Hour'%h5Grp ).astype('int') 33 | Minute = fn_read( srcPath,'%s/ScanTime/Minute'%h5Grp ).astype('int') 34 | Second = fn_read( srcPath,'%s/ScanTime/Second'%h5Grp ).astype('int') 35 | MicSec = fn_read( srcPath,'%s/ScanTime/MilliSecond'%h5Grp ).astype('int')*1000 36 | 37 | return array( [Year, Month, Day, Hour, Minute, Second, MicSec] ).T 38 | 39 | ''' 40 | DTime = [] 41 | for y,m,d,H,M,S,uS in map(None,Year,Month,Day,Hour,Minute,Second,MicSec): 42 | 43 | if uS == 1000000: 44 | DTime.append( datetime(y,m,d,H,M,S,0)+timedelta(seconds=1) ) 45 | print 'Warning [NS/ScanTime/Millisecond] == 1000 : %i %i %i %i %i %i %i'%(y,m,d,H,M,S,uS/1000) 46 | 47 
def get_path(srcDir, sDTime, eDTime):
    '''
    Select GPM(hdf5) / TRMM(hdf4) granule files under srcDir whose ground
    track interval overlaps [sDTime, eDTime] and return their paths.

    srcDir layout assumed: .../<prjName>/<prdLv>/<prdVer>/<YYYY>/<MM>/<file>

    raises ValueError when sDTime == eDTime (empty interval).
    '''

    prjName, prdLv, prdVer = srcDir.split(os.path.sep)[-3:]

    parse_fname = {'TRMM': parse_fname_trmm,
                   'GPM' : parse_fname_gpm}[ prjName.split('.')[0] ]

    if sDTime == eDTime:
        raise ValueError('%s == %s'%(sDTime, eDTime))

    # enumerate every YYYY/MM directory of every year touched by the range...
    srcDIR = [os.path.join(srcDir, str(y), '%02d'%m)
                        for y in range(sDTime.year, eDTime.year+1)
                        for m in range(1,13)]

    # ...then trim the months before sDTime and after eDTime.
    # BUGFIX: the original end index `eDTime.month-12 if eDTime.month != 12
    # else 12` truncated any multi-year range ending in December to the
    # first year only; `(eDTime.month-12) or None` keeps the whole tail
    # (month==12 -> 0 -> None -> slice to the end).
    srcDIR = srcDIR[sDTime.month-1 : (eDTime.month-12) or None]

    srcPATH = []

    for srcDir in srcDIR:
        if not os.path.exists(srcDir):
            print('Warning [%s] directory does not exists!'%srcDir)
            continue

        for srcFName in sorted( os.listdir(srcDir) ):

            sdt_gtrk, edt_gtrk = parse_fname( srcFName, ['sDTime','eDTime'] )

            # keep the granule when its track interval overlaps the query
            if sDTime <= edt_gtrk and eDTime >= sdt_gtrk:
                srcPATH.append( os.path.join(srcDir, srcFName) )

    return srcPATH
class SearchGranules( object ):
    '''Mixin providing granule search over an on-disk GPM/TRMM archive.

    Expects the host class to provide self.func_read, self.cached and
    self.cacheDir (set by GPM.__init__).
    '''

    def search_granules(self, srcDir, sDTime, eDTime, BBox=[[-90,-180],[90,180]], thresh=0.001):
        '''
        BBox : [[lllat,lllon], [urlat,urlon]]   /* lat: -90 ~ 90  */
                                                /* lon: -180 ~ 180 */

        returns [[path, dtime[idx], lat[idx], lon[idx], idx], ...] for each
        granule with at least one scan line inside BBox and [sDTime,eDTime].
        '''

        srcPATH = get_path(srcDir, sDTime, eDTime)

        gtrkDim = [get_gtrack_dim(path, self.func_read, self.cached, self.cacheDir)
                            for path in srcPATH]

        # guard the empty archive case (zip(*[]) would raise)
        DTime, Lat, Lon = zip(*gtrkDim) if gtrkDim else ((), (), ())

        Granule = []
        # BUGFIX: `map(None, ...)` is a Python-2-only idiom; zip() is the
        # py3 equivalent for lockstep iteration.
        for dtime, lat, lon, path in zip(DTime, Lat, Lon, srcPATH):

            # getmaskarray guards against the scalar `False` mask returned
            # by masked_outside when nothing is clipped
            mskLat  = ma.getmaskarray( ma.masked_outside( lat, BBox[0][0], BBox[1][0] ) )
            mskLon  = ma.getmaskarray( ma.masked_outside( lon, BBox[0][1], BBox[1][1] ) )
            mskTime = ma.getmaskarray( ma.masked_outside( dtime, sDTime, eDTime) )

            # a scan line is excluded only when every pixel is outside the
            # box, or its timestamp is outside the window
            mask = (mskLat + mskLon).all(1) + mskTime

            if not mask.all():

                idx = ma.array( arange(dtime.size), 'int', mask=mask).compressed()
                Granule.append([path,
                                dtime[idx],
                                lat[idx],
                                lon[idx],
                                idx
                                ])

                print('* [V] ground track dimension (%s): %s'%(self.cached,path))

            else:
                print('* [_] ground track dimension (%s): %s'%(self.cached,path))

        summary = '| [{}] granules intersects domain {} out of [{}] total between ({}-{}) |\n' \
                  .format( len(Granule), tuple(BBox), len(srcPATH), sDTime, eDTime )

        line = '+' + '-'*len(summary[3:]) + '+\n'

        print(line + summary + line)

        return Granule
def get_gtrack_dim(srcPath, fn_read, cache=False, cache_dir=None):
    '''
    Scan a granule and return its ground-track dimensions (T, Y, X).

    srcPath   : .../<prjName>/<prdLv>/<prdVer>/<YYYY>/<MM>/<fname>
    fn_read   : callable (path, dataset_name) -> ndarray
    cache     : mode of alien.collection.cached
                ['cached', 'cached-verbose', 'skip', 'update'] or False
    cache_dir : root directory for the per-granule .npy cache files

    returns (DTime, Lat, Lon) where DTime is an ndarray of datetimes.
    '''

    # BUGFIX: the original computed `False if 'verbose' in cache else True`
    # (which raised TypeError for cache=False) and then unconditionally
    # overrode it with True.  Keep the effective historical behavior
    # (always verbose) explicit.
    verbose = True

    prjName, prdLv, prdVer, yyyy, mm, srcFName = srcPath.split(os.path.sep)[-6:]

    get_dtime, get_location = {'TRMM': [get_dtime_trmm, get_location_trmm],
                               'GPM' : [get_dtime_gpm , get_location_gpm ],
                               }[ prjName.split('.')[0] ]

    print ('+ Get Groundtrack Dimension: {}'.format( srcPath ))

    # mirror the archive layout inside the cache directory
    cache_dir = os.path.join( cache_dir, prjName, prdLv, prdVer, yyyy, mm )

    Lat, Lon  = cached( srcFName + '.latlon',
                        cache_dir,
                        mode=cache,
                        verbose=verbose )(get_location)(srcPath, fn_read)

    Timetuple = cached( srcFName + '.timetuple',
                        cache_dir,
                        mode=cache,
                        verbose=verbose )(get_dtime   )(srcPath, fn_read)

    # exception handling for uS == 1000000 instead of 0: some granules
    # store MilliSecond == 1000, which arrives here as one full second in
    # microseconds; roll it over into the next second. -----------------------
    DTime = []
    for y, m, d, H, M, S, uS in Timetuple:

        if uS == 1000000:
            DTime.append( datetime(y, m, d, H, M, S, 0) + timedelta(seconds=1) )
            # `//` keeps the reported millisecond an int under py3
            print('Warning [NS/ScanTime/Millisecond] == 1000 : %i %i %i %i %i %i %i' \
                    %(y, m, d, H, M, S, uS // 1000))

        else:
            DTime.append( datetime(y, m, d, H, M, S, uS) )
    # -----------------------------------------------------------------------

    DTime = array( DTime )

    return DTime, Lat, Lon
class WriteNC( object ):
    '''Mixin that serializes extracted swath data to a NETCDF4 file.

    Reads these attributes from the host instance (presumably populated by
    GPM.__call__ / GPM_data -- TODO confirm): self.torigin, self.dtime,
    self.lat, self.lon, self.data, and optionally self.griddata, self.grid,
    self.missing_value.
    '''

    def toncdf( self, outpath ):
        '''Write swath (and optional gridded) data to a new netCDF4 file at outpath.'''

        torigin = self.torigin

        ncfile = self.open_ncfile( outpath )

        ncdims = self.create_dimensions( ncfile )

        ncvars = self.create_variables( ncfile, ncdims )

        self.set_attributes( ncvars )

        # time axis stored as seconds since torigin
        dtime = [ (dtm-torigin).total_seconds() for dtm in self.dtime ]

        ncvars['time'][:]  = dtime[:]
        ncvars['pixel'][:] = range( self.data.shape[1] )
        ncvars['lat'][:]   = self.lat[:]
        ncvars['lon'][:]   = self.lon[:]
        ncvars['data'][:]  = self.data[:]

        # NOTE(review): `!= []` assumes griddata stays a plain list; an
        # ndarray here would make this comparison elementwise -- confirm.
        if self.griddata != []:
            ncvars[ 'gridlat'  ][:] = self.grid.lat
            ncvars[ 'gridlon'  ][:] = self.grid.lon
            ncvars[ 'griddata' ][:] = np.ma.masked_equal( self.griddata, self.missing_value )

        ncfile.close()



    def open_ncfile( self, outpath ):
        '''Create and return a new NETCDF4 Dataset at outpath (overwrites).'''

        ncfile = Dataset( outpath, 'w', format='NETCDF4' )

        return ncfile


    def create_dimensions( self, ncfile ):
        '''Create the 'time' (unlimited) and 'pixel' dimensions; add grid dims when gridded data exists.

        NOTE(review): the 'lat'/'lon' placeholders below are never created
        as netCDF dimensions -- variables use ('time','pixel') instead.
        '''

        dims = OrderedDict((
                            ('time',  None),
                            ('pixel', None),
                            ('lat',   None),
                            ('lon',   None)
                            ))

        dims['time']  = ncfile.createDimension( "time", None)
        dims['pixel'] = ncfile.createDimension( "pixel", self.data.shape[1])


        if self.griddata != []:

            dims['gridlat'] = ncfile.createDimension( "gridlat", self.grid.lat.size)
            dims['gridlon'] = ncfile.createDimension( "gridlon", self.grid.lon.size)


        return dims


    def create_variables( self, ncfile, ncdims ):
        '''Create the netCDF variables; griddata is deflate-compressed (zlib level 1).'''

        # (name, dtype, dimension-tuple) triplets fed to createVariable
        varparams = dict((
                ( 'time',    ('time','f8',('time',  )) ),
                ( 'pixel',   ('pixel','i4',('pixel',)) ),
                ( 'lat',     ('lat','f4',('time','pixel')) ),
                ( 'lon',     ('lon','f4',('time','pixel')) ),
                ( 'data',    ('data','f4',('time','pixel')) ),
                ( 'gridlat', ('gridlat','f4',('gridlat', )) ),
                ( 'gridlon', ('gridlon','f4',('gridlon', )) ),
                ( 'griddata',('griddata','f4',('time','gridlat','gridlon')) ),
                            ))

        ncvars = OrderedDict()

        ncvars[ 'time'  ] = ncfile.createVariable( *varparams['time'] )
        ncvars[ 'pixel' ] = ncfile.createVariable( *varparams['pixel'] )
        ncvars[ 'lat'   ] = ncfile.createVariable( *varparams['lat' ] )
        ncvars[ 'lon'   ] = ncfile.createVariable( *varparams['lon' ] )
        ncvars[ 'data'  ] = ncfile.createVariable( *varparams['data' ] )

        if self.griddata != []:
            ncvars[ 'gridlat'  ] = ncfile.createVariable( *varparams['gridlat' ] )
            ncvars[ 'gridlon'  ] = ncfile.createVariable( *varparams['gridlon' ] )
            ncvars[ 'griddata' ] = ncfile.createVariable( *varparams['griddata'], zlib=True, complevel=1 )

        return ncvars


    def set_attributes( self, ncvars ):
        '''Attach CF-style units to the time variable (seconds since torigin).'''

        ncvars['time'].units = 'seconds since %s'%self.torigin.strftime("%Y-%m-%d %H:%M:%S")
#! /usr/bin/python

import os, sys
from io import BytesIO

from numpy import load, save, array
from numpy.lib.format import open_memmap


def cached(name=None, cdir='./cached', compress=False, mode='cached', verbose=True, purge_empty_file=True):
    '''
    Decorator factory caching a function's ndarray result on disk (.npy).

    name     : cache file name (defaults to the wrapped function's name)
    cdir     : cache directory (created on demand)
    mode     : in ['cached',   # read from cached file if exists
                   'skip'  ,   # skip caching process (False behaves the same)
                   'update'    # force to update cached file
                  ] or False
    compress : in ['lz4', False]
    purge_empty_file : delete and report a cache file holding a 0-d array
    '''


    def wrapper(func):

        def inner(*args, **kwargs):
            # late-bound configuration: callers may mutate wrapper.* between calls
            mode     = wrapper.mode
            name     = wrapper.name
            cdir     = wrapper.cdir
            compress = wrapper.compress
            verbose  = wrapper.verbose

            if mode in [False, 'skip']: return func( *args, **kwargs )

            if name is None:                name = func.__name__
            if not os.path.exists(cdir):    os.makedirs(cdir)

            cachePath = os.path.join(cdir, name)

            if compress: import lz4     # optional third-party dependency

            if os.path.exists( cachePath ) and mode != 'update':

                if compress == 'lz4':
                    # BUGFIX: .npy payloads are binary -- the original used
                    # text mode + StringIO, which fails under py3.
                    # NOTE(review): lz4.loads/dumps is the legacy lz4 API --
                    # confirm against the installed lz4 version.
                    with open(cachePath, 'rb') as f:
                        cacheSrc = BytesIO( lz4.loads( f.read() ) )

                else:
                    cacheSrc = cachePath

                if verbose: print ('\t!! Cached from %s'%cachePath)

                aOut = load( cacheSrc )

                if aOut.shape != () or purge_empty_file == False:
                    return aOut

                else:
                    # a 0-d array means a failed/empty previous run
                    os.remove( cachePath )
                    raise ValueError('empty cache file (erased): %s'%(cachePath))

            if not os.path.exists( cachePath ) or mode == 'update':
                aOut = func( *args, **kwargs )

                if compress == 'lz4':
                    buf = BytesIO()
                    save( buf, aOut )
                    with open(cachePath, 'wb') as f:
                        f.write( lz4.dumps( buf.getvalue() ) )

                else:
                    # context manager closes the handle even when save() raises
                    with open(cachePath, 'wb') as fCache:
                        save( fCache, aOut )

                if verbose: print ('\t!! Cached to %s'%cachePath)
                return aOut

            raise KeyError('failed exception handling for %s and %s'%( cachePath, mode ))

        return inner

    wrapper.name     = name
    wrapper.mode     = mode
    wrapper.cdir     = cdir
    wrapper.compress = compress
    wrapper.verbose  = verbose

    return wrapper


def push_cache(aOut, varName, itrmCode, timeCode, cacheDir=None, ow=False):
    '''Save aOut as <varName>.<itrmCode>.<timeCode>.npy; return True when written.'''
    if cacheDir is None:
        baseDir = './cached/%s.%s'%(varName,itrmCode)

    else:
        baseDir = cacheDir

    if not os.path.exists(baseDir):
        os.makedirs(baseDir)

    outPath = os.path.join(baseDir,'%s.%s.%s.npy'%(varName,itrmCode,timeCode))

    if os.path.exists(outPath) and ow == False:     # file size and array size compare [ToDo]
        return False

    else:
        save(outPath, aOut.astype('float32'))       # better dtype treatment [ToDo]
        return True


def pop_cache(varName, itrmCode, timeCode, func, args, cacheDir=None, cache=True, mmap=None, returnTF=False):
    '''Load the cached array if present, else compute func(*args) and optionally cache it.

    cache : True (cache result), 'ow' (recompute and overwrite) or falsy.
    NOTE(review): the default baseDir here lacks the './cached/' prefix
    push_cache uses -- confirm whether that asymmetry is intended.
    '''
    if cacheDir is None:
        baseDir = './%s.%s'%(varName,itrmCode)

    else:
        baseDir = cacheDir

    srcPath = os.path.join(baseDir,'%s.%s.%s.npy'%(varName,itrmCode,timeCode))

    if os.path.exists(srcPath) and cache != 'ow':
        aSrc = load(srcPath, mmap_mode=mmap)

    else:
        if func == open_memmap:
            # open_memmap writes in place, so ensure the target dir exists
            if not os.path.exists(baseDir):
                os.makedirs(baseDir)

            aSrc = func(srcPath, *args)

        else:
            aSrc = func(*args)

        ow = True if cache == 'ow' else False

        if cache == True:
            push_cache(aSrc,varName,itrmCode,timeCode,cacheDir=cacheDir,ow=ow)

    if returnTF : return aSrc, False
    else        : return aSrc
@ETA
def main(args,opts):
    '''Ad-hoc driver/experiment script: extract GPM GMI surface precipitation
    over the Radar-AMeDAS domain and (in dead code below the sys.exit calls)
    plot it with Basemap.  Kept as-is; see NOTE(review) markers.'''
    print (args)
    print (opts)


    '''
    prjName = 'GPM.KuPR'
    prdLv = 'L2'
    prdVer = '04'
    varName = 'NS/SLV/precipRateESurface'
    '''
    prjName = 'GPM.GMI'
    prdLv = 'L2'
    prdVer = '03'
    varName = 'S1/surfacePrecipitation'


    #BBox = [[20,0],[48,180]] # Radar-AMeDAS domain
    BBox = [[20,118],[48,150]] # Radar-AMeDAS domain
    res = 0.1
    delT = timedelta(seconds=60*60)

#    sDTime = datetime( 2014,4,30 )
#    eDTime = datetime( 2014,5,3 )
    sDTime = datetime( 2014,4,3,22 )
    # NOTE(review): the assignment below overrides the one above (leftover)
    sDTime = datetime( 2014,4,4,0 )
    eDTime = datetime( 2014,4,5,0 )

    print (sDTime, eDTime)


    gpm = GPM(prjName, prdLv, prdVer)

    # one-shot extraction at 0.2 deg, no temporal binning
    JP = gpm(varName,
             sDTime,
             eDTime,
             BBox,
             #[[30,125],[45,145]],
             0.2,
             )
    '''
    0.1,
    timedelta(seconds=3600*24))
    '''

    for d in JP.griddata:
        print (d.shape, d.max(), d.min(), np.ma.masked_less_equal(d,0).sum())
    #sys.exit()

    # hourly re-extraction over the same day
    H = np.arange(25)
    for h0,h1 in zip(H[:-1], H[1:]):
        offset = 0 if h1 !=24 else 1

        sdtime = datetime(2014,4,4,h0)
        edtime = datetime(2014,4,4 + offset,h1%24)

        gpmJP = gpm(varName, sdtime, edtime, BBox, 0.2 )

        # gpm() returns None when no granules match, hence the hasattr guard
        if hasattr( gpmJP, 'griddata'):
            for d in gpmJP.griddata:
                print (d.shape, d.max(), d.min(), np.ma.masked_less_equal(d,0).sum())

    sys.exit()

    # NOTE(review): everything below this sys.exit() is dead plotting code;
    # several names (figure, colorbar, show, array, ma) are unqualified and
    # would raise NameError if ever reached.

    A = np.ma.masked_less_equal( np.array( JP.griddata ), 0 )

    M = Basemap( resolution='c' ,llcrnrlat=BBox[0][0], llcrnrlon=BBox[0][1], urcrnrlat=BBox[1][0], urcrnrlon=BBox[1][1])

    pl.figure();M.imshow( A.mean(0) );pl.colorbar()
    #figure();M.scatter(JP.lon.flatten(), JP.lat.flatten(), 10, JP.data.flatten(), edgecolor='none',vmin=0,vmax=10);colorbar()
    M.drawcoastlines()
    pl.show()

    sys.exit()
    gpmJP = gpm('NS/SLV/precipRateESurface', sDTime, eDTime, BBox, res, delT )

    A = array( gpmJP.griddata )

    '''
    # for no delT
    Lon = gpmJP.lon
    Lat = gpmJP.lat
    Data = gpmJP.data

    M.scatter(Lon.flatten(), Lat.flatten(), 10, Data.flatten(), edgecolor='none',vmin=0,vmax=10);colorbar()
    M.drawcoastlines()
    '''

    # for with delT
    Lon = gpmJP.lon
    Lat = gpmJP.lat
    Data = gpmJP.data

    for lat, lon, data in zip(Lat, Lon, Data):
        figure()
        M.scatter(lon.flatten(), lat.flatten(), 10, data.flatten(), edgecolor='none',vmin=0,vmax=10);colorbar()
        M.drawcoastlines()

    show()

    print (A.shape, A.max(), A.min())
    for a in A: print (a.shape, a.max(), a.min())
    print (gpmJP.data.min(), gpmJP.data.max())


    for a in gpmJP.griddata:
        print (a.shape)
        figure()
        M.imshow( ma.masked_equal(array(a), -9999.9),vmin=0,vmax=10);colorbar()
        M.drawcoastlines()


    figure()
    M.imshow( ma.masked_equal(A, -9999.9).sum(0),vmin=0,vmax=10);colorbar()
    M.drawcoastlines()


    show()

    '''
    Path = gpm.get_path(sDTime, eDTime)

    for i in range(5):

        s=time.time()
        DTime, Lat, Lon = gpm.get_gtrack(Path[i])


        print time.time()-s
    '''



    return
import os, sys, time
from datetime import datetime
from .TextArt import TextArt
from .getName import getFuncName, getCallerName
from .AnsiFormatter import AnsiFormatter


def ETA(func):
    '''Decorator: print a framed header/footer around each call with the
    caller name, arguments and wall-clock lapse.'''
    ta = TextArt()

    def inner(*args, **kwargs):
        clr = ta.g # Ansi Color 'g':green

        HEADER = (ta.Ln,
                  '| ETA |',
                  '\t+ func <%s>\tcalled by <%s>\n'%(
                        func.__name__,
                        getCallerName()
                        ),
                  ta.ln)

        print(clr+''.join(HEADER))

        sTime = datetime.now()
        retval = func(*args,**kwargs)
        eTime = datetime.now()

        FOOTER = (ta.ln,
                  '\t - args [%s]\n'%str(args),
                  '\t - kwargs [%s]\n'%str(kwargs),
                  '\t - return %s %s\n'%(
                      type(retval),
                      'in LENGTH of %i'%len(retval) if hasattr(retval,'__iter__') else ''
                      ),
                  '\t - lapse %-53s | ETA |\n'%(eTime-sTime),
                  ta.Ln)

        print(clr+'%s'*len(FOOTER)%FOOTER)

        return retval

    return inner


class LOGGER(object):
    '''
    Tee stdout into a logfile with ANSI coloring.

    LOG = LOGGER()
    LOG.on
    LOG.off
    LOG.clr(color)   (['black','red','green',...])
    '''

    def __init__(self, logPath=None, mode=None):
        '''
        logPath : target logfile (default: derived from argv under
                  settings.LOG_DIR -- see NOTE below)
        mode    : open() mode for the logfile; defaults to 'a' (append)
        '''
        ta = TextArt()

        self.sDTime = datetime.now()

        if logPath is None:
            logFName = 'cmdline' if sys.argv[0] == '' else '_'.join(sys.argv)
            logFExt = 'log@'+self.sDTime.strftime('%Y%m%d')

            logFName = '%s.%s'%(logFName,logFExt)

            # NOTE(review): `settings` is never imported in this module, so
            # this default branch raises NameError; confirm the intended
            # configuration source before relying on it.
            logPath = os.path.join(settings.LOG_DIR,logFName)


        logDir = os.path.dirname( logPath )

        if not os.path.exists(logDir) and logDir != '':
            os.makedirs(logDir)

        if mode is None: mode='a'

        HEADER = ('\n',
                  ta.LN,
                  # BUGFIX: the original format string had a single %s for
                  # two arguments (logPath, mode) and raised TypeError on
                  # every construction; the mode is now logged explicitly.
                  '* logfile path\t%s\t(mode:%s)\n'%(logPath,mode),
                  '* by %s\t@%s:%s\n'%(
                        os.environ['USER'],
                        os.uname()[1],          # patch by HJKIM@20130917
                        # os.environ['HOSTNAME'],
                        os.environ['PWD'],
                        ),
                  ta.Ln,
                  '* executed FILE <%s>\t'%sys.argv[0],
                  'with ARGS %s\n'%str(sys.argv[1:]),
                  '* started DTIME <%s>\n'%self.sDTime,
                  ta.LN)

        logHeader = '%s'*len(HEADER)%HEADER

        self.status = 'off'
        self.mode = mode
        self.logPath = logPath

        self.clr('y')
        self.write(logHeader)
        self.clr('reset')


    def __del__(self):
        '''Write the footer (lapse since construction) and restore stdout.'''
        ta = TextArt()
        eDTime = datetime.now()
        FOOTER = (ta.LN,
                  '* ended DTIME <%s>\tLapse: %s\n'%(
                      eDTime,
                      eDTime-self.sDTime
                      ),
                  ta.LN)

        logFooter = '%s'*len(FOOTER)%FOOTER

        self.clr('y')
        self.write(logFooter)
        self.clr('reset')

        self.off


    def write(self, data, mode=None):
        # the file gets plain text; the duplicated stdout gets the ANSI prefix
        self.file.write(data)
        self.stdout.write(self.color+data)


    @property
    def on(self):
        '''Start teeing: replace sys.stdout with this logger.'''
        if self.status == 'off':
            # duplicate stdout -> stdout & file
            self.file = open(self.logPath, self.mode)
            self.stdout = sys.stdout
            sys.stdout = self

            self.status= 'on'


    @property
    def off(self):
        '''Stop teeing and restore the original sys.stdout.'''
        if self.status == 'on':
            sys.stdout = self.stdout
            self.file.close()

            self.status= 'off'


    def clr(self,color):
        '''Set the ANSI color used for subsequent terminal output.'''
        if self.status == 'off':
            self.on

        self.color = AnsiFormatter(color)


'''
def printvar(var):
    for k,v in locals().items():
        if v==var: break

    print '\t@var: %s15'%k,var
'''


if __name__=='__main__':

    @ETA
    def test(text):
        print('print in "test" func: %s'%(text*3))
        return text,text

    def main(*args):
        print('print in "main" func: %s'%str(args))
        print(test('gogogo '))


    LOG = LOGGER()

    LOG.clr('r')
    print('test_red')
    LOG.clr('b')
    print('test_blue')
    print('LOG.off')
    LOG.off
    print('test.off')
    LOG.on
    print('LOG.on')

    LOG.clr('g')
    print('test_green')
    LOG.clr('reset')
    print('test @ETA')
    main(*sys.argv)
#! /usr/bin/python
#--------------------------------------------------------------------
# PROGRAM    : gpm.py
# CREATED BY : hjkim @IIS.2015-01-14 11:52:17.992599
# MODIFED BY :
#
# USAGE      : $ ./gpm.py
#
# DESCRIPTION:
#------------------------------------------------------cf0.2@20120401


import os, sys, importlib
import time
import pickle
from optparse import OptionParser
# BUGFIX: SafeConfigParser was removed in Python 3.12; ConfigParser is the
# same class (SafeConfigParser was only a deprecated alias in py3).
from configparser import ConfigParser

from numpy import empty

from alien.dtrange import dtrange

from alien.GridCoordinates import GridCoordinates

from alien.TimeSeries import bin_bytbound

from gpm_data import GPM_data
from search_granules import SearchGranules
from granule2map import granule2map



class GPM( SearchGranules ):
    '''Entry point: locate granules on disk, read a variable and optionally
    grid/bin it.  Usage: GPM(prj, lv, ver)(varName, sDTime, eDTime, ...).'''

    def __init__(self, prjName, prdLv, prdVer, **kwargs):
        '''
        prjName : e.g.) 'GPM.KuPR'
        prdLv   : e.g.) 'L2'
        prdVer  : e.g.) '02'
        kwargs  : overrides for entries of the [Defaults] config section
        '''
        modroot = os.path.dirname(__file__)

        # inline_comment_prefixes restores the py2 ConfigParser behavior the
        # shipped `config` file relies on ("key : value ; comment")
        self.cfg = ConfigParser( os.environ, inline_comment_prefixes=(';',) )
        self.cfg.read( os.path.join(modroot, 'config') )

        self.cfg._sections['Defaults'].update( kwargs )

        # fall back to the working directory when no dataroot is configured
        if self.cfg.get( 'Defaults','dataroot') == '':
            self.cfg.set('Defaults','dataroot', os.environ['PWD'])

        self.dataDir = self.cfg.get('Defaults','dataroot')

        self.prjName = prjName
        self.prdLv   = prdLv
        self.prdVer  = prdVer

        self.prdDir  = os.path.join( self.dataDir,
                                     self.prjName,
                                     self.prdLv,
                                     self.prdVer)

        self.cached   = self.cfg.get('Defaults', 'cached')
        self.cacheDir = self.cfg.get('Defaults', 'cache_dir')

        # resolve the configured hdf reader (dotted path) to a callable
        fnPath = {'TRMM': self.cfg.get('Defaults','hdf4_module'),
                  'GPM' : self.cfg.get('Defaults','hdf5_module')}[prjName.split('.')[0]]

        fnName  = fnPath.split('.')[-1]
        modPath = '.'.join( fnPath.split('.')[:-1] )

        self.func_read = getattr( importlib.import_module( modPath ), fnName )



    def __call__(self, varName, sDTime, eDTime, BBox=None, res=None, delT=None):
        '''
        varName : HDF dataset path, e.g.) 'NS/SLV/precipRateESurface'
        sDTime  : DTime bound left
        eDTime  : DTime bound right
        BBox    : [[lllat,lllon],[urlat,urlon]] or None
        res     : spa. res. of gridded 2d-array output (None: swath only)
        delT    : timedelta bin width for temporal binning (None: no binning)

        returns a GPM_data container, or None when no granule matches.
        '''

        mapCode = '^' + ''.join( str(res).split('.') )


        gpmData = GPM_data()

        # NOTE(review): prdDir is already rooted at dataDir, so this join is
        # a no-op for an absolute dataroot; kept for compatibility.
        srcDir = os.path.join( self.dataDir, self.prdDir )

        assert os.path.exists( srcDir ), '{} is not exists.'.format( srcDir )
        Granule = self.search_granules( srcDir, sDTime, eDTime, BBox )

        if len(Granule) == 0:
            print ('! Warning ! no data extracted')
            return None

        # total scan lines across all granules x pixels per scan
        outSize = sum( [ len(gra[2]) for gra in Granule ] ), Granule[0][2].shape[1]
        Lat   = empty( outSize, 'float32')
        Lon   = empty( outSize, 'float32')
        aOut  = empty( outSize, 'float32' )
        DTime = []


        prvI = 0
        for granule in Granule:

            srcPath, dtime, lat, lon, idx = granule

            gpmData.srcPath.append(srcPath)
            gpmData.recLen.append( len(dtime) )     # number of data record for each file

            nxtI = prvI + len(dtime)

            aOut[prvI:nxtI] = self.func_read( srcPath, varName, idx.tolist() )
            Lat[prvI:nxtI]  = lat
            Lon[prvI:nxtI]  = lon
            DTime.extend(dtime)


            if res is not None and delT is None:
                gpmData.griddata.append( granule2map( lat, lon, aOut[prvI:nxtI], BBox, res ) )
                gpmData.grid = GridCoordinates(mapCode, BBox=BBox)

            prvI = nxtI


        if delT is not None:
            dtBnd = dtrange(sDTime, eDTime, delT)

            # BUGFIX: map(None, ...) is Python-2-only; zip() pairs the bin
            # bounds under py3 as well.
            gpmData.tbound = list( zip( dtBnd[:-1], dtBnd[1:] ) )
            gpmData.dtime  = bin_bytbound( DTime, dtBnd, DTime )
            gpmData.lat    = bin_bytbound( DTime, dtBnd, Lat )
            gpmData.lon    = bin_bytbound( DTime, dtBnd, Lon )
            gpmData.data   = bin_bytbound( DTime, dtBnd, aOut )


            if res is not None:
                gpmData.griddata = [ granule2map(lat, lon, a, BBox, res)
                                        for lat, lon, a in zip( gpmData.lat, gpmData.lon, gpmData.data ) ]
                gpmData.grid = GridCoordinates(mapCode, BBox=BBox)

        else:
            gpmData.dtime = DTime
            gpmData.lat   = Lat
            gpmData.lon   = Lon
            gpmData.data  = aOut


        return gpmData
46 | #BBox=[[-90,90],[0,360]], res=1.0): 47 | ''' 48 | self.setup_grid(mapCode, hres, vres, BBox) 49 | 50 | def setup_grid(self, mapCode, hres=None, vres=None, BBox=None): 51 | for i,s in enumerate(mapCode): 52 | if s.isdigit(): break # find location of res. 53 | 54 | mapType, res = mapCode[:i],mapCode[i:] 55 | 56 | if i == len(mapCode)-1: # when res. not given 57 | mapType = mapCode # 1.0 degree assumed 58 | res = '1' 59 | 60 | res = float( res[0] + '.' + res[1:] ) # conv. res. to float 61 | hres = res 62 | vres = res 63 | 64 | if BBox == None: 65 | left, right = [-180.0, 180.0] if mapType in ['v','^'] else [0.0, 360.0] 66 | bottom, top = [90.0, -90.0] if mapType in ['v','n'] else [-90.0,90.0] 67 | 68 | BBox = [[left,right], [top,bottom]] 69 | 70 | else: 71 | bottom, left = BBox[0] 72 | top, right = BBox[1] 73 | 74 | hoff = hres/2. 75 | width = right-left 76 | nJ = width/hres 77 | 78 | voff = vres/2. if bottom < top else -vres/2. 79 | height = top-bottom if bottom < top else bottom-top 80 | nI = height/vres 81 | 82 | 83 | lon = linspace(left+hoff,right-hoff, nJ) 84 | lat = linspace(bottom+voff, top-voff, nI) 85 | 86 | self.mapType = mapType 87 | 88 | self.res = res 89 | self.vres = vres 90 | self.hres = hres 91 | 92 | self.BBox = BBox 93 | 94 | self.lat = lat 95 | self.lon = lon 96 | 97 | self.nI = nI 98 | self.nJ = nJ 99 | 100 | self.Lon, self.Lat = meshgrid(lon,lat) 101 | 102 | self.conv180to360 = conv180to360 103 | self.conv360to180 = conv360to180 104 | 105 | 106 | def get_idx(self, Y, X ,nearest=False, shift_lon=False): 107 | ''' 108 | X : Longitude(s) /* float or iterable */ 109 | Y : Latitude(s) /* float or iterable */ 110 | ''' 111 | 112 | if shift_lon == True: 113 | fnConv = self.conv360to180 if self.mapType in ['v','^'] else self.conv180to360 114 | X = fnConv(X) 115 | 116 | if nearest == True: 117 | j = nearest_idx(self.lon,X) 118 | i = nearest_idx(self.lat,Y) 119 | 120 | else: 121 | lon = self.lon.tolist() 122 | lat = self.lat.tolist() 123 | 124 | j = 
[lon.index(x) for x in X] if hasattr(X,'__iter__') else lon.index(X) 125 | i = [lon.index(y) for y in Y] if hasattr(Y,'__iter__') else lat.index(Y) 126 | 127 | return i, j 128 | 129 | 130 | def get_crd(self, I, J): 131 | return self.lat[J], self.lon[I] 132 | 133 | 134 | def get_domain_idx(self, BBox, mode='nearest', shift_lon=False): 135 | ''' 136 | BBox : [ [south, west], [north, east] ] 137 | mode : [ 'nearest', 'exact', 'inner' ,'outter'] 138 | * both 'inner' and 'outer' include bounds * 139 | ''' 140 | 141 | [south, west], [north, east] = BBox 142 | 143 | nearest = False if mode == 'exact' else True 144 | 145 | llcr_idx = self.get_idx( south, west, nearest=nearest, shift_lon=shift_lon ) 146 | urcr_idx = self.get_idx( north, east, nearest=nearest, shift_lon=shift_lon ) 147 | 148 | sn_idx = [llcr_idx[0], urcr_idx[0]] 149 | we_idx = [llcr_idx[1], urcr_idx[1]] 150 | 151 | if self.mapType in ['n', 'v']: sn_idx = sn_idx[::-1] 152 | 153 | ####!!!! add treatment for 'inner' and 'outter' !!!!#### 154 | return [ [ sn_idx[0], we_idx[0] ], [ sn_idx[1], we_idx[1]] ] 155 | 156 | 157 | def get_domain_data(self, aSrc, BBox, mode='nearest', shift_lon=False): 158 | 159 | bbox_idx = self.get_domain_idx( BBox, mode=mode, shift_lon=shift_lon ) 160 | print('bbox_idx', bbox_idx) 161 | 162 | return aSrc[..., 163 | bbox_idx[0][0]:bbox_idx[1][0], 164 | bbox_idx[0][1]:bbox_idx[1][1]] 165 | 166 | 167 | def cut_domain(self, BBox, mode='nearest', shift_lon=False): 168 | return GridCoordinates( BBox ) 169 | 170 | 171 | def __repr__(self): 172 | 173 | sOut = '\n'.join( [self.mapType, 174 | # self.res, self.vres, 175 | # self.hres, 176 | # self.BBox, 177 | # self.lat, self.lon, 178 | # self.nI, 179 | # self.nJ, 180 | # self.Lon.shape, self.Lat.shape 181 | ] ) 182 | 183 | return sOut 184 | 185 | 186 | 187 | def main(*args): 188 | 189 | grid = GridCoordinates('u05') 190 | print('+'*80) 191 | grid = GridCoordinates('v') 192 | 193 | vasc = VASClimO('10') 194 | vasc(1951,2000) 195 | 196 | 
figure();imshow(vasc.data.mean(0));colorbar() 197 | print(vasc.get_idx( 38.5, -0.5 )) 198 | print(vasc.get_idx( 38.5, 359.5, shift_lon=True )) 199 | 200 | BBox = [[66.5,85.5],[70.5,170.5]] 201 | BBox = [[-10.5,10.5],[-60.5,-30.5]] 202 | aSrc = vasc.get_domain_data(vasc.data, BBox, shift_lon=True) 203 | 204 | print(vasc.data.shape) 205 | print(vasc.yr.shape) 206 | print(aSrc.shape) 207 | 208 | print(vasc.data.max()) 209 | 210 | figure();plot( ma.masked_equal( aSrc,-999).mean(-1).mean(-1) ) 211 | 212 | show() 213 | 214 | 215 | if __name__=='__main__': 216 | main(sys.argv) 217 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![DOI](https://zenodo.org/badge/19166/kimlab/GPyM.svg)](https://zenodo.org/badge/latestdoi/19166/kimlab/GPyM) 2 | [![license](https://img.shields.io/github/license/mashape/apistatus.svg?maxAge=2592000)](LICENSE.md) 3 | 4 | 5 | # GPyM 6 | GPyM [dʒi:pai:ɜm / gee pai em] is a GPM (Global Precipitation Mission) Python Module to handle (e.g., search, extract, mapping, and so on) GPM/DPR/GMI and TRMM/PR/TMI. It is a working beta currently (2016-06-01). First official release will be by July 2016. 
7 | 8 | ### Features 9 | * Search and extract granules by timespan and spatial domain 10 | * Convert and upscale granules to a gridded format 11 | * Cached IO to speed up for repeated tasks 12 | 13 | ### Installation 14 | 15 | #### Dependency 16 | * numpy: python array manipulation module 17 | * h5py: python HDF5 IO module (for GPM/DPR/GMI) 18 | * pyhdf : python HDF4 IO module (for TRMM/PR/TMI) 19 | 20 | ### Download and Install Package 21 | ```bash 22 | $ git clone git@github.com:kimlab/GPyM.git 23 | 24 | $ cd GPyM 25 | 26 | $ vi config # to config environment including GPM data directory 27 | 28 | $ python setup.py install 29 | ``` 30 | ### Syntax 31 | ![png](./doc/syntax.rev00.png) 32 | 33 | ### Example 34 | ```python 35 | from pylab import * 36 | %matplotlib inline 37 | rcParams['figure.figsize'] = (10, 6) 38 | 39 | from mpl_toolkits.basemap import Basemap 40 | 41 | from datetime import datetime 42 | 43 | from GPyM import GPM 44 | ``` 45 | 46 | 47 | ```python 48 | prdLv = 'L2' 49 | prdVer = '03' 50 | #prjName = 'GPM.KuPR' 51 | #varName = 'NS/SLV/precipRateESurface' 52 | prdName = 'GPM.GMI' 53 | varName = 'S1/surfacePrecipitation' 54 | ``` 55 | 56 | 57 | ```python 58 | gpm = GPM( prdName, prdLv, prdVer ) 59 | ``` 60 | 61 | 62 | ```python 63 | sdtime = datetime(2014,4,30) 64 | edtime = datetime(2014,5,3) 65 | domain = [[20,118],[48,150]] 66 | res = 0.2 67 | ``` 68 | 69 | 70 | ```python 71 | jp = gpm( varName, sdtime, edtime, domain, res) 72 | ``` 73 | 74 | * [V] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/04/GPMCOR_GMI_1404300048_0220_000955_L2S_GL2_03C.h5 75 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/04/GPMCOR_GMI_1404300220_0353_000956_L2S_GL2_03C.h5 76 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/04/GPMCOR_GMI_1404300353_0525_000957_L2S_GL2_03C.h5 77 | * [_] ground track dimension (cached): 
/tank/hjkim/GPM//GPM.GMI/L2/03/2014/04/GPMCOR_GMI_1404300525_0658_000958_L2S_GL2_03C.h5 78 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/04/GPMCOR_GMI_1404300658_0830_000959_L2S_GL2_03C.h5 79 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/04/GPMCOR_GMI_1404300830_1003_000960_L2S_GL2_03C.h5 80 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/04/GPMCOR_GMI_1404301003_1136_000961_L2S_GL2_03C.h5 81 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/04/GPMCOR_GMI_1404301136_1308_000962_L2S_GL2_03C.h5 82 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/04/GPMCOR_GMI_1404301308_1441_000963_L2S_GL2_03C.h5 83 | * [V] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/04/GPMCOR_GMI_1404301441_1613_000964_L2S_GL2_03C.h5 84 | * [V] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/04/GPMCOR_GMI_1404301613_1746_000965_L2S_GL2_03C.h5 85 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/04/GPMCOR_GMI_1404301746_1918_000966_L2S_GL2_03C.h5 86 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/04/GPMCOR_GMI_1404301918_2051_000967_L2S_GL2_03C.h5 87 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/04/GPMCOR_GMI_1404302051_2223_000968_L2S_GL2_03C.h5 88 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/04/GPMCOR_GMI_1404302223_2356_000969_L2S_GL2_03C.h5 89 | * [V] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/04/GPMCOR_GMI_1404302356_0129_000970_L2S_GL2_03C.h5 90 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405010129_0301_000971_L2S_GL2_03C.h5 91 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405010301_0434_000972_L2S_GL2_03C.h5 92 | * [_] ground track dimension (cached): 
/tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405010434_0606_000973_L2S_GL2_03C.h5 93 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405010606_0739_000974_L2S_GL2_03C.h5 94 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405010739_0911_000975_L2S_GL2_03C.h5 95 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405010911_1044_000976_L2S_GL2_03C.h5 96 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405011044_1217_000977_L2S_GL2_03C.h5 97 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405011217_1349_000978_L2S_GL2_03C.h5 98 | * [V] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405011349_1522_000979_L2S_GL2_03C.h5 99 | * [V] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405011522_1654_000980_L2S_GL2_03C.h5 100 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405011654_1827_000981_L2S_GL2_03C.h5 101 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405011827_1959_000982_L2S_GL2_03C.h5 102 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405011959_2132_000983_L2S_GL2_03C.h5 103 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405012132_2305_000984_L2S_GL2_03C.h5 104 | * [V] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405012305_0037_000985_L2S_GL2_03C.h5 105 | * [V] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405020037_0210_000986_L2S_GL2_03C.h5 106 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405020210_0342_000987_L2S_GL2_03C.h5 107 | * [_] ground track dimension (cached): 
/tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405020342_0515_000988_L2S_GL2_03C.h5 108 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405020515_0647_000989_L2S_GL2_03C.h5 109 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405020647_0820_000990_L2S_GL2_03C.h5 110 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405020820_0953_000991_L2S_GL2_03C.h5 111 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405020953_1125_000992_L2S_GL2_03C.h5 112 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405021125_1258_000993_L2S_GL2_03C.h5 113 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405021258_1430_000994_L2S_GL2_03C.h5 114 | * [V] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405021430_1603_000995_L2S_GL2_03C.h5 115 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405021603_1735_000996_L2S_GL2_03C.h5 116 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405021735_1908_000997_L2S_GL2_03C.h5 117 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405021908_2041_000998_L2S_GL2_03C.h5 118 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405022041_2213_000999_L2S_GL2_03C.h5 119 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405022213_2346_001000_L2S_GL2_03C.h5 120 | * [_] ground track dimension (cached): /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405022346_0118_001001_L2S_GL2_03C.h5 121 | [READ_HDF5] /tank/hjkim/GPM//GPM.GMI/L2/03/2014/04/GPMCOR_GMI_1404300048_0220_000955_L2S_GL2_03C.h5 (2961, 221) -> (175, 221) 122 | [GRANULE2MAP] Domain:[[20, 118], [48, 150]] 
(175, 221) -> (140, 160) 123 | [READ_HDF5] /tank/hjkim/GPM//GPM.GMI/L2/03/2014/04/GPMCOR_GMI_1404301441_1613_000964_L2S_GL2_03C.h5 (2962, 221) -> (175, 221) 124 | [GRANULE2MAP] Domain:[[20, 118], [48, 150]] (175, 221) -> (140, 160) 125 | [READ_HDF5] /tank/hjkim/GPM//GPM.GMI/L2/03/2014/04/GPMCOR_GMI_1404301613_1746_000965_L2S_GL2_03C.h5 (2962, 221) -> (27, 221) 126 | [GRANULE2MAP] Domain:[[20, 118], [48, 150]] (27, 221) -> (140, 160) 127 | [READ_HDF5] /tank/hjkim/GPM//GPM.GMI/L2/03/2014/04/GPMCOR_GMI_1404302356_0129_000970_L2S_GL2_03C.h5 (2962, 221) -> (214, 221) 128 | [GRANULE2MAP] Domain:[[20, 118], [48, 150]] (214, 221) -> (140, 160) 129 | [READ_HDF5] /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405011349_1522_000979_L2S_GL2_03C.h5 (2962, 221) -> (56, 221) 130 | [GRANULE2MAP] Domain:[[20, 118], [48, 150]] (56, 221) -> (140, 160) 131 | [READ_HDF5] /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405011522_1654_000980_L2S_GL2_03C.h5 (2962, 221) -> (161, 221) 132 | [GRANULE2MAP] Domain:[[20, 118], [48, 150]] (161, 221) -> (140, 160) 133 | [READ_HDF5] /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405012305_0037_000985_L2S_GL2_03C.h5 (2962, 221) -> (112, 221) 134 | [GRANULE2MAP] Domain:[[20, 118], [48, 150]] (112, 221) -> (140, 160) 135 | [READ_HDF5] /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405020037_0210_000986_L2S_GL2_03C.h5 (2962, 221) -> (96, 221) 136 | [GRANULE2MAP] Domain:[[20, 118], [48, 150]] (96, 221) -> (140, 160) 137 | [READ_HDF5] /tank/hjkim/GPM//GPM.GMI/L2/03/2014/05/GPMCOR_GMI_1405021430_1603_000995_L2S_GL2_03C.h5 (2962, 221) -> (215, 221) 138 | [GRANULE2MAP] Domain:[[20, 118], [48, 150]] (215, 221) -> (140, 160) 139 | 140 | 141 | 142 | ```python 143 | print '{} -- {}'.format( jp.dtime[0], jp.dtime[-1] ), jp.data.shape 144 | ``` 145 | 146 | 2014-04-30 01:45:14 -- 2014-05-02 15:06:38 (1231, 221) 147 | 148 | 149 | 150 | ```python 151 | print len(jp.griddata), jp.griddata[0].shape 152 | ``` 153 | 154 | 9 (140, 160) 155 | 156 | 157 | 
158 | ```python 159 | M = Basemap( resolution='c' ,llcrnrlat=domain[0][0], llcrnrlon=domain[0][1], urcrnrlat=domain[1][0], urcrnrlon=domain[1][1]) 160 | M.drawcoastlines(color='r', linewidth=2) 161 | 162 | M.imshow( ma.masked_less_equal( jp.griddata, 0).sum(0), interpolation='nearest' ) 163 | colorbar() 164 | ``` 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | 173 | 174 | ![png](./doc/output_7_1.png) 175 | 176 | ### History 177 | #### 0.60b 178 | * [NEW] cache option @ config 179 | * [NEW] cache_directory option @ config 180 | * [NEW] compress (dimension only) option @ config 181 | * [UPGRADE] enhanced console-log 182 | * [UPGRADE] remove cache routine from get_location* and get_dtime* 183 | --------------------------------------------------------------------------------