r(i[s])&&(e.offsets.popper[d]=r(i[s])),e}},arrow:{order:500,enabled:!0,fn:function(e,o){if(!F(e.instance.modifiers,'arrow','keepTogether'))return e;var i=o.element;if('string'==typeof i){if(i=e.instance.popper.querySelector(i),!i)return e;}else if(!e.instance.popper.contains(i))return console.warn('WARNING: `arrow.element` must be child of its popper element!'),e;var n=e.placement.split('-')[0],r=e.offsets,p=r.popper,s=r.reference,d=-1!==['left','right'].indexOf(n),a=d?'height':'width',l=d?'Top':'Left',f=l.toLowerCase(),m=d?'left':'top',c=d?'bottom':'right',g=O(i)[a];s[c]-gp[c]&&(e.offsets.popper[f]+=s[f]+g-p[c]);var u=s[f]+s[a]/2-g/2,b=t(e.instance.popper,'margin'+l).replace('px',''),y=u-h(e.offsets.popper)[f]-b;return y=X(V(p[a]-g,y),0),e.arrowElement=i,e.offsets.arrow={},e.offsets.arrow[f]=Math.round(y),e.offsets.arrow[m]='',e},element:'[x-arrow]'},flip:{order:600,enabled:!0,fn:function(e,t){if(W(e.instance.modifiers,'inner'))return e;if(e.flipped&&e.placement===e.originalPlacement)return e;var o=w(e.instance.popper,e.instance.reference,t.padding,t.boundariesElement),i=e.placement.split('-')[0],n=L(i),r=e.placement.split('-')[1]||'',p=[];switch(t.behavior){case fe.FLIP:p=[i,n];break;case fe.CLOCKWISE:p=K(i);break;case fe.COUNTERCLOCKWISE:p=K(i,!0);break;default:p=t.behavior;}return p.forEach(function(s,d){if(i!==s||p.length===d+1)return e;i=e.placement.split('-')[0],n=L(i);var 
a=e.offsets.popper,l=e.offsets.reference,f=_,m='left'===i&&f(a.right)>f(l.left)||'right'===i&&f(a.left)f(l.top)||'bottom'===i&&f(a.top)f(o.right),g=f(a.top)f(o.bottom),b='left'===i&&c||'right'===i&&h||'top'===i&&g||'bottom'===i&&u,y=-1!==['top','bottom'].indexOf(i),w=!!t.flipVariations&&(y&&'start'===r&&c||y&&'end'===r&&h||!y&&'start'===r&&g||!y&&'end'===r&&u);(m||b||w)&&(e.flipped=!0,(m||b)&&(i=p[d+1]),w&&(r=j(r)),e.placement=i+(r?'-'+r:''),e.offsets.popper=de({},e.offsets.popper,S(e.instance.popper,e.offsets.reference,e.placement)),e=N(e.instance.modifiers,e,'flip'))}),e},behavior:'flip',padding:5,boundariesElement:'viewport'},inner:{order:700,enabled:!1,fn:function(e){var t=e.placement,o=t.split('-')[0],i=e.offsets,n=i.popper,r=i.reference,p=-1!==['left','right'].indexOf(o),s=-1===['top','left'].indexOf(o);return n[p?'left':'top']=r[o]-(s?n[p?'width':'height']:0),e.placement=L(t),e.offsets.popper=h(n),e}},hide:{order:800,enabled:!0,fn:function(e){if(!F(e.instance.modifiers,'hide','preventOverflow'))return e;var t=e.offsets.reference,o=T(e.instance.modifiers,function(e){return'preventOverflow'===e.name}).boundaries;if(t.bottomo.right||t.top>o.bottom||t.right | BSD-3-Clause */
2 | !function(){"use strict";function e(e){return r(n(e),arguments)}function t(t,r){return e.apply(null,[t].concat(r||[]))}function r(t,r){var n,i,a,o,p,c,u,f,l,d=1,g=t.length,b="";for(i=0;i=0),o[8]){case"b":n=parseInt(n,10).toString(2);break;case"c":n=String.fromCharCode(parseInt(n,10));break;case"d":case"i":n=parseInt(n,10);break;case"j":n=JSON.stringify(n,null,o[6]?parseInt(o[6]):0);break;case"e":n=o[7]?parseFloat(n).toExponential(o[7]):parseFloat(n).toExponential();break;case"f":n=o[7]?parseFloat(n).toFixed(o[7]):parseFloat(n);break;case"g":n=o[7]?String(Number(n.toPrecision(o[7]))):parseFloat(n);break;case"o":n=(parseInt(n,10)>>>0).toString(8);break;case"s":n=String(n),n=o[7]?n.substring(0,o[7]):n;break;case"t":n=String(!!n),n=o[7]?n.substring(0,o[7]):n;break;case"T":n=Object.prototype.toString.call(n).slice(8,-1).toLowerCase(),n=o[7]?n.substring(0,o[7]):n;break;case"u":n=parseInt(n,10)>>>0;break;case"v":n=n.valueOf(),n=o[7]?n.substring(0,o[7]):n;break;case"x":n=(parseInt(n,10)>>>0).toString(16);break;case"X":n=(parseInt(n,10)>>>0).toString(16).toUpperCase()}s.json.test(o[8])?b+=n:(!s.number.test(o[8])||f&&!o[3]?l="":(l=f?"+":"-",n=n.toString().replace(s.sign,"")),c=o[4]?"0"===o[4]?"0":o[4].charAt(1):" ",u=o[6]-(l+n).length,p=o[6]&&u>0?c.repeat(u):"",b+=o[5]?l+n+p:"0"===c?l+p+n:p+l+n)}return b}function n(e){if(i[e])return i[e];for(var t,r=e,n=[],a=0;r;){if(null!==(t=s.text.exec(r)))n.push(t[0]);else if(null!==(t=s.modulo.exec(r)))n.push("%");else{if(null===(t=s.placeholder.exec(r)))throw new SyntaxError("[sprintf] unexpected placeholder");if(t[2]){a|=1;var o=[],p=t[2],c=[];if(null===(c=s.key.exec(p)))throw new SyntaxError("[sprintf] failed to parse named argument key");for(o.push(c[1]);""!==(p=p.substring(c[0].length));)if(null!==(c=s.key_access.exec(p)))o.push(c[1]);else{if(null===(c=s.index_access.exec(p)))throw new SyntaxError("[sprintf] failed to parse named argument key");o.push(c[1])}t[2]=o}else a|=2;if(3===a)throw new Error("[sprintf] mixing positional 
and named placeholders is not (yet) supported");n.push(t)}r=r.substring(t[0].length)}return i[e]=n}var s={not_string:/[^s]/,not_bool:/[^t]/,not_type:/[^T]/,not_primitive:/[^v]/,number:/[diefg]/,numeric_arg:/[bcdiefguxX]/,json:/[j]/,not_json:/[^j]/,text:/^[^\x25]+/,modulo:/^\x25{2}/,placeholder:/^\x25(?:([1-9]\d*)\$|\(([^\)]+)\))?(\+)?(0|'[^$])?(-)?(\d+)?(?:\.(\d+))?([b-gijostTuvxX])/,key:/^([a-z_][a-z_\d]*)/i,key_access:/^\.([a-z_][a-z_\d]*)/i,index_access:/^\[(\d+)\]/,sign:/^[\+\-]/},i=Object.create(null);"undefined"!=typeof exports&&(exports.sprintf=e,exports.vsprintf=t),"undefined"!=typeof window&&(window.sprintf=e,window.vsprintf=t,"function"==typeof define&&define.amd&&define(function(){return{sprintf:e,vsprintf:t}}))}();
3 | //# sourceMappingURL=sprintf.min.js.map
4 |
--------------------------------------------------------------------------------
/res/topojson.v1.min.js:
--------------------------------------------------------------------------------
1 | // https://github.com/topojson/topojson-client Version 1.8.0. Copyright 2016 Mike Bostock.
2 | !function(n,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t(n.topojson=n.topojson||{})}(this,function(n){"use strict";function t(n){if(!n)return h;var t,r,e=n.scale[0],o=n.scale[1],i=n.translate[0],u=n.translate[1];return function(n,f){f||(t=r=0),n[0]=(t+=n[0])*e+i,n[1]=(r+=n[1])*o+u}}function r(n){if(!n)return h;var t,r,e=n.scale[0],o=n.scale[1],i=n.translate[0],u=n.translate[1];return function(n,f){f||(t=r=0);var c=Math.round((n[0]-i)/e),a=Math.round((n[1]-u)/o);n[0]=c-t,n[1]=a-r,t=c,r=a}}function e(n,t){for(var r,e=n.length,o=e-t;o<--e;)r=n[o],n[o++]=n[e],n[e]=r}function o(n,t){for(var r=0,e=n.length;r>>1;n[o]1){var c,a=[],s={LineString:o,MultiLineString:i,Polygon:i,MultiPolygon:function(n){n.forEach(i)}};u(t),a.forEach(arguments.length<3?function(n){f.push(n[0].i)}:function(n){r(n[0].g,n[n.length-1].g)&&f.push(n[0].i)})}else for(var l=0,h=n.arcs.length;l1)for(var u,f,c=1,a=e(i[0]);ca&&(f=i[0],i[0]=i[c],i[c]=f,a=u);return i})}}function l(n,t){return n[1][2]-t[1][2]}var h=function(){},p=function(n,t){return"GeometryCollection"===t.type?{type:"FeatureCollection",features:t.geometries.map(function(t){return i(n,t)})}:i(n,t)},v=function(n,t){function r(t){var r,e=n.arcs[t<0?~t:t],o=e[0];return n.transform?(r=[0,0],e.forEach(function(n){r[0]+=n[0],r[1]+=n[1]})):r=e[e.length-1],t<0?[r,o]:[o,r]}function e(n,t){for(var r in n){var e=n[r];delete t[e.start],delete e.start,delete e.end,e.forEach(function(n){o[n<0?~n:n]=1}),f.push(e)}}var o={},i={},u={},f=[],c=-1;return t.forEach(function(r,e){var o,i=n.arcs[r<0?~r:r];i.length<3&&!i[1][0]&&!i[1][1]&&(o=t[++c],t[c]=r,t[e]=o)}),t.forEach(function(n){var t,e,o=r(n),f=o[0],c=o[1];if(t=u[f])if(delete u[t.end],t.push(n),t.end=c,e=i[c]){delete i[e.start];var a=e===t?t:t.concat(e);i[a.start=t.start]=u[a.end=e.end]=a}else i[t.start]=u[t.end]=t;else if(t=i[c])if(delete i[t.start],t.unshift(n),t.start=f,e=u[f]){delete u[e.end];var 
s=e===t?t:e.concat(t);i[s.start=e.start]=u[s.end=t.end]=s}else i[t.start]=u[t.end]=t;else t=[n],i[t.start=f]=u[t.end=c]=t}),e(u,i),e(i,u),t.forEach(function(n){o[n<0?~n:n]||f.push([n])}),f},g=function(n){return u(n,f.apply(this,arguments))},d=function(n){return u(n,s.apply(this,arguments))},y=function(n){function t(n,t){n.forEach(function(n){n<0&&(n=~n);var r=i[n];r?r.push(t):i[n]=[t]})}function r(n,r){n.forEach(function(n){t(n,r)})}function e(n,t){"GeometryCollection"===n.type?n.geometries.forEach(function(n){e(n,t)}):n.type in f&&f[n.type](n.arcs,t)}var i={},u=n.map(function(){return[]}),f={LineString:t,MultiLineString:r,Polygon:r,MultiPolygon:function(n,t){n.forEach(function(n){r(n,t)})}};n.forEach(e);for(var c in i)for(var a=i[c],s=a.length,l=0;l0;){var r=(t+1>>1)-1,o=e[r];if(l(n,o)>=0)break;e[o._=t]=o,e[n._=t=r]=n}}function t(n,t){for(;;){var r=t+1<<1,i=r-1,u=t,f=e[u];if(i0&&(n=e[o],t(e[n._=0]=n,0)),r}},r.remove=function(r){var i,u=r._;if(e[u]===r)return u!==--o&&(i=e[o],(l(i,r)<0?n:t)(e[i._=u]=i,u)),u},r},E=function(n,e){function o(n){f.remove(n),n[1][2]=e(n),f.push(n)}var i=t(n.transform),u=r(n.transform),f=m();return null==e&&(e=c),n.arcs.forEach(function(n){var t,r,c,a,s=[],l=0;for(r=0,c=n.length;r updating PeeringDB '
21 | echo '========================================'
22 | peeringdb sync || exit 1
23 | cp ~/.peeringdb/peeringdb.sqlite3 ./data-raw/ || cp ./peeringdb.sqlite3 ./data-raw/ || exit 1
24 | echo
25 |
26 | # =========================================================================== #
27 | # merge all databases
28 | # =========================================================================== #
29 | ./wnm/wnm_merge.py || exit 1
30 |
31 | d=$SECONDS
32 | echo "> all done :) ($(($d / 60))m $(($d % 60))s)"
33 |
--------------------------------------------------------------------------------
/wnm/wnm_asnames.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding: utf-8
3 |
4 | import sys, os, json, gzip
5 | from wnm_utils import *
6 |
7 | # =================================
8 | # vars
9 | # =================================
10 | urlz = {
11 | 'as-radb.db.gz': 'ftp://ftp.radb.net/radb/dbase/radb.db.gz',
12 | 'as-afrinic.db.gz': 'ftp://ftp.afrinic.net/pub/dbase/afrinic.db.gz',
13 | 'as-arin.db.gz': 'ftp://ftp.arin.net/pub/rr/arin.db',
14 | 'as-ripe.db.gz': 'ftp://ftp.ripe.net/ripe/dbase/split/ripe.db.aut-num.gz',
15 | 'as-level3.db.gz': 'ftp://rr.level3.net/pub/rr/level3.db.gz',
16 | 'as-apnic.db.gz': 'https://ftp.apnic.net/apnic/whois/apnic.db.aut-num.gz',
17 | }
18 |
19 | path_data = './data-raw/'
20 | path_db = './data/asnames.json'
21 |
22 | def db_download():
23 | count = 0
24 | print '=' * 40
25 | print '> updating AS names database'
26 | print '=' * 40
27 |
28 | for u in urlz:
29 |
30 | path = path_data + u
31 | if urlz[u][-3:] != '.gz':
32 | path = path_data + u[:-3]
33 |
34 | count += get_url(urlz[u], path)
35 | if urlz[u][-3:] != '.gz':
36 | os.system('gzip -f ' + path)
37 |
38 | if count != len(urlz.values()):
39 | print '> errors occured during update'
40 | return -1
41 |
42 | print '=' * 40
43 | print '> done.'
44 |
def db_update():
    """Parse every downloaded RR dump and build the asn -> name JSON db.

    Scans each gzipped whois dump for consecutive 'aut-num:'/'as-name:'
    line pairs, strips inline '#' comments and the placeholder names
    'DBP'/'UNSPECIFIED', and keeps the longest name seen per ASN across
    all registries. Result is persisted via wnm_save().
    """
    out = {}

    for db in urlz:
        print '> parsing %s' % db
        with gzip.open(path_data + db, 'rb') as f:
            lines = f.read().splitlines()

        # look at adjacent line pairs: an object's 'aut-num:' attribute
        # immediately followed by its 'as-name:' attribute
        for i in range(len(lines)-1):
            l0 = lines[i]
            l1 = lines[i+1]
            if l0[0:8] != 'aut-num:' or l1[0:8] != 'as-name:':
                continue
            l0 = l0.replace('aut-num:', '').strip()
            l1 = l1.replace('as-name:', '').strip()

            # drop trailing inline comments
            if '#' in l0:
                l0 = l0[0:l0.find('#')]
            if '#' in l1:
                l1 = l1[0:l1.find('#')]

            # discard registry placeholder names
            l1 = l1.replace('DBP', '').replace('UNSPECIFIED', '')
            if not len(l1):
                continue

            # l0 is expected to look like 'AS1234' -- skip the 'AS' prefix
            # (int() tolerates surrounding whitespace)
            asn = int(l0[2:])
            name = l1

            # first name wins unless a later registry has a longer one
            if asn not in out:
                out[asn] = name
            elif len(name) > len(out[asn]):
                out[asn] = name

    wnm_save(path_db, out)
79 |
80 |
81 | def usage():
82 | print '%s usage ' % sys.argv[0]
83 | print ''
84 | print 'commands:'
85 | print ' download -- download rr databases'
86 | print ' rebuild -- rebuild local json database'
87 | print ' update -- download & rebuild'
88 | print
89 | sys.exit(1)
90 |
91 |
if __name__ == '__main__':
    # command dispatch: download / rebuild / update
    if len(sys.argv) < 2:
        usage()
        sys.exit(-1)

    action = sys.argv[1]
    if action == 'download':
        if db_download():
            sys.exit(-1)
    elif action == 'rebuild':
        if db_update():
            sys.exit(-1)
    elif action == 'update':
        if db_download():
            sys.exit(-1)
        if db_update():
            sys.exit(-1)
    else:
        # bug fix: unknown commands previously fell through and exited 0
        # silently; now show the help (usage() exits with status 1)
        usage()
109 |
110 |
--------------------------------------------------------------------------------
/wnm/wnm_bgp.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding: utf-8
3 | import os, sys, datetime, time, json, gc, ipaddress
4 | from subprocess import Popen, PIPE
5 | from wnm_utils import *
6 | from tqdm import tqdm
7 |
8 | # =================================
9 | # vars
10 | # =================================
11 |
12 | # url for bgp data
13 |
14 | # EU: http://routeviews.org/route-views.linx/bgpdata/2015.01/RIBS/rib.20150105.0800.bz2
15 | # NA: http://routeviews.org/route-views.eqix/bgpdata/2017.11/RIBS/rib.20171113.1600.bz2
16 | # PAC: http://routeviews.org/route-views.sg/bgpdata/2017.11/RIBS/rib.20171112.1000.bz2
17 | # AF: http://routeviews.org/route-views.jinx/bgpdata/2017.11/RIBS/rib.20171112.2000.bz2
18 | # SA: http://routeviews.org/route-views.saopaulo/bgpdata/2017.11/RIBS/rib.20171113.0000.bz2
19 |
20 | # we try to get 1 bgp view from each continent
21 | bgp_provs = [ 'linx', 'eqix', 'jinx', 'saopaulo', 'sg' ]
22 |
23 | url_bgp = 'http://routeviews.org/route-views.%s/bgpdata/%s/RIBS/rib.%s.0000.bz2'
24 | path_bin_bgpdump = './bins/bgpdump'
25 | path_bgp_dir = './data-raw/'
26 | path_bgp_bz2 = path_bgp_dir + 'bgp-%s.bz2'
27 | path_bgp_bz2_bak = path_bgp_dir + 'bgp-%s.bak.bz2'
28 | path_bgp_db = './data/bgp.json'
29 |
30 | # =================================
31 | # funcs
32 | # =================================
33 | def bgp_download(ix):
34 | path = path_bgp_bz2 % ix
35 | pbak = path_bgp_bz2_bak % ix
36 |
37 | # backup current bgp db if any
38 | if os.path.exists(path):
39 | if os.path.exists(pbak):
40 | os.unlink(pbak)
41 | os.rename(path, pbak)
42 |
43 | # try to download today's db
44 | d = datetime.date.today()
45 | url = url_bgp % (ix, d.strftime('%Y.%m'), d.strftime('%Y%m%d'))
46 | if not get_url(url, path):
47 | # try yesterday's db
48 | d = datetime.date.today() - datetime.timedelta(days=1)
49 | url = url_bgp % (ix, d.strftime('%Y.%m'), d.strftime('%Y%m%d'))
50 | if not get_url(url, path):
51 | return -1
52 |
53 | print '> downloaded [%s] bgp database.' % ix
54 | return 0
55 |
56 | def bgp_download_all():
57 | print '=' * 40
58 | print '> updating BGP database'
59 | print '=' * 40
60 | ret = 0
61 | for ix in bgp_provs:
62 | if bgp_download(ix):
63 | print '> failed to download [%s] bgp database.' % ix
64 | ret -= 1
65 | print '> done %d/%d' % (len(bgp_provs) + ret, len(bgp_provs))
66 | return ret
67 |
def bgp_load(path):
    """Run `bgpdump -m` on `path` and return its stdout as one string.

    Bug fix: the old code read stdout to EOF while stderr was also a
    PIPE that nothing drained -- if bgpdump wrote enough to stderr the
    pipe buffer filled and both processes deadlocked. communicate()
    drains both streams and waits for the child to exit, so the manual
    kill() is no longer needed.
    """
    pop = Popen([path_bin_bgpdump, '-m', path], stdout=PIPE, stderr=PIPE)
    data, _ = pop.communicate()
    return data
76 |
def bgp_build(asl, ix):
    """Merge one collector's RIB dump into the shared AS table `asl`.

    asl -- dict of asn -> record (see asn_check); mutated in place
    ix  -- collector name, used to locate the downloaded .bz2 file

    Each bgpdump '-m' line is pipe-separated; as used below, field 5 is
    the announced prefix and field 6 the AS path (presumably the
    machine-readable MRT format -- confirm against bgpdump docs).
    Builds AS-to-AS adjacency from consecutive path members and records
    publicly-routable prefixes on the originating (last) AS.
    """
    path = path_bgp_bz2 % ix

    print '> loading %s' % path
    data = bgp_load(path)
    lines = data.splitlines()

    # free memory
    data = ''
    gc.collect()

    print '> processing %s' % path
    for l in tqdm(lines):
        # an empty line is treated as end of useful data
        if not len(l):
            break
        tmp = ascii_clean(l).split('|')
        if len(tmp) < 7:
            print '! %s ' % l
            continue
        pfx = tmp[5]
        # AS sets come as '{a,b}' -- flatten them into plain members
        cstr = tmp[6].replace('{', '').replace('}', '').replace(',', ' ')
        chain = [ int(_) for _ in cstr.split(' ') ]
        # the last AS on the path originates the prefix
        asn = chain[-1]
        # print 'cidr[%s] chain[%r] as[%d]' % (pfx, chain, asn)

        # every consecutive pair on the path is a peering link
        for i in range(len(chain)-1):
            asn_link(asl, chain[i], chain[i+1])

        # skip private prefixes
        p = ipaddress.ip_network(unicode(pfx), strict=0)
        if p.is_multicast or p.is_private or p.is_unspecified or p.is_reserved or p.is_loopback or p.is_link_local:
            continue

        # add prefix to AS (counters track unique prefixes only)
        aso = asn_check(asl, asn)
        if pfx not in aso['prefix']:
            aso['prefix'].append(pfx)
            if ':' in pfx:
                aso['prefix6'] += 1
            else:
                aso['prefix4'] += 1

    # free memory
    lines = []
    gc.collect()
    return 0
123 |
def bgp_build_all():
    """Rebuild the merged AS graph from every downloaded route view and save it."""
    asl = {}

    for ix in bgp_provs:
        bgp_build(asl, ix)

    wnm_save(path_bgp_db, asl)
    return 0
136 |
137 | # =========================================================================== #
138 | # utils
139 | # =========================================================================== #
def ascii_clean(s):
    """Strip non-printable characters from `s`.

    Bug fix: this module never imported `string`, so the reference to
    string.printable raised NameError on first use; import it locally.
    """
    import string
    return filter(lambda x: x in string.printable, s)
142 |
def asn_check(asl, asn):
    """Return the record for `asn` in `asl`, creating an empty one on first use."""
    if asn not in asl:
        asl[asn] = {
            'asn': asn,
            'prefix': [],
            'prefix4': 0,
            'prefix6': 0,
            'links': [],
        }
    return asl[asn]
152 |
def asn_link(asl, asn1, asn2):
    """Record a bidirectional adjacency between two ASes in `asl`."""
    rec1 = asn_check(asl, asn1)
    rec2 = asn_check(asl, asn2)

    # each side lists the other exactly once
    if asn1 not in rec2['links']:
        rec2['links'].append(asn1)
    if asn2 not in rec1['links']:
        rec1['links'].append(asn2)
161 |
162 |
163 | # =================================
164 | # main
165 | # =================================
166 | if __name__ == '__main__':
167 | if not os.path.exists(path_bgp_dir):
168 | os.mkdir(path_bgp_dir)
169 |
170 | if len(sys.argv) < 2:
171 | print 'usage: %s [update|download|rebuild|test]' % sys.argv[0]
172 | sys.exit(-1)
173 |
174 | action = sys.argv[1]
175 | if action == 'update':
176 | if bgp_download_all():
177 | print '[-] error downloading bgp view'
178 | sys.exit(-1)
179 | if bgp_build_all():
180 | print '[-] error building bgp db'
181 | sys.exit(-1)
182 |
183 | elif action == 'download':
184 | if bgp_download_all():
185 | print '[-] error downloading bgp view'
186 | sys.exit(-1)
187 |
188 | elif action == 'rebuild':
189 | if bgp_build_all():
190 | print '[-] error building bgp db'
191 | sys.exit(-1)
192 |
193 | elif action == 'test':
194 |
195 | print '> loading db..'
196 | with open(path_bgp_db, 'rb') as f:
197 | db = json.load(f)
198 | print json.dumps(db['21502'], indent=4, sort_keys=1)
199 |
200 | else:
201 | print '[-] unknown command "%s"' % action
202 | print 'usage: %s [update|download|rebuild|test]' % sys.argv[0]
203 | sys.exit(-1)
204 |
205 | sys.exit(0)
206 |
--------------------------------------------------------------------------------
/wnm/wnm_geo.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding: utf-8
3 | import sys, os, json, csv, gzip
4 | from wnm_utils import *
5 |
6 | # submarine cable database
7 | url = 'http://download.maxmind.com/download/worldcities/worldcitiespop.txt.gz'
8 | path_raw = './data-raw/worldcitiespop.txt.gz'
9 | path_out = './data/cities.json'
10 |
def geo_download():
    """Fetch the maxmind world-cities dump into data-raw.

    Returns 0 on success, -1 when the download failed.
    """
    print '=' * 40
    print '> updating world cities database'
    print '=' * 40
    # get_url() is expected to be truthy on success -- see wnm_utils
    if not get_url(url, path_raw, chunk_size=2048):
        return -1

    print '> done'
    return 0
20 |
def geo_rebuild():
    """Build data/cities.json ({cc: {city: [lat, lng]}}) from the raw dump.

    Row layout assumed to follow maxmind's worldcitiespop format:
    country, city, accented city, region, population, lat, lng --
    TODO confirm (row[1] and row[2] are both kept as name variants).
    When several rows share a city name, the most populous one wins.
    """

    geo = {}
    n = 0

    print '> processing %s' % path_raw
    with gzip.open(path_raw, 'rb') as f:
        rd = csv.reader(f, delimiter=',')
        # discard the header row
        hdr = rd.next()
        for row in rd:
            cc = row[0].upper()
            # normalize both name variants: lowercase, dashes for spaces
            ct = row[1].decode('latin-1')
            ct = ct.lower().replace(' ', '-').encode('utf-8')
            city = row[2].decode('latin-1')
            city = city.lower().replace(' ', '-').encode('utf-8')
            state = row[3]
            pop = int(row[4]) if len(row[4]) else 0
            lat = float(row[5])
            lng = float(row[6])

            if not cc in geo:
                geo[cc] = {}

            # include state in cityname for the us
            if cc == 'US':
                city = '%s-%s' % (state, city)
                ct = '%s-%s' % (state, ct)

            # update if city is not known yet, or if its a city with same name but more population
            if not city in geo[cc] or pop > geo[cc][city]['pop']:
                geo[cc][city] = {'lat': lat, 'lng': lng, 'pop': pop }
                # also index the alternate spelling when it differs
                if ct != city:
                    geo[cc][ct] = {'lat': lat, 'lng': lng, 'pop': pop }
            # else: pass

            n += 1

    # strip the population field: the output only keeps coordinates
    geo2 = {}
    for cc in geo:
        geo2[cc] = {}
        for city in geo[cc]:
            geo2[cc][city] = [ geo[cc][city]['lat'], geo[cc][city]['lng'] ]

    wnm_save(path_out, geo2)

    return 0
67 |
68 |
69 | # =================================
70 | # main
71 | # =================================
72 | def usage():
73 | print '%s usage ' % sys.argv[0]
74 | print ''
75 | print 'commands:'
76 | print ' download -- download geo cities database'
77 | print ' rebuild -- rebuild local json database'
78 | print ' update -- download & rebuild'
79 | print
80 | sys.exit(1)
81 |
82 | if __name__ == '__main__':
83 |
84 | if len(sys.argv) < 2:
85 | usage()
86 | sys.exit(-1)
87 |
88 | action = sys.argv[1]
89 |
90 | if action == 'download':
91 | geo_download()
92 |
93 | elif action == 'update':
94 | if geo_download():
95 | sys.exit(-1)
96 | if geo_rebuild():
97 | sys.exit(-1)
98 |
99 | elif action == 'rebuild':
100 | if geo_rebuild():
101 | sys.exit(-1)
102 |
103 | else:
104 | print '[-] unknown command "%s"' % action
105 | sys.exit(-1)
106 |
107 | sys.exit(0)
108 |
109 |
--------------------------------------------------------------------------------
/wnm/wnm_nics.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding: utf-8
3 | import sys, os, socket, struct, urllib2, json, bisect
4 | from wnm_utils import *
5 |
6 | # =================================
7 | # vars
8 | # =================================
9 | nics = {
10 | 'afrinic': 'ftp://ftp.afrinic.net/pub/stats/afrinic/delegated-afrinic-latest',
11 | 'apnic': 'ftp://ftp.apnic.net/pub/stats/apnic/delegated-apnic-latest',
12 | 'arin': 'ftp://ftp.arin.net/pub/stats/arin/delegated-arin-extended-latest',
13 | 'lacnic': 'ftp://ftp.lacnic.net/pub/stats/lacnic/delegated-lacnic-latest',
14 | 'ripe': 'ftp://ftp.ripe.net/ripe/stats/delegated-ripencc-latest',
15 | }
16 |
17 | path_nic = './data-raw/'
18 | path_data = './data/'
19 | path_db = path_data + 'nics.json'
20 | default_db = { 'asn': {}, 'ipv4': {}, 'ipv6': {}, 'dbg': {} }
21 | db = default_db
22 | keys_asn = []
23 | keys_ipv4 = []
24 | keys_ipv6 = []
25 |
26 |
27 | # =================================
28 | # funcs
29 | # =================================
30 |
31 | def parse_rir(db, data):
32 | header = 0
33 |
34 | for l in data.splitlines(False):
35 | l = l.strip()
36 | # skip coments & empty lines
37 | if l[0] == '#' or not len(l):
38 | continue
39 | tmp = l.split('|')
40 |
41 | # parse header
42 | if not header:
43 | header = 1
44 | version, nic, records, enddate = tmp[0], tmp[1], int(tmp[3]), tmp[5]
45 | if version != '2' and version != '2.3':
46 | print '[-1] unknown version'
47 | return 0
48 | print '[%7s] date: %s records %d ' % (nic, enddate, records)
49 | continue
50 |
51 | # parse summary lines
52 | if tmp[-1] == 'summary':
53 | continue
54 |
55 | # normal entry
56 | fields = ['registry', 'cc', 'type', 'start', 'value', 'date', 'status', 'extensions']
57 | entry = dict(zip(fields, tmp))
58 |
59 | entry['intstart'] = val_to_int(entry['type'], entry['start'])
60 | entry['count'] = int(entry['value'])
61 | entry['reg'] = nic
62 |
63 | # add it based on type
64 | if entry['type'] == 'ipv4':
65 | startval = ipv4_to_int(entry['start'])
66 | entry['cidr'] = entry['start']
67 | for i in range(32):
68 | if (1 << i) == entry['count']:
69 | entry['cidr'] += '/%d' % (32 - i)
70 | break
71 | if not '/' in entry['cidr']:
72 | entry['cidr'] += '/?'
73 | elif entry['type'] == 'ipv6':
74 | startval = ipv6_to_int(entry['start'])
75 | entry['count'] = 1 << (128 - int(entry['value']))
76 | entry['cidr'] = entry['start'] + '/' + entry['value']
77 | else:
78 | startval = int(entry['start'])
79 | for i in range(int(startval) + 1, int(startval) + int(entry['count'])):
80 | db[entry['type']][str(i)] = entry
81 |
82 |
83 | db[entry['type']][startval] = entry
84 |
85 | return db
86 |
87 | def db_download():
88 | count = 0
89 | print '=' * 40
90 | print '> updating nics database'
91 | print '=' * 40
92 |
93 | for u in nics:
94 | count += get_url(nics[u], path_nic + 'nic-' + u + '.txt')
95 |
96 | if count != len(nics):
97 | print '> errors occured during update'
98 | return -1
99 |
100 | return 0
101 |
102 | def db_consolidate(db):
103 |
104 | print '=' * 40
105 | print '> consolidating database..'
106 |
107 | for f in nics:
108 | with open(path_nic + 'nic-' + f + '.txt', 'rb') as f:
109 | parse_rir(db, f.read())
110 |
111 | print '> total rirs info [asn: %d, ipv4: %d, ipv6: %d]' % (len(db['asn']), len(db['ipv4']), len(db['ipv6']))
112 |
113 | wnm_save(path_db, db)
114 |
def db_update(download=1):
    """Optionally download fresh RIR dumps, then rebuild the consolidated db."""
    if download and db_download() < 0:
        return -1

    db_consolidate(default_db)
    return 0
122 |
123 | def db_load():
124 | global db
125 | try:
126 | db1 = wnm_load(path_db)
127 | for type in ['asn', 'ipv4', 'ipv6']:
128 | for k in db1[type]:
129 | db[type][int(k)] = db1[type][k]
130 | print '> loaded db (asn: %d, ipv4: %d, ipv6: %d)' % (len(db['asn']), len(db['ipv4']), len(db['ipv6']))
131 | return db
132 | except:
133 | print '[-] error loading database file'
134 | return default_db
135 |
136 |
137 | def lookup(type, val, warn=1):
138 | global keys_ipv4, keys_ipv6, keys_asn
139 |
140 | # build sorted key tables if needed
141 | if type == 'ipv4':
142 | if not len(keys_ipv4):
143 | keys_ipv4 = sorted(db['ipv4'].keys())
144 | keys = keys_ipv4
145 | elif type == 'ipv6':
146 | if not len(keys_ipv6):
147 | keys_ipv6 = sorted(db['ipv6'].keys())
148 | keys = keys_ipv6
149 | else:
150 | if not len(keys_asn):
151 | keys_asn = sorted(db['asn'].keys())
152 | keys = keys_asn
153 |
154 | # convert to decimal
155 | intval = val_to_int(type, val)
156 |
157 | # lookup
158 | ind = bisect.bisect_left(keys, intval)
159 | if ind < len(keys):
160 | if keys[ind] == intval:
161 | return db[type][keys[ind]]
162 | else:
163 | e = db[type][keys[ind-1]]
164 | if intval >= e['intstart'] and intval < e['intstart'] + e['count']:
165 | return e
166 |
167 | if warn:
168 | print '[-] cant resolve [%s:%s]' % (type, val)
169 | return None
170 |
def lookup_ipv4(ipv4):
    # convenience wrapper: dotted-quad string, e.g. '8.8.8.8'
    return lookup('ipv4', ipv4)

def lookup_ipv6(ipv6):
    # convenience wrapper: ipv6 address string
    return lookup('ipv6', ipv6)

def lookup_asn(asn):
    # convenience wrapper: plain AS number
    return lookup('asn', asn)
179 |
180 |
181 | # =================================
182 | # main
183 | # =================================
184 | def usage():
185 | print '%s usage ' % sys.argv[0]
186 | print ''
187 | print 'commands:'
188 | print ' download -- download nics database'
189 | print ' rebuild -- rebuild local json database'
190 | print ' update -- download & rebuild'
191 | print ' lookup ipv4_addr -- lookup an ipv4 address'
192 | print ' lookup ipv4 addr -- lookup an ipv4 address'
193 | print ' lookup ipv6 addr -- lookup an ipv6 address'
194 | print ' lookup asn addr -- lookup an AS number'
195 | print
196 | sys.exit(1)
197 |
198 | if __name__ == '__main__':
199 | if not os.path.exists(path_nic):
200 | os.mkdir(path_nic)
201 |
202 | if not os.path.exists(path_data):
203 | os.mkdir(path_data)
204 |
205 | if len(sys.argv) < 2:
206 | usage()
207 | sys.exit(-1)
208 |
209 | action = sys.argv[1]
210 | if action == 'download':
211 | if db_download():
212 | sys.exit(-1)
213 | elif action == 'update':
214 | if db_update():
215 | sys.exit(-1)
216 | elif action == 'rebuild':
217 | if db_update(0):
218 | sys.exit(-1)
219 | elif action == 'lookup':
220 | if len(sys.argv) == 3:
221 | type = 'ipv4'
222 | val = sys.argv[2]
223 | elif len(sys.argv) == 4:
224 | type = sys.argv[2]
225 | val = sys.argv[3]
226 | else:
227 | usage()
228 | db_load()
229 | print json.dumps(lookup(type, val), indent=4)
230 | else:
231 | print '[-] unknown command "%s"' % action
232 | sys.exit(-1)
233 |
234 | sys.exit(0)
235 |
236 |
237 |
238 |
--------------------------------------------------------------------------------
/wnm/wnm_scd.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding: utf-8
3 | import sys, os, json, re, glob, datetime
4 | from wnm_utils import *
5 |
6 | # submarine cable database
7 | url = 'https://github.com/telegeography/www.submarinecablemap.com/archive/master.zip'
8 | path_raw = './data-raw/'
9 | path_tmp = path_raw + 'cables.zip'
10 | path_db = path_raw + 'cables'
11 | path_out = './data/scdb.json'
12 |
def scd_download():
    """Download telegeography's submarinecablemap repo zip and extract
    its public/api/v1 JSON payload into data-raw/cables.

    Returns 0 on success, -1 on any download/unzip/move failure.
    """
    print '=' * 40
    print '> updating submarine cable database'
    print '=' * 40
    if not get_url(url, path_tmp, chunk_size=512):
        # retry once, github can be annoying sometimes
        if not get_url(url, path_tmp, chunk_size=512):
            return -1

    # os.system returns non-zero on failure
    if os.system('unzip -q -d %s %s' % (path_raw, path_tmp)):
        print '> failed to unzip %s' % path_tmp
        return -1

    # keep only the api/v1 tree, renamed to data-raw/cables
    if os.system('mv %s/www.submarinecablemap.com-master/public/api/v1/ %s' % (path_raw, path_db)):
        return -1

    # clean up the rest of the checkout and the zip itself
    if os.system('rm -rf %s/www.submarinecablemap.com-master' % path_raw):
        return -1

    if os.system('rm -rf %s' % path_tmp):
        return -1

    return 0
36 |
37 | def scd_rebuild():
38 |
39 | def build_db_from_dir(path):
40 | data = []
41 | for f in glob.glob('%s/*' % path):
42 | if 'all.json' in f:
43 | continue
44 | if 'search.json' in f:
45 | continue
46 | with open(f, 'rb') as f:
47 | data += [ json.load(f) ]
48 | return data
49 |
50 | # country name to iso2 country code
51 | ccmap = {"Andorra":"AD","United Arab Emirates":"AE","Afghanistan":"AF","Antigua and Barbuda":"AG","Anguilla":"AI","Albania":"AL","Armenia":"AM","Netherlands Antilles":"AN","Angola":"AO","Antarctica":"AQ","Argentina":"AR","American Samoa":"AS","Austria":"AT","Australia":"AU","Aruba":"AW","ALA Aland Islands":"AX","Azerbaijan":"AZ","Bosnia and Herzegovina":"BA","Barbados":"BB","Bangladesh":"BD","Belgium":"BE","Burkina Faso":"BF","Bulgaria":"BG","Bahrain":"BH","Burundi":"BI","Benin":"BJ","Saint-Barthélemy":"BL","Saint Barthélemy":"BL","Bermuda":"BM","Brunei Darussalam":"BN","Brunei":"BN","Bolivia":"BO","Bonaire":"BQ","Sint Eustatius and Saba":"BQ","Brazil":"BR","Bahamas":"BS","Bhutan":"BT","Bouvet Island":"BV","Botswana":"BW","Belarus":"BY","Belize":"BZ","Canada":"CA","Cocos (Keeling) Islands":"CC","Congo, (Kinshasa)":"CD","Congo, Rep.":"CD","Congo, Dem. Rep.":"CD","Central African Republic":"CF","Congo (Brazzaville)":"CG","Switzerland":"CH","Côte d'Ivoire":"CI","Cook Islands":"CK","Chile":"CL","Cameroon":"CM","China":"CN","Colombia":"CO","Costa Rica":"CR","Cuba":"CU","Cape Verde":"CV","Curaçao":"CW","Christmas Island":"CX","Cyprus":"CY","Czech Republic":"CZ","Germany":"DE","Djibouti":"DJ","Denmark":"DK","Tyra":"DK","Valdemar":"DK","South Arne":"DK","Dominica":"DM","Dominican Republic":"DO","Algeria":"DZ","Ecuador":"EC","Estonia":"EE","Egypt":"EG","Western Sahara":"EH","Eritrea":"ER","Spain":"ES","Ethiopia":"ET","Europe":"EU","Finland":"FI","Fiji":"FJ","Falkland Islands (Malvinas)":"FK","Micronesia, Federated States of":"FM","Federated States of Micronesia":"FM","Chuuk":"FM","Faroe Islands":"FO","Faeroe Islands":"FO","France":"FR","Gabon":"GA","United Kingdom":"GB","Grenada":"GD","Georgia":"GE","French Guiana":"GF","Guernsey":"GG","Ghana":"GH","Gibraltar":"GI","Greenland":"GL","Gambia":"GM","Guinea":"GN","Guadeloupe":"GP","Equatorial Guinea":"GQ","Greece":"GR","South Georgia and the South Sandwich 
Islands":"GS","Guatemala":"GT","Guam":"GU","Guinea-Bissau":"GW","Guyana":"GY","Hong Kong, SAR China":"HK","Heard and Mcdonald Islands":"HM","Honduras":"HN","Croatia":"HR","Haiti":"HT","Hungary":"HU","Indonesia":"ID","Ireland":"IE","Israel":"IL","Isle of Man":"IM","India":"IN","British Indian Ocean Territory":"IO","Iraq":"IQ","Iran, Islamic Republic of":"IR","Iran":"IR","Iceland":"IS","Italy":"IT","Jersey":"JE","Jamaica":"JM","Jordan":"JO","Japan":"JP","Kenya":"KE","Kyrgyzstan":"KG","Cambodia":"KH","Kiribati":"KI","Comoros":"KM","Saint Kitts and Nevis":"KN","Korea (North)":"KP","Korea (South)":"KR","Korea, Rep.":"KR","Kuwait":"KW","Cayman Islands":"KY","Kazakhstan":"KZ","Lao PDR":"LA","Lebanon":"LB","Saint Lucia":"LC","Liechtenstein":"LI","Sri Lanka":"LK","Liberia":"LR","Lesotho":"LS","Lithuania":"LT","Luxembourg":"LU","Latvia":"LV","Libya":"LY","Morocco":"MA","Monaco":"MC","Moldova":"MD","Montenegro":"ME","Saint-Martin (French part)":"MF","Saint Martin":"MF","SaintMartin":"MF","Madagascar":"MG","Marshall Islands":"MH","Republic of Marshall Islands":"MH","Macedonia, Republic of":"MK","Mali":"ML","Myanmar":"MM","Mongolia":"MN","Macao, SAR China":"MO","Northern Mariana Islands":"MP","Saipan":"MP","Martinique":"MQ","Mauritania":"MR","Montserrat":"MS","Malta":"MT","Mauritius":"MU","Maldives":"MV","Malawi":"MW","Mexico":"MX","Malaysia":"MY","Mozambique":"MZ","Namibia":"NA","New Caledonia":"NC","Niger":"NE","Norfolk Island":"NF","Nigeria":"NG","Nicaragua":"NI","Netherlands":"NL","Norway":"NO","Nepal":"NP","Nauru":"NR","Niue":"NU","New Zealand":"NZ","Oman":"OM","Panama":"PA","Peru":"PE","French Polynesia":"PF","Papua New Guinea":"PG","Philippines":"PH","Pakistan":"PK","Poland":"PL","Saint Pierre and Miquelon":"PM","Pitcairn":"PN","Puerto Rico":"PR","Palestinian Territory":"PS","Portugal":"PT","Palau":"PW","Paraguay":"PY","Qatar":"QA","Réunion":"RE","Romania":"RO","Serbia":"RS","Russian Federation":"RU","Russia":"RU","Rwanda":"RW","Saudi Arabia":"SA","Solomon 
Islands":"SB","Seychelles":"SC","Sudan":"SD","Sweden":"SE","Singapore":"SG","Saint Helena":"SH","Slovenia":"SI","Svalbard and Jan Mayen Islands":"SJ","Slovakia":"SK","Sierra Leone":"SL","San Marino":"SM","Senegal":"SN","Somalia":"SO","Suriname":"SR","South Sudan":"SS","Sao Tome and Principe":"ST","El Salvador":"SV","Sint Maarten":"SX","Syrian Arab Republic":"SY","Syria":"SY","Swaziland":"SZ","Turks and Caicos Islands":"TC","Chad":"TD","French Southern Territories":"TF","Togo":"TG","Thailand":"TH","Tajikistan":"TJ","Tokelau":"TK","Timor-Leste":"TL","Turkmenistan":"TM","Tunisia":"TN","Tonga":"TO","Turkey":"TR","Trinidad and Tobago":"TT","Tuvalu":"TV","Taiwan, Republic of China":"TW","Taiwan":"TW","Tanzania, United Republic of":"TZ","Tanzania":"TZ","Ukraine":"UA","Uganda":"UG","US Minor Outlying Islands":"UM","United States of America":"US","United States":"US","Uruguay":"UY","Uzbekistan":"UZ","Holy See (Vatican City State)":"VA","Saint Vincent and Grenadines":"VC","Saint Vincent and the Grenadines":"VC","Venezuela":"VE","British Virgin Islands":"VG","Virgin Islands (U.K.)":"VG","Virgin Islands, US":"VI","Virgin Islands (U.S.)":"VI","Viet Nam":"VN","Vietnam":"VN","Vanuatu":"VU","Wallis and Futuna Islands":"WF","Wallis and Futuna":"WF","Samoa":"WS","Kosovo":"XK","Yemen":"YE","Mayotte":"YT","South Africa":"ZA","Zambia":"ZM","Zimbabwe":"ZW","Canary Islands":"ES"}
52 |
53 | countries = {}
54 | landing2cc = {}
55 | for cn in build_db_from_dir(path_db + '/country/'):
56 | n = cn['name'].encode('utf-8')
57 | c = {}
58 | c['cc'] = ccmap[n]
59 | c['name'] = n
60 | c['cables'] = [ str(_['cable_id']) for _ in cn['cables'] ]
61 | c['landings'] = [ str(_['landing_point_id']) for _ in cn['landing_points'] ]
62 |
63 | countries[c['cc']] = c
64 |
65 | for lid in c['landings']:
66 | if lid in landing2cc and landing2cc[lid] != c['cc']:
67 | print '> warning landing-point %s already mapped to %s (and %s??)' % (lid, landing2cc[lid], c['cc'])
68 | else:
69 | landing2cc[lid] = c['cc']
70 |
71 | landings = {}
72 | for ld in build_db_from_dir(path_db + '/landing-point/'):
73 | l = {}
74 | lid = str(ld['city_id'])
75 | l['cc'] = landing2cc[lid]
76 | l['id'] = lid
77 | l['name'] = ld['name']
78 | l['lat'] = ld['latitude']
79 | l['lng'] = ld['longitude']
80 | l['cables'] = [ str(_['cable_id']) for _ in ld['cables'] ]
81 |
82 | landings[lid] = l
83 |
84 | cables = {}
85 | for ca in build_db_from_dir(path_db + '/cable/'):
86 | cid = str(ca['cable_id'])
87 |
88 | def _length(s):
89 | if s == 'n.a.':
90 | return -1
91 | return float(s.replace(',', '').replace(' ', '').replace('km', ''))
92 |
93 | c = {}
94 | c['id'] = cid
95 | c['name'] = ca['name'].encode('utf-8')
96 | c['rfs'] = ca['rfs'].split(' ')[-1]
97 | c['length'] = _length(ca['length'])
98 | c['url'] = ca['url'] if ca['url'] is not None else ''
99 | c['notes'] = ca['notes'] if ca['notes'] is not None else ''
100 | c['landings'] = [ str(_['landing_point_id']) for _ in ca['landing_points'] ]
101 | c['countries'] = list(set([ landings[i]['cc'] for i in c['landings'] if i in landings ]))
102 |
103 | own = ca['owners'].replace(',', ', ')
104 | while own.find(' ') >= 0:
105 | own = own.replace(' ', ' ')
106 | c['owners'] = own.split(', ')
107 |
108 | # is cable ready for service?
109 | month = ca['rfs'].split(' ')[-2] if len(ca['rfs'].split(' ')) > 1 else 'December'
110 | months = { 'January':1, 'February':2, 'March':3, 'April':4, 'May':5, 'June':6, 'July':7, 'August':8, 'September':9, 'October':10, 'November':11, 'December':12, 'Q1':3, 'Q2':6, 'Q3':9, 'Q4': 12 }
111 | if month not in months.keys():
112 | month = 'December'
113 | try:
114 | m = months[month]
115 | y = int(c['rfs'])
116 | today = datetime.date.today()
117 |
118 | if today.year > y:
119 | c['ready'] = 1
120 | elif today.year < y:
121 | c['ready'] = 0
122 | else:
123 | c['ready'] = 1 if m < today.month else 0
124 | except:
125 | c['ready'] = 0
126 |
127 | cables[cid] = c
128 |
129 | # sanity check
130 | for cc in countries:
131 | for lid in countries[cc]['landings']:
132 | if lid not in landings:
133 | print '> unknown landing %s for country %s' % (lid, cc)
134 | countries[cc]['landings'] = [ lid for lid in countries[cc]['landings'] if lid in landings ]
135 | for cid in cables:
136 | for lid in cables[cid]['landings']:
137 | if lid not in landings:
138 | print '> unknown landing %s for cable %s' % (lid, cid)
139 | cables[cid]['landings'] = [ lid for lid in cables[cid]['landings'] if lid in landings ]
140 |
141 | scdb = {
142 | 'countries': countries,
143 | 'landings': landings,
144 | 'cables': cables,
145 | }
146 |
147 | wnm_save(path_out, scdb)
148 |
149 | return 0
150 |
151 |
152 | # =================================
153 | # main
154 | # =================================
def usage():
    """Print the command-line help for the submarine-cable tool and exit
    with status 1 (never returns)."""
    # single-argument print() calls behave identically under Python 2 and 3
    print('%s usage ' % sys.argv[0])
    print('')
    print('commands:')
    print(' download -- download submarinecablemap database')
    print(' rebuild -- rebuild local json database')
    print(' update -- download & rebuild')
    print('')
    sys.exit(1)
164 |
if __name__ == '__main__':

    # need at least one command argument
    if len(sys.argv) < 2:
        usage()
        sys.exit(-1)

    action = sys.argv[1]

    if action == 'download':
        scd_download()

    elif action == 'update':
        # abort if either phase reports failure (non-zero return)
        if scd_download():
            sys.exit(-1)
        if scd_rebuild():
            sys.exit(-1)

    elif action == 'rebuild':
        if scd_rebuild():
            sys.exit(-1)

    else:
        # single-argument print() is Python 2/3 compatible
        print('[-] unknown command "%s"' % action)
        sys.exit(-1)

    sys.exit(0)
191 |
192 |
--------------------------------------------------------------------------------
/wnm/wnm_utils.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding: utf-8
import binascii
import gzip
import json
import os
import socket
import string
import struct
import sys
import time
import urllib
import urllib2
4 |
def h2bin(x):
    """Convert a hex string (spaces and newlines ignored) to raw bytes.

    Uses binascii.unhexlify instead of the Python-2-only str.decode('hex');
    behavior is identical under Python 2 and works under Python 3.
    """
    return binascii.unhexlify(x.replace(' ', '').replace('\n', ''))
7 |
def hdstr(x):
    """Render a byte/character string as a lowercase hex digit string."""
    parts = []
    for ch in x:
        parts.append('%.2x' % ord(ch))
    return ''.join(parts)
10 |
def ascii_clean(s):
    """Strip non-printable characters from s, returning a str.

    Rewritten from filter(): under Python 3 filter() returns a lazy
    iterator rather than a string; the join form returns str on both
    Python 2 and 3 with identical content.
    """
    return ''.join(c for c in s if c in string.printable)
13 |
def get_url(url, fname, chunk_size=8192*16):
    """Download url to the local file fname, printing progress to stdout.

    Returns 1 on success, 0 on any failure (errors are reported, never
    raised to the caller).
    """
    def report(size_dled, size_tot, error=0):
        # overwrite the in-place progress line before printing new status
        sys.stdout.write(' ' * 80 + '\r')
        if error:
            sys.stdout.write('> error downloading %s' % fname + '\n')
        elif size_dled >= size_tot:
            # final report: total elapsed time (ts set in enclosing scope)
            te = time.time()
            sys.stdout.write('> downloaded %s in %.1f secs' % (fname, te-ts) + '\n')
        else:
            sys.stdout.write('> downloading %s progress %.1f%%\r' % (fname, 100.0 * float(size_dled) / size_tot))
        sys.stdout.flush()

    try:
        print '> %s' % url
        ts = time.time()
        f = open(fname, 'wb+')
        ans = urllib2.urlopen(url);
        # requires the server to send Content-Length; if absent, the
        # AttributeError is caught by the bare except below
        size_tot = int(ans.info().getheader('Content-Length').strip())
        size_dled = 0
        while 1:
            chunk = ans.read(chunk_size)
            if not chunk:
                break
            size_dled += len(chunk)
            f.write(chunk)
            if size_dled < size_tot:
                report(size_dled, size_tot)
        # unconditional final report (prints the "downloaded in N secs" line)
        report(size_dled, size_tot)
        f.close()
        return 1
    except:
        # best-effort: any failure (network, missing header, disk) is
        # reported and mapped to a 0 return.
        # NOTE(review): bare except also swallows KeyboardInterrupt, and
        # f is left open on the error path — consider try/finally.
        report(0, 0, 1)
        return 0
47 |
def ipv4_to_int(ipstr):
    """Dotted-quad IPv4 address string -> unsigned 32-bit integer."""
    packed = socket.inet_pton(socket.AF_INET, ipstr)
    (value,) = struct.unpack('!I', packed)
    return value
50 |
def int_to_ipv4(ipint):
    """Unsigned 32-bit integer -> dotted-quad IPv4 address string."""
    packed = struct.pack('!I', ipint)
    return socket.inet_ntop(socket.AF_INET, packed)
53 |
def ipv6_to_int(ipstr):
    """IPv6 address string -> 128-bit integer."""
    packed = socket.inet_pton(socket.AF_INET6, ipstr)
    # two big-endian 64-bit halves, recombined into one 128-bit value
    hi, lo = struct.unpack('!2Q', packed)
    return (hi << 64) | lo
58 |
def int_to_ipv6(ipint):
    """128-bit integer -> canonical IPv6 address string."""
    # split into two big-endian 64-bit halves for struct packing
    hi = ipint >> 64
    lo = ipint & ((1 << 64) - 1)
    return socket.inet_ntop(socket.AF_INET6, struct.pack('!2Q', hi, lo))
63 |
def val_to_int(type, val):
    """Convert val to an integer, interpreting it per type.

    type 'ipv4' / 'ipv6' converts an address string; anything else is
    treated as a plain numeric value.
    """
    if type == 'ipv4':
        return ipv4_to_int(val)
    if type == 'ipv6':
        return ipv6_to_int(val)
    return int(val)
71 |
def wnm_save(path, obj):
    """Serialize obj as pretty-printed, key-sorted JSON into path + '.gz'.

    Serializes to a string first and writes encoded bytes: json.dump()
    straight into a binary gzip handle only works under Python 2; the
    output bytes are identical (default ensure_ascii keeps JSON ASCII).
    """
    print('> saving to %s.gz (%d items)\n' % (path, len(obj)))
    data = json.dumps(obj, indent=4, sort_keys=1)
    with gzip.open(path + '.gz', 'wb+') as f:
        f.write(data.encode('utf-8'))
76 |
def wnm_load(path, default=None):
    """Load and return the JSON object stored (gzipped) at path + '.gz'.

    Returns default on any load/parse failure. `except Exception` (not a
    bare except) so SystemExit / KeyboardInterrupt are not swallowed.
    """
    try:
        print('> loading data from %s.gz' % path)
        with gzip.open(path + '.gz', 'rb') as f:
            return json.load(f)
    except Exception:
        print('> error loading %s.gz' % path)
        return default
85 |
# opencagedata.com to resolve address to gps coords
# NOTE(review): a real API key was previously committed in this comment —
# redacted. Supply the key only via the OCD_KEY environment variable
# (as geo_lookup below already requires); never commit keys to source.
# module-level flag: emit the "missing OCD_KEY" warning only once per run
api_warn = 0
def geo_lookup(place):
    """Resolve a free-form place name to a (lat, lng) tuple via the
    OpenCageData geocoding HTTP API.

    Requires the OCD_KEY environment variable; raises Exception('') when
    it is unset, and propagates any network/JSON/indexing error — callers
    are expected to catch failures.
    """
    if not 'OCD_KEY' in os.environ:
        global api_warn
        if not api_warn:
            print '> warn: set OpenCageData API key (export OCD_KEY=...) for accurate geolocation'
            api_warn = 1
        raise Exception('')

    API_KEY = os.environ['OCD_KEY']
    u = 'http://api.opencagedata.com/geocode/v1/json?q=%s&key=%s' % (urllib.quote_plus(place), API_KEY)
    a = urllib2.urlopen(u)
    r = json.loads(a.read())

    # debug
    # print '> %s' % place
    # print '> %s' % u
    # print json.dumps(r, indent=4)

    # take the first (best-ranked) geocoding result; IndexError/KeyError
    # on an empty result set propagates to the caller
    lat = r['results'][0]['geometry']['lat']
    lng = r['results'][0]['geometry']['lng']
    return (lat, lng)
110 |
--------------------------------------------------------------------------------
/wnm/wnm_world.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding: utf-8
3 | import sys, os, json, csv, zipfile
4 | from wnm_utils import *
5 |
# geonames country-info dump: tab-separated rows, '#'-prefixed comment and
# header lines (the header carries the column names used by rebuild())
url = 'http://download.geonames.org/export/dump/countryInfo.txt'

path_raw = './data-raw/worldinfo.txt'  # raw download destination
path_out = './data/worldinfo.json'     # rebuilt database (wnm_save appends .gz)
10 |
def download():
    """Download the geonames country database to path_raw.

    Returns 0 on success, -1 when get_url reports failure.
    """
    # single-argument print() calls behave identically under Python 2 and 3
    print('=' * 40)
    print('> updating country database')
    print('=' * 40)

    # small chunk size: countryInfo.txt is tiny, keeps progress output useful
    if not get_url(url, path_raw, chunk_size=2048):
        return -1

    return 0
20 |
def rebuild():
    """Parse the downloaded geonames countryInfo.txt into a dict keyed by
    ISO country code and persist it via wnm_save to path_out.

    Returns 0 on success.
    """
    data = {}

    with open(path_raw, 'rb') as f:
        text = f.read()

    # the last '#' line before the data holds the tab-separated column names
    hdr = None
    for l in text.splitlines():
        # skip blank lines — the original indexed l[0] and raised
        # IndexError on empty/trailing lines
        if not l:
            continue
        if l[0] == '#':
            hdr = l[1:]
            continue

        # zip header column names against this row's fields
        c = dict(zip(hdr.split('\t'), l.split('\t')))
        data[c['ISO']] = c

    wnm_save(path_out, data)

    return 0
38 |
39 |
40 | # =================================
41 | # main
42 | # =================================
def usage():
    """Print the command-line help for the world-info tool and exit with
    status 1 (never returns)."""
    # single-argument print() calls behave identically under Python 2 and 3
    print('%s usage ' % sys.argv[0])
    print('')
    print('commands:')
    print(' download -- download world info databases')
    print(' rebuild -- rebuild local json database')
    print(' update -- download & rebuild')
    print('')
    sys.exit(1)
52 |
if __name__ == '__main__':

    # need at least one command argument
    if len(sys.argv) < 2:
        usage()
        sys.exit(-1)

    action = sys.argv[1]

    if action == 'download':
        download()

    elif action == 'update':
        # abort if either phase reports failure (non-zero return)
        if download():
            sys.exit(-1)
        if rebuild():
            sys.exit(-1)

    elif action == 'rebuild':
        if rebuild():
            sys.exit(-1)

    else:
        # single-argument print() is Python 2/3 compatible
        print('[-] unknown command "%s"' % action)
        sys.exit(-1)

    sys.exit(0)
79 |
80 |
--------------------------------------------------------------------------------