├── Procfile ├── static └── style.css ├── .gitignore ├── data ├── requirements.txt ├── index-geoids.py ├── index-names.py ├── test-s3-index.py └── index-tiles.py ├── requirements.txt ├── templates ├── error.html └── index.html ├── LICENSE ├── util.py ├── geo.py ├── README.md ├── prepare-datasource.sh ├── app.py └── census.py /Procfile: -------------------------------------------------------------------------------- 1 | web: gunicorn --workers 4 --bind 0.0.0.0:$PORT app:app 2 | -------------------------------------------------------------------------------- /static/style.css: -------------------------------------------------------------------------------- 1 | body 2 | { 3 | font-family: sans-serif; 4 | } -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | venv-census-api 2 | datasource.??? 3 | app.pyc 4 | *.pyc 5 | -------------------------------------------------------------------------------- /data/requirements.txt: -------------------------------------------------------------------------------- 1 | unidecode 2 | modestmaps 3 | requests 4 | shapely 5 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | flask 2 | gunicorn 3 | modestmaps 4 | requests 5 | shapely 6 | -------------------------------------------------------------------------------- /templates/error.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Uh-oh (Census API by Code for America) 6 | 7 | 8 | 9 | 10 |

Uh-oh.

11 |

{{error}}

12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2013 Code for America 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE. 20 | -------------------------------------------------------------------------------- /util.py: -------------------------------------------------------------------------------- 1 | from StringIO import StringIO 2 | from json import JSONEncoder 3 | from re import compile 4 | 5 | float_pat = compile(r'^-?\d+\.\d+(e-?\d+)?$') 6 | charfloat_pat = compile(r'^[\[,\,]-?\d+\.\d+(e-?\d+)?$') 7 | 8 | def json_encode(data): 9 | ''' Encode stream of JSON with 7-digits floating point precision. 10 | ''' 11 | encoder = JSONEncoder(separators=(',', ':')) 12 | encoded = encoder.iterencode(data) 13 | output = StringIO() 14 | 15 | for token in encoded: 16 | if charfloat_pat.match(token): 17 | # in python 2.7, we see a character followed by a float literal 18 | output.write(token[0] + '%.7f' % float(token[1:])) 19 | 20 | elif float_pat.match(token): 21 | # in python 2.6, we see a simple float literal 22 | output.write('%.7f' % float(token)) 23 | 24 | else: 25 | output.write(token) 26 | 27 | return output.getvalue() 28 | 29 | falsies = set(['f', 'false', 'n', 'no', '0']) 30 | 31 | def bool(val): 32 | ''' Convert a value to boolean. 33 | 34 | >>> bool(True), bool(False) 35 | (True, False) 36 | 37 | >>> bool(1), bool(0) 38 | (True, False) 39 | 40 | >>> bool('1'), bool('0') 41 | (True, False) 42 | 43 | >>> bool('y'), bool('n') 44 | (True, False) 45 | 46 | >>> bool('t'), bool('f') 47 | (True, False) 48 | 49 | >>> bool('true'), bool('false') 50 | (True, False) 51 | 52 | >>> bool(99), bool('what') 53 | (True, True) 54 | ''' 55 | return str(val).lower() not in falsies 56 | 57 | if __name__ == '__main__': 58 | import doctest 59 | doctest.testmod() 60 | -------------------------------------------------------------------------------- /data/index-geoids.py: -------------------------------------------------------------------------------- 1 | ''' Extract one GeoJSON file per GEOID from local zip files. 
2 | 3 | Built for zip files of State, County, and Place geometries: 4 | 5 | curl -OL 'ftp://ftp.census.gov:21//geo/tiger/TIGER2013/STATE/tl_2013_us_state.zip' 6 | -OL 'ftp://ftp.census.gov:21//geo/tiger/TIGER2013/COUNTY/tl_2013_us_county.zip' 7 | -OL 'ftp://ftp.census.gov:21//geo/tiger/TIGER2013/PLACE/tl_2013_[01-99]_place.zip' 8 | ''' 9 | from zipfile import ZipFile 10 | from subprocess import Popen 11 | from os.path import exists 12 | from glob import glob 13 | from os import remove 14 | 15 | from osgeo import ogr 16 | 17 | if __name__ == '__main__': 18 | 19 | for zipname in glob('*.zip'): 20 | zipfile = ZipFile(zipname) 21 | 22 | types = ('.shp', '.shx', '.prj', '.dbf') 23 | names = [name for name in zipfile.namelist() if name[-4:] in types] 24 | 25 | zipfile.extractall(members=names) 26 | 27 | shpname = names[0][:-4] + '.shp' 28 | 29 | shp_ds = ogr.Open(shpname) 30 | layer = shp_ds.GetLayer(0) 31 | 32 | for feature in layer: 33 | geoid = feature.GetField('GEOID') 34 | outname = '%s.json' % geoid 35 | 36 | print shpname, geoid, '...' 37 | 38 | if exists(outname): 39 | remove(outname) 40 | 41 | ogr2ogr = 'ogr2ogr', '-where', "GEOID='%s'" % geoid, \ 42 | '-t_srs', 'EPSG:4326', '-f', 'GeoJSON', outname, shpname 43 | 44 | ogrcmd = Popen(ogr2ogr) 45 | ogrcmd.wait() 46 | 47 | assert ogrcmd.returncode == 0, 'Failed on GEOID %s' % geoid 48 | 49 | for ext in types: 50 | remove(shpname[:-4] + ext) 51 | -------------------------------------------------------------------------------- /geo.py: -------------------------------------------------------------------------------- 1 | from shapely import wkb 2 | from util import json_encode 3 | 4 | class QueryError (RuntimeError): 5 | pass 6 | 7 | def features_geojson(features, json_callback): 8 | ''' 9 | ''' 10 | geojson = dict(type='FeatureCollection', features=features) 11 | body, mime = json_encode(geojson), 'application/json' 12 | 13 | if json_callback: 14 | body = '%s(%s);\n' % (json_callback, body) 15 | mime = 'text/javascript' 16 | 17 | return body, mime 18 | 19 | def layer_features(layer, include_geom, offset=0, count=25): 20 | ''' 21 | ''' 22 | features = [] 23 | 24 | defn = layer.GetLayerDefn() 25 | names = [defn.GetFieldDefn(i).name for i in range(defn.GetFieldCount())] 26 | 27 | # Skip leading features 28 | for skip in range(offset): 29 | layer.GetNextFeature() 30 | 31 | for feature in layer: 32 | # Stop reading features 33 | if len(features) == count: 34 | break 35 | 36 | properties = dict() 37 | 38 | for (index, name) in enumerate(names): 39 | properties[name] = feature.GetField(index) 40 | 41 | if not include_geom: 42 | features.append(dict(type='Feature', properties=properties, geometry=None)) 43 | continue 44 | 45 | geometry = feature.GetGeometryRef() 46 | shape = wkb.loads(geometry.ExportToWkb()) 47 | 48 | features.append(dict(type='Feature', properties=properties, geometry=shape.__geo_interface__)) 49 | 50 | return features 51 | 52 | def get_intersecting_features(datasource, geometry, include_geom): 53 | ''' 54 | ''' 55 | layer = datasource.GetLayer(0) 56 | layer.SetSpatialFilter(geometry) 57 | 58 | return layer_features(layer, include_geom) 59 | 60 | def get_matching_features(datasource, where_clause, page_number, include_geom): 61 | ''' 62 | ''' 63 | layer, offset, count = datasource.GetLayer(0), (page_number - 1) * 25, 25 64 | 65 | try: 66 | layer.SetAttributeFilter(where_clause) 67 | except RuntimeError, e: 68 | raise QueryError('Bad where clause: ' + str(e)) 69 | 70 | return layer_features(layer, include_geom, offset, count) 71 | 
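A minimal sketch of driving geo.py outside Flask, assuming a local datasource.shp such as the Bay Area sample described in the README; the NAME value is only illustrative:

    from osgeo import ogr

    from geo import get_matching_features, features_geojson

    # Open the same shapefile app.py uses, then fetch page 1 (up to 25
    # features) matching an OGR attribute filter, without geometries.
    datasource = ogr.Open('datasource.shp')
    features = get_matching_features(datasource, "NAME = 'California'", 1, False)

    # Encode the features as a GeoJSON FeatureCollection string; passing a
    # callback name instead of None would wrap the output for JSONP.
    body, mime = features_geojson(features, None)
    print body[:200], mime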
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | US Census Area API 2 | ================== 3 | 4 | Simple geospatial API for U.S. Census and other data sources, in response to the 5 | [Census Area API hack request](https://github.com/codeforamerica/hack-requests/blob/master/census-area-API.md). 6 | 7 | This application can be run in two distinct ways. When the environment variable `GEO_DATASOURCE=http://forever.codeforamerica.org/Census-API/` is present, the API will serve nationwide U.S. Census geographies from the state to block level. See this in action at [census.codeforamerica.org](http://census.codeforamerica.org) and use it freely. 8 | 9 | Otherwise, it will use a local OGR-compatible datasource such as a shapefile or GeoJSON to serve any data and make it available for download. A sample copy with Bay Area Census data can be found at 10 | [census-api-bay-area.herokuapp.com](http://census-api-bay-area.herokuapp.com). 11 | 12 | Installing 13 | ---- 14 | 15 | This is a [Flask](http://flask.pocoo.org/)-based Python application which 16 | requires compiled geospatial libraries [Shapely](http://toblerity.org/shapely/) 17 | and [GDAL](http://trac.osgeo.org/gdal/) to run. Directions here assume that you have an OGR-compatible datasource you’d like to use. 18 | 19 | ### Test Locally 20 | 21 | 1. Download and unpack [sample Bay Area data](http://forever.codeforamerica.org.s3.amazonaws.com/Census-API/bay-area-data.zip). 22 | 2. Ensure that *datasource.shp* and other files are located in the same directory as *app.py*. 23 | 3. Call `python app.py` for a test server. 24 | 25 | ### Run Locally with Gunicorn 26 | 27 | To run a more robust installation using the Python WSGI HTTP server 28 | [Gunicorn](http://gunicorn.org/), prepare local data as in steps 1 & 2 above, 29 | then call: 30 | 31 | gunicorn app:app 32 | 33 | ### Run on Heroku 34 | 35 | Compiled geospatial libraries for Heroku are available via the 36 | [open source GIS Heroku buildpack](https://github.com/codeforamerica/heroku-buildpack-pygeo). Create a new Heroku app with this buildpack: 37 | 38 | heroku create --buildpack https://github.com/codeforamerica/heroku-buildpack-pygeo 39 | 40 | There are two possible ways to run the API on Heroku: 41 | 42 | 1. Fork this repository, download and commit your own data as *datasource.shp*, 43 | and push the combined application + data repository to Heroku. 44 | 45 | 2. Use the `ZIPPED_DATA_URL` support in *heroku-buildpack-pygeo* to configure 46 | a remote zip file such as *bay-area-data.zip* (URL linked above), 47 | **making sure to install the Heroku plugin** 48 | [user-env-compile](https://devcenter.heroku.com/articles/labs-user-env-compile). 49 | Data will be automatically retrieved and expanded to *datasource.shp* at 50 | compile time. 51 | 52 | Credits 53 | ---- 54 | 55 | Written by [Michal Migurski](https://github.com/migurski) with 56 | [Andy Hull](https://github.com/andyhull), (c) 2013 Code for America. 57 | See `LICENSE` for license information. 
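As a quick smoke test of either deployment, a sketch using the Python requests library — the hosted census.codeforamerica.org instance and the point coordinates (borrowed from data/test-s3-index.py, the Code for America office) are assumptions:

    import requests

    # Ask the /areas endpoint which geographies contain a point;
    # include_geom='no' keeps the response small (see templates/index.html).
    resp = requests.get('http://census.codeforamerica.org/areas',
                        params={'lat': 37.775793, 'lon': -122.413549,
                                'include_geom': 'no'})

    for feature in resp.json()['features']:
        properties = feature['properties']
        print properties.get('NAME'), properties.get('GEOID')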
-------------------------------------------------------------------------------- /prepare-datasource.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env tcsh -ex 2 | 3 | # 4 | # All original source files below originate from 5 | # ftp://ftp.census.gov:21//geo/tiger/TIGER2013/ 6 | # 7 | 8 | # 9 | # Large areas clipped to: 10 | # http://www.openstreetmap.org/?box=yes&bbox=-123.060%2C38.902%2C-121.131%2C36.815 11 | # 12 | 13 | ogr2ogr -sql "SELECT STATEFP, '' AS COUNTYFP, CAST(GEOID AS character(16)), NAME, MTFCC, ALAND, AWATER, INTPTLAT, INTPTLON, 'tl_2013_us_state' AS table FROM tl_2013_us_state" \ 14 | -spat -123.060 36.815 -121.131 38.902 \ 15 | -overwrite tl_2013_us_state/tl_2013_us_state{-smush,}.shp 16 | 17 | ogr2ogr -sql "SELECT STATEFP, '' AS COUNTYFP, CAST(GEOID AS character(16)), NAME, MTFCC, ALAND, AWATER, INTPTLAT, INTPTLON, 'tl_2013_06_place' AS table FROM tl_2013_06_place" \ 18 | -spat -123.060 36.815 -121.131 38.902 \ 19 | -overwrite tl_2013_06_place/tl_2013_06_place{-smush,}.shp 20 | 21 | ogr2ogr -sql "SELECT STATEFP, COUNTYFP, CAST(GEOID AS character(16)), NAME, MTFCC, ALAND, AWATER, INTPTLAT, INTPTLON, 'tl_2013_us_county' AS table FROM tl_2013_us_county" \ 22 | -spat -123.060 36.815 -121.131 38.902 \ 23 | -overwrite tl_2013_us_county/tl_2013_us_county{-smush,}.shp 24 | 25 | ogr2ogr -sql "SELECT '' AS STATEFP, '' AS COUNTYFP, CAST(GEOID10 AS character(16)) AS GEOID, '' AS NAME, MTFCC10 AS MTFCC, ALAND10 AS ALAND, AWATER10 AS AWATER, INTPTLAT10 AS INTPTLAT, INTPTLON10 AS INTPTLON, 'tl_2013_us_zcta510' AS table FROM tl_2013_us_zcta510" \ 26 | -spat -123.060 36.815 -121.131 38.902 \ 27 | -overwrite tl_2013_us_zcta510/tl_2013_us_zcta510{-smush,}.shp 28 | 29 | # 30 | # Small areas clipped to: 31 | # http://www.openstreetmap.org/?box=yes&bbox=-122.535%2C37.936%2C-122.076%2C37.667 32 | # 33 | 34 | ogr2ogr -sql "SELECT STATEFP, COUNTYFP, CAST(GEOID AS character(16)), NAME, MTFCC, ALAND, AWATER, INTPTLAT, INTPTLON, 'tl_2013_06_tract' AS table FROM tl_2013_06_tract" \ 35 | -spat -122.535 37.667 -122.076 37.936 \ 36 | -overwrite tl_2013_06_tract/tl_2013_06_tract{-smush,}.shp 37 | 38 | ogr2ogr -sql "SELECT STATEFP, COUNTYFP, CAST(GEOID AS character(16)), NAMELSAD AS NAME, MTFCC, ALAND, AWATER, INTPTLAT, INTPTLON, 'tl_2013_06_bg' AS table FROM tl_2013_06_bg" \ 39 | -spat -122.535 37.667 -122.076 37.936 \ 40 | -overwrite tl_2013_06_bg/tl_2013_06_bg{-smush,}.shp 41 | 42 | ogr2ogr -sql "SELECT STATEFP, COUNTYFP, CAST(GEOID AS character(16)), '' AS NAMELSAD, MTFCC, ALAND, AWATER, INTPTLAT, INTPTLON, 'tl_2013_06_tabblock' AS table FROM tl_2013_06_tabblock" \ 43 | -spat -122.535 37.667 -122.076 37.936 \ 44 | -overwrite tl_2013_06_tabblock/tl_2013_06_tabblock{-smush,}.shp 45 | 46 | # 47 | # Final output to bay-area-census Shapefile. 
48 | # 49 | 50 | ogr2ogr -overwrite -t_srs EPSG:4326 bay-area-census.shp tl_2013_us_state/tl_2013_us_state-smush.shp 51 | ogr2ogr -append -update -t_srs EPSG:4326 bay-area-census.shp tl_2013_us_county/tl_2013_us_county-smush.shp 52 | ogr2ogr -append -update -t_srs EPSG:4326 bay-area-census.shp tl_2013_06_place/tl_2013_06_place-smush.shp 53 | ogr2ogr -append -update -t_srs EPSG:4326 bay-area-census.shp tl_2013_06_tract/tl_2013_06_tract-smush.shp 54 | ogr2ogr -append -update -t_srs EPSG:4326 bay-area-census.shp tl_2013_06_bg/tl_2013_06_bg-smush.shp 55 | ogr2ogr -append -update -t_srs EPSG:4326 bay-area-census.shp tl_2013_06_tabblock/tl_2013_06_tabblock-smush.shp 56 | ogr2ogr -append -update -t_srs EPSG:4326 bay-area-census.shp tl_2013_us_zcta510/tl_2013_us_zcta510-smush.shp 57 | 58 | rm -f bay-area-census.zip && zip -j bay-area-census.zip bay-area-census.{shp,shx,dbf,prj} 59 | -------------------------------------------------------------------------------- /data/index-names.py: -------------------------------------------------------------------------------- 1 | ''' Create one JSON index file per three-letter name prefix from local zip files. 2 | 3 | Built for zip files of State, County, and Place geometries: 4 | 5 | curl -OL 'ftp://ftp.census.gov:21//geo/tiger/TIGER2013/STATE/tl_2013_us_state.zip' 6 | -OL 'ftp://ftp.census.gov:21//geo/tiger/TIGER2013/COUNTY/tl_2013_us_county.zip' 7 | -OL 'ftp://ftp.census.gov:21//geo/tiger/TIGER2013/PLACE/tl_2013_[01-99]_place.zip' 8 | ''' 9 | from zipfile import ZipFile 10 | from collections import defaultdict 11 | from operator import itemgetter 12 | from itertools import groupby 13 | from glob import glob 14 | from os import remove 15 | from json import dump 16 | 17 | from unidecode import unidecode 18 | from osgeo import ogr 19 | 20 | state_fips = { 21 | '01': 'Alabama', 22 | '02': 'Alaska', 23 | '04': 'Arizona', 24 | '05': 'Arkansas', 25 | '06': 'California', 26 | '08': 'Colorado', 27 | '09': 'Connecticut', 28 | '10': 'Delaware', 29 | '11': 'District of Columbia', 30 | '12': 'Florida', 31 | '13': 'Georgia', 32 | '15': 'Hawaii', 33 | '16': 'Idaho', 34 | '17': 'Illinois', 35 | '18': 'Indiana', 36 | '19': 'Iowa', 37 | '20': 'Kansas', 38 | '21': 'Kentucky', 39 | '22': 'Louisiana', 40 | '23': 'Maine', 41 | '24': 'Maryland', 42 | '25': 'Massachusetts', 43 | '26': 'Michigan', 44 | '27': 'Minnesota', 45 | '28': 'Mississippi', 46 | '29': 'Missouri', 47 | '30': 'Montana', 48 | '31': 'Nebraska', 49 | '32': 'Nevada', 50 | '33': 'New Hampshire', 51 | '34': 'New Jersey', 52 | '35': 'New Mexico', 53 | '36': 'New York', 54 | '37': 'North Carolina', 55 | '38': 'North Dakota', 56 | '39': 'Ohio', 57 | '40': 'Oklahoma', 58 | '41': 'Oregon', 59 | '42': 'Pennsylvania', 60 | '44': 'Rhode Island', 61 | '45': 'South Carolina', 62 | '46': 'South Dakota', 63 | '47': 'Tennessee', 64 | '48': 'Texas', 65 | '49': 'Utah', 66 | '50': 'Vermont', 67 | '51': 'Virginia', 68 | '53': 'Washington', 69 | '54': 'West Virginia', 70 | '55': 'Wisconsin', 71 | '56': 'Wyoming', 72 | '60': 'American Samoa', 73 | '64': 'Federated States of Micronesia', 74 | '66': 'Guam', 75 | '68': 'Marshall Islands', 76 | '69': 'Northern Mariana Islands', 77 | '70': 'Palau', 78 | '72': 'Puerto Rico', 79 | '74': 'U.S. 
Minor Outlying Islands', 80 | '78': 'Virgin Islands of the U.S.', 81 | } 82 | 83 | if __name__ == '__main__': 84 | 85 | index = defaultdict(lambda: []) 86 | 87 | for zipname in glob('*.zip'): 88 | zipfile = ZipFile(zipname) 89 | 90 | types = ('.shp', '.shx', '.prj', '.dbf') 91 | names = [name for name in zipfile.namelist() if name[-4:] in types] 92 | 93 | zipfile.extractall(members=names) 94 | 95 | shpname = names[0][:-4] + '.shp' 96 | 97 | shp_ds = ogr.Open(shpname) 98 | layer = shp_ds.GetLayer(0) 99 | 100 | for feature in layer: 101 | if shpname == 'tl_2013_us_county.shp': 102 | name = feature.GetField('NAMELSAD').decode('latin-1') 103 | else: 104 | name = feature.GetField('NAME').decode('latin-1') 105 | 106 | geoid = feature.GetField('GEOID') 107 | name_ascii = unidecode(name) 108 | state = state_fips[geoid[:2]] 109 | 110 | key = name_ascii[:3].lower() 111 | index[key].append(dict(name=name, name_ascii=name_ascii, 112 | state=state, geoid=geoid, source=shpname)) 113 | 114 | print key, name 115 | 116 | for ext in types: 117 | remove(shpname[:-4] + ext) 118 | 119 | for (key, content) in index.items(): 120 | content.sort(key=itemgetter('name')) 121 | 122 | with open(key + '.json', 'w') as out: 123 | dump(content, out, indent=2) 124 | -------------------------------------------------------------------------------- /app.py: -------------------------------------------------------------------------------- 1 | from sys import stderr 2 | from os import environ 3 | from urlparse import urlparse 4 | from StringIO import StringIO 5 | from zipfile import ZipFile, ZIP_DEFLATED 6 | from time import time 7 | 8 | from flask import Flask 9 | from flask import request 10 | from flask import Response 11 | from flask import render_template 12 | from osgeo import ogr 13 | 14 | from util import json_encode, bool 15 | from geo import features_geojson, QueryError 16 | from geo import get_intersecting_features, get_matching_features 17 | from census import census_url, get_features as census_features 18 | 19 | cors = 'Access-Control-Allow-Origin' 20 | 21 | app = Flask(__name__) 22 | 23 | def is_census_datasource(environ): 24 | ''' Return true if the environment specifies the U.S. Census datasource. 25 | ''' 26 | return environ.get('GEO_DATASOURCE', None) == census_url 27 | 28 | def get_datasource(environ): 29 | ''' Return an environment-appropriate datasource. 30 | 31 | For local data, this will be an OGR Datasource object. 32 | ''' 33 | if is_census_datasource(environ): 34 | # Use the value of the environment variable directly, 35 | datasource = environ['GEO_DATASOURCE'] 36 | 37 | else: 38 | # Or just open datasource.shp with OGR. 39 | datasource = ogr.Open('datasource.shp') 40 | 41 | return datasource 42 | 43 | @app.route('/') 44 | def hello(): 45 | host_port = urlparse(request.base_url).netloc.encode('utf-8') 46 | is_downloadable = not is_census_datasource(environ) 47 | is_us_census = is_census_datasource(environ) 48 | 49 | return render_template('index.html', **locals()) 50 | 51 | @app.route('/.well-known/status') 52 | def status(): 53 | datasource = get_datasource(environ) 54 | 55 | status = { 56 | 'status': 'ok' if bool(datasource) else 'Bad datasource: %s' % repr(datasource), 57 | 'updated': int(time()), 58 | 'dependencies': [], 59 | 'resources': {} 60 | } 61 | 62 | body = json_encode(status) 63 | 64 | return Response(body, headers={'Content-type': 'application/json', cors: '*'}) 65 | 66 | @app.route("/areas") 67 | def areas(): 68 | ''' Retrieve geographic areas. 
69 | ''' 70 | is_census = is_census_datasource(environ) 71 | 72 | lat = float(request.args['lat']) 73 | lon = float(request.args['lon']) 74 | 75 | include_geom = bool(request.args.get('include_geom', True)) 76 | json_callback = request.args.get('callback', None) 77 | 78 | layer_names = is_census and request.args.get('layers', '') 79 | layer_names = layer_names and set(layer_names.split(',')) 80 | 81 | # This. Is. Python. 82 | ogr.UseExceptions() 83 | 84 | point = ogr.Geometry(wkt='POINT(%f %f)' % (lon, lat)) 85 | 86 | if is_census: 87 | features = census_features(point, include_geom, layer_names) 88 | 89 | else: 90 | datasource = get_datasource(environ) 91 | features = get_intersecting_features(datasource, point, include_geom) 92 | 93 | body, mime = features_geojson(features, json_callback) 94 | 95 | return Response(body, headers={'Content-type': mime, cors: '*'}) 96 | 97 | @app.route('/select') 98 | def select(): 99 | ''' Retrieve features. 100 | ''' 101 | if is_census_datasource(environ): 102 | error = "Can't select individual features from " + census_url 103 | return Response(render_template('error.html', error=error), status=404) 104 | 105 | where_clause = request.args.get('where', None) 106 | where_clause = where_clause and str(where_clause) 107 | 108 | page_number = int(request.args.get('page', 1)) 109 | 110 | include_geom = bool(request.args.get('include_geom', True)) 111 | json_callback = request.args.get('callback', None) 112 | 113 | # This. Is. Python. 114 | ogr.UseExceptions() 115 | 116 | try: 117 | datasource = get_datasource(environ) 118 | features = get_matching_features(datasource, where_clause, page_number, include_geom) 119 | 120 | except QueryError, e: 121 | body, mime = json_encode({'error': str(e)}), 'application/json' 122 | 123 | if json_callback: 124 | body = '%s(%s);\n' % (json_callback, body) 125 | mime = 'text/javascript' 126 | 127 | return Response(body, status=400, headers={'Content-type': mime, cors: '*'}) 128 | 129 | else: 130 | body, mime = features_geojson(features, json_callback) 131 | return Response(body, headers={'Content-type': mime, cors: '*'}) 132 | 133 | @app.errorhandler(404) 134 | def error_404(error): 135 | return render_template('error.html', error=str(error)) 136 | 137 | @app.route('/datasource.zip') 138 | def download_zip(): 139 | if is_census_datasource(environ): 140 | error = "Can't download all of " + census_url 141 | return Response(render_template('error.html', error=error), status=404) 142 | 143 | buffer = StringIO() 144 | archive = ZipFile(buffer, 'w', ZIP_DEFLATED) 145 | archive.write('datasource.shp') 146 | archive.write('datasource.shx') 147 | archive.write('datasource.dbf') 148 | archive.write('datasource.prj') 149 | archive.close() 150 | 151 | return Response(buffer.getvalue(), headers={'Content-Type': 'application/zip'}) 152 | 153 | if __name__ == '__main__': 154 | app.run(host='0.0.0.0', debug=True) -------------------------------------------------------------------------------- /data/test-s3-index.py: -------------------------------------------------------------------------------- 1 | ''' Test script for retrieving features from Code for America S3-backed index. 
2 | ''' 3 | from time import time 4 | from sys import stderr 5 | from threading import Thread 6 | from thread import get_ident 7 | 8 | from requests import get 9 | from shapely.geometry import MultiPolygon, Polygon, LineString, Point 10 | from ModestMaps.OpenStreetMap import Provider 11 | from ModestMaps.Geo import Location 12 | 13 | def unwind(indexes, arcs, transform): 14 | ''' Unwind a set of TopoJSON arc indexes into a transformed line or ring. 15 | 16 | Arc index documentation, with explanation of negative indexes: 17 | https://github.com/topojson/topojson-specification#214-arc-indexes 18 | 19 | Transformations: 20 | https://github.com/topojson/topojson-specification#212-transforms 21 | ''' 22 | ring = [] 23 | 24 | for index in indexes: 25 | arc = arcs[index if index >= 0 else abs(index) - 1] 26 | line = [arc[0]] 27 | 28 | for (x, y) in arc[1:]: 29 | line.append((line[-1][0] + x, line[-1][1] + y)) 30 | 31 | dx, dy = transform['scale'] 32 | tx, ty = transform['translate'] 33 | line = [(x * dx + tx, y * dy + ty) for (x, y) in line] 34 | 35 | ring += line if index >= 0 else reversed(line) 36 | 37 | return ring 38 | 39 | def decode(object, topo): 40 | ''' Decode a single object geometry from a TopoJSON topology. 41 | 42 | Throw an error if it's anything other than a polygon or multipolygon. 43 | ''' 44 | arcs, transform = topo['arcs'], topo['transform'] 45 | 46 | if object['type'] == 'Polygon': 47 | rings = [unwind(indexes, arcs, transform) for indexes in object['arcs']] 48 | return Polygon(rings[0], rings[1:]) 49 | 50 | if object['type'] == 'MultiPolygon': 51 | parts = [] 52 | 53 | for part in object['arcs']: 54 | rings = [unwind(indexes, arcs, transform) for indexes in part] 55 | part_shp = Polygon(rings[0], rings[1:]) 56 | parts.append(part_shp) 57 | 58 | return MultiPolygon(parts) 59 | 60 | raise Exception(object['type']) 61 | 62 | def retrieve_zoom_features(loc, zoom): 63 | ''' Retrieve all features enclosing a given point location at a zoom level. 64 | 65 | Requests TopoJSON tile from forever.codeforamerica.org spatial index, 66 | decodes bounding boxes and geometries if necessary, then yields a stream 67 | of any feature feature whose geometry covers the requested point. 68 | ''' 69 | osm = Provider() 70 | 71 | point = Point(loc.lon, loc.lat) 72 | coord = osm.locationCoordinate(loc).zoomTo(zoom) 73 | path = '%(zoom)d/%(column)d/%(row)d' % coord.__dict__ 74 | url = 'http://forever.codeforamerica.org/Census-API/by-tile/%s.topojson.gz' % path 75 | 76 | resp = get(url) 77 | topo = resp.json() 78 | 79 | print >> stderr, 'request took', resp.elapsed, 'from', url, 'in', hex(get_ident()) 80 | 81 | start = time() 82 | 83 | assert topo['type'] == 'Topology' 84 | 85 | bbox_fails, shape_fails = 0, 0 86 | 87 | for layer in topo['objects']: 88 | if zoom == 8: 89 | assert layer in ('state', 'county', 'place', 'cbsa') 90 | elif zoom == 10: 91 | assert layer in ('zcta510', 'tract') 92 | else: 93 | raise Exception('Unknown layer %d' % zoom) 94 | 95 | for object in topo['objects'][layer]['geometries']: 96 | x_, y_, _x, _y = object['bbox'] 97 | 98 | obj_box = Polygon([(x_, y_), (x_, _y), (_x, _y), (_x, y_), (x_, y_)]) 99 | 100 | if not point.within(obj_box): 101 | # object failed a simple bounding box check and can be discarded. 102 | bbox_fails += 1 103 | continue 104 | 105 | obj_shp = decode(object, topo) 106 | 107 | if not point.within(obj_shp): 108 | # object failed a point-in-polygon check and can be discarded. 
109 | shape_fails += 1 110 | continue 111 | 112 | p = object['properties'] 113 | 114 | yield p.get('NAME', None), p.get('NAMELSAD', None), p.get('GEOID', None), p.get('GEOID10', None) 115 | 116 | print >> stderr, 'check took', (time() - start), 'seconds', 'in', hex(get_ident()), 'with', bbox_fails, 'bbox fails and', shape_fails, 'shape fails' 117 | 118 | def get_features(loc): 119 | ''' Get a list of features found at the given point location. 120 | 121 | Thread calls to retrieve_zoom_features(). 122 | ''' 123 | def _retrieve_zoom_features(loc, zoom, results): 124 | for result in retrieve_zoom_features(loc, zoom): 125 | results.append(result) 126 | 127 | start = time() 128 | results = [] 129 | 130 | threads = [ 131 | Thread(target=_retrieve_zoom_features, args=(loc, 10, results)), 132 | Thread(target=_retrieve_zoom_features, args=(loc, 8, results)) 133 | ] 134 | 135 | for t in threads: 136 | t.start() 137 | 138 | for t in threads: 139 | t.join() 140 | 141 | print >> stderr, 'results took', (time() - start), 'seconds' 142 | 143 | return results 144 | 145 | if __name__ == '__main__': 146 | 147 | print get_features(Location(47.620510, -122.349305)) # Space Needle 148 | print get_features(Location(37.805311, -122.272540)) # Oakland City Hall 149 | print get_features(Location(37.775793, -122.413549)) # Code for America 150 | print get_features(Location(40.753526, -73.976626)) # Grand Central Station 151 | print get_features(Location(38.871006, -77.055963)) # The Pentagon 152 | print get_features(Location(29.951057, -90.081090)) # The Superdome 153 | print get_features(Location(41.878874, -87.635907)) # Sears Tower 154 | -------------------------------------------------------------------------------- /census.py: -------------------------------------------------------------------------------- 1 | from time import time 2 | from logging import debug 3 | from threading import Thread 4 | from thread import get_ident 5 | 6 | from requests import get 7 | from shapely.geometry import MultiPolygon, Polygon, LineString, Point 8 | from ModestMaps.OpenStreetMap import Provider 9 | from ModestMaps.Geo import Location 10 | 11 | census_url = 'http://forever.codeforamerica.org/Census-API/' 12 | 13 | zoom_layers = { 14 | 8: set(('state', 'county', 'place', 'cbsa')), 15 | 10: set(('zcta510', 'tract')), 16 | 12: set(('bg', 'tabblock')) 17 | } 18 | 19 | def unwind(indexes, arcs, transform): 20 | ''' Unwind a set of TopoJSON arc indexes into a transformed line or ring. 21 | 22 | Arc index documentation, with explanation of negative indexes: 23 | https://github.com/topojson/topojson-specification#214-arc-indexes 24 | 25 | Transformations: 26 | https://github.com/topojson/topojson-specification#212-transforms 27 | ''' 28 | ring = [] 29 | 30 | for index in indexes: 31 | arc = arcs[index if index >= 0 else abs(index) - 1] 32 | line = [arc[0]] 33 | 34 | for (x, y) in arc[1:]: 35 | line.append((line[-1][0] + x, line[-1][1] + y)) 36 | 37 | dx, dy = transform['scale'] 38 | tx, ty = transform['translate'] 39 | line = [(x * dx + tx, y * dy + ty) for (x, y) in line] 40 | 41 | ring += line if index >= 0 else reversed(line) 42 | 43 | return ring 44 | 45 | def decode(object, topo): 46 | ''' Decode a single object geometry from a TopoJSON topology. 47 | 48 | Throw an error if it's anything other than a polygon or multipolygon. 
49 | ''' 50 | arcs, transform = topo['arcs'], topo['transform'] 51 | 52 | if object['type'] == 'Polygon': 53 | rings = [unwind(indexes, arcs, transform) for indexes in object['arcs']] 54 | return Polygon(rings[0], rings[1:]) 55 | 56 | if object['type'] == 'MultiPolygon': 57 | parts = [] 58 | 59 | for part in object['arcs']: 60 | rings = [unwind(indexes, arcs, transform) for indexes in part] 61 | part_shp = Polygon(rings[0], rings[1:]) 62 | parts.append(part_shp) 63 | 64 | return MultiPolygon(parts) 65 | 66 | raise Exception(object['type']) 67 | 68 | def retrieve_zoom_features(loc, zoom, include_geom, layer_names): 69 | ''' Retrieve all features enclosing a given point location at a zoom level. 70 | 71 | Requests TopoJSON tile from forever.codeforamerica.org spatial index, 72 | decodes bounding boxes and geometries if necessary, then yields a stream 73 | of any feature feature whose geometry covers the requested point. 74 | ''' 75 | osm = Provider() 76 | 77 | point = Point(loc.lon, loc.lat) 78 | coord = osm.locationCoordinate(loc).zoomTo(zoom) 79 | path = '%(zoom)d/%(column)d/%(row)d' % coord.__dict__ 80 | url = census_url + 'by-tile/%s.topojson.gz' % path 81 | 82 | resp = get(url) 83 | topo = resp.json() 84 | 85 | debug('request took %.3fs from %s in %s' % (resp.elapsed.total_seconds(), url, hex(get_ident()))) 86 | 87 | start = time() 88 | 89 | assert topo['type'] == 'Topology' 90 | 91 | bbox_fails, shape_fails = 0, 0 92 | 93 | for layer in topo['objects']: 94 | if layer_names is not None and layer not in layer_names: 95 | continue 96 | 97 | if zoom in zoom_layers: 98 | assert layer in zoom_layers[zoom] 99 | else: 100 | raise Exception('Unknown layer %d' % zoom) 101 | 102 | for object in topo['objects'][layer]['geometries']: 103 | x_, y_, _x, _y = object['bbox'] 104 | 105 | obj_box = Polygon([(x_, y_), (x_, _y), (_x, _y), (_x, y_), (x_, y_)]) 106 | 107 | if not point.within(obj_box): 108 | # object failed a simple bounding box check and can be discarded. 109 | bbox_fails += 1 110 | continue 111 | 112 | obj_shp = decode(object, topo) 113 | 114 | if not point.within(obj_shp): 115 | # object failed a point-in-polygon check and can be discarded. 116 | shape_fails += 1 117 | continue 118 | 119 | feature = {'type': 'Feature', 'properties': object['properties']} 120 | 121 | if include_geom: 122 | feature['geometry'] = obj_shp.__geo_interface__ 123 | 124 | yield feature 125 | 126 | debug('check took %.3fs in %s with %d bbox fails and %d shape fails' % (time() - start, hex(get_ident()), bbox_fails, shape_fails)) 127 | 128 | def get_features(point, include_geom, layer_names): 129 | ''' Get a list of features found at the given point location. 130 | 131 | Thread calls to retrieve_zoom_features(). 132 | ''' 133 | loc = Location(point.GetY(), point.GetX()) 134 | 135 | def _retrieve_zoom_features(zoom, results): 136 | for result in retrieve_zoom_features(loc, zoom, include_geom, layer_names or None): 137 | results.append(result) 138 | 139 | start = time() 140 | results = [] 141 | 142 | if layer_names: 143 | # 144 | # Prepare one thread for each zoom_layer needed to get the named layers. 
145 | # 146 | layer_needs = [(z, layer_names & zoom_layers[z]) for z in zoom_layers] 147 | layer_args = [(zoom, results) for (zoom, layers) in layer_needs if layers] 148 | 149 | else: 150 | layer_args = [(zoom, results) for zoom in zoom_layers] 151 | 152 | threads = [Thread(target=_retrieve_zoom_features, args=a) for a in layer_args] 153 | 154 | for t in threads: 155 | t.start() 156 | 157 | for t in threads: 158 | t.join() 159 | 160 | debug('results took %.3f seconds' % (time() - start)) 161 | 162 | return results 163 | -------------------------------------------------------------------------------- /templates/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | {% if is_us_census %}U.S. Census{% else %}Geographic{% endif %} Area API by Code for America 6 | 7 | 50 | 51 | 52 | 53 |

{% if is_us_census %}U.S. Census{% else %}Geographic{% endif %} Area API

54 | 55 |

56 | Simple geospatial API{% if is_us_census %} for U.S. Census{% endif %}, in response to the 57 | Census Area API hack request. 58 | Fork me on GitHub. 59 |

60 | 61 |

62 | With this API, you can: 63 |

64 | 65 | find the areas at a point, select features by attribute values, and download the datasource as a ZIP file. 74 | 75 |

76 | 77 |

78 | 79 |

80 | Written by Michal Migurski with 81 | Andy Hull, © 2013 Code for America. 82 |

83 | 84 | 85 | {% if is_downloadable %} 86 | 87 | 90 | 91 | 92 | 100 | 107 | 108 | {% endif %} 109 | 110 | 113 | 114 | 115 | 167 | 206 | 207 | {% if not is_us_census %} 208 | 209 | 212 | 213 | 214 | 243 | 282 | 283 | {% endif %} 284 |
88 |

Download Datasource ZIP File

89 |
93 |

94 | Download a complete copy of the datasource shapefile in a ZIP archive. 95 |

96 | 97 |

Endpoint

98 |

/datasource.zip

99 |
101 |

Definition

102 | http://{{host_port}}/datasource.zip 103 | 104 |

Response

105 | ZIP archive 106 |
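For instance, a minimal sketch that saves the archive with the requests library; the host and port (Flask's development-server default) are assumptions:

    import requests

    # Fetch the packaged shapefile and write it to disk.
    resp = requests.get('http://127.0.0.1:5000/datasource.zip')

    with open('datasource.zip', 'wb') as archive:
        archive.write(resp.content)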
111 |

Areas at a Point

112 |
116 |

117 | Return all areas that overlap a single point. 118 |

119 | 120 |

Endpoint

121 |

/areas

122 | 123 |

Arguments

124 | 125 |
126 |
lat
127 |
Required latitude in degrees.
128 | 129 |
lon
130 |
Required longitude in degrees.
131 | 132 |
include_geom
133 |
Optional boolean flag to include complete geometries in response. Default True.
134 | 135 |
callback
136 |
Optional JSONP callback function name.
137 | 138 | {% if is_us_census %} 139 |
layers
140 |
141 | Optional comma-delimited list of layer names. 142 |
143 | Available layers include: 144 | state, 145 | county, 146 | place, 147 | cbsa, 148 | zcta510 (zip codes), 149 | tract, 150 | bg (block group), and 151 | tabblock (tabulation block). 152 |
153 | If omitted, all layers will be included in response. 154 |
155 | {% endif %} 156 |
157 | 158 |

Response Format

159 |

160 | Responses are given in GeoJSON format: 161 | “GeoJSON is a format for encoding a variety of geographic 162 | data structures. Features in GeoJSON contain a geometry 163 | object and additional properties, and a feature collection 164 | represents a list of features.” 165 |

166 |
168 |

Definition

169 | http://{{host_port}}/areas?lat={latitude}&lon={longitude} 170 | 171 |

Example Request

172 | http://{{host_port}}/areas?lat=37.775793&lon=-122.413549 173 | 174 |

Response

175 | { 176 | "type": "FeatureCollection", 177 | "features": [ 178 | { 179 | "type": "Feature", 180 | "properties": { 181 | "NAME": "California", 182 | "GEOID": "06", 183 | "MTFCC": "G4000", 184 | "dataset": "Bay Area Census (2010-2013)", 185 | ... 186 | }, 187 | "geometry": { 188 | "type": "MultiPolygon", 189 | "coordinates": [ 190 | [ 191 | [ 192 | [ -123.792812, 39.102313 ], 193 | [ -123.793856, 39.104178 ], 194 | ... 195 | [ -123.792812, 39.102313 ] 196 | ] 197 | ], 198 | ... 199 | ] 200 | } 201 | }, 202 | ... 203 | ] 204 | }
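The same request can be scripted; a sketch assuming the hosted census.codeforamerica.org instance, and adding the optional layers and include_geom arguments described above:

    import requests

    # Restrict the response to two layers and omit geometries; 'false' is
    # treated as falsy by the API's boolean parsing (see util.py).
    params = {'lat': 37.775793, 'lon': -122.413549,
              'layers': 'county,tract', 'include_geom': 'false'}

    geojson = requests.get('http://census.codeforamerica.org/areas',
                           params=params).json()

    for feature in geojson['features']:
        print feature['properties'].get('GEOID'), feature['properties'].get('NAME')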
210 |

Select features by attribute values

211 |
215 |

216 | Return all features that match a logical expression. 217 |

218 | 219 |

Endpoint

220 |

/select

221 | 222 |

Arguments

223 | 224 |
225 |
where
226 |
Optional expression, such as NAME = 'California'. Value is passed directly to the datasource, and must match OGR’s SQL dialect for WHERE clauses.
227 | 228 |
page
229 |
Optional page number to select additional features. Each page contains up to 25 features. Default 1.
230 | 231 |
include_geom
232 |
Optional boolean flag to include complete geometries in response. Default True.
233 | 234 |
callback
235 |
Optional JSONP callback function name.
236 |
237 | 238 |

Response Format

239 |

240 | Responses are given in GeoJSON format. 241 |

242 |
244 |

Definition

245 | http://{{host_port}}/select?where={expression} 246 | 247 |

Example Request

248 | http://{{host_port}}/select?where=NAME='California' 249 | 250 |

Response

251 | { 252 | "type": "FeatureCollection", 253 | "features": [ 254 | { 255 | "type": "Feature", 256 | "properties": { 257 | "NAME": "California", 258 | "GEOID": "06", 259 | "MTFCC": "G4000", 260 | "dataset": "Bay Area Census (2010-2013)", 261 | ... 262 | }, 263 | "geometry": { 264 | "type": "MultiPolygon", 265 | "coordinates": [ 266 | [ 267 | [ 268 | [ -123.792812, 39.102313 ], 269 | [ -123.793856, 39.104178 ], 270 | ... 271 | [ -123.792812, 39.102313 ] 272 | ] 273 | ], 274 | ... 275 | ] 276 | } 277 | }, 278 | ... 279 | ] 280 | }
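Because each page holds at most 25 features, a client pages until an empty features list comes back; a sketch assuming a local instance serving the Bay Area sample datasource:

    import requests

    url = 'http://127.0.0.1:5000/select'
    page = 1

    while True:
        # MTFCC 'G4000' marks the state-level features in the example above.
        params = {'where': "MTFCC = 'G4000'", 'page': page, 'include_geom': 'no'}
        features = requests.get(url, params=params).json()['features']

        if not features:
            break

        for feature in features:
            print feature['properties'].get('NAME')

        page += 1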
285 | 286 | 287 | 288 | -------------------------------------------------------------------------------- /data/index-tiles.py: -------------------------------------------------------------------------------- 1 | ''' Extract one directory of GeoJSON files per tile from local zip files. 2 | 3 | Built for zip files of State, County, CBSA, and Place geometries: 4 | 5 | curl -OL 'ftp://ftp.census.gov:21//geo/tiger/TIGER2013/STATE/tl_2013_us_state.zip' 6 | -OL 'ftp://ftp.census.gov:21//geo/tiger/TIGER2013/COUNTY/tl_2013_us_county.zip' 7 | -OL 'ftp://ftp.census.gov:21//geo/tiger/TIGER2013/CBSA/tl_2013_us_cbsa.zip' 8 | -OL 'ftp://ftp.census.gov:21//geo/tiger/TIGER2013/PLACE/tl_2013_[01-99]_place.zip' 9 | ''' 10 | from zipfile import ZipFile 11 | from itertools import product 12 | from os import makedirs, remove, stat, link 13 | from subprocess import Popen 14 | from shutil import copyfile 15 | from os.path import exists 16 | from re import compile, S 17 | from glob import glob 18 | 19 | from ModestMaps.Core import Coordinate 20 | from ModestMaps.OpenStreetMap import Provider 21 | 22 | zoom_low, zoom_mid, zoom_high = 8, 10, 12 23 | 24 | def extract(zipfile): 25 | ''' 26 | ''' 27 | types = ('.shp', '.shx', '.prj', '.dbf') 28 | names = [name for name in zipfile.namelist() if name[-4:] in types] 29 | 30 | zipfile.extractall(members=names) 31 | 32 | shpname = names[0][:-4] + '.shp' 33 | 34 | return shpname 35 | 36 | def cleanup(shpname): 37 | ''' 38 | ''' 39 | types = ('.shp', '.shx', '.prj', '.dbf') 40 | 41 | for ext in types: 42 | remove(shpname[:-4] + ext) 43 | 44 | def coordinates(zoom): 45 | ''' 46 | ''' 47 | osm = Provider() 48 | 49 | for (col, row) in product(range(2**zoom), range(2**zoom)): 50 | coord = Coordinate(row, col, zoom) 51 | 52 | sw = osm.coordinateLocation(coord.down()) 53 | ne = osm.coordinateLocation(coord.right()) 54 | 55 | yield coord, sw, ne 56 | 57 | def prepdir(coord): 58 | ''' 59 | ''' 60 | path = 'tiles/%(zoom)d/%(column)d/%(row)d' % coord.__dict__ 61 | 62 | try: 63 | makedirs(path) 64 | except OSError: 65 | pass 66 | 67 | return path 68 | 69 | def runogr2ogr(cmd): 70 | ''' 71 | ''' 72 | ogrcmd = Popen(cmd) 73 | ogrcmd.wait() 74 | 75 | assert ogrcmd.returncode == 0, 'Failed on %s' % outname 76 | 77 | if 'GeoJSON' in cmd and stat(outname).st_size <= 131: 78 | remove(outname) 79 | 80 | def append_geojson(srcname, destname): 81 | ''' Append ogr2ogr-generated source file to destination file. 82 | 83 | Uses regular expressions instead of JSON parser, so formats and spacing 84 | and line breaks and bounding boxes must all match normal ogr2ogr output. 85 | ''' 86 | pat = compile(r'^{\n"type": "FeatureCollection", *\n"bbox": (\[.+?\]), *\n"features": \[\n(.+)\n\] *\n}\n$', S) 87 | 88 | srcdata = open(srcname).read() 89 | srcmatch = pat.match(srcdata) 90 | assert srcmatch, 'Bad '+ srcname 91 | 92 | destdata = open(destname).read() 93 | destmatch = pat.match(destdata) 94 | assert destmatch, 'Bad '+ destname 95 | 96 | with open(destname, 'w') as out: 97 | print >> out, '{\n"type": "FeatureCollection",\n"bbox":', 98 | print >> out, destmatch.group(1)+',' # bbox is the same each time 99 | print >> out, '"features": [' 100 | print >> out, destmatch.group(2) 101 | print >> out, ',' 102 | print >> out, srcmatch.group(2) 103 | print >> out, ']\n}' 104 | 105 | if __name__ == '__main__': 106 | 107 | # 108 | # Extract state, county and CBSA features for each low-zoom tile. 
109 | # 110 | 111 | # States need to be first in the list, so 112 | zipnames = ['tl_2013_us_state.zip', 'tl_2013_us_county.zip', 'tl_2013_us_cbsa.zip'] \ 113 | + glob('tl_2013_??_place.zip') 114 | 115 | for zipname in zipnames: 116 | zipfile = ZipFile(zipname) 117 | shpname = extract(zipfile) 118 | 119 | for (coord, sw, ne) in coordinates(zoom_low): 120 | path = prepdir(coord) 121 | 122 | outname = '%s/%s.json' % (path, shpname[:-4]) 123 | 124 | if shpname != 'tl_2013_us_state.shp' and not exists(path + '/tl_2013_us_state.json'): 125 | # skip this probably-empty tile 126 | continue 127 | 128 | print outname, '...' 129 | 130 | if exists(outname): 131 | remove(outname) 132 | 133 | cmd = 'ogr2ogr', '-spat', str(sw.lon), str(sw.lat), str(ne.lon), str(ne.lat), \ 134 | '-t_srs', 'EPSG:4326', '-f', 'GeoJSON', '-lco', 'WRITE_BBOX=YES', outname, shpname 135 | 136 | runogr2ogr(cmd) 137 | 138 | cleanup(shpname) 139 | 140 | # 141 | # Combine per-state place files into per-tile place files. 142 | # 143 | 144 | coords = coordinates(zoom_low) 145 | 146 | for (coord, sw, ne) in coords: 147 | path = prepdir(coord) 148 | 149 | for (index, filename) in enumerate(glob('%s/tl_2013_??_place.json' % path)): 150 | if filename.endswith('tl_2013_us_place.json'): 151 | continue 152 | 153 | outname = '%s/tl_2013_us_place.json' % path 154 | 155 | if index == 0: 156 | print 'copy', filename, 'to', outname, '...' 157 | copyfile(filename, outname) 158 | 159 | else: 160 | print 'append', filename, 'to', outname, '...' 161 | append_geojson(filename, outname) 162 | 163 | # 164 | # Extract ZCTA5 and tract features for each mid-zoom tile. 165 | # 166 | 167 | zipnames = ['tl_2013_us_zcta510.zip'] + glob('tl_2013_??_tract.zip') 168 | 169 | for zipname in zipnames: 170 | zipfile = ZipFile(zipname) 171 | shpname = extract(zipfile) 172 | 173 | for (coord, sw, ne) in coordinates(zoom_mid): 174 | parent = coord.zoomTo(zoom_low).container() 175 | 176 | if not exists(prepdir(parent) + '/tl_2013_us_state.json'): 177 | # skip this probably-empty tile 178 | continue 179 | 180 | path = prepdir(coord) 181 | 182 | outname = '%s/%s.json' % (path, shpname[:-4]) 183 | 184 | print outname, '...' 185 | 186 | if exists(outname): 187 | remove(outname) 188 | 189 | cmd = 'ogr2ogr', '-spat', str(sw.lon), str(sw.lat), str(ne.lon), str(ne.lat), \ 190 | '-t_srs', 'EPSG:4326', '-f', 'GeoJSON', '-lco', 'WRITE_BBOX=YES', outname, shpname 191 | 192 | runogr2ogr(cmd) 193 | 194 | cleanup(shpname) 195 | 196 | # 197 | # Combine per-state tract files into per-tile tract files. 198 | # 199 | 200 | coords = coordinates(zoom_mid) 201 | 202 | for (coord, sw, ne) in coords: 203 | path = prepdir(coord) 204 | 205 | for (index, filename) in enumerate(glob('%s/tl_2013_??_tract.json' % path)): 206 | if filename.endswith('tl_2013_us_tract.json'): 207 | continue 208 | 209 | outname = '%s/tl_2013_us_tract.json' % path 210 | 211 | if index == 0: 212 | print 'copy', filename, 'to', outname, '...' 213 | copyfile(filename, outname) 214 | 215 | else: 216 | print 'append', filename, 'to', outname, '...' 217 | append_geojson(filename, outname) 218 | 219 | # 220 | # Extract block and block group features for each high-zoom tile. 
221 | # 222 | 223 | zipnames = glob('tl_2013_??_bg.zip') + glob('tl_2013_??_tabblock.zip') 224 | 225 | for zipname in zipnames: 226 | zipfile = ZipFile(zipname) 227 | shpname = extract(zipfile) 228 | 229 | for (coord, sw, ne) in coordinates(zoom_high): 230 | parent = coord.zoomTo(zoom_low).container() 231 | 232 | if not exists(prepdir(parent) + '/tl_2013_us_state.json'): 233 | # skip this probably-empty tile 234 | continue 235 | 236 | path = prepdir(coord) 237 | 238 | outname = '%s/%s.json' % (path, shpname[:-4]) 239 | 240 | print outname, '...' 241 | 242 | if exists(outname): 243 | remove(outname) 244 | 245 | cmd = 'ogr2ogr', '-spat', str(sw.lon), str(sw.lat), str(ne.lon), str(ne.lat), \ 246 | '-t_srs', 'EPSG:4326', '-f', 'GeoJSON', '-lco', 'WRITE_BBOX=YES', outname, shpname 247 | 248 | runogr2ogr(cmd) 249 | 250 | cleanup(shpname) 251 | 252 | # 253 | # Combine per-state block files into per-tile block files. 254 | # 255 | 256 | coords = coordinates(zoom_high) 257 | 258 | for (coord, sw, ne) in coords: 259 | path = prepdir(coord) 260 | 261 | for (index, filename) in enumerate(glob('%s/tl_2013_??_bg.json' % path)): 262 | if filename.endswith('tl_2013_us_bg.json'): 263 | continue 264 | 265 | outname = '%s/tl_2013_us_bg.json' % path 266 | 267 | if index == 0: 268 | print 'copy', filename, 'to', outname, '...' 269 | copyfile(filename, outname) 270 | 271 | else: 272 | print 'append', filename, 'to', outname, '...' 273 | append_geojson(filename, outname) 274 | 275 | for (index, filename) in enumerate(glob('%s/tl_2013_??_tabblock.json' % path)): 276 | if filename.endswith('tl_2013_us_tabblock.json'): 277 | continue 278 | 279 | outname = '%s/tl_2013_us_tabblock.json' % path 280 | 281 | if index == 0: 282 | print 'copy', filename, 'to', outname, '...' 283 | copyfile(filename, outname) 284 | 285 | else: 286 | print 'append', filename, 'to', outname, '...' 287 | append_geojson(filename, outname) 288 | 289 | # 290 | # 291 | # 292 | 293 | zooms = zoom_low, zoom_mid 294 | 295 | for zoom in zooms: 296 | for (coord, sw, ne) in coordinates(zoom): 297 | path = prepdir(coord) 298 | 299 | files = glob('%s/tl_2013_us_*.json' % path) 300 | 301 | if not files: 302 | continue 303 | 304 | try: 305 | shortnames = [] 306 | 307 | for file in files: 308 | # knock off the "tl_2013_us_" part 309 | shortname = path + '/' + file[len(path) + 12:] 310 | shortnames.append(shortname) 311 | link(file, shortname) 312 | 313 | outname = path + '.topojson' 314 | 315 | print outname, '...' 316 | 317 | cmd = './node_modules/topojson/bin/topojson', '--cartesian', \ 318 | '--allow-empty', '--bbox', '-q', '36000000', '-p', \ 319 | '--out', outname 320 | 321 | cmd += tuple(shortnames) 322 | 323 | topocmd = Popen(cmd) 324 | topocmd.wait() 325 | 326 | assert topocmd.returncode == 0, 'Failed on %s' % outname 327 | 328 | finally: 329 | for shortname in shortnames: 330 | if shortname not in files: 331 | remove(shortname) 332 | --------------------------------------------------------------------------------
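A worked example of the TopoJSON arc decoding that census.py's unwind() applies to the tiles this script produces — the toy topology below is made up for illustration:

    from census import unwind

    # TopoJSON stores quantized, delta-encoded arcs plus a transform.
    transform = {'scale': [0.5, 0.5], 'translate': [100.0, 30.0]}

    # One arc: absolute first point (2, 2), then deltas (+2, 0) and (0, +2).
    arcs = [[[2, 2], [2, 0], [0, 2]]]

    # A non-negative index walks the arc forward; -1 would walk arc 0 in
    # reverse, per the arc-index rules linked in the docstring.
    ring = unwind([0], arcs, transform)

    print ring    # [(101.0, 31.0), (102.0, 31.0), (102.0, 32.0)]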