├── Python
│   ├── combine_with_pandas.py
│   ├── conda_environment.yml
│   ├── config.py
│   ├── run_all.py
│   ├── spatial_covariates_buffer_extraction.py
│   ├── spatial_covariates_point_extraction.py
│   └── utils.py
├── README.md
├── __runMe.bat
├── buffers
│   └── .gitignore
├── csvs_buffers
│   └── .gitignore
├── csvs_combined
│   └── .gitignore
├── csvs_points
│   └── .gitignore
├── dependency
│   ├── GCS_Unknown_datum_based_upon_the_Clarke_1866_ellipsoid.prj
│   └── buffer
│       ├── buffer.py
│       ├── mxd
│       │   ├── clark1866.mxd
│       │   ├── evi_custom.mxd
│       │   ├── mollweide.mxd
│       │   └── wgs84.mxd
│       └── shp
│           └── .gitignore
├── logs
│   └── .gitignore
├── points
│   └── .gitignore
└── rasters
    ├── clark_1866
    │   └── Global Human Footprint
    │       └── .gitignore
    ├── evi_custom
    │   └── Enhanced Vegetation Index
    │       └── .gitignore
    ├── mollweide
    │   └── BUILT Population
    │       └── .gitignore
    └── wgs84
        └── 2015_accessibility_to_cities
            └── .gitignore

/Python/combine_with_pandas.py:
--------------------------------------------------------------------------------
import pandas as pd
import os
import glob
import time
from timeit import default_timer as timer
import utils as u

def main_code(buff_dir, pt_dir, combine_dir, log):
    u.verify_dir(combine_dir)
    start_all = timer()
    u.write_to_log('\nSTART COMBINE: {}'.format(time.strftime("%Y-%m-%d %H:%M:%S")), log)
    buff_list = []
    for filename in glob.iglob(buff_dir+'/*.csv'):
        buff_list.append(filename)

    pt_list = []
    for filename in glob.iglob(pt_dir+'/*.csv'):
        pt_list.append(filename)

    match_count = 0
    for buff_csv in buff_list:
        buff_name = os.path.basename(buff_csv)
        for pt_csv in pt_list:
            pt_name = os.path.basename(pt_csv)
            if buff_name == pt_name:
                match_count += 1
                u.write_to_log(' {}) buffers: {} points: {}'.format(match_count, buff_name, pt_name), log)

                out_csv = os.path.join(combine_dir, buff_name)

                buff_df = pd.read_csv(buff_csv, dtype='object')
                pt_df = pd.read_csv(pt_csv, dtype='object')

                print(' buff shape: {}'.format(buff_df.shape))
                print(' pt shape: {}'.format(pt_df.shape))

                merged = pd.merge(left=buff_df, right=pt_df, on='DHSID')
                print(' merge shape: {}'.format(merged.shape))

                # Drop the duplicate point-side columns created by the merge.
                for col in list(merged):
                    if col.endswith("_y"):
                        merged = merged.drop(col, axis=1)

                # These select columns by name and merge point values into them if
                # the buffer analysis resulted in a blank value.
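                # (A vectorized equivalent for each column -- assuming the column
                # exists -- would be merged['mean'] = merged['mean'].fillna(merged['value']);
                # the try/except form below also tolerates CSVs that lack the column.)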
                try: merged.loc[merged['mean'].isnull(), 'mean'] = merged['value']
                except KeyError: print(' no MEAN column')

                try: merged.loc[merged['sum'].isnull(), 'sum'] = merged['value']
                except KeyError: print(' no SUM column')

                try: merged.loc[merged['majority'].isnull(), 'majority'] = merged['value']
                except KeyError: print(' no MAJORITY column')

                try: merged = merged.drop('nodata', axis=1)
                except (KeyError, ValueError): print(' no NODATA column')
                merged = merged.drop('value', axis=1)

                merged.to_csv(out_csv, sep=',')

    u.write_to_log('FINISH COMBINE: {}\nELAPSED TIME: {} sec.'.format(time.strftime("%Y-%m-%d %H:%M:%S"), round(timer()-start_all, 3)), log)

--------------------------------------------------------------------------------
/Python/conda_environment.yml:
--------------------------------------------------------------------------------
name: covariates
channels:
  - conda-forge
  - defaults
dependencies:
  - affine=2.1.0=py_1
  - attrs=17.2.0=py36_0
  - boto3=1.4.7=py36_0
  - botocore=1.5.92=py36_0
  - ca-certificates=2017.7.27.1=0
  - certifi=2017.7.27.1=py36_0
  - click=6.7=py36_0
  - click-plugins=1.0.3=py36_0
  - cligj=0.4.0=py36_0
  - curl=7.49.1=vc14_2
  - docutils=0.14=py36_0
  - expat=2.1.0=vc14_3
  - fiona=1.7.9=py36_0
  - freexl=1.0.2=vc14_2
  - gdal=2.2.0=np112py36_vc14_2
  - geos=3.5.1=vc14_1
  - hdf4=4.2.12=vc14_0
  - hdf5=1.8.18=vc14_1
  - hypothesis=3.23.0=py36_0
  - jmespath=0.9.3=py36_0
  - jpeg=9b=vc14_1
  - kealib=1.4.7=vc14_3
  - krb5=1.14.2=vc14_0
  - libgdal=2.1.4=vc14_3
  - libiconv=1.14=vc14_4
  - libnetcdf=4.4.1.1=vc14_6
  - libpng=1.6.28=vc14_1
  - libpq=9.6.3=vc14_0
  - libspatialite=4.3.0a=vc14_15
  - libtiff=4.0.7=vc14_0
  - libxml2=2.9.5=vc14_0
  - munch=2.2.0=py36_0
  - openjpeg=2.3.0=vc14_0
  - openssl=1.0.2l=vc14_0
  - pandas=0.20.3=py36_1
  - pip=9.0.1=py36_0
  - proj4=4.9.3=vc14_4
  - pympler=0.5=py36_0
  - pyparsing=2.2.0=py36_0
  - python=3.6.3=0
  - python-dateutil=2.6.1=py36_0
  - pytz=2017.2=py36_0
  - rasterio=0.36.0=py36_0
  - rasterstats=0.12.0=py_2
  - s3transfer=0.1.11=py36_0
  - setuptools=36.6.0=py36_1
  - shapely=1.6.1=py36_1
  - simplejson=3.11.1=py36_0
  - six=1.11.0=py36_1
  - snuggs=1.4.1=py36_0
  - sqlite=3.13.0=vc14_0
  - vc=14=0
  - vs2015_runtime=14.0.25420=0
  - wheel=0.30.0=py_1
  - wincertstore=0.2=py36_0
  - xerces-c=3.1.4=vc14_2
  - xz=5.2.3=0
  - zlib=1.2.8=vc14_3
  - icc_rt=2017.0.4=h97af966_0
  - intel-openmp=2018.0.0=hcd89f80_7
  - mkl=2017.0.3=0
  - numpy=1.12.1=py36_0
  - zope=1.0=py36_0
  - zope.interface=4.4.2=py36_0
prefix: C:\Users\deitelberg\AppData\Local\conda\conda\envs\covariates2

--------------------------------------------------------------------------------
/Python/config.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3

import os

# Variables defining paths to points, buffers, and rasters
points_base_path = r'C:\DHS-covariate-extraction\points'
buffers_base_path = r'C:\DHS-covariate-extraction\buffers'
rasters_base_path = r'C:\DHS-covariate-extraction\rasters'

points = {'clark_1866': os.path.join(points_base_path, 'clark1866.shp'),
          'evi_custom': os.path.join(points_base_path, 'evi_custom.shp'),
          'wgs84': os.path.join(points_base_path, 'wgs84.shp'),
          'mollweide': os.path.join(points_base_path, 'mollweide.shp')}

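# (The projection-group keys shared by the points, buffers, and rasters dicts
# are the valid command-line arguments to run_all.py, e.g. python run_all.py wgs84.)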
buffers = {'clark_1866': os.path.join(buffers_base_path, 'clark1866.shp'),
           'evi_custom': os.path.join(buffers_base_path, 'evi_custom.shp'),
           'wgs84': os.path.join(buffers_base_path, 'wgs84.shp'),
           'mollweide': os.path.join(buffers_base_path, 'mollweide.shp')}

rasters = {'clark_1866': os.path.join(rasters_base_path, 'clark_1866'),
           'evi_custom': os.path.join(rasters_base_path, 'evi_custom'),
           'wgs84': os.path.join(rasters_base_path, 'wgs84'),
           'mollweide': os.path.join(rasters_base_path, 'mollweide'),
           'regex': '/**/*.tif'}  # recursive glob pattern (not a true regex), e.g. '/**/*_landMasked.tif'

# Log file output locations
log_pth = os.path.join(os.path.split(buffers_base_path)[0], 'logs')

# CSV output locations
csv_pth = {'points': os.path.join(os.path.split(points_base_path)[0], 'csvs_points'),
           'buffers': os.path.join(os.path.split(buffers_base_path)[0], 'csvs_buffers'),
           'combined': os.path.join(os.path.split(buffers_base_path)[0], 'csvs_combined')}

# Statistics for zonal stats -- see the rasterstats docs for the full list
stats = "count sum mean majority nodata"

--------------------------------------------------------------------------------
/Python/run_all.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3

import os
import sys
import time
import config as c
import spatial_covariates_buffer_extraction as run_buff
import spatial_covariates_point_extraction as run_pts
import combine_with_pandas as combine
import utils as u

try:
    # User input - should correspond to a key in config's points, buffers, and rasters dicts.
    # To run from a command prompt, for example: python run_all.py clark_1866
    projection_group = sys.argv[1]
    u.assert_valid_user_input(projection_group)

    # Initialize log file
    u.verify_dir(c.log_pth)
    log_name = 'log_{}.txt'.format(time.strftime("%Y-%m-%d_%H-%M-%S"))
    log_out = os.path.join(c.log_pth, log_name)

    print('START')
    # Do zonal stats with buffers
    run_buff.main_code(c.rasters[projection_group],
                       c.buffers[projection_group],
                       c.csv_pth['buffers'],
                       log_out,
                       c.rasters['regex'],
                       c.stats)

    # Assign raster value to points
    run_pts.main_code(c.rasters[projection_group],
                      c.points[projection_group],
                      c.csv_pth['points'],
                      log_out,
                      c.rasters['regex'])

    # Combine csv files. Where zonal stats is blank, assign point value.
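    # (Buffer and point CSVs are paired by identical file names produced by the
    # two extraction steps, e.g. csvs_buffers/Some_Dataset__raster.csv with
    # csvs_points/Some_Dataset__raster.csv; the names here are illustrative.)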
    combine.main_code(c.csv_pth['buffers'], c.csv_pth['points'], c.csv_pth['combined'], log_out)

    print('FINISH')

except Exception as e:
    print('ERROR in run_all.py \n{}'.format(e))

--------------------------------------------------------------------------------
/Python/spatial_covariates_buffer_extraction.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3

from rasterstats import zonal_stats
import csv
from timeit import default_timer as timer
import time
import os
import sys
import glob
from osgeo import gdal, ogr, osr
import utils as u

def do_zonal_stats(buffs, raster, csv_pth, num, log_out, stats):
    try:
        u.verify_dir(csv_pth)
        # Dataset name comes from the raster's parent folder, with spaces underscored.
        dataset = os.path.basename(os.path.dirname(raster)).replace(' ', '_')
        raster_name_to_csv = os.path.basename(raster).replace('.tif', '.csv')
        csv_name = '__'.join([dataset, raster_name_to_csv])
        csv_out = os.path.join(csv_pth, csv_name)

        start = timer()
        u.write_to_log(' {}) Raster: {}'.format(num, os.path.basename(raster)), log_out)

        feature_stats = zonal_stats(buffs, raster, stats=stats, geojson_out=True)
        print(' zonal_stats... ({} sec.)'.format(round(timer()-start, 2)))

        start = timer()
        attributes = []
        for item in feature_stats:
            attributes.append(item['properties'])
        print(' append dicts... ({} sec.)'.format(round(timer()-start, 2)))

        start = timer()
        with open(csv_out, 'w', newline='') as outfile:
            fp = csv.DictWriter(outfile, attributes[0].keys())
            fp.writeheader()
            fp.writerows(attributes)
        print(' write to csv... ({} sec.)'.format(round(timer()-start, 2)))
        u.write_to_log(' CSV file: {}'.format(csv_out), log_out)
        u.write_to_log(' Log file: {}'.format(log_out), log_out)

    except Exception as e:
        u.write_to_log(str(e), log_out)
        u.write_to_log('FINISH BUFFERS: {}'.format(time.strftime("%Y-%m-%d %H:%M:%S")), log_out)


def main_code(raster_dir, buffers, csv_path, log, raster_regex, stats):
    # Main stuff
    start_all = timer()
    u.write_to_log('\nSTART BUFFERS: {}'.format(time.strftime("%Y-%m-%d %H:%M:%S")), log)
    raster_list = []
    for filename in glob.iglob(raster_dir+raster_regex, recursive=True):
        raster_list.append(filename)

    count = 0
    for tif in raster_list:
        count += 1
        try:
            do_zonal_stats(buffers, tif, csv_path, count, log, stats)
        except Exception as e:
            print(e)

    u.write_to_log('FINISH BUFFERS: {}\nELAPSED TIME: {} sec.'.format(time.strftime("%Y-%m-%d %H:%M:%S"), round(timer()-start_all, 2)), log)

--------------------------------------------------------------------------------
/Python/spatial_covariates_point_extraction.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3

from rasterstats import point_query
import csv
from timeit import default_timer as timer
import time
import os
import sys
import glob
from osgeo import gdal, ogr, osr
import utils as u

def do_point_query(points, raster, csv_pth, num, log_out):
    try:
        u.verify_dir(csv_pth)
        # Dataset name comes from the raster's parent folder, with spaces underscored.
        dataset = os.path.basename(os.path.dirname(raster)).replace(' ', '_')
        raster_name_to_csv = os.path.basename(raster).replace('.tif', '.csv')
        csv_name = '__'.join([dataset, raster_name_to_csv])
        csv_out = os.path.join(csv_pth, csv_name)

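        # (csv_out follows the <parent folder>__<raster name>.csv pattern -- e.g. a
        # hypothetical hfp.tif under 'Global Human Footprint' would become
        # Global_Human_Footprint__hfp.csv.)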
        start = timer()
        u.write_to_log(' {}) Raster: {}'.format(num, os.path.basename(raster)), log_out)

        feature_stats = point_query(points, raster, interpolate='nearest', geojson_out=True)
        print(' point_query... ({} sec.)'.format(round(timer()-start, 2)))

        start = timer()
        attributes = []
        for item in feature_stats:
            attributes.append(item['properties'])
        print(' append dicts... ({} sec.)'.format(round(timer()-start, 2)))

        start = timer()
        with open(csv_out, 'w', newline='') as outfile:
            fp = csv.DictWriter(outfile, attributes[0].keys())
            fp.writeheader()
            fp.writerows(attributes)
        print(' write to csv... ({} sec.)'.format(round(timer()-start, 2)))
        u.write_to_log(' CSV file: {}'.format(csv_out), log_out)
        u.write_to_log(' Log file: {}'.format(log_out), log_out)

    except Exception as e:
        u.write_to_log(str(e), log_out)
        u.write_to_log('FINISH POINTS: {}'.format(time.strftime("%Y-%m-%d %H:%M:%S")), log_out)


def main_code(raster_dir, pts, csv_path, log, raster_regex):
    # Main stuff
    start_all = timer()
    u.write_to_log('\nSTART POINTS: {}'.format(time.strftime("%Y-%m-%d %H:%M:%S")), log)
    raster_list = []
    for filename in glob.iglob(raster_dir+raster_regex, recursive=True):
        raster_list.append(filename)

    count = 0
    for tif in raster_list:
        count += 1
        try:
            do_point_query(pts, tif, csv_path, count, log)
        except Exception as e:
            print(e)

    u.write_to_log('FINISH POINTS: {}\nELAPSED TIME: {} sec.'.format(time.strftime("%Y-%m-%d %H:%M:%S"), round(timer()-start_all, 2)), log)

--------------------------------------------------------------------------------
/Python/utils.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3

import os
import config as c

def write_to_log(string, log):
    print(string)

    with open(log, 'a') as log_file:
        log_file.write('\n' + string)

def verify_dir(path):
    if not os.path.isdir(path):
        os.makedirs(path)
        print('created dir: {}'.format(path))

def assert_valid_user_input(user_input):
    assert user_input in c.points, 'ERROR: \'{}\' projection group not in config.points'.format(user_input)
    assert user_input in c.buffers, 'ERROR: \'{}\' projection group not in config.buffers'.format(user_input)
    assert user_input in c.rasters, 'ERROR: \'{}\' projection group not in config.rasters'.format(user_input)

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# DHS Covariate Extraction

Python code for the extraction of covariates from other sources for DHS clusters. For the results of this kind of processing, please see our publicly available [Geospatial Covariates (GC) files](https://spatialdata.dhsprogram.com/covariates/).

## Background
For a detailed overview of how the extraction process works and information about the rasters that we use for our covariate (GC) files, please see:

Mayala, Benjamin, Thomas D. Fish, David Eitelberg, and Trinadh Dontamsetti. 2018. [The DHS Program Geospatial Covariate Datasets Manual (Second Edition)](https://spatialdata.dhsprogram.com/references/DHS%20Covariates%20Extract%20Data%20Description%202.pdf). Rockville, Maryland, USA: ICF.
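
In brief, the pipeline runs zonal statistics over each cluster's buffer, runs a nearest-cell point query as a fallback, and then merges the two result sets on `DHSID`, filling any blank buffer statistic with the point value.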

## Python version warning
This repository breaks best practices and contains both Python 2 and Python 3 code. All of the code in the [/dependency/buffer/](/dependency/buffer/) folder uses Python 2, and all of the files in the [/Python/](/Python/) folder use Python 3. A [hashbang](https://en.wikipedia.org/wiki/Shebang_(Unix)) such as `#!/usr/bin/env python3` is at the beginning of each file to minimize confusion.

ESRI ArcMap 10.X still uses Python 2, sorry.

## Setup
### Initial Setup

1. Edit the base paths in [/Python/config.py](/Python/config.py) to the paths of the points, buffers, and rasters folders on your local machine. Save that file.
2. Populate the [/rasters/](/rasters/) folder structure with the raster data that you would like to extract values from. Example folders were added to help you figure out the data management. For more information about the data management, see [the rasters section](#rasters).
3. You may want to delete the `.gitignore` files found throughout the folder structure to keep them from tripping up the scripts.

#### Dependencies
A full list of the dependencies needed to run the extraction code ([/Python/](/Python/)) can be found [here](/Python/conda_environment.yml). We use a conda environment (e.g. `conda env create -f Python/conda_environment.yml`) to sandbox the dependencies of different processes. GDAL, Rasterio, and Fiona are all tricky to install on Windows. Check each package's documentation for the best way to install it.

### Per Dataset Setup
1. Download your points from [The DHS Program](https://dhsprogram.com/data/available-datasets.cfm) website. For more information about accessing the GPS data, please [see this post](https://userforum.dhsprogram.com/index.php?t=msg&th=6448&start=0&) in our [user forum](https://userforum.dhsprogram.com/).
2. Open the points in your favorite GIS program and save them as `wgs84.shp`.
3. Reproject the points into each projection that you have raster data in and save those points in the points folder.
4. Create the buffer layers and save them in the buffers folder. Example code and ArcMap dependencies can be found in [/dependency/buffer/](/dependency/buffer/).
5. Delete the contents of the csvs_buffers, csvs_combined, and csvs_points folders.
6. Run the script! Some example batch code can be found in [__runMe.bat](/__runMe.bat).

## Folder Structure
### /buffers/
Your 2 km and 10 km buffers go in this folder. We use these file names for the following projections:

* clark1866.shp
* evi_custom.shp
* mollweide.shp
* wgs84.shp

Python code to generate the buffers from the points in the points folder can be found in [/dependency/buffer/buffer.py](/dependency/buffer/buffer.py).

### /csvs_buffers/
The output from the first extraction pass, which uses the buffers.

### /csvs_combined/
The final output of the extraction process.

### /csvs_points/
The output from the second extraction pass, which uses the points.

### /dependency/
Three of the covariate rasters that we extract from use an offbeat projection that isn't found in ESRI ArcMap (we call it evi_custom in the folder structure).

#### /dependency/buffer/
Several map documents and a Python 2 script for creating the buffers.

### /logs/
The main Python scripts keep verbose logs of all of the text that is shown in the command line interface.
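Each run writes a new time-stamped file, e.g. `log_2018-01-31_14-22-05.txt` (the naming comes from `run_all.py`; the date shown is only illustrative).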

### /points/
Your points from [The DHS Program](https://dhsprogram.com/data/available-datasets.cfm). For more information about accessing the GPS data, please [see this post](https://userforum.dhsprogram.com/index.php?t=msg&th=6448&start=0&) in our [user forum](https://userforum.dhsprogram.com/). We use these file names for the following projections:

* clark1866.shp
* evi_custom.shp
* mollweide.shp
* wgs84.shp

### /rasters/
The raster files that you want to extract values from, organized by projection and then by source.

## Questions/Bugs

This code is for researchers, scientists, and other data users who understand GIS data. Bug reports should be submitted [in the tracker](https://github.com/DHSProgram/DHS-covariate-extraction/issues) here on GitHub, and general questions can be directed to gpsrequests@dhsprogram.com.

Please note that we have limited resources to support users of this codebase.

--------------------------------------------------------------------------------
/__runMe.bat:
--------------------------------------------------------------------------------
call activate covariates

echo extracting from rasters

python Python\run_all.py clark_1866
python Python\run_all.py evi_custom
python Python\run_all.py wgs84
python Python\run_all.py mollweide
pause

--------------------------------------------------------------------------------
/buffers/.gitignore:
--------------------------------------------------------------------------------
*
!.gitignore

--------------------------------------------------------------------------------
/csvs_buffers/.gitignore:
--------------------------------------------------------------------------------
*
!.gitignore

--------------------------------------------------------------------------------
/csvs_combined/.gitignore:
--------------------------------------------------------------------------------
*
!.gitignore

--------------------------------------------------------------------------------
/csvs_points/.gitignore:
--------------------------------------------------------------------------------
*
!.gitignore

--------------------------------------------------------------------------------
/dependency/GCS_Unknown_datum_based_upon_the_Clarke_1866_ellipsoid.prj:
--------------------------------------------------------------------------------
GEOGCS["GCS_Unknown_datum_based_upon_the_Clarke_1866_ellipsoid",DATUM["D_Not_specified_based_on_Clarke_1866_spheroid",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]]

--------------------------------------------------------------------------------
/dependency/buffer/buffer.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python2

#########################################################
# Turns a points layer into urban/rural buffers
# By Tom Fish
#########################################################

import arcpy
import os

BASE_MXD_PATH = r"C:\DHS-covariate-extraction\dependency\buffer\mxd"
BASE_SHP_PATH = r"C:\DHS-covariate-extraction\dependency\buffer\shp"
BR_BUFFER_PATH = r"C:\DHS-covariate-extraction\buffers"

files = os.listdir(BASE_MXD_PATH)

for entry in files:
    print entry
    mxd = arcpy.mapping.MapDocument(BASE_MXD_PATH + "\\" + entry)
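    # Each mxd holds one reprojected points layer; urban ('U') points get 2 km
    # buffers, rural ('R') points get 10 km buffers, and the two are merged below.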
    lyrs = arcpy.mapping.ListLayers(mxd)
    lyr = lyrs[0]

    lyr.definitionQuery = """ "URBAN_RURA" = 'U' """
    arcpy.Buffer_analysis(lyr, BASE_SHP_PATH + r"\U.shp", "2 Kilometers")

    lyr.definitionQuery = """ "URBAN_RURA" = 'R' """
    arcpy.Buffer_analysis(lyr, BASE_SHP_PATH + r"\R.shp", "10 Kilometers")

    arcpy.Merge_management([BASE_SHP_PATH + r"\U.shp", BASE_SHP_PATH + r"\R.shp"], BR_BUFFER_PATH + "\\" + entry.split(".")[0] + ".shp")

# Clean up
torm = os.listdir(BASE_SHP_PATH)
for thing in torm:
    os.remove(BASE_SHP_PATH + "\\" + thing)

--------------------------------------------------------------------------------
/dependency/buffer/mxd/clark1866.mxd:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DHSProgram/DHS-covariate-extraction/43a24a852f2936c0249c148ff68127014416f00c/dependency/buffer/mxd/clark1866.mxd

--------------------------------------------------------------------------------
/dependency/buffer/mxd/evi_custom.mxd:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DHSProgram/DHS-covariate-extraction/43a24a852f2936c0249c148ff68127014416f00c/dependency/buffer/mxd/evi_custom.mxd

--------------------------------------------------------------------------------
/dependency/buffer/mxd/mollweide.mxd:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DHSProgram/DHS-covariate-extraction/43a24a852f2936c0249c148ff68127014416f00c/dependency/buffer/mxd/mollweide.mxd

--------------------------------------------------------------------------------
/dependency/buffer/mxd/wgs84.mxd:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DHSProgram/DHS-covariate-extraction/43a24a852f2936c0249c148ff68127014416f00c/dependency/buffer/mxd/wgs84.mxd

--------------------------------------------------------------------------------
/dependency/buffer/shp/.gitignore:
--------------------------------------------------------------------------------
*
!.gitignore

--------------------------------------------------------------------------------
/logs/.gitignore:
--------------------------------------------------------------------------------
*
!.gitignore

--------------------------------------------------------------------------------
/points/.gitignore:
--------------------------------------------------------------------------------
*
!.gitignore

--------------------------------------------------------------------------------
/rasters/clark_1866/Global Human Footprint/.gitignore:
--------------------------------------------------------------------------------
*
!.gitignore

--------------------------------------------------------------------------------
/rasters/evi_custom/Enhanced Vegetation Index/.gitignore:
--------------------------------------------------------------------------------
*
!.gitignore

--------------------------------------------------------------------------------
/rasters/mollweide/BUILT Population/.gitignore:
--------------------------------------------------------------------------------
*
!.gitignore

--------------------------------------------------------------------------------
/rasters/wgs84/2015_accessibility_to_cities/.gitignore:
--------------------------------------------------------------------------------
*
!.gitignore
--------------------------------------------------------------------------------