├── .github
└── workflows
│ └── build-publish.yml
├── .gitignore
├── .pylintrc
├── .readthedocs.yml
├── MANIFEST.in
├── README.md
├── cythonize_extension.sh
├── data
├── chicago_boundaries
│ ├── chi_comm_boundaries.dbf
│ ├── chi_comm_boundaries.prj
│ ├── chi_comm_boundaries.shp
│ ├── chi_comm_boundaries.shx
│ ├── chicago_boundaries.dbf
│ ├── chicago_boundaries.prj
│ ├── chicago_boundaries.shp
│ └── chicago_boundaries.shx
├── input_data
│ ├── destinations
│ │ ├── health_chicago.csv
│ │ ├── health_chicago.dbf
│ │ ├── health_chicago.prj
│ │ ├── health_chicago.qpj
│ │ ├── health_chicago.shp
│ │ ├── health_chicago.shx
│ │ └── hyde_park_dests.csv
│ └── sources
│ │ ├── hyde_park_tracts.csv
│ │ └── tracts2010.csv
├── osm_query_cache
│ ├── bike41.738989000000004_-87.654949_41.843278999999995_-87.543769.h5
│ ├── bike41.738989000000004_-87.654949_41.843278999999995_-87.546212.h5
│ ├── bike41.783989_-87.609949_41.798279_-87.591212.h5
│ ├── drive41.738929_-87.654979_41.843478999999995_-87.54372900000001.h5
│ ├── drive41.738989000000004_-87.654949_41.843278999999995_-87.543769.h5
│ ├── drive41.738989000000004_-87.654949_41.843278999999995_-87.546212.h5
│ ├── drive41.783989_-87.609949_41.798279_-87.588769.h5
│ ├── walk41.738989000000004_-87.654949_41.843278999999995_-87.543769.h5
│ ├── walk41.738989000000004_-87.654949_41.843278999999995_-87.546212.h5
│ └── walk41.783989_-87.609949_41.798279_-87.591212.h5
└── output_data
│ └── matrices
│ ├── simple_demo_matrix.csv
│ └── walk_asym_health_tracts.tmx
├── docs
├── .gitignore
├── Makefile
├── conf.py
├── index.rst
├── notebooks
│ ├── .ipynb_checkpoints
│ │ ├── access_score-checkpoint.ipynb
│ │ ├── calibration-checkpoint.ipynb
│ │ ├── notes-checkpoint.ipynb
│ │ ├── reqs-checkpoint.ipynb
│ │ └── travel_time_metrics-checkpoint.ipynb
│ ├── 0_Reqs_Install.ipynb
│ ├── 1_Simple_Test_Demo.ipynb
│ ├── 2_Methods.ipynb
│ ├── 3_Travel_Time_Matrix.ipynb
│ ├── 4_Access_Metrics.ipynb
│ ├── 5_Coverage_Metrics.ipynb
│ ├── 6_TSFCA.ipynb
│ ├── figures
│ │ ├── Access_Model_all.png
│ │ ├── a_FQHC.png
│ │ ├── access.png
│ │ ├── access_com.png
│ │ ├── access_comm.png
│ │ ├── access_fed.png
│ │ ├── access_mod_agg.png
│ │ ├── access_sd_t.png
│ │ ├── cdf_access_score.png
│ │ ├── cdf_accesssum.png
│ │ ├── cdf_count.png
│ │ ├── cdf_coverage.png
│ │ ├── cdf_time.png
│ │ ├── cdf_tsfca.png
│ │ ├── cov_t_com.png
│ │ ├── coverage.png
│ │ ├── coverage_agg.png
│ │ ├── coverage_t.png
│ │ ├── dd.png
│ │ ├── diagram_code.png
│ │ ├── fig_0.png
│ │ ├── fig_1.png
│ │ ├── flow.png
│ │ ├── lin.png
│ │ ├── log.png
│ │ ├── map_AccessCount.png
│ │ ├── map_AccessModel.png
│ │ ├── map_AccessSum.png
│ │ ├── map_AccessTime.png
│ │ ├── nd.png
│ │ ├── nd_2.png
│ │ ├── nn.png
│ │ ├── nn_2.png
│ │ ├── snap.png
│ │ ├── sqr.png
│ │ ├── tsfca.png
│ │ └── tsfca_results.png
│ ├── notebook.tex
│ └── spatial_access_documentation081219.pdf
└── source
│ ├── conf.py
│ ├── index.rst
│ ├── modules.rst
│ └── spatial_access.rst
├── setup.cfg
├── setup.py
├── spatial_access
├── BaseModel.py
├── Configs.py
├── MatrixInterface.py
├── Models.py
├── NetworkInterface.py
├── SpatialAccessExceptions.py
├── __init__.py
├── _parsers.py
├── p2p.py
└── src
│ ├── .gitignore
│ ├── Serializer.cpp
│ ├── _p2pExtension.cpp
│ ├── _p2pExtension.pyx
│ ├── csvParser.cpp
│ ├── generate_extension_source.py
│ ├── include
│ ├── Graph.h
│ ├── Serializer.h
│ ├── csvParser.h
│ ├── dataFrame.h
│ ├── networkUtility.h
│ ├── otpCSV.h
│ ├── threadUtilities.h
│ ├── tmxParser.h
│ ├── transitMatrix.h
│ └── userDataContainer.h
│ ├── pyx_src
│ ├── dynamic.pyx
│ └── static.pyx
│ ├── threadUtilities.cpp
│ └── tmxParser.cpp
└── tests
├── .gitignore
├── test_BaseModel.py
├── test_Configs.py
├── test_MatrixInterface.py
├── test_Models.py
├── test_NetworkInterface.py
├── test_data
├── dests.csv
├── dests_a.csv
├── dests_b.csv
├── sample_otp.csv
├── sources.csv
├── sources.tsv
├── sources_a.csv
└── sources_bad_indeces_type.csv
├── test_networkUtility.py
├── test_p2p.py
└── test_pyTransitMatrix.py
/.github/workflows/build-publish.yml:
--------------------------------------------------------------------------------
1 | name: Ubuntu and Mac builds
2 |
3 | on: [push, pull_request, workflow_dispatch]
4 |
5 | jobs:
6 | build_and_publish:
7 | strategy:
8 | matrix:
9 | os:
10 | - ubuntu-latest
11 | - macos-latest
12 |
13 | runs-on: ${{ matrix.os }}
14 | if: "!contains(github.event.head_commit.message, '[ci skip]') && !contains(github.event.head_commit.message, '[skip ci]')"
15 |
16 | steps:
17 | - uses: actions/checkout@v2
18 | with:
19 | submodules: recursive
20 |
21 |       - name: Install packages
22 | run: python3 -m pip install --upgrade --upgrade-strategy eager twine cibuildwheel
23 |
24 | #- name: Build and Test
25 | # run: |
26 | # python3 setup.py build_ext --inplace
27 | # python3 -m unittest discover -v -s tests -p "test_*.py"
28 |
29 | - name: Build Wheels
30 | run: |
31 | python3 -m cibuildwheel --output-dir wheelhouse
32 | python3 setup.py sdist
33 | ls -lrt wheelhouse/*
34 |
35 | - uses: actions/upload-artifact@v2
36 | with:
37 | name: travel-time-${{ matrix.os }}
38 | path: wheelhouse/
39 |
40 | - name: Publish to Pypi
41 | if: startsWith(github.ref, 'refs/tags/v')
42 | env:
43 | TWINE_USERNAME: __token__
44 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
45 | run: |
46 | python3 -m pip install --upgrade twine
47 | python3 -m twine upload wheelhouse/*.whl
48 |
49 | - name: Publish Source to Pypi
50 | if: matrix.os == 'macos-latest' && startsWith(github.ref, 'refs/tags/v')
51 | env:
52 | TWINE_USERNAME: __token__
53 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
54 | run: |
55 | python3 -m twine upload dist/*.tar.gz
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .ipynb_checkpoints/
2 | .idea/
3 | spatial_access/src/.idea/vcs.xml
4 | data/osm_query_cache/
5 | figures/
6 | dist/
7 | spatial_access/src/matrix.dSYM/Contents
8 | *.csv
9 | logs/
10 | *.DS_Store
11 | *.pyc
12 | spatial_access/src/_p2pExtension.cpp
13 | spatial_access/logs/
14 | build/
15 | data/matrices
16 | travel_times/access_score.ipynb
17 | travel_times/access_score.ipynb
18 | .pytest_cache/
19 | .DS_Store
20 | spatial_access.egg-info/
21 | __pycache__
22 | .cache
23 | spatial_access/matrix.dSYM/
24 | .vscode/
29 | _p2pExtension.cpython-39-darwin.so
30 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | # Required
2 | version: 2
3 |
4 | python:
5 | version: 3.7
6 | install:
7 | - method: setuptools
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include setup.py
2 | include README.md
3 | include spatial_access/*
4 | include spatial_access/src/*
5 | include spatial_access/src/include/*
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | # spatial_access: Compute travel times and spatial access metrics at scale
4 | Compute travel times and spatial access measures at scale (millions of origin-destination pairs in minutes).
5 | Travel times for three modes: walking, biking, driving.
6 | Spatial access measures: provider-to-people ratio, avg. time to nearest provider, count/attribute sum of nearby providers, weighted access scores and floating catchment areas.
7 |
8 |
9 | Latest Release |
10 |
11 |
12 |
13 |
14 | |
15 |
16 |
17 | Build Status |
18 |
19 |
20 |
21 | |
22 |
23 |
24 |
25 | Documentation |
26 |
27 |
28 |
29 | |
30 |
31 |
32 |
33 | Tested Operating Systems |
34 |
35 | Ubuntu, macOS
36 | |
37 |
38 |
39 |
40 |
41 | Components of spatial_access :
42 | ----
43 | spatial_access has two submodules:
44 | - p2p: Generate many to many matrices with travel times for sets of coordinates. Use `walk` ,`bike` or `drive` network types (import `transit` from other sources), or get the distance in meters.
45 | - Models: Contains a suite of models for calculating spatial accessibility to amenities.
46 |
47 | To use this service as a ReST API, see: https://github.com/GeoDaCenter/spatial_access_api
48 |
49 | If you are a Windows user, instructions for installing Ubuntu on a virtual machine are at the bottom of the Readme.
50 |
51 |
52 | Installation
53 | ----
54 | 0. A modern compiler like `gcc` or `clang`.
55 |
56 | 1. Dependencies
57 |
58 | - MacOS:
59 |
60 | `brew install spatialindex`
61 |
62 | - Ubuntu:
63 |
64 | `sudo apt-get install libspatialindex-dev`
65 |
66 | `sudo apt-get install python-tk`
67 |
68 | 2. Package
69 |
70 | `pip3 install spatial_access`
71 |
72 | **More detailed instructions for installing in [0_Reqs_Install.ipynb](./docs/notebooks/0_Reqs_Install.ipynb)**
73 |
74 | Usage
75 | ---
76 | See the iPython notebooks in `docs/` for example usage. The first two notebooks contain installation instructions and run through a simple demo to make sure you have the setup successfully installed:
77 |
78 | * [0_Reqs_Install.ipynb](https://github.com/GeoDaCenter/spatial_access/tree/master/docs/notebooks/0_Reqs_Install.ipynb): Installation requirements to run the notebook demos
79 | * [1_Simple_Test_Demo](https://github.com/GeoDaCenter/spatial_access/tree/master/docs/notebooks/1_Simple_Test_Demo.ipynb): Simple demo to test your setup installation works
80 |
81 |
82 | The remaining notebooks walk through how to run the travel time matrix and spatial access metrics, including main functions and parameters:
83 |
84 | * [2_Methods](https://github.com/GeoDaCenter/spatial_access/tree/master/docs/notebooks/2_Methods.ipynb): Purpose and structure of the package + methodology for estimating travel time matrices and spatial access metrics
85 | * [3_Travel_Time_Matrix.ipynb](https://github.com/GeoDaCenter/spatial_access/tree/master/docs/notebooks/3_Travel_Time_Matrix.ipynb): How to run the travel time matrices using [p2p.py](https://github.com/GeoDaCenter/spatial_access/blob/master/spatial_access/p2p.py)
86 | * [4_Access_Metrics.ipynb](https://github.com/GeoDaCenter/spatial_access/tree/master/docs/notebooks/4_Access_Metrics.ipynb): How to run the access metrics (origin-based) using [Models.py](https://github.com/GeoDaCenter/spatial_access/blob/master/spatial_access/Models.py)
87 | * [5_Coverage_Metrics.ipynb](https://github.com/GeoDaCenter/spatial_access/tree/master/docs/notebooks/5_Coverage_Metrics.ipynb): How to run the coverage metrics (destination-based) using [Models.py](https://github.com/GeoDaCenter/spatial_access/blob/master/spatial_access/Models.py)
88 | * [6_TSFCA.ipynb](https://github.com/GeoDaCenter/spatial_access/tree/master/docs/notebooks/6_TSFCA.ipynb): How to run a two-stage floating catchment area model (origin-based) using [Models.py](https://github.com/GeoDaCenter/spatial_access/blob/master/spatial_access/Models.py)
89 |
90 |
91 | The **data** folder contains the input_data needed to estimate the metrics under **sources** (for origins) and **destinations** (for destinations).
92 | In output_data, the **matrices** folder stores the estimated symmetric and asymmetric matrices.
93 | The **models** folder contains the results of the models' analyses.
94 | Finally, **figures** stores the results of maps and plots calculated during the process.
95 |
96 | You can also download all of the notebooks in one PDF file [here](https://github.com/GeoDaCenter/spatial_access/tree/master/docs/notebooks/spatial_access_documentation081219.pdf).
97 |
98 | ### Overwriting default configuration values
99 | p2p provides default configuration values for edge weights and node impedance (see spatial_access/configs.py).
100 | You can overwrite these as follows:
101 | ```
102 | from spatial_access.p2p import TransitMatrix
103 | from spatial_access.Configs import Configs
104 | custom_config = Configs()
105 | # set fields of custom_config
106 | tm = TransitMatrix(..., configs=custom_config)
107 | # continue with computation
108 | ```
109 |
110 | Maintenance
111 | ---
112 |
113 | ### Instructions for building locally (only for developers):
114 |
115 | - Additional requirements: `cython` and `jinja2`
116 | - To regenerate .pyx files, run: `bash cythonize_extension.sh` (CI will do this automatically on deployment)
117 | - To install locally, run: `sudo python3 setup.py install ` from spatial_access root directory
118 | - Unit tests require the `pytest` package. From package root directory, run `python3 -m pytest tests/` to run all unit tests.
119 |
120 | ### PyPi Maintenance
121 | The package lives at: `https://pypi.org/project/spatial-access/`
122 |
123 | When a branch is pulled into Master and builds/passes all unit tests,
124 | GitHub Actions will automatically deploy the build to PyPi.
125 |
126 |
127 | To update PyPi access credentials, see .github/workflows/build-publish.yml and update the
128 | `PYPI_PASSWORD` repository secret with a new PyPi API token.
129 |
130 |
131 | ### Installing Ubuntu 18 LTS with dependencies from scratch (recommended for Windows users)
132 |
133 | 1. Follow the instructions at this link: https://linus.nci.nih.gov/bdge/installUbuntu.html to set up a virtual machine
134 | 2. `sudo apt-get update`
135 | 3. `sudo add-apt-repository universe`
136 | 4. `sudo apt-get -y install python3-pip`
137 | 5. Continue with Installation Instructions (above)
138 |
139 | ### Questions/Feedback?
140 |
141 | spatial@uchicago.edu
142 |
143 | ### Acknowledgments
144 |
145 | Developed by Logan Noel at the University of Chicago's Center for Spatial Data Science (CSDS) with support from the Public Health National Center for Innovations (PHNCI), the University of Chicago, and CSDS.
146 |
--------------------------------------------------------------------------------
/cythonize_extension.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | python3 spatial_access/src/generate_extension_source.py spatial_access/src/
3 | cython --cplus spatial_access/src/*.pyx
4 |
--------------------------------------------------------------------------------
/data/chicago_boundaries/chi_comm_boundaries.dbf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/data/chicago_boundaries/chi_comm_boundaries.dbf
--------------------------------------------------------------------------------
/data/chicago_boundaries/chi_comm_boundaries.prj:
--------------------------------------------------------------------------------
1 | GEOGCS["WGS84(DD)",DATUM["D_WGS84",SPHEROID["WGS84",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.017453292519943295]]
--------------------------------------------------------------------------------
/data/chicago_boundaries/chi_comm_boundaries.shp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/data/chicago_boundaries/chi_comm_boundaries.shp
--------------------------------------------------------------------------------
/data/chicago_boundaries/chi_comm_boundaries.shx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/data/chicago_boundaries/chi_comm_boundaries.shx
--------------------------------------------------------------------------------
/data/chicago_boundaries/chicago_boundaries.dbf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/data/chicago_boundaries/chicago_boundaries.dbf
--------------------------------------------------------------------------------
/data/chicago_boundaries/chicago_boundaries.prj:
--------------------------------------------------------------------------------
1 | GEOGCS["WGS84(DD)",DATUM["D_WGS84",SPHEROID["WGS84",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.017453292519943295]]
--------------------------------------------------------------------------------
/data/chicago_boundaries/chicago_boundaries.shp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/data/chicago_boundaries/chicago_boundaries.shp
--------------------------------------------------------------------------------
/data/chicago_boundaries/chicago_boundaries.shx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/data/chicago_boundaries/chicago_boundaries.shx
--------------------------------------------------------------------------------
/data/input_data/destinations/health_chicago.dbf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/data/input_data/destinations/health_chicago.dbf
--------------------------------------------------------------------------------
/data/input_data/destinations/health_chicago.prj:
--------------------------------------------------------------------------------
1 | GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]
--------------------------------------------------------------------------------
/data/input_data/destinations/health_chicago.qpj:
--------------------------------------------------------------------------------
1 | GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]
2 |
--------------------------------------------------------------------------------
/data/input_data/destinations/health_chicago.shp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/data/input_data/destinations/health_chicago.shp
--------------------------------------------------------------------------------
/data/input_data/destinations/health_chicago.shx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/data/input_data/destinations/health_chicago.shx
--------------------------------------------------------------------------------
/data/input_data/destinations/hyde_park_dests.csv:
--------------------------------------------------------------------------------
1 | name,lon,lat,category,capacity
Museum of Science and Industry,-87.583131,41.790883,Museum,400
Medici,-87.593738,41.791438,Restaurant,50
Valois,-87.588328,41.799663,Restaurant,30
DuSable Museum,-87.607132,41.791985,Museum,100
Whole Foods,-87.587949,41.801978,Supermarket,50
Hyde Park Produce,-87.595524,41.799942,Supermarket,35
Jewel Osco,-87.607225,41.78458,Supermarket,70
--------------------------------------------------------------------------------
/data/input_data/sources/hyde_park_tracts.csv:
--------------------------------------------------------------------------------
1 | geoid10,lon,lat,Pop2014,Pov14,community
2 | 17031836300,-87.6017575,41.8015319,6465,234,41
3 | 17031836200,-87.6012844,41.790469,1329,47,41
4 | 17031410100,-87.5793225,41.8014966,1956,551,41
5 | 17031410200,-87.5942689,41.801668,1248,362,41
6 | 17031410500,-87.6037453,41.7978267,2630,717,41
7 | 17031410600,-87.5989457,41.7979713,2365,703,41
8 | 17031411100,-87.5897018,41.7904493,2246,154,41
9 | 17031410700,-87.5941976,41.7980397,1959,453,41
10 | 17031410800,-87.5896261,41.7979597,3201,741,41
11 | 17031410900,-87.5766591,41.7978743,2923,607,41
12 | 17031411000,-87.5768734,41.7907158,3313,465,41
13 | 17031411200,-87.5940174,41.7905558,1691,289,41
--------------------------------------------------------------------------------
/data/osm_query_cache/bike41.738989000000004_-87.654949_41.843278999999995_-87.543769.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/data/osm_query_cache/bike41.738989000000004_-87.654949_41.843278999999995_-87.543769.h5
--------------------------------------------------------------------------------
/data/osm_query_cache/bike41.738989000000004_-87.654949_41.843278999999995_-87.546212.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/data/osm_query_cache/bike41.738989000000004_-87.654949_41.843278999999995_-87.546212.h5
--------------------------------------------------------------------------------
/data/osm_query_cache/bike41.783989_-87.609949_41.798279_-87.591212.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/data/osm_query_cache/bike41.783989_-87.609949_41.798279_-87.591212.h5
--------------------------------------------------------------------------------
/data/osm_query_cache/drive41.738929_-87.654979_41.843478999999995_-87.54372900000001.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/data/osm_query_cache/drive41.738929_-87.654979_41.843478999999995_-87.54372900000001.h5
--------------------------------------------------------------------------------
/data/osm_query_cache/drive41.738989000000004_-87.654949_41.843278999999995_-87.543769.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/data/osm_query_cache/drive41.738989000000004_-87.654949_41.843278999999995_-87.543769.h5
--------------------------------------------------------------------------------
/data/osm_query_cache/drive41.738989000000004_-87.654949_41.843278999999995_-87.546212.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/data/osm_query_cache/drive41.738989000000004_-87.654949_41.843278999999995_-87.546212.h5
--------------------------------------------------------------------------------
/data/osm_query_cache/drive41.783989_-87.609949_41.798279_-87.588769.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/data/osm_query_cache/drive41.783989_-87.609949_41.798279_-87.588769.h5
--------------------------------------------------------------------------------
/data/osm_query_cache/walk41.738989000000004_-87.654949_41.843278999999995_-87.543769.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/data/osm_query_cache/walk41.738989000000004_-87.654949_41.843278999999995_-87.543769.h5
--------------------------------------------------------------------------------
/data/osm_query_cache/walk41.738989000000004_-87.654949_41.843278999999995_-87.546212.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/data/osm_query_cache/walk41.738989000000004_-87.654949_41.843278999999995_-87.546212.h5
--------------------------------------------------------------------------------
/data/osm_query_cache/walk41.783989_-87.609949_41.798279_-87.591212.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/data/osm_query_cache/walk41.783989_-87.609949_41.798279_-87.591212.h5
--------------------------------------------------------------------------------
/data/output_data/matrices/simple_demo_matrix.csv:
--------------------------------------------------------------------------------
1 | ,Museum of Science and Industry,Medici,Valois,DuSable Museum,Whole Foods,Hyde Park Produce,Jewel Osco,
2 | 17031836300,1831,1277,994,1041,1106,545,1744,
3 | 17031836200,1259,517,1422,547,1639,1047,906,
4 | 17031410100,1076,1550,629,2280,765,1119,2882,
5 | 17031410200,1371,956,506,1636,531,474,2262,
6 | 17031410500,1681,1095,1056,652,1273,643,1355,
7 | 17031410600,1396,817,756,936,973,343,1597,
8 | 17031411100,758,449,1011,1339,1169,1167,1506,
9 | 17031410700,1129,604,480,1220,697,268,1872,
10 | 17031410800,899,754,238,1477,455,503,2094,
11 | 17031410900,897,1408,1041,2175,1177,1531,2734,
12 | 17031411000,641,1235,1380,2084,1516,1793,2560,
13 | 17031411200,831,89,1023,992,1230,854,1353,
14 |
--------------------------------------------------------------------------------
/data/output_data/matrices/walk_asym_health_tracts.tmx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/data/output_data/matrices/walk_asym_health_tracts.tmx
--------------------------------------------------------------------------------
/docs/.gitignore:
--------------------------------------------------------------------------------
1 | build/
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SOURCEDIR = source
8 | BUILDDIR = build
9 |
10 | # Put it first so that "make" without argument is like "make help".
11 | help:
12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
13 |
14 | .PHONY: help Makefile
15 |
16 | # Catch-all target: route all unknown targets to Sphinx using the new
17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
18 | %: Makefile
19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # http://www.sphinx-doc.org/en/master/config
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | import os
14 | import sys
15 | from unittest.mock import MagicMock
16 | sys.path.insert(0, os.path.abspath('../'))
17 |
18 | # Mock Imports
19 | class Mock(MagicMock):
20 | @classmethod
21 | def __getattr__(cls, name):
22 | return MagicMock()
23 |
24 | MOCK_MODULES = ['fiona',
25 | 'matplotli',
26 | 'jellyfish',
27 | 'geopandas',
28 | 'psutil',
29 | 'pandas',
30 | 'numpy',
31 | 'osmnet',
32 | 'scipy',
33 | 'geopy',
34 | 'shapely',
35 | 'tables',
36 | 'scikit_learn',
37 | 'atlas',
38 | 'descartes',
39 | 'rtree']
40 |
41 | for mod_name in MOCK_MODULES:
42 | sys.modules[mod_name] = Mock()
43 |
44 |
45 | # -- Project information -----------------------------------------------------
46 |
47 | project = 'spatial_access'
48 | copyright = '2019, Logan Noel'
49 | author = 'Logan Noel'
50 |
51 |
52 | # -- General configuration ---------------------------------------------------
53 |
54 | # Add any Sphinx extension module names here, as strings. They can be
55 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
56 | # ones.
57 | extensions = [
58 | ]
59 | master_doc = 'index'
60 |
61 | # Add any paths that contain templates here, relative to this directory.
62 | templates_path = ['_templates']
63 |
64 | # List of patterns, relative to source directory, that match files and
65 | # directories to ignore when looking for source files.
66 | # This pattern also affects html_static_path and html_extra_path.
67 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
68 |
69 |
70 | # -- Options for HTML output -------------------------------------------------
71 |
72 | # The theme to use for HTML and HTML Help pages. See the documentation for
73 | # a list of builtin themes.
74 | #
75 | html_theme = 'alabaster'
76 |
77 | # Add any paths that contain custom static files (such as style sheets) here,
78 | # relative to this directory. They are copied after the builtin static files,
79 | # so a file named "default.css" will overwrite the builtin "default.css".
80 | html_static_path = ['_static']
81 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. spatial_access documentation master file, created by
2 | sphinx-quickstart on Wed Apr 10 19:36:50 2019.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | Welcome to spatial_access's documentation!
7 | ==========================================
8 |
9 | .. toctree::
10 | :maxdepth: 2
11 | :caption: Contents:
12 |
13 |
14 |
15 | Indices and tables
16 | ==================
17 |
18 | * :ref:`genindex`
19 | * :ref:`modindex`
20 | * :ref:`search`
21 |
--------------------------------------------------------------------------------
/docs/notebooks/.ipynb_checkpoints/calibration-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "## P2P Calibration \n",
8 | "put at the end of p2p in order to import transitmatrix\n",
9 | "\n",
10 | "We use Graphhopper in order to test the p2p calibration. A Graphhopper API is required to run this process and it can be obtained here:\n",
11 | "https://graphhopper.com/api/1/docs/FAQ/\n",
12 | "\n",
13 | "Note: You can also use a GoogleMaps API.\n",
14 | "\n",
15 | "\n",
16 | "Show the mean and stddev of the difference between p2p's route time\n",
17 | "and GraphHopper's route time, in seconds.\n",
18 | "\n",
19 | "IMPORTANT: To use this, must have a valid GraphHopper Matrix API key\n",
20 | "saved in a text file in this directory called GRAPHHOPPER_API_KEY.txt\n",
21 | "\n",
22 | "Positive differences indicate p2p's route was longer, negative times indicates\n",
23 | "that p2p's route was shorter."
24 | ]
25 | },
26 | {
27 | "cell_type": "code",
28 | "execution_count": 8,
29 | "metadata": {},
30 | "outputs": [],
31 | "source": [
32 | "import pandas as pd"
33 | ]
34 | },
35 | {
36 | "cell_type": "code",
37 | "execution_count": 2,
38 | "metadata": {},
39 | "outputs": [],
40 | "source": [
41 | "# P2P Calibration \n",
42 | "#need this because p2p and graphhopper use synonyms for \n",
43 | "#vehicle/route types\n",
44 | "p2p_to_graphhopper_type_names = {\n",
45 | " 'drive' : 'car',\n",
46 | " 'walk' : 'foot',\n",
47 | " 'bike' : 'bike'\n",
48 | "}\n",
49 | "\n",
50 | "def sample_one_matrix(df, tm, network_type, api_key):\n",
51 | " '''\n",
 52 | "    Test one distance matrix\n",
53 | " '''\n",
54 | "\n",
55 | " base_url = \"https://graphhopper.com/api/1/matrix\"\n",
56 | " first = True\n",
57 | " for data in df.itertuples():\n",
58 | " x_data = data[4]\n",
59 | " y_data = data[5]\n",
60 | " if first:\n",
61 | " point_string = \"?point={},{}\".format(x_data, y_data)\n",
62 | " first = False\n",
63 | " else:\n",
64 | " point_string = \"&point={},{}\".format(x_data, y_data)\n",
65 | "\n",
66 | " base_url += point_string\n",
67 | "\n",
68 | " param_string = \"&type=json&vehicle={}&debug=true&out_array=times&key={}\".format(network_type, api_key)\n",
69 | " base_url += param_string\n",
70 | "\n",
71 | "\n",
72 | " try:\n",
73 | " r = requests.get(base_url)\n",
74 | "\n",
75 | " results = r.json()['times']\n",
76 | " except:\n",
77 | " print('there was a problem fetching from GraphHopper. Exiting...')\n",
78 | " sys.exit()\n",
79 | "\n",
80 | "\n",
81 | " already_checked = set()\n",
82 | " diffs = []\n",
83 | " for i, row in enumerate(df.index):\n",
84 | " for j, col in enumerate(df.index):\n",
85 | " if (row, col) not in already_checked and row != col:\n",
86 | " calculated_time = tm.get(row, col)\n",
87 | " actual_time = results[i][j]\n",
88 | " diff = calculated_time - actual_time\n",
89 | " diffs.append(diff)\n",
90 | " already_checked.add((row, col))\n",
91 | " already_checked.add((col, row))\n",
92 | "\n",
93 | " stddev = np.std(diffs)\n",
94 | " mean = np.mean(diffs)\n",
95 | "\n",
96 | "\n",
97 | " print('diffs mean: {}, stddev: {}'.format(mean, stddev))\n",
98 | "\n",
99 | "\n",
100 | "def calibrate(network_type='walk', input_file='resources/LEHD_blocks.csv', \n",
101 | " sl_file='resources/condensed_street_data.csv', n=1):\n",
102 | " '''\n",
103 | " Show the mean and stddev of the difference between p2p's route time\n",
104 | " and GraphHopper's route time, in seconds.\n",
105 | "\n",
106 | " IMPORTANT: To use this, must have a valid GraphHopper Matrix API key\n",
107 | " saved in a text file in this directory called GRAPHHOPPER_API_KEY.txt\n",
108 | "\n",
109 | " Positive differences indicate p2p's route was longer, negative times indicates\n",
110 | " that p2p's route was shorter.\n",
111 | " '''\n",
112 | " if network_type == 'drive':\n",
113 | " assert sl_file is not None, 'must provide sl_file for use with driving network calibration'\n",
114 | " with open('GRAPHHOPPER_API_KEY.txt', 'r') as api_file:\n",
115 | " api_key = api_file.read()\n",
116 | " api_key = api_key.strip()\n",
117 | " gh_type_name = p2p_to_graphhopper_type_names[network_type]\n",
118 | "\n",
119 | " tm = TransitMatrix(network_type=network_type, primary_input=input_file)\n",
120 | " if network_type == 'drive':\n",
121 | " tm.process(speed_limit_filename=sl_file)\n",
122 | " else:\n",
123 | " tm.process()\n",
124 | "\n",
125 | " #extract the column names\n",
126 | " xcol = ''\n",
127 | " ycol = ''\n",
128 | " idx = ''\n",
129 | "\n",
130 | " df = pd.read_csv(input_file)\n",
131 | "\n",
132 | " print('The variables in your data set are:')\n",
133 | " df_cols = df.columns.values\n",
134 | " for var in df_cols:\n",
135 | " print('> ',var)\n",
136 | " while xcol not in df_cols:\n",
137 | " xcol = input('Enter the x coordinate (Latitude): ')\n",
138 | " while ycol not in df_cols:\n",
139 | " ycol = input('Enter the y coordinate (Longitude): ')\n",
140 | " while idx not in df_cols:\n",
141 | " idx = input('Enter the index name: ')\n",
142 | "\n",
143 | " df.rename(columns={xcol:'x',ycol:'y', idx:'idx'},inplace=True)\n",
144 | " df.set_index('idx', inplace=True)\n",
145 | "\n",
146 | " for i in range(n):\n",
147 | "\n",
148 | " sample_one_matrix(df.sample(24), tm, gh_type_name, api_key)\n",
149 | " "
150 | ]
151 | },
152 | {
153 | "cell_type": "code",
154 | "execution_count": null,
155 | "metadata": {},
156 | "outputs": [],
157 | "source": [
158 | "calibrate(network_type='walk',\n",
159 | " input_file='data/ORIG/tracts2010.csv',\n",
160 | " sl_file='data/DEST/health_chicago.csv',\n",
161 | " n=1)"
162 | ]
163 | },
164 | {
165 | "cell_type": "markdown",
166 | "metadata": {},
167 | "source": [
168 | "## Epsilon Calibration"
169 | ]
170 | },
171 | {
172 | "cell_type": "markdown",
173 | "metadata": {},
174 | "source": [
175 | ""
176 | ]
177 | },
178 | {
179 | "cell_type": "code",
180 | "execution_count": 9,
181 | "metadata": {},
182 | "outputs": [
183 | {
184 | "data": {
185 | "text/plain": [
186 | "(801, 200)"
187 | ]
188 | },
189 | "execution_count": 9,
190 | "metadata": {},
191 | "output_type": "execute_result"
192 | }
193 | ],
194 | "source": [
195 | "# Load the travel time distance matrix and assess dimensions: \n",
196 | "df = pd.read_csv('scripts/data/matrices/walk_asym_health_tracts.csv')\n",
197 | "df.shape"
198 | ]
199 | },
200 | {
201 | "cell_type": "code",
202 | "execution_count": 10,
203 | "metadata": {},
204 | "outputs": [],
205 | "source": [
206 | "#Identify percentage of values outside epsilon:\n",
207 | "p_eps=((df.groupby('1').count()).iloc[0][0])/len(df)"
208 | ]
209 | },
210 | {
211 | "cell_type": "code",
212 | "execution_count": 11,
213 | "metadata": {},
214 | "outputs": [
215 | {
216 | "name": "stdout",
217 | "output_type": "stream",
218 | "text": [
219 | "Of the total matrix 0.25% of the values are outside the bounding box. If the value is below 1% it seems epsilon is appropriate for this particular dataset.\n"
220 | ]
221 | }
222 | ],
223 | "source": [
224 | "print (\"Of the total matrix \",\"{0:.2f}%\".format(p_eps* 100),\"of the values are outside the bounding box. If the value is below 1% it seems epsilon is appropriate for this particular dataset.\")"
225 | ]
226 | }
227 | ],
228 | "metadata": {
229 | "kernelspec": {
230 | "display_name": "Python 3",
231 | "language": "python",
232 | "name": "python3"
233 | },
234 | "language_info": {
235 | "codemirror_mode": {
236 | "name": "ipython",
237 | "version": 3
238 | },
239 | "file_extension": ".py",
240 | "mimetype": "text/x-python",
241 | "name": "python",
242 | "nbconvert_exporter": "python",
243 | "pygments_lexer": "ipython3",
244 | "version": "3.6.5"
245 | }
246 | },
247 | "nbformat": 4,
248 | "nbformat_minor": 2
249 | }
250 |
--------------------------------------------------------------------------------
/docs/notebooks/.ipynb_checkpoints/reqs-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Installation and Files Requirements"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "--- \n",
15 | "## Installation Setup \n",
16 | "\n",
17 | "The package is written in Python 3.6, C++ 11 and Cython by [Logan Noel](https://www.linkedin.com/in/lmnoel/). (Minimum Python version 3.5) \n",
18 | "Currently, the only supported operating systems are MacOS and Ubuntu (if you don't have either, a guide for installing Ubuntu 16.04 LTS is in README.)"
19 | ]
20 | },
21 | {
22 | "cell_type": "markdown",
23 | "metadata": {},
24 | "source": [
25 | "**Within the notebook:**"
26 | ]
27 | },
28 | {
29 | "cell_type": "code",
30 | "execution_count": null,
31 | "metadata": {},
32 | "outputs": [],
33 | "source": [
34 | "cd scripts"
35 | ]
36 | },
37 | {
38 | "cell_type": "code",
39 | "execution_count": null,
40 | "metadata": {},
41 | "outputs": [],
42 | "source": [
43 | "#1. Install `python3` (if not installed already): \n",
44 | "! brew install python3"
45 | ]
46 | },
47 | {
48 | "cell_type": "code",
49 | "execution_count": null,
50 | "metadata": {},
51 | "outputs": [],
52 | "source": [
53 | "#2. Install `pip` (if not installed already):\n",
54 | "! curl -O https://raw.github.com/pypa/pip/master/contrib/get-pip.py\n",
55 | "! python3 get-pip.py"
56 | ]
57 | },
58 | {
59 | "cell_type": "code",
60 | "execution_count": null,
61 | "metadata": {},
62 | "outputs": [],
63 | "source": [
64 | "#3. Install requirements: \n",
65 | "! pip3 install -r requirements.txt"
66 | ]
67 | },
68 | {
69 | "cell_type": "code",
70 | "execution_count": null,
71 | "metadata": {},
72 | "outputs": [],
73 | "source": [
74 | "#3.1 If you get errors:\n",
75 | "! pip3 install -r requirements.txt --upgrade"
76 | ]
77 | },
78 | {
79 | "cell_type": "code",
80 | "execution_count": null,
81 | "metadata": {},
82 | "outputs": [],
83 | "source": [
84 | "#3.2 If you get \"Cannot uninstall X errors for distutils packages\":\n",
85 | "! pip3 install -r requirements.txt --ignore-installed"
86 | ]
87 | },
88 | {
89 | "cell_type": "code",
90 | "execution_count": null,
91 | "metadata": {},
92 | "outputs": [],
93 | "source": [
94 | "#4. Run the setup for the travel time matrix:\n",
95 | "! python3 setup.py build\n",
96 | "! python3 setup.py install"
97 | ]
98 | },
99 | {
100 | "cell_type": "markdown",
101 | "metadata": {},
102 | "source": [
103 | "If getting error: \"Failed building wheel for pandana\" or missing \"libspatialindex\" when importing the libraries, do the following:"
104 | ]
105 | },
106 | {
107 | "cell_type": "code",
108 | "execution_count": null,
109 | "metadata": {},
110 | "outputs": [],
111 | "source": [
112 | "#Note: If getting error: \"Failed building wheel for pandana\"\n",
113 | "! brew install spatialindex\n",
114 | "! python3 setup.py again."
115 | ]
116 | },
117 | {
118 | "cell_type": "markdown",
119 | "metadata": {},
120 | "source": [
121 | "---\n",
122 | "**From a terminal:** \n",
123 | "\n",
124 | "1. Go to directory where you want to save the folder and run: \n",
125 | "`$ git clone https://github.com/GeoDaCenter/access.git` \n",
126 | " \n",
127 | "2. Go the access > travel_times folder: \n",
128 | "`$ cd access/analytics` \n",
129 | "\n",
130 | "3. Install `python3` and `pip` (if not installed already). \n",
131 | "`$ brew install python3` \n",
132 | "`$ curl -O https://raw.github.com/pypa/pip/master/contrib/get-pip.py` \n",
133 | "`$ python3 get-pip.py` \n",
134 | "\n",
135 | "4. Also within this directory, run: \n",
136 | "`$ pip3 install -r requirements.txt` \n",
137 | "If you get any errors, try running instead \n",
138 | "`$ pip3 install -r requirements.txt --upgrade` \n",
139 | "If you get \"Cannot uninstall X errors for distutils packages\": \n",
140 | "`$ pip3 install -r requirements.txt --ignore-installed` \n",
141 | "\n",
142 | "5. Run (If you are on linux, throw a `sudo` in front): \n",
143 | "`$ python3 setup.py build` \n",
144 | "`$ python3 setup.py install` \n",
145 | "\n",
146 | "Note: When running the demo, if you get error: *\"Failed building wheel for pandana\"* or missing *\"libspatialindex\"* when importing the libraries, install spatialindex and setup.py again. \n",
147 | "`$ brew install spatialindex` \n",
148 | "`$ python3 setup.py` again."
149 | ]
150 | },
151 | {
152 | "cell_type": "markdown",
153 | "metadata": {},
154 | "source": [
155 | "---\n",
 156 | "**If you are having troubles, make sure ruby, homebrew, xcode, and git are installed:** \n",
157 | "`$ curl -L https://get.rvm.io | bash -s stable --auto-dotfiles --autolibs=enable --rails` \n",
158 | "`$ mkdir homebrew && curl -L https://github.com/Homebrew/brew/tarball/master | tar xz --strip 1 -C homebrew` \n",
159 | "`$ ruby -e \"$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)\"` \n",
160 | "`$ brew install git`"
161 | ]
162 | },
163 | {
164 | "cell_type": "markdown",
165 | "metadata": {},
166 | "source": [
167 | "---\n",
168 | "## Files Requirements\n",
169 | "* Save origin and destination files as .csv files under the **data** folder. \n",
170 | "\n",
171 | "* Both origin and destination files should be under the same coordinate reference system and that it is EPSG:4326 (WGS 84).\n",
172 | "\n",
173 | "* Make sure that the destination points are within the area of the source area file.\n",
174 | "\n",
 175 | "* If the user is interested in **aggregating** the scores, please input the greater areal unit as an attribute of the source table (for access) and as attribute of the destination table (for coverage). \n",
176 | "In order to attach the polygon's attributes to the points (destinations or origins), the user can use QGIS and do the following: \n",
177 | "1) Import the polygon's shapefile (in this case, community areas for Chicago) and the points' shapefile of interest. \n",
178 | "2) There are two ways of attaching the polygon's IDs to the points:\n",
179 | " * Go to Vector > Data Management Tools > Join attributes by location \n",
180 | " * Go to Processing > Toolbox > SAGA > Vector point tools > Add polygon attributes to points. \n",
181 | " \n",
182 | "\n",
183 | "* **Origin file:** \n",
184 | " * Unique index identifier (ID) (integer or real) \n",
185 | " * Latitude and longitude coordinates (real) \n",
186 | " * For Coverage/Access Score: population count (not mandatory for Access Score)\n",
187 | " * In order to aggregate to larger areal units, attach the larger areal unit ID as an attribute of the origin file. (not mandatory) (integer or real)\n",
188 | " \n",
189 | " \n",
190 | "* **Destination file:** \n",
191 | " * Unique index identifier (ID) (integer or real) \n",
192 | " * Latitude and longitude coordinates (real) \n",
193 | " * For Coverage/Access Score: target amount of each destination (can be size/sales) (not mandatory for Access Score) \n",
194 | " * In order to subset by categories: category field that specifies (string)\n",
195 | " * In order to aggregate to larger areal units, attach the larger areal unit ID as an attribute of the destination file. (not mandatory) (integer or real)\n",
196 | "\n",
 197 | "* If you already have an asymmetric matrix, the metrics will consider the negative values as null values (NaN), as well as the blanks and the zeros.\n",
198 | "\n",
 199 | "* If the network type is **driving**, the edge traversal speed is drawn from a table of speed limits and street names that must be supplied separately. In this case, it is specified under scripts>data>speed_limit.csv\n"
200 | ]
201 | },
202 | {
203 | "cell_type": "code",
204 | "execution_count": null,
205 | "metadata": {},
206 | "outputs": [],
207 | "source": []
208 | }
209 | ],
210 | "metadata": {
211 | "kernelspec": {
212 | "display_name": "Python 3",
213 | "language": "python",
214 | "name": "python3"
215 | },
216 | "language_info": {
217 | "codemirror_mode": {
218 | "name": "ipython",
219 | "version": 3
220 | },
221 | "file_extension": ".py",
222 | "mimetype": "text/x-python",
223 | "name": "python",
224 | "nbconvert_exporter": "python",
225 | "pygments_lexer": "ipython3",
226 | "version": "3.6.5"
227 | }
228 | },
229 | "nbformat": 4,
230 | "nbformat_minor": 2
231 | }
232 |
--------------------------------------------------------------------------------
/docs/notebooks/0_Reqs_Install.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Notebook Overview and Installation Setup\n"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "\n",
15 | "Authors: [Irene Farah](https://www.linkedin.com/in/imfarah/), [Julia Koschinsky](https://www.linkedin.com/in/julia-koschinsky-657599b1/), [Logan Noel](https://www.linkedin.com/in/lmnoel/). \n",
16 | "Contact: [Julia Koschinsky](mailto:jkoschinsky@uchicago.edu) \n",
17 | "\n",
18 | "Research assistance of [Shiv Agrawal](http://simonlab.uchicago.edu/people/ShivAgrawal.html), [Caitlyn Tien](https://www.linkedin.com/in/caitlyn-tien-0b784b161/) and [Richard Lu](https://www.linkedin.com/in/richard-lu-576874155/) is gratefully acknowledged.\n",
19 | "\n",
20 | "Center for Spatial Data Science \n",
21 | "University of Chicago \n",
22 | "\n",
23 | "July 30, 2019\n",
24 | "\n",
25 | "---\n",
26 | "\n",
27 | "## Notebook Overview\n",
28 | "\n",
29 | "**The content of this notebook folder is organized as follows:**\n",
30 | "\n",
31 | "* [0_Reqs_Install.ipynb](./0_Reqs_Install.ipynb): Installation requirements to run the notebook demos \n",
 32 | "* [1_Simple_Test_Demo](./1_Simple_Test_Demo.ipynb): Simple demo to test your setup installation works \n",
33 | "\n",
34 | "\n",
35 | "The remaining notebooks walk through how to run the travel time matrix and spatial access metrics, including main functions and parameters: \n",
36 | "\n",
37 | "* [2_Methods](./2_Methods.ipynb): Purpose and structure of the package + methodology for estimating travel time matrices and spatial access metrics \n",
38 | "* [3_Travel_Time_Matrix.ipynb](./3_Travel_Time_Matrix.ipynb): How to run the travel time matrices using [p2p.py](./scripts/p2p.py) \n",
39 | "* [4_Access_Metrics.ipynb](./4_Access_Metrics.ipynb): How to run the access metrics (origin-based) using [Models.py](./scripts/Models.py) \n",
40 | "* [5_Coverage_Metrics.ipynb](./5_Coverage_Metrics.ipynb): How to run the coverage metrics (destination-based) using [Models.py](./spatial_access/Models.py)\n",
41 | "* [6_TSFCA.ipynb](./6_TSFCA.ipynb): How to run a two-stage floating catchment area model (origin-based) using [Models.py](./spatial_access/Models.py)\n",
42 | "\n",
43 | "\n",
44 | "The **data** folder contains the input_data needed to estimate the metrics under **sources** (for origins) and **destinations** (for destinations). Note that the capacity field in the destinations file is not real but only for demo purposes. \n",
45 | "In output_data, the **matrices** folder stores the estimated symmetric and asymmetric matrices. \n",
46 | "The **models** folder contains the results of the models' analyses. \n",
47 | "Finally, **figures** stores the results of maps and plots calculated during the process. \n",
48 | "\n"
49 | ]
50 | },
51 | {
52 | "cell_type": "markdown",
53 | "metadata": {},
54 | "source": [
55 | "# Installation Setup"
56 | ]
57 | },
58 | {
59 | "cell_type": "markdown",
60 | "metadata": {},
61 | "source": [
62 | "--- \n",
63 | "The package is written in Python 3.6, C++ 11 and Cython by [Logan Noel](https://www.linkedin.com/in/lmnoel/). (Minimum Python version 3.5) \n",
64 | "Currently, the only supported operating systems are MacOS and Ubuntu (if you don't have either, a guide for installing Ubuntu 16.04 LTS is in README.)\n",
65 | "\n",
66 | "We recommend setting up a separate anaconda environment for this package to prevent version conflicts between dependencies of this and other packages."
67 | ]
68 | },
69 | {
70 | "cell_type": "markdown",
71 | "metadata": {},
72 | "source": [
73 | "**Note: Experienced users can download installation requirements directly in the terminal.**"
74 | ]
75 | },
76 | {
77 | "cell_type": "markdown",
78 | "metadata": {},
79 | "source": [
80 | "**For MacOS:**"
81 | ]
82 | },
83 | {
84 | "cell_type": "code",
85 | "execution_count": null,
86 | "metadata": {},
87 | "outputs": [],
88 | "source": [
89 | "# Install Python3\n",
90 | "! brew install python3"
91 | ]
92 | },
93 | {
94 | "cell_type": "code",
95 | "execution_count": null,
96 | "metadata": {},
97 | "outputs": [],
98 | "source": [
99 | "# Install homebrew\n",
100 | "! /usr/bin/ruby -e \"$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)\""
101 | ]
102 | },
103 | {
104 | "cell_type": "code",
105 | "execution_count": null,
106 | "metadata": {},
107 | "outputs": [],
108 | "source": [
109 | "# Install pip3\n",
110 | "! brew install pip3"
111 | ]
112 | },
113 | {
114 | "cell_type": "code",
115 | "execution_count": null,
116 | "metadata": {},
117 | "outputs": [],
118 | "source": [
119 | "# Install jupyter, jupyterlab, and jupyter hub\n",
120 | "! brew install jupyter\n",
121 | "! brew install jupyterlab"
122 | ]
123 | },
124 | {
125 | "cell_type": "code",
126 | "execution_count": null,
127 | "metadata": {},
128 | "outputs": [],
129 | "source": [
130 | "# Clone the repository:\n",
131 | "! git clone https://github.com/jupyterhub/jupyterhub"
132 | ]
133 | },
134 | {
135 | "cell_type": "code",
136 | "execution_count": null,
137 | "metadata": {},
138 | "outputs": [],
139 | "source": [
140 | "# Install spatial index package\n",
141 | "! brew install spatialindex"
142 | ]
143 | },
144 | {
145 | "cell_type": "code",
146 | "execution_count": null,
147 | "metadata": {},
148 | "outputs": [],
149 | "source": [
150 | "# Install spatial access package\n",
151 | "! pip3 install spatial_access"
152 | ]
153 | },
154 | {
155 | "cell_type": "code",
156 | "execution_count": null,
157 | "metadata": {},
158 | "outputs": [],
159 | "source": [
160 | "# Install scipy package\n",
161 | "! brew install scipy"
162 | ]
163 | },
164 | {
165 | "cell_type": "code",
166 | "execution_count": null,
167 | "metadata": {},
168 | "outputs": [],
169 | "source": [
170 | "# Install geopy package\n",
171 | "! pip install geopy"
172 | ]
173 | },
174 | {
175 | "cell_type": "code",
176 | "execution_count": null,
177 | "metadata": {},
178 | "outputs": [],
179 | "source": [
180 | "# Install rtree package\n",
181 | "! pip install rtree"
182 | ]
183 | },
184 | {
185 | "cell_type": "code",
186 | "execution_count": null,
187 | "metadata": {},
188 | "outputs": [],
189 | "source": [
190 | "# Install geopandas package\n",
191 | "! conda install geopandas"
192 | ]
193 | },
194 | {
195 | "cell_type": "code",
196 | "execution_count": null,
197 | "metadata": {},
198 | "outputs": [],
199 | "source": [
200 | "# Run setup.py to install all the packages\n",
201 | "! sudo python setup.py install"
202 | ]
203 | },
204 | {
205 | "cell_type": "code",
206 | "execution_count": null,
207 | "metadata": {},
208 | "outputs": [],
209 | "source": [
 210 | "# Install btree package\n",
211 | "! brew install btree"
212 | ]
213 | },
214 | {
215 | "cell_type": "markdown",
216 | "metadata": {},
217 | "source": [
218 | "**In Ubuntu add:**"
219 | ]
220 | },
221 | {
222 | "cell_type": "code",
223 | "execution_count": null,
224 | "metadata": {},
225 | "outputs": [],
226 | "source": [
227 | "! sudo apt-get install libspatialindex-dev"
228 | ]
229 | },
230 | {
231 | "cell_type": "code",
232 | "execution_count": null,
233 | "metadata": {},
234 | "outputs": [],
235 | "source": [
236 | "! sudo apt-get install python-tk"
237 | ]
238 | }
239 | ],
240 | "metadata": {
241 | "kernelspec": {
242 | "display_name": "Python 3",
243 | "language": "python",
244 | "name": "python3"
245 | },
246 | "language_info": {
247 | "codemirror_mode": {
248 | "name": "ipython",
249 | "version": 3
250 | },
251 | "file_extension": ".py",
252 | "mimetype": "text/x-python",
253 | "name": "python",
254 | "nbconvert_exporter": "python",
255 | "pygments_lexer": "ipython3",
256 | "version": "3.7.4"
257 | }
258 | },
259 | "nbformat": 4,
260 | "nbformat_minor": 2
261 | }
262 |
--------------------------------------------------------------------------------
/docs/notebooks/figures/Access_Model_all.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/Access_Model_all.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/a_FQHC.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/a_FQHC.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/access.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/access.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/access_com.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/access_com.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/access_comm.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/access_comm.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/access_fed.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/access_fed.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/access_mod_agg.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/access_mod_agg.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/access_sd_t.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/access_sd_t.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/cdf_access_score.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/cdf_access_score.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/cdf_accesssum.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/cdf_accesssum.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/cdf_count.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/cdf_count.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/cdf_coverage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/cdf_coverage.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/cdf_time.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/cdf_time.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/cdf_tsfca.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/cdf_tsfca.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/cov_t_com.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/cov_t_com.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/coverage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/coverage.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/coverage_agg.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/coverage_agg.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/coverage_t.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/coverage_t.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/dd.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/dd.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/diagram_code.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/diagram_code.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/fig_0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/fig_0.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/fig_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/fig_1.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/flow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/flow.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/lin.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/lin.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/log.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/log.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/map_AccessCount.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/map_AccessCount.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/map_AccessModel.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/map_AccessModel.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/map_AccessSum.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/map_AccessSum.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/map_AccessTime.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/map_AccessTime.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/nd.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/nd.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/nd_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/nd_2.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/nn.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/nn.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/nn_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/nn_2.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/snap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/snap.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/sqr.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/sqr.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/tsfca.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/tsfca.png
--------------------------------------------------------------------------------
/docs/notebooks/figures/tsfca_results.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/figures/tsfca_results.png
--------------------------------------------------------------------------------
/docs/notebooks/spatial_access_documentation081219.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GeoDaCenter/spatial_access/b96ac33a396b5da20d8644191260d758534f8595/docs/notebooks/spatial_access_documentation081219.pdf
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# http://www.sphinx-doc.org/en/master/config

# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here.
import os
import sys
import unittest.mock

sys.path.insert(0, os.path.abspath('../'))

# Mock out heavyweight / compiled dependencies so autodoc can import the
# package without having them installed (e.g. on ReadTheDocs).
MOCK_MODULES = ['fiona', 'cython', 'matplotlib', 'jellyfish', 'geopandas',
                'psutil', 'pandas', 'numpy', 'osmnet', 'scipy', 'geopy',
                'shapely', 'tables', 'scikit_learn', 'atlas', 'descartes',
                'rtree', '_p2pExtension']
for _mocked_name in MOCK_MODULES:
    sys.modules[_mocked_name] = unittest.mock.MagicMock()

# -- Project information -----------------------------------------------------

project = 'Spatial Access'
copyright = '2019, Logan Noel'
author = 'Logan Noel'

# -- General configuration ---------------------------------------------------

# Sphinx extensions: autodoc pulls documentation from docstrings; napoleon
# understands Google/NumPy docstring styles.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.napoleon']

# Include docstrings from both the class itself and its constructor.
autoclass_content = "both"

# Paths that contain templates, relative to this directory.
templates_path = ['_templates']

# Patterns, relative to source directory, to ignore when looking for
# source files. Also affects html_static_path and html_extra_path.
exclude_patterns = []

# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages.
html_theme = 'alabaster'

# Paths that contain custom static files (such as style sheets), relative to
# this directory. Copied after the builtin static files, so a file named
# "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
78 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 |
2 |
3 | Welcome to Spatial Access's documentation!
4 | ==========================================
5 |
6 | .. automodule:: spatial_access.p2p
7 | :members:
8 |
9 | .. automodule:: spatial_access.BaseModel
10 | :members:
11 |
12 | .. automodule:: spatial_access.Models
13 | :members:
14 |
15 | .. automodule:: spatial_access.Configs
16 | :members:
17 |
18 | .. toctree::
19 | :maxdepth: 2
20 | :caption: Contents:
21 |
22 |
23 |
24 | Indices and tables
25 | ==================
26 |
27 | * :ref:`genindex`
28 | * :ref:`modindex`
29 | * :ref:`search`
30 |
--------------------------------------------------------------------------------
/docs/source/modules.rst:
--------------------------------------------------------------------------------
1 | spatial_access
2 | ==============
3 |
4 | .. toctree::
5 | :maxdepth: 4
6 |
7 | spatial_access
8 |
--------------------------------------------------------------------------------
/docs/source/spatial_access.rst:
--------------------------------------------------------------------------------
1 | spatial\_access package
2 | =======================
3 |
4 | Submodules
5 | ----------
6 |
7 | spatial\_access.BaseModel module
8 | --------------------------------
9 |
10 | .. automodule:: spatial_access.BaseModel
11 | :members:
12 | :undoc-members:
13 | :show-inheritance:
14 |
15 | spatial\_access.Configs module
16 | ------------------------------
17 | 
18 | .. automodule:: spatial_access.Configs
19 |    :members:
20 |    :undoc-members:
21 |    :show-inheritance:
22 |
23 | spatial\_access.MatrixInterface module
24 | --------------------------------------
25 |
26 | .. automodule:: spatial_access.MatrixInterface
27 | :members:
28 | :undoc-members:
29 | :show-inheritance:
30 |
31 | spatial\_access.Models module
32 | -----------------------------
33 |
34 | .. automodule:: spatial_access.Models
35 | :members:
36 | :undoc-members:
37 | :show-inheritance:
38 |
39 | spatial\_access.NetworkInterface module
40 | ---------------------------------------
41 |
42 | .. automodule:: spatial_access.NetworkInterface
43 | :members:
44 | :undoc-members:
45 | :show-inheritance:
46 |
47 | spatial\_access.SpatialAccessExceptions module
48 | ----------------------------------------------
49 |
50 | .. automodule:: spatial_access.SpatialAccessExceptions
51 | :members:
52 | :undoc-members:
53 | :show-inheritance:
54 |
55 | spatial\_access.p2p module
56 | --------------------------
57 |
58 | .. automodule:: spatial_access.p2p
59 | :members:
60 | :undoc-members:
61 | :show-inheritance:
62 |
63 |
64 | Module contents
65 | ---------------
66 |
67 | .. automodule:: spatial_access
68 | :members:
69 | :undoc-members:
70 | :show-inheritance:
71 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | description_file = README.md
3 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import sys, os
2 | from setuptools.extension import Extension
3 | from setuptools import setup
4 | from setuptools.command.build_py import build_py as _build_py
5 | import re
6 |
7 | with open("README.md", "r") as fh:
8 | LONG_DESCRIPTION = fh.read()
9 |
10 | ouff_mac = []
11 | extra_dependency = []
12 | if sys.platform == "darwin":
13 | ouff_mac = ['-mmacosx-version-min=10.9']
14 | extra_dependency = ['rtree>=0.8.3']
15 |
16 | SRC_PATH = "spatial_access/src/"
17 |
18 | MATRIX_INTERFACE_SOURCES = ["Serializer.cpp",
19 | "threadUtilities.cpp",
20 | "tmxParser.cpp",
21 | "csvParser.cpp"]
22 |
23 |
def build_extension(extension_name, sources):
    """Build a C++ setuptools Extension from source filenames.

    Args:
        extension_name: name of the resulting extension module.
        sources: list of source filenames, relative to SRC_PATH.

    Returns:
        a configured setuptools.extension.Extension instance.
    """
    qualified_sources = ["{}{}".format(SRC_PATH, source) for source in sources]
    compile_flags = ['--std=c++11', '-O2', '-fomit-frame-pointer', "-g0"] + ouff_mac
    return Extension(name=extension_name,
                     language='c++',
                     sources=qualified_sources,
                     extra_compile_args=compile_flags,
                     undef_macros=["NDEBUG"],
                     extra_link_args=ouff_mac)
31 |
32 | EXTENSION_SOURCES = [('_p2pExtension', ['_p2pExtension.cpp'] + MATRIX_INTERFACE_SOURCES)]
33 |
34 |
35 | EXTENSIONS = [build_extension(extension_name=extension_name, sources=sources) for extension_name, sources in EXTENSION_SOURCES]
36 |
37 | REQUIRED_DEPENDENCIES = ['fiona>=1.7.12',
38 | 'cython>=0.28.2',
39 | 'matplotlib>=2.0.2',
40 | 'jellyfish>=0.5.6',
41 | 'geopandas>=0.3.0',
42 | 'psutil>=5.4.3',
43 | 'pandas>=0.19.2',
44 | 'numpy>=1.15.4',
45 | 'osmnet>=0.1.5',
46 | 'scipy>=1.7.2',
47 | 'geopy>=1.11.0',
48 | 'shapely',
49 | 'tables>=3.4.2',
50 | 'scikit_learn>=0.19.1',
51 | 'atlas>=0.27.0',
52 | 'descartes>=1.1.0',
53 | 'rtree>=0.8.3']
54 |
55 | REQUIRED_DEPENDENCIES += extra_dependency
56 |
57 | SUBMODULE_NAMES = ['spatial_access.p2p',
58 | 'spatial_access.BaseModel',
59 | 'spatial_access.Models',
60 | 'spatial_access.Configs',
61 | 'spatial_access.NetworkInterface',
62 | 'spatial_access.MatrixInterface',
63 | 'spatial_access.SpatialAccessExceptions',
64 | 'spatial_access._parsers']
65 |
66 |
67 | if 'READTHEDOCS' in os.environ:
68 | REQUIRED_DEPENDENCIES = []
69 | EXTENSIONS = []
70 |
def get_property(prop, project):
    """Read a dunder property (e.g. '__version__') from a package's __init__.py.

    Args:
        prop: property name to look up, e.g. '__version__'.
        project: package directory containing the __init__.py
            (previously ignored; the path was hard-coded to 'spatial_access').

    Returns:
        The property's string value.

    Raises:
        RuntimeError: if the property is not defined in the file.
    """
    # Use a context manager so the file handle is closed promptly
    # (the original left the file object unclosed).
    with open(os.path.join(project, '__init__.py')) as init_file:
        contents = init_file.read()
    result = re.search(r'{}\s*=\s*[\'"]([^\'"]*)[\'"]'.format(prop), contents)
    if result is None:
        raise RuntimeError(
            'Property {} not found in {}/__init__.py'.format(prop, project))
    return result.group(1)
74 |
75 | PROJECT_NAME='spatial_access'
76 |
77 | setup(
78 | name=PROJECT_NAME,
79 | author=get_property('__author__', PROJECT_NAME),
80 | url=get_property('__url__', PROJECT_NAME),
81 | author_email='lnoel@uchicago.edu',
82 | version=get_property('__version__', PROJECT_NAME),
83 | ext_modules=EXTENSIONS,
84 | py_modules=SUBMODULE_NAMES,
85 | install_requires=REQUIRED_DEPENDENCIES,
86 | long_description=LONG_DESCRIPTION,
87 | long_description_content_type="text/markdown",
88 | license=get_property('__license__', PROJECT_NAME)
89 | )
90 |
91 |
--------------------------------------------------------------------------------
/spatial_access/Configs.py:
--------------------------------------------------------------------------------
1 | # Logan Noel (github.com/lmnoel)
2 | #
3 | # ©2017-2019, Center for Spatial Data Science
4 |
5 | import pandas as pd
6 |
7 |
class Configs:
    """
    A utility class to abstract the network parameter
    configurations.
    """
    # Speed limits by OSM edge type, in km/hr.
    DEFAULT_SPEED_LIMITS = {"road": 30,
                            "motorway": 96,
                            "motorway_link": 48,
                            "motorway_junction": 48,
                            "trunk": 80,
                            "trunk_link": 40,
                            "primary": 48,
                            "primary_link": 32,
                            "secondary": 40,
                            "secondary_link": 40,
                            "tertiary": 32,
                            "tertiary_link": 32,
                            "residential": 24,
                            "living_street": 16,
                            "service": 16,
                            "track": 32,
                            "pedestrian": 3.2,
                            "services": 3.2,
                            "bus_guideway": 3.2,
                            "path": 8,
                            "cycleway": 16,
                            "footway": 3.2,
                            "bridleway": 3.2,
                            "byway": 3.2,
                            "steps": 0.16,
                            "unclassified": 24,
                            "lane": 16,
                            "opposite_lane": 16,
                            "opposite": 16,
                            "grade1": 16,
                            "grade2": 16,
                            "grade3": 16,
                            "grade4": 16,
                            "grade5": 16,
                            "roundabout": 40}

    # Scale the posted limits by a constant factor to approximate effective
    # travel speeds.
    DEFAULT_SPEED_LIMITS = {key: value * 0.4 for key, value in DEFAULT_SPEED_LIMITS.items()}

    def __init__(self, walk_speed=5,
                 bike_speed=15.5,
                 default_drive_speed=40,
                 walk_node_penalty=0,
                 bike_node_penalty=0,
                 drive_node_penalty=4,
                 speed_limit_dict=None,
                 use_meters=False,
                 disable_area_threshold=False,
                 require_extended_range=False,
                 epsilon=0.05
                 ):
        """
        Args:
            walk_speed: numeric, walking speed (km/hr).
            bike_speed: numeric, biking speed (km/hr).
            default_drive_speed: numeric, driving speed (km/hr) used for edge
                types not present in speed_limit_dict.
            walk_node_penalty: numeric (seconds)
            bike_node_penalty: numeric (seconds)
            drive_node_penalty: numeric (seconds)
            speed_limit_dict: dictionary {edge type (string) : speed in km/hr};
                defaults to a copy of Configs.DEFAULT_SPEED_LIMITS when None.
            use_meters: output will be in meters, not seconds.
            disable_area_threshold: boolean, enable if computation fails due to
                exceeding bounding box area constraint.
            require_extended_range: boolean; stored for downstream consumers
                (NOTE(review): semantics defined outside this module — see p2p).
            epsilon: numeric, factor by which to increase the requested bounding box.
                Increasing epsilon may result in increased accuracy for points
                at the edge of the bounding box, but will increase computation times.
        """
        self.ONE_HOUR = 3600  # seconds
        self.ONE_KM = 1000  # meters
        self.walk_speed = walk_speed  # km/hr
        self.walk_node_penalty = walk_node_penalty  # seconds

        self.default_drive_speed = default_drive_speed  # km/hr
        self.drive_node_penalty = drive_node_penalty  # seconds

        self.bike_speed = bike_speed  # km/hr
        self.bike_node_penalty = bike_node_penalty  # seconds

        self.use_meters = use_meters
        self.disable_area_threshold = disable_area_threshold
        self.require_extended_range = require_extended_range
        self.epsilon = epsilon

        if speed_limit_dict is None:
            # Copy the class-level default so that mutating this instance's
            # dict cannot silently alter the defaults shared by all other
            # instances (the original assigned the shared dict directly).
            self.speed_limit_dict = dict(Configs.DEFAULT_SPEED_LIMITS)
        else:
            self.speed_limit_dict = speed_limit_dict

    def _get_driving_cost_matrix(self):
        """
        Returns: DataFrame of edge unit costs, indexed by edge type.
        """
        return pd.DataFrame.from_dict(self.speed_limit_dict,
                                      orient='index',
                                      columns=['unit_cost'])

    def _get_walk_speed(self):
        """
        Returns: walk speed in meters/second.
        """
        return (self.walk_speed / self.ONE_HOUR) * self.ONE_KM

    def _get_bike_speed(self):
        """
        Returns: bike speed in meters/second.
        """
        return (self.bike_speed / self.ONE_HOUR) * self.ONE_KM

    def _get_default_drive_speed(self):
        """
        Returns: default drive speed in meters/second.
        """
        return (self.default_drive_speed / self.ONE_HOUR) * self.ONE_KM
127 |
--------------------------------------------------------------------------------
/spatial_access/NetworkInterface.py:
--------------------------------------------------------------------------------
1 | # Logan Noel (github.com/lmnoel)
2 | #
3 | # ©2017-2019, Center for Spatial Data Science
4 |
5 | import os
6 | import time
7 | import pandas as pd
8 | from osmnet.load import network_from_bbox
9 | from geopy import distance
10 |
11 | from spatial_access.SpatialAccessExceptions import BoundingBoxTooLargeException
12 | from spatial_access.SpatialAccessExceptions import UnableToConnectException
13 | from spatial_access.SpatialAccessExceptions import SourceNotBuiltException
14 | from spatial_access.SpatialAccessExceptions import ConnectedComponentTrimmingFailed
15 |
16 | import logging
17 | logging.getLogger('osmnet').disabled = True
18 |
19 | try:
20 | import _p2pExtension
21 | except ImportError:
22 | raise SourceNotBuiltException()
23 |
24 |
class NetworkInterface:
    """
    Manages OSM network retrieval for p2p.TransitMatrix.
    """

    def __init__(self, network_type, logger=None, disable_area_threshold=False, local_nodes=None, local_edges=None):
        """
        Args:
            network_type: string, network type (e.g. 'walk', 'bike', 'drive').
            logger: optional, logger.
            disable_area_threshold: boolean, enable if computation fails due to
                exceeding bounding box area constraint.
            local_nodes: optional, preprocessed nodes dataframe in pandana format
            local_edges: optional, preprocessed edges dataframe in pandana format
            see: https://pyrosm.readthedocs.io/en/latest/basics.html#export-to-pandana
        """
        self.logger = logger
        self.network_type = network_type
        self.bbox = None
        self.nodes = local_nodes
        self.edges = local_edges
        # Maximum permitted bounding box area in sq. km; None disables the check.
        self.area_threshold = None if disable_area_threshold else 5000  # km
        assert isinstance(network_type, str)
        self._try_create_cache()

    @staticmethod
    def clear_cache():
        """
        Remove the contents of the NetworkInterface cache.
        """
        import shutil
        if os.path.exists('data/osm_query_cache'):
            shutil.rmtree('data/osm_query_cache')

    @staticmethod
    def _try_create_cache():
        """
        Create the directory for the cache
        if it does not already exist.
        """
        if not os.path.exists('data/'):
            os.makedirs('data/')
        if not os.path.exists('data/osm_query_cache'):
            os.makedirs('data/osm_query_cache')

    def number_of_nodes(self):
        """
        Returns: number of nodes in graph.
        """
        return len(self.nodes)

    def number_of_edges(self):
        """
        Returns: number of edges in graph.
        """
        return len(self.edges)

    def _approximate_bbox_area(self):
        """
        Calculate the approximate area of the
        bounding box in square kilometers.
        Returns: numeric area of the bounding box
        in km squared.
        """
        lat_min, lon_min, lat_max, lon_max = self.bbox
        lower_right_point = (lat_min, lon_max)
        lower_left_point = (lat_min, lon_min)
        upper_left_point = (lat_max, lon_min)
        lower_edge = distance.distance(lower_left_point, lower_right_point).km
        left_edge = distance.distance(lower_left_point, upper_left_point).km
        area = lower_edge * left_edge
        if self.logger:
            self.logger.info('Approx area of bounding box: {:,.2f} sq. km'.format(area))
        return area

    def _get_bbox(self, primary_data, secondary_data,
                  secondary_input, epsilon):
        """
        Determine bounding box for given data.

        Sets self.bbox to [lat_min, lon_min, lat_max, lon_max], padded
        by epsilon on every side.

        Args:
            primary_data: DataFrame of primary points.
            secondary_data: DataFrame of secondary points.
            secondary_input: boolean, true if secondary_data
                was provided.
            epsilon: Safety margin around bounding box.

        Raises:
            BoundingBoxTooLargeException: if area is larger than
                self.area_threshold.

        """
        if secondary_input:
            composite_lon = list(primary_data['lon']) + \
                            list(secondary_data['lon'])
            composite_lat = list(primary_data['lat']) + \
                            list(secondary_data['lat'])
        else:
            composite_lon = list(primary_data['lon'])
            composite_lat = list(primary_data['lat'])

        lat_max = max(composite_lat) + epsilon
        lat_min = min(composite_lat) - epsilon

        lon_max = max(composite_lon) + epsilon
        lon_min = min(composite_lon) - epsilon

        self.bbox = [lat_min, lon_min, lat_max, lon_max]
        if self.area_threshold:
            approx_area = self._approximate_bbox_area()
            if approx_area > self.area_threshold:
                if self.logger:
                    self.logger.error('Supplied coordinates span too large an area')
                    self.logger.error('You can set disable_area_threshold to True if this is intentional')
                raise BoundingBoxTooLargeException(str(approx_area) + "km square")

    def _get_filename(self):
        """
        Returns: cache filename formatted for this request.
        """
        bbox_string = '_'.join([str(coord) for coord in self.bbox])
        return 'data/osm_query_cache/' + self.network_type + bbox_string + '.h5'

    def _network_exists(self):
        """
        Returns: true if a filename matching these
        network parameters is in the cache.
        """
        return os.path.exists(self._get_filename())

    def set_network(self, local_nodes, local_edges):
        """
        Set the network directly from preprocessed nodes/edges dataframes
        and trim it to its largest connected component.

        Args:
            local_nodes: nodes dataframe in pandana format.
            local_edges: edges dataframe in pandana format.
        """
        if self.logger:
            self.logger.info('Set network using local nodes and edges.')
        self.nodes = local_nodes
        self.edges = local_edges
        self._remove_disconnected_components()

    def load_network(self, primary_data, secondary_data,
                     secondary_input, epsilon):
        """
        Attempt to load the nodes and edges tables for
        the current query from the local cache; query OSM
        servers if not.

        Args:
            primary_data: DataFrame of primary points.
            secondary_data: DataFrame of secondary points.
            secondary_input: boolean, true if secondary_data
                was provided.
            epsilon: Safety margin around bounding box.

        Raises:
            AssertionError: argument is not of expected type

        """
        assert isinstance(primary_data, pd.DataFrame)
        assert isinstance(secondary_data, pd.DataFrame) or secondary_data is None
        assert isinstance(secondary_input, bool)
        assert isinstance(epsilon, float) or isinstance(epsilon, int)
        self._try_create_cache()
        self._get_bbox(primary_data, secondary_data,
                       secondary_input, epsilon)

        # Only fetch a network if one was not supplied locally.
        if self.nodes is None and self.edges is None:
            if self._network_exists():
                filename = self._get_filename()
                self.nodes = pd.read_hdf(filename, 'nodes')
                self.edges = pd.read_hdf(filename, 'edges')
                if self.logger:
                    self.logger.debug('Read network from cache: %s', filename)
            else:
                self._request_network()

        # self._remove_disconnected_components()

    def _request_network(self):
        """
        Fetch a street network from OSM for the
        current query and cache it to disk.
        Raises:
            UnableToConnectException: network connection is unavailable.
        """
        try:
            if self.network_type == 'bike':
                osm_bike_filter = '["highway"!~"motor|proposed|construction|abandoned|platform|raceway"]["foot"!~"no"]["bicycle"!~"no"]'
                self.nodes, self.edges = network_from_bbox(lat_min=self.bbox[0], lng_min=self.bbox[1],
                                                           lat_max=self.bbox[2], lng_max=self.bbox[3],
                                                           custom_osm_filter=osm_bike_filter)
            else:
                self.nodes, self.edges = network_from_bbox(
                    lat_min=self.bbox[0], lng_min=self.bbox[1],
                    lat_max=self.bbox[2], lng_max=self.bbox[3],
                    network_type=self.network_type)
            # Drop OSM metadata columns that are not used downstream;
            # 'tunnel' is only present in some query results.
            if self.network_type == 'drive':
                columns_to_drop = ['access', 'hgv', 'lanes', 'maxspeed']
            else:
                columns_to_drop = ['access', 'bridge', 'lanes', 'service']
            if 'tunnel' in self.edges.columns:
                columns_to_drop.append('tunnel')
            self.edges.drop(columns_to_drop, inplace=True, axis=1)
            filename = self._get_filename()
            self.nodes.to_hdf(filename, 'nodes', complevel=5)
            self.edges.to_hdf(filename, 'edges', complevel=5)
            if self.logger:
                self.logger.info('Finished querying osm')
                self.logger.debug('Cached network to %s', filename)
        except BaseException as e:
            request_error = """Error trying to download OSM network.
            Did you reverse lat/long?
            Is your network connection functional?
            """
            if self.logger:
                self.logger.debug(e)
                self.logger.error(request_error)
            raise UnableToConnectException()

    def _get_edges_as_list(self):
        """
        Returns: a list of all edges as tuples (from, to).
        """
        return list(zip(self.edges['from'], self.edges['to']))

    def _get_vertices_as_list(self):
        """
        Returns: a list of all node ids.
        """
        return list(self.nodes['id'])

    def _apply_connected_nodes(self, nodes_to_keep):
        """
        Given a set nodes_to_keep, remove all other
        nodes and edges.
        Args:
            nodes_to_keep: array of node indeces.
        """
        self.nodes = self.nodes[self.nodes['id'].isin(nodes_to_keep)]
        self.edges = self.edges[self.edges['from'].isin(nodes_to_keep) & self.edges['to'].isin(nodes_to_keep)]

    def _remove_disconnected_components(self):
        """
        Remove all nodes and edges that are not
        a part of the largest strongly connected component.
        Raises:
            ConnectedComponentTrimmingFailed: caused by undefined behavior from extension.
        """
        len_edges_before = len(self.edges)
        len_nodes_before = len(self.nodes)
        start_time = time.time()
        try:
            trimmer = _p2pExtension.pyNetworkUtility(self._get_edges_as_list(), self._get_vertices_as_list())
            nodes_of_main_connected_component = trimmer.getConnectedNetworkNodes()
        except BaseException:
            raise ConnectedComponentTrimmingFailed()

        self._apply_connected_nodes(nodes_of_main_connected_component)

        if self.logger:
            edges_diff = len_edges_before - len(self.edges)
            nodes_diff = len_nodes_before - len(self.nodes)
            time_diff = time.time() - start_time
            # Guard against empty inputs, and scale to a true percentage
            # (the original logged raw fractions with a '%' suffix).
            edges_diff_percent = 100.0 * edges_diff / len_edges_before if len_edges_before else 0.0
            # Bug fix: node percentage was previously divided by the edge
            # count rather than the node count.
            nodes_diff_percent = 100.0 * nodes_diff / len_nodes_before if len_nodes_before else 0.0
            self.logger.debug("Removed {}/{} ({:,.2f}%) edges and {}/{} ({:,.2f}%) nodes which were disconnected components in {:,.2f} seconds".format(edges_diff,
                                                                                                                                                      len_edges_before,
                                                                                                                                                      edges_diff_percent,
                                                                                                                                                      nodes_diff,
                                                                                                                                                      len_nodes_before,
                                                                                                                                                      nodes_diff_percent,
                                                                                                                                                      time_diff))
296 |
297 |
--------------------------------------------------------------------------------
/spatial_access/SpatialAccessExceptions.py:
--------------------------------------------------------------------------------
1 | # Logan Noel (github.com/lmnoel)
2 | #
3 | # ©2017-2019, Center for Spatial Data Science
4 |
5 |
class SpatialAccessException(Exception):
    """Common base class for all spatial_access exceptions.

    Args:
        errors: optional string describing the failure; becomes the
            exception message (so ``args == ('',)`` by default, matching
            the original per-class constructors).
    """
    def __init__(self, errors=''):
        super().__init__(errors)


class UnableToConnectException(SpatialAccessException):
    """The OSM network could not be downloaded (see NetworkInterface)."""


class BoundingBoxTooLargeException(SpatialAccessException):
    """The requested bounding box exceeds the area threshold."""


class ConnectedComponentTrimmingFailed(SpatialAccessException):
    """The native extension failed while trimming disconnected components."""


class UnexpectedFileFormatException(SpatialAccessException):
    """A file did not have the expected format."""


class UnexpectedShapeException(SpatialAccessException):
    """Data did not have the expected shape."""


class InvalidIdTypeException(SpatialAccessException):
    """An id was of an invalid type."""


class WriteCSVFailedException(SpatialAccessException):
    """Writing a matrix to .csv failed."""


class WriteTMXFailedException(SpatialAccessException):
    """Writing a matrix to .tmx failed."""


class ReadTMXFailedException(SpatialAccessException):
    """Reading a matrix from .tmx failed."""


class ReadCSVFailedException(SpatialAccessException):
    """Reading a matrix from .csv failed."""


class ReadOTPCSVFailedException(SpatialAccessException):
    """Reading an OTP .csv failed."""


class IndecesNotFoundException(SpatialAccessException):
    """Requested indices were not found."""


class UnableToBuildMatrixException(SpatialAccessException):
    """The transit matrix could not be built."""


class FileNotFoundException(SpatialAccessException):
    """A required file was not found."""


class SourceNotBuiltException(SpatialAccessException):
    """The native _p2pExtension module is not built or not importable."""


class PrimaryDataNotFoundException(SpatialAccessException):
    """Primary input data was not found."""


class SecondaryDataNotFoundException(SpatialAccessException):
    """Secondary input data was not found."""


class ImproperIndecesTypeException(SpatialAccessException):
    """Indices were of an improper type."""


class UnableToParsePrimaryDataException(SpatialAccessException):
    """Primary input data could not be parsed."""


class UnableToParseSecondaryDataException(SpatialAccessException):
    """Secondary input data could not be parsed."""


class UnknownModeException(SpatialAccessException):
    """An unknown network mode was specified."""


class InsufficientDataException(SpatialAccessException):
    """Not enough data was supplied."""


class DuplicateInputException(SpatialAccessException):
    """Duplicate input values were supplied."""


class TransitMatrixNotLoadedException(SpatialAccessException):
    """An operation required a transit matrix that is not loaded."""


class SourceDataNotFoundException(SpatialAccessException):
    """Source data was not found."""


class DestDataNotFoundException(SpatialAccessException):
    """Destination data was not found."""


class SourceDataNotParsableException(SpatialAccessException):
    """Source data could not be parsed."""


class DestDataNotParsableException(SpatialAccessException):
    """Destination data could not be parsed."""


class ShapefileNotFoundException(SpatialAccessException):
    """A shapefile was not found."""


class TooManyCategoriesToPlotException(SpatialAccessException):
    """Too many categories were requested for plotting."""


class SpatialIndexNotMatchedException(SpatialAccessException):
    """A spatial index could not be matched."""


class UnexpectedPlotColumnException(SpatialAccessException):
    """An unexpected column was supplied for plotting."""


class UnrecognizedCategoriesException(SpatialAccessException):
    """Unrecognized categories were supplied."""


class UnrecognizedDecayFunctionException(SpatialAccessException):
    """An unrecognized decay function was specified."""


class UnrecognizedFileTypeException(SpatialAccessException):
    """An unrecognized file type was specified."""


class IncompleteCategoryDictException(SpatialAccessException):
    """A category dictionary was missing required entries."""


class ModelNotAggregatedException(SpatialAccessException):
    """An operation required a model that has not been aggregated."""


class ModelNotAggregatableException(SpatialAccessException):
    """The model does not support aggregation."""


class ModelNotCalculatedException(SpatialAccessException):
    """An operation required a model that has not been calculated."""


class UnexpectedNormalizeTypeException(SpatialAccessException):
    """An unexpected normalize type was specified."""


class UnexpectedNormalizeColumnsException(SpatialAccessException):
    """Unexpected columns were supplied for normalization."""


class UnexpectedEmptyColumnException(SpatialAccessException):
    """A column was unexpectedly empty."""


class UnexpectedAggregationTypeException(SpatialAccessException):
    """An unexpected aggregation type was specified."""


class AggregateOutputTypeNotExpectedException(SpatialAccessException):
    """The aggregation output type was not expected."""
224 |
225 |
--------------------------------------------------------------------------------
/spatial_access/__init__.py:
--------------------------------------------------------------------------------
# Package metadata for spatial_access.
__version__ = '1.0.2'
__author__ = 'Logan Noel (lmnoel), Xun Li'
__url__ = 'https://github.com/GeoDaCenter/spatial_access'
__license__ = 'BSD'
--------------------------------------------------------------------------------
/spatial_access/_parsers.py:
--------------------------------------------------------------------------------
1 | # Logan Noel (github.com/lmnoel)
2 | #
3 | # ©2017-2019, Center for Spatial Data Science
4 |
5 |
class BaseParser:
    """Identity parser: default no-op encode/decode hooks.

    Subclasses override individual hooks when one or both id axes are
    strings that must be round-tripped through UTF-8 bytes; here every
    id-related hook simply returns its argument unchanged.
    """

    @staticmethod
    def encode_source_id(source_id):
        """Pass a source id through unchanged."""
        return source_id

    @staticmethod
    def encode_dest_id(dest_id):
        """Pass a destination id through unchanged."""
        return dest_id

    @staticmethod
    def decode_source_id(source_id):
        """Pass a source id through unchanged."""
        return source_id

    @staticmethod
    def decode_dest_id(dest_id):
        """Pass a destination id through unchanged."""
        return dest_id

    @staticmethod
    def decode_vector_source_ids(vector):
        """Pass a vector of source ids through unchanged."""
        return vector

    @staticmethod
    def decode_vector_dest_ids(vector):
        """Pass a vector of destination ids through unchanged."""
        return vector

    @staticmethod
    def encode_vector_source_ids(vector):
        """Pass a vector of source ids through unchanged."""
        return vector

    @staticmethod
    def encode_vector_dest_ids(vector):
        """Pass a vector of destination ids through unchanged."""
        return vector

    @staticmethod
    def encode_filename(filename):
        """Encode a filename string to UTF-8 bytes."""
        return filename.encode('utf-8')

    @staticmethod
    def encode_category(category):
        """Encode a category string to UTF-8 bytes."""
        return category.encode('utf-8')

    @staticmethod
    def decode_source_to_dest_array_dict(array_dict):
        """Pass a source->dest-array mapping through unchanged."""
        return array_dict

    @staticmethod
    def decode_dest_to_source_array_dict(array_dict):
        """Pass a dest->source-array mapping through unchanged."""
        return array_dict

    @staticmethod
    def decode_vector_of_dest_tuples(tuple_array):
        """Pass a vector of (dest id, value) tuples through unchanged."""
        return tuple_array

    @staticmethod
    def decode_vector_of_source_tuples(tuple_array):
        """Pass a vector of (source id, value) tuples through unchanged."""
        return tuple_array
63 |
64 |
class IntStringParser(BaseParser):
    """Parser for integer source ids and string destination ids.

    Destination ids are stored as UTF-8 bytes on the C++ side and must be
    encoded on the way in and decoded on the way out; source ids pass
    through unchanged (inherited from BaseParser).
    """

    @staticmethod
    def encode_dest_id(dest_id):
        """Encode a string destination id to UTF-8 bytes."""
        return dest_id.encode('utf-8')

    @staticmethod
    def decode_dest_id(dest_id):
        """Decode a bytes destination id back to str."""
        return dest_id.decode()

    @staticmethod
    def decode_vector_dest_ids(vector):
        """Decode a vector of bytes destination ids back to str.

        BUGFIX: this method was previously named ``decode_vector_dest_id``
        (singular), so it never overrode ``BaseParser.decode_vector_dest_ids``
        and callers of the plural name received undecoded bytes.
        """
        return [item.decode() for item in vector]

    # Deprecated alias kept so any caller of the old (misspelled) name
    # continues to work.
    decode_vector_dest_id = decode_vector_dest_ids

    @staticmethod
    def encode_vector_dest_ids(vector):
        """Encode a vector of string destination ids to UTF-8 bytes."""
        return [item.encode('utf-8') for item in vector]

    @staticmethod
    def decode_source_to_dest_array_dict(array_dict):
        """Decode the bytes destination ids in each value list; keys pass through."""
        return {key: [item.decode() for item in value] for key, value in array_dict.items()}

    @staticmethod
    def decode_dest_to_source_array_dict(array_dict):
        """Decode the bytes destination-id keys; value lists pass through."""
        return {key.decode(): value for key, value in array_dict.items()}

    @staticmethod
    def decode_vector_of_dest_tuples(tuple_array):
        """Decode the bytes destination id in each (dest id, value) tuple."""
        return [(a.decode(), b) for a, b in tuple_array]
94 |
95 |
class StringIntParser(BaseParser):
    """Parser for string source ids and integer destination ids.

    Source ids round-trip through UTF-8 bytes; destination ids pass
    through unchanged (inherited from BaseParser).
    """

    @staticmethod
    def encode_source_id(source_id):
        """Encode a string source id to UTF-8 bytes."""
        return str.encode(source_id, 'utf-8')

    @staticmethod
    def decode_source_id(source_id):
        """Decode a bytes source id back to str."""
        return bytes.decode(source_id)

    @staticmethod
    def decode_vector_source_ids(vector):
        """Decode a vector of bytes source ids back to str."""
        return list(map(bytes.decode, vector))

    @staticmethod
    def encode_vector_source_ids(vector):
        """Encode a vector of string source ids to UTF-8 bytes."""
        return [element.encode('utf-8') for element in vector]

    @staticmethod
    def decode_source_to_dest_array_dict(array_dict):
        """Decode the bytes source-id keys; value lists pass through."""
        return {raw_key.decode(): values for raw_key, values in array_dict.items()}

    @staticmethod
    def decode_dest_to_source_array_dict(array_dict):
        """Decode the bytes source ids in each value list; keys pass through."""
        return {key: list(map(bytes.decode, values)) for key, values in array_dict.items()}

    @staticmethod
    def decode_vector_of_source_tuples(tuple_array):
        """Decode the bytes source id in each (source id, value) tuple."""
        return [(source.decode(), value) for source, value in tuple_array]
125 |
126 |
class StringStringParser(BaseParser):
    """Parser for string source ids AND string destination ids.

    Both axes round-trip through UTF-8 bytes, so every encode/decode hook
    is overridden.
    """

    @staticmethod
    def encode_source_id(source_id):
        """Encode a string source id to UTF-8 bytes."""
        return str.encode(source_id, 'utf-8')

    @staticmethod
    def encode_dest_id(dest_id):
        """Encode a string destination id to UTF-8 bytes."""
        return str.encode(dest_id, 'utf-8')

    @staticmethod
    def decode_source_id(source_id):
        """Decode a bytes source id back to str."""
        return bytes.decode(source_id)

    @staticmethod
    def decode_dest_id(dest_id):
        """Decode a bytes destination id back to str."""
        return bytes.decode(dest_id)

    @staticmethod
    def decode_vector_source_ids(vector):
        """Decode a vector of bytes source ids back to str."""
        return list(map(bytes.decode, vector))

    @staticmethod
    def decode_vector_dest_ids(vector):
        """Decode a vector of bytes destination ids back to str."""
        return list(map(bytes.decode, vector))

    @staticmethod
    def encode_vector_dest_ids(vector):
        """Encode a vector of string destination ids to UTF-8 bytes."""
        return [element.encode('utf-8') for element in vector]

    @staticmethod
    def encode_vector_source_ids(vector):
        """Encode a vector of string source ids to UTF-8 bytes."""
        return [element.encode('utf-8') for element in vector]

    @staticmethod
    def decode_source_to_dest_array_dict(array_dict):
        """Decode bytes keys and every bytes element of each value list."""
        return {raw_key.decode(): list(map(bytes.decode, values))
                for raw_key, values in array_dict.items()}

    @staticmethod
    def decode_dest_to_source_array_dict(array_dict):
        """Decode bytes keys and every bytes element of each value list."""
        return {raw_key.decode(): list(map(bytes.decode, values))
                for raw_key, values in array_dict.items()}

    @staticmethod
    def decode_vector_of_dest_tuples(tuple_array):
        """Decode the bytes destination id in each (dest id, value) tuple."""
        return [(dest.decode(), value) for dest, value in tuple_array]

    @staticmethod
    def decode_vector_of_source_tuples(tuple_array):
        """Decode the bytes source id in each (source id, value) tuple."""
        return [(source.decode(), value) for source, value in tuple_array]
--------------------------------------------------------------------------------
/spatial_access/src/.gitignore:
--------------------------------------------------------------------------------
1 | cmake-build-debug/
2 | Makefile
3 | .idea/
4 | _p2pExtension.*
--------------------------------------------------------------------------------
/spatial_access/src/Serializer.cpp:
--------------------------------------------------------------------------------
1 | // Logan Noel (github.com/lmnoel)
2 | //
3 | // ©2017-2019, Center for Spatial Data Science
4 |
5 | #include
6 |
7 | #include "include/Serializer.h"
8 |
// NOTE(review): template argument lists appear to have been stripped from
// this copy (e.g. writeNumericType/readNumericType calls below lack their
// <unsigned short> arguments) — confirm against the upstream source.

// Open the output file in binary write mode.
Serializer::Serializer(const std::string &filename)
{
    output.open(filename, std::ios::binary | std::ios::out);
}

// Close the stream on destruction.
Serializer::~Serializer()
{
    output.close();
}

// Throw (after closing the stream) if the last write failed.
void Serializer::checkStreamIsGood()
{
    if (!output.good()) {
        output.close();
        throw std::runtime_error("SerializerError: Serialization failed");
    }
}

// Bools are persisted as an unsigned short (0 or 1) for a fixed on-disk width.
void Serializer::writeBool(bool value)
{
    this->writeNumericType((unsigned short) value);
}


// Open the input file in binary read mode.
Deserializer::Deserializer(const std::string &filename)
{
    input.open(filename, std::ios::binary | std::ios::in);
}

// Close the stream on destruction.
Deserializer::~Deserializer()
{
    input.close();
}

// Throw (after closing the stream) if the last read failed.
void Deserializer::checkStreamIsGood()
{
    if (!input.good()) {
        input.close();
        throw std::runtime_error("DeserializerError: Deserialization failed");
    }
}

// Inverse of Serializer::writeBool: read the unsigned short and narrow to bool.
bool Deserializer::readBool()
{
    return (bool) this->readNumericType();
}
55 |
56 |
--------------------------------------------------------------------------------
/spatial_access/src/csvParser.cpp:
--------------------------------------------------------------------------------
1 | // Logan Noel (github.com/lmnoel)
2 | //
3 | // ©2017-2019, Center for Spatial Data Science
4 |
5 |
6 | #include "include/csvParser.h"
7 |
8 |
// True if the CSV field encodes the sentinel "undefined" value (-1).
// NOTE(review): this is a substring search, so any field merely *containing*
// "-1" (e.g. "3-1") is treated as undefined — confirm an exact comparison
// is not intended. The substring form does tolerate trailing '\r' on CRLF files.
bool isUndefined(const std::string& item)
{
    return item.find("-1") != item.npos;
}

// NOTE(review): the explicit specializations below appear to have had their
// template arguments stripped in this copy (csvParser<T>::parse,
// std::numeric_limits<T>::max()) — confirm against upstream.

// String fields pass through verbatim.
template <>
const std::string csvParser::parse(const std::string& item)
{
    return item;
}

// Undefined fields map to the type's max value (the dataFrame UNDEFINED sentinel).
template <>
const unsigned short csvParser::parse(const std::string& item)
{
    if (isUndefined(item)) {
        return std::numeric_limits::max();
    }
    return (unsigned short) std::stoul(item);
}

template <>
const unsigned int csvParser::parse(const std::string& item)
{;
    if (isUndefined(item)) {
        return std::numeric_limits::max();
    }
    return (unsigned int) std::stoul(item);
}

template <>
const unsigned long csvParser::parse(const std::string& item)
{
    if (isUndefined(item)) {
        return std::numeric_limits::max();
    }
    return std::stoul(item);
}
46 |
--------------------------------------------------------------------------------
/spatial_access/src/generate_extension_source.py:
--------------------------------------------------------------------------------
1 | from jinja2 import Template
2 | import sys
3 |
# Source directory may be supplied as the first CLI argument; defaults to
# the current directory.
if len(sys.argv) > 1:
    SRC_PATH = sys.argv[1]
else:
    SRC_PATH = "./"

# Header prepended to the generated file so nobody edits it by hand.
EDIT_WARNING = """# WARNING: This file is automatically generated.
# Update pyx_src/static.pyx and pyx_src/dynamic to change."""

# Candidate id (row/column label) types for the generated Cython classes.
# 'type_name_short' feeds the class-name suffix (e.g. IxSxUS).
ID_TYPES = [{"type_name":"ulong",
             "type_name_full":"unsigned long int",
             "type_name_short":"I"},
            {"type_name": "string",
             "type_name_full": "string",
             "type_name_short": "S"}
            ]

# Candidate matrix value types for the generated Cython classes.
VALUE_TYPES = [{"type_name":"ushort",
                "type_name_full":"unsigned short int",
                "type_name_short":"US"},
               {"type_name": "uint",
                "type_name_full": "unsigned int",
                "type_name_short": "UI"}]
26 |
27 |
def build_param_dict(row_id_type, col_id_type, value_type):
    """Build the template-parameter dict for one generated matrix class.

    Each argument is a type-descriptor dict with keys 'type_name',
    'type_name_full' and 'type_name_short'. Returns a dict with the
    generated class names ('transitMatrix<suffix>' / 'pyTransitMatrix<suffix>',
    where the suffix is '<row>x<col>x<value>' short names) plus the row,
    column and value type names used inside the template.
    """
    suffix = '{}x{}x{}'.format(row_id_type['type_name_short'],
                               col_id_type['type_name_short'],
                               value_type['type_name_short'])
    return {
        'class_name': 'transitMatrix' + suffix,
        'py_class_name': 'pyTransitMatrix' + suffix,
        'row_type': row_id_type['type_name'],
        'row_type_full': row_id_type['type_name_full'],
        'col_type': col_id_type['type_name'],
        'col_type_full': col_id_type['type_name_full'],
        'value_type': value_type['type_name'],
        'value_type_full': value_type['type_name_full'],
    }
47 |
48 |
# Emit the combined Cython source: the verbatim static section followed by
# one rendered copy of the dynamic template per (row, col, value) type combo.
with open(SRC_PATH + '_p2pExtension.pyx', "w+") as target:

    target.write(EDIT_WARNING)

    target.write("\n\n# Static:\n\n")

    # write static files
    with open(SRC_PATH + 'pyx_src/static.pyx') as static_file:
        static_source = static_file.read()
        target.write(static_source)

    # write dynamic files
    target.write("\n\n# Dynamic Templates: \n\n")
    with open(SRC_PATH + 'pyx_src/dynamic.pyx') as dynamic_file:
        dynamic_source = dynamic_file.read()
        dynamic_template = Template(dynamic_source)

    # Cartesian product: every row-id type x col-id type x value type
    # gets its own specialization of the dynamic template.
    for row_id_type in ID_TYPES:
        for col_id_type in ID_TYPES:
            for value_type in VALUE_TYPES:
                target.write("\n\n")
                params = build_param_dict(row_id_type, col_id_type, value_type)
                target.write(dynamic_template.render(**params))
72 |
--------------------------------------------------------------------------------
/spatial_access/src/include/Graph.h:
--------------------------------------------------------------------------------
1 | // Logan Noel (github.com/lmnoel)
2 | //
3 | // ©2017-2019, Center for Spatial Data Science
4 |
5 | #pragma once
6 |
7 | #include
8 | #include
9 | #include
10 |
11 | typedef unsigned long int network_loc;
12 |
// Adjacency-list graph with edge weights of value_type.
// NOTE(review): template parameter lists appear stripped in this copy
// (bare 'template', 'std::vector>>') — confirm against upstream.
template
class Graph
{
public:
    Graph()= default;
    // Number of vertices; set by initializeGraph.
    unsigned long int vertices;
    // neighbors[v] holds (destination, weight) pairs for edges leaving v.
    std::vector>> neighbors;
    // Size the adjacency list for 'vertices' nodes (all initially edgeless).
    void initializeGraph(unsigned long int vertices)
    {
        std::vector> value;
        this->neighbors.assign(vertices, value);
        this->vertices = vertices;
    }

    /* Adds a directed edge src -> dest.
       NOTE(review): the original comment said "undirected graph", but only
       the src -> dest direction is inserted here — confirm callers add the
       reverse edge themselves when the network is undirected. */
    void addEdge(network_loc src, network_loc dest, value_type weight)
    {
        try
        {
            // at() bounds-checks src; out-of-range becomes the runtime_error below.
            this->neighbors.at(src).push_back(std::make_pair(dest, weight));
        }
        catch (...)
        {
            throw std::runtime_error("edge incompatible with declared graph structure");
        }
    }

};
--------------------------------------------------------------------------------
/spatial_access/src/include/Serializer.h:
--------------------------------------------------------------------------------
1 | // Logan Noel (github.com/lmnoel)
2 | //
3 | // ©2017-2019, Center for Spatial Data Science
4 |
5 | #pragma once
6 |
7 | #include
8 | #include
9 | #include
10 | #include
11 |
// Binary file writer: raw little-or-big-endian-as-host numeric dumps plus
// length-prefixed vectors. Throws std::runtime_error on any stream failure.
// NOTE(review): template parameter/argument lists appear stripped in this
// copy ('template void ...', 'std::vector&') — confirm against upstream.
class Serializer {
public:
    Serializer(const std::string &filename);
    ~Serializer();

    // Write one numeric value as raw bytes (host byte order / layout).
    template void writeNumericType(T value)
    {
        output.write ( reinterpret_cast(&value),sizeof(T));
        checkStreamIsGood();
    }
    // Write a vector as: element count, then the raw element bytes.
    // NOTE(review): &value[0] is undefined for an empty vector — confirm
    // callers never serialize empty vectors.
    template void writeVector(const std::vector& value)
    {
        unsigned long int vec_size = value.size();
        writeNumericType(vec_size);
        output.write((char *) &value[0], vec_size * sizeof(T));
        checkStreamIsGood();
    }

    // Overload for vectors of strings: count, then (length, bytes) per string.
    void writeVector(const std::vector& value)
    {
        typename std::vector::size_type size = value.size();

        writeNumericType(size);

        for (typename std::vector::size_type i = 0; i < size; ++i)
        {
            typename std::vector::size_type element_size = value[i].size();
            output.write((char*)&element_size, sizeof(element_size));
            output.write(&value[i][0], element_size);
        }
        checkStreamIsGood();
    }

    // Write a 2D vector as: row count, then each row via writeVector.
    template void write2DVector(const std::vector>& value)
    {
        unsigned long int vec_size = value.size();
        writeNumericType(vec_size);
        for (const auto &element : value) {
            writeVector(element);
        }
        checkStreamIsGood();
    }
    // Bool stored as unsigned short; see Serializer.cpp.
    void writeBool(bool value);
private:
    std::ofstream output;
    // Throws (and closes the stream) if the last operation failed.
    void checkStreamIsGood();
};
59 |
// Binary file reader: exact inverse of Serializer's on-disk layout.
// NOTE(review): template parameter/argument lists appear stripped in this
// copy — confirm against upstream.
class Deserializer {
public:
    Deserializer(const std::string &filename);
    ~Deserializer();

    // Read one numeric value written by Serializer::writeNumericType.
    template T readNumericType()
    {
        T value;
        input.read( reinterpret_cast(&value), sizeof(T));
        checkStreamIsGood();
        return value;
    }
    // Read a length-prefixed vector written by Serializer::writeVector.
    template void readVector(std::vector& value)
    {
        auto vec_size = readNumericType();

        value.assign(vec_size, 0);
        input.read(reinterpret_cast(&value[0]), vec_size*sizeof(T));
        checkStreamIsGood();
    }

    // Overload for vectors of strings: count, then (length, bytes) per string.
    void readVector(std::vector& value)
    {
        typename std::vector::size_type size = 0;
        input.read((char*)&size, sizeof(size));
        value.resize(size);
        checkStreamIsGood();
        for (typename std::vector::size_type i = 0; i < size; ++i)
        {
            typename std::vector::size_type element_size = 0;
            input.read((char*)&element_size, sizeof(element_size));
            value[i].resize(element_size);
            input.read(&value[i][0], element_size);
        }
        checkStreamIsGood();
    }

    // Read a 2D vector written by Serializer::write2DVector, appending rows.
    template void read2DVector(std::vector>& value)
    {
        auto vec_size = readNumericType();
        for (unsigned long int i = 0; i < vec_size; i++)
        {

            std::vector element;
            readVector(element);
            value.push_back(element);
        }
        checkStreamIsGood();
    }

    // Inverse of Serializer::writeBool.
    bool readBool();
private:
    std::ifstream input;
    // Throws (and closes the stream) if the last operation failed.
    void checkStreamIsGood();
};
115 |
--------------------------------------------------------------------------------
/spatial_access/src/include/csvParser.h:
--------------------------------------------------------------------------------
1 | // Logan Noel (github.com/lmnoel)
2 | //
3 | // ©2017-2019, Center for Spatial Data Science
4 |
5 | #pragma once
6 |
7 | #include
8 | #include
9 | #include
10 | #include
11 | #include
12 |
// Line-oriented CSV reader over a shared ifstream; parses each field with a
// per-type parse() specialization (see csvParser.cpp).
// NOTE(review): template parameter lists appear stripped in this copy —
// confirm against upstream.
template
class csvParser {
private:
    // Not owned: several parsers share one stream positioned by readLine calls.
    std::ifstream& sharedFile;
public:
    csvParser(std::ifstream& sharedFile) : sharedFile(sharedFile) {}
    // Read the next line and append its parsed fields to 'row'.
    void readLine(std::vector& row)
    {
        std::string line;
        std::string item;

        getline(sharedFile, line);
        std::istringstream stream(line);

        // First field is the row label — deliberately discarded here
        // (the caller reads labels separately).
        getline(stream, item, ',');

        while (getline(stream, item, ','))
        {
            row.push_back(parse(item));
        }
    }

    // Convert one CSV field to T; specialized per value type in csvParser.cpp.
    static const T parse(const std::string& item);


};
39 |
--------------------------------------------------------------------------------
/spatial_access/src/include/dataFrame.h:
--------------------------------------------------------------------------------
1 | // Logan Noel (github.com/lmnoel)
2 | //
3 | // ©2017-2019, Center for Spatial Data Science
4 |
5 | #pragma once
6 |
7 | #include
8 | #include
9 | #include
10 | #include
11 | #include
12 | #include
13 | #include
14 | #include
15 | #include "Serializer.h"
16 | #include "tmxParser.h"
17 | #include "csvParser.h"
18 | #include "otpCSV.h"
19 |
20 | #define TMX_VERSION (2)
21 |
22 | /* a pandas-like dataFrame */
/* a pandas-like dataFrame: labeled 2D matrix of travel-time values.
   When isCompressible, only the upper triangle (incl. diagonal) of a square
   symmetric matrix is stored in dataset[0]; otherwise dataset holds one
   vector per row.
   NOTE(review): template parameter/argument lists appear stripped in this
   copy ('template', 'std::vector>', bare 'std::unordered_map') — confirm
   against upstream. */
template
class dataFrame {
public:
    // Sentinel for "no value": the value type's maximum.
    static constexpr value_type UNDEFINED = std::numeric_limits::max();
    // Row-major values; a single flattened triangle when isCompressible.
    std::vector> dataset;
    bool isCompressible;
    bool isSymmetric;
    unsigned long int rows;
    unsigned long int cols;
    std::vector rowIds;
    std::vector colIds; // TODO: eliminate redundant labels if symmetric
    // id -> positional index maps (inverse of rowIds/colIds).
    std::unordered_map rowIdsToLoc;
    std::unordered_map colIdsToLoc;
    // Total stored cell count (triangle size when compressed).
    unsigned long int dataset_size;

private:
    // Rebuild rowIdsToLoc from rowIds.
    void indexRows()
    {
        for (unsigned long int row_loc = 0; row_loc < rows; row_loc++)
        {
            this->rowIdsToLoc.emplace(std::make_pair(rowIds.at(row_loc), row_loc));
        }
    }

    // Rebuild colIdsToLoc from colIds.
    void indexCols()
    {
        for (unsigned long int col_loc = 0; col_loc < cols; col_loc++)
        {
            this->colIdsToLoc.emplace(std::make_pair(colIds.at(col_loc), col_loc));
        }
    }

    // Compute dataset_size; compressed storage keeps the upper triangle only.
    // (Method name typo 'Datatset' preserved — internal but widely called.)
    void initializeDatatsetSize()
    {
        if (isCompressible) {
            dataset_size = (rows * (rows + 1)) / 2;
        }
        else {
            dataset_size = rows * cols;
        }
    }

public:
    // Load an OpenTripPlanner-style CSV (long format: row, col, value per
    // record) into a dense uncompressed matrix; unseen cells stay UNDEFINED.
    void readOTPCSV(const std::string& filename)
    {
        isCompressible = false;
        isSymmetric = false;
        otpCSVReader reader(filename);
        auto reader_row_labels = reader.row_labels;
        auto reader_col_labels = reader.col_labels;

        // Deduplicate the label columns to derive the axis ids.
        std::unordered_set unique_row_labels_set(reader_row_labels.begin(), reader_row_labels.end());
        std::unordered_set unique_col_labels_set(reader_col_labels.begin(), reader_col_labels.end());

        rowIds.assign(unique_row_labels_set.begin(), unique_row_labels_set.end());
        colIds.assign(unique_col_labels_set.begin(), unique_col_labels_set.end());
        this->rows = rowIds.size();
        this->cols = colIds.size();
        indexRows();
        indexCols();
        initializeDatatsetSize();

        for (unsigned int row_loc = 0; row_loc < rows; row_loc++)
        {
            std::vector data(cols, UNDEFINED);
            dataset.push_back(data);
        }
        for (unsigned long int i = 0; i < reader.data.size(); i++)
        {
            setValueById(reader_row_labels.at(i), reader_col_labels.at(i), reader.data.at(i));
        }

    }

    // Methods
    dataFrame() = default;
    // Allocate storage; 'cols' is ignored (forced equal to rows) when compressed.
    dataFrame(bool isCompressible, bool isSymmetric, unsigned long int rows, unsigned long int cols)
    {
        this->isCompressible = isCompressible;
        this->isSymmetric = isSymmetric;
        this->rows = rows;
        if (isCompressible)
        {
            this->cols = rows;
            initializeDatatsetSize();
            std::vector data(dataset_size, UNDEFINED);
            dataset.push_back(data);
        }
        else
        {
            this->cols = cols;
            initializeDatatsetSize();
            for (unsigned int row_loc = 0; row_loc < rows; row_loc++)
            {
                std::vector data(cols, UNDEFINED);
                dataset.push_back(data);
            }
        }

    }

    // Test helper: install ids and row data wholesale.
    void setMockDataFrame(const std::vector>& dataset,
                          const std::vector& row_ids,
                          const std::vector& col_ids)
    {
        setRowIds(row_ids);
        setColIds(col_ids);
        for (unsigned long int i = 0; i < row_ids.size(); i++)
        {
            setRowByRowLoc(dataset.at(i), i);
        }

    }

    // Map an (upper-triangle) row/col pair to its flat index in dataset[0].
    // Requires row_loc <= col_loc; callers swap first via isUnderDiagonal.
    unsigned long int
    compressedEquivalentLoc(unsigned long int row_loc, unsigned long int col_loc) const
    {
        unsigned long int row_delta = rows - row_loc;
        return dataset_size - row_delta * (row_delta + 1) / 2 + col_loc - row_loc;
    }

    // Getters/Setters

    // Value at positional (row, col); transparently mirrors across the
    // diagonal when compressed.
    value_type
    getValueByLoc(unsigned long int row_loc, unsigned long int col_loc) const
    {
        if (isCompressible)
        {
            unsigned long int index;
            if (isUnderDiagonal(row_loc, col_loc))
            {
                index = compressedEquivalentLoc(col_loc, row_loc);
            } else
            {
                index = compressedEquivalentLoc(row_loc, col_loc);
            }
            return dataset.at(0).at(index);
        }
        return dataset.at(row_loc).at(col_loc);
    }


    // Value for a (row id, col id) pair.
    // NOTE(review): when either id is unknown this silently returns cell
    // (0, 0) rather than signalling an error — confirm this fallback is
    // intentional; it can mask bad ids with a plausible-looking value.
    value_type
    getValueById(const row_label_type& row_id, const col_label_type& col_id) const
    {
        if (rowIdsToLoc.find(row_id) == rowIdsToLoc.end() || colIdsToLoc.find(col_id) == colIdsToLoc.end()) {
            return getValueByLoc(0, 0);
        }
        unsigned long int row_loc = rowIdsToLoc.at(row_id);
        unsigned long int col_loc = colIdsToLoc.at(col_id);
        return getValueByLoc(row_loc, col_loc);
    }

    // Set the value for a (row id, col id) pair; throws (via at()) on
    // unknown ids, unlike getValueById above.
    void
    setValueById(const row_label_type& row_id, const col_label_type& col_id, value_type value)
    {
        unsigned long int row_loc = rowIdsToLoc.at(row_id);
        unsigned long int col_loc = colIdsToLoc.at(col_id);
        setValueByLoc(row_loc, col_loc, value);
    }


    // All (col id, value) pairs for a row id, optionally sorted ascending by
    // value. Returns an empty vector for an unknown row id.
    const std::vector>
    getValuesByRowId(const row_label_type& row_id, bool sort) const
    {
        std::vector> returnValue;


        if (rowIdsToLoc.find(row_id) == rowIdsToLoc.end()) {
            return returnValue;
        }

        unsigned long int row_loc = rowIdsToLoc.at(row_id);
        for (unsigned long int col_loc = 0; col_loc < cols; col_loc++)
        {
            returnValue.push_back(std::make_pair(colIds.at(col_loc), getValueByLoc(row_loc, col_loc)));
        }
        if (sort)
        {
            std::sort(returnValue.begin(), returnValue.end(), [](std::pair &left, std::pair &right) {
                return left.second < right.second;
            });
        }
        return returnValue;
    }


    // All (row id, value) pairs for a col id, optionally sorted ascending by
    // value. Returns an empty vector for an unknown col id.
    const std::vector>
    getValuesByColId(const col_label_type& col_id, bool sort) const
    {
        std::vector> returnValue;

        if (colIdsToLoc.find(col_id) == colIdsToLoc.end()) {
            return returnValue;
        }

        unsigned long int col_loc = colIdsToLoc.at(col_id);
        for (unsigned long int row_loc = 0; row_loc < rows; row_loc++)
        {
            returnValue.push_back(std::make_pair(rowIds.at(row_loc), getValueByLoc(row_loc, col_loc)));
        }
        if (sort)
        {
            std::sort(returnValue.begin(), returnValue.end(), [](std::pair &left, std::pair &right) {
                return left.second < right.second;
            });
        }
        return returnValue;
    }


    // Row ids, in positional order.
    const std::vector&
    getRowIds() const
    {
        return rowIds;
    }


    // Column ids, in positional order.
    const std::vector&
    getColIds() const
    {
        return colIds;
    }


    // Row id at a positional index (throws on out-of-range).
    const row_label_type&
    getRowIdForLoc(unsigned long int row_loc) const
    {
        return rowIds.at(row_loc);
    }


    // Column id at a positional index (throws on out-of-range).
    const col_label_type&
    getColIdForLoc(unsigned long int col_loc) const
    {
        return colIds.at(col_loc);
    }


    // Positional index of a row id (throws on unknown id).
    unsigned long int
    getRowLocForId(const row_label_type& row_id) const
    {
        return rowIdsToLoc.at(row_id);
    }


    // Positional index of a col id (throws on unknown id).
    unsigned long int
    getColLocForId(const col_label_type& col_id) const
    {
        return colIdsToLoc.at(col_id);
    }


    // Set the value at positional (row, col); mirrors across the diagonal
    // when compressed (symmetric writes land in the upper triangle).
    void
    setValueByLoc(unsigned long int row_loc, unsigned long int col_loc, value_type value)
    {
        if (isCompressible)
        {
            unsigned long int index;
            if (isUnderDiagonal(row_loc, col_loc))
            {
                index = compressedEquivalentLoc(col_loc, row_loc);
            } else
            {
                index = compressedEquivalentLoc(row_loc, col_loc);
            }
            dataset.at(0).at(index) = value;
            return;
        }
        dataset.at(row_loc).at(col_loc) = value;
    }


    // Replace one row's values. For compressed storage the data is copied
    // starting at the diagonal cell of that row.
    // NOTE(review): the bounds check uses '>' although valid locs are
    // 0..rows-1 — 'source_loc == rows' slips through to at()/copy; confirm
    // whether '>=' was intended.
    void
    setRowByRowLoc(const std::vector &row_data, unsigned long int source_loc)
    {
        if (source_loc > rows)
        {
            throw std::runtime_error("row loc exceeds index of dataframe");
        }
        if (!isCompressible)
        {

            this->dataset.at(source_loc) = row_data;

        }
        else
        {
            unsigned long int left_index = this->compressedEquivalentLoc(source_loc, source_loc);
            std::copy(row_data.begin(), row_data.end(), this->dataset.at(0).begin() + left_index);

        }
    }

    // Install row ids and rebuild the row index.
    void
    setRowIds(const std::vector& row_ids)
    {
        this->rowIds = row_ids;
        indexRows();
    }


    // Install col ids and rebuild the col index.
    void
    setColIds(const std::vector& col_ids)
    {
        this->colIds = col_ids;
        indexCols();
    }


    // Append a row id and return its new positional index.
    unsigned long int
    addToRowIndex(const row_label_type& row_id)
    {
        unsigned long int index = rowIds.size();
        rowIds.push_back(row_id);
        rowIdsToLoc.emplace(std::make_pair(row_id, index));
        return index;
    }


    // Append a col id and return its new positional index.
    unsigned long int
    addToColIndex(const col_label_type& col_id)
    {
        unsigned long int index = colIds.size();
        colIds.push_back(col_id);
        colIdsToLoc.emplace(std::make_pair(col_id, index));
        return index;
    }



    // Input/Output:

    // Write the matrix as labeled CSV (UNDEFINED cells emitted as -1).
    bool
    writeCSV(const std::string &outfile) const
    {
        std::ofstream Ofile;
        Ofile.open(outfile);
        if (Ofile.fail()) {
            throw std::runtime_error("Could not open output file");
        }
        writeToStream(Ofile);
        Ofile.close();
        return true;
    }

    // Dump the matrix to stdout in the same CSV layout.
    void
    printDataFrame() const
    {
        writeToStream(std::cout);
    }

    // Load a labeled CSV (first row col ids, first column row ids) into an
    // uncompressed matrix.
    void readCSV(const std::string& infile)
    {
        isCompressible = false;
        isSymmetric = false;
        std::ifstream fileIN;
        fileIN.open(infile);
        if (fileIN.fail())
        {
            throw std::runtime_error("unable to read file");
        }

        csvParser rowReader(fileIN);
        csvParser colReader(fileIN);
        csvParser valueReader(fileIN);

        // Header line: column ids (leading empty label cell skipped by readLine).
        colReader.readLine(colIds);
        indexCols();

        std::string line;
        std::string row_label;
        std::string value;

        while (getline(fileIN, line))
        {
            this->dataset.emplace_back(std::vector());
            std::istringstream stream(line);

            getline(stream, row_label,',');
            rowIds.push_back(rowReader.parse(row_label));
            while(getline(stream, value, ','))
            {
                this->dataset.at(this->dataset.size() - 1).push_back(valueReader.parse(value));
            }
        }
        fileIN.close();
        rows = this->rowIds.size();
        cols = this->colIds.size();
        indexRows();
        initializeDatatsetSize();
    }


    // Serialize to the binary .tmx format (version, type enums, flags,
    // dimensions, ids, then the data payload).
    void writeTMX(const std::string& filename) const
    {
        Serializer serializer(filename);
        tmxWriter rowWriter(serializer);
        tmxWriter colWriter(serializer);
        tmxWriter dataWriter(serializer);

        rowWriter.writeTMXVersion(TMX_VERSION);
        rowWriter.writeIdTypeEnum();
        colWriter.writeIdTypeEnum();
        dataWriter.writeValueTypeEnum();

        rowWriter.writeIsCompressible(isCompressible);
        rowWriter.writeIsSymmetric(isSymmetric);

        rowWriter.writeNumberOfRows(rows);
        colWriter.writeNumberOfCols(cols);

        rowWriter.writeIds(rowIds);
        colWriter.writeIds(colIds);
        dataWriter.writeData(dataset);
    }

    // Deserialize a .tmx file written by writeTMX; rejects version mismatches.
    void readTMX(const std::string& filename)
    {
        Deserializer deserializer(filename);

        tmxReader rowReader(deserializer);
        tmxReader colReader(deserializer);
        tmxReader dataReader(deserializer);

        auto tmx_version = rowReader.readTMXVersion();
        if (tmx_version != TMX_VERSION)
        {
            // NOTE(review): message concatenates without a separator, e.g.
            // "...tmx: 1expected: 2" — likely a missing ". " before "expected".
            auto error = std::string("file is an older version of tmx: ") + std::to_string(tmx_version);
            error += std::string("expected: ") + std::to_string(TMX_VERSION);
            throw std::runtime_error(error);
        }

        // row_enum_type
        rowReader.readIdTypeEnum();

        // col_enum_type
        colReader.readIdTypeEnum();

        // value_enum_type
        dataReader.readValueTypeEnum();

        isCompressible = rowReader.readIsCompressible();
        isSymmetric = rowReader.readIsSymmetric();

        rows = rowReader.readNumberOfRows();
        cols = colReader.readNumberOfCols();

        rowReader.readIds(rowIds);
        colReader.readIds(colIds);
        dataReader.readData(dataset);

        indexRows();
        indexCols();
        initializeDatatsetSize();

    }

private:

    // Emit the labeled CSV representation to any ostream.
    bool
    writeToStream(std::ostream& streamToWrite) const
    {

        streamToWrite << ",";
        // write the top row of column labels
        for (col_label_type col_label : colIds)
        {
            streamToWrite << col_label << ",";
        }

        streamToWrite << std::endl;
        // write the body of the table, each row has a row label and values
        for (unsigned long int row_loc = 0; row_loc < rows; row_loc++)
        {
            streamToWrite << rowIds.at(row_loc) << ",";
            for (unsigned long int col_loc = 0; col_loc < cols; col_loc++)
            {
                value_type value = this->getValueByLoc(row_loc, col_loc);
                if (value < UNDEFINED) {
                    streamToWrite << value << ",";
                } else {
                    streamToWrite << "-1" << ",";
                }
            }
            streamToWrite << std::endl;
        }
        return true;
    }


public:
    // Utilities

    // True when (row, col) falls strictly below the diagonal (row > col).
    bool
    isUnderDiagonal(unsigned long int row_loc, unsigned long int col_loc) const
    {
        return row_loc > col_loc;
    }

};
524 |
--------------------------------------------------------------------------------
/spatial_access/src/include/networkUtility.h:
--------------------------------------------------------------------------------
1 | // Logan Noel (github.com/lmnoel)
2 | //
3 | // ©2017-2019, Center for Spatial Data Science
4 |
5 | #pragma once
6 |
7 | #include
8 | #include
9 | #include
10 | #include
11 | #include
12 | #include
13 |
// Convenience alias. NOTE(review): appears unused within this header.
typedef unsigned long int unsigned_long;
// Finds the largest strongly-connected component of a directed graph so
// that travel-time queries can be restricted to mutually reachable nodes.
// NOTE(review): the template parameter list was lost in extraction;
// node_id is presumably the template type parameter.
template
class NetworkUtility {
public:
    typedef unsigned long int ulong;
    typedef std::pair edge_id;
    typedef std::vector edge_array;
    typedef std::vector node_array;
    typedef std::unordered_set node_set;
private:
    const edge_array edges;  // directed edges as (from, to) pairs
    const node_array nodes;  // all node ids in the network
    node_set nodes_of_main_connected_component;  // largest SCC, filled by the ctor
public:

    // Runs Kosaraju's algorithm: a DFS finish-order pass over the graph
    // (getTraversalOrder), then DFS passes over the transpose graph to
    // peel off strongly-connected components. Keeps only the largest.
    // Throws std::runtime_error if no component is found.
    NetworkUtility(edge_array &edges, node_array &nodes)
    : edges(edges), nodes(nodes) {
        auto traversal_order = getTraversalOrder();
        auto transpose_graph = getTransposeGraph();
        std::unordered_set is_visited;
        std::vector> connected_components;
        // Pop nodes in reverse DFS-finish order; each not-yet-visited node
        // seeds a new strongly-connected component of the transpose graph.
        while (!traversal_order.empty()) {
            node_id v = traversal_order.top();
            traversal_order.pop();
            if (is_visited.find(v) != is_visited.end()) {
                continue;
            }
            std::vector new_connected_component;
            connected_components.push_back(new_connected_component);
            std::stack secondary_stack;
            secondary_stack.push(v);
            // Iterative DFS on the transpose graph collects the component.
            while (!secondary_stack.empty()) {
                node_id u = secondary_stack.top();
                secondary_stack.pop();
                connected_components.at(connected_components.size() - 1).push_back(u);
                is_visited.insert(u);
                for (node_id t : transpose_graph.at(u)) {
                    if (is_visited.find(t) == is_visited.end()) {
                        secondary_stack.push(t);
                    }
                }
            }
        }
        if (connected_components.empty()) {
            throw std::runtime_error("Found no connected components");
        }
        // Order components largest-first and keep the biggest one.
        std::sort(connected_components.begin(), connected_components.end(),
                  [](const node_array &a, const node_array &b) { return a.size() > b.size(); });
        auto main_connected_component = connected_components.at(0);
        this->nodes_of_main_connected_component = node_set(main_connected_component.begin(),
                                                           main_connected_component.end());
    }

    // Returns the node ids belonging to the largest strongly-connected
    // component (computed once in the constructor).
    const node_set
    getConnectedNetworkNodes() const
    {
        return nodes_of_main_connected_component;
    }

private:

    // Builds an adjacency-list map of the graph: node -> outgoing neighbors.
    // Every node gets an entry, even if it has no outgoing edges.
    const std::unordered_map
    getGraph() const {
        std::unordered_map graph;
        for (auto node : nodes) {
            node_array values;
            graph.emplace(node, values);
        }
        for (auto edge : edges) {
            node_id from_edge = std::get<0>(edge);
            node_id to_edge = std::get<1>(edge);
            graph.at(from_edge).push_back(to_edge);
        }
        return graph;
    }

    // Builds the adjacency-list map of the transpose graph (every edge reversed).
    const std::unordered_map
    getTransposeGraph() const {
        std::unordered_map graph;
        for (auto node : nodes) {
            node_array values;
            graph.emplace(node, values);
        }
        for (auto edge : edges) {
            // Endpoints are swapped relative to getGraph().
            node_id from_edge = std::get<1>(edge);
            node_id to_edge = std::get<0>(edge);
            graph.at(from_edge).push_back(to_edge);
        }
        return graph;
    }

    // First pass of Kosaraju's algorithm: iterative DFS over the original
    // graph producing a stack of nodes ordered by DFS completion time
    // (last-finished node on top).
    std::stack
    getTraversalOrder() {
        // The pair's bool marks whether the node is being expanded (false)
        // or has finished and should be emitted (true).
        std::stack> temp_stack;
        std::stack return_stack;
        node_array nodes_copy(this->nodes);
        std::unordered_set visited;
        auto num_nodes = this->nodes.size();
        auto graph = this->getGraph();
        while (return_stack.size() < num_nodes) {
            auto v = nodes_copy.back();
            nodes_copy.pop_back();
            if (visited.find(v) != visited.end()) {
                continue;
            }
            temp_stack.push(std::make_pair(v, false));
            while (!temp_stack.empty()) {
                auto res = temp_stack.top();
                temp_stack.pop();
                node_id u = std::get<0>(res);
                bool flag = std::get<1>(res);
                if (flag) {
                    // All descendants finished: record u's completion.
                    return_stack.push(u);
                } else if (visited.find(u) == visited.end()) {
                    visited.insert(u);
                    // Re-push u marked finished, then its children, so the
                    // children complete before u does.
                    temp_stack.push(std::make_pair(u, true));
                    for (auto t : graph.at(u)) {
                        temp_stack.push(std::make_pair(t, false));
                    }
                }
            }
        }
        return return_stack;
    }
};
139 |
--------------------------------------------------------------------------------
/spatial_access/src/include/otpCSV.h:
--------------------------------------------------------------------------------
1 | // Logan Noel (github.com/lmnoel)
2 | //
3 | // ©2017-2019, Center for Spatial Data Science
4 |
5 | #pragma once
6 |
7 | #include
8 | #include
9 | #include
10 | #include
11 | #include
12 | #include "csvParser.h"
13 |
// Reads an OpenTripPlanner-style CSV where each line is
// "row_label,col_label,value" with no header row. Results are exposed as
// parallel vectors: entry i of row_labels/col_labels/data describes one
// origin-destination record.
// NOTE(review): the template parameter list was lost in extraction.
template
class otpCSVReader
{
public:
    std::vector data;        // travel-time values, cast to value_type
    std::vector row_labels;  // origin label of each record
    std::vector col_labels;  // destination label of each record


    // Eagerly loads and parses the whole file.
    // Throws std::runtime_error if the file cannot be opened.
    // NOTE(review): a malformed or empty line will make std::stof throw
    // std::invalid_argument — confirm inputs are always well-formed.
    otpCSVReader(const std::string& filename)
    {
        std::ifstream fileIN;
        fileIN.open(filename);
        if (fileIN.fail()) {
            throw std::runtime_error("unable to read file");
        }
        std::string line;
        std::string row_label;
        std::string col_label;
        std::string value;


        while (getline(fileIN, line))
        {
            std::istringstream stream(line);
            getline(stream, row_label,',');
            getline(stream, col_label,',');
            getline(stream, value);
            row_labels.push_back(csvParser::parse(row_label));
            col_labels.push_back(csvParser::parse(col_label));
            data.push_back((value_type) std::stof(value));
        }

        fileIN.close();
    }

};
51 |
--------------------------------------------------------------------------------
/spatial_access/src/include/threadUtilities.h:
--------------------------------------------------------------------------------
1 | // Logan Noel (github.com/lmnoel)
2 | //
3 | // ©2017-2019, Center for Spatial Data Science
4 |
5 | #pragma once
6 |
7 | #include
8 | #include
9 | #include
10 | #include
11 | #include
12 | #include
13 |
14 | #include "Graph.h"
15 | #include "userDataContainer.h"
16 | #include "dataFrame.h"
17 |
/* jobQueue: a thread-safe queue for dispensing integer jobs*/
class jobQueue {
private:
    std::queue data;          // pending job ids, FIFO order
    mutable std::mutex lock;  // guards data; mutable so empty() can stay const
public:
    jobQueue() = default;
    // Enqueue one job id.
    void insert(unsigned long int item);
    // Dequeue the next job id; endNow flags queue exhaustion to the caller.
    unsigned long int pop(bool &endNow);
    // True if no jobs remain.
    bool empty() const;
};
29 |
30 | void do_join(std::thread &t);
31 |
32 | template class graphWorkerArgs;
33 |
/* A pool of worker threads to execute a job (f_in), which takes arguments (worker_args)*/
// NOTE(review): the template parameter list was lost in extraction.
template
class workerQueue {
private:
    std::vector threads;  // worker threads; launched by the constructor
public:
    // Spawns numThreads threads, each immediately running f_in(worker_args).
    // All workers share the same worker_args instance.
    workerQueue(unsigned int numThreads,
                void (*f_in)(graphWorkerArgs&),
                graphWorkerArgs &worker_args)
    {

        for (unsigned long int i = 0; i < numThreads; i++)
        {
            this->threads.push_back(std::thread(f_in, std::ref(worker_args)));
        }

    }
    // Blocks until every worker thread finishes. Despite the name, the
    // threads are already running by the time this is called — this only
    // joins them.
    void startGraphWorker()
    {
        std::for_each(this->threads.begin(), this->threads.end(), do_join);
    }

};
57 |
58 |
// Shared state handed to every graph worker thread: the network graph,
// the output dataFrame, a job queue of source network-node ids, and the
// user source/destination points grouped by network node.
// NOTE(review): the template parameter list was lost in extraction.
template
class graphWorkerArgs {
public:
    Graph &graph;                      // network graph (shared by all workers)
    dataFrame &df;                     // result matrix the workers write into
    jobQueue jq;                       // pending source network-node ids
    userDataContainer userSourceData;  // source points grouped by network node
    userDataContainer userDestData;    // destination points grouped by network node
    graphWorkerArgs(Graph &graph, userDataContainer &userSourceData,
                    userDataContainer &userDestData,
                    dataFrame &df)
    : graph(graph), df(df), jq(), userSourceData(userSourceData), userDestData(userDestData) {}
    // Fill the job queue with one entry per unique source network node.
    // Must be called before the worker threads start consuming.
    void initialize()
    {
        //initialize job queue
        for (auto i : userSourceData.retrieveUniqueNetworkNodeIds()) {
            jq.insert(i);
        }
    }
};
79 |
--------------------------------------------------------------------------------
/spatial_access/src/include/tmxParser.h:
--------------------------------------------------------------------------------
1 | // Logan Noel (github.com/lmnoel)
2 | //
3 | // ©2017-2019, Center for Spatial Data Science
4 |
5 | #pragma once
6 |
7 | #include "Serializer.h"
8 | #include
9 | #include
10 |
// Header tag written into a .tmx file describing how row/column ids
// were serialized.
enum ValidLabelTypes {
    UnsignedLongType,
    StringType
};

// Header tag written into a .tmx file describing how matrix values
// were serialized.
enum ValidValueTypes {
    UnsignedShortType,
    UnsignedIntType
};
20 |
// Serializes the sections of a .tmx travel-time-matrix file. Sections
// must be written in the order tmxReader (below) reads them: version,
// id/value type enums, flags, dimensions, ids, then data.
// NOTE(review): the template parameter list was lost in extraction.
template
class tmxWriter{

private:
    Serializer& sharedSerializer;  // output sink shared with the caller
public:
    tmxWriter(Serializer& sharedSerializer) : sharedSerializer(sharedSerializer) {};

    // File-format version number.
    void writeTMXVersion(unsigned short version)
    {
        sharedSerializer.writeNumericType(version);
    }

    // Specialized per label type in tmxParser.cpp (writes a ValidLabelTypes tag).
    void writeIdTypeEnum();
    // Specialized per value type in tmxParser.cpp (writes a ValidValueTypes tag).
    void writeValueTypeEnum();
    // Flag: matrix can be stored as a symmetric upper triangle.
    void writeIsCompressible(bool isCompressible)
    {
        sharedSerializer.writeBool(isCompressible);
    }

    // Flag: matrix is symmetric.
    void writeIsSymmetric(bool isSymmetric)
    {
        sharedSerializer.writeBool(isSymmetric);
    }

    void writeNumberOfRows(unsigned long int rows)
    {
        sharedSerializer.writeNumericType(rows);
    }

    void writeNumberOfCols(unsigned long int cols)
    {
        sharedSerializer.writeNumericType(cols);
    }

    // Serialize a vector of row or column labels.
    void writeIds(const std::vector& ids)
    {
        sharedSerializer.writeVector(ids);
    }

    // Serialize the matrix body.
    void writeData(const std::vector>& data)
    {
        sharedSerializer.write2DVector(data);
    }
};
66 |
// Deserializes the sections of a .tmx file in the same order tmxWriter
// emits them. Each read advances the shared deserializer's cursor, so
// the methods must be called in file order.
// NOTE(review): the template parameter list was lost in extraction.
template
class tmxReader {
private:
    Deserializer& sharedDeserializer;  // input source shared with the caller
public:
    tmxReader(Deserializer& sharedDeserializer) : sharedDeserializer(sharedDeserializer) {};

    // File-format version number.
    unsigned short readTMXVersion()
    {
        return sharedDeserializer.readNumericType();
    }

    // Tag describing how row/col ids were serialized (ValidLabelTypes).
    unsigned short readIdTypeEnum()
    {
        return sharedDeserializer.readNumericType();
    }

    // Tag describing how values were serialized (ValidValueTypes).
    unsigned short readValueTypeEnum()
    {
        return sharedDeserializer.readNumericType();
    }

    // Flag: matrix was stored as a symmetric upper triangle.
    bool readIsCompressible()
    {
        return sharedDeserializer.readBool();
    }

    // Flag: matrix is symmetric.
    bool readIsSymmetric()
    {
        return sharedDeserializer.readBool();
    }

    unsigned long int readNumberOfRows()
    {
        return sharedDeserializer.readNumericType();
    }

    unsigned long int readNumberOfCols()
    {
        return sharedDeserializer.readNumericType();
    }

    // Fill ids with the serialized row or column labels.
    void readIds(std::vector& ids) {
        sharedDeserializer.readVector(ids);
    }

    // Fill data with the serialized matrix body.
    void readData(std::vector>& data)
    {
        sharedDeserializer.read2DVector(data);
    }

};
119 |
// Minimal reader used to peek at a .tmx file's header (version and the
// row/col/value type tags) so the caller can choose the matching
// concretely-typed reader before loading the body.
class tmxTypeReader{
private:
    Deserializer deserializer;  // owns the open file
public:
    tmxTypeReader(const std::string& filename) : deserializer(filename) {}
    // Read the next unsigned short header field.
    unsigned short readUshort()
    {
        return deserializer.readNumericType();
    }

};
--------------------------------------------------------------------------------
/spatial_access/src/include/transitMatrix.h:
--------------------------------------------------------------------------------
1 | // Logan Noel (github.com/lmnoel)
2 | //
3 | // ©2017-2019, Center for Spatial Data Science
4 |
5 | #pragma once
6 |
7 | #include
8 | #include
9 | #include
10 | #include
11 | #include
12 | #include
13 | #include
14 | #include
15 |
16 | #include "threadUtilities.h"
17 | #include "dataFrame.h"
18 | #include "Graph.h"
19 | #include "userDataContainer.h"
20 | #include "Serializer.h"
21 | using namespace std;
22 |
23 | typedef unsigned long int network_node;
24 |
25 |
// Out-of-class definition of the dataFrame::UNDEFINED sentinel constant
// (required pre-C++17 when a static constexpr member is odr-used).
// NOTE(review): the template parameter list was lost in extraction.
template
constexpr value_type dataFrame::UNDEFINED;
28 |
// Thread entry point: repeatedly pops a source network node from the
// shared job queue and runs a single-source shortest-path pass for it,
// until the queue is exhausted.
// NOTE(review): the template parameter list was lost in extraction.
template
void graphWorkerHandler(graphWorkerArgs &worker_args)
{
    network_node src;
    bool endNow = false;
    // Scratch distance buffer reused across jobs to avoid reallocating.
    std::vector dist_vector(worker_args.graph.vertices);
    while (!worker_args.jq.empty()) {
        src = worker_args.jq.pop(endNow);
        //exit loop if job queue worker_args is empty
        if (endNow) {
            break;
        }
        // NOTE(review): pop() returns 0 both for "queue empty" and for a
        // legitimate job id 0, so this guard also skips network node 0 —
        // confirm node ids are 1-based or that node 0 is never a source.
        if (src > 0) {
            doDijstraFromOneNetworkNode(src, worker_args, dist_vector);
        }
    }
}
46 |
// Given the finished shortest-path distances for one source network
// node, write the matrix row for every user source point snapped to that
// node. Each cell is source last-mile + network distance + destination
// last-mile; the diagonal of a symmetric matrix is forced to 0, and
// UNDEFINED network distances propagate as UNDEFINED.
// NOTE(review): the template parameter list was lost in extraction.
template
void calculateSingleRowOfDataFrame(const std::vector &dist,
                                   graphWorkerArgs &worker_args,
                                   network_node src) {
    value_type src_imp, dst_imp, calc_imp, fin_imp;
    // iterate through each data point of the current source tract
    auto sourceTract = worker_args.userSourceData.retrieveTract(src);
    for (auto sourceDataPoint : sourceTract.retrieveDataPoints())
    {
        src_imp = sourceDataPoint.lastMileDistance;

        auto destNodeIds = worker_args.userDestData.retrieveUniqueNetworkNodeIds();
        // iterate through each dest tract
        std::vector row_data;
        // A compressible dataFrame stores only columns at or beyond the
        // diagonal, so the row buffer shrinks by the row's offset.
        if (worker_args.df.isCompressible)
        {
            row_data.assign(worker_args.df.cols - sourceDataPoint.loc, worker_args.df.UNDEFINED);
        } else
        {
            row_data.assign(worker_args.df.cols, worker_args.df.UNDEFINED);
        }

        for (network_node destNodeId : destNodeIds)
        {
            auto destTract = worker_args.userDestData.retrieveTract(destNodeId);
            auto destPoints = destTract.retrieveDataPoints();
            for (auto destDataPoint : destPoints)
            {
                // Under-diagonal cells are mirrored elsewhere in triangle
                // storage; skip them.
                if (worker_args.df.isCompressible)
                {
                    if (worker_args.df.isUnderDiagonal(sourceDataPoint.loc, destDataPoint.loc))
                    {
                        continue;
                    }
                }
                calc_imp = dist.at(destNodeId);
                if ((worker_args.df.isSymmetric) && (destDataPoint.loc == sourceDataPoint.loc))
                {
                    // Self-to-self travel time is defined as zero.
                    fin_imp = 0;
                }
                else
                {
                    dst_imp = destDataPoint.lastMileDistance;
                    if (calc_imp == worker_args.df.UNDEFINED)
                    {
                        // Unreachable over the network: keep the sentinel.
                        fin_imp = worker_args.df.UNDEFINED;
                    }
                    else
                    {
                        fin_imp = dst_imp + src_imp + calc_imp;
                    }

                }
                if (worker_args.df.isCompressible)
                {
                    // Shift by the diagonal offset for triangle storage.
                    row_data.at(destDataPoint.loc - sourceDataPoint.loc) = fin_imp;
                } else {
                    row_data.at(destDataPoint.loc) = fin_imp;
                }


            }

        }
        worker_args.df.setRowByRowLoc(row_data, sourceDataPoint.loc);
    }

}
115 |
116 |
// Single-source Dijkstra over the network graph: fills the reusable
// dist_vector (initialized to UNDEFINED) with shortest distances from
// src, then delegates to calculateSingleRowOfDataFrame.
// NOTE(review): the template parameter list was lost in extraction; the
// "Dijstra" spelling is kept because callers use this exact name.
template
void doDijstraFromOneNetworkNode(network_node src, graphWorkerArgs &worker_args,
                                 std::vector& dist_vector)
{
    typedef std::pair queue_pair;
    network_node V = worker_args.graph.vertices;

    std::fill(dist_vector.begin(), dist_vector.end(), worker_args.df.UNDEFINED);
    dist_vector.at(src) = 0;
    // Min-heap on tentative distance; stale (lazily deleted) entries are
    // filtered out by the relaxation test below.
    std::priority_queue, std::greater> queue;
    queue.push(std::make_pair(0, src));
    std::vector visited(V, false);
    while (!queue.empty())
    {
        network_node u = queue.top().second;
        queue.pop();
        visited.at(u) = true;
        for (auto neighbor : worker_args.graph.neighbors.at(u))
        {
            auto v = std::get<0>(neighbor);
            auto weight = std::get<1>(neighbor);
            // Relax edge (u, v) when it improves v's tentative distance.
            if ((!visited.at(v)) and (dist_vector.at(v) > dist_vector.at(u) + weight))
            {
                dist_vector.at(v) = dist_vector.at(u) + weight;
                queue.push(std::make_pair(dist_vector.at(v), v));
            }
        }
    }

    //calculate row and add to dataFrame
    calculateSingleRowOfDataFrame(dist_vector, worker_args, src);

}
150 |
151 |
// Core travel-time matrix object: owns the network graph, the snapped
// user source/destination points, and the result dataFrame. Provides
// graph construction, multithreaded shortest-path computation,
// nearest/range queries, and CSV/TMX (de)serialization.
// NOTE(review): the template parameter list was lost in extraction.
template
class transitMatrix {
public:

    // Public members
    dataFrame df;                               // computed travel-time matrix
    userDataContainer userSourceDataContainer;  // sources grouped by network node
    userDataContainer userDestDataContainer;    // destinations grouped by network node
    Graph graph;                                // underlying network graph

    // Constructors
    transitMatrix(bool isCompressible, bool isSymmetric, unsigned long int rows, unsigned long int cols)
    : df(isCompressible, isSymmetric, rows, cols) {}
    transitMatrix()= default;

    // Allocate the graph's adjacency structure for V vertices.
    void
    prepareGraphWithVertices(unsigned long int V)
    {
        graph.initializeGraph(V);

    }

    // Replace the dataFrame contents wholesale (testing/support path).
    void setMockDataFrame(const std::vector> dataset,
                          const std::vector& row_ids,
                          const std::vector& col_ids)
    {
        df.setMockDataFrame(dataset, row_ids, col_ids);
    }


    // Register one source point: adds row_id to the dataFrame's row index
    // and records the point's snapped network node plus last-mile impedance.
    void
    addToUserSourceDataContainer(network_node networkNodeId, const row_label_type& row_id, value_type lastMileDistance)
    {
        network_node row_loc = df.addToRowIndex(row_id);
        this->userSourceDataContainer.addPoint(networkNodeId, row_loc, lastMileDistance);

    }


    // Register one destination point (column-side analogue of the above).
    void
    addToUserDestDataContainer(network_node networkNodeId, const col_label_type& col_id, value_type lastMileDistance)
    {
        network_node col_loc = this->df.addToColIndex(col_id);
        this->userDestDataContainer.addPoint(networkNodeId, col_loc, lastMileDistance);
    }

    // Add a single edge (and optionally its reverse) to the graph.
    void addSingleEdgeToGraph(network_node from_loc, network_node to_loc,
                              value_type edge_weight, bool is_bidirectional)
    {
        graph.addEdge(from_loc, to_loc, edge_weight);
        if (is_bidirectional)
        {
            graph.addEdge(to_loc, from_loc, edge_weight);
        }
    }

    // Bulk-add edges from parallel columns; index i across the four
    // vectors describes one edge.
    void
    addEdgesToGraph(const std::vector& from_column,
                    const std::vector& to_column,
                    const std::vector& edge_weights_column,
                    const std::vector& is_bidirectional_column)
    {
        for (unsigned long int i = 0; i < from_column.size(); i++)
        {
            auto from_loc = from_column.at(i);
            auto to_loc = to_column.at(i);
            value_type edge_weight = edge_weights_column.at(i);
            auto is_bidirectional = is_bidirectional_column.at(i);
            graph.addEdge(from_loc, to_loc, edge_weight);
            if (is_bidirectional)
            {
                graph.addEdge(to_loc, from_loc, edge_weight);
            }
        }
    }

    // Tag a destination with a category label for the per-category
    // queries below; creates the category bucket on first use.
    void
    addToCategoryMap(const col_label_type& dest_id, const std::string& category)
    {
        if (categoryToDestMap.find(category) != categoryToDestMap.end())
        {
            categoryToDestMap.at(category).push_back(dest_id);
        }
        else {
            std::vector data;
            data.push_back(dest_id);
            categoryToDestMap.emplace(std::make_pair(category, data));
        }
    }

    // Calculations

    // Run one shortest-path pass per unique source network node across
    // numThreads worker threads, filling the dataFrame. Any failure is
    // rethrown as a generic runtime_error (original detail is discarded).
    void
    compute(unsigned int numThreads)
    {
        try
        {
            graphWorkerArgs worker_args(graph, userSourceDataContainer, userDestDataContainer,
                                        df);
            worker_args.initialize();
            workerQueue wq(numThreads,
                           graphWorkerHandler, worker_args);
            wq.startGraphWorker();
        } catch (...)
        {
            throw std::runtime_error("Failed to compute matrix");
        }
    }


    // All (destination id, time) pairs for one source row; optionally
    // sorted (ordering delegated to the dataFrame).
    const std::vector>
    getValuesBySource(row_label_type source_id, bool sort) const
    {
        return this->df.getValuesByRowId(source_id, sort);
    }


    // All (source id, time) pairs for one destination column; optionally sorted.
    const std::vector>
    getValuesByDest(col_label_type dest_id, bool sort) const
    {
        return this->df.getValuesByColId(dest_id, sort);
    }



    // For every source row: the destination ids reachable within
    // threshold. UNDEFINED entries compare above any threshold and are
    // excluded naturally.
    const std::unordered_map>
    getDestsInRange(unsigned int threshold) const
    {
        // Initialize maps
        std::unordered_map> destsInRange;
        for (network_node row_loc = 0; row_loc < df.rows; row_loc++)
        {
            std::vector valueData;
            for (network_node col_loc = 0; col_loc < df.cols; col_loc++) {
                if (df.getValueByLoc(row_loc, col_loc) <= threshold) {
                    valueData.push_back(df.getColIdForLoc(col_loc));
                }
            }
            row_label_type row_id = df.getRowIdForLoc(row_loc);
            destsInRange.emplace(std::make_pair(row_id, valueData));
        }
        return destsInRange;

    }


    // For every destination column: the source ids within threshold.
    const std::unordered_map>
    getSourcesInRange(unsigned int threshold) const
    {
        // Initialize maps
        std::unordered_map> sourcesInRange;
        for (network_node col_loc = 0; col_loc < df.cols; col_loc++)
        {
            std::vector valueData;
            for (network_node row_loc = 0; row_loc < df.rows; row_loc++) {
                if (df.getValueByLoc(row_loc, col_loc) <= threshold) {
                    valueData.push_back(df.getRowIdForLoc(row_loc));
                }
            }
            col_label_type col_id = df.getColIdForLoc(col_loc);
            sourcesInRange.emplace(std::make_pair(col_id, valueData));
        }
        return sourcesInRange;

    }


    // Minimum travel time from source_id to any destination in category.
    // Returns 0 for an unknown category; returns UNDEFINED when every
    // destination in the category is unreachable.
    value_type
    timeToNearestDestPerCategory(const row_label_type& source_id, const std::string& category) const
    {
        if (categoryToDestMap.find(category) == categoryToDestMap.end()) {
            return 0;
        }
        value_type minimum = df.UNDEFINED;
        for (const col_label_type dest_id : categoryToDestMap.at(category))
        {
            value_type dest_time = this->df.getValueById(source_id, dest_id);
            if (dest_time <= minimum)
            {
                minimum = dest_time;
            }
        }
        return minimum;
    }


    // Number of category destinations reachable from source_id within
    // range. Returns 0 for an unknown category.
    value_type
    countDestsInRangePerCategory(const row_label_type& source_id, const std::string& category, value_type range) const
    {
        if (categoryToDestMap.find(category) == categoryToDestMap.end()) {
            return 0;
        }
        value_type count = 0;
        for (const col_label_type dest_id : categoryToDestMap.at(category))
        {
            if (this->df.getValueById(source_id, dest_id) <= range)
            {
                count++;
            }
        }
        return count;
    }


    // Minimum travel time from source_id to any destination at all;
    // UNDEFINED when nothing is reachable.
    value_type
    timeToNearestDest(const row_label_type& source_id) const
    {
        value_type minimum = df.UNDEFINED;
        network_node row_loc = df.getRowLocForId(source_id);
        for (network_node col_loc = 0; col_loc < df.cols; col_loc++)
        {
            value_type dest_time = this->df.getValueByLoc(row_loc, col_loc);
            if (dest_time <= minimum)
            {
                minimum = dest_time;
            }
        }
        return minimum;
    }


    // Number of destinations reachable from source_id within range.
    value_type
    countDestsInRange(const row_label_type& source_id, value_type range) const
    {

        value_type count = 0;
        network_node row_loc = df.getRowLocForId(source_id);
        for (network_node col_loc = 0; col_loc < df.cols; col_loc++)
        {
            if (this->df.getValueByLoc(row_loc, col_loc) <= range)
            {
                count++;
            }
        }
        return count;
    }

    // Getters


    // Travel time for a single (source, destination) pair.
    value_type
    getValueById(const row_label_type& row_id, const col_label_type& col_id) const
    {
        return df.getValueById(row_id, col_id);
    }

    // IO

    // Row labels, in row-location order.
    const std::vector&
    getRowIds() const
    {
        return this->df.getRowIds();
    }


    // Column labels, in column-location order.
    const std::vector&
    getColIds() const
    {
        return this->df.getColIds();
    }

    // Dump the matrix to stdout (debugging aid).
    void
    printDataFrame() const
    {
        this->df.printDataFrame();
    }

    // Load the matrix from the binary .tmx format.
    void
    readTMX(const std::string &infile) {
        df.readTMX(infile);
    }

    // Load the matrix from CSV.
    void
    readCSV(const std::string &infile) {
        df.readCSV(infile);
    }

    // Load from an OpenTripPlanner-style long-format CSV.
    void
    readOTPCSV(const std::string &infile)
    {
        df.readOTPCSV(infile);
    }

    // Write the matrix as CSV; any error is collapsed into a generic message.
    void
    writeCSV(const std::string &outfile) const
    {
        try {
            df.writeCSV(outfile);
        }
        catch (...)
        {
            throw std::runtime_error("Unable to write csv");
        }
    }

    // Write the matrix in the binary .tmx format.
    void
    writeTMX(const std::string &outfile) const
    {
        try {
            df.writeTMX(outfile);
        }
        catch (...)
        {
            throw std::runtime_error("Unable to write tmx");
        }

    }

private:
    // Private Members
    // category label -> destination ids tagged with that category
    std::unordered_map> categoryToDestMap;

};
--------------------------------------------------------------------------------
/spatial_access/src/include/userDataContainer.h:
--------------------------------------------------------------------------------
1 | // Logan Noel (github.com/lmnoel)
2 | //
3 | // ©2017-2019, Center for Spatial Data Science
4 |
5 | #pragma once
6 | #include
7 | #include
8 | #include
9 | #include
10 | #include
11 |
12 | template
13 | class userDataPoint
14 | {
15 | public:
16 | unsigned long int networkNodeId;
17 | unsigned long int loc;
18 | value_type lastMileDistance;
19 | userDataPoint(unsigned long int networkNodeId, unsigned long int loc, unsigned short int lastMileDistance)
20 | : networkNodeId(networkNodeId), loc(loc), lastMileDistance(lastMileDistance) {}
21 | };
22 |
// All user data points snapped to one network node, grouped so a single
// shortest-path run from that node can serve every point at once.
// NOTE(review): the template parameter list was lost in extraction.
template
class userDataTract
{
public:
    unsigned long int networkNodeId;  // the shared network node
    std::vector> data;  // points snapped to this node
    userDataTract(unsigned long int networkNodeId) : networkNodeId(networkNodeId) {}
    // Append one point to this tract.
    void addPoint(userDataPoint userData)
    {
        data.push_back(userData);
    }
    // Read-only access to the grouped points.
    const std::vector>& retrieveDataPoints() const
    {
        return data;
    }
};
39 |
40 |
// Collection of user points (sources or destinations), indexed by the
// network node each point was snapped to.
// NOTE(review): the template parameter list was lost in extraction.
template
class userDataContainer
{
private:
    std::unordered_map> data;  // network node -> tract of points
    std::vector allNetworkNodeIds;    // node id of every added point, insertion order
    std::vector ids;                  // dataFrame loc of every added point, insertion order
    std::vector uniqueNetworkNodeIds; // distinct node ids, first-seen order
public:
    userDataContainer()= default;

    // Record one point: remembers its matrix loc and snapped node, and
    // creates the node's tract on first sight.
    void addPoint(unsigned long int networkNodeId, unsigned long int loc, value_type lastMileDistance)
    {
        ids.push_back(loc);
        allNetworkNodeIds.push_back(networkNodeId);
        userDataPoint newDataPoint(networkNodeId, loc, lastMileDistance);
        if (containsTract(networkNodeId))
        {
            data.at(networkNodeId).addPoint(newDataPoint);
        }
        else
        {
            userDataTract newUserDataTract(networkNodeId);
            newUserDataTract.addPoint(newDataPoint);
            data.insert(std::make_pair(networkNodeId, newUserDataTract));
            uniqueNetworkNodeIds.push_back(networkNodeId);
        }


    }
    // True if any point has been snapped to networkNodeId.
    bool containsTract(unsigned long int networkNodeId) const
    {
        return data.find(networkNodeId) != data.end();
    }
    // Tract of points for networkNodeId.
    // NOTE(review): for an unknown id this falls back to data.at(0),
    // which itself throws std::out_of_range unless node 0 has a tract —
    // confirm this fallback is intentional.
    const userDataTract& retrieveTract(unsigned long int networkNodeId) const
    {
        if (data.find(networkNodeId) == data.end()) {
            return data.at(0);
        }
        return data.at(networkNodeId);
    }

    // Distinct network node ids, in first-insertion order.
    const std::vector& retrieveUniqueNetworkNodeIds() const
    {
        return uniqueNetworkNodeIds;
    }
};
88 |
--------------------------------------------------------------------------------
/spatial_access/src/pyx_src/dynamic.pyx:
--------------------------------------------------------------------------------
# Jinja-templated Cython declaration of the C++ transitMatrix class.
# The {{ ... }} placeholders are substituted at build time with concrete
# row/col/value types, producing one extension type per combination.
cdef extern from "include/transitMatrix.h":
    cdef cppclass {{ class_name }} "transitMatrix<{{ row_type_full }}, {{ col_type_full }},{{ value_type_full }}>":

        # constructors
        {{ class_name }}(bool, bool, unsigned int, unsigned int) except +
        {{ class_name }}() except +

        # graph construction and input loading
        void prepareGraphWithVertices(int V) except +
        void addToUserSourceDataContainer(unsigned int, {{ row_type }}, {{ value_type }}) except +
        void addToUserDestDataContainer(unsigned int, {{ col_type }}, {{ value_type }}) except +
        void addEdgesToGraph(vector[ulong], vector[ulong], vector[{{ value_type }}], vector[bool]) except +
        void addToCategoryMap({{ col_type }}, string) except +
        void setMockDataFrame(vector[vector[{{ value_type }}]], vector[{{ row_type }}], vector[{{ col_type }}]) except +

        # computation and queries
        void compute(int) except +
        vector[pair[{{ row_type }}, {{ value_type }}]] getValuesByDest({{ col_type }}, bool) except +
        vector[pair[{{ col_type }}, {{ value_type }}]] getValuesBySource({{ row_type }}, bool) except +
        unordered_map[{{ row_type }}, vector[{{ col_type }}]] getDestsInRange({{ value_type }}) except +
        unordered_map[{{ col_type }}, vector[{{ row_type }}]] getSourcesInRange({{ value_type }}) except +
        {{ value_type }} timeToNearestDestPerCategory({{ row_type }}, string) except +
        {{ value_type }} countDestsInRangePerCategory({{ row_type }}, string, {{ value_type }}) except +
        {{ value_type }} timeToNearestDest({{ row_type }}) except +
        {{ value_type }} countDestsInRange({{ row_type }}, {{ value_type }}) except +

        # label access
        vector[{{ col_type }}] getColIds() except +
        vector[{{ row_type }}] getRowIds() except +

        # serialization
        void writeCSV(string) except +
        void writeTMX(string) except +
        void readTMX(string) except +
        void readCSV(string) except +
        void readOTPCSV(string) except +
        void printDataFrame() except +
34 |
cdef class {{ py_class_name }}:
    """Python wrapper over one concrete transitMatrix instantiation.

    Owns a heap-allocated C++ transitMatrix and forwards every call to
    it; the pointer is freed in __dealloc__.
    """
    cdef {{ class_name }} *thisptr

    def __cinit__(self, bool isCompressible=False, bool isSymmetric=False, unsigned int rows=0, unsigned int columns=0):
        # rows == columns == 0 selects the default-constructed (empty)
        # matrix, used when loading from a file afterwards.
        if rows == 0 and columns == 0:
            self.thisptr = new {{ class_name }}()
        else:
            self.thisptr = new {{ class_name }}(isCompressible, isSymmetric, rows, columns)

    def __dealloc__(self):
        del self.thisptr

    def prepareGraphWithVertices(self, vertices):
        """Allocate the network graph for the given vertex count."""
        self.thisptr.prepareGraphWithVertices(vertices)


    def addToUserSourceDataContainer(self, networkNodeId, id_, lastMileDistance):
        """Register one source point snapped to networkNodeId."""
        self.thisptr.addToUserSourceDataContainer(networkNodeId, id_, lastMileDistance)

    def addToUserDestDataContainer(self, networkNodeId, id_, lastMileDistance):
        """Register one destination point snapped to networkNodeId."""
        self.thisptr.addToUserDestDataContainer(networkNodeId, id_, lastMileDistance)

    def addEdgesToGraph(self, from_column, to_column, edge_weight_column, is_bidirectional_column):
        """Bulk-add edges from parallel columns (one edge per index)."""
        self.thisptr.addEdgesToGraph(from_column, to_column, edge_weight_column, is_bidirectional_column)

    def setMockDataFrame(self, dataset, row_ids, col_ids):
        """Replace the matrix contents wholesale (testing support)."""
        self.thisptr.setMockDataFrame(dataset, row_ids, col_ids)

    def compute(self, numThreads):
        """Compute the travel-time matrix using numThreads worker threads."""
        self.thisptr.compute(numThreads)

    def writeCSV(self, outfile):
        self.thisptr.writeCSV(outfile)

    def writeTMX(self, outfile):
        self.thisptr.writeTMX(outfile)

    def readTMX(self, infile):
        self.thisptr.readTMX(infile)

    def readCSV(self, infile):
        self.thisptr.readCSV(infile)

    def readOTPCSV(self, infile):
        self.thisptr.readOTPCSV(infile)

    def printDataFrame(self):
        self.thisptr.printDataFrame()

    def getValuesBySource(self, source_id, sort):
        """(dest id, time) pairs for one source; optionally sorted."""
        return self.thisptr.getValuesBySource(source_id, sort)

    def getValuesByDest(self, dest_id, sort):
        """(source id, time) pairs for one destination; optionally sorted."""
        return self.thisptr.getValuesByDest(dest_id, sort)

    def addToCategoryMap(self, dest_id, category):
        """Tag a destination with a category label for category queries."""
        self.thisptr.addToCategoryMap(dest_id, category)

    def timeToNearestDestPerCategory(self, source_id, category):
        return self.thisptr.timeToNearestDestPerCategory(source_id, category)

    def countDestsInRangePerCategory(self, source_id, category, range):
        return self.thisptr.countDestsInRangePerCategory(source_id, category, range)

    def timeToNearestDest(self, source_id):
        return self.thisptr.timeToNearestDest(source_id)

    def countDestsInRange(self, source_id, range):
        return self.thisptr.countDestsInRange(source_id, range)

    def getColIds(self):
        return self.thisptr.getColIds()

    def getRowIds(self):
        return self.thisptr.getRowIds()

    def getSourcesInRange(self, range_):
        return self.thisptr.getSourcesInRange(range_)

    def getDestsInRange(self, range_):
        return self.thisptr.getDestsInRange(range_)
--------------------------------------------------------------------------------
/spatial_access/src/pyx_src/static.pyx:
--------------------------------------------------------------------------------
1 | # distutils: language=c++
2 | # cython: language_level=3
3 | from libcpp.string cimport string
4 | from libcpp cimport bool
5 | from libcpp.vector cimport vector
6 | from libcpp.unordered_map cimport unordered_map
7 | from libcpp.utility cimport pair
8 | from libcpp.unordered_set cimport unordered_set
9 |
10 | ctypedef unsigned short int ushort
11 | ctypedef unsigned long int ulong
12 | ctypedef unsigned int uint
13 |
# C++ NetworkUtility: isolates the largest strongly-connected component
# of a directed network given its edges and node ids.
cdef extern from "include/networkUtility.h":
    cdef cppclass NetworkUtility "NetworkUtility":
        NetworkUtility(vector[pair[ulong, ulong]], vector[ulong]) except +
        unordered_set[ulong] getConnectedNetworkNodes() except +
18 |
19 |
# C++ tmxTypeReader: reads unsigned-short header fields from a .tmx file.
cdef extern from "include/tmxParser.h":
    cdef cppclass tmxTypeReader:
        tmxTypeReader(string) except +
        ushort readUshort() except +
24 |
25 |
cdef class pyNetworkUtility:
    """Python wrapper over the C++ NetworkUtility, which computes the
    largest strongly-connected component of a directed network."""
    cdef NetworkUtility *thisptr

    def __cinit__(self, edges, nodes):
        # edges: iterable of (from, to) node-id pairs; nodes: node ids.
        self.thisptr = new NetworkUtility(edges, nodes)

    def __dealloc__(self):
        del self.thisptr

    def getConnectedNetworkNodes(self):
        """Return the set of node ids in the largest connected component."""
        return self.thisptr.getConnectedNetworkNodes()
37 |
cdef class pyTMXTypeReader:
    """Reads a .tmx file's header (version plus the row/col/value type
    tags) so the caller can select the matching generated matrix class
    before loading the file body."""
    cdef tmxTypeReader *thisptr
    cdef int tmxVersion
    cdef int rowTypeEnum
    cdef int colTypeEnum
    cdef int valueTypeEnum

    def __cinit__(self, filename):
        # Header fields are read in the order they appear in the file.
        self.thisptr = new tmxTypeReader(filename)
        self.tmxVersion = self.thisptr.readUshort()
        self.rowTypeEnum = self.thisptr.readUshort()
        self.colTypeEnum = self.thisptr.readUshort()
        self.valueTypeEnum = self.thisptr.readUshort()

    def __dealloc__(self):
        del self.thisptr

    def get_tmx_version(self):
        return self.tmxVersion

    def get_row_type_enum(self):
        return self.rowTypeEnum

    def get_col_type_enum(self):
        return self.colTypeEnum

    def get_value_type_enum(self):
        return self.valueTypeEnum
66 |
--------------------------------------------------------------------------------
/spatial_access/src/threadUtilities.cpp:
--------------------------------------------------------------------------------
1 | // Logan Noel (github.com/lmnoel)
2 | //
3 | // ©2017-2019, Center for Spatial Data Science
4 |
5 | #include "include/threadUtilities.h"
6 |
7 |
8 | /* initialize jobQueue, reserving size for known inputs*/
9 |
10 |
11 |
12 | /* insert to the jobQueue */
13 | void jobQueue::insert(unsigned long int item) {
14 | data.push(item);
15 | }
16 |
17 |
18 | /* pop from the jobQueue.*/
19 | unsigned long int jobQueue::pop(bool &endNow) {
20 | unsigned long int res = 0;
21 | std::lock_guard guard(lock);
22 | if (!data.empty()) {
23 | res = data.front();
24 | data.pop();
25 |
26 | } else {
27 | endNow = false;
28 | }
29 | return res;
30 | }
31 |
32 | /* return true if jobQueue is empty */
33 | bool jobQueue::empty() const
34 | {
35 | bool res;
36 | std::lock_guard guard(lock);
37 | res = data.empty();
38 | return res;
39 | }
40 |
// Blocks until the given thread finishes; passed to std::for_each by
// workerQueue::startGraphWorker to join the whole thread pool.
void do_join(std::thread &t)
{
    t.join();
}
--------------------------------------------------------------------------------
/spatial_access/src/tmxParser.cpp:
--------------------------------------------------------------------------------
1 | // Logan Noel (github.com/lmnoel)
2 | //
3 | // ©2017-2019, Center for Spatial Data Science
4 |
5 | #include "include/tmxParser.h"
6 |
7 | // write data
8 |
// Specialization for unsigned-long row/col ids: records the matching
// ValidLabelTypes tag in the .tmx header.
// NOTE(review): the specialization argument lists were lost in extraction.
template<>
void tmxWriter::writeIdTypeEnum()
{
    sharedSerializer.writeNumericType(UnsignedLongType);
}
14 |
// Specialization for string row/col ids: records the matching
// ValidLabelTypes tag in the .tmx header.
// NOTE(review): the specialization argument lists were lost in extraction.
template<>
void tmxWriter::writeIdTypeEnum()
{
    sharedSerializer.writeNumericType(StringType);
}
20 |
21 | template<>
22 | void tmxWriter