├── .gitignore
├── docs
│   ├── images
│   │   ├── Banner.png
│   │   ├── EditGTFS.png
│   │   ├── DeprecatedTools.png
│   │   ├── GTFSRTConnector.png
│   │   ├── NetworkAnalysis.png
│   │   ├── TransitSolution.png
│   │   ├── VisualizeGTFS.png
│   │   ├── StoryMapThumbnail.png
│   │   └── CalculateFrequency.png
│   ├── index.html
│   ├── EditGTFS.html
│   ├── resources.html
│   ├── DeprecatedTools.html
│   ├── GTFSRTConnector.html
│   ├── NetworkAnalysis.html
│   ├── TransitSolution.html
│   ├── VisualizeGTFS.html
│   ├── CalculateFrequency.html
│   └── style.css
├── transit-network-analysis-tools
│   ├── .gitignore
│   ├── unittests
│   │   ├── TestInput
│   │   │   ├── CAM_PostProcessing
│   │   │   │   ├── ODLines_O_3_4_D_1_2_T_19000103_080200.csv
│   │   │   │   ├── ODLines_O_1_2_D_1_2_T_19000103_080000.csv
│   │   │   │   ├── ODLines_O_1_2_D_1_2_T_19000103_080100.csv
│   │   │   │   ├── ODLines_O_1_2_D_1_2_T_19000103_080200.csv
│   │   │   │   ├── ODLines_O_1_2_D_1_2_T_19000103_080300.csv
│   │   │   │   ├── ODLines_O_1_2_D_3_4_T_19000103_080000.csv
│   │   │   │   ├── ODLines_O_1_2_D_3_4_T_19000103_080100.csv
│   │   │   │   ├── ODLines_O_1_2_D_3_4_T_19000103_080200.csv
│   │   │   │   ├── ODLines_O_1_2_D_3_4_T_19000103_080300.csv
│   │   │   │   ├── ODLines_O_3_4_D_1_2_T_19000103_080000.csv
│   │   │   │   ├── ODLines_O_3_4_D_1_2_T_19000103_080100.csv
│   │   │   │   ├── ODLines_O_3_4_D_3_4_T_19000103_080200.csv
│   │   │   │   ├── ODLines_O_3_4_D_3_4_T_19000103_080000.csv
│   │   │   │   └── ODLines_O_3_4_D_3_4_T_19000103_080100.csv
│   │   │   ├── CTTS_PostProcessing
│   │   │   │   ├── ODLines_O_1_2_D_1_2_T_19000103_080000.csv
│   │   │   │   ├── ODLines_O_1_2_D_1_2_T_19000103_080100.csv
│   │   │   │   ├── ODLines_O_1_2_D_1_2_T_19000103_080200.csv
│   │   │   │   ├── ODLines_O_1_2_D_1_2_T_19000103_080300.csv
│   │   │   │   ├── ODLines_O_1_2_D_3_4_T_19000103_080000.csv
│   │   │   │   ├── ODLines_O_1_2_D_3_4_T_19000103_080100.csv
│   │   │   │   ├── ODLines_O_1_2_D_3_4_T_19000103_080200.csv
│   │   │   │   ├── ODLines_O_1_2_D_3_4_T_19000103_080300.csv
│   │   │   │   ├── ODLines_O_3_4_D_1_2_T_19000103_080000.csv
│   │   │   │   ├── ODLines_O_3_4_D_1_2_T_19000103_080100.csv
│   │   │   │   ├── ODLines_O_3_4_D_1_2_T_19000103_080200.csv
│   │   │   │   ├── ODLines_O_3_4_D_1_2_T_19000103_080300.csv
│   │   │   │   ├── ODLines_O_3_4_D_3_4_T_19000103_080000.csv
│   │   │   │   ├── ODLines_O_3_4_D_3_4_T_19000103_080100.csv
│   │   │   │   ├── ODLines_O_3_4_D_3_4_T_19000103_080200.csv
│   │   │   │   └── ODLines_O_3_4_D_3_4_T_19000103_080300.csv
│   │   │   ├── TransitToyNetwork.gdb.zip
│   │   │   ├── TestOrigins_Subset.json
│   │   │   ├── TestDestinations_Subset.json
│   │   │   ├── TestOrigins.json
│   │   │   ├── TestDestinations.json
│   │   │   ├── TimeLapsePolys_1Fac_1Cutoff.json
│   │   │   └── TimeLapsePolys_2Fac_2Cutoffs.json
│   │   ├── portal_credentials.py
│   │   ├── unittests_README.txt
│   │   ├── input_data_helper.py
│   │   ├── test_PrepareTimeLapsePolygons_tool.py
│   │   ├── test_CreateTimeLapsePolygonsInParallel.py
│   │   ├── test_ReplaceRouteGeometryWithLVEShapes_tool.py
│   │   ├── test_CalculateTravelTimeStatisticsOD_tool.py
│   │   ├── test_CreatePercentAccessPolygons_tool.py
│   │   ├── test_parallel_sa.py
│   │   ├── test_CalculateODMatrixInParallel.py
│   │   ├── test_parallel_calculate_locations.py
│   │   └── test_CalculateAccessibilityMatrix_tool.py
│   ├── Symbology_Cells.lyr
│   ├── images
│   │   ├── Screenshot_CopyTraversed_Dialog.png
│   │   ├── Screenshot_ReplaceGeometry_Dialog.png
│   │   ├── Screenshot_PrepareTimeLapsePolygons_Dialog.png
│   │   ├── Screenshot_CalculateAccessibilityMatrix_Dialog.png
│   │   ├── Screenshot_CreatePercentAccessPolygons_Dialog.png
│   │   ├── Screenshot_CalculateTravelTimeStatisticsOD_Dialog.png
│   │   └── Screenshot_CalculateTravelTimeStatistics_Dialog.png
│   ├── Transit Network Analysis Tools.CalculateTravelTimeStatistics.pyt.xml
│   ├── Transit Network Analysis Tools.CalculateAccessibilityMatrixPro.pyt.xml
│   ├── Transit Network Analysis Tools.pyt.xml
│   ├── CalculateAccessibilityMatrix_OD_config.py
│   ├── CalculateTravelTimeStatistics_OD_config.py
│   ├── CreateTimeLapsePolygons_SA_config.py
│   ├── README.md
│   ├── Transit Network Analysis Tools.CopyTraversedSourceFeaturesWithTransit.pyt.xml
│   ├── Transit Network Analysis Tools.CreatePercentAccessPolygons.pyt.xml
│   ├── ReplaceRouteShapes.py
│   ├── TNAT_ToolValidator.py
│   ├── Transit Network Analysis Tools.CalculateTravelTimeStatisticsOD.pyt.xml
│   ├── Transit Network Analysis Tools.CalculateAccessibilityMatrix.pyt.xml
│   ├── CreatePercentAccessPolygon.py
│   └── Transit Network Analysis Tools.PrepareTimeLapsePolygons.pyt.xml
├── CONTRIBUTING.md
├── send-GTFS-rt-to-GeoEvent
│   ├── README.md
│   └── GTFS-rt-to-GeoEvent.py
├── README.md
└── License.txt
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
--------------------------------------------------------------------------------
/docs/images/Banner.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Esri/public-transit-tools/HEAD/docs/images/Banner.png
--------------------------------------------------------------------------------
/docs/images/EditGTFS.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Esri/public-transit-tools/HEAD/docs/images/EditGTFS.png
--------------------------------------------------------------------------------
/transit-network-analysis-tools/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 |
3 | /unittests/TestInput/*.gdb
4 | /unittests/TestOutput
5 |
--------------------------------------------------------------------------------
/docs/images/DeprecatedTools.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Esri/public-transit-tools/HEAD/docs/images/DeprecatedTools.png
--------------------------------------------------------------------------------
/docs/images/GTFSRTConnector.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Esri/public-transit-tools/HEAD/docs/images/GTFSRTConnector.png
--------------------------------------------------------------------------------
/docs/images/NetworkAnalysis.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Esri/public-transit-tools/HEAD/docs/images/NetworkAnalysis.png
--------------------------------------------------------------------------------
/docs/images/TransitSolution.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Esri/public-transit-tools/HEAD/docs/images/TransitSolution.png
--------------------------------------------------------------------------------
/docs/images/VisualizeGTFS.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Esri/public-transit-tools/HEAD/docs/images/VisualizeGTFS.png
--------------------------------------------------------------------------------
/docs/images/StoryMapThumbnail.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Esri/public-transit-tools/HEAD/docs/images/StoryMapThumbnail.png
--------------------------------------------------------------------------------
/docs/images/CalculateFrequency.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Esri/public-transit-tools/HEAD/docs/images/CalculateFrequency.png
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CAM_PostProcessing/ODLines_O_3_4_D_1_2_T_19000103_080200.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID
2 | 4,2
3 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | Esri welcomes contributions from anyone and everyone. Please see our [guidelines for contributing](https://github.com/esri/contributing).
2 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/Symbology_Cells.lyr:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Esri/public-transit-tools/HEAD/transit-network-analysis-tools/Symbology_Cells.lyr
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CAM_PostProcessing/ODLines_O_1_2_D_1_2_T_19000103_080000.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID
2 | 1,1
3 | 1,2
4 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CAM_PostProcessing/ODLines_O_1_2_D_1_2_T_19000103_080100.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID
2 | 1,1
3 | 1,2
4 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CAM_PostProcessing/ODLines_O_1_2_D_1_2_T_19000103_080200.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID
2 | 1,1
3 | 1,2
4 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CAM_PostProcessing/ODLines_O_1_2_D_1_2_T_19000103_080300.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID
2 | 1,1
3 | 1,2
4 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CAM_PostProcessing/ODLines_O_1_2_D_3_4_T_19000103_080000.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID
2 | 1,3
3 | 1,4
4 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CAM_PostProcessing/ODLines_O_1_2_D_3_4_T_19000103_080100.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID
2 | 1,3
3 | 1,4
4 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CAM_PostProcessing/ODLines_O_1_2_D_3_4_T_19000103_080200.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID
2 | 1,3
3 | 1,4
4 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CAM_PostProcessing/ODLines_O_1_2_D_3_4_T_19000103_080300.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID
2 | 1,3
3 | 1,4
4 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CAM_PostProcessing/ODLines_O_3_4_D_1_2_T_19000103_080000.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID
2 | 3,1
3 | 3,2
4 | 4,2
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CAM_PostProcessing/ODLines_O_3_4_D_1_2_T_19000103_080100.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID
2 | 3,1
3 | 3,2
4 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CAM_PostProcessing/ODLines_O_3_4_D_3_4_T_19000103_080200.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID
2 | 4,3
3 | 4,4
4 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CAM_PostProcessing/ODLines_O_3_4_D_3_4_T_19000103_080000.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID
2 | 3,3
3 | 3,4
4 | 4,4
5 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CAM_PostProcessing/ODLines_O_3_4_D_3_4_T_19000103_080100.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID
2 | 3,3
3 | 3,4
4 | 4,4
5 |
--------------------------------------------------------------------------------
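
Each CAM_PostProcessing CSV lists the origin-destination pairs reachable within the cutoff for a single start time, encoded in the file name (ODLines_O_&lt;origins&gt;_D_&lt;destinations&gt;_T_&lt;date&gt;_&lt;time&gt;.csv). Below is a minimal sketch of one way such per-time-slice files could be rolled up into a reachability count per OD pair; the folder path and the aggregation itself are illustrative assumptions, not the repository's actual post-processing code.

```python
import csv
import glob
import os
from collections import Counter


def count_reachable_time_slices(folder):
    """Count, per (OriginOID, DestinationOID) pair, the number of time-slice
    CSVs in which that pair appears (i.e., is reachable for that start time)."""
    counts = Counter()
    for csv_path in glob.glob(os.path.join(folder, "ODLines_O_*_D_*_T_*.csv")):
        with open(csv_path, newline="") as f:
            for row in csv.DictReader(f):
                counts[(int(row["OriginOID"]), int(row["DestinationOID"]))] += 1
    return counts


if __name__ == "__main__":
    # Hypothetical relative path to the test data shown above.
    for pair, n in sorted(count_reachable_time_slices("TestInput/CAM_PostProcessing").items()):
        print(pair, n)
```
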
/transit-network-analysis-tools/unittests/TestInput/CTTS_PostProcessing/ODLines_O_1_2_D_1_2_T_19000103_080000.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID,Total_Time
2 | 1,1,9.8
3 | 1,2,1.5
4 | 2,1,1
5 | 2,2,2
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CTTS_PostProcessing/ODLines_O_1_2_D_1_2_T_19000103_080100.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID,Total_Time
2 | 1,1,9.9
3 | 1,2,0.5
4 | 2,1,1
5 | 2,2,2
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CTTS_PostProcessing/ODLines_O_1_2_D_1_2_T_19000103_080200.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID,Total_Time
2 | 1,1,10.1
3 | 1,2,1.5
4 | 2,1,1
5 | 2,2,2
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CTTS_PostProcessing/ODLines_O_1_2_D_1_2_T_19000103_080300.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID,Total_Time
2 | 1,1,10.2
3 | 1,2,0.5
4 | 2,1,1
5 | 2,2,2
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CTTS_PostProcessing/ODLines_O_1_2_D_3_4_T_19000103_080000.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID,Total_Time
2 | 1,3,45
3 | 1,4,30
4 | 2,3,15
5 | 2,4,20
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CTTS_PostProcessing/ODLines_O_1_2_D_3_4_T_19000103_080100.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID,Total_Time
2 | 1,3,46
3 | 1,4,31
4 | 2,3,16
5 | 2,4,21
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CTTS_PostProcessing/ODLines_O_1_2_D_3_4_T_19000103_080200.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID,Total_Time
2 | 1,3,47
3 | 1,4,32
4 | 2,3,17
5 | 2,4,22
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CTTS_PostProcessing/ODLines_O_1_2_D_3_4_T_19000103_080300.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID,Total_Time
2 | 1,3,48
3 | 1,4,33
4 | 2,3,18
5 | 2,4,23
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CTTS_PostProcessing/ODLines_O_3_4_D_1_2_T_19000103_080000.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID,Total_Time
2 | 3,1,0
3 | 3,2,20
4 | 4,1,20
5 | 4,2,20
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CTTS_PostProcessing/ODLines_O_3_4_D_1_2_T_19000103_080100.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID,Total_Time
2 | 3,1,0
3 | 3,2,22
4 | 4,1,22
5 | 4,2,22
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CTTS_PostProcessing/ODLines_O_3_4_D_1_2_T_19000103_080200.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID,Total_Time
2 | 3,1,0
3 | 3,2,24
4 | 4,1,24
5 | 4,2,24
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CTTS_PostProcessing/ODLines_O_3_4_D_1_2_T_19000103_080300.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID,Total_Time
2 | 3,1,0
3 | 3,2,18
4 | 4,1,18
5 | 4,2,18
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CTTS_PostProcessing/ODLines_O_3_4_D_3_4_T_19000103_080000.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID,Total_Time
2 | 3,3,100
3 | 3,4,16.5
4 | 4,3,12.3
5 | 4,4,20.2
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CTTS_PostProcessing/ODLines_O_3_4_D_3_4_T_19000103_080100.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID,Total_Time
2 | 3,3,150
3 | 3,4,15.5
4 | 4,3,12.6
5 | 4,4,20.4
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CTTS_PostProcessing/ODLines_O_3_4_D_3_4_T_19000103_080200.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID,Total_Time
2 | 3,3,150
3 | 3,4,17.0
4 | 4,3,12.4
5 | 4,4,20.2
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/CTTS_PostProcessing/ODLines_O_3_4_D_3_4_T_19000103_080300.csv:
--------------------------------------------------------------------------------
1 | OriginOID,DestinationOID,Total_Time
2 | 3,3,200
3 | 3,4,17.5
4 | 4,3,12.8
5 | 4,4,20.6
--------------------------------------------------------------------------------
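
The CTTS_PostProcessing files carry an additional Total_Time column for each OD pair and start time. The sketch below shows the kind of per-pair summary (minimum, maximum, and mean travel time across time slices) that such inputs support; it is an illustrative aggregation over the test files, not the Calculate Travel Time Statistics tool's implementation.

```python
import csv
import glob
import os
import statistics
from collections import defaultdict


def summarize_travel_times(folder):
    """Gather Total_Time values per (OriginOID, DestinationOID) pair across all
    time-slice CSVs and report the minimum, maximum, and mean travel time."""
    times = defaultdict(list)
    for csv_path in glob.glob(os.path.join(folder, "ODLines_O_*_D_*_T_*.csv")):
        with open(csv_path, newline="") as f:
            for row in csv.DictReader(f):
                key = (int(row["OriginOID"]), int(row["DestinationOID"]))
                times[key].append(float(row["Total_Time"]))
    return {key: (min(v), max(v), statistics.mean(v)) for key, v in times.items()}


if __name__ == "__main__":
    # Hypothetical relative path to the test data shown above.
    for pair, (t_min, t_max, t_mean) in sorted(summarize_travel_times("TestInput/CTTS_PostProcessing").items()):
        print(f"{pair}: min={t_min} max={t_max} mean={t_mean:.2f}")
```
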
/transit-network-analysis-tools/images/Screenshot_CopyTraversed_Dialog.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Esri/public-transit-tools/HEAD/transit-network-analysis-tools/images/Screenshot_CopyTraversed_Dialog.png
--------------------------------------------------------------------------------
/transit-network-analysis-tools/images/Screenshot_ReplaceGeometry_Dialog.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Esri/public-transit-tools/HEAD/transit-network-analysis-tools/images/Screenshot_ReplaceGeometry_Dialog.png
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/TransitToyNetwork.gdb.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Esri/public-transit-tools/HEAD/transit-network-analysis-tools/unittests/TestInput/TransitToyNetwork.gdb.zip
--------------------------------------------------------------------------------
/transit-network-analysis-tools/images/Screenshot_PrepareTimeLapsePolygons_Dialog.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Esri/public-transit-tools/HEAD/transit-network-analysis-tools/images/Screenshot_PrepareTimeLapsePolygons_Dialog.png
--------------------------------------------------------------------------------
/transit-network-analysis-tools/images/Screenshot_CalculateAccessibilityMatrix_Dialog.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Esri/public-transit-tools/HEAD/transit-network-analysis-tools/images/Screenshot_CalculateAccessibilityMatrix_Dialog.png
--------------------------------------------------------------------------------
/transit-network-analysis-tools/images/Screenshot_CreatePercentAccessPolygons_Dialog.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Esri/public-transit-tools/HEAD/transit-network-analysis-tools/images/Screenshot_CreatePercentAccessPolygons_Dialog.png
--------------------------------------------------------------------------------
/transit-network-analysis-tools/images/Screenshot_CalculateTravelTimeStatisticsOD_Dialog.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Esri/public-transit-tools/HEAD/transit-network-analysis-tools/images/Screenshot_CalculateTravelTimeStatisticsOD_Dialog.png
--------------------------------------------------------------------------------
/transit-network-analysis-tools/images/Screenshot_CalculateTravelTimeStatistics_Dialog.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Esri/public-transit-tools/HEAD/transit-network-analysis-tools/images/Screenshot_CalculateTravelTimeStatistics_Dialog.png
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/portal_credentials.py:
--------------------------------------------------------------------------------
1 | """Portal credentials for unit tests."""
2 | # Do not check your actual credentials into the repo.
3 | PORTAL_URL = ""
4 | PORTAL_USERNAME = ""
5 | PORTAL_PASSWORD = ""
6 |
--------------------------------------------------------------------------------
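
The portal-based unit tests presumably read these constants and sign in before exercising services; here is a minimal sketch of that step, assuming arcpy is available and the values above have been filled in locally (the exact usage inside the test suite is not shown in this dump).

```python
import arcpy

import portal_credentials  # the module shown above, with real values filled in locally

# Sign in to the portal that hosts the network analysis services used by the tests.
arcpy.SignInToPortal(
    portal_credentials.PORTAL_URL,
    portal_credentials.PORTAL_USERNAME,
    portal_credentials.PORTAL_PASSWORD,
)
```
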
/transit-network-analysis-tools/Transit Network Analysis Tools.CalculateTravelTimeStatistics.pyt.xml:
--------------------------------------------------------------------------------
1 |
2 | 20220429132629001.0TRUE
3 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/Transit Network Analysis Tools.CalculateAccessibilityMatrixPro.pyt.xml:
--------------------------------------------------------------------------------
1 |
2 | 20210508132557001.0TRUE
3 |
--------------------------------------------------------------------------------
/docs/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | The Public Transit Analysis in ArcGIS website has been removed, but don't worry, it's just the website. All the great tools formerly showcased here still exist, and Esri continues to improve its offerings for public transit analysis.
8 | You will be redirected soon.
9 |
10 |
--------------------------------------------------------------------------------
/docs/EditGTFS.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | The Public Transit Analysis in ArcGIS website has been removed, but don't worry, it's just the website. All the great tools formerly showcased here still exist, and Esri continues to improve its offerings for public transit analysis.
8 | You will be redirected soon.
9 |
10 |
--------------------------------------------------------------------------------
/docs/resources.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | The Public Transit Analysis in ArcGIS website has been removed, but don't worry, it's just the website. All the great tools formerly showcased here still exist, and Esri continues to improve its offerings for public transit analysis.
8 | You will be redirected soon.
9 |
10 |
--------------------------------------------------------------------------------
/docs/DeprecatedTools.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | The Public Transit Analysis in ArcGIS website has been removed, but don't worry, it's just the website. All the great tools formerly showcased here still exist, and Esri continues to improve its offerings for public transit analysis.
8 | You will be redirected soon.
9 |
10 |
--------------------------------------------------------------------------------
/docs/GTFSRTConnector.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | The Public Transit Analysis in ArcGIS website has been removed, but don't worry, it's just the website. All the great tools formerly showcased here still exist, and Esri continues to improve its offerings for public transit analysis.
8 | You will be redirected soon.
9 |
10 |
--------------------------------------------------------------------------------
/docs/NetworkAnalysis.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | The Public Transit Analysis in ArcGIS website has been removed, but don't worry, it's just the website. All the great tools formerly showcased here still exist, and Esri continues to improve its offerings for public transit analysis.
8 | You will be redirected soon.
9 |
10 |
--------------------------------------------------------------------------------
/docs/TransitSolution.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | The Public Transit Analysis in ArcGIS website has been removed, but don't worry, it's just the website. All the great tools formerly showcased here still exist, and Esri continues to improve its offerings for public transit analysis.
8 | You will be redirected soon.
9 |
10 |
--------------------------------------------------------------------------------
/docs/VisualizeGTFS.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | The Public Transit Analysis in ArcGIS website has been removed, but don't worry, it's just the website. All the great tools formerly showcased here still exist, and Esri continues to improve its offerings for public transit analysis.
8 | You will be redirected soon.
9 |
10 |
--------------------------------------------------------------------------------
/docs/CalculateFrequency.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | The Public Transit Analysis in ArcGIS website has been removed, but don't worry, it's just the website. All the great tools formerly showcased here still exist, and Esri continues to improve its offerings for public transit analysis.
8 | You will be redirected soon.
9 |
10 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/unittests_README.txt:
--------------------------------------------------------------------------------
1 | The unit tests use data from the Network Analyst\Tutorial\PublicTransit\CincinnatiTransitNetwork.gdb geodatabase in the ArcGIS Pro Network Analyst tutorial data. Download the data from https://links.esri.com/NetworkAnalyst/TutorialData/Pro. Extract the zip file and copy CincinnatiTransitNetwork.gdb to the TestInput folder.
2 |
3 | The AnalysisHelpers test also requires valid values in the portal_credentials.py file for a test portal with network analysis services.
--------------------------------------------------------------------------------
/transit-network-analysis-tools/Transit Network Analysis Tools.pyt.xml:
--------------------------------------------------------------------------------
1 |
2 | 20190517161623001.0TRUE20220616144602c:\program files\arcgis\pro\Resources\Help\gpTransit Network Analysis ToolsArcToolbox Toolbox
3 |
--------------------------------------------------------------------------------
/docs/style.css:
--------------------------------------------------------------------------------
1 | .banner-title {
2 | background:rgba(34, 36, 38, 0.6);
3 | padding-left: .25em;
4 | padding-right: .25em;
5 | padding-top: .1em;
6 | padding-bottom: .1em;
7 | border-radius: 5px;
8 | }
9 |
10 | .top-nav-link.is-active {
11 | border-bottom-color: #497671;
12 | }
13 | .top-nav-link:hover, .top-nav-link:focus {
14 | color: #497671;
15 | border-bottom-color: #497671;
16 | }
17 |
18 | .sub-nav {
19 | background: url(./images/Banner.png) no-repeat center;
20 | background-size: cover;
21 | }
22 |
23 | a {
24 | color: #68b9a4;
25 | }
26 | a:hover {
27 | color: #68b9a4;
28 | }
29 |
30 | .btn {
31 | background-color: #497671;
32 | border: none;
33 | }
34 | .btn:hover {
35 | background-color: #68b9a4;
36 | }
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/TestOrigins_Subset.json:
--------------------------------------------------------------------------------
1 | {
2 | "displayFieldName" : "",
3 | "fieldAliases" : {
4 | "OBJECTID" : "OBJECTID"
5 | },
6 | "geometryType" : "esriGeometryPoint",
7 | "spatialReference" : {
8 | "wkid" : 102723,
9 | "latestWkid" : 3735
10 | },
11 | "fields" : [
12 | {
13 | "name" : "OBJECTID",
14 | "type" : "esriFieldTypeOID",
15 | "alias" : "OBJECTID"
16 | }
17 | ],
18 | "features" : [{"attributes":{"OBJECTID":1},"geometry":{"x":1395262.0030903816,"y":407498.79280781746}},{"attributes":{"OBJECTID":2},"geometry":{"x":1397936.0941988826,"y":410550.43335805833}},{"attributes":{"OBJECTID":3},"geometry":{"x":1401255.4231901318,"y":396242.41735473275}},{"attributes":{"OBJECTID":4},"geometry":{"x":1374231.542526722,"y":415367.57825131714}}]
19 | }
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/TestDestinations_Subset.json:
--------------------------------------------------------------------------------
1 | {
2 | "displayFieldName" : "",
3 | "fieldAliases" : {
4 | "OBJECTID" : "OBJECTID",
5 | "NumJobs" : "NumJobs"
6 | },
7 | "geometryType" : "esriGeometryPoint",
8 | "spatialReference" : {
9 | "wkid" : 102723,
10 | "latestWkid" : 3735
11 | },
12 | "fields" : [
13 | {
14 | "name" : "OBJECTID",
15 | "type" : "esriFieldTypeOID",
16 | "alias" : "OBJECTID"
17 | },
18 | {
19 | "name" : "NumJobs",
20 | "type" : "esriFieldTypeInteger",
21 | "alias" : "NumJobs"
22 | }
23 | ],
24 | "features" : [{"attributes":{"OBJECTID":1,"NumJobs":0},"geometry":{"x":1392266.3722706437,"y":409043.88814280927}},{"attributes":{"OBJECTID":2,"NumJobs":5},"geometry":{"x":1400924.1777896434,"y":416862.27965822816}},{"attributes":{"OBJECTID":3,"NumJobs":10},"geometry":{"x":1405951.2474598885,"y":438841.80459672213}},{"attributes":{"OBJECTID":4,"NumJobs":20},"geometry":{"x":1386737.8521597236,"y":429781.69804506004}}]
25 | }
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/TestOrigins.json:
--------------------------------------------------------------------------------
1 | {
2 | "displayFieldName" : "",
3 | "fieldAliases" : {
4 | "OBJECTID" : "OBJECTID"
5 | },
6 | "geometryType" : "esriGeometryPoint",
7 | "spatialReference" : {
8 | "wkid" : 102723,
9 | "latestWkid" : 3735
10 | },
11 | "fields" : [
12 | {
13 | "name" : "OBJECTID",
14 | "type" : "esriFieldTypeOID",
15 | "alias" : "OBJECTID"
16 | }
17 | ],
18 | "features" : [{"attributes":{"OBJECTID":1},"geometry":{"x":1395262.0030903816,"y":407498.79280781746}},{"attributes":{"OBJECTID":2},"geometry":{"x":1397936.0941988826,"y":410550.43335805833}},{"attributes":{"OBJECTID":3},"geometry":{"x":1401255.4231901318,"y":396242.41735473275}},{"attributes":{"OBJECTID":4},"geometry":{"x":1374231.542526722,"y":415367.57825131714}},{"attributes":{"OBJECTID":5},"geometry":{"x":1411576.1446746439,"y":424717.82267414033}},{"attributes":{"OBJECTID":6},"geometry":{"x":1404564.816833809,"y":439105.67381964624}},{"attributes":{"OBJECTID":7},"geometry":{"x":1381480.7829494774,"y":414422.54995764792}},{"attributes":{"OBJECTID":8},"geometry":{"x":1406002.1299040616,"y":405316.15937781334}},{"attributes":{"OBJECTID":9},"geometry":{"x":1417429.2448450476,"y":401178.73392564058}},{"attributes":{"OBJECTID":10},"geometry":{"x":1431558.7090401351,"y":401423.24426339567}},{"attributes":{"OBJECTID":11},"geometry":{"x":1401714.5098703802,"y":419722.75884538889}},{"attributes":{"OBJECTID":12},"geometry":{"x":1393346.0856623948,"y":416642.43298506737}},{"attributes":{"OBJECTID":13},"geometry":{"x":1384517.1403938085,"y":420031.40324123204}}]
19 | }
--------------------------------------------------------------------------------
/send-GTFS-rt-to-GeoEvent/README.md:
--------------------------------------------------------------------------------
1 | # send-GTFS-rt-to-GeoEvent
2 |
3 | This simple script provides a fast and easy way to consume GTFS-rt feeds and send the data to the ArcGIS GeoEvent Extension for Server via TCP.
4 |
5 | ## Features
6 | * View GTFS-rt realtime transit data in ArcGIS using ArcGIS GeoEvent
7 |
8 | ## Instructions
9 |
10 | Edit the hostname and port in the script to match your environment: the hostname is the server on which GeoEvent is running, and the port is the port you configured for the TCP input.
11 |
12 |
13 | ## Requirements
14 |
15 | - ArcGIS for Server with the GeoEvent Extension
16 | - GeoEvent TCP input
17 | - Access to a GTFS-rt feed (e.g. http://www.cttransit.com/about/developers/gtfsdata/)
18 | - Python runtime
19 | - Python GTFS bindings
20 |
21 | ## Resources
22 |
23 | * [GTFS specification](https://developers.google.com/transit/gtfs/reference)
24 |
25 | ## Issues
26 |
27 | Find a bug or want to request a new feature? Please let us know by submitting an issue, or post a question in the [Esri Community forums](https://community.esri.com/t5/public-transit-questions/bd-p/public-transit-questions).
28 |
29 | ## Contributing
30 |
31 | Esri welcomes contributions from anyone and everyone. Please see our [guidelines for contributing](https://github.com/esri/contributing).
32 |
33 | ## Licensing
34 | Copyright 2015 Esri
35 |
36 | Licensed under the Apache License, Version 2.0 (the "License");
37 | you may not use this file except in compliance with the License.
38 | You may obtain a copy of the License at
39 |
40 | http://www.apache.org/licenses/LICENSE-2.0
41 |
42 | Unless required by applicable law or agreed to in writing, software
43 | distributed under the License is distributed on an "AS IS" BASIS,
44 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
45 | See the License for the specific language governing permissions and
46 | limitations under the License.
47 |
48 | A copy of the license is available in the repository's [license.txt](../License.txt?raw=true) file.
49 |
--------------------------------------------------------------------------------
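
The "Python GTFS bindings" requirement above refers to Google's GTFS-realtime protocol buffer bindings. A minimal import check, assuming the commonly used gtfs-realtime-bindings package (the install name is an assumption; the script in this folder imports the same module):

```python
# Assumes the bindings were installed with: pip install gtfs-realtime-bindings
from google.transit import gtfs_realtime_pb2

# Construct an empty FeedMessage; the script in this folder parses real feed bytes
# into this same message type with feed.ParseFromString(...).
feed = gtfs_realtime_pb2.FeedMessage()
print(type(feed).__name__)  # FeedMessage
```
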
/transit-network-analysis-tools/unittests/TestInput/TestDestinations.json:
--------------------------------------------------------------------------------
1 | {
2 | "displayFieldName" : "",
3 | "fieldAliases" : {
4 | "OBJECTID" : "OBJECTID",
5 | "NumJobs" : "NumJobs"
6 | },
7 | "geometryType" : "esriGeometryPoint",
8 | "spatialReference" : {
9 | "wkid" : 102723,
10 | "latestWkid" : 3735
11 | },
12 | "fields" : [
13 | {
14 | "name" : "OBJECTID",
15 | "type" : "esriFieldTypeOID",
16 | "alias" : "OBJECTID"
17 | },
18 | {
19 | "name" : "NumJobs",
20 | "type" : "esriFieldTypeInteger",
21 | "alias" : "NumJobs"
22 | }
23 | ],
24 | "features" : [{"attributes":{"OBJECTID":1,"NumJobs":0},"geometry":{"x":1392266.3722706437,"y":409043.88814280927}},{"attributes":{"OBJECTID":2,"NumJobs":5},"geometry":{"x":1400924.1777896434,"y":416862.27965822816}},{"attributes":{"OBJECTID":3,"NumJobs":10},"geometry":{"x":1405951.2474598885,"y":438841.80459672213}},{"attributes":{"OBJECTID":4,"NumJobs":20},"geometry":{"x":1386737.8521597236,"y":429781.69804506004}},{"attributes":{"OBJECTID":5,"NumJobs":30},"geometry":{"x":1402253.9162718803,"y":422199.89201447368}},{"attributes":{"OBJECTID":6,"NumJobs":10},"geometry":{"x":1417444.2923871428,"y":430035.61748473346}},{"attributes":{"OBJECTID":7,"NumJobs":11},"geometry":{"x":1407699.467060551,"y":402556.55958205462}},{"attributes":{"OBJECTID":8,"NumJobs":10},"geometry":{"x":1396069.5506878942,"y":397524.46465939283}},{"attributes":{"OBJECTID":9,"NumJobs":10},"geometry":{"x":1372020.3186027259,"y":393250.23797164857}},{"attributes":{"OBJECTID":10,"NumJobs":10},"geometry":{"x":1372443.5926905572,"y":406230.47808723152}},{"attributes":{"OBJECTID":11,"NumJobs":56},"geometry":{"x":1384517.1403938085,"y":420031.40324123204}},{"attributes":{"OBJECTID":12,"NumJobs":10},"geometry":{"x":1392047.4895063043,"y":412782.4354557246}},{"attributes":{"OBJECTID":13,"NumJobs":10},"geometry":{"x":1396923.6090191305,"y":408628.41882939637}},{"attributes":{"OBJECTID":14,"NumJobs":10},"geometry":{"x":1397825.2446830571,"y":397407.52755731344}},{"attributes":{"OBJECTID":15,"NumJobs":10},"geometry":{"x":1400484.5142988861,"y":399759.72528073192}}]
25 | }
--------------------------------------------------------------------------------
/send-GTFS-rt-to-GeoEvent/GTFS-rt-to-GeoEvent.py:
--------------------------------------------------------------------------------
1 | # Copyright 2015 Esri
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | #
5 | # you may not use this file except in compliance with the License.
6 | #
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | #
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | #
15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | #
17 | # See the License for the specific language governing permissions and
18 | #
19 | # limitations under the License.
20 |
21 | def main():
22 |
23 | # need the GTFS Python bindings
24 | from google.transit import gtfs_realtime_pb2
25 | import urllib
26 | import json
27 | import socket
28 | import time
29 |
30 | # create socket connection to hostname/port on which a TCP GeoEvent input is running
31 | tcpSocket = socket.create_connection(("", 5565))
32 |
33 | # polling model - run, wait 5 seconds, run, wait, run, wait, etc
34 | while True:
35 |
36 | feed = gtfs_realtime_pb2.FeedMessage()
37 |
38 | # this particular feed is from CT Transit (http://www.cttransit.com/about/developers/gtfsdata/)
39 | response = urllib.urlopen('http://65.213.12.244/realtimefeed/vehicle/vehiclepositions.pb')
40 |
41 | # read the Protocol Buffers (.pb) file
42 | feed.ParseFromString(response.read())
43 |
44 | # loop through feed entities
45 | for entity in feed.entity:
46 |
47 | # check for a vehicle in feed entity
48 | if entity.HasField('vehicle'):
49 |
50 | # build a simple id,lon,lat message to send to GeoEvent.
51 | msg = str(entity.vehicle.vehicle.label) + "," + \
52 | str(entity.vehicle.position.longitude) + "," + \
53 | str(entity.vehicle.position.latitude) + "\n"
54 |
55 | # send message
56 | tcpSocket.send(msg)
57 |
58 | time.sleep(5)
59 |
60 | if __name__ == '__main__':
61 | main()
62 |
63 |
64 |
--------------------------------------------------------------------------------
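
The script above targets Python 2 (urllib.urlopen, sending str over the socket). Below is a sketch of how the same fetch/parse/send loop could look under Python 3; this is an assumed port for reference, not code from the repository, and the hostname is left blank just as in the original.

```python
import socket
import time
import urllib.request

from google.transit import gtfs_realtime_pb2

# Hostname/port of the GeoEvent TCP input; edit to match your environment
# (left blank here, as in the original script).
tcp_socket = socket.create_connection(("", 5565))

while True:
    feed = gtfs_realtime_pb2.FeedMessage()
    # Same CT Transit vehicle positions feed used by the original script.
    with urllib.request.urlopen("http://65.213.12.244/realtimefeed/vehicle/vehiclepositions.pb") as response:
        feed.ParseFromString(response.read())
    for entity in feed.entity:
        if entity.HasField("vehicle"):
            msg = "{},{},{}\n".format(
                entity.vehicle.vehicle.label,
                entity.vehicle.position.longitude,
                entity.vehicle.position.latitude,
            )
            tcp_socket.send(msg.encode("utf-8"))  # Python 3 sockets require bytes
    time.sleep(5)
```
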
/transit-network-analysis-tools/CalculateAccessibilityMatrix_OD_config.py:
--------------------------------------------------------------------------------
1 | """Defines OD Cost matrix solver object properties that are not specified
2 | in the tool dialog.
3 |
4 | A list of OD cost matrix solver properties is documented here:
5 | https://pro.arcgis.com/en/pro-app/latest/arcpy/network-analyst/odcostmatrix.htm
6 |
7 | You can include any of them in the dictionary in this file, and the tool will
8 | use them. However, travelMode, timeUnits, defaultImpedanceCutoff,
9 | and timeOfDay will be ignored because they are specified in the
10 | tool dialog.
11 |
12 | Copyright 2021 Esri
13 | Licensed under the Apache License, Version 2.0 (the "License");
14 | you may not use this file except in compliance with the License.
15 | You may obtain a copy of the License at
16 | http://www.apache.org/licenses/LICENSE-2.0
17 | Unless required by applicable law or agreed to in writing, software
18 | distributed under the License is distributed on an "AS IS" BASIS,
19 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
20 | See the License for the specific language governing permissions and
21 | limitations under the License.
22 | """
23 | import arcpy
24 |
25 | # These properties are set by the tool dialog or can be specified as command line arguments.
26 | # Do not set the values for these properties in the OD_PROPS dictionary below because they will be ignored.
27 | OD_PROPS_SET_BY_TOOL = ["travelMode", "timeUnits", "defaultImpedanceCutoff", "timeOfDay"]
28 |
29 | # You can customize these properties to your needs, and the parallel OD cost matrix calculations will use them.
30 | OD_PROPS = {
31 | "accumulateAttributeNames": [],
32 | "allowSaveLayerFile": False,
33 | "defaultDestinationCount": None, # None means to find all destinations within the cutoff.
34 | "distanceUnits": arcpy.nax.DistanceUnits.Meters,
35 | "ignoreInvalidLocations": True,
36 | "lineShapeType": arcpy.nax.LineShapeType.NoLine,
37 | "overrides": "",
38 | # "searchQuery": [], # This parameter is very network specific. Only uncomment if you are using it.
39 | "searchTolerance": 5000,
40 | "searchToleranceUnits": arcpy.nax.DistanceUnits.Meters,
41 | "timeZone": arcpy.nax.TimeZoneUsage.LocalTimeAtLocations,
42 | }
43 |
--------------------------------------------------------------------------------
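
The docstring above says the tool applies whatever solver properties are placed in OD_PROPS. A minimal sketch of how such a dictionary could be applied to an arcpy.nax.OriginDestinationCostMatrix object follows; the network path and the application loop are assumptions about the consuming script, not code from this repository.

```python
import arcpy

from CalculateAccessibilityMatrix_OD_config import OD_PROPS, OD_PROPS_SET_BY_TOOL

# Hypothetical transit-enabled network dataset; substitute your own.
network = r"C:\data\TransitNetwork.gdb\TransitNetwork\TransitNetwork_ND"
od = arcpy.nax.OriginDestinationCostMatrix(network)

# Apply every configured property except those controlled by the tool dialog.
for prop, value in OD_PROPS.items():
    if prop not in OD_PROPS_SET_BY_TOOL:
        setattr(od, prop, value)
```
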
/transit-network-analysis-tools/CalculateTravelTimeStatistics_OD_config.py:
--------------------------------------------------------------------------------
1 | """Defines OD Cost matrix solver object properties that are not specified
2 | in the tool dialog.
3 |
4 | A list of OD cost matrix solver properties is documented here:
5 | https://pro.arcgis.com/en/pro-app/latest/arcpy/network-analyst/odcostmatrix.htm
6 |
7 | You can include any of them in the dictionary in this file, and the tool will
8 | use them. However, travelMode and timeOfDay will be ignored because they are
9 | specified in the tool dialog. Specifying a value for defaultImpedanceCutoff
10 | and defaultDestinationCount may yield incorrect statistics results.
11 |
12 | Copyright 2022 Esri
13 | Licensed under the Apache License, Version 2.0 (the "License");
14 | you may not use this file except in compliance with the License.
15 | You may obtain a copy of the License at
16 | http://www.apache.org/licenses/LICENSE-2.0
17 | Unless required by applicable law or agreed to in writing, software
18 | distributed under the License is distributed on an "AS IS" BASIS,
19 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
20 | See the License for the specific language governing permissions and
21 | limitations under the License.
22 | """
23 | import arcpy
24 |
25 | # These properties are set by the tool dialog or can be specified as command line arguments.
26 | # Do not set the values for these properties in the OD_PROPS dictionary below because they will be ignored.
27 | OD_PROPS_SET_BY_TOOL = ["travelMode", "timeOfDay"]
28 |
29 | # You can customize these properties to your needs, and the parallel OD cost matrix calculations will use them.
30 | OD_PROPS = {
31 | "accumulateAttributeNames": [],
32 | "allowSaveLayerFile": False,
33 | "defaultDestinationCount": None, # None means to find all destinations within the cutoff.
34 | "defaultImpedanceCutoff": None, # None means no impedance cutoff.
35 | "distanceUnits": arcpy.nax.DistanceUnits.Meters,
36 | "ignoreInvalidLocations": True,
37 | "lineShapeType": arcpy.nax.LineShapeType.NoLine,
38 | "overrides": "",
39 | # "searchQuery": [], # This parameter is very network specific. Only uncomment if you are using it.
40 | "searchTolerance": 5000,
41 | "searchToleranceUnits": arcpy.nax.DistanceUnits.Meters,
42 | "timeUnits": arcpy.nax.TimeUnits.Minutes,
43 | "timeZone": arcpy.nax.TimeZoneUsage.LocalTimeAtLocations,
44 | }
45 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/CreateTimeLapsePolygons_SA_config.py:
--------------------------------------------------------------------------------
1 | """Defines Service Area solver object properties that are not specified in the tool dialog.
2 |
3 | A list of Service Area solver properties is documented here:
4 | https://pro.arcgis.com/en/pro-app/latest/arcpy/network-analyst/servicearea.htm
5 |
6 | You can include any of them in the dictionary in this file, and the tool will
7 | use them. However, values for any of the properties in SA_PROPS_SET_BY_TOOL
8 | will be ignored because they are specified in the tool dialog.
9 |
10 | Copyright 2021 Esri
11 | Licensed under the Apache License, Version 2.0 (the "License");
12 | you may not use this file except in compliance with the License.
13 | You may obtain a copy of the License at
14 | http://www.apache.org/licenses/LICENSE-2.0
15 | Unless required by applicable law or agreed to in writing, software
16 | distributed under the License is distributed on an "AS IS" BASIS,
17 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
18 | See the License for the specific language governing permissions and
19 | limitations under the License.
20 | """
21 | import arcpy
22 |
23 | # These properties are set by the tool dialog or can be specified as command line arguments. Do not set the values for
24 | # these properties in the SA_PROPS dictionary below because they will be ignored.
25 | SA_PROPS_SET_BY_TOOL = [
26 | "defaultImpedanceCutoffs",
27 | "geometryAtCutoff",
28 | "geometryAtOverlap",
29 | "timeUnits",
30 | "timeOfDay",
31 | "travelDirection",
32 | "travelMode"
33 | ]
34 |
35 | # You can customize these properties to your needs, and the parallel Service Area calculations will use them.
36 | SA_PROPS = {
37 | "accumulateAttributeNames": [],
38 | "allowSaveLayerFile": False,
39 | "distanceUnits": arcpy.nax.DistanceUnits.Kilometers,
40 | "excludeSourcesFromPolygonGeneration": ["LineVariantElements", "StopConnectors"],
41 | "ignoreInvalidLocations": True,
42 | "outputType": arcpy.nax.ServiceAreaOutputType.Polygons, # Tool won't work if you change this.
43 | "overrides": "",
44 | "polygonBufferDistance": 100,
45 | "polygonBufferDistanceUnits": arcpy.nax.DistanceUnits.Meters,
46 | "polygonDetail": arcpy.nax.ServiceAreaPolygonDetail.High,
47 | # "searchQuery": [], # This parameter is very network specific. Only uncomment if you are using it.
48 | "searchTolerance": 500,
49 | "searchToleranceUnits": arcpy.nax.DistanceUnits.Meters,
50 | "timeZone": arcpy.nax.TimeZoneUsage.LocalTimeAtLocations
51 | }
52 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/input_data_helper.py:
--------------------------------------------------------------------------------
1 | """Helper for unit tests to create required inputs.
2 |
3 | Copyright 2023 Esri
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 | http://www.apache.org/licenses/LICENSE-2.0
8 | Unless required by applicable law or agreed to in writing, software
9 | distributed under the License is distributed on an "AS IS" BASIS,
10 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 | See the License for the specific language governing permissions and
12 | limitations under the License.
13 | """
14 | import os
15 | import zipfile
16 | import arcpy
17 |
18 |
19 | def make_feature_classes_from_json(input_data_folder):
20 | """Create feature classes needed for test inputs."""
21 | cinci_gdb = os.path.join(input_data_folder, "CincinnatiTransitNetwork.gdb")
22 | if not os.path.exists(cinci_gdb):
23 | raise RuntimeError(f"Required test input gdb {cinci_gdb} does not exist.")
24 | # Create point feature classes for use in testing
25 | in_data_names = ["TestOrigins", "TestOrigins_Subset", "TestDestinations", "TestDestinations_Subset",
26 | "TimeLapsePolys_1Fac_1Cutoff", "TimeLapsePolys_2Fac_2Cutoffs"]
27 | for in_data_name in in_data_names:
28 | out_fc = os.path.join(cinci_gdb, in_data_name)
29 | if not arcpy.Exists(out_fc):
30 | in_json = os.path.join(input_data_folder, in_data_name + ".json")
31 | arcpy.conversion.JSONToFeatures(in_json, out_fc)
32 | print(f"Created test dataset {out_fc}.")
33 | # Create polygon feature classes for use in testing. The actual polygons don't matter very much, so just create
34 | # buffers around the point feature classes.
35 | for in_data_name in ["TestOrigins", "TestDestinations"]:
36 | pg_fc = os.path.join(cinci_gdb, in_data_name + "_Polygons")
37 | if not arcpy.Exists(pg_fc):
38 | pt_fc = os.path.join(cinci_gdb, in_data_name)
39 | arcpy.analysis.Buffer(pt_fc, pg_fc, "100 Meters")
40 | print(f"Created test dataset {pg_fc}.")
41 |
42 |
43 | def extract_toy_network(input_data_folder):
44 | """Extract the transit toy network from zip file."""
45 | toy_gdb = os.path.join(input_data_folder, "TransitToyNetwork.gdb")
46 | if os.path.exists(toy_gdb):
47 | # Data is already present and extracted
48 | return
49 | toy_zip = toy_gdb + ".zip"
50 | if not os.path.exists(toy_zip):
51 | raise RuntimeError(f"Required test input zip file {toy_zip} does not exist.")
52 | if not zipfile.is_zipfile(toy_zip):
53 | raise RuntimeError(f"Required test input zip file {toy_zip} is not a valid zip file.")
54 | with zipfile.ZipFile(toy_zip) as zf:
55 | zf.extractall(input_data_folder)
56 | if not os.path.exists(toy_gdb):
57 | raise RuntimeError(f"Required test input gdb file {toy_gdb} does not exist after unzipping.")
58 | print(f"Extracted {toy_gdb} from {toy_zip}.")
59 |
--------------------------------------------------------------------------------
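
A test module would typically call these helpers during setup; here is a minimal sketch mirroring the TestInput folder layout used by the unit tests below (whether a given test needs the Cincinnati network, the toy network, or both is test-specific).

```python
import os

import input_data_helper

# Build the test inputs in the unittests/TestInput folder: feature classes created
# from the JSON files inside CincinnatiTransitNetwork.gdb, plus the extracted toy network.
input_data_folder = os.path.join(os.path.dirname(os.path.abspath(__file__)), "TestInput")
input_data_helper.make_feature_classes_from_json(input_data_folder)
input_data_helper.extract_toy_network(input_data_folder)
```
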
/transit-network-analysis-tools/README.md:
--------------------------------------------------------------------------------
1 | # transit-network-analysis-tools
2 |
3 | The *Transit Network Analysis Tools* are a set of tools for performing transit-specific network analysis. They are intended to supplement the ArcGIS Network Analyst extension by accounting for the time-dependent nature of public transit and to assist with analyses commonly needed by those working with public transit. For example, the tools provided here can help you perform accessibility calculations and show how the area reachable by transit changes throughout the day.
4 |
5 | The *Transit Network Analysis Tools* must be used with a transit-enabled network dataset created using [these tools available in ArcGIS Pro](https://pro.arcgis.com/en/pro-app/help/analysis/networks/network-analysis-with-public-transit-data.htm).
6 |
7 | If you are looking for the older version of this toolbox that worked with ArcMap, it is located [here](https://arcg.is/1u8WKS) and is deprecated and no longer supported. This new version is faster and better, so you are encouraged to upgrade to ArcGIS Pro.
8 |
9 | ## Features
10 | * Visualize transit service areas (also called isochrones or transitsheds)
11 | * Study accessibility of destinations by transit
12 | * Calculate travel time statistics
13 | * ArcGIS toolbox - No coding is required to use these tools. Use them like any other geoprocessing tools.
14 |
15 | ## Instructions
16 |
17 | 1. To simply use the tool, download it here or from [ArcGIS.com](http://www.arcgis.com/home/item.html?id=23cc1b720eb0418db4d0e61955b8fe43) and follow the [User's Guide](https://github.com/Esri/public-transit-tools/blob/master/transit-network-analysis-tools/UsersGuide.md).
18 | 2. If you want to play with the code, fork it and have fun.
19 |
20 | ## Requirements
21 |
22 | * ArcGIS Pro 2.8 or higher. A Desktop Basic license is sufficient.
23 | * Note: The tools have not been tested on versions of ArcGIS Pro prior to 2.8 and may not work properly. Upgrading to the latest version of ArcGIS Pro is always recommended.
24 | * Network Analyst extension.
25 | * A transit-enabled network dataset created using [these tools available in ArcGIS Pro](https://pro.arcgis.com/en/pro-app/help/analysis/networks/network-analysis-with-public-transit-data.htm).
26 |
27 | ## Resources
28 |
29 | * [User's Guide](https://github.com/Esri/public-transit-tools/blob/master/transit-network-analysis-tools/UsersGuide.md)
30 |
31 | ## Issues
32 |
33 | Find a bug or want to request a new feature? Please let us know by submitting an issue, or post a question in the [Esri Community forums](https://community.esri.com/t5/public-transit-questions/bd-p/public-transit-questions).
34 |
35 | ## Contributing
36 |
37 | Esri welcomes contributions from anyone and everyone. Please see our [guidelines for contributing](https://github.com/esri/contributing).
38 |
39 | ## Licensing
40 | Copyright 2024 Esri
41 |
42 | Licensed under the Apache License, Version 2.0 (the "License");
43 | you may not use this file except in compliance with the License.
44 | You may obtain a copy of the License at
45 |
46 | http://www.apache.org/licenses/LICENSE-2.0
47 |
48 | Unless required by applicable law or agreed to in writing, software
49 | distributed under the License is distributed on an "AS IS" BASIS,
50 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
51 | See the License for the specific language governing permissions and
52 | limitations under the License.
53 |
54 | A copy of the license is available in the repository's [license.txt](../License.txt?raw=true) file.
--------------------------------------------------------------------------------
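
For scripted use, the toolbox can also be imported with arcpy and its tools called like any other geoprocessing tools, as the unit tests elsewhere in this repository do; a minimal sketch, with the toolbox path assumed to point at your local copy:

```python
import arcpy

# Assumed local path to the downloaded toolbox.
arcpy.ImportToolbox(r"C:\tools\transit-network-analysis-tools\Transit Network Analysis Tools.pyt")

# Tools are then available under the toolbox alias, e.g.:
# arcpy.TransitNetworkAnalysisTools.PrepareTimeLapsePolygons(...)
```
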
/transit-network-analysis-tools/Transit Network Analysis Tools.CopyTraversedSourceFeaturesWithTransit.pyt.xml:
--------------------------------------------------------------------------------
1 |
2 | 20220616144145001.0TRUE20220616152817001500000005000ItemDescriptionc:\program files\arcgis\pro\Resources\Help\gp<DIV STYLE="text-align:Left;"><DIV><DIV><P><SPAN>The Route, Closest Facility, and Service Area layer for which to save the traversal result with added transit information. If the network analysis layer does not have a valid result, the layer will be solved to produce one. The input network analysis layer must have a time of day set and must use a travel mode whose impedance attribute uses the Public Transit evaluator. Layers using a service URL as their network data source are not supported.</SPAN></P></DIV></DIV></DIV><DIV STYLE="text-align:Left;"><DIV><P><SPAN>The workspace where the output table and two feature classes will be saved.</SPAN></P></DIV></DIV><DIV STYLE="text-align:Left;"><DIV><P><SPAN>The name of the feature class that will contain information about the traversed edge source features, including the added public transit information.</SPAN></P></DIV></DIV><DIV STYLE="text-align:Left;"><P><SPAN>The name of the feature class that will contain information about the traversed junction source features, including system junctions and relevant points from the input network analysis layer.</SPAN></P></DIV><DIV STYLE="text-align:Left;"><DIV><P><SPAN>The name of the table that will contain information about the traversed global turns and turn features that scale cost for the underlying edges.</SPAN></P></DIV></DIV>Copy Traversed Source Features With Transitcopy traversed source featuresnetwork analystgtfspublic transitArcToolbox Tool20220616
3 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/test_PrepareTimeLapsePolygons_tool.py:
--------------------------------------------------------------------------------
1 | """Unit tests for the Prepare Time Lapse Polygons script tool.
2 |
3 | Copyright 2023 Esri
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 | http://www.apache.org/licenses/LICENSE-2.0
8 | Unless required by applicable law or agreed to in writing, software
9 | distributed under the License is distributed on an "AS IS" BASIS,
10 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 | See the License for the specific language governing permissions and
12 | limitations under the License.
13 | """
14 | # pylint: disable=import-error, invalid-name
15 |
16 | import os
17 | import datetime
18 | import unittest
19 | import arcpy
20 | import input_data_helper
21 |
22 | CWD = os.path.dirname(os.path.abspath(__file__))
23 |
24 |
25 | class TestPrepareTimeLapsePolygonsTool(unittest.TestCase):
26 | """Test cases for the Prepare Time Lapse Polygons script tool."""
27 |
28 | @classmethod
29 | def setUpClass(self): # pylint: disable=bad-classmethod-argument
30 | self.maxDiff = None
31 |
32 | tbx_path = os.path.join(os.path.dirname(CWD), "Transit Network Analysis Tools.pyt")
33 | arcpy.ImportToolbox(tbx_path)
34 |
35 | self.input_data_folder = os.path.join(CWD, "TestInput")
36 | input_data_helper.make_feature_classes_from_json(self.input_data_folder)
37 | in_gdb = os.path.join(self.input_data_folder, "CincinnatiTransitNetwork.gdb")
38 | self.facilities = os.path.join(in_gdb, "TestOrigins_Subset")
39 | self.num_facilities = int(arcpy.management.GetCount(self.facilities).getOutput(0))
40 | self.local_nd = os.path.join(in_gdb, "TransitNetwork", "TransitNetwork_ND")
41 |
42 | # Create a unique output directory and gdb for this test
43 | self.scratch_folder = os.path.join(
44 | CWD, "TestOutput",
45 | "Output_PTLP_Tool_" + datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
46 | os.makedirs(self.scratch_folder)
47 | self.output_gdb = os.path.join(self.scratch_folder, "outputs.gdb")
48 | arcpy.management.CreateFileGDB(os.path.dirname(self.output_gdb), os.path.basename(self.output_gdb))
49 |
50 | def test_tool(self):
51 | """Test the tool."""
52 | out_fc = os.path.join(self.output_gdb, "TimeLapsePolys")
53 | # Use a custom travel mode object
54 | tm = arcpy.nax.TravelMode(arcpy.nax.GetTravelModes(self.local_nd)["Public transit time"])
55 | attr_params = tm.attributeParameters
56 | attr_params[('PublicTransitTime', 'Exclude lines')] = "1"
57 | tm.attributeParameters = attr_params
58 | # Run the tool
59 | arcpy.TransitNetworkAnalysisTools.PrepareTimeLapsePolygons( # pylint: disable=no-member
60 | self.facilities,
61 | out_fc,
62 | self.local_nd,
63 | tm,
64 | [30, 45],
65 | "Minutes",
66 | "Wednesday",
67 | "08:00",
68 | "Wednesday",
69 | "08:02",
70 | 1,
71 | "Away From Facilities",
72 | "Rings",
73 | "Overlap",
74 | 4, # Parallel processes
75 | None, # Barriers
76 | True # Precalculate network locations
77 | )
78 | self.assertTrue(arcpy.Exists(out_fc))
79 | # 4 facilities, 2 cutoffs, 3 time slices = 24 total output polygons
80 | expected_num_polygons = 24
81 | self.assertEqual(expected_num_polygons, int(arcpy.management.GetCount(out_fc).getOutput(0)))
82 |
83 |
84 | if __name__ == '__main__':
85 | unittest.main()
86 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/TimeLapsePolys_1Fac_1Cutoff.json:
--------------------------------------------------------------------------------
1 | {
2 | "displayFieldName" : "",
3 | "fieldAliases" : {
4 | "OBJECTID" : "OBJECTID",
5 | "Shape_Length" : "Shape_Length",
6 | "Shape_Area" : "Shape_Area",
7 | "Name" : "Name",
8 | "FromBreak" : "FromBreak",
9 | "ToBreak" : "ToBreak",
10 | "FacilityOID" : "FacilityOID",
11 | "FacilityID" : "FacilityID",
12 | "Name_1" : "Facility: Name",
13 | "TimeOfDay" : "Facility: TimeOfDay",
14 | "Breaks" : "Facility: Breaks",
15 | "AdditionalTime" : "Facility: AdditionalTime",
16 | "AdditionalDistance" : "Facility: AdditionalDistance"
17 | },
18 | "geometryType" : "esriGeometryPolygon",
19 | "spatialReference" : {
20 | "wkid" : 4326,
21 | "latestWkid" : 4326
22 | },
23 | "fields" : [
24 | {
25 | "name" : "OBJECTID",
26 | "type" : "esriFieldTypeOID",
27 | "alias" : "OBJECTID"
28 | },
29 | {
30 | "name" : "Shape_Length",
31 | "type" : "esriFieldTypeDouble",
32 | "alias" : "Shape_Length"
33 | },
34 | {
35 | "name" : "Shape_Area",
36 | "type" : "esriFieldTypeDouble",
37 | "alias" : "Shape_Area"
38 | },
39 | {
40 | "name" : "Name",
41 | "type" : "esriFieldTypeString",
42 | "alias" : "Name",
43 | "length" : 1024
44 | },
45 | {
46 | "name" : "FromBreak",
47 | "type" : "esriFieldTypeDouble",
48 | "alias" : "FromBreak"
49 | },
50 | {
51 | "name" : "ToBreak",
52 | "type" : "esriFieldTypeDouble",
53 | "alias" : "ToBreak"
54 | },
55 | {
56 | "name" : "FacilityOID",
57 | "type" : "esriFieldTypeInteger",
58 | "alias" : "FacilityOID"
59 | },
60 | {
61 | "name" : "FacilityID",
62 | "type" : "esriFieldTypeInteger",
63 | "alias" : "FacilityID"
64 | },
65 | {
66 | "name" : "Name_1",
67 | "type" : "esriFieldTypeString",
68 | "alias" : "Facility: Name",
69 | "length" : 500
70 | },
71 | {
72 | "name" : "TimeOfDay",
73 | "type" : "esriFieldTypeDate",
74 | "alias" : "Facility: TimeOfDay",
75 | "length" : 8
76 | },
77 | {
78 | "name" : "Breaks",
79 | "type" : "esriFieldTypeString",
80 | "alias" : "Facility: Breaks",
81 | "length" : 128
82 | },
83 | {
84 | "name" : "AdditionalTime",
85 | "type" : "esriFieldTypeDouble",
86 | "alias" : "Facility: AdditionalTime"
87 | },
88 | {
89 | "name" : "AdditionalDistance",
90 | "type" : "esriFieldTypeDouble",
91 | "alias" : "Facility: AdditionalDistance"
92 | }
93 | ],
94 | "features" : [{"attributes":{"OBJECTID":1,"Shape_Length":0.03085285399993154,"Shape_Area":5.9017839146145041e-05,"Name":"Location 1 : 0 - 30","FromBreak":0,"ToBreak":30,"FacilityOID":1,"FacilityID":null,"Name_1":"Location 1","TimeOfDay":-2208790800000,"Breaks":null,"AdditionalTime":0,"AdditionalDistance":0},"geometry":{"rings":[[[-84.525533020999944,39.112389088000043],[-84.517130007999981,39.112389088000043],[-84.517130007999981,39.105365674000041],[-84.525533020999944,39.105365674000041],[-84.525533020999944,39.112389088000043]]]}},{"attributes":{"OBJECTID":2,"Shape_Length":0.029975892695996646,"Shape_Area":5.5722469470037314e-05,"Name":"Location 1 : 0 - 30","FromBreak":0,"ToBreak":30,"FacilityOID":1,"FacilityID":null,"Name_1":"Location 1","TimeOfDay":-2208790740000,"Breaks":null,"AdditionalTime":0,"AdditionalDistance":0},"geometry":{"rings":[[[-84.520641714999954,39.114395778000073],[-84.512489537999954,39.114395778000073],[-84.512489537999954,39.107497782000053],[-84.520641714999954,39.107623200000035],[-84.520641714999954,39.114395778000073]]]}},{"attributes":{"OBJECTID":3,"Shape_Length":0.022200397360001518,"Shape_Area":3.0790888125629539e-05,"Name":"Location 1 : 0 - 30","FromBreak":0,"ToBreak":30,"FacilityOID":1,"FacilityID":null,"Name_1":"Location 1","TimeOfDay":-2208790680000,"Breaks":null,"AdditionalTime":0,"AdditionalDistance":0},"geometry":{"rings":[[[-84.519638369999939,39.10398607500008],[-84.519638369999939,39.109504471000037],[-84.513994554999954,39.109379053000055],[-84.513994554999954,39.10398607500008],[-84.519638369999939,39.10398607500008]]]}},{"attributes":{"OBJECTID":4,"Shape_Length":0.021722415999988698,"Shape_Area":1.9327299660369377e-05,"Name":"Location 1 : 0 - 30","FromBreak":0,"ToBreak":30,"FacilityOID":1,"FacilityID":null,"Name_1":"Location 1","TimeOfDay":-2208790620000,"Breaks":null,"AdditionalTime":0,"AdditionalDistance":0},"geometry":{"rings":[[[-84.518399238999962,39.116241932000037],[-84.518399238999962,39.107623200000035],[-84.520641714999954,39.107623200000035],[-84.520641714999954,39.116241932000037],[-84.518399238999962,39.116241932000037]]]}}]
95 | }
--------------------------------------------------------------------------------
/transit-network-analysis-tools/Transit Network Analysis Tools.CreatePercentAccessPolygons.pyt.xml:
--------------------------------------------------------------------------------
1 |
2 | 20190517164353001.0TRUE20220503133128001500000005000ItemDescriptionc:\program files\arcgis\pro\Resources\Help\gp<DIV STYLE="text-align:Left;"><DIV><P><SPAN>A polygon feature class created using the </SPAN><SPAN STYLE="font-style:italic;">Prepare Time Lapse Polygons</SPAN><SPAN> tool that you wish to summarize.</SPAN></P></DIV></DIV><DIV STYLE="text-align:Left;"><DIV><P><SPAN>The main output feature class of the tool. This output is a raw raster-like polygon feature class showing the number and percentage of time each area covered by your time lapse polygons was reached, intended primarily for visualization. The individual polygons are dissolved so that all areas reached the same number of times for a unique combination of FacilityID, FromBreak, and ToBreak are combined into one multipart polygon. The output feature class must be in a geodatabase; it cannot be a shapefile.</SPAN></P></DIV></DIV><DIV STYLE="text-align:Left;"><DIV><P><SPAN>This tool rasterizes the input polygons, essentially turning the study area into little squares. Choose a size for these squares. The cell size refers to the width or length of the cell, not the area. Your cell size choice should relate to how pedestrians travel in the real world. You could base your cell size choice on the size of your city blocks or parcels or the distance a pedestrian can walk in less than a minute. Smaller cells are more accurate but take longer to process, and really tiny cells are probably not analytically meaningful.</SPAN></P></DIV></DIV><DIV STYLE="text-align:Left;"><P><SPAN>For maximum efficiency, this tool performs calculations in parallel across multiple cores of your machine. This parameter designates the number of parallel processes that can safely be used. You should select a number less than or equal to the number of virtual cores or processors your computer has. Note that if you are only processing a single input facility/from break/to break combination, there is nothing to parallelize, so you will get no advantage from increasing the number of processes.</SPAN></P></DIV><DIV STYLE="text-align:Left;"><DIV><P><SPAN>This is an optional output you can choose to produce that further summarizes the output percent access polygons feature class. If you specify one or more percentage thresholds, this output contains polygons showing the area reached at least as often as your designated percentage thresholds. There will be a separate feature for each percentage threshold for each unique combination of FacilityID, FromBreak, and ToBreak in the input data.</SPAN></P></DIV></DIV><DIV STYLE="text-align:Left;"><DIV><P><SPAN>You can choose to summarize the tool's raw output for different percentage thresholds. For example, you can find out what area can be reached at least 75% of start times by setting 75 as one of your percentage thresholds. More explanation of tool outputs is given below.</SPAN></P></DIV></DIV>Create Percent Access PolygonsArcToolbox Tool20220503
3 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/TestInput/TimeLapsePolys_2Fac_2Cutoffs.json:
--------------------------------------------------------------------------------
1 | {
2 | "displayFieldName" : "",
3 | "fieldAliases" : {
4 | "OBJECTID" : "OBJECTID",
5 | "Name" : "Name",
6 | "FromBreak" : "FromBreak",
7 | "ToBreak" : "ToBreak",
8 | "FacilityOID" : "FacilityOID",
9 | "FacilityID" : "FacilityID",
10 | "Name_1" : "Facility: Name",
11 | "TimeOfDay" : "Facility: TimeOfDay",
12 | "Breaks" : "Facility: Breaks",
13 | "AdditionalTime" : "Facility: AdditionalTime",
14 | "AdditionalDistance" : "Facility: AdditionalDistance",
15 | "Shape_Length" : "Shape_Length",
16 | "Shape_Area" : "Shape_Area"
17 | },
18 | "geometryType" : "esriGeometryPolygon",
19 | "spatialReference" : {
20 | "wkid" : 4326,
21 | "latestWkid" : 4326
22 | },
23 | "fields" : [
24 | {
25 | "name" : "OBJECTID",
26 | "type" : "esriFieldTypeOID",
27 | "alias" : "OBJECTID"
28 | },
29 | {
30 | "name" : "Name",
31 | "type" : "esriFieldTypeString",
32 | "alias" : "Name",
33 | "length" : 1024
34 | },
35 | {
36 | "name" : "FromBreak",
37 | "type" : "esriFieldTypeDouble",
38 | "alias" : "FromBreak"
39 | },
40 | {
41 | "name" : "ToBreak",
42 | "type" : "esriFieldTypeDouble",
43 | "alias" : "ToBreak"
44 | },
45 | {
46 | "name" : "FacilityOID",
47 | "type" : "esriFieldTypeInteger",
48 | "alias" : "FacilityOID"
49 | },
50 | {
51 | "name" : "FacilityID",
52 | "type" : "esriFieldTypeInteger",
53 | "alias" : "FacilityID"
54 | },
55 | {
56 | "name" : "Name_1",
57 | "type" : "esriFieldTypeString",
58 | "alias" : "Facility: Name",
59 | "length" : 500
60 | },
61 | {
62 | "name" : "TimeOfDay",
63 | "type" : "esriFieldTypeDate",
64 | "alias" : "Facility: TimeOfDay",
65 | "length" : 8
66 | },
67 | {
68 | "name" : "Breaks",
69 | "type" : "esriFieldTypeString",
70 | "alias" : "Facility: Breaks",
71 | "length" : 128
72 | },
73 | {
74 | "name" : "AdditionalTime",
75 | "type" : "esriFieldTypeDouble",
76 | "alias" : "Facility: AdditionalTime"
77 | },
78 | {
79 | "name" : "AdditionalDistance",
80 | "type" : "esriFieldTypeDouble",
81 | "alias" : "Facility: AdditionalDistance"
82 | },
83 | {
84 | "name" : "Shape_Length",
85 | "type" : "esriFieldTypeDouble",
86 | "alias" : "Shape_Length"
87 | },
88 | {
89 | "name" : "Shape_Area",
90 | "type" : "esriFieldTypeDouble",
91 | "alias" : "Shape_Area"
92 | }
93 | ],
94 | "features" : [{"attributes":{"OBJECTID":1,"Name":"Location 1 : 0 - 30","FromBreak":0,"ToBreak":30,"FacilityOID":1,"FacilityID":1,"Name_1":"Location 1","TimeOfDay":-2208790800000,"Breaks":null,"AdditionalTime":0,"AdditionalDistance":0,"Shape_Length":0.03085285399993154,"Shape_Area":5.9017839146145041e-05},"geometry":{"rings":[[[-84.525533020999944,39.112389088000043],[-84.517130007999981,39.112389088000043],[-84.517130007999981,39.105365674000041],[-84.525533020999944,39.105365674000041],[-84.525533020999944,39.112389088000043]]]}},{"attributes":{"OBJECTID":2,"Name":"Location 1 : 0 - 30","FromBreak":0,"ToBreak":30,"FacilityOID":1,"FacilityID":1,"Name_1":"Location 1","TimeOfDay":-2208790740000,"Breaks":null,"AdditionalTime":0,"AdditionalDistance":0,"Shape_Length":0.029975892695996646,"Shape_Area":5.5722469470037314e-05},"geometry":{"rings":[[[-84.520641714999954,39.114395778000073],[-84.512489537999954,39.114395778000073],[-84.512489537999954,39.107497782000053],[-84.520641714999954,39.107623200000035],[-84.520641714999954,39.114395778000073]]]}},{"attributes":{"OBJECTID":3,"Name":"Location 1 : 0 - 40","FromBreak":0,"ToBreak":40,"FacilityOID":1,"FacilityID":1,"Name_1":"Location 1","TimeOfDay":-2208790800000,"Breaks":null,"AdditionalTime":0,"AdditionalDistance":0,"Shape_Length":0.055545385411734295,"Shape_Area":0.00019168729357845355},"geometry":{"rings":[[[-84.528949409999939,39.104066343000056],[-84.528949409999939,39.116843939000034],[-84.513994554999954,39.116843939000034],[-84.513994554999954,39.10398607500008],[-84.528949409999939,39.104066343000056]]]}},{"attributes":{"OBJECTID":4,"Name":"Location 1 : 0 - 40","FromBreak":0,"ToBreak":40,"FacilityOID":1,"FacilityID":1,"Name_1":"Location 1","TimeOfDay":-2208790740000,"Breaks":null,"AdditionalTime":0,"AdditionalDistance":0,"Shape_Length":0.043234130000087134,"Shape_Area":0.00011648279475486691},"geometry":{"rings":[[[-84.518399238999962,39.11784728400005],[-84.518399238999962,39.107623200000035],[-84.52979221999999,39.107623200000035],[-84.52979221999999,39.11784728400005],[-84.518399238999962,39.11784728400005]]]}},{"attributes":{"OBJECTID":9,"Name":"Location 2 : 0 - 30","FromBreak":0,"ToBreak":30,"FacilityOID":2,"FacilityID":2,"Name_1":"Location 2","TimeOfDay":-2208790800000,"Breaks":null,"AdditionalTime":0,"AdditionalDistance":0,"Shape_Length":0.03085285399993154,"Shape_Area":5.9017839146145041e-05},"geometry":{"rings":[[[-84.511726995999936,39.117767016000073],[-84.503323982999973,39.117767016000073],[-84.503323982999973,39.11074360200007],[-84.511726995999936,39.11074360200007],[-84.511726995999936,39.117767016000073]]]}},{"attributes":{"OBJECTID":10,"Name":"Location 2 : 0 - 30","FromBreak":0,"ToBreak":30,"FacilityOID":2,"FacilityID":2,"Name_1":"Location 2","TimeOfDay":-2208790740000,"Breaks":null,"AdditionalTime":0,"AdditionalDistance":0,"Shape_Length":0.029975892695940677,"Shape_Area":5.5722469469805614e-05},"geometry":{"rings":[[[-84.506835689999946,39.119773706000046],[-84.498683512999946,39.119773706000046],[-84.498683512999946,39.112875710000026],[-84.506835689999946,39.113001128000064],[-84.506835689999946,39.119773706000046]]]}},{"attributes":{"OBJECTID":11,"Name":"Location 2 : 0 - 40","FromBreak":0,"ToBreak":40,"FacilityOID":2,"FacilityID":2,"Name_1":"Location 
2","TimeOfDay":-2208790800000,"Breaks":null,"AdditionalTime":0,"AdditionalDistance":0,"Shape_Length":0.055545385411961669,"Shape_Area":0.00019168729358003223},"geometry":{"rings":[[[-84.515143384999988,39.109444271000029],[-84.515143384999988,39.122221867000064],[-84.500188529999946,39.122221867000064],[-84.500188529999946,39.109364003000053],[-84.515143384999988,39.109444271000029]]]}},{"attributes":{"OBJECTID":12,"Name":"Location 2 : 0 - 40","FromBreak":0,"ToBreak":40,"FacilityOID":2,"FacilityID":2,"Name_1":"Location 2","TimeOfDay":-2208790740000,"Breaks":null,"AdditionalTime":0,"AdditionalDistance":0,"Shape_Length":0.043234130000087134,"Shape_Area":0.00011648279475486691},"geometry":{"rings":[[[-84.504593213999954,39.123225212000079],[-84.504593213999954,39.113001128000064],[-84.515986194999982,39.113001128000064],[-84.515986194999982,39.123225212000079],[-84.504593213999954,39.123225212000079]]]}}]
95 | }
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/test_CreateTimeLapsePolygonsInParallel.py:
--------------------------------------------------------------------------------
1 | """Unit tests for the CreateTimeLapsePolygonsInParallel.py module.
2 |
3 | Copyright 2023 Esri
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 | http://www.apache.org/licenses/LICENSE-2.0
8 | Unless required by applicable law or agreed to in writing, software
9 | distributed under the License is distributed on an "AS IS" BASIS,
10 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 | See the License for the specific language governing permissions and
12 | limitations under the License.
13 | """
14 | # pylint: disable=import-error, protected-access, invalid-name
15 |
16 | import sys
17 | import os
18 | import datetime
19 | import unittest
20 | from copy import deepcopy
21 | import arcpy
22 |
23 | CWD = os.path.dirname(os.path.abspath(__file__))
24 | sys.path.append(os.path.dirname(CWD))
25 | import CreateTimeLapsePolygonsInParallel # noqa: E402, pylint: disable=wrong-import-position
26 | from AnalysisHelpers import MAX_ALLOWED_MAX_PROCESSES, arcgis_version # noqa: E402, pylint: disable=wrong-import-position
27 | import input_data_helper # noqa: E402, pylint: disable=wrong-import-position
28 |
29 |
30 | class TestCreateTimeLapsePolygonsInParallel(unittest.TestCase):
31 | """Test cases for the CreateTimeLapsePolygonsInParallel module."""
32 |
33 | @classmethod
34 | def setUpClass(self): # pylint: disable=bad-classmethod-argument
35 | self.maxDiff = None
36 |
37 | self.input_data_folder = os.path.join(CWD, "TestInput")
38 | input_data_helper.make_feature_classes_from_json(self.input_data_folder)
39 | self.in_gdb = os.path.join(self.input_data_folder, "CincinnatiTransitNetwork.gdb")
40 | self.facilities = os.path.join(self.in_gdb, "TestOrigins_Subset")
41 | self.num_facilities = int(arcpy.management.GetCount(self.facilities).getOutput(0))
42 | self.local_nd = os.path.join(self.in_gdb, "TransitNetwork", "TransitNetwork_ND")
43 | self.local_tm_time = "Public transit time"
44 |
45 | # Create a unique output directory and gdb for this test
46 | self.scratch_folder = os.path.join(
47 | CWD, "TestOutput", "Output_ParallelSA_" + datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
48 | os.makedirs(self.scratch_folder)
49 | self.output_gdb = os.path.join(self.scratch_folder, "outputs.gdb")
50 | arcpy.management.CreateFileGDB(os.path.dirname(self.output_gdb), os.path.basename(self.output_gdb))
51 |
52 | self.sa_args = {
53 | "facilities": self.facilities,
54 | "cutoffs": [30, 45],
55 | "time_units": "Minutes",
56 | "output_polygons": os.path.join(self.output_gdb, "TestPolys"),
57 | "time_window_start_day": "Wednesday",
58 | "time_window_start_time": "08:00",
59 | "time_window_end_day": "Wednesday",
60 | "time_window_end_time": "08:02",
61 | "time_increment": 1,
62 | "network_data_source": self.local_nd,
63 | "travel_mode": self.local_tm_time,
64 | "travel_direction": "Toward Facilities",
65 | "geometry_at_cutoff": "Rings",
66 | "geometry_at_overlap": "Overlap",
67 | "max_processes": 4,
68 | "precalculate_network_locations": True,
69 | "barriers": None
70 | }
71 |
72 | def test_validate_inputs(self):
73 | """Test the validate_inputs function."""
74 | does_not_exist = os.path.join(self.in_gdb, "DoesNotExist")
75 | invalid_inputs = [
76 | ("max_processes", 0, ValueError, "Maximum allowed parallel processes must be greater than 0."),
77 | ("max_processes", 5000, ValueError, (
78 | f"The maximum allowed parallel processes cannot exceed {MAX_ALLOWED_MAX_PROCESSES:} due "
79 | "to limitations imposed by Python's concurrent.futures module."
80 | )),
81 | ("cutoffs", [30, -45], ValueError, "Impedance cutoff must be greater than 0."),
82 | ("time_increment", 0, ValueError, "The time increment must be greater than 0."),
83 | ("time_units", "BadUnits", ValueError, "Invalid time units: BadUnits"),
84 | ("travel_direction", "BadValue", ValueError, "Invalid travel direction: BadValue"),
85 | ("geometry_at_cutoff", "BadValue", ValueError, "Invalid geometry at cutoff: BadValue"),
86 | ("geometry_at_overlap", "BadValue", ValueError, "Invalid geometry at overlap: BadValue"),
87 | ("facilities", does_not_exist, ValueError, f"Input dataset {does_not_exist} does not exist."),
88 | ("barriers", [does_not_exist], ValueError, f"Input dataset {does_not_exist} does not exist."),
89 | ("network_data_source", does_not_exist, ValueError,
90 | f"Input network dataset {does_not_exist} does not exist."),
91 | ("travel_mode", "BadTM", ValueError if arcgis_version >= "3.1" else RuntimeError, ""),
92 | ]
93 | for invalid_input in invalid_inputs:
94 | property_name, value, error_type, expected_message = invalid_input
95 | with self.subTest(
96 | property_name=property_name, value=value, error_type=error_type, expected_message=expected_message
97 | ):
98 | inputs = deepcopy(self.sa_args)
99 | inputs[property_name] = value
100 | sa_solver = CreateTimeLapsePolygonsInParallel.ServiceAreaSolver(**inputs)
101 | with self.assertRaises(error_type) as ex:
102 | sa_solver._validate_inputs()
103 | if expected_message:
104 | self.assertEqual(expected_message, str(ex.exception))
105 |
106 | def test_solve_service_areas_in_parallel(self):
107 | """Test the full solve Service Area workflow."""
108 | out_fc = os.path.join(self.output_gdb, "TestSolve")
109 | sa_inputs = deepcopy(self.sa_args)
110 | sa_inputs["output_polygons"] = out_fc
111 | sa_solver = CreateTimeLapsePolygonsInParallel.ServiceAreaSolver(**sa_inputs)
112 | sa_solver.solve_service_areas_in_parallel()
113 | self.assertTrue(arcpy.Exists(out_fc))
114 | # 4 facilities, 2 cutoffs, 3 time slices = 24 total output polygons
115 | expected_num_polygons = 24
116 | self.assertEqual(expected_num_polygons, int(arcpy.management.GetCount(out_fc).getOutput(0)))
117 |
118 |
119 | if __name__ == '__main__':
120 | unittest.main()
121 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/test_ReplaceRouteGeometryWithLVEShapes_tool.py:
--------------------------------------------------------------------------------
1 | """Unit tests for the Replace Route Geometry With LVEShapes script tool.
2 |
3 | Copyright 2024 Esri
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 | http://www.apache.org/licenses/LICENSE-2.0
8 | Unless required by applicable law or agreed to in writing, software
9 | distributed under the License is distributed on an "AS IS" BASIS,
10 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 | See the License for the specific language governing permissions and
12 | limitations under the License.
13 | """
14 | # pylint: disable=import-error, invalid-name
15 |
16 | import os
17 | import datetime
18 | import unittest
19 | import random
20 | import arcpy
21 | import input_data_helper
22 |
23 | CWD = os.path.dirname(os.path.abspath(__file__))
24 |
25 |
26 | class TestReplaceRouteGeometryWithLVEShapesTool(unittest.TestCase):
27 | """Test cases for the Replace Route Geometry With LVEShapes script tool."""
28 |
29 | @classmethod
30 | def setUpClass(self): # pylint: disable=bad-classmethod-argument
31 | self.maxDiff = None
32 | arcpy.CheckOutExtension("network")
33 |
34 | tbx_path = os.path.join(os.path.dirname(CWD), "Transit Network Analysis Tools.pyt")
35 | arcpy.ImportToolbox(tbx_path)
36 |
37 | self.input_data_folder = os.path.join(CWD, "TestInput")
38 | input_data_helper.extract_toy_network(self.input_data_folder)
39 | self.toy_gdb = os.path.join(self.input_data_folder, "TransitToyNetwork.gdb")
40 | self.toy_nd = os.path.join(self.toy_gdb, "TransitNetwork", "Transit_Network_ND")
41 | self.toy_tm_transit = arcpy.nax.GetTravelModes(self.toy_nd)["Transit"]
42 | self.test_points_1 = os.path.join(self.toy_gdb, "TestPoints1")
43 | self.test_points_2 = os.path.join(self.toy_gdb, "TestPoints2")
44 |
45 | # Create a unique output directory and gdb for this test
46 | self.scratch_folder = os.path.join(
47 | CWD, "TestOutput",
48 | "Output_RRGWL_Tool_" + datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
49 | os.makedirs(self.scratch_folder)
50 | self.output_gdb = os.path.join(self.scratch_folder, "outputs.gdb")
51 | arcpy.management.CreateFileGDB(os.path.dirname(self.output_gdb), os.path.basename(self.output_gdb))
52 | arcpy.env.workspace = self.output_gdb
53 |
54 | def test_cf_layer(self):
55 | """Test the tool with a closest facility layer."""
56 | # Create and solve a closest facility layer
57 | layer_name = "CF"
58 | lyr = arcpy.na.MakeClosestFacilityAnalysisLayer(
59 | self.toy_nd, layer_name, self.toy_tm_transit,
60 | number_of_facilities_to_find=1,
61 | time_of_day=datetime.datetime(1900, 1, 3, 7, 56, 0),
62 | time_of_day_usage="START_TIME"
63 | ).getOutput(0)
64 | arcpy.na.AddLocations(lyr, "Incidents", self.test_points_1)
65 | arcpy.na.AddLocations(lyr, "Facilities", self.test_points_2)
66 | arcpy.na.Solve(lyr)
67 | # Check initial stats for route shapes before updating
68 | rt_sublayer = arcpy.na.GetNASublayer(lyr, "CFRoutes")
69 | orig_num_routes = int(arcpy.management.GetCount(rt_sublayer).getOutput(0))
70 | rt_pt_counts = {}
71 | for row in arcpy.da.SearchCursor(rt_sublayer, ["OID@", "SHAPE@"]):
72 | rt_pt_counts[row[0]] = row[1].pointCount
73 | # Run the tool
74 | out_lyr = arcpy.TransitNetworkAnalysisTools.ReplaceRouteGeometryWithLVEShapes( # pylint: disable=no-member
75 | lyr).getOutput(0)
76 | out_lyr.saveACopy(os.path.join(self.scratch_folder, layer_name + ".lyrx"))
77 | # Check stats for updated route shapes
78 | rt_sublayer = arcpy.na.GetNASublayer(out_lyr, "CFRoutes")
79 | updated_num_routes = int(arcpy.management.GetCount(rt_sublayer).getOutput(0))
80 | self.assertEqual(orig_num_routes, updated_num_routes, "Route count is different.")
81 | for row in arcpy.da.SearchCursor(rt_sublayer, ["OID@", "SHAPE@"]):
82 | shape = row[1]
83 | self.assertIsNotNone(shape, "Route shape is null.")
84 | self.assertGreater(shape.length, 0, "Route shape length is 0.")
85 | self.assertGreater(
86 | shape.pointCount, rt_pt_counts[row[0]],
87 | "pointCount of shape did not increase after geometry was swapped."
88 | )
89 |
90 | def test_rt_layer(self):
91 | """Test the tool with a route layer."""
92 | # Create and solve a route layer
93 | layer_name = "Route"
94 | lyr = arcpy.na.MakeRouteAnalysisLayer(
95 | self.toy_nd, layer_name, self.toy_tm_transit,
96 | time_of_day=datetime.datetime(1900, 1, 3, 7, 56, 0)
97 | ).getOutput(0)
98 | arcpy.na.AddLocations(lyr, "Stops", self.test_points_1)
99 | arcpy.na.AddLocations(lyr, "Stops", self.test_points_2)
100 | arcpy.na.Solve(lyr)
101 | # Check initial stats for route shapes before updating
102 | rt_sublayer = arcpy.na.GetNASublayer(lyr, "Routes")
103 | orig_num_routes = int(arcpy.management.GetCount(rt_sublayer).getOutput(0))
104 | rt_pt_counts = {}
105 | for row in arcpy.da.SearchCursor(rt_sublayer, ["OID@", "SHAPE@"]):
106 | rt_pt_counts[row[0]] = row[1].pointCount
107 | # Run the tool
108 | out_lyr = arcpy.TransitNetworkAnalysisTools.ReplaceRouteGeometryWithLVEShapes( # pylint: disable=no-member
109 | lyr).getOutput(0)
110 | out_lyr.saveACopy(os.path.join(self.scratch_folder, layer_name + ".lyrx"))
111 | # Check stats for updated route shapes
112 | rt_sublayer = arcpy.na.GetNASublayer(out_lyr, "Routes")
113 | updated_num_routes = int(arcpy.management.GetCount(rt_sublayer).getOutput(0))
114 | self.assertEqual(orig_num_routes, updated_num_routes, "Route count is different.")
115 | for row in arcpy.da.SearchCursor(rt_sublayer, ["OID@", "SHAPE@"]):
116 | shape = row[1]
117 | self.assertIsNotNone(shape, "Route shape is null.")
118 | self.assertGreater(shape.length, 0, "Route shape length is 0.")
119 | self.assertGreater(
120 | shape.pointCount, rt_pt_counts[row[0]],
121 | "pointCount of shape did not increase after geometry was swapped."
122 | )
123 |
124 | def test_wrong_solver(self):
125 | """Check for correct error when an incorrect solver type is used."""
126 | # Create a layer of one of the unsupported types
127 | # Don't attempt to test VRP because the test network doesn't even support VRP.
128 | layer_name = "WrongType"
129 | solver_tool = random.choice([
130 | arcpy.na.MakeODCostMatrixAnalysisLayer,
131 | arcpy.na.MakeLocationAllocationAnalysisLayer,
132 | arcpy.na.MakeServiceAreaAnalysisLayer
133 | ])
134 | lyr = solver_tool(self.toy_nd, layer_name, self.toy_tm_transit)
135 | # Run the tool
136 | with self.assertRaises(arcpy.ExecuteError):
137 | arcpy.TransitNetworkAnalysisTools.ReplaceRouteGeometryWithLVEShapes( # pylint: disable=no-member
138 | lyr)
139 | expected_message = "The Input Network Analysis Layer must be a Route or Closest Facility layer."
140 | actual_messages = arcpy.GetMessages(2)
141 | self.assertIn(expected_message, actual_messages)
142 |
143 |
144 | if __name__ == '__main__':
145 | unittest.main()
146 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/test_CalculateTravelTimeStatisticsOD_tool.py:
--------------------------------------------------------------------------------
1 | """Unit tests for the Calculate Travel Time Statistics (OD Cost Matrix) script tool.
2 |
3 | Copyright 2023 Esri
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 | http://www.apache.org/licenses/LICENSE-2.0
8 | Unless required by applicable law or agreed to in writing, software
9 | distributed under the License is distributed on an "AS IS" BASIS,
10 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 | See the License for the specific language governing permissions and
12 | limitations under the License.
13 | """
14 | # pylint: disable=import-error, invalid-name
15 |
16 | import os
17 | import datetime
18 | import unittest
19 | from glob import glob
20 | import pandas as pd
21 | import arcpy
22 | import input_data_helper
23 |
24 | CWD = os.path.dirname(os.path.abspath(__file__))
25 |
26 |
27 | class TestCalculateTravelTimeStatisticsODTool(unittest.TestCase):
28 | """Test cases for the Calculate Travel Time Statistics (OD Cost Matrix) script tool."""
29 |
30 | @classmethod
31 | def setUpClass(self): # pylint: disable=bad-classmethod-argument
32 | self.maxDiff = None
33 |
34 | tbx_path = os.path.join(os.path.dirname(CWD), "Transit Network Analysis Tools.pyt")
35 | arcpy.ImportToolbox(tbx_path)
36 |
37 | self.input_data_folder = os.path.join(CWD, "TestInput")
38 | input_data_helper.make_feature_classes_from_json(self.input_data_folder)
39 | in_gdb = os.path.join(self.input_data_folder, "CincinnatiTransitNetwork.gdb")
40 | self.origins = os.path.join(in_gdb, "TestOrigins")
41 | self.destinations = os.path.join(in_gdb, "TestDestinations")
42 | self.num_origins = int(arcpy.management.GetCount(self.origins).getOutput(0))
43 | self.num_dests = int(arcpy.management.GetCount(self.destinations).getOutput(0))
44 | self.local_nd = os.path.join(in_gdb, "TransitNetwork", "TransitNetwork_ND")
45 | self.local_tm_time = "Public transit time"
46 |
47 | # Create a unique output directory and gdb for this test
48 | self.scratch_folder = os.path.join(
49 | CWD, "TestOutput",
50 | "Output_CTTSOD_Tool_" + datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
51 | os.makedirs(self.scratch_folder)
52 | self.output_gdb = os.path.join(self.scratch_folder, "outputs.gdb")
53 | arcpy.management.CreateFileGDB(os.path.dirname(self.output_gdb), os.path.basename(self.output_gdb))
54 |
55 | def check_tool_output(self, out_csv, out_na_data_folder=None):
56 | """Do some basic checks of the output origins."""
57 | self.assertTrue(os.path.exists(out_csv), "Output CSV file does not exist.")
58 | df = pd.read_csv(out_csv)
59 | self.assertGreater(df.shape[0], 0, "CSV file has no rows.")
60 | expected_ctts_columns = ["OriginOID", "DestinationOID", "count", "min", "max", "mean"]
61 | self.assertEqual(expected_ctts_columns, df.columns.tolist(), "Incorrect columns in CSV")
62 | if out_na_data_folder:
63 | self.assertTrue(os.path.exists(out_na_data_folder), "Output CSV NA data folder does not exist.")
64 | na_files = glob(os.path.join(out_na_data_folder, "ODLines_*.csv"))
65 | self.assertGreater(len(na_files), 0, "Output NA data folder contains no CSV files.")
66 | return df # Return dataframe for further checks
67 |
68 | def test_basic_points(self):
69 | """Test with basic point datasets as input."""
70 | out_csv = os.path.join(self.scratch_folder, "CTTS_Points.csv")
71 | na_results_folder = os.path.join(self.scratch_folder, "CTTS_Points_NA_Results")
72 | arcpy.TransitNetworkAnalysisTools.CalculateTravelTimeStatisticsOD( # pylint: disable=no-member
73 | self.origins,
74 | self.destinations,
75 | out_csv,
76 | self.local_nd,
77 | self.local_tm_time,
78 | "Wednesday",
79 | "08:00",
80 | "Wednesday",
81 | "08:03",
82 | 1,
83 | 10, # Chunk size,
84 | 4, # Parallel processes
85 | True, # Save individual results folder
86 | na_results_folder,
87 | None, # Barriers
88 | True # Precalculate network locations
89 | )
90 | self.check_tool_output(out_csv, na_results_folder)
91 |
92 | def test_same_origins_destinations(self):
93 | """Test when the origins and destinations are the same. No chunking of inputs"""
94 | out_csv = os.path.join(self.scratch_folder, "CTTS_Same.csv")
95 | arcpy.TransitNetworkAnalysisTools.CalculateTravelTimeStatisticsOD( # pylint: disable=no-member
96 | self.origins,
97 | self.origins,
98 | out_csv,
99 | self.local_nd,
100 | self.local_tm_time,
101 | "Wednesday",
102 | "08:00",
103 | "Wednesday",
104 | "08:03",
105 | 1,
106 | 1000, # Chunk size,
107 | 4, # Parallel processes
108 | False, # Save individual results folder
109 | None,
110 | None, # Barriers
111 | True # Precalculate network locations
112 | )
113 | self.check_tool_output(out_csv)
114 |
115 | def test_polygon_inputs(self):
116 | """Test using polygon feature classes as inputs."""
117 | out_csv = os.path.join(self.scratch_folder, "CTTS_Polygons.csv")
118 | arcpy.TransitNetworkAnalysisTools.CalculateTravelTimeStatisticsOD( # pylint: disable=no-member
119 | self.origins + "_Polygons",
120 | self.destinations + "_Polygons",
121 | out_csv,
122 | self.local_nd,
123 | self.local_tm_time,
124 | "Wednesday",
125 | "08:00",
126 | "Wednesday",
127 | "08:03",
128 | 1,
129 | 10, # Chunk size,
130 | 4, # Parallel processes
131 | False, # Save individual results folder
132 | None,
133 | None, # Barriers
134 | True # Precalculate network locations
135 | )
136 | self.check_tool_output(out_csv)
137 |
138 | def test_selection_and_oid_mapping(self):
139 | """Test that the original OIDs are preserved and mapped correctly. Input with selection set."""
140 | origins_lyr_name = "OriginsLayer"
141 | arcpy.management.MakeFeatureLayer(self.origins, origins_lyr_name, "ObjectID > 5")
142 | dests_lyr_name = "DestsLayer"
143 | arcpy.management.MakeFeatureLayer(self.destinations, dests_lyr_name, "ObjectID > 5")
144 | out_csv = os.path.join(self.scratch_folder, "CTTS_OIDs.csv")
145 | arcpy.TransitNetworkAnalysisTools.CalculateTravelTimeStatisticsOD( # pylint: disable=no-member
146 | origins_lyr_name,
147 | dests_lyr_name,
148 | out_csv,
149 | self.local_nd,
150 | self.local_tm_time,
151 | "Wednesday",
152 | "08:00",
153 | "Wednesday",
154 | "08:03",
155 | 1,
156 | 10, # Chunk size,
157 | 4, # Parallel processes
158 | False, # Save individual results folder
159 | None,
160 | None, # Barriers
161 | True # Precalculate network locations
162 | )
163 | df = self.check_tool_output(out_csv)
164 | self.assertFalse((df["OriginOID"] <= 5).any(), f"OriginOID values are incorrect. {df['OriginOID']}")
165 | self.assertFalse(
166 | (df["DestinationOID"] <= 5).any(), f"DestinationOID values are incorrect. {df['DestinationOID']}")
167 |
168 |
169 | if __name__ == '__main__':
170 | unittest.main()
171 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/test_CreatePercentAccessPolygons_tool.py:
--------------------------------------------------------------------------------
1 | """Unit tests for the Create Percent Access Polygons script tool.
2 |
3 | The input data for these tests is made up and intended to be simple to verify.
4 | The tests don't attempt to compare geometry exactly because small changes to ArcGIS Pro could cause those checks to be
5 | flaky over time. They just ensure that the output is generally correct. If you're going to make significant changes to
6 | the logic of this tool, you will likely need to create some more exhaustive tests that compare the actual
7 | output geometry before and after the changes.
8 |
9 | Copyright 2023 Esri
10 | Licensed under the Apache License, Version 2.0 (the "License");
11 | you may not use this file except in compliance with the License.
12 | You may obtain a copy of the License at
13 | http://www.apache.org/licenses/LICENSE-2.0
14 | Unless required by applicable law or agreed to in writing, software
15 | distributed under the License is distributed on an "AS IS" BASIS,
16 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 | See the License for the specific language governing permissions and
18 | limitations under the License.
19 | """
20 | # pylint: disable=import-error, invalid-name
21 |
22 | import os
23 | import datetime
24 | import unittest
25 | import arcpy
26 | import input_data_helper
27 |
28 | CWD = os.path.dirname(os.path.abspath(__file__))
29 |
30 |
31 | class TestCreatePercentAccessPolygonsTool(unittest.TestCase):
32 | """Test cases for the Create Percent Access Polygons script tool."""
33 |
34 | @classmethod
35 | def setUpClass(self): # pylint: disable=bad-classmethod-argument
36 | self.maxDiff = None
37 |
38 | tbx_path = os.path.join(os.path.dirname(CWD), "Transit Network Analysis Tools.pyt")
39 | arcpy.ImportToolbox(tbx_path)
40 |
41 | self.input_data_folder = os.path.join(CWD, "TestInput")
42 | input_data_helper.make_feature_classes_from_json(self.input_data_folder)
43 | self.in_gdb = os.path.join(self.input_data_folder, "CincinnatiTransitNetwork.gdb")
44 |
45 | # Create a unique output directory and gdb for this test
46 | self.scratch_folder = os.path.join(
47 | CWD, "TestOutput",
48 | "Output_CPAP_Tool_" + datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
49 | os.makedirs(self.scratch_folder)
50 | self.output_gdb = os.path.join(self.scratch_folder, "outputs.gdb")
51 | arcpy.management.CreateFileGDB(os.path.dirname(self.output_gdb), os.path.basename(self.output_gdb))
52 |
53 | def check_output(self, out_fc, expected_percents):
54 | """Check the output feature class."""
55 | self.assertTrue(arcpy.Exists(out_fc))
56 | self.assertEqual(len(expected_percents), int(arcpy.management.GetCount(out_fc).getOutput(0)))
57 | self.assertIn("Percent", [f.name for f in arcpy.ListFields(out_fc)])
58 | actual_percents = []
59 | for row in arcpy.da.SearchCursor(out_fc, "Percent"):
60 | actual_percents.append(row[0])
61 | actual_percents = sorted(actual_percents)
62 | self.assertEqual(expected_percents, actual_percents)
63 |
64 | def make_percent_shape_dict(self, out_fc):
65 | """Make a dictionary of {percent: shape geometry} for the output feature class."""
66 | shapes = {}
67 | for row in arcpy.da.SearchCursor(out_fc, ["Percent", "SHAPE@"]):
68 | shapes[row[0]] = row[1]
69 | return shapes
70 |
71 | def test_tool_simple(self):
72 | """Test the tool with a simple input modeling one facility and one cutoff.
73 |
74 | The purpose of this test is to make sure the output has the correct polygon geometry. We're not doing an exact
75 | comparison to avoid test flakiness, but we're testing that the polygons have the correct relationship to each
76 | other. If you're going to substantially alter the tool's internal logic, you would probably want to write some
77 | more comprehensive tests to check the exact output, but this is sufficient as a confidence test for minor
78 | changes.
79 | """
80 | in_fc = os.path.join(self.in_gdb, "TimeLapsePolys_1Fac_1Cutoff")
81 | out_fc = os.path.join(self.output_gdb, "CPAP_1Fac_1Cutoff")
82 | out_fc_th = os.path.join(self.output_gdb, "CPAP_Th_1Fac_1Cutoff")
83 | # Run the tool
84 | arcpy.TransitNetworkAnalysisTools.CreatePercentAccessPolygons( # pylint: disable=no-member
85 | in_fc,
86 | out_fc,
87 | "100 Meters",
88 | 4, # Parallel processes
89 | out_fc_th,
90 | [50, 75]
91 | )
92 |
93 | # Do some basic checks of outputs
94 | all_percents = [25, 50, 75, 100]
95 | self.check_output(out_fc, all_percents)
96 | self.check_output(out_fc_th, [50, 75])
97 |
98 | # Check the relationships of the shapes in the output
99 | out_shapes = self.make_percent_shape_dict(out_fc)
100 | th_shapes = self.make_percent_shape_dict(out_fc_th)
101 | # For the main output, none of the polygons should overlap.
102 | for percent1 in all_percents:
103 | shape1 = out_shapes[percent1]
104 | for percent2 in [p for p in all_percents if p > percent1]:
105 | shape2 = out_shapes[percent2]
106 | self.assertFalse(shape1.overlaps(shape2), "Shapes should not overlap.")
107 | # For the threshold output, the 75% polygon should be fully contained within the 50% polygon
108 | self.assertTrue(th_shapes[50].contains(th_shapes[75]), "Smaller threshold polygon should contain larger.")
109 | # The threshold polygons should contain all the main polygons of larger percentages
110 | for percent in [50, 75, 100]:
111 | self.assertTrue(th_shapes[50].contains(out_shapes[percent]))
112 | for percent in [75, 100]:
113 | self.assertTrue(th_shapes[75].contains(out_shapes[percent]))
114 |
115 | def test_tool_multi_facilities_cutoffs(self):
116 | """Test the tool with multiple facilities and cutoffs.
117 |
118 | The purpose of this test is to make sure the code is correctly handling time lapse polygons with multiple
119 | facilities and multiple break values. Those should be counted and reported separately in the output.
120 | """
121 | in_fc = os.path.join(self.in_gdb, "TimeLapsePolys_2Fac_2Cutoffs")
122 | out_fc = os.path.join(self.output_gdb, "CPAP_2Fac_2Cutoffs")
123 | out_fc_th = os.path.join(self.output_gdb, "CPAP_Th_2Fac_2Cutoffs")
124 | # Run the tool
125 | arcpy.TransitNetworkAnalysisTools.CreatePercentAccessPolygons( # pylint: disable=no-member
126 | in_fc,
127 | out_fc,
128 | "100 Meters",
129 | 4, # Parallel processes
130 | out_fc_th,
131 | [50, 75]
132 | )
133 | self.assertTrue(arcpy.Exists(out_fc))
134 | self.assertTrue(arcpy.Exists(out_fc_th))
135 |
136 | # Get a list of unique facility and cutoff combinations
137 | combos = set()
138 | for row in arcpy.da.SearchCursor(in_fc, ["FacilityID", "FromBreak", "ToBreak"]):
139 | combos.add(row)
140 | # The main output should have 50% and 100% rows for each input combo
141 | out_dict = {} # {combo: percent}
142 | for row in arcpy.da.SearchCursor(out_fc, ["FacilityID", "FromBreak", "ToBreak", "Percent"]):
143 | out_dict.setdefault(tuple([v for v in row[:3]]), []).append(row[3])
144 | self.assertEqual(combos, set(list(out_dict.keys())))
145 | for combo, percents in out_dict.items():
146 | self.assertEqual([50, 100], sorted(percents), f"Incorrect percents for combo {combo}.")
147 | # The threshold output should have 50% and 75% rows for each input combo
148 | out_dict = {} # {combo: percent}
149 | for row in arcpy.da.SearchCursor(out_fc_th, ["FacilityID", "FromBreak", "ToBreak", "Percent"]):
150 | out_dict.setdefault(tuple([v for v in row[:3]]), []).append(row[3])
151 | self.assertEqual(combos, set(list(out_dict.keys())))
152 | for combo, percents in out_dict.items():
153 | self.assertEqual([50, 75], sorted(percents), f"Incorrect percents for combo {combo}.")
154 |
155 |
156 | if __name__ == '__main__':
157 | unittest.main()
158 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/test_parallel_sa.py:
--------------------------------------------------------------------------------
1 | """Unit tests for the parallel_sa.py module.
2 |
3 | Copyright 2023 Esri
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 | http://www.apache.org/licenses/LICENSE-2.0
8 | Unless required by applicable law or agreed to in writing, software
9 | distributed under the License is distributed on an "AS IS" BASIS,
10 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 | See the License for the specific language governing permissions and
12 | limitations under the License.
13 | """
14 | # pylint: disable=import-error, protected-access, invalid-name
15 |
16 | import sys
17 | import os
18 | import datetime
19 | import unittest
20 | from copy import deepcopy
21 | import arcpy
22 | import input_data_helper
23 |
24 | CWD = os.path.dirname(os.path.abspath(__file__))
25 | sys.path.append(os.path.dirname(CWD))
26 | import parallel_sa # noqa: E402, pylint: disable=wrong-import-position
27 | import AnalysisHelpers # noqa: E402, pylint: disable=wrong-import-position
28 |
29 |
30 | class TestParallelSA(unittest.TestCase):
31 | """Test cases for the parallel_sa module."""
32 |
33 | @classmethod
34 | def setUpClass(self): # pylint: disable=bad-classmethod-argument
35 | """Set up shared test properties."""
36 | self.maxDiff = None
37 |
38 | self.input_data_folder = os.path.join(CWD, "TestInput")
39 | input_data_helper.make_feature_classes_from_json(self.input_data_folder)
40 | in_gdb = os.path.join(self.input_data_folder, "CincinnatiTransitNetwork.gdb")
41 | self.facilities = os.path.join(in_gdb, "TestOrigins_Subset")
42 | self.num_facilities = int(arcpy.management.GetCount(self.facilities).getOutput(0))
43 | self.local_nd = os.path.join(in_gdb, "TransitNetwork", "TransitNetwork_ND")
44 | self.local_tm_time = "Public transit time"
45 |
46 | # Create a unique output directory and gdb for this test
47 | self.scratch_folder = os.path.join(
48 | CWD, "TestOutput", "Output_ParallelSA_" + datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
49 | os.makedirs(self.scratch_folder)
50 | self.output_gdb = os.path.join(self.scratch_folder, "outputs.gdb")
51 | arcpy.management.CreateFileGDB(os.path.dirname(self.output_gdb), os.path.basename(self.output_gdb))
52 |
53 | self.logger = AnalysisHelpers.configure_global_logger(parallel_sa.LOG_LEVEL)
54 | self.parallel_sa_class_args = {
55 | "logger": self.logger,
56 | "facilities": self.facilities,
57 | "output_polygons": os.path.join(self.output_gdb, "TestPolys"),
58 | "network_data_source": self.local_nd,
59 | "travel_mode": self.local_tm_time,
60 | "cutoffs": [30, 45],
61 | "time_units": "Minutes",
62 | "time_window_start_day": "Wednesday",
63 | "time_window_start_time": "08:00",
64 | "time_window_end_day": "Wednesday",
65 | "time_window_end_time": "08:02",
66 | "time_increment": 1,
67 | "travel_direction": "Toward Facilities",
68 | "geometry_at_cutoff": "Rings",
69 | "geometry_at_overlap": "Overlap",
70 | "max_processes": 4
71 | }
72 |
73 | @classmethod
74 | def tearDownClass(self):
75 | """Deconstruct the logger when tests are finished."""
76 | AnalysisHelpers.teardown_logger(self.logger)
77 |
78 | def check_ServiceArea_solve(self, sa_inputs, expected_num_polygons):
79 | """Test the solve method of the ServiceArea class."""
80 | out_folder = sa_inputs["output_folder"]
81 | os.makedirs(out_folder)
82 | sa = parallel_sa.ServiceArea(**sa_inputs)
83 | time_of_day = datetime.datetime(1900, 1, 3, 10, 0, 0)
84 | sa.solve(time_of_day)
85 | # Check results
86 | self.assertIsInstance(sa.job_result, dict)
87 | self.assertTrue(sa.job_result["solveSucceeded"], "SA solve failed")
88 | out_polygons = sa.job_result["outputPolygons"]
89 | self.assertEqual(
90 | out_folder, os.path.commonprefix([out_folder, out_polygons]),
91 | "Output SA polygons feature class has the wrong filepath.")
92 | self.assertTrue(
93 | arcpy.Exists(out_polygons),
94 | "Output SA polygons feature class does not exist.")
95 | self.assertEqual(
96 | expected_num_polygons, int(arcpy.management.GetCount(out_polygons).getOutput(0)),
97 | "Output SA polygons feature class has an incorrect number of rows.")
98 | self.assertIn(
99 | AnalysisHelpers.TIME_FIELD, [f.name for f in arcpy.ListFields(out_polygons)],
100 | "Output SA polygons feature class is missing time of day field.")
101 | for row in arcpy.da.SearchCursor(out_polygons, [AnalysisHelpers.TIME_FIELD]):
102 | self.assertEqual(time_of_day, row[0], "Incorrect time field value.")
103 |
104 | def test_ServiceArea_solve_overlap(self):
105 | """Test the solve method of the ServiceArea class using overlapping polygons."""
106 | out_folder = os.path.join(self.scratch_folder, "ServiceAreaOverlap")
107 | sa_inputs = {
108 | "facilities": self.facilities,
109 | "cutoffs": [30, 45],
110 | "time_units": arcpy.nax.TimeUnits.Minutes,
111 | "travel_direction": arcpy.nax.TravelDirection.FromFacility,
112 | "geometry_at_cutoff": arcpy.nax.ServiceAreaPolygonCutoffGeometry.Rings,
113 | "geometry_at_overlap": arcpy.nax.ServiceAreaOverlapGeometry.Overlap,
114 | "network_data_source": self.local_nd,
115 | "travel_mode": self.local_tm_time,
116 | "output_folder": out_folder
117 | }
118 | # 4 facilities, 2 cutoffs, 1 time slice = 8 total output polygons
119 | self.check_ServiceArea_solve(sa_inputs, 8)
120 |
121 | def test_ServiceArea_solve_dissolve(self):
122 | """Test the solve method of the ServiceArea class using dissolved polygons.
123 |
124 | The time of day field in the output in this case has some special handling.
125 | """
126 | out_folder = os.path.join(self.scratch_folder, "ServiceAreaDissolve")
127 | sa_inputs = {
128 | "facilities": self.facilities,
129 | "cutoffs": [30, 45],
130 | "time_units": arcpy.nax.TimeUnits.Minutes,
131 | "travel_direction": arcpy.nax.TravelDirection.FromFacility,
132 | "geometry_at_cutoff": arcpy.nax.ServiceAreaPolygonCutoffGeometry.Rings,
133 | "geometry_at_overlap": arcpy.nax.ServiceAreaOverlapGeometry.Dissolve,
134 | "network_data_source": self.local_nd,
135 | "travel_mode": self.local_tm_time,
136 | "output_folder": out_folder
137 | }
138 | # 4 facilities (dissolved), 2 cutoffs, 1 time slice = 2 total output polygons
139 | self.check_ServiceArea_solve(sa_inputs, 2)
140 |
141 | def test_ParallelSACalculator_validate_sa_settings(self):
142 | """Test the _validate_sa_settings function."""
143 | # Test that with good inputs, nothing should happen
144 | sa_calculator = parallel_sa.ParallelSACalculator(**self.parallel_sa_class_args)
145 | sa_calculator._validate_sa_settings()
146 | # Test completely invalid travel mode
147 | sa_inputs = deepcopy(self.parallel_sa_class_args)
148 | sa_inputs["travel_mode"] = "InvalidTM"
149 | sa_calculator = parallel_sa.ParallelSACalculator(**sa_inputs)
150 | error_type = ValueError if AnalysisHelpers.arcgis_version >= "3.1" else RuntimeError
151 | with self.assertRaises(error_type):
152 | sa_calculator._validate_sa_settings()
153 |
154 | def test_ParallelSACalculator_solve_sa_in_parallel(self):
155 | """Test calculating parallel service areas and post-processing."""
156 | # Run parallel process. This calculates the SAs and also post-processes the results
157 | out_fc = os.path.join(self.output_gdb, "TestSolveInParallel")
158 | sa_inputs = deepcopy(self.parallel_sa_class_args)
159 | sa_inputs["output_polygons"] = out_fc
160 | sa_calculator = parallel_sa.ParallelSACalculator(**sa_inputs)
161 | sa_calculator.solve_sa_in_parallel()
162 | self.assertTrue(arcpy.Exists(out_fc))
163 | # 4 facilities, 2 cutoffs, 3 time slices = 24 total output polygons
164 | expected_num_polygons = 24
165 | self.assertEqual(expected_num_polygons, int(arcpy.management.GetCount(out_fc).getOutput(0)))
166 |
167 |
168 | if __name__ == '__main__':
169 | unittest.main()
170 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # public-transit-tools
2 |
3 | This repo contains free, downloadable sample tools provided by Esri for analysis using public transit data in ArcGIS. These tools are intended to extend the capabilities of Esri's core products.
4 |
5 | ### Transit Network Analysis Tools
6 |
7 | 
8 |
9 | [transit-network-analysis-tools](transit-network-analysis-tools/README.md)
10 |
11 | The Network Analyst extension in ArcGIS includes tools for transportation analysis and routing, particularly for modeling and optimizing travel through a transportation network. You can include public transit schedule data in your transportation network in order to model travel by public transit for a variety of workflows in transportation and urban planning, public health, economic development, and so on.
12 |
13 | Once you have created your transit-enabled network dataset using the tools provided in ArcGIS Pro, you may be interested in doing some further analysis. The Transit Network Analysis Tools (the tools right here in this repo) perform transit-specific network analysis. They are intended to supplement the Network Analyst extension with functionality not available out of the box. In particular, the tools account for the time-dependent nature of public transit and assist with analyses commonly needed by those working with public transit.
14 |
15 | - *Calculate Accessibility Matrix* solves an Origin-Destination Cost Matrix analysis incrementally over a time window and summarizes the results. It can be used to count the number of jobs accessible to a set of origins within a reasonable commute time.
16 | - *Calculate Travel Time Statistics* calculates some simple statistics about the total transit travel time between locations over a time window and writes the output to a table.
17 | - *Prepare Time Lapse Polygons* and *Create Percent Access Polygons* help you visualize the area reachable by transit across a time window.
18 | - *Copy Traversed Source Features With Transit* returns the individual network segments traversed by a route, enhanced with the wait time, ride time, and Run ID for each transit line used.
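
These are Python script tools in `Transit Network Analysis Tools.pyt`, so they can also be run from a script with arcpy. As a rough sketch (every path and input below is a placeholder), the call mirrors how this repo's unit tests invoke *Prepare Time Lapse Polygons*:

```python
import arcpy

# Import the Python toolbox from your local copy of this repo (placeholder path).
arcpy.ImportToolbox(r"C:\GIS\transit-network-analysis-tools\Transit Network Analysis Tools.pyt")

# Solve Service Areas every minute from 8:00 to 8:02 AM on a generic Wednesday and
# write all of the resulting time lapse polygons to a single feature class.
arcpy.TransitNetworkAnalysisTools.PrepareTimeLapsePolygons(
    r"C:\GIS\Analysis.gdb\Facilities",                        # facilities (placeholder)
    r"C:\GIS\Analysis.gdb\TimeLapsePolys",                    # output polygons (placeholder)
    r"C:\GIS\Analysis.gdb\TransitNetwork\TransitNetwork_ND",  # transit-enabled network dataset (placeholder)
    "Public transit time",                                    # travel mode
    [30, 45],                                                 # cutoffs
    "Minutes",                                                # cutoff units
    "Wednesday", "08:00",                                     # time window start day and time
    "Wednesday", "08:02",                                     # time window end day and time
    1,                                                        # time increment in minutes
    "Away From Facilities",                                   # travel direction
    "Rings",                                                  # geometry at cutoffs
    "Overlap",                                                # geometry at overlaps
    4,                                                        # number of parallel processes
    None,                                                     # barriers
    True                                                      # precalculate network locations
)
```

The resulting polygons can then be fed to *Create Percent Access Polygons* to summarize how often each area was reachable across the time window.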
19 |
20 | ### GTFS Realtime Connector for GeoEvent
21 |
22 | 
23 |
24 | [send-GTFS-rt-to-GeoEvent](send-GTFS-rt-to-GeoEvent/README.md)
25 |
26 | [GTFS Realtime](https://gtfs.org/realtime/) is an extension to static GTFS that provides a standard format for the real-time status of a transit system, such as the positions of buses and trains, information about delays, and service alerts. The GTFS Realtime Connector for GeoEvent Server allows you to ingest GTFS Realtime feeds and display them in a map.
27 |
28 | The GTFS Realtime Connector for GeoEvent Server can poll and process the three feed types listed below:
29 | - Trip updates – delays, cancellations, updated routes, etc.
30 | - Service alerts – stop moved, unforeseen events affecting a station, route, or the entire network, etc.
31 | - Vehicle positions – information about transit vehicles including location and congestion level.
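
The connector itself is configured in GeoEvent Manager rather than in Python, but if you just want a quick look at what these three feed types contain, here is a minimal sketch using the open-source `gtfs-realtime-bindings` package (not part of this connector; the feed URL is a placeholder):

```python
import urllib.request

# pip install gtfs-realtime-bindings
from google.transit import gtfs_realtime_pb2

# Placeholder URL; substitute your agency's GTFS Realtime endpoint.
FEED_URL = "https://example.com/gtfs-rt/VehiclePositions.pb"

feed = gtfs_realtime_pb2.FeedMessage()
with urllib.request.urlopen(FEED_URL) as response:
    feed.ParseFromString(response.read())

# Each entity carries one of the three feed types described above.
for entity in feed.entity:
    if entity.HasField("vehicle"):        # vehicle positions
        position = entity.vehicle.position
        print("vehicle", entity.vehicle.trip.route_id, position.latitude, position.longitude)
    elif entity.HasField("trip_update"):  # trip updates
        print("trip update", entity.trip_update.trip.trip_id)
    elif entity.HasField("alert"):        # service alerts
        translations = entity.alert.header_text.translation
        print("alert", translations[0].text if translations else "(no header)")
```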
32 |
33 | ## Learning materials
34 |
35 | Start here! [This comprehensive Story Map](https://arcg.is/1mbqyn) highlights key concepts and best practices for public transit GIS analysis. It shows how to determine who your transit system serves, how well people are served by transit, and how easy it is for people to access important destinations by transit.
36 |
37 | When you're ready for a deep dive, watch this tutorial video series to learn techniques and best practices for public transit analysis in ArcGIS Pro:
38 | - [Tutorial videos](https://www.youtube.com/playlist?list=PLGZUzt4E4O2KQz9IxGKrEyKB8rA0UVx1W)
39 | - [Slides used in tutorial videos](https://esriurl.com/TransitVideoDownloads)
40 |
41 | Other learning resources:
42 | - ArcGIS Pro written tutorial: [Create and use a network dataset with public transit data](https://pro.arcgis.com/en/pro-app/latest/help/analysis/networks/create-and-use-a-network-dataset-with-public-transit-data.htm)
43 | - Learn Lesson: [Assess access to public transit](https://learn.arcgis.com/en/projects/assess-access-to-public-transit/)
44 | - Blog posts:
45 |   - [Use schedule-based public transit in Network Analyst with Pro 2.4](https://www.esri.com/arcgis-blog/products/arcgis-pro/analytics/public-transit-network-analyst/)
46 |   - [Who does my public transit system serve?](https://www.esri.com/arcgis-blog/products/arcgis-online/analytics/who-does-my-public-transit-system-serve/)
47 |   - [Map the frequency of transit service across your city and find out why it matters](https://www.esri.com/arcgis-blog/products/arcgis-pro/analytics/map-the-frequency-of-transit-service-across-your-city-and-find-out-why-it-matters/)
48 |   - [Mapping transit accessibility to jobs](https://www.esri.com/arcgis-blog/products/product/analytics/mapping-transit-accessibility-to-jobs/)
49 |   - [How to make a shapes.txt file for your GTFS dataset with ArcGIS](https://www.esri.com/arcgis-blog/products/arcgis-pro/analytics/how-to-make-a-shapes-txt-file-for-your-gtfs-dataset-with-arcgis/)
50 |
51 |
52 | ## Reference materials and useful links
53 |
54 | * ArcGIS Pro documentation: [Public transit in Network Analyst](https://pro.arcgis.com/en/pro-app/latest/help/analysis/networks/network-analysis-with-public-transit-data.htm)
55 | * ArcGIS Pro documentation: [Public Transit Tools.tbx geoprocessing toolbox](https://pro.arcgis.com/en/pro-app/latest/tool-reference/public-transit/an-overview-of-the-public-transit-toolbox.htm)
56 | * [GTFS specification](https://gtfs.org/schedule/)
57 | * [GTFS Realtime](https://gtfs.org/realtime/)
58 | * [GTFS Best Practices](https://gtfs.org/schedule/best-practices/) - Guidelines for creating a good quality GTFS feed
59 | * [The Mobility Database](https://database.mobilitydata.org/) - Catalog of open GTFS datasets from around the world
60 | * [Temporal variability in transit-based accessibility to supermarkets](https://www.sciencedirect.com/science/article/pii/S0143622814001283) by Steve Farber, Melinda Morang, and Michael Widener, published in the journal Applied Geography
61 |
62 | ## Other ArcGIS tools for public transit agencies
63 |
64 | ArcGIS has many tools, products, and solutions applicable to public transit agencies beyond the analytical tools in this repo and those discussed in the learning materials above. Learn more using the links below.
65 |
66 | - Configurable tools and templates from the ArcGIS Solutions for Local Government's Transit Solution:
67 |   - [Rider outreach](https://doc.arcgis.com/en/arcgis-solutions/latest/reference/introduction-to-transit-outreach.htm)
68 |   - [Adopt-a-stop program management](https://doc.arcgis.com/en/arcgis-solutions/latest/reference/introduction-to-adopt-a-stop.htm)
69 |   - [Transit safety management](https://doc.arcgis.com/en/arcgis-solutions/latest/reference/introduction-to-transit-safety.htm)
70 | - [Real-Time AVL Feeds With ArcGIS](https://community.esri.com/t5/public-transit-blog/real-time-avl-feeds-with-arcgis/ba-p/883008)
71 | - [Survey123 and Webhooks for Transit](https://community.esri.com/t5/public-transit-blog/survey123-and-webhooks-for-transit/ba-p/882990)
72 | - [Public Transit Real Estate Solution](https://community.esri.com/t5/public-transit-blog/public-transit-real-estate-solution/ba-p/883013)
73 | - [Transit Incident Reporting](https://community.esri.com/t5/public-transit-blog/transit-incident-reporting/ba-p/882969)
74 | - [Routing a Fleet of Vehicles with Ready-To-Use Tools](https://community.esri.com/t5/public-transit-blog/routing-a-fleet-of-vehicles-with-ready-to-use/ba-p/882993)
75 | - [Make your static bus timetables sing and move along with Arcade](https://community.esri.com/t5/arcgis-online-blog/make-your-static-bus-timetables-sing-and-move/ba-p/890211)
76 |
77 | ## Problems or questions?
78 |
79 | Find a bug or want to request a new feature? Please let us know by submitting an [issue](../../issues), or post a question in the [Esri Community forums](https://community.esri.com/t5/public-transit-questions/bd-p/public-transit-questions).
80 |
81 | If you have more general questions about how your public transit agency can leverage ArcGIS, contact [transit@esri.com](mailto:transit@esri.com).
82 |
83 | ## Contributing
84 |
85 | Esri welcomes contributions from anyone and everyone. Please see our [guidelines for contributing](https://github.com/esri/contributing).
86 |
87 | ## Licensing
88 | Copyright 2024 Esri
89 |
90 | Licensed under the Apache License, Version 2.0 (the "License");
91 | you may not use this file except in compliance with the License.
92 | You may obtain a copy of the License at
93 |
94 | http://www.apache.org/licenses/LICENSE-2.0
95 |
96 | Unless required by applicable law or agreed to in writing, software
97 | distributed under the License is distributed on an "AS IS" BASIS,
98 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
99 | See the License for the specific language governing permissions and
100 | limitations under the License.
101 |
102 | A copy of the license is available in the repository's [License.txt](License.txt?raw=true) file.
103 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/test_CalculateODMatrixInParallel.py:
--------------------------------------------------------------------------------
1 | """Unit tests for the CalculateODMatrixInParallel.py module.
2 |
3 | Copyright 2024 Esri
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 | http://www.apache.org/licenses/LICENSE-2.0
8 | Unless required by applicable law or agreed to in writing, software
9 | distributed under the License is distributed on an "AS IS" BASIS,
10 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 | See the License for the specific language governing permissions and
12 | limitations under the License.
13 | """
14 | # pylint: disable=import-error, protected-access, invalid-name
15 |
16 | import sys
17 | import os
18 | import datetime
19 | import unittest
20 | from copy import deepcopy
21 | import arcpy
22 |
23 | CWD = os.path.dirname(os.path.abspath(__file__))
24 | sys.path.append(os.path.dirname(CWD))
25 | import CalculateODMatrixInParallel # noqa: E402, pylint: disable=wrong-import-position
26 | from AnalysisHelpers import MAX_ALLOWED_MAX_PROCESSES, arcgis_version # noqa: E402, pylint: disable=wrong-import-position
27 | import input_data_helper # noqa: E402, pylint: disable=wrong-import-position
28 |
29 |
30 | class TestCalculateODMatrixInParallel(unittest.TestCase):
31 | """Test cases for the CalculateODMatrixInParallel module."""
32 |
33 | @classmethod
34 | def setUpClass(self): # pylint: disable=bad-classmethod-argument
35 | self.maxDiff = None
36 |
37 | self.input_data_folder = os.path.join(CWD, "TestInput")
38 | input_data_helper.make_feature_classes_from_json(self.input_data_folder)
39 | self.in_gdb = os.path.join(self.input_data_folder, "CincinnatiTransitNetwork.gdb")
40 | self.origins = os.path.join(self.in_gdb, "TestOrigins")
41 | self.destinations = os.path.join(self.in_gdb, "TestDestinations")
42 | self.local_nd = os.path.join(self.in_gdb, "TransitNetwork", "TransitNetwork_ND")
43 | self.local_tm_time = "Public transit time"
44 |
45 | # Create a unique output directory and gdb for this test
46 | self.scratch_folder = os.path.join(
47 | CWD, "TestOutput", "Output_ParallelOD_" + datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
48 | os.makedirs(self.scratch_folder)
49 | self.output_gdb = os.path.join(self.scratch_folder, "outputs.gdb")
50 | arcpy.management.CreateFileGDB(os.path.dirname(self.output_gdb), os.path.basename(self.output_gdb))
51 |
52 | self.od_args = {
53 | "origins": self.origins,
54 | "destinations": self.destinations,
55 | "time_window_start_day": "Wednesday",
56 | "time_window_start_time": "08:00",
57 | "time_window_end_day": "Wednesday",
58 | "time_window_end_time": "08:02",
59 | "time_increment": 1,
60 | "network_data_source": self.local_nd,
61 | "travel_mode": self.local_tm_time,
62 | "chunk_size": 10,
63 | "max_processes": 4,
64 | "precalculate_network_locations": True,
65 | "barriers": None
66 | }
67 | self.cam_inputs = deepcopy(self.od_args)
68 | self.cam_inputs["output_origins"] = os.path.join(self.output_gdb, "TestCAM")
69 | self.cam_inputs["time_units"] = "Minutes"
70 | self.cam_inputs["cutoff"] = 30
71 | self.cam_inputs["weight_field"] = "NumJobs"
72 | self.ctts_inputs = deepcopy(self.od_args)
73 | self.ctts_inputs["out_csv_file"] = os.path.join(self.scratch_folder, "TestCSV")
74 | self.ctts_inputs["out_na_folder"] = os.path.join(self.scratch_folder, "TestOutNAFolder")
75 |
76 | # Invalid inputs for the base class
77 | does_not_exist = os.path.join(self.in_gdb, "DoesNotExist")
78 | self.invalid_inputs = [
79 | ("chunk_size", -5, ValueError, "Chunk size must be greater than 0."),
80 | ("max_processes", 0, ValueError, "Maximum allowed parallel processes must be greater than 0."),
81 | ("max_processes", 5000, ValueError, (
82 | f"The maximum allowed parallel processes cannot exceed {MAX_ALLOWED_MAX_PROCESSES:} due "
83 | "to limitations imposed by Python's concurrent.futures module."
84 | )),
85 | ("time_increment", 0, ValueError, "The time increment must be greater than 0."),
86 | ("origins", does_not_exist, ValueError, f"Input dataset {does_not_exist} does not exist."),
87 | ("destinations", does_not_exist, ValueError, f"Input dataset {does_not_exist} does not exist."),
88 | ("barriers", [does_not_exist], ValueError, f"Input dataset {does_not_exist} does not exist."),
89 | ("network_data_source", does_not_exist, ValueError,
90 | f"Input network dataset {does_not_exist} does not exist."),
91 | ("travel_mode", "BadTM", ValueError if arcgis_version >= "3.1" else RuntimeError, ""),
92 | ]
93 |
94 | def test_validate_inputs_cam(self):
95 | """Test the validate_inputs function of the CalculateAccessibilityMatrix child class."""
96 | # Check base class invalid inputs and additional validation of child class
97 | invalid_inputs = self.invalid_inputs + [
98 | ("cutoff", 0, ValueError, "Impedance cutoff must be greater than 0."),
99 | ("cutoff", -5, ValueError, "Impedance cutoff must be greater than 0."),
100 | ("weight_field", "BadField", ValueError,
101 | (f"The destinations feature class {self.cam_inputs['destinations']} is missing the designated weight "
102 | "field BadField.")),
103 | ("weight_field", "Shape", TypeError,
104 | (f"The weight field Shape in the destinations feature class {self.cam_inputs['destinations']} is not "
105 | "numerical."))
106 | ]
107 | for invalid_input in invalid_inputs:
108 | property_name, value, error_type, expected_message = invalid_input
109 | with self.subTest(
110 | property_name=property_name, value=value, error_type=error_type, expected_message=expected_message
111 | ):
112 | inputs = deepcopy(self.cam_inputs)
113 | inputs[property_name] = value
114 | sa_solver = CalculateODMatrixInParallel.CalculateAccessibilityMatrix(**inputs)
115 | with self.assertRaises(error_type) as ex:
116 | sa_solver._validate_inputs()
117 | if expected_message:
118 | self.assertEqual(expected_message, str(ex.exception))
119 |
120 | def test_validate_inputs_ctts(self):
121 | """Test the validate_inputs function of the CalculateTravelTimeStatistics child class."""
122 | # No additional validation of child class. Just check base class bad inputs.
123 | for invalid_input in self.invalid_inputs:
124 | property_name, value, error_type, expected_message = invalid_input
125 | with self.subTest(
126 | property_name=property_name, value=value, error_type=error_type, expected_message=expected_message
127 | ):
128 | inputs = deepcopy(self.ctts_inputs)
129 | inputs[property_name] = value
130 | sa_solver = CalculateODMatrixInParallel.CalculateTravelTimeStatistics(**inputs)
131 | with self.assertRaises(error_type) as ex:
132 | sa_solver._validate_inputs()
133 | if expected_message:
134 | self.assertEqual(expected_message, str(ex.exception))
135 |
136 | def test_CalculateAccessibilityMatrix(self):
137 | """Test the full CalculateAccessibilityMatrix workflow."""
138 | od_calculator = CalculateODMatrixInParallel.CalculateAccessibilityMatrix(**self.cam_inputs)
139 | od_calculator.solve_large_od_cost_matrix()
140 | self.assertTrue(arcpy.Exists(self.cam_inputs["output_origins"]), "Output origins does not exist.")
141 | expected_cam_fields = ["TotalDests", "PercDests"] + \
142 | [f"DsAL{p}Perc" for p in range(10, 100, 10)] + \
143 | [f"PsAL{p}Perc" for p in range(10, 100, 10)]
144 | self.assertTrue(
145 | set(expected_cam_fields).issubset({f.name for f in arcpy.ListFields(self.cam_inputs["output_origins"])}),
146 | "Incorrect fields in origins after CalculateAccessibilityMatrix"
147 | )
148 |
149 | def test_CalculateTravelTimeStatistics(self):
150 | """Test the full CalculateTravelTimeStatistics workflow."""
151 | od_calculator = CalculateODMatrixInParallel.CalculateTravelTimeStatistics(**self.ctts_inputs)
152 | od_calculator.solve_large_od_cost_matrix()
153 | self.assertTrue(os.path.exists(self.ctts_inputs["out_csv_file"]))
154 | self.assertTrue(os.path.exists(self.ctts_inputs["out_na_folder"]))
155 |
156 | if __name__ == '__main__':
157 | unittest.main()
158 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/test_parallel_calculate_locations.py:
--------------------------------------------------------------------------------
1 | """Unit tests for the parallel_calculate_locations.py module.
2 |
3 | Copyright 2023 Esri
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 | http://www.apache.org/licenses/LICENSE-2.0
8 | Unless required by applicable law or agreed to in writing, software
9 | distributed under the License is distributed on an "AS IS" BASIS,
10 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 | See the License for the specific language governing permissions and
12 | limitations under the License.
13 | """
14 | # pylint: disable=import-error, protected-access, invalid-name
15 |
16 | import sys
17 | import os
18 | import datetime
19 | import subprocess
20 | import unittest
21 | import arcpy
22 |
23 | CWD = os.path.dirname(os.path.abspath(__file__))
24 | sys.path.append(os.path.dirname(CWD))
25 | import parallel_calculate_locations # noqa: E402, pylint: disable=wrong-import-position
26 | from AnalysisHelpers import configure_global_logger, teardown_logger  # noqa: E402, pylint: disable=wrong-import-position
27 |
28 |
29 | class TestParallelCalculateLocations(unittest.TestCase):
30 | """Test cases for the parallel_calculate_locations module."""
31 |
32 | @classmethod
33 | def setUpClass(self): # pylint: disable=bad-classmethod-argument
34 | """Set up shared test properties."""
35 | self.maxDiff = None
36 | self.input_data_folder = os.path.join(CWD, "TestInput")
37 | in_gdb = os.path.join(self.input_data_folder, "CincinnatiTransitNetwork.gdb")
38 | self.input_fc = os.path.join(in_gdb, "TestOrigins")
39 | self.local_nd = os.path.join(in_gdb, "TransitNetwork", "TransitNetwork_ND")
40 | self.local_tm_time = "Public transit time"
41 | self.search_criteria = [
42 | ["StopConnectors", "NONE"],
43 | ["Stops", "NONE"],
44 | ["StopsOnStreets", "NONE"],
45 | ["Streets", "SHAPE"],
46 | ["TransitNetwork_ND_Junctions", "NONE"]
47 | ]
48 | self.search_query = [
49 | ["StopConnectors", ""],
50 | ["Stops", ""],
51 | ["StopsOnStreets", ""],
52 | ["Streets", "ObjectID <> 1"],
53 | ["TransitNetwork_ND_Junctions", ""]
54 | ]
55 |
56 | # Create a unique output directory and gdb for this test
57 | self.output_folder = os.path.join(
58 | CWD, "TestOutput", "Output_ParallelCalcLocs_" + datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
59 | os.makedirs(self.output_folder)
60 | self.output_gdb = os.path.join(self.output_folder, "outputs.gdb")
61 | arcpy.management.CreateFileGDB(os.path.dirname(self.output_gdb), os.path.basename(self.output_gdb))
62 |
63 | def check_precalculated_locations(self, fc, check_has_values):
64 | """Check precalculated locations."""
65 | loc_fields = {"SourceID", "SourceOID", "PosAlong", "SideOfEdge"}
66 | actual_fields = set([f.name for f in arcpy.ListFields(fc)])
67 | self.assertTrue(loc_fields.issubset(actual_fields), "Network location fields not added")
68 | if check_has_values:
69 | for row in arcpy.da.SearchCursor(fc, list(loc_fields)): # pylint: disable=no-member
70 | for val in row:
71 | self.assertIsNotNone(val)
72 |
73 | def test_LocationCalculator_subset_inputs(self):
74 | """Test the _subset_inputs method of the LocationCalculator class."""
75 | inputs = {
76 | "input_fc": self.input_fc,
77 | "network_data_source": self.local_nd,
78 | "travel_mode": self.local_tm_time,
79 | "scratch_folder": self.output_folder
80 | }
81 | location_calculator = parallel_calculate_locations.LocationCalculator(**inputs)
82 | location_calculator._subset_inputs([6, 11])
83 | self.assertTrue(arcpy.Exists(location_calculator.out_fc), "Subset fc does not exist.")
84 | self.assertEqual(
85 | 6, int(arcpy.management.GetCount(location_calculator.out_fc).getOutput(0)),
86 | "Subset feature class has the wrong number of rows."
87 | )
88 |
89 | def test_LocationCalculator_calculate_locations(self):
90 | """Test the calculate_locations method of the LocationCalculator class.
91 |
92 | Use all optional Calculate Locations tool settings.
93 | """
94 | fc_to_precalculate = os.path.join(self.output_gdb, "PrecalcFC_LocationCalculator")
95 | arcpy.management.Copy(self.input_fc, fc_to_precalculate)
96 | inputs = {
97 | "input_fc": fc_to_precalculate,
98 | "network_data_source": self.local_nd,
99 | "travel_mode": self.local_tm_time,
100 | "scratch_folder": self.output_folder,
101 | "search_tolerance": "1000 Feet",
102 | "search_criteria": self.search_criteria,
103 | "search_query": self.search_query
104 | }
105 | location_calculator = parallel_calculate_locations.LocationCalculator(**inputs)
106 | oid_range = [6, 11]
107 | location_calculator.calculate_locations(oid_range)
108 | self.assertTrue(arcpy.Exists(location_calculator.out_fc), "Subset fc does not exist.")
109 | self.assertEqual(
110 | 6, int(arcpy.management.GetCount(location_calculator.out_fc).getOutput(0)),
111 | "Subset feature class has the wrong number of rows."
112 | )
113 | self.check_precalculated_locations(location_calculator.out_fc, check_has_values=False)
114 | self.assertEqual(
115 | location_calculator.out_fc, location_calculator.job_result["outputFC"],
116 | "outputFC property of job_result was not set correctly."
117 | )
118 | self.assertEqual(
119 | tuple(oid_range), location_calculator.job_result["oidRange"],
120 | "oidRange property of job_result was not set correctly."
121 | )
122 |
123 | def test_ParallelLocationCalculator(self):
124 | """Test the ParallelLocationCalculator class."""
125 | # The input feature class should not be overwritten by this tool, but copy it first just in case.
126 | fc_to_precalculate = os.path.join(self.output_gdb, "PrecalcFC_Parallel")
127 | arcpy.management.Copy(self.input_fc, fc_to_precalculate)
128 | out_fc = os.path.join(self.output_gdb, "PrecalcFC_Parallel_out")
129 | logger = configure_global_logger(parallel_calculate_locations.LOG_LEVEL)
130 | inputs = {
131 | "logger": logger,
132 | "input_features": fc_to_precalculate,
133 | "output_features": out_fc,
134 | "chunk_size": 6,
135 | "max_processes": 4,
136 | "network_data_source": self.local_nd,
137 | "travel_mode": self.local_tm_time,
138 | "search_tolerance": "1000 Feet",
139 | "search_criteria": self.search_criteria,
140 | "search_query": self.search_query
141 | }
142 | try:
143 | parallel_calculator = parallel_calculate_locations.ParallelLocationCalculator(**inputs)
144 | parallel_calculator.calc_locs_in_parallel()
145 | self.assertTrue(arcpy.Exists(out_fc), "Output fc does not exist.")
146 | self.assertEqual(
147 | int(arcpy.management.GetCount(self.input_fc).getOutput(0)),
148 | int(arcpy.management.GetCount(out_fc).getOutput(0)),
149 | "Output feature class doesn't have the same number of rows as the original input."
150 | )
151 | self.check_precalculated_locations(out_fc, check_has_values=True)
152 | finally:
153 | teardown_logger(logger)
154 |
155 | def test_cli(self):
156 | """Test the command line interface."""
157 | # The input feature class should not be overwritten by this tool, but copy it first just in case.
158 | fc_to_precalculate = os.path.join(self.output_gdb, "PrecalcFC_CLI")
159 | arcpy.management.Copy(self.input_fc, fc_to_precalculate)
160 | out_fc = os.path.join(self.output_gdb, "PrecalcFC_CLI_out")
161 | inputs = [
162 | os.path.join(sys.exec_prefix, "python.exe"),
163 | os.path.join(os.path.dirname(CWD), "parallel_calculate_locations.py"),
164 | "--input-features", fc_to_precalculate,
165 | "--output-features", out_fc,
166 | "--network-data-source", self.local_nd,
167 | "--chunk-size", "6",
168 | "--max-processes", "4",
169 | "--travel-mode", self.local_tm_time,
170 | "--search-tolerance", "1000 Feet",
171 | "--search-criteria",
172 | "StopConnectors NONE;Stops NONE;StopsOnStreets NONE;Streets SHAPE;TransitNetwork_ND_Junctions NONE",
173 | "--search-query", "Streets 'OBJECTID <> 1'"
174 | ]
175 | result = subprocess.run(inputs, check=True)
176 | self.assertEqual(result.returncode, 0)
177 | self.assertTrue(arcpy.Exists(out_fc))
178 |
179 |
180 | if __name__ == '__main__':
181 | unittest.main()
182 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/ReplaceRouteShapes.py:
--------------------------------------------------------------------------------
1 | ############################################################################
2 | ## Tool name: Transit Network Analysis Tools
3 | ## Created by: Melinda Morang, Esri
4 | ## Last updated: 16 August 2024
5 | ############################################################################
6 | """
7 | This is a shared module with classes for replacing the geometry of routes
8 | generated by Route or Closest Facility solves using the Public Transit
9 | evaluator. The original routes use the geometry of the LineVariantElements
10 | feature class of the Public Transit Data Model, and these features are generally
11 | straight lines connecting adjacent stops and are not intended for visualization.
12 | If the Public Transit Data Model includes the LVEShapes feature class, the
13 | straight-line geometry can be swapped for the cartographic lines from LVEShapes
14 | as a post-process, and that's what this class does.
15 |
16 | The RouteShapeReplacer class can be used with a traversal result generated from
17 | a network analysis layer or a Route solver object.
18 |
19 | Copyright 2024 Esri
20 | Licensed under the Apache License, Version 2.0 (the "License");
21 | you may not use this file except in compliance with the License.
22 | You may obtain a copy of the License at
23 | http://www.apache.org/licenses/LICENSE-2.0
24 | Unless required by applicable law or agreed to in writing, software
25 | distributed under the License is distributed on an "AS IS" BASIS,
26 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
27 | See the License for the specific language governing permissions and
28 | limitations under the License.
29 | """
30 | import os
31 | import arcpy
32 | from AnalysisHelpers import TransitNetworkAnalysisToolsError
33 |
34 |
35 | class TransitDataModel: # pylint: disable=too-many-instance-attributes
36 | """Defines and validates the Public Transit Data Model as relevant to this tool."""
37 |
38 | def __init__(self, transit_fd: str):
39 | """Define the Public Transit Data Model as relevant to this tool."""
40 | # For details on the Public Transit Data Model, see
41 | # https://pro.arcgis.com/en/pro-app/latest/help/analysis/networks/transit-data-model.htm
42 | self.line_variant_elements = os.path.join(transit_fd, "LineVariantElements")
43 | self.lve_shapes = os.path.join(transit_fd, "LVEShapes")
44 | self.required_tables = [self.line_variant_elements, self.lve_shapes]
45 | self.required_fields = {
46 | self.line_variant_elements: ["LVEShapeID"],
47 | self.lve_shapes: ["ID"]
48 | }
49 |
50 | def validate_tables_exist(self):
51 | """Validate that the required Public Transit Data Model feature classes and tables exist.
52 |
53 | Raises:
54 | TransitNetworkAnalysisToolsError: If any required feature class or table does not exist.
55 | """
56 | # Check for required feature classes and tables
57 | tables_exist = True
58 | for table in self.required_tables:
59 | if not arcpy.Exists(table):
60 | tables_exist = False
61 | if not tables_exist:
62 | # One or more Public Transit Data Model tables does not exist.
63 | raise TransitNetworkAnalysisToolsError(
64 | arcpy.GetIDMessage(2922) + " Required: LineVariantElements, LVEShapes")
65 |
66 | def validate_required_fields(self):
67 | """Validate that the transit data model feature classes and tables have the required fields for this tool.
68 |
69 | Raises:
70 | TransitNetworkAnalysisToolsError: If not all required fields are present.
71 | """
72 | for table in self.required_fields:
73 | # Compare in lower case because SDE switches the case around. Oracle is all upper. Postgres is all lower.
74 | required_fields_lower = [f.lower() for f in self.required_fields[table]]
75 | actual_fields = [f.name.lower() for f in arcpy.ListFields(table)]
76 | if not set(required_fields_lower).issubset(set(actual_fields)):
77 | # Public transit data model table %1 is missing one or more required fields. Required fields: %2
78 | msg = arcpy.GetIDMessage(2925) % (table, ", ".join(self.required_fields[table]))
79 | raise TransitNetworkAnalysisToolsError(msg)
80 |
81 |
82 | class RouteShapeReplacer:
83 | """Enrich an ordinary traversal result with public transit info."""
84 |
85 | def __init__(self, traversed_edges_fc, transit_fd):
86 | """Initialize the route shape replacer for the given analysis.
87 |
88 | Args:
89 | traversed_edges_fc (str or layer): Feature class layer or catalog path containing the Edges portion of a
90 | traversal result. Typically obtained from the Copy Traversed Source Features tool or the RouteEdges
91 | output from a solver result object.
92 | transit_fd (str): Catalog path to the feature dataset containing the transit-enabled network dataset used
93 | for the analysis and its associated Public Transit Data Model feature classes.
94 | """
95 | self.traversed_edges_fc = traversed_edges_fc
96 |
97 | # Validate basic inputs
98 | if not isinstance(transit_fd, str):
99 | raise TransitNetworkAnalysisToolsError("Invalid Public Transit Data Model feature dataset.")
100 |
101 | # Initialize the Public Transit Data Model tables
102 | self.transit_dm = TransitDataModel(transit_fd)
103 | # Validate Public Transit Data Model
104 | self.transit_dm.validate_tables_exist()
105 | self.transit_dm.validate_required_fields()
106 |
107 | # Validate traversal result
108 | if not arcpy.Exists(self.traversed_edges_fc):
109 | raise TransitNetworkAnalysisToolsError(
110 | f"The input traversed edges feature class {self.traversed_edges_fc} does not exist.")
111 | self.te_desc = arcpy.Describe(self.traversed_edges_fc)
112 | required_fields = ["SourceName", "SourceOID", "RouteID"]
113 | if not set(required_fields).issubset(set([f.name for f in self.te_desc.fields])):
114 | raise TransitNetworkAnalysisToolsError((
115 | f"The input traversed edges feature class {self.traversed_edges_fc} is missing one or more required "
116 | f"fields. Required fields: {required_fields}"
117 | ))
118 |
119 | def replace_route_shapes_with_lveshapes(self) -> dict:
120 | """Replace route shape geometry."""
121 | # Make layers to speed up search cursor queries later
122 | with arcpy.EnvManager(overwriteOutput=True):
123 | lve_lyr_name = "LineVariantElements"
124 | arcpy.management.MakeFeatureLayer(self.transit_dm.line_variant_elements, lve_lyr_name)
125 | lve_oid_field = arcpy.Describe(lve_lyr_name).oidFieldName
126 | lveshapes_lyr_name = "LVEShapes"
127 | arcpy.management.MakeFeatureLayer(self.transit_dm.lve_shapes, lveshapes_lyr_name)
128 |
129 | # Loop over traversed route segments and replace LineVariantElements geometry with LVEShapes geometry
130 | route_segments = {}
131 | fields = ["RouteID", "SHAPE@", "SourceName", "SourceOID"]
132 | for row in arcpy.da.SearchCursor(self.traversed_edges_fc, fields): # pylint: disable=no-member
133 | segment_geom = row[1]
134 | if row[2] == "LineVariantElements":
135 | # Retrieve LVEShapes geometry
136 | try:
137 | with arcpy.da.SearchCursor(lve_lyr_name, ["LVEShapeID"], f"{lve_oid_field} = {row[3]}") as cur:
138 | lveshape_id = next(cur)[0]
139 | if lveshape_id is not None:
140 | with arcpy.da.SearchCursor(lveshapes_lyr_name, ["SHAPE@"], f"ID = {lveshape_id}") as cur:
141 | lveshape_geom = next(cur)[0]
142 | if lveshape_geom:
143 | segment_geom = lveshape_geom
144 | except Exception: # pylint: disable=broad-except
145 | # Probably some kind of mismatch in OIDs or LVEShapeID field values. Just ignore this as invalid
146 | # and leave the original geometry
147 | pass
148 |
149 | # Store the route segment geometry as an array of vertices we'll use to construct the final polylines
150 | # getPart() retrieves an array of arrays of points representing the vertices of the polyline.
151 | for part in segment_geom.getPart():
152 | route_segments.setdefault(row[0], arcpy.Array()).extend(part)
153 |
154 | # Combine route segments into single lines per route
155 | route_geoms = {}
156 | for route_id, vertex_array in route_segments.items():
157 | route_geom = arcpy.Polyline(vertex_array, self.te_desc.spatialReference)
158 | route_geoms[route_id] = route_geom
159 |
160 | # Return dictionary of {route_id: route_geom}
161 | return route_geoms
162 |
163 |
164 | if __name__ == "__main__":
165 | pass
166 |
--------------------------------------------------------------------------------
/License.txt:
--------------------------------------------------------------------------------
1 | Apache License - 2.0
2 |
3 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
4 |
5 | 1. Definitions.
6 |
7 | "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
8 |
9 | "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
10 |
11 | "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control
12 | with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management
13 | of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial
14 | ownership of such entity.
15 |
16 | "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
17 |
18 | "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source,
19 | and configuration files.
20 |
21 | "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to
22 | compiled object code, generated documentation, and conversions to other media types.
23 |
24 | "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice
25 | that is included in or attached to the work (an example is provided in the Appendix below).
26 |
27 | "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the
28 | editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes
29 | of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of,
30 | the Work and Derivative Works thereof.
31 |
32 | "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work
33 | or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual
34 | or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of
35 | electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on
36 | electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for
37 | the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing
38 | by the copyright owner as "Not a Contribution."
39 |
40 | "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and
41 | subsequently incorporated within the Work.
42 |
43 | 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual,
44 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display,
45 | publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
46 |
47 | 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide,
48 | non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell,
49 | sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are
50 | necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was
51 | submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work
52 | or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You
53 | under this License for that Work shall terminate as of the date such litigation is filed.
54 |
55 | 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications,
56 | and in Source or Object form, provided that You meet the following conditions:
57 |
58 | 1. You must give any other recipients of the Work or Derivative Works a copy of this License; and
59 |
60 | 2. You must cause any modified files to carry prominent notices stating that You changed the files; and
61 |
62 | 3. You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices
63 | from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
64 |
65 | 4. If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a
66 | readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the
67 | Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the
68 | Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever
69 | such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License.
70 | You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work,
71 | provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to
72 | Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your
73 | modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with
74 | the conditions stated in this License.
75 |
76 | 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You
77 | to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above,
78 | nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
79 |
80 | 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except
81 | as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
82 |
83 | 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides
84 | its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation,
85 | any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for
86 | determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under
87 | this License.
88 |
89 | 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required
90 | by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages,
91 | including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the
92 | use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or
93 | any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
94 |
95 | 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a
96 | fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting
97 | such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree
98 | to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your
99 | accepting any such warranty or additional liability.
100 |
101 | END OF TERMS AND CONDITIONS
102 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/unittests/test_CalculateAccessibilityMatrix_tool.py:
--------------------------------------------------------------------------------
1 | """Unit tests for the Calculate Accessibility Matrix script tool.
2 |
3 | Copyright 2023 Esri
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 | http://www.apache.org/licenses/LICENSE-2.0
8 | Unless required by applicable law or agreed to in writing, software
9 | distributed under the License is distributed on an "AS IS" BASIS,
10 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 | See the License for the specific language governing permissions and
12 | limitations under the License.
13 | """
14 | # pylint: disable=import-error, invalid-name
15 |
16 | import os
17 | import datetime
18 | import unittest
19 | import arcpy
20 | import input_data_helper
21 |
22 | CWD = os.path.dirname(os.path.abspath(__file__))
23 |
24 |
25 | class TestCalculateAccessibilityMatrixTool(unittest.TestCase):
26 | """Test cases for the CalculateAccessibilityMatrix script tool."""
27 |
28 | @classmethod
29 | def setUpClass(self): # pylint: disable=bad-classmethod-argument
30 | self.maxDiff = None
31 |
32 | tbx_path = os.path.join(os.path.dirname(CWD), "Transit Network Analysis Tools.pyt")
33 | arcpy.ImportToolbox(tbx_path)
34 |
35 | self.input_data_folder = os.path.join(CWD, "TestInput")
36 | input_data_helper.make_feature_classes_from_json(self.input_data_folder)
37 | in_gdb = os.path.join(self.input_data_folder, "CincinnatiTransitNetwork.gdb")
38 | self.origins = os.path.join(in_gdb, "TestOrigins")
39 | self.destinations = os.path.join(in_gdb, "TestDestinations")
40 | self.num_origins = int(arcpy.management.GetCount(self.origins).getOutput(0))
41 | self.num_dests = int(arcpy.management.GetCount(self.destinations).getOutput(0))
42 | self.local_nd = os.path.join(in_gdb, "TransitNetwork", "TransitNetwork_ND")
43 | self.local_tm_time = "Public transit time"
44 |
45 | # Create a unique output directory and gdb for this test
46 | self.scratch_folder = os.path.join(
47 | CWD, "TestOutput",
48 | "Output_CAM_Tool_" + datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
49 | os.makedirs(self.scratch_folder)
50 | self.output_gdb = os.path.join(self.scratch_folder, "outputs.gdb")
51 | arcpy.management.CreateFileGDB(os.path.dirname(self.output_gdb), os.path.basename(self.output_gdb))
52 |
53 | def check_tool_output(self, out_origins, weighted, expected_num_origins, num_dests):
54 | """Do some basic checks of the output origins."""
55 | self.assertTrue(arcpy.Exists(out_origins), "Output origins does not exist.")
56 | self.assertEqual(
57 | expected_num_origins, int(arcpy.management.GetCount(out_origins).getOutput(0)),
58 | "Incorrect number of output origins."
59 | )
60 | expected_cam_fields = ["TotalDests", "PercDests"] + \
61 | [f"DsAL{p}Perc" for p in range(10, 100, 10)] + \
62 | [f"PsAL{p}Perc" for p in range(10, 100, 10)]
63 | self.assertTrue(
64 | set(expected_cam_fields).issubset({f.name for f in arcpy.ListFields(out_origins)}),
65 | "Incorrect fields in origins after Calculate Accessibility Matrix"
66 | )
67 | max_dests = 0
68 | for row in arcpy.da.SearchCursor(out_origins, expected_cam_fields):
69 | for val in row:
70 | self.assertIsNotNone(val, "Unexpected null value in output field.")
71 | max_dests = max(row[0], max_dests)
72 | if weighted:
73 | # Because this calculation used a weight field, the number of destinations found for some origins should
74 | # exceed the number of destination records in the input feature class. Don't check specific results, but at
75 | # least verify that the weight field was used and that results are generally correct.
76 | self.assertGreater(max_dests, num_dests)
77 | else:
78 | # Because this calculation did not use a weight field, the number of destinations found for any origins
79 | # should not exceed the number of destination records in the input feature class. Don't check specific
80 | # results, but at least verify that the number of destinations found is generally correct.
81 | self.assertGreater(max_dests, 0)
82 | self.assertLessEqual(max_dests, self.num_dests)
83 |
84 | def test_diff_points_unweighted(self):
85 | """Test that the tool runs with different origin and destination points not using a weight field."""
86 | out_origins = os.path.join(self.output_gdb, "Origins_pts_unweighted")
87 | arcpy.TransitNetworkAnalysisTools.CalculateAccessibilityMatrix( # pylint: disable=no-member
88 | self.origins,
89 | self.destinations,
90 | out_origins,
91 | self.local_nd,
92 | self.local_tm_time,
93 | 30, # Cutoff
94 | "Minutes",
95 | "Wednesday",
96 | "08:00",
97 | "Wednesday",
98 | "08:03",
99 | 1,
100 | 10, # Chunk size,
101 | 4, # Parallel processes
102 | None, # Weight field
103 | None, # Barriers
104 | True # Precalculate network locations
105 | )
106 | self.check_tool_output(out_origins, False, self.num_origins, self.num_dests)
107 |
108 | def test_diff_points_weighted(self):
109 | """Test that the tool runs with different origin and destination points using a weight field."""
110 | out_origins = os.path.join(self.output_gdb, "Origins_pts_weighted")
111 | arcpy.TransitNetworkAnalysisTools.CalculateAccessibilityMatrix( # pylint: disable=no-member
112 | self.origins,
113 | self.destinations,
114 | out_origins,
115 | self.local_nd,
116 | self.local_tm_time,
117 | 30, # Cutoff
118 | "Minutes",
119 | "Wednesday",
120 | "08:00",
121 | "Wednesday",
122 | "08:03",
123 | 1,
124 | 10, # Chunk size,
125 | 4, # Parallel processes
126 | "NumJobs", # Weight field
127 | None, # Barriers
128 | True # Precalculate network locations
129 | )
130 | self.check_tool_output(out_origins, True, self.num_origins, self.num_dests)
131 |
132 | def test_same_origins_destinations(self):
133 | """Test when the origins and destinations are the same."""
134 | out_origins = os.path.join(self.output_gdb, "Origins_same")
135 | arcpy.TransitNetworkAnalysisTools.CalculateAccessibilityMatrix( # pylint: disable=no-member
136 | self.origins,
137 | self.origins,
138 | out_origins,
139 | self.local_nd,
140 | self.local_tm_time,
141 | 30, # Cutoff
142 | "Minutes",
143 | "Wednesday",
144 | "08:00",
145 | "Wednesday",
146 | "08:03",
147 | 1,
148 | 10, # Chunk size,
149 | 4, # Parallel processes
150 | None, # Weight field
151 | None, # Barriers
152 | True # Precalculate network locations
153 | )
154 | self.check_tool_output(out_origins, False, self.num_origins, self.num_origins)
155 |
156 | def test_unchunked(self):
157 | """Test that the tool runs correctly when origins and destinations can be handled in one chunk.
158 |
159 | Also use a specific date.
160 | """
161 | out_origins = os.path.join(self.output_gdb, "Origins_unchunked")
162 | arcpy.TransitNetworkAnalysisTools.CalculateAccessibilityMatrix( # pylint: disable=no-member
163 | self.origins,
164 | self.destinations,
165 | out_origins,
166 | self.local_nd,
167 | self.local_tm_time,
168 | 30, # Cutoff
169 | "Minutes",
170 | "20190501",
171 | "08:00",
172 | "20190501",
173 | "08:03",
174 | 1,
175 | 1000, # Chunk size,
176 | 4, # Parallel processes
177 | "NumJobs", # Weight field
178 | None, # Barriers
179 | True # Precalculate network locations
180 | )
181 | self.check_tool_output(out_origins, True, self.num_origins, self.num_dests)
182 |
183 | def test_polygon_inputs(self):
184 | """Test using polygon feature classes as inputs."""
185 | out_origins = os.path.join(self.output_gdb, "Origins_polygons")
186 | arcpy.TransitNetworkAnalysisTools.CalculateAccessibilityMatrix( # pylint: disable=no-member
187 | self.origins + "_Polygons",
188 | self.destinations + "_Polygons",
189 | out_origins,
190 | self.local_nd,
191 | self.local_tm_time,
192 | 30, # Cutoff
193 | "Minutes",
194 | "Wednesday",
195 | "08:00",
196 | "Wednesday",
197 | "08:03",
198 | 1,
199 | 10, # Chunk size,
200 | 4, # Parallel processes
201 | None, # Weight field
202 | None, # Barriers
203 | True # Precalculate network locations
204 | )
205 | self.check_tool_output(out_origins, False, self.num_origins, self.num_dests)
206 | # Verify shape type of output origins
207 | self.assertEqual("Polygon", arcpy.Describe(out_origins).shapeType)
208 |
209 |
210 | if __name__ == '__main__':
211 | unittest.main()
212 |
--------------------------------------------------------------------------------
/transit-network-analysis-tools/TNAT_ToolValidator.py:
--------------------------------------------------------------------------------
1 | ################################################################################
2 | ## Toolbox: Transit Network Analysis Tools
3 | ## Created by: Melinda Morang, Esri
4 | ## Last updated: 6 January 2023
5 | ################################################################################
6 | """Shared tool validation methods."""
7 | ################################################################################
8 | """Copyright 2023 Esri
9 | Licensed under the Apache License, Version 2.0 (the "License");
10 | you may not use this file except in compliance with the License.
11 | You may obtain a copy of the License at
12 | http://www.apache.org/licenses/LICENSE-2.0
13 | Unless required by applicable law or agreed to in writing, software
14 | distributed under the License is distributed on an "AS IS" BASIS,
15 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 | See the License for the specific language governing permissions and
17 | limitations under the License."""
18 | ################################################################################
19 | import os
20 | import sys
21 | import re
22 | import datetime
23 | import arcpy
24 | from AnalysisHelpers import is_nds_service, MAX_AGOL_PROCESSES, MAX_ALLOWED_MAX_PROCESSES
25 |
26 | ispy3 = sys.version_info >= (3, 0)
27 |
28 | # Days of the week
29 | days = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"]
30 |
31 |
32 | def validate_time_increment(param_increment):
33 | """Validate that the time increment is greater than 0."""
34 | if param_increment.value <= 0:
35 | param_increment.setErrorMessage("Time increment must be greater than 0.")
36 |
37 |
38 | def allow_YYYYMMDD_day(param_day):
39 | """Make Day parameter accept a weekday or a YYYYMMDD date string.
40 |
41 | Hack for Pro: Define the filter list in updateMessages to trick the UI control
42 | into allowing free text entry in addition to selection from the list. This
43 | allows us to accept both a weekday and a YYYYMMDD date."""
44 | # Define the filter list
45 | param_day.filter.list = days
46 | validate_day(param_day)
47 |
48 |
49 | def validate_day(param_day):
"""Validate that the day parameter is either a weekday name or a YYYYMMDD date string."""
50 | if param_day.altered:
51 | # Make sure if it's not a weekday that it's in YYYYMMDD date format
52 | if param_day.valueAsText not in days:
53 | # If it's not one of the weekday strings, it must be in YYYYMMDD format
54 | try:
55 | datetime.datetime.strptime(param_day.valueAsText, '%Y%m%d')
56 | # This is a valid YYYYMMDD date, so clear the filter list error
57 | if param_day.hasError():
58 | msg_id = param_day.message.split(':')[0]
59 | if msg_id == 'ERROR 000800':
60 | # clearMessage() does not work in python toolboxes because of an ArcGIS bug,
61 | # so catch the error and convert it to a warning so that the tool will run.
62 | # This is the only solution I've been able to come up with.
63 | param_day.setWarningMessage((
64 | "You have chosen to use a specific date for this analysis. Please double check your GTFS "
65 | "calendar.txt and/or calendar_dates.txt files to make sure this specific date falls within "
66 | "the date range covered by your GTFS data."
67 | ))
68 | # Keep this here in case it starts working at some point
69 | param_day.clearMessage()
70 | except ValueError:
71 | param_day.setErrorMessage("Please enter a date in YYYYMMDD format or a weekday.")
72 |
73 |
74 | def set_end_day(param_startday, param_endday):
75 | """Set the end day to the same as start day by default, unless it's explicitly set the end day to something else.
76 |
77 | Also, the end day should be grayed out unless the start day is a specific date.
78 | """
79 |
80 | if param_startday.valueAsText:
81 | param_endday.value = param_startday.value
82 |
83 | if param_startday.valueAsText in days:
84 | param_endday.enabled = False
85 | else:
86 | param_endday.enabled = True
87 |
88 |
89 | def check_time_window(param_starttime, param_endtime, param_startday, param_endday):
90 | """Make sure time window is valid and in the correct HH:MM format"""
91 |
92 | def is_time_valid(param_time):
93 | if param_time.altered:
94 | m = re.match(r"^\s*([0-9]{2}):([0-9]{2})\s*$", param_time.value)
95 | if not m:
96 | param_time.setErrorMessage(
97 | "Time of day format should be HH:MM (24-hour time). For example, 2am is 02:00, and 2pm is 14:00.")
98 | return False
99 | else:
100 | TimeNumErrorMessage = "Hours cannot be > 48; minutes cannot be > 59."
101 | hours = int(m.group(1))
102 | minutes = int(m.group(2))
103 | if hours < 0 or hours > 48:
104 | param_time.setErrorMessage(TimeNumErrorMessage)
105 | return False
106 | if minutes < 0 or minutes > 59:
107 | param_time.setErrorMessage(TimeNumErrorMessage)
108 | return False
109 | return True
110 |
111 | # Time of day format should be HH:MM (24-hour time).
112 | t1valid = is_time_valid(param_starttime)
113 | t2valid = is_time_valid(param_endtime)
114 |
115 | # End time must be later than start time if the start and end day are the same
116 | if param_startday.valueAsText == param_endday.valueAsText:
117 | if param_starttime.altered and param_endtime.altered and t1valid and t2valid:
118 | H1,M1 = param_starttime.value.split(':')
119 | seconds1 = (float(H1) * 3600) + (float(M1) * 60)
120 | H2,M2 = param_endtime.value.split(':')
121 | seconds2 = (float(H2) * 3600) + (float(M2) * 60)
122 | if seconds2 <= seconds1:
123 | param_endtime.setErrorMessage(
124 | "Time window invalid! Make sure the time window end is later than the time window start.")
125 |
126 |
127 | def validate_output_is_gdb(param_outTable):
128 | """Output table should be in a geodatabase, not a dbf or info table."""
129 | if param_outTable.altered:
130 | wdesc = arcpy.Describe(os.path.dirname(param_outTable.valueAsText))
131 | if wdesc.dataType == "Folder" or (wdesc.dataType == "Workspace" and wdesc.workspaceType == "FileSystem"):
132 | param_outTable.setErrorMessage("Output table must be in a geodatabase.")
133 |
134 |
135 | def update_precalculate_parameter(param_network, param_precalculate):
136 | """Turn off and hide Precalculate Network Locations parameter if the network data source is a service.
137 |
138 | Args:
139 | param_network (arcpy.Parameter): Parameter for the network data source
140 | param_precalculate (arcpy.Parameter): Parameter for precalculate network locations
141 | """
142 | if not param_network.hasBeenValidated and param_network.altered and param_network.valueAsText:
143 | if is_nds_service(param_network.valueAsText):
144 | param_precalculate.value = False
145 | param_precalculate.enabled = False
146 | else:
147 | param_precalculate.enabled = True
148 |
149 |
150 | def show_only_time_travel_modes(param_network, param_travel_mode):
151 | """Populate the travel mode parameter with time-based travel modes only."""
152 | if not param_network.hasBeenValidated and param_network.altered and param_network.valueAsText:
153 | try:
154 | travel_modes = arcpy.nax.GetTravelModes(param_network.value)
155 | param_travel_mode.filter.list = [
156 | tm_name for tm_name in travel_modes if
157 | travel_modes[tm_name].impedance == travel_modes[tm_name].timeAttributeName
158 | ]
159 | except Exception: # pylint: disable=broad-except
160 | # We couldn't get travel modes for this network for some reason.
161 | pass
162 |
163 |
164 | def cap_max_processes(param_max_processes, param_network=None):
165 | """Validate max processes and cap it when required.
166 |
167 | Args:
168 | param_network (arcpy.Parameter): Parameter for the network data source
169 | param_max_processes (arcpy.Parameter): Parameter for the max processes
170 | """
171 | if param_max_processes.altered and param_max_processes.valueAsText:
172 | max_processes = param_max_processes.value
173 | # Don't allow 0 or negative numbers
174 | if max_processes <= 0:
175 | param_max_processes.setErrorMessage("The maximum number of parallel processes must be positive.")
176 | return
177 | # Cap max processes to the limit allowed by the concurrent.futures module
178 | if max_processes > MAX_ALLOWED_MAX_PROCESSES:
179 | param_max_processes.setErrorMessage((
180 | f"The maximum number of parallel processes cannot exceed {MAX_ALLOWED_MAX_PROCESSES:} due "
181 | "to limitations imposed by Python's concurrent.futures module."
182 | ))
183 | return
184 | # If the network data source is arcgis.com, cap max processes
185 | if (
186 | param_network and
187 | max_processes > MAX_AGOL_PROCESSES and
188 | param_network.altered and
189 | param_network.valueAsText and
190 | is_nds_service(param_network.valueAsText) and
191 | "arcgis.com" in param_network.valueAsText
192 | ):
193 | param_max_processes.setErrorMessage((
194 | f"The maximum number of parallel processes cannot exceed {MAX_AGOL_PROCESSES} when the "
195 | "ArcGIS Online service is used as the network data source."
196 | ))
197 | return
198 | # Warn if the user has requested more processes than the number of logical cores on their machine
199 | if max_processes > os.cpu_count():
200 | param_max_processes.setWarningMessage((
201 | "The maximum number of parallel processes is greater than the number of logical cores "
202 | f"({os.cpu_count()}) in your machine."
203 | ))
204 |
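
The validation helpers above are meant to be called from a Python toolbox's updateParameters and updateMessages methods. Below is a minimal sketch of that wiring; the module alias, tool class, and parameter indices are all assumptions for illustration, not part of this repository.

    # Hypothetical wiring inside a .pyt tool class; the module name and parameter indices are assumed.
    import ToolValidationHelpers as helpers  # assumed import name for the helper module above

    class ExampleTransitTool:
        """Sketch of one tool class in a Python toolbox (.pyt); only the validation hooks are shown."""

        def updateParameters(self, parameters):
            param_network = parameters[3]        # network data source (assumed index)
            param_travel_mode = parameters[4]    # travel mode (assumed index)
            param_precalculate = parameters[10]  # precalculate network locations (assumed index)
            # Disable precalculation for services and list only time-based travel modes.
            helpers.update_precalculate_parameter(param_network, param_precalculate)
            helpers.show_only_time_travel_modes(param_network, param_travel_mode)

        def updateMessages(self, parameters):
            # Flag bad outputs and excessive process counts before the tool runs.
            helpers.validate_output_is_gdb(parameters[2])            # output table (assumed index)
            helpers.cap_max_processes(parameters[9], parameters[3])  # max processes, network (assumed indices)
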
--------------------------------------------------------------------------------
/transit-network-analysis-tools/Transit Network Analysis Tools.CalculateTravelTimeStatisticsOD.pyt.xml:
--------------------------------------------------------------------------------
1 |
2 | [ArcGIS item metadata (ItemDescription) for the "Calculate Travel Time Statistics (OD Cost Matrix)" ArcToolbox tool; created 20220427, last updated 20220503. The parameter help text it contains is listed below.]
    - A point or polygon feature class representing the locations you want to calculate accessibility measures for. For example, your origins might be census blocks, parcels, or specific locations of concern. Note that when polygons are used as origins and destinations, the centroids of the polygons will be used in the network analysis calculation. Keep in mind that the centroid of a polygon is only a good representation of that polygon if the polygon is small with respect to the distance a traveler can walk in a short period of time. It is not appropriate, for example, to use census tracts since tracts are typically very large on a pedestrian scale.
    - A point or polygon feature class representing the destinations your origins will travel to. For example, if you want to measure your origins' level of accessibility to jobs, your Destinations could be the locations of employment centers.
    - File path to the output CSV file that will contain the calculated statistics.
    - The network dataset or service URL to use for the calculation. You should use a transit-enabled network dataset created with these tools available in ArcGIS Pro (https://pro.arcgis.com:443/en/pro-app/help/analysis/networks/network-analysis-with-public-transit-data.htm) or an ArcGIS Enterprise service created from such a network. Technically, however, the tool will work with any network dataset that has at least one time-based travel mode.
    - The name of a time-based travel mode (https://pro.arcgis.com:443/en/pro-app/latest/help/analysis/networks/travel-modes.htm) on the network dataset you wish to use to calculate the OD Cost Matrix. Typically you should choose a travel mode modeling travel by public transit.
    - Day of the week or YYYYMMDD date for the first start time of your analysis.
    - The lower end of the time window you wish to analyze. Must be in HH:MM format (24-hour time). For example, 2 AM is 02:00, and 2 PM is 14:00.
    - If you're using a generic weekday for Start Day, you must use the same day for End Day. If you want to run an analysis spanning multiple days, choose specific YYYYMMDD dates for both Start Day and End Day.
    - The upper end of the time window you wish to analyze. Must be in HH:MM format (24-hour time). The End Time is inclusive, meaning that an analysis will be performed for the time of day you enter here.
    - Increment the OD Cost Matrix's time of day by this amount between solves. For example, for a Time Increment of 1 minute, the OD Cost Matrix will be solved for 10:00, 10:01, 10:02, etc. A Time Increment of 2 minutes would calculate the OD Cost Matrix for 10:00, 10:02, 10:04, etc.
    - In order to solve large OD Cost Matrix problems efficiently, the tool can split up large numbers of inputs into chunks and solve the chunks in parallel across multiple cores of your computer. This parameter specifies the maximum number of origins and destinations that should be allowed in a single chunk. The optimal number depends on your computing resources. Larger chunks take longer to solve and require more memory, but there is some overhead associated with having more chunks.
    - In order to solve large OD Cost Matrix problems efficiently, the tool solves the OD Cost Matrix for different start times in parallel across multiple cores of your machine. If the number of origins and destinations is large, it may also break them up into chunks and solve them in parallel as well. This parameter designates the number of parallel processes that can safely be used. You should select a number less than or equal to the number of virtual cores or processors your computer has.
    - Boolean indicating whether to save the individual results of each network analysis at each time step. If false, the results will be deleted when the tool finishes running. If true, they will be preserved for further analysis.
    - Folder where the network analysis results will be saved if Save individual network analysis results is true. Note that if this folder already exists, it will be deleted and recreated by the tool.
    - Optionally, choose layers with point, line, or polygon barriers to use in the OD Cost Matrix analysis.
    - When doing an OD Cost Matrix analysis, the input origin and destination points must be "located" on the network dataset (https://pro.arcgis.com:443/en/pro-app/latest/help/analysis/networks/locating-analysis-inputs.htm). Because the tool parallelizes the OD Cost Matrix across multiple processes, using the same origins and destinations many times, it saves time to calculate the network locations in advance rather than repeating this calculation in every parallel process. The only time you should uncheck this parameter is if you have already calculated the network locations of your input origins and destinations for the network dataset and travel mode you are using, and you simply wish to re-use these.
3 |
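
The Start Time, End Time, and Time Increment parameters described above combine into a simple schedule: the analysis is solved once per increment from the start time through the inclusive end time. A quick standard-library illustration of that schedule (the date used is just a placeholder):

    from datetime import datetime, timedelta

    def solve_times(start, end, increment_minutes):
        """Yield each analysis time from start through end (inclusive) at the given increment."""
        t = start
        while t <= end:
            yield t
            t += timedelta(minutes=increment_minutes)

    # 10:00-10:04 with a 2-minute increment solves at 10:00, 10:02, and 10:04.
    window_start = datetime(1900, 1, 3, 10, 0)  # placeholder date; only the time of day matters here
    window_end = datetime(1900, 1, 3, 10, 4)
    print([t.strftime("%H:%M") for t in solve_times(window_start, window_end, 2)])
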
--------------------------------------------------------------------------------
/transit-network-analysis-tools/Transit Network Analysis Tools.CalculateAccessibilityMatrix.pyt.xml:
--------------------------------------------------------------------------------
1 |
2 | [ArcGIS item metadata (ItemDescription) for the "Calculate Accessibility Matrix" ArcToolbox tool; created 20210804, last updated 20220503. The parameter help text it contains is listed below.]
    - A point or polygon feature class representing the locations you want to calculate accessibility measures for. For example, your origins might be census blocks, parcels, or specific locations of concern. Note that when polygons are used as origins and destinations, the centroids of the polygons will be used in the network analysis calculation. Keep in mind that the centroid of a polygon is only a good representation of that polygon if the polygon is small with respect to the distance a traveler can walk in a short period of time. It is not appropriate, for example, to use census tracts since tracts are typically very large on a pedestrian scale.
    - A point or polygon feature class representing the destinations your origins will travel to. For example, if you want to measure your origins' level of accessibility to jobs, your Destinations could be the locations of employment centers.
    - Output path. Your input origins will be copied to this location, and the output fields will be added.
    - The network dataset or service URL to use for the calculation. You should use a transit-enabled network dataset created with these tools available in ArcGIS Pro (https://pro.arcgis.com:443/en/pro-app/help/analysis/networks/network-analysis-with-public-transit-data.htm) or an ArcGIS Enterprise service created from such a network. Technically, however, the tool will work with any network dataset that has at least one time-based travel mode.
    - The name of a time-based travel mode (https://pro.arcgis.com:443/en/pro-app/latest/help/analysis/networks/travel-modes.htm) on the network dataset you wish to use to calculate the OD Cost Matrix. Typically you should choose a travel mode modeling travel by public transit.
    - The maximum travel time allowed in your analysis. For example, if you want to analyze the number of jobs reachable within a 30-minute commute from your origins, set the Cutoff Time to 30, and set the Cutoff Time Units to Minutes.
    - The units of time in which to interpret the Cutoff Time.
    - Day of the week or YYYYMMDD date for the first start time of your analysis.
    - The lower end of the time window you wish to analyze. Must be in HH:MM format (24-hour time). For example, 2 AM is 02:00, and 2 PM is 14:00.
    - If you're using a generic weekday for Start Day, you must use the same day for End Day. If you want to run an analysis spanning multiple days, choose specific YYYYMMDD dates for both Start Day and End Day.
    - The upper end of the time window you wish to analyze. Must be in HH:MM format (24-hour time). The End Time is inclusive, meaning that an analysis will be performed for the time of day you enter here.
    - Increment the OD Cost Matrix's time of day by this amount between solves. For example, for a Time Increment of 1 minute, the OD Cost Matrix will be solved for 10:00, 10:01, 10:02, etc. A Time Increment of 2 minutes would calculate the OD Cost Matrix for 10:00, 10:02, 10:04, etc.
    - In order to solve large OD Cost Matrix problems efficiently, the tool can split up large numbers of inputs into chunks and solve the chunks in parallel across multiple cores of your computer. This parameter specifies the maximum number of origins and destinations that should be allowed in a single chunk. The optimal number depends on your computing resources. Larger chunks take longer to solve and require more memory, but there is some overhead associated with having more chunks.
    - In order to solve large OD Cost Matrix problems efficiently, the tool solves the OD Cost Matrix for different start times in parallel across multiple cores of your machine. If the number of origins and destinations is large, it may also break them up into chunks and solve them in parallel as well. This parameter designates the number of parallel processes that can safely be used. You should select a number less than or equal to the number of virtual cores or processors your computer has.
    - Optionally, choose a field from your Destinations table that will be used as a weight. For example, if your destinations represent employment centers, the weight field could be the number of jobs available at each point. Only integer and double fields can be used for the weight field. If you do not choose a weight field, each destination will be counted as 1.
    - Optionally, choose layers with point, line, or polygon barriers to use in the OD Cost Matrix analysis.
    - When doing an OD Cost Matrix analysis, the input origin and destination points must be "located" on the network dataset (https://pro.arcgis.com:443/en/pro-app/latest/help/analysis/networks/locating-analysis-inputs.htm). Because the tool parallelizes the OD Cost Matrix across multiple processes, using the same origins and destinations many times, it saves time to calculate the network locations in advance rather than repeating this calculation in every parallel process. The only time you should uncheck this parameter is if you have already calculated the network locations of your input origins and destinations for the network dataset and travel mode you are using, and you simply wish to re-use these.
3 |
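
The Cutoff Time and weight field descriptions above boil down to a weighted reach count per origin: sum the weights of the destinations whose travel time is within the cutoff, counting each destination as 1 when no weight field is chosen. A toy illustration with made-up numbers:

    # Made-up travel times (minutes) and destination weights (e.g., jobs) purely for illustration.
    travel_times = {"Origin1": {"D1": 12.0, "D2": 28.5, "D3": 41.0}}
    weights = {"D1": 250, "D2": 1300, "D3": 40}
    cutoff_minutes = 30

    for origin, row in travel_times.items():
        reachable = [dest for dest, minutes in row.items() if minutes <= cutoff_minutes]
        unweighted = len(reachable)                           # no weight field: each destination counts as 1
        weighted = sum(weights[dest] for dest in reachable)   # with a weight field: e.g., total jobs reachable
        print(origin, unweighted, weighted)                   # -> Origin1 2 1550
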
--------------------------------------------------------------------------------
/transit-network-analysis-tools/CreatePercentAccessPolygon.py:
--------------------------------------------------------------------------------
1 | ################################################################################
2 | ## Toolbox: Transit Network Analysis Tools
3 | ## Tool name: Create Percent Access Polygons
4 | ## Created by: David Wasserman, Fehr & Peers, https://github.com/d-wasserman
5 | ## and: Melinda Morang, Esri
6 | ## Last updated: 29 August 2023
7 | ################################################################################
8 | ################################################################################
9 | """Copyright 2018 Fehr & Peers
10 | Licensed under the Apache License, Version 2.0 (the "License");
11 | you may not use this file except in compliance with the License.
12 | You may obtain a copy of the License at
13 | http://www.apache.org/licenses/LICENSE-2.0
14 | Unless required by applicable law or agreed to in writing, software
15 | distributed under the License is distributed on an "AS IS" BASIS,
16 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 | See the License for the specific language governing permissions and
18 | limitations under the License."""
19 | ################################################################################
20 | ################################################################################
21 | """Copyright 2024 Esri
22 | Licensed under the Apache License, Version 2.0 (the "License");
23 | you may not use this file except in compliance with the License.
24 | You may obtain a copy of the License at
25 | http://www.apache.org/licenses/LICENSE-2.0
26 | Unless required by applicable law or agreed to in writing, software
27 | distributed under the License is distributed on an "AS IS" BASIS,
28 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
29 | See the License for the specific language governing permissions and
30 | limitations under the License."""
31 | ################################################################################
32 | import sys
33 | import os
34 | import arcpy
35 |
36 | import AnalysisHelpers
37 |
38 |
39 | class PercentAccessPolygonCalculator():
40 | """Main logic for the Create Percent Access Polygons tool.
41 |
42 | The tool creates 'typical access polygons' that represent the area reachable by transit across a time window.
43 |
44 | The tool attempts to account for the dynamic nature of transit schedules by overlaying service area polygons from
45 | multiple times of day and summarizing the results in terms of the number or percentage of the input polygons that
46 | cover an area. Areas covered by a larger percentage of input polygons were reached at more start times and are
47 | consequently more frequently accessible to travelers.
48 |
49 | The tool output will show you the percentage of times any given area was reached, and you can also choose to
50 | summarize these results for different percentage thresholds. For example, you can find out what area can be reached
51 | at least 75% of start times.
52 | """
53 |
54 | def __init__(
55 | self, in_time_lapse_polys, out_cell_counts_fc, cell_size_in_meters, max_processes, out_percents_fc=None,
56 | percents=None
57 | ):
58 | """Initialize percent access polygon calculator.
59 |
60 | Args:
61 | in_time_lapse_polys (feature class or layer): Input time lapse polygons
62 | out_cell_counts_fc (catalog path): Main tool output feature class
63 | cell_size_in_meters (double): Cell size in meters
64 | max_processes (int): Maximum number of allowed parallel processes
65 | out_percents_fc (catalog path, optional): Optional output for threshold percent polygons. Defaults to None.
66 | percents (list(double), optional): List of percent access thresholds. Defaults to None.
67 | """
68 | self.in_time_lapse_polys = in_time_lapse_polys
69 | self.out_cell_counts_fc = out_cell_counts_fc
70 | self.cell_size = cell_size_in_meters
71 | self.max_processes = max_processes
72 | self.out_percents_fc = out_percents_fc
73 | self.percents = percents
74 |
75 | self.raster_template = None
76 | self.projected_polygons = None
77 | self.temp_outputs = []
78 |
79 | def execute(self):
80 | """Execute the tool."""
81 | # Create the raster-like polygons we'll use later with spatial joins.
82 | arcpy.AddMessage("Rasterizing time lapse polygons...")
83 | self._create_polygon_raster_template()
84 |
85 | # Calculate percent access in parallel.
86 | arcpy.AddMessage("Calculating percent access polygons in parallel...")
87 | self._calculate_percent_access_in_parallel()
88 |
89 | # If desired, create polygons dissolved for percent access levels.
90 | if self.out_percents_fc and self.percents:
91 | arcpy.AddMessage("Creating polygons for designated percent thresholds...")
92 | self._make_percent_polygons()
93 |
94 | # Clean up intermediate outputs
95 | if self.temp_outputs:
96 | try:
97 | arcpy.management.Delete(self.temp_outputs)
98 | except Exception: # pylint: disable=broad-except
99 | # If this doesn't work for some reason, don't worry about it, and don't make the tool fail.
100 | pass
101 |
102 | def _create_polygon_raster_template(self):
103 | """Create a raster-like polygon feature class covering the area of the original time lapse polygons.
104 |
105 | Each polygon in the output is equivalent to one square of a raster. The dataset is meant to be used with
106 | Spatial Join with the original time lapse polygon dataset in order to count the number of original polygons
107 | overlapping that cell.
108 | """
109 | try:
110 | # Project to World Cylindrical Equal Area (WKID 54034), which preserves area reasonably well worldwide and
111 | # has units of meters
112 | sr_world_cylindrical = arcpy.SpatialReference(54034)
113 | self.projected_polygons = self._make_temporary_output_path("ProjectedPolys")
114 | arcpy.management.Project(self.in_time_lapse_polys, self.projected_polygons, sr_world_cylindrical)
115 |
116 | # Convert the full time lapse dataset into a temporary raster. The cell values are irrelevant.
117 | poly_oid = arcpy.Describe(self.projected_polygons).OIDFieldName
118 | temp_raster = self._make_temporary_output_path("InitialRaster")
119 | arcpy.conversion.FeatureToRaster(self.projected_polygons, poly_oid, temp_raster, cell_size=self.cell_size)
120 |
121 | # Create a temporary point dataset with one point for the centroid of every raster cell
122 | # The value of the points is irrelevant. We just need their geometry and an OID.
123 | temp_points = self._make_temporary_output_path("Points")
124 | arcpy.conversion.RasterToPoint(temp_raster, temp_points)
125 |
126 | # Create a new raster from the points with the same cell size as the initial raster. Set the value of each
127 | # cell equal to the value of the OID of the point it was created from. This way, each cell has a unique
128 | # value.
129 | pt_oid = arcpy.Describe(temp_points).OIDFieldName
130 | temp_raster2 = self._make_temporary_output_path("ProcessedRaster")
131 | arcpy.conversion.FeatureToRaster(temp_points, pt_oid, temp_raster2, cell_size=self.cell_size)
132 |
133 | # Convert this raster to polygons. The result contains one square polygon per raster cell and can be used
134 | # for calculating spatial joins with the original time lapse polygon dataset.
135 | self.raster_template = self._make_temporary_output_path("PolyRasterTemplate")
136 | arcpy.conversion.RasterToPolygon(temp_raster2, self.raster_template, simplify=False)
137 |
138 | except arcpy.ExecuteError:
139 | # Catch any errors from GP tools and pass them through cleanly so we don't get a nasty traceback.
140 | # Any number of odd geometry errors could occur here.
141 | arcpy.AddError("Failed to rasterize time lapse polygons.")
142 | raise AnalysisHelpers.GPError()
143 |
144 | def _calculate_percent_access_in_parallel(self):
145 | """Calculate the percent access polygons in parallel."""
146 | # Launch the parallel_cpap.py script as a subprocess so it can spawn parallel processes. We have to do this
147 | # because a tool running in the Pro UI cannot call concurrent.futures without opening multiple instances of Pro.
148 | inputs = [
149 | "--time-lapse-polygons", self.projected_polygons,
150 | "--raster-template", self.raster_template,
151 | "--output-fc", self.out_cell_counts_fc,
152 | "--max-processes", str(self.max_processes)
153 | ]
154 | AnalysisHelpers.execute_subprocess("parallel_cpap.py", inputs)
155 |
156 | # At this point, the main output feature class should exist
157 | if not arcpy.Exists(self.out_cell_counts_fc):
158 | arcpy.AddError((
159 | "Create Percent Access Polygons parallelization completed successfully, but output feature class does "
160 | "not exist."))
161 | sys.exit()
162 |
163 | def _make_percent_polygons(self):
164 | """Create dissolved polygons representing each designated percent threshold.
165 |
166 | For each percent threshold, dissolve the cells where the percentage of times reached meets or exceeds the threshold. Each
167 | threshold gets its own polygon, and they are all output to the same feature class.
168 | """
169 | first = True
170 | temp_out_dissolve_fc = self._make_temporary_output_path("Dissolve")
171 | for percent in sorted(self.percents):
172 | # Select all the cells where the number of times with access is >= our percent threshold
173 | # The result is all the cells that are reachable at least X% of start times
174 | query = arcpy.AddFieldDelimiters(self.out_cell_counts_fc, "Percent") + " >= " + str(percent)
175 | percent_layer_name = "PercentLayer"
176 | with arcpy.EnvManager(overwriteOutput=True):
177 | percent_layer = arcpy.management.MakeFeatureLayer(self.out_cell_counts_fc, percent_layer_name, query)
178 |
179 | # Dissolve everything that meets the threshold into one polygon
180 | if first:
181 | out_dissolve = self.out_percents_fc
182 | else:
183 | out_dissolve = temp_out_dissolve_fc
184 | with arcpy.EnvManager(overwriteOutput=True):
185 | arcpy.management.Dissolve(percent_layer, out_dissolve, AnalysisHelpers.FIELDS_TO_PRESERVE)
186 |
187 | # Calculate the percent field
188 | percent_field = "Percent"
189 | arcpy.management.AddField(out_dissolve, percent_field, "DOUBLE")
190 | arcpy.management.CalculateField(out_dissolve, percent_field, str(percent))
191 |
192 | if not first:
193 | # If this wasn't the first percent output, append it to the master output fc
194 | arcpy.management.Append(out_dissolve, self.out_percents_fc, "TEST")
195 | first = False
196 |
197 | def _make_temporary_output_path(self, name):
198 | """Make a path in the scratch gdb for a temporary intermediate output and track it for later deletion."""
199 | # CreateUniqueName returns the full catalog path of a unique name in the scratch gdb.
200 | temp_output = arcpy.CreateUniqueName(name, arcpy.env.scratchGDB)  # pylint: disable=no-member
201 | self.temp_outputs.append(temp_output)
202 | return temp_output
203 |
204 |
205 | if __name__ == '__main__':
206 | in_time_lapse_polys = sys.argv[1]
207 | out_cell_counts_fc = sys.argv[2]
208 | cell_size_in_meters = float(sys.argv[3])  # numeric parameters arrive as strings on the command line
209 | max_processes = int(sys.argv[4])
210 | out_percents_fc = sys.argv[5] if sys.argv[5] else None  # optional output
211 | percents = [float(p) for p in sys.argv[6].split(";")] if sys.argv[6] else None  # assumes a semicolon-delimited multivalue string, e.g. "50;75;90"
212 | cpap_calculator = PercentAccessPolygonCalculator(
213 | in_time_lapse_polys, out_cell_counts_fc, cell_size_in_meters, max_processes, out_percents_fc, percents)
214 | cpap_calculator.execute()
215 |
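
For reference, a minimal sketch of driving the class above directly from Python rather than from the toolbox; the geodatabase paths are placeholders, and the time lapse polygons are assumed to come from the Prepare Time Lapse Polygons tool.

    import arcpy
    from CreatePercentAccessPolygon import PercentAccessPolygonCalculator

    # Placeholder paths; substitute real feature classes from your own geodatabase.
    calculator = PercentAccessPolygonCalculator(
        in_time_lapse_polys=r"C:\data\Transit.gdb\TimeLapsePolys",
        out_cell_counts_fc=r"C:\data\Transit.gdb\PercentAccess_Cells",
        cell_size_in_meters=100.0,
        max_processes=4,
        out_percents_fc=r"C:\data\Transit.gdb\PercentAccess_Thresholds",
        percents=[75.0, 90.0],  # e.g., areas reachable at least 75% and 90% of start times
    )
    calculator.execute()
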
--------------------------------------------------------------------------------
/transit-network-analysis-tools/Transit Network Analysis Tools.PrepareTimeLapsePolygons.pyt.xml:
--------------------------------------------------------------------------------
1 |
2 | [ArcGIS item metadata (ItemDescription) for the "Prepare Time Lapse Polygons" ArcToolbox tool; created 20190517, last updated 20220503. The parameter help text it contains is listed below.]
    - A feature class or layer of points you want to use as the starting or ending locations for your Service Area polygons.
    - Output feature class created by the tool.
    - The network dataset or service URL to use for the calculation. You should use a transit-enabled network dataset created with these tools available in ArcGIS Pro (https://pro.arcgis.com:443/en/pro-app/help/analysis/networks/network-analysis-with-public-transit-data.htm) or an ArcGIS Enterprise service created from such a network. Technically, however, the tool will work with any network dataset that has at least one time-based travel mode.
    - The name of a time-based travel mode (https://pro.arcgis.com:443/en/pro-app/latest/help/analysis/networks/travel-modes.htm) on the network dataset you wish to use to calculate the Service Area. Typically you should choose a travel mode modeling travel by public transit.
    - One or more travel time limits for the Service Area calculation. For example, if you want to create Service Area polygons showing the area reachable within 45 minutes of travel time, set the Cutoff Times to 45, and set the Cutoff Time Units to Minutes. You can enter more than one value.
    - The units of time in which to interpret the Cutoff Times.
    - Day of the week or YYYYMMDD date for the first start time of your analysis.
    - The lower end of the time window you wish to analyze. Must be in HH:MM format (24-hour time). For example, 2 AM is 02:00, and 2 PM is 14:00.
    - If you're using a generic weekday for Start Day, you must use the same day for End Day. If you want to run an analysis spanning multiple days, choose specific YYYYMMDD dates for both Start Day and End Day.
    - The upper end of the time window you wish to analyze. Must be in HH:MM format (24-hour time). The End Time is inclusive, meaning that a Service Area polygon will be included in the results for the time of day you enter here.
    - Increment the Service Area's time of day by this amount between solves. For example, for a Time Increment of 1 minute, the results may include a Service Area polygon for 10:00, 10:01, 10:02, etc. A Time Increment of 2 minutes would generate Service Area polygons for 10:00, 10:02, 10:04, etc.
    - Indicates whether the direction of travel for the Service Area should be away from the facilities or toward the facilities. When traveling away from facilities, the times of day are interpreted as the time at which the traveler leaves the facility. When traveling toward facilities, the times of day are interpreted as the time at which the traveler arrives at the facility.
    - Indicates how geometry will be handled when there are multiple cutoffs. You can choose to create concentric Service Area polygons as disks or rings. This parameter is irrelevant and hidden if you have only one cutoff.
        - Rings: The polygons extend between the nearest cutoff values only. They do not include the area of smaller breaks. For example, if you specify cutoffs of 30 and 45 minutes, you will get one polygon representing the area reachable within 0 to 30 minutes and another polygon representing the additional area reachable between 30 and 45 minutes.
        - Disks: The polygons extend from the facility to the cutoff. For example, if you specify cutoffs of 30 and 45 minutes, you will get one polygon representing the area reachable within 0 to 30 minutes and another polygon representing the area reachable within 0 to 45 minutes.
    - Indicates how geometry will be handled when multiple Service Areas from different facilities overlap one another. This parameter is irrelevant if you have only one facility.
        - Overlap: An individual polygon is created for each facility. The polygons can overlap each other.
        - Dissolve: The Service Area polygons of multiple facilities that have the same cutoff values are merged into one polygon. If the polygons of a given break value don't touch, they are nonetheless merged into one multipart polygon.
        - Split: Individual polygons are created for each facility covering only the area closest to that facility. The polygons do not overlap each other and may not extend to the full cutoff.
    - For maximum efficiency, this tool solves the Service Area for different start times in parallel across multiple cores of your machine. This parameter designates the number of parallel processes that can safely be used. You should select a number less than or equal to the number of virtual cores or processors your computer has.
    - Optionally, choose layers with point, line, or polygon barriers to use in the Service Area analysis.
    - When doing a Service Area analysis, the input facility points must be "located" on the network dataset (https://pro.arcgis.com:443/en/pro-app/latest/help/analysis/networks/locating-analysis-inputs.htm). Because the tool parallelizes the Service Area across multiple processes, using the same facilities many times, it saves time to calculate the network locations in advance rather than repeating this calculation in every parallel process. The only time you should uncheck this parameter is if you have already calculated the network locations of your input facilities for the network dataset and travel mode you are using, and you simply wish to re-use these.
3 |
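
The parameter help above maps closely onto a single arcpy.nax Service Area solve repeated across the time window. A simplified sketch of one such solve follows; the network path, facilities path, and travel mode name are placeholders, and the actual tool adds parallelization, barriers, and location precalculation.

    import datetime
    import arcpy

    network = r"C:\data\TransitNetwork.gdb\TransitNetwork\TransitNetwork_ND"  # placeholder network dataset
    facilities = r"C:\data\Transit.gdb\Facilities"                            # placeholder facilities

    sa = arcpy.nax.ServiceArea(network)
    sa.travelMode = arcpy.nax.GetTravelModes(network)["Public transit time"]  # assumed travel mode name
    sa.timeUnits = arcpy.nax.TimeUnits.Minutes
    sa.defaultImpedanceCutoffs = [30, 45]
    sa.travelDirection = arcpy.nax.TravelDirection.FromFacility
    sa.geometryAtCutoff = arcpy.nax.ServiceAreaPolygonCutoffGeometry.Disks
    sa.geometryAtOverlap = arcpy.nax.ServiceAreaOverlapGeometry.Overlap
    sa.load(arcpy.nax.ServiceAreaInputDataType.Facilities, facilities)

    # One solve per time step; the tool repeats this for every increment in the time window.
    sa.timeOfDay = datetime.datetime(1900, 1, 3, 10, 0)  # placeholder date and time
    result = sa.solve()
    if result.solveSucceeded:
        result.export(arcpy.nax.ServiceAreaOutputDataType.Polygons, r"C:\data\Transit.gdb\Polys_1000")
    else:
        print(result.solverMessages(arcpy.nax.MessageSeverity.All))
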
--------------------------------------------------------------------------------