├── .dockerignore
├── .gitignore
├── CITATION.cff
├── Dockerfile
├── LICENSE
├── README.md
├── attributes.md
├── core
│   ├── attributes_step.py
│   ├── db_step.py
│   ├── export_step.py
│   ├── import_step.py
│   ├── index_step.py
│   ├── network_step.py
│   └── optional_step.py
├── docker-compose.yml
├── docker.md
├── examples
│   ├── dev_example_docker_only.yml
│   ├── dev_example_existing_db.yml
│   ├── dev_example_existing_db_no_docker.yml
│   ├── docker-compose.yml
│   ├── profile_bike.yml
│   ├── profile_walk.yml
│   ├── settings_gip.yml
│   ├── settings_osm_file.yml
│   └── settings_osm_query.yml
├── generate_index.py
├── requirements.txt
├── resources
│   └── default.style
├── settings.md
├── settings.py
├── sql
│   ├── functions
│   │   ├── calculate_index.sql.j2
│   │   ├── determine_utmzone.sql
│   │   ├── gip_calculate_bicycle_infrastructure.sql
│   │   ├── gip_calculate_pedestrian_infrastructure.sql
│   │   ├── gip_calculate_road_category.sql
│   │   ├── osm_calculate_access_bicycle.sql
│   │   ├── osm_calculate_access_car.sql
│   │   ├── osm_calculate_access_pedestrian.sql
│   │   └── osm_delete_dangling_edges.sql
│   └── templates
│       ├── export.sql.j2
│       ├── gip_attributes.sql.j2
│       ├── gip_network.sql.j2
│       ├── index.sql.j2
│       ├── osm_attributes.sql.j2
│       └── osm_network.sql.j2
└── toolbox
    ├── dbhelper.py
    └── helper.py
/.dockerignore:
--------------------------------------------------------------------------------
1 | data/
2 | venv/
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # exclude data directory
2 | data/*
3 |
4 | # exclude VSCode files
5 | .vscode/*
6 |
7 | # Byte-compiled / optimized / DLL files
8 | __pycache__/
9 | *.py[cod]
10 | *$py.class
11 |
12 | # C extensions
13 | *.so
14 |
15 | # Distribution / packaging
16 | .Python
17 | build/
18 | develop-eggs/
19 | dist/
20 | downloads/
21 | eggs/
22 | .eggs/
23 | lib/
24 | lib64/
25 | parts/
26 | sdist/
27 | var/
28 | wheels/
29 | share/python-wheels/
30 | *.egg-info/
31 | .installed.cfg
32 | *.egg
33 | MANIFEST
34 |
35 | # PyInstaller
36 | # Usually these files are written by a python script from a template
37 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
38 | *.manifest
39 | *.spec
40 |
41 | # Installer logs
42 | pip-log.txt
43 | pip-delete-this-directory.txt
44 |
45 | # Unit test / coverage reports
46 | htmlcov/
47 | .tox/
48 | .nox/
49 | .coverage
50 | .coverage.*
51 | .cache
52 | nosetests.xml
53 | coverage.xml
54 | *.cover
55 | *.py,cover
56 | .hypothesis/
57 | .pytest_cache/
58 | cover/
59 |
60 | # Translations
61 | *.mo
62 | *.pot
63 |
64 | # Django stuff:
65 | *.log
66 | local_settings.py
67 | db.sqlite3
68 | db.sqlite3-journal
69 |
70 | # Flask stuff:
71 | instance/
72 | .webassets-cache
73 |
74 | # Scrapy stuff:
75 | .scrapy
76 |
77 | # Sphinx documentation
78 | docs/_build/
79 |
80 | # PyBuilder
81 | .pybuilder/
82 | target/
83 |
84 | # Jupyter Notebook
85 | .ipynb_checkpoints
86 |
87 | # IPython
88 | profile_default/
89 | ipython_config.py
90 |
91 | # pyenv
92 | # For a library or package, you might want to ignore these files since the code is
93 | # intended to run in multiple environments; otherwise, check them in:
94 | # .python-version
95 |
96 | # pipenv
97 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
98 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
99 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
100 | # install all needed dependencies.
101 | #Pipfile.lock
102 |
103 | # poetry
104 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
105 | # This is especially recommended for binary packages to ensure reproducibility, and is more
106 | # commonly ignored for libraries.
107 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
108 | #poetry.lock
109 |
110 | # pdm
111 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
112 | #pdm.lock
113 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
114 | # in version control.
115 | # https://pdm.fming.dev/#use-with-ide
116 | .pdm.toml
117 |
118 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
119 | __pypackages__/
120 |
121 | # Celery stuff
122 | celerybeat-schedule
123 | celerybeat.pid
124 |
125 | # SageMath parsed files
126 | *.sage.py
127 |
128 | # Environments
129 | .env
130 | .venv
131 | env/
132 | venv/
133 | ENV/
134 | env.bak/
135 | venv.bak/
136 |
137 | # Spyder project settings
138 | .spyderproject
139 | .spyproject
140 |
141 | # Rope project settings
142 | .ropeproject
143 |
144 | # mkdocs documentation
145 | /site
146 |
147 | # mypy
148 | .mypy_cache/
149 | .dmypy.json
150 | dmypy.json
151 |
152 | # Pyre type checker
153 | .pyre/
154 |
155 | # pytype static type analyzer
156 | .pytype/
157 |
158 | # Cython debug symbols
159 | cython_debug/
160 |
161 | # PyCharm
162 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
163 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
164 | # and can be added to the global gitignore or merged into this file. For a more nuclear
165 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
166 | .idea/
167 |
168 | .DS_Store
169 |
--------------------------------------------------------------------------------
/CITATION.cff:
--------------------------------------------------------------------------------
1 | cff-version: 1.2.0
2 | message: "If you use this software, please cite it as follows:"
3 | authors:
4 | - family-names: "Werner"
5 | given-names: "Christian"
6 | orcid: "https://orcid.org/0000-0001-9406-9284"
7 | - family-names: "Wendel"
8 | given-names: "Robin"
9 | orcid: "https://orcid.org/0000-0001-9270-2883"
10 | - family-names: "Kaziyeva"
11 | given-names: "Dana"
12 | orcid: "https://orcid.org/0000-0001-9616-009X"
13 | - family-names: "Stutz"
14 | given-names: "Petra"
15 | - family-names: "van der Meer"
16 | given-names: "Lucas"
17 | orcid: "https://orcid.org/0000-0001-6336-8628"
18 | - family-names: "Effertz"
19 | given-names: "Lea"
20 | - family-names: "Zagel"
21 | given-names: "Bernhard"
22 | orcid: "https://orcid.org/0000-0003-4134-0039"
23 | - family-names: "Loidl"
24 | given-names: "Martin"
25 | orcid: "https://orcid.org/0000-0003-0474-3234"
26 | title: "NetAScore"
27 | doi: 10.5281/zenodo.7695369
28 | date-released: 2023-03-31
29 | url: "https://doi.org/10.5281/zenodo.7695369"
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.8.17-bullseye AS buildstage
2 |
3 | WORKDIR /usr/src/netascore
4 |
5 | # COPY generate_index.py .
6 | COPY *.py ./
7 | COPY core core/
8 | COPY resources resources/
9 | COPY sql sql/
10 | COPY toolbox toolbox/
11 | COPY examples examples/
12 | # COPY settings settings/
13 | COPY requirements.txt .
14 |
15 | ENV DEBIAN_FRONTEND=noninteractive
16 | RUN apt update && \
17 | apt install -y \
18 | gdal-bin \
19 | libgdal-dev \
20 | postgresql-client-common \
21 | postgresql-client-13 \
22 | osm2pgsql && \
23 | rm -rf /var/lib/apt/lists/*
24 |
25 | RUN pip install --no-cache-dir -r requirements.txt
26 |
27 | ENTRYPOINT [ "python", "./generate_index.py" ]
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 PLUS Mobility Lab
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # NetAScore - Network Assessment Score Toolbox for Sustainable Mobility
2 |
3 |
11 | NetAScore provides a toolset and automated workflow for computing ***bikeability***, ***walkability*** and related indicators from publicly available network data sets. Currently, we provide common presets for assessing infrastructure suitability for cycling (*bikeability*) and walking (*walkability*). By editing settings files and mode profiles, additional modes or custom preferences can easily be modeled.
12 |
13 | For global coverage, we support **OpenStreetMap** data as input. Additionally, Austrian authoritative data, the **'GIP'**, can be used if you work on an area of interest within Austria.
14 |
 15 | For citing NetAScore, please refer to the following paper, which introduces the software, its objectives, and the data and methods used:
 16 | Werner, C., Wendel, R., Kaziyeva, D., Stutz, P., van der Meer, L., Effertz, L., Zagel, B., & Loidl, M. (2024). NetAScore: An open and extendible software for segment-scale bikeability and walkability. *Environment and Planning B: Urban Analytics and City Science*, 0(0). [doi.org/10.1177/23998083241293177](https://doi.org/10.1177/23998083241293177). In case you want to refer to a specific version of the software implementation, you may add the respective Zenodo reference: [doi.org/10.5281/zenodo.7695369](https://doi.org/10.5281/zenodo.7695369).
17 |
18 | Details regarding the **bikeability assessment method** as well as results of an **evaluation study** are provided in the following **scientific publication**, which is openly available via [doi.org/10.1016/j.jcmr.2024.100040](https://doi.org/10.1016/j.jcmr.2024.100040): Werner, C., van der Meer, L., Kaziyeva, D., Stutz, P., Wendel, R., & Loidl, M. (2024). Bikeability of road segments: An open, adjustable and extendible model. *Journal of Cycling and Micromobility Research*, *2*, 100040.
19 |
 20 | Details on the **walkability index**, together with results from a large **evaluation study**, are published Open Access via [doi.org/10.3390/su17083634](https://doi.org/10.3390/su17083634): Stutz, P., Kaziyeva, D., Traun, C., Werner, C., & Loidl, M. (2025). Walkability at Street Level: An Indicator-Based Assessment Model. *Sustainability*, *17*(8), 3634.
21 |
 22 | **Examples:** You can find example output files of NetAScore at [doi.org/10.5281/zenodo.10886961](https://doi.org/10.5281/zenodo.10886961).
23 |
 24 | You can find **more information** on NetAScore in the **[wiki](https://github.com/plus-mobilitylab/netascore/wiki)**:
25 |
26 | * [About NetAScore](https://github.com/plus-mobilitylab/netascore/wiki)
27 | * [Quickstart-guide](https://github.com/plus-mobilitylab/netascore/wiki/Quickstart%E2%80%90Guide)
28 | * [The Workflow](https://github.com/plus-mobilitylab/netascore/wiki/The-workflow)
29 | * [How to run the Project...](https://github.com/plus-mobilitylab/netascore/wiki/How-to-run-the-project)
30 | * [...in a Docker environment](https://github.com/plus-mobilitylab/netascore/wiki/How-to-run-the-project-in-a-Docker-environment)
31 | * [...directly on your Machine (Python)](https://github.com/plus-mobilitylab/netascore/wiki/Run-NetAScore-manually-with-Python)
32 | * [Attributes & Indicators](https://github.com/plus-mobilitylab/netascore/wiki/Attributes-and-Indicators)
33 | * [Attribute derivation from OSM](https://github.com/plus-mobilitylab/netascore/wiki/Attribute-derivation-from-OSM)
34 | * [Attribute derivation from GIP](https://github.com/plus-mobilitylab/netascore/wiki/Attribute-derivation-from-GIP)
35 | * [Configuration of the Settings](https://github.com/plus-mobilitylab/netascore/wiki/Configuration-of-the-settings)
36 | * [Contribute to the Project!](https://github.com/plus-mobilitylab/netascore/wiki/How-to-contribute)
37 | * [Requirements and Limitations](https://github.com/plus-mobilitylab/netascore/wiki/Requirements-and-Limitations)
38 | * [Credits and License](https://github.com/plus-mobilitylab/netascore/wiki/Credits-and-license)
39 |
40 | ## How to get started?
41 |
42 | To get a better impression of what this toolset and workflow provides, you can quickly start with processing a sample area.
43 |
44 | ### Easy quickstart: ready-made Docker image
45 |
 46 | The easiest way to get started is running the ready-made Docker image. All you need for this to succeed is a [Docker installation](https://docs.docker.com/engine/install/), a running Docker Desktop, and an internet connection. Then, follow these two steps:
47 |
 48 | - download the `docker-compose.yml` file from the `examples` directory ([download the raw file](https://github.com/plus-mobilitylab/netascore/blob/main/examples/docker-compose.yml)) to an empty directory
49 | - from within this directory, execute the following command from a terminal:
50 | `docker compose run netascore`
51 |
 52 | Docker will download the NetAScore image and the PostgreSQL database image, set up the environment for you, and finally execute the workflow for Salzburg, Austria as an example case.
53 |
54 | #### What it does (example case):
55 |
 56 | NetAScore first resolves an area of interest from a place name via the Overpass API, then downloads the corresponding OpenStreetMap data, and finally imports, processes, and exports the dataset. A new subdirectory named `data` will be present after successful execution. Within this folder, the assessed network is stored in `netascore_salzburg.gpkg`. It includes *bikeability* in columns `index_bike_ft` and `index_bike_tf` and *walkability* in `index_walk_ft` and `index_walk_tf`. The suffixes `ft` and `tf` refer to the direction along an edge: *from-to* or *to-from* node. These values represent the assessed suitability of a segment for cycling (*bikeability*) and walking (*walkability*).
57 |
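If you prefer inspecting the result in Python rather than a GIS, the GeoPackage can be read e.g. with geopandas (an illustrative sketch; geopandas is not a NetAScore dependency):

```python
import geopandas as gpd

# load the assessed network (layer "edge", as exported by NetAScore)
edges = gpd.read_file("data/netascore_salzburg.gpkg", layer="edge")

# bikeability / walkability per segment and direction (0 = unsuitable, 1 = well suited)
print(edges[["index_bike_ft", "index_bike_tf", "index_walk_ft", "index_walk_tf"]].describe())
```
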
58 | #### What the results look like:
59 |
 60 | Currently, NetAScore does not come with a built-in visualization module. However, you can easily visualize the *bikeability* and *walkability* indices by loading the resulting geopackage in [QGIS](https://qgis.org). Simply drag and drop the geopackage into a new QGIS project and select the `edge` layer. Then, in the layer properties, define a symbology that visualizes one of the computed index values - e.g. `index_bike_ft` for *bikeability* (`_ft`: bikeability in the forward direction of each segment). Please note that from version 1.0 onwards, an index value of `0` refers to unsuitable infrastructure, whereas `1` represents well suited infrastructure.
61 |
62 | This is an exemplary visualization of *bikeability* for Salzburg, Austria:
63 |
64 | 
65 |
66 | #### How to proceed?
67 |
 68 | Most likely, you want to execute an analysis for a specific area of interest - please see the [instructions in the wiki](https://github.com/plus-mobilitylab/netascore/wiki/How-to-run-the-project-in-a-Docker-environment#run-netascore-for-your-own-area-of-interest) for how to achieve this by changing just one line in the settings file.
 69 | If you need more detailed instructions or want to know more about the project, please consult the [wiki](https://github.com/plus-mobilitylab/netascore/wiki).
70 |
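Under the hood, the `import` section of the settings file is parsed into a plain dict and handed to the import step (see `core/import_step.py`). For an OSM download by place name, it corresponds to roughly the following (a sketch; the keys mirror those queried in `OsmImporter.run_step`, the values are illustrative):

```python
# illustrative values only - keys as read by core/import_step.py
import_settings = {
    "place_name": "Salzburg",  # area of interest, resolved via the Overpass API
    "on_existing": "skip",     # skip / delete / abort if the AOI or OSM file already exists
    "buffer": 500,             # bbox buffer around the AOI in meters (default: 500)
    "interactive": True,       # ask which AOI to use if several match
}
```
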
71 | ### Running NetAScore locally (without Docker)
72 |
 73 | For running NetAScore without Docker, you need several software packages and Python libraries installed on your machine. You can find all details in the section ["How to run the project"](https://github.com/plus-mobilitylab/netascore/wiki/How-to-run-the-project).
74 |
75 | **NetAScore uses the following technologies:**
76 |
77 | - python 3
78 | - PostgreSQL with PostGIS extension
79 | - Docker (optional)
80 | - psql
81 | - ogr2ogr
82 | - osm2pgsql
83 | - raster2pgsql
84 | - [several python libraries](../main/requirements.txt)
85 |
--------------------------------------------------------------------------------
/attributes.md:
--------------------------------------------------------------------------------
1 | # Attributes / Indicators
2 |
3 | Attributes and indicators each describe a certain property of a road segment.
4 | Those marked with an asterisk (*) are differentiated by direction (ft/tf).
5 | Indicators are used for index calculation.
6 |
7 | ## Attributes
8 |
9 | ### access_car\_* / access_bicycle\_* / access_pedestrian\_*
10 |
11 | Indicates the accessibility of a road segment for cars, bicycles or pedestrians: `true`, `false`
12 |
13 | ### bridge
14 |
15 | Indicates a bridge on a road segment: `true`, `false`
16 |
17 | ### tunnel
18 |
19 | Indicates a tunnel on a road segment: `true`, `false`
20 |
21 | ## Indicators
22 |
23 | ### bicycle_infrastructure_*
24 |
25 | Describes the existence of dedicated bicycle infrastructure: `bicycle_way`, `mixed_way`, `bicycle_road`, `cyclestreet`, `bicycle_lane`, `bus_lane`, `no`.
26 |
27 | ### buildings
28 |
29 | Describes the proportion of building area within a 30 m buffer: `0` to `100`.
30 |
31 |
32 | ### crossings
33 |
34 | Describes the number of crossings within a 10 m buffer.
35 |
36 |
37 | ### designated_route_*
38 |
39 | Describes the existence of designated cycling routes categorized by impact: `local`, `regional`, `national`, `international`, `unknown`, `no`.
40 |
41 |
42 | ### facilities
43 |
44 | Describes the number of facilities (POIs) within a 30 m buffer.
45 |
46 |
47 | ### gradient_*
48 |
49 | Describes the gradient class of a road segment for downhill and uphill: `-4` to `4`.
50 |
51 | | Class | Definition  |
52 | |-------|-------------|
53 | | 0     | 0 - 1.5 %   |
54 | | 1     | > 1.5 - 3 % |
55 | | 2     | > 3 - 6 %   |
56 | | 3     | > 6 - 12 %  |
57 | | 4     | > 12 %      |
58 |
59 | The influence of gradient classes on the final index can be assigned per mode using the section `indicator_mapping` within mode profile files.
60 |
61 |
62 | ### greenness
63 |
64 | Describes the proportion of green area within a 30 m buffer: `0` to `100`.
65 |
66 |
67 | ### max_speed\_* / max_speed_greatest
68 |
69 | `max_speed_*` describes the speed limit for cars in the direction of travel, or the average car speed if no speed limit is available: `0` to `130`. `max_speed_greatest` uses the maximum of the speed limits for both directions of travel on a segment.
70 |
71 |
72 | ### noise
73 |
74 | Describes the noise level of a road segment in decibels.
75 |
76 |
77 | ### number_lanes_*
78 |
79 | Describes the number of lanes of a road segment.
80 |
81 |
82 | ### parking_* (not in use)
83 |
84 | Describes designated parking lots: `yes`, `no`. Currently, this indicator is not computed due to limited data availability. This is documented accordingly in the `index_*_robustness` column of the output dataset.
85 |
86 |
87 | ### pavement
88 |
89 | Describes the surface type of a road segment: `asphalt`, `gravel`, `cobble`, `soft`.
90 |
91 |
92 | ### pedestrian_infrastructure_*
93 |
94 | Describes the existence of dedicated pedestrian infrastructure: `pedestrian_area`, `pedestrian_way`, `mixed_way`, `stairs`, `sidewalk`, `no`.
95 |
96 |
97 | ### road_category
98 |
99 | Describes the road category of a road segment: `primary`, `secondary`, `residential`, `service`, `calmed`, `no_mit`, `path`.
100 |
101 |
102 | ### water
103 |
104 | Describes the occurrence of water bodies within a 30 m buffer: `true`, `false`.
105 |
106 |
107 | ### width
108 |
109 | Describes the width of a road segment in meters.
110 |
--------------------------------------------------------------------------------
/core/attributes_step.py:
--------------------------------------------------------------------------------
1 | import toolbox.helper as h
2 | from core.db_step import DbStep
3 | from settings import DbSettings, GlobalSettings, InputType
4 | from toolbox.dbhelper import PostgresConnection
5 |
6 |
7 | class GipAttributesStep(DbStep):
8 | def __init__(self, db_settings: DbSettings):
9 | super().__init__(db_settings)
10 |
11 | def run_step(self, settings: dict):
12 | h.info('attributes step')
13 | h.log(f"using import settings: {str(settings)}")
14 |
15 | schema = self.db_settings.entities.network_schema
16 |
17 | # open database connection
18 | h.log('connecting to database...')
19 | db = PostgresConnection.from_settings_object(self.db_settings)
20 | db.connect()
21 | db.schema = schema
22 |
23 | # create functions
24 | h.log('create functions')
25 | db.execute_sql_from_file("gip_calculate_bicycle_infrastructure", "sql/functions")
26 | db.execute_sql_from_file("gip_calculate_pedestrian_infrastructure", "sql/functions")
27 | db.execute_sql_from_file("gip_calculate_road_category", "sql/functions")
28 | db.commit()
29 |
30 | # execute "gip_attributes"
31 |         h.logBeginTask('execute "gip_attributes"')
32 | if db.handle_conflicting_output_tables(['network_edge_attributes', 'network_edge_export', 'network_node_attributes']):
33 | params = { # TODO: @CW: check hard-coded vs. dynamic table names -> settings; also preferably use common data schema - e.g. to avoid providing combined schema + table identifiers
34 | 'schema_network': schema,
35 | 'schema_data': self.db_settings.entities.data_schema,
36 | 'table_dem': db.use_if_exists('dem', self.db_settings.entities.data_schema),
37 | 'table_noise': db.use_if_exists('noise', self.db_settings.entities.data_schema),
38 | 'column_noise': 'noise', # TODO: get from settings file
39 | 'table_building': db.use_if_exists('building', self.db_settings.entities.data_schema),
40 | 'table_crossing': db.use_if_exists('crossing', self.db_settings.entities.data_schema),
41 | 'table_facility': db.use_if_exists('facility', self.db_settings.entities.data_schema),
42 | 'table_greenness': db.use_if_exists('greenness', self.db_settings.entities.data_schema),
43 | 'table_water': db.use_if_exists('water', self.db_settings.entities.data_schema)
44 | }
45 | if params["table_dem"] is not None:
46 | h.majorInfo("WARNING: You provided a DEM file. However, for GIP attribute calculation only the elevation data contained in the GIP dataset is used. Your provided DEM is ignored.")
47 | db.execute_template_sql_from_file("gip_attributes", params)
48 | db.commit()
49 | h.logEndTask()
50 |
51 | # close database connection
52 | h.log('closing database connection')
53 | db.close()
54 |
55 |
56 | class OsmAttributesStep(DbStep):
57 | def __init__(self, db_settings: DbSettings):
58 | super().__init__(db_settings)
59 |
60 | def run_step(self, settings: dict):
61 | h.info('attributes step')
62 | h.log(f"using import settings: {str(settings)}")
63 |
64 | schema = self.db_settings.entities.network_schema
65 |
66 | # open database connection
67 | h.log('connecting to database...')
68 | db = PostgresConnection.from_settings_object(self.db_settings)
69 | db.connect()
70 | db.schema = schema
71 |
72 | # create functions
73 | h.log('create functions')
74 | db.execute_sql_from_file("osm_calculate_access_bicycle", "sql/functions")
75 | db.execute_sql_from_file("osm_calculate_access_car", "sql/functions")
76 | db.execute_sql_from_file("osm_calculate_access_pedestrian", "sql/functions")
77 | db.commit()
78 |
79 | # execute "osm_attributes"
80 | h.logBeginTask('execute "osm_attributes"')
81 | if db.handle_conflicting_output_tables(['network_edge_attributes', 'network_edge_export', 'network_node_attributes']):
82 | params = {
83 | # TODO: harmonize schema + load table names from dbEntitySettings (which are populated / overwritten by read values from settings file)
84 | 'schema_network': schema,
85 | 'schema_data': self.db_settings.entities.data_schema,
86 | 'table_dem': db.use_if_exists('dem', self.db_settings.entities.data_schema),
87 | 'table_noise': db.use_if_exists('noise', self.db_settings.entities.data_schema),
88 | 'column_noise': 'noise', # TODO: get from settings file
89 | 'table_building': db.use_if_exists('building', self.db_settings.entities.data_schema),
90 | 'table_crossing': db.use_if_exists('crossing', self.db_settings.entities.data_schema),
91 | 'table_facility': db.use_if_exists('facility', self.db_settings.entities.data_schema),
92 | 'table_greenness': db.use_if_exists('greenness', self.db_settings.entities.data_schema),
93 | 'table_water': db.use_if_exists('water', self.db_settings.entities.data_schema),
94 | 'target_srid': GlobalSettings.get_target_srid()
95 | }
96 | db.execute_template_sql_from_file("osm_attributes", params)
97 | db.commit()
98 | h.logEndTask()
99 |
100 | # close database connection
101 | h.log('closing database connection')
102 | db.close()
103 |
104 |
105 | def create_attributes_step(db_settings: DbSettings, import_type: str):
106 | if import_type.lower() == InputType.GIP.value.lower():
107 | return GipAttributesStep(db_settings)
108 | if import_type.lower() == InputType.OSM.value.lower():
109 | return OsmAttributesStep(db_settings)
110 | raise NotImplementedError(f"import type '{import_type}' not implemented")
111 |
--------------------------------------------------------------------------------
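A minimal sketch of how this step is driven elsewhere in the workflow, assuming a `DbSettings` instance `db_settings` built from the settings file (the variable names here are illustrative):

```python
from core.attributes_step import create_attributes_step

# the factory dispatches case-insensitively on the input type ("osm" or "gip")
step = create_attributes_step(db_settings, "osm")
step.run_step(attributes_settings)  # parsed 'attributes' section of the settings file
```
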
/core/db_step.py:
--------------------------------------------------------------------------------
1 | from settings import GlobalSettings, DbSettings
2 |
3 |
4 | class DbStep:
5 | db_settings: DbSettings
6 |
7 | def __init__(self, db_settings: DbSettings):
8 | self.db_settings = db_settings
9 |
10 | def run_step(self, settings: dict):
11 | raise NotImplementedError()
12 |
--------------------------------------------------------------------------------
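All pipeline steps share this minimal interface; a custom step would follow the same pattern as the concrete steps in `core/` (a hypothetical sketch, not part of NetAScore):

```python
import toolbox.helper as h
from core.db_step import DbStep
from toolbox.dbhelper import PostgresConnection


class MyCustomStep(DbStep):
    """Hypothetical example step."""

    def run_step(self, settings: dict):
        h.info('my custom step')
        # open a database connection from the shared settings object
        db = PostgresConnection.from_settings_object(self.db_settings)
        db.connect()
        # ... do the actual work here ...
        db.close()
```
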
/core/export_step.py:
--------------------------------------------------------------------------------
1 | import os
2 | import subprocess
3 |
4 | import toolbox.helper as h
5 | from core.db_step import DbStep
6 | from settings import DbSettings, GlobalSettings
7 | from toolbox.dbhelper import PostgresConnection
8 |
9 |
10 | def export_geopackage(connection_string: str, path: str, schema: str, table: str, layer: str, fid: str, geometry_type: str = None, update: bool = False) -> None:
11 | """Takes in a database table and exports it to a geopackage layer."""
12 | geometry_type = f"-nlt {geometry_type}" if geometry_type else ""
13 | update = "-update" if update else ""
14 |
15 | subprocess.run(f"ogr2ogr -f \"GPKG\" \"{path}\" PG:\"{connection_string}\" -lco FID={fid} -lco GEOMETRY_NAME=geom -nln {layer} {geometry_type} {update} -progress -sql \"SELECT * FROM {schema}.{table}\"",
16 | shell=True, check=True)
17 |
18 |
19 | class GeopackageExporter(DbStep):
20 | def run_step(self, settings: dict):
21 | h.info('exporting geopackage')
22 | h.log(f"using the following settings: {str(settings)}")
23 |
24 | schema = self.db_settings.entities.network_schema
25 | directory = GlobalSettings.data_directory
26 |
27 | # open database connection
28 | h.info('open database connection')
29 | db = PostgresConnection.from_settings_object(self.db_settings)
30 |
31 | # set search path
32 | h.log('set search path')
33 | db.schema = schema
34 |
35 | filename: str = settings['filename']
36 |         # substitute placeholder tokens (as used in the example settings files)
37 |         filename = filename.replace("<case>", GlobalSettings.case_id)
38 |         filename = filename.replace("<srid>", str(GlobalSettings.get_target_srid()))
38 |
39 | # delete file if exists
40 | if os.path.exists(os.path.join(directory, filename)):
41 | os.remove(os.path.join(directory, filename))
42 |
43 | # export layers "edge" and "node"
44 | h.logBeginTask('export layer "edge"')
45 | export_geopackage(db.connection_string_old, os.path.join(directory, filename), schema, table='export_edge', layer='edge', fid='edge_id', geometry_type='LINESTRING')
46 | h.logEndTask()
47 |
48 | h.logBeginTask('export layer "node"')
49 | export_geopackage(db.connection_string_old, os.path.join(directory, filename), schema, table='export_node', layer='node', fid='node_id', geometry_type='POINT', update=True)
50 | h.logEndTask()
51 |
52 | # close database connection
53 | h.log('close database connection')
54 | db.close()
55 |
56 |
57 | def create_exporter(db_settings: DbSettings, export_type: str):
58 | if export_type == 'geopackage':
59 | return GeopackageExporter(db_settings)
60 | raise NotImplementedError(f"export type '{export_type}' not implemented")
61 |
--------------------------------------------------------------------------------
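For reference, this is how the exporter is obtained and run, assuming a `DbSettings` instance `db_settings` (a sketch; the `<case>` placeholder in the filename is substituted with the case id in `run_step` above):

```python
from core.export_step import create_exporter

exporter = create_exporter(db_settings, "geopackage")
exporter.run_step({"filename": "netascore_<case>.gpkg"})  # written to the data directory
```
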
/core/import_step.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | import subprocess
4 | from urllib.error import HTTPError
5 | import urllib.request
6 | import zipfile
7 | from osgeo import ogr
8 | from typing import List
9 |
10 | import toolbox.helper as h
11 | from core.db_step import DbStep
12 | from settings import DbSettings, GlobalSettings, InputType
13 | from toolbox.dbhelper import PostgresConnection
14 |
15 |
16 | def create_csv(file_txt: str) -> None:
17 | """Takes in a path to an ogd gip txt file and converts it to a csv file."""
18 | with open(file_txt, 'r', encoding='iso-8859-1') as f:
19 | for line in f:
20 | if line.startswith('tbl;'):
21 | file_csv = open(os.path.splitext(file_txt)[0] + '.csv', 'w', encoding='utf-8')
22 | elif line.startswith('atr;'):
23 | file_csv.write(line[4:])
24 | elif line.startswith('rec;'):
25 | file_csv.write(line[4:].replace('""', '').replace('" "', ''))
26 |
27 | file_csv.close()
28 |
29 |
30 | def create_sql(file_txt: str) -> None:
31 | """Takes in a path to an ogd gip txt file and creates a sql file from it."""
32 | with open(file_txt, 'r', encoding='iso-8859-1') as f:
33 | for line in f:
34 | if line.startswith('tbl;'):
35 | file_sql = open(os.path.splitext(file_txt)[0] + '.sql', 'w', encoding='utf-8')
36 | tbl = line[4:].strip().lower()
37 | elif line.startswith('atr;'):
38 | atr = line[4:].strip().lower().split(';')
39 | elif line.startswith('frm;'):
40 | frm = line[4:].strip().lower().split(';')
41 | elif line.startswith('rec;'):
42 | break
43 |
44 | for i, atr_ in enumerate(atr):
45 | if atr_ == 'offset':
46 | atr[i] = 'offset_'
47 |
48 | for i, frm_ in enumerate(frm):
49 | if frm_ == 'string':
50 | frm[i] = 'varchar'
51 | if m := re.search(r"^(string)[(]([0-9]*)[)]", frm_):
52 | length = m.group(2)
53 | frm[i] = f"varchar({length})"
54 | elif m := re.search(r"^(decimal)[(]([0-9]*)[,]([0-9]*)[)]", frm_):
55 | precision = m.group(2)
56 | scale = m.group(3)
57 | frm[i] = f"numeric({precision},{scale})"
58 | elif m := re.search(r"^(decimal)[(]([0-9]*)[)]", frm_):
59 | precision = m.group(2)
60 | if int(precision) <= 4:
61 | frm[i] = "smallint"
62 | elif int(precision) <= 10:
63 | frm[i] = "integer"
64 | elif int(precision) <= 18:
65 | frm[i] = "bigint"
66 | else:
67 | frm[i] = f"numeric({precision})"
68 |
69 | columns = [f"{atr_} {frm_}" for atr_, frm_ in zip(atr, frm)]
70 | sql = f"CREATE TABLE gip_{tbl} ({', '.join(columns)});"
71 |
72 | file_sql.write(sql)
73 | file_sql.close()
74 |
75 |
76 | def import_csv(connection_string: str, path: str, schema: str, table: str) -> None:
77 | """Takes in a path to a csv file and imports it to a database table."""
78 | h.log(f"Importing CSV '{path}' into database: '{schema}.{table}'")
79 | # INFO: for psql in Windows, connection_string MUST be the LAST parameter - otherwise, further arguments are ignored
80 | subprocess.run(['psql', '-c', f"\\copy {schema}.{table} from '{path}' WITH CSV DELIMITER ';' NULL '' HEADER ENCODING 'utf-8'", connection_string],
81 | check=True)
82 |
83 |
84 | def import_geopackage(connection_string: str, path: str, schema: str, table: str, fid: str = None, target_srid: int = None, layers: List[str] = None, attributes: List[str] = None, geometry_types: List[str] = None) -> None: # TODO: @CW: add error handling
85 | """Takes in a path to a geopackage file and imports it to a database table."""
86 | data_source = ogr.Open(path)
87 |
88 | attributes = [] if attributes is None else attributes
89 | attributes = ','.join(attribute for attribute in attributes)
90 |
91 | geometry_types = [] if geometry_types is None else geometry_types
92 | geometry_types = ', '.join(f"'{geometry_type}'" for geometry_type in geometry_types)
93 |
94 | layers = [layer.GetName() for layer in data_source] if layers is None else layers
95 | layers_geometry_types = set(data_source.GetLayerByName(layer).GetGeomType() for layer in layers)
96 |
97 | fid = f"-lco FID={fid}" if fid else "-lco FID=fid"
98 | transform = f"-t_srs EPSG:{target_srid}" if target_srid else ""
99 | geometry_type = "-nlt GEOMETRY" if len(layers_geometry_types) > 1 else ""
100 |
101 | for layer in layers:
102 | h.log(f"import layer \"{layer}\"")
103 | geometry_column = data_source.GetLayerByName(layer).GetGeometryColumn()
104 |
105 | select = f"-select \"{attributes}\"" if attributes else ""
106 | where = f"-where \"ST_GeometryType({geometry_column}) IN ({geometry_types})\"" if geometry_types else ""
107 |
108 | result = subprocess.run(f"ogr2ogr -f PostgreSQL \"PG:{connection_string}\" {fid} -skipfailures -lco GEOMETRY_NAME=geom -nln {schema}.{table} {transform} {geometry_type} {select} {where} \"{path}\" \"{layer}\"",
109 | shell=True, check=True)
110 | h.debugLog(f"ogr2ogr returned code: {result.returncode}")
111 | #h.debugLog(f"ogr2ogr stdout: {result.args}")
112 |
113 |
114 | def import_osm(connection_string: str, path: str, path_style: str, schema: str, prefix: str = None) -> None:
115 | """Takes in a path to an osm pbf file and imports it to database tables."""
116 | prefix = f"--prefix {prefix}" if prefix else ""
117 |
118 | subprocess.run(f"osm2pgsql --database={connection_string} --middle-schema={schema} --output-pgsql-schema={schema} {prefix} --latlong --slim --hstore --style=\"{path_style}\" \"{path}\"",
119 | shell=True, check=True)
120 |
121 |
122 | class GipImporter(DbStep):
123 | def __init__(self, db_settings: DbSettings):
124 | super().__init__(db_settings)
125 |
126 | def run_step(self, settings: dict):
127 | h.info('importing gip')
128 | h.log(f"using import settings: {str(settings)}")
129 |
130 | schema = self.db_settings.entities.data_schema
131 | directory = GlobalSettings.data_directory
132 |
133 | files_A = [
134 | {'filename': 'BikeHike.txt', 'table': 'gip_bikehike', 'columns': ['use_id']},
135 | {'filename': 'Link.txt', 'table': 'gip_link', 'columns': ['link_id']},
136 | {'filename': 'LinkCoordinate.txt', 'table': 'gip_linkcoordinate', 'columns': ['link_id', 'count']},
137 | {'filename': 'LinkUse.txt', 'table': 'gip_linkuse', 'columns': ['use_id']},
138 | {'filename': 'Link2ReferenceObject.txt', 'table': 'gip_link2referenceobject', 'columns': ['idseq']},
139 | {'filename': 'Node.txt', 'table': 'gip_node', 'columns': ['node_id']},
140 | {'filename': 'ReferenceObject.txt', 'table': 'gip_referenceobject', 'columns': ['refobj_id']},
141 | ]
142 |
143 | # open database connection
144 | h.log('connecting to database...')
145 | db = PostgresConnection.from_settings_object(self.db_settings)
146 | db.connect()
147 | db.init_extensions_and_schema(schema)
148 |
149 | # extract zip files
150 | h.logBeginTask('extract zip files')
151 | with zipfile.ZipFile(os.path.join(directory, settings['filename_A']), 'r') as zf:
152 | for file in files_A:
153 | if not os.path.isfile(os.path.join(directory, os.path.splitext(settings['filename_A'])[0], file['filename'])):
154 | zf.extract(file['filename'], os.path.join(directory, os.path.splitext(settings['filename_A'])[0]))
155 | h.logEndTask()
156 |
157 | # create tables from files_A
158 | for file in files_A:
159 | h.logBeginTask(f"create table \"{file['table']}\"")
160 | if not os.path.isfile(os.path.join(directory, os.path.splitext(settings['filename_A'])[0], f"{os.path.splitext(file['filename'])[0]}.csv")):
161 | create_csv(os.path.join(directory, os.path.splitext(settings['filename_A'])[0], file['filename']))
162 | if not os.path.isfile(os.path.join(directory, os.path.splitext(settings['filename_A'])[0], f"{os.path.splitext(file['filename'])[0]}.sql")):
163 | create_sql(os.path.join(directory, os.path.splitext(settings['filename_A'])[0], file['filename']))
164 |
165 | db.drop_table(file['table'], schema=schema)
166 | db.execute_sql_from_file(f"{os.path.splitext(file['filename'])[0]}", os.path.join(directory, os.path.splitext(settings['filename_A'])[0]))
167 | db.commit()
168 |
169 | import_csv(db.connection_string, os.path.join(directory, os.path.splitext(settings['filename_A'])[0], f"{os.path.splitext(file['filename'])[0]}.csv"), schema, table=file['table'])
170 |
171 | db.add_primary_key(file['table'], file['columns'], schema=schema)
172 | db.commit()
173 | h.logEndTask()
174 |
175 | # close database connection
176 | h.log('closing database connection')
177 | db.close()
178 |
179 |
180 | class OsmImporter(DbStep):
181 | def __init__(self, db_settings: DbSettings):
182 | super().__init__(db_settings)
183 |
184 | def _get_srid_for_AOI(self, data_con:PostgresConnection, aoi_name: str, aoi_table: str = "aoi", aoi_schema: str = "data", save_to_aoi_table: bool = True) -> int:
185 | data_con.execute_sql_from_file("determine_utmzone", "sql/functions")
186 | srid = data_con.query_one(f"SELECT srid FROM {aoi_schema}.{aoi_table} WHERE name=%s", (aoi_name,))[0]
187 | if srid is None:
188 | srid = data_con.query_one(f"SELECT utmzone(ST_Centroid(geom)) FROM {aoi_schema}.{aoi_table} WHERE name=%s", (aoi_name,))[0]
189 | h.log(f"determined SRID based on AOI centroid: EPSG:{srid}")
190 | if save_to_aoi_table:
191 | self._save_srid_for_AOI(srid, data_con, aoi_name, aoi_table, aoi_schema)
192 | else:
193 | h.log(f"fetched SRID from AOI table: EPSG:{srid}")
194 | return srid
195 |
196 | def _save_srid_for_AOI(self, srid: int, data_con:PostgresConnection, aoi_name: str, aoi_table: str = "aoi", aoi_schema: str = "data"):
197 | data_con.ex(f"UPDATE {aoi_schema}.{aoi_table} SET srid=%s WHERE name=%s", (srid, aoi_name,))
198 | data_con.commit()
199 |
200 | def _load_osm_from_placename(self, db: PostgresConnection, data_schema: str, data_directory: str, settings: dict):
201 | # local imports, as we only use these here (makes it optional dependency for stand-alone use)
202 | import requests as rq
203 | import osm2geojson as o2g
204 | # prepare DB: create schema and setup extensions
205 | db.create_schema(data_schema)
206 | # first, set target schema
207 | db.schema = data_schema
208 | aoi_table = self.db_settings.entities.table_aoi
209 | aoi_name = GlobalSettings.case_id
210 | on_existing = settings['on_existing']
211 | # create AOI table if it not already exists
212 | db.ex(f"""CREATE TABLE IF NOT EXISTS {aoi_table} (
213 | id serial NOT NULL PRIMARY KEY,
214 | name varchar(40) NOT NULL CHECK (name <> '') UNIQUE,
215 | geom geometry,
216 | srid integer
217 | );""")
218 |
219 | # check whether an AOI with the given name already exists
220 | excnt = db.query_one("SELECT COUNT(*) FROM " + aoi_table + " WHERE name = %s;", (aoi_name,))[0]
221 | if excnt > 0:
222 | h.info(f"found {excnt} entry in existing AOI table with the given AOI name '{aoi_name}'")
223 | # if exists, use param switch or ask whether to use an existing AOI or overwrite it
224 | if on_existing == 'delete':
225 | h.info("...you specified to overwrite the existing AOI entry.")
226 | # delete existing AOI from AOI table
227 | db.ex("DELETE FROM " + data_schema + "." + aoi_table + " WHERE name = %s;", (aoi_name,))
228 | # continue script execution
229 | elif on_existing == 'skip':
230 | h.info("...you chose to re-use the existing AOI entry. Skipping AOI download.")
231 | return
232 | else:
233 |                 raise Exception("An AOI with the given name already exists. Please resolve the conflict manually or provide a different option for the import setting 'on_existing': [skip/delete/abort]")
234 | else:
235 | h.info("AOI name '" + aoi_name + "' is not in use -> continuing with AOI download...")
236 |
237 | # download AOI
238 | # first: create AOI query string
239 | h.debugLog(f"preparing AOI query for {settings['place_name']}")
240 | o_add_filter = ""
241 | if h.has_keys(settings, ['admin_level']):
242 | o_add_filter += "[admin_level='" + str(settings['admin_level']) + "']"
243 | if h.has_keys(settings, ['zip_code']):
244 | o_add_filter += "[\"admin_centre:postal_code\"='" + str(settings['zip_code']) + "']"
245 | overpass_aoi_api_string = f"""
246 | area
247 | [name='{settings['place_name']}'][boundary='administrative']{o_add_filter};
248 | rel(pivot);
249 | out body;
250 | >;
251 | out skel qt;
252 | """
253 | h.debugLog(f"AOI query string: {overpass_aoi_api_string}")
254 | # download AOI geom and add to aoi table
255 | h.logBeginTask("Downloading AOI...")
256 | curEndpointIndex = 0
257 | success = False
258 | while curEndpointIndex < len(GlobalSettings.overpass_api_endpoints) and not success:
259 | try:
260 | aoi_response = rq.get(GlobalSettings.overpass_api_endpoints[curEndpointIndex], params={'data': overpass_aoi_api_string}, timeout=5)
261 | aoi_response.raise_for_status()
262 | except HTTPError as e:
263 | h.log(f"HTTPError while trying to download OSM AOI from '{GlobalSettings.overpass_api_endpoints[curEndpointIndex]}': Error code {e.code}\n{e.args}\n{e.info()} --> trying again with next available API endpoint...")
264 | curEndpointIndex+=1
265 | except KeyboardInterrupt:
266 | h.majorInfo(f"OSM AOI download from '{GlobalSettings.overpass_api_endpoints[curEndpointIndex]}' interrupted by user. Terminating.")
267 | exit()
268 | except BaseException as e:
269 |                 h.majorInfo(f"An unexpected ERROR occurred during OSM AOI download from '{GlobalSettings.overpass_api_endpoints[curEndpointIndex]}': {e.args}")
270 | curEndpointIndex+=1
271 | else:
272 | success = True
273 | h.log(f"Response headers from API call to {GlobalSettings.overpass_api_endpoints[curEndpointIndex]}: {aoi_response.headers}", h.LOG_LEVEL_4_DEBUG)
274 | h.log(f"OSM AOI Download from {GlobalSettings.overpass_api_endpoints[curEndpointIndex]} succeeded.")
275 | if not success:
276 | raise Exception("OSM data download was not successful. Terminating.")
277 | h.logEndTask()
278 |
279 | aoi_geoJson = o2g.xml2geojson(aoi_response.text)
280 | num_ft = len(aoi_geoJson['features'])
281 | if num_ft < 1:
282 | h.majorInfo("ERROR: no matching feature found. Please try again with different AOI query settings.")
283 | raise Exception("AOI not found. Please check your query settings or use a bounding box instead (parameter 'bbox' in 'import' section of settings file).")
284 | # by default, use the first result (in case there were more results returned)
285 | fid = 0
286 | if num_ft > 1:
287 | h.info(f"Found {num_ft} matching features:")
288 | i = 0
289 | for ft in aoi_geoJson['features']:
290 | i+=1
291 | if "tags" in ft["properties"]:
292 | print("->", str(i), "--- Type:", ft["type"], "ZIP-Code:", ft["properties"]["tags"]["admin_centre:postal_code"] if "admin_centre:postal_code" in ft["properties"]["tags"] else "-",
293 | "Admin level:", ft["properties"]["tags"]["admin_level"] if "admin_level" in ft["properties"]["tags"] else "-", "Wikipedia:", ft["properties"]["tags"]["wikipedia"] if "wikipedia" in ft["properties"]["tags"] else "-")
294 | else:
295 | print("->", str(i), "--- Type:", ft["type"])
296 | if h.has_keys(settings, ['interactive']) and settings['interactive']:
297 | # let user choose which result to use if more than one AOI was found
298 | fid = int(input("Which one do you want to use? [1.." + str(i) + "] for your choice, other number to abort:\t"))
299 | if not fid in range(i+1) or fid < 1:
300 | print("You chose to abort the process. Will now exit.")
301 | exit("User cancelled processing during AOI choice")
302 | print("You chose to continue with AOI #", fid)
303 | fid -= 1 # back to 0-based index value
304 | else:
305 | print("Using the first search result. If you want to use a different AOI, either provide more query parameters or add 'interactive: True' to the import settings for an interactive choice.")
306 |
307 | h.logBeginTask("importing AOI to db table...")
308 | aoi_geoJson = aoi_geoJson['features'][fid]
309 | h.debugLog("geoJson: " + str(aoi_geoJson))
310 | db.ex("INSERT INTO " + aoi_table + " (name, geom) VALUES (%s, ST_SetSRID(ST_GeomFromGeoJSON(%s::text),4326));", (aoi_name, str(aoi_geoJson["geometry"])))
311 | db.commit()
312 | h.logEndTask()
313 |
314 | # now, data download can be prepared
315 | # determine SRID if not specified manually
316 | srid = GlobalSettings.custom_srid
317 | if srid is None:
318 | srid = self._get_srid_for_AOI(db, aoi_name, aoi_table, data_schema)
319 | GlobalSettings.default_srid = srid
320 | else:
321 |             # save custom SRID to AOI table
322 | self._save_srid_for_AOI(srid, db, aoi_name, aoi_table, data_schema)
323 |
324 | # get BBox for network coverage (larger than chosen AOI -> prevent edge effects)
325 | buffer = 500
326 | if h.has_keys(settings, ['buffer']):
327 | buffer = settings['buffer']
328 | bbox = db.query_one("WITH a as (SELECT ST_Transform(ST_setSRID(ST_EXPAND(box2d(ST_Transform(geom, %s)),%s),%s), 4326) as bbox FROM " +
329 | aoi_table + """ WHERE name=%s)
330 | SELECT ST_YMIN(bbox), ST_XMIN(bbox), ST_YMAX(bbox), ST_XMAX(bbox) FROM a;""",
331 | (srid, buffer, srid, aoi_name,)
332 | )
333 | h.debugLog(f"Determined Bounding box: {bbox}")
334 | # load OSM data from bounding box
335 | self._load_osm_from_bbox(str(bbox)[1:-1], settings)
336 |
337 | def _load_osm_from_bbox(self, bbox: str, settings: dict):
338 | q_template: str = """
339 | [timeout:900][maxsize:1073741824];
340 | nwr(__bbox__);
341 | out;"""
342 | net_file = f"{GlobalSettings.osm_download_prefix}_{GlobalSettings.case_id}.xml"
343 | if os.path.isfile(os.path.join(GlobalSettings.data_directory, net_file)):
344 | if not h.has_keys(settings, ['on_existing']):
345 | raise Exception("Target file for OSM download already exists. Please add a value for 'on_existing' to the import settings. [skip/abort/delete]")
346 | if settings['on_existing'] == 'skip':
347 | h.info("Target file for OSM download already exists. Skipping download (re-using existing file). You can change this behavior by adding a value for 'on_existing' to the import settings. [skip/abort/delete]")
348 | return
349 | if settings['on_existing'] != 'delete':
350 | raise Exception("Target file for OSM download already exists. Aborting. Please resolve the conflict manually or specify a different value for 'on_existing' in the import settings. [skip/abort/delete]")
351 | else:
352 | h.info("Target file for OSM download already exists. Deleting existing file and proceeding with download. You can change this behavior by adding a value for 'on_existing' to the import settings. [skip/abort/delete]")
353 | q_str = q_template.replace("__bbox__", bbox)
354 |         h.debugLog(f"prepared OSM overpass API query: \n'{q_str}'")
355 |
356 | h.logBeginTask("Starting OSM data download...")
357 | curEndpointIndex = 0
358 | success = False
359 | while curEndpointIndex < len(GlobalSettings.overpass_api_endpoints) and not success:
360 | success = False
361 | try:
362 | file_name, headers = urllib.request.urlretrieve(
363 | GlobalSettings.overpass_api_endpoints[curEndpointIndex] + "?data=" + urllib.parse.quote_plus(q_str),
364 | os.path.join(GlobalSettings.data_directory, net_file))
365 | except HTTPError as e:
366 | h.log(f"HTTPError while trying to download OSM data from '{GlobalSettings.overpass_api_endpoints[curEndpointIndex]}': Error code {e.code}\n{e.args}\n{e.info()} --> trying again with next available API endpoint...")
367 | curEndpointIndex+=1
368 | except KeyboardInterrupt:
369 | h.majorInfo(f"OSM download from '{GlobalSettings.overpass_api_endpoints[curEndpointIndex]}' interrupted by user. Terminating.")
370 | exit()
371 | except BaseException as e:
372 |                 h.log(f"An unexpected ERROR occurred during OSM data download from '{GlobalSettings.overpass_api_endpoints[curEndpointIndex]}': {e.args}")
373 | curEndpointIndex+=1
374 | else:
375 | success = True
376 | h.log(f"Response headers from API call to {GlobalSettings.overpass_api_endpoints[curEndpointIndex]}: {headers}", h.LOG_LEVEL_4_DEBUG)
377 | h.log(f"OSM Download from {GlobalSettings.overpass_api_endpoints[curEndpointIndex]} succeeded.")
378 | if not success:
379 | raise Exception("OSM data download was not successful. Terminating.")
380 | h.logEndTask()
381 |
382 | def run_step(self, settings: dict):
383 | h.info('importing osm')
384 | h.log(f"using settings: {str(settings)}")
385 | use_overpass_api: bool = False
386 |
387 | schema = self.db_settings.entities.data_schema
388 | directory = GlobalSettings.data_directory
389 |
390 | # open database connection
391 | h.info('open database connection')
392 | db = PostgresConnection.from_settings_object(self.db_settings)
393 | db.init_extensions_and_schema(schema)
394 |
395 | # if needed, download OSM data
396 | if not h.has_keys(settings, ['filename']):
397 | h.info("no OSM file provided. Checking for Overpass API settings instead...")
398 | if not h.has_any_key(settings, ['place_name', 'bbox']):
399 |             raise Exception("neither 'place_name' nor 'bbox' parameter specified for OSM download. Terminating.")
400 | use_overpass_api = True
401 | # start OSM import through overpass API
402 | # import from bounding box
403 | if h.has_keys(settings, ['bbox']):
404 | self._load_osm_from_bbox(settings['bbox'], settings)
405 | # import from place name
406 | elif h.has_keys(settings, ['place_name']):
407 | self._load_osm_from_placename(db, schema, directory, settings)
408 |
409 | # import osm file
410 | h.logBeginTask('import osm file')
411 | db.drop_table("osm_point", schema=schema)
412 | db.drop_table("osm_line", schema=schema)
413 | db.drop_table("osm_polygon", schema=schema)
414 | db.drop_table("osm_nodes", schema=schema)
415 | db.drop_table("osm_rels", schema=schema)
416 | db.drop_table("osm_roads", schema=schema)
417 | db.drop_table("osm_ways", schema=schema)
418 | db.commit()
419 |
420 | filename = f"{GlobalSettings.osm_download_prefix}_{GlobalSettings.case_id}.xml"
421 | if not use_overpass_api:
422 | filename = settings['filename']
423 | import_osm(db.connection_string, os.path.join(directory, filename), os.path.join('resources', 'default.style'), schema, prefix='osm') # 12 m 35 s
424 |
425 | db.drop_table("osm_nodes", schema=schema)
426 | db.drop_table("osm_rels", schema=schema)
427 | db.drop_table("osm_roads", schema=schema)
428 | db.drop_table("osm_ways", schema=schema)
429 | db.commit()
430 | h.logEndTask()
431 |
432 | # create dataset "building"
433 | h.logBeginTask('create dataset "building"')
434 | if db.handle_conflicting_output_tables(['building'], schema):
435 | db.execute('''
436 | CREATE TABLE building AS ( -- 16 s
437 | SELECT ST_Transform(way, %(target_srid)s)::geometry(Polygon, %(target_srid)s) AS geom
438 | FROM osm_polygon
439 | WHERE building IS NOT NULL
440 | );
441 |
442 | CREATE INDEX building_geom_idx ON building USING gist (geom); -- 22 s
443 | ''', {'target_srid':GlobalSettings.get_target_srid()})
444 | db.commit()
445 | h.logEndTask()
446 |
447 | # create dataset "crossing"
448 | h.logBeginTask('create dataset "crossing"')
449 | if db.handle_conflicting_output_tables(['crossing'], schema):
450 | db.execute('''
451 | CREATE TABLE crossing AS ( -- 4 s
452 | SELECT ST_Transform(way, %(target_srid)s)::geometry(Point, %(target_srid)s) AS geom FROM osm_point WHERE highway IN ('crossing') UNION ALL
453 | SELECT ST_Transform(way, %(target_srid)s)::geometry(LineString, %(target_srid)s) AS geom FROM osm_line WHERE highway IN ('crossing') UNION ALL
454 | SELECT ST_Transform(way, %(target_srid)s)::geometry(Polygon, %(target_srid)s) AS geom FROM osm_polygon WHERE highway IN ('crossing')
455 | );
456 |
457 | CREATE INDEX crossing_geom_idx ON crossing USING gist (geom); -- 1 s
458 | ''', {'target_srid':GlobalSettings.get_target_srid()})
459 | db.commit()
460 | h.logEndTask()
461 |
462 | # create dataset "facility"
463 | h.logBeginTask('create dataset "facility"')
464 | if db.handle_conflicting_output_tables(['facility'], schema):
465 | db.execute('''
466 | CREATE TABLE facility AS ( -- 3 s
467 | SELECT ST_Transform(way, %(target_srid)s)::geometry(Point, %(target_srid)s) AS geom
468 | FROM osm_point
469 | WHERE amenity IN ('arts_centre', 'artwork', 'attraction', 'bar', 'biergarten', 'cafe', 'castle', 'cinema', 'community_centre', 'library', 'museum',
470 | 'music_venue', 'park', 'pub', 'public_bookcase', 'restaurant', 'swimming_pool', 'theatre', 'toy_library', 'viewpoint', 'public_bath') -- entertainment
471 | OR amenity IN ('atm', 'bureau_de_change', 'bakery', 'beverages', 'butcher', 'clothes', 'department_store', 'fast_food', 'marketplace',
472 | 'florist', 'food_court', 'furniture_shop', 'ice_cream', 'kiosk', 'mall', 'outdoor_shop', 'pharmacy',
473 | 'shoe_shop', 'sports_shop', 'internet_cafe', 'supermarket', 'commercial', 'retail', 'shop', 'bicycle_rental', 'boat_rental', 'car_rental', 'bank') -- retail
474 | OR amenity IN ('university', 'school', 'college', 'gymnasium', 'kindergarten', 'childcare', 'boarding_school', 'music_school',
475 | 'riding_school', 'driving_school', 'language_school', 'research_institute', 'school;dormitory', 'training', 'place_of_worship',
476 | 'conference_centre', 'events_venue', 'exhibition_centre', 'social_centre', 'courthouse', 'post_office', 'ranger_station', 'townhall') -- institutional
477 | OR amenity IN ('post_box', 'bbq', 'bench', 'drinking_water', 'give_box', 'shelter', 'toilets', 'water_point', 'watering_place',
478 | 'waste_basket', 'clock', 'kneipp_water_cure', 'lounger', 'vending_machine') -- infrastructure
479 | OR tourism IN ('museum', 'attraction', 'gallery', 'viewpoint', 'zoo')
480 |
481 | UNION ALL
482 |
483 | SELECT ST_Transform(way, %(target_srid)s)::geometry(Polygon, %(target_srid)s) AS geom
484 | FROM osm_polygon
485 | WHERE amenity IN ('arts_centre', 'artwork', 'attraction', 'bar', 'biergarten', 'cafe', 'castle', 'cinema', 'community_centre', 'library', 'museum',
486 | 'music_venue', 'park', 'pub', 'public_bookcase', 'restaurant', 'swimming_pool', 'theatre', 'toy_library', 'viewpoint', 'public_bath') -- entertainment
487 | OR amenity IN ('atm', 'bureau_de_change', 'bakery', 'beverages', 'butcher', 'clothes', 'department_store', 'fast_food', 'marketplace', 'florist', 'food_court',
488 |                        'furniture_shop', 'ice_cream', 'kiosk', 'mall', 'outdoor_shop', 'pharmacy', 'shoe_shop', 'sports_shop', 'internet_cafe',
489 |                        'supermarket', 'commercial', 'retail', 'shop', 'bicycle_rental', 'boat_rental', 'car_rental', 'bank') -- retail
490 | OR amenity IN ('university', 'school', 'college', 'gymnasium', 'kindergarten', 'childcare', 'boarding_school', 'music_school',
491 | 'riding_school', 'driving_school', 'language_school', 'research_institute', 'school;dormitory', 'training', 'place_of_worship',
492 | 'conference_centre', 'events_venue', 'exhibition_centre', 'social_centre', 'courthouse', 'post_office', 'ranger_station', 'townhall') -- institutional
493 | OR amenity IN ('post_box', 'bbq', 'bench', 'drinking_water', 'give_box', 'shelter', 'toilets', 'water_point', 'watering_place',
494 | 'waste_basket', 'clock', 'kneipp_water_cure', 'lounger', 'vending_machine') -- infrastructure
495 | OR tourism IN ('museum', 'attraction', 'gallery', 'viewpoint', 'zoo')
496 | );
497 |
498 | CREATE INDEX facility_geom_idx ON facility USING gist (geom); -- 1 s
499 | ''', {'target_srid':GlobalSettings.get_target_srid()})
500 | db.commit()
501 | h.logEndTask()
502 |
503 | # create dataset "greenness"
504 | h.logBeginTask('create dataset "greenness"')
505 | if db.handle_conflicting_output_tables(['greenness'], schema):
506 | db.execute('''
507 | CREATE TABLE greenness AS ( -- 14 s
508 | SELECT ST_Transform(way, %(target_srid)s)::geometry(Polygon, %(target_srid)s) AS geom
509 | FROM osm_polygon
510 |                 WHERE landuse IN ('forest', 'grass', 'meadow', 'village_green', 'recreation_ground', 'vineyard', 'flowerbed', 'farmland', 'heath', 'nature_reserve', 'park', 'greenfield')
511 | OR leisure IN ('garden', 'golf_course', 'park')
512 | OR "natural" IN ('tree', 'wood', 'grassland', 'heath', 'scrub')
513 | );
514 |
515 | CREATE INDEX greenness_geom_idx ON greenness USING gist (geom); -- 4 s
516 | ''', {'target_srid':GlobalSettings.get_target_srid()})
517 | db.commit()
518 | h.logEndTask()
519 |
520 | # create dataset "water"
521 | h.logBeginTask('create dataset "water"')
522 | if db.handle_conflicting_output_tables(['water'], schema):
523 | db.execute('''
524 | CREATE TABLE water AS ( -- 10 s
525 | SELECT ST_Transform(way, %(target_srid)s)::geometry(LineString, %(target_srid)s) AS geom FROM osm_line WHERE (waterway IS NOT NULL OR "natural" = 'water') AND tunnel IS NULL UNION ALL
526 | SELECT ST_Transform(way, %(target_srid)s)::geometry(Polygon, %(target_srid)s) AS geom FROM osm_polygon WHERE (waterway IS NOT NULL OR "natural" = 'water') AND tunnel IS NULL
527 | );
528 |
529 | CREATE INDEX water_geom_idx ON water USING gist (geom); -- 1 s
530 | ''', {'target_srid':GlobalSettings.get_target_srid()})
531 | db.commit()
532 | h.logEndTask()
533 |
534 | # close database connection
535 | h.log('close database connection')
536 | db.close()
537 |
538 |
539 | def create_importer(db_settings: DbSettings, import_type: str):
540 | if import_type.lower() == InputType.GIP.value.lower():
541 | return GipImporter(db_settings)
542 | if import_type.lower() == InputType.OSM.value.lower():
543 | return OsmImporter(db_settings)
544 | raise NotImplementedError(f"import type '{import_type}' not implemented")
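545 | 
546 | # Example usage (sketch) - this factory is called from generate_index.py:
547 | #   importer = create_importer(db_settings, settings['import']['type'])
548 | #   importer.run_step(settings['import'])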
545 |
--------------------------------------------------------------------------------
/core/index_step.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | import yaml
4 |
5 | import toolbox.helper as h
6 | from settings import DbSettings
7 | from toolbox.dbhelper import PostgresConnection
8 | from typing import List
9 |
10 |
11 | class ProfileDefinition:
12 | profile_name: str
13 | filename: str
14 |
15 | def __init__(self, profile_name: str, filename: str):
16 | self.profile_name = profile_name
17 | self.filename = filename
18 |
19 |
20 | class ModeProfile:
21 | profile_name: str
22 | profile: dict = {}
23 |     access_car: bool = False
24 |     access_bike: bool = False
25 |     access_walk: bool = False
26 |
27 | def __init__(self, base_path: str, definition: dict):
28 | self.profile_name = definition.get('profile_name')
29 | filename = os.path.join(base_path, definition.get('filename'))
30 | self.access_car = h.has_keys(definition, ['filter_access_car']) and definition['filter_access_car']
31 | self.access_bike = h.has_keys(definition, ['filter_access_bike']) and definition['filter_access_bike']
32 | self.access_walk = h.has_keys(definition, ['filter_access_walk']) and definition['filter_access_walk']
33 | # if none of the mode access filters are set, then enable index computation for all modes
34 | if not (self.access_car or self.access_bike or self.access_walk):
35 | self.access_car = self.access_bike = self.access_walk = True
36 | with open(filename) as file:
37 | self.profile = yaml.safe_load(file)
38 |
39 |
40 | def load_profiles(base_path: str, profile_definitions: dict):
41 | return [ModeProfile(base_path, definition) for definition in profile_definitions]
42 |
43 | def _build_sql_overrides(overrides_yml: dict) -> str:
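44 |     """Translates one 'overrides' entry of a mode profile into a SQL snippet that
45 |     assigns the computed value either to the index or to selected indicator weights."""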
44 | # load yml: extract output details and prepare target variable names
45 | indicator_name = h.get_safe_name(overrides_yml.get('indicator'))
46 | h.require_keys(overrides_yml, ["output"], f"No 'output' key provided in overrides definition for '{indicator_name}'.")
47 | out = overrides_yml.get("output")
48 | h.require_keys(out, ["type"], f"'output' key has no 'type' in overrides definition for '{indicator_name}'.")
49 | out_type = out.get("type")
50 | assignment_targets = []
51 | if out_type == "index":
52 | assignment_targets.append("index")
53 | elif out_type == "weight":
54 | if h.has_keys(out, ["for"]):
55 | ft = out.get("for")
56 | if type(ft) == str:
57 | assignment_targets.append(f"{h.get_safe_name(ft)}_weight")
58 | elif type(ft) == list:
59 | for t in ft:
60 | assignment_targets.append(f"{h.get_safe_name(t)}_weight")
61 | else:
62 | raise Exception(f"Unknown output type '{out_type}' provided in overrides definition for '{indicator_name}'.")
63 | # compile value assignment SQL
64 | assignment_sql = ""
65 | for a_target in assignment_targets:
66 | assignment_sql += f"{a_target} := temp; \n"
67 | # delete output details and description from yml (for function compatibility)
68 | del overrides_yml['output']
69 | del overrides_yml['description']
70 | # compile value mappings
71 | value_assignments = _build_sql_indicator_mapping_internal_(overrides_yml, "", force_default_value = True, def_value = -1)
72 | # compile full indicator SQL around indicator mapping code
73 | sql: str = f"""
74 | temp :=
75 | {value_assignments};
76 | IF NOT temp < 0 THEN
77 | {assignment_sql}
78 | {"return; " if out_type == "index" else ""}
79 | END IF;
80 | """
81 | # compile result into template
82 | return sql
83 |
84 | def _build_sql_indicator_mapping_internal_(indicator_yml: dict, name_hierarchy: str = "", force_default_value: bool = False, def_value = None) -> str:
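85 |     """Recursively converts an indicator 'mapping'/'classes' definition from a mode
86 |     profile into a SQL CASE expression (dict values become nested CASE expressions)."""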
85 | indicator_name = h.get_safe_name(indicator_yml.get('indicator'))
86 | full_name = name_hierarchy + indicator_name
87 | h.debugLog(f"parsing YAML for ind. '{full_name}' \tRaw input: {indicator_yml}")
88 | value_assignments = "CASE \n"
89 | add_default_value: bool = force_default_value
90 | default_value = def_value
91 |
92 | # check type of mapping
93 | del indicator_yml['indicator']
94 | keys = indicator_yml.keys()
95 | if len(keys) != 1:
96 | raise Exception(f"Exactly one indicator mapping key is needed for indicator '{full_name}'. Please update your mode profile file accordingly.")
97 | k = list(keys)[0]
98 | contents = indicator_yml.get(k)
99 | if k not in ["mapping", "classes"]:
100 | raise Exception(f"You provided an unknown indicator mapping '{k}' for indicator '{full_name}'. Please update your mode profile file accordingly.")
101 | # parse each of the given keys
102 | for key in contents:
103 | # handle special assignment value types
104 | v = contents[key]
105 | h.debugLog(f"got mapping: {key}: {v} (type: {type(key)}:{type(v)})")
106 | if type(v) == dict:
107 | # parse dict recursively -> add result to value_assignments (nested CASE...END)
108 | v = _build_sql_indicator_mapping_internal_(v, f"{full_name}.", force_default_value, def_value)
109 | elif v is None:
110 | v = "NULL"
111 | elif not h.is_numeric(v):
112 | raise Exception(f"Only numeric value assignments are allowed for indicator mappings. Please update indicator '{full_name}' for '{key}'.")
113 | # handle special cases and (in last step) default cases for key types/values
114 | # special case of NULL value key
115 | if key is None:
116 | value_assignments += f"WHEN {indicator_name} IS NULL THEN {v}\n"
117 | # handle special case of default value (added last)
118 | elif str(key) == "_default_":
119 | add_default_value = True
120 | default_value = v
121 | # handle lists
122 | elif str(key).startswith("{") and str(key).endswith("}"):
123 | # list of Strings or numeric values
124 | s = str(key)[1:-1]
125 | slist = s.split(',')
126 | # if list contains at least one non-numeric value, interpret as list of strings
127 | cnt = sum([1 for val in slist if not h.str_is_numeric_only(val)])
128 | if cnt > 0:
129 | # String
130 | value_assignments += f"""WHEN {indicator_name} IN ('{"', '".join([h.get_safe_string(v.strip()) for v in slist])}') THEN {v}\n"""
131 | else:
132 | # numeric
133 | value_assignments += f"WHEN {indicator_name} IN ({', '.join([str(h.str_to_numeric(v.strip())) for v in slist])}) THEN {v}\n"
134 | # specific handling depending on type (mapping / classes)
135 | elif k == "mapping":
136 | if h.is_numeric(key) or type(key) == bool:
137 | value_assignments += f"WHEN {indicator_name} = {key} THEN {v}\n"
138 | else:
139 | value_assignments += f"WHEN {indicator_name} = '{h.get_safe_string(key)}' THEN {v}\n"
140 | elif k == "classes":
141 | # split key into op. and class value
142 | kstr = str(key)
143 | cv = h.str_to_numeric(kstr)
144 | if cv is None:
145 | raise Exception(f"For class-based indicator value assignments, a numeric class value must be specified. Indicator '{full_name}', key '{key}'.")
146 | op = "=" # default: equals
147 | opstr = re.sub("[^a-zA-Z]", "", kstr)
148 | if opstr == "g":
149 | op = ">"
150 | elif opstr == "ge":
151 | op = ">="
152 | elif opstr == "l":
153 | op = "<"
154 | elif opstr == "le":
155 | op = "<="
156 | elif opstr == "e":
157 | op = "="
158 | elif opstr == "ne":
159 | op = "<>"
160 | # append current assignment
161 | value_assignments += f"WHEN {indicator_name} {op} {cv} THEN {v}\n"
162 | else:
163 | raise Exception(f"Unexpected configuration received for indicator '{indicator_name}', key '{key}'.")
164 |
165 | # add default value assignment if specified
166 | if add_default_value:
167 | value_assignments += f"ELSE {default_value} \n"
168 | # close this indicator mapping CASE statement
169 | value_assignments += "END \n"
170 | return value_assignments
171 |
172 | def _build_sql_indicator_mapping(indicator_yml: dict) -> str:
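173 |     """Wraps a single indicator mapping into SQL that adds the weighted indicator
174 |     value to the overall index (skipped if the indicator or its weight is NULL)."""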
173 | indicator_name = h.get_safe_name(indicator_yml.get('indicator'))
174 | value_assignments = _build_sql_indicator_mapping_internal_(indicator_yml)
175 | # compile full indicator SQL around indicator mapping code
176 | sql: str = f"""
177 | IF {indicator_name} IS NOT NULL AND {indicator_name}_weight IS NOT NULL THEN
178 | indicator :=
179 | {value_assignments};
180 | weight := {indicator_name}_weight / weights_sum;
181 | index := index + indicator * weight;
182 | indicator_weights := array_append(indicator_weights, ('{indicator_name}', indicator * weight)::indicator_weight);
183 | END IF;"""
184 | return sql
185 |
186 | def generate_index(db_settings: DbSettings, profiles: List[ModeProfile], settings: dict):
187 | schema = db_settings.entities.network_schema
188 |
189 | # open database connection
190 | h.info('open database connection')
191 | db = PostgresConnection.from_settings_object(db_settings)
192 |
193 | # set search path
194 | h.log('set search path')
195 | db.schema = schema
196 |
197 | # create functions
198 | ## this now happens in the "calculate index step" (re-defining functions based on mode profile and settings)
199 |
200 | # calculate index
201 | h.logBeginTask("compute index columns for given profiles")
202 | if db.handle_conflicting_output_tables(['network_edge_index']):
203 | for p in profiles:
204 | profile_name = p.profile_name
205 | indicator_weights = p.profile['weights']
206 | # parse profile definition: generate SQL for indicator value assignments
207 | h.info(f'parsing indicator value mapping for profile "{profile_name}"...')
208 | indicator_mapping_sql = ""
209 | for indicator in p.profile['indicator_mapping']:
210 | indicator_mapping_sql += _build_sql_indicator_mapping(indicator)
211 | h.debugLog(f"compiled indicator mapping SQL: \n\n{indicator_mapping_sql}")
212 | # parse profile definition: generate SQL for overrides (indicator weights or index)
213 | h.info(f'parsing value overrides for profile "{profile_name}"...')
214 | overrides_sql = ""
215 | for override in p.profile['overrides']:
216 | overrides_sql += _build_sql_overrides(override)
217 | h.debugLog(f"compiled overrides SQL: \n\n{overrides_sql}")
218 |
219 | # profile-specific function registration
220 | h.info(f'register index function for profile "{profile_name}"...')
221 | f_params = {
222 | 'compute_explanation': settings and h.has_keys(settings, ['compute_explanation']) and settings['compute_explanation'],
223 | 'indicator_mappings': indicator_mapping_sql,
224 | 'overrides': overrides_sql
225 | }
226 | db.execute_template_sql_from_file("calculate_index", f_params, template_subdir="sql/functions/")
227 |             # calculate index for current profile
228 | h.info('calculate index_' + profile_name)
229 | params = {
230 | 'schema_network': schema,
231 | 'profile_name': profile_name,
232 | 'compute_explanation': settings and h.has_keys(settings, ['compute_explanation']) and settings['compute_explanation'],
233 | 'access_car': p.access_car,
234 | 'access_bike': p.access_bike,
235 | 'access_walk': p.access_walk,
236 | }
237 | params.update(indicator_weights)
238 |             h.debugLog(f"index template parameters: {params}")
239 | db.execute_template_sql_from_file("index", params)
240 | h.logEndTask()
241 |
242 | # create tables "edges" and "nodes"
243 | h.logBeginTask('create tables "export_edge" and "export_node"')
244 | if db.handle_conflicting_output_tables(['export_edge', "export_node"]):
245 | params = {
246 | 'schema_network': schema
247 | }
248 | db.execute_template_sql_from_file("export", params)
249 | h.logEndTask()
250 |
251 | # check for null columns
252 | rows = db.query_all("SELECT attname FROM pg_stats WHERE schemaname = %s AND tablename = %s AND null_frac = 1;", (schema, 'export_edge'))
253 | if rows:
254 | h.majorInfo(f"WARNING: The following columns contain only NULL values: {', '.join(str(row[0]) for row in rows)}")
255 |
256 | # create views for routing
257 | # db.execute('''
258 | # CREATE OR REPLACE VIEW network_car AS (
259 | # SELECT edge_id AS id,
260 | # from_node AS source,
261 | # to_node AS target,
262 | # CASE WHEN access_car_ft = false THEN -1 -- no access
263 | # ELSE length::numeric
264 | # END AS cost,
265 | # CASE WHEN access_car_tf = false THEN -1 -- no access
266 | # ELSE length::numeric
267 | # END AS reverse_cost
268 | # FROM export_edge
269 | # WHERE access_car_ft OR access_car_tf
270 | # );
271 | #
272 | # CREATE OR REPLACE VIEW network_bike AS (
273 | # SELECT edge_id AS id,
274 | # from_node AS source,
275 | # to_node AS target,
276 | # ((CASE WHEN NOT access_bicycle_ft AND NOT access_pedestrian_ft THEN -1 -- no access
277 | # WHEN NOT access_bicycle_ft AND access_pedestrian_ft THEN 3 -- pedestrian access
278 | # WHEN bridge THEN 3 -- bridge
279 | # ELSE 1 - index_bike_ft
280 | # END + 1) * (5 - 1) - (5 - 2)) * length::numeric AS cost,
281 | # ((CASE WHEN NOT access_bicycle_tf AND NOT access_pedestrian_tf THEN -1 -- no access
282 | # WHEN NOT access_bicycle_tf AND access_pedestrian_tf THEN 3 -- pedestrian access
283 | # WHEN bridge THEN 3 -- bridge
284 | # ELSE 1 - index_bike_tf
285 | # END + 1) * (5 - 1) - (5 - 2)) * length::numeric AS reverse_cost
286 | # FROM export_edge
287 | # WHERE access_bicycle_ft OR access_bicycle_tf OR
288 | # access_pedestrian_ft OR access_pedestrian_tf
289 | # );
290 | #
291 | # CREATE OR REPLACE VIEW network_walk AS (
292 | # SELECT edge_id AS id,
293 | # from_node AS source,
294 | # to_node AS target,
295 | # ((CASE WHEN NOT access_pedestrian_ft THEN -1 -- no access
296 | # WHEN bridge THEN 0.6 -- bridge
297 | # ELSE index_walk_ft
298 | # END + 1) * (5 - 1) - (5 - 2)) * length::numeric AS cost,
299 | # ((CASE WHEN NOT access_pedestrian_tf THEN -1 -- no access
300 | # WHEN bridge THEN 0.6 -- bridge
301 | # ELSE index_walk_tf
302 | # END + 1) * (5 - 1) - (5 - 2)) * length::numeric AS reverse_cost
303 | # FROM export_edge
304 | # WHERE access_pedestrian_ft OR access_pedestrian_tf
305 | # );
306 | # ''')
307 | # db.commit()
308 |
309 | # close database connection
310 | h.log('close database connection')
311 | db.close()
312 |
--------------------------------------------------------------------------------
/core/network_step.py:
--------------------------------------------------------------------------------
1 | import toolbox.helper as h
2 | from core.db_step import DbStep
3 | from settings import DbSettings, GlobalSettings, InputType
4 | from toolbox.dbhelper import PostgresConnection
5 |
6 |
7 | class GipNetworkStep(DbStep):
8 | def __init__(self, db_settings: DbSettings):
9 | super().__init__(db_settings)
10 |
11 | def run_step(self, settings: dict):
12 | h.info('network step')
13 | h.log(f"using import settings: {str(settings)}")
14 |
15 | schema = self.db_settings.entities.network_schema
16 |
17 | # open database connection
18 | h.log('connecting to database...')
19 | db = PostgresConnection.from_settings_object(self.db_settings)
20 | db.connect()
21 | db.init_extensions_and_schema(schema)
22 |
23 | # execute "gip_network"
24 | h.logBeginTask('execute "gip_network"')
25 | if db.handle_conflicting_output_tables(['network_edge', 'network_node']):
26 | params = {
27 | 'schema_network': schema,
28 | 'schema_data': self.db_settings.entities.data_schema,
29 | 'target_srid': GlobalSettings.get_target_srid()
30 | }
31 | db.execute_template_sql_from_file("gip_network", params)
32 | db.commit()
33 | h.logEndTask()
34 |
35 | # close database connection
36 | h.log('closing database connection')
37 | db.close()
38 |
39 |
40 | class OsmNetworkStep(DbStep):
41 | def __init__(self, db_settings: DbSettings):
42 | super().__init__(db_settings)
43 |
44 | def run_step(self, settings: dict):
45 | h.info('network step')
46 | h.log(f"using import settings: {str(settings)}")
47 |
48 | schema = self.db_settings.entities.network_schema
49 |
50 | # open database connection
51 | h.log('connecting to database...')
52 | db = PostgresConnection.from_settings_object(self.db_settings)
53 | db.connect()
54 | db.init_extensions_and_schema(schema)
55 |
56 | # create functions
57 | h.log('create functions')
58 | db.execute_sql_from_file("osm_delete_dangling_edges", "sql/functions")
59 | db.commit()
60 |
61 | # execute "osm_network"
62 | h.logBeginTask('execute "osm_network"')
63 | if db.handle_conflicting_output_tables(['network_edge', 'network_node']):
64 | params = {
65 | 'schema_network': schema,
66 | 'schema_data': self.db_settings.entities.data_schema,
67 | 'target_srid': GlobalSettings.get_target_srid(),
68 | 'include_rail': settings and h.has_keys(settings, ['include_rail']) and settings['include_rail'],
69 | 'include_aerialway': settings and h.has_keys(settings, ['include_aerialway']) and settings['include_aerialway']
70 | }
71 | db.execute_template_sql_from_file("osm_network", params)
72 | db.commit()
73 | h.logEndTask()
74 |
75 | # close database connection
76 | h.log('closing database connection')
77 | db.close()
78 |
79 |
80 | def create_network_step(db_settings: DbSettings, import_type: str):
81 | if import_type.lower() == InputType.GIP.value.lower():
82 | return GipNetworkStep(db_settings)
83 | if import_type.lower() == InputType.OSM.value.lower():
84 | return OsmNetworkStep(db_settings)
85 | raise NotImplementedError(f"import type '{import_type}' not implemented")
86 |
--------------------------------------------------------------------------------
/core/optional_step.py:
--------------------------------------------------------------------------------
1 | import os
2 | import subprocess
3 |
4 | import toolbox.helper as h
5 | from core import import_step
6 | from core.db_step import DbStep
7 | from settings import DbSettings, GlobalSettings
8 | from toolbox.dbhelper import PostgresConnection
9 |
10 |
11 | def import_raster(connection_string: str, path: str, schema: str, table: str, input_srid: int = 0) -> None:
12 | """Takes in a path to a geotiff raster file and imports it to a database raster table."""
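13 |     # raster2pgsql flags used here: -s <srid> source SRID, -I create a GiST index,
14 |     # -C apply standard raster constraints, -M run VACUUM ANALYZE after import,
15 |     # -t auto let raster2pgsql choose a suitable tile size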
13 | subprocess.run(f"raster2pgsql -s {input_srid} -I -C -M \"{path}\" -t auto {schema}.{table} | psql \"{connection_string}\" --variable ON_ERROR_STOP=on --quiet",
14 | shell=True, check=True)
15 |
16 |
17 | class DemImporter(DbStep):
18 | def run_step(self, settings: dict):
19 | h.info('importing dem:')
20 | h.log(f"using settings: {str(settings)}")
21 |
22 | schema = self.db_settings.entities.data_schema
23 | directory = GlobalSettings.data_directory
24 |
25 | # open database connection
26 | h.info('open database connection')
27 | db = PostgresConnection.from_settings_object(self.db_settings)
28 | db.init_extensions_and_schema(schema)
29 |
30 | # import DEM
31 | h.logBeginTask('import dem raster')
32 | if db.handle_conflicting_output_tables(['dem'], schema):
33 | # raster is imported without reprojection - during the attributes step, the network will be temporarily reprojected to DEM srid.
34 | import_raster(db.connection_string, os.path.join(directory, settings['filename']), schema, table='dem', input_srid=settings['srid']) # 4 m 34 s
35 | h.logEndTask()
36 |
37 | # close database connection
38 | h.log('close database connection')
39 | db.close()
40 |
41 |
42 | class NoiseImporter(DbStep):
43 | def run_step(self, settings: dict):
44 | h.log('importing noise:')
45 | h.log(f"using settings: {str(settings)}")
46 |
47 | schema = self.db_settings.entities.data_schema
48 | directory = GlobalSettings.data_directory
49 |
50 | # open database connection
51 | h.info('open database connection')
52 | db = PostgresConnection.from_settings_object(self.db_settings)
53 | db.init_extensions_and_schema(schema)
54 |
55 | # import noise
56 | h.logBeginTask('import noise')
57 | if db.handle_conflicting_output_tables(['noise'], schema):
58 | import_step.import_geopackage(db.connection_string_old, os.path.join(directory, settings['filename']), schema,
59 | table='noise', target_srid=GlobalSettings.get_target_srid(), geometry_types=['POLYGON', 'MULTIPOLYGON'])
60 | h.logEndTask()
61 |
62 | # close database connection
63 | h.log('close database connection')
64 | db.close()
65 |
66 |
67 | class BuildingImporter(DbStep):
68 | def run_step(self, settings: dict):
69 | h.info('importing building')
70 | h.log(f"using settings: {str(settings)}")
71 |
72 | schema = self.db_settings.entities.data_schema
73 | directory = GlobalSettings.data_directory
74 |
75 | # open database connection
76 | h.info('open database connection')
77 | db = PostgresConnection.from_settings_object(self.db_settings)
78 | db.init_extensions_and_schema(schema)
79 |
80 | # import building
81 | h.logBeginTask('import building')
82 | if db.handle_conflicting_output_tables(['building'], schema):
83 | import_step.import_geopackage(db.connection_string_old, os.path.join(directory, settings['filename']), schema,
84 | table='building', target_srid=GlobalSettings.get_target_srid(), geometry_types=['POLYGON'])
85 | h.logEndTask()
86 |
87 | # close database connection
88 | h.log('close database connection')
89 | db.close()
90 |
91 |
92 | class CrossingImporter(DbStep):
93 | def run_step(self, settings: dict):
94 | h.log('importing crossing:')
95 | h.log(f"using settings: {str(settings)}")
96 |
97 | schema = self.db_settings.entities.data_schema
98 | directory = GlobalSettings.data_directory
99 |
100 | # open database connection
101 | h.info('open database connection')
102 | db = PostgresConnection.from_settings_object(self.db_settings)
103 | db.init_extensions_and_schema(schema)
104 |
105 | # import crossing
106 | h.logBeginTask('import crossing')
107 | if db.handle_conflicting_output_tables(['crossing'], schema):
108 | import_step.import_geopackage(db.connection_string_old, os.path.join(directory, settings['filename']), schema,
109 | table='crossing', target_srid=GlobalSettings.get_target_srid(), geometry_types=['POINT', 'LINESTRING'])
110 | h.logEndTask()
111 |
112 | # close database connection
113 | h.log('close database connection')
114 | db.close()
115 |
116 |
117 | class FacilityImporter(DbStep):
118 | def run_step(self, settings: dict):
119 | h.log('importing facility:')
120 | h.log(f"using settings: {str(settings)}")
121 |
122 | schema = self.db_settings.entities.data_schema
123 | directory = GlobalSettings.data_directory
124 |
125 | # open database connection
126 | h.info('open database connection')
127 | db = PostgresConnection.from_settings_object(self.db_settings)
128 | db.init_extensions_and_schema(schema)
129 |
130 | # import facility
131 | h.logBeginTask('import facility')
132 | if db.handle_conflicting_output_tables(['facility'], schema):
133 | import_step.import_geopackage(db.connection_string_old, os.path.join(directory, settings['filename']), schema,
134 | table='facility', target_srid=GlobalSettings.get_target_srid(), geometry_types=['POINT', 'POLYGON'])
135 | h.logEndTask()
136 |
137 | # close database connection
138 | h.log('close database connection')
139 | db.close()
140 |
141 |
142 | class GreennessImporter(DbStep):
143 | def run_step(self, settings: dict):
144 | h.log('importing greenness:')
145 | h.log(f"using settings: {str(settings)}")
146 |
147 | schema = self.db_settings.entities.data_schema
148 | directory = GlobalSettings.data_directory
149 |
150 | # open database connection
151 | h.info('open database connection')
152 | db = PostgresConnection.from_settings_object(self.db_settings)
153 | db.init_extensions_and_schema(schema)
154 |
155 | # import greenness
156 | h.logBeginTask('import greenness')
157 | if db.handle_conflicting_output_tables(['greenness'], schema):
158 | import_step.import_geopackage(db.connection_string_old, os.path.join(directory, settings['filename']), schema,
159 | table='greenness', target_srid=GlobalSettings.get_target_srid(), geometry_types=['POLYGON'])
160 | h.logEndTask()
161 |
162 | # close database connection
163 | h.log('close database connection')
164 | db.close()
165 |
166 |
167 | class WaterImporter(DbStep):
168 | def run_step(self, settings: dict):
169 | h.log('importing water:')
170 | h.log(f"using settings: {str(settings)}")
171 |
172 | schema = self.db_settings.entities.data_schema
173 | directory = GlobalSettings.data_directory
174 |
175 | # open database connection
176 | h.info('open database connection')
177 | db = PostgresConnection.from_settings_object(self.db_settings)
178 | db.init_extensions_and_schema(schema)
179 |
180 | # import water
181 | h.logBeginTask('import water')
182 | if db.handle_conflicting_output_tables(['water'], schema):
183 | import_step.import_geopackage(db.connection_string_old, os.path.join(directory, settings['filename']), schema,
184 | table='water', target_srid=GlobalSettings.get_target_srid(), geometry_types=['LINESTRING', 'POLYGON'])
185 | h.logEndTask()
186 |
187 | # close database connection
188 | h.log('close database connection')
189 | db.close()
190 |
191 |
192 | def create_optional_importer(db_settings: DbSettings, import_type: str):
193 | if import_type == 'dem':
194 | return DemImporter(db_settings)
195 | if import_type == 'noise':
196 | return NoiseImporter(db_settings)
197 | if import_type == 'osm':
198 | return import_step.OsmImporter(db_settings)
199 | if import_type == 'building':
200 | return BuildingImporter(db_settings)
201 | if import_type == 'crossing':
202 | return CrossingImporter(db_settings)
203 | if import_type == 'facility':
204 | return FacilityImporter(db_settings)
205 | if import_type == 'greenness':
206 | return GreennessImporter(db_settings)
207 | if import_type == 'water':
208 | return WaterImporter(db_settings)
209 | raise NotImplementedError(f"import type '{import_type}' not implemented")
210 |
211 |
212 | def run_optional_importers(db_settings: DbSettings, optional_importer_settings: dict):
213 | for import_type, settings in optional_importer_settings.items():
214 | optional_importer = create_optional_importer(db_settings, import_type)
215 | optional_importer.run_step(settings)
216 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3"
2 |
3 | services:
4 | netascore:
5 | image: plusmobilitylab/netascore:latest
6 | build: .
7 | stdin_open: true # docker run -i
8 | tty: true # docker run -t
9 | environment:
10 | - DB_USERNAME=${DB_USERNAME}
11 | - DB_PASSWORD=${DB_PASSWORD}
12 | volumes:
13 | - ./data:/usr/src/netascore/data
14 | command: "examples/settings_osm_query.yml"
15 | depends_on:
16 | netascore-db:
17 | condition: service_healthy
18 |
19 | netascore-db:
20 | image: postgis/postgis:13-3.2
21 | ports:
22 | - "5433:5432"
23 | environment:
24 | - POSTGRES_USER=postgres
25 | - POSTGRES_PASSWORD=postgres
26 | - POSTGRES_DB=postgres
27 | healthcheck:
28 |       test: ["CMD-SHELL", "pg_isready -U postgres"]
29 | interval: 10s
30 | timeout: 20s
31 | retries: 120
--------------------------------------------------------------------------------
/docker.md:
--------------------------------------------------------------------------------
1 | # Running NetAScore in Docker
2 |
3 | In this file, we describe how to run all components, or only some of them, in Docker.
4 | There are two components involved:
5 |
6 | 1. NetAScore (Python source or docker image)
7 | 2. a PostGIS-enabled database (which is also provided as docker image)
8 |
9 | ## Quickstart
10 |
11 | NetAScore comes with a `docker compose` configuration in `docker-compose.yml` and a demo configuration, so you can simply run an example workflow by following these two steps (if you don't have Docker installed yet, please [install the Docker Engine](https://docs.docker.com/engine/install/) first):
12 |
13 | - download the `docker-compose.yml` file from the `examples` directory ([file link](https://raw.githubusercontent.com/plus-mobilitylab/netascore/main/examples/docker-compose.yml)) to an empty directory
14 | - from within this directory, execute the following command from a terminal:
15 | `docker compose run netascore`
16 |
17 | NetAScore first loads an area of interest via the Overpass API, then downloads the respective OpenStreetMap data and afterwards imports, processes and exports the final dataset. A new subdirectory named `data` will be present after successful execution. Within this folder, the assessed network is stored in `netascore_salzburg.gpkg`. It includes *bikeability* in columns `index_bike_ft` and `index_bike_tf` and *walkability* in `index_walk_ft` and `index_walk_tf`. The extensions `ft` and `tf` refer to the direction along an edge: *from-to* or *to-from* node.
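18 | 
19 | To quickly inspect the result, you can for example load the exported GeoPackage in Python - a minimal sketch, assuming the `geopandas` package is installed (not a NetAScore dependency); the export contains an "edge" and a "node" layer:
20 | 
21 | ```python
22 | import geopandas as gpd
23 | 
24 | # read the "edge" layer of the assessed network
25 | edges = gpd.read_file("data/netascore_salzburg.gpkg", layer="edge")
26 | # bikeability/walkability per direction: *_ft = from-to, *_tf = to-from
27 | print(edges[["index_bike_ft", "index_bike_tf", "index_walk_ft", "index_walk_tf"]].describe())
28 | ```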
18 |
19 | ## Run NetAScore for your own area of interest
20 |
21 | The easiest way to run a network assessment for your own area of interest is by adapting the given example in `examples/settings_osm_query.yml`:
22 |
23 | - create a new **subdirectory** named **`data`** (if you already ran the quickstart example, you can just use the `data` directory created)
24 | - download the **settings template** [from here](https://raw.githubusercontent.com/plus-mobilitylab/netascore/main/examples/settings_osm_query.yml) or copy it from `examples/settings_osm_query.yml`
25 | - add the **mode profiles** for *bikeability* and *walkability* to the `data` directory: copy both `profile_bike.yml` and `profile_walk.yml` from the `examples` folder.
26 | - **edit** your newly created **settings file** `settings_osm_query.yml` - e.g. to download data for the City of London:
27 |   - provide a **`case_id`** in the `global` section (only letters, digits and underscores are allowed - please avoid special characters such as German "Umlaute"; the id is e.g. added to the output file name) - e.g. `case_id: london`
28 |   - specify a **`place_name`** that is used to query data from OSM in the section `import`: e.g. `place_name: City of London` (please note: currently, this must equal the OSM "name" tag of your target area - you may check this e.g. on www.openstreetmap.org, or with the query sketch below this list)
29 | - for editing this file we recommend using a code editor such as Visual Studio Code, Notepad++ or comparable which handles text encodings properly
30 | - **run NetAScore** by executing the following line from a terminal inside the main directory (parent of `data`):
31 | `docker compose run netascore data/settings_osm_query.yml`
32 | (here, the last argument represents the settings file to use)
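33 | 
34 | If you are unsure which OSM object a `place_name` resolves to, you can first query the public Nominatim geocoder - a minimal sketch, assuming the `requests` package is installed; the user agent string is an arbitrary placeholder:
35 | 
36 | ```python
37 | import requests
38 | 
39 | # look up candidate OSM objects for a place name via Nominatim
40 | resp = requests.get(
41 |     "https://nominatim.openstreetmap.org/search",
42 |     params={"q": "City of London", "format": "jsonv2"},
43 |     headers={"User-Agent": "netascore-docs-example"},  # Nominatim requires a user agent
44 |     timeout=30,
45 | )
46 | for match in resp.json():
47 |     print(match["display_name"], "-", match.get("osm_type"), match.get("type"))
48 | ```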
33 |
34 | ## Add more detail
35 |
36 | The example settings use OpenStreetMap data as the only input. While this gives a good first estimate of *bikeability* and *walkability*, utilizing additional input datasets can further improve the quality of results. NetAScore supports additional datasets such as *DEM* (digital elevation model) and *noise* (e.g. traffic noise corridors). Please refer to the [settings documentation](settings.md) for details.
37 |
38 | To add optional input data sets, follow these steps:
39 |
40 | - acquire the file(s) for your area of interest - availability of DEM, noise map, etc. may largely depend on the area of interest
41 | - add the file(s) to the `data` subdirectory (where the settings file and mode profiles are located)
42 | - edit the settings file to add the new datasets and store it inside the `data` folder (a quick sanity check for this step is sketched below this list)
43 | - execute NetAScore from the parent directory:
44 |   `docker compose run netascore data/<settings-file>.yml` (where `<settings-file>` refers to the file name you chose for the edited settings file)
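45 | 
46 | Before running, you can sanity-check that all referenced optional files are in place - a minimal sketch, assuming your edited settings file is stored as `data/settings.yml`:
47 | 
48 | ```python
49 | import os
50 | import yaml
51 | 
52 | # verify that every optional dataset referenced in the settings file exists in data/
53 | with open("data/settings.yml") as f:
54 |     cfg = yaml.safe_load(f)
55 | for name, opts in (cfg.get("optional") or {}).items():
56 |     path = os.path.join("data", opts["filename"])
57 |     print(f"{name}: {path} -> {'ok' if os.path.exists(path) else 'MISSING'}")
58 | ```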
45 |
46 | ## Manual use of the Docker image
47 |
48 | If you want to use the NetAScore Docker image without docker compose or in a custom setting, you may simply get the latest version of the NetAScore image using:
49 |
50 | ```bash
51 | docker pull plusmobilitylab/netascore:latest
52 | ```
53 |
54 | To run the workflow with an existing PostgreSQL database, simply follow these steps:
55 |
56 | - create a directory named `data` and place all geofiles inside
57 | - add mode profile files and settings file to this directory (see example files provided in the code repository)
58 | - adjust settings to your needs in the `settings.yml` file - see the [settings documentation](settings.md) for reference
59 | - finally, execute the workflow using:
60 |
61 | ```bash
62 | docker run -i -t -v <path-to-your-data-directory>:/usr/src/netascore/data plusmobilitylab/netascore data/settings.yml
63 | ```
64 |
65 | # Build the Docker image from source
66 |
67 | The easiest way to build and launch NetAScore is by using docker compose. The `docker-compose.yml` inside the main code directory is configured accordingly. Therefore, the only command you need to execute is:
68 |
69 | `docker compose build`
70 |
71 | Then, once you are sure that all input datasets, settings and mode profile files are properly placed inside the `data` subdirectory, execute NetAScore:
72 |
73 | `docker compose run netascore data/<settings-file>.yml`
74 |
75 | ## The manual, stepwise approach
76 |
77 | You can build the Docker image yourself from source using the following command from within the main code directory:
78 |
79 | `docker build -t netascore .`
80 |
81 | This builds a local docker image named `netascore`.
82 |
83 | To manually create a network for communication between NetAScore and the PostgreSQL database running in Docker, execute the following (required only once per computer):
84 |
85 | `docker network create netascore-net`
86 |
87 | Then, to run the workflow, first start the PostgreSQL database and attach it to the network:
88 |
89 | ```bash
90 | docker run --name netascore-db --network=netascore-net \
91 | -e POSTGRES_PASSWORD=postgres -d postgis/postgis:13-3.2
92 | ```
93 | 
94 | Alternatively, if you also want to access the database directly from your host machine, publish the container port on a host port instead (here, TCP port 5432 in the container is mapped to port 5433 on the Docker host):
95 | 
96 | ```bash
96 | docker run --name netascore-db --network=netascore-net -p 5433:5432 \
97 | -e POSTGRES_PASSWORD=postgres -d postgis/postgis:13-3.2
98 | ```
99 |
100 | Make sure that the database connection in your `settings.yml` is set up to use the Docker network:
101 |
102 | ```yml
103 | database:
104 | host: netascore-db
105 | port: 5432
106 | dbname: postgres
107 | username: postgres
108 | password: postgres
109 | ```
110 |
111 | Make sure that you have all necessary geofiles, settings and mode profile files in the `data` subdirectory, because this directory is mounted into the netascore container:
112 |
113 | ```bash
114 | # linux and mac:
115 | docker run -i -t --network=netascore-net \
116 | -v $(pwd)/data:/usr/src/netascore/data netascore data/settings.yml
117 | ```
118 |
119 | ```shell
120 | # windows:
121 | docker run -i -t --network=netascore-net \
122 | -v %cd%/data:/usr/src/netascore/data netascore data/settings.yml
123 | ```
124 |
125 |
126 |
127 | # Advanced configuration
128 |
129 | ## Only the database runs in docker
130 |
131 | If only the database runs in Docker, you have to publish its port so that it accepts connections from the local machine:
132 |
133 | ```bash
134 | docker run --name netascore-db --network=netascore-net -p 5432:5432 \
135 | -e POSTGRES_PASSWORD=postgres -d postgis/postgis:13-3.2
136 | ```
137 |
138 | The `database` section in your settings file should then point to the host port that is mapped to the database container on localhost:
139 |
140 | ```yml
141 | database:
142 | host: localhost
143 | port: 5432
144 | dbname: postgres
145 | username: postgres
146 | password: postgres
147 | ```
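148 | 
149 | To verify the connection before starting a full run, you can test it from Python - a minimal sketch, assuming the `psycopg2` package is installed; connection parameters follow the settings above:
150 | 
151 | ```python
152 | import psycopg2
153 | 
154 | # quick connectivity check against the dockerized database
155 | conn = psycopg2.connect(host="localhost", port=5432, dbname="postgres",
156 |                         user="postgres", password="postgres")
157 | print(conn.server_version)
158 | conn.close()
159 | ```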
148 |
149 | Now you can run the Python script as described in the [README.md](README.md).
150 |
151 | ## Only the script runs in docker
152 |
153 | If the script runs inside the docker container, it needs access to the database outside of the docker ecosystem. If the external database runs on another host, provide the necessary connection information in the `database` section. If you have the database running on your local system, the host needs to be the IP address or hostname of the local system. Please note that `127.0.0.1` or `localhost` will not work, because the script would try to connect to the container's own localhost. If you cannot obtain the IP address of your
154 | machine, or you cannot establish a connection, use `gateway.docker.internal` as the host, e.g.:
155 |
156 | ```yml
157 | database:
158 | host: gateway.docker.internal
159 | port: 5432
160 | dbname: postgres
161 | username: postgres
162 | password: postgres
163 | ```
164 |
165 | ## Troubleshooting and performance improvement
166 |
167 | ### Performance when running NetAScore in Docker
168 |
169 | When running NetAScore in Docker on macOS or Windows, overall performance of the pipeline can be 3-5 times slower compared to executing NetAScore in local Python or in Docker on Linux. This is caused by slow Docker volume mounts and can become an issue for computations on large input files.
170 | To resolve this issue, you can either execute the Python script on your machine (outside Docker) or copy the files into a Docker volume using the following steps:
171 |
172 | ```bash
173 | docker volume create netascore-storage
174 |
175 | docker create -t --network=netascore-net --name netascore-pipe \
176 | -v netascore-storage:/usr/src/netascore/data netascore data/settings.yml
177 |
178 | docker cp data/. netascore-pipe:/usr/src/netascore/data
179 |
180 | docker start netascore-pipe
181 | ```
182 |
183 | To monitor the progress (logs), run:
184 |
185 | ```bash
186 | docker logs -f netascore-pipe
187 | ```
188 |
189 | This command shows the container's logs and keeps following them; stop it with `ctrl+c`.
190 |
191 | To copy the resulting files back to your local system, you can use the following command:
192 |
193 | ```bash
194 | docker cp netascore-pipe:/usr/src/netascore/data/YOUR_RESULT_FILE1.gpkg .
195 | docker cp netascore-pipe:/usr/src/netascore/data/YOUR_RESULT_FILE2.gpkg .
196 | ```
197 |
198 | ### Memory issues with large datasets
199 |
200 | In case you experience errors when processing large datasets, please make sure that you have enough memory and disk space available.
201 | Furthermore, it might be necessary to dedicate more memory to the database container. This can be done by adding the following line to `docker-compose.yml` within the section `netascore-db` (adjust the amount of memory to your needs):
202 |
203 | ```yml
204 | shm_size: 2gb
205 | ```
206 |
207 | Then, the `netascore-db`-section of `docker-compose.yml` should look like this:
208 |
209 | ```yml
210 | netascore-db:
211 | image: postgis/postgis:13-3.2
212 | shm_size: 2gb
213 | ports:
214 | - "5433:5432"
215 | environment:
216 | - POSTGRES_USER=postgres
217 | - POSTGRES_PASSWORD=postgres
218 | - POSTGRES_DB=postgres
219 | healthcheck:
220 |       test: ["CMD-SHELL", "pg_isready -U postgres"]
221 | interval: 10s
222 | timeout: 20s
223 | retries: 120
224 | ```
225 |
226 |
227 | ## Overwrite `default.style` for OSM import to database
228 |
229 | For importing OpenStreetMap data into the database, NetAScore uses [osm2pgsql](https://osm2pgsql.org/). Import settings for this commandline utility are provided in a `default.style` file. By default, NetAScore provides this file within its Docker container. In order to customize `default.style` settings you may perform the following steps:
230 |
231 | - copy the file from this repository (`resources/default.style`)
232 | - adapt the settings according to your needs
233 | - mount the settings file into the docker container when running it
234 |
235 | ```bash
236 | # linux and mac:
237 | docker run -i -t --network=netascore-net \
238 | -v $(pwd)/default.style:/usr/src/netascore/resources/default.style \
239 | -v $(pwd)/data:/usr/src/netascore/data netascore data/settings.yml
240 | ```
241 |
242 | ```shell
243 | # windows:
244 | docker run -i -t --network=netascore-net \
245 | -v %cd%/default.style:/usr/src/netascore/resources/default.style \
246 | -v %cd%/data:/usr/src/netascore/data netascore data/settings.yml
247 | ```
248 |
--------------------------------------------------------------------------------
/examples/dev_example_docker_only.yml:
--------------------------------------------------------------------------------
1 | version: 1.0
2 | # for execution within docker container and PostgresDB in Docker
3 | database:
4 | host: netascore-db
5 | port: 5432
6 | dbname: postgres
7 | username: postgres
8 | password: postgres
9 | on_existing: abort # skip, delete, abort
10 | import:
11 | type: gip
12 | filename_A: A_routingexport_ogd_split.zip
13 |
--------------------------------------------------------------------------------
/examples/dev_example_existing_db.yml:
--------------------------------------------------------------------------------
1 | version: 1.0
2 | # for program execution within docker container and local PostgresDB outside of Docker
3 | database:
4 | host: gateway.docker.internal
5 | port: 5432 # port of existing PostgreSQL server on localhost
6 | dbname: postgres # target database
7 | username: postgres # local postgres user
8 | password: postgres
9 | on_existing: abort # skip, delete, abort
10 | import:
11 | type: gip
12 | filename_A: A_routingexport_ogd_split.zip
13 |
--------------------------------------------------------------------------------
/examples/dev_example_existing_db_no_docker.yml:
--------------------------------------------------------------------------------
1 | version: 1.0
2 | # for local execution without docker container and PostgresDB on localhost
3 | database:
4 | host: localhost
5 | port: 5432 # port of existing PostgreSQL server on localhost
6 | dbname: postgres # target database
7 | username: postgres # local postgres user
8 | password: postgres
9 | on_existing: abort # skip, delete, abort
10 | import:
11 | type: gip
12 | filename_A: A_routingexport_ogd_split.zip
13 |
--------------------------------------------------------------------------------
/examples/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3"
2 |
3 | services:
4 | netascore:
5 | image: plusmobilitylab/netascore:latest
6 | stdin_open: true # docker run -i
7 | tty: true # docker run -t
8 | environment:
9 | - DB_USERNAME=${DB_USERNAME}
10 | - DB_PASSWORD=${DB_PASSWORD}
11 | volumes:
12 | - ./data:/usr/src/netascore/data
13 | command: "examples/settings_osm_query.yml"
14 | depends_on:
15 | netascore-db:
16 | condition: service_healthy
17 |
18 | netascore-db:
19 | image: postgis/postgis:13-3.2
20 | ports:
21 | - "5433:5432"
22 | environment:
23 | - POSTGRES_USER=postgres
24 | - POSTGRES_PASSWORD=postgres
25 | - POSTGRES_DB=postgres
26 | healthcheck:
27 |       test: ["CMD-SHELL", "pg_isready -U postgres"]
28 | interval: 10s
29 | timeout: 20s
30 | retries: 120
--------------------------------------------------------------------------------
/examples/profile_bike.yml:
--------------------------------------------------------------------------------
1 | # This is the default bikeability mode profile provided with NetAScore,
2 | # developed and tested by the MobilityLab at University of Salzburg.
3 | # Feel free to copy, edit and adjust for specific use cases.
4 |
5 | version: 1.1
6 |
7 | weights:
8 | bicycle_infrastructure: 0.2
9 | pedestrian_infrastructure: NULL
10 | designated_route: 0.1
11 | road_category: 0.3
12 | max_speed: 0.1
13 | max_speed_greatest: NULL
14 | parking: 0.1
15 | pavement: 0.1
16 | width: NULL
17 | gradient: 0.1
18 | number_lanes: NULL
19 | facilities: NULL
20 | crossings: NULL
21 | buildings: NULL
22 | greenness: NULL
23 | water: NULL
24 | noise: NULL
25 |
26 |
27 | # =================================================================== #
28 | # In the following section advanced settings can be defined. #
29 | # Only edit the contents below if you really know what you are doing. #
30 | # =================================================================== #
31 |
32 |
33 | # OVERRIDES - here, weights and/or index values can be overridden #
34 |
35 | overrides:
36 | - description: combination of gradient and pavement (steep and loose/rough)
37 | indicator: pavement
38 | output:
39 | type: weight
40 | for: [pavement, gradient]
41 | mapping:
42 | "{gravel, soft, cobble}":
43 | indicator: gradient
44 | mapping:
45 | "{-4, -3, 3, 4}": 1.6
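46 | # (effect: if pavement is gravel, soft or cobble AND the gradient class is -4/-3/3/4,
47 | #  the weights of both 'pavement' and 'gradient' are set to 1.6 for this edge)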
46 |
47 |
48 | # INDICATOR attribute mappings #
49 |
50 | indicator_mapping:
51 | - indicator: bicycle_infrastructure
52 | mapping:
53 | "bicycle_way": 1
54 | "mixed_way": 0.9
55 | "bicycle_road": 0.85
56 | "cyclestreet": 0.8
57 | "bicycle_lane": 0.75
58 | "bus_lane": 0.75
59 | "shared_lane": 0.5
60 | "undefined": 0.2
61 | "no": 0
62 |
63 | - indicator: designated_route
64 | mapping:
65 | "international": 1
66 | "national": 0.9
67 | "regional": 0.85
68 | "local": 0.8
69 | "unknown": 0.8
70 | "no": 0
71 |
72 | - indicator: road_category
73 | mapping:
74 | "primary": 0
75 | "secondary": 0.2
76 | "residential": 0.8
77 | "service": 0.85
78 | "calmed": 0.9
79 | "no_mit": 1
80 | "path": 0
81 |
82 | - indicator: max_speed
83 | classes:
84 | ge100: 0
85 | ge80: 0.2
86 | ge70: 0.3
87 | ge60: 0.4
88 | ge50: 0.6
89 | ge30: 0.85
90 | g0: 0.9
91 | e0: 1
92 |
93 | - indicator: max_speed_greatest
94 | classes:
95 | ge100: 0
96 | ge80: 0.2
97 | ge70: 0.3
98 | ge60: 0.4
99 | ge50: 0.6
100 | ge30: 0.85
101 | g0: 0.9
102 | e0: 1
103 |
104 | - indicator: parking
105 | mapping:
106 | "yes": 0
107 | "no": 1
108 | True: 0
109 | False: 1
110 |
111 | - indicator: pavement
112 | mapping:
113 | asphalt: 1
114 | gravel: 0.75
115 | soft: 0.4
116 | cobble: 0
117 |
118 | - indicator: width
119 | classes:
120 | g5: 1
121 | g4: 0.9
122 | g3: 0.85
123 | g2: 0.5
124 | ge0: 0
125 |
126 | - indicator: gradient
127 | mapping:
128 | 4: 0
129 | 3: 0.25
130 | 2: 0.4
131 | 1: 0.5
132 | 0: 0.9
133 | -1: 1
134 | -2: 0.95
135 | -3: 0.35
136 | -4: 0
137 |
138 | - indicator: number_lanes
139 | classes:
140 | g4: 0
141 | g3: 0.1
142 | g2: 0.2
143 | g1: 0.5
144 | ge0: 1
145 |
146 | - indicator: facilities
147 | classes:
148 | g0: 1
149 | e0: 0
150 |
151 | - indicator: buildings
152 | classes:
153 | ge80: 0
154 | g60: 0.2
155 | g40: 0.4
156 | g20: 0.6
157 | g0: 0.8
158 | e0: 1
159 |
160 | - indicator: greenness
161 | classes:
162 | g75: 1
163 | g50: 0.9
164 | g25: 0.8
165 | g0: 0.7
166 | e0: 0
167 |
168 | - indicator: water
169 | mapping:
170 | True: 1
171 | False: 0
172 |
173 | - indicator: noise
174 | classes:
175 | g70: 0
176 | g55: 0.6
177 | g10: 0.8
178 | ge0: 1
179 |
--------------------------------------------------------------------------------
/examples/profile_walk.yml:
--------------------------------------------------------------------------------
1 | # This is the default walkability mode profile provided with NetAScore,
2 | # developed and tested by the MobilityLab at University of Salzburg.
3 | # Feel free to copy, edit and adjust for specific use cases.
4 |
5 | version: 1.1
6 |
7 | weights:
8 | bicycle_infrastructure: NULL
9 | pedestrian_infrastructure: 0.4
10 | designated_route: NULL
11 | road_category: 0.3
12 | max_speed: NULL
13 | max_speed_greatest: 0.3
14 | parking: NULL
15 | pavement: NULL
16 | width: NULL
17 | gradient: 0.3
18 | number_lanes: 0.1
19 | facilities: 0.3
20 | crossings: 0.2
21 | buildings: 0.1
22 | greenness: 0.3
23 | water: 0.4
24 | noise: 0.3
25 |
26 |
27 | # =================================================================== #
28 | # In the following section advanced settings can be defined. #
29 | # Only edit the contents below if you really know what you are doing. #
30 | # =================================================================== #
31 |
32 |
33 | # OVERRIDES - here, weights and/or index values can be overridden #
34 |
35 | overrides:
36 | - description: fixed index value for sidewalk on primary/secondary roads (pedestrian_infrastructure/road_category)
37 | indicator: pedestrian_infrastructure
38 | output:
39 | type: index
40 | mapping:
41 | "sidewalk":
42 | indicator: road_category
43 | mapping:
44 |         "{secondary, primary}": 0.2
45 |
46 |
47 | # INDICATOR attribute mappings #
48 |
49 | indicator_mapping:
50 | - indicator: pedestrian_infrastructure
51 | mapping:
52 | "pedestrian_area": 1
53 | "pedestrian_way": 1
54 | "mixed_way": 0.85
55 | "stairs": 0.7
56 | "sidewalk": 0.5
57 | "no": 0
58 |
59 | - indicator: road_category
60 | mapping:
61 | "primary": 0
62 | "secondary": 0.2
63 | "residential": 0.8
64 | "service": 0.85
65 | "calmed": 0.9
66 | "no_mit": 1
67 | "path": 1
68 |
69 | - indicator: max_speed
70 | classes:
71 | ge100: 0
72 | ge80: 0.2
73 | ge70: 0.3
74 | ge60: 0.4
75 | ge50: 0.6
76 | ge30: 0.85
77 | g0: 0.9
78 | e0: 1
79 |
80 | - indicator: max_speed_greatest
81 | classes:
82 | ge100: 0
83 | ge80: 0.2
84 | ge70: 0.3
85 | ge60: 0.4
86 | ge50: 0.6
87 | ge30: 0.85
88 | g0: 0.9
89 | e0: 1
90 |
91 | - indicator: parking
92 | mapping:
93 | "yes": 0
94 | "no": 1
95 | True: 0
96 | False: 1
97 |
98 | - indicator: pavement
99 | mapping:
100 | "asphalt": 1
101 | "gravel": 0.75
102 | "soft": 0.4
103 | "cobble": 0
104 |
105 | - indicator: width
106 | classes:
107 | g5: 1
108 | g4: 0.9
109 | g3: 0.85
110 | g2: 0.5
111 | ge0: 0
112 |
113 | - indicator: gradient
114 | mapping:
115 | 4: 0.25
116 | 3: 0.5
117 | 2: 0.7
118 | 1: 1
119 | 0: 1
120 | -1: 1
121 | -2: 0.7
122 | -3: 0.5
123 | -4: 0.25
124 |
125 | - indicator: number_lanes
126 | classes:
127 | g4: 0
128 | g3: 0.1
129 | g2: 0.2
130 | g1: 0.5
131 | ge0: 1
132 |
133 | - indicator: facilities
134 | classes:
135 | g0: 1
136 | e0: 0
137 |
138 | - indicator: crossings
139 | classes:
140 | e0:
141 | indicator: road_category
142 | mapping:
143 | "primary": 0
144 | "secondary": 0
145 | NULL: 0
146 | "residential": 0.5
147 | _default_: 1
148 | g0: 1
149 |
150 | - indicator: buildings
151 | classes:
152 | ge80: 0
153 | g60: 0.2
154 | g40: 0.4
155 | g20: 0.6
156 | g0: 0.8
157 | e0: 1
158 |
159 | - indicator: greenness
160 | classes:
161 | g75: 1
162 | g50: 0.9
163 | g25: 0.8
164 | g0: 0.7
165 | e0: 0
166 |
167 | - indicator: water
168 | mapping:
169 | True: 1
170 | False: 0
171 |
172 | - indicator: noise
173 | classes:
174 | g70: 0
175 | g55: 0.6
176 | g10: 0.8
177 | ge0: 1
178 |
--------------------------------------------------------------------------------
/examples/settings_gip.yml:
--------------------------------------------------------------------------------
1 | version: 1.2
2 | # this part is optional: if not specified, the internal Postgres-DB will be used (inside Docker container)
3 | database:
4 | host: gateway.docker.internal
5 | port: 5432
6 | dbname: netascore
7 | on_existing: abort # skip, delete, abort
8 | # user and password to be specified as ENV variables
9 | # -- end of optional db part --
10 | import:
11 | type: gip
12 | filename_A: A_routingexport_ogd_split.zip # triggers unzip + db import (following standard file structure of GIP files)
13 | optional:
14 | noise:
15 | filename: noise.gpkg # GeoPackage
16 | # column_noise: noise # TODO: set from settings file
17 | osm:
18 | filename: austria-latest.osm.pbf # required mainly for walkability indicators
19 |     # NOTE: if an OSM file is provided, the layers 'building', 'crossing', 'facility', 'greenness' and 'water' are extracted from OSM
20 | # building:
21 | # filename: building.gpkg # GeoPackage
22 | # crossing:
23 | # filename: crossing.gpkg # GeoPackage
24 | # facility:
25 | # filename: facility.gpkg # GeoPackage
26 | # greenness:
27 | # filename: greenness.gpkg # GeoPackage
28 | # water:
29 | # filename: water.gpkg # GeoPackage
30 | index:
31 | compute_explanation: False
32 | profiles:
33 | -
34 | profile_name: bike
35 | filename: profile_bike.yml
36 | filter_access_bike: True
37 | -
38 | profile_name: walk
39 | filename: profile_walk.yml
40 | filter_access_walk: True
41 | export:
42 | type: geopackage
43 |   filename: gip_network.gpkg # GeoPackage with two layers: "edge" (attributes and index values) and "node" (attributes)
44 |
--------------------------------------------------------------------------------
/examples/settings_osm_file.yml:
--------------------------------------------------------------------------------
1 | version: 1.2
2 | # this part is optional: if not specified, the internal Postgres-DB will be used (inside Docker container)
3 | database:
4 | host: gateway.docker.internal
5 | port: 5432
6 | dbname: netascore
7 | on_existing: abort # skip, delete, abort
8 | # user and password to be specified as ENV variables
9 | # -- end of optional db part --
10 | import:
11 | type: osm
12 | filename: austria-latest.osm.pbf
13 | optional:
14 | dem:
15 | filename: austria.tif # GeoTIFF
16 | srid: 31287
17 | noise:
18 | filename: noise.gpkg # GeoPackage
19 | # column_noise: noise # TODO: set from settings file
20 | index:
21 | compute_explanation: False
22 | profiles:
23 | -
24 | profile_name: bike
25 | filename: profile_bike.yml
26 | filter_access_bike: True
27 | -
28 | profile_name: walk
29 | filename: profile_walk.yml
30 | filter_access_walk: True
31 | export:
32 | type: geopackage
33 |   filename: osm_network.gpkg # GeoPackage with two layers: "edge" (attributes and index values) and "node" (attributes)
34 |
--------------------------------------------------------------------------------
/examples/settings_osm_query.yml:
--------------------------------------------------------------------------------
1 | version: 1.2
2 |
3 | # global settings that are relevant across individual processing steps
4 | global:
5 | #target_srid: 32633 # NOTE: currently, only metric unit systems (e.g. UTM) supported
6 | case_id: salzburg # unique id (name) - useful if working with different datasets / areas of interest
7 |
8 | # this part is optional: if not specified, the internal Postgres-DB will be used (inside Docker container)
9 | database:
10 | on_existing: delete # skip, delete, abort
11 | # for production environments, user and password should be specified as ENV variables instead
12 | password: postgres
13 | username: postgres
14 | host: netascore-db
15 | port: 5432
16 | dbname: postgres
17 | # -- end of optional db part --
18 |
19 | import:
20 | type: osm
21 | on_existing: delete
22 | place_name: Salzburg
23 | #other currently supported query parameters: admin_level, zip_code
24 |   #alternative for querying using a bounding box (please also specify 'srid' in the 'global' section in that case):
25 | #bbox: 47.7957,13.0117,47.8410,13.0748
26 |   interactive: False # if True, allows for interactive choice in case multiple results are returned for the given AOI query (otherwise the first result is used)
27 | buffer: 1000
28 |
29 | index:
30 | compute_explanation: True
31 |
32 | profiles:
33 | -
34 | profile_name: bike
35 | filename: profile_bike.yml
36 | filter_access_bike: True
37 | -
38 | profile_name: walk
39 | filename: profile_walk.yml
40 | filter_access_walk: True
41 |
42 | export:
43 | type: geopackage
44 |   filename: netascore_<case_id>.gpkg # GeoPackage with two layers: "edge" (attributes and index values) and "node" (attributes)
45 |
--------------------------------------------------------------------------------
/generate_index.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import sys
3 | import yaml
4 | import re
5 | from typing import List
6 |
7 | import toolbox.helper as h
8 | from core.attributes_step import create_attributes_step
9 | from core.db_step import DbStep
10 | from core.export_step import create_exporter
11 | from core.import_step import create_importer
12 | from core.index_step import generate_index, load_profiles
13 | from core.network_step import create_network_step
14 | from core.optional_step import run_optional_importers
15 | from settings import DbSettings, GlobalSettings
16 |
17 | parser = argparse.ArgumentParser(description='NetAScore: imports network data, computes segment-wise index values (e.g. bikeability, walkability) and exports the assessed network')
18 | parser.add_argument('settings_file', type=argparse.FileType('r', encoding='utf-8'),
19 |                     help='path to the YAML settings file (see settings.md for documentation)')
20 | parser.add_argument('--skip', nargs='+', choices=['import', 'optional', 'network', 'attributes', 'index', 'export'],
21 | help='skip one or more of these steps - e.g. "--skip import optional"')
22 | parser.add_argument('--loglevel', nargs=1, choices=["1", "2", "3", "4"],
23 | help="Sets the level of debug outputs on the console: 1=MajorInfo, 2=Info, 3=Detailed, 4=Debug")
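24 | # example invocation (sketch):
25 | #   python generate_index.py data/settings.yml --skip import optional --loglevel 2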
24 |
25 | args = parser.parse_args()
26 | settings_stream = args.settings_file
27 | skip_steps = args.skip or []
28 | base_path = settings_stream.name.rsplit('/', 1)[0] if '/' in settings_stream.name else '.'  # avoid returning the filename itself if no directory is given
29 | if args.loglevel:
30 | h.verbose_level = int(args.loglevel[0])
31 | h.majorInfo(f"using log level {h.verbose_level}")
32 |
33 | h.info(f'loading {settings_stream.name}')
34 | h.info(f'skip steps: {str(skip_steps)}')
35 |
36 | def require_allowed_value(setting_value: str, setting_name: str, allowed: List[str]):
37 | for value in allowed:
38 | if value == setting_value:
39 | return
40 |
41 | h.majorInfo(setting_name + ': ' + setting_value + ' is not one of the allowed values: ' + ' '.join(allowed))
42 | sys.exit(1)
43 |
44 |
45 | def require_on_existing_setting(settings: dict):
46 | if 'on_existing' not in settings:
47 | h.info('defaulting \'on_existing\' to \'skip\'')
48 | settings['on_existing'] = 'skip'
49 | require_allowed_value(settings['on_existing'], 'on_existing', ['skip', 'delete', 'abort'])
50 |
51 |
52 | with settings_stream:
53 | settings: dict = yaml.safe_load(settings_stream)
54 |
55 | # process global settings if given
56 | if h.has_keys(settings, ['global']):
57 | global_settings: dict = settings['global']
58 | if h.has_keys(global_settings, ['target_srid']):
59 | GlobalSettings.custom_srid = h.str_to_numeric(re.sub("[^0-9]", "", str(global_settings['target_srid'])))
60 | h.info(f"Set the target SRID to {GlobalSettings.get_target_srid()}")
61 | if h.has_keys(global_settings, ['case_id']):
62 | GlobalSettings.case_id = re.sub("[^a-zA-Z0-9_]", "", str(global_settings['case_id']))
63 |
64 | db_settings: DbSettings = DbSettings.from_dict(settings.get('database'))
65 |
66 | # check if all required sections are present first before taking any actions
67 | if 'import' not in skip_steps:
68 | h.require_keys(settings, ['import'], 'error: section missing:')
69 | if 'export' not in skip_steps:
70 | h.require_keys(settings, ['export'], 'error: section missing:')
71 | if 'index' not in skip_steps:
72 | h.require_keys(settings, ['profiles'], 'error: section missing:')
73 |
74 | # execute processing steps
75 | if 'import' not in skip_steps:
76 | h.majorInfo(' === importing ===')
77 | import_settings: dict = settings['import']
78 | h.require_keys(import_settings, ['type'], 'error: import section is missing:')
79 | require_on_existing_setting(import_settings)
80 | importer: DbStep = create_importer(db_settings, import_settings['type'])
81 | importer.run_step(import_settings)
82 | else:
83 | h.majorInfo(' === skipping import ===')
84 |
85 | if 'optional' not in skip_steps and 'optional' in settings:
86 | h.majorInfo(' === running optional importers ===')
87 | run_optional_importers(db_settings, settings.get('optional'))
88 |
89 | if 'network' not in skip_steps and 'import' in settings:
90 | # TODO: specify settings key that is needed by the network step
91 | h.majorInfo(' === running network step ===')
92 | # TODO: add error handling for import_settings
93 | # TODO: code repetition
94 | import_settings: dict = settings['import']
95 | h.require_keys(import_settings, ['type'], 'error: import section is missing:')
96 | require_on_existing_setting(import_settings)
97 | network_step: DbStep = create_network_step(db_settings, import_settings['type'])
98 | network_step.run_step(import_settings)
99 |
100 | if 'attributes' not in skip_steps and 'import' in settings:
101 | # TODO: specify settings key that is needed by the attributes step
102 | h.majorInfo(' === running attributes step ===')
103 | # TODO: add error handling for import_settings
104 | # TODO: code repetition
105 | import_settings: dict = settings['import']
106 | h.require_keys(import_settings, ['type'], 'error: import section is missing:')
107 | require_on_existing_setting(import_settings)
108 | attributes_step: DbStep = create_attributes_step(db_settings, import_settings['type'])
109 | attributes_step.run_step(import_settings)
110 |
111 | if 'index' not in skip_steps:
112 | h.majorInfo(' === generating index ===')
113 | mode_profile_settings: dict = settings['profiles']
114 | profiles = load_profiles(base_path, mode_profile_settings)
115 | index_settings: dict = None
116 | if h.has_keys(settings, ['index']):
117 | index_settings = settings['index']
118 | generate_index(db_settings, profiles, index_settings)
119 |
120 | if 'export' not in skip_steps:
121 | h.majorInfo(' === exporting ===')
122 | export_settings: dict = settings['export']
123 | h.require_keys(export_settings, ['type'], 'error: export section is missing:')
124 | exporter: DbStep = create_exporter(db_settings, export_settings['type'])
125 | exporter.run_step(export_settings)
126 | else:
127 | h.majorInfo('skipping export (as listed in skip_steps)')
128 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | gdal==3.2.2
2 | igraph
3 | pandas
4 | Flask==2.0.3
5 | psycopg2
6 | Jinja2<3.1.0
7 | JinjaSql
8 | requests
9 | osm2geojson
10 | PyYaml
--------------------------------------------------------------------------------
/resources/default.style:
--------------------------------------------------------------------------------
1 | # This is the default osm2pgsql .style file that comes with osm2pgsql.
2 | #
3 | # A .style file has 4 columns that define how OSM objects end up in tables in
4 | # the database and what columns are created. It interacts with the command-line
5 | # hstore options.
6 | #
7 | # Columns
8 | # =======
9 | #
10 | # OsmType: This is either "node", "way" or "node,way" and indicates if this tag
11 | # applies to nodes, ways, or both.
12 | #
13 | # Tag: The tag
14 | #
15 | # DataType: The type of the column to be created. Normally "text"
16 | #
17 | # Flags: Flags that indicate what table the OSM object is moved into.
18 | #
19 | # There are 6 possible flags. These flags are used both to indicate if a column
20 | # should be created, and if ways with the tag are assumed to be areas. The area
21 | # assumptions can be overridden with an area=yes/no tag
22 | #
23 | # polygon - Create a column for this tag, and objects with the tag are areas
24 | #
25 | # linear - Create a column for this tag
26 | #
27 | # nocolumn - Override the above and don't create a column for the tag, but do
28 | # include objects with this tag
29 | #
30 | # phstore - Same as polygon,nocolumn for backward compatibility
31 | #
32 | # delete - Drop this tag completely and don't create a column for it. This also
33 | # prevents the tag from being added to hstore columns
34 | #
35 | # nocache - Deprecated and does nothing
36 | #
37 | # If an object has a tag that indicates it is an area or has area=yes/1,
38 | # osm2pgsql will try to turn it into an area. If it succeeds, it places it in
39 | # the polygon table. If it fails (e.g. not a closed way) it places it in the
40 | # line table.
41 | #
42 | # Nodes are never placed into the polygon or line table and are always placed in
43 | # the point table.
44 | #
45 | # Hstore
46 | # ======
47 | #
48 | # The options --hstore, --hstore-match-only, and --hstore-all interact with
49 | # the .style file.
50 | #
51 | # With --hstore any tags without a column will be added to the hstore column.
52 | # This will also cause all objects to be kept.
53 | #
54 | # With --hstore-match-only the behavior for tags is the same, but objects are
55 | # only kept if they have a non-NULL value in one of the columns.
56 | #
57 | # With --hstore-all all tags are added to the hstore column unless they appear
58 | # in the style file with a delete flag, causing duplication between the normal
59 | # columns and the hstore column.
60 | #
61 | # Special database columns
62 | # ========================
63 | #
64 | # There are some special database columns that if present in the .style file
65 | # will be populated by osm2pgsql.
66 | #
67 | # These are
68 | #
69 | # z_order - datatype int4
70 | #
71 | # way_area - datatype real. The area of the way, in the units of the projection
72 | # (e.g. square mercator meters). Only applies to areas
73 | #
74 | # osm_user - datatype text
75 | # osm_uid - datatype integer
76 | # osm_version - datatype integer
77 | # osm_changeset - datatype integer
78 | # osm_timestamp - datatype timestamptz(0).
79 | # Used with the --extra-attributes option to include metadata in the database.
80 | # If importing with both --hstore and --extra-attributes the meta-data will
81 | # end up in the tags hstore column regardless of the style file.
82 |
83 | # OsmType Tag DataType Flags
84 | node,way access text linear
85 | node,way addr:housename text linear
86 | node,way addr:housenumber text linear
87 | node,way addr:interpolation text linear
88 | node,way admin_level text linear
89 | node,way aerialway text linear
90 | node,way aeroway text polygon
91 | node,way amenity text polygon
92 | node,way area text polygon # hard coded support for area=1/yes => polygon is in osm2pgsql
93 | node,way barrier text linear
94 | node,way bicycle text linear
95 | node,way brand text linear
96 | node,way bridge text linear
97 | node,way boundary text linear
98 | node,way building text polygon
99 | node capital text linear
100 | node,way construction text linear
101 | node,way covered text linear
102 | node,way culvert text linear
103 | node,way cutting text linear
104 | node,way denomination text linear
105 | node,way disused text linear
106 | node ele text linear
107 | node,way embankment text linear
108 | node,way foot text linear
109 | node,way generator:source text linear
110 | node,way harbour text polygon
111 | node,way highway text linear
112 | node,way historic text polygon
113 | node,way horse text linear
114 | node,way intermittent text linear
115 | node,way junction text linear
116 | node,way landuse text polygon
117 | node,way layer text linear
118 | node,way leisure text polygon
119 | node,way lock text linear
120 | node,way man_made text polygon
121 | node,way military text polygon
122 | node,way motorcar text linear
123 | node,way name text linear
124 | node,way natural text polygon # natural=coastline tags are discarded by a hard coded rule in osm2pgsql
125 | node,way office text polygon
126 | node,way oneway text linear
127 | node,way operator text linear
128 | node,way place text polygon
129 | node,way population text linear
130 | node,way power text polygon
131 | node,way power_source text linear
132 | node,way public_transport text polygon
133 | node,way railway text linear
134 | node,way ref text linear
135 | node,way religion text linear
136 | node,way route text linear
137 | node,way service text linear
138 | node,way shop text polygon
139 | node,way sport text polygon
140 | node,way surface text linear
141 | node,way toll text linear
142 | node,way tourism text polygon
143 | node,way tower:type text linear
144 | way tracktype text linear
145 | node,way tunnel text linear
146 | node,way water text polygon
147 | node,way waterway text polygon
148 | node,way wetland text polygon
149 | node,way width text linear
150 | node,way wood text linear
151 | node,way z_order int4 linear # This is calculated during import
152 | way way_area real linear # This is calculated during import
153 |
154 | # Area tags
155 | # We don't make columns for these tags, but objects with them are areas.
156 | # Mainly for use with hstore
157 | way abandoned:aeroway text polygon,nocolumn
158 | way abandoned:amenity text polygon,nocolumn
159 | way abandoned:building text polygon,nocolumn
160 | way abandoned:landuse text polygon,nocolumn
161 | way abandoned:power text polygon,nocolumn
162 | way area:highway text polygon,nocolumn
163 |
164 | # Deleted tags
165 | # These are tags that are generally regarded as useless for most rendering.
166 | # Most of them are from imports or intended as internal information for mappers
167 | # Some of them are automatically deleted by editors.
168 | # If you want some of them, perhaps for a debugging layer, just delete the lines.
169 |
170 | # These tags are used by mappers to keep track of data.
171 | # They aren't very useful for rendering.
172 | node,way note text delete
173 | node,way note:* text delete
174 | node,way source text delete
175 | node,way source_ref text delete
176 | node,way source:* text delete
177 | node,way attribution text delete
178 | node,way comment text delete
179 | node,way fixme text delete
180 |
181 | # Tags generally dropped by editors, not otherwise covered
182 | node,way created_by text delete
183 | node,way odbl text delete
184 | node,way odbl:note text delete
185 | node,way SK53_bulk:load text delete
186 |
187 | # Lots of import tags
188 | # TIGER (US)
189 | node,way tiger:* text delete
190 |
191 | # NHD (US)
192 | # NHD has been converted every way imaginable
193 | node,way NHD:* text delete
194 | node,way nhd:* text delete
195 |
196 | # GNIS (US)
197 | node,way gnis:* text delete
198 |
199 | # Geobase (CA)
200 | node,way geobase:* text delete
201 | # NHN (CA)
202 | node,way accuracy:meters text delete
203 | node,way sub_sea:type text delete
204 | node,way waterway:type text delete
205 |
206 | # KSJ2 (JA)
207 | # See also note:ja and source_ref above
208 | node,way KSJ2:* text delete
209 | # Yahoo/ALPS (JA)
210 | node,way yh:* text delete
211 |
212 | # osak (DK)
213 | node,way osak:* text delete
214 |
215 | # kms (DK)
216 | node,way kms:* text delete
217 |
218 | # ngbe (ES)
219 | # See also note:es and source:file above
220 | node,way ngbe:* text delete
221 |
222 | # naptan (UK)
223 | node,way naptan:* text delete
224 |
225 | # Corine (CLC) (Europe)
226 | node,way CLC:* text delete
227 |
228 | # misc
229 | node,way 3dshapes:ggmodelk text delete
230 | node,way AND_nosr_r text delete
231 | node,way import text delete
232 | node,way it:fvg:* text delete
233 |
--------------------------------------------------------------------------------
/settings.md:
--------------------------------------------------------------------------------
1 | # Settings file
2 |
3 | NetAScore uses a settings file that holds all information necessary to perform the steps for computing *bikeability* and *walkability*. It is written in YAML. General information about YAML can be found at https://yaml.org/.
4 | 
5 | We provide a set of **example settings files** with NetAScore. You can find them inside the subdirectory `examples`. The most common one to start from is `settings_osm_query.yml`. This is the file used by default in the Docker image. It is configured for easy use, with all components running in Docker and no local input files required. See [docker.md](docker.md) for instructions on executing NetAScore in this example setup.
6 | 
7 | Settings files for NetAScore consist of several sections. For **every processing step** that should be executed, a **corresponding settings section** needs to be provided. Conversely, you can omit a section if you skip all processing steps that require it. E.g. if you skip the `export` step, you don't need to provide an `export` section.
8 | 
9 | In this document, we provide an overview of the structure of settings files and details on the available settings for each processing step.
10 | 
11 | Besides the settings file, you also need to provide a mode profile file for each mode you want to compute a suitability index for. For now, please use the two mode profiles provided in the `examples/` subdirectory as a reference and copy them to the `data` directory for use in your own runs.
12 |
13 | ## Structure
14 |
15 | The settings file can consist of the following **sections**:
16 |
17 | - **version**:
18 | mandatory, single value. For use with NetAScore version 1.1 and later, the settings file `version` entry should be `1.2`.
19 | - **global**:
20 | general settings such as target reference system (SRID) to use
21 | - **database**:
22 | connection parameters for PostgreSQL database
23 | - **import**:
24 | essential information for importing core datasets
25 | - **optional**:
26 | information on optional datasets to import
27 | - **profiles**:
28 | specification of indicator weights and indicator value mappings per mode profile - e.g. for *bikeability* and *walkability*
29 | - **export**:
30 | information for exporting results
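
Taken together, the skeleton of a settings file follows the order used in the example files; the contents of each section are described below. Note that the example files additionally contain an `index` section (e.g. `compute_explanation: True`), which is optional:

```yaml
version: 1.2

global:    # general settings (target_srid, case_id)
database:  # connection parameters (may be omitted when using the Docker-internal DB)
import:    # core dataset to import (osm or gip)
optional:  # optional datasets (dem, noise, custom layers)
index:     # optional index settings (e.g. compute_explanation) - see settings_osm_query.yml
profiles:  # mode profiles (e.g. bike, walk)
export:    # output definition
```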
31 |
32 |
33 |
34 | ## Section `global`
35 |
36 | ### Property `target_srid`
37 |
38 | **This parameter is optional if using GIP** (for the whole of Austria) **or OSM Download via place name** - in these cases NetAScore uses the centroid's UTM zone by default. However, **if working with OSM file import** (e.g. Planet OSM / Geofabrik downloads), you should **provide an appropriate reference system** for your specific area of interest.
39 | Specify the SRID (spatial reference system, as an EPSG code) to use for processing and data output.
40 | Datasets will be imported and, if necessary, transformed to this reference system.
41 | **Please note: currently, only metric unit systems (such as UTM) are supported.**
42 |
43 | **Example**:
44 |
45 | - for WGS 84 UTM zone 33N use: 32633
46 |
47 | ### Property `case_id`
48 |
49 | This parameter allows you to specify a unique identifier for the current task as defined in the settings file. It may be useful when working with different areas of interest or different mode profiles, or when re-executing parts of a workflow. You can include this identifier e.g. in export file names by using `<case_id>` as a placeholder.
50 |
51 | **Note**: Only alphanumeric characters [A-Z, a-z, 0-9] and '_' are allowed. Other characters will be removed.
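
Putting both properties together, a minimal `global` section (values taken from the Salzburg example settings) could look like this:

```yaml
global:
  target_srid: 32633  # WGS 84 / UTM zone 33N
  case_id: salzburg
```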
52 |
53 |
54 |
55 |
56 | ## Section `database`
57 |
58 | Connection details for a PostgreSQL database are required in order to import, transform, assess and export data. If you don't provide any information, the program will start up normally, but it will most likely terminate soon after with a connection error.
59 |
60 | Example settings are provided in the `examples` directory and should be a good starting point for customization.
61 |
62 | The **default settings** for using the **Docker** setup (NetAScore and PostgreSQL running in containers) are:
63 |
64 | ```yaml
65 | database:
66 | host: netascore-db
67 | port: 5432
68 | dbname: postgres
69 | username: postgres
70 | password: postgres
71 | ```
72 |
73 | If you want to connect to a **local PostgreSQL instance** outside of Docker while running NetAScore in Docker, you can use the following settings:
74 |
75 | ```yaml
76 | database:
77 | host: gateway.docker.internal
78 | port: 5432 # port of existing PostgreSQL server on localhost
79 | dbname: postgres # name of target database
80 | username: postgres # local postgres user
81 | password: postgres
82 | on_existing: abort # skip, delete, abort
83 | ```
84 |
85 | To use the toolset entirely without Docker, simply provide the connection details for your PostgreSQL database as you would for any other software.
86 | 
87 | **Note**: For security reasons, you should **provide `username` and `password` as environment variables** if you are not working within a test environment: set `DB_USERNAME` and `DB_PASSWORD` accordingly and remove the `username` and `password` keys from the settings file.
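
For illustration, a sketch of a `database` section that relies on these environment variables (all other values as in the Docker defaults above):

```yaml
database:
  host: netascore-db
  port: 5432
  dbname: postgres
  # username and password omitted on purpose:
  # they are read from the DB_USERNAME and DB_PASSWORD environment variables
  on_existing: delete  # skip, delete, abort
```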
88 |
89 |
90 |
91 | ## Section `import`
92 |
93 | ### Property `type`
94 |
95 | Specify the type of geodata you want to import. At the moment, the following input
96 | types are supported:
97 |
98 | - value `osm`: OpenStreetMap
99 | - value `gip`: Austrian authoritative dataset *"Graphenintegrations-Plattform GIP"* - see also: [http://www.gip.gv.at](http://www.gip.gv.at/en/index.html)
100 |
101 | ### Property `filename`
102 |
103 | Refers to the file containing the geodata.
104 |
105 | - OSM data can be imported from `PBF` format, which can be downloaded e.g. from https://download.geofabrik.de. For directly downloading a small AOI with NetAScore, use one of the options outlined in the following section "Additional options for OSM".
106 | - GIP data for Austria can be imported from the `IDF` export files which can be downloaded from [https://www.data.gv.at](https://www.data.gv.at/katalog/dataset/3fefc838-791d-4dde-975b-a4131a54e7c5)
107 |
108 | ### Additional options for OSM
109 |
110 | NetAScore allows you to **directly download OpenStreetMap data** via Overpass API. You can simply provide the following properties instead of the `filename` property:
111 |
112 | - property **`place_name`**: Name of a place, referring to a polygon. Typically you could use the name of a city or city district. E.g. 'Salzburg' or 'City of London'. If the place name given is not unique, you can either opt for an interactive prompt by adding the setting `interactive: True` or specify additional parameters:
113 |
114 | - `admin_level`: filters the given results for OSM 'admin_level' property (see [OSM documentation](https://wiki.openstreetmap.org/wiki/Item:Q1074))
115 | - `zip_code`: filters the given results for a ZIP code (if available in OSM data)
116 |
117 | **Please note**: Network data is queried based on the bounding box (rectangle) containing the polygon returned for the place name query. If you do not specify a reference system (global option `target_srid`), the UTM zone suitable for the centroid of the area of interest is used.
118 |
119 | - property **`buffer`**: When using the `place_name` query, this option allows you to specify a spatial buffer to enlarge the extent of network data being queried. The unit of the buffer is defined by the target SRID - currently this must be in meters.
120 |
121 | - property **`bbox`**: Bounding box of your area of interest in WGS 84 (longitude and latitude, geographic coordinates) - e.g. for Salzburg (Austria) use: `bbox: 47.7957,13.0117,47.8410,13.0748`
122 | **Please note**: when using this option, please specify **`target_srid`** in the `global` settings section to define an appropriate spatial reference system for your custom area of interest
123 |
124 | - properties **`include_rail`** and **`include_aerialway`**: These optional `boolean` tags allow you to include railway and aerialway features in the network dataset. This may be useful for visualization and specific types of analysis. For example, provide `include_rail: True` if you want railway geometry to be included in the output dataset.
125 |
126 | - (advanced) property `filename_style`: For importing OpenStreetMap data into the database, NetAScore uses [osm2pgsql](https://osm2pgsql.org/). Import settings for this command-line utility are provided in a `default.style` file. By default, NetAScore provides this file in the `resources` directory. This setting, however, allows you to specify a custom style file.
127 |
128 |
129 | ### Property `on_existing`
130 |
131 | This setting defines how to handle file and database table conflicts during any import and processing step (e.g. when trying to re-import a file with the same `case_id`).
132 |
133 | The possible values are:
134 |
135 | - **`skip`** (default): skip the import
136 | - **`delete`**: delete existing data and run the import and/or processing again (overwrites existing data)
137 | - **`abort`**: terminate program execution (will report an error) - in this case, you are responsible for manually resolving conflicts
138 |
139 | ### Example import section
140 |
141 | This is an example for OpenStreetMap import using a place name query:
142 |
143 | ```yaml
144 | import:
145 | type: osm
146 | on_existing: delete
147 | place_name: Salzburg
148 | interactive: True
149 | buffer: 1000
150 | ```
151 |
152 | The following example uses an existing OSM extract for Austria downloaded from https://download.geofabrik.de. It additionally specifies a custom `default.style` for OSM import to the database.
153 |
154 | ```yaml
155 | import:
156 | type: osm
157 | filename: austria-latest.osm.pbf
158 | filename_style: default.style
159 | on_existing: delete
160 | ```
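
A third variant queries by bounding box instead of place name. This is a sketch using the Salzburg `bbox` from above; note the mandatory `target_srid` in the `global` section:

```yaml
global:
  target_srid: 32633

import:
  type: osm
  on_existing: delete
  bbox: 47.7957,13.0117,47.8410,13.0748
```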
161 |
162 |
163 |
164 | ## Section `optional`
165 |
166 | This is the most flexible section: it specifies data that can optionally be imported to improve the quality of the resulting index.
167 |
168 | The following optional data is currently supported:
169 |
170 | - **dem** (GeoTIFF): digital elevation model
171 | - **noise** (GeoPackage: Polygon, MultiPolygon): polygons with noise attribute (e.g. traffic noise)
172 | - **osm** (PBF): only relevant for GIP import, when OpenStreetMap data is used to infer the layers `building`, `crossing`, `facility`, `greenness` and `water`
173 |
174 | The following layers can be supplied if OpenStreetMap data is not used or if you wish to use custom datasets:
175 |
176 | - **building** (GeoPackage: Polygon)
177 | - **crossing** (GeoPackage: Point, LineString)
178 | - **facility** (GeoPackage: Point, Polygon)
179 | - **greenness** (GeoPackage: Polygon)
180 | - **water** (GeoPackage: LineString, Polygon)
181 |
182 | ### Subsection `dem`
183 |
184 | The DEM (digital elevation model) is used to add elevation values to the network nodes and to calculate the gradient of a road segment.
185 |
186 | - Property `filename`: name of the file (GeoTIFF) to be imported
187 | - Property `srid`: spatial reference system identifier (SRID) of the dataset
188 |
189 | For Austria a 10 m x 10 m DEM can be downloaded here: [https://www.data.gv.at](https://www.data.gv.at/katalog/dataset/b5de6975-417b-4320-afdb-eb2a9e2a1dbf)
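
A sketch of the corresponding settings; the file name is a placeholder for your own dataset, and the `srid` must match that dataset:

```yaml
optional:
  dem:
    filename: dem.tif  # placeholder: GeoTIFF elevation model in the data directory
    srid: 31287        # placeholder: SRID of the DEM dataset
```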
190 |
191 | ### Subsection `noise`
192 |
193 | The noise dataset contains a mapping of noise levels in decibels, represented as polygons with an associated noise attribute.
194 |
195 | - `filename`: name of the file to be imported
196 |
197 | For Austrian states the noise datasets can be downloaded here: [https://www.inspire.gv.at](https://geometadatensuche.inspire.gv.at/metadatensuche/srv/ger/catalog.search#/metadata/125ec87c-7120-48a7-bd2c-2718cbf878c6)
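
A corresponding sketch, again with a placeholder file name:

```yaml
optional:
  noise:
    filename: noise.gpkg  # placeholder: polygons with a noise attribute
```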
198 |
199 | ### Subsection `osm`
200 |
201 | This is only relevant when working with GIP data as the main input. An OSM dataset can be used to derive the following optional datasets: `building`, `crossing`, `facility`, `greenness`, `water`
202 |
203 | - `filename`: name of the OSM file to be imported
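
A sketch for a GIP-based run that derives these layers from an OSM extract (file name as in the import example above):

```yaml
optional:
  osm:
    filename: austria-latest.osm.pbf  # OSM extract used to derive building, crossing, facility, greenness, water
```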
204 |
205 | ### Subsections `building`, `crossing`, `facility`, `greenness`, `water`
206 | 
207 | If these datasets are not derived from an OSM dataset, they can be imported as individual datasets. This might be useful e.g. when working with local, authoritative datasets.
208 |
209 | - `filename`: name of the file to be imported
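
For illustration, a sketch listing all five custom layers with placeholder file names (supply only those you actually have):

```yaml
optional:
  building:
    filename: building.gpkg   # placeholder: Polygon layer
  crossing:
    filename: crossing.gpkg   # placeholder: Point / LineString layer
  facility:
    filename: facility.gpkg   # placeholder: Point / Polygon layer
  greenness:
    filename: greenness.gpkg  # placeholder: Polygon layer
  water:
    filename: water.gpkg      # placeholder: LineString / Polygon layer
```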
210 |
211 |
212 |
213 | ## Section `profiles`
214 |
215 | NetAScore uses weights to determine the importance of individual indicators for a specific mode profile, such as cycling or walking. Different use cases may call for different weights. Additionally, the mode profiles map original attribute values to numeric indicator values.
216 |
217 | We include well-tested default mode profiles for cycling and walking with NetAScore. For general-purpose assessments we recommend using these profiles: copy the mode profile files `profile_bike.yml` and `profile_walk.yml` from `examples` to the `data` directory and reference them from the settings file as follows:
218 |
219 | ```yaml
220 | profiles:
221 | -
222 | profile_name: bike
223 | filename: profile_bike.yml
224 | filter_access_bike: True
225 | -
226 | profile_name: walk
227 | filename: profile_walk.yml
228 | filter_access_walk: True
229 | ```
230 |
231 | You may edit these profiles or create your own custom profiles and add them to this section of the settings file. The `profile_name` value is included in the column name of the resulting index: e.g. `index_bike_ft`.
232 |
233 | Since NetAScore version 1.1.0, index values are only computed for edges with legal access per mode. This filter is indicated by `filter_access_<mode>: True`. You may include segments accessible to other modes by adding multiple `filter_access_<mode>` lines. Possible modes are `bike`, `walk` and `car`. For example, if you want to compute bikeability for all segments that are accessible by bike, but also for those only open to pedestrians, you may use:
234 |
235 | ```yaml
236 | profiles:
237 | -
238 | profile_name: bike
239 | filename: profile_bike.yml
240 | filter_access_bike: True
241 | filter_access_walk: True
242 | ```
243 |
244 | For details on weight file definition and computation steps involved, please refer to the [README](README.md) and [attributes](attributes.md) documentation.
245 |
246 |
247 |
248 | ## Section `export`
249 |
250 | Currently, NetAScore supports exporting the assessed network to a GeoPackage file. You can define this as follows:
251 |
252 | ```yaml
253 | export:
254 | type: geopackage
255 | filename: osm_network.gpkg
256 | ```
257 |
258 | If you provide a `case_id` in the `global` settings section, you can include it in the export filename by using the placeholder `<case_id>`:
259 |
260 | ```yaml
261 | export:
262 | type: geopackage
263 | filename: netascore_<case_id>.gpkg
264 | ```
265 |
266 |
--------------------------------------------------------------------------------
/settings.py:
--------------------------------------------------------------------------------
1 | import os
2 | from dataclasses import dataclass
3 | from enum import Enum
4 |
5 |
6 | class InputType(Enum):
7 | OSM = "OSM"
8 | GIP = "GIP"
9 |
10 |
11 | class GlobalSettings:
12 | data_directory = "data"
13 | osm_download_prefix = "osm_download"
14 | overpass_api_endpoints = [
15 | "https://overpass-api.de/api/interpreter",
16 | "https://z.overpass-api.de/api/interpreter",
17 | "https://lz4.overpass-api.de/api/interpreter",
18 | "https://maps.mail.ru/osm/tools/overpass/api/interpreter",
19 | "https://overpass.kumi.systems/api/interpreter"
20 | ]
21 | default_srid: int = 32633
22 | custom_srid = None
23 | def get_target_srid() -> int:
24 | return GlobalSettings.custom_srid or GlobalSettings.default_srid
25 |
26 | case_id = "default_net"
27 |
28 |
29 | @dataclass
30 | class DbSettings:
31 | host: str
32 | port: int
33 | dbname: str
34 | username: str
35 | password: str
36 | on_existing: str
37 |
38 | def __post_init__(self):
39 | self.entities: DbEntitySettings = DbEntitySettings(GlobalSettings.case_id)
40 |
41 | @staticmethod
42 | def from_dict(settings_template: dict):
43 | settings_template = settings_template or {}; host = settings_template.get('host', 'netascore-db')  # the 'database' section is optional - fall back to defaults if missing
44 | port = settings_template.get('port', 5432)
45 | dbname = settings_template.get('dbname', 'postgres')
46 | username = settings_template.get('username', '')
47 | password = settings_template.get('password', '')
48 | on_existing = settings_template.get('on_existing', 'abort')
49 |
50 | if len(username) == 0:
51 | username = os.getenv('DB_USERNAME', '')
52 | if len(password) == 0:
53 | password = os.getenv('DB_PASSWORD', '')
54 |
55 | if len(username) == 0:
56 | print('warn: DB_USERNAME not set')
57 |
58 | if len(password) == 0:
59 | print('warn: DB_PASSWORD not set')
60 |
61 | return DbSettings(host, port, dbname, username, password, on_existing)
62 |
63 |
64 | class DbEntitySettings:
65 |
66 | def __init__(self, case_name: str):
67 | self.case_name: str = case_name
68 | self.global_schema_prefix: str = "netascore_"
69 | self.schema_prefix: str = "case_"
70 | self.data_schema_suffix: str = "data"
71 | self.table_net: str = "network"
72 | self.table_net_nodes: str = "network_nodes" # "network_vertices_pgr" for pgRouting-based workflow
73 | self.table_net_attributes: str = "network_attributes"
74 | self.table_net_indicators: str = "network_indicators"
75 | self.table_net_index: str = "network_modes_index" # output table with bike/walk/etc. index columns
76 | self.table_aoi: str = "aoi"
77 | # self.table_osm_bicycle_routes: str = "osm_bicycle_route_network"
78 | # self.table_osm_bicycle_routes_lines: str = "osm_bicycle_route_network_line"
79 |
80 | self._data_schema: str = None
81 | self._network_schema: str = None
82 | self._output_schema: str = None
83 |
84 | ### computed properties (e.g. joined with a name, etc.) - can also be manually set instead // getter with default return value and setter ###
85 |
86 | # data schema
87 | def get_data_schema(self) -> str:
88 | return self._data_schema or f"{self.global_schema_prefix}{self.data_schema_suffix}".lower()
89 |
90 | def set_data_schema(self, schema: str):
91 | self._data_schema = schema
92 | data_schema: str = property(get_data_schema, set_data_schema)
93 |
94 | # network schema
95 | def get_network_schema(self) -> str:
96 | return self._network_schema or f"{self.global_schema_prefix}{self.schema_prefix}{self.case_name}".lower()
97 |
98 | def set_network_schema(self, schema: str):
99 | self._network_schema = schema
100 | network_schema: str = property(get_network_schema, set_network_schema)
101 |
102 | # output schema
103 | def get_output_schema(self) -> str:
104 | return self._output_schema or self.network_schema # defaults to network schema
105 |
106 | def set_output_schema(self, schema: str):
107 | self._output_schema = schema
108 | output_schema: str = property(get_output_schema, set_output_schema)
109 |
--------------------------------------------------------------------------------
/sql/functions/calculate_index.sql.j2:
--------------------------------------------------------------------------------
1 | DROP TYPE IF EXISTS indicator_weight CASCADE;
2 | CREATE TYPE indicator_weight AS (
3 | indicator varchar,
4 | weight numeric
5 | );
6 |
7 | DROP FUNCTION IF EXISTS calculate_index(varchar, numeric,
8 | varchar, numeric,
9 | varchar, numeric,
10 | varchar, numeric,
11 | numeric, numeric,
12 | numeric, numeric,
13 | varchar, numeric,
14 | varchar, numeric,
15 | numeric, numeric,
16 | numeric, numeric,
17 | numeric, numeric,
18 | numeric, numeric,
19 | numeric, numeric,
20 | numeric, numeric,
21 | numeric, numeric,
22 | boolean, numeric,
23 | numeric, numeric);
24 | DROP FUNCTION IF EXISTS calculate_index;
25 |
26 | CREATE OR REPLACE FUNCTION calculate_index(
27 | IN bicycle_infrastructure varchar, IN bicycle_infrastructure_weight numeric,
28 | IN pedestrian_infrastructure varchar, IN pedestrian_infrastructure_weight numeric,
29 | IN designated_route varchar, IN designated_route_weight numeric,
30 | IN road_category varchar, IN road_category_weight numeric,
31 | IN max_speed numeric, IN max_speed_weight numeric,
32 | IN max_speed_greatest numeric, IN max_speed_greatest_weight numeric,
33 | IN parking varchar, IN parking_weight numeric,
34 | IN pavement varchar, IN pavement_weight numeric,
35 | IN width numeric, IN width_weight numeric,
36 | IN gradient numeric, IN gradient_weight numeric,
37 | IN number_lanes numeric, IN number_lanes_weight numeric,
38 | IN facilities numeric, IN facilities_weight numeric,
39 | IN crossings numeric, IN crossings_weight numeric,
40 | IN buildings numeric, IN buildings_weight numeric,
41 | IN greenness numeric, IN greenness_weight numeric,
42 | IN water boolean, IN water_weight numeric,
43 | IN noise numeric, IN noise_weight numeric,
44 | OUT index numeric,
45 | OUT index_robustness numeric,
46 | OUT index_explanation json
47 | ) AS $$
48 | DECLARE
49 | weights_total numeric;
50 | weights_sum numeric;
51 | indicator numeric;
52 | weight numeric;
53 | temp numeric;
54 | indicator_weights indicator_weight[];
55 | BEGIN
56 | -- here, index and weights overrides are added according to their definition in the mode profile files --
57 | {{ overrides | sqlsafe }}
58 |
59 | weights_total := 0;
60 | weights_total :=
61 | CASE WHEN bicycle_infrastructure_weight IS NOT NULL THEN bicycle_infrastructure_weight ELSE 0 END +
62 | CASE WHEN pedestrian_infrastructure_weight IS NOT NULL THEN pedestrian_infrastructure_weight ELSE 0 END +
63 | CASE WHEN designated_route_weight IS NOT NULL THEN designated_route_weight ELSE 0 END +
64 | CASE WHEN road_category_weight IS NOT NULL THEN road_category_weight ELSE 0 END +
65 | CASE WHEN max_speed_weight IS NOT NULL THEN max_speed_weight ELSE 0 END +
66 | CASE WHEN max_speed_greatest_weight IS NOT NULL THEN max_speed_greatest_weight ELSE 0 END +
67 | CASE WHEN parking_weight IS NOT NULL THEN parking_weight ELSE 0 END +
68 | CASE WHEN pavement_weight IS NOT NULL THEN pavement_weight ELSE 0 END +
69 | CASE WHEN width_weight IS NOT NULL THEN width_weight ELSE 0 END +
70 | CASE WHEN gradient_weight IS NOT NULL THEN gradient_weight ELSE 0 END +
71 | CASE WHEN number_lanes_weight IS NOT NULL THEN number_lanes_weight ELSE 0 END +
72 | CASE WHEN facilities_weight IS NOT NULL THEN facilities_weight ELSE 0 END +
73 | CASE WHEN crossings_weight IS NOT NULL THEN crossings_weight ELSE 0 END +
74 | CASE WHEN buildings_weight IS NOT NULL THEN buildings_weight ELSE 0 END +
75 | CASE WHEN greenness_weight IS NOT NULL THEN greenness_weight ELSE 0 END +
76 | CASE WHEN water_weight IS NOT NULL THEN water_weight ELSE 0 END +
77 | CASE WHEN noise_weight IS NOT NULL THEN noise_weight ELSE 0 END;
78 |
79 | weights_sum := 0;
80 | weights_sum :=
81 | CASE WHEN bicycle_infrastructure IS NOT NULL AND bicycle_infrastructure_weight IS NOT NULL THEN bicycle_infrastructure_weight ELSE 0 END +
82 | CASE WHEN pedestrian_infrastructure IS NOT NULL AND pedestrian_infrastructure_weight IS NOT NULL THEN pedestrian_infrastructure_weight ELSE 0 END +
83 | CASE WHEN designated_route IS NOT NULL AND designated_route_weight IS NOT NULL THEN designated_route_weight ELSE 0 END +
84 | CASE WHEN road_category IS NOT NULL AND road_category_weight IS NOT NULL THEN road_category_weight ELSE 0 END +
85 | CASE WHEN max_speed IS NOT NULL AND max_speed_weight IS NOT NULL THEN max_speed_weight ELSE 0 END +
86 | CASE WHEN max_speed_greatest IS NOT NULL AND max_speed_greatest_weight IS NOT NULL THEN max_speed_greatest_weight ELSE 0 END +
87 | CASE WHEN parking IS NOT NULL AND parking_weight IS NOT NULL THEN parking_weight ELSE 0 END +
88 | CASE WHEN pavement IS NOT NULL AND pavement_weight IS NOT NULL THEN pavement_weight ELSE 0 END +
89 | CASE WHEN width IS NOT NULL AND width_weight IS NOT NULL THEN width_weight ELSE 0 END +
90 | CASE WHEN gradient IS NOT NULL AND gradient_weight IS NOT NULL THEN gradient_weight ELSE 0 END +
91 | CASE WHEN number_lanes IS NOT NULL AND number_lanes_weight IS NOT NULL THEN number_lanes_weight ELSE 0 END +
92 | CASE WHEN facilities IS NOT NULL AND facilities_weight IS NOT NULL THEN facilities_weight ELSE 0 END +
93 | CASE WHEN crossings IS NOT NULL AND crossings_weight IS NOT NULL THEN crossings_weight ELSE 0 END +
94 | CASE WHEN buildings IS NOT NULL AND buildings_weight IS NOT NULL THEN buildings_weight ELSE 0 END +
95 | CASE WHEN greenness IS NOT NULL AND greenness_weight IS NOT NULL THEN greenness_weight ELSE 0 END +
96 | CASE WHEN water IS NOT NULL AND water_weight IS NOT NULL THEN water_weight ELSE 0 END +
97 | CASE WHEN noise IS NOT NULL AND noise_weight IS NOT NULL THEN noise_weight ELSE 0 END;
98 |
99 | IF weights_sum > 0 THEN
100 | index := 0;
101 |
102 | -- here, the value assignments per indicator are added as defined in the mode profile files --
103 | {{ indicator_mappings | sqlsafe }}
104 |
105 | END IF;
106 |
107 | index := round(index, 4);
108 | index_robustness := round(weights_sum / weights_total, 4);
109 | {% if compute_explanation %}
110 | index_explanation := (
111 | WITH indicator_weights AS (
112 | SELECT unnest(indicator_weights) AS indicator_weight
113 | ORDER BY (unnest(indicator_weights)).weight DESC, (unnest(indicator_weights)).indicator
114 | -- LIMIT 1
115 | )
116 | SELECT json_object_agg((indicator_weight).indicator, round((indicator_weight).weight, 4))
117 | FROM indicator_weights
118 | );
119 | {% endif %}
120 | END;
121 | $$ LANGUAGE plpgsql;
122 |
--------------------------------------------------------------------------------
/sql/functions/determine_utmzone.sql:
--------------------------------------------------------------------------------
1 | -- Usage: SELECT ST_Transform(the_geom, utmzone(ST_Centroid(the_geom))) FROM sometable;
2 |
3 | CREATE OR REPLACE FUNCTION public.utmzone(geometry)
4 | RETURNS integer AS
5 | $BODY$
6 | DECLARE
7 | geomgeog geometry;
8 | zone int;
9 | pref int;
10 |
11 | BEGIN
12 | geomgeog:= ST_Transform($1,4326);
13 |
14 | IF (ST_Y(geomgeog))>0 THEN
15 | pref:=32600;
16 | ELSE
17 | pref:=32700;
18 | END IF;
19 |
20 | zone:=floor((ST_X(geomgeog)+180)/6)+1;
21 |
22 | RETURN zone+pref;
23 | END;
24 | $BODY$ LANGUAGE 'plpgsql' IMMUTABLE
25 | COST 100;
--------------------------------------------------------------------------------
/sql/functions/gip_calculate_bicycle_infrastructure.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE FUNCTION calculate_bicycle_infrastructure(
2 | basetype varchar, bikefeature varchar -- bikesigned varchar, designated_route varchar
3 | )
4 | RETURNS varchar AS $$
5 | DECLARE
6 | basetype_array varchar[];
7 | -- bikesigned_array varchar[]
8 | bikefeature_array varchar[];
9 | indicator_values integer[];
10 | indicator_value varchar;
11 | BEGIN
12 | basetype_array := string_to_array(basetype, ';');
13 | -- bikesigned_array := string_to_array(bikesigned, ';');
14 | bikefeature_array := string_to_array(bikefeature, ';');
15 |
16 | IF basetype_array IS NOT NULL THEN -- TODO: condition correct?
17 | FOR i IN 1..array_length(basetype_array, 1) LOOP
18 | IF bikefeature_array[i] IN ('RW', 'RWO') THEN
19 | indicator_values := array_append(indicator_values, 1);
20 | ELSEIF (bikefeature_array[i] IN ('GRW_T', 'GRW_TO', 'GRW_M', 'GRW_MO') AND basetype_array[i] <> '7') THEN
21 | -- (bikesigned_array[i] = '1' and basetype_array[i] <> '1') OR -- TODO: adaptation to client
22 | -- (basetype_array[i] = '7' and designated_route <> 'no') -- TODO: adaptation to client
23 | indicator_values := array_append(indicator_values, 2);
24 | ELSEIF bikefeature_array[i] IN ('MZSTR', 'RF') THEN
25 | indicator_values := array_append(indicator_values, 3);
26 | ELSEIF bikefeature_array[i] IN ('BS') THEN
27 | indicator_values := array_append(indicator_values, 4);
28 | END IF;
29 | END LOOP;
30 |
31 | indicator_value :=
32 | CASE
33 | WHEN 1 = ANY (indicator_values) THEN 'bicycle_way'
34 | WHEN 2 = ANY (indicator_values) THEN 'mixed_way'
35 | WHEN 3 = ANY (indicator_values) THEN 'bicycle_lane'
36 | WHEN 4 = ANY (indicator_values) THEN 'bus_lane'
37 | ELSE 'no'
38 | END;
39 | END IF;
40 |
41 | RETURN indicator_value;
42 | END;
43 | $$ LANGUAGE plpgsql;
--------------------------------------------------------------------------------
/sql/functions/gip_calculate_pedestrian_infrastructure.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE FUNCTION calculate_pedestrian_infrastructure(
2 | basetype varchar, bikefeature varchar,
3 | formofway integer, access_pedestrian boolean
4 | )
5 | RETURNS varchar AS $$
6 | DECLARE
7 | basetype_array varchar[];
8 | bikefeature_array varchar[];
9 | indicator_values integer[];
10 | indicator_value varchar;
11 | BEGIN
12 | basetype_array := string_to_array(basetype, ';');
13 | bikefeature_array := string_to_array(bikefeature, ';');
14 |
15 | IF basetype_array IS NOT NULL THEN
16 | FOR i IN 1..array_length(basetype_array, 1) LOOP
17 | IF formofway = '14' AND basetype_array[i] IN ('1', '7') THEN
18 | indicator_values := array_append(indicator_values, 1);
19 | ELSEIF basetype_array[i] = '7' THEN
20 | indicator_values := array_append(indicator_values, 2);
21 | ELSEIF basetype_array[i] <> '7' AND bikefeature_array[i] IN ('GRW_M', 'GRW_MO') AND access_pedestrian THEN
22 | indicator_values := array_append(indicator_values, 3);
23 | ELSEIF basetype_array[i] IN ('6', '13', '24', '25', '42') THEN
24 | indicator_values := array_append(indicator_values, 4);
25 | ELSEIF access_pedestrian AND basetype_array[i] = '1' THEN -- TODO: access_pedestrian is true for both directions when there is only a sidewalk on one side of the road
26 | indicator_values := array_append(indicator_values, 5);
27 | END IF;
28 | END LOOP;
29 |
30 | indicator_value :=
31 | CASE
32 | WHEN 1 = ANY (indicator_values) THEN 'pedestrian_area' -- 'fuzo'
33 | WHEN 2 = ANY (indicator_values) THEN 'pedestrian_way' -- 'pedestrians_separated'
34 | WHEN 3 = ANY (indicator_values) THEN 'mixed_way' -- 'mixed'
35 | WHEN 4 = ANY (indicator_values) THEN 'stairs'
36 | WHEN 5 = ANY (indicator_values) THEN 'sidewalk'
37 | ELSE 'no'
38 | END;
39 | END IF;
40 |
41 | RETURN indicator_value;
42 | END;
43 | $$ LANGUAGE plpgsql;
--------------------------------------------------------------------------------
/sql/functions/gip_calculate_road_category.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE FUNCTION calculate_road_category(
2 | access_car_ft boolean, access_car_tf boolean,
3 | access_bicycle_ft boolean, access_bicycle_tf boolean,
4 | funcroadclass integer, streetcat varchar, basetype varchar,
5 | bikefeaturetow varchar, bikefeaturebkw varchar
6 | )
7 | RETURNS varchar AS $$
8 | DECLARE
9 | basetype_array varchar[];
10 | bikefeaturetow_array varchar[];
11 | bikefeaturebkw_array varchar[];
12 | indicator_values integer[];
13 | indicator_value varchar;
14 | BEGIN
15 | basetype_array := string_to_array(basetype, ';');
16 | bikefeaturetow_array := string_to_array(bikefeaturetow, ';');
17 | bikefeaturebkw_array := string_to_array(bikefeaturebkw, ';');
18 |
19 | IF basetype_array IS NOT NULL THEN
20 | FOR i IN 1..array_length(basetype_array, 1) LOOP
21 | IF streetcat = 'B' THEN
22 | indicator_values := array_append(indicator_values, 1);
23 | ELSEIF (streetcat = 'L' OR funcroadclass = 2) AND streetcat <> 'B' THEN
24 | indicator_values := array_append(indicator_values, 2);
25 | ELSEIF ((streetcat = 'G' AND funcroadclass >= 3) OR
26 | (streetcat = 'R' AND funcroadclass BETWEEN 3 AND 5) OR
27 | (streetcat NOT IN ('B', 'L') AND funcroadclass BETWEEN 3 AND 5)) AND
28 | (bikefeaturetow_array[i] <> 'VK_BE' AND bikefeaturebkw_array[i] <> 'VK_BE' AND
29 | bikefeaturetow_array[i] <> 'FRS' AND bikefeaturebkw_array[i] <> 'FRS') AND
30 | (access_car_ft OR access_car_tf) THEN
31 | indicator_values := array_append(indicator_values, 3);
32 | ELSEIF streetcat NOT IN ('B', 'L', 'G') AND funcroadclass > 5 AND
33 | (bikefeaturetow_array[i] <> 'VK_BE' AND bikefeaturebkw_array[i] <> 'VK_BE' AND
34 | bikefeaturetow_array[i] <> 'FRS' AND bikefeaturebkw_array[i] <> 'FRS') AND
35 | (access_car_ft OR access_car_tf) THEN
36 | indicator_values := array_append(indicator_values, 4);
37 | ELSEIF (bikefeaturetow_array[i] = 'VK_BE' OR bikefeaturebkw_array[i] = 'VK_BE' OR
38 | bikefeaturetow_array[i] = 'FRS' OR bikefeaturebkw_array[i] = 'FRS') AND
39 | (access_car_ft OR access_car_tf) THEN
40 | indicator_values := array_append(indicator_values, 5);
41 | ELSEIF (bikefeaturetow_array[i] = 'FUZO' OR bikefeaturebkw_array[i] = 'FUZO') OR
42 | ((access_car_ft IS FALSE AND access_car_tf IS FALSE) AND (access_bicycle_ft OR access_bicycle_tf) AND
43 | basetype_array[i] <> '7') THEN
44 | indicator_values := array_append(indicator_values, 6);
45 | ELSEIF (access_bicycle_ft IS FALSE AND access_bicycle_tf IS FALSE) OR basetype_array[i] = '7' THEN
46 | indicator_values := array_append(indicator_values, 7);
47 | END IF;
48 | END LOOP;
49 |
50 | indicator_value :=
51 | CASE
52 | WHEN 1 = ANY (indicator_values) THEN 'primary'
53 | WHEN 2 = ANY (indicator_values) THEN 'secondary'
54 | WHEN 3 = ANY (indicator_values) THEN 'residential'
55 | WHEN 4 = ANY (indicator_values) THEN 'service'
56 | WHEN 5 = ANY (indicator_values) THEN 'calmed'
57 | WHEN 6 = ANY (indicator_values) THEN 'no_mit'
58 | WHEN 7 = ANY (indicator_values) THEN 'path'
59 | END;
60 | END IF;
61 |
62 | RETURN indicator_value;
63 | END;
64 | $$ LANGUAGE plpgsql;
--------------------------------------------------------------------------------
/sql/functions/osm_calculate_access_bicycle.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE FUNCTION calculate_access_bicycle(
2 | direction character varying,access character varying,
3 | bicycle_fwd_bkw character varying,
4 | oneway_bicycle character varying,
5 | roundabout character varying,
6 | oneway character varying,
7 | cycleway character varying,
8 | cycleway_right character varying,
9 | cycleway_left character varying,
10 | cycleway_both character varying,
11 | bicycle character varying,
12 | highway character varying
13 | )
14 | RETURNS int AS $BODY$
15 | DECLARE
16 | bike_access int;
17 | BEGIN
18 | bike_access:=case
19 | -- check bicycle_forward/bicycle_backward (depends on the direction) restrictions and permissions
20 | when bicycle_fwd_bkw='no'
21 | then 0 -- restrict access
22 | when bicycle_fwd_bkw='yes'
23 | then 1 -- allow access
24 | -- check oneway_bicycle restrictions and permissions
25 | when (direction='ft' and oneway_bicycle='opposite') or (direction='tf' and oneway_bicycle='yes')
26 | then 0 -- restrict access
27 | when (direction='ft' and oneway_bicycle='yes') or (direction='tf' and oneway_bicycle='opposite')
28 | then 1 -- allow access
29 | -- check roundabout restrictions for bkw direction
30 | when direction='tf' and roundabout='yes'
31 | then 0 -- restrict access
32 | -- check oneway restrictions and cycleway infrastructure in the opposite direction
33 | when (direction='ft' and oneway='opposite'
34 | and (cycleway!='yes' or cycleway is null)
35 | and (cycleway_right!='yes' or cycleway_right is null)
36 | and (cycleway_left!='opposite' or cycleway_left is null)
37 | and (cycleway_both!='yes' or cycleway_both is null)) or
38 | (direction='tf' and oneway='yes'
39 | and (cycleway!='opposite' or cycleway is null)
40 | and (cycleway_right!='opposite' or cycleway_right is null)
41 | and (cycleway_left!='yes' or cycleway_left is null)
42 | and (cycleway_both!='yes' or cycleway_both is null))
43 | then 0 -- restrict access
44 | -- check bicycle permissions
45 | when bicycle='no'
46 | then 0 -- restrict access
47 | when bicycle='yes'
48 | then 1 -- allow access
49 | -- check global access restrictions
50 | when access='no'
51 | then 0 -- restrict access
52 | -- check highway restrictions
53 | when highway='no' or highway is null
54 | then 0 -- restrict access
55 | else 1 -- allow access
56 | end;
57 | RETURN bike_access;
58 | END;
59 | $BODY$ LANGUAGE plpgsql;
--------------------------------------------------------------------------------
/sql/functions/osm_calculate_access_car.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE FUNCTION calculate_access_car(
2 | direction character varying,
3 | access character varying,
4 | motor_vehicle_fwd_bkw character varying,
5 | oneway character varying,
6 | oneway_motor_vehicle character varying,
7 | oneway_vehicle character varying,
8 | roundabout character varying,
9 | motor_vehicle character varying,
10 | motorcar character varying,
11 | vehicle_fwd_bkw character varying,
12 | vehicle character varying,
13 | highway character varying
14 | )
15 | RETURNS int AS $BODY$
16 | DECLARE
17 | car_access int;
18 | BEGIN
19 | car_access:=case
20 | -- check motor vehicle forward restrictions and permissions
21 | when motor_vehicle_fwd_bkw='yes'
22 | then 1 -- allow access
23 | when motor_vehicle_fwd_bkw='no'
24 | then 0 -- restrict access
25 | -- check oneway restrictions
26 | when (direction='ft' and (oneway='opposite' or oneway_motor_vehicle='opposite' or oneway_vehicle='opposite')) or
27 | (direction='tf' and (oneway='yes' or oneway_motor_vehicle='yes' or oneway_vehicle='yes'))
28 | then 0 -- restrict access
29 | -- check roundabout restrictions
30 | when direction='tf' and roundabout='yes'
31 | then 0 -- restrict access
32 | -- check motor_vehicle and motorcar restrictions and permissions
33 | when motor_vehicle='yes' or motorcar='yes'
34 | then 1 -- allow access
35 | when motor_vehicle='no' or motorcar='no'
36 | then 0 -- restrict access
37 | -- check vehicle_forward restrictions
38 | when vehicle_fwd_bkw='no'
39 | then 0 -- restrict access
40 | -- check vehicle restrictions
41 | when vehicle='no'
42 | then 0 -- restrict access
43 | -- check global access restrictions
44 | when access='no'
45 | then 0 -- restrict access
46 | -- check highway restrictions
47 | when highway='no' or highway is null
48 | then 0 -- restrict access
49 | else 1
50 | end;
51 | RETURN car_access;
52 | END;
53 | $BODY$ LANGUAGE plpgsql;
54 |
55 |
56 |
57 |
--------------------------------------------------------------------------------
/sql/functions/osm_calculate_access_pedestrian.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE FUNCTION calculate_access_pedestrian(
2 | access character varying,
3 | foot character varying,
4 | footway character varying,
5 | sidewalk character varying,
6 | highway character varying
7 | )
8 | RETURNS int AS $BODY$
9 | DECLARE
10 | ped_access int;
11 | BEGIN
12 | ped_access:=case
13 | -- check foot, footway, and sidewalk attributes
14 | when foot='yes' or footway='yes' or sidewalk='yes'
15 | then 1 -- allow access
16 | when foot='no' or footway='no'
17 | then 0 -- restrict access
18 | -- check global access restrictions
19 | when access='no'
20 | then 0 -- restrict access
21 | when highway='no' or highway is null
22 | then 0 -- restrict access
23 | else 1 -- allow access
24 | end;
25 | RETURN ped_access;
26 | END;
27 | $BODY$ LANGUAGE plpgsql;
28 |
--------------------------------------------------------------------------------
/sql/functions/osm_delete_dangling_edges.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE FUNCTION delete_dangling_edges()
2 | RETURNS void AS $$
3 | DECLARE
4 | count integer;
5 | BEGIN
6 | count := 0;
7 |
8 | DROP TABLE IF EXISTS dangling_edges;
9 | CREATE TABLE dangling_edges AS (
10 | WITH points_cnt AS ( -- 777, 184, 88, 42, 18, 8, 1
11 | SELECT geom, count(*)
12 | FROM indoor_points
13 | GROUP BY geom
14 | ),
15 | indoor_links AS (
16 | SELECT id AS link_id, geom
17 | FROM network_corrected
18 | WHERE tags -> 'indoor' = 'yes'
19 | ),
20 | intersections_links AS (
21 | SELECT a.link_id
22 | FROM indoor_links a
23 | JOIN points_cnt b ON (ST_Intersects(a.geom, b.geom))
24 | WHERE b.count > 1
25 | ),
26 | intersections_counts AS (
27 | SELECT link_id, count(*) AS cnt
28 | FROM intersections_links
29 | GROUP BY link_id
30 | )
31 | SELECT link_id
32 | FROM intersections_counts
33 | WHERE cnt < 2
34 | );
35 |
36 | count := (SELECT count(link_id) FROM dangling_edges);
37 | RAISE NOTICE 'dangling edges: %', count;
38 |
39 | WHILE count > 0 LOOP
40 | DELETE FROM indoor_points WHERE link_id IN (SELECT link_id FROM dangling_edges);
41 | DELETE FROM network_corrected WHERE id IN (SELECT link_id FROM dangling_edges);
42 |
43 | DROP TABLE IF EXISTS dangling_edges;
44 | CREATE TABLE dangling_edges AS (
45 | WITH points_cnt AS (
46 | SELECT geom, count(*)
47 | FROM indoor_points
48 | GROUP BY geom
49 | ),
50 | indoor_links AS (
51 | SELECT id AS link_id, geom
52 | FROM network_corrected
53 | WHERE tags -> 'indoor' = 'yes'
54 | ),
55 | intersections_links AS (
56 | SELECT a.link_id
57 | FROM indoor_links a
58 | JOIN points_cnt b ON (ST_Intersects(a.geom, b.geom))
59 | WHERE b.count > 1
60 | ),
61 | intersections_counts AS (
62 | SELECT link_id, count(*) AS cnt
63 | FROM intersections_links
64 | GROUP BY link_id
65 | )
66 | SELECT link_id
67 | FROM intersections_counts
68 | WHERE cnt < 2
69 | );
70 |
71 | count := (SELECT count(link_id) FROM dangling_edges);
72 | RAISE NOTICE 'dangling edges: %', count;
73 | END LOOP;
74 |
75 | DROP TABLE IF EXISTS dangling_edges;
76 | END
77 | $$ LANGUAGE plpgsql;
--------------------------------------------------------------------------------
/sql/templates/export.sql.j2:
--------------------------------------------------------------------------------
1 | -- ---------------------------------------------------------------------------------------------------------------------
2 | -- export
3 | -- ---------------------------------------------------------------------------------------------------------------------
4 |
5 | SET search_path =
6 | {{ schema_network | sqlsafe }},
7 | public;
8 |
9 | -- ---------------------------------------------------------------------------------------------------------------------
10 | -- create tables "export_edge", "export_node"
11 | -- ---------------------------------------------------------------------------------------------------------------------
12 |
13 | DROP TABLE IF EXISTS export_edge;
14 | CREATE TABLE export_edge AS (
15 | SELECT *
16 | FROM network_edge_export a
17 | JOIN network_edge_attributes b USING (edge_id)
18 | LEFT JOIN network_edge_index c USING (edge_id)
19 | );
20 |
21 | ALTER TABLE export_edge ADD PRIMARY KEY (edge_id);
22 | CREATE INDEX export_edge_geom_idx ON export_edge USING gist (geom);
23 | CREATE INDEX export_edge_from_node_idx ON export_edge (from_node);
24 | CREATE INDEX export_edge_to_node_idx ON export_edge (to_node);
25 |
26 | -- ---------------------------------------------------------------------------------------------------------------------
27 |
28 | DROP TABLE IF EXISTS export_node;
29 | CREATE TABLE export_node AS (
30 | SELECT *
31 | FROM network_node a
32 | JOIN network_node_attributes b USING (node_id)
33 | );
34 |
35 | ALTER TABLE export_node ADD PRIMARY KEY (node_id);
36 | CREATE INDEX export_node_geom_idx ON export_node USING gist (geom);
--------------------------------------------------------------------------------
/sql/templates/gip_network.sql.j2:
--------------------------------------------------------------------------------
1 | -- ---------------------------------------------------------------------------------------------------------------------
2 | -- gip_network
3 | -- ---------------------------------------------------------------------------------------------------------------------
4 |
5 | SET search_path =
6 | {{ schema_network | sqlsafe }},
7 | {{ schema_data | sqlsafe }},
8 | public;
9 |
10 | -- ---------------------------------------------------------------------------------------------------------------------
11 | -- create tables "gip_network_tmp", "gip_link_tmp", "gip_linkuse_tmp", "gip_node_tmp"
12 | -- ---------------------------------------------------------------------------------------------------------------------
13 |
14 | DROP TABLE IF EXISTS gip_network_tmp;
15 | CREATE TABLE gip_network_tmp AS ( -- 22 s, 1.947.374
16 | WITH count AS (
17 | SELECT a.link_id, 1 AS "order", NULL AS count, b.x, b.y
18 | FROM gip_link a JOIN gip_node b ON a.from_node = b.node_id
19 |
20 | UNION ALL
21 |
22 | SELECT link_id, 2 AS "order", count, x, y
23 | FROM gip_linkcoordinate
24 |
25 | UNION ALL
26 |
27 | SELECT a.link_id, 3 AS "order", NULL AS count, b.x, b.y
28 | FROM gip_link a JOIN gip_node b ON a.to_node = b.node_id
29 | )
30 | SELECT link_id,
31 | ST_SetSRID(ST_MakeLine(ST_MakePoint(x, y) ORDER BY "order", count), 4326)::geometry(LineString, 4326) AS geom
32 | FROM count
33 | GROUP BY link_id
34 | );
35 |
36 | -- ---------------------------------------------------------------------------------------------------------------------
37 |
38 | DROP TABLE IF EXISTS gip_link_tmp;
39 | CREATE TABLE gip_link_tmp AS ( -- 14 s, 1.894.917
40 | SELECT a.link_id,
41 | ST_Transform(a.geom, {{target_srid}})::geometry(LineString, {{target_srid}}) AS geom,
42 | b.name1, b.from_node, b.to_node, b.speed_tow_car, b.speed_bkw_car, b.speed_tow_truck, b.speed_bkw_truck,
43 | b.maxspeed_tow_car, b.maxspeed_bkw_car, b.maxspeed_tow_truck, b.maxspeed_bkw_truck, b.access_tow,
44 | b.access_bkw, b.funcroadclass, b.lanes_tow, b.lanes_bkw, b.formofway, b.width, b.oneway, b.streetcat
45 | FROM gip_network_tmp a
46 | JOIN gip_link b USING (link_id)
47 |     WHERE (b.access_tow::bit(8) | b.access_bkw::bit(8) & '00000111'::bit(8))::int > 0 -- access = car, bicycle, pedestrian; "|" and "&" share precedence in PostgreSQL, so this evaluates left to right
48 | AND b.formofway <> 7 -- Parkgarage
49 | );
50 |
51 | -- ---------------------------------------------------------------------------------------------------------------------
52 |
53 | DROP TABLE IF EXISTS gip_linkuse_tmp;
54 | CREATE TABLE gip_linkuse_tmp AS ( -- 17 s, 2.306.250
55 | SELECT b.*,
56 | ST_LineSubstring(ST_OffsetCurve(a.geom, b.offset_), b.from_percent / 100, b.to_percent / 100) AS geom,
57 | c.bikeenvironment, c.bikesignedtow, c.bikesignedbkw, c.bikefeaturetow, c.bikefeaturebkw
58 | FROM gip_link_tmp a
59 | JOIN gip_linkuse b USING (link_id)
60 | LEFT JOIN gip_bikehike c USING (use_id)
61 | );
62 |
63 | -- ---------------------------------------------------------------------------------------------------------------------
64 |
65 | DROP TABLE IF EXISTS gip_node_tmp;
66 | CREATE TABLE gip_node_tmp AS ( -- 3 s, 1.651.999
67 | SELECT node_id,
68 | ST_Transform(ST_SetSRID(ST_MakePoint(x, y), 4326), {{target_srid}})::geometry(Point, {{target_srid}}) AS geom,
69 | z::numeric AS elevation
70 | FROM gip_node
71 | );
72 |
73 | -- ---------------------------------------------------------------------------------------------------------------------
74 | -- create table "network_edge"
75 | -- ---------------------------------------------------------------------------------------------------------------------
76 |
77 | DROP TABLE IF EXISTS network_edge;
78 | CREATE TABLE network_edge AS ( -- 20 s, 1.894.917
79 |     WITH gip_linkuse_tmp_agg AS ( -- aggregate linkuse values per link into ';'-separated lists ('*' marks NULL entries)
80 | SELECT link_id,
81 | array_to_string(array_agg(use_id), ';', '*') AS use_id,
82 | array_to_string(array_agg(basetype), ';', '*') AS basetype,
83 | array_to_string(array_agg(offset_), ';', '*') AS offset_,
84 | array_to_string(array_agg(bikeenvironment), ';', '*') AS bikeenvironment,
85 | array_to_string(array_agg(bikesignedtow), ';', '*') AS bikesignedtow,
86 | array_to_string(array_agg(bikesignedbkw), ';', '*') AS bikesignedbkw,
87 | array_to_string(array_agg(bikefeaturetow), ';', '*') AS bikefeaturetow,
88 | array_to_string(array_agg(bikefeaturebkw), ';', '*') AS bikefeaturebkw
89 | FROM gip_linkuse_tmp
90 | GROUP BY link_id)
91 | SELECT a.link_id AS edge_id,
92 | a.*,
93 | ST_Length(a.geom) AS length,
94 | b.use_id,
95 | b.basetype,
96 | b.offset_,
97 | b.bikeenvironment,
98 | b.bikesignedtow,
99 | b.bikesignedbkw,
100 | b.bikefeaturetow,
101 | b.bikefeaturebkw
102 | FROM gip_link_tmp a
103 | LEFT JOIN gip_linkuse_tmp_agg b USING (link_id)
104 | );
105 |
106 | ALTER TABLE network_edge ADD PRIMARY KEY (edge_id); -- 2 s
107 | CREATE INDEX network_edge_geom_idx ON network_edge USING gist(geom); -- 8 s
108 |
109 | -- ---------------------------------------------------------------------------------------------------------------------
110 | -- create table "network_node"
111 | -- ---------------------------------------------------------------------------------------------------------------------
112 |
113 | DROP TABLE IF EXISTS network_node;
114 | CREATE TABLE network_node AS ( -- 6 s, 1.601.817
115 | SELECT *
116 | FROM gip_node_tmp
117 | WHERE node_id IN (SELECT from_node AS node_id FROM network_edge UNION
118 | SELECT to_node AS node_id FROM network_edge)
119 | );
120 |
121 | ALTER TABLE network_node ADD PRIMARY KEY (node_id); -- 1 s
122 | CREATE INDEX network_node_geom_idx ON network_node USING gist(geom); -- 7 s
123 |
124 | -- ---------------------------------------------------------------------------------------------------------------------
125 | -- drop tables
126 | -- ---------------------------------------------------------------------------------------------------------------------
127 |
128 | DROP TABLE IF EXISTS gip_network_tmp, gip_link_tmp, gip_linkuse_tmp, gip_node_tmp;
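129 |
130 | -- note (illustrative): "gip_linkuse_tmp" above derives one geometry per link use by offsetting
131 | -- the link line (ST_OffsetCurve) and clipping it to the use's percentage range (ST_LineSubstring);
132 | -- e.g. from_percent = 0 and to_percent = 50 covers the first half of the offset link.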
--------------------------------------------------------------------------------
/sql/templates/index.sql.j2:
--------------------------------------------------------------------------------
1 | -- ---------------------------------------------------------------------------------------------------------------------
2 | -- index
3 | -- ---------------------------------------------------------------------------------------------------------------------
4 |
5 | SET search_path =
6 | {{ schema_network | sqlsafe }},
7 | public;
8 |
9 | -- ---------------------------------------------------------------------------------------------------------------------
10 | -- create table "network_edge_index"
11 | -- ---------------------------------------------------------------------------------------------------------------------
12 |
13 | CREATE TABLE IF NOT EXISTS network_edge_index AS (
14 | SELECT edge_id
15 | FROM network_edge
16 | );
17 |
18 | -- ---------------------------------------------------------------------------------------------------------------------
19 | -- calculate "index"
20 | -- ---------------------------------------------------------------------------------------------------------------------
21 |
22 | ALTER TABLE network_edge_index DROP COLUMN IF EXISTS index_{{ profile_name | sqlsafe }}_ft;
23 | ALTER TABLE network_edge_index DROP COLUMN IF EXISTS index_{{ profile_name | sqlsafe }}_tf;
24 | ALTER TABLE network_edge_index DROP COLUMN IF EXISTS index_{{ profile_name | sqlsafe }}_ft_robustness;
25 | ALTER TABLE network_edge_index DROP COLUMN IF EXISTS index_{{ profile_name | sqlsafe }}_tf_robustness;
26 | {% if compute_explanation %}
27 | ALTER TABLE network_edge_index DROP COLUMN IF EXISTS index_{{ profile_name | sqlsafe }}_ft_explanation;
28 | ALTER TABLE network_edge_index DROP COLUMN IF EXISTS index_{{ profile_name | sqlsafe }}_tf_explanation;
29 | {% endif %}
30 |
31 | DROP TABLE IF EXISTS network_edge_index_tmp;
32 | CREATE TABLE network_edge_index_tmp AS (
33 | WITH index_filtered AS (SELECT a.edge_id,
34 | ft.index AS index_ft,
35 | tf.index AS index_tf,
36 | ft.index_robustness AS index_robustness_ft,
37 | tf.index_robustness AS index_robustness_tf
38 | {% if compute_explanation %}
39 | ,ft.index_explanation AS index_explanation_ft,
40 | tf.index_explanation AS index_explanation_tf
41 | {% endif %}
42 | FROM network_edge_index a
43 | JOIN network_edge_attributes b USING (edge_id),
44 | LATERAL calculate_index(
45 | b.bicycle_infrastructure_ft, {{ bicycle_infrastructure }},
46 | b.pedestrian_infrastructure_ft, {{ pedestrian_infrastructure }},
47 | b.designated_route_ft, {{ designated_route }},
48 | b.road_category, {{ road_category }},
49 | b.max_speed_ft, {{ max_speed }},
50 | b.max_speed_greatest, {{ max_speed_greatest }},
51 | b.parking_ft, {{ parking }},
52 | b.pavement, {{ pavement }},
53 | b.width, {{ width }},
54 | b.gradient_ft, {{ gradient }},
55 | b.number_lanes_ft, {{ number_lanes }},
56 | b.facilities, {{ facilities }},
57 | b.crossings, {{ crossings }},
58 | b.buildings, {{ buildings }},
59 | b.greenness, {{ greenness }},
60 | b.water, {{ water }},
61 | b.noise, {{ noise }}
62 | ) ft,
63 | LATERAL calculate_index(
64 | b.bicycle_infrastructure_tf, {{ bicycle_infrastructure }},
65 | b.pedestrian_infrastructure_tf, {{ pedestrian_infrastructure }},
66 | b.designated_route_tf, {{ designated_route }},
67 | b.road_category, {{ road_category }},
68 | b.max_speed_tf, {{ max_speed }},
69 | b.max_speed_greatest, {{ max_speed_greatest }},
70 | b.parking_tf, {{ parking }},
71 | b.pavement, {{ pavement }},
72 | b.width, {{ width }},
73 | b.gradient_tf, {{ gradient }},
74 | b.number_lanes_tf, {{ number_lanes }},
75 | b.facilities, {{ facilities }},
76 | b.crossings, {{ crossings }},
77 | b.buildings, {{ buildings }},
78 | b.greenness, {{ greenness }},
79 | b.water, {{ water }},
80 | b.noise, {{ noise }}
81 | ) tf
82 | WHERE
83 | false
84 | {% if access_car %}
85 | OR b.access_car_ft OR b.access_car_tf
86 | {% endif %}
87 | {% if access_bike %}
88 | OR b.access_bicycle_ft OR b.access_bicycle_tf
89 | {% endif %}
90 | {% if access_walk %}
91 | OR b.access_pedestrian_ft OR b.access_pedestrian_tf
92 | {% endif %}
93 | )
94 | SELECT a.*,
95 | b.index_ft AS index_{{ profile_name | sqlsafe }}_ft,
96 | b.index_tf AS index_{{ profile_name | sqlsafe }}_tf,
97 | b.index_robustness_ft AS index_{{ profile_name | sqlsafe }}_ft_robustness,
98 | b.index_robustness_tf AS index_{{ profile_name | sqlsafe }}_tf_robustness
99 | {% if compute_explanation %}
100 | ,b.index_explanation_ft AS index_{{ profile_name | sqlsafe }}_ft_explanation,
101 | b.index_explanation_tf AS index_{{ profile_name | sqlsafe }}_tf_explanation
102 | {% endif %}
103 | FROM network_edge_index a
104 | LEFT JOIN index_filtered b
105 | USING (edge_id)
106 | );
107 |
108 | DROP TABLE network_edge_index;
109 | ALTER TABLE network_edge_index_tmp RENAME TO network_edge_index;
110 |
111 | ALTER TABLE network_edge_index ADD PRIMARY KEY (edge_id);
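112 |
113 | -- resulting columns (illustrative, for profile_name = "bike"): index_bike_ft, index_bike_tf,
114 | -- index_bike_ft_robustness, index_bike_tf_robustness (plus index_bike_ft_explanation and
115 | -- index_bike_tf_explanation if compute_explanation is set)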
--------------------------------------------------------------------------------
/sql/templates/osm_network.sql.j2:
--------------------------------------------------------------------------------
1 | -- ---------------------------------------------------------------------------------------------------------------------
2 | -- osm_network
3 | -- ---------------------------------------------------------------------------------------------------------------------
4 |
5 | SET search_path =
6 | {{ schema_network | sqlsafe }},
7 | {{ schema_data | sqlsafe }},
8 | public;
9 |
10 | -- ---------------------------------------------------------------------------------------------------------------------
11 | -- calculate network with highways, railways and aerialways
12 | -- ---------------------------------------------------------------------------------------------------------------------
13 |
14 | DROP TABLE IF EXISTS network_init;
15 | CREATE TABLE network_init AS ( -- 19 s, 2.162.218
16 | SELECT osm_id,
17 | ST_Transform(way, {{target_srid}})::geometry(LineString, {{target_srid}}) AS geom,
18 | highway, railway, aerialway, access, "addr:housename" AS addr_housename,
19 | "addr:housenumber" AS addr_housenumber, "addr:interpolation" AS addr_interpolation, admin_level, amenity,
20 | area, barrier, bicycle, boundary, brand, building, construction, covered, culvert, cutting, denomination,
21 | disused, embankment, foot, "generator:source" AS generator_source, harbour, historic, horse, intermittent,
22 | junction, landuse, layer, leisure, lock, man_made, military, motorcar, name, "natural", office, oneway, operator,
23 | place, population, power, power_source, public_transport, ref, religion, route, service, shop, sport,
24 | surface, tags, toll, tourism, "tower:type" AS tower_type, tracktype, water, way_area, wetland, width, wood, z_order,
25 | CASE
26 | WHEN (bridge = 'no' OR bridge IS NULL) AND tags -> 'bridge:movable' IS NOT NULL THEN tags -> 'bridge:movable'
27 | WHEN (bridge = 'no' OR bridge IS NULL) AND tags -> 'bridge:structure' IS NOT NULL THEN tags -> 'bridge:structure'
28 | WHEN (bridge = 'no' OR bridge IS NULL) AND man_made = 'bridge' THEN man_made
29 | WHEN (bridge = 'no' OR bridge IS NULL) AND tags -> 'seamark:type' = 'bridge' THEN tags -> 'seamark:type'
30 | WHEN bridge = 'no' THEN NULL
31 | ELSE bridge
32 | END AS bridge,
33 | CASE
34 | WHEN (tunnel = 'no' OR tunnel IS NULL) AND covered IN ('tunnel', 'arcade', 'building_passage') THEN covered
35 | WHEN tunnel = 'no' THEN NULL
36 | ELSE tunnel
37 | END AS tunnel
38 | FROM osm_line
39 | WHERE osm_id >= 0
40 | AND (highway IN ('motorway', 'trunk', 'primary', 'secondary', 'tertiary', 'unclassified', 'residential',
41 | 'motorway_link', 'trunk_link', 'primary_link', 'secondary_link', 'tertiary_link', 'living_street',
42 | 'service', 'pedestrian', 'track', 'bus_guideway', 'escape', 'road', 'footway', 'bridleway',
43 | 'steps', 'path', 'cycleway', 'construction', 'crossing', 'emergency_bay', 'platform')
44 | {% if include_rail %}
45 | OR
46 | railway IN ('construction', 'funicular', 'light_rail', 'monorail', 'narrow_gauge', 'rail', 'subway', 'tram',
47 | 'turntable', 'traverser')
48 | {% endif %}
49 | {% if include_aerialway %}
50 | OR
51 | aerialway IN ('cable_car', 'gondola', 'mixed_lift', 'chair_lift', 'drag_lift', 't-bar', 'j-bar', 'platter')
52 | {% endif %}
53 | )
54 | );
55 |
56 | -- ---------------------------------------------------------------------------------------------------------------------
57 | -- correct intersections
58 | -- ---------------------------------------------------------------------------------------------------------------------
59 |
60 | -- calculate start and end points for the whole network
61 | DROP TABLE IF EXISTS link_points;
62 | CREATE TABLE link_points AS ( -- 18 s, 2.162.218
63 | SELECT osm_id,
64 | geom,
65 | bridge,
66 | tunnel,
67 | CASE WHEN tags -> 'conveying' = 'no' THEN NULL ELSE tags -> 'conveying' END AS escalator,
68 | CASE WHEN tags -> 'indoor' = 'no' THEN NULL ELSE tags -> 'indoor' END AS indoor,
69 | string_to_array(layer, ';') || string_to_array(tags -> 'level', ';') AS layer,
70 | ST_StartPoint(geom) as geom_startpoint,
71 | ST_EndPoint(geom) as geom_endpoint
72 | FROM network_init
73 | );
74 |
75 | CREATE INDEX link_points_geom_idx ON link_points USING gist(geom); -- 11 s
76 |
77 | -- calculate intersections
78 | DROP TABLE IF EXISTS intersections_all;
79 | CREATE TABLE intersections_all AS ( -- 53 s, 1.673.203
80 | SELECT ST_Intersection(a.geom, b.geom) AS geom,
81 | a.osm_id AS osm_id_1,
82 | a.bridge AS bridge_1,
83 | a.tunnel AS tunnel_1,
84 | a.escalator AS escalator_1,
85 | a.indoor AS indoor_1,
86 | a.layer AS layer_1,
87 | a.geom_startpoint AS geom_startpoint_1,
88 | a.geom_endpoint AS geom_endpoint_1,
89 | b.osm_id AS osm_id_2,
90 | b.bridge AS bridge_2,
91 | b.tunnel AS tunnel_2,
92 | b.escalator AS escalator_2,
93 | b.indoor AS indoor_2,
94 | b.layer AS layer_2,
95 | b.geom_startpoint AS geom_startpoint_2,
96 | b.geom_endpoint AS geom_endpoint_2
97 | FROM link_points AS a,
98 | link_points AS b
99 | WHERE ST_Intersects(a.geom, b.geom)
100 | AND NOT ST_Equals(a.geom_startpoint, b.geom_startpoint)
101 | AND NOT ST_Equals(a.geom_startpoint, b.geom_endpoint)
102 | AND NOT ST_Equals(a.geom_endpoint, b.geom_startpoint)
103 | AND NOT ST_Equals(a.geom_endpoint, b.geom_endpoint)
104 | AND a.osm_id < b.osm_id
105 | );
106 |
107 | -- turn geometry collections and multipoints into points
108 | INSERT INTO intersections_all -- 1 s, 189
109 | SELECT (ST_Dump(ST_CollectionExtract(geom, 1))).geom AS geom,
110 | osm_id_1, bridge_1, tunnel_1, escalator_1, indoor_1, layer_1, geom_startpoint_1, geom_endpoint_1,
111 | osm_id_2, bridge_2, tunnel_2, escalator_2, indoor_2, layer_2, geom_startpoint_2, geom_endpoint_2
112 | FROM intersections_all
113 | WHERE ST_GeometryType(geom) = 'ST_GeometryCollection';
114 |
115 | INSERT INTO intersections_all -- 1 s, 110.743
116 | SELECT (ST_Dump(geom)).geom AS geom,
117 | osm_id_1, bridge_1, tunnel_1, escalator_1, indoor_1, layer_1, geom_startpoint_1, geom_endpoint_1,
118 | osm_id_2, bridge_2, tunnel_2, escalator_2, indoor_2, layer_2, geom_startpoint_2, geom_endpoint_2
119 | FROM intersections_all
120 | WHERE ST_GeometryType(geom) = 'ST_MultiPoint';
121 |
122 | -- delete intersections of geometry types: geometry collection, multipoint, linestring and multilinestring
123 | DELETE FROM intersections_all -- 5 s, 54.314
124 | WHERE ST_GeometryType(geom) = 'ST_GeometryCollection'
125 | OR ST_GeometryType(geom) = 'ST_LineString'
126 | OR ST_GeometryType(geom) = 'ST_MultiLineString'
127 | OR ST_GeometryType(geom) = 'ST_MultiPoint';
128 |
129 | -- delete intersections that are at the same point
130 | DROP TABLE IF EXISTS intersections;
131 | CREATE TABLE intersections AS ( -- 15 s, 1.654.581
132 | -- TODO: DISTINCT ON might not work here
133 | SELECT DISTINCT ON (geom) row_number() OVER (ORDER BY geom, osm_id_1, bridge_1, tunnel_1, escalator_1, indoor_1, layer_1, geom_startpoint_1, geom_endpoint_1, osm_id_2, bridge_2, tunnel_2, escalator_2, indoor_2, layer_2, geom_startpoint_2, geom_endpoint_2) AS id, *
134 | FROM intersections_all
135 | );
136 |
137 | -- correct intersections at bridges
138 | DELETE FROM intersections -- 2 s, 28.381
139 | WHERE id IN (
140 | SELECT id
141 | FROM intersections
142 | WHERE ((bridge_1 IS NOT NULL OR bridge_2 IS NOT NULL) OR
143 | (escalator_1 IS NOT NULL OR escalator_2 IS NOT NULL))
144 | AND NOT ST_Intersects(geom, geom_startpoint_1)
145 | AND NOT ST_Intersects(geom, geom_endpoint_1)
146 | AND NOT ST_Intersects(geom, geom_startpoint_2)
147 | AND NOT ST_Intersects(geom, geom_endpoint_2)
148 | );
149 |
150 | -- correct intersections at links with different layers that are not at the endpoints
151 | DELETE FROM intersections -- 2 s, 24.825
152 | WHERE id IN (
153 | SELECT id
154 | FROM intersections
155 | WHERE ((layer_1 IS NULL AND layer_2 IS NOT NULL) OR
156 | (layer_1 IS NOT NULL AND layer_2 IS NULL) OR
157 | (layer_1 && layer_2 = false))
158 | AND NOT ST_Intersects(geom, geom_startpoint_1)
159 | AND NOT ST_Intersects(geom, geom_endpoint_1)
160 | AND NOT ST_Intersects(geom, geom_startpoint_2)
161 | AND NOT ST_Intersects(geom, geom_endpoint_2)
162 |
163 | );
164 |
165 | -- correct intersections between tunnels and non-indoor/non-tunnel links that are not at the endpoints
166 | DELETE FROM intersections -- 1 s, 114
167 | WHERE id IN (
168 | SELECT id
169 | FROM intersections
170 | WHERE layer_1 IS NULL
171 | AND layer_2 IS NULL
172 | AND ((tunnel_1 IS NOT NULL AND tunnel_2 IS NULL AND indoor_2 IS NULL) OR
173 | (tunnel_2 IS NOT NULL AND tunnel_1 IS NULL AND indoor_1 IS NULL))
174 | AND NOT ST_Intersects(geom, geom_startpoint_1)
175 | AND NOT ST_Intersects(geom, geom_endpoint_1)
176 | AND NOT ST_Intersects(geom, geom_startpoint_2)
177 | AND NOT ST_Intersects(geom, geom_endpoint_2)
178 | );
179 |
180 | -- delete intersections between links with different layers that are not at the ends of the links
181 | WITH first_layer AS ( -- 34 s, 27
182 | SELECT a.id,
183 | ST_Buffer(a.geom, 0.01) AS intersection_geom,
184 | a.osm_id_1,
185 | a.osm_id_2,
186 | b.layer AS layer_1,
187 | b.geom AS geom_1
188 | FROM intersections a
189 | JOIN network_init b ON (a.osm_id_1 = b.osm_id)
190 | ),
191 | second_layer AS (
192 | SELECT c.*, d.layer AS layer_2, d.geom AS geom_2
193 | FROM first_layer c
194 | JOIN network_init d ON (c.osm_id_2 = d.osm_id)
195 | ),
196 | different_layers AS (
197 | SELECT id, intersection_geom, geom_1, geom_2
198 | FROM second_layer
199 | WHERE layer_1 != layer_2
200 | )
201 | DELETE
202 | FROM intersections
203 | WHERE id IN (
204 | SELECT id
205 | FROM different_layers
206 | WHERE NOT ST_Intersects(intersection_geom, ST_Startpoint(geom_1))
207 | AND NOT ST_Intersects(intersection_geom, ST_Endpoint(geom_1))
208 | AND NOT ST_Intersects(intersection_geom, ST_Startpoint(geom_2))
209 | AND NOT ST_Intersects(intersection_geom, ST_Endpoint(geom_2))
210 | );
211 |
212 | -- calculate links that have to be split
213 | DROP TABLE IF EXISTS intersecting_links;
214 | CREATE TABLE intersecting_links AS ( -- 12 s, 1.573.644
215 | WITH osm_ids AS (
216 | SELECT osm_id_1 AS osm_id, geom
217 | FROM intersections
218 | UNION
219 | SELECT osm_id_2 AS osm_id, geom
220 | FROM intersections
221 | )
222 | SELECT osm_id, ST_Union(geom) AS intersection_geom
223 | FROM osm_ids
224 | GROUP BY osm_id
225 | );
226 |
227 | -- split links at intersections and add the remaining (unsplit) links to the new table; note that osm_id will no longer be unique.
228 | CREATE TABLE network_corrected AS ( -- 32 s, 3.876.291
229 | SELECT a.osm_id,
230 | (ST_Dump(ST_Split(a.geom, b.intersection_geom))).geom::geometry(LineString, {{target_srid}}) AS geom,
231 | highway, railway, aerialway, access, addr_housename, addr_housenumber, addr_interpolation, admin_level,
232 | amenity, area, barrier, bicycle, boundary, brand, building, construction, covered, culvert, cutting,
233 | denomination, disused, embankment, foot, generator_source, harbour, historic, horse, intermittent, junction,
234 | landuse, layer, leisure, lock, man_made, military, motorcar, name, "natural", office, oneway, operator,
235 | place, population, power, power_source, public_transport, ref, religion, route, service, shop, sport,
236 | surface, tags, toll, tourism, tower_type, tracktype, water, way_area, wetland, width, wood, z_order, bridge,
237 | tunnel
238 | FROM network_init a
239 | JOIN intersecting_links b USING (osm_id)
240 |
241 | UNION
242 |
243 | SELECT *
244 | FROM network_init c
245 | WHERE NOT exists(SELECT FROM intersecting_links d WHERE c.osm_id = d.osm_id)
246 | );
247 |
248 | CREATE INDEX network_corrected_geom_idx ON network_corrected USING gist (geom); -- 16 s
249 |
250 | ALTER TABLE network_corrected ADD COLUMN id serial; -- 39 s
251 |
252 | -- ---------------------------------------------------------------------------------------------------------------------
253 | -- correct indoor links
254 | -- ---------------------------------------------------------------------------------------------------------------------
255 |
256 | -- calculate existing intersections (end points) at indoor links
257 | DROP TABLE IF EXISTS network_corrected_points;
258 | CREATE TABLE network_corrected_points AS ( -- 18 s, 7.752.582
259 | WITH points AS (
260 | SELECT id AS link_id, ST_StartPoint(geom) AS geom
261 | FROM network_corrected
262 |
263 | UNION ALL
264 |
265 | SELECT id AS link_id, ST_EndPoint(geom) AS geom
266 | FROM network_corrected
267 | )
268 | SELECT row_number() OVER () AS id, *
269 | FROM points
270 | );
271 |
272 | CREATE INDEX network_corrected_points_geom_idx ON network_corrected_points USING gist (geom); -- 47 s
273 |
274 | -- ---------------------------------------------------------------------------------------------------------------------
275 |
276 | DROP TABLE IF EXISTS indoor_points;
277 | CREATE TABLE indoor_points AS ( -- 8 s, 14181
278 | WITH indoor_links AS (
279 | SELECT geom
280 | FROM network_corrected
281 | WHERE tags -> 'indoor' = 'yes'
282 | ),
283 | intersecting_points AS (
284 | SELECT b.id
285 | FROM indoor_links a,
286 | network_corrected_points b
287 | WHERE ST_Intersects(b.geom, a.geom)
288 | ),
289 | unioned_points AS (
290 | SELECT id
291 | FROM intersecting_points
292 | GROUP BY id
293 | )
294 | SELECT b.*
295 | FROM unioned_points a
296 | JOIN network_corrected_points b USING (id)
297 | );
298 |
299 | -- ---------------------------------------------------------------------------------------------------------------------
300 |
301 | -- delete edges with dangling ends for indoor edges
302 | SELECT delete_dangling_edges();
303 |
304 | -- ---------------------------------------------------------------------------------------------------------------------
305 | -- calculate final table with new id and native osm attributes only
306 | -- ---------------------------------------------------------------------------------------------------------------------
307 |
308 | ALTER TABLE network_corrected DROP COLUMN IF EXISTS id;
309 |
310 | DROP TABLE IF EXISTS network_tmp;
311 | CREATE TABLE network_tmp AS ( -- 17 s, 3.875.173
312 | SELECT row_number() OVER (ORDER BY osm_id) AS edge_id,
313 | osm_id, geom, highway, railway, aerialway, access, addr_housename, addr_housenumber, addr_interpolation,
314 | admin_level, amenity, area, barrier, bicycle, boundary, brand, building, construction, covered, culvert,
315 | cutting, denomination, disused, embankment, foot, generator_source, harbour, historic, horse, intermittent,
316 | junction, landuse, layer, leisure, lock, man_made, military, motorcar, name, "natural", office, oneway,
317 | operator, place, population, power, power_source, public_transport, ref, religion, route, service, shop,
318 | sport, surface, tags, toll, tourism, tower_type, tracktype, water, way_area, wetland, width, wood, z_order,
319 | bridge, tunnel
320 | FROM network_corrected
321 | );
322 |
323 | -- ---------------------------------------------------------------------------------------------------------------------
324 | -- create tables "network_edge", "network_node"
325 | -- ---------------------------------------------------------------------------------------------------------------------
326 |
327 | DROP TABLE IF EXISTS network_point;
328 | CREATE TABLE network_point AS ( -- 9 s
329 | SELECT edge_id,
330 | ST_StartPoint(geom)::geometry(Point, {{target_srid}}) AS geom_startpoint,
331 | ST_EndPoint(geom)::geometry(Point, {{target_srid}}) AS geom_endpoint
332 | FROM network_tmp
333 | );
334 |
335 | DROP TABLE IF EXISTS network_node;
336 | CREATE TABLE network_node AS ( -- 20 s, 3.215.892
337 | WITH a AS (
338 | SELECT DISTINCT geom_startpoint AS geom FROM network_point
339 | UNION
340 | SELECT DISTINCT geom_endpoint AS geom FROM network_point
341 | )
342 | SELECT row_number() OVER (ORDER BY geom)::integer AS node_id,
343 | geom
344 | FROM a
345 | GROUP BY geom
346 | );
347 |
348 | ALTER TABLE network_node ADD PRIMARY KEY (node_id); -- 2 s
349 | CREATE INDEX network_node_geom_idx ON network_node USING gist(geom); -- 20 s
350 |
351 | DROP TABLE IF EXISTS network_edge;
352 | CREATE TABLE network_edge AS ( -- 1 m 13 s, 3.875.334
353 | SELECT a.*,
354 | c.node_id as from_node,
355 | d.node_id as to_node,
356 | ST_Length(a.geom) AS length
357 | FROM network_tmp a
358 | JOIN network_point b ON a.edge_id = b.edge_id
359 | JOIN network_node c ON b.geom_startpoint = c.geom
360 | JOIN network_node d ON b.geom_endpoint = d.geom
361 | );
362 |
363 | ALTER TABLE network_edge ADD PRIMARY KEY (edge_id); -- 3 s
364 | CREATE INDEX network_edge_geom_idx ON network_edge USING gist(geom); -- 21 s
365 |
366 | DROP TABLE network_point;
367 |
368 | -- ---------------------------------------------------------------------------------------------------------------------
369 | -- drop tables
370 | -- ---------------------------------------------------------------------------------------------------------------------
371 |
372 | DROP TABLE IF EXISTS indoor_points, intersecting_links, intersections, intersections_all, link_points,
373 | network_corrected, network_corrected_points, network_init, network_tmp;
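374 |
375 | -- optional sanity check (illustrative; both queries are expected to return 0):
376 | --   SELECT count(*) FROM network_edge e LEFT JOIN network_node n ON e.from_node = n.node_id WHERE n.node_id IS NULL;
377 | --   SELECT count(*) FROM network_edge e LEFT JOIN network_node n ON e.to_node = n.node_id WHERE n.node_id IS NULL;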
--------------------------------------------------------------------------------
/toolbox/dbhelper.py:
--------------------------------------------------------------------------------
1 | import psycopg2 as psy
2 | import toolbox.helper as h
3 | from jinjasql import JinjaSql
4 | from typing import List
5 | import re
6 |
7 |
8 | class PostgresConnection:
9 |
10 |     @staticmethod
11 |     def from_settings_object(settings_object):
12 |         """Creates a PostgresConnection object from a settings object instead of individual params."""
13 | return PostgresConnection(settings_object.dbname, settings_object.username, settings_object.password,
14 | settings_object.host or None, settings_object.port or None, settings_object.on_existing)
15 |
16 | # constructor
17 | def __init__(self, dbname: str, user: str = "postgres", pw: str = "postgres", host: str = "localhost", port: int = 5432, on_existing: str = "abort"):
18 | self._dbname = dbname
19 | self._user = user
20 | self._pw = pw
21 | self._host = host
22 | self._port = port
23 | self._schema = "public"
24 | self._on_existing = on_existing
25 |
26 | _con = None
27 | _cur = None
28 |
29 | @property
30 | def port(self):
31 | return self._port
32 |
33 | @property
34 | def host(self):
35 | return self._host
36 |
37 | @property
38 | def user(self):
39 | return self._user
40 |
41 | @property
42 | def pw(self):
43 | return self._pw
44 |
45 | @property
46 | def dbname(self):
47 | return self._dbname
48 |
49 | # read-only connection string
50 | def get_connection_string(self) -> str:
51 | return f"postgresql://{self.user}:{self.pw}@{self.host}:{str(self.port)}/{self.dbname}"
52 | connection_string: str = property(get_connection_string)
53 |
54 | def get_connection_string_old(self) -> str:
55 | if self.pw:
56 | return f"dbname='{self.dbname}' host='{self.host}' port='{str(self.port)}' user='{self.user}' password='{self.pw}'"
57 | else:
58 | return f"dbname='{self.dbname}' host='{self.host}' port='{str(self.port)}' user='{self.user}'"
59 | connection_string_old: str = property(get_connection_string_old)
60 |
61 | def connect(self):
62 | # skip if already connected
63 | if self._con:
64 | return
65 | # connect
66 | h.log(f"connecting to database '{self._dbname}' on {self._host}:{self._port}...")
67 | # create db connection
68 | try:
69 |             self._con: psy.connection = psy.connect(dbname=self._dbname, host=self._host, port=self._port, user=self._user, password=self._pw,
70 |                                                     connect_timeout=3, keepalives=1, keepalives_idle=5, keepalives_interval=2, keepalives_count=2)
71 |         except psy.Error as e:
72 |             h.log(f"ERROR while connecting to database: Error {e.pgcode} - {e.pgerror}")
73 |             raise Exception("ERROR while connecting to database. Terminating.")
74 |         # retrieve cursor
75 |         self._cur: psy.cursor = self._con.cursor()
76 |
77 | # define functions
78 | def ex(self, query, vars=None):
79 |         if self._cur is None:
80 | self.connect()
81 | self._cur.execute(query, vars)
82 |
83 | def execute(self, query, vars=None):
84 | self.ex(query, vars)
85 |
86 | def commit(self):
87 |         if self._cur is None:
88 | raise Exception("Called commit for non-existing database connection.")
89 | self._con.commit()
90 |
91 | def rollback(self):
92 |         if self._cur is None:
93 | raise Exception("Called rollback for non-existing database connection.")
94 | self._con.rollback()
95 |
96 | # define getters
97 |
98 | def get_connection(self):
99 | return self._con
100 |
101 | con = property(get_connection)
102 |
103 | def get_cursor(self):
104 | return self._cur
105 |
106 | cur = property(get_cursor)
107 |
108 | # define extended setters and getters
109 |
110 | def set_working_schema(self, schema: str):
111 | print(f"setting working schema to: {schema}, public")
112 | self.ex(f"SET search_path = {schema}, public")
113 | self._schema = schema
114 |
115 |     def get_working_schema(self) -> str:
116 | return self._schema
117 |
118 | schema = property(get_working_schema, set_working_schema)
119 |
120 | # shorthand methods
121 |
122 | def query_one(self, *args):
123 | if len(args) > 1:
124 | self.ex(args[0], args[1])
125 | else:
126 | self.ex(args[0])
127 | return self.cur.fetchone()
128 |
129 | def query_all(self, *args):
130 | if len(args) > 1:
131 | self.ex(args[0], args[1])
132 | else:
133 | self.ex(args[0])
134 | return self.cur.fetchall()
135 |
136 | def close(self, commit_before_close=False):
137 | if self._con is not None:
138 | if commit_before_close:
139 | self._con.commit()
140 | self._con.close()
141 | print("DB connection closed.")
142 | self._con = None
143 | self._cur = None
144 | else:
145 | print("ERROR closing connection: no connection available.")
146 |
147 | # own methods
148 |
149 | def init_extensions_and_schema(self, schema):
150 | # create extensions
151 | h.log('create extensions')
152 | self.create_extension("postgis", "public")
153 | self.create_extension("postgis_raster", "public")
154 | self.create_extension("hstore", "public")
155 |
156 | # create schema
157 |         h.log(f"create schema '{schema}' if not exists")
158 | self.create_schema(schema)
159 |
160 | # set search path
161 | h.log('set search path')
162 | self.schema = schema
163 | self.commit()
164 |
165 |
166 | # checks whether the given entity (e.g. table) exists within the database
167 | def exists(self, entity: str, schema: str = None) -> bool:
168 |         if schema is None:
169 |             schema = self.schema
170 |         h.log(f"Checking whether entity exists: SELECT to_regclass('{schema + '.' + entity}');", h.LOG_LEVEL_4_DEBUG)
171 |         result = self.query_one(f"SELECT to_regclass('{schema + '.' + entity}');")[0] is not None
172 | h.log(f"...returned {result}")
173 | return result
174 |
175 | def use_if_exists(self, entity: str, schema: str = None) -> str:
176 | orig_entity = entity
177 | # try to extract schema from entity string if not specified
178 |         if schema is None:
179 | spl = entity.split(".")
180 | if len(spl) > 1:
181 | entity = spl[1]
182 | schema = spl[0]
183 | if self.exists(entity, schema):
184 | return orig_entity
185 | return None
186 |
187 | def column_exists(self, column_name: str, schema: str, table: str) -> bool:
188 |         if schema is None:
189 | schema = self.schema
190 | h.log(f"Checking whether column '{column_name}' exists in table '{schema}.{table}'", h.LOG_LEVEL_4_DEBUG)
191 | return self.query_one("""SELECT EXISTS (SELECT 1
192 | FROM information_schema.columns
193 | WHERE table_schema=%s AND table_name=%s AND column_name=%s)""",
194 | (schema, table, column_name))[0]
195 |
196 | def set_autocommit(self, autocommit: bool, pre_commit: bool = True):
197 | if autocommit:
198 | if pre_commit:
199 | self.con.commit()
200 | self._con.set_session(readonly=False, autocommit=True)
201 | else:
202 | self._con.set_session(readonly=False, autocommit=False)
203 |
204 | def vacuum(self, table, schema = None):
205 | # need to enable auto-commit for VACUUM command - not possible in transaction mode
206 | self.set_autocommit(True, pre_commit=True)
207 |         if schema is None:
208 | self.ex("VACUUM FULL ANALYZE " + table)
209 | else:
210 | self.ex("VACUUM FULL ANALYZE " + schema + "." + table)
211 | # reset to manual commit again
212 | self.set_autocommit(False)
213 |
214 | def helper_replace_vacuum_statements(self, sql_string) -> str:
215 | return re.sub("vacuum full analyze", "ANALYZE", sql_string, flags=re.I)
216 |
217 | def geom_reproject(self, table, geomType, srid):
218 | h.log(f"reprojecting {geomType} geometry in table {table} to SRID {srid}", h.LOG_LEVEL_2_INFO)
219 | self.ex("""ALTER TABLE """ + table + """
220 | ALTER COLUMN geom
221 | TYPE Geometry(%s, %s)
222 | USING ST_Transform(geom, %s);""", (geomType, srid, srid))
223 | self.commit()
224 |
225 | def create_extension(self, extension: str, schema: str = None):
226 | h.log(f"creating extension: {extension}")
227 | if schema != None and len(schema) > 0:
228 | self.ex(f"CREATE EXTENSION IF NOT EXISTS {extension} WITH SCHEMA {schema};")
229 | else:
230 | self.ex(f"CREATE EXTENSION IF NOT EXISTS {extension};")
231 |
232 | def create_common_extensions(self):
233 | h.log("Setting up common DB extensions...", h.LOG_LEVEL_2_INFO)
234 | self.create_extension("postgis")
235 | self.create_extension("pgrouting")
236 | self.create_extension("hstore")
237 |
238 | def add_primary_key(self, table: str, columns: List[str], schema: str = None):
239 | h.log(f"Altering table {table} adding primary key")
240 | if schema is None:
241 | self.ex(f"ALTER TABLE {table} ADD PRIMARY KEY ({', '.join(columns)});")
242 | else:
243 | self.ex(f"ALTER TABLE {schema}.{table} ADD PRIMARY KEY ({', '.join(columns)});")
244 |
245 | def drop_table(self, table, cascade: bool = True, schema: str = None):
246 | casc = "CASCADE" if cascade else ""
247 | h.log(f"Dropping table {table} if exists {casc}")
248 |         if schema is None:
249 | self.ex(f"DROP TABLE IF EXISTS {table} {casc};")
250 | else:
251 | self.ex(f"DROP TABLE IF EXISTS {schema}.{table} {casc};")
252 |
253 | def create_schema(self, schema: str):
254 | h.log(f"Creating schema {schema} if not exists")
255 | self.ex(f"CREATE SCHEMA IF NOT EXISTS {schema};")
256 |
257 | def drop_schema(self, schema, cascade: bool):
258 | casc = "CASCADE" if cascade else ""
259 | h.log(f"Dropping schema {schema} if exists {casc}")
260 | self.ex(f"DROP SCHEMA IF EXISTS {schema} {casc};")
261 |
262 | def verify_input_tables_exist(self, tables: List[str], schema: str = None):
263 |         if schema is None:
264 | schema = self.schema
265 | for table in tables:
266 | if not self.exists(table, schema):
267 | raise Exception(
268 | f"ERROR: at least one of the input tables does not exist. Please resolve issue and try again. Table: '{schema}.{table}'")
269 |
270 |     def handle_conflicting_output_tables(self, tables: List[str], schema: str = None, on_existing: str = None) -> bool:
271 |         # returns True if tables were dropped or do not exist (process can continue normally); False if the step should be skipped
272 |         if on_existing is None:
273 |             on_existing = self._on_existing
274 |         t_exists: bool = False
275 | for table in tables:
276 | if self.exists(table, schema):
277 | t_exists = True
278 | if not t_exists:
279 | return True # no conflicts -> all good
280 | if on_existing == "skip":
281 | return False # return False, calling code needs to handle skipping of step
282 | if on_existing == "delete":
283 | for table in tables:
284 | if self.exists(table, schema):
285 |                     self.drop_table(table, cascade=True, schema=schema)
286 | self.commit()
287 | return True
288 | # case "abort" / in general: throw Error
289 | raise Exception("Output table(s) already exist. Please resolve conflict manually or specify 'on_existing' parameter to delete or skip existing tables.")
290 |
291 |     def execute_sql_from_file(self, script_file_name, subdir: str = "sql/"):
292 |         if not (subdir.endswith("/") or subdir.endswith("\\")):
293 |             subdir += "/"
294 | h.log(f"Executing SQL from file: '{subdir}{script_file_name}.sql'")
295 | # load sql template
296 | with open(f"{subdir}{script_file_name}.sql", "r") as sqlfile:
297 | sql = sqlfile.read()
298 | self.ex(sql)
299 |
300 | def execute_sql_template_string(self, template: str, parameters: dict, override_parameters: dict = None) -> None:
301 |         '''
302 |         Apply a JinjaSql template (string), substituting parameters (dict), and execute
303 |         the final SQL. If override_parameters is given, parameters with the same key
304 |         name are replaced by the values from override_parameters.
305 |         Make sure to use " | sqlsafe" for table or schema names in the template - otherwise they get string-quoted, which leads to an error.
306 |         '''
307 |
308 | # if override_parameters are given, check whether given keys exist in params and replace them accordingly
309 | template_params = h.overrideParams(parameters, override_parameters)
310 |
311 | j = JinjaSql(param_style='pyformat')
312 | query, bind_params = j.prepare_query(template, template_params)
313 | h.log("query: " + str(query), h.LOG_LEVEL_4_DEBUG)
314 | #dbg_file = open("debug_sql.sql", "w")
315 | #n = dbg_file.write(query)
316 | #dbg_file.close()
317 | h.log("bind params: " + str(bind_params), h.LOG_LEVEL_4_DEBUG)
318 | self.ex(query, bind_params)
319 | # TODO: error handling and reporting
320 |
321 | def execute_template_sql_from_file(self, template_file_name: str, parameters: dict, override_parameters: dict = None,
322 | autocommit: bool = True, template_subdir="sql/templates/") -> None:
323 | h.log(f"Loading SQL template from '{template_subdir}{template_file_name}.sql.j2'...")
324 | with open(f"{template_subdir}{template_file_name}.sql.j2", "r") as sqlfile:
325 | sql = sqlfile.read()
326 | # current workaround for vacuum full analyze not working in multiple-statement SQL string: replace with ANALYZE
327 | # see issue on Gitlab for further info
328 | sql = self.helper_replace_vacuum_statements(sql) # TODO: remove in future
329 | # execute SQL
330 | h.log("Executing SQL statements...")
331 | if autocommit:
332 | self.set_autocommit(True, pre_commit=True) # set autocommit TRUE, so VACUUM can take place in SQL
333 | self.execute_sql_template_string(sql, parameters, override_parameters)
334 | if autocommit:
335 | self.set_autocommit(False) # reset to manual commit mode
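336 |
337 |
338 | # Example usage (illustrative sketch only; "netascore" and "my_schema" are placeholder names --
339 | # adjust dbname/user/pw/host/port to your environment):
340 | #
341 | #   db = PostgresConnection("netascore", user="postgres", pw="postgres")
342 | #   db.connect()
343 | #   db.init_extensions_and_schema("my_schema")  # requires the postgis, postgis_raster and hstore extensions to be installable
344 | #   if db.exists("network_edge", schema="my_schema"):
345 | #       print(db.query_one("SELECT count(*) FROM my_schema.network_edge")[0])
346 | #   db.close(commit_before_close=True)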
--------------------------------------------------------------------------------
/toolbox/helper.py:
--------------------------------------------------------------------------------
1 | from functools import reduce
2 | import atexit
3 | from time import perf_counter as clock
4 | import sys
5 | from typing import List
6 | from datetime import datetime as dt
7 | import re
8 |
9 | ### LOGGING ###
10 |
11 | LOG_LEVEL_4_DEBUG = 4
12 | LOG_LEVEL_3_DETAILED = 3
13 | LOG_LEVEL_2_INFO = 2
14 | LOG_LEVEL_1_MAJOR_INFO = 1
15 |
16 | def secondsToStr(t, detailed:bool = False):
17 |     # format elapsed seconds as h:mm:ss (or h:mm:ss.mmm if detailed)
18 | if detailed:
19 | return "%d:%02d:%02d.%03d" % \
20 | reduce(lambda ll,b : divmod(ll[0],b) + ll[1:],
21 | [(t*1000,),1000,60,60])
22 |
23 | return "%d:%02d:%02d" % \
24 | reduce(lambda ll,b : divmod(ll[0],b) + ll[1:],
25 | [(t,),60,60])
26 |
27 | line = "="*40
28 | lineT = "-"*40
29 | lineTime = "-"*11
30 |
31 | verbose_level = LOG_LEVEL_2_INFO
32 |
33 | # level: debug message detail level - 1: Major info - 2: INFO - 3: Detailed - 4: Debug level
34 | def log(msg: str, level=LOG_LEVEL_3_DETAILED, elapsed=None):
35 |     if verbose_level < level:
36 |         return
37 |     if level < LOG_LEVEL_4_DEBUG:
38 |         # print(line)
39 |         print(secondsToStr(clock()), '-', msg)
40 |     else:
41 |         print(msg)
42 |     if elapsed:
43 |         print("Elapsed time:", elapsed)
44 |     if level < LOG_LEVEL_3_DETAILED:
45 |         print(line)
46 |
47 | # shorthand functions
48 |
49 | def majorInfo(msg: str):
50 | log(msg, LOG_LEVEL_1_MAJOR_INFO)
51 |
52 | def info(msg: str):
53 | log(msg, LOG_LEVEL_2_INFO)
54 |
55 | def debugLog(msg: str):
56 | log(msg, LOG_LEVEL_4_DEBUG)
57 |
58 | # task logging with time tracking
59 |
60 | def logBeginTask(s, level = LOG_LEVEL_2_INFO):
61 | global startT, taskS
62 |     if verbose_level < level:
63 | return
64 | taskS = s
65 | print()
66 | print(line)
67 | print(">>> at", dt.now().strftime('%H:%M:%S'), ">>> ", s, "")
68 | startT = clock()
69 |
70 | def logEndTask():
71 | global taskS, startT
72 | if taskS is None:
73 | return
74 | print(">>> ", taskS, "completed.")
75 | print(lineT)
76 | print(lineTime, "took", secondsToStr(clock()-startT, detailed=True), lineTime)
77 | print(line)
78 | print()
79 | taskS = None
80 |
81 | def endlog():
82 | end = clock()
83 | elapsed = end-start
84 | info("Thank you for using NetAScore!")
85 | info(f"Program terminating after {secondsToStr(elapsed)} (hr:min:sec) at {dt.now().strftime('%H:%M:%S')}.")
86 |
87 | def now():
88 | return secondsToStr(clock())
89 |
90 | def get_current_log_level():
91 | return verbose_level
92 |
93 | start = clock()
94 | startT = clock()
95 | taskS = None
96 | atexit.register(endlog)
97 | log("Program started.")
98 |
99 |
100 | ### other helper functions
101 |
102 | def overrideParams(orig: dict, override: dict) -> dict:
103 |     if orig is None:
104 |         raise Exception("ERROR: original dict is None!")
105 |     if override is None:
106 | log("No override parameters given -> skipping.")
107 | return orig
108 | result = orig.copy()
109 | log("Replacing default parameter values with given override values")
110 | for key in override:
111 | log(f"{key}: {override[key]}")
112 | if key in result:
113 | log(f" -> replacing original value '{result[key]}' with '{override[key]}'")
114 | result[key] = override[key]
115 | else:
116 | log(f" -> key '{key}' does not exist. Skipping.")
117 | return result
118 |
119 |
120 | def require_keys(settings: dict, required_keys: List[str], error_message: str = "A required settings key was not provided. Terminating."):
121 | if not has_keys(settings, required_keys, loglevel=1):
122 | majorInfo(error_message)
123 | sys.exit(1)
124 |
125 | def has_keys(settings: dict, keys: List[str], loglevel: int = 4) -> bool:
126 | for key in keys:
127 | if key not in settings:
128 | log(f"key '{key}' not provided.", loglevel)
129 | return False
130 | return True
131 |
132 | def has_any_key(settings: dict, keys: List[str], loglevel: int = 4) -> bool:
133 | for key in keys:
134 | if key in settings:
135 | return True
136 | log(f"None of the following keys were provided: {keys}", loglevel)
137 | return False
138 |
139 |
140 | # helper functions for parsing user input / settings values - sqlsafe
141 |
142 | def is_numeric(value) -> bool:
143 | return type(value) in [int, float]
144 |
145 | def get_safe_name(value: str) -> str:
146 | return re.sub("[^a-zA-Z0-9_]", "", value)
147 |
148 | def get_safe_string(value) -> str:
149 | v = str(value)
150 |     return re.sub(r"[^a-zA-Z0-9_.: \-]", "", v)
151 |
152 | def str_to_numeric(value: str, throw_error: bool = False):
153 |     v = re.sub(r"[^0-9.\-]", "", value)  # keep only digits, '.' and '-'
154 | if v.find(".") > -1:
155 | return float(v)
156 | elif len(v) > 0:
157 | return int(v)
158 | if throw_error:
159 | raise Exception(f"Unable to convert string '{value}' to numeric.")
160 | return None
161 |
162 | def str_is_numeric_only(value: str) -> bool:
163 |     return re.fullmatch(r"[ 0-9.\-]+", value) is not None
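164 |
165 |
166 | # Illustrative examples for the parsing helpers above (derived from the regexes they use):
167 | #   get_safe_name("my-table!")        -> "mytable"
168 | #   get_safe_string("O'Brien; DROP")  -> "OBrien DROP"
169 | #   str_to_numeric("12.5 km")         -> 12.5
170 | #   str_is_numeric_only("12.5 km")    -> False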
--------------------------------------------------------------------------------