├── vtovosm ├── tests │ ├── __init__.py │ ├── test_simulations_main.py │ ├── network_config │ │ ├── tolerance_inspection.json │ │ └── tests.json │ ├── test_simulations_tolerance_inspection.py │ ├── test_osm.py │ └── test_sumo.py ├── simulations │ ├── __init__.py │ ├── network_config │ │ ├── viriyasitavat_comparison_uniform.json │ │ ├── tolerance_inspection.json │ │ ├── pathloss_vs_euclidean_distance.json │ │ ├── viriyasitavat_comparison_uniform_pathloss.json │ │ ├── viriyasitavat_comparison.json │ │ ├── viriyasitavat_comparison_london.json │ │ ├── viriyasitavat_comparison_neubau.json │ │ ├── speed_and_tls_cycle_impact.json │ │ ├── graz_cost_paper.json │ │ └── default.json │ ├── tolerance_inspection.py │ ├── result_analysis.py │ └── main.py ├── __init__.py ├── osm_xml.py ├── demo.py ├── geometry.py ├── pathloss.py ├── network_parser.py ├── vehicles.py ├── propagation.py ├── plot.py ├── utils.py ├── osmnx_addons.py └── sumo.py ├── data └── .gitignore ├── images ├── .gitignore └── demo_neubau │ ├── pathloss.png │ ├── prop_cond.png │ ├── vehicles.png │ └── con_status.png ├── results └── .gitignore ├── sumo_data └── .gitignore ├── requirements-test.txt ├── MANIFEST.in ├── requirements.txt ├── doc ├── source │ ├── index.rst │ └── conf.py ├── Makefile └── make.bat ├── .travis └── push.sh ├── setup.py ├── .travis.yml ├── .gitignore └── README.md /vtovosm/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /vtovosm/simulations/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /data/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | -------------------------------------------------------------------------------- /images/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | -------------------------------------------------------------------------------- /results/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | -------------------------------------------------------------------------------- /sumo_data/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | -------------------------------------------------------------------------------- /requirements-test.txt: -------------------------------------------------------------------------------- 1 | nose 2 | coverage 3 | coverage-badge 4 | -------------------------------------------------------------------------------- /images/demo_neubau/pathloss.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Dosenpfand/V2V-OSM/HEAD/images/demo_neubau/pathloss.png -------------------------------------------------------------------------------- /images/demo_neubau/prop_cond.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Dosenpfand/V2V-OSM/HEAD/images/demo_neubau/prop_cond.png -------------------------------------------------------------------------------- /images/demo_neubau/vehicles.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Dosenpfand/V2V-OSM/HEAD/images/demo_neubau/vehicles.png -------------------------------------------------------------------------------- /images/demo_neubau/con_status.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Dosenpfand/V2V-OSM/HEAD/images/demo_neubau/con_status.png -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md 2 | include LICENSE.txt 3 | include vtovosm/simulations/network_config/*.json 4 | include vtovosm/tests/network_config/*.json 5 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | geopandas>=0.2.1 2 | matplotlib>=2.0.2 3 | networkx>=1.11,<2.0 4 | numpy>=1.12.1 5 | osmnx>=0.5.1,<0.6 6 | requests>=2.14.2 7 | scipy>=0.19.0 8 | Shapely>=1.6.4 9 | -------------------------------------------------------------------------------- /doc/source/index.rst: -------------------------------------------------------------------------------- 1 | .. vtovosm documentation master file, created by 2 | sphinx-quickstart on Wed Jun 7 15:10:03 2017. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to vtovosm's documentation! 7 | =================================== 8 | 9 | .. toctree:: 10 | :maxdepth: 2 11 | :caption: Contents: 12 | 13 | 14 | 15 | Indices and tables 16 | ================== 17 | 18 | * :ref:`genindex` 19 | * :ref:`modindex` 20 | * :ref:`search` 21 | -------------------------------------------------------------------------------- /vtovosm/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = [ 2 | 'connection_analysis', 3 | 'demo', 4 | 'geometry', 5 | 'network_parser', 6 | 'osmnx_addons', 7 | 'osm_xml', 8 | 'pathloss', 9 | 'plot', 10 | 'propagation', 11 | 'sumo', 12 | 'utils', 13 | 'vehicles' 14 | ] 15 | 16 | from . import connection_analysis 17 | from . import demo 18 | from . import geometry 19 | from . import network_parser 20 | from . import osm_xml 21 | from . import osmnx_addons 22 | from . import pathloss 23 | from . import plot 24 | from . import propagation 25 | from . import sumo 26 | from . import utils 27 | from . import vehicles 28 | -------------------------------------------------------------------------------- /doc/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = python -msphinx 7 | SPHINXPROJ = vtovosm 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /.travis/push.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | convert_image() { 4 | convert .travis/coverage.svg .travis/coverage.png 5 | } 6 | 7 | setup_git() { 8 | git config --global user.email "travis@travis-ci.org" 9 | git config --global user.name "Travis CI" 10 | } 11 | 12 | commit_coverage_image() { 13 | git checkout -b travis 14 | git add .travis/coverage.svg .travis/coverage.png 15 | git commit --message "Travis coverage update: $TRAVIS_BUILD_NUMBER" 16 | } 17 | 18 | push_files() { 19 | git remote add origin-travis https://${GH_TOKEN}@github.com/Dosenpfand/V2V-OSM.git > /dev/null 2>&1 20 | git push -f --quiet --set-upstream origin-travis travis 21 | } 22 | 23 | setup_git 24 | convert_image 25 | commit_coverage_image 26 | push_files 27 | -------------------------------------------------------------------------------- /vtovosm/tests/test_simulations_main.py: -------------------------------------------------------------------------------- 1 | """Unit tests for the module simulations.main which execute slow""" 2 | 3 | import json 4 | import os 5 | import unittest 6 | 7 | import vtovosm.simulations.main as main_sim 8 | 9 | 10 | class TestSimulationsMain(unittest.TestCase): 11 | """Provides unit tests for the simulations.main module""" 12 | 13 | slow = True 14 | network = True 15 | 16 | module_path = os.path.dirname(__file__) 17 | conf_file_path = os.path.join(module_path, 'network_config', 'tests.json') 18 | 19 | def test_main(self): 20 | """Tests the function main""" 21 | 22 | main_sim.main_multi_scenario(conf_path=self.conf_file_path) 23 | 24 | 25 | if __name__ == '__main__': 26 | unittest.main() 27 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | setup(name='vtovosm', 4 | version='0.1', 5 | description='Simulate Vehicle-to-vehicle communication on street networks obtained from OpenStreetMap', 6 | url='https://github.com/Dosenpfand/thesis_code', 7 | author='Markus Gasser, Thomas Blazek', 8 | author_email='markus.gasser@student.tuwien.ac.at, tblazek@nt.tuwien.ac.at', 9 | license='GPLv3+', 10 | packages=['vtovosm'], 11 | install_requires=['geopandas>=0.2.1', 12 | 'matplotlib>=2.0.2', 13 | 'networkx>=1.11,<2.0', 14 | 'numpy>=1.12.1', 15 | 'osmnx>=0.5.1,<0.6', 16 | 'requests>=2.14.2', 17 | 'scipy>=0.19.0', 18 | 'Shapely>=1.6.4'], 19 | test_suite='nose.collector', 20 | tests_require=['nose']) 21 | -------------------------------------------------------------------------------- /doc/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=python -msphinx 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | set SPHINXPROJ=vtovosm 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The Sphinx module was not found. Make sure you have Sphinx installed, 20 | echo.then set the SPHINXBUILD environment variable to point to the full 21 | echo.path of the 'sphinx-build' executable. Alternatively you may add the 22 | echo.Sphinx directory to PATH. 23 | echo. 
24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /vtovosm/tests/network_config/tolerance_inspection.json: -------------------------------------------------------------------------------- 1 | { 2 | "global": { 3 | "scenario": "tolerance_0_test", 4 | "mail_to": "mgasser@nt.tuwien.ac.at", 5 | "send_mail": true, 6 | "loglevel": "INFO", 7 | "which_result": null, 8 | "distribution_veh": "uniform", 9 | "iterations": 3, 10 | "density_type": "absolute", 11 | "densities_veh": [ 12 | 4, 13 | 6 14 | ], 15 | "connection_metric": "distance", 16 | "max_connection_metric": { 17 | "olos_los": 40e6, 18 | "nlos": 0 19 | }, 20 | "simulation_mode": "parallel", 21 | "overwrite_result": true, 22 | "results_file_dir": "results/tests/tolerance_inspection" 23 | }, 24 | "tolerance_0_test": { 25 | "place": "Salmannsdorf - Vienna - Austria", 26 | "building_tolerance": 0 27 | }, 28 | "tolerance_1_test": { 29 | "place": "Salmannsdorf - Vienna - Austria", 30 | "building_tolerance": 1 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /vtovosm/tests/test_simulations_tolerance_inspection.py: -------------------------------------------------------------------------------- 1 | """Unit tests for the module simulations.tolerance_inspection which execute slow""" 2 | 3 | import os 4 | import unittest 5 | 6 | import vtovosm.simulations.main as main_sim 7 | import vtovosm.simulations.tolerance_inspection as tol_insp 8 | 9 | 10 | class TestSimulationsToleranceInspection(unittest.TestCase): 11 | """Provides unit tests for the simulations.tolerance_inspection module""" 12 | 13 | max_diff_ratio = 1e-4 14 | 15 | slow = True 16 | network = True 17 | 18 | module_path = os.path.dirname(__file__) 19 | conf_file_path = os.path.join(module_path, 'network_config', 'tolerance_inspection.json') 20 | 21 | def test_analyze_tolerance(self): 22 | """Tests the function analyze_tolerance""" 23 | 24 | main_sim.main_multi_scenario(conf_path=self.conf_file_path) 25 | all_results = tol_insp.analyze_tolerance(self.conf_file_path) 26 | 27 | for results in all_results.values(): 28 | for result in results: 29 | self.assertTrue(result['ratio_con_diff'] < self.max_diff_ratio) 30 | 31 | 32 | if __name__ == '__main__': 33 | unittest.main() 34 | -------------------------------------------------------------------------------- /vtovosm/simulations/network_config/viriyasitavat_comparison_uniform.json: -------------------------------------------------------------------------------- 1 | { 2 | "global": { 3 | "scenario": "default", 4 | "mail_to": "mgasser@nt.tuwien.ac.at", 5 | "send_mail": true, 6 | "loglevel": "INFO", 7 | "distribution_veh": "uniform", 8 | "density_type": "area", 9 | "densities_veh": [ 10 | { 11 | "start": 10e-6, 12 | "stop": 80e-6, 13 | "num": 8 14 | }, 15 | { 16 | "start": 120e-6, 17 | "stop": 160e-6, 18 | "num": 2 19 | } 20 | ], 21 | "connection_metric": "distance", 22 | "max_connection_metric": { 23 | "olos_los": 250, 24 | "nlos": 140 25 | }, 26 | "iterations": 1000, 27 | "results_file_dir": "results/viriyasitavat_comparison_uniform", 28 | "results_file_prefix": "result", 29 | "analyze_results": [ 30 | "net_connectivities" 31 | ], 32 | "building_tolerance": 1, 33 | 
"overwrite_result": false, 34 | "simulation_mode": "parallel" 35 | }, 36 | "default": { 37 | "place": "Upper West Side - New York - USA", 38 | "which_result": null 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /vtovosm/simulations/network_config/tolerance_inspection.json: -------------------------------------------------------------------------------- 1 | { 2 | "global": { 3 | "scenario": "tolerance_0_upperwestside", 4 | "mail_to": "mgasser@nt.tuwien.ac.at", 5 | "send_mail": true, 6 | "loglevel": "INFO", 7 | "which_result": null, 8 | "distribution_veh": "uniform", 9 | "iterations": 100, 10 | "density_type": "absolute", 11 | "densities_veh": [ 12 | 10, 13 | 100, 14 | 1000 15 | ], 16 | "connection_metric": "distance", 17 | "max_connection_metric": { 18 | "olos_los": 40e6, 19 | "nlos": 0 20 | }, 21 | "simulation_mode": "parallel", 22 | "overwrite_result": false, 23 | "results_file_dir": "results/tolerance_inspection" 24 | }, 25 | "tolerance_0_upperwestside": { 26 | "place": "Upper West Side - New York - USA", 27 | "building_tolerance": 0 28 | }, 29 | "tolerance_1_upperwestside": { 30 | "place": "Upper West Side - New York - USA", 31 | "building_tolerance": 1 32 | }, 33 | "tolerance_0_neubau": { 34 | "place": "Neubau - Vienna - Austria", 35 | "building_tolerance": 0 36 | }, 37 | "tolerance_1_neubau": { 38 | "place": "Neubau - Vienna - Austria", 39 | "building_tolerance": 1 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /vtovosm/simulations/network_config/pathloss_vs_euclidean_distance.json: -------------------------------------------------------------------------------- 1 | { 2 | "global": { 3 | "scenario": "euclidean", 4 | "mail_to": "mgasser@nt.tuwien.ac.at", 5 | "send_mail": true, 6 | "loglevel": "INFO", 7 | "place": "Upper West Side - New York - USA", 8 | "which_result": null, 9 | "distribution_veh": "uniform", 10 | "density_type": "area", 11 | "densities_veh": [ 12 | { 13 | "start": 10e-6, 14 | "stop": 80e-6, 15 | "num": 8 16 | }, 17 | { 18 | "start": 120e-6, 19 | "stop": 160e-6, 20 | "num": 2 21 | } 22 | ], 23 | "iterations": 100, 24 | "results_file_dir": "results/pathloss_vs_euclidean_distance", 25 | "analyze_results": [ 26 | "net_connectivities" 27 | ], 28 | "building_tolerance": 1, 29 | "overwrite_result": false, 30 | "simulation_mode": "parallel" 31 | }, 32 | "pathloss": { 33 | "connection_metric": "pathloss", 34 | "max_connection_metric": 100, 35 | "results_file_prefix": "result_pathloss" 36 | }, 37 | "euclidean": { 38 | "connection_metric": "distance", 39 | "max_connection_metric": { 40 | "olos_los": 434.33, 41 | "nlos": 40.72 42 | }, 43 | "results_file_prefix": "result_distance" 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /vtovosm/simulations/network_config/viriyasitavat_comparison_uniform_pathloss.json: -------------------------------------------------------------------------------- 1 | { 2 | "global": { 3 | "scenario": "max_from_mean", 4 | "mail_to": "mgasser@nt.tuwien.ac.at", 5 | "send_mail": true, 6 | "loglevel": "INFO", 7 | "place": "Upper West Side - New York - USA", 8 | "which_result": null, 9 | "distribution_veh": "uniform", 10 | "density_type": "area", 11 | "densities_veh": [ 12 | { 13 | "start": 10e-6, 14 | "stop": 80e-6, 15 | "num": 8 16 | }, 17 | { 18 | "start": 120e-6, 19 | "stop": 160e-6, 20 | "num": 2 21 | } 22 | ], 23 | "connection_metric": "pathloss", 24 | "iterations": 500, 25 | "results_file_dir": 
"results/viriyasitavat_comparison_uniform_pathloss", 26 | 27 | "analyze_results": [ 28 | "net_connectivities" 29 | ], 30 | "building_tolerance": 1, 31 | "overwrite_result": false, 32 | "simulation_mode": "parallel" 33 | }, 34 | "max_from_mean": { 35 | "max_connection_metric": 109.71, 36 | "results_file_prefix": "result_max_from_mean" 37 | }, 38 | "max_from_nlos": { 39 | "max_connection_metric": 133.60, 40 | "results_file_prefix": "result_max_from_nlos" 41 | }, 42 | "max_from_olos": { 43 | "max_connection_metric": 102.37, 44 | "results_file_prefix": "result_max_from_olos" 45 | }, 46 | "max_from_los": { 47 | "max_connection_metric": 93.16, 48 | "results_file_prefix": "result_max_from_los" 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | dist: trusty 2 | sudo: required 3 | language: python 4 | python: 5 | # Temporarily disabled because of https://github.com/pypa/pip/issues/5230 6 | # - "3.4" 7 | - "3.6" 8 | env: 9 | # Has to be set so that the TLS coordinator script works 10 | - SUMO_HOME=$TRAVIS_BUILD_DIR/sumo/sumo 11 | # SUMO ppa can not be added with the apt addon because it is not whitelisted 12 | before_install: 13 | - sudo add-apt-repository ppa:sumo/stable -y 14 | - sudo add-apt-repository ppa:jonathonf/ffmpeg-3 -y 15 | - sudo apt-get update -q 16 | - sudo apt-get install sumo ffmpeg 17 | install: 18 | - pip install --upgrade pip 19 | - pip install -r requirements-test.txt 20 | # Clone SUMO to get newer tools than in the repository 21 | - git clone --depth 10 https://github.com/planetsumo/sumo.git $TRAVIS_BUILD_DIR/sumo 22 | - cd $TRAVIS_BUILD_DIR/sumo 23 | - git checkout eefb468d8a3d255643dd7c2adb65a2d95902af20 24 | - cd $TRAVIS_BUILD_DIR 25 | - pip install . 26 | addons: 27 | apt: 28 | packages: 29 | - libgdal1h 30 | - gdal-bin 31 | - libgdal-dev 32 | - libspatialindex-dev 33 | - libgeos-dev 34 | - libopenblas-dev 35 | - liblapack-dev 36 | - gfortran 37 | - sumo 38 | - texlive-latex-recommended 39 | - texlive-latex-extra 40 | - texlive-fonts-recommended 41 | - texlive-fonts-extra 42 | - dvipng 43 | cache: pip 44 | branches: 45 | only: 46 | - master 47 | # Configure a headless display to test plot generation 48 | before_script: 49 | - "export DISPLAY=:99.0" 50 | - "sh -e /etc/init.d/xvfb start" 51 | # Give xvfb some time to start 52 | - sleep 3 53 | script: 54 | - nosetests --with-coverage --cover-html --cover-tests --cover-package=vtovosm --verbose 55 | after_success: 56 | - coverage-badge -f -o .travis/coverage.svg 57 | - if [[ $TRAVIS_PYTHON_VERSION == 3.4 ]]; then .travis/push.sh; fi 58 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Visual Studio Code specific files 2 | .vscode/ 3 | 4 | # Byte-compiled / optimized / DLL files 5 | __pycache__/ 6 | *.py[cod] 7 | *$py.class 8 | 9 | # C extensions 10 | *.so 11 | 12 | # Distribution / packaging 13 | .Python 14 | env/ 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | wheels/ 27 | *.egg-info/ 28 | .installed.cfg 29 | *.egg 30 | 31 | # PyInstaller 32 | # Usually these files are written by a python script from a template 33 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
34 | *.manifest 35 | *.spec 36 | 37 | # Installer logs 38 | pip-log.txt 39 | pip-delete-this-directory.txt 40 | 41 | # Unit test / coverage reports 42 | htmlcov/ 43 | .tox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *,cover 50 | .hypothesis/ 51 | 52 | # Translations 53 | *.mo 54 | *.pot 55 | 56 | # Django stuff: 57 | *.log 58 | local_settings.py 59 | 60 | # Flask stuff: 61 | instance/ 62 | .webassets-cache 63 | 64 | # Scrapy stuff: 65 | .scrapy 66 | 67 | # Sphinx documentation 68 | docs/_build/ 69 | 70 | # PyBuilder 71 | target/ 72 | 73 | # Jupyter Notebook 74 | .ipynb_checkpoints 75 | 76 | # pyenv 77 | .python-version 78 | 79 | # celery beat schedule file 80 | celerybeat-schedule 81 | 82 | # SageMath parsed files 83 | *.sage.py 84 | 85 | # dotenv 86 | .env 87 | 88 | # virtualenv 89 | .venv 90 | venv/ 91 | ENV/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | 96 | # Rope project settings 97 | .ropeproject 98 | 99 | # Pycharm project settings 100 | .idea 101 | 102 | # osmnx cache files 103 | cache/*.json 104 | 105 | # Pickle saved objects 106 | *.pickle 107 | *.p 108 | 109 | # Coverage report 110 | cover/ 111 | -------------------------------------------------------------------------------- /vtovosm/simulations/network_config/viriyasitavat_comparison.json: -------------------------------------------------------------------------------- 1 | { 2 | "global": { 3 | "scenario": "default", 4 | "mail_to": "mgasser@nt.tuwien.ac.at", 5 | "send_mail": true, 6 | "loglevel": "INFO", 7 | "place": "Upper West Side - New York - USA", 8 | "which_result": null, 9 | "distribution_veh": "SUMO", 10 | "density_type": "area", 11 | "densities_veh": [ 12 | { 13 | "start": 10e-6, 14 | "stop": 80e-6, 15 | "num": 8 16 | }, 17 | { 18 | "start": 120e-6, 19 | "stop": 160e-6, 20 | "num": 2 21 | } 22 | ], 23 | "connection_metric": "distance", 24 | "max_connection_metric": { 25 | "olos_los": 250, 26 | "nlos": 140 27 | }, 28 | "sumo": { 29 | "sim_duration": 3000, 30 | "warmup_duration": 1000, 31 | "fringe_factor": 1, 32 | "tls_settings": { 33 | "cycle_time": 45, 34 | "yellow_time": 2 35 | }, 36 | "max_speed": 10, 37 | "intermediate_points": 100, 38 | "veh_rate_factor": 0.5 39 | }, 40 | "results_file_dir": "results/viriyasitavat_comparison", 41 | "results_file_prefix": "result", 42 | "analyze_results": "all", 43 | "building_tolerance": 1, 44 | "overwrite_result": false, 45 | "simulation_mode": "parallel" 46 | }, 47 | "default": { 48 | "sumo": { 49 | "skip_sumo": false, 50 | "abort_after_sumo": false 51 | } 52 | }, 53 | "only_sumo": { 54 | "sumo": { 55 | "skip_sumo": false, 56 | "abort_after_sumo": true 57 | } 58 | }, 59 | "only_connection": { 60 | "sumo": { 61 | "skip_sumo": true, 62 | "abort_after_sumo": false 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /vtovosm/simulations/network_config/viriyasitavat_comparison_london.json: -------------------------------------------------------------------------------- 1 | { 2 | "global": { 3 | "scenario": "default", 4 | "mail_to": "mgasser@nt.tuwien.ac.at", 5 | "send_mail": true, 6 | "loglevel": "INFO", 7 | "place": "City of London - London - UK", 8 | "which_result": null, 9 | "distribution_veh": "SUMO", 10 | "density_type": "area", 11 | "densities_veh": [ 12 | { 13 | "start": 10e-6, 14 | "stop": 80e-6, 15 | "num": 8 16 | }, 17 | { 18 | "start": 120e-6, 19 | "stop": 160e-6, 20 | "num": 2 21 | } 22 | ], 23 | "connection_metric": "distance", 24 | "max_connection_metric": { 
25 | "olos_los": 250, 26 | "nlos": 140 27 | }, 28 | "sumo": { 29 | "sim_duration": 3000, 30 | "warmup_duration": 1000, 31 | "fringe_factor": 1, 32 | "tls_settings": { 33 | "cycle_time": 45, 34 | "yellow_time": 2 35 | }, 36 | "max_speed": 10, 37 | "intermediate_points": 50, 38 | "veh_rate_factor": 0.25 39 | }, 40 | "results_file_dir": "results/viriyasitavat_comparison_london", 41 | "results_file_prefix": "result", 42 | "analyze_results": "all", 43 | "building_tolerance": 1, 44 | "overwrite_result": false, 45 | "simulation_mode": "parallel" 46 | }, 47 | "default": { 48 | "sumo": { 49 | "skip_sumo": false, 50 | "abort_after_sumo": false 51 | } 52 | }, 53 | "only_sumo": { 54 | "sumo": { 55 | "skip_sumo": false, 56 | "abort_after_sumo": true 57 | } 58 | }, 59 | "only_connection": { 60 | "sumo": { 61 | "skip_sumo": true, 62 | "abort_after_sumo": false 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /vtovosm/simulations/network_config/viriyasitavat_comparison_neubau.json: -------------------------------------------------------------------------------- 1 | { 2 | "global": { 3 | "scenario": "default", 4 | "mail_to": "mgasser@nt.tuwien.ac.at", 5 | "send_mail": true, 6 | "loglevel": "INFO", 7 | "place": "Neubau - Vienna - Austria", 8 | "which_result": null, 9 | "distribution_veh": "SUMO", 10 | "density_type": "area", 11 | "densities_veh": [ 12 | { 13 | "start": 10e-6, 14 | "stop": 80e-6, 15 | "num": 8 16 | }, 17 | { 18 | "start": 120e-6, 19 | "stop": 160e-6, 20 | "num": 2 21 | } 22 | ], 23 | "connection_metric": "distance", 24 | "max_connection_metric": { 25 | "olos_los": 250, 26 | "nlos": 140 27 | }, 28 | "sumo": { 29 | "sim_duration": 3000, 30 | "warmup_duration": 1000, 31 | "fringe_factor": 1, 32 | "tls_settings": { 33 | "cycle_time": 45, 34 | "yellow_time": 2 35 | }, 36 | "max_speed": 10, 37 | "intermediate_points": 50, 38 | "veh_rate_factor": 0.25 39 | }, 40 | "results_file_dir": "results/viriyasitavat_comparison_neubau", 41 | "results_file_prefix": "result", 42 | "analyze_results": "all", 43 | "building_tolerance": 1, 44 | "overwrite_result": false, 45 | "simulation_mode": "parallel", 46 | "processes": 16 47 | }, 48 | "default": { 49 | "sumo": { 50 | "skip_sumo": false, 51 | "abort_after_sumo": false 52 | } 53 | }, 54 | "only_sumo": { 55 | "sumo": { 56 | "skip_sumo": false, 57 | "abort_after_sumo": true 58 | } 59 | }, 60 | "only_connection": { 61 | "sumo": { 62 | "skip_sumo": true, 63 | "abort_after_sumo": false 64 | } 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /vtovosm/osm_xml.py: -------------------------------------------------------------------------------- 1 | """Get street networks from OpenStreetMap in XML format""" 2 | 3 | import time 4 | 5 | import osmnx 6 | import requests 7 | 8 | 9 | def osm_net_download(polygon, 10 | network_type='all_private', 11 | timeout=180, 12 | memory=None, 13 | max_query_area_size=50 * 1000 * 50 * 1000): 14 | """Download OSM ways and nodes within a polygon from the Overpass API""" 15 | 16 | osm_filter = osmnx.get_osm_filter(network_type) 17 | response_xmls = [] 18 | 19 | if memory is None: 20 | maxsize = '' 21 | else: 22 | maxsize = '[maxsize:{}]'.format(memory) 23 | 24 | geometry_proj, crs_proj = osmnx.project_geometry(polygon) 25 | geometry_proj_cons_subdiv = osmnx.consolidate_subdivide_geometry( 26 | geometry_proj, max_query_area_size=max_query_area_size) 27 | geometry, _ = osmnx.project_geometry( 28 | geometry_proj_cons_subdiv, crs=crs_proj, 
to_latlong=True) 29 | polygon_coord_strs = osmnx.get_polygons_coordinates(geometry) 30 | 31 | for polygon_coord_str in polygon_coord_strs: 32 | query_template = \ 33 | '[out:xml][timeout:{timeout}]{maxsize};' + \ 34 | '(way["highway"]{filters}(poly:"{polygon}");>;);out;' 35 | query_str = query_template.format( 36 | polygon=polygon_coord_str, filters=osm_filter, timeout=timeout, maxsize=maxsize) 37 | response_xml = overpass_request( 38 | data={'data': query_str}, timeout=timeout) 39 | response_xmls.append(response_xml) 40 | return response_xmls 41 | 42 | 43 | def overpass_request(data, pause_duration=None, timeout=180, error_pause_duration=None): 44 | """Send a request to the Overpass API via HTTP POST and return the XML response""" 45 | 46 | url = 'http://www.overpass-api.de/api/interpreter' 47 | if pause_duration is None: 48 | this_pause_duration = osmnx.get_pause_duration() 49 | time.sleep(this_pause_duration) 50 | response = requests.post(url, data=data, timeout=timeout) 51 | 52 | if response.status_code in [429, 504]: 53 | if error_pause_duration is None: 54 | error_pause_duration = osmnx.get_pause_duration() 55 | time.sleep(error_pause_duration) 56 | response = overpass_request( 57 | data=data, pause_duration=pause_duration, timeout=timeout) 58 | 59 | return response.content 60 | -------------------------------------------------------------------------------- /vtovosm/tests/test_osm.py: -------------------------------------------------------------------------------- 1 | """Unit tests for all modules that interact with OpenStreetMaps and therefore execute slower""" 2 | 3 | import unittest 4 | 5 | import geopandas as geop 6 | import networkx as nx 7 | import osmnx as ox 8 | 9 | import vtovosm.osmnx_addons as ox_a 10 | 11 | 12 | class TestOsmnxAddons(unittest.TestCase): 13 | """Provides unit tests for the osmnx_addons module""" 14 | 15 | place = 'Salmannsdorf - Vienna - Austria' 16 | slow = True 17 | network = True 18 | 19 | def test_many(self): 20 | """Tests the functions which_result_polygon, add_geometry, check_geometry""" 21 | 22 | # Setup 23 | ox_a.setup() 24 | self.street_graph = None 25 | index = ox_a.which_result_polygon(self.place) 26 | 27 | # Check if a valid index was returned 28 | result_correct = isinstance(index, int) 29 | self.assertTrue(result_correct) 30 | 31 | # Try to download the street network with the returned index 32 | street_graph = ox.graph_from_place(self.place, which_result=index) 33 | self.assertIsInstance(street_graph, nx.MultiDiGraph) 34 | 35 | # Add missing geometry entries 36 | ox_a.add_geometry(street_graph) 37 | 38 | # Check the geometry 39 | geometry_complete = ox_a.check_geometry(street_graph) 40 | self.assertTrue(geometry_complete) 41 | 42 | def test_load_network(self): 43 | """Tests the function load_network""" 44 | 45 | # Setup 46 | ox_a.setup() 47 | 48 | # Load the network from the internet 49 | network = ox_a.load_network(self.place, which_result=None, overwrite=True) 50 | 51 | self.assertIsInstance(network['graph_streets'], nx.MultiDiGraph) 52 | self.assertIsInstance(network['graph_streets_wave'], nx.MultiGraph) 53 | self.assertIsInstance(network['gdf_buildings'], geop.GeoDataFrame) 54 | self.assertIsInstance(network['gdf_boundary'], geop.GeoDataFrame) 55 | 56 | # Load the network from disk 57 | network = ox_a.load_network(self.place, which_result=None, overwrite=False) 58 | 59 | self.assertIsInstance(network['graph_streets'], nx.MultiDiGraph) 60 | self.assertIsInstance(network['graph_streets_wave'], nx.MultiGraph) 61 | 
self.assertIsInstance(network['gdf_buildings'], geop.GeoDataFrame) 62 | self.assertIsInstance(network['gdf_boundary'], geop.GeoDataFrame) 63 | 64 | 65 | if __name__ == '__main__': 66 | unittest.main() 67 | -------------------------------------------------------------------------------- /vtovosm/simulations/network_config/speed_and_tls_cycle_impact.json: -------------------------------------------------------------------------------- 1 | { 2 | "global": { 3 | "scenario": "45s15mps", 4 | "mail_to": "mgasser@nt.tuwien.ac.at", 5 | "send_mail": true, 6 | "loglevel": "INFO", 7 | "place": "Upper West Side - New York - USA", 8 | "which_result": null, 9 | "distribution_veh": "SUMO", 10 | "density_type": "area", 11 | "densities_veh": [ 12 | { 13 | "start": 25e-6, 14 | "stop": 75e-6, 15 | "num": 3 16 | } 17 | ], 18 | "connection_metric": "distance", 19 | "max_connection_metric": { 20 | "olos_los": 250, 21 | "nlos": 140 22 | }, 23 | "sumo": { 24 | "sim_duration": 3000, 25 | "warmup_duration": 1000, 26 | "fringe_factor": 1, 27 | "intermediate_points": 50, 28 | "veh_rate_factor": 0.25, 29 | "skip_sumo": false, 30 | "abort_after_sumo": false 31 | }, 32 | "analyze_results": "all", 33 | "building_tolerance": 1, 34 | "overwrite_result": false, 35 | "simulation_mode": "parallel", 36 | "processes": 16, 37 | "results_file_dir": "results/speed_and_tls_cycle_impact" 38 | }, 39 | "45s15mps": { 40 | "sumo": { 41 | "tls_settings": { 42 | "cycle_time": 45, 43 | "yellow_time": 2 44 | }, 45 | "max_speed": 15, 46 | "directory": "sumo_data/45s15mps/" 47 | }, 48 | "results_file_prefix": "result_45s15mps" 49 | }, 50 | "90s15mps": { 51 | "sumo": { 52 | "tls_settings": { 53 | "cycle_time": 90, 54 | "yellow_time": 2 55 | }, 56 | "max_speed": 15, 57 | "directory": "sumo_data/90s15mps/" 58 | }, 59 | "results_file_prefix": "result_90s15mps" 60 | }, 61 | "45s10mps": { 62 | "sumo": { 63 | "tls_settings": { 64 | "cycle_time": 45, 65 | "yellow_time": 2 66 | }, 67 | "max_speed": 10, 68 | "directory": "sumo_data/45s10mps/" 69 | }, 70 | "results_file_prefix": "result_45s10mps" 71 | }, 72 | "45s15mps": { 73 | "sumo": { 74 | "tls_settings": { 75 | "cycle_time": 45, 76 | "yellow_time": 2 77 | }, 78 | "max_speed": 15, 79 | "directory": "sumo_data/45s15mps/" 80 | }, 81 | "results_file_prefix": "result_45s15mps" 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /vtovosm/tests/test_sumo.py: -------------------------------------------------------------------------------- 1 | """Unit tests for all modules that interact with SUMO and therefore execute slower and need SUMO installed""" 2 | 3 | import os 4 | import unittest 5 | 6 | import numpy as np 7 | 8 | import vtovosm.sumo as sumo 9 | 10 | 11 | class TestSumo(unittest.TestCase): 12 | """Provides unit tests for the sumo module""" 13 | 14 | slow = True 15 | network = True 16 | 17 | def test_download_streets_from_id(self): 18 | """Tests the function download_streets_from_id""" 19 | 20 | # OSM id for Salmannsdorf, Vienna, Austria 21 | osm_id = 5875884 22 | directory = os.path.join('sumo_data', 'tests') 23 | path_out = os.path.join(directory, 'test_download_streets_from_id_city.osm.xml') 24 | 25 | if os.path.isfile(path_out): 26 | os.remove(path_out) 27 | os.makedirs(directory, exist_ok=True) 28 | return_code = sumo.download_streets_from_id(osm_id, prefix='test_download_streets_from_id', directory=directory) 29 | 30 | self.assertIs(return_code, 0) 31 | self.assertTrue(os.path.isfile(path_out)) 32 | 33 | def test_simple_wrapper(self): 34 | """Tests 
the function simple_wrapper""" 35 | 36 | place = 'Salmannsdorf - Vienna - Austria' 37 | directory = os.path.join('sumo_data', 'tests') 38 | 39 | # Run simulation with overwrite 40 | traces = sumo.simple_wrapper( 41 | place, 42 | which_result=None, 43 | count_veh=5, 44 | duration=60, 45 | warmup_duration=30, 46 | max_speed=None, 47 | tls_settings=None, 48 | fringe_factor=None, 49 | intermediate_points=None, 50 | start_veh_simult=True, 51 | coordinate_tls=True, 52 | directory=directory, 53 | skip_if_exists=False, 54 | veh_class='passenger' 55 | ) 56 | 57 | self.assertIsInstance(traces, np.ndarray) 58 | 59 | # Run simulation without overwrite 60 | traces = sumo.simple_wrapper( 61 | place, 62 | which_result=None, 63 | count_veh=5, 64 | duration=60, 65 | warmup_duration=30, 66 | max_speed=None, 67 | tls_settings=None, 68 | fringe_factor=None, 69 | intermediate_points=None, 70 | start_veh_simult=True, 71 | coordinate_tls=True, 72 | directory=directory, 73 | skip_if_exists=True, 74 | veh_class='passenger' 75 | ) 76 | 77 | self.assertIsInstance(traces, np.ndarray) 78 | 79 | 80 | if __name__ == '__main__': 81 | unittest.main() 82 | -------------------------------------------------------------------------------- /vtovosm/simulations/network_config/graz_cost_paper.json: -------------------------------------------------------------------------------- 1 | { 2 | "global": { 3 | "scenario": "linz_sumo", 4 | "mail_to": "mgasser@nt.tuwien.ac.at", 5 | "send_mail": true, 6 | "loglevel": "INFO", 7 | "which_result": null, 8 | "density_type": "length", 9 | "densities_veh": [ 10 | 10e-3, 11 | 20e-3, 12 | 50e-3 13 | ], 14 | "connection_metric": "distance", 15 | "max_connection_metric": { 16 | "olos_los": 350, 17 | "nlos": 100 18 | }, 19 | "results_file_dir": "results/graz_cost_paper", 20 | "analyze_results": null, 21 | "building_tolerance": 1, 22 | "overwrite_result": false, 23 | "simulation_mode": "parallel" 24 | }, 25 | "linz_sumo": { 26 | "place": "Linz - Austria", 27 | "distribution_veh": "SUMO", 28 | "results_file_prefix": "result_linz_sumo", 29 | "sumo": { 30 | "sim_duration": 3000, 31 | "warmup_duration": 1000, 32 | "fringe_factor": 1, 33 | "tls_settings": { 34 | "cycle_time": 45, 35 | "yellow_time": 2 36 | }, 37 | "max_speed": 10, 38 | "intermediate_points": 10, 39 | "veh_rate_factor": 0.25, 40 | "coordinate_tls": false, 41 | "skip_sumo": false, 42 | "abort_after_sumo": false 43 | } 44 | }, 45 | "innerelinz_sumo": { 46 | "place": "Innere Stadt - Linz - Austria", 47 | "distribution_veh": "SUMO", 48 | "results_file_prefix": "result_innerelinz_sumo", 49 | "sumo": { 50 | "sim_duration": 3000, 51 | "warmup_duration": 1000, 52 | "fringe_factor": 1, 53 | "tls_settings": { 54 | "cycle_time": 45, 55 | "yellow_time": 2 56 | }, 57 | "max_speed": 10, 58 | "intermediate_points": 100, 59 | "veh_rate_factor": 0.5, 60 | "coordinate_tls": false, 61 | "skip_sumo": false, 62 | "abort_after_sumo": false 63 | } 64 | }, 65 | "upperwestside_sumo": { 66 | "place": "Upper West Side - New York - USA", 67 | "distribution_veh": "SUMO", 68 | "results_file_prefix": "result_upperwestside_sumo", 69 | "sumo": { 70 | "sim_duration": 3000, 71 | "warmup_duration": 1000, 72 | "fringe_factor": 1, 73 | "tls_settings": { 74 | "cycle_time": 45, 75 | "yellow_time": 2 76 | }, 77 | "max_speed": 10, 78 | "intermediate_points": 100, 79 | "veh_rate_factor": 0.5, 80 | "coordinate_tls": true, 81 | "skip_sumo": false, 82 | "abort_after_sumo": false 83 | } 84 | } 85 | } 86 | -------------------------------------------------------------------------------- 
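The JSON files under vtovosm/simulations/network_config/ are the scenario configurations consumed by vtovosm.simulations.main. A minimal sketch of how such a configuration drives a run, mirroring the calls used in vtovosm/tests/test_simulations_main.py and in the __main__ block of tolerance_inspection.py below (the choice of default.json here is only an example):

    import os

    import vtovosm.network_parser as nw_p
    import vtovosm.simulations.main as main_sim

    # Pick one of the bundled scenario configuration files (example choice).
    conf_path = os.path.join(nw_p.DEFAULT_CONFIG_DIR, 'default.json')

    # Run the simulation(s) defined by that configuration file, as the unit
    # tests and tolerance_inspection.py do.
    main_sim.main_multi_scenario(conf_path=conf_path)

The README quickstart drives the same module from the command line via python3 -m vtovosm.simulations.main.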
/vtovosm/simulations/tolerance_inspection.py: -------------------------------------------------------------------------------- 1 | """Determine if using a building tolerance makes a difference on the resulting connection matrices""" 2 | 3 | import os 4 | import signal 5 | 6 | import numpy as np 7 | 8 | from . import main as main_sim 9 | from .. import network_parser as nw_p 10 | from .. import utils 11 | 12 | 13 | def analyze_tolerance(conf_path): 14 | """Analyzes the simulation results by comparing connection matrices from simulations with and without tolerance. 15 | The connection matrices correspond to propagation condition matrices with True = OLOS/LOS and False = NLOS because 16 | of the maximum set distances.""" 17 | 18 | config = nw_p.params_from_conf(config_file=conf_path) 19 | 20 | if config['density_type'] != 'absolute': 21 | raise NotImplementedError('Only absolute vehicle counts supported') 22 | 23 | counts_vehs = config['densities_veh'] 24 | 25 | scenarios = nw_p.get_scenarios_list(conf_path) 26 | suffixes = set() 27 | for scenario in scenarios: 28 | suffixes.add(scenario[12:]) 29 | 30 | all_results = {} 31 | for suffix in list(suffixes): 32 | 33 | results = [] 34 | 35 | result_dir = config['results_file_dir'] 36 | 37 | for count_vehs in counts_vehs: 38 | res_wo = utils.load(os.path.join(result_dir, 'tolerance_0_{}.{:d}.pickle.xz'.format(suffix, count_vehs))) 39 | res_w = utils.load(os.path.join(result_dir, 'tolerance_1_{}.{:d}.pickle.xz'.format(suffix, count_vehs))) 40 | run_time_wo = res_wo['info']['time_finish'] - res_wo['info']['time_start'] 41 | run_time_w = res_w['info']['time_finish'] - res_w['info']['time_start'] 42 | matrices_cons_wo = res_wo['results']['matrices_cons'] 43 | matrices_cons_w = res_w['results']['matrices_cons'] 44 | 45 | count_diff = 0 46 | count_tot = 0 47 | 48 | for matrix_cons_wo, matrix_cons_w in zip(matrices_cons_wo, matrices_cons_w): 49 | count_diff += np.nonzero(matrix_cons_wo != matrix_cons_w)[0].size 50 | count_tot += matrix_cons_w.size 51 | 52 | ratio_diff = count_diff / count_tot 53 | result = {'count_vehs': count_vehs, 54 | 'count_con_tot': count_tot, 55 | 'count_con_diff': count_diff, 56 | 'ratio_con_diff': ratio_diff, 57 | 'run_time_wo': run_time_wo, 58 | 'run_time_w': run_time_w} 59 | results.append(result) 60 | 61 | utils.save(results, os.path.join(result_dir, 'tolerance_comparison_{}.pickle.xz'.format(suffix))) 62 | all_results[suffix] = results 63 | 64 | return all_results 65 | 66 | 67 | if __name__ == '__main__': 68 | # Set the config to be used 69 | config_file_path = os.path.join(nw_p.DEFAULT_CONFIG_DIR, 'tolerance_inspection.json') 70 | 71 | # Register signal handler 72 | signal.signal(signal.SIGTSTP, main_sim.signal_handler) 73 | 74 | # Run main simulation 75 | main_sim.main_multi_scenario(conf_path=config_file_path) 76 | 77 | # Analyze results 78 | results = analyze_tolerance(config_file_path) 79 | print(results) 80 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # V2V-OSM 2 | Simulate Vehicle-to-vehicle (V2V) communication on street networks obtained from [OpenStreetMap](https://www.openstreetmap.org/) (OSM) 3 | 4 | ## Status 5 | [![Build Status](https://travis-ci.org/Dosenpfand/V2V-OSM.svg?branch=master)](https://travis-ci.org/Dosenpfand/V2V-OSM) 6 | ![Coverage](https://github.com/Dosenpfand/V2V-OSM/blob/travis/.travis/coverage.png?raw=true) 7 | 8 | ## Screenshots 9 | 10 | ![Exemplary propagation
condition](/images/demo_neubau/prop_cond.png?raw=true "Propagation condition") 11 | ![Exemplary pathloss](/images/demo_neubau/pathloss.png?raw=true "Pathloss") 12 | 13 | ## Main Components 14 | Main software components are: 15 | 16 | - This Python package 17 | - [OSMnx](https://github.com/gboeing/osmnx) 18 | - [SUMO](http://www.sumo.dlr.de) 19 | - [NetworkX](https://networkx.github.io/) 20 | - To see all third party libraries check `requirements.txt` 21 | 22 | ## Quickstart 23 | To get started on Debian 8 follow these steps. 24 | 25 | 1. Install basic tools 26 | 27 | apt-get install python3 python3-pip git 28 | 29 | 2. Install libraries 30 | 31 | apt-get install libfreetype6-dev libxft-dev libgeos-dev libgdal-dev libspatialindex-dev 32 | 33 | 3. Optionally install linear algebra libraries for a faster numpy experience 34 | 35 | apt-get install libopenblas-dev liblapack-dev gfortran 36 | 37 | or if you want to use ATLAS instead of OpenBLAS: 38 | 39 | apt-get install liblapack-dev libatlas-dev libatlas-base-dev gfortran 40 | 41 | 4. Clone the repository and cd into it 42 | 43 | 5. Create a virtual environment 44 | 45 | python3 -m venv venv --without-pip 46 | 47 | 6. Activate the virtual environment 48 | 49 | source venv/bin/activate 50 | 51 | 7. Download and install pip 52 | 53 | wget https://bootstrap.pypa.io/get-pip.py 54 | python3 get-pip.py 55 | 56 | 8. Either 57 | * Install the package 58 | 59 | pip install . 60 | 61 | or via symlinks 62 | 63 | pip install -e . 64 | 65 | Or 66 | 67 | * Install only the dependencies 68 | 69 | pip install -r requirements.txt 70 | 71 | 9. Run an exemplary simulation 72 | 73 | python3 -m vtovosm.simulations.main 74 | 75 | 10. Modify the simulation parameters in 76 | 77 | vtovosm/simulations/network_config/default.json 78 | 79 | and run the simulation again 80 | 81 | python3 -m vtovosm.simulations.main 82 | 83 | 11. Get help by executing 84 | 85 | python3 -m vtovosm.simulations.main -h 86 | 87 | 12. Optional: For realistic vehicle placement and movement install [SUMO](http://www.sumo.dlr.de) from the [backports repository](https://backports.debian.org/Instructions/): 88 | 89 | apt-get -t jessie-backports install sumo sumo-tools 90 | 91 | # Tests 92 | To run the tests install the test-specific dependencies by executing 93 | 94 | pip install -r requirements-test.txt 95 | 96 | ## Unit tests 97 | 98 | 99 | Run the unit tests by executing 100 | 101 | nosetests -v 102 | 103 | For all tests to complete successfully SUMO needs to be installed (see Quickstart). 104 | 105 | To only run fast tests or tests that do not need network access execute 106 | 107 | nosetests -v -a '!slow' 108 | 109 | or 110 | 111 | nosetests -v -a '!network' 112 | 113 | ## Coverage 114 | Run the tests with coverage analysis by starting 115 | 116 | nosetests --with-coverage --cover-html --cover-tests --cover-package=vtovosm --verbose 117 | 118 | And then open `cover/index.html`. 119 | 120 | To create a badge execute 121 | 122 | coverage-badge -f -o .travis/coverage.svg 123 | 124 | # Authors 125 | 126 | - [Dosenpfand](https://github.com/Dosenpfand) 127 | - [tmblazek](https://github.com/tmblazek) 128 | -------------------------------------------------------------------------------- /vtovosm/demo.py: -------------------------------------------------------------------------------- 1 | """Demonstration module mainly suited for generating plots""" 2 | 3 | import numpy as np 4 | 5 | from . import geometry as geom_o 6 | from . import pathloss 7 | from . import propagation as prop 8 | from .
import utils 9 | 10 | 11 | def simulate(network, max_pl=150): 12 | """Simulates the connections from one to all other vehicles using pathloss functions. 13 | The function finds the center vehicle, determines the propagation condition from it to all other vehicles, then 14 | determines the corresponding pathloss and finally checks if there is a connection. 15 | 16 | Parameters 17 | ---------- 18 | network : dict 19 | A dictionary containing the buildings, street network and vehicles 20 | max_pl : float 21 | The maximum pathloss for 2 vehicles to be connected 22 | """ 23 | 24 | # Initialize 25 | vehs = network['vehs'] 26 | graph_streets_wave = network['graph_streets_wave'] 27 | gdf_buildings = network['gdf_buildings'] 28 | count_veh = vehs.count 29 | vehs.allocate(count_veh) 30 | 31 | # Find center vehicle 32 | time_start = utils.debug(None, 'Finding center vehicle') 33 | idx_center_veh = geom_o.find_center_veh(vehs.get()) 34 | idxs_other_vehs = np.where(np.arange(count_veh) != idx_center_veh)[0] 35 | vehs.add_key('center', idx_center_veh) 36 | vehs.add_key('other', idxs_other_vehs) 37 | utils.debug(time_start) 38 | 39 | # Determine propagation conditions 40 | time_start = utils.debug(None, 'Determining propagation conditions') 41 | is_nlos = prop.veh_cons_are_nlos(vehs.get_points('center'), 42 | vehs.get_points('other'), gdf_buildings) 43 | vehs.add_key('nlos', idxs_other_vehs[is_nlos]) 44 | is_olos_los = np.invert(is_nlos) 45 | vehs.add_key('olos_los', idxs_other_vehs[is_olos_los]) 46 | utils.debug(time_start) 47 | 48 | # Determine OLOS and LOS 49 | time_start = utils.debug(None, 'Determining OLOS and LOS') 50 | # NOTE: A margin of 2, means round cars with radius 2 meters 51 | is_olos = prop.veh_cons_are_olos(vehs.get_points('center'), 52 | vehs.get_points('olos_los'), margin=2) 53 | is_los = np.invert(is_olos) 54 | vehs.add_key('olos', vehs.get_idxs('olos_los')[is_olos]) 55 | vehs.add_key('los', vehs.get_idxs('olos_los')[is_los]) 56 | utils.debug(time_start) 57 | 58 | # Determine orthogonal and parallel 59 | time_start = utils.debug(None, 'Determining orthogonal and parallel') 60 | 61 | is_orthogonal, coords_intersections = \ 62 | prop.check_if_cons_are_orthogonal(graph_streets_wave, 63 | vehs.get_graph('center'), 64 | vehs.get_graph('nlos'), 65 | max_angle=np.pi) 66 | is_parallel = np.invert(is_orthogonal) 67 | vehs.add_key('ort', vehs.get_idxs('nlos')[is_orthogonal]) 68 | vehs.add_key('par', vehs.get_idxs('nlos')[is_parallel]) 69 | utils.debug(time_start) 70 | 71 | # Determining pathlosses for LOS and OLOS 72 | time_start = utils.debug(None, 'Calculating pathlosses for OLOS and LOS') 73 | 74 | p_loss = pathloss.Pathloss() 75 | distances_olos_los = np.sqrt( 76 | (vehs.get('olos_los')[:, 0] - vehs.get('center')[0]) ** 2 + 77 | (vehs.get('olos_los')[:, 1] - vehs.get('center')[1]) ** 2) 78 | 79 | pathlosses_olos = p_loss.pathloss_olos(distances_olos_los[is_olos]) 80 | vehs.set_pathlosses('olos', pathlosses_olos) 81 | pathlosses_los = p_loss.pathloss_los(distances_olos_los[is_los]) 82 | vehs.set_pathlosses('los', pathlosses_los) 83 | utils.debug(time_start) 84 | 85 | # Determining pathlosses for NLOS orthogonal 86 | time_start = utils.debug( 87 | None, 'Calculating pathlosses for NLOS orthogonal') 88 | 89 | # NOTE: Assumes center vehicle is receiver 90 | # NOTE: Uses airline vehicle -> intersection -> vehicle and not 91 | # street route 92 | distances_orth_tx = np.sqrt( 93 | (vehs.get('ort')[:, 0] - coords_intersections[is_orthogonal, 0]) ** 2 + 94 | (vehs.get('ort')[:, 1] - 
coords_intersections[is_orthogonal, 1]) ** 2) 95 | distances_orth_rx = np.sqrt( 96 | (vehs.get('center')[0] - coords_intersections[is_orthogonal, 0]) ** 2 + 97 | (vehs.get('center')[1] - coords_intersections[is_orthogonal, 1]) ** 2) 98 | pathlosses_orth = p_loss.pathloss_nlos( 99 | distances_orth_rx, distances_orth_tx) 100 | vehs.set_pathlosses('ort', pathlosses_orth) 101 | pathlosses_par = np.Infinity * np.ones(np.sum(is_parallel)) 102 | vehs.set_pathlosses('par', pathlosses_par) 103 | utils.debug(time_start) 104 | 105 | # Determine in range / out of range 106 | time_start = utils.debug(None, 'Determining in range vehicles') 107 | idxs_in_range = vehs.get_pathlosses('other') < max_pl 108 | idxs_out_range = np.invert(idxs_in_range) 109 | vehs.add_key('in_range', vehs.get_idxs('other')[idxs_in_range]) 110 | vehs.add_key('out_range', vehs.get_idxs('other')[idxs_out_range]) 111 | utils.debug(time_start) 112 | -------------------------------------------------------------------------------- /doc/source/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # 4 | # vtovosm documentation build configuration file, created by 5 | # sphinx-quickstart on Wed Jun 7 15:10:03 2017. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | # If extensions (or modules to document with autodoc) are in another directory, 17 | # add these directories to sys.path here. If the directory is relative to the 18 | # documentation root, use os.path.abspath to make it absolute, like shown here. 19 | # 20 | # import os 21 | # import sys 22 | # sys.path.insert(0, os.path.abspath('.')) 23 | 24 | 25 | # -- General configuration ------------------------------------------------ 26 | 27 | # If your documentation needs a minimal Sphinx version, state it here. 28 | # 29 | # needs_sphinx = '1.0' 30 | 31 | # Add any Sphinx extension module names here, as strings. They can be 32 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 33 | # ones. 34 | extensions = ['sphinx.ext.autodoc', 35 | 'sphinx.ext.todo', 36 | 'sphinx.ext.coverage', 37 | 'sphinx.ext.mathjax'] 38 | 39 | # Add any paths that contain templates here, relative to this directory. 40 | templates_path = ['_templates'] 41 | 42 | # The suffix(es) of source filenames. 43 | # You can specify multiple suffix as a list of string: 44 | # 45 | # source_suffix = ['.rst', '.md'] 46 | source_suffix = '.rst' 47 | 48 | # The master toctree document. 49 | master_doc = 'index' 50 | 51 | # General information about the project. 52 | project = 'vtovosm' 53 | copyright = '2017, Markus Gasser, Thomas Blazek' 54 | author = 'Markus Gasser, Thomas Blazek' 55 | 56 | # The version info for the project you're documenting, acts as replacement for 57 | # |version| and |release|, also used in various other places throughout the 58 | # built documents. 59 | # 60 | # The short X.Y version. 61 | version = '0.1' 62 | # The full version, including alpha/beta/rc tags. 63 | release = '0.1' 64 | 65 | # The language for content autogenerated by Sphinx. Refer to documentation 66 | # for a list of supported languages. 67 | # 68 | # This is also used if you do content translation via gettext catalogs. 
69 | # Usually you set "language" from the command line for these cases. 70 | language = None 71 | 72 | # List of patterns, relative to source directory, that match files and 73 | # directories to ignore when looking for source files. 74 | # This patterns also effect to html_static_path and html_extra_path 75 | exclude_patterns = [] 76 | 77 | # The name of the Pygments (syntax highlighting) style to use. 78 | pygments_style = 'sphinx' 79 | 80 | # If true, `todo` and `todoList` produce output, else they produce nothing. 81 | todo_include_todos = True 82 | 83 | 84 | # -- Options for HTML output ---------------------------------------------- 85 | 86 | # The theme to use for HTML and HTML Help pages. See the documentation for 87 | # a list of builtin themes. 88 | # 89 | html_theme = 'alabaster' 90 | 91 | # Theme options are theme-specific and customize the look and feel of a theme 92 | # further. For a list of options available for each theme, see the 93 | # documentation. 94 | # 95 | # html_theme_options = {} 96 | 97 | # Add any paths that contain custom static files (such as style sheets) here, 98 | # relative to this directory. They are copied after the builtin static files, 99 | # so a file named "default.css" will overwrite the builtin "default.css". 100 | html_static_path = ['_static'] 101 | 102 | 103 | # -- Options for HTMLHelp output ------------------------------------------ 104 | 105 | # Output file base name for HTML help builder. 106 | htmlhelp_basename = 'vtovosmdoc' 107 | 108 | 109 | # -- Options for LaTeX output --------------------------------------------- 110 | 111 | latex_elements = { 112 | # The paper size ('letterpaper' or 'a4paper'). 113 | # 114 | # 'papersize': 'letterpaper', 115 | 116 | # The font size ('10pt', '11pt' or '12pt'). 117 | # 118 | # 'pointsize': '10pt', 119 | 120 | # Additional stuff for the LaTeX preamble. 121 | # 122 | # 'preamble': '', 123 | 124 | # Latex figure (float) alignment 125 | # 126 | # 'figure_align': 'htbp', 127 | } 128 | 129 | # Grouping the document tree into LaTeX files. List of tuples 130 | # (source start file, target name, title, 131 | # author, documentclass [howto, manual, or own class]). 132 | latex_documents = [ 133 | (master_doc, 'vtovosm.tex', 'vtovosm Documentation', 134 | 'Markus Gasser, Thomas Blazek', 'manual'), 135 | ] 136 | 137 | 138 | # -- Options for manual page output --------------------------------------- 139 | 140 | # One entry per manual page. List of tuples 141 | # (source start file, name, description, authors, manual section). 142 | man_pages = [ 143 | (master_doc, 'vtovosm', 'vtovosm Documentation', 144 | [author], 1) 145 | ] 146 | 147 | 148 | # -- Options for Texinfo output ------------------------------------------- 149 | 150 | # Grouping the document tree into Texinfo files. 
List of tuples 151 | # (source start file, target name, title, author, 152 | # dir menu entry, description, category) 153 | texinfo_documents = [ 154 | (master_doc, 'vtovosm', 'vtovosm Documentation', 155 | author, 'vtovosm', 'One line description of project.', 156 | 'Miscellaneous'), 157 | ] 158 | 159 | 160 | 161 | -------------------------------------------------------------------------------- /vtovosm/simulations/network_config/default.json: -------------------------------------------------------------------------------- 1 | { 2 | "global": { 3 | "scenario": "default", 4 | "mail_to": "mgasser@nt.tuwien.ac.at", 5 | "send_mail": true, 6 | "loglevel": "INFO", 7 | "overwrite_result": true 8 | }, 9 | "default": { 10 | "place": "Neubau - Vienna - Austria", 11 | "which_result": null, 12 | "distribution_veh": "uniform", 13 | "iterations": 5, 14 | "density_type": "absolute", 15 | "densities_veh": [ 16 | 10, 17 | 20, 18 | 50 19 | ], 20 | "connection_metric": "distance", 21 | "max_connection_metric": { 22 | "olos_los": 250, 23 | "nlos": 140 24 | }, 25 | "simulation_mode": "sequential", 26 | "building_tolerance": 1, 27 | "analyze_results": "all" 28 | }, 29 | "uniform_distance_sequential": { 30 | "place": "Neubau - Vienna - Austria", 31 | "which_result": null, 32 | "distribution_veh": "uniform", 33 | "iterations": 5, 34 | "density_type": "absolute", 35 | "densities_veh": [ 36 | 50, 37 | 100 38 | ], 39 | "connection_metric": "distance", 40 | "max_connection_metric": { 41 | "olos_los": 250, 42 | "nlos": 140 43 | }, 44 | "simulation_mode": "sequential", 45 | "analyze_results": "all" 46 | }, 47 | "sumo_distance_sequential": { 48 | "place": "Neubau - Vienna - Austria", 49 | "which_result": null, 50 | "distribution_veh": "SUMO", 51 | "density_type": "absolute", 52 | "densities_veh": [ 53 | 50, 54 | 100 55 | ], 56 | "connection_metric": "distance", 57 | "max_connection_metric": { 58 | "olos_los": 250, 59 | "nlos": 140 60 | }, 61 | "simulation_mode": "sequential", 62 | "analyze_results": "all", 63 | "sumo": { 64 | "sim_duration": 200, 65 | "warmup_duration": 100, 66 | "fringe_factor": 1, 67 | "tls_settings": { 68 | "cycle_time": 45, 69 | "yellow_time": 2 70 | }, 71 | "max_speed": 10, 72 | "intermediate_points": 100, 73 | "skip_sumo": false, 74 | "abort_after_sumo": false 75 | } 76 | }, 77 | "uniform_pathloss_sequential": { 78 | "place": "Neubau - Vienna - Austria", 79 | "which_result": null, 80 | "distribution_veh": "uniform", 81 | "iterations": 5, 82 | "density_type": "absolute", 83 | "densities_veh": [ 84 | 10, 85 | 50, 86 | 75 87 | ], 88 | "connection_metric": "pathloss", 89 | "max_connection_metric": 116, 90 | "simulation_mode": "sequential", 91 | "analyze_results": "all" 92 | }, 93 | "demo_neubau": { 94 | "place": "Neubau - Vienna - Austria", 95 | "which_result": null, 96 | "distribution_veh": "uniform", 97 | "density_type": "absolute", 98 | "densities_veh": 250, 99 | "connection_metric": "pathloss", 100 | "max_connection_metric": 120, 101 | "simulation_mode": "demo", 102 | "save_plot": true, 103 | "plot_dir": "images/demo_neubau" 104 | }, 105 | "demo_upperwestside": { 106 | "place": "Upper West Side - New York - USA", 107 | "which_result": null, 108 | "distribution_veh": "uniform", 109 | "density_type": "absolute", 110 | "densities_veh": 250, 111 | "connection_metric": "pathloss", 112 | "max_connection_metric": 120, 113 | "simulation_mode": "demo", 114 | "save_plot": true, 115 | "plot_dir": "images/demo_upperwestside" 116 | }, 117 | "demo_sumo_neubau": { 118 | "place": "Neubau - Vienna - Austria", 119 
| "which_result": null, 120 | "distribution_veh": "SUMO", 121 | "density_type": "absolute", 122 | "densities_veh": 250, 123 | "connection_metric": "distance", 124 | "max_connection_metric": { 125 | "olos_los": 250, 126 | "nlos": 140 127 | }, 128 | "simulation_mode": "sequential", 129 | "sumo": { 130 | "sim_duration": 600, 131 | "warmup_duration": 0, 132 | "fringe_factor": 1, 133 | "tls_settings": { 134 | "cycle_time": 45, 135 | "yellow_time": 2 136 | }, 137 | "max_speed": 10, 138 | "intermediate_points": 100, 139 | "skip_sumo": false, 140 | "abort_after_sumo": true 141 | }, 142 | "save_plot": true, 143 | "plot_dir": "images/demo_sumo_neubau" 144 | }, 145 | "demo_sumo_upperwestside": { 146 | "place": "Upper West Side - New York - USA", 147 | "which_result": null, 148 | "distribution_veh": "SUMO", 149 | "density_type": "absolute", 150 | "densities_veh": 250, 151 | "connection_metric": "distance", 152 | "max_connection_metric": { 153 | "olos_los": 250, 154 | "nlos": 140 155 | }, 156 | "simulation_mode": "sequential", 157 | "sumo": { 158 | "sim_duration": 600, 159 | "warmup_duration": 0, 160 | "fringe_factor": 1, 161 | "tls_settings": { 162 | "cycle_time": 45, 163 | "yellow_time": 2 164 | }, 165 | "max_speed": 10, 166 | "intermediate_points": 100, 167 | "skip_sumo": false, 168 | "abort_after_sumo": true 169 | }, 170 | "save_plot": true, 171 | "plot_dir": "images/demo_sumo_upperwestside" 172 | } 173 | } 174 | -------------------------------------------------------------------------------- /vtovosm/geometry.py: -------------------------------------------------------------------------------- 1 | """Geometrical functionality""" 2 | 3 | import numpy as np 4 | import shapely.ops as ops 5 | 6 | 7 | def line_intersects_buildings(line, buildings): 8 | """Returns `True` if `line` intersects with any of the `buildings`. 9 | 10 | Parameters 11 | ---------- 12 | line : shapely.geometry.LineString 13 | Geometrical line 14 | buildings : geopandas.GeoDataFrame 15 | Buildings inside a geodata frame 16 | 17 | Returns 18 | ------- 19 | intersects : bool 20 | True if `line` intersects buildings, otherwise false 21 | """ 22 | """ """ 23 | 24 | intersects = False 25 | for geometry in buildings.geometry: 26 | if line.intersects(geometry): 27 | intersects = True 28 | break 29 | 30 | return intersects 31 | 32 | 33 | def line_intersects_points(line, points, margin=1): 34 | """Returns `True` if `line` intersects with any of the `points` within a `margin`. 35 | 36 | Parameters 37 | ---------- 38 | line : shapely.geometry.LineString 39 | Geometrical line 40 | points : list of shapely.geometry.Point 41 | A list of geometrical points 42 | margin : float 43 | The maximum margin between `line` and `points` 44 | 45 | Returns 46 | ------- 47 | intersects : bool 48 | True if `line` intersects `points`, otherwise false 49 | 50 | """ 51 | 52 | intersects = False 53 | 54 | for point in points: 55 | circle = point.buffer(margin) 56 | intersects = circle.intersects(line) 57 | if intersects: 58 | break 59 | 60 | return intersects 61 | 62 | 63 | def get_street_lengths(graph_streets): 64 | """Returns the lengths of the streets in `graph_streets`. 
65 | 
66 |     Parameters
67 |     ----------
68 |     graph_streets : networkx.MultiDiGraph
69 |         Street network graph generated by osmnx
70 | 
71 |     Returns
72 |     -------
73 |     lengths : numpy.ndarray
74 |         Length of each street (edge) in the graph
75 | 
76 |     Notes
77 |     -----
78 |     There are small differences in the values of data['geometry'].length
79 |     and data['length']
80 |     """
81 | 
82 |     lengths = np.zeros(graph_streets.number_of_edges())
83 |     for index, street in enumerate(graph_streets.edges_iter(data=True)):
84 |         lengths[index] = street[2]['length']
85 |     return lengths
86 | 
87 | 
88 | def extract_point_array(points):
89 |     """Extracts coordinates from a point array
90 | 
91 |     Parameters
92 |     ----------
93 |     points : list of shapely.geometry.Point
94 | 
95 |     Returns
96 |     -------
97 |     coords : numpy.ndarray
98 |         Coordinates
99 |     """
100 | 
101 |     coords = np.zeros([np.size(points), 2], dtype=float)
102 | 
103 |     for index, point in np.ndenumerate(points):
104 |         coords[index, :] = np.transpose(point.xy)
105 | 
106 |     return coords
107 | 
108 | 
109 | def find_center_veh(coords):
110 |     """Finds the index of the centermost coordinates.
111 | 
112 |     Parameters
113 |     ----------
114 |     coords : numpy.ndarray
115 |         Coordinates
116 | 
117 |     Returns
118 |     -------
119 |     index_center_veh : int
120 |         Index of the centermost vehicle
121 | 
122 |     """
123 | 
124 |     min_x = np.amin(coords[:, 0])
125 |     max_x = np.amax(coords[:, 0])
126 |     min_y = np.amin(coords[:, 1])
127 |     max_y = np.amax(coords[:, 1])
128 |     mean_x = (min_x + max_x) / 2
129 |     mean_y = (min_y + max_y) / 2
130 |     coords_center = np.array((mean_x, mean_y))
131 |     distances_center = np.linalg.norm(
132 |         coords_center - coords, ord=2, axis=1)
133 |     index_center_veh = np.argmin(distances_center)
134 |     return index_center_veh
135 | 
136 | 
137 | def split_line_at_point(line, point):
138 |     """Splits the `line` at the `point` on the line and returns its two parts.
139 | 
140 |     Parameters
141 |     ----------
142 |     line : shapely.geometry.LineString
143 |     point : shapely.geometry.Point
144 | 
145 |     Returns
146 |     -------
147 |     line_before: shapely.geometry.LineString
148 |         Part of `line` before `point`
149 |     line_after: shapely.geometry.LineString
150 |         Part of `line` after `point`
151 | 
152 |     """
153 | 
154 |     if line.distance(point) > 1e-8:
155 |         raise ValueError('Point not on line')
156 | 
157 |     # Use small buffer polygon instead of point to deal with floating point precision
158 |     circle = point.buffer(1e-8)
159 |     line_split = ops.split(line, circle)
160 |     line_before = line_split[0]
161 |     line_after = line_split[-1]
162 | 
163 |     return line_before, line_after
164 | 
165 | 
166 | def angles_along_line(line):
167 |     """Determines the `angles` along the `line` string.
168 |     For a line consisting of n segments the function returns n-1 angles where the i-th element is the angle between
169 |     segment i and i+1.
170 | 171 | Parameters 172 | ---------- 173 | line : shapely.geometry.LineString 174 | 175 | Returns 176 | ------- 177 | angles : numpy.ndarray 178 | """ 179 | 180 | coords = line.coords 181 | angles = np.zeros(len(coords) - 2) 182 | angle_temp_prev = 0 183 | 184 | for index, coord in enumerate(coords[1:]): 185 | coord_prev = coords[index] 186 | angle_temp = np.arctan2( 187 | coord[0] - coord_prev[0], coord[1] - coord_prev[1]) 188 | if index != 0: 189 | if angle_temp - angle_temp_prev < np.pi: 190 | angles[index - 1] = angle_temp - angle_temp_prev + np.pi 191 | else: 192 | angles[index - 1] = angle_temp - angle_temp_prev - np.pi 193 | angle_temp_prev = angle_temp 194 | 195 | return angles 196 | 197 | 198 | def wrap_to_pi(angle): 199 | """Limits angle from -pi to +pi. 200 | 201 | Parameters 202 | ---------- 203 | angle : float 204 | 205 | Returns 206 | ------- 207 | wrapped_angle : float 208 | """ 209 | 210 | return (angle + np.pi) % (2 * np.pi) - np.pi 211 | -------------------------------------------------------------------------------- /vtovosm/pathloss.py: -------------------------------------------------------------------------------- 1 | """ Provides pathloss functions as defined in : 2 | Abbas, Taimoor, et al. 3 | "A measurement based shadow fading model for vehicle-to-vehicle network simulations." 4 | International Journal of Antennas and Propagation 2015 (2015). 5 | """ 6 | 7 | import logging 8 | 9 | import numpy as np 10 | 11 | 12 | class Pathloss: 13 | """ Class providing the pathloss functions for NLOS, OLOS and LOS propagation""" 14 | 15 | def __init__(self, nlos_config=None, los_config=None, olos_config=None): 16 | if nlos_config is None: 17 | # NOTE: dist_break assumes a vehicle height of 1.5 m, and a 18 | # frequency of 5.9 GHz (d_b = 4*h_t*h_r/lambda) 19 | self.nlos_config = { 20 | 'wavelength': 0.050812281, 21 | 'width_rx_street': 10, 22 | 'dist_tx_wall': 5, 23 | 'is_sub_urban': False, 24 | 'pathloss_exp': 2.69, 25 | 'dist_break': 44.25, 26 | 'standard_dev': 4.1} 27 | else: 28 | self.nlos_config = nlos_config 29 | 30 | if los_config is None: 31 | self.los_config = { 32 | 'dist_ref': 10, 33 | 'dist_break': 104, 34 | 'pathloss_exp_1': -1.81, 35 | 'pathloss_exp_2': -2.85, 36 | 'pathloss_ref': -63.9, 37 | 'standard_dev': 4.15} 38 | else: 39 | self.los_config = los_config 40 | 41 | if olos_config is None: 42 | self.olos_config = { 43 | 'dist_ref': 10, 44 | 'dist_break': 104, 45 | 'pathloss_exp_1': -1.93, 46 | 'pathloss_exp_2': -2.74, 47 | 'pathloss_ref': -72.3, 48 | 'standard_dev': 6.67} 49 | else: 50 | self.olos_config = olos_config 51 | 52 | def disable_shadowfading(self): 53 | """Deactivates shadow fading (random component) by setting the standard deviation to zero""" 54 | 55 | self.olos_config['standard_dev'] = 0 56 | self.los_config['standard_dev'] = 0 57 | self.nlos_config['standard_dev'] = 0 58 | 59 | def pathloss_nlos(self, dist_rx, dist_tx): 60 | """Calculates the pathloss for the non line of sight case in equation (6)""" 61 | 62 | # NOTE: Missing in the equation in the paper 63 | sf_loss = np.random.normal( 64 | 0, self.nlos_config['standard_dev'], np.size(dist_rx)) 65 | 66 | slope_selector = dist_rx < self.nlos_config['dist_break'] 67 | pathloss_slope_1 = slope_selector \ 68 | * (3.75 + self.nlos_config['is_sub_urban'] * 2.94 + 10 69 | * np.log10((dist_tx ** 0.957 * 4 * np.pi * dist_rx / 70 | (self.nlos_config['dist_tx_wall'] 71 | * self.nlos_config['width_rx_street']) ** 72 | 0.81 / self.nlos_config['wavelength']) 73 | ** self.nlos_config['pathloss_exp'])) 74 | 
pathloss_slope_2 = np.invert(slope_selector) \ 75 | * (3.75 + self.nlos_config['is_sub_urban'] * 2.94 + 10 76 | * np.log10((dist_tx ** 0.957 * 4 * np.pi * dist_rx ** 2 77 | / (self.nlos_config['dist_tx_wall'] * 78 | self.nlos_config['width_rx_street']) ** 0.81 79 | / (self.nlos_config['wavelength'] * self.nlos_config['dist_break'])) 80 | ** self.nlos_config['pathloss_exp'])) 81 | 82 | pathloss = pathloss_slope_1 + pathloss_slope_2 + sf_loss 83 | return pathloss 84 | 85 | def pathloss_los(self, dist): 86 | """Calculates the pathloss for the line of sight case defined in equation (5)""" 87 | 88 | sf_loss = np.random.normal( 89 | 0, self.los_config['standard_dev'], np.size(dist)) 90 | 91 | if (np.isscalar(dist) and dist < self.los_config['dist_ref']) or \ 92 | (not np.isscalar(dist) and any(dist < self.los_config['dist_ref'])): 93 | logging.warning('Distance smaller than reference distance') 94 | 95 | slope_selector = dist < self.los_config['dist_break'] 96 | pathloss_slope_1 = slope_selector \ 97 | * (self.los_config['pathloss_ref'] + 10 * self.los_config['pathloss_exp_1'] 98 | * np.log10(dist / self.los_config['dist_ref'])) 99 | pathloss_slope_2 = np.invert(slope_selector) \ 100 | * (self.los_config['pathloss_ref'] + 10 * self.los_config['pathloss_exp_1'] 101 | * np.log10(self.los_config['dist_break'] / self.los_config['dist_ref']) 102 | + 10 * self.los_config['pathloss_exp_2'] 103 | * np.log10(dist / self.los_config['dist_break'])) 104 | 105 | # NOTE: Invert sign to keep consistency with NLOS 106 | pathloss = - (pathloss_slope_1 + pathloss_slope_2 + sf_loss) 107 | return pathloss 108 | 109 | def pathloss_olos(self, dist): 110 | """Calculates the pathloss for the obstructed line of sight case defined in equation (5)""" 111 | 112 | sf_loss = np.random.normal( 113 | 0, self.olos_config['standard_dev'], np.size(dist)) 114 | 115 | if (np.isscalar(dist) and dist < self.olos_config['dist_ref']) or \ 116 | (not np.isscalar(dist) and any(dist < self.olos_config['dist_ref'])): 117 | logging.warning('Distance smaller than reference distance') 118 | 119 | slope_selector = dist < self.olos_config['dist_break'] 120 | pathloss_slope_1 = slope_selector \ 121 | * (self.olos_config['pathloss_ref'] + 10 * self.olos_config['pathloss_exp_1'] 122 | * np.log10(dist / self.olos_config['dist_ref'])) 123 | pathloss_slope_2 = np.invert(slope_selector) \ 124 | * (self.olos_config['pathloss_ref'] + 10 * self.olos_config['pathloss_exp_1'] 125 | * np.log10(self.olos_config['dist_break'] / self.olos_config['dist_ref']) 126 | + 10 * self.olos_config['pathloss_exp_2'] * 127 | np.log10(dist / self.olos_config['dist_break'])) 128 | 129 | # NOTE: Invert sign to keep consistency with NLOS 130 | pathloss = - (pathloss_slope_1 + pathloss_slope_2 + sf_loss) 131 | return pathloss 132 | -------------------------------------------------------------------------------- /vtovosm/tests/network_config/tests.json: -------------------------------------------------------------------------------- 1 | { 2 | "global": { 3 | "scenario": "uniform_distance_parallel", 4 | "mail_to": "mgasser@nt.tuwien.ac.at", 5 | "send_mail": true, 6 | "loglevel": "WARNING", 7 | "overwrite_result": true, 8 | "results_file_dir": "results/tests" 9 | }, 10 | "default": { 11 | "place": "Salmannsdorf - Vienna - Austria", 12 | "which_result": null, 13 | "distribution_veh": "uniform", 14 | "iterations": 3, 15 | "density_type": "absolute", 16 | "densities_veh": [ 17 | 10, 18 | 20, 19 | 30 20 | ], 21 | "connection_metric": "distance", 22 | "max_connection_metric": { 23 | 
"olos_los": 250, 24 | "nlos": 140 25 | }, 26 | "simulation_mode": "sequential", 27 | "analyze_results": "all" 28 | }, 29 | "simplify_buildings": { 30 | "place": "Salmannsdorf - Vienna - Austria", 31 | "which_result": null, 32 | "distribution_veh": "uniform", 33 | "iterations": 3, 34 | "density_type": "absolute", 35 | "densities_veh": [ 36 | 1 37 | ], 38 | "connection_metric": "distance", 39 | "max_connection_metric": { 40 | "olos_los": 250, 41 | "nlos": 140 42 | }, 43 | "simulation_mode": "sequential", 44 | "building_tolerance": 1 45 | }, 46 | "sumo_distance_paralell": { 47 | "place": "Salmannsdorf - Vienna - Austria", 48 | "which_result": null, 49 | "distribution_veh": "SUMO", 50 | "density_type": "area", 51 | "densities_veh": [ 52 | { 53 | "start": 1e-6, 54 | "stop": 8e-6, 55 | "num": 3 56 | }, 57 | { 58 | "start": 12e-6, 59 | "stop": 16e-6, 60 | "num": 2 61 | } 62 | ], 63 | "connection_metric": "distance", 64 | "max_connection_metric": { 65 | "olos_los": 250, 66 | "nlos": 140 67 | }, 68 | "simulation_mode": "parallel", 69 | "sumo": { 70 | "sim_duration": 120, 71 | "warmup_duration": 60, 72 | "fringe_factor": 1, 73 | "tls_settings": { 74 | "cycle_time": 45, 75 | "yellow_time": 2 76 | }, 77 | "max_speed": 10, 78 | "intermediate_points": 100, 79 | "skip_sumo": false, 80 | "abort_after_sumo": false, 81 | "directory": "sumo_data/tests" 82 | }, 83 | "analyze_results": "all" 84 | }, 85 | "uniform_distance_sequential": { 86 | "place": "Salmannsdorf - Vienna - Austria", 87 | "which_result": null, 88 | "distribution_veh": "uniform", 89 | "iterations": 5, 90 | "density_type": "absolute", 91 | "densities_veh": [ 92 | 10, 93 | 20 94 | ], 95 | "connection_metric": "distance", 96 | "max_connection_metric": { 97 | "olos_los": 250, 98 | "nlos": 140 99 | }, 100 | "simulation_mode": "sequential", 101 | "analyze_results": "all" 102 | }, 103 | "uniform_distance_parallel": { 104 | "place": "Salmannsdorf - Vienna - Austria", 105 | "which_result": null, 106 | "distribution_veh": "uniform", 107 | "iterations": 5, 108 | "density_type": "absolute", 109 | "densities_veh": [ 110 | 10, 111 | 20 112 | ], 113 | "connection_metric": "distance", 114 | "max_connection_metric": { 115 | "olos_los": 250, 116 | "nlos": 140 117 | }, 118 | "simulation_mode": "parallel", 119 | "analyze_results": "all" 120 | }, 121 | "uniform_pathloss_sequential": { 122 | "place": "Salmannsdorf - Vienna - Austria", 123 | "which_result": null, 124 | "distribution_veh": "uniform", 125 | "iterations": 5, 126 | "density_type": "absolute", 127 | "densities_veh": [ 128 | 10, 129 | 15, 130 | 20 131 | ], 132 | "connection_metric": "pathloss", 133 | "max_connection_metric": 116, 134 | "simulation_mode": "sequential", 135 | "analyze_results": "all" 136 | }, 137 | "demo": { 138 | "place": "Salmannsdorf - Vienna - Austria", 139 | "which_result": null, 140 | "distribution_veh": "uniform", 141 | "density_type": "absolute", 142 | "densities_veh": 50, 143 | "connection_metric": "pathloss", 144 | "max_connection_metric": 150, 145 | "simulation_mode": "demo", 146 | "save_plot": true, 147 | "plot_dir": "images/demo_tests" 148 | }, 149 | "demo_sumo_seq": { 150 | "place": "Salmannsdorf - Vienna - Austria", 151 | "which_result": null, 152 | "distribution_veh": "SUMO", 153 | "density_type": "absolute", 154 | "densities_veh": [ 155 | 3 156 | ], 157 | "connection_metric": "distance", 158 | "max_connection_metric": { 159 | "olos_los": 250, 160 | "nlos": 140 161 | }, 162 | "simulation_mode": "sequential", 163 | "sumo": { 164 | "sim_duration": 20, 165 | "warmup_duration": 
5, 166 | "fringe_factor": 1, 167 | "tls_settings": { 168 | "cycle_time": 45, 169 | "yellow_time": 2 170 | }, 171 | "max_speed": 10, 172 | "intermediate_points": 5, 173 | "skip_sumo": false, 174 | "abort_after_sumo": true 175 | }, 176 | "save_plot": true, 177 | "plot_dir": "images/demo_sumo_seq_tests" 178 | }, 179 | "demo_sumo_par": { 180 | "place": "Salmannsdorf - Vienna - Austria", 181 | "which_result": null, 182 | "distribution_veh": "SUMO", 183 | "density_type": "absolute", 184 | "densities_veh": [ 185 | 3 186 | ], 187 | "connection_metric": "distance", 188 | "max_connection_metric": { 189 | "olos_los": 250, 190 | "nlos": 140 191 | }, 192 | "simulation_mode": "parallel", 193 | "sumo": { 194 | "sim_duration": 20, 195 | "warmup_duration": 5, 196 | "fringe_factor": 1, 197 | "tls_settings": { 198 | "cycle_time": 45, 199 | "yellow_time": 2 200 | }, 201 | "max_speed": 10, 202 | "intermediate_points": 5, 203 | "skip_sumo": false, 204 | "abort_after_sumo": true 205 | }, 206 | "save_plot": true, 207 | "plot_dir": "images/demo_sumo_par_tests" 208 | } 209 | } 210 | -------------------------------------------------------------------------------- /vtovosm/network_parser.py: -------------------------------------------------------------------------------- 1 | """Provides configuration based network generation""" 2 | 3 | import json 4 | import os 5 | 6 | import numpy as np 7 | 8 | from . import osmnx_addons as ox_a 9 | 10 | MODULE_PATH = os.path.dirname(__file__) 11 | DEFAULT_CONFIG_DIR = os.path.join(MODULE_PATH, 'simulations', 'network_config') 12 | DEFAULT_CONFIG_PATH = os.path.join(DEFAULT_CONFIG_DIR, 'default.json') 13 | 14 | 15 | def network_from_conf(in_key="default", config_file=DEFAULT_CONFIG_PATH): 16 | """Load a network from the settings in a json file. 17 | 18 | Abstracts away load_network call. 19 | """ 20 | conf = params_from_conf(in_key, config_file) 21 | return ox_a.load_network(conf["place"], conf["which_result"]) 22 | 23 | 24 | def params_from_conf(in_key="global", config_file=DEFAULT_CONFIG_PATH): 25 | """Load a parameter set from the given config_file. 26 | 27 | global_config: configuration params that are independent on chosen network. 
28 |     otherwise: network parameters for load_network, e.g. the place, vehicle densities and connection ranges."""
29 |     with open(config_file, "r") as file_pointer:
30 |         conf = json.load(file_pointer)
31 |     return conf[in_key]
32 | 
33 | 
34 | def get_scenarios_list(config_file=DEFAULT_CONFIG_PATH):
35 |     """Returns a list of scenarios that are defined in the JSON config"""
36 | 
37 |     with open(config_file, 'r') as file:
38 |         config = json.load(file)
39 | 
40 |     scenarios = list(config.keys())
41 |     scenarios.remove('global')
42 | 
43 |     return scenarios
44 | 
45 | 
46 | def check_fill_config(config):
47 |     """Checks mandatory settings and sets unset SUMO settings to defaults"""
48 | 
49 |     # Mandatory settings
50 |     if 'scenario' not in config:
51 |         raise KeyError('Scenario not set')
52 | 
53 |     if 'place' not in config:
54 |         raise KeyError('Place not set')
55 | 
56 |     if 'distribution_veh' not in config:
57 |         raise KeyError('Distribution type not set')
58 |     else:
59 |         if config['distribution_veh'] not in ['SUMO', 'uniform']:
60 |             raise KeyError('Vehicle distribution method not supported')
61 | 
62 |     if config['distribution_veh'] == 'uniform' and config['simulation_mode'] != 'demo':
63 |         if 'iterations' not in config:
64 |             raise KeyError('Number of iterations not set')
65 | 
66 |     if config['simulation_mode'] == 'demo':
67 |         if isinstance(config['densities_veh'], (list, tuple)):
68 |             raise KeyError('Only a single density supported in demo mode')
69 | 
70 |         if config['distribution_veh'] != 'uniform':
71 |             raise KeyError('Only uniform vehicle distribution supported in demo mode')
72 | 
73 |         if config['connection_metric'] != 'pathloss':
74 |             raise KeyError('Only pathloss as connection metric supported in demo mode')
75 | 
76 |     if 'densities_veh' not in config:
77 |         raise KeyError('Vehicle densities not set')
78 | 
79 |     if 'connection_metric' not in config:
80 |         raise KeyError('Connection metric not set')
81 | 
82 |     if 'max_connection_metric' not in config:
83 |         raise KeyError('Maximum connection metric not set')
84 | 
85 |     if 'simulation_mode' not in config:
86 |         raise KeyError('Simulation mode not set')
87 | 
88 |     # Optional settings
89 |     if 'overwrite_result' not in config:
90 |         config['overwrite_result'] = False
91 | 
92 |     if 'send_mail' not in config:
93 |         config['send_mail'] = False
94 |     else:
95 |         if config['send_mail']:
96 |             if 'mail_to' not in config:
97 |                 raise KeyError('Email address not set')
98 | 
99 |     if 'save_plot' not in config:
100 |         config['save_plot'] = False
101 | 
102 |     if config['save_plot']:
103 |         if 'plot_dir' not in config:
104 |             config['plot_dir'] = None
105 | 
106 |     if 'loglevel' not in config:
107 |         config['loglevel'] = 'INFO'
108 | 
109 |     if 'which_result' not in config:
110 |         config['which_result'] = None
111 | 
112 |     if 'building_tolerance' not in config:
113 |         config['building_tolerance'] = 0
114 | 
115 |     if 'results_file_prefix' not in config:
116 |         config['results_file_prefix'] = None
117 | 
118 |     if 'results_file_dir' not in config:
119 |         config['results_file_dir'] = None
120 | 
121 |     if 'analyze_results' not in config:
122 |         config['analyze_results'] = None
123 |     elif not isinstance(config['analyze_results'], (list, tuple, type(None))):
124 |         config['analyze_results'] = [config['analyze_results']]
125 | 
126 |     if (config['simulation_mode'] == 'parallel') and ('processes' not in config):
127 |         config['processes'] = None
128 | 
129 |     # Optional SUMO settings
130 |     if config['distribution_veh'] == 'SUMO':
131 |         if 'sumo' not in config:
132 |             config['sumo'] = {}
133 |         if 'tls_settings' not in config['sumo']:
134 | 
config['sumo']['tls_settings'] = None 135 | if 'fringe_factor' not in config['sumo']: 136 | config['sumo']['fringe_factor'] = None 137 | if 'max_speed' not in config['sumo']: 138 | config['sumo']['max_speed'] = None 139 | if 'intermediate_points' not in config['sumo']: 140 | config['sumo']['intermediate_points'] = None 141 | if 'warmup_duration' not in config['sumo']: 142 | config['sumo']['warmup_duration'] = None 143 | if 'abort_after_sumo' not in config['sumo']: 144 | config['sumo']['abort_after_sumo'] = False 145 | if 'skip_sumo' not in config['sumo']: 146 | config['sumo']['skip_sumo'] = False 147 | if 'directory' not in config['sumo']: 148 | config['sumo']['directory'] = 'sumo_data/' 149 | if 'veh_rate_factor' not in config['sumo']: 150 | config['sumo']['veh_rate_factor'] = None 151 | if 'coordinate_tls' not in config['sumo']: 152 | config['sumo']['coordinate_tls'] = True 153 | 154 | # Convert densities 155 | config['densities_veh'] = convert_densities(config['densities_veh']) 156 | 157 | return config 158 | 159 | 160 | def convert_densities(config_densities): 161 | """Converts the density parameters from the configuration to a simple array""" 162 | 163 | if isinstance(config_densities, (list, tuple)): 164 | densities = np.zeros(0) 165 | for density_in in config_densities: 166 | if isinstance(density_in, dict): 167 | density = np.linspace(**density_in) 168 | else: 169 | density = density_in 170 | densities = np.append(densities, density) 171 | else: 172 | densities = np.array([config_densities]) 173 | 174 | return densities 175 | 176 | 177 | def merge(orig, update, path=None): 178 | """Deep merges update into orig. (dict.update only shallow merges)""" 179 | 180 | if path is None: path = [] 181 | for key in update: 182 | if key in orig: 183 | if isinstance(orig[key], dict) and isinstance(update[key], dict): 184 | merge(orig[key], update[key], path + [str(key)]) 185 | elif orig[key] == update[key]: 186 | pass 187 | else: 188 | raise Exception('Conflict at %s' % '.'.join(path + [str(key)])) 189 | else: 190 | orig[key] = update[key] 191 | return orig 192 | -------------------------------------------------------------------------------- /vtovosm/vehicles.py: -------------------------------------------------------------------------------- 1 | """ Distributes vehicles along streets""" 2 | 3 | import networkx as nx 4 | import numpy as np 5 | 6 | from . import geometry as geom_o 7 | from . import utils 8 | 9 | 10 | class Vehicles: 11 | """Class representing vehicles with their properties and relations 12 | to each other.""" 13 | 14 | # TODO: only points as attributes and get coordinates from points when requested? 
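    # A minimal usage sketch of this class (illustrative only; the points and
    # pathloss values below are made-up sample data, not taken from the package).
    # 'center' and 'other' are the keys the plotting helpers in plot.py expect:
    #
    #     import shapely.geometry as geom
    #     from vtovosm.vehicles import Vehicles
    #
    #     points = [geom.Point(0, 0), geom.Point(50, 0), geom.Point(0, 80)]
    #     vehs = Vehicles(points)
    #     vehs.allocate(vehs.count)          # storage for relational properties
    #     vehs.add_key('center', 0)          # tag vehicle 0 as the center vehicle
    #     vehs.add_key('other', [1, 2])      # tag the remaining vehicles
    #     vehs.set_pathlosses('other', [95.0, 110.0])
    #     vehs.get('other')                  # coordinates of vehicles 1 and 2
    #     vehs.get_pathlosses('other')       # array([ 95., 110.])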
15 | 16 | def __init__(self, points, graphs=None, size=0): 17 | self.count = np.size(points) 18 | self.points = points 19 | self.coordinates = geom_o.extract_point_array(points) 20 | self.graphs = graphs 21 | self.pathlosses = np.zeros(size) 22 | self.distances = np.zeros(size) 23 | self.nlos = np.zeros(size, dtype=bool) 24 | self.idxs = {} 25 | 26 | def allocate(self, size): 27 | """Allocate memory for relational properties""" 28 | 29 | self.pathlosses = np.zeros(size) 30 | self.distances = np.zeros(size) 31 | self.nlos = np.zeros(size, dtype=bool) 32 | 33 | def add_key(self, key, value): 34 | """Add a key that can then be used to retrieve a subset 35 | of the properties/relations""" 36 | 37 | self.idxs[key] = value 38 | 39 | def get(self, key=None): 40 | """"Get the coordinates of a set of vehicles specified by a key""" 41 | 42 | if key is None or key == "all": 43 | return self.coordinates 44 | else: 45 | return self.coordinates[self.idxs[key]] 46 | 47 | def get_points(self, key=None): 48 | """"Get the geometry points of a set of vehicles specified by a key""" 49 | 50 | if key is None: 51 | return self.points 52 | else: 53 | return self.points[self.idxs[key]] 54 | 55 | def get_graph(self, key=None): 56 | """"Get the graphs of a set of vehicles specified by a key""" 57 | 58 | if key is None: 59 | return self.graphs 60 | else: 61 | return self.graphs[self.idxs[key]] 62 | 63 | def get_idxs(self, key): 64 | """Get the indices defined by a key""" 65 | 66 | return self.idxs[key] 67 | 68 | def set_pathlosses(self, key, values): 69 | """"Set the pathlosses of a set of relations specified by a key""" 70 | 71 | self.pathlosses[self.idxs[key]] = values 72 | 73 | def get_pathlosses(self, key=None): 74 | """"Get the pathlosses of a set of relations specified by a key""" 75 | 76 | if key is None or key == "all": 77 | return self.pathlosses 78 | else: 79 | return self.pathlosses[self.idxs[key]] 80 | 81 | def set_distances(self, key, values): 82 | """"Set the distances of a set of relations specified by a key""" 83 | 84 | self.distances[self.idxs[key]] = values 85 | 86 | def get_distances(self, key=None): 87 | """"Get the distances of a set of relations specified by a key""" 88 | 89 | if key is None: 90 | return self.distances 91 | else: 92 | return self.distances[self.idxs[key]] 93 | 94 | def __repr__(self): 95 | allowed_keys = list(self.idxs.keys()) 96 | allowed_keys.insert(0, "all") 97 | return ("{} vehicles, allowed keys: {}". 
98 | format(self.count, allowed_keys)) 99 | 100 | 101 | def place_vehicles_in_network(network, density_veh=100, density_type='absolute'): 102 | """Generates vehicles in the network""" 103 | 104 | graph_streets = network['graph_streets'] 105 | 106 | # Streets and positions selection 107 | time_start = utils.debug(None, 'Choosing random vehicle positions') 108 | 109 | street_lengths = geom_o.get_street_lengths(graph_streets) 110 | 111 | if density_type == 'absolute': 112 | count_veh = int(density_veh) 113 | elif density_type == 'length': 114 | count_veh = int(round(density_veh * np.sum(street_lengths))) 115 | elif density_type == 'area': 116 | area = network['gdf_boundary'].area 117 | count_veh = int(round(density_veh * area)) 118 | else: 119 | raise ValueError('Density type not supported') 120 | 121 | rand_street_idxs = choose_random_streets( 122 | street_lengths, count_veh) 123 | utils.debug(time_start) 124 | 125 | # Vehicle generation 126 | time_start = utils.debug(None, 'Generating vehicles') 127 | vehs = generate_vehs(graph_streets, rand_street_idxs) 128 | utils.debug(time_start) 129 | 130 | network['vehs'] = vehs 131 | return vehs 132 | 133 | 134 | def choose_random_streets(lengths, count=1): 135 | """ Chooses random streets with probabilities relative to their length""" 136 | 137 | total_length = sum(lengths) 138 | probs = lengths / total_length 139 | count_streets = np.size(lengths) 140 | indices = np.random.choice(count_streets, size=count, p=probs) 141 | return indices 142 | 143 | 144 | def choose_random_point(street, count=1): 145 | """Chooses random points along street""" 146 | 147 | distances = np.random.random(count) 148 | points = np.zeros_like(distances, dtype=object) 149 | for index, dist in np.ndenumerate(distances): 150 | points[index] = street.interpolate(dist, normalized=True) 151 | 152 | return points 153 | 154 | 155 | def generate_vehs(graph_streets, street_idxs=None, points_vehs_in=None): 156 | """Generates vehicles on specific streets """ 157 | 158 | if points_vehs_in is None: 159 | points_vehs_in = get_vehicles_from_streets( 160 | graph_streets, street_idxs) 161 | elif street_idxs is None: 162 | street_idxs = get_streets_from_vehicles(graph_streets, points_vehs_in) 163 | 164 | count_veh = np.size(street_idxs) 165 | graphs_vehs = np.zeros(count_veh, dtype=object) 166 | points_vehs = np.zeros(count_veh, dtype=object) 167 | 168 | for iteration, index in enumerate(street_idxs): 169 | street = graph_streets.edges(data=True)[index] 170 | point_veh = points_vehs_in[iteration] 171 | points_vehs[iteration] = point_veh 172 | street_geom = street[2]['geometry'] 173 | # NOTE: All vehicle nodes get the prefix 'v' 174 | node = 'v' + str(iteration) 175 | # Add vehicle, needed intersections and edges to graph 176 | graph_iter = nx.MultiGraph(node_veh=node) 177 | node_attr = {'geometry': point_veh, 'x': point_veh.x, 'y': point_veh.y} 178 | graph_iter.add_node(node, attr_dict=node_attr) 179 | graph_iter.add_nodes_from(street[0: 2]) 180 | 181 | # Determine street parts that connect vehicle to intersections 182 | street_before, street_after = geom_o.split_line_at_point( 183 | street_geom, point_veh) 184 | edge_attr = {'geometry': street_before, 185 | 'length': street_before.length, 'is_veh_edge': True} 186 | graph_iter.add_edge(node, street[0], attr_dict=edge_attr) 187 | edge_attr = {'geometry': street_after, 188 | 'length': street_after.length, 'is_veh_edge': True} 189 | graph_iter.add_edge(node, street[1], attr_dict=edge_attr) 190 | 191 | # Copy the created graph 192 | 
        graphs_vehs[iteration] = graph_iter.copy()
193 | 
194 |     vehs = Vehicles(points_vehs, graphs_vehs)
195 |     return vehs
196 | 
197 | 
198 | def get_vehicles_from_streets(graph_streets, street_idxs):
199 |     """Generate random vehicle points according to the street_idxs."""
200 | 
201 |     count_veh = np.size(street_idxs)
202 |     points_vehs = np.zeros(count_veh, dtype=object)
203 |     for iteration, index in enumerate(street_idxs):
204 |         street = graph_streets.edges(data=True)[index]
205 |         street_geom = street[2]['geometry']
206 |         points_vehs[iteration] = choose_random_point(street_geom)[0]
207 |     return points_vehs
208 | 
209 | 
210 | def get_streets_from_vehicles(graph_streets, points_vehs):
211 |     """Generate appropriate streets for given vehicular point coordinates."""
212 | 
213 |     street_idxs = np.zeros(len(points_vehs), dtype=object)
214 |     edge_set = graph_streets.edges(data=True)
215 | 
216 |     for iteration, point_veh in enumerate(points_vehs):
217 |         # Generate distances for vehicle to all edges (minimum distance)
218 |         # Result is a tuple with (distance, index). Min can use this to return
219 |         # the tuple with minimum 1st entry, so we have the index of the minimum
220 |         distance_list = [(point_veh.distance(x[2]['geometry']), ind)
221 |                          for (ind, x) in enumerate(edge_set)]
222 | 
223 |         _, street_idxs[iteration] = min(distance_list)
224 |         current_street = edge_set[street_idxs[iteration]][2]['geometry']
225 | 
226 |         # Find the closest point to point_veh that lies ON the street
227 |         correction = current_street.project(point_veh)
228 | 
229 |         # Reset the point so that it lies on the street
230 |         points_vehs[iteration] = current_street.interpolate(correction)
231 |     return street_idxs
232 | 
--------------------------------------------------------------------------------
/vtovosm/propagation.py:
--------------------------------------------------------------------------------
1 | """ Determines the propagation conditions (LOS/OLOS/NLOS orthogonal/NLOS parallel) of connections"""
2 | 
3 | from enum import IntEnum
4 | 
5 | import networkx as nx
6 | import numpy as np
7 | import shapely.geometry as geom
8 | import shapely.ops as ops
9 | 
10 | from . import geometry as geom_o
11 | 
12 | 
13 | class Cond(IntEnum):
14 |     """Enumeration of possible propagation conditions:
15 |     LOS: Line Of Sight
16 |     OLOS: Obstructed Line Of Sight (obstructed by other vehicles)
17 |     NLOS_par: Non Line Of Sight between vehicles on parallel streets
18 |     NLOS_ort: Non Line Of Sight between vehicles on orthogonal streets
19 |     NLOS: Non Line Of Sight; OLOS_LOS: OLOS or LOS (not distinguished further)
20 |     """
21 | 
22 |     LOS = 1
23 |     OLOS = 2
24 |     NLOS_par = 3
25 |     NLOS_ort = 4
26 |     OLOS_LOS = 5
27 |     NLOS = 6
28 | 
29 | 
30 | def gen_prop_cond_matrix(points_vehs,
31 |                          buildings,
32 |                          graph_streets_wave=None,
33 |                          graphs_vehs=None,
34 |                          fully_determine=True,
35 |                          max_dist=None,
36 |                          car_radius=2,
37 |                          max_angle=np.pi):
38 |     """Determines the condensed connection matrix, i.e.
the propagation conditions between all pairs 39 | of vehicles""" 40 | 41 | count_vehs = points_vehs.size 42 | count_cond = count_vehs * (count_vehs - 1) // 2 43 | prop_cond_matrix = np.zeros(count_cond, dtype=Cond) 44 | coords_max_angle_matrix = np.zeros(count_cond, dtype=object) 45 | range_vehs = np.arange(count_vehs) 46 | 47 | index = 0 48 | for idx1, point1 in enumerate(points_vehs): 49 | for idx2, point2 in enumerate(points_vehs[idx1 + 1:]): 50 | is_nlos = True 51 | line = geom.LineString([point1, point2]) 52 | if (max_dist is None) or (line.length < max_dist): 53 | is_nlos = geom_o.line_intersects_buildings( 54 | line, buildings) 55 | 56 | if is_nlos: 57 | if fully_determine: 58 | graph_veh1 = graphs_vehs[idx1] 59 | graph_veh2 = graphs_vehs[idx1 + idx2 + 1] 60 | 61 | is_orthogonal, coords_max_angle = check_if_con_is_orthogonal( 62 | graph_streets_wave, 63 | graph_veh1, 64 | graph_veh2, 65 | max_angle=max_angle) 66 | if is_orthogonal: 67 | prop_cond_matrix[index] = Cond.NLOS_ort 68 | coords_max_angle_matrix[index] = coords_max_angle 69 | else: 70 | prop_cond_matrix[index] = Cond.NLOS_par 71 | 72 | else: 73 | prop_cond_matrix[index] = Cond.NLOS 74 | else: 75 | if fully_determine: 76 | idxs_other = np.setdiff1d( 77 | range_vehs, [idx1, idx1 + idx2 + 1]) 78 | is_olos = geom_o.line_intersects_points(line, points_vehs[idxs_other], 79 | margin=car_radius) 80 | if is_olos: 81 | prop_cond_matrix[index] = Cond.OLOS 82 | else: 83 | prop_cond_matrix[index] = Cond.LOS 84 | else: 85 | prop_cond_matrix[index] = Cond.OLOS_LOS 86 | 87 | index += 1 88 | 89 | return prop_cond_matrix, coords_max_angle_matrix 90 | 91 | 92 | def veh_cons_are_nlos(point_own, points_vehs, buildings, max_dist=None): 93 | """ Determines for each connection if it is NLOS or not (i.e. LOS and OLOS)""" 94 | 95 | is_nlos = np.ones(np.size(points_vehs), dtype=bool) 96 | 97 | for index, point in enumerate(points_vehs): 98 | line = geom.LineString([point_own, point]) 99 | if (max_dist is None) or (line.length < max_dist): 100 | is_nlos[index] = geom_o.line_intersects_buildings(line, buildings) 101 | 102 | return is_nlos 103 | 104 | 105 | def veh_cons_are_nlos_all(points_vehs, buildings, max_dist=None): 106 | """ Determines for each possible connection if it is NLOS or not (i.e. 
LOS and OLOS)""" 107 | # NOTE: This function is deprecated and is replaced by gen_prop_cond_matrix 108 | 109 | count_vehs = np.size(points_vehs) 110 | count_cond = count_vehs * (count_vehs - 1) // 2 111 | is_nlos = np.ones(count_cond, dtype=bool) 112 | 113 | index = 0 114 | for idx1, point1 in enumerate(points_vehs): 115 | for point2 in points_vehs[idx1 + 1:]: 116 | line = geom.LineString([point1, point2]) 117 | if (max_dist is None) or (line.length < max_dist): 118 | is_nlos[index] = geom_o.line_intersects_buildings( 119 | line, buildings) 120 | index += 1 121 | 122 | return is_nlos 123 | 124 | 125 | def veh_cons_are_olos(point_own, points_vehs, margin=1): 126 | """Determines for each LOS/OLOS connection if it is OLOS""" 127 | 128 | is_olos = np.zeros(np.size(points_vehs), dtype=bool) 129 | 130 | for index, point in np.ndenumerate(points_vehs): 131 | line = geom.LineString([point_own, point]) 132 | indices_other = np.ones(np.size(points_vehs), dtype=bool) 133 | indices_other[index] = False 134 | is_olos[index] = geom_o.line_intersects_points(line, points_vehs[indices_other], 135 | margin=margin) 136 | 137 | return is_olos 138 | 139 | 140 | def check_if_con_is_orthogonal(streets_wave, 141 | graph_veh_u, 142 | graph_veh_v, 143 | max_angle=np.pi): 144 | """Determines if the propagation condition between two vehicles is NLOS on an orthogonal 145 | street""" 146 | 147 | node_u = graph_veh_u.graph['node_veh'] 148 | node_v = graph_veh_v.graph['node_veh'] 149 | streets_wave_local = nx.compose(graph_veh_u, streets_wave) 150 | streets_wave_local = nx.compose(graph_veh_v, streets_wave_local) 151 | 152 | # NOTE: We suboptimally use the length of roads between nodes as weight for routing and not the angle 153 | route = line_route_between_nodes( 154 | node_u, node_v, streets_wave_local) 155 | angles = geom_o.angles_along_line(route) 156 | angles_wrapped = np.pi - np.abs(geom_o.wrap_to_pi(angles)) 157 | 158 | sum_angles = sum(angles_wrapped) 159 | if sum_angles <= max_angle: 160 | is_orthogonal = True 161 | else: 162 | is_orthogonal = False 163 | 164 | # Determine position of max angle 165 | index_angle = np.argmax(angles_wrapped) 166 | route_coords = np.array(route.xy) 167 | coords_max_angle = route_coords[:, index_angle + 1] 168 | 169 | return is_orthogonal, coords_max_angle 170 | 171 | 172 | def check_if_cons_are_orthogonal(streets_wave, 173 | graph_veh_own, 174 | graphs_veh_other, 175 | max_angle=np.pi): 176 | """Determines if the propagation condition is NLOS on an orthogonal street for every possible 177 | connection to one node""" 178 | 179 | count_veh_other = np.size(graphs_veh_other) 180 | 181 | is_orthogonal = np.zeros(count_veh_other, dtype=bool) 182 | coords_max_angle = np.zeros((count_veh_other, 2)) 183 | for index, graph in enumerate(graphs_veh_other): 184 | is_orthogonal[index], coords_max_angle[index, :] = \ 185 | check_if_con_is_orthogonal( 186 | streets_wave, graph_veh_own, graph, max_angle=max_angle) 187 | 188 | return is_orthogonal, coords_max_angle 189 | 190 | 191 | def line_route_between_nodes(node_from, node_to, graph): 192 | """Determines the line representing the shortest path between two nodes""" 193 | 194 | route = nx.shortest_path(graph, node_from, node_to, weight='length') 195 | edge_nodes = list(zip(route[:-1], route[1:])) 196 | lines = [] 197 | for u_node, v_node in edge_nodes: 198 | # If there are parallel edges, select the shortest in length 199 | data = min([data for data in graph.edge[u_node][v_node].values()], 200 | key=lambda x: x['length']) 201 | 
        lines.append(data['geometry'])
202 | 
203 |     line = ops.linemerge(lines)
204 |     return line
205 | 
206 | 
207 | def add_edges_if_los(graph, buildings, max_distance=50):
208 |     """Adds edges to the streets graph between 2 nodes if there is no edge yet, they have
209 |     no buildings in between and are only a certain distance apart"""
210 | 
211 |     for index, node_u in enumerate(graph.nodes()):
212 |         coords_u = np.array((graph.node[node_u]['x'], graph.node[node_u]['y']))
213 |         for node_v in graph.nodes()[index + 1:]:
214 | 
215 |             # Check if nodes are already connected
216 |             if graph.has_edge(node_u, node_v):
217 |                 continue
218 |             coords_v = np.array(
219 |                 (graph.node[node_v]['x'], graph.node[node_v]['y']))
220 |             distance = np.linalg.norm(coords_u - coords_v, ord=2)
221 | 
222 |             # Check if the nodes are further apart than the max distance
223 |             if distance > max_distance:
224 |                 continue
225 | 
226 |             # Check if there are buildings between the nodes
227 |             line = geom.LineString(
228 |                 [(coords_u[0], coords_u[1]), (coords_v[0], coords_v[1])])
229 | 
230 |             if geom_o.line_intersects_buildings(line, buildings):
231 |                 continue
232 | 
233 |             # Add edge between nodes
234 |             edge_attr = {'length': distance, 'geometry': line}
235 |             graph.add_edge(node_u, node_v, attr_dict=edge_attr)
236 | 
--------------------------------------------------------------------------------
/vtovosm/plot.py:
--------------------------------------------------------------------------------
1 | """ Plot functionality"""
2 | 
3 | import os
4 | 
5 | import matplotlib.animation as animation
6 | import matplotlib.pyplot as plt
7 | import numpy as np
8 | import osmnx as ox
9 | 
10 | 
11 | def setup(figsize=(8, 5)):
12 |     """Sets up plotting"""
13 | 
14 |     plt.rcParams["figure.figsize"] = figsize
15 |     plt.rcParams["savefig.bbox"] = 'tight'
16 | 
17 |     plt.rcParams['text.usetex'] = True
18 |     plt.rc('font', **{'family': 'serif', 'serif': ['Palatino']})
19 | 
20 | 
21 | def plot_streets_and_buildings(streets, buildings=None, show=True, dpi=300, path=None, overwrite=False, ruler=True,
22 |                                axes=False):
23 |     """ Plots streets and buildings"""
24 | 
25 |     fig, axi = ox.plot_graph(
26 |         streets, show=False, close=False, node_size=0, dpi=dpi, edge_color='#333333', fig_height=6)
27 | 
28 |     # TODO: bug when plotting buildings, inner area not empty! (e.g.
Stiftskaserne Wien Neubau) 29 | if buildings is not None: 30 | ox.plot_buildings(buildings, fig=fig, ax=axi, set_bounds=False, show=False, close=False, dpi=dpi, 31 | color='#999999') 32 | 33 | # Reset axes parameters to default 34 | if axes: 35 | axes_color = '#999999' 36 | axi.axis('on') 37 | axi.margins(0.05) 38 | axi.tick_params(which='both', direction='out', colors=axes_color) 39 | axi.set_xlabel('X coordinate [m]', color=axes_color) 40 | axi.set_ylabel('Y coordinate [m]', color=axes_color) 41 | axi.spines['right'].set_color('none') 42 | axi.spines['top'].set_color('none') 43 | axi.spines['left'].set_color(axes_color) 44 | axi.spines['bottom'].set_color(axes_color) 45 | fig.canvas.draw() 46 | 47 | if ruler: 48 | plot_ruler(axi) 49 | 50 | if path is not None: 51 | if overwrite or not os.path.isfile(path): 52 | fig.savefig(path) 53 | 54 | if show: 55 | fig.show() 56 | 57 | return fig, axi 58 | 59 | 60 | def plot_vehs(streets, buildings, vehicles, show=True, path=None, overwrite=False): 61 | """Plots vehicles""" 62 | 63 | # Plot streets and buildings 64 | fig, axi = plot_streets_and_buildings( 65 | streets, buildings, show=False, dpi=300) 66 | 67 | # Plot vehicles with propagation conditions 68 | axi.scatter(vehicles.get()[:, 0], vehicles.get()[:, 1]) 69 | 70 | if path is not None: 71 | if overwrite or not os.path.isfile(path): 72 | plt.savefig(path) 73 | 74 | if show: 75 | plt.show() 76 | 77 | return fig, axi 78 | 79 | 80 | def plot_prop_cond(streets, buildings, vehicles, show=True, path=None, overwrite=False): 81 | """ Plots vehicles and their respective propagation condition (LOS/OLOS/NLOS parallel/NLOS 82 | orthogonal)""" 83 | 84 | # Plot streets and buildings 85 | fig, axi = plot_streets_and_buildings( 86 | streets, buildings, show=False, dpi=300) 87 | 88 | # Plot vehicles with propagation conditions 89 | axi.scatter(vehicles.get('center')[0], vehicles.get('center')[1], label='Center', 90 | marker='x', zorder=10, s=2 * plt.rcParams['lines.markersize'] ** 2, c='black') 91 | axi.scatter(vehicles.get('los')[:, 0], vehicles.get('los')[:, 1], label='LOS', 92 | zorder=9, alpha=0.75) 93 | axi.scatter(vehicles.get('olos')[:, 0], vehicles.get('olos')[:, 1], 94 | label='OLOS', zorder=8, alpha=0.75) 95 | axi.scatter(vehicles.get('ort')[:, 0], vehicles.get('ort')[:, 1], 96 | label='NLOS orth', zorder=5, alpha=0.5) 97 | axi.scatter(vehicles.get('par')[:, 0], vehicles.get('par')[:, 1], 98 | label='NLOS par', zorder=5, alpha=0.5) 99 | 100 | # Add additional information to plot 101 | axi.legend().set_visible(True) 102 | 103 | if path is not None: 104 | if overwrite or not os.path.isfile(path): 105 | fig.savefig(path) 106 | 107 | if show: 108 | fig.show() 109 | 110 | return fig, axi 111 | 112 | 113 | def plot_pathloss(streets, buildings, vehicles, show=True, path=None, overwrite=False): 114 | """ Plots vehicles and their respecitive pathloss color coded""" 115 | 116 | # Plot streets and buildings 117 | fig, axi = plot_streets_and_buildings( 118 | streets, buildings, show=False, dpi=300) 119 | 120 | # Plot vehicles with pathlosses 121 | pathlosses = vehicles.get_pathlosses('other') 122 | index_wo_inf = pathlosses != np.Infinity 123 | index_inf = np.invert(index_wo_inf) 124 | axi.scatter(vehicles.get('center')[0], vehicles.get('center')[1], label='Center', 125 | c='black', marker='x', s=2 * plt.rcParams['lines.markersize'] ** 2, zorder=3) 126 | cax = plt.scatter(vehicles.get('other')[index_wo_inf][:, 0], 127 | vehicles.get('other')[index_wo_inf][:, 1], marker='o', 128 | c=pathlosses[index_wo_inf], 
cmap=plt.cm.magma, label='Finite PL', zorder=2) 129 | axi.scatter(vehicles.get('other')[index_inf][:, 0], 130 | vehicles.get('other')[index_inf][:, 1], marker='.', c='y', 131 | label='Infinite PL', alpha=0.5, zorder=1) 132 | 133 | # Add additional information to plot 134 | axi.legend().set_visible(True) 135 | 136 | # Plot color map 137 | pl_min = np.min(pathlosses[index_wo_inf]) 138 | pl_max = np.max(pathlosses[index_wo_inf]) 139 | pl_med = np.mean((pl_min, pl_max)) 140 | string_min = '{:.0f}'.format(pl_min) 141 | string_med = '{:.0f}'.format(pl_med) 142 | string_max = '{:.0f}'.format(pl_max) 143 | cbar = fig.colorbar( 144 | cax, ticks=[pl_min, pl_med, pl_max], orientation='vertical') 145 | cbar.ax.set_xticklabels([string_min, string_med, string_max]) 146 | cbar.ax.set_xlabel('Pathloss [dB]') 147 | 148 | if path is not None: 149 | if overwrite or not os.path.isfile(path): 150 | plt.savefig(path) 151 | 152 | if show: 153 | plt.show() 154 | 155 | return fig, axi, cax 156 | 157 | 158 | def plot_con_status(streets, buildings, vehicles, show=True, path=None, overwrite=False): 159 | """ Plots the connection status (connected/not conected) in regard to another vehicle""" 160 | 161 | # Plot streets and buildings 162 | fig, axi = plot_streets_and_buildings( 163 | streets, buildings, show=False, dpi=300) 164 | 165 | # Plot vehicles with connection status 166 | axi.scatter(vehicles.get('center')[0], vehicles.get('center')[1], label='Center', 167 | c='black', marker='x', s=2 * plt.rcParams['lines.markersize'] ** 2, zorder=3) 168 | axi.scatter(vehicles.get('in_range')[:, 0], vehicles.get('in_range')[:, 1], 169 | label='In range', marker='o', zorder=2) 170 | axi.scatter(vehicles.get('out_range')[:, 0], vehicles.get('out_range')[:, 1], 171 | label='Out of range', marker='o', alpha=0.75, zorder=1) 172 | 173 | # Add additional information to plot 174 | axi.legend().set_visible(True) 175 | 176 | if path is not None: 177 | if overwrite or not os.path.isfile(path): 178 | fig.savefig(path) 179 | 180 | if show: 181 | fig.show() 182 | 183 | return fig, axi 184 | 185 | 186 | def plot_cluster_max(streets, buildings, vehicles, show=True, path=None, overwrite=False): 187 | """ Plots the biggest cluster and the remaining vehicles""" 188 | 189 | # Plot streets and buildings 190 | fig, axi = plot_streets_and_buildings( 191 | streets, buildings, show=False, dpi=300) 192 | 193 | # Plot vehicles with connection status 194 | axi.scatter(vehicles.get('cluster_max')[:, 0], 195 | vehicles.get('cluster_max')[:, 1], 196 | label='Biggest cluster', marker='o', zorder=2) 197 | axi.scatter(vehicles.get('not_cluster_max')[:, 0], 198 | vehicles.get('not_cluster_max')[:, 1], 199 | label='Other vehicles', marker='o', alpha=0.75, zorder=1) 200 | 201 | # Add additional information to plot 202 | axi.legend().set_visible(True) 203 | 204 | if path is not None: 205 | if overwrite or not os.path.isfile(path): 206 | fig.savefig(path) 207 | 208 | if show: 209 | fig.show() 210 | 211 | return fig, axi 212 | 213 | 214 | def plot_veh_traces_animation(traces, streets, buildings=None, show=True, path=None, overwrite=False): 215 | """Plots an animation of the vehicle traces""" 216 | 217 | def update_line(timestep, traces, line): 218 | """Updates the animation periodically""" 219 | 220 | line.set_data([traces[timestep]['x'], traces[timestep]['y']]) 221 | return line, 222 | 223 | fig, axi = plot_streets_and_buildings( 224 | streets, buildings=buildings, show=False) 225 | line, = axi.plot([], [], linewidth=0, marker='o') 226 | 227 | line_anim = 
        animation.FuncAnimation(fig, update_line, len(traces), fargs=(traces, line),
228 |                                 interval=25, blit=True)
229 |     if show:
230 |         fig.show()
231 | 
232 |     if path is not None:
233 |         if overwrite or not os.path.isfile(path):
234 |             if os.path.splitext(path)[1] == '.mp4':
235 |                 writer = animation.writers['ffmpeg']
236 |                 writer_inst = writer(fps=25, bitrate=1800)
237 |             elif os.path.splitext(path)[1] == '.gif':
238 |                 writer = animation.writers['imagemagick']
239 |                 writer_inst = writer(fps=12, bitrate=-1)
240 |             else:
241 |                 raise RuntimeError('File extension not supported')
242 | 
243 |             line_anim.save(path, writer=writer_inst)
244 | 
245 | 
246 | def plot_ruler(axi, length=1000, coord=None, linewidth=3, color='#999999'):
247 |     """Plots a ruler"""
248 | 
249 |     if coord is None:
250 |         xlim = axi.get_xlim()
251 |         ylim = axi.get_ylim()
252 |         coord = (xlim[0] + 10, ylim[0] - 50)
253 | 
254 |     axi.plot([coord[0], coord[0] + length], [coord[1], coord[1]], color=color, linewidth=linewidth)
255 | 
256 |     axi.text(coord[0] + length / 2, coord[1] + 1, '{:d} m'.format(length), horizontalalignment='center',
257 |              verticalalignment='bottom', color=color)
258 | 
259 |     axi.autoscale()
260 | 
--------------------------------------------------------------------------------
/vtovosm/utils.py:
--------------------------------------------------------------------------------
1 | """Various functionality that does not fit in any other module."""
2 | 
3 | import datetime
4 | import getpass
5 | import logging
6 | import lzma
7 | import os
8 | import pickle
9 | import smtplib
10 | import socket
11 | import sys
12 | import time
13 | from email.mime.text import MIMEText
14 | 
15 | import numpy as np
16 | import scipy.stats as st
17 | 
18 | 
19 | def string_to_filename(string):
20 |     """Returns a cleaned up string that can be used as a filename.
21 | 
22 |     Parameters
23 |     ----------
24 |     string : str
25 |         String that will be cleaned up.
26 | 
27 |     Returns
28 |     -------
29 |     filename: str
30 |         Cleaned up string
31 |     """
32 | 
33 |     keepcharacters = ('_', '-')
34 |     filename = ''.join(c for c in string if c.isalnum()
35 |                        or c in keepcharacters).rstrip()
36 |     filename = filename.lower()
37 |     return filename
38 | 
39 | 
40 | def seconds_to_string(seconds):
41 |     """Converts an amount of seconds to a string with format "dd:hh:mm:ss".
42 | 
43 |     Parameters
44 |     ----------
45 |     seconds : int or float
46 |         Number of seconds that will be converted
47 | 
48 |     Returns
49 |     -------
50 |     string : str
51 |         Formatted string
52 |     """
53 | 
54 |     dtime = datetime.datetime(
55 |         1, 1, 1) + datetime.timedelta(seconds=int(seconds))
56 | 
57 |     string = '{:02d}:{:02d}:{:02d}:{:02d}'.format(
58 |         dtime.day - 1,
59 |         dtime.hour,
60 |         dtime.minute,
61 |         dtime.second)
62 | 
63 |     return string
64 | 
65 | 
66 | def print_nnl(text, file=sys.stdout):
67 |     """Print without adding a new line.
68 | 
69 |     Parameters
70 |     ----------
71 |     text : str
72 |         Text to be printed
73 |     file : optional
74 |         File object to which the text will be printed
75 |     """
76 | 
77 |     print(text, file=file, end='', flush=True)
78 | 
79 | 
80 | def debug(time_start=None, text=None):
81 |     """Times execution and outputs log messages.
82 |     If `time_start` is None, the call marks the start of an action and `text` will be logged.
83 |     If `time_start` is not None, it will be interpreted as the start time and the time difference between now and
84 |     `time_start` will be logged.
85 | 
86 |     Parameters
87 |     ----------
88 |     time_start : float, optional
89 |         Start time of the corresponding action.
90 | text : str, optional 91 | Text that will be logged 92 | """ 93 | 94 | if time_start is None: 95 | if text is not None: 96 | logging.info(text) 97 | time_start = time.process_time() 98 | return time_start 99 | else: 100 | time_diff = time.process_time() - time_start 101 | logging.debug('Finished in {:.3f} s'.format(time_diff)) 102 | return time_diff 103 | 104 | 105 | def square_to_condensed(idx_i, idx_j, size_n): 106 | """Converts the squareform indices i and j of the square matrix with with size `size_n` x `size_n` to the 107 | condensed index k. 108 | 109 | Parameters 110 | ---------- 111 | idx_i : int 112 | Row index of the square matrix 113 | idx_j : int 114 | Column index of the square matrix 115 | size_n : 116 | Size of the square matrix 117 | 118 | Returns 119 | ------- 120 | k : int 121 | Index of the condensed vector 122 | 123 | See Also 124 | -------- 125 | scipy.spatial.distance.squareform 126 | """ 127 | 128 | if idx_i == idx_j: 129 | raise ValueError('Diagonal entries are not defined') 130 | if idx_i < idx_j: 131 | idx_i, idx_j = idx_j, idx_i 132 | k = size_n * idx_j - idx_j * (idx_j + 1) / 2 + idx_i - 1 - idx_j 133 | return int(k) 134 | 135 | 136 | def condensed_to_square(index_k, size_n): 137 | """Converts the condensed index k of the condensed vector to the indicies i and j of the square matrix with 138 | size `size_n` x `size_n`. 139 | 140 | Parameters 141 | ---------- 142 | index_k : int 143 | Index of the condensed vector 144 | size_n : int 145 | Size of the square matrix 146 | 147 | Returns 148 | ------- 149 | i : int 150 | Row index of the square matrix 151 | j : int 152 | Column index of the square matrix 153 | 154 | See Also 155 | -------- 156 | scipy.spatial.distance.squareform 157 | """ 158 | 159 | def calc_row_idx(index_k, size_n): 160 | """Determines the row index""" 161 | return int( 162 | np.ceil((1 / 2.) * 163 | (- (-8 * index_k + 4 * size_n ** 2 - 4 * size_n - 7) ** 0.5 164 | + 2 * size_n - 1) - 1)) 165 | 166 | def elem_in_i_rows(index_i, size_n): 167 | """Determines the number of elements in the i-th row""" 168 | return index_i * (size_n - 1 - index_i) + (index_i * (index_i + 1)) / 2 169 | 170 | def calc_col_idx(index_k, index_i, size_n): 171 | """Determines the column index""" 172 | return int(size_n - elem_in_i_rows(index_i + 1, size_n) + index_k) 173 | 174 | i = calc_row_idx(index_k, size_n) 175 | j = calc_col_idx(index_k, i, size_n) 176 | 177 | return i, j 178 | 179 | 180 | def net_connectivity_stats(net_connectivities, confidence=0.95): 181 | """Calculates the means and confidence intervals for network connectivity results. 182 | 183 | Parameters 184 | ---------- 185 | net_connectivities : list of float 186 | Network connectivities 187 | confidence : float, optional 188 | Confidence interval 189 | 190 | Returns 191 | ------- 192 | means : array of float 193 | Means 194 | conf_intervals : array of float 195 | Confidence intervals 196 | 197 | """ 198 | 199 | means = np.mean(net_connectivities, axis=0) 200 | conf_intervals = np.zeros([np.size(means), 2]) 201 | 202 | for index, mean in enumerate(means): 203 | conf_intervals[index] = st.t.interval(confidence, len( 204 | net_connectivities[:, index]) - 1, loc=mean, scale=st.sem(net_connectivities[:, index])) 205 | 206 | return means, conf_intervals 207 | 208 | 209 | def send_mail_finish(recipient=None, time_start=None): 210 | """Sends an email to notify someone about the finished simulation using a local mail server. 
211 | 
212 |     Parameters
213 |     ----------
214 |     recipient : str, optional
215 |         Recipient of the email. If it is `None`, it will be sent to the user executing Python.
216 |     time_start : float, optional
217 |         Time at which the simulation has been started.
218 |     """
219 | 
220 |     if time_start is None:
221 |         msg = MIMEText('The simulation is finished.')
222 |     else:
223 |         msg = MIMEText('The simulation started at {:.0f} is finished.'.format(
224 |             time_start))
225 | 
226 |     msg['Subject'] = 'Simulation finished'
227 |     msg['From'] = getpass.getuser() + '@' + socket.getfqdn()
228 |     if recipient is None:
229 |         msg['To'] = msg['From']
230 |     else:
231 |         msg['To'] = recipient
232 | 
233 |     try:
234 |         smtp = smtplib.SMTP('localhost')
235 |     except ConnectionRefusedError:
236 |         logging.error('Connection to mailserver refused')
237 |     else:
238 |         smtp.send_message(msg)
239 |         smtp.quit()
240 | 
241 | 
242 | def save(obj, file_path, protocol=4, compression_level=1, overwrite=True, create_dir=True):
243 |     """Saves an object using LZMA compression.
244 | 
245 |     Parameters
246 |     ----------
247 |     obj :
248 |         Object that will be saved
249 |     file_path : str
250 |         Path at which `obj` will be saved
251 |     protocol : int, optional
252 |         Pickle protocol
253 |     compression_level : int, optional
254 |         LZMA compression level
255 |     overwrite : bool, optional
256 |         When True, an already existing file will be overwritten.
257 |     create_dir : bool, optional
258 |         When True, any non-existing intermediate directories in `file_path` will be created.
259 | 
260 |     Notes
261 |     -----
262 |     Because of the 4 GiB limit of gzip in Python <= 3.4 we use LZMA as compression even though it is slower.
263 |     See: https://bugs.python.org/issue27130
264 |     """
265 | 
266 |     # Return if file already exists
267 |     if not overwrite and os.path.isfile(file_path):
268 |         return
269 | 
270 |     # Create the output directory if it does not exist
271 |     if create_dir:
272 |         directory = os.path.dirname(file_path)
273 |         if not os.path.isdir(directory):
274 |             os.makedirs(directory)
275 | 
276 |     with lzma.open(file_path, 'wb', preset=compression_level) as file:
277 |         pickle.dump(obj, file, protocol=protocol)
278 | 
279 | 
280 | def load(file_path):
281 |     """Loads and decompresses a saved object.
282 | 
283 |     Parameters
284 |     ----------
285 |     file_path : str
286 |         Path of the compressed file
287 | 
288 |     Returns
289 |     -------
290 |     object
291 |         Object that was saved in the file
292 | 
293 |     Notes
294 |     -----
295 |     Because of the 4 GiB limit of gzip in Python <= 3.4 we use LZMA as compression even though it is slower.
301 | 
302 | 
303 | def compress_file(file_in_path, protocol=4, compression_level=1, delete_uncompressed=True):
304 | """Loads an uncompressed file and saves a compressed copy of it.
305 | 
306 | Parameters
307 | ----------
308 | file_in_path : str
309 | Path of the uncompressed file
310 | protocol : int, optional
311 | Pickle protocol version
312 | compression_level : int, optional
313 | LZMA compression level
314 | delete_uncompressed : bool, optional
315 | When True, the uncompressed file will be deleted after compression
316 | """
317 | 
318 | file_out_path = file_in_path + '.xz'
319 | with open(file_in_path, 'rb') as file_in:
320 | obj = pickle.load(file_in)
321 | save(obj, file_out_path, protocol=protocol,
322 | compression_level=compression_level)
323 | 
324 | if delete_uncompressed:
325 | os.remove(file_in_path)
326 | -------------------------------------------------------------------------------- /vtovosm/simulations/result_analysis.py: --------------------------------------------------------------------------------
1 | """Provides functions to derive further results from simulation results"""
2 | 
3 | import logging
4 | import multiprocessing as mp
5 | import os
6 | 
7 | import networkx as nx
8 | import numpy as np
9 | 
10 | from .. import connection_analysis as con_ana
11 | from .. import geometry as geom_o
12 | from .. import network_parser as nw_p
13 | from .. import osmnx_addons as ox_a
14 | from .. import utils
15 | 
16 | 
17 | def main(conf_path=None, scenario=None):
18 | """Main result analysis function"""
19 | 
20 | # Load the configuration
21 | if conf_path is None:
22 | config = nw_p.params_from_conf()
23 | if scenario is None:
24 | config_scenario = nw_p.params_from_conf(in_key=config['scenario'])
25 | else:
26 | config_scenario = nw_p.params_from_conf(in_key=scenario)
27 | config['scenario'] = scenario
28 | else:
29 | config = nw_p.params_from_conf(config_file=conf_path)
30 | if scenario is None:
31 | config_scenario = nw_p.params_from_conf(in_key=config['scenario'], config_file=conf_path)
32 | else:
33 | config_scenario = nw_p.params_from_conf(in_key=scenario, config_file=conf_path)
34 | config['scenario'] = scenario
35 | 
36 | if isinstance(config_scenario, (list, tuple)):
37 | raise RuntimeError('Multiple scenarios not supported. 
Use appropriate function') 38 | 39 | # Merge the two configurations 40 | config = nw_p.merge(config, config_scenario) 41 | 42 | # Sanitize config 43 | config = nw_p.check_fill_config(config) 44 | densities_veh = config['densities_veh'] 45 | 46 | # Return if there is nothing to analyze 47 | if config['analyze_results'] is None: 48 | return 49 | 50 | loglevel = logging.getLevelName(config['loglevel']) 51 | logger = logging.getLogger() 52 | logger.setLevel(loglevel) 53 | 54 | # Load street network 55 | time_start = utils.debug(None, 'Loading street network') 56 | net = ox_a.load_network(config['place'], 57 | which_result=config['which_result'], 58 | tolerance=config['building_tolerance']) 59 | graph_streets = net['graph_streets'] 60 | utils.debug(time_start) 61 | 62 | # Convert vehicle densities to counts 63 | counts_veh = np.zeros(densities_veh.size, dtype=int) 64 | 65 | if config['density_type'] == 'length': 66 | street_lengths = geom_o.get_street_lengths(graph_streets) 67 | 68 | # Determine total vehicle count 69 | for idx, density_veh in enumerate(densities_veh): 70 | if config['density_type'] == 'absolute': 71 | counts_veh[idx] = int(density_veh) 72 | elif config['density_type'] == 'length': 73 | counts_veh[idx] = int(round(density_veh * np.sum(street_lengths))) 74 | elif config['density_type'] == 'area': 75 | area = net['gdf_boundary'].area 76 | counts_veh[idx] = int(round(density_veh * area)) 77 | else: 78 | raise ValueError('Density type not supported') 79 | 80 | # Determine file paths 81 | filepaths_res = [] 82 | filepaths_ana = [] 83 | filepaths_ana_all = [] 84 | for idx_count_veh, count_veh in enumerate(counts_veh): 85 | 86 | # Determine results path and check if it exists 87 | if config['results_file_prefix'] is not None: 88 | filename_prefix = utils.string_to_filename(config['results_file_prefix']) 89 | elif 'scenario' in config: 90 | filename_prefix = utils.string_to_filename(config['scenario']) 91 | else: 92 | filename_prefix = utils.string_to_filename(config['place']) 93 | 94 | file_name_res = '{}.{:d}.pickle.xz'.format(filename_prefix, count_veh) 95 | file_name_ana = '{}.{:d}_analysis.pickle.xz'.format(filename_prefix, count_veh) 96 | 97 | if config['results_file_dir'] is not None: 98 | file_dir = config['results_file_dir'] 99 | else: 100 | file_dir = 'results' 101 | 102 | filepath_res = os.path.join(file_dir, file_name_res) 103 | filepath_ana = os.path.join(file_dir, file_name_ana) 104 | 105 | result_file_exists = os.path.isfile(filepath_res) 106 | if not result_file_exists: 107 | raise FileNotFoundError('Result file not found') 108 | 109 | analysis_file_exists = os.path.isfile(filepath_ana) 110 | if analysis_file_exists: 111 | if config['overwrite_result']: 112 | logging.warning('Analysis file already exists. Overwriting') 113 | filepaths_res.append(filepath_res) 114 | filepaths_ana.append(filepath_ana) 115 | else: 116 | logging.warning('Analysis file already exists. 
Skipping analysis') 117 | else: 118 | filepaths_res.append(filepath_res) 119 | filepaths_ana.append(filepath_ana) 120 | 121 | filepaths_ana_all.append(filepath_ana) 122 | 123 | logging.info('Starting analysis of results') 124 | 125 | if config['simulation_mode'] == 'parallel': 126 | multiprocess = True 127 | processes = config['processes'] 128 | elif config['simulation_mode'] == 'sequential': 129 | multiprocess = False 130 | processes = None 131 | else: 132 | raise NotImplementedError('Mode not supported') 133 | 134 | # Iterate all result files 135 | for filepath_res, filepath_ana in zip(filepaths_res, filepaths_ana): 136 | # Analyze results 137 | analyze_single(filepath_res, filepath_ana, config['analyze_results'], multiprocess=multiprocess, 138 | processes=processes) 139 | 140 | logging.info('Merging all analysis results') 141 | analysis_results = {} 142 | for count_veh, filepath_ana in zip(counts_veh, filepaths_ana_all): 143 | analysis_results[count_veh] = utils.load(filepath_ana) 144 | 145 | file_name_ana = '{}_analysis.pickle.xz'.format(filename_prefix) 146 | filepath_ana = os.path.join(file_dir, file_name_ana) 147 | analysis_file_exists = os.path.isfile(filepath_ana) 148 | if analysis_file_exists: 149 | if config['overwrite_result']: 150 | logging.warning('Overwriting combined analysis file') 151 | utils.save(analysis_results, filepath_ana) 152 | else: 153 | logging.warning('Combined analysis file already exists. Not overwriting') 154 | else: 155 | utils.save(analysis_results, filepath_ana) 156 | 157 | return analysis_results 158 | 159 | def load_results(filepath_res, multiprocess=False, processes=None): 160 | "Loads the results file, converts the connection matrices to graphs and returns the connection graphs and vehicles" 161 | 162 | # Load the connection results 163 | logging.info('Loading results file') 164 | results_loaded = utils.load(filepath_res) 165 | matrices_cons = results_loaded['results']['matrices_cons'] 166 | vehs = results_loaded['results']['vehs'] 167 | 168 | # Check if given connection results are not empty 169 | if len(matrices_cons) == 0 or vehs[0].count == 1: 170 | return 171 | 172 | # Transform connection matrices to graphs 173 | if multiprocess: 174 | with mp.Pool(processes=processes) as pool: 175 | graphs_cons = pool.map(nx.from_numpy_matrix, matrices_cons) 176 | else: 177 | graphs_cons = [] 178 | for matrix_cons in matrices_cons: 179 | graphs_cons.append(nx.from_numpy_matrix(matrix_cons)) 180 | 181 | results_processed = {'graphs_cons': graphs_cons, 182 | 'vehs': vehs} 183 | 184 | return results_processed 185 | 186 | def analyze_single(filepath_res, filepath_ana, config_analysis, multiprocess=False, processes=None): 187 | """Runs a single vehicle count analysis of a simulation result. 188 | Can be run in parallel""" 189 | 190 | # Check if analysis to be performed is set 191 | if config_analysis is None: 192 | logging.warning('No analysis requested. Exiting') 193 | return 194 | 195 | all_analysis = ['net_connectivities', 196 | 'path_redundancies_all', 197 | 'link_durations', 198 | 'connection_durations'] 199 | 200 | if config_analysis == ['all'] or config_analysis == 'all': 201 | config_analysis = all_analysis 202 | 203 | if not set(config_analysis).issubset(set(all_analysis)): 204 | raise RuntimeError('Analysis not supported') 205 | 206 | 207 | loaded_results = load_results(filepath_res, multiprocess=multiprocess, processes=processes) 208 | if loaded_results is None: 209 | logging.warning('Nothing to analyze. 
Exiting') 210 | utils.save(None, filepath_ana) 211 | return 212 | 213 | graphs_cons = loaded_results['graphs_cons'] 214 | vehs = loaded_results['vehs'] 215 | 216 | # Start main analysis 217 | time_start = utils.debug(None, 'Analyzing results') 218 | analysis_result = {} 219 | 220 | # Determine network connectivities 221 | if 'net_connectivities' in config_analysis: 222 | logging.info('Determining network connectivities') 223 | 224 | if multiprocess: 225 | with mp.Pool(processes=processes) as pool: 226 | net_connectivities = pool.map(con_ana.calc_net_connectivity, graphs_cons) 227 | else: 228 | net_connectivities = con_ana.calc_net_connectivities(graphs_cons) 229 | 230 | analysis_result['net_connectivities'] = net_connectivities 231 | 232 | # Determine path redundancies for center vehicle (node disjoint and path disjoint) 233 | if 'path_redundancies_center' in config_analysis: 234 | logging.info('Determining center path redundancies') 235 | 236 | if multiprocess: 237 | with mp.Pool(processes=processes) as pool: 238 | path_redundancies_separate = pool.starmap(con_ana.calc_center_path_redundancy, zip(graphs_cons, vehs)) 239 | path_redundancies = np.concatenate(path_redundancies_separate) 240 | else: 241 | path_redundancies = con_ana.calc_center_path_redundancies(graphs_cons, vehs) 242 | 243 | analysis_result['path_redundancies_center'] = path_redundancies 244 | 245 | # Determine path redundancies for all pairs (only node disjoint) 246 | if 'path_redundancies_all' in config_analysis: 247 | logging.info('Determining all path redundancies') 248 | 249 | if multiprocess: 250 | with mp.Pool(processes=processes) as pool: 251 | path_redundancies = pool.starmap(con_ana.calc_path_redundancies, zip(graphs_cons, vehs)) 252 | else: 253 | path_redundancies = [] 254 | for graph, vehs_current in zip(graphs_cons, vehs): 255 | path_redundancies_current = con_ana.calc_path_redundancies(graph, vehs_current) 256 | path_redundancies.append(path_redundancies_current) 257 | 258 | analysis_result['path_redundancies_all'] = path_redundancies 259 | 260 | # Determine link durations 261 | if 'link_durations' in config_analysis: 262 | logging.info('Determining link durations') 263 | if multiprocess: 264 | link_durations = con_ana.calc_link_durations_multiprocess(graphs_cons, processes=processes) 265 | else: 266 | link_durations = con_ana.calc_link_durations(graphs_cons) 267 | 268 | analysis_result['link_durations'] = link_durations 269 | 270 | # Determine connection durations 271 | if 'connection_durations' in config_analysis: 272 | logging.info('Determining connection durations') 273 | if multiprocess: 274 | connection_durations = con_ana.calc_connection_durations_multiprocess(graphs_cons, processes=processes) 275 | else: 276 | connection_durations = con_ana.calc_connection_durations(graphs_cons) 277 | 278 | analysis_result['connection_durations'] = connection_durations[0] 279 | analysis_result['rehealing_times'] = connection_durations[1] 280 | connection_stats = con_ana.calc_connection_stats( 281 | connection_durations[0], graphs_cons[0].number_of_nodes()) 282 | analysis_result['connection_duration_mean'] = connection_stats[0] 283 | analysis_result['connection_periods_mean'] = connection_stats[1] 284 | 285 | utils.debug(time_start) 286 | 287 | # Save results 288 | utils.save(analysis_result, filepath_ana) 289 | 290 | return analysis_result 291 | -------------------------------------------------------------------------------- /vtovosm/osmnx_addons.py: 
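# Editor's note: illustrative preface, not part of the original file. A minimal sketch of how
# this module is typically driven (the place name and parameter values are example
# assumptions, not taken from the repository):
#
#     from vtovosm import osmnx_addons as ox_a
#
#     ox_a.setup()
#     net = ox_a.load_network('Neubau, Vienna, Austria', which_result=1, tolerance=0)
#     graph_streets = net['graph_streets']        # directed street graph
#     graph_wave = net['graph_streets_wave']      # undirected graph with additional LOS edges
#     gdf_buildings = net['gdf_buildings']        # building footprints
#     gdf_boundary = net['gdf_boundary']          # boundary polygon of the place
#
# Downloaded and derived data are cached as compressed pickles under data/, so repeated calls
# with the same place reuse the files instead of querying OpenStreetMap again.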
-------------------------------------------------------------------------------- 1 | """ Additional functions missing in the OSMnx package""" 2 | 3 | import logging 4 | import os 5 | 6 | import geopandas as gpd 7 | import numpy as np 8 | import osmnx as ox 9 | import shapely.geometry as geom 10 | import shapely.ops as ops 11 | 12 | from . import propagation as prop 13 | from . import utils 14 | 15 | 16 | def setup(): 17 | """Sets up OSMnx""" 18 | 19 | logger = logging.getLogger() 20 | ox.config(log_console=False, log_file=os.devnull, log_name=logger.name, use_cache=True) 21 | 22 | 23 | def load_network(place, which_result=1, overwrite=False, tolerance=0): 24 | """Generates streets and buildings""" 25 | 26 | # Generate filenames 27 | file_prefix = 'data/{}'.format(utils.string_to_filename(place)) 28 | filename_data_streets = 'data/{}_streets.pickle.xz'.format( 29 | utils.string_to_filename(place)) 30 | filename_data_boundary = 'data/{}_boundary.pickle.xz'.format( 31 | utils.string_to_filename(place)) 32 | filename_data_wave = 'data/{}_wave.pickle.xz'.format( 33 | utils.string_to_filename(place)) 34 | filename_data_buildings = 'data/{}_buildings.pickle.xz'.format( 35 | utils.string_to_filename(place)) 36 | 37 | # Create the output directory if it does not exist 38 | if not os.path.isdir('data/'): 39 | os.makedirs('data/') 40 | 41 | if not overwrite and \ 42 | os.path.isfile(filename_data_streets) and \ 43 | os.path.isfile(filename_data_buildings) and \ 44 | os.path.isfile(filename_data_boundary): 45 | # Load from file 46 | time_start = utils.debug(None, 'Loading data from disk') 47 | data = load_place(file_prefix, tolerance=tolerance) 48 | else: 49 | # Load from internet 50 | time_start = utils.debug(None, 'Loading data from the internet') 51 | data = download_place(place, which_result=which_result, tolerance=tolerance) 52 | 53 | graph_streets = data['streets'] 54 | gdf_buildings = data['buildings'] 55 | gdf_boundary = data['boundary'] 56 | add_geometry(graph_streets) 57 | 58 | utils.debug(time_start) 59 | 60 | # Generate wave propagation graph: 61 | # Vehicles are placed in a undirected version of the graph because electromagnetic 62 | # waves do not respect driving directions 63 | if not overwrite and os.path.isfile(filename_data_wave): 64 | # Load from file 65 | time_start = utils.debug(None, 'Loading graph for wave propagation') 66 | graph_streets_wave = utils.load(filename_data_wave) 67 | else: 68 | # Generate 69 | time_start = utils.debug(None, 'Generating graph for wave propagation') 70 | graph_streets_wave = graph_streets.to_undirected() 71 | prop.add_edges_if_los(graph_streets_wave, gdf_buildings) 72 | utils.save(graph_streets_wave, filename_data_wave) 73 | 74 | utils.debug(time_start) 75 | 76 | network = {'graph_streets': graph_streets, 77 | 'graph_streets_wave': graph_streets_wave, 78 | 'gdf_buildings': gdf_buildings, 79 | 'gdf_boundary': gdf_boundary} 80 | 81 | return network 82 | 83 | 84 | def download_place(place, network_type='drive', file_prefix=None, which_result=1, project=True, tolerance=0): 85 | """ Downloads streets and buildings for a place, saves the data to disk and returns them """ 86 | 87 | if file_prefix is None: 88 | file_prefix = 'data/{}'.format(utils.string_to_filename(place)) 89 | 90 | if which_result is None: 91 | which_result = which_result_polygon(place) 92 | 93 | # Streets 94 | streets = ox.graph_from_place( 95 | place, network_type=network_type, which_result=which_result) 96 | if project: 97 | streets = ox.project_graph(streets) 98 | filename_streets = 
'{}_streets.pickle.xz'.format(file_prefix) 99 | utils.save(streets, filename_streets) 100 | 101 | # Boundary and buildings 102 | boundary = ox.gdf_from_place(place, which_result=which_result) 103 | polygon = boundary['geometry'].iloc[0] 104 | buildings = ox.create_buildings_gdf(polygon) 105 | if project: 106 | buildings = ox.project_gdf(buildings) 107 | boundary = ox.project_gdf(boundary) 108 | 109 | # Save buildings 110 | filename_buildings = '{}_buildings.pickle.xz'.format(file_prefix) 111 | utils.save(buildings, filename_buildings) 112 | 113 | # Build and save simplified buildings 114 | if tolerance != 0: 115 | filename_buildings_simpl = '{}_buildings_{:.2f}.pickle.xz'.format(file_prefix, tolerance) 116 | buildings = simplify_buildings(buildings) 117 | utils.save(buildings, filename_buildings_simpl) 118 | 119 | # Save boundary 120 | filename_boundary = '{}_boundary.pickle.xz'.format(file_prefix) 121 | utils.save(boundary, filename_boundary) 122 | 123 | # Return data 124 | data = {'streets': streets, 'buildings': buildings, 'boundary': boundary} 125 | return data 126 | 127 | 128 | def load_place(file_prefix, tolerance=0): 129 | """ Loads previously downloaded street and building data of a place""" 130 | 131 | filename_buildings = '{}_buildings.pickle.xz'.format(file_prefix) 132 | 133 | if tolerance == 0: 134 | buildings = utils.load(filename_buildings) 135 | else: 136 | filename_buildings_simpl = '{}_buildings_{:.2f}.pickle.xz'.format(file_prefix, tolerance) 137 | if os.path.isfile(filename_buildings_simpl): 138 | buildings = utils.load(filename_buildings_simpl) 139 | else: 140 | buildings_compl = utils.load(filename_buildings) 141 | buildings = simplify_buildings(buildings_compl) 142 | utils.save(buildings, filename_buildings_simpl) 143 | 144 | filename_streets = '{}_streets.pickle.xz'.format(file_prefix) 145 | streets = utils.load(filename_streets) 146 | 147 | filename_boundary = '{}_boundary.pickle.xz'.format(file_prefix) 148 | boundary = utils.load(filename_boundary) 149 | 150 | place = {'streets': streets, 'buildings': buildings, 'boundary': boundary} 151 | return place 152 | 153 | 154 | def add_geometry(streets): 155 | """ Adds geometry object to the edges of the graph where they are missing""" 156 | for u_node, v_node, data in streets.edges(data=True): 157 | if 'geometry' not in data: 158 | coord_x1 = streets.node[u_node]['x'] 159 | coord_y1 = streets.node[u_node]['y'] 160 | coord_x2 = streets.node[v_node]['x'] 161 | coord_y2 = streets.node[v_node]['y'] 162 | data['geometry'] = geom.LineString( 163 | [(coord_x1, coord_y1), (coord_x2, coord_y2)]) 164 | 165 | 166 | def check_geometry(streets): 167 | """ Checks if all edges of the graph have a geometry object""" 168 | 169 | complete = True 170 | for _, _, data in streets.edges(data=True): 171 | if 'geometry' not in data: 172 | complete = False 173 | break 174 | 175 | return complete 176 | 177 | 178 | def which_result_polygon(query, limit=5): 179 | """Determines the first which_result value that returns a polygon from the nominatim API""" 180 | 181 | response = ox.osm_polygon_download(query, limit=limit, polygon_geojson=1) 182 | for index, result in enumerate(response): 183 | if result['geojson']['type'] == 'Polygon': 184 | return index + 1 185 | return None 186 | 187 | 188 | def simplify_buildings(gdf_buildings, tolerance=1, merge_by_fill=True): 189 | """Simplifies the building polygons by reducing the number of edges. 
190 | Notes: The resulting deviation can be larger than tolerance, because both merging and simplifying use tolerance.""" 191 | 192 | geoms_list = gdf_buildings.geometry.tolist() 193 | geoms_list_comb = [] 194 | 195 | # Merge polygons if they are near each other 196 | for idx1 in range(len(geoms_list)): 197 | geom1 = geoms_list[idx1] 198 | 199 | if geom1 is None: 200 | continue 201 | elif not isinstance(geom1, geom.Polygon): 202 | geoms_list_comb.append(geom1) 203 | continue 204 | 205 | # Because of previous merges we need to check from the beginning and not from idx+1 206 | for idx2 in range(len(geoms_list)): 207 | geom2 = geoms_list[idx2] 208 | 209 | if idx1 == idx2: 210 | continue 211 | elif geom2 is None: 212 | continue 213 | elif not isinstance(geom2, geom.Polygon): 214 | continue 215 | 216 | dist = geom1.distance(geom2) 217 | 218 | if dist > tolerance: 219 | continue 220 | 221 | if merge_by_fill: 222 | geom_union = merge_polygons_by_fill(geom1, geom2) 223 | else: 224 | geom_union = merge_polygons_by_buffer(geom1, geom2) 225 | 226 | # If the union is 2 separate polygons we keep them otherwise we save the union 227 | if not isinstance(geom_union, geom.MultiPolygon): 228 | geom1 = geom_union 229 | geoms_list[idx2] = None 230 | 231 | geoms_list[idx1] = geom1 232 | geoms_list_comb.append(geom1) 233 | 234 | # Remove interiors of polygons 235 | geoms_list_ext = remove_interior_polygons(geoms_list_comb) 236 | 237 | # Simplify polygons 238 | geoms_list_simpl = simplify_polygons(geoms_list_ext, tolerance=tolerance) 239 | 240 | # Build a new GDF 241 | buildings = {} 242 | for idx, geometry in enumerate(geoms_list_simpl): 243 | building = {'id': idx, 244 | 'geometry': geometry} 245 | buildings[idx] = building 246 | 247 | gdf_buildings_opt = gpd.GeoDataFrame(buildings).T 248 | 249 | return gdf_buildings_opt 250 | 251 | 252 | def simplify_polygons(polygons_list, tolerance=1): 253 | """Simplifies a list of polygons""" 254 | 255 | polygons_list_simpl = [] 256 | for geometry in polygons_list: 257 | if not isinstance(geometry, geom.Polygon): 258 | polygons_list_simpl.append(geometry) 259 | continue 260 | 261 | geometry_simpl = geometry.simplify(tolerance, preserve_topology=False) 262 | 263 | if isinstance(geometry_simpl, geom.MultiPolygon): 264 | for poly in geometry_simpl: 265 | if not poly.is_empty: 266 | polygons_list_simpl.append(poly) 267 | else: 268 | if not geometry_simpl.is_empty: 269 | polygons_list_simpl.append(geometry_simpl) 270 | else: 271 | polygons_list_simpl.append(geometry) 272 | 273 | return polygons_list_simpl 274 | 275 | 276 | def remove_interior_polygons(polygons_list): 277 | """Removes all interiors of a list of polygons""" 278 | 279 | polygons_list_exterior = [] 280 | for geometry in polygons_list: 281 | if not isinstance(geometry, geom.Polygon): 282 | polygons_list_exterior.append(geometry) 283 | else: 284 | poly_simp = geom.Polygon(geometry.exterior) 285 | polygons_list_exterior.append(poly_simp) 286 | 287 | return polygons_list_exterior 288 | 289 | 290 | def merge_polygons_by_fill(polygon1, polygon2): 291 | """Merges 2 polygons by searching for the 2 nearest nodes on each and constructing a square to fill the gap 292 | region""" 293 | 294 | if polygon1.intersects(polygon2): 295 | geom_union = ops.unary_union([polygon1, polygon2]) 296 | return geom_union 297 | 298 | coords1 = np.array(polygon1.exterior.coords.xy) 299 | coords2 = np.array(polygon2.exterior.coords.xy) 300 | points1 = [geom.Point(coord) for coord in coords1.T][:-1] 301 | points2 = [geom.Point(coord) for coord in 
coords2.T][:-1]
302 | 
303 | # Find pair of closest edges
304 | min_dist_1 = np.inf
305 | min_idx1_1 = None
306 | min_idx2_1 = None
307 | for idx1, point1 in enumerate(points1):
308 | for idx2, point2 in enumerate(points2):
309 | cur_dist = point1.distance(point2)
310 | if cur_dist < min_dist_1:
311 | min_dist_1 = cur_dist
312 | min_idx1_1 = idx1
313 | min_idx2_1 = idx2
314 | 
315 | # Find pair of 2nd closest edges
316 | min_dist_2 = np.inf
317 | min_idx1_2 = None
318 | min_idx2_2 = None
319 | for idx1, point1 in enumerate(points1):
320 | if (idx1 == min_idx1_1) or point1.almost_equals(points1[min_idx1_1]):
321 | continue
322 | for idx2, point2 in enumerate(points2):
323 | if (idx2 == min_idx2_1) or point2.almost_equals(points2[min_idx2_1]):
324 | continue
325 | cur_dist = point1.distance(point2)
326 | if cur_dist < min_dist_2:
327 | min_dist_2 = cur_dist
328 | min_idx1_2 = idx1
329 | min_idx2_2 = idx2
330 | 
331 | # Generate fill square
332 | points_fill = [points1[min_idx1_1], points2[min_idx2_1], points2[min_idx2_2], points1[min_idx1_2]]
333 | points_fill_idxs = [(0,1,2,3), (1,0,2,3), (0,2,1,3)]
334 | 
335 | poly_fill = None
336 | for idxs in points_fill_idxs:
337 | points_fill_iter = [points_fill[idx] for idx in idxs]
338 | coords_fill = [point.coords[:][0] for point in points_fill_iter]
339 | poly_fill = geom.Polygon(coords_fill)
340 | if poly_fill.is_valid:
341 | break
342 | 
343 | # Build union of 3 polygons
344 | geom_union = ops.unary_union([polygon1, poly_fill, polygon2]).simplify(0)
345 | return geom_union
346 | 
347 | 
348 | def merge_polygons_by_buffer(polygon1, polygon2):
349 | """Merges 2 polygons by creating a buffer around them so that they intersect and applying a negative buffer after the
350 | merge"""
351 | 
352 | dist = polygon1.distance(polygon2)
353 | if polygon1.intersects(polygon2):
354 | geom_union = ops.unary_union([polygon1, polygon2])
355 | return geom_union
356 | 
357 | # Setting the buffer to dist/2 does not guarantee that the 2 polygons will intersect, resulting in
358 | # a single polygon. Therefore we need the type check at the end of the inner loop in simplify_buildings.
359 | buffer = dist / 2
360 | geom1_buf = polygon1.buffer(buffer, resolution=1)
361 | geom2_buf = polygon2.buffer(buffer, resolution=1)
362 | 
363 | if not geom1_buf.intersects(geom2_buf):
364 | geom_union = geom.MultiPolygon([polygon1, polygon2])
365 | else:
366 | geom_union = ops.unary_union([geom1_buf, geom2_buf]).buffer(-buffer, resolution=1)
367 | 
368 | return geom_union
369 | 
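# Editor's note: illustrative usage sketch, not part of the original module. It shows the
# effect of the two merge helpers above on a pair of nearby rectangles (the coordinates are
# arbitrary example values).
def _example_merge_polygons():
    """Illustrates merge_polygons_by_fill and merge_polygons_by_buffer on two close rectangles."""
    poly1 = geom.Polygon([(0, 0), (10, 0), (10, 10), (0, 10)])
    poly2 = geom.Polygon([(10.5, 0), (20, 0), (20, 10), (10.5, 10)])  # 0.5 unit gap to poly1

    # The fill-based merge bridges the gap with a connecting quadrilateral and
    # returns a single polygon spanning both buildings.
    merged_fill = merge_polygons_by_fill(poly1, poly2)
    assert isinstance(merged_fill, geom.Polygon)

    # The buffer-based merge may return either a Polygon or a MultiPolygon, which is why
    # simplify_buildings() checks the type of the union before discarding the second polygon.
    merged_buffer = merge_polygons_by_buffer(poly1, poly2)
    assert isinstance(merged_buffer, (geom.Polygon, geom.MultiPolygon))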
-------------------------------------------------------------------------------- /vtovosm/simulations/main.py: --------------------------------------------------------------------------------
1 | """ Generates streets, buildings and vehicles from OpenStreetMap data with osmnx"""
2 | 
3 | import logging
4 | import multiprocessing as mp
5 | import os
6 | import signal
7 | import time
8 | from itertools import repeat
9 | from optparse import OptionParser
10 | 
11 | import numpy as np
12 | from scipy.special import comb
13 | 
14 | from . import result_analysis
15 | from .. import connection_analysis as con_ana
16 | from .. import demo
17 | from .. import geometry as geom_o
18 | from .. import network_parser as nw_p
19 | from .. import osmnx_addons as ox_a
20 | from .. import plot
21 | from .. import sumo
22 | from .. import utils
23 | from .. import vehicles
24 | 
25 | # Global variables
26 | rte_count_con_checkpoint = 0
27 | rte_count_con_total = 0
28 | rte_time_start = 0
29 | rte_time_checkpoint = 0
30 | 
31 | 
32 | def parse_cmd_args():
33 | """Parses command line options"""
34 | 
35 | parser = OptionParser()
36 | parser.add_option('-c', '--conf-file', dest='conf_path', default=None,
37 | help='Load configuration from json FILE',
38 | metavar='FILE')
39 | parser.add_option('-s', '--scenario', dest="scenario", default=None,
40 | help='Use SCENARIO instead of the one defined in the configuration file',
41 | metavar='SCENARIO')
42 | parser.add_option('-m', '--multi', action="store_true", dest="multi", default=False,
43 | help="Simulate all scenarios defined in the configuration file")
44 | 
45 | (options, args) = parser.parse_args()
46 | 
47 | return options, args
48 | 
49 | 
50 | def signal_handler(sig, frame):
51 | """Outputs simulation progress on SIGTSTP"""
52 | 
53 | if sig == signal.SIGTSTP:
54 | log_progress(rte_count_con_checkpoint, rte_count_con_total,
55 | rte_time_checkpoint, rte_time_start)
56 | 
57 | 
58 | def log_progress(c_checkpoint, c_end, t_checkpoint, t_start):
59 | """Estimates and logs the progress of the currently running simulation"""
60 | 
61 | if c_checkpoint == 0:
62 | logging.info('No simulation progress and remaining time estimation possible')
63 | return
64 | 
65 | t_now = time.time() - t_start
66 | c_now = c_checkpoint * t_now / t_checkpoint
67 | progress_now = min([c_now / c_end, 1])
68 | t_end = t_now * c_end / c_now
69 | t_todo = max([t_end - t_now, 0])
70 | logging.info(
71 | '{:.0f}% total simulation progress, '.format(progress_now * 100) +
72 | '{} remaining simulation time'.format(utils.seconds_to_string(t_todo)))
73 | 
74 | 
75 | def sim_single_sumo(snapshot,
76 | graph_streets,
77 | gdf_buildings,
78 | max_metric,
79 | metric='distance',
80 | graph_streets_wave=None):
81 | """Runs a single snapshot analysis of a SUMO simulation result.
82 | Can be run in parallel"""
83 | 
84 | # TODO: too much distance between SUMO vehicle positions and
85 | # OSMnx streets?
86 | # Generate vehicles from SUMO traces snapshot
87 | vehs = sumo.vehicles_from_traces(
88 | graph_streets, snapshot)
89 | 
90 | # Generate connection matrix
91 | matrix_cons = con_ana.gen_connection_matrix(
92 | vehs,
93 | gdf_buildings,
94 | max_metric,
95 | metric=metric,
96 | graph_streets_wave=graph_streets_wave)
97 | 
98 | return matrix_cons, vehs
99 | 
100 | 
101 | def sim_single_uniform(random_seed,
102 | count_veh,
103 | graph_streets,
104 | gdf_buildings,
105 | max_metric,
106 | metric='distance',
107 | graph_streets_wave=None):
108 | """Runs a single iteration of a simulation with uniform vehicle distribution. 
109 | Can be run in parallel""" 110 | 111 | # Seed random number generator 112 | np.random.seed(random_seed) 113 | 114 | # Choose street indexes 115 | street_lengths = geom_o.get_street_lengths(graph_streets) 116 | rand_street_idxs = vehicles.choose_random_streets( 117 | street_lengths, count_veh) 118 | 119 | # Vehicle generation 120 | vehs = vehicles.generate_vehs(graph_streets, street_idxs=rand_street_idxs) 121 | 122 | # Generate connection matrix 123 | matrix_cons = con_ana.gen_connection_matrix( 124 | vehs, 125 | gdf_buildings, 126 | max_metric, 127 | metric=metric, 128 | graph_streets_wave=graph_streets_wave) 129 | 130 | return matrix_cons, vehs 131 | 132 | 133 | def main_multi_scenario(conf_path=None, scenarios=None): 134 | """Simulates multiple scenarios""" 135 | 136 | # Load the configuration 137 | if scenarios is None: 138 | if conf_path is None: 139 | scenarios = nw_p.get_scenarios_list() 140 | else: 141 | scenarios = nw_p.get_scenarios_list(conf_path) 142 | 143 | if not isinstance(scenarios, (list, tuple)): 144 | raise RuntimeError('Single scenario not supported. Use appropriate function') 145 | 146 | # Iterate scenarios 147 | for scenario in scenarios: 148 | main(conf_path=conf_path, scenario=scenario) 149 | 150 | 151 | def main(conf_path=None, scenario=None): 152 | """Main simulation function""" 153 | 154 | # TODO: why is global keyword needed? 155 | global rte_count_con_checkpoint 156 | global rte_count_con_total 157 | global rte_time_start 158 | global rte_time_checkpoint 159 | 160 | # Load the configuration 161 | if conf_path is None: 162 | config = nw_p.params_from_conf() 163 | if scenario is None: 164 | config_scenario = nw_p.params_from_conf(in_key=config['scenario']) 165 | else: 166 | config_scenario = nw_p.params_from_conf(in_key=scenario) 167 | config['scenario'] = scenario 168 | else: 169 | config = nw_p.params_from_conf(config_file=conf_path) 170 | if scenario is None: 171 | config_scenario = nw_p.params_from_conf(in_key=config['scenario'], config_file=conf_path) 172 | else: 173 | config_scenario = nw_p.params_from_conf(in_key=scenario, config_file=conf_path) 174 | config['scenario'] = scenario 175 | 176 | if isinstance(config_scenario, (list, tuple)): 177 | raise RuntimeError('Multiple scenarios not supported. 
Use appropriate function') 178 | 179 | # Merge the two configurations 180 | config = nw_p.merge(config, config_scenario) 181 | 182 | # Sanitize config 183 | config = nw_p.check_fill_config(config) 184 | densities_veh = config['densities_veh'] 185 | 186 | loglevel = logging.getLevelName(config['loglevel']) 187 | logger = logging.getLogger() 188 | logger.setLevel(loglevel) 189 | 190 | # Setup OSMnx 191 | # We are logging to dev/null as a workaround to get nice log output and so that specified levels are respected 192 | ox_a.setup() 193 | 194 | # Load street network 195 | time_start = utils.debug(None, 'Loading street network') 196 | net = ox_a.load_network(config['place'], 197 | which_result=config['which_result'], 198 | tolerance=config['building_tolerance']) 199 | graph_streets = net['graph_streets'] 200 | utils.debug(time_start) 201 | 202 | # Convert vehicle densities to counts 203 | counts_veh = np.zeros(densities_veh.size, dtype=int) 204 | 205 | if config['density_type'] == 'length': 206 | street_lengths = geom_o.get_street_lengths(graph_streets) 207 | 208 | # Determine total vehicle count 209 | for idx, density_veh in enumerate(densities_veh): 210 | if config['density_type'] == 'absolute': 211 | counts_veh[idx] = int(density_veh) 212 | elif config['density_type'] == 'length': 213 | counts_veh[idx] = int(round(density_veh * np.sum(street_lengths))) 214 | elif config['density_type'] == 'area': 215 | area = net['gdf_boundary'].area 216 | counts_veh[idx] = int(round(density_veh * area)) 217 | else: 218 | raise ValueError('Density type not supported') 219 | 220 | # Run time estimation 221 | if config['simulation_mode'] == 'demo': 222 | time_steps = 1 223 | elif config['distribution_veh'] == 'SUMO': 224 | time_steps = config['sumo']['sim_duration'] - \ 225 | config['sumo']['warmup_duration'] 226 | elif config['distribution_veh'] == 'uniform': 227 | time_steps = config['iterations'] 228 | 229 | rte_counts_con = comb(counts_veh, 2) * time_steps 230 | rte_count_con_total = np.sum(rte_counts_con) 231 | rte_time_start = time.time() 232 | rte_count_con_checkpoint = 0 233 | 234 | # Save start time 235 | time_start_total = time.time() 236 | 237 | # Iterate densities 238 | for idx_count_veh, count_veh in enumerate(counts_veh): 239 | 240 | # Determine results path and check if it exists 241 | if config['results_file_prefix'] is not None: 242 | filename_prefix = utils.string_to_filename(config['results_file_prefix']) 243 | elif 'scenario' in config: 244 | filename_prefix = utils.string_to_filename(config['scenario']) 245 | else: 246 | filename_prefix = utils.string_to_filename(config['place']) 247 | 248 | file_name = '{}.{:d}.pickle.xz'.format(filename_prefix, count_veh) 249 | 250 | if config['results_file_dir'] is not None: 251 | file_dir = config['results_file_dir'] 252 | else: 253 | file_dir = 'results' 254 | 255 | filepath_res = os.path.join(file_dir, file_name) 256 | 257 | result_file_exists = os.path.isfile(filepath_res) 258 | if result_file_exists: 259 | if config['overwrite_result']: 260 | logging.warning('Results file already exists. Overwriting') 261 | else: 262 | logging.warning('Results file already exists. 
Skipping simulation') 263 | continue 264 | 265 | time_start_iter = time.time() 266 | logging.info('Simulating {:d} vehicles'.format(count_veh)) 267 | 268 | if config['distribution_veh'] == 'SUMO': 269 | if not config['sumo']['skip_sumo']: 270 | # Run SUMO interface functions 271 | time_start = utils.debug(None, 'Running SUMO interface') 272 | veh_traces = sumo.simple_wrapper( 273 | config['place'], 274 | which_result=config['which_result'], 275 | count_veh=count_veh, 276 | duration=config['sumo']['sim_duration'], 277 | warmup_duration=config['sumo']['warmup_duration'], 278 | max_speed=config['sumo']['max_speed'], 279 | tls_settings=config['sumo']['tls_settings'], 280 | fringe_factor=config['sumo']['fringe_factor'], 281 | intermediate_points=config['sumo']['intermediate_points'], 282 | coordinate_tls=config['sumo']['coordinate_tls'], 283 | directory=config['sumo']['directory'], 284 | veh_rate_factor=config['sumo']['veh_rate_factor']) 285 | utils.debug(time_start) 286 | else: 287 | # Load vehicle traces 288 | time_start = utils.debug(None, 'Loading vehicle traces') 289 | veh_traces = sumo.load_veh_traces( 290 | config['place'], 291 | file_suffix=str(count_veh), 292 | directory=config['sumo']['directory'], 293 | delete_first_n=config['sumo']['warmup_duration'], 294 | count_veh=count_veh) 295 | utils.debug(time_start) 296 | 297 | if config['sumo']['abort_after_sumo']: 298 | logger.warning('Aborting after SUMO completed') 299 | continue 300 | 301 | # Determine connected vehicles 302 | if config['simulation_mode'] == 'parallel': 303 | if config['distribution_veh'] == 'SUMO': 304 | if config['connection_metric'] == 'distance': 305 | sim_param_list = \ 306 | zip(veh_traces, 307 | repeat(net['graph_streets']), 308 | repeat(net['gdf_buildings']), 309 | repeat(config['max_connection_metric']), 310 | repeat(config['connection_metric'])) 311 | elif config['connection_metric'] == 'pathloss': 312 | sim_param_list = \ 313 | zip(veh_traces, 314 | repeat(net['graph_streets']), 315 | repeat(net['gdf_buildings']), 316 | repeat(config['max_connection_metric']), 317 | repeat(config['connection_metric']), 318 | repeat(net['graph_streets_wave'])) 319 | else: 320 | raise NotImplementedError( 321 | 'Connection metric not supported') 322 | with mp.Pool(processes=config['processes']) as pool: 323 | mp_res = pool.starmap( 324 | sim_single_sumo, 325 | sim_param_list 326 | ) 327 | 328 | elif config['distribution_veh'] == 'uniform': 329 | random_seeds = np.arange(config['iterations']) 330 | 331 | if config['connection_metric'] == 'distance': 332 | sim_param_list = \ 333 | zip(random_seeds, 334 | repeat(count_veh), 335 | repeat(net['graph_streets']), 336 | repeat(net['gdf_buildings']), 337 | repeat(config['max_connection_metric']), 338 | repeat(config['connection_metric'])) 339 | elif config['connection_metric'] == 'pathloss': 340 | sim_param_list = \ 341 | zip(random_seeds, 342 | repeat(count_veh), 343 | repeat(net['graph_streets']), 344 | repeat(net['gdf_buildings']), 345 | repeat(config['max_connection_metric']), 346 | repeat(config['connection_metric']), 347 | repeat(net['graph_streets_wave'])) 348 | else: 349 | raise NotImplementedError( 350 | 'Connection metric not supported') 351 | with mp.Pool(processes=config['processes']) as pool: 352 | mp_res = pool.starmap( 353 | sim_single_uniform, 354 | sim_param_list) 355 | 356 | else: 357 | raise NotImplementedError( 358 | 'Vehicle distribution type not supported') 359 | 360 | # Check result 361 | if len(mp_res) == 0: 362 | matrices_cons, vehs = [], [] 363 | else: 364 | 
matrices_cons, vehs = list(zip(*mp_res)) 365 | 366 | # Define which variables to save in a file 367 | results = {'matrices_cons': matrices_cons, 'vehs': vehs} 368 | 369 | elif config['simulation_mode'] == 'sequential': 370 | if config['distribution_veh'] == 'SUMO': 371 | matrices_cons = np.zeros(veh_traces.size, dtype=object) 372 | vehs = np.zeros(veh_traces.size, dtype=object) 373 | for idx, snapshot in enumerate(veh_traces): 374 | time_start = utils.debug( 375 | None, 'Analyzing snapshot {:d}'.format(idx)) 376 | 377 | if config['connection_metric'] == 'distance': 378 | matrix_cons_snapshot, vehs_snapshot = \ 379 | sim_single_sumo( 380 | snapshot, 381 | net['graph_streets'], 382 | net['gdf_buildings'], 383 | max_metric=config['max_connection_metric'], 384 | metric=config['connection_metric']) 385 | elif config['connection_metric'] == 'pathloss': 386 | matrix_cons_snapshot, vehs_snapshot = \ 387 | sim_single_sumo( 388 | snapshot, 389 | net['graph_streets'], 390 | net['gdf_buildings'], 391 | max_metric=config['max_connection_metric'], 392 | metric=config['connection_metric'], 393 | graph_streets_wave=net['graph_streets_wave']) 394 | else: 395 | raise NotImplementedError( 396 | 'Connection metric not supported') 397 | 398 | matrices_cons[idx] = matrix_cons_snapshot 399 | vehs[idx] = vehs_snapshot 400 | utils.debug(time_start) 401 | elif config['distribution_veh'] == 'uniform': 402 | matrices_cons = np.zeros(config['iterations'], dtype=object) 403 | vehs = np.zeros(config['iterations'], dtype=object) 404 | for iteration in np.arange(config['iterations']): 405 | time_start = utils.debug( 406 | None, 'Analyzing iteration {:d}'.format(iteration)) 407 | 408 | if config['connection_metric'] == 'distance': 409 | matrix_cons_snapshot, vehs_snapshot = \ 410 | sim_single_uniform( 411 | iteration, 412 | count_veh, 413 | net['graph_streets'], 414 | net['gdf_buildings'], 415 | max_metric=config['max_connection_metric'], 416 | metric=config['connection_metric']) 417 | elif config['connection_metric'] == 'pathloss': 418 | matrix_cons_snapshot, vehs_snapshot = \ 419 | sim_single_uniform( 420 | iteration, 421 | count_veh, 422 | net['graph_streets'], 423 | net['gdf_buildings'], 424 | max_metric=config['max_connection_metric'], 425 | metric=config['connection_metric'], 426 | graph_streets_wave=net['graph_streets_wave']) 427 | else: 428 | raise NotImplementedError( 429 | 'Connection metric not supported') 430 | 431 | matrices_cons[iteration] = matrix_cons_snapshot 432 | vehs[iteration] = vehs_snapshot 433 | utils.debug(time_start) 434 | else: 435 | raise NotImplementedError( 436 | 'Vehicle distribution type not supported') 437 | 438 | # Define which variables to save in a file 439 | results = {'matrices_cons': matrices_cons, 'vehs': vehs} 440 | 441 | elif config['simulation_mode'] == 'demo': 442 | vehicles.place_vehicles_in_network(net, 443 | density_veh=config['densities_veh'], 444 | density_type=config['density_type']) 445 | demo.simulate(net, max_pl=config['max_connection_metric']) 446 | 447 | # Define which variables to save in a file 448 | results = {'vehs': net['vehs']} 449 | 450 | else: 451 | raise NotImplementedError('Simulation mode not supported') 452 | 453 | # Progress report 454 | rte_time_checkpoint = time.time() - rte_time_start 455 | rte_count_con_checkpoint += rte_counts_con[idx_count_veh] 456 | log_progress(rte_count_con_checkpoint, rte_count_con_total, 457 | rte_time_checkpoint, rte_time_start) 458 | 459 | # Save in and outputs 460 | config_save = config.copy() 461 | config_save['count_veh'] = 
count_veh 462 | 463 | time_finish_iter = time.time() 464 | info_vars = {'time_start': time_start_iter, 465 | 'time_finish': time_finish_iter} 466 | save_vars = {'config': config_save, 467 | 'results': results, 468 | 'info': info_vars} 469 | 470 | utils.save(save_vars, filepath_res) 471 | 472 | time_finish_total = time.time() 473 | runtime_total = time_finish_total - time_start_total 474 | logging.info('Total simulation runtime: {}'.format(utils.seconds_to_string(runtime_total))) 475 | 476 | # TODO: runtime estimation should also include result analysis! 477 | # Analyze simulation results 478 | if config['analyze_results'] is not None: 479 | if config['distribution_veh'] == 'SUMO' and config['sumo']['abort_after_sumo']: 480 | logging.warning('Not running result analysis because simulation was skipped') 481 | else: 482 | result_analysis.main(conf_path, scenario) 483 | 484 | # Send mail 485 | if config['send_mail']: 486 | utils.send_mail_finish(config['mail_to'], time_start=time_start_total) 487 | 488 | if config['save_plot']: 489 | if config['plot_dir'] is None: 490 | plot_dir = 'images' 491 | else: 492 | plot_dir = config['plot_dir'] 493 | 494 | if not os.path.isdir(plot_dir): 495 | os.makedirs(plot_dir) 496 | 497 | plot.setup() 498 | time_start = utils.debug(None, 'Plotting') 499 | 500 | if config['simulation_mode'] == 'demo': 501 | # Plot the vehicles 502 | path = os.path.join(plot_dir, 'vehicles.pdf') 503 | plot.plot_vehs(net['graph_streets'], net['gdf_buildings'], net['vehs'], 504 | show=False, path=path, overwrite=config['overwrite_result']) 505 | 506 | # Plot propagation conditions 507 | path = os.path.join(plot_dir, 'prop_cond.pdf') 508 | plot.plot_prop_cond(net['graph_streets'], net['gdf_buildings'], 509 | net['vehs'], show=False, path=path, overwrite=config['overwrite_result']) 510 | 511 | # Plot pathloss 512 | path = os.path.join(plot_dir, 'pathloss.pdf') 513 | plot.plot_pathloss(net['graph_streets'], net['gdf_buildings'], 514 | net['vehs'], show=False, path=path, overwrite=config['overwrite_result']) 515 | 516 | # Plot connection status 517 | path = os.path.join(plot_dir, 'con_status.pdf') 518 | plot.plot_con_status(net['graph_streets'], net['gdf_buildings'], 519 | net['vehs'], show=False, path=path, overwrite=config['overwrite_result']) 520 | elif config['distribution_veh'] == 'SUMO': 521 | 522 | if len(counts_veh) > 1: 523 | logging.warning('Multiple vehicle counts simulated, but will only generate plot for last one') 524 | 525 | # Plot animation of vehicle traces 526 | path = os.path.join(plot_dir, 'veh_traces.mp4') 527 | plot.plot_veh_traces_animation( 528 | veh_traces, net['graph_streets'], net['gdf_buildings'], show=False, path=path, 529 | overwrite=config['overwrite_result']) 530 | 531 | # Plot vehicle positions at the end of simulation time 532 | vehs_snapshot = veh_traces[-1] 533 | vehs = sumo.vehicles_from_traces(net['graph_streets'], vehs_snapshot) 534 | path = os.path.join(plot_dir, 'vehs_snapshot_end.pdf') 535 | plot.plot_vehs(net['graph_streets'], net['gdf_buildings'], vehs, show=False, path=path, 536 | overwrite=config['overwrite_result']) 537 | 538 | utils.debug(time_start) 539 | 540 | 541 | if __name__ == '__main__': 542 | # Parse command line options 543 | (options, _) = parse_cmd_args() 544 | 545 | # Register signal handler 546 | signal.signal(signal.SIGTSTP, signal_handler) 547 | 548 | # Run main simulation 549 | if options.multi: 550 | main_multi_scenario(conf_path=options.conf_path, scenarios=options.scenario) 551 | else: 552 | 
main(conf_path=options.conf_path, scenario=options.scenario) 553 | -------------------------------------------------------------------------------- /vtovosm/sumo.py: -------------------------------------------------------------------------------- 1 | """Interface to SUMO – Simulation of Urban MObility, sumo.dlr.de""" 2 | 3 | import logging 4 | import os 5 | import subprocess as sproc 6 | import sys 7 | import xml.etree.cElementTree as ET 8 | 9 | import numpy as np 10 | import osmnx as ox 11 | import shapely.geometry as geom 12 | 13 | from . import osm_xml 14 | from . import osmnx_addons as ox_a 15 | from . import utils 16 | from . import vehicles 17 | 18 | 19 | def simple_wrapper(place, 20 | which_result=1, 21 | count_veh=None, 22 | duration=3600, 23 | warmup_duration=0, 24 | max_speed=None, 25 | tls_settings=None, 26 | fringe_factor=None, 27 | intermediate_points=None, 28 | start_veh_simult=True, 29 | coordinate_tls=True, 30 | directory='sumo_data/', 31 | skip_if_exists=True, 32 | veh_class='passenger', 33 | veh_rate_factor=None): 34 | """Generates and downloads all necessary files, runs a generic SUMO simulation 35 | and returns the vehicle traces.""" 36 | 37 | filename_place = utils.string_to_filename(place) 38 | if count_veh is not None: 39 | filename_place_count = filename_place + '.' + str(count_veh) 40 | else: 41 | filename_place_count = filename_place 42 | path_network_sumo = os.path.join(directory, filename_place + '.net.xml') 43 | filename_network_osm = filename_place + '_city.osm.xml' 44 | path_network_osm = os.path.join( 45 | directory, filename_network_osm) 46 | path_trips = os.path.join( 47 | directory, filename_place_count + '.' + veh_class + '.trips.xml') 48 | path_tls = os.path.join( 49 | directory, filename_place + '.' + veh_class + '.tls.xml') 50 | path_cfg = os.path.join(directory, filename_place_count + '.sumocfg') 51 | path_traces = os.path.join(directory, filename_place_count + '.traces.xml') 52 | 53 | # Create the output directory if it does not exist 54 | if not os.path.isdir(directory): 55 | os.makedirs(directory) 56 | 57 | if not (skip_if_exists and os.path.isfile(path_network_osm)): 58 | logging.info('Downloading street network from OpenStreetMap') 59 | 60 | if which_result is None: 61 | which_result = ox_a.which_result_polygon(place) 62 | 63 | download_streets_from_name( 64 | place, which_result=which_result, prefix=filename_place, directory=directory) 65 | 66 | else: 67 | logging.info('Skipping street network download from OpenStreetMap') 68 | 69 | if not (skip_if_exists and os.path.isfile(path_network_sumo)): 70 | logging.info('Generating SUMO street network') 71 | build_network(filename_network_osm, veh_class=veh_class, 72 | prefix=filename_place, tls_settings=tls_settings, directory=directory) 73 | else: 74 | logging.info('Skipping SUMO street network generation') 75 | 76 | if not (skip_if_exists and os.path.isfile(path_trips)): 77 | logging.info('Generating trips') 78 | if count_veh is not None: 79 | # Generate more trips than needed because validation will throw some away 80 | if veh_rate_factor is None: 81 | veh_rate_factor = 0.5 82 | veh_rate = duration / count_veh * veh_rate_factor 83 | else: 84 | veh_rate = 1 85 | 86 | create_random_trips(place, 87 | directory=directory, 88 | file_suffix=str(count_veh), 89 | fringe_factor=fringe_factor, 90 | veh_period=veh_rate, 91 | intermediate_points=intermediate_points) 92 | modify_trips(place, 93 | directory=directory, 94 | file_suffix=str(count_veh), 95 | start_all_at_zero=start_veh_simult, 96 | rename_ids=True, 
97 | limit_veh_count=count_veh, 98 | max_speed=max_speed) 99 | else: 100 | logging.info('Skipping trip generation') 101 | 102 | if coordinate_tls and not (skip_if_exists and os.path.isfile(path_tls)): 103 | logging.info('Generating SUMO TLS coordination') 104 | if count_veh is not None: 105 | count_veh_tls = int(np.ceil(count_veh / 10)) 106 | else: 107 | count_veh_tls = None 108 | 109 | generate_tls_coordination(place, 110 | directory=directory, 111 | file_suffix=str(count_veh), 112 | count_veh=count_veh_tls) 113 | else: 114 | logging.info('Skipping SUMO TLS coordination') 115 | 116 | if not (skip_if_exists and os.path.isfile(path_cfg)): 117 | logging.info('Generating SUMO simulation configuration') 118 | gen_simulation_conf( 119 | place, 120 | directory=directory, 121 | file_suffix=str(count_veh), 122 | seconds_end=duration, 123 | max_count_veh=count_veh, 124 | coordinate_tls=coordinate_tls) 125 | else: 126 | logging.info('Skipping SUMO simulation configuration generation') 127 | 128 | if not (skip_if_exists and os.path.isfile(path_traces)): 129 | logging.info('Running SUMO simulation') 130 | run_simulation(place, file_suffix=str(count_veh), directory=directory) 131 | else: 132 | logging.info('Skipping SUMO simulation run') 133 | 134 | logging.info('Loading parsing and cleaning vehicle traces') 135 | traces = load_veh_traces(place, 136 | file_suffix=str(count_veh), 137 | directory=directory, 138 | delete_first_n=warmup_duration, 139 | count_veh=count_veh) 140 | 141 | return traces 142 | 143 | 144 | def gen_simulation_conf(place, 145 | directory='', 146 | file_suffix=None, 147 | seconds_end=None, 148 | veh_class='passenger', 149 | max_count_veh=None, 150 | coordinate_tls=True, 151 | use_route_file=True, 152 | debug=False, 153 | bin_dir=''): 154 | """Generates a SUMO simulation configuration file""" 155 | 156 | filename_place = utils.string_to_filename(place) 157 | 158 | if file_suffix is None: 159 | filename_place_suffix = filename_place 160 | else: 161 | filename_place_suffix = filename_place + '.' + str(file_suffix) 162 | 163 | path_cfg = os.path.join(directory, filename_place_suffix + '.sumocfg') 164 | path_bin = os.path.join(bin_dir, 'sumo') 165 | filename_network = filename_place + '.net.xml' 166 | filename_trips = filename_place_suffix + '.' + veh_class + '.trips.xml' 167 | filename_tls = filename_place + '.' + veh_class + '.tls.xml' 168 | filename_routes = filename_place_suffix + '.' 
+ veh_class + '.rou.xml' 169 | 170 | arguments = [path_bin, 171 | '-n', filename_network, 172 | '--duration-log.statistics', 173 | '--device.rerouting.adaptation-steps', '180', 174 | '--no-step-log', 175 | '--save-configuration', path_cfg, 176 | '--ignore-route-errors'] 177 | 178 | if max_count_veh is not None: 179 | arguments += ['--max-num-vehicles', str(max_count_veh)] 180 | 181 | if seconds_end is not None: 182 | arguments += ['--end', str(seconds_end)] 183 | 184 | if coordinate_tls: 185 | arguments += ['-a', filename_tls] 186 | 187 | if use_route_file: 188 | arguments += ['-r', filename_routes] 189 | else: 190 | arguments += ['-r', filename_trips] 191 | 192 | proc = sproc.Popen(arguments, stdout=sproc.PIPE, stderr=sproc.PIPE) 193 | out_text, err_text = proc.communicate() 194 | exit_code = proc.returncode 195 | 196 | if exit_code != 0: 197 | utils.print_nnl(err_text.decode(), file=sys.stderr) 198 | raise RuntimeError('SUMO quit with nonzero exit code') 199 | 200 | if debug: 201 | utils.print_nnl(out_text.decode()) 202 | utils.print_nnl(err_text.decode(), file=sys.stderr) 203 | 204 | return exit_code 205 | 206 | 207 | def run_simulation(place, directory='', file_suffix=None, debug=False, bin_dir=''): 208 | """Runs a SUMO simulations and saves the vehicle traces""" 209 | 210 | filename_place = utils.string_to_filename(place) 211 | 212 | if file_suffix is None: 213 | filename_place_suffix = filename_place 214 | else: 215 | filename_place_suffix = filename_place + '.' + str(file_suffix) 216 | 217 | path_cfg = os.path.join(directory, filename_place_suffix + '.sumocfg') 218 | path_traces = os.path.join( 219 | directory, filename_place_suffix + '.traces.xml') 220 | path_bin = os.path.join(bin_dir, 'sumo') 221 | 222 | arguments = [path_bin, 223 | '-c', path_cfg, 224 | '--fcd-output', path_traces] 225 | 226 | proc = sproc.Popen(arguments, stdout=sproc.PIPE, stderr=sproc.PIPE) 227 | out_text, err_text = proc.communicate() 228 | exit_code = proc.returncode 229 | 230 | if exit_code != 0: 231 | utils.print_nnl(err_text.decode(), file=sys.stderr) 232 | raise RuntimeError('SUMO quit with nonzero exit code') 233 | 234 | if debug: 235 | utils.print_nnl(out_text.decode()) 236 | utils.print_nnl(err_text.decode(), file=sys.stderr) 237 | 238 | return exit_code 239 | 240 | 241 | def modify_trips(place, 242 | directory='', 243 | file_suffix=None, 244 | start_all_at_zero=False, 245 | rename_ids=False, 246 | limit_veh_count=None, 247 | max_speed=None, 248 | modify_routes=True, 249 | veh_class='passenger', 250 | prefix='veh'): 251 | """Modifies the randomly generated trips according to the parameters""" 252 | 253 | filename_place = utils.string_to_filename(place) 254 | 255 | if file_suffix is None: 256 | filename_place_suffix = filename_place 257 | else: 258 | filename_place_suffix = filename_place + '.' + str(file_suffix) 259 | 260 | path_trips = os.path.join( 261 | directory, filename_place_suffix + '.' + veh_class + '.trips.xml') 262 | path_routes = os.path.join( 263 | directory, filename_place_suffix + '.' 
+ veh_class + '.rou.xml') 264 | 265 | # Modify trips file 266 | tree = ET.parse(path_trips) 267 | root = tree.getroot() 268 | 269 | if limit_veh_count is not None: 270 | for trip in root.findall('trip')[limit_veh_count:]: 271 | root.remove(trip) 272 | 273 | if start_all_at_zero: 274 | for trip in root.findall('trip'): 275 | trip.attrib['depart'] = '0.00' 276 | 277 | if rename_ids: 278 | for idx, trip in enumerate(root.findall('trip')): 279 | trip.attrib['id'] = prefix + str(idx) 280 | 281 | if max_speed is not None: 282 | for vtype in root.findall('vType'): 283 | vtype.attrib['maxSpeed'] = str(max_speed) 284 | 285 | tree.write(path_trips, 'UTF-8') 286 | 287 | # Modify routes file 288 | if not modify_routes: 289 | return 290 | 291 | tree = ET.parse(path_routes) 292 | root = tree.getroot() 293 | 294 | if limit_veh_count is not None: 295 | for trip in root.findall('vehicle')[limit_veh_count:]: 296 | root.remove(trip) 297 | 298 | if start_all_at_zero: 299 | for trip in root.findall('vehicle'): 300 | trip.attrib['depart'] = '0.00' 301 | 302 | if rename_ids: 303 | for idx, trip in enumerate(root.findall('vehicle')): 304 | trip.attrib['id'] = prefix + str(idx) 305 | 306 | if max_speed is not None: 307 | for vtype in root.findall('vType'): 308 | vtype.attrib['maxSpeed'] = str(max_speed) 309 | 310 | tree.write(path_routes, 'UTF-8') 311 | 312 | 313 | def create_random_trips(place, 314 | directory='', 315 | file_suffix=None, 316 | random_seed=42, 317 | seconds_end=3600, 318 | fringe_factor=None, 319 | veh_period=1, 320 | veh_class='passenger', 321 | prefix='veh', 322 | min_dist=300, 323 | intermediate_points=None, 324 | debug=False, 325 | script_dir=None): 326 | """Creates random vehicle trips on a street network""" 327 | 328 | filename_place = utils.string_to_filename(place) 329 | 330 | if file_suffix is None: 331 | filename_place_suffix = filename_place 332 | else: 333 | filename_place_suffix = filename_place + '.' + str(file_suffix) 334 | 335 | path_network = os.path.join( 336 | directory, filename_place + '.net.xml') 337 | path_routes = os.path.join( 338 | directory, filename_place_suffix + '.' + veh_class + '.rou.xml') 339 | path_trips = os.path.join( 340 | directory, filename_place_suffix + '.' 
+ veh_class + '.trips.xml') 341 | 342 | if script_dir is None: 343 | script_dir = search_tool_dir() 344 | 345 | arguments = [os.path.join(script_dir, 'randomTrips.py'), 346 | '-n', path_network, 347 | '-s', str(random_seed), 348 | '-e', str(seconds_end), 349 | '-p', str(veh_period), 350 | '-r', path_routes, 351 | '-o', path_trips, 352 | '--vehicle-class', veh_class, 353 | '--vclass', veh_class, 354 | '--prefix', prefix, 355 | '--min-distance', str(min_dist), 356 | '--validate'] 357 | 358 | if intermediate_points is not None: 359 | arguments += ['--intermediate', str(intermediate_points)] 360 | 361 | if fringe_factor is not None: 362 | arguments += ['--fringe-factor', str(fringe_factor)] 363 | 364 | proc = sproc.Popen(arguments, stdout=sproc.PIPE, stderr=sproc.PIPE) 365 | out_text, err_text = proc.communicate() 366 | exit_code = proc.returncode 367 | 368 | if exit_code != 0: 369 | utils.print_nnl(err_text.decode(), file=sys.stderr) 370 | raise RuntimeError('Trip generation script quit with nonzero exit code') 371 | 372 | if debug: 373 | utils.print_nnl(out_text.decode()) 374 | utils.print_nnl(err_text.decode(), file=sys.stderr) 375 | 376 | return exit_code 377 | 378 | 379 | def build_network(filename, 380 | veh_class='passenger', 381 | prefix=None, 382 | tls_settings=None, 383 | directory='', 384 | debug=False, 385 | script_dir=None, 386 | remove_isolated=True): 387 | """Converts a OpenStreetMap files to a SUMO street network file""" 388 | 389 | filepath = os.path.join(directory, filename) 390 | 391 | if script_dir is None: 392 | script_dir = search_tool_dir() 393 | 394 | arguments = [script_dir + '/osmBuild.py', '-f', filepath, '-c', veh_class] 395 | 396 | if prefix is not None: 397 | arguments += ['-p', prefix] 398 | 399 | if directory != '': 400 | arguments += ['-d', directory] 401 | 402 | if isinstance(tls_settings, dict): 403 | # Taken from osmBuild.py 404 | netconvert_opts = '--geometry.remove,' + \ 405 | '--roundabouts.guess,' + \ 406 | '--ramps.guess,' + \ 407 | '-v,' + \ 408 | '--junctions.join,' + \ 409 | '--tls.guess-signals,' + \ 410 | '--tls.discard-simple,' + \ 411 | '--tls.join,' + \ 412 | '--output.original-names,' + \ 413 | '--junctions.corner-detail,5,' + \ 414 | '--output.street-names' 415 | 416 | if remove_isolated: 417 | netconvert_opts += ',--remove-edges.isolated' 418 | 419 | if ('cycle_time' in tls_settings) and ('green_time' in tls_settings): 420 | raise RuntimeError( 421 | 'Cycle time and green time can not be set simultaneosly') 422 | 423 | if 'cycle_time' in tls_settings: 424 | netconvert_opts += ',--tls.cycle.time,' + \ 425 | str(round(tls_settings['cycle_time'])) 426 | 427 | if 'green_time' in tls_settings: 428 | netconvert_opts += ',--tls.green.time,' + \ 429 | str(round(tls_settings['green_time'])) 430 | 431 | if 'yellow_time' in tls_settings: 432 | netconvert_opts += ',--tls.yellow.time,' + \ 433 | str(round(tls_settings['yellow_time'])) 434 | 435 | arguments += ['--netconvert-options', netconvert_opts] 436 | 437 | proc = sproc.Popen(arguments, stdout=sproc.PIPE, stderr=sproc.PIPE) 438 | out_text, err_text = proc.communicate() 439 | exit_code = proc.returncode 440 | 441 | if exit_code != 0: 442 | utils.print_nnl(err_text.decode(), file=sys.stderr) 443 | raise RuntimeError('Network build script quit with nonzero exit code') 444 | 445 | if debug: 446 | utils.print_nnl(out_text.decode()) 447 | utils.print_nnl(err_text.decode(), file=sys.stderr) 448 | 449 | return exit_code 450 | 451 | 452 | def generate_tls_coordination(place, 453 | directory='', 454 | 

def generate_tls_coordination(place,
                              directory='',
                              file_suffix=None,
                              veh_class='passenger',
                              count_veh=None,
                              debug=False,
                              script_dir=None):
    """Generates a traffic light system coordination"""

    filename_place = utils.string_to_filename(place)

    if file_suffix is None:
        filename_place_suffix = filename_place
    else:
        filename_place_suffix = filename_place + '.' + str(file_suffix)

    path_network = os.path.join(
        directory, filename_place + '.net.xml')
    path_tls = os.path.join(
        directory, filename_place + '.' + veh_class + '.tls.xml')

    if count_veh is None:
        path_routes = os.path.join(
            directory, filename_place_suffix + '.' + veh_class + '.rou.xml')
    else:
        path_routes_full = os.path.join(
            directory, filename_place_suffix + '.' + veh_class + '.rou.xml')
        path_routes = os.path.join(
            directory, filename_place_suffix + '.' + veh_class + '.rou_part.xml')
        tree = ET.parse(path_routes_full)
        root = tree.getroot()
        for vehicle in root.findall('vehicle')[count_veh:]:
            root.remove(vehicle)

        tree.write(path_routes, 'UTF-8')

    if script_dir is None:
        script_dir = search_tool_dir()

    arguments = [script_dir + '/tlsCoordinator.py',
                 '-n', path_network,
                 '-r', path_routes,
                 '-o', path_tls]

    proc = sproc.Popen(arguments, stdout=sproc.PIPE, stderr=sproc.PIPE)
    out_text, err_text = proc.communicate()
    exit_code = proc.returncode

    if exit_code != 0:
        utils.print_nnl(err_text.decode(), file=sys.stderr)
        raise RuntimeError('TLS coordination script quit with nonzero exit code')

    if debug:
        utils.print_nnl(out_text.decode())
        utils.print_nnl(err_text.decode(), file=sys.stderr)

    return exit_code


def download_streets_from_id(area_id,
                             prefix=None,
                             directory='',
                             debug=False,
                             script_dir=None):
    """Downloads street data identified by its OSM area ID from OpenStreetMap
    with the SUMO helper script"""

    if script_dir is None:
        script_dir = search_tool_dir()

    arguments = [script_dir + '/osmGet.py', '-a', str(area_id)]
    if prefix is not None:
        arguments += ['-p', prefix]
    if directory != '':
        arguments += ['-d', directory]

    proc = sproc.Popen(arguments, stdout=sproc.PIPE, stderr=sproc.PIPE)
    out_text, err_text = proc.communicate()
    exit_code = proc.returncode

    if exit_code != 0:
        utils.print_nnl(err_text.decode(), file=sys.stderr)
        raise RuntimeError('OSM download script quit with nonzero exit code')

    if debug:
        utils.print_nnl(out_text.decode())
        utils.print_nnl(err_text.decode(), file=sys.stderr)

    return exit_code

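
# Illustrative usage sketch (the place name, directory, vehicle count and area ID
# below are placeholders, not real values): once a network and a route file
# exist, 'tlsCoordinator.py' can derive a coordinated traffic light plan;
# alternatively, raw street data can be fetched directly by an OSM area ID.
def _example_tls_and_id_download():
    """Illustrative only: TLS coordination plus download by placeholder OSM ID."""
    generate_tls_coordination('Neubau - Vienna - Austria',
                              directory='sumo_data',
                              veh_class='passenger',
                              count_veh=100)
    download_streets_from_id(1234567, prefix='osm', directory='sumo_data')
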

def download_streets_from_name(place,
                               which_result=1,
                               prefix=None,
                               directory='',
                               debug=False,
                               use_sumo_downloader=False,
                               script_dir=None):
    """Downloads street data identified by its name from OpenStreetMap"""

    # Setup OSMnx
    ox_a.setup()

    if use_sumo_downloader:
        if script_dir is None:
            script_dir = search_tool_dir()

        api_resp = ox.osm_polygon_download(
            place, limit=which_result, polygon_geojson=0)
        if not api_resp:
            raise RuntimeError('Place not found')
        area_id = api_resp[which_result - 1]['osm_id']
        exit_code = download_streets_from_id(
            area_id, prefix, directory, debug, script_dir)
        return exit_code

    else:
        if prefix is None:
            prefix = 'osm'

        file_name = prefix + '_city.osm.xml'
        file_path = os.path.join(directory, file_name)

        gdf_place = ox.gdf_from_place(place, which_result=which_result)
        polygon = gdf_place['geometry'].unary_union
        response = osm_xml.osm_net_download(polygon, network_type='drive')

        with open(file_path, 'wb') as file:
            return_code = file.write(response[0])
        return return_code


def load_veh_traces(place, directory='', file_suffix=None, delete_first_n=0, count_veh=None):
    """Load parsed traces if they are available; otherwise parse them,
    clean them up (if requested) and save them. Return the traces"""

    filename_place = utils.string_to_filename(place)

    if file_suffix is None:
        filename_place_suffix = filename_place
    else:
        filename_place_suffix = filename_place + '.' + str(file_suffix)

    path_and_prefix = os.path.join(directory, filename_place)
    path_and_prefix_suffix = os.path.join(directory, filename_place_suffix)

    filename_traces_npy = path_and_prefix_suffix + '.traces.pickle.xz'
    filename_traces_xml = path_and_prefix_suffix + '.traces.xml'
    filename_network = path_and_prefix + '.net.xml'

    if os.path.isfile(filename_traces_npy):
        traces = utils.load(filename_traces_npy)
    else:
        coord_offsets = get_coordinates_offset(filename_network)
        traces = parse_veh_traces(filename_traces_xml, coord_offsets)
        traces = clean_veh_traces(
            traces, delete_first_n=delete_first_n, count_veh=count_veh)
        utils.save(traces, filename_traces_npy)
    return traces


def clean_veh_traces(veh_traces, delete_first_n=0, count_veh=None):
    """Cleans up vehicle traces according to the given parameters"""

    # Delete first n snapshots
    veh_traces = veh_traces[delete_first_n:]

    # Delete snapshots with wrong number of vehicles
    if count_veh is not None:
        retain_mask = np.ones(veh_traces.size, dtype=bool)
        for idx, snapshot in enumerate(veh_traces):
            if snapshot.size != count_veh:
                retain_mask[idx] = False
                logging.warning(
                    'Vehicle traces snapshot {:d} has wrong size ({:d} instead of {:d}), discarding'.format(
                        idx, snapshot.size, count_veh))
        # Determine the number of discarded snapshots before the mask is applied
        count_discarded = veh_traces.size - np.sum(retain_mask)
        if count_discarded > 0:
            logging.warning('Discarded {:d} out of {:d} snapshots'.format(
                count_discarded, veh_traces.size))
        veh_traces = veh_traces[retain_mask]

    return veh_traces


def parse_veh_traces(filename, offsets=(0, 0), sort=True):
    """Parses a SUMO traces XML file and returns a numpy array"""

    tree = ET.parse(filename)
    root = tree.getroot()

    traces = np.zeros(len(root), dtype=object)
    for idx_timestep, timestep in enumerate(root):
        traces_snapshot = np.zeros(
            len(timestep),
            dtype=[('time', 'float'),
                   ('id', 'uint'),
                   ('x', 'float'),
                   ('y', 'float')])
        for idx_veh_node, veh_node in enumerate(timestep):
            veh = veh_node.attrib
            veh_id = int(veh['id'][3:])
            traces_snapshot[idx_veh_node]['time'] = timestep.attrib['time']
            traces_snapshot[idx_veh_node]['id'] = veh_id
            traces_snapshot[idx_veh_node]['x'] = float(veh['x'])
            traces_snapshot[idx_veh_node]['y'] = float(veh['y'])

        traces_snapshot['x'] -= offsets[0]
        traces_snapshot['y'] -= offsets[1]

        if sort:
            traces_snapshot.sort(order='id')

        traces[idx_timestep] = traces_snapshot

    return traces

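
# Illustrative usage sketch (the place name, directory and cleanup parameters are
# assumptions chosen for illustration): load the cached traces, parsing and
# cleaning them on first use. Each snapshot is a structured NumPy array with the
# fields 'time', 'id', 'x' and 'y'.
def _example_inspect_traces():
    """Illustrative only: print the vehicle positions of the first snapshot."""
    traces = load_veh_traces('Neubau - Vienna - Austria',
                             directory='sumo_data',
                             delete_first_n=600,
                             count_veh=100)
    snapshot = traces[0]
    for vehicle in snapshot:
        print('vehicle {:d} at ({:.1f}, {:.1f})'.format(
            int(vehicle['id']), vehicle['x'], vehicle['y']))
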

def vehicles_from_traces(graph_streets, snapshot):
    """Builds vehicle objects from the street graph
    and a snapshot of the SUMO vehicle traces"""

    count_veh = snapshot.size
    points_vehs = np.zeros(count_veh, dtype=object)

    for veh_idx, vehicle in enumerate(snapshot):
        points_vehs[veh_idx] = geom.Point(vehicle['x'], vehicle['y'])

    vehs = vehicles.generate_vehs(
        graph_streets, street_idxs=None, points_vehs_in=points_vehs)

    return vehs


def get_coordinates_offset(filename):
    """Retrieves the x and y offset of the UTM projection from the SUMO net file"""

    tree = ET.parse(filename)
    root = tree.getroot()
    location = root.find('location')
    offset_string = location.attrib['netOffset']
    offset_string_x, offset_string_y = offset_string.split(',')
    offset_x = float(offset_string_x)
    offset_y = float(offset_string_y)
    offsets = [offset_x, offset_y]
    return offsets


def search_tool_dir():
    """Searches for the SUMO tools directory"""

    paths = ['sumo/sumo/tools',        # Local installation
             '/usr/lib/sumo/tools',    # Arch Linux default location
             '/usr/share/sumo/tools']  # Debian default location
    for path in paths:
        if os.path.isdir(path):
            return path

    raise FileNotFoundError('Could not find the SUMO tools directory')

--------------------------------------------------------------------------------