├── .github
│   └── workflows
│       └── python-app.yml
├── .gitignore
├── LICENSE
├── README.md
├── inputs
│   ├── traj_ltpl_cl
│   │   ├── traj_ltpl_cl_IMS_2020_sim.csv
│   │   ├── traj_ltpl_cl_berlin.csv
│   │   ├── traj_ltpl_cl_modena.csv
│   │   └── traj_ltpl_cl_monteblanco.csv
│   └── veh_dyn_info
│       ├── tpamap_constloc_constvel.csv
│       ├── tpamap_constloc_varvel.csv
│       ├── tpamap_varloc_constvel_berlin.csv
│       └── tpamap_varloc_varvel_berlin.csv
├── outputs
│   ├── .gitkeep
│   ├── tpamap_berlin__27mps.csv
│   ├── tpamap_berlin__56mps.csv
│   ├── tpamap_berlin__83mps.csv
│   └── tpamap_varloc_varvel_berlin.csv
├── requirements.txt
├── resources
│   └── tpamap_berlin.png
├── setup.cfg
├── setup.py
├── tests
│   ├── helperfuncs
│   │   ├── test_calc_cosyidcs.py
│   │   ├── test_concat_tpamaps.py
│   │   ├── test_preprocess_ltplrefline.py
│   │   └── test_transform_coordinates_xy2s.py
│   ├── interface
│   │   ├── test_MapInterface_comm.py
│   │   ├── test_MapInterface_variants.py
│   │   └── test_get_frictionvals.py
│   ├── test_discretization_example.py
│   └── visualization
│       └── test_visualization.py
├── tpa_map_functions
│   ├── __init__.py
│   ├── helperfuncs
│   │   ├── __init__.py
│   │   ├── calc_cosyidcs.py
│   │   ├── concat_tpamaps_ltpl.py
│   │   ├── concat_tpamaps_vehdynsim.py
│   │   ├── import_vehdyninfo.py
│   │   ├── preprocess_ltplrefline.py
│   │   ├── save_tpamap.py
│   │   └── transform_coordinates_xy2s.py
│   ├── interface
│   │   ├── MapInterface.py
│   │   └── __init__.py
│   └── visualization
│       ├── __init__.py
│       └── visualize_tpamap.py
└── tpa_map_gui
    ├── Readme.md
    ├── main_tpaGUI.py
    └── src
        ├── __init__.py
        └── build_GUI.py
/.github/workflows/python-app.yml:
--------------------------------------------------------------------------------
1 | # This workflow will install Python dependencies, run tests and lint with a single version of Python
2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
3 |
4 | name: Python application
5 |
6 | on:
7 |   push:
8 |     branches: "**"
9 |   pull_request:
10 |     branches: [ main ]
11 |
12 | jobs:
13 |   build:
14 |
15 |     runs-on: ubuntu-latest
16 |
17 |     steps:
18 |     - uses: actions/checkout@v2
19 |     - name: Set up Python 3.8
20 |       uses: actions/setup-python@v2
21 |       with:
22 |         python-version: 3.8
23 |     - name: Install dependencies
24 |       run: |
25 |         python -m pip install --upgrade pip
26 |         pip install flake8 pytest
27 |         pip install tpa_map_functions
28 |         if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
29 |     - name: Lint with flake8
30 |       run: |
31 |         # stop the build if there are Python syntax errors or undefined names
32 |         flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
33 |         # exit-zero treats all errors as warnings. The GitHub editor is 120 chars wide
34 |         flake8 . --count --exit-zero --max-line-length=120 --statistics
35 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # git ignore folders
2 | build/
3 | dist/
4 | tpa_map_functions.egg-info/
5 | venv/
6 | .vscode/
7 |
8 | # git ignore files
9 | outputs/*.csv
10 | *.pyc
11 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU LESSER GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 |
9 | This version of the GNU Lesser General Public License incorporates
10 | the terms and conditions of version 3 of the GNU General Public
11 | License, supplemented by the additional permissions listed below.
12 |
13 | 0. Additional Definitions.
14 |
15 | As used herein, "this License" refers to version 3 of the GNU Lesser
16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU
17 | General Public License.
18 |
19 | "The Library" refers to a covered work governed by this License,
20 | other than an Application or a Combined Work as defined below.
21 |
22 | An "Application" is any work that makes use of an interface provided
23 | by the Library, but which is not otherwise based on the Library.
24 | Defining a subclass of a class defined by the Library is deemed a mode
25 | of using an interface provided by the Library.
26 |
27 | A "Combined Work" is a work produced by combining or linking an
28 | Application with the Library. The particular version of the Library
29 | with which the Combined Work was made is also called the "Linked
30 | Version".
31 |
32 | The "Minimal Corresponding Source" for a Combined Work means the
33 | Corresponding Source for the Combined Work, excluding any source code
34 | for portions of the Combined Work that, considered in isolation, are
35 | based on the Application, and not on the Linked Version.
36 |
37 | The "Corresponding Application Code" for a Combined Work means the
38 | object code and/or source code for the Application, including any data
39 | and utility programs needed for reproducing the Combined Work from the
40 | Application, but excluding the System Libraries of the Combined Work.
41 |
42 | 1. Exception to Section 3 of the GNU GPL.
43 |
44 | You may convey a covered work under sections 3 and 4 of this License
45 | without being bound by section 3 of the GNU GPL.
46 |
47 | 2. Conveying Modified Versions.
48 |
49 | If you modify a copy of the Library, and, in your modifications, a
50 | facility refers to a function or data to be supplied by an Application
51 | that uses the facility (other than as an argument passed when the
52 | facility is invoked), then you may convey a copy of the modified
53 | version:
54 |
55 | a) under this License, provided that you make a good faith effort to
56 | ensure that, in the event an Application does not supply the
57 | function or data, the facility still operates, and performs
58 | whatever part of its purpose remains meaningful, or
59 |
60 | b) under the GNU GPL, with none of the additional permissions of
61 | this License applicable to that copy.
62 |
63 | 3. Object Code Incorporating Material from Library Header Files.
64 |
65 | The object code form of an Application may incorporate material from
66 | a header file that is part of the Library. You may convey such object
67 | code under terms of your choice, provided that, if the incorporated
68 | material is not limited to numerical parameters, data structure
69 | layouts and accessors, or small macros, inline functions and templates
70 | (ten or fewer lines in length), you do both of the following:
71 |
72 | a) Give prominent notice with each copy of the object code that the
73 | Library is used in it and that the Library and its use are
74 | covered by this License.
75 |
76 | b) Accompany the object code with a copy of the GNU GPL and this license
77 | document.
78 |
79 | 4. Combined Works.
80 |
81 | You may convey a Combined Work under terms of your choice that,
82 | taken together, effectively do not restrict modification of the
83 | portions of the Library contained in the Combined Work and reverse
84 | engineering for debugging such modifications, if you also do each of
85 | the following:
86 |
87 | a) Give prominent notice with each copy of the Combined Work that
88 | the Library is used in it and that the Library and its use are
89 | covered by this License.
90 |
91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license
92 | document.
93 |
94 | c) For a Combined Work that displays copyright notices during
95 | execution, include the copyright notice for the Library among
96 | these notices, as well as a reference directing the user to the
97 | copies of the GNU GPL and this license document.
98 |
99 | d) Do one of the following:
100 |
101 | 0) Convey the Minimal Corresponding Source under the terms of this
102 | License, and the Corresponding Application Code in a form
103 | suitable for, and under terms that permit, the user to
104 | recombine or relink the Application with a modified version of
105 | the Linked Version to produce a modified Combined Work, in the
106 | manner specified by section 6 of the GNU GPL for conveying
107 | Corresponding Source.
108 |
109 | 1) Use a suitable shared library mechanism for linking with the
110 | Library. A suitable mechanism is one that (a) uses at run time
111 | a copy of the Library already present on the user's computer
112 | system, and (b) will operate properly with a modified version
113 | of the Library that is interface-compatible with the Linked
114 | Version.
115 |
116 | e) Provide Installation Information, but only if you would otherwise
117 | be required to provide such information under section 6 of the
118 | GNU GPL, and only to the extent that such information is
119 | necessary to install and execute a modified version of the
120 | Combined Work produced by recombining or relinking the
121 | Application with a modified version of the Linked Version. (If
122 | you use option 4d0, the Installation Information must accompany
123 | the Minimal Corresponding Source and Corresponding Application
124 | Code. If you use option 4d1, you must provide the Installation
125 | Information in the manner specified by section 6 of the GNU GPL
126 | for conveying Corresponding Source.)
127 |
128 | 5. Combined Libraries.
129 |
130 | You may place library facilities that are a work based on the
131 | Library side by side in a single library together with other library
132 | facilities that are not Applications and are not covered by this
133 | License, and convey such a combined library under terms of your
134 | choice, if you do both of the following:
135 |
136 | a) Accompany the combined library with a copy of the same work based
137 | on the Library, uncombined with any other library facilities,
138 | conveyed under the terms of this License.
139 |
140 | b) Give prominent notice with the combined library that part of it
141 | is a work based on the Library, and explaining where to find the
142 | accompanying uncombined form of the same work.
143 |
144 | 6. Revised Versions of the GNU Lesser General Public License.
145 |
146 | The Free Software Foundation may publish revised and/or new versions
147 | of the GNU Lesser General Public License from time to time. Such new
148 | versions will be similar in spirit to the present version, but may
149 | differ in detail to address new problems or concerns.
150 |
151 | Each version is given a distinguishing version number. If the
152 | Library as you received it specifies that a certain numbered version
153 | of the GNU Lesser General Public License "or any later version"
154 | applies to it, you have the option of following the terms and
155 | conditions either of that published version or of any later version
156 | published by the Free Software Foundation. If the Library as you
157 | received it does not specify a version number of the GNU Lesser
158 | General Public License, you may choose any version of the GNU Lesser
159 | General Public License ever published by the Free Software Foundation.
160 |
161 | If the Library as you received it specifies that a proxy can decide
162 | whether future versions of the GNU Lesser General Public License shall
163 | apply, that proxy's public statement of acceptance of any version is
164 | permanent authorization for you to choose that version for the
165 | Library.
166 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Functions for Tire Performance Assessment Maps
2 |
3 | The software of this repository is used within the [TUM Autonomous Motorsports project](https://www.mw.tum.de/en/ftm/main-research/intelligent-vehicle-systems/roborace-autonomous-motorsport/).
4 |
5 | The core functionality of this repository (in ``./tpa_map_functions``) is available as a PyPI package: https://pypi.org/project/tpa-map-functions/
6 |
7 | ## Description
8 | This repository provides several functions to generate and process race track maps containing location-specific information, which is used for trajectory planning or vehicle dynamics simulation.
9 |
10 | ### local acceleration limits
11 | The local acceleration limits are used for trajectory planning within the TUM Autonomous Motorsports project. These location-dependent values are stored in a tpa-map (tpa = tire performance assessment) and are provided to the local trajectory planning module via an interface. In addition to location-dependent acceleration limits, vehicle velocity-dependent acceleration limits can be included (e.g. to account for aerodynamic downforce acting on the vehicle).
12 |
13 | These tpa-maps are static, but there is an option to update the local acceleration limits in real-time when the tire performance assessment (tpa) module is running in parallel and communication is enabled. This allows real-time adaptation to changing race track conditions.
14 |
15 | ### local tire model scaling
16 | The local tire model scaling is used for vehicle dynamics simulation within the TUM Autonomous Motorsports project. These maps have the same format but contain local scaling factors for the tire model. This allows simulating a varying tire-road friction coefficient. The scaling factors can be location-dependent and time-dependent.
17 |
18 | The picture below shows an example tire performance map of the Berlin Formula-e race track.
19 |
20 | 
21 |
22 | ## List of components
23 | * `inputs`: This folder contains input files (e.g. reference line or tpa-maps).
24 | * `outputs`: This folder contains generated tpa-maps.
25 | * `tests`: This folder contains scripts to test several functions within this repository (e.g. interface, visualization).
26 | * `tpa_map_functions/helperfuncs`: This folder contains some helper functions used within this repository.
27 | * `tpa_map_functions/interface`: This folder contains functions to provide an interface to the local trajectory planner and to the
28 | tire performance assessment module.
29 | * `tpa_map_functions/visualization`: This folder contains functions to visualize tpa-maps.
30 | * `tpa_map_gui`: This folder contains software to run a GUI which allows generating location- and time-dependent tire-road friction maps for vehicle dynamics simulation or tpa-maps which are used for trajectory planning. A more detailed description can be found within this folder.
31 |
32 | ## How to generate tpa-maps
33 | With these steps, a new tpa-map can be generated using an existing reference line:
34 | 1. Open ``main_tpaGUI.py`` and specify the name of the reference line (in ``/inputs``) and the settings for ref-line preprocessing (options: use original step size or resample to new step size).
35 | 2. Create a tpa-map (consult ``/tpa_map_gui/Readme.md`` for more details).
36 | 3. Reformat output maps depending on type (use ``/tests/helperfuncs/test_concat_tpamaps.py`` and adjust settings for local scaling factors or for multiple tpa-maps with velocity-dependent data).
37 | 4. Use final, single map file (located in ``/outputs``) as input for local trajectory planner or vehicle dynamics simulation.
38 |
--------------------------------------------------------------------------------
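Editor's note: the following is a minimal, hypothetical sketch (not part of the repository) of how a generated tpa-map csv from ``/outputs`` could be loaded for a quick sanity check. The column layout (``section_id, s_m, x_m, y_m`` followed by pairs of acceleration limits) and the csv format (comma-separated, ``#`` header line) follow the example files in ``/inputs/veh_dyn_info``; the file path below is only an assumption.

```python
import numpy as np

# hypothetical path to a generated tpa-map in /outputs
filepath2tpamap = 'outputs/tpamap_varloc_varvel_berlin.csv'

# tpa-maps are comma-separated with a '#' header line (cf. the example files in /inputs/veh_dyn_info)
with open(filepath2tpamap, 'r') as fh:
    tpamap = np.genfromtxt(fh, delimiter=',', comments='#')

print('number of sections:                     ', tpamap.shape[0])
print('s-coordinate of last section start in m:', tpamap[-1, 1])
print('acceleration limit column pairs:        ', (tpamap.shape[1] - 4) // 2)
```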
/inputs/veh_dyn_info/tpamap_constloc_constvel.csv:
--------------------------------------------------------------------------------
1 | # section_id, s_m, x_m, y_m, ax_max_mps2, ay_max_mps2
2 | 0.0, 0.0, 0.0, 0.0, 12.5, 12.5
3 |
--------------------------------------------------------------------------------
/inputs/veh_dyn_info/tpamap_constloc_varvel.csv:
--------------------------------------------------------------------------------
1 | # section_id, s_m, x_m, y_m, ax_max_mps2__27mps, ay_max_mps2__27mps, ax_max_mps2__55mps, ay_max_mps2__55mps, ax_max_mps2__83mps, ay_max_mps2__83mps
2 | 0.0, 0.0, 0.0, 0.0, 12.5, 12.5, 14.0, 15.0, 18.0, 20.0
3 |
--------------------------------------------------------------------------------
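Editor's note: the single row above stores one pair of acceleration limits per velocity step (27, 55 and 83 mps). As an illustration of this velocity dependence only (a sketch, not the actual MapInterface implementation), limits for an intermediate velocity could be obtained by linear interpolation between those steps:

```python
import numpy as np

# velocity steps and acceleration limits taken from the csv row above
velocity_steps_mps = np.array([27.0, 55.0, 83.0])
ax_max_mps2 = np.array([12.5, 14.0, 18.0])
ay_max_mps2 = np.array([12.5, 15.0, 20.0])

# linearly interpolate the limits for an intermediate request velocity
v_request_mps = 70.0
ax_lim = np.interp(v_request_mps, velocity_steps_mps, ax_max_mps2)
ay_lim = np.interp(v_request_mps, velocity_steps_mps, ay_max_mps2)

print('limits at {:.0f} mps: ax = {:.2f} mps2, ay = {:.2f} mps2'.format(v_request_mps, ax_lim, ay_lim))
```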
/outputs/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TUMFTM/tpa_map_functions/2771e48583d9b55094e9558df421144463beff2c/outputs/.gitkeep
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | numpy>=1.18.1
2 | ad-interface-functions>=0.21
3 | trajectory-planning-helpers>=0.74
4 | pyzmq>=19.0.2
5 | matplotlib>=3.3.1
6 |
--------------------------------------------------------------------------------
/resources/tpamap_berlin.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/TUMFTM/tpa_map_functions/2771e48583d9b55094e9558df421144463beff2c/resources/tpamap_berlin.png
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [flake8]
2 | exclude = *__init__.py, *-env/*
3 | ignore = E402, W503, E704
4 | max-line-length = 120
5 |
6 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import setuptools
2 |
3 | """
4 | Package on pypi.org can be updated with the following commands:
5 | python3 setup.py sdist bdist_wheel
6 | sudo python3 -m twine upload dist/*
7 | """
8 |
9 | with open("README.md", "r") as fh:
10 | long_description = fh.read()
11 |
12 | setuptools.setup(
13 | name='tpa_map_functions',
14 | version='0.19',
15 | url='https://github.com/TUMFTM',
16 | author="Leonhard Hermansdorfer",
17 | author_email="leo.hermansdorfer@tum.de",
18 | description="Functions to process local acceleration limits for trajectory planning within the TUM Autonomous Motorsports project",
19 | long_description=long_description,
20 | long_description_content_type="text/markdown",
21 | packages=setuptools.find_packages(exclude=["*inputs*", "*outputs*", "*tests*", "*resources*", "*venv*", "*tpa_map_gui*"]),
22 | install_requires=[
23 | 'numpy>=1.18.1',
24 | 'ad-interface-functions>=0.21',
25 | 'trajectory-planning-helpers>=0.74',
26 | 'pyzmq>=19.0.2',
27 | 'matplotlib>=3.3.1'
28 | ],
29 | classifiers=[
30 | "Programming Language :: Python :: 3",
31 | "Programming Language :: Python :: 3.6",
32 | "Programming Language :: Python :: 3.7",
33 | "Programming Language :: Python :: 3.8",
34 | "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)",
35 | "Operating System :: OS Independent",
36 | ])
37 |
--------------------------------------------------------------------------------
/tests/helperfuncs/test_calc_cosyidcs.py:
--------------------------------------------------------------------------------
1 | import os.path
2 | import sys
3 | import numpy as np
4 |
5 | # import custom modules
6 | path2tmf = os.path.join(os.path.abspath(__file__).split('tpa_map_functions')[0], 'tpa_map_functions')
7 | sys.path.append(path2tmf)
8 |
9 | import tpa_map_functions as tmf
10 |
11 | # User Input -------------------------------------------------------------------------------------------------------
12 |
13 | track_name = 'modena'
14 | bool_enable_debug = True
15 |
16 | # mode for resampling reference line, options: "const_steps", "var_steps"
17 | mode_resample_refline = 'var_steps'
18 | stepsize_resample_m = 11.11
19 | section_length_min_m = 15
20 | section_length_max_m = 200
21 |
22 | # test data
23 | s_coordinates = np.asarray([239.09252732, 239.30584717, 239.51885986, 239.73167419, 239.94430542, 240.15682983,
24 | 240.36885071])
25 |
26 | # Preprocess Reference Line ----------------------------------------------------------------------------------------
27 |
28 | filepath2ltpl_refline = os.path.join(path2tmf, 'inputs', 'traj_ltpl_cl', 'traj_ltpl_cl_' + track_name + '.csv')
29 |
30 |
31 | output_data = tmf.helperfuncs.preprocess_ltplrefline.\
32 | preprocess_ltplrefline(filepath2ltpl_refline=filepath2ltpl_refline,
33 | mode_resample_refline=mode_resample_refline,
34 | stepsize_resample_m=stepsize_resample_m,
35 | section_length_limits_m=[section_length_min_m, section_length_max_m],
36 | bool_enable_debug=bool_enable_debug)
37 |
38 | refline_resampled = output_data["refline_resampled"]
39 |
40 | test_output = tmf.helperfuncs.calc_cosyidcs.calc_cosyidcs(np.squeeze(refline_resampled["section_id"]),
41 | refline_resampled["refline_resampled"],
42 | s_coordinates)
43 |
44 | print(test_output)
45 |
--------------------------------------------------------------------------------
/tests/helperfuncs/test_concat_tpamaps.py:
--------------------------------------------------------------------------------
1 | import os.path
2 | import sys
3 |
4 | # import custom modules
5 | path2tmf = os.path.join(os.path.abspath(__file__).split('tpa_map_functions')[0], 'tpa_map_functions')
6 | sys.path.append(path2tmf)
7 |
8 | import tpa_map_functions as tmf
9 |
10 |
11 | # User Input -----------------------------------------------------------------------------------------------------------
12 |
13 | # mode = 'local scaling factors'
14 | mode = 'local acceleration limits'
15 |
16 | # settings for mode 'local acceleration limits'
17 | if mode == 'local acceleration limits':
18 |
19 | filename_output = 'tpamap_varloc_varvel.csv'
20 |
21 |     # part of the name which is identical for all tpa-maps to be concatenated; the only difference is "_XXmps"
22 | tpamap_identifier = "tpamap_berlin"
23 |
24 |
25 | # settings for mode 'local scaling factors'
26 | elif mode == 'local scaling factors':
27 |
28 | filename_output = 'tpamap_tum_mcs.csv'
29 |
30 | # list names of local tire model scaling maps which should be concatenated
31 |     # NOTE: concatenating local acceleration limit data is not intended with this function. This function is built for
32 | # concatenating local tire scaling factors. This is just an example.
33 | list_tpamaps = ["tpamap_berlin__27mps.csv",
34 | "tpamap_berlin__56mps.csv",
35 | "tpamap_berlin__83mps.csv"]
36 |
37 | # list the specific timesteps where each map should be used raw (100%);
38 | # the maps are interpolated between those timesteps
39 | time_interpsteps = [0.0, 20.0, 35.0]
40 |
41 | # set to True if map should be used within vehicle dynamics simulation;
42 | # the file is always necessary, therefore, set to False if no varying friction influence is needed
43 | bool_enable_tpamaps = True
44 |
45 |
46 | # Manage paths ---------------------------------------------------------------------------------------------------------
47 |
48 | path2tpamaps = os.path.join(os.path.abspath(__file__).split('tpa_map_functions')[0], 'tpa_map_functions', 'outputs')
49 |
50 | filepath2output = os.path.join(os.path.abspath(__file__).split('tpa_map_functions')[0],
51 | 'tpa_map_functions', 'outputs', filename_output)
52 |
53 | # concatenate map data -------------------------------------------------------------------------------------------------
54 |
55 | if mode == 'local acceleration limits':
56 |
57 | tmf.helperfuncs.concat_tpamaps_ltpl.concat_tpamaps_ltpl(path2tpamaps=path2tpamaps,
58 | filepath2output=filepath2output,
59 | tpamap_identifier=tpamap_identifier)
60 |
61 | elif mode == 'local scaling factors':
62 |
63 | tmf.helperfuncs.concat_tpamaps_vehdynsim.concat_tpamaps_vehdynsim(path2tpamaps=path2tpamaps,
64 | filepath2output=filepath2output,
65 | list_tpamaps=list_tpamaps,
66 | time_interpsteps=time_interpsteps,
67 | bool_enable_tpamaps=bool_enable_tpamaps)
68 |
--------------------------------------------------------------------------------
/tests/helperfuncs/test_preprocess_ltplrefline.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import os.path
3 | import sys
4 | import matplotlib.pyplot as plt
5 |
6 | # import custom modules
7 | path2tmf = os.path.join(os.path.abspath(__file__).split('tpa_map_functions')[0], 'tpa_map_functions')
8 | sys.path.append(path2tmf)
9 |
10 | import tpa_map_functions as tmf
11 |
12 | # User Input -------------------------------------------------------------------------------------------------------
13 |
14 | track_name = 'modena'
15 | bool_enable_debug = True
16 |
17 | # mode for resampling reference line, options: "const_steps", "var_steps"
18 | mode_resample_refline = 'var_steps'
19 | stepsize_resample_m = 11.11
20 | section_length_min_m = 15
21 | section_length_max_m = 200
22 |
23 | test_source = 'path'  # or 'file'
24 |
25 | # Preprocess Reference Line ----------------------------------------------------------------------------------------
26 |
27 | filepath2ltpl_refline = os.path.join(path2tmf, 'inputs', 'traj_ltpl_cl', 'traj_ltpl_cl_' + track_name + '.csv')
28 |
29 | if test_source == 'file':
30 |
31 | # load reference line
32 | with open(filepath2ltpl_refline, 'r') as fh:
33 | csv_data_refline = np.genfromtxt(fh, delimiter=';')
34 |
35 | reference_line = csv_data_refline[:, 0:2]
36 |
37 | mode_resample_refline = 'const_steps'
38 |
39 | output_data = tmf.helperfuncs.preprocess_ltplrefline.preprocess_ltplrefline(reference_line=reference_line,
40 | stepsize_resample_m=stepsize_resample_m,
41 | bool_enable_debug=bool_enable_debug)
42 | else:
43 |
44 | output_data = tmf.helperfuncs.preprocess_ltplrefline.\
45 | preprocess_ltplrefline(filepath2ltpl_refline=filepath2ltpl_refline,
46 | mode_resample_refline=mode_resample_refline,
47 | stepsize_resample_m=stepsize_resample_m,
48 | section_length_limits_m=[section_length_min_m, section_length_max_m],
49 | bool_enable_debug=bool_enable_debug)
50 |
51 | test = np.concatenate((output_data['refline_resampled']['section_id'],
52 | output_data['refline_resampled']['refline_resampled']), axis=1)
53 |
54 | if bool_enable_debug:
55 |
56 | refline_original = output_data['refline']
57 | refline_resampled = output_data['refline_resampled']['refline_resampled']
58 |
59 | idxs_plot = output_data['refline_resampled']['sectionid_change']
60 |
61 | if mode_resample_refline == "const_steps":
62 |
63 | plt.figure(figsize=(7, 7))
64 |
65 | plt.plot(refline_original[:, 1], refline_original[:, 2], 'k--', label='original reference line')
66 | plt.plot(refline_original[:, 1], refline_original[:, 2], 'kx', label='original reference line')
67 | plt.plot(refline_resampled[:, 1][idxs_plot], refline_resampled[:, 2][idxs_plot],
68 | 'r', label='resampled reference line')
69 | plt.plot(refline_resampled[:, 1][idxs_plot], refline_resampled[:, 2][idxs_plot],
70 | 'ro', label='resampled reference line')
71 |
72 | plt.axis('equal')
73 | plt.legend()
74 | plt.xlabel('x in meters')
75 | plt.ylabel('y in meters')
76 |
77 | plt.show(block=False)
78 |
79 | elif mode_resample_refline == "var_steps":
80 |
81 | plt.figure(figsize=(7, 7))
82 |
83 | plt.plot(refline_original[:, 1], refline_original[:, 2], 'k--', label='original reference line')
84 | plt.plot(refline_original[:, 1], refline_original[:, 2], 'kx', label='original reference line')
85 | plt.plot(refline_resampled[:, 1][idxs_plot], refline_resampled[:, 2][idxs_plot], 'r',
86 | label='resampled reference line')
87 | plt.plot(refline_resampled[:, 1][idxs_plot], refline_resampled[:, 2][idxs_plot], 'ro',
88 | label='resampled reference line')
89 |
90 | plt.axis('equal')
91 | plt.legend()
92 | plt.xlabel('x in meters')
93 | plt.ylabel('y in meters')
94 |
95 | plt.show(block=False)
96 |
97 | plt.figure()
98 |
99 | ax1 = plt.subplot(2, 1, 1)
100 | ax1.plot(refline_original[:, 0], output_data['refline_resampled']['ax_mps2'], label="long. acc.")
101 | ax1.plot(refline_original[:, 0], output_data['refline_resampled']['ay_mps2'], label="lat. acc.")
102 |
103 | for s in refline_resampled[:, 0][idxs_plot]:
104 | plt.vlines(s, -10, 10, colors='k', linestyle='--')
105 |
106 | plt.grid()
107 | plt.xlabel("track position in m")
108 | plt.ylabel("long./lat. acc. in mps2")
109 | plt.legend()
110 |
111 | ax2 = plt.subplot(2, 1, 2, sharex=ax1)
112 |
113 | ax2.step(refline_resampled[:, 0][idxs_plot],
114 | np.multiply(output_data['refline_resampled']['ax_trigger'][idxs_plot], 0.9),
115 | where='post', linewidth=2.0, label="trigger: long. acc.")
116 | ax2.step(refline_resampled[:, 0][idxs_plot],
117 | np.multiply(output_data['refline_resampled']['ay_trigger'][idxs_plot], 0.8),
118 | where='post', linewidth=2.0, label="trigger: lat. acc.")
119 |
120 | ax2.step(refline_resampled[:, 0],
121 | np.multiply(output_data['refline_resampled']['list_section_category'], 1.0), where='post',
122 | linewidth=2.0, label="section type")
123 |
124 | for s in refline_resampled[:, 0][idxs_plot]:
125 | plt.vlines(s, -7, 7, colors='k', linestyle='--')
126 |
127 | plt.ylim([-7, 7])
128 |
129 | plt.grid()
130 | plt.xlabel("track position in m")
131 | plt.ylabel("section type")
132 |
133 | plt.legend()
134 | plt.show(block=False)
135 |
136 | # plot histogram containing distances between coordinate points
137 | plt.figure()
138 |
139 | plt.hist(np.diff(refline_resampled[:, 0][idxs_plot]), bins=25, histtype='bar', align='mid', rwidth=0.8)
140 |
141 | plt.axvline(x=output_data['refline_resampled']['mean_diff_m'], color='g', label='mean')
142 | plt.axvline(x=(output_data['refline_resampled']['mean_diff_m']
143 | + output_data['refline_resampled']['std_diff_m']), color='y', label='stand.dev.')
144 | plt.axvline(x=(output_data['refline_resampled']['mean_diff_m']
145 | - output_data['refline_resampled']['std_diff_m']), color='y')
146 | plt.axvline(x=output_data['refline_resampled']['min_diff_m'], color='r', label='min/max')
147 | plt.axvline(x=output_data['refline_resampled']['max_diff_m'], color='r')
148 |
149 | plt.legend()
150 | plt.grid()
151 | plt.xlabel('distance between reference line coordinate points in meters')
152 | plt.ylabel('bin count')
153 |
154 | plt.show()
155 |
--------------------------------------------------------------------------------
/tests/helperfuncs/test_transform_coordinates_xy2s.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import os.path
3 | import sys
4 | import matplotlib.pyplot as plt
5 |
6 | # import custom modules
7 | path2tmf = os.path.join(os.path.abspath(__file__).split('tpa_map_functions')[0], 'tpa_map_functions')
8 | sys.path.append(path2tmf)
9 |
10 | import tpa_map_functions as tmf
11 |
12 | # User Input -------------------------------------------------------------------------------------------------------
13 |
14 | track_name = 'monteblanco'
15 |
16 | # Preprocess Reference Line ----------------------------------------------------------------------------------------
17 | filepath2ltpl_refline = os.path.join(path2tmf, 'inputs', 'traj_ltpl_cl', 'traj_ltpl_cl_' + track_name + '.csv')
18 |
19 | # load reference line
20 | with open(filepath2ltpl_refline, 'r') as fh:
21 | csv_data_refline = np.genfromtxt(fh, delimiter=';')
22 |
23 | test_array = np.vstack((csv_data_refline, csv_data_refline))
24 |
25 | position_samples = test_array[:, 0:2] + (1 - np.random.rand(test_array.shape[0], 2))
26 |
27 | test = tmf.helperfuncs.transform_coordinates_xy2s. \
28 | transform_coordinates_xy2s(coordinates_sxy_m=np.hstack((csv_data_refline[:, 7][:, np.newaxis],
29 | csv_data_refline[:, 0:2])),
30 | position_m=position_samples,
31 | s_tot_m=csv_data_refline[-1, 7])
32 |
--------------------------------------------------------------------------------
/tests/interface/test_MapInterface_comm.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import os
3 | import time
4 |
5 | # custom modules
6 | path_root2Module = os.path.join(os.path.abspath(__file__).split("tpa_map_functions")[0],
7 | "tpa_map_functions")
8 |
9 | sys.path.append(path_root2Module)
10 |
11 | import tpa_map_functions.interface.MapInterface as MapInterface
12 |
13 | """
14 | Created by: Leonhard Hermansdorfer
15 | Created on: 10.12.2019
16 | """
17 |
18 | filepath2tpamap = os.path.join(path_root2Module, 'inputs', 'veh_dyn_info', 'tpamap_constloc_constvel.csv')
19 |
20 | # Get data from TPA
21 | zmq_opts = {"ip": "localhost", # IP of device running map interface
22 | "port_data": "47208", # port number, standard: "47208"
23 | "topic": "tpa_to_ltpl" # zmq topic of publisher
24 | }
25 |
26 | # create a map interface class and update it at the configured frequency (here 20 Hz, i.e. every 50 ms)
27 | MapInt = MapInterface.MapInterface(filepath2localgg=filepath2tpamap,
28 | zmq_opts_sub_tpa=zmq_opts,
29 | bool_enable_interface2tpa=True)
30 |
31 | updateFrequency = 20
32 |
33 | while True:
34 |
35 | # save start time
36 | t_start = time.perf_counter()
37 |
38 | # update
39 | MapInt.update()
40 |
41 | duration = time.perf_counter() - t_start
42 | sleep_time = 1 / updateFrequency - duration
43 |
44 | if sleep_time > 0.0:
45 | time.sleep(sleep_time)
46 | else:
47 | print("Didn't get enough sleep... (TPA Map Interface)")
48 |
--------------------------------------------------------------------------------
/tests/interface/test_MapInterface_variants.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import os
3 | import numpy as np
4 |
5 | # custom modules
6 | path2module = os.path.join(os.path.abspath(__file__).split("tpa_map_functions")[0], "tpa_map_functions")
7 |
8 | sys.path.append(path2module)
9 |
10 | import tpa_map_functions.interface.MapInterface as MapInterface
11 |
12 | """
13 | Created by: Leonhard Hermansdorfer
14 | Created on: 10.12.2019
15 | """
16 |
17 | # Get data from TPA
18 | zmq_opts = {"ip": "localhost", # IP of device running map interface
19 | "port_data": "47208", # port number, standard: "47208"
20 | "topic": "tpa_to_ltpl" # zmq topic of publisher
21 | }
22 |
23 | # test 1 - constant local acceleration limits, const velocity
24 | # test 2 - constant local acceleration limits, variable velocity
25 | # test 3 - variable local acceleration limits, const velocity
26 | # test 4 - variable local acceleration limits, variable velocity
27 |
28 | filename_tpamap = ['tpamap_constloc_constvel.csv',
29 | 'tpamap_constloc_varvel.csv',
30 | 'tpamap_varloc_constvel_berlin.csv',
31 | 'tpamap_varloc_varvel_berlin.csv']
32 |
33 | bool_enable_velocitydependence = [False,
34 | True,
35 | False,
36 | True]
37 |
38 | # run all tests with interpolation disabled (=False) and enabled (True)
39 | for ele in [False, True]:
40 | counter = 0
41 |
42 | while counter <= len(filename_tpamap) - 1:
43 |
44 | print('----------------------------------------------------------')
45 | print('run test {} with file: {}\nInterpolation: {}'.format(counter + 1, filename_tpamap[counter], ele))
46 |
47 | MapInt = MapInterface.MapInterface(filepath2localgg=os.path.join(path2module, 'inputs', 'veh_dyn_info',
48 | filename_tpamap[counter]),
49 | bool_enable_interpolation=ele,
50 | bool_enable_velocitydependence=bool_enable_velocitydependence[counter])
51 |
52 | pos_xy = np.random.rand(566, 2)
53 | pos_s = np.arange(0, 1501, 25)[:, np.newaxis]
54 |
55 | if bool_enable_velocitydependence[counter]:
56 | velocity_xy = np.random.rand(566, 1) * 100
57 | velocity_s = np.random.rand(pos_s.shape[0], pos_s.shape[1]) * 100
58 | velocity_emergency = np.arange(0, 100, 2.33)
59 |
60 | else:
61 | velocity_xy = np.asarray([])
62 | velocity_s = np.asarray([])
63 | velocity_emergency = np.asarray([])
64 |
65 | # xy-coordinates
66 | gg_xy = MapInt.get_acclim_tpainterface(position_m=pos_xy,
67 | position_mode='xy-cosy',
68 | velocity_mps=velocity_xy)
69 |
70 | # s-coordinates
71 | gg_s = MapInt.get_acclim_tpainterface(position_m=pos_s,
72 | position_mode='s-cosy',
73 | velocity_mps=velocity_s)
74 |
75 | acc_emergency = MapInt.get_acclim_tpainterface(position_m=np.asarray(0),
76 | position_mode='emergency',
77 | velocity_mps=velocity_emergency)
78 |
79 |         if gg_xy.shape[1] != 2 or gg_s.shape[1] != 2:
80 | raise ValueError('TEST: TPA MapInterface: wrong shape of output local gg array!')
81 |
82 | del MapInt
83 |
84 | print('test {} passed!'.format(counter + 1))
85 | print('----------------------------------------------------------')
86 |
87 | counter += 1
88 |
89 | print('tests passed')
90 |
--------------------------------------------------------------------------------
/tests/interface/test_get_frictionvals.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import time
3 | import sys
4 | import os.path
5 | import matplotlib.pyplot as plt
6 | import trajectory_planning_helpers as tph
7 |
8 | # custom modules
9 | path2module = os.path.join(os.path.abspath(__file__).split("tpa_map_functions")[0], "tpa_map_functions")
10 |
11 | sys.path.append(path2module)
12 |
13 | import tpa_map_functions
14 |
15 | """
16 | Created by: Leonhard Hermansdorfer
17 | Created on: 10.12.2019
18 | """
19 |
20 |
21 | # User Input -----------------------------------------------------------------------------------------------------------
22 |
23 | trackname = 'berlin'
24 | tpamap_name = "tpamap_varloc_varvel_berlin"
25 |
26 | updateFrequency = 100
27 | laps_to_go = 5
28 | s_terminate_m = 800
29 | bool_plot = True
30 |
31 | # set indices for looping reference line during tpa map interface request
32 |
33 | # length of trajectory for which tpa map info is requested during each step
34 | idx_stop = 100
35 |
36 | # index shift between each request (simulates a driving vehicle)
37 | delta_idx = 20
38 |
39 | # tpa interface settings
40 | bool_enable_interface2tpa = False
41 | bool_enable_interpolation = True
42 | bool_enable_velocitydependence = True
43 |
44 | # enable lapwise scaling of acceleration limits
45 | bool_apply_acclimit_scaling = True
46 |
47 | # tpa zmq settings
48 | zmq_opts = {"ip": "localhost", # IP of device running map interface
49 | "port_data": "47208", # port number, standard: "21212"
50 | "topic": "tpa_to_ltpl" # zmq topic of publisher
51 | }
52 |
53 | # Set Up ---------------------------------------------------------------------------------------------------------------
54 |
55 | filepath2tpamap = os.path.join(path2module, "inputs", "veh_dyn_info", tpamap_name + ".csv")
56 | filepath2ltpl_refline = os.path.join(path2module, 'inputs', 'traj_ltpl_cl', 'traj_ltpl_cl_' + trackname + '.csv')
57 |
58 | # load reference line from file
59 | dict_refline = tpa_map_functions.helperfuncs.preprocess_ltplrefline.\
60 | preprocess_ltplrefline(filepath2ltpl_refline=filepath2ltpl_refline,
61 | stepsize_resample_m=50.0)
62 |
63 | coordinates_sxy_m = dict_refline['refline']
64 | refline = dict_refline['refline'][:, 1:3]
65 |
66 | # create a map interface class
67 | myInterface = tpa_map_functions.interface.MapInterface.\
68 | MapInterface(filepath2localgg=filepath2tpamap,
69 | zmq_opts_sub_tpa=zmq_opts,
70 | bool_enable_interface2tpa=bool_enable_interface2tpa,
71 | bool_enable_interpolation=bool_enable_interpolation,
72 | bool_enable_velocitydependence=bool_enable_velocitydependence)
73 |
74 | # testing --------------------------------------------------------------------------------------------------------------
75 |
76 | lapcounter = 0
77 | idx_start = 0
78 | log_duration = []
79 | output_data = []
80 |
81 | while True:
82 |
83 | delta_idx_tmp = delta_idx + np.random.randint(-20, 21)
84 |
85 | list_trajectories = []
86 | traj_scoord_m = []
87 | acc_lim = []
88 |
89 | if idx_stop > refline.shape[0] > idx_start:
90 |
91 | idx_stop -= refline.shape[0]
92 | trajectory = np.vstack((refline[idx_start:, :], refline[:idx_stop, :]))
93 |
94 | elif idx_stop > refline.shape[0]:
95 |
96 | idx_stop -= refline.shape[0]
97 | trajectory = refline[idx_start:idx_stop, :]
98 |
99 | elif idx_start > refline.shape[0]:
100 |
101 | idx_start -= refline.shape[0]
102 | trajectory = refline[idx_start:idx_stop, :]
103 |
104 | lapcounter += 1
105 |
106 | print('lap completed\n')
107 |
108 | elif idx_stop < idx_start:
109 | trajectory = np.vstack((refline[idx_start:, :], refline[:idx_stop, :]))
110 |
111 | else:
112 | trajectory = refline[idx_start:idx_stop, :]
113 |
114 | for row in trajectory:
115 | traj_scoord_m.append(tph.path_matching_global.path_matching_global(coordinates_sxy_m, row)[0])
116 |
117 | idx_start += delta_idx_tmp
118 | idx_stop += delta_idx_tmp
119 |
120 | # provide artificial velocity array for request
121 | arr_velocity_mps = np.full((trajectory.shape[0], 1), 55)
122 | # arr_velocity_mps = np.linspace(0, 94, trajectory.shape[0])[:, np.newaxis]
123 |
124 | # apply lapwise scaling of acceleration limits
125 | if bool_apply_acclimit_scaling:
126 | myInterface.scale_acclim_lapwise(lap_counter=lapcounter,
127 | laps_interp=[1, 4],
128 | s_current_m=traj_scoord_m[0],
129 | s_total_m=coordinates_sxy_m[-1, 0],
130 | scaling=[1.0, 0.6])
131 |
132 | # save start time
133 | t_start = time.perf_counter()
134 |
135 | if bool_enable_velocitydependence:
136 | acc_lim = myInterface.get_acclim_tpainterface(position_m=trajectory,
137 | position_mode='xy-cosy',
138 | velocity_mps=arr_velocity_mps)
139 |
140 | acc_emergency = myInterface.get_acclim_tpainterface(position_m=np.asarray(0),
141 | position_mode='emergency',
142 | velocity_mps=np.arange(0, 100, 10))
143 |
144 | else:
145 | acc_lim = myInterface.get_acclim_tpainterface(position_m=trajectory,
146 | position_mode='xy-cosy')
147 |
148 | acc_emergency = myInterface.get_acclim_tpainterface(position_m=np.asarray(0),
149 | position_mode='emergency')
150 |
151 | myInterface.update()
152 |
153 | # simulate race strategy intervention
154 | # if traj_scoord_m[-1] > 500 and traj_scoord_m[-1] < 1200:
155 | # myInterface.set_acclim_strategy(10, 8, True)
156 |
157 | # elif traj_scoord_m[-1] > 1200:
158 | # myInterface.set_acclim_strategy(1, 1, False)
159 |
160 | duration = time.perf_counter() - t_start
161 | sleep_time = 1 / updateFrequency - duration
162 | # print("sleep: {:.3f} s".format(sleep_time))
163 | # print("duration: {:.3f} s".format(duration))
164 |
165 | output_data.append(np.hstack((np.vstack(traj_scoord_m), acc_lim)))
166 |
167 | if sleep_time > 0.0:
168 | time.sleep(sleep_time)
169 | else:
170 | pass
171 | # logging.warning("Didn't get enough sleep...")
172 |
173 | log_duration.append(duration)
174 |
175 | if len(log_duration) == 100:
176 | print('mean duration over last 100 timesteps: ', np.mean(log_duration), ' s')
177 | print('max duration of last 100 timesteps: ', max(log_duration), ' s')
178 | print('min duration of last 100 timesteps: ', min(log_duration), ' s\n')
179 |
180 | log_duration = []
181 |
182 | if laps_to_go <= lapcounter and s_terminate_m > 0 and traj_scoord_m[-1] > s_terminate_m:
183 | break
184 |
185 | # plot results
186 |
187 | list_linestyle = ['-', '--', '-.', ':'] * 2
188 | list_linestyle.sort()
189 | list_linecolor = ['black', 'blue']
190 |
191 | if bool_plot:
192 |
193 | plt.figure()
194 | plt.xlim([0, coordinates_sxy_m[-1, 0]])
195 |
196 | for i_count in range(int(myInterface.localgg_mps2.shape[1] / 2)):
197 |
198 | if int(myInterface.localgg_mps2.shape[1] / 2) > 1:
199 | label = 'ground truth of tpamap at ' + str(myInterface.velocity_steps[i_count + 1]) + ' mps'
200 | else:
201 | label = 'ground truth of tpamap'
202 |
203 | plt.step(myInterface.coordinates_sxy_m[:, 0],
204 | np.vstack((np.asarray(myInterface.localgg_mps2[0, 0 + i_count * 2]),
205 | np.vstack(myInterface.localgg_mps2[:-1, 0 + i_count * 2]))),
206 | color=list_linecolor[i_count % 2], linestyle=list_linestyle[i_count], linewidth=2.0, label=label)
207 |
208 | for ele in output_data:
209 | plt.step(ele[:, 0], ele[:, 1])
210 |
211 | plt.draw()
212 | plt.pause(0.01)
213 |
214 | plt.grid()
215 | plt.legend()
216 | plt.show()
217 |
218 | # plt.figure()
219 |
220 | # plt.step(myInterface.coordinates_sxy_m[:, 0],
221 | # np.vstack((np.asarray(myInterface.localgg_mps2[0, 0]),
222 | # np.vstack(myInterface.localgg_mps2[:-1, 0]))),
223 | # 'k', label='ground truth of tpa map')
224 |
225 | # plt.step(output_data[0][:, 0], output_data[0][:, 1])
226 |
227 | # plt.show()
228 |
--------------------------------------------------------------------------------
/tests/test_discretization_example.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import matplotlib.pyplot as plt
3 | # import tikzplotlib
4 |
5 | s_map_m = np.arange(0, 80, 10)
6 | acc_mps2 = np.asarray([11.0, 12.0, 12.5, 13.0, 12.0, 11.0, 10.0, 12.0])
7 | acc_mps2_interp = np.asarray([11.0, 11.0, 12.0, 12.5, 12.0, 11.0, 10.0, 10.0])
8 |
9 | s_stepsize_traj = np.arange(0, 80, 5.5)
10 | s_stepsize_traj_shifted = s_stepsize_traj + 2
11 |
12 | # acc_result_traj = np.full((1, s_stepsize_traj.size), 10)
13 |
14 | xlim = [0, 70]
15 | ylim = [9.5, 13.5]
16 |
17 | # calc s coordinate
18 |
19 | acc_received_wo_interp = list()
20 | acc_received_with_interp = list()
21 |
22 | for s_coordinates in [s_stepsize_traj, s_stepsize_traj_shifted]:
23 |
24 | dummy_acc_rcvd_wo_interp = list()
25 | dummy_acc_rcvd_with_interp = list()
26 |
27 | for s_m in s_coordinates:
28 |
29 | idx_min = np.argmin(abs(s_map_m - s_m))
30 |
31 | if (s_map_m - s_m)[idx_min] == 0:
32 | dummy_acc_rcvd_wo_interp.append(acc_mps2[idx_min])
33 | dummy_acc_rcvd_with_interp.append(acc_mps2_interp[idx_min])
34 |
35 | elif (s_map_m - s_m)[idx_min] > 0:
36 | dummy_acc_rcvd_wo_interp.append(acc_mps2[idx_min - 1])
37 | dummy_acc_rcvd_with_interp.append(np.interp(s_m, s_map_m[idx_min - 1:idx_min + 1],
38 | acc_mps2_interp[idx_min - 1:idx_min + 1]))
39 |
40 | elif (s_map_m - s_m)[idx_min] < 0:
41 | dummy_acc_rcvd_wo_interp.append(acc_mps2[idx_min])
42 | dummy_acc_rcvd_with_interp.append(np.interp(s_m, s_map_m[idx_min:idx_min + 2],
43 | acc_mps2_interp[idx_min:idx_min + 2]))
44 |
45 | acc_received_wo_interp.append(dummy_acc_rcvd_wo_interp)
46 | acc_received_with_interp.append(dummy_acc_rcvd_with_interp)
47 |
48 | # prepare plot filling
49 |
50 | x = np.hstack((s_stepsize_traj[:, np.newaxis], s_stepsize_traj[:, np.newaxis],
51 | s_stepsize_traj_shifted[:, np.newaxis], s_stepsize_traj_shifted[:, np.newaxis]))
52 |
53 | x = np.hstack(x)
54 |
55 | y1 = acc_received_wo_interp[0][0:2] * 2
56 | y2 = acc_received_wo_interp[1][0:2] * 2
57 |
58 | y3 = acc_received_with_interp[0][0:2] * 2
59 | y4 = acc_received_with_interp[1][0:2] * 2
60 |
61 | for int_counter, s in enumerate(x[4:-4:2]):
62 |
63 | if np.min(np.abs(s_stepsize_traj - s)) == 0:
64 | idx_min = np.argmin(np.abs(s_stepsize_traj - s))
65 |
66 | y1.extend(((acc_received_wo_interp[0][idx_min - 1]), (acc_received_wo_interp[0][idx_min])))
67 | y3.extend(((acc_received_with_interp[0][idx_min - 1]), (acc_received_with_interp[0][idx_min])))
68 |
69 | idx_min = np.argmin(np.abs(s_stepsize_traj_shifted - s))
70 |
71 | if (s_stepsize_traj_shifted[idx_min] - s) > 0:
72 | y2.extend(((acc_received_wo_interp[1][idx_min - 1]), (acc_received_wo_interp[1][idx_min - 1])))
73 | y4.extend(((acc_received_with_interp[1][idx_min - 1]), (acc_received_with_interp[1][idx_min - 1])))
74 |
75 | elif (s_stepsize_traj_shifted[idx_min] - s) < 0:
76 | y2.extend(((acc_received_wo_interp[1][idx_min]), (acc_received_wo_interp[1][idx_min])))
77 | y4.extend(((acc_received_with_interp[1][idx_min - 1]), (acc_received_with_interp[1][idx_min - 1])))
78 |
79 | else:
80 |             raise ValueError()
81 |
82 | elif np.min(np.abs(s_stepsize_traj_shifted - s)) == 0:
83 | idx_min = np.argmin(np.abs(s_stepsize_traj_shifted - s))
84 |
85 | y1.extend(((acc_received_wo_interp[0][idx_min]), (acc_received_wo_interp[0][idx_min])))
86 | y2.extend(((acc_received_wo_interp[1][idx_min - 1]), (acc_received_wo_interp[1][idx_min])))
87 | y3.extend(((acc_received_with_interp[0][idx_min]), (acc_received_with_interp[0][idx_min])))
88 | y4.extend(((acc_received_with_interp[1][idx_min - 1]), (acc_received_with_interp[1][idx_min])))
89 |
90 | # y1_tilde = list()
91 |
92 | # for ele in np.hstack(np.hstack((s_stepsize_traj[:, np.newaxis], s_stepsize_traj_shifted[:, np.newaxis]))):
93 | #
94 | # idx_min1 = np.argmin(abs(s_stepsize_traj - ele))
95 | # idx_min2 = np.argmin(abs(s_stepsize_traj_shifted - ele))
96 | #
97 | # if (s_stepsize_traj - ele)[idx_min1] == 0:
98 | # y1_tilde.append(acc_received_wo_interp[0][idx_min1])
99 | # y1_tilde.append(acc_received_wo_interp[0][idx_min1])
100 | #
101 | # elif (s_stepsize_traj - ele)[idx_min1] > 0:
102 | # y1_tilde.append(acc_received_wo_interp[0][idx_min1])
103 | # y1_tilde.append(acc_received_wo_interp[0][idx_min1])
104 | #
105 | # elif (s_stepsize_traj - ele)[idx_min1] < 0:
106 | # y1_tilde.append(acc_received_wo_interp[0][idx_min1])
107 | # y1_tilde.append(acc_received_wo_interp[0][idx_min1])
108 |
109 |
110 | # y1 = [11, 11, 11, 11, 11, 11, 11, 11,
111 | # 11, 12, 12, 12, 12, 12, 12, 12,
112 | # 12, 12.5, 12.5, 12.5, 12.5, 12.5, 12.5, 12.5,
113 | # 12.5, 13, 13, 13, 13, 13, 13, 13,
114 | # 13, 12, 12, 12, 12, 12, 12, 12,
115 | # 12, 11, 11, 11,
116 | # 11, 10, 10, 10, 10, 10, 10, 10, 10, 10]
117 | #
118 | # y2 = [11, 11, 11, 11, 11, 11, 11, 11,
119 | # 11, 11, 11, 12, 12, 12, 12, 12,
120 | # 12, 12, 12, 12.5, 12.5, 12.5, 12.5, 12.5,
121 | # 12.5, 12.5, 12.5, 13, 13, 13, 13, 12,
122 | # 12, 12, 12, 12, 12, 12, 12,
123 | # 11, 11, 11, 11,
124 | # 11, 11, 10, 10, 10, 10, 10, 10, 10, 10, 10]
125 |
126 | # plot figure ----------------------------------------------------------------------------------------------------------
127 | fig, (ax0, ax1) = plt.subplots(nrows=2, constrained_layout=True)
128 |
129 | # upper subplot
130 |
131 |
132 | ax0.step(s_map_m, acc_mps2, where='post', color='k', linewidth=2.0, label='stored values')
133 |
134 | ax0.scatter(s_stepsize_traj, acc_received_wo_interp[0], marker='x', label='path coordinates at t_0')
135 | ax0.step(s_stepsize_traj, acc_received_wo_interp[0], where='post', label='acc. limits at t_0')
136 | ax0.scatter(s_stepsize_traj_shifted, acc_received_wo_interp[1], marker='o', label='path coordinates at t_1')
137 | ax0.step(s_stepsize_traj_shifted, acc_received_wo_interp[1], where='post', label='acc. limits at t_1')
138 |
139 |
140 | # ax0.scatter(x,y1, marker='+')
141 | # ax0.scatter(x,y2, marker='*')
142 | ax0.fill_between(x[0:len(y1)], y1, y2, where=(y1 > y2), alpha=0.2, color='k', hatch='/')
143 |
144 |
145 | # lower subplot
146 |
147 | ax1.step(s_map_m, acc_mps2, where='post', color='k', linewidth=2.0, label='stored values')
148 | ax1.plot(s_map_m, acc_mps2_interp, color='k', linewidth=2.0, linestyle='--', label='interpolated values')
149 |
150 | ax1.scatter(s_stepsize_traj, acc_received_with_interp[0], marker='x', label='path coordinates at t_0')
151 | ax1.step(s_stepsize_traj, acc_received_with_interp[0], where='post', label='acc. limits at t_0')
152 | ax1.scatter(s_stepsize_traj_shifted, acc_received_with_interp[1], marker='o', label='path coordinates at t_1')
153 | ax1.step(s_stepsize_traj_shifted, acc_received_with_interp[1], where='post', label='acc. limits at t_1')
154 |
155 | ax1.fill_between(x[0:len(y3)], y3, y4, where=(y3 > y4), alpha=0.2, color='k', hatch='/')
156 |
157 |
158 | # configure axis
159 | ax0.set_xlabel('s in meters')
160 | ax0.set_ylabel('acceleration limits in m/s^2')
161 | ax1.set_xlabel('s in meters')
162 | ax1.set_ylabel('acceleration limits in m/s^2')
163 |
164 | ax0.set_xlim(xlim)
165 | ax0.set_ylim(ylim)
166 | ax1.set_xlim(xlim)
167 | ax1.set_ylim(ylim)
168 |
169 | ax0.legend()
170 | ax1.legend()
171 |
172 | # export to tikz
173 | plt.draw()
174 | # tikzplotlib.save('discretization_frictionmap.tex')
175 |
176 | plt.show()
177 |
--------------------------------------------------------------------------------
/tests/visualization/test_visualization.py:
--------------------------------------------------------------------------------
1 | import os.path
2 | import sys
3 |
4 | # import custom modules
5 | path2tmf = os.path.join(os.path.abspath(__file__).split('tpa_map_functions')[0], 'tpa_map_functions')
6 | sys.path.append(path2tmf)
7 |
8 | import tpa_map_functions as tmf
9 |
10 | # User Input -------------------------------------------------------------------------------------------------------
11 |
12 | track_name = 'berlin'
13 | tpamap_name = 'tpamap_varloc_varvel_berlin'
14 | bool_enable_debug = True
15 |
16 | # Preprocess Reference Line ----------------------------------------------------------------------------------------
17 |
18 | filepath2ltpl_refline = os.path.join(path2tmf, 'inputs', 'traj_ltpl_cl', 'traj_ltpl_cl_' + track_name + '.csv')
19 | filepath2tpamap = os.path.join(path2tmf, 'outputs', tpamap_name + '.csv')
20 |
21 | dict_output = tmf.helperfuncs.preprocess_ltplrefline.preprocess_ltplrefline(filepath2ltpl_refline=filepath2ltpl_refline,
22 | bool_enable_debug=bool_enable_debug)
23 |
24 | tmf.visualization.visualize_tpamap.visualize_tpamap(filepath2tpamap=filepath2tpamap,
25 | refline=dict_output['refline'],
26 | width_right=dict_output['width_right'],
27 | width_left=dict_output['width_left'],
28 | normvec_normalized=dict_output['normvec_normalized'],
29 | distance_scoord_labels=200)
30 |
--------------------------------------------------------------------------------
/tpa_map_functions/__init__.py:
--------------------------------------------------------------------------------
1 | import tpa_map_functions.helperfuncs
2 | import tpa_map_functions.interface
3 | import tpa_map_functions.visualization
4 |
--------------------------------------------------------------------------------
/tpa_map_functions/helperfuncs/__init__.py:
--------------------------------------------------------------------------------
1 | import tpa_map_functions.helperfuncs.calc_cosyidcs
2 | import tpa_map_functions.helperfuncs.concat_tpamaps_ltpl
3 | import tpa_map_functions.helperfuncs.concat_tpamaps_vehdynsim
4 | import tpa_map_functions.helperfuncs.import_vehdyninfo
5 | import tpa_map_functions.helperfuncs.preprocess_ltplrefline
6 | import tpa_map_functions.helperfuncs.save_tpamap
7 | import tpa_map_functions.helperfuncs.transform_coordinates_xy2s
8 |
--------------------------------------------------------------------------------
/tpa_map_functions/helperfuncs/calc_cosyidcs.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | """
4 | Created by: Leonhard Hermansdorfer
5 | Created on: 05.09.2020
6 | """
7 |
8 |
9 | def calc_cosyidcs(sectionid: np.array,
10 | coordinates_sxy_m: np.array,
11 | s_actual_m: np.array) -> tuple:
12 |     """Calculates the section id and section indices of the current position (given as s-coordinate).
13 |
14 | :param sectionid: section id array of race track
15 | :type sectionid: np.array
16 | :param coordinates_sxy_m: s-coordinates of race track (from global raceline)
17 | :type coordinates_sxy_m: np.array
18 | :param s_actual_m: currently driven s-coordinate (from current trajectory)
19 | :type s_actual_m: np.array
20 |     :return: section id of current position, starting s-coordinate of current section,
21 | start/end index of current section
22 | :rtype: tuple
23 | """
24 |
25 | a = np.where(s_actual_m >= coordinates_sxy_m[-1, 0])[0]
26 |
27 | if np.any(a):
28 | idx_min = int(np.min(a) - 1)
29 |
30 | print('WARNING: fix in calc_cosyidcs.py was triggered! '
31 | + 's_max is: {}; requested s-coordinate(s): {}'.format(coordinates_sxy_m[-1, 0], s_actual_m[a]))
32 |
33 | s_actual_m[a] = s_actual_m[idx_min]
34 |
35 | sectionid_change = np.concatenate((np.asarray([True]), np.diff(sectionid) != 0))
36 | coordinates_sxy_m = coordinates_sxy_m[sectionid_change]
37 |
38 | i = np.searchsorted(coordinates_sxy_m[:, 0], s_actual_m, side='right') - 1
39 | i[i < 0] = 0
40 |
41 | k = np.hstack((np.where(sectionid_change)[0][i][:, np.newaxis],
42 | np.where(sectionid_change)[0][i + 1][:, np.newaxis]))
43 |
44 | return sectionid[sectionid_change][i], coordinates_sxy_m[i, 0], k
45 |
46 |
47 | # ----------------------------------------------------------------------------------------------------------------------
48 | # testing --------------------------------------------------------------------------------------------------------------
49 | # ----------------------------------------------------------------------------------------------------------------------
50 | if __name__ == '__main__':
51 |
52 | sectionid = np.asarray([1, 1, 1, 2, 3, 3, 4, 5, 6, 6, 7])
53 |
54 | coordinates_sxy_m = np.hstack((np.arange(11)[:, np.newaxis], np.zeros((11, 2))))
55 | s_actual_m = np.linspace(1, 15.0, 11)
56 |
57 | a, b, c = calc_cosyidcs(sectionid=sectionid,
58 | coordinates_sxy_m=coordinates_sxy_m,
59 | s_actual_m=s_actual_m)
60 |
61 | pass
62 |
--------------------------------------------------------------------------------
/tpa_map_functions/helperfuncs/concat_tpamaps_ltpl.py:
--------------------------------------------------------------------------------
1 | import os.path
2 | import numpy as np
3 | import datetime
4 |
5 | """
6 | Created by: Leonhard Hermansdorfer
7 | Created on: 04.02.2021
8 | """
9 |
10 |
11 | def concat_tpamaps_ltpl(path2tpamaps: str,
12 |                         filepath2output: str,
13 |                         tpamap_identifier: str):
14 |     """Concatenates separate tpamaps to a single, velocity-dependent tpamap which can be used for local trajectory
15 | planning.
16 |
17 | This function concatenates separate tpamaps to a single, velocity-dependent tpamap. The input files must have
18 | an identical identifier (prefix). The only difference comes from the distinct velocity steps where each tpamap
19 | is valid. This velocity info must be included in the filenames.
20 | Example: tpamap_berlin__27mps.csv, tpamap_berlin__56mps.csv, tpamap_berlin__83mps.csv
21 |
22 |     :param path2tpamaps: path to folder where separate tpamaps are located.
23 | :type path2tpamaps: str
24 | :param filepath2output: path to file where the concatenated tpamap should get stored.
25 | :type filepath2output: str
26 |     :param tpamap_identifier: identifier (common part of the filenames) of the separate tpamaps to be concatenated.
27 | :type tpamap_identifier: str
28 | """
29 |
30 | i_count_files = 0
31 | list_velsteps = []
32 | list_tpamap_filenames = []
33 |
34 | # list all files in specified input folder
35 | for file in os.listdir(path2tpamaps):
36 |
37 |         # search for files which end with specific extension and contain identifier
38 | if file.endswith(".csv") and tpamap_identifier in file:
39 |
40 | if "mps" in file:
41 | list_velsteps.append(int((file.split('_')[-1]).split('mps')[0]))
42 |
43 | list_tpamap_filenames.append(file)
44 |
45 | i_count_files += 1
46 |
47 | # sort filenames and velocity steps -> increasing order
48 | list_tpamap_filenames.sort()
49 | list_velsteps.sort()
50 |
51 | for j_count in range(len(list_velsteps)):
52 | list_dummy = list_velsteps.copy()
53 |
54 | val = list_dummy.pop(j_count)
55 |
56 | if val in list_dummy:
57 |             raise ValueError("tpamap functions: velocity steps of to-be-concatenated tpamaps must not be equal!")
58 |
59 | # load reference line
60 | with open(os.path.join(path2tpamaps, list_tpamap_filenames[0]), 'r') as fh:
61 | csv_data_tpamap = np.genfromtxt(fh, delimiter=',', comments='#')
62 | tpamap_size = len(csv_data_tpamap)
63 |
64 | tpamap = np.zeros((tpamap_size, i_count_files * 2 + 4))
65 |
66 | # load and concatenate maps
67 | i_count = 0
68 | for filename in list_tpamap_filenames:
69 |
70 | # load reference line
71 | with open(os.path.join(path2tpamaps, filename), 'r') as fh:
72 | csv_data_tpamap = np.genfromtxt(fh, delimiter=',', comments='#')
73 | tpamap_size = len(csv_data_tpamap)
74 |
75 | if i_count == 0:
76 | tpamap[:, 0] = csv_data_tpamap[:, 0]
77 | tpamap[:, 1] = csv_data_tpamap[:, 1]
78 | tpamap[:, 2] = csv_data_tpamap[:, 2]
79 | tpamap[:, 3] = csv_data_tpamap[:, 3]
80 |
81 | else:
82 |
83 | if not np.all(np.equal(tpamap[:, :4], csv_data_tpamap[:, :4])):
84 | raise ValueError("tpamap functions: sxy-coordinates of included tpamaps are not equal!")
85 |
86 | tpamap[:, 4 + i_count * 2] = csv_data_tpamap[:, 4]
87 | tpamap[:, 5 + i_count * 2] = csv_data_tpamap[:, 5]
88 | i_count += 1
89 |
90 | # TODO: plausibility checks
91 | # - check whether every acc. limit is not below value at smaller velocity level
92 |
93 | # save data to csv file --------------------------------------------------------------------------------------------
94 |
95 | # prepare file header
96 | header = 'created on: ' + datetime.datetime.now().strftime("%Y-%m-%d") + ', '\
97 | + datetime.datetime.now().strftime("%H:%M:%S")
98 |
99 | header = header + '\n' + 'track: ' + tpamap_identifier
100 | header = header + '\n' + 'section_id,s_m,x_m,y_m'
101 |
102 | if len(list_velsteps) <= 1:
103 | header = header + ',ax_max_mps2,ay_max_mps2'
104 |
105 | else:
106 | for velstep in list_velsteps:
107 | header = header + ',ax_max_mps2__' + str(velstep) + 'mps' + ',ay_max_mps2__' + str(velstep) + 'mps'
108 |
109 | # write data file
110 | with open(filepath2output, 'wb') as fh:
111 | np.savetxt(fh, tpamap, fmt='%0.4f', delimiter=',', header=header)
112 |
113 | print('tpa map functions: tpamap saved successfully')
114 |
115 |
116 | # ----------------------------------------------------------------------------------------------------------------------
117 | # testing --------------------------------------------------------------------------------------------------------------
118 | # ----------------------------------------------------------------------------------------------------------------------
119 | if __name__ == "__main__":
120 | pass
121 |
--------------------------------------------------------------------------------
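
A minimal usage sketch following the filename convention from the docstring (the folder and the output filename are assumptions):

    from tpa_map_functions.helperfuncs.concat_tpamaps_ltpl import concat_tpamaps_ltpl

    # combine tpamap_berlin__27mps.csv, tpamap_berlin__56mps.csv and tpamap_berlin__83mps.csv
    # into a single velocity-dependent tpamap for the local trajectory planner
    concat_tpamaps_ltpl(path2tpamaps='outputs',
                        filepath2output='outputs/tpamap_varloc_varvel_berlin.csv',
                        tpamap_identifier='tpamap_berlin')
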
/tpa_map_functions/helperfuncs/concat_tpamaps_vehdynsim.py:
--------------------------------------------------------------------------------
1 | import os.path
2 | import numpy as np
3 |
4 | """
5 | Created by: Dominik Staerk
6 | Created on: 04.11.2020
7 | """
8 |
9 |
10 | def concat_tpamaps_vehdynsim(path2tpamaps: str,
11 |                              filepath2output: str,
12 |                              list_tpamaps: list,
13 |                              time_interpsteps: list,
14 |                              bool_enable_tpamaps: bool):
15 | """Concatenates the generated tpamaps horizontally to further use them within the vehicle dynamics simulation.
16 |
17 |     This is mandatory because the compiled friction_model in the vehicle_dynamics_model that processes the tpa-maps
18 |     can only handle fixed-size array inputs.
19 |     The concatenated tpamap.csv will hold the tpamap_Mode in row 1 and column 1, the interpTime array in row 2 and
20 |     columns 1 to 10 and the map information in rows 3 to 2502 and columns 1 to 23.
21 | A maximum number of 10 tpamaps can be concatenated and further processed!
22 |
23 | :param path2tpamaps: path to folder where separate tpamaps are located
24 | :type path2tpamaps: str
25 | :param filepath2output: path to file where concatenated tpamaps should get saved
26 | :type filepath2output: str
27 | :param list_tpamaps: list containing the filenames of the tpamap which should get concatenated
28 | :type list_tpamaps: list
29 |     :param time_interpsteps: list containing the concrete timesteps used for interpolating the tpamaps
30 | :type time_interpsteps: list
31 | :param bool_enable_tpamaps: flag which enables or disables the tpamaps when the concatenated output file is loaded
32 | :type bool_enable_tpamaps: bool
33 | """
34 |
35 | # check arguments --------------------------------------------------------------------------------------------------
36 |
37 | # initialize empty array to concatenate tpa maps
38 | tpamap = np.zeros((2502, 23))
39 |
40 | if bool_enable_tpamaps:
41 |
42 | print('tpa map functions: bool_enable_tpamaps is True -> specified tpa maps will be concatenated')
43 |
44 | if len(list_tpamaps) > 10 or len(list_tpamaps) == 0:
45 | raise ValueError("tpa map functions: list 'list_tpamaps' must contain between one (min) and ten (max) tpa "
46 | "maps")
47 |
48 | if len(time_interpsteps) > 10 or len(time_interpsteps) == 0:
49 | raise ValueError("tpa map functions: list 'time_interpsteps' must contain between one (min) and ten (max) "
50 | "values")
51 |
52 | if len(list_tpamaps) != len(time_interpsteps):
53 | raise ValueError("tpa map functions: both lists 'list_tpamaps' and 'time_interpsteps' must have same "
54 | "number of entries")
55 |
56 | if not np.isclose(a=time_interpsteps[0], b=0.0, atol=1e-7):
57 | time_interpsteps[0] = 0.0
58 | print('WARNING tpa map functions: time_interpsteps first entry is not zero, but will be set to zero')
59 |
60 | if not np.all(np.diff(time_interpsteps) > 0):
61 | raise ValueError("tpa map functions: list 'time_interpsteps' must only contain an increasing set of values")
62 |
63 | while len(list_tpamaps) < 10:
64 | list_tpamaps.append(False)
65 |
66 | while len(time_interpsteps) < 10:
67 | time_interpsteps.append(0.0)
68 |
69 | # insert flag and timesteps into tpamap output array
70 | tpamap[0, 0] = 1.0
71 | tpamap[1, :len(time_interpsteps)] = time_interpsteps[:]
72 |
73 | # load and concatenate maps
74 | k = 0
75 |         for map_filename in list_tpamaps:
76 |             if map_filename:
77 |                 filepath2input_tpamaps = os.path.join(path2tpamaps, map_filename)
78 |
79 | with open(filepath2input_tpamaps, 'r') as fh:
80 | csv_data_tpamap = np.genfromtxt(fh, delimiter=',')
81 | tpamap_size = len(csv_data_tpamap)
82 |
83 | else:
84 | break
85 |
86 | tpamap[2:tpamap_size + 2, 0] = csv_data_tpamap[:, 0]
87 | tpamap[2:tpamap_size + 2, 1] = csv_data_tpamap[:, 1]
88 | tpamap[2:tpamap_size + 2, 2] = csv_data_tpamap[:, 2]
89 | tpamap[2:tpamap_size + 2, 3 + k * 2] = csv_data_tpamap[:, 3]
90 | tpamap[2:tpamap_size + 2, 4 + k * 2] = csv_data_tpamap[:, 4]
91 | k += 1
92 |
93 | else:
94 |
95 | print('tpa map functions: bool_enable_tpamaps is False -> constant friction coeffs. will be applied')
96 |
97 | tpamap[0, 0] = 0.0
98 | tpamap[1, :10] = np.arange(0, 100, 10)
99 | tpamap[2:, :] = 1.0
100 | tpamap[2, 0] = 0.0
101 |
102 | # write data to tpamap_tum_mcs.csv
103 | with open(filepath2output, 'wb') as fh:
104 | np.savetxt(fh, tpamap, fmt='%0.4f', delimiter=',')
105 |
106 | print('tpa map functions: tpamap_tum_mcs.csv saved successfully')
107 |
108 |
109 | # ----------------------------------------------------------------------------------------------------------------------
110 | # testing --------------------------------------------------------------------------------------------------------------
111 | # ----------------------------------------------------------------------------------------------------------------------
112 | if __name__ == "__main__":
113 | pass
114 |
--------------------------------------------------------------------------------
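
A minimal usage sketch (both input filenames are hypothetical friction-coefficient maps in the [s_m, x_m, y_m, lambda_mue_x, lambda_mue_y] format; the interpolation timesteps are illustrative):

    from tpa_map_functions.helperfuncs.concat_tpamaps_vehdynsim import concat_tpamaps_vehdynsim

    # concatenate two friction maps; the simulation interpolates between them at the given times
    concat_tpamaps_vehdynsim(path2tpamaps='outputs',
                             filepath2output='outputs/tpamap_tum_mcs.csv',
                             list_tpamaps=['tpamap_lap01.csv', 'tpamap_lap02.csv'],
                             time_interpsteps=[0.0, 600.0],
                             bool_enable_tpamaps=True)
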
/tpa_map_functions/helperfuncs/import_vehdyninfo.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | """
4 | Created by: Leonhard Hermansdorfer
5 | Created on: 05.09.2020
6 | """
7 |
8 |
9 | def import_vehdyninfo(filepath2localgg: str = "") -> tuple:
10 | """
11 | .. description::
12 | This function imports the local acceleration limits specified by a 'localgg' file and checks validity of the
13 | imported data. The file contains the sectionID, s-, x- and y-coordinates of the underlying reference line and the
14 | corresponding acceleration limits in longitudinal and lateral direction. The file consists of a single row,
15 | which results in a constant acceleration limit for the whole racetrack, or of multiple rows, which results in
16 |     location-dependent acceleration limits.
17 | The file format is [sectionID, s_m, x_m, y_m, ax_max_mps2, ay_max_mps2] with units [-, m, m, m, m/s^2, m/s^2].
18 |
19 | .. inputs::
20 | :param filepath2localgg: absolute path to 'localgg' file which contains vehicle acceleration limits
21 | :type filepath2localgg: str
22 |
23 | .. outputs::
24 |     :return: tuple of (tpamap, velocity_steps): the tpa map containing the reference line and the long./lat. local
25 |              acceleration limits, plus the velocity steps (in mps) of velocity-dependent limits (empty if none)
26 |     :rtype: tuple
27 | """
28 |
29 | # raise error if no path is provided
30 | if not filepath2localgg:
31 | raise ValueError('Missing path to file which contains vehicle acceleration limits!')
32 |
33 | # read header from csv file
34 | header_lines = []
35 | bool_continue = True
36 | with open(filepath2localgg, 'rb') as fh:
37 |
38 | while bool_continue:
39 | line = fh.readline()
40 |
41 | if "#" in str(line):
42 | header_lines.append(line)
43 | bool_continue = True
44 | else:
45 | bool_continue = False
46 |
47 | # load localgg data from csv file
48 | with open(filepath2localgg, 'rb') as fh:
49 | data_localggfile = np.loadtxt(fh, comments='#', delimiter=',')
50 |
51 | velocity_steps = []
52 |
53 | # read velocity steps from header
54 | try:
55 | for line in header_lines:
56 | for ele in str(line).split(','):
57 | if 'mps' in ele:
58 | velocity_steps.append(float(ele.split('__')[1].split('mps')[0]))
59 |
60 | count_veldep_columns = len(velocity_steps)
61 |
62 | if count_veldep_columns == 2:
63 | velocity_steps = []
64 |
65 | # catch error, if no velocity data is available
66 | except IndexError:
67 | count_veldep_columns = 2
68 | velocity_steps = []
69 | pass
70 |
71 | # Check Imported Data for Validity ---------------------------------------------------------------------------------
72 |
73 | # check dimension of localgg data
74 | if data_localggfile.ndim == 1:
75 |
76 | if data_localggfile.size < 6:
77 | raise ValueError('TPA MapInterface: wrong shape of localgg file data -> at least six columns required!')
78 |
79 | if data_localggfile.size != 4 + count_veldep_columns:
80 | raise ValueError('TPA MapInterface: wrong shape of localgg file data -> number of data columns and header '
81 | 'entries does not match!')
82 |
83 | # if data_localggfile.size > 5:
84 | # print('WARNING: TPA MapInterface: shape of localgg file data -> more than five columns provided!')
85 |
86 | tpamap = np.hstack((np.zeros(4), data_localggfile[4:]))[np.newaxis, :]
87 |
88 | elif data_localggfile.ndim == 2:
89 |
90 | if data_localggfile.shape[1] < 6:
91 | raise ValueError('TPA MapInterface: wrong shape of localgg file data -> at least six columns required!')
92 |
93 | if data_localggfile.shape[1] != 4 + count_veldep_columns:
94 | raise ValueError('TPA MapInterface: wrong shape of localgg file data -> number of data columns and header '
95 | 'entries does not match!')
96 |
97 | # if data_localggfile.shape[1] > 5:
98 | # print('WARNING: TPA MapInterface: shape of localgg file data -> more than five columns provided!')
99 |
100 | tpamap = data_localggfile
101 |
102 | # check validity of sxy-coordinates
103 | if np.any(tpamap[:, 1] < 0.0):
104 | raise ValueError('TPA MapInterface: one or more s-coordinate values are smaller than zero!')
105 |
106 | if np.any(np.diff(tpamap[:, 1]) <= 0.0):
107 | raise ValueError('TPA MapInterface: s-coordinates are not strictly monotone increasing!')
108 |
109 | # check whether endpoint and start point of s is close together in xy
110 | if not np.isclose(np.hypot(tpamap[0, 2] - tpamap[-1, 2], tpamap[0, 3] - tpamap[-1, 3]), 0.0):
111 | raise ValueError('TPA MapInterface: s-coordinates representing the race track are not closed; '
112 | 'first and last point are not equal!')
113 |
114 | else:
115 | raise ValueError("Localgg data file must provide one or two dimensions!")
116 |
117 | # check velocity data for validity
118 | if count_veldep_columns != 0:
119 |
120 | if velocity_steps[0::2] != velocity_steps[1::2]:
121 | raise ValueError('TPA MapInterface: data import: acceleration limits are not pairwise equal for same '
122 | 'velocity!')
123 |
124 | if not np.all(np.diff(np.asarray(velocity_steps[0::2])) > 0):
125 | raise ValueError('TPA MapInterface: data import: velocity steps are not increasing monotonously!')
126 |
127 | # check local acceleration limits for validity
128 | if np.any(tpamap[:, 4:] > 40.0):
129 | raise ValueError('TPA MapInterface: max. acceleration limit in localgg file exceeds 40 m/s^2!')
130 |
131 | if np.any(tpamap[:, 4:] < 1.0):
132 | raise ValueError('TPA MapInterface: min. acceleration limit in localgg file is below 1 m/s^2!')
133 |
134 | return tpamap, velocity_steps[::2]
135 |
136 |
137 | # ----------------------------------------------------------------------------------------------------------------------
138 | # testing --------------------------------------------------------------------------------------------------------------
139 | # ----------------------------------------------------------------------------------------------------------------------
140 | if __name__ == '__main__':
141 | pass
142 |
--------------------------------------------------------------------------------
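
A minimal usage sketch (the file path is an assumption; any csv in the documented localgg format works):

    from tpa_map_functions.helperfuncs.import_vehdyninfo import import_vehdyninfo

    tpamap, velocity_steps = import_vehdyninfo(
        filepath2localgg='inputs/veh_dyn_info/tpamap_varloc_constvel_berlin.csv')

    # tpamap:         ndarray with columns [sectionID, s_m, x_m, y_m, ax_max_mps2, ay_max_mps2, ...]
    # velocity_steps: one velocity (in mps) per ax/ay column pair, empty list for velocity-independent maps
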
/tpa_map_functions/helperfuncs/preprocess_ltplrefline.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import logging
3 | import math
4 |
5 | """
6 | Created by: Leonhard Hermansdorfer
7 | Created on: 12.11.2019
8 | """
9 |
10 |
11 | def preprocess_ltplrefline(filepath2ltpl_refline: str = "",
12 | reference_line: np.array = None,
13 | mode_resample_refline: str = 'const_steps',
14 | stepsize_resample_m: float = 0,
15 | section_length_limits_m: np.array = None,
16 | interpolation_method: str = 'slinear',
17 | logger: object = None,
18 | bool_enable_debug: bool = False) -> dict:
19 | """
20 | Documentation
21 |     This function reads the reference line file which is also used in the local trajectory module to obtain the
22 |     reference line, track width, track boundaries and global raceline.
23 |     If the reference line is already available, it can be used directly; the filepath then has to be an empty string!
24 |
25 | Input
26 | :param filepath2ltpl_refline: path pointing to the file to be imported
27 | :param reference_line: reference line containing xy-coordinates in meters [x_m, y_m]
28 | :param mode_resample_refline: mode for resampling reference line, options: "const_steps", "var_steps"
29 | :param stepsize_resample_m: desired stepsize for resampled reference line in meters
30 |     :param section_length_limits_m: desired min. and max. section lengths in meters when variable steps are activated
31 | :param interpolation_method: interpolation method used for resampling of reference line
32 | :param logger: logger object for handling logs within this function
33 | :param bool_enable_debug: enables debug mode and provides more data in output dictionary
34 |
35 | Output
36 | :return refline: x and y coordinate of refline
37 |     :return width right/left: width to track bounds at given refline coordinates in meters
38 |     :return normvec_normalized: x and y components of normalized normal vector at given refline coordinates
39 | :return raceline_glob: x,y-coordinates of global raceline
40 | :return bool_closedtrack: boolean indicating whether race track is closed (True) or not (False)
41 |     :return refline_resampled: resampled reference line with requested stepsize
42 | """
43 |
44 | # ------------------------------------------------------------------------------------------------------------------
45 | # Import Data ------------------------------------------------------------------------------------------------------
46 | # ------------------------------------------------------------------------------------------------------------------
47 |
48 | if not logger:
49 | logger = logging.getLogger('logs')
50 |
51 | dict_output = dict()
52 |
53 | # raise error, if both filepath and reference line array is provided
54 | if bool(filepath2ltpl_refline) and reference_line is not None:
55 | logger.critical('ERROR: path to reference line file AND reference line array provided! Only provide one input.')
56 | raise ValueError('path to reference line file AND reference line array provided!')
57 |
58 | # raise error, if neither filepath nor reference line array is provided
59 | elif not filepath2ltpl_refline and reference_line is None:
60 | logger.critical('ERROR: neither path to reference line file nor reference line array provided! At least '
61 | 'provide one input.')
62 | raise ValueError('neither path to reference line file nor reference line array provided!')
63 |
64 | if reference_line is not None:
65 | mode_resample_refline = "const_steps"
66 |
67 | if mode_resample_refline not in ["const_steps", "var_steps"]:
68 | logger.critical('ERROR: provided mode for resampling reference line is not valid!')
69 | raise ValueError('provided mode for resampling reference line is not valid!')
70 |
71 | if section_length_limits_m is None and mode_resample_refline == "var_steps":
72 | mode_resample_refline = "const_steps"
73 | logger.warning('WARNING: resampling mode is set to constant steps, '
74 | 'because no min/max section lengths are provided')
75 |
76 | if section_length_limits_m is not None and (np.any(np.less_equal(section_length_limits_m, 0))
77 | or (section_length_limits_m[0] >= section_length_limits_m[1])):
78 | logger.critical('ERROR: provided section length limits are not valid!')
79 | raise ValueError('provided section length limits are not valid!')
80 |
81 | # load reference line from file or proceed with existing reference line
82 | if bool(filepath2ltpl_refline):
83 |
84 | # load reference line
85 | with open(filepath2ltpl_refline, 'r') as fh:
86 | csv_data_refline = np.genfromtxt(fh, delimiter=';')
87 |
88 | # Parse csv file -----------------------------------------------------------------------------------------------
89 |
90 | # load data from csv file (closed; assumed order listed below)
91 | # x_ref_m, y_ref_m, width_right_m, width_left_m, x_normvec_m, y_normvec_m, alpha_m, s_racetraj_m,
92 | # psi_racetraj_rad, kappa_racetraj_radpm, vx_racetraj_mps, ax_racetraj_mps2
93 |
94 | # get reference line coordinates (x_ref_m, y_ref_m)
95 | refline_coordinates = csv_data_refline[:, 0:2]
96 |
97 | # get trackwidth right/left
98 | width_right = csv_data_refline[:, 2]
99 | width_left = csv_data_refline[:, 3]
100 |
101 |         # get normalized normal vectors
102 | normvec_normalized = csv_data_refline[:, 4:6]
103 |
104 | # get raceline alpha
105 | alpha_mincurv = csv_data_refline[:, 6]
106 |
107 |         # get raceline s-coordinates
108 | s_rl = csv_data_refline[:, 7]
109 |
110 | # get kappa at raceline points
111 | kappa_rl = csv_data_refline[:, 9]
112 |
113 | # get velocity at raceline points
114 | vel_rl = csv_data_refline[:, 10]
115 |
116 | # get long. acceleration at raceline points
117 | ax_rl = csv_data_refline[:, 11]
118 |
119 | # calculate lateral acceleration at raceline points
120 | ay_rl = kappa_rl * vel_rl**2
121 |
122 | # TESTING ------------------------------------------------------------------------------------------------------
123 | # test an unclosed race track
124 | # idx_cut = 333
125 |
126 | # refline_coordinates = refline_coordinates[0:idx_cut, :]
127 | # width_right = width_right[0:idx_cut]
128 | # width_left = width_left[0:idx_cut]
129 | # normvec_normalized = normvec_normalized[0:idx_cut, :]
130 | # alpha_mincurv = alpha_mincurv[0:idx_cut]
131 | # s_rl = s_rl[0:idx_cut]
132 | # kappa_rl = kappa_rl[0:idx_cut]
133 | # vel_rl = vel_rl[0:idx_cut]
134 | # ax_rl = ax_rl[0:idx_cut]
135 | # ay_rl = ay_rl[0:idx_cut]
136 | # TESTING End --------------------------------------------------------------------------------------------------
137 |
138 | # calculate coordinates of raceline
139 | xy = refline_coordinates + normvec_normalized * alpha_mincurv[:, np.newaxis]
140 |
141 | else:
142 | refline_coordinates = reference_line
143 |
144 | # Check reference and race line ------------------------------------------------------------------------------------
145 |
146 | # calculate distance between first and last coordinate of reference line
147 | distance_last2firstcoordinate_m = \
148 | math.sqrt(np.power(refline_coordinates[0, 0] - refline_coordinates[-1, 0], 2)
149 | + np.power(refline_coordinates[0, 1] - refline_coordinates[-1, 1], 2))
150 |
151 | # consider a reference line as not closed when distance between first and last entry is above 8 meters
152 | if distance_last2firstcoordinate_m < 8:
153 | bool_closedtrack = True
154 | refline = refline_coordinates
155 |
156 | if bool(filepath2ltpl_refline):
157 | raceline_glob = np.column_stack((s_rl, xy))
158 |
159 | else:
160 | bool_closedtrack = False
161 |
162 | # add an additional entry at the end of each array (necessary for subsequent steps)
163 |
164 | diff_refline_m = np.diff(refline_coordinates[-2:, :], axis=0)[0]
165 | refline = np.vstack([refline_coordinates, refline_coordinates[-1] + diff_refline_m])
166 |
167 | if bool(filepath2ltpl_refline):
168 | diff_raceline_m = np.diff(xy[-2:, :], axis=0)[0]
169 |
170 | raceline_glob = np.column_stack((np.vstack([s_rl[:, np.newaxis], 0]),
171 | np.vstack([xy, xy[-1] + diff_raceline_m])))
172 |
173 | raceline_glob[-1, 0] = round(raceline_glob[-2, 0] + math.sqrt(np.sum(np.square(diff_raceline_m))), 7)
174 |
175 | width_right = np.hstack([width_right, width_right[-1]])
176 | width_left = np.hstack([width_left, width_left[-1]])
177 | normvec_normalized = np.vstack([normvec_normalized, normvec_normalized[-1, :]])
178 | kappa_rl = np.hstack([kappa_rl, kappa_rl[-1]])
179 | vel_rl = np.hstack([vel_rl, vel_rl[-1]])
180 | ax_rl = np.hstack([ax_rl, ax_rl[-1]])
181 | ay_rl = np.hstack([ay_rl, ay_rl[-1]])
182 |
183 | if bool(filepath2ltpl_refline):
184 | dict_output = {'raceline_glob': raceline_glob,
185 | 'width_right': width_right,
186 | 'width_left': width_left,
187 | 'normvec_normalized': normvec_normalized}
188 |
189 | # use reference line instead of raceline for further calculation
190 | diff_coordinates_m = np.sqrt(np.sum(np.diff(refline, axis=0) ** 2, axis=1))
191 |
192 | s_refline_m = np.cumsum(diff_coordinates_m)
193 | s_refline_m = np.vstack((np.zeros(1), s_refline_m[:, np.newaxis]))
194 |
195 | refline_sxy = np.hstack((s_refline_m, refline))
196 |
197 | dict_output['refline'] = refline_sxy
198 | dict_output['bool_closedtrack'] = bool_closedtrack
199 |
200 | # ------------------------------------------------------------------------------------------------------------------
201 | # Resample Reference Line ------------------------------------------------------------------------------------------
202 | # ------------------------------------------------------------------------------------------------------------------
203 |
204 | dict_output['refline_resampled'] = dict()
205 | dict_output['refline_resampled'].update({'refline_resampled': refline_sxy})
206 |
207 | # resample reference line with constant step size ------------------------------------------------------------------
208 | if mode_resample_refline == "const_steps":
209 |
210 | str_log = str(stepsize_resample_m)
211 | diff_coordinates_m_mean = np.mean(diff_coordinates_m)
212 |
213 | # enforce usage of min. stepsize if desired stepsize is smaller
214 | if stepsize_resample_m < diff_coordinates_m_mean:
215 | stepsize_resample_m = int(np.round(diff_coordinates_m_mean))
216 |
217 | count = 1
218 | idx_prev = 0
219 | section_id = np.zeros(refline_sxy.shape[0])
220 |
221 | # assign same section id to every s-coordinate which lies between start and end coordinate of specific section
222 | while stepsize_resample_m * count < refline_sxy[-1, 0]:
223 |
224 | idx_tmp = np.argmin(np.abs(refline_sxy[:, 0] - stepsize_resample_m * count))
225 | section_id[idx_prev:idx_tmp] = count
226 | idx_prev = idx_tmp
227 | count += 1
228 |
229 | # fill last section entries which are smaller than required stepsize
230 | if idx_tmp < refline_sxy.shape[0]:
231 | section_id[idx_tmp:] = count
232 |
233 | # last section id entry has to be a new value
234 | section_id[-1] = section_id[-2] + 1
235 |
236 | # calc min/mean/max values for debugging
237 | print_info = refline_sxy[np.concatenate((np.asarray([True]), np.isclose(np.diff(section_id), 1, 1e-08))), 0]
238 |
239 | logger.warning("resample stepsize has to match stepsize of given reference line! "
240 | + "desired stepsize = " + str_log + " m; "
241 | + "continue with a min/mean/max stepsize of "
242 | + str(np.around(np.min(np.diff(print_info)), 3)) + '/'
243 | + str(np.around(np.mean(np.diff(print_info)), 3)) + '/'
244 | + str(np.around(np.max(np.diff(print_info)), 3)) + " m")
245 |
246 | dict_output['refline_resampled']['section_id'] = section_id[:, np.newaxis]
247 |
248 | # resample reference line with variable step size on basis of raceline ---------------------------------------------
249 | if mode_resample_refline == "var_steps":
250 |
251 | ax_trigger = [0] * ax_rl.shape[0]
252 | ay_trigger = [0] * ay_rl.shape[0]
253 |
254 | # detect situations where long. or lat. acceleration exceed a certain limit
255 | for int_row, ele_row in enumerate(np.hstack((ax_rl[:, np.newaxis], ay_rl[:, np.newaxis]))):
256 |
257 | if ele_row[0] < - 1.0:
258 | ax_trigger[int_row] = -1
259 | elif ele_row[0] > 1.0:
260 | ax_trigger[int_row] = 1
261 |
262 | if ele_row[1] < - 1.0:
263 | ay_trigger[int_row] = -1
264 | elif ele_row[1] > 1.0:
265 | ay_trigger[int_row] = 1
266 |
267 | ay_trigger = np.asarray(ay_trigger)
268 | ax_trigger = np.asarray(ax_trigger)
269 |
270 | # filter situations which occur only for a predefined number of data points (to avoid single outliers)
271 |
272 | # number of data points below which trigger points are "smoothed"
273 | delay_axtrigger = 3
274 | delay_aytrigger = 3
275 |
276 | count = [1, 1]
277 | prev = [0, 0]
278 |
279 | for i_count in range(len(ax_trigger)):
280 |
281 | if i_count == 0:
282 | prev = [ax_trigger[i_count], ay_trigger[i_count]]
283 |
284 | # check whether or not category has changed to previous data point
285 | bool_expr = np.equal(prev, [ax_trigger[i_count], ay_trigger[i_count]])
286 | if np.any(bool_expr):
287 | count += 1 * bool_expr
288 |
289 | bool_expr = np.logical_not(bool_expr)
290 |
291 | if np.any(bool_expr):
292 |
293 | # check whether current number of trigger points is below defined limit
294 | if np.any(count < max(delay_axtrigger, delay_aytrigger)):
295 |
296 | # overwrite ax trigger points
297 | if count[0] < delay_axtrigger and bool_expr[0]:
298 | logger.debug("ax section at {} m is only {} steps long".format(s_refline_m[i_count - count[0]],
299 | count[0]))
300 | logger.debug("insert ax trigger: {}".format(ax_trigger[i_count - count[0] - 1]))
301 |
302 | ax_trigger[i_count - count[0]:i_count] = ax_trigger[i_count - count[0] - 1]
303 | count[0] = 1
304 |
305 | # overwrite ay trigger points
306 | if count[1] < delay_aytrigger and bool_expr[1]:
307 | logger.debug("ay section at {} m is only {} steps long".format(s_refline_m[i_count - count[1]],
308 | count[1]))
309 | logger.debug("insert ay trigger: {}".format(ay_trigger[i_count - count[1] - 1]))
310 |
311 | ay_trigger[i_count - count[1]:i_count] = ay_trigger[i_count - count[1] - 1]
312 | count[1] = 1
313 |
314 | # reset counter if number of trigger points of one section is above defined limit
315 | if np.logical_and(count[0] >= delay_axtrigger, bool_expr[0]):
316 | count[0] = 1
317 | if np.logical_and(count[1] >= delay_aytrigger, bool_expr[1]):
318 | count[1] = 1
319 |
320 | prev[0] = ax_trigger[i_count]
321 | prev[1] = ay_trigger[i_count]
322 |
323 | # identify specific driving situations to resample reference line
324 | indices = []
325 | list_section_category = []
326 | list_sectcat_sparse = []
327 | section_category_prev = 0
328 | section_length_current = 0.0
329 |
330 | section_length_min = section_length_limits_m[0]
331 | section_length_max = section_length_limits_m[1]
332 |
333 | # section_categories:
334 | # 1 - pure braking
335 |     # 2 - combined braking and turn (negative: left, positive: right)
336 | # 3 - pure turn (negative: left, positive: right)
337 | # 4 - combined acceleration and turn (negative: left, positive: right)
338 | # 5 - pure acceleration
339 | # 6 - high speed straight line
340 |
341 | diff_coordinates_m_ext = np.hstack((diff_coordinates_m, diff_coordinates_m[-1]))
342 |
343 | for i_count in range(len(ay_trigger)):
344 |
345 | # pure braking
346 | if ay_trigger[i_count] == 0 and ax_trigger[i_count] == -1:
347 | section_category = 1
348 |
349 | # combined braking and turn
350 | elif ay_trigger[i_count] != 0 and ax_trigger[i_count] == -1:
351 | section_category = 2 * np.sign(ay_trigger[i_count])
352 |
353 | # pure turning
354 | elif ay_trigger[i_count] != 0 and ax_trigger[i_count] == 0:
355 | section_category = 3 * np.sign(ay_trigger[i_count])
356 |
357 | # combined acceleration and turn
358 | elif ay_trigger[i_count] != 0 and ax_trigger[i_count] == 1:
359 | section_category = 4 * np.sign(ay_trigger[i_count])
360 |
361 | # pure acceleration
362 | elif ay_trigger[i_count] == 0 and ax_trigger[i_count] == 1:
363 | section_category = 5
364 |
365 | # high speed straight line
366 | elif ay_trigger[i_count] == 0 and ax_trigger[i_count] == 0 and vel_rl[i_count] > 40:
367 | section_category = 6
368 |
369 | else:
370 | section_category = -100
371 |
372 | # only after first iteration: set value of previous section
373 | if section_category_prev == 0:
374 | section_category_prev = section_category
375 |
376 | # check whether or not category has changed to previous data point
377 | if section_category_prev == section_category:
378 |
379 | # check whether current section length of same category exceeds max. section length
380 | if section_length_current < section_length_max:
381 | section_length_current += diff_coordinates_m_ext[i_count]
382 |
383 | else:
384 | section_length_current = 0.0
385 | indices.append(i_count)
386 | list_sectcat_sparse.append(section_category)
387 |
388 | elif section_category_prev != section_category:
389 |
390 | # check whether current section length of same category already exceeds min. section length
391 | if section_length_current >= section_length_min:
392 | section_length_current = 0.0
393 | indices.append(i_count)
394 | section_category_prev = section_category
395 | list_sectcat_sparse.append(section_category)
396 |
397 | else:
398 | section_length_current += diff_coordinates_m_ext[i_count]
399 | section_category = section_category_prev
400 |
401 | list_section_category.append(section_category)
402 |
403 | indices.insert(0, 0)
404 | indices.append(refline_sxy.shape[0] - 1)
405 | list_sectcat_sparse.insert(0, list_section_category[0])
406 | list_sectcat_sparse.append(100)
407 |
408 | # postprocess sections -----------------------------------------------------------------------------------------
409 | prev = list_sectcat_sparse[0]
410 | count = 1
411 |
412 | for i_count in range(1, len(list_sectcat_sparse)):
413 |
414 | if list_sectcat_sparse[i_count] == prev:
415 | count += 1
416 |
417 | elif count > 1 and (list_sectcat_sparse[i_count] != prev or i_count == len(list_sectcat_sparse) - 1):
418 | logger.debug("number of consecutive sections of type {}: {} sections".format(prev, count))
419 | logger.debug("sections start at {} m, end at {}".format(refline_sxy[indices[i_count - count], 0],
420 | refline_sxy[indices[i_count], 0]))
421 |
422 | interp = np.linspace(refline_sxy[indices[i_count - count], 0],
423 | refline_sxy[indices[i_count], 0],
424 | count + 1)
425 |
426 | # calculate indices which should be used for interpolated sections
427 | for j_count in range(1, len(interp)):
428 | idx = np.argmin(np.abs(refline_sxy[:, 0] - interp[j_count]))
429 | indices[i_count - count + j_count] = idx
430 |
431 | count = 1
432 | prev = list_sectcat_sparse[i_count]
433 |
434 | else:
435 | count = 1
436 | prev = list_sectcat_sparse[i_count]
437 |
438 | # new
439 | section_id = np.zeros(refline_sxy.shape[0], dtype=int)
440 |
441 | for idx in np.arange(len(indices) - 1):
442 | section_id[indices[idx]:indices[idx + 1]] = idx + 1
443 |
444 | section_id[-1] = section_id[-2] + 1
445 |
446 | dict_output['refline_resampled']['section_id'] \
447 | = ((section_id * 10 + np.abs(list_section_category)) * np.sign(list_section_category))[:, np.newaxis]
448 |
449 | # calculate data for debug plots
450 | if bool_enable_debug:
451 | dict_output['refline_resampled'].update({'ax_mps2': ax_rl,
452 | 'ay_mps2': ay_rl,
453 | 'ax_trigger': ax_trigger,
454 | 'ay_trigger': ay_trigger,
455 | 'list_section_category': list_section_category})
456 |
457 | dict_output['refline_resampled']['sectionid_change'] \
458 | = np.concatenate((np.asarray([True]), np.isclose(np.diff(section_id), 1, 1e-08)))
459 |
460 | if mode_resample_refline in ["const_steps", "var_steps"] and bool_enable_debug:
461 |
462 | diff_refline_resampled_m = np.diff(refline_sxy[dict_output['refline_resampled']['sectionid_change'], 0])
463 |
464 | mean_diff_m = np.mean(diff_refline_resampled_m)
465 | min_diff_m = np.min(diff_refline_resampled_m)
466 | max_diff_m = np.max(diff_refline_resampled_m)
467 | std_diff_m = np.std(diff_refline_resampled_m)
468 |
469 | logger.debug('mean distance between coordinates: ' + str(round(mean_diff_m, 3)) + ' m; '
470 | + 'min. distance between coordinates: ' + str(round(min_diff_m, 3)) + ' m; '
471 | + 'max. distance between coordinates: ' + str(round(max_diff_m, 3)) + ' m; '
472 | + 'standard deviation of distance between coordinates: ' + str(round(std_diff_m, 3)) + ' m')
473 |
474 | dict_output['refline_resampled'].update({'mean_diff_m': mean_diff_m,
475 | 'min_diff_m': min_diff_m,
476 | 'max_diff_m': max_diff_m,
477 | 'std_diff_m': std_diff_m})
478 |
479 | return dict_output
480 |
481 |
482 | # ----------------------------------------------------------------------------------------------------------------------
483 | # testing --------------------------------------------------------------------------------------------------------------
484 | # ----------------------------------------------------------------------------------------------------------------------
485 | if __name__ == '__main__':
486 | pass
487 |
--------------------------------------------------------------------------------
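
A minimal usage sketch (path and stepsize mirror the test block in save_tpamap.py further below; the dictionary keys are the ones set in this function):

    from tpa_map_functions.helperfuncs.preprocess_ltplrefline import preprocess_ltplrefline

    dict_refline = preprocess_ltplrefline(
        filepath2ltpl_refline='inputs/traj_ltpl_cl/traj_ltpl_cl_berlin.csv',
        mode_resample_refline='const_steps',
        stepsize_resample_m=10.0)

    refline_sxy = dict_refline['refline']                          # [s_m, x_m, y_m] of the reference line
    section_id = dict_refline['refline_resampled']['section_id']   # section id for every reference line point
    bool_closed = dict_refline['bool_closedtrack']
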
/tpa_map_functions/helperfuncs/save_tpamap.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import os.path
3 | import datetime
4 |
5 | """
6 | Created by: Leonhard Hermansdorfer
7 | Created on: 08.01.2020
8 | """
9 |
10 |
11 | def save_tpamap_fromfile(filepath2output_tpamap: str,
12 | mode_save_tpamap: str,
13 | coordinates_sxy_m: np.array,
14 | long_limit: np.array,
15 | lat_limit: np.array,
16 | track_name: str,
17 | section_id: np.array = None,
18 | header_info: dict = dict()):
19 | """Creates a default tpa map and calls a separate function to save the generated tpa map to a csv-file.
20 |
21 | Input
22 | :param filepath2output_tpamap: filepath to where tpa map is saved
23 | :type filepath2output_tpamap: str
24 | :param mode_save_tpamap: determines which data has to be saved (acc. limits or friction coeff.)
25 | :type mode_save_tpamap: str
26 | :param coordinates_sxy_m: contains s-,x- and y-coordinate of reference line
27 | :type coordinates_sxy_m: np.array
28 | :param long_limit: value for initialization of long. acceleration limit of tpa map
29 | :type long_limit: np.array
30 | :param lat_limit: value for initialization of lat. acceleration limit of tpa map
31 | :type lat_limit: np.array
32 | :param section_id: section id for every coordinate
33 | :type section_id: np.array
34 | :param header_info: contains information which is placed into the file header, defaults to dict()
35 | :type header_info: dict, optional
36 |
37 | Output
38 | ---
39 | """
40 |
41 | len_refline = coordinates_sxy_m.shape[0]
42 |
43 | try:
44 | if long_limit.ndim == 0:
45 |
46 | if lat_limit.ndim == 0:
47 | long_limit = np.full((len_refline, 1), long_limit)
48 | lat_limit = np.full((len_refline, 1), lat_limit)
49 |
50 | elif lat_limit.shape[0] == len_refline:
51 | long_limit = np.full((len_refline, 1), long_limit)
52 |
53 | elif long_limit.shape[0] == len_refline:
54 |
55 | if lat_limit.ndim == 0:
56 | lat_limit = np.full((len_refline, 1), lat_limit)
57 |
58 | except ValueError:
59 |         print('handling of ax and ay limits failed - please check input again - data not saved!')
60 | return
61 |
62 | if mode_save_tpamap == 'acclimits':
63 |
64 | # calc default section_id when not provided via function
65 | if section_id is None:
66 |
67 | section_id = np.arange(1, len_refline + 1)[:, np.newaxis]
68 |
69 | # recalc acc. values to fill reference line
70 | elif section_id[0, 0] > 1:
71 |
72 | long_limit_ext = np.zeros((len_refline, 1))
73 | lat_limit_ext = np.zeros((len_refline, 1))
74 | long_limit_ext[-1] = long_limit[0]
75 | lat_limit_ext[-1] = lat_limit[0]
76 |
77 | sectionid_change = np.where(
78 | np.vstack((np.asarray([True]), np.abs(np.diff(section_id, axis=0)) > 0.5)))[0]
79 |
80 | for i_count, idx in enumerate(sectionid_change[0:-1]):
81 | long_limit_ext[idx:sectionid_change[i_count + 1]] = long_limit[i_count]
82 | lat_limit_ext[idx:sectionid_change[i_count + 1]] = lat_limit[i_count]
83 |
84 | long_limit = long_limit_ext
85 | lat_limit = lat_limit_ext
86 |
87 | data_output = np.hstack((section_id, coordinates_sxy_m, long_limit, lat_limit))
88 |
89 | # fill data array with friction coeffs
90 | elif mode_save_tpamap == 'frictioncoeff':
91 | data_output = np.hstack((coordinates_sxy_m, long_limit, lat_limit))
92 |
93 | else:
94 | raise ValueError('mode_save_tpamap unknown!')
95 |
96 | save_tpamap(filepath2output_tpamap=filepath2output_tpamap,
97 | tpamap=data_output,
98 | header_info=header_info,
99 | track_name=track_name)
100 |
101 | # ----------------------------------------------------------------------------------------------------------------------
102 |
103 |
104 | def save_tpamap(filepath2output_tpamap: str,
105 | tpamap: np.array,
106 | track_name: str,
107 | header_info: dict = dict()):
108 |
109 | """Saves the tpa map containing s-coordinate, x,y-coordinates and long./lat. acceleration limits to a csv-file.
110 |
111 | Input
112 | :param filepath2output_tpamap: filepath to where tpa map is saved
113 | :type filepath2output_tpamap: str
114 | :param tpamap: contains the entire tpa map data (s-,x-,y-coordinates, long. acc. limit, lat. acc. limit)
115 | :type tpamap: np.array
116 | :param header_info: contains information which is placed into the file header, defaults to dict()
117 | :type header_info: dict, optional
118 |
119 | Output
120 | ---
121 | """
122 |
123 | header = 'created on: ' + datetime.datetime.now().strftime("%Y-%m-%d") + ', '\
124 | + datetime.datetime.now().strftime("%H:%M:%S")
125 |
126 | try:
127 | header = header + '\n' + 'track: ' + track_name + '\n' + 'GUI mode: ' + str(header_info['gui_mode'])
128 | except KeyError:
129 | pass
130 |
131 |     if header_info.get('gui_mode') == 2:  # avoid KeyError when 'gui_mode' is not provided
132 | header = header + '\n' + 'section_id,s_m,x_m,y_m,ax_max_mps2,ay_max_mps2'
133 | else:
134 | header = header + '\n' + 's_m,x_m,y_m,lambda_mue_x,lambda_mue_y'
135 |
136 | with open(filepath2output_tpamap, 'wb') as fh:
137 | np.savetxt(fh, tpamap, fmt='%0.4f', delimiter=',', header=header)
138 |
139 | print('tpamap_' + track_name + ' saved successfully')
140 |
141 |
142 | # ----------------------------------------------------------------------------------------------------------------------
143 | # testing --------------------------------------------------------------------------------------------------------------
144 | # ----------------------------------------------------------------------------------------------------------------------
145 | if __name__ == '__main__':
146 |
147 | import sys
148 |
149 | # import custom modules
150 | path2module = os.path.join(os.path.abspath(__file__).split('tpa_map_functions')[0], 'tpa_map_functions')
151 |
152 | sys.path.append(path2module)
153 |
154 | import tpa_map_functions
155 |
156 | bool_plot = True
157 | stepsize_resample_m = 10
158 | ax_max_tires_mps2 = np.asarray(10.5)
159 | ay_max_tires_mps2 = np.asarray(10)
160 |
161 | track_name = "dummy track"
162 | header_custom = {"gui_mode": 2}
163 |
164 | filepath2ltpl_refline = os.path.join(path2module, 'inputs', 'traj_ltpl_cl', 'traj_ltpl_cl_berlin.csv')
165 | filepath2output_tpamap = os.path.join(path2module, 'outputs', 'testmap.csv')
166 |
167 | dict_output = tpa_map_functions.helperfuncs.preprocess_ltplrefline.\
168 | preprocess_ltplrefline(filepath2ltpl_refline=filepath2ltpl_refline,
169 | stepsize_resample_m=stepsize_resample_m)
170 |
171 | refline_resampled = dict_output['refline_resampled']['refline_resampled']
172 |
173 | # testing
174 | ax_max_tires_mps2 = np.random.normal(loc=ax_max_tires_mps2, size=(refline_resampled.shape[0], 1))
175 | ay_max_tires_mps2 = np.random.normal(loc=ay_max_tires_mps2, size=(refline_resampled.shape[0], 1))
176 | # testing end
177 |
178 | # ax_max_tires_mps2 = np.asarray(ax_max_tires_mps2)
179 | # ay_max_tires_mps2 = np.asarray(ay_max_tires_mps2)
180 |
181 | save_tpamap_fromfile(filepath2output_tpamap=filepath2output_tpamap,
182 | mode_save_tpamap='acclimits',
183 | coordinates_sxy_m=refline_resampled,
184 | long_limit=ax_max_tires_mps2,
185 | lat_limit=ay_max_tires_mps2,
186 | track_name=track_name,
187 | header_info=header_custom)
188 |
--------------------------------------------------------------------------------
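
A reduced sketch of save_tpamap_fromfile with constant acceleration limits (coordinates, limits and output path are illustrative; the test block above shows the full pipeline with a resampled reference line):

    import numpy as np
    from tpa_map_functions.helperfuncs.save_tpamap import save_tpamap_fromfile

    # three illustrative reference line points [s_m, x_m, y_m]
    coordinates_sxy_m = np.array([[0.0, 0.0, 0.0],
                                  [10.0, 10.0, 0.0],
                                  [20.0, 20.0, 0.0]])

    save_tpamap_fromfile(filepath2output_tpamap='outputs/testmap_const.csv',
                         mode_save_tpamap='acclimits',
                         coordinates_sxy_m=coordinates_sxy_m,
                         long_limit=np.asarray(10.5),
                         lat_limit=np.asarray(10.0),
                         track_name='dummy track',
                         header_info={'gui_mode': 2})
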
/tpa_map_functions/helperfuncs/transform_coordinates_xy2s.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import trajectory_planning_helpers as tph
3 |
4 | """
5 | Created by: Leonhard Hermansdorfer
6 | Created on: 05.09.2020
7 | """
8 |
9 |
10 | def transform_coordinates_xy2s(coordinates_sxy_m: np.array,
11 | position_m: np.array,
12 | s_tot_m: float) -> np.array:
13 |
14 | distance_pathpoints = np.sqrt(np.diff(position_m[:, 0], 1) ** 2 + np.diff(position_m[:, 1], 1) ** 2)
15 |
16 | s_actual_m = np.zeros(position_m.shape[0])
17 |
18 | # tic_f = time.time()
19 |
20 | # match first entry of ego position on race line s-coordinate
21 | s_actual_m[0], _ = tph.path_matching_global.path_matching_global(path_cl=coordinates_sxy_m,
22 | ego_position=position_m[0, :])
23 |
24 | # TODO consider adding s_expected=self.lastcoordinate_s_m, s_range=40 (didn't increase performance)
25 | # -> test again
26 | # self.lastcoordinate_s_m = s_actual_m[0]
27 |
28 | # sum up distance
29 | s_actual_m[1:] = s_actual_m[0] + np.cumsum(distance_pathpoints)
30 |
31 | # TODO write without for loop (use < on array + matrix multiplication)
32 | # after timing, this seems to be faster than version below
33 | for index, row in enumerate(s_actual_m):
34 | if row >= s_tot_m:
35 | s_actual_m[index] -= s_tot_m * np.floor(row / s_tot_m)
36 | elif row < 0:
37 | s_actual_m[index] += s_tot_m
38 |
39 | return s_actual_m
40 |
--------------------------------------------------------------------------------
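
A minimal sketch (the closed reference line and the path points are illustrative; trajectory_planning_helpers must be installed, as it is imported above):

    import numpy as np
    from tpa_map_functions.helperfuncs.transform_coordinates_xy2s import transform_coordinates_xy2s

    # closed reference line [s_m, x_m, y_m]: a 10 m x 10 m square, total length 40 m
    coordinates_sxy_m = np.array([[0.0, 0.0, 0.0],
                                  [10.0, 10.0, 0.0],
                                  [20.0, 10.0, 10.0],
                                  [30.0, 0.0, 10.0],
                                  [40.0, 0.0, 0.0]])

    # planned path given in xy-coordinates
    position_m = np.array([[5.0, 0.0],
                           [10.0, 2.0],
                           [10.0, 8.0]])

    # returns the s-coordinate of each path point, wrapped to [0, s_tot_m)
    s_path_m = transform_coordinates_xy2s(coordinates_sxy_m=coordinates_sxy_m,
                                          position_m=position_m,
                                          s_tot_m=40.0)
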
/tpa_map_functions/interface/MapInterface.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import zmq
3 | import time
4 | import os.path
5 | import sys
6 | import ad_interface_functions
7 |
8 | path2tmf = os.path.join(os.path.abspath(__file__).split('tpa_map_functions')[0], 'tpa_map_functions')
9 | sys.path.append(path2tmf)
10 |
11 | import tpa_map_functions.helperfuncs.import_vehdyninfo
12 |
13 | """
14 | Created by: Leonhard Hermansdorfer
15 | Created on: 10.12.2019
16 | """
17 |
18 |
19 | class MapInterface:
20 | """Provides an interface to the local trajectory planner to access local acceleration limitations.
21 |
22 | The MapInterface class provides an interface between the local trajectory planning module and the tire performance
23 | assessment module to fetch and distribute local acceleration limitations used for trajectory planning.
24 | """
25 |
26 | # ------------------------------------------------------------------------------------------------------------------
27 | # Constructor ------------------------------------------------------------------------------------------------------
28 | # ------------------------------------------------------------------------------------------------------------------
29 |
30 | def __init__(self,
31 | filepath2localgg: str,
32 | zmq_opts_sub_tpa: dict = None,
33 | bool_enable_interface2tpa: bool = False,
34 | bool_enable_interpolation: bool = False,
35 | bool_enable_velocitydependence: bool = False):
36 | """Initializes MapInterface class to provide an interface for accessing the local acceleration limits and to
37 | enable communication to tpa module for online update of acc. limits.
38 |
39 | Input
40 | :param filepath2localgg: filepath to csv file where local acceleration limits are stored
41 | :type filepath2localgg: str
42 | :param zmq_opts_sub_tpa: zmq options for interface to tire performance assessment (tpa) module, defaults to None
43 | :type zmq_opts_sub_tpa: dict, optional
44 | :param bool_enable_interface2tpa: enables/disables (True/False) communication to tpa module, defaults to False
45 | :type bool_enable_interface2tpa: bool, optional
46 | :param bool_enable_interpolation: enables/disables (True/False) interpolation of acceleration limits for the
47 | actual s-position with regards to the s-coordinates of the tpa map,
48 | defaults to False
49 | :type bool_enable_interpolation: bool, optional
50 | :param bool_enable_velocitydependence: enables/disables (True/False) velocity dependent acceleration limits,
51 | defaults to False
52 | :type bool_enable_velocitydependence: bool, optional
53 |
54 | Error
55 |         :raises ValueError: if velocity dependence setting and the provided localgg data do not match
56 |         :raises ValueError: if the tpa-map shape is not compatible with an enabled interface to the tpa module
57 | :raises FileExistsError: [description]
58 | """
59 |
60 | # write input to class instance
61 | self.__zmq_opts_sub_tpa = zmq_opts_sub_tpa
62 | self.__bool_enable_interface2tpa = bool_enable_interface2tpa
63 | self.__bool_enable_interpolation = bool_enable_interpolation
64 | self.__bool_enable_velocitydependence = bool_enable_velocitydependence
65 |
66 | # flag to indicate whether or not a tpa map was received via zmq
67 | self.__bool_received_tpamap = False
68 |
69 | self.s_tot_m = 0.0
70 | self.coordinates_sxy_m = np.zeros((1, 3))
71 | self.coordinates_sxy_m_extended = np.zeros((1, 3))
72 | self.localgg_mps2 = np.zeros((1, 2))
73 |
74 | # temporary scale factor, which is updated via self.scale_acclimits_lapwise()
75 | self.__scalefactor_tmp = 1.0
76 |
77 | # contains latest received local acceleration limits
78 | self.__localgg_lastupdate = np.zeros((1, 2))
79 |
80 | # TODO integrate into global s matching
81 | self.lastcoordinate_s_m = None
82 |
83 | self.__s_egopos_m = None
84 | self.__s_ltpllookahead_m = None
85 |
86 |         # safety distances which are added/subtracted to/from current trajectory beginning/ending
87 | # add to distance of current planning horizon
88 | self.__s_lookahead_safety_m = 50.0
89 | # subtract from current vehicle position
90 | self.__s_lookback_safety_m = 50.0
91 |
92 | # variables for strategy module
93 | self.bool_isactivate_strategy = False
94 | self.bool_switchedoff_strat = False
95 |
96 | # --------------------------------------------------------------------------------------------------------------
97 | # Read Data File Containing Tire Performance Assessment Map ----------------------------------------------------
98 | # --------------------------------------------------------------------------------------------------------------
99 |
100 | tpamap, velocity_steps = tpa_map_functions.helperfuncs.import_vehdyninfo.\
101 | import_vehdyninfo(filepath2localgg=filepath2localgg)
102 |
103 | self.section_id = tpamap[:, 0]
104 |
105 | # set data mode to global variable or global constant for further processing
106 | if tpamap.shape[0] > 1:
107 | self.data_mode = 'global_variable'
108 | self.sectionid_change = np.concatenate((np.asarray([True]), np.diff(self.section_id) > 0))
109 | self.coordinates_sxy_orignal_m = tpamap[:, 1:4]
110 | self.coordinates_sxy_m = self.coordinates_sxy_orignal_m[self.sectionid_change]
111 |
112 | else:
113 | self.data_mode = 'global_constant'
114 | self.sectionid_change = np.asarray([True])
115 | self.coordinates_sxy_orignal_m = tpamap[:, 1:4]
116 | self.coordinates_sxy_m = tpamap[:, 1:4]
117 |
118 | # Check Localgg Data with Velocity Dependence ------------------------------------------------------------------
119 |
120 | if not bool(velocity_steps) and self.__bool_enable_velocitydependence:
121 | raise ValueError('TPA MapInterface: velocity dependence is enabled, but no velocity dependent acc. limits '
122 | 'are provided in inputs file!')
123 |
124 | elif bool(velocity_steps) and not self.__bool_enable_velocitydependence:
125 | raise ValueError('TPA MapInterface: velocity dependent acc. limits are provided in file, but velocity '
126 |                              'dependence is disabled!')
127 |
128 | if not bool(velocity_steps):
129 | self.__count_velocity_steps = 1
130 |
131 | else:
132 | self.__count_velocity_steps = int(len(velocity_steps))
133 |
134 | velocity_steps.insert(0, 0.0)
135 | self.velocity_steps = np.asarray(velocity_steps)
136 |
137 | # if true, add all local acc. limits including velocity dependence
138 | if self.__bool_enable_velocitydependence:
139 | self.localgg_mps2 = tpamap[self.sectionid_change, 4:]
140 |
141 | else:
142 | self.localgg_mps2 = tpamap[self.sectionid_change, 4:6]
143 |
144 | # create separate localgg array for strategy updates
145 | self.localgg_strat_mps2 = self.localgg_mps2.copy()
146 |
147 | # skip when global constant values are used
148 | if self.data_mode == 'global_variable' and not self.__bool_enable_interface2tpa:
149 | self.format_rawtpamap()
150 |
151 | # check whether tpa-map file contains only one row (const. location)
152 | # and one value for each ax and ay (const. vel)
153 | if self.__bool_enable_interface2tpa and not (self.data_mode == 'global_constant'
154 | and self.__count_velocity_steps == 1):
155 | raise ValueError('tpa-map file in inputs/veh_dyn_info has wrong shape!\n'
156 | + 'When interface to tpa module is enabled, only provide one row of data and no '
157 |                              + 'velocity-dependent acceleration limits!\n'
158 | + 'Shape of input data must be 1x6')
159 |
160 | # --------------------------------------------------------------------------------------------------------------
161 | # Initialize Communication: ZMQ --------------------------------------------------------------------------------
162 | # --------------------------------------------------------------------------------------------------------------
163 |
164 | if self.__zmq_opts_sub_tpa and self.__bool_enable_interface2tpa:
165 | self.zmq_context = zmq.Context()
166 |
167 | # Init ZMQ sockets for communication with tire performance assessment module
168 | self.sock_zmq_rcv_tpa = self.zmq_context.socket(zmq.SUB)
169 |
170 | self.sock_zmq_rcv_tpa.connect("tcp://%s:%s" % (self.__zmq_opts_sub_tpa["ip"],
171 | self.__zmq_opts_sub_tpa["port_data"]))
172 |
173 | self.sock_zmq_rcv_tpa.setsockopt_string(zmq.SUBSCRIBE, self.__zmq_opts_sub_tpa["topic"])
174 |
175 | # wait a short time until all sockets are really bound (ZMQ specific problem)
176 | time.sleep(0.5)
177 |
178 | # ------------------------------------------------------------------------------------------------------------------
179 | # Destructor -------------------------------------------------------------------------------------------------------
180 | # ------------------------------------------------------------------------------------------------------------------
181 |
182 | def __del__(self):
183 | """Clears all network related stuff.
184 | """
185 |
186 | try:
187 | self.sock_zmq_rcv_tpa.close()
188 | self.zmq_context.term()
189 |
190 | time.sleep(0.5)
191 | print("TPA MapInterface: Sockets closed!")
192 |
193 | except AttributeError:
194 | print("TPA MapInterface: closed!")
195 |
196 | # ------------------------------------------------------------------------------------------------------------------
197 | # Custom Class Methods ---------------------------------------------------------------------------------------------
198 | # ------------------------------------------------------------------------------------------------------------------
199 |
200 | def format_rawtpamap(self):
201 | """Formats s-coordinate of raw tpamap data loaded from file or received via zmq from tpa module.
202 | """
203 |
204 | self.s_tot_m = self.coordinates_sxy_m[-1, 0]
205 | self.coordinates_sxy_m_extended = np.vstack((self.coordinates_sxy_m[-2, :], self.coordinates_sxy_m))
206 | self.coordinates_sxy_m_extended[0, 0] = self.coordinates_sxy_m[-2, 0] - self.coordinates_sxy_m[-1, 0]
207 |
208 | # ------------------------------------------------------------------------------------------------------------------
209 |
210 | # @profile
211 | def get_acclim_tpainterface(self,
212 | position_m: np.array,
213 | position_mode: str,
214 | velocity_mps: np.array = np.asarray([])) -> np.array:
215 |         """Provides an interface between the local acceleration limit map and the trajectory planner
216 | 
217 |         :param position_m: contains xy- or s-coordinates of planned path for request of local acceleration limits
218 | :type position_m: np.array
219 | :param position_mode: specifies whether xy-coordinates or s-coordinates are provided via 'position_m'
220 | :type position_mode: str
221 | :param velocity_mps: contains the vehicle velocity for which the local acceleration limits should get
222 | calculated, defaults to np.asarray([])
223 | :type velocity_mps: np.array, optional
224 |
225 | :return localgg: contains longitudinal and lateral acceleration limit for every requested position
226 | :rtype: np.array
227 | """
228 |
229 | # --------------------------------------------------------------------------------------------------------------
230 | # Check function arguments for validity ------------------------------------------------------------------------
231 | # --------------------------------------------------------------------------------------------------------------
232 |
233 | if position_mode != 'emergency':
234 |
235 | if position_mode == 'xy-cosy':
236 | count_columns = 2
237 |
238 | elif position_mode == 's-cosy':
239 | count_columns = 1
240 |
241 | else:
242 |                 raise ValueError('TPA MapInterface: unknown position mode during local acceleration limit request!')
243 |
244 | # check if number of columns is valid depending on what position information is provided
245 | if position_m.ndim == 1:
246 | count_rows = 1
247 |
248 | if position_m.size != count_columns:
249 | raise ValueError('TPA MapInterface: wrong shape of position data during local gg request!')
250 |
251 | elif position_m.ndim == 2:
252 | count_rows = position_m.shape[0]
253 |
254 | if position_m.shape[1] != count_columns:
255 | raise ValueError('TPA MapInterface: wrong shape of position data during local gg request!')
256 |
257 |         # check whether velocity values are provided although velocity dependence is disabled
258 | if not self.__bool_enable_velocitydependence and velocity_mps.size != 0:
259 |
260 | if not self.__bool_enable_interface2tpa:
261 | raise ValueError('TPA MapInterface: velocity for velocity dependent acc. limits request is provided, '
262 |                                  + 'but velocity dependence is disabled!')
263 | else:
264 | print('TPA MapInterface: WARNING: velocity for velocity dependent acc. limits request is provided, '
265 | + 'but MapInterface has not received a velocity-dependent tpa-map!')
266 |
267 | if self.__bool_enable_velocitydependence and velocity_mps.size == 0:
268 | raise ValueError('TPA MapInterface: velocity dependence is enabled, but no velocity is provided for '
269 | 'request!')
270 |
271 | # TODO check if velocity array has correct shape and dimension
272 |
273 | # --------------------------------------------------------------------------------------------------------------
274 | # Handle request for emergency trajectory generation -----------------------------------------------------------
275 | # --------------------------------------------------------------------------------------------------------------
276 |
277 |         # use max. acc. limits for emergency trajectory calculation (not the reduced limits from strategy)
278 | if position_mode == 'emergency':
279 |
280 | # use min. acc. limit value for each velocity step
281 | localgg_emergency_min = np.min(self.localgg_mps2[:, :], axis=0)
282 |
283 | if self.__bool_enable_velocitydependence:
284 |
285 | version = 2
286 |
287 | if version == 1:
288 | ax = []
289 | ay = []
290 |
291 | ax_tmp = np.min(self.localgg_mps2[:, 0::2], axis=0)
292 | ay_tmp = np.min(self.localgg_mps2[:, 1::2], axis=0)
293 |
294 | for ele in velocity_mps:
295 | ax.append(np.interp(ele, self.velocity_steps[1:], ax_tmp))
296 | ay.append(np.interp(ele, self.velocity_steps[1:], ay_tmp))
297 |
298 | elif version == 2:
299 |
300 | ax, ay = self.interp_velocitysteps(x=velocity_mps,
301 | xp=np.hstack((self.velocity_steps, 150)),
302 | fp=np.concatenate((localgg_emergency_min[:2],
303 | localgg_emergency_min,
304 | localgg_emergency_min[-2:])))
305 |
306 | return np.hstack((np.asarray(ax)[:, np.newaxis], np.asarray(ay)[:, np.newaxis]))
307 |
308 | else:
309 | return np.hstack((localgg_emergency_min[0], localgg_emergency_min[1]))[np.newaxis, :]
310 |
311 | # --------------------------------------------------------------------------------------------------------------
312 | # Fetch location-dependent and -independent acceleration limits ------------------------------------------------
313 | # --------------------------------------------------------------------------------------------------------------
314 |
315 | # use tpa-map with updates from strategy if active, else use original tpa-map
316 | if self.bool_isactivate_strategy:
317 | localgg_mps2 = self.localgg_strat_mps2
318 |
319 | else:
320 | localgg_mps2 = self.localgg_mps2 * self.__scalefactor_tmp
321 |
322 | # calculate location-independent acceleration limits ('global constant') ---------------------------------------
323 | if self.data_mode == 'global_constant':
324 |
325 | if self.__bool_enable_velocitydependence:
326 | localgg_out = np.ones((count_rows, 2))
327 |
328 | ax = np.interp(velocity_mps, self.velocity_steps[1:], localgg_mps2[0][0::2])
329 | ay = np.interp(velocity_mps, self.velocity_steps[1:], localgg_mps2[0][1::2])
330 |
331 | localgg_out = np.hstack((ax, ay))
332 |
333 | else:
334 | localgg_out = np.ones((count_rows, 2)) * localgg_mps2[0]
335 |
336 | # calculate location-dependent acceleration limits ('global variable') -----------------------------------------
337 | elif self.data_mode == 'global_variable':
338 |
339 | # calculate s-coordinate when xy-coordinates are provided
340 | if position_mode == 'xy-cosy':
341 |
342 | s_actual_m = tpa_map_functions.helperfuncs.transform_coordinates_xy2s.\
343 | transform_coordinates_xy2s(coordinates_sxy_m=self.coordinates_sxy_orignal_m,
344 | position_m=position_m,
345 | s_tot_m=self.s_tot_m)
346 |
347 | else:
348 | s_actual_m = np.hstack(position_m)
349 |
350 | # save s-coordinate of first (position of vehicle) and last (current planning horizon) entry of trajectory
351 | self.__s_egopos_m = s_actual_m[0]
352 | self.__s_ltpllookahead_m = s_actual_m[-1]
353 |
354 | # if True, interpolate acceleration limits of actual s-position between given s-coordinates of map
355 | if self.__bool_enable_interpolation:
356 |
357 | # extend localgg array for interpolation
358 | localgg_extended = np.vstack((localgg_mps2[-2, :], localgg_mps2))
359 |
360 | # initialize empty local gg array containing one column for each velocity step
361 | ax_out = np.zeros((s_actual_m.shape[0], self.__count_velocity_steps * 2))
362 | idx_ax_out = 0
363 |
364 | for row in s_actual_m:
365 |
366 | idx = np.argmin(np.abs(self.coordinates_sxy_m[:, 0] - row)) + 1
367 |
368 | if (self.coordinates_sxy_m_extended[idx, 0] - row) > 0:
369 | idx -= 1
370 | if idx < 0:
371 | idx = self.coordinates_sxy_m.shape[0]
372 |
373 | grad = np.divide((row - self.coordinates_sxy_m_extended[idx, 0]),
374 | (self.coordinates_sxy_m_extended[idx + 1, 0]
375 | - self.coordinates_sxy_m_extended[idx, 0]))
376 |
377 | # Check Neighboring Cells --------------------------------------------------------------------------
378 | # check neighboring cell values to always guarantee a conservative acceleration limit
379 |
380 | # get information of neighbouring data separately for ax and ay
381 | bool_idx_isequal_idxplus = localgg_extended[idx] == localgg_extended[idx + 1]
382 | bool_idx_greater_idxplus = localgg_extended[idx] > localgg_extended[idx + 1]
383 | bool_idx_smaller_idxplus = localgg_extended[idx] < localgg_extended[idx + 1]
384 | bool_idxminus_isequal_idx = localgg_extended[idx - 1] == localgg_extended[idx]
385 | bool_idxminus_smaller_idx = localgg_extended[idx - 1] < localgg_extended[idx]
386 | bool_idxminus_greater_idx = localgg_extended[idx - 1] > localgg_extended[idx]
387 |
388 | # handle all cases where current value is greater than next value
389 | if np.any(bool_idx_greater_idxplus):
390 |
391 | # handle case where last value is smaller than current value
392 | if np.any(np.logical_and(bool_idx_greater_idxplus, bool_idxminus_smaller_idx)):
393 | ax_out[idx_ax_out, np.logical_and(bool_idx_greater_idxplus, bool_idxminus_smaller_idx)] = \
394 | (1 - grad) \
395 | * localgg_extended[idx - 1, np.logical_and(bool_idx_greater_idxplus,
396 | bool_idxminus_smaller_idx)] \
397 | + grad * localgg_extended[idx + 1, np.logical_and(bool_idx_greater_idxplus,
398 | bool_idxminus_smaller_idx)]
399 |
400 | # handle case where last value is greater than current value
401 | if np.any(np.logical_and(bool_idx_greater_idxplus, bool_idxminus_greater_idx)):
402 | ax_out[idx_ax_out, np.logical_and(bool_idx_greater_idxplus, bool_idxminus_greater_idx)] = \
403 | (1 - grad) \
404 | * localgg_extended[idx, np.logical_and(bool_idx_greater_idxplus,
405 | bool_idxminus_greater_idx)] \
406 | + grad * localgg_extended[idx + 1, np.logical_and(bool_idx_greater_idxplus,
407 | bool_idxminus_greater_idx)]
408 |
409 | # handle case where last value is equal to current value
410 | if np.any(np.logical_and(bool_idx_greater_idxplus, bool_idxminus_isequal_idx)):
411 | ax_out[idx_ax_out, np.logical_and(bool_idx_greater_idxplus, bool_idxminus_isequal_idx)] = \
412 | (1 - grad) \
413 | * localgg_extended[idx, np.logical_and(bool_idx_greater_idxplus,
414 | bool_idxminus_isequal_idx)] \
415 | + grad * localgg_extended[idx + 1, np.logical_and(bool_idx_greater_idxplus,
416 | bool_idxminus_isequal_idx)]
417 |
418 | # handle all cases where current value is smaller than next value
419 | if np.any(bool_idx_smaller_idxplus):
420 |
421 | # handle case where last value is smaller than current value
422 | if np.any(np.logical_and(bool_idx_smaller_idxplus, bool_idxminus_smaller_idx)):
423 | ax_out[idx_ax_out, np.logical_and(bool_idx_smaller_idxplus, bool_idxminus_smaller_idx)] = \
424 | (1 - grad) \
425 | * localgg_extended[idx - 1, np.logical_and(bool_idx_smaller_idxplus,
426 | bool_idxminus_smaller_idx)] \
427 | + grad * localgg_extended[idx, np.logical_and(bool_idx_smaller_idxplus,
428 | bool_idxminus_smaller_idx)]
429 |
430 | # handle case where last value is greater than current value
431 | if np.any(np.logical_and(bool_idx_smaller_idxplus, bool_idxminus_greater_idx)):
432 | ax_out[idx_ax_out, np.logical_and(bool_idx_smaller_idxplus, bool_idxminus_greater_idx)] = \
433 | localgg_extended[idx, np.logical_and(bool_idx_smaller_idxplus,
434 | bool_idxminus_greater_idx)]
435 |
436 | # handle case where last value is equal to current value
437 | if np.any(np.logical_and(bool_idx_smaller_idxplus, bool_idxminus_isequal_idx)):
438 |
439 | ax_out[idx_ax_out, np.logical_and(bool_idx_smaller_idxplus, bool_idxminus_isequal_idx)] = \
440 | localgg_extended[idx, np.logical_and(bool_idx_smaller_idxplus,
441 | bool_idxminus_isequal_idx)]
442 |
443 | # handle all cases where current value is equal to next value
444 | if np.any(bool_idx_isequal_idxplus):
445 | ax_out[idx_ax_out, bool_idx_isequal_idxplus] = localgg_extended[idx, bool_idx_isequal_idxplus]
446 |
447 | idx_ax_out += 1
448 |
449 | # time_globalpath = time.time() - tic_f
450 | # logging.debug('time to get local acceleration limits: ' + str(time_globalpath))
451 |
452 | # if False, no interpolation is used; values of the corresponding tpamap section are taken
453 | else:
454 | idx_list = []
455 |
456 | version = 2
457 |
458 | if version == 1:
459 |
460 | for row in s_actual_m:
461 | idx = np.argmin(np.abs(self.coordinates_sxy_m[:, 0] - row)) + 0
462 |
463 | if (self.coordinates_sxy_m_extended[idx + 1, 0] - row) > 0:
464 | idx -= 1
465 |
466 | if idx < 0:
467 | idx = self.coordinates_sxy_m.shape[0]
468 |
469 | idx_list.append(idx)
470 |
471 | ax_out = localgg_mps2[idx_list, :]
472 |
473 | elif version == 2:
474 | i = np.searchsorted(self.coordinates_sxy_m[:, 0], s_actual_m, side='right') - 1
475 | i[i < 0] = 0
476 |
477 | # print(np.max(np.abs(np.asarray(idx_list) - i)))
478 |
479 | ax_out = localgg_mps2[i, :]
480 |
481 | # if velocity dependence is enabled, the local acceleration limits are interpolated
482 | if self.__bool_enable_velocitydependence:
483 |
484 | # TEST
485 | # velocity_mps= np.linspace(1,97,100)[:, np.newaxis]
486 |
487 | # new version implemented due to performance issues
488 | # version 2 is 2x faster
489 | version = 2
490 |
491 | if version == 1:
492 |
493 | ax = []
494 | ay = []
495 | for i in range(ax_out.shape[0]):
496 | ax.append(np.interp(velocity_mps[i], self.velocity_steps[1:], ax_out[i, 0::2]))
497 | ay.append(np.interp(velocity_mps[i], self.velocity_steps[1:], ax_out[i, 1::2]))
498 |
499 | elif version == 2:
500 |
501 | ax, ay = self.interp_velocitysteps(x=velocity_mps,
502 | xp=np.hstack((self.velocity_steps, 150)),
503 | fp=np.concatenate((ax_out[:, :2], ax_out, ax_out[:, -2:]),
504 | axis=1))
505 |
506 | # print(np.max(np.abs(ax_new - ax)))
507 | # print(np.max(np.abs(ay_new - ay)))
508 |
509 | localgg_out = np.hstack((ax, ay))
510 |
511 | else:
512 | localgg_out = ax_out.copy()
513 |
514 | # raise error, if shape of return localgg is not equal to input trajectory; must have same length
515 | if position_m.shape[0] != localgg_out.shape[0]:
516 | raise ValueError('TPA MapInterface: number of rows of arrays for position request (input) and localgg '
517 | '(output) do not match!')
518 |
519 | return localgg_out
520 |
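A usage sketch for this method, assuming an already constructed MapInterface instance named `myInterface` (construction and map loading are not shown); the request coordinates and velocities are placeholders:

```python
import numpy as np

# request acceleration limits along a planned path given as s-coordinates
s_request_m = np.array([[50.0], [75.0], [100.0]])
localgg = myInterface.get_acclim_tpainterface(position_m=s_request_m,
                                              position_mode='s-cosy')

# same request with xy-coordinates; velocities are only needed if the loaded
# tpa-map is velocity dependent
xy_request_m = np.array([[10.0, 5.0], [20.0, 6.0]])
vel_mps = np.array([[40.0], [45.0]])
localgg_xy = myInterface.get_acclim_tpainterface(position_m=xy_request_m,
                                                 position_mode='xy-cosy',
                                                 velocity_mps=vel_mps)

# each returned row holds [ax_limit, ay_limit] for the corresponding request point
```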
521 | # ------------------------------------------------------------------------------------------------------------------
522 |
523 | def interp_velocitysteps(self, x: np.array, xp: np.array, fp: np.array):
524 | """Interpolates acceleration limits for requested velocity steps.
525 |
526 | Input
527 | :param x: velocity steps for which the acceleration limits are requested
528 | :type x: np.array
529 |         :param xp: velocity steps of the tpa-map (velocities at which acceleration limits are provided)
530 | :type xp: np.array
531 | :param fp: acceleration limits used for interpolation
532 | :type fp: np.array
533 |
534 | Output
535 | :return: interpolated acceleration limits
536 | :rtype: tuple
537 | """
538 |
539 |         # sort input velocities into the intervals between interpolation points
540 | j = np.searchsorted(xp, x) - 1
541 | j[j < 0] = 0
542 |
543 | # get interpolation factor
544 | d = (x - xp[j]) / (xp[j + 1] - xp[j])
545 |
546 | if fp.ndim == 2:
547 | fpx_temp = fp[:, 0::2]
548 | fpy_temp = fp[:, 1::2]
549 | axis_interp = 1
550 |
551 | elif fp.ndim == 1:
552 | fpx_temp = fp[0::2]
553 | fpy_temp = fp[1::2]
554 | axis_interp = 0
555 |
556 | else:
557 | raise ValueError('TPA MapInterface: dimension error of array for velocity interpolation!')
558 |
559 | # interpolate ax values
560 | ax = (1 - d) * np.take_along_axis(fpx_temp, j, axis=axis_interp) \
561 | + np.take_along_axis(fpx_temp, j + 1, axis=axis_interp) * d
562 |
563 | # interpolate ay values
564 | ay = (1 - d) * np.take_along_axis(fpy_temp, j, axis=axis_interp) \
565 | + np.take_along_axis(fpy_temp, j + 1, axis=axis_interp) * d
566 |
567 | return ax, ay
568 |
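The interpolation above is a vectorized variant of np.interp that handles the interleaved ax/ay columns in one pass. A standalone sketch of the same scheme with toy numbers (not calling the class method):

```python
import numpy as np

# toy setup: 3 velocity support points, limits given as interleaved [ax, ay] pairs
xp = np.array([0.0, 20.0, 40.0])                      # velocity steps in m/s
fp = np.array([[10.0, 9.0, 8.0, 7.5, 6.0, 5.5]])      # one map row: ax/ay at each step

x = np.array([[30.0]])                                # requested velocity (column vector)

j = np.searchsorted(xp, x) - 1                        # interval index -> 1
j[j < 0] = 0
d = (x - xp[j]) / (xp[j + 1] - xp[j])                 # interpolation factor -> 0.5

ax = (1 - d) * np.take_along_axis(fp[:, 0::2], j, axis=1) \
    + d * np.take_along_axis(fp[:, 0::2], j + 1, axis=1)
ay = (1 - d) * np.take_along_axis(fp[:, 1::2], j, axis=1) \
    + d * np.take_along_axis(fp[:, 1::2], j + 1, axis=1)

print(ax, ay)                                         # [[7.]] [[6.5]]
```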
569 | # ------------------------------------------------------------------------------------------------------------------
570 |
571 | def update(self):
572 | """Updates the MapInterface class with data from tire performance assessment module
573 |
574 |         This function receives data from the tpa module to update the local acceleration limits. It overwrites the
575 |         initial acceleration limits and handles the tpamap update, which excludes the area within the trajectory
576 |         planning horizon. This exclusion is necessary to avoid updating local acceleration limits in a way that leads
577 |         to recursive infeasibility.
578 | """
579 |
580 |         # skip update process when communication is disabled
581 | if self.__bool_enable_interface2tpa:
582 |
583 | # receive latest tire performance assessment data via ZMQ --------------------------------------------------
584 | data_tpainterface = ad_interface_functions.zmq_import.zmq_import(sock=self.sock_zmq_rcv_tpa, blocking=False)
585 |
586 | # check whether received tpa data is empty
587 | if data_tpainterface is not None:
588 |
589 |             # check whether tpamap coordinates were already received
590 | if not self.__bool_received_tpamap:
591 | self.section_id = data_tpainterface[0][:, 0]
592 | self.sectionid_change = np.concatenate((np.asarray([True]), np.diff(self.section_id) > 0))
593 |
594 | self.coordinates_sxy_orignal_m = data_tpainterface[0][:, 1:4]
595 | self.coordinates_sxy_m = self.coordinates_sxy_orignal_m[self.sectionid_change]
596 |
597 | # check if data for velocity steps is available
598 | if np.all(data_tpainterface[1]) and len(data_tpainterface[1]) > 0:
599 | self.velocity_steps = data_tpainterface[1]
600 | self.__count_velocity_steps = int(len(self.velocity_steps))
601 | self.velocity_steps = np.hstack(([0.0], self.velocity_steps))
602 | self.__bool_enable_velocitydependence = True
603 |
604 | else:
605 | self.velocity_steps = np.zeros(1)
606 | self.__count_velocity_steps = 1
607 | self.__bool_enable_velocitydependence = False
608 |
609 | self.format_rawtpamap()
610 |
611 | self.__bool_received_tpamap = True
612 |
613 | # if current data mode is global_constant and data is received, switch to global_variable
614 | if self.data_mode == 'global_constant':
615 | self.localgg_mps2 = np.full((self.coordinates_sxy_m.shape[0], self.__count_velocity_steps * 2),
616 | np.tile(self.localgg_mps2, self.__count_velocity_steps))
617 | self.data_mode = 'global_variable'
618 |
619 | self.__localgg_lastupdate = data_tpainterface[0][:, 4:][self.sectionid_change]
620 |
621 | # insert updates beyond current planning horizon of ltpl
622 | self.insert_tpa_updates(array_to_update=self.localgg_mps2,
623 | array_data=self.__localgg_lastupdate)
624 |
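The boolean mask built from np.diff above keeps only the first coordinate of each tpa-map section. A minimal sketch with made-up section ids:

```python
import numpy as np

# illustrative section ids as received per coordinate (values are made up)
section_id = np.array([1, 1, 1, 2, 2, 3, 3, 3])

# True at the first coordinate of every section
sectionid_change = np.concatenate((np.asarray([True]), np.diff(section_id) > 0))

print(sectionid_change)  # [ True False False  True False  True False False]
```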
625 | # ------------------------------------------------------------------------------------------------------------------
626 |
627 | def insert_tpa_updates(self,
628 | array_to_update: np.array,
629 | array_data: np.array):
630 | """Inserts local acceleration limits into the localgg array with respect to the planning horizon of the local
631 | trajectory planner.
632 |
633 |         :param array_to_update: localgg array which gets updated outside the current planning horizon
634 |         :type array_to_update: np.array
635 |         :param array_data: local acceleration limits to insert (e.g. the latest update from the tpa module)
636 |         :type array_data: np.array
637 |         :return: None; the updated array is stored in self.localgg_mps2
638 |         :rtype: None
639 | """
640 |
641 |         # update stored tpamap outside the planning horizon of the local trajectory planner
642 | if self.__s_egopos_m is not None and self.__s_ltpllookahead_m is not None:
643 |
644 | s_horizon_fw_m = self.__s_ltpllookahead_m + self.__s_lookahead_safety_m
645 | s_horizon_bw_m = self.__s_egopos_m - self.__s_lookback_safety_m
646 |
647 | if s_horizon_fw_m > self.s_tot_m:
648 | s_horizon_fw_m -= self.s_tot_m
649 |
650 | if s_horizon_bw_m < 0:
651 | s_horizon_bw_m += self.s_tot_m
652 |
653 | idx_start = np.argmin(np.abs(s_horizon_fw_m - self.coordinates_sxy_m[:, 0]))
654 | idx_end = np.argmin(np.abs(s_horizon_bw_m - self.coordinates_sxy_m[:, 0]))
655 |
656 |             # TODO: abort when the track is so short that the planning horizon "overtakes" the ego position
657 |
658 | if idx_start >= idx_end:
659 | array_to_update[idx_start:, :] = array_data[idx_start:, :]
660 | array_to_update[:idx_end, :] = array_data[:idx_end, :]
661 |
662 | else:
663 | array_to_update[idx_start:idx_end, :] = array_data[idx_start:idx_end, :]
664 |
665 | self.localgg_mps2 = array_to_update.copy()
666 |
667 | else:
668 | return
669 |
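An index-level sketch of the update logic above, using toy arrays: in the non-wrapped case (idx_start >= idx_end) the slice localgg[idx_end:idx_start] inside the planning horizon is protected, and both ends of the array receive the new values; all numbers are illustrative only.

```python
import numpy as np

# toy localgg array (6 sections, [ax, ay]) and a full set of new values
localgg = np.full((6, 2), 10.0)
update = np.full((6, 2), 8.0)

# indices derived from the s-horizons; here the horizon does not wrap, so
# idx_start >= idx_end and the protected window is localgg[idx_end:idx_start]
idx_start, idx_end = 4, 2

if idx_start >= idx_end:
    localgg[idx_start:, :] = update[idx_start:, :]   # update beyond the lookahead
    localgg[:idx_end, :] = update[:idx_end, :]       # update behind the lookback
else:
    localgg[idx_start:idx_end, :] = update[idx_start:idx_end, :]

print(localgg[:, 0])  # [ 8.  8. 10. 10.  8.  8.]
```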
670 | # ------------------------------------------------------------------------------------------------------------------
671 |
672 | def set_acclim_strategy(self,
673 | ax_strat_mps2: float,
674 | ay_strat_mps2: float,
675 | bool_isactivate_strategy: bool):
676 | """Updates the tpa-map with acceleration limits which are set via race control.
677 |
678 | :param ax_strat_mps2: long. acceleration limit from race control
679 | :type ax_strat_mps2: float
680 | :param ay_strat_mps2: lat. acceleration limit from race control
681 | :type ay_strat_mps2: float
682 | :param bool_isactivate_strategy: flag to indicate whether or not race control target is active
683 | :type bool_isactivate_strategy: bool
684 | """
685 |
686 | # check whether strategy limitations are active
687 | if bool_isactivate_strategy:
688 |
689 | i_rows = self.coordinates_sxy_m.shape[0]
690 | i_columns = self.__count_velocity_steps
691 |
692 | # np.tile: Construct an array by repeating A the number of times given by reps
693 | manip_localgg_mps2 = np.tile(np.hstack((np.full((i_rows, 1), ax_strat_mps2),
694 | np.full((i_rows, 1), ay_strat_mps2))), i_columns)
695 |
696 | self.localgg_strat_mps2 = self.insert_tpa_updates(array_to_update=self.localgg_strat_mps2,
697 | array_data=manip_localgg_mps2)
698 |
699 |         # detect when strategy stops sending acceleration limits
700 | if self.bool_isactivate_strategy != bool_isactivate_strategy and not bool_isactivate_strategy:
701 | self.bool_switchedoff_strat = True
702 |
703 | self.localgg_strat_mps2 = self.localgg_mps2.copy()
704 |
705 | else:
706 | self.bool_switchedoff_strat = False
707 |
708 | # avoid processing strategy input if tpa-map does not contain coordinates of the race track (-> use varloc mode)
709 | if bool_isactivate_strategy and self.localgg_mps2.shape[0] <= 1:
710 | self.bool_isactivate_strategy = False
711 |             print('TPA MapInterface: WARNING: strategy input is ignored! initial localgg map must contain variable '
712 | 'location info!')
713 |
714 | else:
715 | self.bool_isactivate_strategy = bool_isactivate_strategy
716 |
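How the race-control limits are expanded to the map shape: one [ax, ay] pair per row, repeated once per velocity step. A minimal sketch with toy sizes:

```python
import numpy as np

i_rows = 3            # number of map sections
i_columns = 2         # number of velocity steps
ax_strat_mps2 = 5.0
ay_strat_mps2 = 6.0

manip_localgg_mps2 = np.tile(np.hstack((np.full((i_rows, 1), ax_strat_mps2),
                                        np.full((i_rows, 1), ay_strat_mps2))), i_columns)

print(manip_localgg_mps2)
# [[5. 6. 5. 6.]
#  [5. 6. 5. 6.]
#  [5. 6. 5. 6.]]
```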
717 | # ------------------------------------------------------------------------------------------------------------------
718 |
719 | def scale_acclim_lapwise(self,
720 | lap_counter: int,
721 | laps_interp: list,
722 | s_current_m: float,
723 | s_total_m: float,
724 | scaling: list):
725 | """Calculates linearly interpolated scaling factor for acceleration limits.
726 |
727 |         :param lap_counter: number of current lap (counting starts at 0)
728 | :type lap_counter: int
729 | :param laps_interp: number of laps for interpolation: first entry = start lap, second entry = end lap
730 | :type laps_interp: list
731 | :param s_current_m: current position's s-coordinate
732 | :type s_current_m: float
733 | :param s_total_m: total length of racetrack
734 | :type s_total_m: float
735 | :param scaling: scale factors: first entry = start scaling, second entry = end scaling
736 | :type scaling: list
737 | """
738 |
739 | self.__scalefactor_tmp = np.interp(lap_counter * s_total_m + s_current_m,
740 | np.asarray(laps_interp) * s_total_m,
741 | scaling)
742 |
743 | # print('lap_counter: {}, s_current_m: {}, scalefactor_tmp: {}'
744 | # .format(lap_counter, s_current_m, self.__scalefactor_tmp))
745 |
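A short worked example of the lapwise scaling above, with made-up numbers: progress is measured as lap_counter * s_total_m + s_current_m and mapped linearly onto the scaling range.

```python
import numpy as np

lap_counter = 2          # third lap (counting starts at 0)
s_current_m = 1000.0
s_total_m = 4000.0
laps_interp = [1, 4]     # ramp from the start of lap 1 to the start of lap 4
scaling = [1.0, 0.5]     # from full limits down to 50 %

scalefactor = np.interp(lap_counter * s_total_m + s_current_m,
                        np.asarray(laps_interp) * s_total_m,
                        scaling)

print(scalefactor)       # ~0.792 (linear ramp from 1.0 at lap 1 to 0.5 at lap 4)
```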
746 |
747 | # ----------------------------------------------------------------------------------------------------------------------
748 | # testing --------------------------------------------------------------------------------------------------------------
749 | # ----------------------------------------------------------------------------------------------------------------------
750 | if __name__ == '__main__':
751 | pass
752 |
--------------------------------------------------------------------------------
/tpa_map_functions/interface/__init__.py:
--------------------------------------------------------------------------------
1 | import tpa_map_functions.interface.MapInterface
2 |
--------------------------------------------------------------------------------
/tpa_map_functions/visualization/__init__.py:
--------------------------------------------------------------------------------
1 | import tpa_map_functions.visualization.visualize_tpamap
2 |
--------------------------------------------------------------------------------
/tpa_map_functions/visualization/visualize_tpamap.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import os.path
3 | import sys
4 | import matplotlib.pyplot as plt
5 | from matplotlib.collections import PatchCollection
6 | from matplotlib.patches import Polygon
7 | from matplotlib.widgets import Slider
8 |
9 | # import tikzplotlib
10 |
11 | # import custom modules
12 | path2tmf = os.path.join(os.path.abspath(__file__).split('tpa_map_functions')[0], 'tpa_map_functions')
13 | sys.path.append(path2tmf)
14 |
15 | import tpa_map_functions.helperfuncs.import_vehdyninfo
16 |
17 | """
18 | Created by: Leonhard Hermansdorfer
19 | Created on: 15.11.2019
20 | """
21 |
22 |
23 | def visualize_tpamap(refline: np.array,
24 | width_right: np.array,
25 | width_left: np.array,
26 | normvec_normalized: np.array,
27 | filepath2tpamap: str = str(),
28 | tpamap: np.array = None,
29 | distance_scoord_labels: float = 400.0,
30 | fig_handle=None):
31 | """Loads and plots the acceleration limits of the tpa map into a 2d race track map.
32 |
33 |     Loads the tpamap csv-file which contains the resampled reference line and corresponding acceleration limits.
34 |     Expects the reference line and corresponding track width information of the traj_ltpl_cl csv-file as inputs.
35 |
36 | Input
37 |     :param filepath2tpamap: path to the tpamap csv-file (provide either this or 'tpamap', not both)
38 |     :type filepath2tpamap: str
39 | :param refline: reference line of the ltpl csv-file
40 | :type refline: np.array
41 |     :param width_right: track width to the right of the reference line
42 |     :type width_right: np.array
43 |     :param width_left: track width to the left of the reference line
44 | :type width_left: np.array
45 | :param normvec_normalized: normal vectors to each coordinate of reference line (necessary to calculate trackboundary
46 | coordinates)
47 | :type normvec_normalized: np.array
48 |
49 |     Error
50 |     :raises ValueError: if both or neither of 'filepath2tpamap' and 'tpamap' are provided
51 |     :raises ValueError: if the tpamap data cannot be matched to the provided reference line
56 |
57 | Output
58 | ---
59 | """
60 |
61 | # raise error, if both filepath and reference line array is provided
62 | if bool(filepath2tpamap) and tpamap is not None:
63 | print('ERROR: path to tpamap file AND tpamap array provided! Only provide one input.')
64 | raise ValueError('path to tpamap file AND tpamap array provided!')
65 |
66 | # raise error, if neither filepath nor reference line array is provided
67 | elif not filepath2tpamap and tpamap is None:
68 | print('ERROR: neither path to tpamap file nor tpamap array provided! At least provide one input.')
69 | raise ValueError('neither path to tpamap file nor tpamap array provided!')
70 |
71 | # load tpamap data from file
72 | if bool(filepath2tpamap):
73 | tpamap, vel_steps = tpa_map_functions.helperfuncs.import_vehdyninfo.\
74 | import_vehdyninfo(filepath2localgg=filepath2tpamap)
75 |
76 | section_id = tpamap[:, 0] # noqa F841
77 | tpamap = tpamap[:, 1:]
78 |
79 | else:
80 | vel_steps = []
81 |
82 | # ------------------------------------------------------------------------------------------------------------------
83 |
84 | if refline.shape[1] != 3:
85 |
86 | # calculate cumulative length of reference line
87 | s_refline_m = np.cumsum(np.sqrt(np.sum((np.square(np.diff(np.vstack((refline, refline[0]))[:, 0])),
88 | np.square(np.diff(np.vstack((refline, refline[0]))[:, 1]))), axis=0)))
89 |
90 | refline_concat = np.hstack((np.vstack((np.zeros(1), s_refline_m[:, np.newaxis])),
91 | np.vstack((refline, refline[0]))))
92 |
93 | else:
94 | refline_concat = refline.copy()
95 | refline = refline[:, 1:3]
96 |
97 | refline_closed = np.vstack((refline, refline[0:3, :]))
98 |
99 | # calculate xy-coordinates of left/right track boundaries
100 | trackboundary_right_m = refline + np.multiply(normvec_normalized, width_right[:, np.newaxis])
101 | trackboundary_left_m = refline - np.multiply(normvec_normalized, width_left[:, np.newaxis])
102 | trackboundary_right_m__closed = np.vstack((trackboundary_right_m, trackboundary_right_m[0:3]))
103 | trackboundary_left_m__closed = np.vstack((trackboundary_left_m, trackboundary_left_m[0:3]))
104 |
105 | list_points2 = list()
106 | test_refline = list()
107 | tb_right_interp_m = list()
108 | tb_left_interp_m = list()
109 |
110 | bool_isdeleted_lastentry = False
111 | idx_min = 0
112 |
113 | # match tpamap coordinates onto original reference line for plotting
114 | for row in tpamap:
115 |
116 | idx_min_last = idx_min
117 |
118 | idx_min = np.argmin(np.sqrt(np.sum((
119 | np.square(row[1] - refline_concat[:, 1]),
120 | np.square(row[2] - refline_concat[:, 2])), axis=0)))
121 |
122 | if idx_min_last > 0 and idx_min == 0:
123 | idx_min = len(refline_concat) - 1
124 |
125 | if not(idx_min == idx_min_last):
126 |
127 | if bool_isdeleted_lastentry:
128 | i_add_start = 0
129 | else:
130 | i_add_start = 1
131 |
132 | for i_count in range(idx_min_last + i_add_start, idx_min + 1):
133 | test_refline.append(refline_concat[i_count, 1:])
134 | tb_right_interp_m.append(trackboundary_right_m__closed[i_count])
135 | tb_left_interp_m.append(trackboundary_left_m__closed[i_count])
136 |
137 | bool_isdeleted_lastentry = False
138 |
139 | # interpolate reference line and track boundaries
140 | if not(refline_concat[idx_min, 0] == row[0]):
141 |
142 | # calculate distance to coordinate before and after actual nearest coordinate
143 |                 # to determine between which coordinates to interpolate
144 | dist_bw = np.sqrt(np.sum((np.square(row[1] - refline_closed[idx_min - 1, 0]),
145 | np.square(row[2] - refline_closed[idx_min - 1, 1]))))
146 | dist_fw = np.sqrt(np.sum((np.square(row[1] - refline_closed[idx_min + 1, 0]),
147 | np.square(row[2] - refline_closed[idx_min + 1, 1]))))
148 |
149 | if dist_bw > dist_fw:
150 | idx_add_start = 0
151 | idx_add_end = 2
152 |
153 | elif dist_bw < dist_fw:
154 | idx_add_start = -1
155 | idx_add_end = 1
156 |
157 | del test_refline[-1]
158 | del tb_right_interp_m[-1]
159 | del tb_left_interp_m[-1]
160 |
161 | bool_isdeleted_lastentry = True
162 |
163 |                 # raise an error when the location of the coordinate cannot be identified with this method
164 |                 # TODO: avoid situation which would lead to this error
165 | else:
166 | raise ValueError()
167 |
168 | dist = np.sqrt(np.sum((np.square(row[1] - refline_closed[idx_min + idx_add_start, 0]),
169 | np.square(row[2] - refline_closed[idx_min + idx_add_start, 1]))))
170 |
171 | x_vals = np.hstack((np.zeros(1), np.sqrt(np.sum((
172 | np.square(refline_closed[idx_min + idx_add_end - 1, 0]
173 | - refline_closed[idx_min + idx_add_start, 0]),
174 | np.square(refline_closed[idx_min + idx_add_end - 1, 1]
175 | - refline_closed[idx_min + idx_add_start, 1]))))))
176 |
177 | test_refline.append(np.hstack((
178 | np.interp(dist, x_vals, refline_closed[idx_min + idx_add_start:idx_min + idx_add_end, 0]),
179 | np.interp(dist, x_vals, refline_closed[idx_min + idx_add_start:idx_min + idx_add_end, 1]))))
180 |
181 | tb_right_interp_m.append(np.hstack((
182 | np.interp(dist, x_vals,
183 | trackboundary_right_m__closed[idx_min + idx_add_start:idx_min + idx_add_end, 0]),
184 | np.interp(dist, x_vals,
185 | trackboundary_right_m__closed[idx_min + idx_add_start:idx_min + idx_add_end, 1]))))
186 |
187 | tb_left_interp_m.append(np.hstack((
188 | np.interp(dist, x_vals,
189 | trackboundary_left_m__closed[idx_min + idx_add_start:idx_min + idx_add_end, 0]),
190 | np.interp(dist, x_vals,
191 | trackboundary_left_m__closed[idx_min + idx_add_start:idx_min + idx_add_end, 1]))))
192 |
193 | else:
194 | test_refline.append(refline[idx_min, :])
195 | tb_right_interp_m.append(trackboundary_right_m__closed[idx_min, :])
196 | tb_left_interp_m.append(trackboundary_left_m__closed[idx_min, :])
197 |
198 | list_points2.append(len(tb_left_interp_m) - 1)
199 |
200 | # check whether last entry is identical to first entry; if not, values of first entry are copied to last
201 | dist = np.sqrt(np.sum((np.square(test_refline[-1][0] - test_refline[0][0]),
202 | np.square(test_refline[-1][1] - test_refline[0][1]))))
203 |
204 | if dist < 0.01:
205 | if np.sqrt(np.sum((np.square(test_refline[-2][0] - test_refline[-1][0]),
206 | np.square(test_refline[-2][1] - test_refline[-1][1])))) < 0.01:
207 | del test_refline[-1]
208 | del tb_right_interp_m[-1]
209 | del tb_left_interp_m[-1]
210 |
211 | test_refline[-1] = test_refline[0]
212 | tb_right_interp_m[-1] = tb_right_interp_m[0]
213 | tb_left_interp_m[-1] = tb_left_interp_m[0]
214 |
215 | # transform lists to numpy arrays
216 | test_refline = np.array(test_refline)
217 | tb_right_interp_m = np.array(tb_right_interp_m)
218 | tb_left_interp_m = np.array(tb_left_interp_m)
219 |
220 | # testing ----------------------------------------------------------------------------------------------------------
221 | # calculate distance between interpolated reference line coordinates
222 | # to check whether the coordinates are equally spaced
223 |
224 | s_test = np.cumsum(np.sqrt(np.sum((np.square(np.diff(np.vstack((test_refline, test_refline[0]))[:, 0])),
225 | np.square(np.diff(np.vstack((test_refline, test_refline[0]))[:, 1]))), axis=0)))
226 |
227 | s_test = np.vstack((np.zeros(1), s_test[:, np.newaxis]))
228 |
229 | list_testing = []
230 |
231 | for i_counter in range(len(list_points2) - 1):
232 | list_testing.append(s_test[(list_points2[i_counter + 1])] - s_test[list_points2[i_counter]])
233 |
234 | list_testing = np.array(list_testing)
235 |
236 | # testing end ------------------------------------------------------------------------------------------------------
237 |
238 | # create list containing each polygon to plot;
239 | # polygons consist of the points of the respective left and right track boundary sections
240 |
241 | patches = []
242 |
243 | for i_row in range(len(list_points2) - 1):
244 |
245 | points = np.vstack((
246 | tb_right_interp_m[list_points2[i_row]:(list_points2[i_row + 1] + 1), :],
247 | np.flipud(tb_left_interp_m[list_points2[i_row]:(list_points2[i_row + 1] + 1), :])))
248 |
249 | polygon = Polygon(points, closed=True)
250 | patches.append(polygon)
251 |
252 | # raise error when number of polygons to plot and corresponding data for coloring the polygons are not equal
253 | if not(len(patches) == len(tpamap) - 1):
254 | raise ValueError('tpamap visualization - data mismatch')
255 |
256 | # define plot functions --------------------------------------------------------------------------------------------
257 |
258 | def plot_ax1(acomb_mps2: np.array,
259 | bool_is_firstcall: bool = False,
260 |                  xlim: list = [],
261 |                  ylim: list = []):
262 |
263 | # plot track boundaries
264 | ax1.plot(trackboundary_right_m[:, 0], trackboundary_right_m[:, 1], 'k')
265 | ax1.plot(trackboundary_left_m[:, 0], trackboundary_left_m[:, 1], 'k')
266 |
267 | # plot reference line
268 | # ax1.plot(tpamap[:, 1], tpamap[:, 2], 'k', linestyle='None', marker=',', label='reference line - interp')
269 | ax1.plot(refline[:, 0], refline[:, 1], 'r', label='reference line')
270 |
271 | # plot s-coordinate labels
272 | plotting_distance_m = np.arange(0, refline_concat[-1, 0], distance_scoord_labels)
273 |
274 | for int_counter, ele in enumerate(plotting_distance_m):
275 | idx = (np.abs(refline_concat[:, 0] - ele)).argmin()
276 |
277 | ax1.plot(refline_concat[idx, 1], refline_concat[idx, 2], 'bo')
278 | ax1.annotate('s=' + str(plotting_distance_m.tolist()[int_counter]) + ' m',
279 | (refline_concat[idx, 1], refline_concat[idx, 2]),
280 | xytext=(0, 30), textcoords='offset points', ha='center', va='bottom', color='blue',
281 | bbox=dict(boxstyle='round,pad=0.2', fc='yellow', alpha=0.8),
282 | arrowprops=dict(arrowstyle='->', color='b'))
283 |
284 | # plot areas with colors
285 | collection = PatchCollection(patches, cmap=plt.set_cmap('viridis'))
286 | collection.set_array(acomb_mps2)
287 | ax1.add_collection(collection)
288 | collection.set_clim(y_limits)
289 |
290 | list_cblabels = np.arange(0, y_limits[1] * 1.01, 1.0).round(2).tolist()
291 |
292 | if bool_is_firstcall:
293 | cbar = plt.colorbar(collection, ax=ax1)
294 |
295 | cbar.set_ticks(list_cblabels)
296 | b_list = []
297 |
298 | for ele in list_cblabels:
299 | b_list.append(str(ele))
300 |
301 | cbar.set_ticklabels(b_list)
302 | cbar.set_label('vehicle acc. limit in m/s^2')
303 |
304 | ax1.legend()
305 | ax1.set_title('tire performance map')
306 | ax1.set_xlabel('x in meters')
307 | ax1.set_ylabel('y in meters')
308 | ax1.axis('equal')
309 |
310 | if xlim and ylim:
311 | ax1.set_xlim(xlim)
312 | ax1.set_ylim(ylim)
313 |
314 | # -----------------------------------------------------
315 |
316 | def plot_ax2(no_vel: list):
317 |
318 | ax2.step(tpamap[:, 0], tpamap[:, 3 + (no_vel - 1) * 2], where='post', label='long. acc.')
319 | ax2.step(tpamap[:, 0], tpamap[:, 4 + (no_vel - 1) * 2], where='post', label='lat. acc.')
320 |
321 | ax2.grid()
322 | ax2.legend()
323 | ax2.set_xlabel('track position in meters')
324 | ax2.set_ylabel('long./lat. acc. in m/s^2')
325 | ax2.set_xlim((tpamap[0, 0], tpamap[-1, 0]))
326 | ax2.set_ylim((y_limits))
327 |
328 | # plot figure ------------------------------------------------------------------------------------------------------
329 | bool_add_subplot = True
330 | bool_add_slider = True
331 |
332 | if len(vel_steps) == 0:
333 | bool_add_slider = False
334 |
335 | # last value equals first entry, therefore discard
336 | acomb_mps2 = np.divide(np.sum(tpamap[:-1, 3:5], axis=1), 2)
337 |
338 |     # set y-limits of colorbar depending on available data
339 | if bool_add_subplot and bool_add_slider:
340 | y_limits = [max(np.min(tpamap[:, 3:]) - 2, 0), np.max(tpamap[:, 3:]) + 2]
341 | else:
342 | y_limits = [max(np.min(tpamap[:, 3:5]) - 2, 0), np.max(tpamap[:, 3:5]) + 2]
343 |
344 | # use existing figure when provided (e.g. when GUI is running)
345 | if fig_handle is None:
346 | fig = plt.figure(figsize=(14.5, 8))
347 | else:
348 | fig = fig_handle
349 |
350 | # create subplots if activated
351 | if bool_add_subplot:
352 | ax1 = plt.subplot(3, 1, (1, 2))
353 | plt.subplots_adjust(left=0.1, bottom=0.1, right=0.90, top=0.9, wspace=None, hspace=0.3)
354 | else:
355 | ax1 = plt.subplot()
356 |
357 | plot_ax1(acomb_mps2=acomb_mps2, bool_is_firstcall=True)
358 |
359 | # tikz specific settings
360 | # plt.draw()
361 | # fig_mainplot.canvas.draw()
362 | # fig_mainplot.canvas.flush_events()
363 | # tikzplotlib.save('tpa.tex')
364 |
365 | if bool_add_subplot:
366 | ax2 = plt.subplot(3, 1, 3)
367 |
368 | plot_ax2(no_vel=1)
369 |
370 | if bool_add_slider:
371 |
372 | plt.subplots_adjust(bottom=0.15)
373 | axcolor = 'lightgoldenrodyellow'
374 | axvel = plt.axes([0.1, 0.05, 0.65, 0.03], facecolor=axcolor)
375 | svel = Slider(axvel, 'velocity_step', valmin=1, valmax=len(vel_steps), valinit=1, valstep=1)
376 |
377 | def update_plot(val):
378 | no_vel = svel.val
379 | print("plot data for " + str(vel_steps[no_vel - 1]) + " mps")
380 |
381 | ax1_xlim = ax1.get_xlim()
382 | ax1_ylim = ax1.get_ylim()
383 |
384 | ax1.clear()
385 | acomb_mps2 = np.divide(np.sum(tpamap[:-1, (3 + (no_vel - 1) * 2):(5 + (no_vel - 1) * 2)], axis=1), 2)
386 | plot_ax1(acomb_mps2=acomb_mps2, xlim=list(ax1_xlim), ylim=list(ax1_ylim))
387 |
388 | ax2.clear()
389 | plot_ax2(no_vel=no_vel)
390 |
391 | fig.canvas.draw_idle()
392 |
393 | svel.on_changed(update_plot)
394 |
395 | plt.show()
396 |
397 |
398 | # ----------------------------------------------------------------------------------------------------------------------
399 | # testing --------------------------------------------------------------------------------------------------------------
400 | # ----------------------------------------------------------------------------------------------------------------------
401 | if __name__ == '__main__':
402 | pass
403 |
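A usage sketch for visualize_tpamap, assuming the reference line, track widths and normal vectors have already been obtained (e.g. via preprocess_ltplrefline) and a matching tpamap csv-file exists; all file paths below are placeholders:

```python
import numpy as np

# hypothetical, already-prepared track data (shapes: refline (N, 2), widths (N,),
# normvec_normalized (N, 2)); the .npy paths are placeholders
refline = np.load('refline.npy')
width_right = np.load('width_right.npy')
width_left = np.load('width_left.npy')
normvec_normalized = np.load('normvec.npy')

visualize_tpamap(refline=refline,
                 width_right=width_right,
                 width_left=width_left,
                 normvec_normalized=normvec_normalized,
                 filepath2tpamap='path/to/tpamap.csv')
```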
--------------------------------------------------------------------------------
/tpa_map_gui/Readme.md:
--------------------------------------------------------------------------------
1 | # Generation of tpa maps via GUI
2 |
3 | ## Overview
4 | This Readme explains how to create tpa maps which provide variable friction data
5 | for simulation and acceleration limits for trajectory planning.
6 |
7 | ## Setup
8 | 1. Create and activate a virtual environment.
9 | 2. Install the requirements listed in `requirements.txt`.
10 |
11 | ## How to generate tpa maps
12 | 1. Open the main script `main_tpaGUI.py` and adjust the variables within the section `User Input`. Documentation is provided within the script.
13 | 2. Run the python script `main_tpaGUI.py` to open the GUI.
14 | * Insert desired local scaling factors (gui_mode = 1) or long./lat. acceleration values (gui_mode = 2) or set to random or smooth.
15 | * Optionally insert an appendix which is added to the name of the tpa map
16 | that you want to generate.
17 | * Click `save map` to store the tpa map into the `/outputs` folder.
18 | 3. Repeat step 2 until all desired tpa maps are generated.
19 |
20 | The next steps will create `tpamap_tum_mcs.csv` which holds all information on a variable friction scenario that is processed by simulations in mod_control or sim_vehicle_dynamics.
21 |
22 | 4. Open the file `tpa_map_functions/helperfuncs/tpamap_concatenate.py`
23 | * Insert the names of the previously created tpa maps which should be part of a variable friction scenario simulated later in a mod_control or sim_vehicle_dynamics simulation (max 10 maps).
24 |    * Enter the simulation times in the `time_interpsteps` array which are related to 100% activation of the corresponding tpa map (e.g. `[0 10 20]` if map 2 is 100% active at 10 s and map 3 at 20 s).
25 |    * Set `bool_enable_tpamaps` to `True`.
26 | 5. Run the python script `tpamap_concatenate.py`.
27 |
28 | The generated `tpamap_tum_mcs.csv` file is located in `/outputs`.
29 |
30 | ## tpa map structure
31 | The generated tpa map `tpamap_tum_mcs.csv` has the size [2502 x 23].
32 | * `bool_enable_tpamaps` is a scalar located in row 1, column 1
33 | * `time_interpsteps` is located in row 2, columns 1 to 10
34 | * the actual tpa map, size [2500 x 23], is saved in rows 3 to 2502 and columns 1 to 23. Column 1 holds s_m, column 2 holds x_m, column 3 holds y_m. Columns 4 and upwards hold the local scaling factors from the concatenated tpa maps (a minimal loading sketch is shown below).
35 |
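A minimal sketch for loading and splitting the generated file with numpy, assuming the layout above and a comma-separated file (adjust the delimiter if the export uses a different separator):

```python
import numpy as np

data = np.genfromtxt('outputs/tpamap_tum_mcs.csv', delimiter=',')

bool_enable_tpamaps = bool(data[0, 0])   # row 1, column 1
time_interpsteps = data[1, 0:10]         # row 2, columns 1 to 10
tpamap = data[2:, :]                     # rows 3 to 2502: s_m, x_m, y_m, scaling factors
```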
36 | ## Process pipeline
37 | Further information on how to integrate the generated tpa map is provided in `sim_vehicle_dynamics\vehicle_environment\variable_friction\Readme.md`.
38 |
39 | Author: [Dominik Staerk](mailto:dominik.staerk@tum.de)
40 |
41 | Contact person: [Leonhard Hermansdorfer](mailto:leo.hermansdorfer@tum.de)
42 |
--------------------------------------------------------------------------------
/tpa_map_gui/main_tpaGUI.py:
--------------------------------------------------------------------------------
1 | import os.path
2 | import sys
3 | import tkinter as tk
4 |
5 | # import custom modules
6 | path2tmf = os.path.join(os.path.abspath(__file__).split('tpa_map_functions')[0], 'tpa_map_functions')
7 | sys.path.append(path2tmf)
8 |
9 | import tpa_map_functions as tmf
10 | import tpa_map_gui.src.build_GUI as build_GUI
11 |
12 |
13 | # User Input -----------------------------------------------------------------------------------------------------------
14 |
15 | # track name (located in /inputs/traj_ltpl_cl/traj_ltpl_cl_[track name].csv)
16 | name_refline = "berlin"
17 |
18 | # mode for reference line resampling - options "const_steps", "var_steps"
19 | mode_resample_refline = 'var_steps'
20 |
21 | # if "const_steps": stepsize_resample_m - desired stepsize for tpa-map resolution
22 | stepsize_resample_m = 25
23 |
24 | # if "var_steps":
25 | # - section_length_min_m: min. section length
26 | # - section_length_max_m: max. section length
27 | section_length_min_m = 15
28 | section_length_max_m = 200
29 |
30 | # gui_mode
31 | # 1: mode to customize local tire scaling factors
32 | # 2: mode to customize local acceleration limits
33 | gui_mode = 2
34 |
35 | # optional: necessary for initialization and for randomizing
36 | dict_settings = {"mean_lsc": 1.0, # mean of the random created local scaling factors
37 | "mean_acc": 12.0, # mean of the random created acceleration limits
38 | "amplitude_lsc": 0.5, # amplitude of the random created local scaling factors around the mean
39 | "amplitude_acc": 1.0} # amplitude of the random created acceleration limits around the mean
40 |
41 | # Manage paths ---------------------------------------------------------------------------------------------------------
42 |
43 | filepath2ltpl_refline = os.path.join(path2tmf, 'inputs', 'traj_ltpl_cl', 'traj_ltpl_cl_' + name_refline + '.csv')
44 |
45 | filepath2output_tpamap = os.path.join(path2tmf, 'outputs', 'tpamap_' + name_refline)
46 |
47 | # Load reference line --------------------------------------------------------------------------------------------------
48 |
49 | refline_dict = tmf.helperfuncs.preprocess_ltplrefline.\
50 | preprocess_ltplrefline(filepath2ltpl_refline=filepath2ltpl_refline,
51 | mode_resample_refline=mode_resample_refline,
52 | stepsize_resample_m=stepsize_resample_m,
53 | section_length_limits_m=[section_length_min_m, section_length_max_m])
54 |
55 | # Set up GUI -----------------------------------------------------------------------------------------------------------
56 |
57 | tk_root = tk.Tk()
58 | tk_root.title("Settings for local gg-scaling")
59 | tk_root.geometry('%dx%d+%d+%d' % (550, 450, 10, 10))
60 |
61 | manager = build_GUI.Manager(master=tk_root,
62 | refline_dict=refline_dict,
63 | refline_resampled=refline_dict['refline_resampled']['refline_resampled'],
64 | bool_closedtrack=refline_dict['bool_closedtrack'],
65 | filepath2output_tpamap=filepath2output_tpamap,
66 | gui_mode=gui_mode,
67 | csv_filename=name_refline,
68 | default=dict_settings)
69 |
70 | tk_root.mainloop()
71 |
--------------------------------------------------------------------------------
/tpa_map_gui/src/__init__.py:
--------------------------------------------------------------------------------
1 | import src.build_GUI
2 |
--------------------------------------------------------------------------------
/tpa_map_gui/src/build_GUI.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import sys
3 | import os.path
4 | import math
5 | import logging
6 | from scipy.interpolate import interp1d # noqa F401
7 | import matplotlib.pyplot as plt
8 | import tkinter as tk
9 |
10 |
11 | # import custom modules
12 | path2tmf = os.path.join(os.path.abspath(__file__).split('tpa_map_functions')[0], 'tpa_map_functions')
13 | sys.path.append(path2tmf)
14 |
15 | import tpa_map_functions as tmf
16 |
17 | """
18 | Created by: Dominik Staerk
19 | Created on: 04.11.2020
20 | """
21 |
22 | logging.basicConfig(format='%(levelname)s:IN--%(funcName)s--: %(message)s', level=logging.WARNING)
23 |
24 |
25 | class Manager(tk.Canvas):
26 | def __init__(self,
27 |                  refline_dict: dict,
28 | refline_resampled: np.array,
29 | bool_closedtrack: bool,
30 | filepath2output_tpamap: str,
31 | gui_mode: int,
32 | csv_filename: str,
33 | default: dict = dict(),
34 | master=None,
35 | **kwargs):
36 | """
37 | Documentation: user-interface for setting local gg-scale factors manually
38 |
39 | Input
40 | :param master:
41 | :param kwargs:
42 | """
43 |
44 | if refline_dict['refline_resampled']['section_id'][0, 0] > 1:
45 | self.refline_resampled = refline_resampled[refline_dict['refline_resampled']['sectionid_change']]
46 | self.__mode_stepsize = 'var_steps'
47 |
48 | else:
49 | self.refline_resampled = refline_resampled
50 | self.__mode_stepsize = 'const_steps'
51 |
52 | if gui_mode == 1 and self.__mode_stepsize == 'var_steps':
53 | plt.close('all')
54 | raise ValueError('When GUI is operated with friction coeff., only const. step size is allowed!')
55 |
56 | self.__section_id = refline_dict['refline_resampled']['section_id']
57 |
58 | self.refline_resampled_org = refline_resampled
59 | self.refline_dict = refline_dict
60 | self.refline_original = refline_dict['refline']
61 | self.bool_closedtrack = bool_closedtrack
62 |
63 | self.filepath2output_tpamap = filepath2output_tpamap
64 | self.track_name = str(csv_filename)
65 | self.gui_mode = int(gui_mode)
66 | self.default = default
67 | self.mean_lsc = default['mean_lsc']
68 | self.mean_acc = default['mean_acc']
69 | self.amplitude_lsc = default['amplitude_lsc']
70 | self.amplitude_acc = default['amplitude_acc']
71 |
72 | tk.Canvas.__init__(self, master, **kwargs)
73 |
74 | # set initial number of rows and columns -----------------------------------------------------------------------
75 | self.int_number_rows = 8
76 |
77 | if self.gui_mode == 1 or self.gui_mode == 2:
78 | self.int_number_columns = 4
79 | else:
80 | print('Error: invalid GUI mode selected, check settings.ini\nreset to default GUI mode 1')
81 | self.gui_mode = 1
82 |
83 | self.row_counter = 0
84 | self.entry_counter = 0
85 | self.entries = {}
86 | self.fig_handle = None
87 |
88 | # set up main gui ----------------------------------------------------------------------------------------------
89 |
90 | self.frame_introtext = tk.Frame(master)
91 | self.frame_introtext.grid(row=0, columnspan=1)
92 |
93 | text_intro = 'abcabc\nxyz\ntesttest'
94 | tk.Label(self.frame_introtext, text=text_intro)
95 | # # msg.config(font=('times', 24))
96 |
97 | self.frame_scrollbarlabels = tk.Frame(master)
98 | self.frame_scrollbarlabels.grid(row=2)
99 |
100 | if self.gui_mode == 2:
101 | tk.Label(self.frame_scrollbarlabels, text="start of section\nin meters", relief='groove', width=15) \
102 | .pack(side="left") # .grid(row=0, column=0, ipadx=5, ipady=10)
103 | tk.Label(self.frame_scrollbarlabels, text="end of section\nin meters", relief='groove', width=15) \
104 | .pack(side="left") # .grid(row=0, column=1, ipadx=5, ipady=10)
105 | tk.Label(self.frame_scrollbarlabels, text="ax limits", relief='groove', width=15) \
106 | .pack(side="left") # .grid(row=0, column=2, ipadx=5, ipady=10)
107 | tk.Label(self.frame_scrollbarlabels, text="ay limits", relief='groove', width=15) \
108 | .pack(side="left") # .grid(row=0, column=2, ipadx=5, ipady=10)
109 | else:
110 | tk.Label(self.frame_scrollbarlabels, text="start of section\nin meters", relief='groove', width=15) \
111 | .pack(side="left") # .grid(row=0, column=0, ipadx=5, ipady=10)
112 | tk.Label(self.frame_scrollbarlabels, text="end of section\nin meters", relief='groove', width=15) \
113 | .pack(side="left") # .grid(row=0, column=1, ipadx=5, ipady=10)
114 | tk.Label(self.frame_scrollbarlabels, text="local scaling\nlong", relief='groove', width=15) \
115 | .pack(side="left") # .grid(row=0, column=2, ipadx=5, ipady=10)
116 | tk.Label(self.frame_scrollbarlabels, text="local scaling\nlat", relief='groove', width=15) \
117 | .pack(side="left") # .grid(row=0, column=2, ipadx=5, ipady=10)
118 | # self.frame_scrollbarlabels.columnconfigure(0, weight=1)
119 | # self.frame_scrollbarlabels.columnconfigure(1, weight=1)
120 | # self.frame_scrollbarlabels.columnconfigure(2, weight=1)
121 |
122 | self.container = tk.Frame(master)
123 | self.container.grid(row=3, column=0, sticky='nsew')
124 |
125 | self.canvas = tk.Canvas(self.container, width=500, height=250)
126 | self.scrollbar = tk.Scrollbar(self.container, orient='vertical', command=self.canvas.yview)
127 | self.canvas.config(yscrollcommand=self.scrollbar.set)
128 | self.canvas.grid(row=0, column=0, sticky='nsew')
129 | self.scrollbar.grid(row=0, column=1, sticky='nse')
130 |
131 | # self.container.bind('')
132 |
133 | for int_counter in range(self.int_number_rows):
134 | self.addblock()
135 |
136 | self.frame_buttons = tk.Frame(master)
137 | self.frame_buttons.grid(row=4)
138 |
139 | b1 = tk.Button(self.frame_buttons, text='add new entry', command=self.addblock)
140 | b1.pack(side='left', padx=5, pady=5)
141 | b2 = tk.Button(self.frame_buttons, text='update map', command=self.update_figure)
142 | b2.pack(side='left', padx=5, pady=5)
143 | b3 = tk.Button(self.frame_buttons, text="reset map", command=self.reset_figure)
144 | b3.pack(side='left', padx=5, pady=5)
145 | b5 = tk.Button(self.frame_buttons, text='quit', command=self.quit_job)
146 | b5.pack(side='left', padx=5, pady=5)
147 |
148 | self.mode_buttons = tk.Frame(master)
149 | self.mode_buttons.grid(row=5)
150 |
151 | b6 = tk.Button(self.mode_buttons, text='GUI mode 1', command=self.gui_mode_select_1)
152 | b6.pack(side='left', padx=5, pady=5)
153 | b7 = tk.Button(self.mode_buttons, text='GUI mode 2', command=self.gui_mode_select_2)
154 | b7.pack(side='left', padx=5, pady=5)
155 | b8 = tk.Button(self.mode_buttons, text='randomize map', command=self.randomize)
156 | b8.pack(side='left', padx=5, pady=5)
157 | b8 = tk.Button(self.mode_buttons, text='smooth random map', command=self.smooth)
158 | b8.pack(side='left', padx=5, pady=5)
159 |
160 | self.frame_usermessage = tk.Frame(master)
161 | self.frame_usermessage.grid(row=6)
162 | self.frame_usermessage.columnconfigure(0, weight=2)
163 | self.frame_usermessage.columnconfigure(1, weight=1)
164 |
165 | tk.Label(self.frame_usermessage, text='message: ', relief='groove', width=20).grid(row=0, column=0)
166 | self.label_usermsg = tk.Label(self.frame_usermessage, text='---').grid(row=0, column=1)
167 |
168 | self.name_prependix = tk.Frame(master)
169 | self.name_prependix.grid(row=7)
170 |
171 | tk.Label(self.name_prependix,
172 | text='enter tpamap_name-pendix (optional): ', relief='groove', width=30).pack(side="left")
173 | e1 = tk.Entry(self.name_prependix, width=10)
174 | self.e1 = e1
175 | e1.pack(side='left', padx=5, pady=5)
176 | b4 = tk.Button(self.name_prependix, text="save map", command=self.save_map)
177 | b4.pack(side='left', padx=5, pady=5)
178 |
179 |         # initialize tpa map with random values
180 | self.randomize()
181 |
182 | # ------------------------------------------------------------------------------------------------------------------
183 | # Class functions --------------------------------------------------------------------------------------------------
184 | # ------------------------------------------------------------------------------------------------------------------
185 | def gui_mode_select_1(self):
186 | self.__init__(refline_dict=self.refline_dict,
187 | refline_resampled=self.refline_resampled_org,
188 | bool_closedtrack=self.bool_closedtrack,
189 | filepath2output_tpamap=self.filepath2output_tpamap,
190 | gui_mode=1,
191 | csv_filename=self.track_name,
192 | default=self.default)
193 |
194 | # ------------------------------------------------------------------------------------------------------------------
195 |
196 | def gui_mode_select_2(self):
197 | self.__init__(refline_dict=self.refline_dict,
198 | refline_resampled=self.refline_resampled_org,
199 | bool_closedtrack=self.bool_closedtrack,
200 | filepath2output_tpamap=self.filepath2output_tpamap,
201 | gui_mode=2,
202 | csv_filename=self.track_name,
203 | default=self.default)
204 |
205 | # ------------------------------------------------------------------------------------------------------------------
206 |
207 | def randomize(self):
208 |
209 | if self.gui_mode == 2:
210 | self.ax = np.vstack(self.mean_acc
211 | - self.amplitude_acc
212 | + 2 * self.amplitude_acc * np.random.sample(len(self.refline_resampled)))
213 |
214 | self.ay = np.vstack(self.mean_acc
215 | - self.amplitude_acc
216 | + 2 * self.amplitude_acc * np.random.sample(len(self.refline_resampled)))
217 |
218 | self.local_scaling = np.vstack(self.mean_acc
219 | - self.amplitude_acc
220 | + 2 * self.amplitude_acc * np.random.sample(len(self.refline_resampled)))
221 |
222 | else:
223 | self.local_scaling_long = np.vstack(self.mean_lsc
224 | - self.amplitude_lsc
225 | + 2 * self.amplitude_lsc
226 | * np.random.sample(len(self.refline_resampled)))
227 |
228 | self.local_scaling_lat = np.vstack(self.mean_lsc
229 | - self.amplitude_lsc
230 | + 2 * self.amplitude_lsc * np.random.sample(len(self.refline_resampled)))
231 |
232 | self.local_scaling = np.vstack(self.mean_lsc
233 | - self.amplitude_lsc
234 | + 2 * self.amplitude_lsc * np.random.sample(len(self.refline_resampled)))
235 |
236 | if self.gui_mode == 1:
237 | self.refline_initial = np.hstack([self.refline_resampled,
238 | self.local_scaling,
239 | self.local_scaling_long,
240 | self.local_scaling_lat])
241 | elif self.gui_mode == 2:
242 | self.refline_initial = np.hstack([self.refline_resampled,
243 | self.local_scaling,
244 | self.ax,
245 | self.ay])
246 |
247 | self.update_figure()
248 |
249 | # ------------------------------------------------------------------------------------------------------------------
250 |
251 | def smooth(self):
252 | random = np.random.rand() * 2 * math.pi
253 |
254 | for row in range(len(self.refline_resampled)):
255 | if self.gui_mode == 2:
256 | self.ax[row] = self.mean_acc + self.amplitude_acc * math.sin(
257 | (row / len(self.refline_resampled)) * 4 * math.pi + random)
258 | self.ay[row] = self.mean_acc + self.amplitude_acc * math.sin(
259 | (row / len(self.refline_resampled)) * 4 * math.pi + random)
260 | self.local_scaling[row] = self.mean_acc + self.amplitude_acc * math.sin(
261 | (row / len(self.refline_resampled)) * 4 * math.pi + random)
262 | else:
263 | self.local_scaling_long[row] = self.mean_lsc + self.amplitude_lsc * math.sin(
264 | (row / len(self.refline_resampled)) * 4 * math.pi + random)
265 | self.local_scaling_lat[row] = self.mean_lsc + self.amplitude_lsc * math.sin(
266 | (row / len(self.refline_resampled)) * 4 * math.pi + random)
267 | self.local_scaling[row] = self.mean_lsc + self.amplitude_lsc * math.sin(
268 | (row / len(self.refline_resampled)) * 4 * math.pi + random)
269 |
270 | if self.gui_mode == 1:
271 | self.refline_initial = np.hstack(
272 | [self.refline_resampled, self.local_scaling, self.local_scaling_long, self.local_scaling_lat])
273 | elif self.gui_mode == 2:
274 | self.refline_initial = np.hstack([self.refline_resampled, self.local_scaling, self.ax, self.ay])
275 |
276 | self.update_figure()
277 |
278 | # ------------------------------------------------------------------------------------------------------------------
279 |
280 | def quit_job(self):
281 | exit()
282 |
283 | # ------------------------------------------------------------------------------------------------------------------
284 |
285 | def addblock(self):
286 | self.frame_entries = tk.Frame(self.canvas)
287 | self.frame_entries.pack(anchor='center')
288 |
289 | for column in range(self.int_number_columns):
290 | entry = tk.Entry(self.frame_entries, width=15)
291 | self.entries[self.entry_counter] = entry
292 | entry.grid(row=0, column=column)
293 | self.entry_counter += 1
294 |
295 | self.frame_entries.columnconfigure(0, weight=1)
296 | self.frame_entries.columnconfigure(1, weight=1)
297 | self.frame_entries.columnconfigure(2, weight=1)
298 |
299 | self.canvas.create_window((0, (self.row_counter * 25)),
300 | window=self.frame_entries,
301 | anchor="nw",
302 | width=450,
303 | height=24)
304 |
305 | self.canvas.configure(scrollregion=self.canvas.bbox("all"))
306 |
307 | self.row_counter += 1
308 | print('addblock done')
309 |
310 | # ------------------------------------------------------------------------------------------------------------------
311 |
312 | def print_stuff(self, refline_plot: np.ndarray):  # note: refline_plot is currently unused here
313 |
314 | header_custom = {'track': self.track_name, 'gui_mode': self.gui_mode}
315 |
316 | if not self.entries[0].get():
317 | print('empty')
318 |
319 | else:
320 | print('not empty: ' + str(self.entries[0].get()))
321 |
322 | if not self.e1.get():
323 | print('filename suffix empty')
324 | self.filepath2output_tpamap_save = self.filepath2output_tpamap + '.csv'
325 |
326 | else:
327 | print('filename suffix: ' + str(self.e1.get()))
328 | self.filepath2output_tpamap_save = self.filepath2output_tpamap + '_' + self.e1.get() + '.csv'
329 |
330 | if self.gui_mode == 2:
331 | tmf.helperfuncs.save_tpamap.save_tpamap_fromfile(filepath2output_tpamap=self.filepath2output_tpamap_save,
332 | mode_save_tpamap='acclimits',
333 | coordinates_sxy_m=self.refline_resampled_org,
334 | long_limit=self.ax,
335 | lat_limit=self.ay,
336 | section_id=self.__section_id,
337 | header_info=header_custom,
338 | track_name=self.track_name)
339 |
340 | else:
341 | tmf.helperfuncs.save_tpamap.save_tpamap_fromfile(filepath2output_tpamap=self.filepath2output_tpamap_save,
342 | mode_save_tpamap='frictioncoeff',
343 | coordinates_sxy_m=self.refline_resampled_org,
344 | long_limit=self.local_scaling_long,
345 | lat_limit=self.local_scaling_lat,
346 | header_info=header_custom,
347 | track_name=self.track_name)
348 |
349 | print('print_stuff done')
350 |
351 | # ------------------------------------------------------------------------------------------------------------------
352 |
353 | def get_data(self) -> np.ndarray:
354 |
355 | self.array_guidata = np.zeros([self.row_counter, self.int_number_columns])
356 |
357 | int_rows = 0
358 | int_columns = 0
359 |
360 | for entry in self.entries.values():
361 |
362 | if entry.get():
363 | try:
364 | value = float(entry.get())
365 | self.array_guidata[int_rows, int_columns] = value
366 |
367 | except ValueError:
368 | logging.warning('entries must be numeric values!')
369 | break
370 |
371 | else:
372 | self.array_guidata[int_rows, int_columns] = np.nan
373 |
374 | # check values of one entry row for validity
375 | if int_columns == 3 and self.gui_mode == 1:
376 |
377 | if 0 < np.isnan(self.array_guidata[int_rows, :]).sum() < self.int_number_columns:
378 | tk.Label(self.frame_usermessage,
379 | text='make sure row ' + str(int_rows) + ' is either completely filled or empty! '
380 | 'Please double-check').grid(row=0, column=1, sticky=tk.W)
381 |
382 | break
383 |
384 | elif not np.isnan(self.array_guidata[int_rows, :]).any():
385 |
386 | if (self.array_guidata[int_rows, :2] < 0).any():
387 | logging.warning('at least one track coordinate value in row ' + str(int_rows)
388 | + ' is < 0! Please double-check')
389 | break
390 |
391 | elif (self.array_guidata[int_rows, 2:] <= 0).any():  # check both scaling factors (long and lat)
392 | logging.warning('at least one scaling factor in row ' + str(int_rows)
393 | + ' is <= 0! Please double-check')
394 | break
395 |
396 | elif self.array_guidata[int_rows, 0] >= self.array_guidata[int_rows, 1]:
397 | logging.warning('second value in row ' + str(int_rows)
398 | + ' is <= first value! Please double-check')
399 | break
400 |
401 | self.label_usermsg = tk.Label(self.frame_usermessage, text='data successfully updated')
402 | self.label_usermsg.grid(row=0, column=1, sticky=tk.W)
403 |
404 | elif int_columns == 3 and self.gui_mode == 2:
405 |
406 | if 0 < np.isnan(self.array_guidata[int_rows, :]).sum() < self.int_number_columns:
407 | tk.Label(self.frame_usermessage,
408 | text='make sure row ' + str(int_rows) + ' is either completely filled or empty! '
409 | 'Please double-check').grid(row=0, column=1, sticky=tk.W)
410 |
411 | break
412 |
413 | elif not np.isnan(self.array_guidata[int_rows, :]).any():
414 |
415 | if (self.array_guidata[int_rows, :2] < 0).any():
416 | logging.warning('at least one track coordinate value in row ' + str(int_rows)
417 | + ' is < 0! Please double-check')
418 | break
419 |
420 | elif (self.array_guidata[int_rows, 2:] <= 0).any():  # check both acceleration limits (ax and ay)
421 | logging.warning('at least one acceleration limit in row ' + str(int_rows)
422 | + ' is <= 0! Please double-check')
423 | break
424 |
425 | elif self.array_guidata[int_rows, 0] >= self.array_guidata[int_rows, 1]:
426 | logging.warning('second value in row ' + str(int_rows)
427 | + ' is <= first value! Please double-check')
428 | break
429 |
430 | self.label_usermsg = tk.Label(self.frame_usermessage, text='data successfully updated')
431 | self.label_usermsg.grid(row=0, column=1, sticky=tk.W)
432 |
433 | int_columns += 1
434 |
435 | if int_columns == self.int_number_columns:
436 | int_rows += 1
437 | int_columns = 0
438 | return self.array_guidata
439 | # ------------------------------------------------------------------------------------------------------------------
440 |
441 | def update_figure(self):
442 |
443 | self.refline_manip = self.refline_initial.copy()
444 |
445 | self.get_data()
446 |
447 | self.resample_userdata()
448 |
449 | self.plot_tpamap(self.refline_manip)
450 | print('update_figure done')
451 |
452 | # ------------------------------------------------------------------------------------------------------------------
453 |
454 | def save_map(self):
455 |
456 | self.refline_manip = self.refline_initial.copy()
457 |
458 | self.get_data()
459 |
460 | self.resample_userdata()
461 |
462 | self.print_stuff(self.refline_manip)
463 |
464 | self.plot_tpamap(self.refline_manip)
465 |
466 | tk.Label(self.frame_usermessage, text='map successfully saved').grid(row=0, column=1, sticky=tk.W)
467 |
468 | print('save_map done')
469 |
470 | # ------------------------------------------------------------------------------------------------------------------
471 |
472 | def resample_userdata(self):
473 | '''
474 | map the user-entered rows (s_start, s_end, value_long, value_lat) onto the reference line and update refline_manip
475 | '''
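# Illustrative walk-through (hypothetical numbers): in acceleration mode (gui_mode == 2) a user row
# [100.0, 250.0, 9.5, 10.0] is mapped via find_nearest() onto the s-coordinates in self.refline_initial[:, 0];
# ax/ay between idx_start and idx_end are then set to 9.5 and 10.0 m/s^2, and their mean is written as the
# combined value into column 3 of self.refline_manip.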
476 | if self.gui_mode == 2:
477 | for int_rows in range(self.row_counter):
478 |
479 | if not np.isnan(self.array_guidata[int_rows, :]).any():
480 | s_start = self.array_guidata[int_rows, 0]
481 | s_end = self.array_guidata[int_rows, 1]
482 | idx_start = self.find_nearest(self.refline_initial[:, 0], s_start)
483 | idx_end = self.find_nearest(self.refline_initial[:, 0], s_end)
484 | self.ax[idx_start:idx_end] = self.array_guidata[int_rows, 2]
485 | self.ay[idx_start:idx_end] = self.array_guidata[int_rows, 3]
486 | a_combined = (self.ax[idx_start] + self.ay[idx_start]) / 2
487 | if s_end >= self.refline_manip[-1, 0]:
488 | self.ax[-1] = self.array_guidata[int_rows, 2]
489 | self.ay[-1] = self.array_guidata[int_rows, 3]
490 | a_combined = (self.ax[idx_start] + self.ay[idx_start]) / 2
491 |
492 | if idx_end == idx_start or ((idx_end < idx_start) and not self.bool_closedtrack):
493 | continue
494 |
495 | elif (idx_end < idx_start) and self.bool_closedtrack:
496 | self.refline_manip[idx_end:idx_start, 3] = a_combined
497 | self.refline_manip[idx_end:idx_start, 4] = self.ax[idx_start]
498 | self.refline_manip[idx_end:idx_start, 5] = self.ay[idx_start]
499 |
500 | else:
501 | self.refline_manip[idx_start:idx_end, 3] = a_combined
502 | self.refline_manip[idx_start:idx_end, 4] = self.ax[idx_start]
503 | self.refline_manip[idx_start:idx_end, 5] = self.ay[idx_start]
504 |
505 | else:
506 | continue
507 | else:
508 | for int_rows in range(self.row_counter):
509 |
510 | if not np.isnan(self.array_guidata[int_rows, :]).any():
511 | s_start = self.array_guidata[int_rows, 0]
512 | s_end = self.array_guidata[int_rows, 1]
513 | idx_start = self.find_nearest(self.refline_initial[:, 0], s_start)
514 | idx_end = self.find_nearest(self.refline_initial[:, 0], s_end)
515 | self.local_scaling_long[idx_start:idx_end] = self.array_guidata[int_rows, 2]
516 | self.local_scaling_lat[idx_start:idx_end] = self.array_guidata[int_rows, 3]
517 | local_scaling = (self.local_scaling_long[idx_start] + self.local_scaling_lat[idx_start]) / 2
518 | if s_end >= self.refline_manip[-1, 0]:
519 | self.local_scaling_long[-1] = self.array_guidata[int_rows, 2]
520 | self.local_scaling_lat[-1] = self.array_guidata[int_rows, 3]
521 | local_scaling = (self.local_scaling_long[idx_start] + self.local_scaling_lat[idx_start]) / 2
522 |
523 | if idx_end == idx_start or ((idx_end < idx_start) and not self.bool_closedtrack):
524 | continue
525 |
526 | elif (idx_end < idx_start) and self.bool_closedtrack:
527 | self.refline_manip[idx_end:idx_start, 3] = local_scaling
528 | self.refline_manip[idx_end:idx_start, 4] = self.local_scaling_long[idx_start]
529 | self.refline_manip[idx_end:idx_start, 5] = self.local_scaling_lat[idx_start]
530 |
531 | else:
532 | self.refline_manip[idx_start:idx_end, 3] = local_scaling
533 | self.refline_manip[idx_start:idx_end, 4] = self.local_scaling_long[idx_start]
534 | self.refline_manip[idx_start:idx_end, 5] = self.local_scaling_lat[idx_start]
535 |
536 | else:
537 | continue
538 | print('resample_userdata done')
539 |
540 | # ------------------------------------------------------------------------------------------------------------------
541 |
542 | def reset_figure(self):
543 |
544 | self.refline_manip = self.refline_initial.copy()
545 |
546 | self.plot_tpamap(self.refline_initial)
547 | print('reset_figure done')
548 |
549 | # ------------------------------------------------------------------------------------------------------------------
550 |
551 | # https://stackoverflow.com/questions/2566412/find-nearest-value-in-numpy-array
552 | def find_nearest(self, array, value):
553 | array = np.asarray(array)
554 | idx = (np.abs(array - value)).argmin()
555 |
556 | return idx
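# Example (illustrative values): self.find_nearest(np.array([0.0, 5.0, 10.0]), 6.2) returns 1, the index of
# the closest value; resample_userdata() uses this to map user-entered s-coordinates to reference-line rows.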
557 |
558 | # ------------------------------------------------------------------------------------------------------------------
559 |
560 | def plot_tpamap(self, refline_plot: np.ndarray):
561 |
562 | tpamap = np.hstack((refline_plot[:, :3], refline_plot[:, 4:]))  # keep [s, x, y, long, lat]; drop the combined column
563 |
564 | # if self.gui_mode == 2:
565 | # ylabel = 'local acc. limit in m/s^2'
566 | # else:
567 | # ylabel = 'local scaling factor'
568 |
569 | # dict_plotinfo = {'bool_set_blocking': False,
570 | # 'ylabel': ylabel}
571 |
572 | # generate main plot -------------------------------------------------------------------------------------------
573 |
574 | if self.fig_handle:
575 | self.fig_handle.clf()
576 | else:
577 | self.fig_handle = plt.figure(figsize=(12, 9))
578 |
579 | self.fig_handle.canvas.manager.window.wm_geometry("+%d+%d" % (600, 10))
580 |
581 | tmf.visualization.visualize_tpamap.visualize_tpamap(tpamap=tpamap,
582 | refline=self.refline_dict['refline'],
583 | width_right=self.refline_dict['width_right'],
584 | width_left=self.refline_dict['width_left'],
585 | normvec_normalized=self.refline_dict['normvec_normalized'],
586 | fig_handle=self.fig_handle)
587 |
588 | print('plot_tpamap done')
589 |
590 |
591 | # ----------------------------------------------------------------------------------------------------------------------
592 | # testing --------------------------------------------------------------------------------------------------------------
593 | # ----------------------------------------------------------------------------------------------------------------------
594 | if __name__ == "__main__":
595 | pass  # no standalone test; the GUI is typically launched via main_tpaGUI.py
596 |
--------------------------------------------------------------------------------