├── tests ├── __init__.py ├── task │ ├── __init__.py │ ├── helper_functions.py │ ├── test_aat.py │ ├── test_trip.py │ ├── test_race_task.py │ ├── test_aat_trip.py │ └── test_waypoint.py ├── thermals │ ├── __init__.py │ └── test_flight_phases.py ├── utilities │ ├── __init__.py │ └── test_helper_functions.py ├── competition │ ├── __init__.py │ ├── test_strepla.py │ ├── test_soaringspot.py │ ├── test_crosscountry_api.py │ └── test_crosscountry.py └── igc_files │ └── missing_lcu_lseeyou_lines.igc ├── docs ├── requirements.txt ├── source │ └── opensoar │ │ ├── modules.rst │ │ ├── opensoar.rst │ │ ├── opensoar.utilities.rst │ │ ├── opensoar.thermals.rst │ │ ├── opensoar.task.rst │ │ └── opensoar.competition.rst ├── index.rst ├── Makefile └── conf.py ├── opensoar ├── version.py ├── __init__.py ├── utilities │ ├── __init__.py │ ├── retry_utils.py │ ├── geojson_serializers.py │ └── helper_functions.py ├── task │ ├── waypoint_definition.png │ ├── __init__.py │ ├── trip.py │ ├── waypoint.py │ ├── task.py │ ├── race_task.py │ └── aat.py ├── thermals │ ├── __init__.py │ ├── pysoar_thermal_detector.py │ └── flight_phases.py └── competition │ ├── __init__.py │ ├── competitor.py │ ├── competition_day.py │ ├── daily_results_page.py │ ├── strepla.py │ └── crosscountry.py ├── MANIFEST.in ├── requirements.txt ├── .gitignore ├── setup.py ├── .github └── workflows │ └── test.yml ├── LICENSE ├── README.rst └── CHANGELOG.rst /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/task/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/thermals/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/utilities/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/competition/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx==1.7.2 2 | -------------------------------------------------------------------------------- /opensoar/version.py: -------------------------------------------------------------------------------- 1 | __version__ = '2.1.1' 2 | -------------------------------------------------------------------------------- /opensoar/__init__.py: -------------------------------------------------------------------------------- 1 | from .version import __version__ 2 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst 2 | include CHANGELOG.rst 3 | include LICENSE 4 | -------------------------------------------------------------------------------- /docs/source/opensoar/modules.rst: -------------------------------------------------------------------------------- 1 | opensoar 2 | ======== 3 | 4 | .. 
toctree:: 5 | :maxdepth: 4 6 | 7 | opensoar 8 | -------------------------------------------------------------------------------- /opensoar/utilities/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This package contains helper functions, which support the other packages 3 | """ 4 | -------------------------------------------------------------------------------- /opensoar/task/waypoint_definition.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GliderGeek/opensoar/HEAD/opensoar/task/waypoint_definition.png -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pyproj>=3.4.1 2 | aerofiles~=1.4.0 3 | beautifulsoup4~=4.6.0 4 | geojson>=3.0.0 5 | shapely>2.0.0 6 | requests~=2.32.3 7 | -------------------------------------------------------------------------------- /tests/igc_files/missing_lcu_lseeyou_lines.igc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GliderGeek/opensoar/HEAD/tests/igc_files/missing_lcu_lseeyou_lines.igc -------------------------------------------------------------------------------- /opensoar/task/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This package provides the necessary classes for creating tasks and evaluating the performance of a competitor. 3 | """ 4 | -------------------------------------------------------------------------------- /opensoar/thermals/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This package contains the algorithms for thermal detection and a container class to combine the 3 | thermal- and cruise phases with helper methods for easy access. 4 | """ 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # virtual environment folder 2 | env 3 | venv 4 | docs/build 5 | 6 | # from building package 7 | dist 8 | build 9 | opensoar.egg-info/ 10 | 11 | # IDE 12 | .idea 13 | 14 | *.pyc 15 | 16 | .DS_Store 17 | -------------------------------------------------------------------------------- /opensoar/competition/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This package provides functionality to combine multiple flights in a CompetitionDay. 3 | Besides manually combining Competitors, it also provides high level interfaces with competition websites 4 | (e.g. SoaringSpot), which make it very easy to download and analyse all flights within a 5 | published CompetitionDay. 6 | """ 7 | -------------------------------------------------------------------------------- /docs/source/opensoar/opensoar.rst: -------------------------------------------------------------------------------- 1 | opensoar package 2 | ================ 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. toctree:: 8 | 9 | opensoar.competition 10 | opensoar.task 11 | opensoar.thermals 12 | opensoar.utilities 13 | 14 | Module contents 15 | --------------- 16 | 17 | .. 
automodule:: opensoar 18 | :members: 19 | :undoc-members: 20 | :show-inheritance: 21 | -------------------------------------------------------------------------------- /docs/source/opensoar/opensoar.utilities.rst: -------------------------------------------------------------------------------- 1 | opensoar.utilities package 2 | ========================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | opensoar.utilities.helper_functions module 8 | ------------------------------------------ 9 | 10 | .. automodule:: opensoar.utilities.helper_functions 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | 16 | Module contents 17 | --------------- 18 | 19 | .. automodule:: opensoar.utilities 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. opensoar documentation master file, created by 2 | sphinx-quickstart on Fri Apr 13 20:59:40 2018. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to opensoar's documentation! 7 | ==================================== 8 | 9 | Contents: 10 | 11 | .. toctree:: 12 | :maxdepth: 2 13 | 14 | source/opensoar/opensoar 15 | 16 | Indices and tables 17 | ================== 18 | 19 | * :ref:`genindex` 20 | * :ref:`modindex` 21 | * :ref:`search` 22 | 23 | -------------------------------------------------------------------------------- /tests/task/helper_functions.py: -------------------------------------------------------------------------------- 1 | from aerofiles.igc import Reader 2 | 3 | from opensoar.competition.soaringspot import get_info_from_comment_lines 4 | 5 | 6 | def get_trace(igc_path): 7 | with open(igc_path, 'r') as f: 8 | parsed_igc_file = Reader(skip_duplicates=True).read(f) 9 | 10 | _, trace = parsed_igc_file['fix_records'] 11 | 12 | return trace 13 | 14 | 15 | def get_task(igc_path): 16 | with open(igc_path, 'r') as f: 17 | parsed_igc_file = Reader(skip_duplicates=True).read(f) 18 | 19 | task, contest_information, competitor_information = get_info_from_comment_lines(parsed_igc_file, date=parsed_igc_file["header"][1]["utc_date"]) 20 | return task 21 | -------------------------------------------------------------------------------- /docs/source/opensoar/opensoar.thermals.rst: -------------------------------------------------------------------------------- 1 | opensoar.thermals package 2 | ========================= 3 | 4 | Submodules 5 | ---------- 6 | 7 | opensoar.thermals.flight_phases module 8 | -------------------------------------- 9 | 10 | .. automodule:: opensoar.thermals.flight_phases 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | opensoar.thermals.pysoar_thermal_detector module 16 | ------------------------------------------------ 17 | 18 | .. automodule:: opensoar.thermals.pysoar_thermal_detector 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | 24 | Module contents 25 | --------------- 26 | 27 | .. 
automodule:: opensoar.thermals 28 | :members: 29 | :undoc-members: 30 | :show-inheritance: 31 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | exec(open('opensoar/version.py').read()) 4 | 5 | with open("README.rst", "r") as f: 6 | long_description = f.read() 7 | 8 | setup( 9 | name='opensoar', 10 | version=__version__, # has been import above in exec command 11 | license='MIT', 12 | description='Open source python library for glider flight analysis', 13 | url='https://github.com/glidergeek/opensoar', 14 | packages=find_packages(exclude=['tests']), 15 | long_description=long_description, 16 | install_requires=[ 17 | 'aerofiles~=1.4.0', 18 | 'beautifulsoup4~=4.6.0', 19 | 'pyproj>=3.4.1', 20 | 'geojson>=3.0.0', 21 | 'shapely>2.0.0', 22 | 'requests~=2.32.3', 23 | ] 24 | ) 25 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | push 5 | 6 | jobs: 7 | 8 | build: 9 | name: Test 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v1 13 | - name: Set up Python 14 | uses: actions/setup-python@v2 15 | with: 16 | python-version: 3.9 17 | - name: Install dependencies 18 | run: | 19 | python -m pip install --upgrade pip 20 | pip install -r requirements.txt 21 | - name: Run tests 22 | run: python -m unittest discover -b --start-directory ./tests 23 | - name: build dist 24 | run: python3 -m pip install --upgrade build && python3 -m build 25 | - name: Publish package 26 | if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') 27 | uses: pypa/gh-action-pypi-publish@release/v1 28 | with: 29 | password: ${{ secrets.PYPI_API_TOKEN }} 30 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 GliderGeek 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /docs/source/opensoar/opensoar.task.rst: -------------------------------------------------------------------------------- 1 | opensoar.task package 2 | ===================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | opensoar.task.aat module 8 | ------------------------ 9 | 10 | .. automodule:: opensoar.task.aat 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | opensoar.task.race_task module 16 | ------------------------------ 17 | 18 | .. automodule:: opensoar.task.race_task 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | opensoar.task.task module 24 | ------------------------- 25 | 26 | .. automodule:: opensoar.task.task 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | opensoar.task.trip module 32 | ------------------------- 33 | 34 | .. automodule:: opensoar.task.trip 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | opensoar.task.waypoint module 40 | ----------------------------- 41 | 42 | .. automodule:: opensoar.task.waypoint 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | 48 | Module contents 49 | --------------- 50 | 51 | .. automodule:: opensoar.task 52 | :members: 53 | :undoc-members: 54 | :show-inheritance: 55 | -------------------------------------------------------------------------------- /docs/source/opensoar/opensoar.competition.rst: -------------------------------------------------------------------------------- 1 | opensoar.competition package 2 | ============================ 3 | 4 | Submodules 5 | ---------- 6 | 7 | opensoar.competition.competition_day module 8 | ------------------------------------------- 9 | 10 | .. automodule:: opensoar.competition.competition_day 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | opensoar.competition.competitor module 16 | -------------------------------------- 17 | 18 | .. automodule:: opensoar.competition.competitor 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | opensoar.competition.daily_results_page module 24 | ---------------------------------------------- 25 | 26 | .. automodule:: opensoar.competition.daily_results_page 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | opensoar.competition.soaringspot module 32 | --------------------------------------- 33 | 34 | .. automodule:: opensoar.competition.soaringspot 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | opensoar.competition.strepla module 40 | ----------------------------------- 41 | 42 | .. automodule:: opensoar.competition.strepla 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | 48 | Module contents 49 | --------------- 50 | 51 | .. automodule:: opensoar.competition 52 | :members: 53 | :undoc-members: 54 | :show-inheritance: 55 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | OpenSoar 2 | ======== 3 | 4 | .. image:: https://img.shields.io/pypi/v/opensoar.svg 5 | :target: https://pypi.org/project/opensoar/ 6 | :alt: pypi version and link 7 | 8 | .. image:: https://readthedocs.org/projects/opensoar/badge/?version=latest 9 | :target: http://opensoar.readthedocs.io/en/latest/?badge=latest 10 | :alt: Documentation Status 11 | 12 | The OpenSoar python library is meant to provide open source tooling for glider flight analysis. 
This may vary from 13 | thermal detection to competition scoring. 14 | 15 | Installation 16 | ============= 17 | :: 18 | 19 | pip install opensoar 20 | 21 | 22 | Reading in files with aerofiles 23 | ================================ 24 | 25 | .. image:: https://raw.githubusercontent.com/Turbo87/aerofiles/master/img/logo.png 26 | :target: https://github.com/Turbo87/aerofiles 27 | 28 | OpenSoar only performs analyses after the files have been read in. The `aerofiles library `_ provides the functionality 29 | to read the files. 30 | 31 | Example race task 32 | ================== 33 | :: 34 | 35 | from aerofiles.igc import Reader 36 | from opensoar.competition.soaringspot import get_info_from_comment_lines 37 | from opensoar.task.trip import Trip 38 | 39 | with open('example.igc', 'r') as f: 40 | parsed_igc_file = Reader().read(f) 41 | 42 | # example.igc comes from soaringspot and contains task inforamtion 43 | task, _, _ = get_info_from_comment_lines(parsed_igc_file) 44 | _, trace = parsed_igc_file['fix_records'] 45 | 46 | trip = Trip(task, trace) 47 | task_distance_covered = sum(trip.distances) 48 | 49 | 50 | Releasing 51 | ========== 52 | 53 | - add version number in changelog 54 | - change `__version__` in opensoar/version.py 55 | - merge to master 56 | - push tag, ci publishes to pypi 57 | -------------------------------------------------------------------------------- /opensoar/competition/competitor.py: -------------------------------------------------------------------------------- 1 | from typing import List, Union 2 | 3 | from opensoar.task.trip import Trip 4 | from opensoar.thermals.flight_phases import FlightPhases 5 | 6 | 7 | class Competitor: 8 | """ 9 | All the information of one entry in a CompetitionDay. This encompasses information about the pilot, 10 | the plane and the gps trace. 11 | """ 12 | 13 | def __init__(self, trace: List, competition_id: str=None, plane_model: str=None, ranking: Union[int, str]=None, 14 | pilot_name: str=None): 15 | 16 | """ 17 | 18 | :param trace: 19 | :param competition_id: 20 | :param plane_model: 21 | :param ranking: may also be 'HC' when competitor flies hors concours. 
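A minimal usage sketch (the file 'example.igc', the competition id, plane model, ranking and pilot name below are illustrative placeholders; the file is assumed to come from SoaringSpot with task info in its comment lines):

    from aerofiles.igc import Reader
    from opensoar.competition.competitor import Competitor
    from opensoar.competition.soaringspot import get_info_from_comment_lines

    with open('example.igc', 'r') as f:
        parsed_igc_file = Reader(skip_duplicates=True).read(f)

    task, _, _ = get_info_from_comment_lines(parsed_igc_file)
    _, trace = parsed_igc_file['fix_records']

    competitor = Competitor(trace, 'XYZ', 'LS4', 1, 'Jane Doe')
    competitor.analyse(task, classification_method='pysoar')
    print(sum(competitor.trip.distances))  # covered task distance in meters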
22 | :param pilot_name: 23 | """ 24 | 25 | self.trace = trace 26 | self.competition_id = competition_id 27 | self._plane_model = plane_model 28 | self.ranking = ranking 29 | self._pilot_name = pilot_name 30 | 31 | # to be set by analyse method 32 | self._trip = None 33 | self._phases = None 34 | 35 | @property 36 | def trip(self): 37 | return self._trip 38 | 39 | @property 40 | def pilot_name(self): 41 | return self._pilot_name 42 | 43 | @property 44 | def plane_model(self): 45 | return self._plane_model 46 | 47 | @property 48 | def phases(self): 49 | return self._phases 50 | 51 | def analyse(self, task, classification_method: str): 52 | 53 | if self.trace is None or len(self.trace) == 0: 54 | raise ValueError('No trace present') 55 | 56 | self._trip = Trip(task, self.trace) 57 | 58 | # competitor should have at least started 59 | if len(self._trip.fixes) >= 1: 60 | self._phases = FlightPhases(classification_method, self.trace, self._trip) 61 | -------------------------------------------------------------------------------- /tests/task/test_aat.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import os 3 | from copy import deepcopy 4 | 5 | import datetime 6 | 7 | from opensoar.task.aat import AAT 8 | from tests.task.helper_functions import get_task 9 | 10 | 11 | class TestAAT(unittest.TestCase): 12 | 13 | # https://www.soaringspot.com/en_gb/cae-nls-nederlandse-kampioenschappen-zweefvliegen-2012/results/club/task-10-on-2012-05-26/daily 14 | # competitor 3, SP 15 | 16 | cwd = os.path.dirname(__file__) 17 | igc_path = os.path.join(cwd, '..', 'igc_files', 'aat_completed.igc') 18 | aat = get_task(igc_path) 19 | 20 | def test_number_of_legs(self): 21 | self.assertEqual(self.aat.no_legs, 5) 22 | 23 | def test_nominal_distances(self): 24 | nominal_distances = self.aat._nominal_distances 25 | expected_distances = [45.20, 43.25, 73.26, 88.28, 9.62] 26 | 27 | # note: the final distance is different from the one in soaringspot. 28 | # soaringspot says the last distance should be 9.12km. 
they wrongfully subtract 0.5km from the finish line 29 | 30 | self.assertEqual(len(nominal_distances), len(expected_distances)) 31 | 32 | for distance, expected_distance in zip(nominal_distances, expected_distances): 33 | self.assertAlmostEqual(distance / 1e3, expected_distance, places=2) 34 | 35 | def test_equal_aat(self): 36 | aat2 = get_task(self.igc_path) 37 | self.assertEqual(self.aat, aat2) 38 | 39 | def test_not_equal_aat(self): 40 | 41 | waypoints = self.aat.waypoints 42 | 43 | # test_unequal number_waypoints 44 | waypoints2 = deepcopy(waypoints) 45 | del waypoints2[2] 46 | aat2 = AAT(waypoints2, self.aat._t_min) 47 | self.assertNotEqual(self.aat, aat2) 48 | 49 | # test unequal t_min 50 | aat2 = AAT(waypoints, datetime.time(1, 0, 0)) 51 | self.assertNotEqual(self.aat, aat2) 52 | -------------------------------------------------------------------------------- /opensoar/task/trip.py: -------------------------------------------------------------------------------- 1 | class Trip: 2 | """ 3 | Realised 4 | """ 5 | 6 | def __init__(self, task, trace): 7 | 8 | task_result = task.apply_rules(trace) 9 | 10 | self.fixes = task_result[0] 11 | self.refined_start_time = task_result[1] 12 | self.outlanding_fix = task_result[2] 13 | self.distances = task_result[3] 14 | self.finish_time = task_result[4] 15 | self.sector_fixes = task_result[5] 16 | 17 | def completed_legs(self): 18 | return len(self.fixes) - 1 19 | 20 | def started_legs(self): 21 | if self.outlanded(): 22 | return len(self.fixes) 23 | else: 24 | return len(self.fixes) - 1 25 | 26 | def outlanding_leg(self): 27 | if self.outlanded(): 28 | return len(self.fixes) - 1 29 | else: 30 | return None 31 | 32 | def outlanded(self): 33 | return self.outlanding_fix is not None 34 | 35 | def fix_on_leg(self, fix, leg): 36 | """ 37 | Return whether fix takes place within certain leg, excluding the boundaries 38 | :param fix: 39 | :param leg: 40 | :return: 41 | """ 42 | larger_than_minimum = not self.fix_before_leg(fix, leg) 43 | smaller_than_maximum = not self.fix_after_leg(fix, leg) 44 | return larger_than_minimum and smaller_than_maximum 45 | 46 | def fix_before_leg(self, fix, leg): 47 | return (self.fixes[leg]['datetime'] - fix['datetime']).total_seconds() >= 0 48 | 49 | def fix_after_leg(self, fix, leg): 50 | if leg + 1 <= self.completed_legs(): 51 | return (fix['datetime'] - self.fixes[leg + 1]['datetime']).total_seconds() >= 0 52 | elif self.outlanded() and leg == self.outlanding_leg(): 53 | return False 54 | else: # leg > self.completed_legs() + 1 55 | raise ValueError('Leg not started') -------------------------------------------------------------------------------- /opensoar/utilities/retry_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | Retry utilities for robust network operations. 3 | 4 | This module provides lightweight retry decorators for handling transient failures 5 | in network requests and file operations. 6 | """ 7 | 8 | import time 9 | import functools 10 | from typing import Callable, Union, Tuple, Type 11 | 12 | 13 | def retry( 14 | max_attempts: int = 3, 15 | delay: float = 1.0, 16 | backoff: float = 2.0, 17 | exceptions: Union[Type[Exception], Tuple[Type[Exception], ...]] = Exception 18 | ): 19 | """ 20 | Lightweight retry decorator for handling transient failures. 
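A short sketch of building and inspecting a Trip (assuming task and trace were parsed from an IGC file as in the README example):

    from opensoar.task.trip import Trip

    trip = Trip(task, trace)          # applies the task rules to the trace

    print(trip.completed_legs())      # number of fully flown legs
    print(sum(trip.distances))        # scored distance in meters

    if trip.outlanded():
        print(trip.outlanding_leg())            # leg on which the outlanding happened
        print(trip.outlanding_fix['datetime'])  # time of the outlanding fix
    else:
        print(trip.finish_time)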
21 | 22 | Args: 23 | max_attempts: Maximum number of retry attempts 24 | delay: Initial delay between retries in seconds 25 | backoff: Multiplier for delay after each retry 26 | exceptions: Exception types to catch and retry on 27 | 28 | Returns: 29 | Decorated function with retry logic 30 | 31 | Example: 32 | @retry(max_attempts=3, delay=1.0, exceptions=requests.exceptions.RequestException) 33 | def download_file(url): 34 | response = requests.get(url) 35 | response.raise_for_status() 36 | return response.content 37 | """ 38 | def decorator(func: Callable): 39 | @functools.wraps(func) 40 | def wrapper(*args, **kwargs): 41 | last_exception = None 42 | current_delay = delay 43 | 44 | for attempt in range(max_attempts): 45 | try: 46 | return func(*args, **kwargs) 47 | except exceptions as e: 48 | last_exception = e 49 | if attempt == max_attempts - 1: 50 | raise 51 | 52 | print(f"Attempt {attempt + 1} failed: {e}. Retrying in {current_delay}s...") 53 | time.sleep(current_delay) 54 | current_delay *= backoff 55 | 56 | raise last_exception 57 | 58 | return wrapper 59 | return decorator 60 | 61 | 62 | # Predefined retry decorators for common use cases 63 | def web_request_retry(max_attempts: int = 3): 64 | """Retry decorator specifically for web requests.""" 65 | import requests 66 | return retry( 67 | max_attempts=max_attempts, 68 | delay=1.0, 69 | backoff=2.0, 70 | exceptions=requests.exceptions.RequestException 71 | ) 72 | -------------------------------------------------------------------------------- /opensoar/competition/competition_day.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from typing import List 3 | 4 | from opensoar.competition.competitor import Competitor 5 | from opensoar.task.task import Task 6 | 7 | 8 | # TODO: This is a temporary fix for deepcopy issues with TimeZoneFix 9 | # Issue reference: https://github.com/Turbo87/aerofiles/issues/318 10 | # Remove this once aerofiles is updated with a proper fix 11 | from aerofiles.util.timezone import TimeZoneFix 12 | 13 | # TimeZoneFix from aerofiles doesn't support deepcopy properly 14 | # Add a __deepcopy__ method to fix serialization issues 15 | def _deepcopy_timezone_fix(self, memo): 16 | """Create a proper deep copy of a TimeZoneFix instance.""" 17 | return TimeZoneFix(self.fix) 18 | 19 | # Apply monkeypatch 20 | TimeZoneFix.__deepcopy__ = _deepcopy_timezone_fix 21 | 22 | 23 | class CompetitionDay: 24 | """ 25 | This class contains the competition day information, equal fo all competitors. 26 | """ 27 | 28 | def __init__(self, name: str, date: datetime.date, plane_class: str, competitors: List[Competitor], 29 | task: Task): 30 | """ 31 | :param name: description of the competition day. used for storing igc files. 32 | :param date: date on which the competition day takes place. used for storing igc files. 33 | :param plane_class: competition class (e.g. club-class). used for storing igc files. 34 | :param competitors: contestants in the competition day. 35 | :param task: 36 | """ 37 | 38 | if competitors is None: 39 | competitors = list() 40 | 41 | self.name = name 42 | self.competitors = competitors 43 | self.task = task 44 | self.date = date 45 | self.plane_class = plane_class 46 | 47 | def analyse_flights(self, classification_method: str, analysis_progress=None, skip_failed_analyses: bool=False): 48 | """ 49 | :param classification_method: method for detecting thermals. See FlightPhases for more info. 
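A sketch of wrapping a flaky download with the preconfigured web_request_retry decorator (the function name, URL handling and timeout are illustrative). With the defaults of the underlying retry decorator the waits between attempts double, e.g. 1s, 2s, 4s, 8s:

    import requests

    from opensoar.utilities.retry_utils import web_request_retry

    @web_request_retry(max_attempts=5)
    def fetch_results_page(url):
        response = requests.get(url, timeout=10)
        response.raise_for_status()
        return response.text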
50 | :param analysis_progress: optional function to log the analysis progress. Should have the following signature: 51 | func(number_of_analyses, total_number_of_flights) 52 | :param skip_failed_analyses: if True, exceptions are caught during a failed analysis. a list is return with the 53 | competition ids of all failed analyses. 54 | :return: 55 | """ 56 | 57 | if self.task is None: 58 | raise ValueError('Task not present') 59 | 60 | if self.task.multistart: 61 | raise ValueError('Multistart is not supported') 62 | 63 | number_of_analyzed_flights = 0 64 | 65 | failed_comp_ids = [] 66 | for competitor in self.competitors: 67 | 68 | if skip_failed_analyses: 69 | try: 70 | competitor.analyse(self.task, classification_method) 71 | except Exception: 72 | failed_comp_ids.append(competitor.competition_id) 73 | else: 74 | competitor.analyse(self.task, classification_method) 75 | 76 | if analysis_progress is not None: 77 | number_of_analyzed_flights += 1 78 | analysis_progress(number_of_analyzed_flights, len(self.competitors)) 79 | 80 | return failed_comp_ids 81 | -------------------------------------------------------------------------------- /tests/task/test_trip.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | 4 | import datetime 5 | 6 | from opensoar.task.trip import Trip 7 | from tests.task.helper_functions import get_trace, get_task 8 | 9 | 10 | class TestTrip(unittest.TestCase): 11 | """ 12 | This testcase covers a completed race task. number 2, comp id HS: 13 | https://www.soaringspot.com/en/sallandse-tweedaagse-2014/results/club/task-1-on-2014-06-21/daily 14 | """ 15 | 16 | cwd = os.path.dirname(__file__) 17 | igc_path = os.path.join(cwd, '..', 'igc_files', 'race_task_completed.igc') 18 | race_task = get_task(igc_path) 19 | trace = get_trace(igc_path) 20 | trip = Trip(race_task, trace) 21 | 22 | def test_number_of_fixes(self): 23 | self.assertEqual(len(self.trip.fixes), 5) 24 | 25 | def test_distances(self): 26 | self.assertListEqual(self.trip.distances, self.race_task.distances) 27 | 28 | def test_outlanded(self): 29 | self.assertFalse(self.trip.outlanded()) 30 | 31 | def test_start_time(self): 32 | start_fix = self.trip.fixes[0] 33 | refined_start_time = self.trip.refined_start_time 34 | self.assertEqual(start_fix['datetime'], datetime.datetime(2014, 6, 21, 12, 12, 54, tzinfo=datetime.timezone.utc)) 35 | self.assertEqual(refined_start_time, datetime.datetime(2014, 6, 21, 12, 12, 55, tzinfo=datetime.timezone.utc)) 36 | 37 | def test_finish_time(self): 38 | finish_fix = self.trip.fixes[-1] 39 | self.assertEqual(finish_fix['datetime'], datetime.datetime(2014, 6, 21, 13, 21, 58, tzinfo=datetime.timezone.utc)) 40 | 41 | class TestOutlandingTrip(unittest.TestCase): 42 | """ 43 | This testcase covers an outlanding on a race task. 
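A sketch of grouping competitors into a CompetitionDay and analysing all flights (name, date, class and the competitor list are placeholders; task is a previously parsed Task):

    import datetime

    from opensoar.competition.competition_day import CompetitionDay

    competition_day = CompetitionDay(
        name='task-1',
        date=datetime.date(2014, 6, 21),
        plane_class='club',
        competitors=[competitor],   # list of Competitor instances
        task=task,
    )

    def print_progress(analysed, total):
        print(f'{analysed}/{total} flights analysed')

    failed_ids = competition_day.analyse_flights(
        'pysoar', analysis_progress=print_progress, skip_failed_analyses=True)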
number 7, comp id SU: 44 | https://www.soaringspot.com/en/sallandse-tweedaagse-2014/results/club/task-1-on-2014-06-21/daily 45 | """ 46 | 47 | cwd = os.path.dirname(__file__) 48 | igc_path = os.path.join(cwd, '..', 'igc_files', 'outlanding_race_task.igc') 49 | race_task = get_task(igc_path) 50 | trace = get_trace(igc_path) 51 | trip = Trip(race_task, trace) 52 | 53 | def test_total_distance(self): 54 | self.assertAlmostEqual(sum(self.trip.distances) / 1000, 89.99, places=2) 55 | 56 | def test_completed_legs(self): 57 | self.assertEqual(self.trip.completed_legs(), 2) 58 | 59 | def test_fix_after_leg_on_outlanding_leg(self): 60 | """A fix happening on the outlanding leg can never be after the leg, because that leg is never finished.""" 61 | fix = {'datetime': datetime.datetime(2014, 6, 21, 14, 44, 45)} 62 | fix_after_leg = self.trip.fix_after_leg(fix, leg=2) 63 | self.assertFalse(fix_after_leg) 64 | 65 | 66 | class TestEnlOutlandingTrip(unittest.TestCase): 67 | """ 68 | This testcase covers an ENL outlanding on a race task. number 10, comp id 2C: 69 | https://www.soaringspot.com/en/nk-zweefvliegen-2017/results/18-meter-klasse/task-8-on-2017-05-31/daily 70 | """ 71 | 72 | cwd = os.path.dirname(__file__) 73 | igc_path = os.path.join(cwd, '..', 'igc_files', 'outlanding_race_task_enl.igc') 74 | race_task = get_task(igc_path) 75 | trace = get_trace(igc_path) 76 | trip = Trip(race_task, trace) 77 | 78 | def test_total_distance(self): 79 | self.assertAlmostEqual(sum(self.trip.distances) / 1000, 378.13, places=2) 80 | 81 | def test_completed_legs(self): 82 | self.assertEqual(self.trip.completed_legs(), 4) 83 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | Changelog 2 | ========== 3 | 4 | unreleased 5 | ------------------------ 6 | Added 7 | ~~~~~~ 8 | Changed 9 | ~~~~~~~~ 10 | Deprecated 11 | ~~~~~~~~~~~~ 12 | Removed 13 | ~~~~~~~~~ 14 | Fixed 15 | ~~~~~~~~ 16 | Security 17 | ~~~~~~~~~ 18 | 19 | v2.1.1 - 2025/10/26 20 | ------------------------ 21 | Fixed 22 | ~~~~~~~~ 23 | * Fixed handling of duplicate competition IDs in SoaringSpot competitions 24 | 25 | v2.1.0 - 2025/05/25 26 | ------------------------ 27 | Added 28 | ~~~~~~ 29 | * Add crosscountry analysis for sailplane grand prix 30 | * Added retry logic on getting web page for enhanced reliability 31 | 32 | v2.0.1 - 2025/03/07 33 | ------------------------ 34 | Added 35 | ~~~~~~~~ 36 | * Include pilot name and plane in soaringspot competitor info 37 | 38 | Changed 39 | ~~~~~~~~ 40 | * Optionally include DNF and DNS soaringspot competitor info 41 | 42 | v2.0.0 - 2025/03/07 43 | ------------------------ 44 | Fixed 45 | ~~~~~~~~ 46 | * IGC files with duplicate fixes no longer cause errors 47 | 48 | Changed 49 | ~~~~~~~~ 50 | * Using timezone aware fixes throughout the library 51 | * Input times are now timezone aware 52 | * Output times are now timezone aware 53 | 54 | Removed 55 | ~~~~~~~~~ 56 | * Several time helper functions `utilities.helper_functions` 57 | 58 | 59 | v1.1.3 - 2024/08/18 60 | ------------------------ 61 | Fixed 62 | ~~~~~~~~ 63 | * Fixed starttime UTC correction with negative timezone values 64 | 65 | v1.1.2 - 2024/02/04 66 | ------------------------ 67 | Fixed 68 | ~~~~~~~~ 69 | * allow for soaringspot files without task info 70 | * allow for soaring competitors with empty competition ID 71 | 72 | v1.1.1 - 2023/08/25 73 | ------------------------ 74 | Added 75 | ~~~~~~ 76 | * extra geojson 
serializer for trip 77 | Fixed 78 | ~~~~~~~~ 79 | * automatic sector orientations should point inward 80 | 81 | v1.1.0 - 2023/05/20 82 | ------------------------ 83 | Added 84 | ~~~~~~ 85 | * geojson serializers 86 | Fixed 87 | ~~~~~~~~ 88 | * AAT inside sector determination 89 | 90 | v1.0.0 - 2023/01/09 91 | ------------------------ 92 | Changed 93 | ~~~~~~~~ 94 | * Replace TravisCI with github actions for automated tests and publish 95 | * Replace pygeodesy with pyproj for speed-up 96 | Removed 97 | ~~~~~~~~~ 98 | * StreplaDaily support: site no longer online 99 | * `calculate_distance` and `calculate_bearing` (replaced by `calculate_distance_bearing`) 100 | Fixed 101 | ~~~~~~~~ 102 | * is -> == on literals 103 | 104 | v0.1.7 105 | ------------------------ 106 | Fixed 107 | ~~~~~~~~ 108 | * wrong version number in package 109 | v0.1.6 110 | ------------------------ 111 | Changed 112 | ~~~~~~~~ 113 | * removed pinning from requirements to keep up to date 114 | Fixed 115 | ~~~~~~~~ 116 | * obtaining IGC download URLs for soaringspot 117 | * ranking and plane_model are nog longer switched in competition day 118 | 119 | v0.1.5 120 | ------------------------ 121 | Changed 122 | ~~~~~~~~ 123 | * updated pygeodesy dependency 124 | 125 | v0.1.4 126 | ------------------------ 127 | Fixed 128 | ~~~~~~~~ 129 | * relative urls for igc files using different base. (solves dev.soaringspot) 130 | 131 | v0.1.3 132 | ------------------------ 133 | Fixed 134 | ~~~~~~~~ 135 | * fix bug in handling AAT task for scoringStrepla 136 | 137 | v0.1.2 138 | ------------------------ 139 | * fix bug where moved_turnpoint caused failing task 140 | * skip flights which cannot be parsed 141 | 142 | v0.1.1 143 | ------------------------ 144 | * do not skip HC competitors 145 | * add flag skip_failed_analyses in CompetitionDay.analyze_flights() 146 | 147 | v0.1.0: initial release 148 | ------------------------ 149 | * competition module: CompetitionDay, Competitor, SoaringSpotDaily, StreplaDaily 150 | * task module: AAT, RaceTask, Trip, Waypoint 151 | * thermals module: FlightPhases, PySoarThermalDetector 152 | -------------------------------------------------------------------------------- /tests/competition/test_strepla.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import datetime 4 | 5 | import os 6 | 7 | from aerofiles.igc import Reader 8 | 9 | from opensoar.competition.competitor import Competitor 10 | from opensoar.competition.strepla import get_waypoint_name_lat_long, get_waypoints, get_waypoint, get_task_and_competitor_info, get_info_from_comment_lines 11 | from opensoar.task.aat import AAT 12 | 13 | 14 | class TestStrepla(unittest.TestCase): 15 | lscsc_lines = [ 16 | 'LSCSCS:AP4 Fronhofen Strassen-T:N4942358:E00851490', 17 | 'LSCSCT:074 Main Lohr-M:N4959700:E00934900', 18 | 'LSCSCT:050 Herbstein Kirche:N5033733:E00920800', 19 | 'LSCSCT:120 St Goar Bf:N5009067:E00742850', 20 | 'LSCSCT:079 Meisenheim Station:N4942550:E00739767', 21 | 'LSCSCT:010 Bensheim Lindenfels Krehberg TV:N4941150:E00843883', 22 | 'LSCSCF:ZP Reinheim (Darmstadt Dieburg):N4950433:E00851050', 23 | ] 24 | 25 | lscsr_lines = [ 26 | 'LSCSRSLINE:20000', 27 | 'LSCSRTKEYHOLE:500:10000:90', 28 | 'LSCSRFCYLINDER:2500', 29 | ] 30 | 31 | lscsd_lines = [ 32 | 'LSCSDCID:IBG', 33 | 'LSCSDName:Leip, Dennis', 34 | 'LSCSDGate open:10:44', 35 | 'LSCSDGate close:12:14', 36 | 'LSCSDTime window:03:30', 37 | 'LSCSDmax Elevation start:1200', 38 | 'LSCSDmax Elevation:3000', 39 | 'LSCSDQNH:1021', 40 | 
'LSCSDElevation start:155', 41 | ] 42 | 43 | lscsa_lines = [] 44 | 45 | def test_waypoint_info_parsing(self): 46 | """test whether name and coordinates are correctly read from line in igc file""" 47 | 48 | lscs_line_tp = 'LSCSCT:074 Main Lohr-M:N4959700:E00934900' 49 | name, lat, lon = get_waypoint_name_lat_long(lscs_line_tp) 50 | 51 | self.assertEqual(name, '074 Main Lohr-M') 52 | self.assertAlmostEqual(lat, 49.9950, places=4) 53 | self.assertAlmostEqual(lon, 9.5817, places=4) 54 | 55 | def test_get_waypoints(self): 56 | task_info, competitor_information = get_task_and_competitor_info(self.lscsd_lines, self.lscsr_lines, []) 57 | waypoints = get_waypoints(self.lscsc_lines, task_info) 58 | self.assertEqual(len(waypoints), 7) 59 | 60 | def test_get_waypoint(self): 61 | 62 | lscsc_line = 'LSCSCS:AP4 Fronhofen Strassen-T:N4942358:E00851490' 63 | 64 | task_info, competitor_information = get_task_and_competitor_info(self.lscsd_lines, self.lscsr_lines, []) 65 | waypoint = get_waypoint(lscsc_line, task_info, n=0, n_tp=7) 66 | 67 | self.assertEqual(waypoint.name, 'AP4 Fronhofen Strassen-T') 68 | self.assertTrue(waypoint.is_line) 69 | 70 | def test_aat_from_file(self): 71 | """ 72 | Test if aat is correctly recognised and waypoint are correct 73 | file from: https://www.strepla.de/scs/Public/scoreDay.aspx?cId=451&idDay=7912, competitor 1 CX 74 | """ 75 | 76 | file_path = os.path.join(os.path.dirname(__file__), '..', 'igc_files', 'aat_strepla.igc') 77 | 78 | with open(file_path, 'r', encoding='utf-8') as f: 79 | parsed_igc_file = Reader().read(f) 80 | 81 | trace_errors, trace = parsed_igc_file['fix_records'] 82 | 83 | self.assertEqual(len(trace_errors), 0) 84 | 85 | task, _, _ = get_info_from_comment_lines(parsed_igc_file) 86 | 87 | self.assertIsInstance(task, AAT) 88 | self.assertEqual(task.t_min, datetime.timedelta(hours=2, minutes=30)) 89 | 90 | expected_waypoints = [ 91 | ('AP3 Muellhalde', None), 92 | ('Loreley', 20000), 93 | ('Kusel', 40000), 94 | ('Loreley', 20000), 95 | ('ZP Anspach/Taunus', None), 96 | ] 97 | 98 | self.assertEqual(len(task.waypoints), len(expected_waypoints)) 99 | 100 | for i, waypoint in enumerate(task.waypoints): 101 | expected_name, expected_r_max = expected_waypoints[i] 102 | self.assertEqual(waypoint.name, expected_name) 103 | if 0 < i < len(expected_waypoints) - 1: 104 | self.assertEqual(waypoint.r_max, expected_r_max) 105 | 106 | competitor = Competitor(trace, 'CX', 'Discus2b', 1, 'Karsten Leucker') 107 | competitor.analyse(task, 'pysoar') 108 | 109 | self.assertEqual(competitor.trip.refined_start_time.hour, 13) 110 | self.assertEqual(competitor.trip.refined_start_time.minute, 22) 111 | seconds = competitor.trip.refined_start_time.second 112 | 113 | dist_diff = sum(competitor.trip.distances) - 283500 114 | self.assertLessEqual(abs(seconds-40), 1) 115 | self.assertEqual(len(competitor.trip.fixes), len(expected_waypoints)) 116 | self.assertLessEqual(abs(dist_diff), 1000) 117 | -------------------------------------------------------------------------------- /tests/task/test_race_task.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | from copy import deepcopy 4 | 5 | import datetime 6 | 7 | from opensoar.competition.soaringspot import get_waypoints 8 | from opensoar.task.race_task import RaceTask 9 | from tests.task.helper_functions import get_task 10 | 11 | 12 | class TestRaceTask(unittest.TestCase): 13 | 14 | cwd = os.path.dirname(__file__) 15 | igc_path = os.path.join(cwd, '..', 'igc_files', 
'race_task_completed.igc') 16 | race_task = get_task(igc_path) 17 | 18 | def test_number_of_legs(self): 19 | self.assertEqual(self.race_task.no_legs, 4) 20 | 21 | def test_distances(self): 22 | distances = self.race_task.distances 23 | expected_distances = [25.15, 27.20, 43.65, 5.23] 24 | 25 | self.assertEqual(len(distances), len(expected_distances)) 26 | 27 | for distance, expected_distance in zip(distances, expected_distances): 28 | self.assertAlmostEqual(distance/1000, expected_distance, places=2) 29 | 30 | def test_total_distance(self): 31 | self.assertAlmostEqual(self.race_task.total_distance / 1000, 101.24, places=2) 32 | 33 | def test_equal_tasks(self): 34 | race_task2 = get_task(self.igc_path) 35 | self.assertEqual(self.race_task, race_task2) 36 | 37 | def test_not_equal_tasks(self): 38 | waypoints = self.race_task.waypoints 39 | 40 | # test_unequal number_waypoints 41 | waypoints2 = deepcopy(waypoints) 42 | del waypoints2[2] 43 | race_task2 = RaceTask(waypoints2) 44 | self.assertNotEqual(self.race_task, race_task2) 45 | 46 | # test different waypoint 47 | waypoints3 = deepcopy(waypoints) 48 | waypoints3[2].r_max = 1000 49 | race_task2 = RaceTask(waypoints3) 50 | self.assertNotEqual(self.race_task, race_task2) 51 | 52 | # test different start_time 53 | race_task2 = RaceTask(waypoints, start_opening=datetime.time(0, 0, 0)) 54 | self.assertNotEqual(race_task2, self.race_task) 55 | 56 | # test different start buffer 57 | race_task2 = RaceTask(waypoints, start_time_buffer=5) 58 | self.assertNotEqual(self.race_task, race_task2) 59 | 60 | def test_race_reduced_legs(self): 61 | """ 62 | Race task with reduced legs, should produce correct distance 63 | 64 | https://www.soaringspot.com/en_gb/pribina-cup-2018-nitra-2018/results/15-meter/task-1-on-2018-04-02/daily 65 | """ 66 | 67 | lcu_lines = [ 68 | 'LCU::C020418195435301299000202', 69 | 'LCU::C0000000N00000000E', 70 | 'LCU::C4819183N01759550E158LEHOTA', 71 | 'LCU::C4907167N01819400E235PUCHOV', 72 | 'LCU::C4748117N01842983E271STUROVO', 73 | 'LCU::C4816767N01807967E001NITRA', 74 | 'LCU::C0000000N00000000E', 75 | ] 76 | 77 | lseeyou_lines = [ 78 | 'LSEEYOU OZ=-1,Style=2,SpeedStyle=0,R1=5000m,A1=180,Line=1', 79 | 'LSEEYOU OZ=0,Style=1,SpeedStyle=3,R1=500m,A1=180,Reduce=1', 80 | 'LSEEYOU OZ=1,Style=1,SpeedStyle=3,R1=500m,A1=180,Reduce=1', 81 | 'LSEEYOU OZ=2,Style=3,SpeedStyle=2,R1=3000m,A1=180,Reduce=1', 82 | ] 83 | 84 | waypoints = get_waypoints(lcu_lines, lseeyou_lines) 85 | race_task = RaceTask(waypoints) 86 | 87 | self.assertAlmostEqual(race_task.total_distance / 1000, 305.21, places=2) 88 | 89 | def test_race_moved_leg(self): 90 | """ 91 | Race task with moved waypoint, should not crash. somehow distances are not completely equal. 
92 | 93 | https://www.soaringspot.com/en_gb/35th-world-gliding-championships-hosin-2018/results/18-meter/task-1-on-2018-07-29/daily 94 | """ 95 | 96 | lcu_lines = [ 97 | 'LCU::C290718193533301299000203', 98 | 'LCU::C0000000N00000000E', 99 | 'LCU::C4908600N01432867E011SP07RADONICE', 100 | 'LCU::C4936150N01352917E477ROZMITAL', 101 | 'LCU::C4940950N01240067E442PRIMDA', 102 | 'LCU::C4915633N01308733E385NYRSKO', 103 | 'LCU::C4902383N01429650E001SP01HOSIN', 104 | 'LCU::C0000000N00000000E', 105 | ] 106 | 107 | lseeyou_lines = [ 108 | 'LSEEYOU OZ=-1,Style=2,SpeedStyle=0,R1=5000m,A1=180,Line=1', 109 | 'LSEEYOU OZ=0,Style=1,SpeedStyle=3,R1=500m,A1=180', 110 | 'LSEEYOU OZ=1,Style=1,SpeedStyle=3,R1=500m,A1=180', 111 | 'LSEEYOU OZ=2,Style=1,SpeedStyle=3,R1=500m,A1=180', 112 | 'LSEEYOU OZ=3,Style=3,SpeedStyle=2,R1=5000m,A1=180,Move=1', 113 | ] 114 | 115 | waypoints = get_waypoints(lcu_lines, lseeyou_lines) 116 | 117 | try: 118 | race_task = RaceTask(waypoints) 119 | except Exception: 120 | self.fail() 121 | -------------------------------------------------------------------------------- /tests/task/test_aat_trip.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import unittest 3 | 4 | import os 5 | 6 | from opensoar.task.trip import Trip 7 | from tests.task.helper_functions import get_trace, get_task 8 | 9 | 10 | class TestAATTrip(unittest.TestCase): 11 | 12 | # https://www.soaringspot.com/en_gb/cae-nls-nederlandse-kampioenschappen-zweefvliegen-2012/results/club/task-10-on-2012-05-26/daily 13 | # competitor 3, SP 14 | 15 | cwd = os.path.dirname(__file__) 16 | igc_path = os.path.join(cwd, '..', 'igc_files', 'aat_completed.igc') 17 | aat = get_task(igc_path) 18 | trace = get_trace(igc_path) 19 | trip = Trip(aat, trace) 20 | 21 | def test_total_distance(self): 22 | total_distance = sum(self.trip.distances) 23 | self.assertAlmostEqual(total_distance / 1e3, 199.42, places=2) 24 | 25 | def test_start_time(self): 26 | start_time = self.trip.fixes[0]['datetime'] 27 | expected_start_time = datetime.datetime(2012, 5, 26, 12, 22, 8, tzinfo=datetime.timezone.utc) 28 | self.assertEqual(expected_start_time, start_time) 29 | 30 | def test_finish_time(self): 31 | finish_time = self.trip.finish_time 32 | expected_finish_time = datetime.datetime(2012, 5, 26, 15, 52, 8, tzinfo=datetime.timezone.utc) 33 | self.assertEqual(expected_finish_time, finish_time) 34 | 35 | 36 | class TestAATTripOutlandingOutside(unittest.TestCase): 37 | """ 38 | Test AAT Trip in which an outlanding takes place outside the last rounded sector. 
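The Trip interface is the same for AAT flights; a small sketch of the leg helpers (trip is assumed to be a Trip built from an AAT task and trace as in the earlier sketch, the fix and leg index are illustrative):

    import datetime

    fix = {'datetime': datetime.datetime(2012, 5, 26, 13, 0, 0, tzinfo=datetime.timezone.utc)}

    if trip.fix_on_leg(fix, leg=1):
        print('fix lies on leg 1, between turnpoint fixes 1 and 2')

    print(trip.refined_start_time)  # may differ slightly from the time of the first trip fix
    print(trip.started_legs())      # includes the outlanding leg when the flight outlanded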
39 | """ 40 | 41 | # https://www.soaringspot.com/en_gb/cae-nls-nederlandse-kampioenschappen-zweefvliegen-2012/results/club/task-10-on-2012-05-26/daily 42 | # competitor 5, CEO 43 | cwd = os.path.dirname(__file__) 44 | igc_path = os.path.join(cwd, '..', 'igc_files', 'aat_outlanding_outside_sector.igc') 45 | aat = get_task(igc_path) 46 | trace = get_trace(igc_path) 47 | trip = Trip(aat, trace) 48 | 49 | def test_trip_fixes(self): 50 | 51 | # assert correct number of trip fixes 52 | self.assertEqual(len(self.trip.fixes), 3) 53 | 54 | # assert if opensoar finds same fixes as seeyou, based on time 55 | fix_times = [ 56 | # tuple with (opensoar time, SeeYou time) 57 | (self.trip.refined_start_time, datetime.datetime(2012, 5, 26, 12, 14, 21, tzinfo=datetime.timezone.utc)), 58 | (self.trip.fixes[1]['datetime'], datetime.datetime(2012, 5, 26, 12, 49, 22, tzinfo=datetime.timezone.utc)), 59 | (self.trip.fixes[2]['datetime'], datetime.datetime(2012, 5, 26, 13, 26, 14, tzinfo=datetime.timezone.utc)), 60 | (self.trip.outlanding_fix['datetime'], datetime.datetime(2012, 5, 26, 14, 8, 0, tzinfo=datetime.timezone.utc)), 61 | ] 62 | 63 | for opensoar_time, seeyou_time in fix_times: 64 | self.assertEqual(seeyou_time, opensoar_time) 65 | 66 | # todo: fix total distance calculation. why is this different from seeyou? 67 | # def test_total_distance(self): 68 | # total_distance = sum(self.trip.distances) 69 | # self.assertAlmostEqual(total_distance / 1e3, 98.54, places=2) 70 | 71 | 72 | class TestAATTripOutlandingInside(unittest.TestCase): 73 | """ 74 | Test AAT Trip in which an outlanding takes place inside the last rounded sector. 75 | """ 76 | 77 | # https://www.soaringspot.com/en_gb/cae-nls-nederlandse-kampioenschappen-zweefvliegen-2012/results/club/task-10-on-2012-05-26/daily 78 | # competitor 7, YES 79 | cwd = os.path.dirname(__file__) 80 | igc_path = os.path.join(cwd, '..', 'igc_files', 'aat_outlanding_inside_sector.igc') 81 | aat = get_task(igc_path) 82 | trace = get_trace(igc_path) 83 | trip = Trip(aat, trace) 84 | 85 | def test_trip_fixes(self): 86 | 87 | # assert correct number of trip fixes 88 | self.assertEqual(len(self.trip.fixes), 4) 89 | 90 | # assert if opensoar finds same fixes as seeyou, based on time 91 | fix_times = [ 92 | # tuple with (opensoar time, SeeYou time) 93 | (self.trip.refined_start_time, datetime.datetime(2012, 5, 26, 12, 24, 14, tzinfo=datetime.timezone.utc)), 94 | (self.trip.fixes[1]['datetime'], datetime.datetime(2012, 5, 26, 12, 57, 53, tzinfo=datetime.timezone.utc)), 95 | (self.trip.fixes[2]['datetime'], datetime.datetime(2012, 5, 26, 13, 42, 31, tzinfo=datetime.timezone.utc)), 96 | (self.trip.fixes[3]['datetime'], datetime.datetime(2012, 5, 26, 14, 4, 5, tzinfo=datetime.timezone.utc)), 97 | (self.trip.outlanding_fix['datetime'], datetime.datetime(2012, 5, 26, 14, 5, 49, tzinfo=datetime.timezone.utc)), 98 | ] 99 | 100 | for opensoar_time, seeyou_time in fix_times: 101 | self.assertEqual(seeyou_time, opensoar_time) 102 | 103 | def test_total_distance(self): 104 | total_distance = sum(self.trip.distances) 105 | self.assertAlmostEqual(total_distance / 1e3, 86.07, places=2) 106 | 107 | # todo: add ENL testcase 108 | -------------------------------------------------------------------------------- /opensoar/utilities/geojson_serializers.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from geojson import Point, LineString, Polygon, Feature, FeatureCollection 4 | from opensoar.utilities.helper_functions import 
calculate_destination 5 | 6 | 7 | def circle_polygon(lat, lng, radius): 8 | from shapely.geometry import Point as ShapelyPoint 9 | from pyproj import Transformer 10 | from shapely.ops import transform 11 | 12 | local_azimuthal_projection = "+proj=aeqd +R=6371000 +units=m +lat_0={} +lon_0={}".format( 13 | lat, lng 14 | ) 15 | wgs84_to_aeqd = Transformer.from_proj('+proj=longlat +datum=WGS84 +no_defs', local_azimuthal_projection) 16 | aeqd_to_wgs84 = Transformer.from_proj(local_azimuthal_projection, '+proj=longlat +datum=WGS84 +no_defs') 17 | # Get polygon with lat lon coordinates 18 | point_transformed = ShapelyPoint(wgs84_to_aeqd.transform(lng, lat)) 19 | 20 | buffer = point_transformed.buffer(radius) 21 | circle = transform(aeqd_to_wgs84.transform, buffer) 22 | 23 | # todo: why nested list necessary? 24 | circle_coordinates = [[(lon, lat) for lon, lat in circle.exterior.coords]] 25 | polygon_feature = Feature(geometry=Polygon(circle_coordinates)) 26 | return polygon_feature 27 | 28 | 29 | def task_to_geojson_features(task) -> List[dict]: 30 | # TODO: have proper cutouts from sectors 31 | 32 | features = [] 33 | task_line_coords = [] 34 | for i, waypoint in enumerate(task.waypoints): 35 | lon = waypoint.longitude 36 | lat = waypoint.latitude 37 | task_line_coords.append((lon, lat)) 38 | 39 | if waypoint.is_line: 40 | angle1 = (waypoint.orientation_angle + 90) % 360 41 | angle2 = (waypoint.orientation_angle - 90 + 360) % 360 42 | waypoint_fix = {"lon": waypoint.longitude, "lat": waypoint.latitude} 43 | end_of_line1 = calculate_destination(waypoint_fix, waypoint.r_max, angle1) 44 | end_of_line2 = calculate_destination(waypoint_fix, waypoint.r_max, angle2) 45 | line1 = LineString([(waypoint_fix['lon'], waypoint_fix['lat']), (end_of_line1['lon'], end_of_line1['lat'])]) 46 | line2 = LineString([(waypoint_fix['lon'], waypoint_fix['lat']), (end_of_line2['lon'], end_of_line2['lat'])]) 47 | features.append(Feature(geometry=line1)) 48 | features.append(Feature(geometry=line2)) 49 | else: 50 | features.append(circle_polygon(lat, lon, waypoint.r_max)) 51 | if waypoint.r_min is not None: 52 | features.append(circle_polygon(lat, lon, waypoint.r_min)) 53 | 54 | if waypoint.angle_max == 180: 55 | # do not plot lines when full circle 56 | continue 57 | 58 | # + 180 because orientation is outward 59 | angle1 = (waypoint.orientation_angle + 180 - waypoint.angle_max + 360) % 360 60 | angle2 = (waypoint.orientation_angle + 180 + waypoint.angle_max) % 360 61 | waypoint_fix = {"lon": waypoint.longitude, "lat": waypoint.latitude} 62 | end_of_line1 = calculate_destination(waypoint_fix, waypoint.r_max, angle1) 63 | end_of_line2 = calculate_destination(waypoint_fix, waypoint.r_max, angle2) 64 | 65 | line1 = LineString([(waypoint_fix['lon'], waypoint_fix['lat']), (end_of_line1['lon'], end_of_line1['lat'])]) 66 | line2 = LineString([(waypoint_fix['lon'], waypoint_fix['lat']), (end_of_line2['lon'], end_of_line2['lat'])]) 67 | features.append(Feature(geometry=line1)) 68 | features.append(Feature(geometry=line2)) 69 | 70 | # task polyline 71 | features.append(Feature(geometry=LineString(task_line_coords))) 72 | return features 73 | 74 | 75 | def trip_to_geojson_features(trip, color: str) -> List[dict]: 76 | """ 77 | :param trip: 78 | :param color: hex string with leading hashtag (e.g. 
"#062123") 79 | :return: 80 | """ 81 | 82 | features = [] 83 | 84 | for sector in trip.sector_fixes: 85 | sector_fix_coords = [] 86 | for fix in sector: 87 | sector_fix_coords.append((fix['lon'], fix['lat'])) 88 | features.append(Feature(geometry=LineString(sector_fix_coords), properties={"stroke": color})) 89 | 90 | for fix in trip.fixes: 91 | features.append(Feature(geometry=Point([fix['lon'], fix['lat']]), properties={"marker-color": color})) 92 | 93 | outlanding_fix = trip.outlanding_fix 94 | if outlanding_fix is not None: 95 | features.append(Feature(geometry=Point([outlanding_fix['lon'], outlanding_fix['lat']]), properties={"marker-color": color})) 96 | 97 | return features 98 | 99 | 100 | def trace_to_geojson_features(trace) -> List[dict]: 101 | trace_line = Feature(geometry=LineString([(entry['lon'], entry['lat']) for entry in trace])) 102 | return [trace_line] 103 | 104 | 105 | def generate_geojson(features) -> dict: 106 | """This dict can be written to .json file""" 107 | return FeatureCollection(features) 108 | -------------------------------------------------------------------------------- /tests/utilities/test_helper_functions.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import datetime 4 | 5 | from opensoar.utilities.helper_functions import interpolate_fixes, dm2dd, dms2dd, \ 6 | both_none_or_same_float, both_none_or_same_str 7 | from opensoar.utilities.helper_functions import double_iterator 8 | from opensoar.utilities.helper_functions import triple_iterator 9 | from opensoar.utilities.helper_functions import calculate_distance_bearing 10 | from opensoar.utilities.helper_functions import range_with_bounds 11 | from opensoar.utilities.helper_functions import calculate_time_differences 12 | 13 | 14 | class TestHelperFunctions(unittest.TestCase): 15 | 16 | def test_double_iterator(self): 17 | lst = [1, 2, 3, 4] 18 | result = double_iterator(lst) 19 | self.assertListEqual(list(result), [(1, 2), (2, 3), (3, 4)]) 20 | 21 | def test_triple_iterator(self): 22 | lst = [1, 2, 3, 4, 5] 23 | result = triple_iterator(lst) 24 | self.assertListEqual(list(result), [(1, 2, 3), (2, 3, 4), (3, 4, 5)]) 25 | 26 | def test_calculate_distance_equal_fixes(self): 27 | 28 | fix1 = dict(lat=52.331783333333334, lon=6.249083333333333) 29 | fix2 = dict(lat=52.331783333333334, lon=6.249083333333333) 30 | 31 | self.assertEqual(calculate_distance_bearing(fix1, fix2)[0], 0) 32 | 33 | def test_range_with_bounds(self): 34 | self.assertListEqual(range_with_bounds(start=2, stop=4, interval=2), [2, 4]) 35 | self.assertListEqual(range_with_bounds(start=2, stop=6, interval=2), [2, 4, 6]) 36 | self.assertListEqual(range_with_bounds(start=2, stop=6, interval=3), [2, 5, 6]) 37 | self.assertListEqual(range_with_bounds(start=2, stop=6, interval=8), [2, 6]) 38 | 39 | def test_calculate_time_differences(self): 40 | 41 | time1 = datetime.datetime(2012, 5, 26, 12, 0, 50, tzinfo=datetime.timezone.utc) 42 | time2 = datetime.datetime(2012, 5, 26, 12, 0, 55, tzinfo=datetime.timezone.utc) 43 | time3 = datetime.datetime(2012, 5, 26, 12, 1, 2, tzinfo=datetime.timezone.utc) 44 | 45 | self.assertListEqual(calculate_time_differences(time1, time2, 2), [0, 2, 4, 5]) 46 | self.assertListEqual(calculate_time_differences(time2, time3, 2), [0, 2, 4, 6, 7]) 47 | 48 | def test_interpolate_fixes(self): 49 | fix1 = dict(datetime=datetime.datetime(2012, 5, 26, 12, 0, 10, tzinfo=datetime.timezone.utc), lat=50, lon=6) 50 | fix2 = dict(datetime=datetime.datetime(2012, 5, 26, 12, 
0, 14, tzinfo=datetime.timezone.utc), lat=58, lon=8) 51 | 52 | interpolated_fixes = interpolate_fixes(fix1, fix2) 53 | 54 | # check length 55 | self.assertEqual(len(interpolated_fixes), 5) 56 | 57 | times = [ 58 | datetime.datetime(2012, 5, 26, 12, 0, 10, tzinfo=datetime.timezone.utc), 59 | datetime.datetime(2012, 5, 26, 12, 0, 11, tzinfo=datetime.timezone.utc), 60 | datetime.datetime(2012, 5, 26, 12, 0, 12, tzinfo=datetime.timezone.utc), 61 | datetime.datetime(2012, 5, 26, 12, 0, 13, tzinfo=datetime.timezone.utc), 62 | datetime.datetime(2012, 5, 26, 12, 0, 14, tzinfo=datetime.timezone.utc) 63 | ] 64 | lats = [50, 52, 54, 56, 58] 65 | lons = [6, 6.5, 7.0, 7.5, 8.0] 66 | 67 | # check individual entries 68 | for time, lat, lon, interpolated_fix in zip(times, lats, lons, interpolated_fixes): 69 | self.assertEqual(interpolated_fix['datetime'], time) 70 | self.assertEqual(interpolated_fix['lat'], lat) 71 | self.assertEqual(interpolated_fix['lon'], lon) 72 | 73 | def test_dm2dd(self): 74 | """Test conversion between coordinate units""" 75 | dd = dm2dd(degrees=49.0, minutes=59.700, cardinal='N') 76 | self.assertAlmostEqual(dd, 49.9950, places=4) 77 | 78 | dd = dm2dd(degrees=9.0, minutes=34.900, cardinal='E') 79 | self.assertAlmostEqual(dd, 9.5817, places=4) 80 | 81 | def test_dms2dd(self): 82 | """Test conversion between coordinate units""" 83 | dd = dms2dd(degrees=49.0, minutes=59, seconds=42, cardinal='N') 84 | self.assertAlmostEqual(dd, 49.9950, places=4) 85 | 86 | dd = dms2dd(degrees=9.0, minutes=34, seconds=54, cardinal='E') 87 | self.assertAlmostEqual(dd, 9.5817, places=4) 88 | 89 | def test_both_none_or_same_float(self): 90 | 91 | test_cases = [ 92 | (None, None, True), 93 | (None, 0.3, False), 94 | (0.3, None, False), 95 | (0.3, 0.3, True), 96 | (0.3, 0.31, False), 97 | ] 98 | 99 | for var1, var2, expected_bool in test_cases: 100 | result = both_none_or_same_float(var1, var2) 101 | self.assertEqual(expected_bool, result) 102 | 103 | def test_both_none_or_same_str(self): 104 | 105 | test_cases = [ 106 | (None, None, True), 107 | (None, 'test', False), 108 | ('test', None, False), 109 | ('test', 'test', True), 110 | ('test2', 'test', False), 111 | ] 112 | 113 | for var1, var2, expected_bool in test_cases: 114 | result = both_none_or_same_str(var1, var2) 115 | self.assertEqual(expected_bool, result) 116 | -------------------------------------------------------------------------------- /opensoar/thermals/pysoar_thermal_detector.py: -------------------------------------------------------------------------------- 1 | from opensoar.utilities.helper_functions import triple_iterator, calculate_bearing_change, calculate_distance_bearing 2 | 3 | 4 | class PySoarThermalDetector: 5 | """ 6 | Detector taken from the PySoar project. 
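
    A minimal usage sketch (an assumption-based illustration, not taken from the project docs): the trace is expected to be a list of b-record dicts with at least 'datetime', 'lat' and 'lon' keys, e.g. as parsed from an IGC file:

        detector = PySoarThermalDetector()
        phases = detector.analyse(trace)
        thermals = [phase for phase in phases if not phase.is_cruise]

    analyse returns a list of Phase namedtuples (defined in opensoar.thermals.flight_phases); phase.is_cruise distinguishes cruise from thermal phases and phase.fixes holds the fixes belonging to that phase.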
7 | """ 8 | 9 | MINIMUM_BEARING_CHANGE_RATE = 1e-2 10 | CRUISE_THRESHOLD_BEARINGRATE = 4 # deg/s 11 | CRUISE_THRESHOLD_BEARINGTOT = 225 # deg 12 | THERMAL_THRESHOLD_DISTANCE = 1000 # m 13 | THERMAL_THRESHOLD_BEARINGRATE_AVG = 2 # deg/s 14 | THERMAL_THRESHOLD_BEARINGRATE = 4 # deg/s 15 | 16 | def __init__(self): 17 | pass 18 | 19 | def analyse(self, trace): 20 | 21 | # To prevent circular import with flight_phases 22 | from opensoar.thermals.flight_phases import Phase 23 | 24 | cruise = True 25 | possible_thermal_fixes = list() 26 | possible_cruise_fixes = list() 27 | sharp_thermal_entry_found = False 28 | turning_left = True 29 | total_bearing_change = 0 30 | 31 | # Start with first phase 32 | phases = [Phase(cruise, trace[0:2])] 33 | 34 | for fix_minus2, fix_minus1, fix in triple_iterator(trace): 35 | 36 | time_minus2 = fix_minus2['datetime'] 37 | time_minus1 = fix_minus1['datetime'] 38 | time = fix['datetime'] 39 | 40 | bearing_change = calculate_bearing_change(fix_minus2, fix_minus1, fix) 41 | delta_t = (0.5 * (time - time_minus1).total_seconds() + 42 | 0.5 * (time - time_minus2).total_seconds()) 43 | bearing_change_rate = bearing_change / delta_t 44 | 45 | if cruise: 46 | 47 | continuing_left = turning_left and bearing_change_rate < self.MINIMUM_BEARING_CHANGE_RATE 48 | continuing_right = not turning_left and bearing_change_rate > -self.MINIMUM_BEARING_CHANGE_RATE 49 | 50 | if continuing_left or continuing_right: 51 | 52 | total_bearing_change += bearing_change 53 | 54 | if len(possible_thermal_fixes) == 0: 55 | possible_thermal_fixes = [fix] 56 | else: 57 | if not sharp_thermal_entry_found and abs(bearing_change_rate) > self.CRUISE_THRESHOLD_BEARINGRATE: 58 | sharp_thermal_entry_found = True 59 | phases[-1].fixes.extend(possible_thermal_fixes) 60 | possible_thermal_fixes = [fix] 61 | else: 62 | possible_thermal_fixes.append(fix) 63 | 64 | else: # sign change 65 | total_bearing_change = bearing_change 66 | sharp_thermal_entry_found = False 67 | 68 | if len(possible_thermal_fixes) == 0: 69 | phases[-1].fixes.append(fix) 70 | else: 71 | phases[-1].fixes.extend([*possible_thermal_fixes, fix]) 72 | possible_thermal_fixes = list() 73 | 74 | turning_left = bearing_change_rate < 0 75 | 76 | if abs(total_bearing_change) > self.CRUISE_THRESHOLD_BEARINGTOT: 77 | cruise = False 78 | phases[-1].fixes.append(possible_thermal_fixes[0]) 79 | phases.append(Phase(cruise, possible_thermal_fixes)) 80 | 81 | possible_thermal_fixes = list() 82 | sharp_thermal_entry_found = False 83 | total_bearing_change = 0 84 | 85 | else: # thermal 86 | 87 | if abs(bearing_change_rate) > self.THERMAL_THRESHOLD_BEARINGRATE: 88 | if len(possible_cruise_fixes) != 0: 89 | phases[-1].fixes.extend([*possible_cruise_fixes, fix]) 90 | possible_cruise_fixes = list() 91 | else: 92 | phases[-1].fixes.append(fix) 93 | 94 | else: # possible cruise 95 | 96 | if len(possible_cruise_fixes) == 0: 97 | possible_cruise_fixes = [fix] 98 | total_bearing_change = bearing_change 99 | else: 100 | possible_cruise_fixes.append(fix) 101 | total_bearing_change += bearing_change 102 | 103 | delta_t = (time - possible_cruise_fixes[0]['datetime']).total_seconds() 104 | cruise_distance, _ = calculate_distance_bearing(possible_cruise_fixes[0], fix) 105 | temp_bearing_rate_avg = 0 if delta_t == 0 else total_bearing_change / delta_t 106 | 107 | if (cruise_distance > self.THERMAL_THRESHOLD_DISTANCE and 108 | abs(temp_bearing_rate_avg) < self.THERMAL_THRESHOLD_BEARINGRATE_AVG): 109 | 110 | cruise = True 111 | 
phases[-1].fixes.append(possible_cruise_fixes[0]) 112 | phases.append(Phase(cruise, possible_cruise_fixes)) 113 | possible_cruise_fixes = list() 114 | total_bearing_change = 0 115 | 116 | # add possible fixes at the end 117 | if cruise: 118 | if len(possible_thermal_fixes) != 0: 119 | phases[-1].fixes.extend(possible_thermal_fixes) 120 | else: 121 | if len(possible_cruise_fixes) != 0: 122 | phases[-1].fixes.extend(possible_cruise_fixes) 123 | 124 | return phases 125 | -------------------------------------------------------------------------------- /opensoar/task/waypoint.py: -------------------------------------------------------------------------------- 1 | from math import isclose 2 | 3 | from opensoar.utilities.helper_functions import both_none_or_same_float, both_none_or_same_str 4 | from opensoar.utilities.helper_functions import calculate_distance_bearing 5 | from opensoar.utilities.helper_functions import calculate_bearing_difference 6 | from opensoar.utilities.helper_functions import calculate_average_bearing 7 | 8 | 9 | class Waypoint(object): 10 | 11 | SEEYOU_SECTOR_MARGIN = 12 # SeeYou does not outland flights which come this close to the sector 12 | 13 | def __init__(self, name: str, latitude: float, longitude: float, r_min: float, angle_min: float, r_max: float, 14 | angle_max: float, is_line: bool, sector_orientation: str, 15 | distance_correction=None, orientation_angle=None): 16 | """ 17 | Waypoint is either the start point, one of the turn points or the finish point of a task. 18 | :param name: 19 | :param latitude: latitude in degrees 20 | :param longitude: in degrees 21 | :param r_min: in m or None 22 | :param angle_min: in degrees 23 | :param r_max: in m 24 | :param angle_max: in degrees 25 | :param is_line: boolean denoting whether waypoint is a line 26 | :param sector_orientation: valid values: 'fixed', 'symmetrical', 'next', 'previous', 'start' 27 | :param distance_correction: optional argument. valid values: 'displace_tp', 'shorten_legs' 28 | :param orientation_angle: optional argument. Should only be set when sector_orientation='fixed'. 
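
    Illustrative example (values taken from tests/task/test_waypoint.py): a turn point with a 50 km maximum radius, an angle_max of 20 degrees and a fixed orientation could be constructed as

        Waypoint('test_waypoint', latitude=51.7509, longitude=-0.981, r_min=None,
                 angle_min=180, r_max=50000, angle_max=20, is_line=False,
                 sector_orientation='fixed', distance_correction=None,
                 orientation_angle=190)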
29 | """
30 | 
31 | self.name = name
32 | 
33 | self.latitude = latitude
34 | self.longitude = longitude
35 | 
36 | self.r_min = r_min
37 | self.angle_min = angle_min
38 | self.r_max = r_max
39 | self.angle_max = angle_max
40 | self.orientation_angle = orientation_angle
41 | 
42 | self.is_line = is_line
43 | 
44 | if sector_orientation not in {'fixed', 'symmetrical', 'next', 'previous', 'start'}:
45 | raise ValueError('sector_orientation value not supported')
46 | 
47 | self.sector_orientation = sector_orientation
48 | self.distance_correction = distance_correction
49 | 
50 | def __eq__(self, other):
51 | 
52 | return (self.name == other.name and
53 | isclose(self.latitude, other.latitude) and
54 | isclose(self.longitude, other.longitude) and
55 | both_none_or_same_float(self.r_min, other.r_min) and
56 | both_none_or_same_float(self.angle_min, other.angle_min) and
57 | isclose(self.r_max, other.r_max) and
58 | isclose(self.angle_max, other.angle_max) and
59 | isclose(self.orientation_angle, other.orientation_angle) and
60 | self.is_line == other.is_line and
61 | both_none_or_same_str(self.sector_orientation, other.sector_orientation) and
62 | both_none_or_same_str(self.distance_correction, other.distance_correction))
63 | 
64 | def __repr__(self):
65 | return "<Waypoint lat=%s lon=%s>" % (self.latitude, self.longitude)
66 | 
67 | @property
68 | def fix(self):
69 | return dict(lat=self.latitude, lon=self.longitude)
70 | 
71 | def set_orientation_angle(self, angle_start=None, angle_previous=None, angle_next=None):
72 | # Fixed orientation is skipped as that has already been set
73 | 
74 | if self.sector_orientation == "symmetrical":
75 | self.orientation_angle = calculate_average_bearing(angle_previous, angle_next)
76 | elif self.sector_orientation == "next":
77 | self.orientation_angle = angle_next
78 | elif self.sector_orientation == "previous":
79 | self.orientation_angle = angle_previous
80 | elif self.sector_orientation == "start":
81 | self.orientation_angle = angle_start
82 | elif self.sector_orientation == "fixed":
83 | if self.orientation_angle is None:
84 | raise ValueError("Orientation angle should be set for fixed sector")
85 | 
86 | else:
87 | raise ValueError("Unknown sector orientation: %s " % self.sector_orientation)
88 | 
89 | def inside_sector(self, fix):
90 | 
91 | distance, bearing = calculate_distance_bearing(self.fix, fix)
92 | 
93 | angle_wrt_orientation = abs(calculate_bearing_difference(self.orientation_angle, bearing))
94 | 
95 | if self.is_line:
96 | raise ValueError('Calling inside_sector on a line')
97 | elif self.r_min is not None:
98 | inside_outer_sector = self.r_min - self.SEEYOU_SECTOR_MARGIN < distance < self.r_max + self.SEEYOU_SECTOR_MARGIN and (180 - angle_wrt_orientation) < self.angle_max
99 | inside_inner_sector = distance < self.r_min and (180 - angle_wrt_orientation) < self.angle_min
100 | return inside_outer_sector or inside_inner_sector
101 | else: # self.r_min is None
102 | return distance < (self.r_max + self.SEEYOU_SECTOR_MARGIN) and (180 - angle_wrt_orientation) < self.angle_max
103 | 
104 | def outside_sector(self, fix):
105 | return not self.inside_sector(fix)
106 | 
107 | def crossed_line(self, fix1, fix2):
108 | 
109 | distance1, _ = calculate_distance_bearing(fix1, self.fix)
110 | distance2, _ = calculate_distance_bearing(fix2, self.fix)
111 | 
112 | if not self.is_line:
113 | raise ValueError('Calling crossed_line on a sector!')
114 | else:
115 | if distance2 > self.r_max and distance1 > self.r_max:
116 | return False
117 | else: # either both within circle or only one, leading to
small amount of false positives 118 | _, bearing1 = calculate_distance_bearing(self.fix, fix1) 119 | _, bearing2 = calculate_distance_bearing(self.fix, fix2) 120 | 121 | angle_wrt_orientation1 = abs(calculate_bearing_difference(self.orientation_angle, bearing1)) 122 | angle_wrt_orientation2 = abs(calculate_bearing_difference(self.orientation_angle, bearing2)) 123 | 124 | if self.sector_orientation == "next": # start line 125 | return angle_wrt_orientation2 < 90 < angle_wrt_orientation1 126 | elif self.sector_orientation == "previous": # finish line 127 | return angle_wrt_orientation1 < 90 < angle_wrt_orientation2 128 | else: 129 | raise ValueError("A line with this orientation is not implemented!") 130 | -------------------------------------------------------------------------------- /opensoar/competition/daily_results_page.py: -------------------------------------------------------------------------------- 1 | import requests 2 | from bs4 import BeautifulSoup 3 | import os 4 | import requests 5 | import operator 6 | import os 7 | from abc import ABC, abstractmethod 8 | from typing import List 9 | 10 | from bs4 import BeautifulSoup 11 | 12 | from opensoar.competition.competition_day import CompetitionDay 13 | from opensoar.task.task import Task 14 | from opensoar.utilities.retry_utils import web_request_retry 15 | 16 | 17 | class DailyResultsPage(ABC): 18 | """ 19 | Abstract Base Class for daily result pages. Specific implementation example: soaringspot. 20 | """ 21 | 22 | def __init__(self, url): 23 | if url.startswith('http://') or url.startswith('https://'): 24 | self.url = url 25 | else: 26 | self.url = 'http://{}'.format(url) 27 | 28 | self._igc_directory = None # to be set in subclass 29 | self._html_soup = None # to be set when the page is evaluated 30 | 31 | @property 32 | def igc_directory(self): 33 | return self._igc_directory 34 | 35 | def set_igc_directory(self, target_directory, competition_name, plane_class, date): 36 | self._igc_directory = os.path.join(target_directory, competition_name, plane_class, 37 | date.strftime('%d-%m-%Y')) 38 | 39 | @web_request_retry(max_attempts=3) 40 | def _get_html_soup(self) -> BeautifulSoup: 41 | """ 42 | Get a BeautifulSoup object from the URL. 43 | 44 | Returns: 45 | BeautifulSoup object containing the parsed HTML 46 | """ 47 | 48 | if not self._html_soup: 49 | try: 50 | # Use requests with verify=True for secure connections 51 | # In production, you should ALWAYS verify SSL certificates 52 | response = requests.get(self.url, timeout=30) 53 | response.raise_for_status() # Raise exception for 4XX/5XX status codes 54 | 55 | # Parse the HTML with BeautifulSoup 56 | self._html_soup = BeautifulSoup(response.text, "html.parser") 57 | 58 | except requests.exceptions.SSLError: 59 | # Only if absolutely necessary, you can disable verification 60 | # But this should be a last resort and logged as a security concern 61 | print("SSL verification failed. 
Attempting with verification disabled.") 62 | response = requests.get(self.url, verify=False, timeout=30) 63 | response.raise_for_status() 64 | self._html_soup = BeautifulSoup(response.text, "html.parser") 65 | 66 | except requests.exceptions.RequestException as e: 67 | print(f"Error fetching URL {self.url}: {e}") 68 | raise 69 | 70 | return self._html_soup 71 | 72 | def igc_file_name(self, competition_id: str) -> str: 73 | """ 74 | Create igc file name from competition_id 75 | 76 | :param competition_id: 77 | :return: 78 | """ 79 | return '{}.igc'.format(competition_id) 80 | 81 | def igc_file_path(self, competition_id: str) -> str: 82 | """ 83 | Construct file_path from competition_id 84 | 85 | :param competition_id: 86 | :return: 87 | """ 88 | file_name = self.igc_file_name(competition_id) 89 | return os.path.join(self._igc_directory, file_name) 90 | 91 | @web_request_retry(max_attempts=3) 92 | def download_flight(self, igc_url: str, competition_id: str) -> str: 93 | """ 94 | Download flight and return file_path 95 | 96 | Args: 97 | igc_url: URL to download the IGC file 98 | competition_id: Competition ID used to name the file 99 | 100 | Returns: 101 | str: Path to the downloaded file 102 | """ 103 | # Make directory if necessary 104 | if not os.path.exists(self._igc_directory): 105 | os.makedirs(self._igc_directory) 106 | 107 | file_path = self.igc_file_path(competition_id) 108 | 109 | if not os.path.exists(file_path): 110 | response = requests.get(igc_url, timeout=30) 111 | response.raise_for_status() # Raise an exception for HTTP errors 112 | 113 | # Write the content to the file 114 | with open(file_path, 'wb') as f: 115 | f.write(response.content) 116 | 117 | # Verify file was created 118 | if not os.path.exists(file_path): 119 | raise FileNotFoundError(f"File was not created at {file_path}") 120 | 121 | if not os.path.exists(file_path): 122 | raise RuntimeError(f"Failed to download file from {igc_url}") 123 | 124 | return file_path 125 | 126 | @abstractmethod 127 | def generate_competition_day(self, target_directory: str, download_progress=None, start_time_buffer: int = 0, 128 | include_hc_competitors: bool=True) -> CompetitionDay: 129 | """ 130 | Construct a CompetitionDay. Information is pulled from the overview table and 131 | from the igc files, which are automatically downloaded. 132 | 133 | :param include_hc_competitors: optional argument for including contestants which fly 'Hors Concours', 134 | which means that they don't officially participate in the competition. 135 | :param target_directory: directory in which the igc files are saved 136 | :param download_progress: optional progress function. Should have the following signature: 137 | func(downloads, total_number_of_flights) 138 | :param start_time_buffer: optional relaxation on the start time in seconds. E.g. start_time_buffer = 10 means 139 | that a contestant can cross the start line 10 seconds before the official opening time 140 | :return: 141 | """ 142 | 143 | @staticmethod 144 | def _select_task(tasks: List[Task]) -> Task: 145 | """There might be different and duplicate tasks. 
The task selected is most frequently present in the list.""" 146 | 147 | unique_tasks = list() 148 | number_of_times_present = list() 149 | for task in tasks: 150 | if task in unique_tasks: 151 | index = unique_tasks.index(task) 152 | number_of_times_present[index] += 1 153 | else: 154 | unique_tasks.append(task) 155 | number_of_times_present.append(1) 156 | 157 | max_index, max_value = max(enumerate(number_of_times_present), key=operator.itemgetter(1)) 158 | return tasks[max_index] 159 | -------------------------------------------------------------------------------- /opensoar/task/task.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from typing import List 3 | 4 | from opensoar.task.waypoint import Waypoint 5 | from opensoar.utilities.helper_functions import calculate_distance_bearing, calculate_bearing_difference, \ 6 | interpolate_fixes, double_iterator 7 | 8 | 9 | class Task: 10 | """ 11 | Base Class for specific task implementations. 12 | """ 13 | 14 | ENL_VALUE_THRESHOLD = 500 15 | ENL_TIME_THRESHOLD = 30 16 | 17 | def __init__(self, waypoints: List[Waypoint], timezone: int, start_opening: datetime.datetime, 18 | start_time_buffer: int, multistart: bool): 19 | """ 20 | :param waypoints: 21 | :param timezone: time difference wrt UTC in hours 22 | :param start_opening: in UTC 23 | :param start_time_buffer: in seconds 24 | :param multistart: flag whether multistart takes place 25 | """ 26 | 27 | self._waypoints = waypoints 28 | self.timezone = timezone 29 | self.start_opening = start_opening 30 | self.start_time_buffer = start_time_buffer 31 | self.multistart = multistart 32 | 33 | self.set_orientation_angles(self.waypoints) 34 | 35 | def __eq__(self, other): 36 | same_number_waypoints = len(self.waypoints) == len(other.waypoints) 37 | 38 | if not same_number_waypoints: 39 | return False 40 | else: 41 | for waypoint, other_waypoint in zip(self.waypoints, other.waypoints): 42 | if waypoint != other_waypoint: 43 | return False 44 | 45 | return (self.start_opening == other.start_opening and 46 | self.start_time_buffer == other.start_time_buffer) 47 | 48 | @property 49 | def waypoints(self): 50 | # waypoints may not be altered because subclasses perform tasks to calculate distances based on waypoints. 
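# Typical read-only usage goes through the start and finish properties below; if a different set of waypoints is needed, construct a new Task instead of mutating this list.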
51 | return self._waypoints 52 | 53 | @property 54 | def no_tps(self): 55 | return len(self.waypoints) - 2 56 | 57 | @property 58 | def no_legs(self): 59 | return self.no_tps + 1 60 | 61 | @property 62 | def start(self): 63 | return self.waypoints[0] 64 | 65 | @property 66 | def finish(self): 67 | return self.waypoints[-1] 68 | 69 | @staticmethod 70 | def set_orientation_angles(waypoints): 71 | # sector orientations and angles 72 | for index in range(len(waypoints)): 73 | 74 | if index == 0: # necessary for index out of bounds 75 | _, angle = calculate_distance_bearing(waypoints[index].fix, waypoints[index + 1].fix) 76 | waypoints[index].set_orientation_angle(angle_next=angle) 77 | elif index == len(waypoints) - 1: # necessary for index out of bounds 78 | _, angle = calculate_distance_bearing(waypoints[index].fix, waypoints[index - 1].fix) 79 | waypoints[index].set_orientation_angle(angle_previous=angle) 80 | else: 81 | _, angle_start = calculate_distance_bearing(waypoints[index].fix, waypoints[0].fix) 82 | _, angle_previous = calculate_distance_bearing(waypoints[index].fix, waypoints[index - 1].fix) 83 | _, angle_next = calculate_distance_bearing(waypoints[index].fix, waypoints[index + 1].fix) 84 | waypoints[index].set_orientation_angle(angle_start=angle_start, 85 | angle_previous=angle_previous, 86 | angle_next=angle_next) 87 | 88 | @staticmethod 89 | def distance_shortened_leg(distance, current, currentP1, shortened_point): 90 | if shortened_point == "current": 91 | distance -= current.r_max if current.r_max is not None else current.r_min 92 | return distance 93 | elif shortened_point == "end": 94 | distance -= currentP1.r_max if currentP1.r_max is not None else currentP1.r_min 95 | return distance 96 | else: 97 | raise ValueError("Shortened point is not recognized: {}".format(shortened_point)) 98 | 99 | @staticmethod 100 | def distance_moved_turnpoint(distance, begin, end, moved_point, move_direction='reduce'): 101 | from math import sqrt, cos, pi, acos 102 | 103 | if moved_point == "begin": 104 | moved = begin 105 | other = end 106 | angle_reduction = 0 107 | elif moved_point == "end": 108 | moved = end 109 | other = begin 110 | angle_reduction = 0 111 | elif moved_point == "both_end": 112 | moved = end 113 | other = begin 114 | original_distance, _ = calculate_distance_bearing(begin.fix, end.fix) 115 | 116 | distance_moved_current = begin.r_max if begin.angle_max == 180 else begin.r_min 117 | angle_reduction = abs(acos((distance_moved_current ** 2 - distance ** 2 - original_distance ** 2) / (-2 * distance * original_distance))) * 180 / pi 118 | else: 119 | raise ValueError("Displaced point is not recognized: %s" % moved_point) 120 | 121 | displacement_dist = moved.r_max if moved.angle_max == 180 else moved.r_min 122 | bearing1 = moved.orientation_angle 123 | _, bearing2 = calculate_distance_bearing(other.fix, moved.fix, final_bearing=True) 124 | 125 | if move_direction == 'increase': 126 | angle = 180 - abs(calculate_bearing_difference(bearing1, bearing2)) - angle_reduction 127 | else: 128 | angle = abs(calculate_bearing_difference(bearing1, bearing2)) - angle_reduction 129 | distance = sqrt(distance**2 + displacement_dist**2 - 2 * distance * displacement_dist * cos(angle * pi / 180)) 130 | 131 | return distance 132 | 133 | def started(self, fix1, fix2): 134 | start = self.waypoints[0] 135 | if start.is_line: 136 | return start.crossed_line(fix1, fix2) 137 | else: 138 | return start.inside_sector(fix1) and start.outside_sector(fix2) 139 | 140 | def finished(self, fix1, fix2): 141 | 
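# Mirror image of started() above: a finish line counts as finished when crossed, while a finish sector or cylinder counts when moving from outside the sector (fix1) to inside it (fix2).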
finish = self.waypoints[-1] 142 | if finish.is_line: 143 | return finish.crossed_line(fix1, fix2) 144 | else: 145 | return finish.outside_sector(fix1) and finish.inside_sector(fix2) 146 | 147 | def determine_refined_start(self, trace, fixes): 148 | start_i = trace.index(fixes[0]) 149 | interpolated_fixes = interpolate_fixes(trace[start_i], trace[start_i+1]) 150 | 151 | for fix, next_fix in double_iterator(interpolated_fixes): 152 | if self.started(fix, next_fix): 153 | return fix['datetime'] 154 | 155 | raise ValueError('Start should have been determined') 156 | 157 | def enl_value_exceeded(self, fix) -> bool: 158 | """ 159 | Check whether ENL value is exceeded. 160 | :param fix: 161 | :return: returns False when not exceeded or when ENL information is not present in fix 162 | """ 163 | return 'ENL' in fix and fix['ENL'] > self.ENL_VALUE_THRESHOLD 164 | 165 | def enl_time_exceeded(self, enl_time): 166 | return enl_time >= self.ENL_TIME_THRESHOLD 167 | -------------------------------------------------------------------------------- /tests/task/test_waypoint.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from copy import deepcopy 3 | 4 | from opensoar.task.waypoint import Waypoint 5 | from opensoar.utilities.helper_functions import calculate_destination 6 | 7 | 8 | class TestWaypoint(unittest.TestCase): 9 | 10 | def test_waypoint_rmax(self): 11 | waypoint = Waypoint('test_waypoint', latitude=51.7509, longitude=-0.981, r_min=None, angle_min=180, r_max=50000, 12 | angle_max=20, is_line=False, sector_orientation='fixed', distance_correction=None, 13 | orientation_angle=190) 14 | fix_inside = {'lat': 52, 'lon': -0.981} # within maximum distance and within the angle 15 | self.assertTrue(waypoint.inside_sector(fix_inside)) 16 | 17 | fix_outside = {'lat': 51.5, 'lon': -1.0} # within maximum distance, but not within the angle 18 | self.assertFalse(waypoint.inside_sector(fix_outside)) 19 | 20 | def test_waypoint_rmin_and_rmax(self): 21 | # waypoint consists of 22 | # - inner half circle segment (angle_min=90) 23 | # - outer quarter circle segment (angle_min=45) 24 | 25 | wp = Waypoint('testwaypoint', latitude=52, longitude=1, r_min=5000, angle_min=90, r_max=10000, 26 | angle_max=45, is_line=False, sector_orientation='fixed', distance_correction=None, 27 | orientation_angle=180) 28 | 29 | point_in_inner_sector = calculate_destination(wp.fix, 3000, 80) 30 | point_in_outer_sector = calculate_destination(wp.fix, 7500, 35) 31 | point_outside_inner_sector = calculate_destination(wp.fix, 3000, 100) 32 | point_outside_outer_sector = calculate_destination(wp.fix, 7500, 55) 33 | point_outside_too_far = calculate_destination(wp.fix, 11000, 0) # distance too large 34 | 35 | self.assertTrue(wp.inside_sector(point_in_inner_sector)) 36 | self.assertTrue(wp.inside_sector(point_in_outer_sector)) 37 | self.assertFalse(wp.inside_sector(point_outside_inner_sector)) 38 | self.assertFalse(wp.inside_sector(point_outside_outer_sector)) 39 | self.assertFalse(wp.inside_sector(point_outside_too_far)) 40 | 41 | def test_equal_waypoints(self): 42 | waypoint1 = Waypoint('test_waypoint', latitude=51.7509, longitude=-0.981, r_min=None, angle_min=180, 43 | r_max=50000, angle_max=20, is_line=False, sector_orientation='fixed', 44 | distance_correction=None, orientation_angle=190) 45 | 46 | waypoint2 = Waypoint('test_waypoint', latitude=51.7509, longitude=-0.981, r_min=None, angle_min=180, 47 | r_max=50000, angle_max=20, is_line=False, sector_orientation='fixed', 48 | 
distance_correction=None, orientation_angle=190) 49 | 50 | self.assertTrue(waypoint1 == waypoint2) 51 | 52 | def test_unequal_waypoints(self): 53 | waypoint1 = Waypoint('test_waypoint', latitude=51.7509, longitude=-0.981, r_min=None, angle_min=180, 54 | r_max=50000, angle_max=20, is_line=False, sector_orientation='fixed', 55 | distance_correction=None, orientation_angle=190) 56 | 57 | waypoints = [deepcopy(waypoint1), 58 | deepcopy(waypoint1), 59 | deepcopy(waypoint1), 60 | deepcopy(waypoint1), 61 | deepcopy(waypoint1), 62 | deepcopy(waypoint1), 63 | deepcopy(waypoint1), 64 | deepcopy(waypoint1), 65 | deepcopy(waypoint1), 66 | deepcopy(waypoint1)] 67 | 68 | waypoints[0].latitude = 51 69 | waypoints[1].longitude = 0 70 | waypoints[2].r_min = 0.5 71 | waypoints[3].angle_min = None 72 | waypoints[4].r_max = 51000 73 | waypoints[5].angle_max = 10 74 | waypoints[6].is_line = True 75 | waypoints[7].sector_orientation = None 76 | waypoints[8].distance_correction = 'displace_tp' 77 | waypoints[9].orientation_angle = 180 78 | 79 | for waypoint in waypoints: 80 | self.assertFalse(waypoint == waypoint1) 81 | 82 | def test_crossed_start_line(self): 83 | """ 84 | Test whether points in correct order trigger line crossing 85 | Start line is W-E oriented, with a point north first and then a point south should be a start 86 | """ 87 | # start line is pointing sout 88 | 89 | start_line = Waypoint('testwaypoint', latitude=52, longitude=1, r_min=None, angle_min=None, r_max=1000, 90 | angle_max=45, is_line=True, sector_orientation='next', distance_correction=None, 91 | orientation_angle=180) 92 | 93 | # test direction of crossing 94 | point_north = calculate_destination(start_line.fix, 1000, 0) 95 | point_south = calculate_destination(start_line.fix, 1000, 180) 96 | self.assertTrue(start_line.crossed_line(point_north, point_south)) 97 | self.assertFalse(start_line.crossed_line(point_south, point_north)) 98 | 99 | # test within radius of line 100 | point_north_close = calculate_destination(start_line.fix, 500, 45) 101 | point_south_close = calculate_destination(start_line.fix, 500, 135) 102 | self.assertTrue(start_line.crossed_line(point_north_close, point_south_close)) 103 | 104 | # test outside radius of line 105 | point_north_far = calculate_destination(start_line.fix, 2000, 45) 106 | point_south_far = calculate_destination(start_line.fix, 2000, 135) 107 | self.assertFalse(start_line.crossed_line(point_north_far, point_south_far)) 108 | 109 | def test_crossed_finish_line(self): 110 | """ 111 | Test whether points in correct order trigger line crossing 112 | Finish line is W-E oriented, with a point north first and then a point south should be a finish 113 | """ 114 | # start line is pointing sout 115 | 116 | finish_line = Waypoint('testwaypoint', latitude=52, longitude=1, r_min=None, angle_min=None, r_max=1000, 117 | angle_max=45, is_line=True, sector_orientation='previous', distance_correction=None, 118 | orientation_angle=0) 119 | 120 | # test direction of crossing 121 | point_north = calculate_destination(finish_line.fix, 1000, 0) 122 | point_south = calculate_destination(finish_line.fix, 1000, 180) 123 | self.assertTrue(finish_line.crossed_line(point_north, point_south)) 124 | self.assertFalse(finish_line.crossed_line(point_south, point_north)) 125 | 126 | # test within radius of line 127 | point_north_close = calculate_destination(finish_line.fix, 500, 45) 128 | point_south_close = calculate_destination(finish_line.fix, 500, 135) 129 | self.assertTrue(finish_line.crossed_line(point_north_close, 
point_south_close)) 130 | 131 | # test outside radius of line 132 | point_north_far = calculate_destination(finish_line.fix, 2000, 45) 133 | point_south_far = calculate_destination(finish_line.fix, 2000, 135) 134 | self.assertFalse(finish_line.crossed_line(point_north_far, point_south_far)) 135 | -------------------------------------------------------------------------------- /opensoar/utilities/helper_functions.py: -------------------------------------------------------------------------------- 1 | from copy import copy 2 | from math import isclose, pi, sin, cos, atan2 3 | 4 | import datetime 5 | from typing import List 6 | 7 | from pyproj import Geod 8 | 9 | g = Geod(ellps='WGS84') 10 | 11 | 12 | def double_iterator(lst): 13 | """Create iterator with two values. E.g.: current, plus1 in a for loop""" 14 | a = iter(lst) 15 | b = copy(a) 16 | 17 | next(b, None) 18 | return zip(a, b) 19 | 20 | 21 | def triple_iterator(lst): 22 | """Create iterator with three values. E.g.: current, plus1, plus2 in a for loop""" 23 | 24 | a = iter(lst) 25 | b = copy(a) 26 | c = copy(a) 27 | 28 | next(b, None) 29 | next(c, None) 30 | next(c, None) 31 | return zip(a, b, c) 32 | 33 | 34 | def calculate_distance_bearing(fix1, fix2, final_bearing=False): 35 | """ 36 | Calculate bearing between fix1 and fix. By default the bearing is taking tangent to the great circle at fix1. 37 | :param final_bearing: switch to True results in taking the tangent at fix2. 38 | :param fix1: b-record from IGC file (dict with keys 'lat' and 'lon') 39 | :param fix2: b-record from IGC file (dict with keys 'lat' and 'lon') 40 | :return: distance in meters, bearing in degrees 41 | """ 42 | fw_bearing, bw_bearing, dist = g.inv(fix1['lon'], fix1['lat'], fix2['lon'], fix2['lat']) 43 | if fw_bearing < 0: 44 | fw_bearing += 360 45 | bw_bearing += 180 46 | 47 | if not final_bearing: 48 | return dist, fw_bearing 49 | else: 50 | return dist, bw_bearing 51 | 52 | 53 | def calculate_bearing_difference(bearing1, bearing2): 54 | """ 55 | Calculate smallest difference from bearing 1 -> bearing2. 56 | :param bearing1: start bearing in degrees (0-360) 57 | :param bearing2: end bearing in degrees (0-360) 58 | :return: angle between -180 and +180 degrees. 59 | """ 60 | # always return difference between -180 and +180 degrees 61 | difference = bearing2 - bearing1 62 | if -180 < difference < 180: 63 | return difference 64 | elif difference <= -180: 65 | return difference + 360 66 | elif difference >= 180: 67 | return difference - 360 68 | 69 | 70 | def calculate_bearing_change(fix_minus2, fix_minus1, fix): 71 | """ 72 | Calculate bearing change between three fixes. 73 | :param fix_minus2: b-record from IGC file (dict with keys 'lat' and 'lon') 74 | :param fix_minus1: b-record from IGC file (dict with keys 'lat' and 'lon') 75 | :param fix: fix1: b-record from IGC file (dict with keys 'lat' and 'lon') 76 | :return: bearing change in degrees between -180 and +180 degrees. 77 | Return 0 when two of the of the fixes are the same. 
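
    Illustrative example (approximate values): three fixes heading first due east and then due north give a bearing change of roughly -90 degrees (a left turn):

        fix_a = dict(lat=52.00, lon=6.00)
        fix_b = dict(lat=52.00, lon=6.01)  # roughly due east of fix_a
        fix_c = dict(lat=52.01, lon=6.01)  # roughly due north of fix_b
        calculate_bearing_change(fix_a, fix_b, fix_c)  # approximately -90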
78 | """ 79 | 80 | _, bearing1 = calculate_distance_bearing(fix_minus2, fix_minus1) 81 | _, bearing2 = calculate_distance_bearing(fix_minus1, fix) 82 | 83 | return calculate_bearing_difference(bearing1, bearing2) 84 | 85 | 86 | def calculate_average_bearing(bearing1, bearing2): 87 | """ 88 | Calculate the average bearing 89 | :param bearing1: bearing in degrees 90 | :param bearing2: bearing in degrees 91 | :return: average bearing in degrees 92 | """ 93 | sin_a = sin(bearing1 * pi / 180) 94 | sin_b = sin(bearing2 * pi / 180) 95 | cos_a = cos(bearing1 * pi / 180) 96 | cos_b = cos(bearing2 * pi / 180) 97 | 98 | avg_bearing = atan2(sin_a + sin_b, cos_a + cos_b) * 180 / pi 99 | return (avg_bearing + 360) % 360 100 | 101 | 102 | def height_difference_fixes(fix1, fix2, gps_altitude=True): 103 | if gps_altitude: 104 | return fix2['gps_alt'] - fix1['gps_alt'] 105 | else: 106 | return fix2['pressure_alt'] - fix1['pressure_alt'] 107 | 108 | 109 | def altitude_gain_and_loss(fixes: List[dict], gps_altitude=True): 110 | if gps_altitude: 111 | altitude_key = 'gps_alt' 112 | else: 113 | altitude_key = 'pressure_alt' 114 | 115 | gain, loss = 0, 0 116 | for fix, next_fix in double_iterator(fixes): 117 | delta_h = next_fix[altitude_key] - fix[altitude_key] 118 | 119 | if delta_h >= 0: 120 | gain += delta_h 121 | else: 122 | loss += (-delta_h) 123 | 124 | return gain, loss 125 | 126 | 127 | def total_distance_travelled(fixes: List[dict]): 128 | """Calculates the total distance, summing over the inter fix distances""" 129 | distance = 0 130 | for fix, next_fix in double_iterator(fixes): 131 | inter_fix_dist, _ = calculate_distance_bearing(fix, next_fix) 132 | distance += inter_fix_dist 133 | 134 | return distance 135 | 136 | 137 | def range_with_bounds(start: int, stop: int, interval: int) -> List[int]: 138 | """Return list""" 139 | result = [int(val) for val in range(start, stop, interval)] 140 | if not isclose(result[-1], stop): 141 | result.append(stop) 142 | return result 143 | 144 | 145 | def calculate_time_differences(time1, time2, interval): 146 | differences = range_with_bounds(0, int((time2 - time1).total_seconds()), interval) 147 | return differences 148 | 149 | 150 | def interpolate_fixes(fix1, fix2, interval=1): 151 | """ 152 | Create list of fixes between fix1 and fix2. Split is defined at time interval. 153 | Only time, latitude and longitude are interpolated. 
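
    For example (mirroring tests/utilities/test_helper_functions.py): two fixes 4 seconds apart with latitude going from 50 to 58 and longitude from 6 to 8 yield five fixes at one second intervals, with latitudes 50, 52, 54, 56, 58 and longitudes 6, 6.5, 7.0, 7.5, 8.0.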
154 | :param fix1: b-record from IGC file (dict with keys 'lat' and 'lon')
155 | :param fix2: b-record from IGC file (dict with keys 'lat' and 'lon')
156 | :param interval: interval between fixes in seconds
157 | :return: list of fixes between fix1 and fix2 with given interval
158 | """
159 | 
160 | time_differences = calculate_time_differences(fix1['datetime'], fix2['datetime'], interval)
161 | 
162 | fixes = list()
163 | for difference in time_differences:
164 | fraction = difference / time_differences[-1]
165 | 
166 | lat = fix1['lat'] + fraction * (fix2['lat'] - fix1['lat'])
167 | lon = fix1['lon'] + fraction * (fix2['lon'] - fix1['lon'])
168 | time = fix1['datetime'] + datetime.timedelta(seconds=difference)
169 | fixes.append(dict(datetime=time, lat=lat, lon=lon))
170 | 
171 | return fixes
172 | 
173 | 
174 | def calculate_destination(start_fix, distance, bearing):
175 | if bearing > 180:
176 | bearing -= 360
177 | endlon, endlat, _ = g.fwd(start_fix['lon'], start_fix['lat'], bearing, distance)
178 | return dict(lat=endlat, lon=endlon)
179 | 
180 | 
181 | def dms2dd(degrees, minutes, seconds, cardinal):
182 | """convert coordinate format with degrees, minutes and seconds to degrees"""
183 | dd = degrees + minutes / 60.0 + seconds / 3600.0
184 | if cardinal in ('S', 'W'):
185 | dd *= -1
186 | return dd
187 | 
188 | 
189 | def dm2dd(degrees, minutes, cardinal):
190 | """convert coordinate format with degrees and minutes to degrees"""
191 | dd = degrees + minutes / 60.0
192 | if cardinal in ('S', 'W'):
193 | dd *= -1
194 | return dd
195 | 
196 | 
197 | def both_none_or_same_float(var1, var2):
198 | """Determine whether both vars are the same. Either None or float"""
199 | if var1 is None:
200 | return var2 is None
201 | else:
202 | return var2 is not None and isclose(var1, var2)
203 | 
204 | 
205 | def both_none_or_same_str(var1, var2):
206 | """Determine whether both vars are the same. Either None or str"""
207 | if var1 is None:
208 | return var2 is None
209 | else:
210 | return var2 is not None and var1 == var2
211 | 
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 | 
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = build
9 | 
10 | # User-friendly check for sphinx-build
11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
13 | endif
14 | 
15 | # Internal variables.
16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/opensoar.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/opensoar.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 
96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/opensoar" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/opensoar" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /opensoar/competition/strepla.py: -------------------------------------------------------------------------------- 1 | """ 2 | Helper functions for Strepla competitions. 
3 | The files from Strepla always contain task information, which can be used for competition analysis. 4 | """ 5 | import datetime 6 | from typing import List, Tuple 7 | from urllib.error import URLError 8 | 9 | from aerofiles.igc import Reader 10 | 11 | from opensoar.competition.competition_day import CompetitionDay 12 | from opensoar.competition.competitor import Competitor 13 | from opensoar.competition.daily_results_page import DailyResultsPage 14 | from opensoar.task.aat import AAT 15 | from opensoar.task.race_task import RaceTask 16 | from opensoar.task.waypoint import Waypoint 17 | from opensoar.utilities.helper_functions import dm2dd 18 | 19 | 20 | def get_task_and_competitor_info(lscsd_lines: List[str], lscsr_lines: List[str], lscsa_lines: List[str]) -> Tuple[dict, dict]: 21 | task_info = { 22 | 'tp': [], 23 | 's_line_rad': None, 24 | 'tp_key': False, 25 | 'tp_key_dim': None, 26 | 'tp_cyl': False, 27 | 'tp_cyl_rad': None, 28 | 'f_line': False, 29 | 'f_line_rad': None, 30 | 'f_cyl': False, 31 | 'f_cyl_rad': None, 32 | 'tp_aat_rad': [], 33 | 'tp_aat_angle': [], 34 | 'aat': False, 35 | 'time_window': None, 36 | 'gate_open': None, 37 | } 38 | 39 | competitor_information = { 40 | 'pilot_name': None, 41 | 'competition_id': None, 42 | } 43 | 44 | for line in [*lscsd_lines, *lscsr_lines, *lscsa_lines]: 45 | if line.startswith('LSCSRSLINE'): 46 | task_info['s_line_rad'] = int((line.split(':'))[1]) / 2 47 | elif line.startswith('LSCSRFLINE'): 48 | task_info['f_line'] = True 49 | task_info['f_line_rad'] = int((line.split(':'))[1]) 50 | elif line.startswith('LSCSRTKEYHOLE'): 51 | task_info['tp_key'] = True 52 | task_info['tp_key_dim'] = [int(part) for part in line.split(':')[1::]] 53 | elif line.startswith('LSCSRTCYLINDER'): 54 | task_info['tp_cyl'] = True 55 | task_info['tp_cyl_rad'] = int((line.split(':'))[1]) 56 | elif line.startswith('LSCSRFCYLINDER'): 57 | task_info['f_cyl'] = True 58 | task_info['f_cyl_rad'] = int((line.split(':'))[1]) 59 | elif line.startswith('LSCSA0'): 60 | task_info['tp_aat_rad'].append(int((line.split(':'))[1])) 61 | if int(line.split(':')[3]) == 0: 62 | task_info['tp_aat_angle'].append(360) 63 | else: 64 | task_info['tp_aat_angle'].append(int(line.split(':')[3])) 65 | task_info['aat'] = True 66 | elif line.startswith('LSCSDTime window'): 67 | _, hours, minutes = line.split(':') 68 | task_info['time_window'] = datetime.timedelta(hours=int(hours), minutes=int(minutes)) 69 | elif line.startswith('LSCSDGate open'): 70 | _, hours, minutes = line.split(':') 71 | task_info['gate_open'] = datetime.time(int(hours), int(minutes)) 72 | elif line.startswith('LSCSDName'): 73 | competitor_information['pilot_name'] = line.split(':')[1] 74 | elif line.startswith('LSCSDCID'): 75 | competitor_information['competition_id'] = line.split(':')[1] 76 | 77 | return task_info, competitor_information 78 | 79 | 80 | def get_waypoint_name_lat_long(lscs_line_tp: str) -> Tuple[str, float, float]: 81 | """Parse LSCSCT line (LSCSCT:074 Main Lohr-M:N4959700:E00934900)""" 82 | _, name, lat, lon = lscs_line_tp.split(':') 83 | 84 | lat_cardinal, lat_degrees, lat_minutes = lat[0], float(lat[1:3]), float(lat[3:5]) + float(lat[5:8]) / 1000 85 | lon_cardinal, lon_degrees, lon_minutes = lon[0], float(lon[1:4]), float(lon[4:6]) + float(lon[6:9]) / 1000 86 | 87 | lat = dm2dd(lat_degrees, lat_minutes, lat_cardinal) 88 | lon = dm2dd(lon_degrees, lon_minutes, lon_cardinal) 89 | 90 | return name, lat, lon 91 | 92 | 93 | def get_waypoint(lscs_line_tp: str, task_info: dict, n: int, n_tp: int) -> Waypoint: 94 | 
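# The sector geometry below follows the Strepla record lines collected in task_info:
# - n == 0: start line, with its half-width taken from s_line_rad
# - 0 < n < n_tp - 1: AAT sector (tp_aat_rad/tp_aat_angle), DAEC keyhole (tp_key_dim) or cylinder (tp_cyl_rad)
# - n == n_tp - 1: finish cylinder (f_cyl_rad) or finish line (f_line_rad)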
95 | name, lat, lon = get_waypoint_name_lat_long(lscs_line_tp) 96 | 97 | r_min = None 98 | r_max = None 99 | angle_min = None 100 | angle_max = None 101 | orientation_angle = None 102 | line = False 103 | sector_orientation = None 104 | distance_correction = None 105 | 106 | if n == 0: 107 | line = True 108 | sector_orientation = "next" 109 | r_max = task_info['s_line_rad'] 110 | angle_max = 90 111 | elif 0 < n < (n_tp - 1): 112 | sector_orientation = "symmetrical" 113 | 114 | if task_info['aat']: 115 | angle_max = (task_info['tp_aat_angle'])[n - 1] / 2 116 | r_max = (task_info['tp_aat_rad'])[n - 1] 117 | sector_orientation = "previous" 118 | else: 119 | # turnpoint is DAEC keyhole 120 | if task_info['tp_key']: 121 | r_max = (task_info['tp_key_dim'])[1] 122 | angle_max = ((task_info['tp_key_dim'])[2]) / 2 123 | r_min = (task_info['tp_key_dim'])[0] 124 | angle_min = 180 125 | 126 | # turnpoint is cylinder 127 | elif task_info['tp_cyl']: 128 | r_max = task_info['tp_cyl_rad'] 129 | angle_max = 180 130 | 131 | elif n == n_tp - 1: 132 | sector_orientation = "previous" 133 | 134 | # finish is cylinder 135 | if task_info['f_cyl']: 136 | r_max = task_info['f_cyl_rad'] 137 | distance_correction = "shorten_legs" 138 | angle_max = 180 139 | 140 | # finish is line 141 | elif task_info['f_line']: 142 | r_max = task_info['f_line_rad'] 143 | angle_max = 90 144 | line = True 145 | 146 | return Waypoint(name, lat, lon, r_min, angle_min, r_max, angle_max, line, sector_orientation, distance_correction, 147 | orientation_angle) 148 | 149 | 150 | def get_waypoints(lscsc_lines: List[str], task_info: dict) -> List[Waypoint]: 151 | waypoints = list() 152 | for n, lscsc_line in enumerate(lscsc_lines): 153 | waypoint = get_waypoint(lscsc_line, task_info, n, len(lscsc_lines)) 154 | waypoints.append(waypoint) 155 | 156 | return waypoints 157 | 158 | 159 | def get_info_from_comment_lines(parsed_igc_file: dict, start_time_buffer: int=0): 160 | 161 | lscsd_lines = list() 162 | lscsr_lines = list() 163 | lscsc_lines = list() 164 | lscsa_lines = list() 165 | 166 | for comment_record in parsed_igc_file['comment_records'][1]: 167 | line = 'L{}{}'.format(comment_record['source'], comment_record['comment']) 168 | 169 | if line.startswith('LSCSD'): 170 | lscsd_lines.append(line) 171 | elif line.startswith('LSCSC'): 172 | lscsc_lines.append(line) 173 | elif line.startswith('LSCSR'): 174 | lscsr_lines.append(line) 175 | elif line.startswith('LSCSA'): 176 | lscsa_lines.append(line) 177 | 178 | task_information, competitor_information = get_task_and_competitor_info(lscsd_lines, lscsr_lines, lscsa_lines) 179 | waypoints = get_waypoints(lscsc_lines, task_information) 180 | 181 | aat = task_information['aat'] 182 | t_min = task_information.get('time_window', None) 183 | start_opening = task_information.get('gate_open', None) 184 | timezone = None # unclear where to get timezone information from strepla igc file 185 | 186 | if aat: 187 | task = AAT(waypoints, t_min, timezone, start_opening, start_time_buffer) 188 | else: 189 | task = RaceTask(waypoints, timezone, start_opening, start_time_buffer) 190 | 191 | return task, task_information, competitor_information 192 | -------------------------------------------------------------------------------- /tests/thermals/test_flight_phases.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | import datetime 4 | from copy import deepcopy 5 | 6 | from opensoar.task.trip import Trip 7 | from opensoar.thermals.flight_phases 
import FlightPhases 8 | from opensoar.utilities.helper_functions import double_iterator 9 | 10 | from tests.task.helper_functions import get_trace, get_task 11 | 12 | 13 | class TestFlightPhases(unittest.TestCase): 14 | 15 | pysoar_phase_start_times = [ 16 | datetime.datetime(2014, 6, 21, 12, 12, 52, tzinfo=datetime.timezone.utc), 17 | datetime.datetime(2014, 6, 21, 12, 20, 22, tzinfo=datetime.timezone.utc), 18 | datetime.datetime(2014, 6, 21, 12, 24, 14, tzinfo=datetime.timezone.utc), 19 | datetime.datetime(2014, 6, 21, 12, 29, 22, tzinfo=datetime.timezone.utc), 20 | datetime.datetime(2014, 6, 21, 12, 33, 6, tzinfo=datetime.timezone.utc), 21 | datetime.datetime(2014, 6, 21, 12, 34, 50, tzinfo=datetime.timezone.utc), 22 | datetime.datetime(2014, 6, 21, 12, 37, 42, tzinfo=datetime.timezone.utc), 23 | datetime.datetime(2014, 6, 21, 12, 47, 14, tzinfo=datetime.timezone.utc), 24 | datetime.datetime(2014, 6, 21, 12, 52, 42, tzinfo=datetime.timezone.utc), 25 | datetime.datetime(2014, 6, 21, 13, 1, 0, tzinfo=datetime.timezone.utc), 26 | datetime.datetime(2014, 6, 21, 13, 4, 52, tzinfo=datetime.timezone.utc), 27 | ] 28 | 29 | cwd = os.path.dirname(__file__) 30 | igc_path = os.path.join(cwd, '..', 'igc_files', 'race_task_completed.igc') 31 | 32 | trace = get_trace(igc_path) 33 | race_task = get_task(igc_path) 34 | trip = Trip(race_task, trace) 35 | phases = FlightPhases('pysoar', trace, trip) 36 | 37 | def test_all_phases(self): 38 | 39 | all_phases = self.phases.all_phases(leg='all') 40 | 41 | # Check if end fixes are the same as the start fixes of next phase 42 | for phase, next_phase in double_iterator(all_phases): 43 | self.assertEqual(phase[1][-1], next_phase[1][0]) 44 | 45 | # check same number of phases 46 | self.assertEqual(len(all_phases), len(self.pysoar_phase_start_times)) 47 | 48 | # check if start times of phases are within 2 seconds 49 | for phase, pysoar_phase_start_time in zip(all_phases, self.pysoar_phase_start_times): 50 | time_diff = (pysoar_phase_start_time - phase.fixes[0]['datetime']).total_seconds() 51 | self.assertLessEqual(abs(time_diff), 2) 52 | 53 | def test_thermals(self): 54 | 55 | thermals = self.phases.thermals(leg='all') 56 | 57 | # check if indeed only thermals 58 | for thermal in thermals: 59 | self.assertFalse(thermal.is_cruise) 60 | 61 | # check if correct phases are classified as thermals 62 | for thermal, pysoar_start_time in zip(thermals, self.pysoar_phase_start_times[1::2]): 63 | time_diff = (pysoar_start_time - thermal.fixes[0]['datetime']).total_seconds() 64 | self.assertLessEqual(abs(time_diff), 2) 65 | 66 | def test_cruises(self): 67 | 68 | cruises = self.phases.cruises(leg='all') 69 | 70 | # check if indeed only cruises 71 | for cruise in cruises: 72 | self.assertTrue(cruise.is_cruise) 73 | 74 | # check if correct phases are classified as cruises 75 | for cruise, pysoar_start_time in zip(cruises, self.pysoar_phase_start_times[0::2]): 76 | time_diff = (pysoar_start_time - cruise.fixes[0]['datetime']).total_seconds() 77 | self.assertLessEqual(abs(time_diff), 2) 78 | 79 | def test_thermals_on_leg(self): 80 | 81 | thermals_leg2 = self.phases.thermals(leg=1) 82 | 83 | # check indeed subset of all thermals 84 | self.assertTrue(len(thermals_leg2) < len(self.phases.thermals())) 85 | 86 | # check all thermals 87 | for thermal in thermals_leg2: 88 | self.assertFalse(thermal.is_cruise) 89 | 90 | leg_start_time = self.trip.fixes[1]['datetime'] 91 | leg_end_time = self.trip.fixes[2]['datetime'] 92 | 93 | # check start-time of first thermal 94 | start_time = 
thermals_leg2[0].fixes[0]['datetime'] 95 | diff = (leg_start_time - start_time).total_seconds() 96 | self.assertEqual(diff, 0) 97 | 98 | # check endtime of last thermal 99 | end_time = thermals_leg2[-1].fixes[-1]['datetime'] 100 | diff = (leg_end_time - end_time).total_seconds() 101 | self.assertEqual(diff, 0) 102 | 103 | def test_cruises_on_leg(self): 104 | 105 | cruises_leg2 = self.phases.cruises(leg=1) 106 | 107 | # check indeed subset of all thermals 108 | self.assertTrue(len(cruises_leg2) < len(self.phases.cruises())) 109 | 110 | # check all cruises 111 | for cruise in cruises_leg2: 112 | self.assertTrue(cruise.is_cruise) 113 | 114 | def test_phases_on_leg_spanning_complete_leg(self): 115 | """This test covers the case when the phase starts before the start of the leg and ends after 116 | the end of the leg.""" 117 | 118 | trace = [ 119 | {'datetime': datetime.datetime(2012, 5, 26, 11, 33, 26, tzinfo=datetime.timezone.utc), 'lat': 52.468183333333336, 'lon': 6.3402, 'validity': 'A', 120 | 'pressure_alt': -37, 'gps_alt': 47, 'FXA': 2, 'SIU': 1}, 121 | {'datetime': datetime.datetime(2012, 5, 26, 11, 33, 34, tzinfo=datetime.timezone.utc), 'lat': 52.468183333333336, 'lon': 6.3402, 'validity': 'A', 122 | 'pressure_alt': -37, 'gps_alt': 47, 'FXA': 2, 'SIU': 1}, 123 | {'datetime': datetime.datetime(2012, 5, 26, 11, 33, 42, tzinfo=datetime.timezone.utc), 'lat': 52.468183333333336, 'lon': 6.3402, 'validity': 'A', 124 | 'pressure_alt': -37, 'gps_alt': 47, 'FXA': 2, 'SIU': 1}, 125 | {'datetime': datetime.datetime(2012, 5, 26, 11, 33, 50, tzinfo=datetime.timezone.utc), 'lat': 52.468183333333336, 'lon': 6.3402, 'validity': 'A', 126 | 'pressure_alt': -37, 'gps_alt': 48, 'FXA': 1, 'SIU': 1}, 127 | {'datetime': datetime.datetime(2012, 5, 26, 11, 33, 58, tzinfo=datetime.timezone.utc), 'lat': 52.468183333333336, 'lon': 6.340216666666667, 'validity': 'A', 128 | 'pressure_alt': -37, 'gps_alt': 48, 'FXA': 1, 'SIU': 1}, 129 | {'datetime': datetime.datetime(2012, 5, 26, 11, 34, 6, tzinfo=datetime.timezone.utc), 'lat': 52.46816666666667, 'lon': 6.339666666666667, 'validity': 'A', 130 | 'pressure_alt': -38, 'gps_alt': 49, 'FXA': 1, 'SIU': 1}, 131 | ] 132 | 133 | # originally this did trip = deepcopy(self._trip) 134 | # but this is broken with current aerofiles version 135 | _trace = get_trace(self.igc_path) 136 | race_task = get_task(self.igc_path) 137 | trip = Trip(race_task, _trace) 138 | 139 | trip.fixes = [ 140 | trace[1], 141 | trace[4] 142 | ] 143 | 144 | phases = FlightPhases('pysoar', trace, trip) 145 | 146 | # there should only be one phase: starting at first fix and ending at last fix of trace 147 | # these are conditions to a correct test setup, therefore no actual tests 148 | assert len(phases._phases) == 1 149 | assert phases._phases[0].fixes[0]['datetime'] == trace[0]['datetime'] 150 | assert phases._phases[0].fixes[-1]['datetime'] == trace[-1]['datetime'] 151 | 152 | all_phases_leg0 = phases.all_phases(leg=0) 153 | 154 | # check 1 phase found 155 | self.assertEqual(len(all_phases_leg0), 1) 156 | 157 | # check if phase correctly starts and ends at the trip fixes and not the trace fixes 158 | first_phase_fix = all_phases_leg0[0].fixes[0] 159 | last_phase_fix = all_phases_leg0[0].fixes[-1] 160 | self.assertEqual(first_phase_fix['datetime'], trip.fixes[0]['datetime']) 161 | self.assertEqual(last_phase_fix['datetime'], trip.fixes[-1]['datetime']) 162 | -------------------------------------------------------------------------------- /opensoar/thermals/flight_phases.py: 
--------------------------------------------------------------------------------
 1 | from collections import namedtuple
 2 | from typing import Union, List
 3 | 
 4 | from opensoar.thermals.pysoar_thermal_detector import PySoarThermalDetector
 5 | 
 6 | Phase = namedtuple('Phase', 'is_cruise fixes')
 7 | 
 8 | 
 9 | class FlightPhases:
10 |     """
11 |     Container to combine the different flight phases (thermal and cruise) with helper methods for easy access.
12 |     """
13 | 
14 |     def __init__(self, classification_method: str, trace: list, trip=None):
15 |         """
16 |         :param classification_method: currently only 'pysoar' supported
17 |         :param trace: list of trace fixes
18 |         :param trip: optional parameter to obtain thermals per leg
19 |         """
20 | 
21 |         if classification_method == 'pysoar':
22 |             self._thermal_detector = PySoarThermalDetector()
23 |         else:
24 |             raise ValueError('Classification method {} not supported'.format(classification_method))
25 | 
26 |         self._trip = trip
27 |         self._phases = self._thermal_detector.analyse(trace)
28 | 
29 |     def thermals(self, leg: Union[int, str]=None) -> List[Phase]:
30 |         """
31 |         Obtain only thermal phases.
32 | 
33 |         :param leg: can be 0, 1, ... or 'all'. Obtain only thermals within specified leg or all legs.
34 |         :return: list of thermal phases
35 |         """
36 | 
37 |         if leg is not None:
38 | 
39 |             self._check_leg(leg)
40 | 
41 |             thermals = list()
42 |             for phase in self._phases:
43 |                 if phase.is_cruise:
44 |                     continue
45 | 
46 |                 if leg == 'all':
47 |                     thermal = self._get_phase_within_trip(phase)
48 |                 else:
49 |                     thermal = self._get_phase_within_leg(phase, leg)
50 | 
51 |                 if thermal is not None:
52 |                     thermals.append(thermal)
53 | 
54 |             return thermals
55 |         else:
56 |             return [phase for phase in self._phases if not phase.is_cruise]
57 | 
58 |     def cruises(self, leg: Union[int, str]=None) -> List[Phase]:
59 |         """
60 |         Obtain only cruise phases.
61 | 
62 |         :param leg: can be 0, 1, ... or 'all'. Obtain only cruises within specified leg or all legs.
63 |         :return: list of cruise phases
64 |         """
65 | 
66 |         if leg is not None:
67 |             self._check_leg(leg)
68 | 
69 |             cruises = list()
70 |             for phase in self._phases:
71 | 
72 |                 if not phase.is_cruise:
73 |                     continue
74 | 
75 |                 if leg == 'all':
76 |                     cruise = self._get_phase_within_trip(phase)
77 |                 else:
78 |                     cruise = self._get_phase_within_leg(phase, leg)
79 | 
80 |                 if cruise is not None:
81 |                     cruises.append(cruise)
82 | 
83 |             return cruises
84 |         else:
85 |             return [phase for phase in self._phases if phase.is_cruise]
86 | 
87 |     def all_phases(self, leg: Union[int, str]=None) -> List[Phase]:
88 |         """
89 |         Obtain all phases (cruise and thermal).
90 | 91 | :param leg: obtain only phases within specified leg (using int for leg), 92 | or obtain only phases within trip (using leg='all') 93 | :return: 94 | """ 95 | 96 | if leg is not None: 97 | 98 | self._check_leg(leg) 99 | 100 | phases = list() 101 | for phase in self._phases: 102 | 103 | if leg == 'all': 104 | phase_ = self._get_phase_within_trip(phase) 105 | else: 106 | phase_ = self._get_phase_within_leg(phase, leg) 107 | 108 | if phase_ is not None: 109 | phases.append(phase_) 110 | 111 | return phases 112 | else: 113 | return self._phases 114 | 115 | def _check_leg(self, leg): 116 | if self._trip is None: 117 | raise ValueError('No trip specified') 118 | else: 119 | 120 | if type(leg) == str: 121 | if leg != 'all': 122 | raise NotImplementedError 123 | elif type(leg) == int: 124 | if leg > self._trip.started_legs() - 1: 125 | raise ValueError('Trip only contains {} legs'.format(self._trip.started_legs())) 126 | else: 127 | raise NotImplementedError 128 | 129 | def _get_phase_within_leg(self, phase: Phase, leg: int) -> Phase: 130 | 131 | """ 132 | Get part of phase that falls within a specified leg 133 | :param leg: 134 | :return: 135 | """ 136 | 137 | phase_start_in_leg = self._trip.fix_on_leg(phase.fixes[0], leg) 138 | phase_end_in_leg = self._trip.fix_on_leg(phase.fixes[-1], leg) 139 | phase_start_before_leg = self._trip.fix_before_leg(phase.fixes[0], leg) 140 | phase_end_after_leg = self._trip.fix_after_leg(phase.fixes[-1], leg) 141 | 142 | if not phase_start_in_leg and not phase_end_in_leg: 143 | if phase_start_before_leg and phase_end_after_leg: 144 | use_trip_start_fix = True 145 | use_trip_end_fix = True 146 | else: 147 | return None 148 | elif phase_start_in_leg and phase_end_in_leg: 149 | use_trip_start_fix = False 150 | use_trip_end_fix = False 151 | elif phase_start_in_leg and not phase_end_in_leg: 152 | use_trip_start_fix = False 153 | use_trip_end_fix = True 154 | else: # not phase_start_in_leg and phase_end_in_leg: 155 | use_trip_start_fix = True 156 | use_trip_end_fix = False 157 | 158 | start_fix = self._trip.fixes[leg] if use_trip_start_fix else phase.fixes[0] 159 | 160 | if use_trip_end_fix: 161 | if self._trip.outlanded() and leg == self._trip.outlanding_leg(): 162 | end_fix = self._trip.outlanding_fix 163 | else: 164 | end_fix = self._trip.fixes[leg + 1] 165 | else: 166 | end_fix = phase.fixes[-1] 167 | 168 | phase_start_index = phase.fixes.index(start_fix) 169 | phase_end_index = phase.fixes.index(end_fix) 170 | return Phase(phase.is_cruise, phase.fixes[phase_start_index:phase_end_index + 1]) 171 | 172 | def _get_phase_within_trip(self, phase): 173 | 174 | """ 175 | Get part of phase that falls within the trip. 176 | 177 | :param phase: 178 | :return: phase. 
None if completely outside trip 179 | """ 180 | 181 | first_leg = 0 182 | last_leg = self._trip.started_legs() - 1 183 | 184 | phase_start_before_trip = self._trip.fix_before_leg(phase.fixes[0], first_leg) 185 | phase_start_after_trip = self._trip.fix_after_leg(phase.fixes[0], last_leg) 186 | phase_end_before_trip = self._trip.fix_before_leg(phase.fixes[-1], first_leg) 187 | phase_end_after_trip = self._trip.fix_after_leg(phase.fixes[-1], last_leg) 188 | 189 | if phase_start_before_trip and phase_end_before_trip: 190 | return None 191 | elif phase_start_before_trip and not phase_end_before_trip: 192 | use_trip_start_fix = True 193 | if phase_end_after_trip: 194 | use_trip_end_fix = True 195 | else: 196 | use_trip_end_fix = False 197 | elif not phase_start_before_trip and not phase_end_after_trip: 198 | use_trip_start_fix = False 199 | use_trip_end_fix = False 200 | else: # if not phase_start_before_trip and phase_end_after_trip 201 | use_trip_end_fix = True 202 | use_trip_start_fix = False 203 | if phase_start_after_trip: 204 | return None 205 | 206 | start_fix = self._trip.fixes[first_leg] if use_trip_start_fix else phase.fixes[first_leg] 207 | 208 | if use_trip_end_fix: 209 | if self._trip.outlanded() and last_leg == self._trip.outlanding_leg(): 210 | end_fix = self._trip.outlanding_fix 211 | else: 212 | end_fix = self._trip.fixes[last_leg + 1] 213 | else: 214 | end_fix = phase.fixes[-1] 215 | 216 | phase_start_index = phase.fixes.index(start_fix) 217 | phase_end_index = phase.fixes.index(end_fix) 218 | return Phase(phase.is_cruise, phase.fixes[phase_start_index:phase_end_index + 1]) 219 | -------------------------------------------------------------------------------- /opensoar/task/race_task.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | from opensoar.task.task import Task 4 | from opensoar.utilities.helper_functions import calculate_distance_bearing, double_iterator 5 | 6 | class RaceTask(Task): 7 | """ 8 | Race task. 
9 | """ 10 | 11 | def __init__(self, waypoints, timezone=None, start_opening=None, start_time_buffer=0, multistart=False): 12 | """ 13 | :param waypoints: see super() 14 | :param timezone: see super() 15 | :param start_opening: see super() 16 | :param start_time_buffer: see super() 17 | :param multistart: see super() 18 | """ 19 | super().__init__(waypoints, timezone, start_opening, start_time_buffer, multistart) 20 | 21 | self.distances = self.calculate_task_distances() 22 | 23 | def __eq__(self, other): 24 | return super().__eq__(other) 25 | 26 | @property 27 | def total_distance(self): 28 | return sum(self.distances) 29 | 30 | def calculate_task_distances(self): 31 | 32 | distances = list() 33 | for leg in range(self.no_legs): 34 | 35 | begin = self.waypoints[leg] 36 | end = self.waypoints[leg+1] # next is built in name 37 | distance, _ = calculate_distance_bearing(begin.fix, end.fix) 38 | 39 | if begin.distance_correction == "shorten_legs": 40 | if end.distance_correction == "shorten_legs": 41 | distance = Task.distance_shortened_leg(distance, begin, end, "current") 42 | distance = Task.distance_shortened_leg(distance, begin, end, "end") 43 | elif end.distance_correction == "move_tp": 44 | distance = Task.distance_moved_turnpoint(distance, begin, end, "end") 45 | distance = Task.distance_shortened_leg(distance, begin, end, "current") 46 | elif end.distance_correction is None: 47 | distance = Task.distance_shortened_leg(distance, begin, end, "current") 48 | else: 49 | raise ValueError("This distance correction does not exist: %s" % end.distance_correction) 50 | 51 | elif begin.distance_correction == "move_tp": 52 | if end.distance_correction == "shorten_legs": 53 | distance = Task.distance_moved_turnpoint(distance, begin, end, "begin") 54 | distance = Task.distance_shortened_leg(distance, begin, end, "end") 55 | elif end.distance_correction == "move_tp": 56 | distance = Task.distance_moved_turnpoint(distance, begin, end, "begin") 57 | distance = Task.distance_moved_turnpoint(distance, begin, end, "both_end") 58 | elif end.distance_correction is None: 59 | distance = Task.distance_moved_turnpoint(distance, begin, end, "begin") 60 | else: 61 | raise ValueError("This distance correction does not exist: %s" % end.distance_correction) 62 | 63 | elif begin.distance_correction is None: 64 | if end.distance_correction == "shorten_legs": 65 | distance = Task.distance_shortened_leg(distance, begin, end, "end") 66 | elif end.distance_correction == "move_tp": 67 | distance = Task.distance_moved_turnpoint(distance, begin, end, "end") 68 | elif end.distance_correction is None: 69 | pass 70 | else: 71 | raise ValueError("This distance correction does not exist: %s" % end.distance_correction) 72 | 73 | else: 74 | raise ValueError("This distance correction does not exist: %s" % self.waypoints[leg].distance_correction) 75 | 76 | distances.append(distance) 77 | 78 | return distances 79 | 80 | def apply_rules(self, trace): 81 | 82 | fixes, outlanding_fix = self.determine_trip_fixes(trace) 83 | distances = self.determine_trip_distances(fixes, outlanding_fix) 84 | refined_start = self.determine_refined_start(trace, fixes) 85 | finish_time = fixes[-1]['datetime'] 86 | sector_fixes = [] # not applicable for race tasks 87 | return fixes, refined_start, outlanding_fix, distances, finish_time, sector_fixes 88 | 89 | def determine_trip_fixes(self, trace): 90 | 91 | leg = -1 92 | enl_first_fix = None 93 | enl_registered = False 94 | 95 | fixes = list() 96 | start_fixes = list() 97 | for fix_minus1, fix in 
double_iterator(trace): 98 | 99 | if not enl_registered and self.enl_value_exceeded(fix): 100 | if enl_first_fix is None: 101 | enl_first_fix = fix_minus1 102 | 103 | enl_time = (fix['datetime'] - enl_first_fix['datetime']).total_seconds() 104 | enl_registered = enl_registered or self.enl_time_exceeded(enl_time) 105 | elif not enl_registered: 106 | enl_first_fix = None 107 | 108 | if self.start_opening is None: 109 | after_start_opening = True 110 | else: 111 | after_start_opening = self.start_opening + datetime.timedelta(seconds=self.start_time_buffer) < fix['datetime'] 112 | 113 | if leg == -1 and after_start_opening: 114 | if self.started(fix_minus1, fix): 115 | fixes.append(fix_minus1) 116 | start_fixes.append(fix_minus1) 117 | leg += 1 118 | enl_first_fix = None 119 | enl_registered = False 120 | elif leg == 0: 121 | if self.started(fix_minus1, fix): # restart 122 | fixes[0] = fix_minus1 123 | start_fixes.append(fix_minus1) 124 | enl_first_fix = None 125 | enl_registered = False 126 | if self.finished_leg(leg, fix_minus1, fix) and not enl_registered: 127 | fixes.append(fix) 128 | leg += 1 129 | elif 0 < leg < self.no_legs: 130 | if self.finished_leg(leg, fix_minus1, fix) and not enl_registered: 131 | fixes.append(fix) 132 | leg += 1 133 | 134 | enl_fix = enl_first_fix if enl_registered else None 135 | 136 | outlanding_fix = None 137 | if len(fixes) is not len(self.waypoints): 138 | outlanding_fix = self.determine_outlanding_fix(trace, fixes, start_fixes, enl_fix) 139 | 140 | return fixes, outlanding_fix 141 | 142 | def determine_outlanding_fix(self, trace, fixes, start_fixes, enl_fix): 143 | 144 | outlanding_leg = len(fixes) - 1 145 | 146 | # check if there is an actual outlanding 147 | if len(fixes) == len(self.waypoints): 148 | return None 149 | 150 | # determine range within trace to be examined for outlanding fix 151 | last_tp_i = trace.index(fixes[-1]) if outlanding_leg != 0 else trace.index(start_fixes[0]) 152 | if enl_fix is not None: 153 | last_index = trace.index(enl_fix) 154 | else: 155 | last_index = len(trace) - 1 156 | 157 | # find fix which maximizes the distance 158 | outlanding_fix = max(trace[last_tp_i:last_index + 1], 159 | key=lambda x: self.determine_outlanding_distance(outlanding_leg, x)) 160 | 161 | max_distance = self.determine_outlanding_distance(outlanding_leg, outlanding_fix) 162 | if max_distance < 0: # no out-landing fix that improves the distance 163 | if enl_fix is not None: 164 | outlanding_fix = enl_fix 165 | else: 166 | outlanding_fix = trace[-1] 167 | 168 | return outlanding_fix 169 | 170 | def determine_outlanding_distance(self, outlanding_leg, fix): 171 | 172 | previous_waypoint = self.waypoints[outlanding_leg] 173 | next_waypoint = self.waypoints[outlanding_leg + 1] 174 | 175 | # outlanding distance = distance between tps minus distance from next tp to outlanding 176 | outlanding_dist, _ = calculate_distance_bearing(previous_waypoint.fix, next_waypoint.fix) 177 | outlanding_dist -= calculate_distance_bearing(next_waypoint.fix, fix)[0] 178 | 179 | return outlanding_dist if outlanding_dist > 0 else 0 180 | 181 | def determine_trip_distances(self, fixes, outlanding_fix): 182 | 183 | distances = list() 184 | for leg, fix in enumerate(fixes[1:]): 185 | distances.append(self.distances[leg]) 186 | 187 | if outlanding_fix is not None: 188 | outlanding_leg = len(fixes) - 1 189 | distances.append(self.determine_outlanding_distance(outlanding_leg, outlanding_fix)) 190 | 191 | return distances 192 | 193 | def finished_leg(self, leg, fix1, fix2): 194 | 
"""Determines whether leg is finished.""" 195 | 196 | next_waypoint = self.waypoints[leg + 1] 197 | if next_waypoint.is_line: 198 | return next_waypoint.crossed_line(fix1, fix2) 199 | else: 200 | return next_waypoint.outside_sector(fix1) and next_waypoint.inside_sector(fix2) 201 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # 4 | # opensoar documentation build configuration file, created by 5 | # sphinx-quickstart on Fri Apr 13 20:59:40 2018. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | import sys 17 | import os 18 | 19 | # If extensions (or modules to document with autodoc) are in another directory, 20 | # add these directories to sys.path here. If the directory is relative to the 21 | # documentation root, use os.path.abspath to make it absolute, like shown here. 22 | cwd = os.path.dirname(__file__) 23 | top_dir = os.path.join(cwd, '..') 24 | sys.path.insert(0, top_dir) 25 | 26 | 27 | # -- General configuration ------------------------------------------------ 28 | 29 | # If your documentation needs a minimal Sphinx version, state it here. 30 | #needs_sphinx = '1.0' 31 | 32 | # Add any Sphinx extension module names here, as strings. They can be 33 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 34 | # ones. 35 | extensions = [ 36 | 'sphinx.ext.autodoc', 37 | ] 38 | 39 | # Add any paths that contain templates here, relative to this directory. 40 | templates_path = ['_templates'] 41 | 42 | # The suffix of source filenames. 43 | source_suffix = '.rst' 44 | 45 | # The encoding of source files. 46 | #source_encoding = 'utf-8-sig' 47 | 48 | # The master toctree document. 49 | master_doc = 'index' 50 | 51 | # General information about the project. 52 | project = 'opensoar' 53 | copyright = '2018, Matthijs Beekman' 54 | 55 | # The version info for the project you're documenting, acts as replacement for 56 | # |version| and |release|, also used in various other places throughout the 57 | # built documents. 58 | # 59 | # The short X.Y version. 60 | version = '0.1.0' 61 | # The full version, including alpha/beta/rc tags. 62 | release = '0.1.0' 63 | 64 | # The language for content autogenerated by Sphinx. Refer to documentation 65 | # for a list of supported languages. 66 | #language = None 67 | 68 | # There are two options for replacing |today|: either, you set today to some 69 | # non-false value, then it is used: 70 | #today = '' 71 | # Else, today_fmt is used as the format for a strftime call. 72 | #today_fmt = '%B %d, %Y' 73 | 74 | # List of patterns, relative to source directory, that match files and 75 | # directories to ignore when looking for source files. 76 | exclude_patterns = [] 77 | 78 | # The reST default role (used for this markup: `text`) to use for all 79 | # documents. 80 | #default_role = None 81 | 82 | # If true, '()' will be appended to :func: etc. cross-reference text. 83 | #add_function_parentheses = True 84 | 85 | # If true, the current module name will be prepended to all description 86 | # unit titles (such as .. function::). 
87 | #add_module_names = True 88 | 89 | # If true, sectionauthor and moduleauthor directives will be shown in the 90 | # output. They are ignored by default. 91 | #show_authors = False 92 | 93 | # The name of the Pygments (syntax highlighting) style to use. 94 | pygments_style = 'sphinx' 95 | 96 | # A list of ignored prefixes for module index sorting. 97 | #modindex_common_prefix = [] 98 | 99 | # If true, keep warnings as "system message" paragraphs in the built documents. 100 | #keep_warnings = False 101 | 102 | 103 | # -- Options for HTML output ---------------------------------------------- 104 | 105 | # The theme to use for HTML and HTML Help pages. See the documentation for 106 | # a list of builtin themes. 107 | # html_theme = "sphinx_rtd_theme" 108 | 109 | # Theme options are theme-specific and customize the look and feel of a theme 110 | # further. For a list of options available for each theme, see the 111 | # documentation. 112 | #html_theme_options = {} 113 | 114 | # Add any paths that contain custom themes here, relative to this directory. 115 | #html_theme_path = [] 116 | 117 | # The name for this set of Sphinx documents. If None, it defaults to 118 | # " v documentation". 119 | #html_title = None 120 | 121 | # A shorter title for the navigation bar. Default is the same as html_title. 122 | #html_short_title = None 123 | 124 | # The name of an image file (relative to this directory) to place at the top 125 | # of the sidebar. 126 | #html_logo = None 127 | 128 | # The name of an image file (within the static path) to use as favicon of the 129 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 130 | # pixels large. 131 | #html_favicon = None 132 | 133 | # Add any paths that contain custom static files (such as style sheets) here, 134 | # relative to this directory. They are copied after the builtin static files, 135 | # so a file named "default.css" will overwrite the builtin "default.css". 136 | html_static_path = ['_static'] 137 | 138 | # Add any extra paths that contain custom files (such as robots.txt or 139 | # .htaccess) here, relative to this directory. These files are copied 140 | # directly to the root of the documentation. 141 | #html_extra_path = [] 142 | 143 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 144 | # using the given strftime format. 145 | #html_last_updated_fmt = '%b %d, %Y' 146 | 147 | # If true, SmartyPants will be used to convert quotes and dashes to 148 | # typographically correct entities. 149 | #html_use_smartypants = True 150 | 151 | # Custom sidebar templates, maps document names to template names. 152 | #html_sidebars = {} 153 | 154 | # Additional templates that should be rendered to pages, maps page names to 155 | # template names. 156 | #html_additional_pages = {} 157 | 158 | # If false, no module index is generated. 159 | #html_domain_indices = True 160 | 161 | # If false, no index is generated. 162 | #html_use_index = True 163 | 164 | # If true, the index is split into individual pages for each letter. 165 | #html_split_index = False 166 | 167 | # If true, links to the reST sources are added to the pages. 168 | #html_show_sourcelink = True 169 | 170 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 171 | #html_show_sphinx = True 172 | 173 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
174 | #html_show_copyright = True 175 | 176 | # If true, an OpenSearch description file will be output, and all pages will 177 | # contain a tag referring to it. The value of this option must be the 178 | # base URL from which the finished HTML is served. 179 | #html_use_opensearch = '' 180 | 181 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 182 | #html_file_suffix = None 183 | 184 | # Output file base name for HTML help builder. 185 | htmlhelp_basename = 'opensoardoc' 186 | 187 | 188 | # -- Options for LaTeX output --------------------------------------------- 189 | 190 | latex_elements = { 191 | # The paper size ('letterpaper' or 'a4paper'). 192 | #'papersize': 'letterpaper', 193 | 194 | # The font size ('10pt', '11pt' or '12pt'). 195 | #'pointsize': '10pt', 196 | 197 | # Additional stuff for the LaTeX preamble. 198 | #'preamble': '', 199 | } 200 | 201 | # Grouping the document tree into LaTeX files. List of tuples 202 | # (source start file, target name, title, 203 | # author, documentclass [howto, manual, or own class]). 204 | latex_documents = [ 205 | ('index', 'opensoar.tex', 'opensoar Documentation', 206 | 'Matthijs Beekman', 'manual'), 207 | ] 208 | 209 | # The name of an image file (relative to this directory) to place at the top of 210 | # the title page. 211 | #latex_logo = None 212 | 213 | # For "manual" documents, if this is true, then toplevel headings are parts, 214 | # not chapters. 215 | #latex_use_parts = False 216 | 217 | # If true, show page references after internal links. 218 | #latex_show_pagerefs = False 219 | 220 | # If true, show URL addresses after external links. 221 | #latex_show_urls = False 222 | 223 | # Documents to append as an appendix to all manuals. 224 | #latex_appendices = [] 225 | 226 | # If false, no module index is generated. 227 | #latex_domain_indices = True 228 | 229 | 230 | # -- Options for manual page output --------------------------------------- 231 | 232 | # One entry per manual page. List of tuples 233 | # (source start file, name, description, authors, manual section). 234 | man_pages = [ 235 | ('index', 'opensoar', 'opensoar Documentation', 236 | ['Matthijs Beekman'], 1) 237 | ] 238 | 239 | # If true, show URL addresses after external links. 240 | #man_show_urls = False 241 | 242 | 243 | # -- Options for Texinfo output ------------------------------------------- 244 | 245 | # Grouping the document tree into Texinfo files. List of tuples 246 | # (source start file, target name, title, author, 247 | # dir menu entry, description, category) 248 | texinfo_documents = [ 249 | ('index', 'opensoar', 'opensoar Documentation', 250 | 'Matthijs Beekman', 'opensoar', 'One line description of project.', 251 | 'Miscellaneous'), 252 | ] 253 | 254 | # Documents to append as an appendix to all manuals. 255 | #texinfo_appendices = [] 256 | 257 | # If false, no module index is generated. 258 | #texinfo_domain_indices = True 259 | 260 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 261 | #texinfo_show_urls = 'footnote' 262 | 263 | # If true, do not generate a @detailmenu in the "Top" node's menu. 
264 | #texinfo_no_detailmenu = False
265 | 
--------------------------------------------------------------------------------
/tests/competition/test_soaringspot.py:
--------------------------------------------------------------------------------
 1 | import unittest
 2 | import os
 3 | import datetime
 4 | 
 5 | from opensoar.competition.soaringspot import get_lat_long, get_fixed_orientation_angle, get_sector_orientation, \
 6 |     get_sector_dimensions, get_waypoint, get_waypoints, SoaringSpotDaily, get_task_rules, get_info_from_comment_lines
 7 | from opensoar.task.waypoint import Waypoint
 8 | from opensoar.task.task import Task
 9 | 
10 | from aerofiles.igc import Reader
11 | 
12 | 
13 | class TestSoaringspot(unittest.TestCase):
14 | 
15 |     def test_get_lat_long(self):
16 |         lcu_line = 'LCU::C5215000N00609500EDeventer'
17 |         lat, lon = get_lat_long(lcu_line)
18 | 
19 |         self.assertAlmostEqual(lat, 52.25)
20 |         self.assertAlmostEqual(lon, 6.1583333)
21 | 
22 |     def test_get_fixed_orientation_angle(self):
23 |         angle = get_fixed_orientation_angle('LSEEYOU OZ=2,Style=0,R1=20000m,A1=45,A12=215')
24 |         self.assertEqual(angle, 215)
25 | 
26 |     def test_get_sector_orientation(self):
27 |         orientation = get_sector_orientation('LSEEYOU OZ=0,Style=1,R1=500m,A1=180', 3)
28 |         self.assertEqual(orientation, 'symmetrical')
29 | 
30 |     def test_get_sector_orientation_start(self):
31 |         """Should always be next, independent of Style. Sometimes soaringspot files have wrong styles."""
32 | 
33 |         lseeyou_lines = [
34 |             'LSEEYOU OZ=-1,Style=1,R1=500m,A1=180',
35 |             'LSEEYOU OZ=-1,Style=2,R1=500m,A1=180',
36 |             'LSEEYOU OZ=-1,Style=3,R1=500m,A1=180',
37 |             'LSEEYOU OZ=-1,Style=4,R1=500m,A1=180',
38 |         ]
39 | 
40 |         for lseeyou_line in lseeyou_lines:
41 |             orientation = get_sector_orientation(lseeyou_line, 3)
42 |             self.assertEqual(orientation, 'next')
43 | 
44 |     def test_get_sector_orientation_finish(self):
45 |         """Should always be previous, independent of Style.
Sometimes soaringspot files have wrong styles.""" 46 | 47 | lseeyou_lines = [ 48 | 'LSEEYOU OZ=2,Style=1,R1=500m,A1=180', 49 | 'LSEEYOU OZ=2,Style=2,R1=500m,A1=180', 50 | 'LSEEYOU OZ=2,Style=3,R1=500m,A1=180', 51 | 'LSEEYOU OZ=2,Style=4,R1=500m,A1=180', 52 | ] 53 | 54 | for lseeyou_line in lseeyou_lines: 55 | orientation = get_sector_orientation(lseeyou_line, 4) 56 | self.assertEqual(orientation, 'previous') 57 | 58 | def test_get_sector_orientation_speedstyle_error(self): 59 | """In some of the soaringspot igc files a wrong SpeedStyle entry is found""" 60 | 61 | orientation = get_sector_orientation('LSEEYOU OZ=-1,Style=2SpeedStyle=0,R1=5000m,A1=180,Line=1', 4) 62 | self.assertEqual(orientation, 'next') 63 | 64 | orientation = get_sector_orientation('LSEEYOU OZ=0,Style=1SpeedStyle=3,R1=500m,A1=180,Reduce=1', 4) 65 | self.assertEqual(orientation, 'symmetrical') 66 | 67 | orientation = get_sector_orientation('LSEEYOU OZ=0,Style=1SpeedStyle=2,R1=500m,A1=180,Reduce=1', 4) 68 | self.assertEqual(orientation, 'symmetrical') 69 | 70 | def test_get_sector_dimensions(self): 71 | lseeyou_line = 'LSEEYOU OZ=0,Style=1,R1=500m,A1=180' 72 | r_min, angle_min, r_max, angle_max = get_sector_dimensions(lseeyou_line) 73 | self.assertEqual(r_max, 500) 74 | self.assertIsNone(r_min) 75 | self.assertEqual(angle_max, 180) 76 | self.assertIsNone(angle_min) 77 | 78 | def test_get_waypoint(self): 79 | lcu_line = 'LCU::C5215000N00609500EDeventer' 80 | lseeyou_line = 'LSEEYOU OZ=0,Style=1,R1=500m,A1=180' 81 | waypoint = get_waypoint(lcu_line, lseeyou_line, 3) 82 | self.assertTrue(isinstance(waypoint, Waypoint)) 83 | self.assertEqual(waypoint.name, 'Deventer') 84 | self.assertAlmostEqual(waypoint.latitude, 52.25) 85 | self.assertAlmostEqual(waypoint.longitude, 6.1583333) 86 | self.assertEqual(waypoint.r_max, 500) 87 | self.assertEqual(waypoint.angle_max, 180) 88 | 89 | def test_get_waypoints(self): 90 | lcu_lines = [ 91 | 'LCU::C210614200004301299000003', 92 | 'LCU::C5228133N00620000ESALLAND FL', 93 | 'LCU::C5226783N00620467ESALLAND AF1', 94 | 'LCU::C5215000N00609500EDeventer', 95 | 'LCU::C5204900N00626800ERuurlo', 96 | 'LCU::C5228400N00624600EArchemerberg', 97 | 'LCU::C5228133N00620000ESALLAND FL', 98 | 'LCU::C5228133N00620000ESALLAND FL' 99 | ] 100 | 101 | lseeyou_lines = [ 102 | 'LSEEYOU OZ=-1,Style=2,R1=2500m,A1=35,Line=1', 103 | 'LSEEYOU OZ=0,Style=1,R1=500m,A1=180', 104 | 'LSEEYOU OZ=1,Style=1,R1=500m,A1=180', 105 | 'LSEEYOU OZ=2,Style=1,R1=500m,A1=180', 106 | 'LSEEYOU OZ=3,Style=3,R1=500m,A1=180', 107 | ] 108 | 109 | waypoints = get_waypoints(lcu_lines, lseeyou_lines) 110 | Task.set_orientation_angles(waypoints) 111 | 112 | self.assertEqual(len(waypoints), 5) 113 | 114 | # name, orientation_angle, is_line 115 | expected = [ 116 | ("SALLAND AF1", 209, True), 117 | ("Deventer", 81, False), 118 | ("Ruurlo", 335, False), 119 | ("Archemerberg", 220, False), 120 | ("SALLAND FL", 84, False), 121 | ] 122 | 123 | wp = waypoints[0] 124 | print(wp.r_min, wp.angle_min, wp.r_max, wp.angle_max) 125 | 126 | for w, expected_w in zip(waypoints, expected): 127 | name, orientation_angle, is_line = expected_w 128 | self.assertEqual(w.name, name) 129 | self.assertEqual(int(w.orientation_angle), orientation_angle) 130 | self.assertEqual(w.is_line, is_line) 131 | 132 | def test_get_competitors(self): 133 | # old format 134 | soaringspot_page = SoaringSpotDaily( 135 | 'https://www.soaringspot.com/en/sallandse-tweedaagse-2014/results/club/task-1-on-2014-06-21/daily') 136 | 137 | competitors = 
soaringspot_page._get_competitors_info(include_hc_competitors=False, include_dns_competitors=True) 138 | self.assertEqual(len(competitors), 10) 139 | competitors = soaringspot_page._get_competitors_info(include_hc_competitors=False, include_dns_competitors=False) 140 | self.assertEqual(len(competitors), 8) 141 | competitor_pk = soaringspot_page._get_competitors_info(include_hc_competitors=False)[2] 142 | 143 | self.assertEqual(competitor_pk['competition_id'], 'PK') 144 | self.assertEqual(competitor_pk['ranking'], 3) 145 | self.assertEqual(competitor_pk['pilot_name'], 'Erik Berendes') 146 | self.assertEqual(competitor_pk['plane_model'], 'Pik20D') 147 | 148 | expected_igc_url = 'https://archive.soaringspot.com/contest/013/1323/flights/2477/2597322754.igc' 149 | self.assertEqual(competitor_pk['igc_url'], expected_igc_url) 150 | 151 | # new format 152 | soaringspot_page = SoaringSpotDaily( 153 | 'https://www.soaringspot.com/en_gb/pribina-cup-2025-nitra-2025/results/club/task-6-on-2025-04-24/daily') 154 | 155 | competitors = soaringspot_page._get_competitors_info(include_hc_competitors=False, include_dns_competitors=True) 156 | self.assertEqual(len(competitors), 15) 157 | competitors = soaringspot_page._get_competitors_info(include_hc_competitors=False, include_dns_competitors=False) 158 | self.assertEqual(len(competitors), 13) 159 | competitor_pk = soaringspot_page._get_competitors_info(include_hc_competitors=False)[1] 160 | 161 | self.assertEqual(competitor_pk['competition_id'], 'X11') 162 | self.assertEqual(competitor_pk['ranking'], 2) 163 | self.assertEqual(competitor_pk['pilot_name'], 'Kengo Matsumoto') 164 | self.assertEqual(competitor_pk['plane_model'], 'ASW-20') 165 | 166 | def test_get_competitors_info_relative_downloads(self): 167 | """relative IGC URLs""" 168 | soaringspot_page = SoaringSpotDaily( 169 | 'https://www.soaringspot.com/en_gb/nation-gliding-championships-taupo-2020/results/racing/task-5-on-2020-02-10/daily' 170 | ) 171 | competitors = soaringspot_page._get_competitors_info(include_hc_competitors=False) 172 | self.assertEqual(len(competitors), 10) 173 | 174 | def test_get_competitors_with_duplicate_ids(self): 175 | """Test handling of duplicate competition IDs""" 176 | soaringspot_page = SoaringSpotDaily( 177 | 'https://www.soaringspot.com/nl/keiheuvelcup2025/results/open-xpdr/task-1-on-2025-08-23/daily' 178 | ) 179 | 180 | competitors = soaringspot_page._get_competitors_info(include_hc_competitors=True) 181 | 182 | # Check that we got all competitors (34 total on this page) 183 | self.assertEqual(len(competitors), 34) 184 | 185 | # Extract all competition IDs 186 | competition_ids = [comp['competition_id'] for comp in competitors] 187 | 188 | # Check that all competition IDs are unique after processing 189 | self.assertEqual(len(competition_ids), len(set(competition_ids)), 190 | "Duplicate competition IDs detected after processing") 191 | 192 | # Find the pilots who originally had 'FS' as competition ID 193 | fs_pilots = [comp for comp in competitors if comp['competition_id'].startswith('FS')] 194 | 195 | # Should have 2 pilots with FS-based IDs 196 | self.assertEqual(len(fs_pilots), 2, 197 | "Expected 2 pilots with FS competition ID") 198 | 199 | # Verify the pilots are correct (Daan Spruyt and Flens Piet) 200 | pilot_names = {comp['pilot_name'] for comp in fs_pilots} 201 | self.assertIn('Daan Spruyt', pilot_names) 202 | self.assertIn('Flens Piet', pilot_names) 203 | 204 | # One should have 'FS' and the other should have 'FS_2' 205 | fs_ids = {comp['competition_id'] for 
comp in fs_pilots} 206 | self.assertIn('FS', fs_ids) 207 | self.assertIn('FS_2', fs_ids) 208 | 209 | # Verify rankings are preserved 210 | for comp in fs_pilots: 211 | if comp['pilot_name'] == 'Daan Spruyt': 212 | self.assertEqual(comp['ranking'], 2) 213 | self.assertEqual(comp['plane_model'], 'Ventus 2ax') 214 | elif comp['pilot_name'] == 'Flens Piet': 215 | self.assertEqual(comp['ranking'], 32) 216 | self.assertEqual(comp['plane_model'], 'DG 808C Classic 18m') 217 | 218 | # disabled because this URL is no longer used. unclear whether all dev urls are cleared 219 | # def test_get_competitors_dev_url(self): 220 | # soaringspot_page = SoaringSpotDaily( 221 | # 'https://dev.soaringspot.com/en_gb/53-hww/results/std/task-6-on-2019-06-01/daily') 222 | # 223 | # competitor_a1 = soaringspot_page._get_competitors_info(include_hc_competitors=False)[0] 224 | # 225 | # self.assertEqual(competitor_a1['competition_id'], 'A1') 226 | # self.assertEqual(competitor_a1['ranking'], 1) 227 | # 228 | # expected_igc_url = 'https://dev.soaringspot.com/en_gb/download-contest-flight/2818-5941231725' 229 | # self.assertEqual(competitor_a1['igc_url'], expected_igc_url) 230 | 231 | def test_get_competition_day(self): 232 | soaringspot_page = SoaringSpotDaily( 233 | 'https://www.soaringspot.com/en/sallandse-tweedaagse-2014/results/club/task-1-on-2014-06-21/daily') 234 | 235 | competition_name, date, plane_class = soaringspot_page._get_competition_day_info() 236 | 237 | self.assertEqual(competition_name, 'sallandse-tweedaagse-2014') 238 | self.assertEqual(plane_class, 'club') 239 | self.assertEqual(date, datetime.date(2014, 6, 21)) 240 | 241 | def test_get_task_rules(self): 242 | lseeyou_tsk_line = 'LSEEYOU TSK,NoStart=13:29:00,TaskTime=03:30:00,WpDis=False,' \ 243 | 'MinDis=True,NearDis=0.5km,NearAlt=200.0m,MinFinAlt=0.0km' 244 | 245 | start_opening, t_min, multi_start = get_task_rules(lseeyou_tsk_line) 246 | 247 | self.assertEqual(start_opening, datetime.time(13, 29, 0)) 248 | self.assertEqual(t_min, datetime.timedelta(hours=3, minutes=30, seconds=0)) 249 | 250 | def test_get_info_from_comment_lines_no_lcu_no_lseeyou(self): 251 | """ 252 | Some IGC files from soaringspot seem to miss the task information written in LCU and LSEEYOU lines 253 | This test checks that the function then return a None task. 254 | """ 255 | 256 | cwd = os.path.dirname(__file__) 257 | igc_path = os.path.join(cwd, '..', 'igc_files', 'missing_lcu_lseeyou_lines.igc') 258 | 259 | # try: # try utf-8 260 | with open(igc_path, 'r', encoding='latin1') as f: 261 | parsed_igc_file = Reader().read(f) 262 | 263 | task, contest_information, competitor_information = get_info_from_comment_lines(parsed_igc_file, date=datetime.date(2023, 7, 4)) 264 | self.assertIsNone(task, None) 265 | -------------------------------------------------------------------------------- /tests/competition/test_crosscountry_api.py: -------------------------------------------------------------------------------- 1 | """ 2 | Integration tests for Crosscountry API structure verification. 3 | 4 | These tests make real API calls to the Crosscountry endpoints and verify that the 5 | expected structure is present. These will help detect unexpected API changes. 6 | 7 | NOTE: These tests require internet connectivity and depend on the 8 | actual Crosscountry API being available. 
 9 | """
10 | import unittest
11 | import shutil
12 | import datetime
13 | import json
14 | import logging
15 | from pathlib import Path
16 | import re
17 | 
18 | from opensoar.competition.crosscountry import CrosscountryDaily
19 | 
20 | 
21 | 
22 | # Configure logging
23 | logging.basicConfig(level=logging.WARNING)
24 | logger = logging.getLogger(__name__)
25 | 
26 | 
27 | class TestCrosscountryApiIntegration(unittest.TestCase):
28 |     """
29 |     Integration tests that verify the Crosscountry API structure using real API calls.
30 | 
31 |     These tests depend on the actual API and should be run periodically to
32 |     ensure that the API structure hasn't changed unexpectedly.
33 |     """
34 | 
35 |     def setUp(self):
36 |         """Set up test fixtures."""
37 |         # Use valid API endpoint URLs directly rather than web URLs
38 |         # The web URLs shown in the error message don't match the expected format
39 |         self.comp_id = 86  # Use a known working competition ID
40 |         self.day_id = 1547  # Use a known working day ID
41 | 
42 |         # Use the direct API endpoints instead of web URLs
43 |         self.comp_api_url = f"https://www.crosscountry.aero/c/sgp/rest/comp/{self.comp_id}"
44 |         self.day_api_url = f"https://www.crosscountry.aero/c/sgp/rest/day/{self.comp_id}/{self.day_id}"
45 | 
46 |         # Create Crosscountry daily instances with the API URLs
47 |         self.sgp_comp = CrosscountryDaily(self.comp_api_url)
48 |         self.sgp_day = CrosscountryDaily(self.day_api_url)
49 | 
50 |         # Verify the URLs were correctly parsed
51 |         self.assertEqual(self.sgp_comp.competition_id, self.comp_id)
52 |         self.assertEqual(self.sgp_day.competition_id, self.comp_id)
53 |         self.assertEqual(self.sgp_day.day_id, self.day_id)
54 | 
55 |         # Directory to save response data for inspection if needed
56 |         self.output_dir = Path("./test_output")
57 |         self.output_dir.mkdir(exist_ok=True)
58 | 
59 |     def tearDown(self):
60 |         """Clean up any files created during tests."""
61 |         # Remove the output directory and any saved responses
62 |         shutil.rmtree(self.output_dir)
63 | 
64 |     def _save_response(self, data, filename):
65 |         """Save response data to a file for manual inspection."""
66 |         path = self.output_dir / filename
67 |         with open(path, 'w') as f:
68 |             json.dump(data, f, indent=2)
69 |         logger.info(f"Saved response data to {path}")
70 | 
71 |     def test_competition_data_structure(self):
72 |         """Test the structure of the competition data response."""
73 |         try:
74 |             # Get competition data from the real API
75 |             comp_data = self.sgp_comp._get_competition_data()
76 | 
77 |             # Save the response for manual inspection
78 |             self._save_response(comp_data, f"comp_{self.comp_id}_response.json")
79 | 
80 |             # Verify top-level structure
81 |             self.assertIn('p', comp_data, "Missing pilots data ('p' key)")
82 |             self.assertIn('c', comp_data, "Missing competition data ('c' key)")
83 |             self.assertIn('i', comp_data, "Missing days info ('i' key)")
84 | 
85 |             # Verify competition info
86 |             comp_info = comp_data['c']
87 |             self.assertIn('t', comp_info, "Missing competition title ('t' key)")
88 |             self.assertIn('l', comp_info, "Missing location ('l' key)")
89 | 
90 |             # Verify pilots structure
91 |             pilots = comp_data['p']
92 |             self.assertTrue(pilots, "Pilots dictionary is empty")
93 | 
94 |             # Verify pilot data structure (check first pilot)
95 |             pilot_id = next(iter(pilots))
96 |             pilot = pilots[pilot_id]
97 |             self.assertIn('f', pilot, "Missing pilot first name ('f' key)")
98 |             self.assertIn('l', pilot, "Missing pilot last name ('l' key)")
99 |             self.assertIn('s', pilot, "Missing pilot sailplane ('s' key)")
100 | 
101 |             # Log some
sample data for verification 102 | logger.info(f"Competition: {comp_info['t']} at {comp_info['l']}") 103 | logger.info(f"Number of pilots: {len(pilots)}") 104 | 105 | # Verify competition days 106 | days = comp_data['i'] 107 | self.assertTrue(isinstance(days, list), "Days info is not a list") 108 | self.assertTrue(days, "No competition days found") 109 | 110 | # Verify day structure 111 | day = days[0] 112 | self.assertIn('i', day, "Missing day ID ('i' key)") 113 | self.assertIn('d', day, "Missing day date ('d' key)") 114 | self.assertIn('y', day, "Missing day type flag ('y' key)") 115 | # Note: 'w' key might not be present for all days 116 | 117 | logger.info(f"Found {len(days)} days in the competition") 118 | 119 | except Exception as e: 120 | self.fail(f"Error testing competition data structure: {str(e)}") 121 | 122 | def test_day_data_structure(self): 123 | """Test the structure of the day data response.""" 124 | try: 125 | # Get day data from the real API 126 | day_data = self.sgp_day._get_day_data() 127 | 128 | # Save the response for manual inspection 129 | self._save_response(day_data, f"day_{self.day_id}_response.json") 130 | 131 | # Verify top-level structure 132 | self.assertIn('d', day_data, "Missing day date ('d' key)") 133 | self.assertIn('a', day_data, "Missing start time ('a' key)") 134 | self.assertIn('r', day_data, "Missing results data ('r' key)") 135 | self.assertIn('k', day_data, "Missing task data ('k' key)") 136 | 137 | # Convert day timestamp to readable date 138 | day_timestamp = day_data['d'] 139 | day_date = datetime.datetime.fromtimestamp(day_timestamp / 1000).date() 140 | logger.info(f"Day date: {day_date}") 141 | 142 | # Verify results structure 143 | results = day_data['r'] 144 | self.assertIn('z', results, "Missing timezone ('z' key)") 145 | self.assertIn('s', results, "Missing standings ('s' key)") 146 | 147 | # Verify timezone 148 | timezone_ms = results['z'] 149 | timezone_hours = timezone_ms / (1000 * 60 * 60) 150 | logger.info(f"Timezone: UTC{'+' if timezone_hours >= 0 else ''}{timezone_hours}") 151 | 152 | # Verify standings 153 | standings = results['s'] 154 | self.assertTrue(isinstance(standings, list), "Standings is not a list") 155 | if not standings: 156 | logger.warning("No standings found in day data") 157 | else: 158 | # Verify standing structure 159 | standing = standings[0] 160 | self.assertIn('h', standing, "Missing pilot ID ('h' key)") 161 | self.assertIn('j', standing, "Missing competition ID ('j' key)") 162 | self.assertIn('q', standing, "Missing ranking ('q' key)") 163 | self.assertIn('r', standing, "Missing result status ('r' key)") 164 | self.assertIn('w', standing, "Missing flight ID ('w' key)") 165 | 166 | logger.info(f"Found {len(standings)} results for the day") 167 | 168 | # Verify task structure 169 | task = day_data['k'] 170 | self.assertIn('data', task, "Missing task data ('data' key)") 171 | 172 | task_data = task['data'] 173 | self.assertIn('g', task_data, "Missing waypoints list ('g' key)") 174 | 175 | # Verify waypoints 176 | waypoints = task_data['g'] 177 | self.assertTrue(isinstance(waypoints, list), "Waypoints is not a list") 178 | self.assertTrue(waypoints, "No waypoints found") 179 | 180 | # Verify waypoint structure 181 | waypoint = waypoints[0] 182 | self.assertIn('n', waypoint, "Missing waypoint name ('n' key)") 183 | self.assertIn('a', waypoint, "Missing latitude ('a' key)") 184 | self.assertIn('o', waypoint, "Missing longitude ('o' key)") 185 | self.assertIn('y', waypoint, "Missing type ('y' key)") 186 | 
self.assertIn('r', waypoint, "Missing radius ('r' key)") 187 | 188 | logger.info(f"Found {len(waypoints)} waypoints in the task") 189 | 190 | # Log the task details 191 | waypoint_names = [wp['n'] for wp in waypoints] 192 | logger.info(f"Task waypoints: {', '.join(waypoint_names)}") 193 | 194 | except Exception as e: 195 | self.fail(f"Error testing day data structure: {str(e)}") 196 | 197 | def test_available_days(self): 198 | """Test retrieval of available days from the competition.""" 199 | try: 200 | # Get available days 201 | days = self.sgp_comp.get_available_days() 202 | 203 | # Save the response for manual inspection 204 | self._save_response(days, f"comp_{self.comp_id}_available_days.json") 205 | 206 | # Verify days structure 207 | self.assertTrue(isinstance(days, list), "Available days is not a list") 208 | self.assertTrue(days, "No available days found") 209 | 210 | for day in days: 211 | self.assertIn('i', day, "Missing day ID ('i' key)") 212 | self.assertIn('d', day, "Missing day date ('d' key)") 213 | self.assertIn('y', day, "Missing day type flag ('y' key)") 214 | 215 | # Convert day timestamp to readable date 216 | day_timestamp = day['d'] 217 | logger.info(day_timestamp) 218 | day_date = datetime.datetime.strptime(day_timestamp, "%Y-%m-%d").date() 219 | day_status = "Race day" if day['y'] == 1 else "Non-race day" 220 | 221 | logger.info(f"Day {day['i']}: {day_date} - {day_status}") 222 | 223 | except Exception as e: 224 | self.fail(f"Error testing available days: {str(e)}") 225 | 226 | def test_competitors_info(self): 227 | """Test retrieval of competitor information from the day data.""" 228 | try: 229 | # Get competitors info 230 | competitors = self.sgp_day._get_competitors_info(include_dns_competitors=True) 231 | 232 | # Save the response for manual inspection 233 | self._save_response(competitors, f"day_{self.day_id}_competitors.json") 234 | 235 | # Verify competitors structure 236 | self.assertTrue(isinstance(competitors, list), "Competitors is not a list") 237 | 238 | if not competitors: 239 | logger.warning("No competitors found in day data") 240 | return 241 | 242 | for competitor in competitors: 243 | self.assertIn('competition_id', competitor, "Missing competition_id field") 244 | self.assertIn('pilot_name', competitor, "Missing pilot_name field") 245 | self.assertIn('plane_model', competitor, "Missing plane_model field") 246 | self.assertIn('ranking', competitor, "Missing ranking field") 247 | # IGC URL may be None for DNF/DNS competitors 248 | self.assertIn('igc_url', competitor, "Missing igc_url field") 249 | 250 | logger.info(f"Competitor {competitor['competition_id']}: " 251 | f"{competitor['pilot_name']} flying {competitor['plane_model']}") 252 | 253 | except Exception as e: 254 | self.fail(f"Error testing competitors info: {str(e)}") 255 | 256 | 257 | class TestCrosscountryUrlHandling(unittest.TestCase): 258 | """Test proper URL handling and ID extraction for Crosscountry URLs.""" 259 | 260 | def test_api_url_pattern(self): 261 | """Test extraction of IDs from direct API URLs.""" 262 | # Direct REST API URLs 263 | comp_url = "https://www.crosscountry.aero/c/sgp/rest/comp/86" 264 | day_url = "https://www.crosscountry.aero/c/sgp/rest/day/86/1547" 265 | 266 | sgp_comp = CrosscountryDaily(comp_url) 267 | sgp_day = CrosscountryDaily(day_url) 268 | 269 | self.assertEqual(sgp_comp.competition_id, 86) 270 | self.assertIsNone(sgp_comp.day_id) 271 | 272 | self.assertEqual(sgp_day.competition_id, 86) 273 | self.assertEqual(sgp_day.day_id, 1547) 274 | 275 | def 
test_web_url_pattern(self): 276 | """ 277 | Test extraction of IDs from web URLs. 278 | 279 | This test documents the current handling of web URLs and may fail 280 | if the Crosscountry class doesn't correctly handle these URL patterns. 281 | """ 282 | # Current web URLs (based on error messages) 283 | web_comp_url = "https://www.crosscountry.aero/c/sgp/overview/127" 284 | web_day_url = "https://www.crosscountry.aero/c/sgp/task/127/day/1925/overview" 285 | 286 | # Extract IDs using regex patterns 287 | comp_pattern = r"crosscountry\.aero/c/sgp/(?:overview|task)/(\d+)" 288 | day_pattern = r"crosscountry\.aero/c/sgp/task/\d+/day/(\d+)" 289 | 290 | # Extract competition ID 291 | comp_match = re.search(comp_pattern, web_comp_url) 292 | self.assertIsNotNone(comp_match, "Couldn't extract competition ID from web URL") 293 | comp_id = int(comp_match.group(1)) 294 | self.assertEqual(comp_id, 127) 295 | 296 | # Extract day ID 297 | day_match = re.search(day_pattern, web_day_url) 298 | self.assertIsNotNone(day_match, "Couldn't extract day ID from web URL") 299 | day_id = int(day_match.group(1)) 300 | self.assertEqual(day_id, 1925) 301 | 302 | # Create direct API URLs from extracted IDs 303 | api_comp_url = f"https://www.crosscountry.aero/c/sgp/rest/comp/{comp_id}" 304 | api_day_url = f"https://www.crosscountry.aero/c/sgp/rest/day/{comp_id}/{day_id}" 305 | 306 | logger.info(f"Converted web comp URL to API URL: {api_comp_url}") 307 | logger.info(f"Converted web day URL to API URL: {api_day_url}") 308 | 309 | 310 | if __name__ == "__main__": 311 | unittest.main() 312 | -------------------------------------------------------------------------------- /tests/competition/test_crosscountry.py: -------------------------------------------------------------------------------- 1 | """ 2 | Unit tests for Crosscountry (Sailplane Grand Prix) module. 3 | 4 | This module tests the functionality for accessing Crosscountry API endpoints 5 | and downloading/analyzing IGC files. 
6 | """ 7 | import datetime 8 | import json 9 | import unittest 10 | from unittest import mock 11 | from pathlib import Path 12 | 13 | from opensoar.competition.crosscountry import CrosscountryDaily 14 | from opensoar.task.race_task import RaceTask 15 | 16 | 17 | class MockResponse: 18 | """Mock urllib.request.urlopen response object.""" 19 | 20 | def __init__(self, data, status_code=200): 21 | self.data = json.dumps(data).encode('utf-8') 22 | self.status = status_code 23 | 24 | def read(self): 25 | return self.data 26 | 27 | def __enter__(self): 28 | return self 29 | 30 | def __exit__(self, *args): 31 | pass 32 | 33 | 34 | class TestCrosscountryDaily(unittest.TestCase): 35 | """Tests for CrosscountryDaily class.""" 36 | 37 | def setUp(self): 38 | """Set up test fixtures.""" 39 | self.day_url = "https://www.crosscountry.aero/c/sgp/rest/day/86/1547" 40 | self.comp_url = "https://www.crosscountry.aero/c/sgp/rest/comp/86" 41 | 42 | # Sample competition data 43 | self.comp_data = { 44 | "p": { 45 | "123": { 46 | "f": "John", 47 | "l": "Doe", 48 | "s": "LS8" 49 | }, 50 | "124": { 51 | "f": "Jane", 52 | "l": "Smith", 53 | "s": "ASG-29" 54 | }, 55 | "125": { 56 | "f": "Bob", 57 | "l": "Brown", 58 | "s": "Ventus" 59 | } 60 | }, 61 | "c": { 62 | "t": "Test Crosscountry Competition", 63 | "l": "Test Location" 64 | }, 65 | "i": [ 66 | { 67 | "i": 1547, 68 | "d": 1618012800000, # 2021-04-10 69 | "y": 1, # Race day 70 | "w": "JV" # Winner defined 71 | }, 72 | { 73 | "i": 1548, 74 | "d": 1618099200000, # 2021-04-11 75 | "y": 1, # Race day 76 | "w": None # No winner defined 77 | } 78 | ] 79 | } 80 | 81 | # Sample day data 82 | self.day_data = { 83 | "d": 1618012800000, # 2021-04-10 84 | "a": 36000000, # Start time 10:00:00 (in milliseconds) 85 | "r": { 86 | "z": 7200000, # UTC+2 in milliseconds 87 | "s": [ 88 | { 89 | "h": 123, 90 | "j": "ABC", 91 | "q": 1, 92 | "r": "", 93 | "w": 456 94 | }, 95 | { 96 | "h": 124, 97 | "j": "DEF", 98 | "q": 2, 99 | "r": "", 100 | "w": 457 101 | }, 102 | { 103 | "h": 125, 104 | "j": "GHI", 105 | "q": 0, 106 | "r": "DNS", 107 | "w": 0 108 | } 109 | ] 110 | }, 111 | "k": { 112 | "data": { 113 | "g": [ 114 | { 115 | "n": "Start", 116 | "a": 51.0, 117 | "o": 10.0, 118 | "y": "line", 119 | "r": 1000 120 | }, 121 | { 122 | "n": "TP1", 123 | "a": 51.1, 124 | "o": 10.1, 125 | "y": "cylinder", 126 | "r": 500 127 | }, 128 | { 129 | "n": "Finish", 130 | "a": 51.0, 131 | "o": 10.0, 132 | "y": "line", 133 | "r": 1000 134 | } 135 | ] 136 | } 137 | } 138 | } 139 | 140 | # Create a temporary directory for downloaded files 141 | self.temp_dir = Path("./test_igc_files") 142 | self.temp_dir.mkdir(exist_ok=True) 143 | 144 | def tearDown(self): 145 | """Clean up after tests.""" 146 | # Remove test files 147 | for file in self.temp_dir.glob("*.igc"): 148 | file.unlink() 149 | 150 | # Remove test directory 151 | self.temp_dir.rmdir() 152 | 153 | def test_extract_ids_from_url_day(self): 154 | """Test extraction of competition and day IDs from day URL.""" 155 | sgp = CrosscountryDaily(self.day_url) 156 | 157 | self.assertEqual(sgp.competition_id, 86) 158 | self.assertEqual(sgp.day_id, 1547) 159 | 160 | def test_extract_ids_from_url_comp(self): 161 | """Test extraction of competition ID from competition URL.""" 162 | sgp = CrosscountryDaily(self.comp_url) 163 | 164 | self.assertEqual(sgp.competition_id, 86) 165 | self.assertIsNone(sgp.day_id) 166 | 167 | @mock.patch('urllib.request.urlopen') 168 | def test_get_competition_data(self, mock_urlopen): 169 | """Test fetching competition data from the 
API.""" 170 | mock_urlopen.return_value = MockResponse(self.comp_data) 171 | 172 | sgp = CrosscountryDaily(self.comp_url) 173 | data = sgp._get_competition_data() 174 | 175 | self.assertEqual(data, self.comp_data) 176 | mock_urlopen.assert_called_once_with(f"{CrosscountryDaily.BASE_API_URL}/comp/86") 177 | 178 | @mock.patch('urllib.request.urlopen') 179 | def test_get_day_data(self, mock_urlopen): 180 | """Test fetching day data from the API.""" 181 | mock_urlopen.return_value = MockResponse(self.day_data) 182 | 183 | sgp = CrosscountryDaily(self.day_url) 184 | data = sgp._get_day_data() 185 | 186 | self.assertEqual(data, self.day_data) 187 | mock_urlopen.assert_called_once_with(f"{CrosscountryDaily.BASE_API_URL}/day/86/1547") 188 | 189 | @mock.patch('urllib.request.urlopen') 190 | def test_get_day_data_without_day_id(self, mock_urlopen): 191 | """Test fetching day data when day ID is not provided.""" 192 | mock_urlopen.side_effect = [ 193 | MockResponse(self.comp_data), 194 | MockResponse(self.day_data) 195 | ] 196 | 197 | sgp = CrosscountryDaily(self.comp_url) 198 | data = sgp._get_day_data() 199 | 200 | self.assertEqual(data, self.day_data) 201 | self.assertEqual(sgp.day_id, 1547) # Should select the latest day with a winner 202 | 203 | expected_calls = [ 204 | mock.call(f"{CrosscountryDaily.BASE_API_URL}/comp/86"), 205 | mock.call(f"{CrosscountryDaily.BASE_API_URL}/day/86/1547") 206 | ] 207 | mock_urlopen.assert_has_calls(expected_calls) 208 | 209 | @mock.patch('urllib.request.urlopen') 210 | def test_get_competition_day_info(self, mock_urlopen): 211 | """Test retrieving competition day information.""" 212 | mock_urlopen.side_effect = [ 213 | MockResponse(self.comp_data), 214 | MockResponse(self.day_data) 215 | ] 216 | 217 | sgp = CrosscountryDaily(self.day_url) 218 | name, date, class_name = sgp._get_competition_day_info() 219 | 220 | self.assertEqual(name, "Test Crosscountry Competition") 221 | self.assertEqual(date, datetime.date(2021, 4, 10)) 222 | self.assertEqual(class_name, "Default") 223 | 224 | @mock.patch('urllib.request.urlopen') 225 | def test_get_competitors_info(self, mock_urlopen): 226 | """Test retrieving competitor information.""" 227 | # Need to mock both competition data and day data since both are used 228 | mock_urlopen.side_effect = [ 229 | MockResponse(self.comp_data), # For _get_competition_data call 230 | MockResponse(self.day_data) # For _get_day_data call 231 | ] 232 | 233 | sgp = CrosscountryDaily(self.day_url) 234 | # Force caching of competition data 235 | sgp._competition_data = self.comp_data 236 | # Force caching of day data 237 | sgp._day_data = self.day_data 238 | 239 | competitors = sgp._get_competitors_info(include_dns_competitors=False) 240 | 241 | self.assertEqual(len(competitors), 2) # Should exclude DNS competitor 242 | 243 | self.assertEqual(competitors[0]['competition_id'], "ABC") 244 | self.assertEqual(competitors[0]['pilot_name'], "John Doe") 245 | self.assertEqual(competitors[0]['plane_model'], "LS8") 246 | self.assertEqual(competitors[0]['ranking'], 1) 247 | self.assertEqual(competitors[0]['igc_url'], f"{CrosscountryDaily.FLIGHT_DOWNLOAD_URL}/456") 248 | 249 | # Test including DNS competitors 250 | competitors = sgp._get_competitors_info(include_dns_competitors=True) 251 | self.assertEqual(len(competitors), 3) # Should include DNS competitor 252 | 253 | @mock.patch('urllib.request.urlopen') 254 | def test_get_available_days(self, mock_urlopen): 255 | """Test retrieving available competition days.""" 256 | mock_urlopen.return_value = 
MockResponse(self.comp_data) 257 | 258 | sgp = CrosscountryDaily(self.comp_url) 259 | days = sgp.get_available_days() 260 | 261 | self.assertEqual(len(days), 2) 262 | self.assertEqual(days[0]['i'], 1547) 263 | self.assertEqual(days[1]['i'], 1548) 264 | 265 | def test_extract_waypoints(self): 266 | """Test extracting waypoints from task data.""" 267 | task_data = self.day_data['k']['data'] 268 | 269 | sgp = CrosscountryDaily(self.day_url) 270 | waypoints = sgp._extract_waypoints(task_data) 271 | 272 | self.assertEqual(len(waypoints), 3) 273 | 274 | # Check start point 275 | self.assertEqual(waypoints[0].name, "Start") 276 | self.assertEqual(waypoints[0].latitude, 51.0) 277 | self.assertEqual(waypoints[0].longitude, 10.0) 278 | self.assertEqual(waypoints[0].r_max, 1000) 279 | self.assertEqual(waypoints[0].angle_max, 90) 280 | self.assertTrue(waypoints[0].is_line) 281 | self.assertEqual(waypoints[0].sector_orientation, "next") 282 | 283 | # Check turnpoint 284 | self.assertEqual(waypoints[1].name, "TP1") 285 | self.assertEqual(waypoints[1].latitude, 51.1) 286 | self.assertEqual(waypoints[1].longitude, 10.1) 287 | self.assertEqual(waypoints[1].r_max, 500) 288 | self.assertEqual(waypoints[1].angle_max, 180) 289 | self.assertFalse(waypoints[1].is_line) 290 | 291 | # Check finish point 292 | self.assertEqual(waypoints[2].name, "Finish") 293 | self.assertEqual(waypoints[2].r_max, 1000) 294 | self.assertTrue(waypoints[2].is_line) 295 | self.assertEqual(waypoints[2].sector_orientation, "previous") 296 | 297 | def test_extract_start_opening(self): 298 | """Test extracting start opening time from day data.""" 299 | sgp = CrosscountryDaily(self.day_url) 300 | start_opening = sgp._extract_start_opening(self.day_data) 301 | 302 | expected_datetime = datetime.datetime( 303 | 2021, 4, 10, 10, 0, 0, 304 | tzinfo=datetime.timezone(datetime.timedelta(hours=2)) 305 | ) 306 | self.assertEqual(start_opening, expected_datetime) 307 | 308 | @mock.patch('urllib.request.urlopen') 309 | @mock.patch('opensoar.competition.crosscountry.CrosscountryDaily.download_flight') 310 | @mock.patch('opensoar.competition.crosscountry.Reader') 311 | @mock.patch('builtins.open', new_callable=mock.mock_open) 312 | def test_generate_competition_day(self, mock_open, mock_reader, mock_download, mock_urlopen): 313 | """Test generating a CompetitionDay object from Crosscountry data.""" 314 | # Setup mocks 315 | mock_urlopen.side_effect = [ 316 | MockResponse(self.comp_data), # For _get_competition_data call 317 | MockResponse(self.day_data), # For _get_day_data call 318 | MockResponse(self.comp_data), # For _get_competition_day_info -> _get_competition_data 319 | MockResponse(self.day_data), # For _get_competition_day_info -> _get_day_data 320 | MockResponse(self.day_data), # For additional _get_day_data call 321 | MockResponse(self.comp_data), # For _get_competitors_info -> _get_competition_data 322 | MockResponse(self.day_data) # For _get_competitors_info -> _get_day_data 323 | ] 324 | 325 | # Cache data to prevent too many API calls 326 | sgp = CrosscountryDaily(self.day_url) 327 | sgp._competition_data = self.comp_data 328 | sgp._day_data = self.day_data 329 | 330 | # Mock downloading IGC files 331 | mock_download.side_effect = lambda url, cn: f"{self.temp_dir}/{cn}.igc" 332 | 333 | # Mock IGC file content and reading 334 | mock_igc_content = "AFILETYPENM" # Minimal IGC content for testing 335 | mock_open.return_value.read.return_value = mock_igc_content 336 | 337 | # Mock the Reader class and its read method 338 | mock_parser = 
mock.MagicMock() 339 | mock_parser.read.return_value = { 340 | 'fix_records': (None, [{'time': '101010', 'lat': 51.0, 'lon': 10.0}]) 341 | } 342 | mock_reader.return_value = mock_parser 343 | 344 | # Create CrosscountryDaily instance and generate competition day 345 | competition_day = sgp.generate_competition_day(str(self.temp_dir)) 346 | 347 | # Verify results 348 | self.assertEqual(competition_day.name, "Test Crosscountry Competition") 349 | self.assertEqual(competition_day.date, datetime.date(2021, 4, 10)) 350 | self.assertEqual(competition_day.plane_class, "Default") 351 | 352 | # Verify competitors were created 353 | self.assertEqual(len(competition_day.competitors), 2) 354 | self.assertEqual(competition_day.competitors[0].competition_id, "ABC") 355 | self.assertEqual(competition_day.competitors[0].pilot_name, "John Doe") 356 | 357 | # Verify task was created correctly 358 | self.assertIsInstance(competition_day.task, RaceTask) 359 | self.assertEqual(len(competition_day.task.waypoints), 3) 360 | 361 | # Verify that files were properly opened and read 362 | mock_open.assert_called() 363 | mock_reader.return_value.read.assert_called() 364 | 365 | if __name__ == "__main__": 366 | unittest.main() 367 | -------------------------------------------------------------------------------- /opensoar/task/aat.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from copy import deepcopy 3 | 4 | from opensoar.task.task import Task 5 | from opensoar.utilities.helper_functions import double_iterator, calculate_distance_bearing, calculate_destination 6 | 7 | 8 | # TODO: This is a temporary fix for deepcopy issues with TimeZoneFix 9 | # Issue reference: https://github.com/Turbo87/aerofiles/issues/318 10 | # Remove this once aerofiles is updated with a proper fix 11 | from aerofiles.util.timezone import TimeZoneFix 12 | 13 | # TimeZoneFix from aerofiles doesn't support deepcopy properly 14 | # Add a __deepcopy__ method to fix serialization issues 15 | def _deepcopy_timezone_fix(self, memo): 16 | """Create a proper deep copy of a TimeZoneFix instance.""" 17 | return TimeZoneFix(self.fix) 18 | 19 | # Apply monkeypatch 20 | TimeZoneFix.__deepcopy__ = _deepcopy_timezone_fix 21 | 22 | 23 | class AAT(Task): 24 | """ 25 | Assigned Area Task. 
26 | """ 27 | 28 | def __init__(self, waypoints, t_min: datetime.timedelta, timezone: int=None, start_opening: datetime.time=None, 29 | start_time_buffer: int=0, multistart: bool=False): 30 | """ 31 | :param waypoints: see super() 32 | :param t_min: minimal time to complete task 33 | :param timezone: see super() 34 | :param start_opening: see super() 35 | :param start_time_buffer: see super() 36 | :param multistart: see super() 37 | """ 38 | super().__init__(waypoints, timezone, start_opening, start_time_buffer, multistart) 39 | 40 | self._t_min = t_min 41 | self._nominal_distances = self._calculate_nominal_distances() 42 | 43 | def __eq__(self, other): 44 | if self.t_min != other.t_min: 45 | return False 46 | else: 47 | return super().__eq__(other) 48 | 49 | @property 50 | def t_min(self): 51 | return self._t_min 52 | 53 | def _calculate_nominal_distances(self): 54 | distances = list() 55 | for start_waypoint, end_waypoint in double_iterator(self.waypoints): 56 | distance, _ = calculate_distance_bearing(start_waypoint.fix, end_waypoint.fix) 57 | distances.append(distance) 58 | return distances 59 | 60 | def apply_rules(self, trace): 61 | fixes, outlanding_fix, sector_fixes = self._calculate_trip_fixes(trace) 62 | start_time = self.determine_refined_start(trace, fixes) 63 | distances = self._determine_trip_distances(fixes, outlanding_fix) 64 | finish_time = self._determine_finish_time(fixes, outlanding_fix) 65 | return fixes, start_time, outlanding_fix, distances, finish_time, sector_fixes 66 | 67 | def _determine_finish_time(self, fixes, outlanding_fix): 68 | total_trip_time = (fixes[-1]['datetime'] - fixes[0]['datetime']).total_seconds() 69 | minimum_trip_time = self._t_min.total_seconds() 70 | if outlanding_fix is None and total_trip_time < minimum_trip_time: 71 | finish_time = fixes[0]['datetime'] + self._t_min 72 | else: 73 | finish_time = fixes[-1]['datetime'] 74 | return finish_time 75 | 76 | def _calculate_trip_fixes(self, trace): 77 | 78 | sector_fixes, enl_outlanding_fix = self._get_sector_fixes(trace) 79 | reduced_sector_fixes = self._reduce_sector_fixes(sector_fixes, max_fixes_sector=300) 80 | 81 | outlanded = len(sector_fixes) != self.no_legs+1 82 | 83 | if outlanded: 84 | outside_sector_fixes = self._get_outside_sector_fixes(trace, sector_fixes, enl_outlanding_fix) 85 | reduced_outside_sector_fixes = self._reduce_fixes(outside_sector_fixes, max_fixes=300) 86 | 87 | waypoint_fixes = self._get_waypoint_fixes(outlanded, reduced_sector_fixes, reduced_outside_sector_fixes) 88 | max_distance_fixes = self._compute_max_distance_fixes(outlanded, waypoint_fixes) 89 | 90 | waypoint_fixes = self._refine_max_distance_fixes(outlanded, max_distance_fixes, sector_fixes, 91 | reduced_outside_sector_fixes) 92 | max_distance_fixes = self._compute_max_distance_fixes(outlanded, waypoint_fixes) 93 | 94 | trip_fixes = max_distance_fixes[:-1] 95 | outlanding_fix = max_distance_fixes[-1] 96 | else: 97 | max_distance_fixes = self._compute_max_distance_fixes(outlanded, reduced_sector_fixes) 98 | waypoint_fixes = self._refine_max_distance_fixes(outlanded, max_distance_fixes, sector_fixes) 99 | 100 | max_distance_fixes = self._compute_max_distance_fixes(outlanded, waypoint_fixes) 101 | 102 | trip_fixes = max_distance_fixes 103 | outlanding_fix = None 104 | 105 | return trip_fixes, outlanding_fix, sector_fixes 106 | 107 | def _determine_trip_distances(self, fixes, outlanding_fix): 108 | 109 | distances = list() 110 | for leg, (fix1, fix2) in enumerate(double_iterator(fixes)): 111 | distance = 
self._calculate_distance_completed_leg(leg, fix1, fix2) 112 | distances.append(distance) 113 | 114 | if outlanding_fix is not None: 115 | outlanding_leg = len(fixes) - 1 116 | distance = self._calculate_distance_outlanding_leg(outlanding_leg, fixes[-1], outlanding_fix) 117 | distances.append(distance) 118 | 119 | return distances 120 | 121 | def _get_sector_fixes(self, trace): 122 | 123 | current_leg = -1 # not yet started 124 | sector_fixes = list() 125 | enl_first_fix = None 126 | enl_registered = False 127 | 128 | for fix_minus1, fix in double_iterator(trace): 129 | 130 | # check ENL when aircraft logs ENL and no ENL outlanding has taken place 131 | if not enl_registered and self.enl_value_exceeded(fix): 132 | if enl_first_fix is None: 133 | enl_first_fix = fix 134 | 135 | enl_time = (fix['datetime'] - enl_first_fix['datetime']).total_seconds() 136 | if self.enl_time_exceeded(enl_time): 137 | enl_registered = True 138 | if current_leg > 0: 139 | break 140 | elif not enl_registered: 141 | enl_first_fix = None 142 | 143 | if current_leg == -1: # before start 144 | if self.started(fix_minus1, fix): 145 | self._add_aat_sector_fix(sector_fixes, 0, fix_minus1) # at task start point 146 | current_leg = 0 147 | enl_registered = False 148 | enl_first_fix = None 149 | elif current_leg == 0: # first leg, re-start still possible 150 | if self.started(fix_minus1, fix): # restart 151 | sector_fixes[0] = [fix_minus1] # at task start point 152 | current_leg = 0 153 | enl_registered = False 154 | enl_first_fix = None 155 | elif self.waypoints[1].inside_sector(fix_minus1): # first sector 156 | if enl_registered: 157 | break # break when ENL is used and not restarted 158 | self._add_aat_sector_fix(sector_fixes, 1, fix_minus1) 159 | current_leg += 1 160 | elif 0 < current_leg < self.no_legs - 1: # at least second leg, no re-start possible 161 | if self.waypoints[current_leg].inside_sector(fix_minus1): # previous waypoint 162 | self._add_aat_sector_fix(sector_fixes, current_leg, fix_minus1) 163 | elif self.waypoints[current_leg + 1].inside_sector(fix_minus1): # next waypoint 164 | self._add_aat_sector_fix(sector_fixes, current_leg + 1, fix_minus1) 165 | current_leg += 1 166 | elif current_leg == self.no_legs - 1: # last leg 167 | if self.waypoints[current_leg].inside_sector(fix_minus1): 168 | self._add_aat_sector_fix(sector_fixes, current_leg, fix_minus1) 169 | elif self.finished(fix_minus1, fix): 170 | sector_fixes.append([fix]) # at task finish point 171 | break 172 | 173 | # add last fix to sector if not already present 174 | last_fix = trace[-1] 175 | last_waypoint = self.waypoints[current_leg] 176 | if not last_waypoint.is_line and last_waypoint.inside_sector(last_fix) and last_fix is not sector_fixes[-1][-1]: 177 | sector_fixes[-1].append(last_fix) 178 | 179 | if enl_registered: 180 | return sector_fixes, enl_first_fix 181 | else: 182 | return sector_fixes, None 183 | 184 | def _reduce_fixes(self, fixes, max_fixes): 185 | reduction_factor = len(fixes) // max_fixes + 1 186 | return fixes[0::reduction_factor] 187 | 188 | def _reduce_sector_fixes(self, sector_fixes, max_fixes_sector): 189 | reduced_sector_fixes = list() 190 | for sector, fixes in enumerate(sector_fixes): 191 | reduced_fixes = self._reduce_fixes(fixes, max_fixes_sector) 192 | reduced_sector_fixes.append(reduced_fixes) 193 | 194 | return reduced_sector_fixes 195 | 196 | def _get_outside_sector_fixes(self, trace, sector_fixes, enl_outlanding_fix): 197 | last_sector_fix = sector_fixes[-1][-1] 198 | last_sector_index = 
trace.index(last_sector_fix) 199 | 200 | outside_sector_fixes = list() 201 | if enl_outlanding_fix is not None: 202 | enl_outlanding_index = trace.index(enl_outlanding_fix) 203 | 204 | if enl_outlanding_index > last_sector_index: 205 | outside_sector_fixes = trace[last_sector_index + 1: enl_outlanding_index + 1] 206 | else: 207 | outside_sector_fixes = trace[last_sector_index+1:] 208 | 209 | return outside_sector_fixes 210 | 211 | def _add_aat_sector_fix(self, sector_fixes, taskpoint_index, fix): 212 | if len(sector_fixes) < (taskpoint_index + 1): 213 | sector_fixes.append([fix]) 214 | else: 215 | sector_fixes[taskpoint_index].append(fix) 216 | 217 | def _compute_max_distance_fixes(self, outlanded, waypoint_fixes): 218 | 219 | distances = self._calculate_distances_between_sector_fixes(outlanded, waypoint_fixes) 220 | 221 | # determine index on last sector/outlanding-group with maximum distance 222 | max_dist = 0 223 | maximized_dist_index = None 224 | for index, distance in enumerate(distances[-1]): 225 | if distance[0] > max_dist: 226 | max_dist = distance[0] 227 | maximized_dist_index = index 228 | 229 | last_fix = waypoint_fixes[-1][maximized_dist_index] 230 | max_distance_fixes = [last_fix] 231 | 232 | index = maximized_dist_index 233 | 234 | legs = len(waypoint_fixes) - 1 235 | for leg in list(reversed(range(legs))): 236 | index = distances[leg + 1][index][1] 237 | max_distance_fix = waypoint_fixes[leg][index] 238 | max_distance_fixes.insert(0, max_distance_fix) 239 | 240 | return max_distance_fixes 241 | 242 | def _calculate_distances_between_sector_fixes(self, outlanded, waypoint_fixes): 243 | 244 | distances = [[]] * len(waypoint_fixes) 245 | distances[0] = [[0, 0]] * len(waypoint_fixes[0]) 246 | 247 | completed_legs = len(waypoint_fixes) - 1 248 | if outlanded: 249 | completed_legs -= 1 250 | 251 | for leg in range(completed_legs): # successful legs 252 | 253 | distances[leg + 1] = [[0, 0] for _ in range(len(waypoint_fixes[leg + 1]))] 254 | 255 | for fix2_index, fix2 in enumerate(waypoint_fixes[leg + 1]): 256 | for fix1_index, fix1 in enumerate(waypoint_fixes[leg]): 257 | distance = self._calculate_distance_completed_leg(leg, fix1, fix2) 258 | total_distance = distances[leg][fix1_index][0] + distance 259 | if total_distance > distances[leg + 1][fix2_index][0]: 260 | distances[leg + 1][fix2_index] = [total_distance, fix1_index] 261 | if outlanded: 262 | leg = completed_legs 263 | distances[leg + 1] = [[0, 0] for _ in range(len(waypoint_fixes[leg + 1]))] 264 | for fix2_index, fix2 in enumerate(waypoint_fixes[leg + 1]): 265 | for fix1_index, fix1 in enumerate(waypoint_fixes[leg][0:fix2_index+1]): 266 | distance = self._calculate_distance_outlanding_leg(leg, fix1, fix2) 267 | total_distance = distances[leg][fix1_index][0] + distance 268 | if total_distance > distances[leg + 1][fix2_index][0]: 269 | distances[leg + 1][fix2_index] = [total_distance, fix1_index] 270 | 271 | return distances 272 | 273 | def _refine_max_distance_fixes(self, outlanded, max_distance_fixes, sector_fixes, outside_sector_fixes=None): 274 | """look around fixes whether more precise fixes can be found, increasing the distance""" 275 | 276 | if outside_sector_fixes is None: 277 | outside_sector_fixes = [] 278 | 279 | refinement_fixes = 10 280 | waypoint_fixes = [[max_distance_fixes[0]]] # already include start fix 281 | successfull_legs = len(max_distance_fixes) - 1 282 | if outlanded: 283 | successfull_legs -= 1 284 | 285 | for leg in range(len(max_distance_fixes) - 1): 286 | 287 | on_outlanding_leg = outlanded 
and leg > successfull_legs - 1 288 | 289 | fix = max_distance_fixes[leg+1] 290 | if on_outlanding_leg: 291 | if outside_sector_fixes: 292 | fixes = outside_sector_fixes 293 | else: 294 | fixes = sector_fixes[leg] 295 | else: 296 | fixes = sector_fixes[leg + 1] 297 | 298 | refinement_end, refinement_start = self._get_refinement_bounds(fix, fixes, refinement_fixes) 299 | waypoint_fixes.append(fixes[refinement_start:refinement_end]) 300 | 301 | return waypoint_fixes 302 | 303 | def _get_refinement_bounds(self, fix, fixes, refinement_fixes): 304 | """ 305 | :param fix: 306 | :param fixes: 307 | :param refinement_fixes: this number of fixes before and after each fix 308 | :return: 309 | """ 310 | max_distance_index = fixes.index(fix) 311 | refinement_start = max(max_distance_index - refinement_fixes, 0) 312 | refinement_end = min(len(fixes) + 1, max_distance_index + refinement_fixes + 1) 313 | return refinement_end, refinement_start 314 | 315 | def _calculate_distance_outlanding_leg(self, leg, start_tp_fix, outlanding_fix): 316 | if leg == 0: 317 | tp1 = self.waypoints[leg + 1] 318 | 319 | _, bearing = calculate_distance_bearing(start_tp_fix, outlanding_fix) 320 | closest_area_fix = calculate_destination(start_tp_fix, tp1.r_max, bearing) 321 | 322 | distance, _ = calculate_distance_bearing(self.start.fix, closest_area_fix) 323 | distance -= calculate_distance_bearing(outlanding_fix, closest_area_fix)[0] 324 | elif leg == self.no_legs - 1: # take finish-point of task 325 | distance, _ = calculate_distance_bearing(start_tp_fix, self.finish.fix) 326 | distance -= calculate_distance_bearing(self.finish.fix, outlanding_fix)[0] 327 | 328 | else: 329 | tp1 = self.waypoints[leg + 1] 330 | 331 | _, bearing = calculate_distance_bearing(tp1.fix, outlanding_fix) 332 | closest_area_fix = calculate_destination(tp1.fix, tp1.r_max, bearing) 333 | 334 | if leg == 0: 335 | distance, _ = calculate_distance_bearing(self.start.fix, closest_area_fix) 336 | else: 337 | distance, _ = calculate_distance_bearing(start_tp_fix, closest_area_fix) 338 | distance -= calculate_distance_bearing(outlanding_fix, closest_area_fix)[0] 339 | 340 | return distance 341 | 342 | def _calculate_distance_completed_leg(self, leg, start_tp_fix, end_tp_fix): 343 | if leg == 0: # take start-point of task 344 | start = self.waypoints[0] 345 | distance, _ = calculate_distance_bearing(start.fix, end_tp_fix) 346 | 347 | if start.distance_correction == 'shorten_legs': 348 | distance -= start.r_max 349 | elif leg == self.no_legs - 1: # take finish-point of task 350 | finish = self.waypoints[-1] 351 | distance, _ = calculate_distance_bearing(start_tp_fix, finish.fix) 352 | 353 | if finish.distance_correction == 'shorten_legs': 354 | distance -= finish.r_max 355 | else: 356 | distance, _ = calculate_distance_bearing(start_tp_fix, end_tp_fix) 357 | 358 | return distance 359 | 360 | def _get_waypoint_fixes(self, outlanded, sector_fixes, outside_sector_fixes=None): 361 | """ 362 | Waypoint fixes are fixes which can be used for the distance optimisation. They are grouped per waypoint. In 363 | case of an outlanding, the last sector waypoints are duplicated at the enable optimisation inside the sector. 364 | Optional fixes outside the sector on the outlanding leg are also added in the last list. 
365 | :param outlanded: 366 | :param sector_fixes: 367 | :param outside_sector_fixes: 368 | :return: 369 | """ 370 | 371 | if outside_sector_fixes is None: 372 | outside_sector_fixes = list() 373 | 374 | waypoint_fixes = deepcopy(sector_fixes) 375 | if outlanded: 376 | waypoint_fixes.append(sector_fixes[-1]) 377 | waypoint_fixes[-1].extend(outside_sector_fixes) 378 | 379 | return waypoint_fixes 380 | -------------------------------------------------------------------------------- /opensoar/competition/crosscountry.py: -------------------------------------------------------------------------------- 1 | """ 2 | Helper functions for Crosscountry (Sailplane Grand Prix) competitions. 3 | This module provides functionality to access competition data from the Crosscountry API endpoints 4 | and download IGC files for analysis. 5 | """ 6 | import json 7 | import re 8 | import datetime 9 | from typing import List, Dict, Tuple, Optional 10 | import urllib.request 11 | import logging 12 | 13 | from aerofiles.igc import Reader 14 | 15 | from opensoar.competition.competition_day import CompetitionDay 16 | from opensoar.competition.competitor import Competitor 17 | from opensoar.competition.daily_results_page import DailyResultsPage 18 | from opensoar.task.task import Task 19 | from opensoar.task.waypoint import Waypoint 20 | from opensoar.task.race_task import RaceTask 21 | 22 | logger = logging.getLogger(__name__) 23 | 24 | class CrosscountryDaily(DailyResultsPage): 25 | """ 26 | Helper class for dealing with Crosscountry (Sailplane Grand Prix) daily result pages. 27 | This class interfaces with the Crosscountry API to retrieve competition data. 28 | """ 29 | 30 | # API endpoints 31 | BASE_API_URL = "https://www.crosscountry.aero/c/sgp/rest" 32 | FLIGHT_DOWNLOAD_URL = "https://www.crosscountry.aero/flight/download/sgp" 33 | 34 | def __init__(self, url: str): 35 | """ 36 | Initialize with the URL to the Crosscountry API. 37 | 38 | Args: 39 | url: URL to the Crosscountry API, in format: 40 | https://www.crosscountry.aero/c/sgp/rest/day/{comp_id}/{day_id} 41 | or 42 | https://www.crosscountry.aero/c/sgp/rest/comp/{comp_id} 43 | """ 44 | super().__init__(url) 45 | 46 | # Extract competition ID and day ID from the URL 47 | self.competition_id = None 48 | self.day_id = None 49 | self._extract_ids_from_url(url) 50 | 51 | # API data will be loaded on demand 52 | self._competition_data = None 53 | self._day_data = None 54 | 55 | def _extract_ids_from_url(self, url: str): 56 | """ 57 | Extract competition ID and day ID from the URL. 
58 | 59 | Args: 60 | url: URL to the Crosscountry API 61 | """ 62 | # Try to match day URL pattern 63 | day_pattern = r'crosscountry\.aero/c/sgp/rest/day/(\d+)/(\d+)' 64 | day_match = re.search(day_pattern, url) 65 | 66 | if day_match: 67 | self.competition_id = int(day_match.group(1)) 68 | self.day_id = int(day_match.group(2)) 69 | logger.info(f"Extracted competition ID: {self.competition_id}, day ID: {self.day_id}") 70 | return 71 | 72 | # Try to match competition URL pattern 73 | comp_pattern = r'crosscountry\.aero/c/sgp/rest/comp/(\d+)' 74 | comp_match = re.search(comp_pattern, url) 75 | 76 | if comp_match: 77 | self.competition_id = int(comp_match.group(1)) 78 | logger.info(f"Extracted competition ID: {self.competition_id}") 79 | return 80 | 81 | # If it's an sgp.aero URL, we'll need to discover the competition ID 82 | sgp_pattern = r'sgp\.aero/([^/]+)' 83 | sgp_match = re.search(sgp_pattern, url) 84 | 85 | if sgp_match: 86 | self.competition_name = sgp_match.group(1) 87 | logger.info(f"Found Crosscountry competition name: {self.competition_name}, will need to discover API endpoints") 88 | return 89 | 90 | # If no patterns match, warn but don't fail yet 91 | logger.warning(f"Could not extract IDs from URL: {url}") 92 | 93 | def _get_competition_data(self) -> Dict: 94 | """ 95 | Fetch competition data from the Crosscountry API. 96 | 97 | Returns: 98 | Dictionary with competition data 99 | """ 100 | if self._competition_data is not None: 101 | return self._competition_data 102 | 103 | if not self.competition_id: 104 | raise ValueError("No competition ID available") 105 | 106 | url = f"{self.BASE_API_URL}/comp/{self.competition_id}" 107 | try: 108 | with urllib.request.urlopen(url) as response: 109 | data = json.loads(response.read()) 110 | self._competition_data = data 111 | return data 112 | except Exception as e: 113 | logger.error(f"Error fetching competition data: {e}") 114 | raise 115 | 116 | def _get_day_data(self) -> Dict: 117 | """ 118 | Fetch day data from the Crosscountry API. 119 | 120 | Returns: 121 | Dictionary with day data 122 | """ 123 | if self._day_data is not None: 124 | return self._day_data 125 | 126 | if not self.competition_id: 127 | raise ValueError("No competition ID available") 128 | 129 | if not self.day_id: 130 | # We need to select a day 131 | comp_data = self._get_competition_data() 132 | days = comp_data.get('i', []) 133 | # Only get the days that have a winner 134 | days = [day_data for day_data in days if day_data.get('w')] 135 | 136 | if not days: 137 | raise ValueError("No competition days found") 138 | 139 | # Sort days by date and get the latest 140 | sorted_days = sorted(days, key=lambda d: d.get('d', ''), reverse=True) 141 | self.day_id = sorted_days[0].get('i') 142 | 143 | if not self.day_id: 144 | raise ValueError("Could not determine day ID") 145 | 146 | logger.info(f"Selected day ID: {self.day_id}") 147 | 148 | url = f"{self.BASE_API_URL}/day/{self.competition_id}/{self.day_id}" 149 | try: 150 | with urllib.request.urlopen(url) as response: 151 | data = json.loads(response.read()) 152 | self._day_data = data 153 | return data 154 | except Exception as e: 155 | logger.error(f"Error fetching day data: {e}") 156 | raise 157 | 158 | def _get_competition_day_info(self) -> Tuple[str, datetime.date, str]: 159 | """ 160 | Get competition name, date, and class. 
161 | 162 | Returns: 163 | tuple containing (competition_name, date, class_name) 164 | """ 165 | # Get competition data 166 | comp_data = self._get_competition_data() 167 | 168 | if not comp_data: 169 | raise ValueError("No competition data available") 170 | 171 | comp_info = comp_data.get('c', {}) 172 | competition_name = comp_info.get('t', 'Unknown Competition') 173 | 174 | # Get day data 175 | day_data = self._get_day_data() 176 | 177 | # Extract date 178 | timestamp_ms = day_data.get('d') 179 | if timestamp_ms: 180 | try: 181 | day_date = datetime.date.fromtimestamp(timestamp_ms / 1000) 182 | except ValueError: 183 | logger.warning(f"Could not parse date: {timestamp_ms}") 184 | day_date = datetime.date.today() 185 | else: 186 | day_date = datetime.date.today() 187 | 188 | # Use the class name from competition info 189 | class_name = "Default" # Crosscountry typically has just one class 190 | 191 | return competition_name, day_date, class_name 192 | 193 | def _get_competitors_info(self, include_hc_competitors: bool = True, include_dns_competitors: bool = False) -> List[Dict]: 194 | """ 195 | Extract competitor information from the Crosscountry API. 196 | 197 | Args: 198 | include_hc_competitors: Whether to include hors-concours competitors 199 | include_dns_competitors: Whether to include competitors who did not start or did not fly 200 | 201 | Returns: 202 | List of dictionaries with competitor information 203 | """ 204 | # Get day data 205 | competition_data = self._get_competition_data() 206 | day_data = self._get_day_data() 207 | 208 | competitors_info = [] 209 | 210 | # Get pilots info 211 | pilots = competition_data.get('p', {}) 212 | 213 | # Get results for the day 214 | results = day_data.get('r', {}).get('s', []) 215 | 216 | for result in results: 217 | pilot_id = result.get('h') 218 | pilot_info = pilots.get(str(pilot_id), {}) 219 | 220 | status = result.get('r', '') 221 | if status in ['DNS', 'DNF'] and not include_dns_competitors: 222 | continue 223 | 224 | # Check for HC (hors concours) status 225 | is_hc = isinstance(result.get('w'), int) and result.get('w') == 0 226 | if is_hc and not include_hc_competitors: 227 | continue 228 | 229 | # Extract ranking 230 | try: 231 | ranking = int(result.get('q', 0)) # Position in results 232 | except (ValueError, TypeError): 233 | ranking = result.get('q', 0) 234 | 235 | # Get competition ID (CN) 236 | competition_id = result.get('j', '') 237 | 238 | # Extract pilot name 239 | first_name = pilot_info.get('f', '') 240 | last_name = pilot_info.get('l', '') 241 | pilot_name = f"{first_name} {last_name}".strip() 242 | 243 | # Extract glider model 244 | plane_model = pilot_info.get('s', '') 245 | 246 | # Extract IGC URL if available 247 | igc_id = result.get('w') 248 | igc_url = f"{self.FLIGHT_DOWNLOAD_URL}/{igc_id}" if igc_id else None 249 | 250 | competitors_info.append({ 251 | 'ranking': ranking, 252 | 'competition_id': competition_id, 253 | 'igc_url': igc_url, 254 | 'pilot_name': pilot_name, 255 | 'plane_model': plane_model 256 | }) 257 | 258 | return competitors_info 259 | 260 | def get_available_days(self) -> List[Dict]: 261 | """ 262 | Get all available days/tasks for this competition. 
263 | 264 | Returns: 265 | List of dictionaries with day information 266 | """ 267 | comp_data = self._get_competition_data() 268 | days = comp_data.get('i', []) 269 | 270 | # Filter out practice days if needed 271 | race_days = [day for day in days if day.get('y') == 1] # Type 1 seems to be race days 272 | 273 | return race_days 274 | 275 | def generate_competition_day(self, target_directory: str, download_progress=None, start_time_buffer: int = 0): 276 | """ 277 | Get competition day with all flights from the Crosscountry API. 278 | 279 | Args: 280 | target_directory: Directory in which the IGC files are saved 281 | download_progress: Optional progress function 282 | start_time_buffer: Optional relaxation on the start time in seconds 283 | 284 | Returns: 285 | CompetitionDay object 286 | """ 287 | # Set the directory for downloaded IGC files 288 | competition_name, date, class_name = self._get_competition_day_info() 289 | self.set_igc_directory(target_directory, competition_name, class_name, date) 290 | 291 | # Get the day data 292 | day_data = self._get_day_data() 293 | 294 | # Get competitors information 295 | competitors_info = self._get_competitors_info() 296 | 297 | # Get task information from the day data 298 | task_data = day_data.get('k', {}).get('data', {}) 299 | waypoints = self._extract_waypoints(task_data) 300 | 301 | # Extract task start time 302 | start_opening = self._extract_start_opening(day_data) 303 | 304 | # Create task object (assuming Race Task for Crosscountry) 305 | # Get timezone information if available 306 | timezone_offset = day_data.get('r', {}).get('z') 307 | timezone = timezone_offset // 3600000 if timezone_offset else None # Convert from ms to hours 308 | 309 | task = RaceTask(waypoints, timezone, start_opening, start_time_buffer) 310 | 311 | # Download flights and create Competitor objects 312 | competitors = [] 313 | files_downloaded = 0 314 | total_competitors = len(competitors_info) 315 | 316 | for competitor_info in competitors_info: 317 | competition_id = competitor_info['competition_id'] 318 | igc_url = competitor_info['igc_url'] 319 | ranking = competitor_info['ranking'] 320 | plane_model = competitor_info['plane_model'] 321 | pilot_name = competitor_info['pilot_name'] 322 | 323 | if igc_url is None: 324 | logger.info(f"No IGC file available for {competition_id}") 325 | continue 326 | 327 | try: 328 | file_path = self.download_flight(igc_url, competition_id) 329 | files_downloaded += 1 330 | 331 | # Try to read the IGC file with different encodings 332 | try: 333 | # Try utf-8 334 | with open(file_path, 'r', encoding='utf-8') as f: 335 | parsed_igc = Reader(skip_duplicates=True).read(f) 336 | except UnicodeDecodeError: 337 | # If not utf-8 use latin1 338 | with open(file_path, 'r', encoding='latin1') as f: 339 | parsed_igc = Reader(skip_duplicates=True).read(f) 340 | 341 | # Create and add the competitor 342 | trace = parsed_igc['fix_records'][1] 343 | competitor = Competitor(trace, competition_id, plane_model, ranking, pilot_name) 344 | competitors.append(competitor) 345 | 346 | # Update progress if callback provided 347 | if download_progress is not None: 348 | download_progress(files_downloaded, total_competitors) 349 | 350 | except Exception as e: 351 | logger.error(f"Error processing competitor {competition_id}: {e}") 352 | continue 353 | 354 | # Create CompetitionDay object with competitors and task 355 | competition_day = CompetitionDay(competition_name, date, class_name, competitors, task) 356 | 357 | return competition_day 358 | 359 | def 
_extract_waypoints(self, task_data: Dict) -> List[Waypoint]: 360 | """ 361 | Extract waypoints from the task data. 362 | 363 | Args: 364 | task_data: Dictionary containing task data 365 | 366 | Returns: 367 | List of Waypoint objects 368 | """ 369 | waypoints = [] 370 | 371 | # Extract turnpoints from task data 372 | turnpoints = task_data.get('g', []) 373 | 374 | for tp_idx, tp in enumerate(turnpoints): 375 | name = tp.get('n', f"TP{tp_idx}") 376 | lat = tp.get('a') # Latitude 377 | lon = tp.get('o') # Longitude 378 | 379 | if lat is None or lon is None: 380 | logger.warning(f"Skipping waypoint {name}: missing coordinates") 381 | continue 382 | 383 | # Get waypoint type 384 | wp_type = tp.get('y', 'cylinder') 385 | radius = tp.get('r', 500) # Default radius 500m 386 | 387 | # Different handling based on waypoint type 388 | if wp_type == 'line': 389 | # Start or finish line 390 | is_line = True 391 | r_min = None 392 | angle_min = None 393 | r_max = radius 394 | angle_max = 90 # Standard line is 90 degrees to bisector 395 | 396 | # Determine if start or finish based on position 397 | if tp_idx == 0: 398 | # Start line 399 | sector_orientation = "next" 400 | elif tp_idx == len(turnpoints) - 1: 401 | # Finish line 402 | sector_orientation = "previous" 403 | else: 404 | # Unlikely, but default to symmetrical 405 | sector_orientation = "symmetrical" 406 | else: 407 | # Cylinder or other point type 408 | is_line = False 409 | r_min = None 410 | angle_min = None 411 | r_max = radius 412 | angle_max = 180 # Full cylinder 413 | sector_orientation = "symmetrical" 414 | 415 | # Create Waypoint object 416 | waypoint = Waypoint( 417 | name=name, 418 | latitude=lat, 419 | longitude=lon, 420 | r_min=r_min, 421 | angle_min=angle_min, 422 | r_max=r_max, 423 | angle_max=angle_max, 424 | is_line=is_line, 425 | sector_orientation=sector_orientation 426 | ) 427 | 428 | waypoints.append(waypoint) 429 | 430 | # Set orientation angles based on waypoint positions 431 | Task.set_orientation_angles(waypoints) 432 | 433 | return waypoints 434 | 435 | def _extract_start_opening(self, day_data: Dict) -> Optional[datetime.datetime]: 436 | """ 437 | Extract start opening time from the day data. 
438 | 439 | Args: 440 | day_data: Dictionary containing day data 441 | 442 | Returns: 443 | Start opening time as datetime.datetime or None if not available 444 | """ 445 | # Get date from day data 446 | timestamp_ms = day_data.get('d') 447 | if not timestamp_ms: 448 | return None 449 | 450 | try: 451 | task_date = datetime.date.fromtimestamp(timestamp_ms / 1000) 452 | except ValueError: 453 | logger.warning(f"Could not parse date: {timestamp_ms}") 454 | return None 455 | 456 | # Get start opening time in milliseconds 457 | start_ms = day_data.get('a') 458 | if start_ms is None: 459 | return None 460 | 461 | # Convert milliseconds to time 462 | start_seconds = start_ms // 1000 463 | hours = start_seconds // 3600 464 | minutes = (start_seconds % 3600) // 60 465 | seconds = start_seconds % 60 466 | 467 | start_time = datetime.time(hours, minutes, seconds) 468 | 469 | # Combine date and time 470 | start_opening = datetime.datetime.combine(task_date, start_time) 471 | 472 | # Set timezone if available 473 | timezone_offset = day_data.get('r', {}).get('z') 474 | if timezone_offset: 475 | timezone_hours = timezone_offset // 3600000 # Convert from milliseconds to hours 476 | tz = datetime.timezone(datetime.timedelta(hours=timezone_hours)) 477 | start_opening = start_opening.replace(tzinfo=tz) 478 | 479 | return start_opening 480 | 481 | if __name__ == "__main__": 482 | # Direct API URL for the day 483 | day_url = "https://www.crosscountry.aero/c/sgp/rest/day/86/1547" 484 | 485 | # Create a CrosscountryDaily instance 486 | crosscountry_daily = CrosscountryDaily(day_url) 487 | 488 | # Directory to store IGC files 489 | target_directory = "./bin" 490 | 491 | # Generate a CompetitionDay with all flights 492 | competition_day = crosscountry_daily.generate_competition_day(target_directory) 493 | 494 | # Now you can analyze flights using the existing OpenSoar framework 495 | for competitor in competition_day.competitors: 496 | competitor.analyse(competition_day.task, classification_method="pysoar") 497 | 498 | # Work with the analyzed flight data 499 | print(f"Competitor: {competitor.competition_id}") 500 | if competitor.phases: 501 | thermals = competitor.phases.thermals() 502 | print(f" Number of thermals: {len(thermals)}") 503 | --------------------------------------------------------------------------------
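Usage note (editor's addition): the CrosscountryDaily class above falls back to the most recent day that has a winner when only a competition URL is given. The sketch below shows one way to pick a specific day instead, by listing the scored race days first and assigning day_id before generating the CompetitionDay. This is a minimal, illustrative sketch, not part of the library: the competition ID (86), day ID (1547) and target directory are placeholders, and it assumes network access to the crosscountry.aero API.

    from opensoar.competition.crosscountry import CrosscountryDaily

    # Illustrative competition URL; 86 is a placeholder competition ID
    daily = CrosscountryDaily("https://www.crosscountry.aero/c/sgp/rest/comp/86")

    # List the race days for this competition (each entry is the raw API dict)
    for day in daily.get_available_days():
        print(day['i'], day.get('d'))  # day ID and date as epoch milliseconds

    # Pick a specific day instead of the default (latest day with a winner)
    daily.day_id = 1547  # placeholder day ID taken from the listing above

    # Download the IGC files and build a CompetitionDay (requires network access)
    competition_day = daily.generate_competition_day("./igc_files")

    # Analyse each flight with the existing OpenSoar framework
    for competitor in competition_day.competitors:
        competitor.analyse(competition_day.task, classification_method="pysoar")
        thermal_count = len(competitor.phases.thermals()) if competitor.phases else 0
        print(competitor.competition_id, thermal_count)

As in the module's __main__ example, the analysed competitors expose their thermal and cruise phases through competitor.phases once analyse() has been called.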