├── docs ├── _static │ └── .gitignore ├── api │ ├── modules.rst │ └── madlib.rst ├── authors.md ├── license.md ├── changelog.md ├── contributing.md ├── requirements.txt ├── how_to_guides.rst ├── Makefile ├── index.rst ├── reference.rst └── conf.py ├── CHANGELOG.md ├── tests ├── inputs │ ├── submitit_invalid.json │ ├── submitit_test.json │ ├── invalid_sensor_1.yaml │ ├── invalid_sensor_2.yaml │ ├── blind_sensor.yaml │ ├── invalid_sensor_4.yaml │ ├── invalid_sensor_3.yaml │ └── students_t_sensors.yaml ├── conftest.py ├── test_configs.py ├── test_madlib_observations.py ├── test_propagate.py ├── test_utils.py ├── test_madlib_sensor_collection.py ├── test_madlib_satellite.py └── test_hz_launcher.py ├── configs ├── __init__.py ├── submitit_slurm_template.json └── sample_sensor_network.yaml ├── scripts ├── __init__.py └── hz_launcher.py ├── SPDX.spdx ├── AUTHORS.md ├── .readthedocs.yml ├── src ├── madlib │ ├── __init__.py │ ├── _maneuver.py │ ├── _sensor_collection.py │ ├── _utils.py │ └── _observation.py └── maddg │ ├── _residuals.py │ └── _sim_launcher.py ├── setup.py ├── pyproject.toml ├── LICENSE.txt ├── .github └── workflows │ └── tests.yml ├── examples └── example_inputs │ └── sample_sensor_network.yaml ├── tox.ini ├── setup.cfg ├── README.md ├── .gitignore └── CONTRIBUTING.md /docs/_static/.gitignore: -------------------------------------------------------------------------------- 1 | # Empty directory 2 | -------------------------------------------------------------------------------- /docs/api/modules.rst: -------------------------------------------------------------------------------- 1 | madlib 2 | ====== 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | madlib 8 | -------------------------------------------------------------------------------- /docs/authors.md: -------------------------------------------------------------------------------- 1 | ```{include} ../AUTHORS.md 2 | :relative-docs: docs/ 3 | :relative-images: 4 | ``` 5 | -------------------------------------------------------------------------------- /docs/license.md: -------------------------------------------------------------------------------- 1 | # License 2 | 3 | ```{literalinclude} ../LICENSE.txt 4 | :language: text 5 | ``` 6 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## Version 0.1.0 (development) 4 | 5 | - Initial re-scaffold release 6 | -------------------------------------------------------------------------------- /docs/changelog.md: -------------------------------------------------------------------------------- 1 | ```{include} ../CHANGELOG.md 2 | :relative-docs: docs/ 3 | :relative-images: 4 | ``` 5 | -------------------------------------------------------------------------------- /tests/inputs/submitit_invalid.json: -------------------------------------------------------------------------------- 1 | { 2 | "This":"is", 3 | "not":"valid", 4 | "format":"!" 
5 | } -------------------------------------------------------------------------------- /docs/contributing.md: -------------------------------------------------------------------------------- 1 | ```{include} ../CONTRIBUTING.md 2 | :relative-docs: docs/ 3 | :relative-images: 4 | ``` 5 | -------------------------------------------------------------------------------- /tests/inputs/submitit_test.json: -------------------------------------------------------------------------------- 1 | [ 2 | "hydra.job.chdir=True", 3 | "hydra/launcher=submitit_local" 4 | ] -------------------------------------------------------------------------------- /configs/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | -------------------------------------------------------------------------------- /scripts/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | -------------------------------------------------------------------------------- /configs/submitit_slurm_template.json: -------------------------------------------------------------------------------- 1 | [ 2 | "hydra/launcher=submitit_slurm", 3 | "hydra.launcher.partition=PARTITION", 4 | "hydra.launcher.constraint=CONSTRAINT", 5 | "hydra.launcher.nodes=1", 6 | "hydra.launcher.cpus_per_task=2" 7 | ] -------------------------------------------------------------------------------- /SPDX.spdx: -------------------------------------------------------------------------------- 1 | SPDXVersion: SPDX-2.1 2 | PackageName: MaDDG 3 | PackageHomePage: https://github.com/mit-ll/maddg 4 | PackageOriginator: MIT Lincoln Laboratory 5 | PackageCopyrightText: 2024 Massachusetts Institute of Technology 6 | PackageLicenseDeclared: MIT -------------------------------------------------------------------------------- /AUTHORS.md: -------------------------------------------------------------------------------- 1 | # Contributors 2 | 3 | * Jake Varey [jacob.varey@ll.mit.edu](mailto:jacob.varey@ll.mit.edu) 4 | * Michael Tierney [michael.kotson@ll.mit.edu](mailto:michael.kotson@ll.mit.edu) 5 | * Ryan Sullenberger [ryan.sullenberger@ll.mit.edu](mailto:ryan.sullenberger@ll.mit.edu) 6 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | """ 5 | Dummy conftest.py for madlib. 6 | 7 | If you don't know what this is for, just leave it empty. 8 | Read more about conftest.py under: 9 | - https://docs.pytest.org/en/stable/fixture.html 10 | - https://docs.pytest.org/en/stable/writing_plugins.html 11 | """ 12 | 13 | # import pytest 14 | -------------------------------------------------------------------------------- /tests/inputs/invalid_sensor_1.yaml: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | # This sensor is for test purposes only. DO NOT MODIFY. 
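# (Explanatory note, added for readers of this fixture: unlike the valid sample
#  configs, which nest sensors under a top-level `sensor_list:` key, this file
#  defines a bare top-level `sensor:` mapping, which is presumably the schema
#  violation this fixture is meant to exercise.)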
5 | 6 | sensor: 7 | id: sensor 8 | lat: 90.0 9 | lon: 0.0 10 | alt: 0.0 11 | dra: 1.0 12 | ddec: 1.0 13 | obs_per_collect: 1 14 | obs_time_spacing: 0 15 | collect_gap_mean: 86400 16 | collect_gap_std: 0 17 | obs_limits: 18 | el: [90, 90] 19 | sun_el: [90, 90] -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | # Requirements file for ReadTheDocs, check .readthedocs.yml. 2 | # To build the module reference correctly, make sure every external package 3 | # under `install_requires` in `setup.cfg` is also listed here! 4 | # sphinx_rtd_theme 5 | astropy>=5.3.1 6 | myst-parser[linkify] 7 | numba>=0.57.1 8 | numpy==1.26.4 9 | scipy>=1.11.1 10 | sphinx>=3.2.1 11 | sphinx_rtd_theme 12 | pydata_sphinx_theme 13 | sphinx-design==0.5.0 14 | sphinx-tabs==3.4.4 15 | nbsphinx==0.9.7 -------------------------------------------------------------------------------- /tests/inputs/invalid_sensor_2.yaml: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | # This sensor is for test purposes only. DO NOT MODIFY. 5 | 6 | sensor_list: 7 | sensor: 8 | id: sensor 9 | lat: 90.0 10 | alt: 0.0 11 | dra: 1.0 12 | ddec: 1.0 13 | obs_per_collect: 1 14 | obs_time_spacing: 0 15 | collect_gap_mean: 86400 16 | collect_gap_std: 0 17 | obs_limits: 18 | el: [90, 90] 19 | sun_el: [90, 90] -------------------------------------------------------------------------------- /docs/how_to_guides.rst: -------------------------------------------------------------------------------- 1 | ============= 2 | How-To Guides 3 | ============= 4 | 5 | These how-to guides provide step-by-step examples demonstrating 6 | some of MaDDG's key functionality. 7 | 8 | .. toctree:: 9 | :maxdepth: 1 10 | :caption: Contents: 11 | 12 | how_to_guides/example_1-propagate-satellite 13 | how_to_guides/example_2-observe-satellite 14 | how_to_guides/example_3-sensor-collections 15 | how_to_guides/example_4-satellite-maneuvers 16 | how_to_guides/example_5-launching-simulations -------------------------------------------------------------------------------- /tests/inputs/blind_sensor.yaml: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | # This is a sensor that should not be able to see anything. 5 | # Its purpose is to test code branches that handle a complete lack of observations. 
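# (Explanatory note: the observing limits below require the target to sit at
#  exactly 90 degrees elevation while the Sun is also at 90 degrees elevation,
#  a condition that is effectively never satisfied, so this sensor should
#  record zero observations.)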
6 | 7 | sensor_list: 8 | magoo: 9 | id: magoo 10 | lat: 90.0 11 | lon: 0.0 12 | alt: 0.0 13 | dra: 1.0 14 | ddec: 1.0 15 | obs_per_collect: 1 16 | obs_time_spacing: 0 17 | collect_gap_mean: 86400 18 | collect_gap_std: 0 19 | obs_limits: 20 | el: [90, 90] 21 | sun_el: [90, 90] -------------------------------------------------------------------------------- /tests/test_configs.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | """ 5 | Test file: test_configs.py 6 | Description: This file contains unit tests for the `configs` module 7 | """ 8 | 9 | from madlib._sensor_collection import SensorCollection 10 | 11 | 12 | def test_sensor_YAML(): 13 | """Test that the sample sensor network YAML file has a valid schema.""" 14 | yaml = "configs/sample_sensor_network.yaml" 15 | network = SensorCollection.fromYAML(yaml) 16 | 17 | # If the test makes it this far, then the validation check succeeded 18 | assert True 19 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # Read the Docs configuration file 2 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 3 | 4 | # Required 5 | version: 2 6 | 7 | # Build documentation in the docs/ directory with Sphinx 8 | sphinx: 9 | configuration: docs/conf.py 10 | 11 | # Build documentation with MkDocs 12 | #mkdocs: 13 | # configuration: mkdocs.yml 14 | 15 | # Optionally build your docs in additional formats such as PDF 16 | formats: 17 | - pdf 18 | 19 | build: 20 | os: ubuntu-22.04 21 | tools: 22 | python: "3.11" 23 | 24 | python: 25 | install: 26 | - requirements: docs/requirements.txt 27 | - {path: ., method: pip} 28 | -------------------------------------------------------------------------------- /src/madlib/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | """ 5 | Maneuver Detection Library (MaDLib) 6 | """ 7 | 8 | from importlib.metadata import PackageNotFoundError, version # pragma: no cover 9 | 10 | from ._maneuver import ContinuousManeuver, ImpulsiveManeuver 11 | from ._observation import Observation, ObservationResidual 12 | from ._satellite import ContinuousThrustSatellite, Satellite 13 | from ._sensor import GroundOpticalSensor, SpaceOpticalSensor 14 | from ._sensor_collection import SensorCollection 15 | 16 | try: 17 | # Change here if project is renamed and does not equal the package name 18 | dist_name = "MaDDG" 19 | __version__ = version(dist_name) 20 | except PackageNotFoundError: # pragma: no cover 21 | __version__ = "unknown" 22 | finally: 23 | del version, PackageNotFoundError 24 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | """ 5 | Setup file for MaDDG. 6 | Use setup.cfg to configure your project. 7 | 8 | This file was generated with PyScaffold 4.5. 9 | PyScaffold helps you to put up the scaffold of your new Python project. 
10 | Learn more under: https://pyscaffold.org/ 11 | """ 12 | from setuptools import setup 13 | 14 | if __name__ == "__main__": 15 | try: 16 | setup(use_scm_version={"version_scheme": "no-guess-dev"}) 17 | except: # noqa 18 | print( 19 | "\n\nAn error occurred while building the project, " 20 | "please ensure you have the most updated version of setuptools, " 21 | "setuptools_scm and wheel with:\n" 22 | " pip install -U setuptools setuptools_scm wheel\n\n" 23 | ) 24 | raise 25 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | # AVOID CHANGING REQUIRES: IT WILL BE UPDATED BY PYSCAFFOLD! 3 | requires = ["setuptools>=46.1.0", "setuptools_scm[toml]>=5"] 4 | build-backend = "setuptools.build_meta" 5 | 6 | [tool.setuptools_scm] 7 | # For smarter version schemes and other configuration options, 8 | # check out https://github.com/pypa/setuptools_scm 9 | version_scheme = "no-guess-dev" 10 | 11 | [tool.coverage.run] 12 | branch = true 13 | omit = ["tests/*"] 14 | 15 | [tool.coverage.report] 16 | # Regexes for lines to exclude from consideration 17 | exclude_lines = [ 18 | 'pragma: no cover', 19 | 'def __repr__', 20 | 'raise NotImplementedError', 21 | 'class .*\bProtocol(\[.+\])?\):', 22 | '@(abc\.)?abstractmethod', 23 | '@(typing\.)?overload', 24 | 'except ImportError:', 25 | 'except ModuleNotFoundError:', 26 | 'if (typing\.)?TYPE_CHECKING:', 27 | 'if sys\.version_info', 28 | 'if __name__ == .__main__.:', 29 | ] -------------------------------------------------------------------------------- /tests/inputs/invalid_sensor_4.yaml: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | twilight_sun_el: &twilight_sun_el [-90, -18.0] 5 | 6 | basic_weather: &basic_weather 7 | cloud_prob: 0.5 8 | cloud_duration_mean: 10800.0 # seconds 9 | cloud_duration_std: 7200.0 # seconds 10 | 11 | sensor_list: 12 | A1: 13 | id: A1 14 | lat: 0.0 15 | lon: 0.0 16 | alt: 0.0 17 | dra: 1.0 18 | ddec: 1.0 19 | obs_per_collect: 1 20 | obs_time_spacing: 0 21 | collect_gap_mean: 6330.0 22 | collect_gap_std: 0 23 | obs_limits: 24 | el: [15.0, 90] 25 | sun_el: *twilight_sun_el 26 | invalid_limit: 1 27 | weather: *basic_weather 28 | A2: 29 | id: A2 30 | lat: 0.0 31 | lon: 0.0 32 | alt: 0.0 33 | dra: 1.0 34 | ddec: 1.0 35 | obs_per_collect: 1 36 | obs_time_spacing: 0 37 | collect_gap_mean: 6300.0 38 | collect_gap_std: 0 39 | obs_limits: 40 | el: [15.0, 90] 41 | sun_el: *twilight_sun_el 42 | invalid_limit: 2 43 | weather: *basic_weather 44 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2024 Massachusetts Institute of Technology. 
4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | #AUTODOCDIR = api 11 | 12 | # User-friendly check for sphinx-build 13 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $?), 1) 14 | $(error "The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from https://sphinx-doc.org/") 15 | endif 16 | 17 | .PHONY: help clean Makefile 18 | 19 | # Put it first so that "make" without argument is like "make help". 20 | help: 21 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 22 | 23 | clean: 24 | rm -rf $(BUILDDIR)/* $(AUTODOCDIR) 25 | 26 | # Catch-all target: route all unknown targets to Sphinx using the new 27 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 28 | %: Makefile 29 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 30 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a single version of Python 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python 3 | 4 | name: Tests 5 | 6 | on: 7 | push: 8 | branches: [ "main" ] 9 | pull_request: 10 | branches: [ "main" ] 11 | 12 | permissions: 13 | contents: read 14 | 15 | jobs: 16 | build: 17 | 18 | runs-on: ubuntu-latest 19 | 20 | steps: 21 | - uses: actions/checkout@v4 22 | - name: Set up Python 3.11 23 | uses: actions/setup-python@v3 24 | with: 25 | python-version: "3.11" 26 | - name: Install package 27 | run: | 28 | python -m pip install --upgrade pip 29 | pip install flake8 tox 30 | pip install . 
31 | - name: Lint with flake8 32 | run: | 33 | # stop the build if there are Python syntax errors or undefined names 34 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 35 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 36 | flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 37 | - name: Test with tox 38 | run: | 39 | tox -e coverage 40 | -------------------------------------------------------------------------------- /docs/api/madlib.rst: -------------------------------------------------------------------------------- 1 | madlib package 2 | ============== 3 | 4 | Submodules 5 | ---------- 6 | 7 | madlib.\_maneuver module 8 | ------------------------ 9 | 10 | .. automodule:: madlib._maneuver 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | :private-members: 15 | 16 | madlib.\_observation module 17 | --------------------------- 18 | 19 | .. automodule:: madlib._observation 20 | :members: 21 | :undoc-members: 22 | :show-inheritance: 23 | :private-members: 24 | 25 | madlib.\_satellite module 26 | ------------------------- 27 | 28 | .. automodule:: madlib._satellite 29 | :members: 30 | :undoc-members: 31 | :show-inheritance: 32 | :private-members: 33 | 34 | madlib.\_sensor module 35 | ---------------------- 36 | 37 | .. automodule:: madlib._sensor 38 | :members: 39 | :undoc-members: 40 | :show-inheritance: 41 | :private-members: 42 | 43 | madlib.\_sensor\_collection module 44 | ---------------------------------- 45 | 46 | .. automodule:: madlib._sensor_collection 47 | :members: 48 | :undoc-members: 49 | :show-inheritance: 50 | :private-members: 51 | 52 | madlib.\_utils module 53 | --------------------- 54 | 55 | .. automodule:: madlib._utils 56 | :members: 57 | :undoc-members: 58 | :show-inheritance: 59 | :private-members: 60 | 61 | Module contents 62 | --------------- 63 | 64 | .. 
automodule:: madlib 65 | :members: 66 | :undoc-members: 67 | :show-inheritance: 68 | :private-members: 69 | -------------------------------------------------------------------------------- /examples/example_inputs/sample_sensor_network.yaml: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | twilight_sun_el: &twilight_sun_el [-90, -18.0] 5 | 6 | sensor_list: 7 | Alpha: 8 | id: Alpha 9 | lat: 0.0 10 | lon: 0.0 11 | alt: 0.0 12 | dra: 1.0 13 | ddec: 1.0 14 | obs_per_collect: 3 15 | obs_time_spacing: 1 16 | collect_gap_mean: 60.0 17 | collect_gap_std: 5 18 | obs_limits: 19 | el: [15.0, 90] 20 | sun_el: *twilight_sun_el 21 | Bravo: 22 | id: Bravo 23 | lat: 0.0 24 | lon: 15.0 25 | alt: 0.0 26 | dra: 1.0 27 | ddec: 1.0 28 | obs_per_collect: 5 29 | obs_time_spacing: 1 30 | collect_gap_mean: 10.0 31 | collect_gap_std: 0 32 | obs_limits: 33 | el: [15.0, 90] 34 | sun_el: *twilight_sun_el 35 | Charlie: 36 | id: Charlie 37 | lat: 0.0 38 | lon: 30.0 39 | alt: 0.0 40 | dra: 10.0 41 | ddec: 10.0 42 | obs_per_collect: 1 43 | obs_time_spacing: 0 44 | collect_gap_mean: 600.0 45 | collect_gap_std: 0 46 | obs_limits: 47 | el: [15.0, 90] 48 | sun_el: *twilight_sun_el 49 | Delta: 50 | id: Delta 51 | lat: 0.0 52 | lon: 45.0 53 | alt: 0.0 54 | dra: 1.0 55 | ddec: 1.0 56 | obs_per_collect: 3 57 | obs_time_spacing: 1 58 | collect_gap_mean: 120.0 59 | collect_gap_std: 5 60 | obs_limits: 61 | el: [15.0, 90] 62 | sun_el: *twilight_sun_el 63 | Echo: 64 | id: Echo 65 | lat: 0.0 66 | lon: 60.0 67 | alt: 0.0 68 | dra: 1.0 69 | ddec: 1.0 70 | obs_per_collect: 3 71 | obs_time_spacing: 1 72 | collect_gap_mean: 120.0 73 | collect_gap_std: 5 74 | obs_limits: 75 | el: [15.0, 90] 76 | sun_el: *twilight_sun_el 77 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. admonition:: Join the Discussion 2 | 3 | Feel free to share ideas and ask questions over at `MaDDG's discussion page`_. 4 | 5 | .. _MaDDG's discussion page: https://github.com/mit-ll/MaDDG/discussions 6 | 7 | ================================= 8 | Welcome to MaDDG's documentation! 9 | ================================= 10 | 11 | MaDDG (Maneuver Detection Data Generation) is a library for simulating 12 | high-fidelity observations of satellite trajectories with configurable maneuvers 13 | and custom sensor networks. MaDDG provides a simple interface for modeling complex 14 | observation scenarios. It allows you to create a satellite in any geocentric orbit, 15 | propagate its motion with a robust physical model, and track its position through 16 | optical sensors with customizable locations, observing limits, and noise parameters. 17 | 18 | Through its use of `hydra-zen`_ and the `submitit plugin`_, MaDDG` can easily configure 19 | an array of simulation scenarios and distribute them in a SLURM cluster, empowering users 20 | to create large-scale, realistic datasets for training reliable maneuver detection and 21 | characterization models. 22 | 23 | .. _hydra-zen: https://github.com/mit-ll-responsible-ai/hydra-zen 24 | .. _submitit plugin: https://hydra.cc/docs/plugins/submitit_launcher/ 25 | 26 | Installation 27 | ============ 28 | 29 | MaDDG is available on PyPI: 30 | 31 | .. 
code:: console 32 | 33 | $ pip install MaDDG 34 | 35 | To install from source, clone the `MaDDG repository`_ and run the following command from 36 | its top-level directory: 37 | 38 | .. _MaDDG repository: https://github.com/mit-ll/MaDDG 39 | 40 | .. code:: console 41 | 42 | $ pip install -e . 43 | 44 | If you want to modify the orbit propagation physics behind MaDDG, you 45 | will likely need to edit the `AstroForge`_ library, as well. AstroForge is 46 | an open-source astrodynamics library and a key requirement of MaDDG. See 47 | the `AstroForge documentation`_ for installation instructions. 48 | 49 | .. _AstroForge: https://github.com/mit-ll/AstroForge 50 | .. _AstroForge documentation: https://astroforge.readthedocs.io/en/latest/ 51 | 52 | Documentation Contents 53 | ====================== 54 | 55 | .. toctree:: 56 | :maxdepth: 2 57 | 58 | How-To Guides 59 | Reference -------------------------------------------------------------------------------- /src/maddg/_residuals.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | import numpy as np 5 | import pandas as pd 6 | from astropy.time import Time 7 | 8 | import madlib 9 | 10 | 11 | def calculate_residuals( 12 | sensors: madlib.SensorCollection, 13 | satellite: madlib.Satellite, 14 | sim_duration_days: float, 15 | t_start_mjd: float = Time.now().mjd, 16 | ) -> pd.DataFrame | None: 17 | """Calculates the residuals. 18 | 19 | Parameters 20 | ---------- 21 | sensors : madlib.SensorCollection 22 | Collection of Sensors 23 | satellite : madlib.Satellite 24 | The Satellite object observed in the simulation 25 | sim_duration_days : float 26 | Duration of the simulation (days) 27 | t_start_mjd : float, optional 28 | Time (MJD) at the start of the simulation, by default astropy.time.Time.now().mjd 29 | 30 | Returns 31 | ------- 32 | pd.DataFrame | None 33 | pandas DataFrame of results (or None, if nobs == 0) 34 | 35 | Raises 36 | ------ 37 | MadlibException 38 | Raised if nobs != nexpected... 39 | number of actual and expected observations were not 40 | the same, so residuals cannot be calculated. This 41 | is likely a random occurrence. 
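    Examples
    --------
    A minimal illustrative sketch (not a verified doctest); it assumes the
    sample sensor network YAML shipped in ``configs/`` and a simple circular
    GEO orbit, so adapt the path and state vectors to your own scenario::

        import numpy as np
        import astroforge as af
        from astropy.time import Time

        import madlib
        from maddg._residuals import calculate_residuals

        # Sensor network loaded from the repository's sample config (assumed path)
        sensors = madlib.SensorCollection.fromYAML("configs/sample_sensor_network.yaml")

        # Non-maneuvering satellite in a circular GEO orbit at the chosen epoch
        epoch = Time("2011-11-11T11:11:11", format="isot", scale="utc").mjd
        x0 = af.constants.Rgeo * np.array([1.0, 0.0, 0.0])  # position (km)
        v0 = af.constants.Vgeo * np.array([0.0, 1.0, 0.0])  # velocity (km/s)
        sat = madlib.Satellite(epoch, x0, v0, np.zeros(3))

        # DataFrame of RA/DEC residuals (arcsec) per valid observation,
        # or None if no valid observations were collected
        residuals = calculate_residuals(sensors, sat, sim_duration_days=1.0, t_start_mjd=epoch)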
42 | """ 43 | ### SIMULATION TIMING 44 | 45 | t_end_mjd = t_start_mjd + sim_duration_days 46 | sensors.generate_obs_timing(t_start_mjd, t_end_mjd) 47 | 48 | ### GENERATE OBSERVATIONS 49 | obs = sensors.observe(satellite) 50 | 51 | actual_obs = obs.pos_observed 52 | predicted_obs = obs.pos_expected 53 | 54 | nobs = obs.count_valid_observations() 55 | 56 | ### CALCULATE AND RETURN RESIDUALS 57 | output = None 58 | if nobs > 0: 59 | obs_res = np.array([actual_obs[n] - predicted_obs[n] for n in range(nobs)]) 60 | ra_res = np.array([obs_res[n].ra for n in range(nobs)]) * 3600 61 | dec_res = np.array([obs_res[n].dec for n in range(nobs)]) * 3600 62 | t = np.array([x.mjd for x in actual_obs]) 63 | sensor_ids = np.array([x.sensor_id for x in predicted_obs]) 64 | 65 | output = { 66 | "MJD": t, 67 | "RA Arcsec": ra_res, 68 | "DEC Arcsec": dec_res, 69 | "SensorID": sensor_ids, 70 | } 71 | output = pd.DataFrame(output) 72 | 73 | return output 74 | -------------------------------------------------------------------------------- /src/madlib/_maneuver.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | from typing import Callable, Tuple 5 | 6 | import numpy as np 7 | from numpy.typing import NDArray 8 | 9 | 10 | class ImpulsiveManeuver: 11 | """ 12 | ImpulseManeuver class holds an impulse maneuver definition, which includes the time of the maneuver, and the impulsive delta-v. 13 | 14 | Properties 15 | ---------- 16 | time : float 17 | Timestamp of the maneuver (MJD, UTC) 18 | dv : NDArray[np.float64] 19 | 3D Array of the impulsive delta-v (RSW frame) 20 | """ 21 | 22 | _time: float 23 | _dv: NDArray[np.float64] 24 | 25 | def __init__(self, time: float, dv: NDArray[np.float64]): 26 | """Initialize the ImpulseManeuver class. 27 | 28 | Parameters 29 | ---------- 30 | time : float 31 | Timestamp of the maneuver (MJD, UTC) 32 | dv : NDArray[np.float64] 33 | 3D Array of the impulsive delta-v (RSW frame) 34 | """ 35 | self._time = time 36 | self._dv = dv 37 | 38 | @property 39 | def time(self) -> float: 40 | return self._time 41 | 42 | @property 43 | def dv(self) -> NDArray[np.float64]: 44 | return self._dv 45 | 46 | 47 | class ContinuousManeuver: 48 | """ 49 | ContinuousManeuver class holds a continuous maneuver definition, which includes the acceleration 50 | function defining the maneuver, and the time range over which the maneuver occurs. 51 | """ 52 | 53 | _accel_func: Callable 54 | _time_range: Tuple[float, float] 55 | 56 | def __init__(self, f: Callable, time_range: Tuple[float, float]): 57 | """Create a ContinuousManeuver object 58 | 59 | Parameters 60 | ---------- 61 | f : Callable 62 | Function which evaluates the acceleration at a given time 63 | time_range : Tuple[float, float] 64 | Time range within which this maneuver is valid 65 | """ 66 | self._accel_func = f 67 | if time_range[0] >= time_range[1]: 68 | a, b = time_range 69 | time_range = (b, a) 70 | 71 | self._time_range = time_range 72 | 73 | def __call__(self, t: float) -> NDArray[np.float64]: 74 | """Compute the acceleration for this maneuver at the time given 75 | 76 | Parameters 77 | ---------- 78 | t : float 79 | Timestamp (MJD, UT1) to evaluate the acceleration 80 | 81 | Returns 82 | ------- 83 | NDArray[np.float64] 84 | Acceleration vector, shape: (3,). 
(TETED, km/s^2) 85 | """ 86 | if self._time_range[0] <= t <= self._time_range[1]: 87 | return self._accel_func(t) 88 | 89 | return np.zeros(3) 90 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | # Tox configuration file 2 | # Read more under https://tox.wiki/ 3 | # THIS SCRIPT IS SUPPOSED TO BE AN EXAMPLE. MODIFY IT ACCORDING TO YOUR NEEDS! 4 | 5 | [tox] 6 | minversion = 3.24 7 | envlist = default 8 | isolated_build = True 9 | 10 | 11 | [testenv] 12 | description = Invoke pytest to run automated tests 13 | setenv = 14 | TOXINIDIR = {toxinidir} 15 | passenv = 16 | HOME 17 | SETUPTOOLS_* 18 | extras = 19 | testing 20 | commands = 21 | pytest {posargs} 22 | 23 | 24 | # # To run `tox -e lint` you need to make sure you have a 25 | # # `.pre-commit-config.yaml` file. See https://pre-commit.com 26 | # [testenv:lint] 27 | # description = Perform static analysis and style checks 28 | # skip_install = True 29 | # deps = pre-commit 30 | # passenv = 31 | # HOMEPATH 32 | # PROGRAMDATA 33 | # SETUPTOOLS_* 34 | # commands = 35 | # pre-commit run --all-files {posargs:--show-diff-on-failure} 36 | 37 | 38 | [testenv:{build,clean}] 39 | description = 40 | build: Build the package in isolation according to PEP517, see https://github.com/pypa/build 41 | clean: Remove old distribution files and temporary build artifacts (./build and ./dist) 42 | # https://setuptools.pypa.io/en/stable/build_meta.html#how-to-use-it 43 | skip_install = True 44 | changedir = {toxinidir} 45 | deps = 46 | build: build[virtualenv] 47 | passenv = 48 | SETUPTOOLS_* 49 | commands = 50 | clean: python -c 'import shutil; [shutil.rmtree(p, True) for p in ("build", "dist", "docs/_build")]' 51 | clean: python -c 'import pathlib, shutil; [shutil.rmtree(p, True) for p in pathlib.Path("src").glob("*.egg-info")]' 52 | build: python -m build {posargs} 53 | # By default, both `sdist` and `wheel` are built. If your sdist is too big or you don't want 54 | # to make it available, consider running: `tox -e build -- --wheel` 55 | 56 | 57 | [testenv:{docs,doctests,linkcheck}] 58 | description = 59 | docs: Invoke sphinx-build to build the docs 60 | doctests: Invoke sphinx-build to run doctests 61 | linkcheck: Check for broken links in the documentation 62 | passenv = 63 | SETUPTOOLS_* 64 | setenv = 65 | DOCSDIR = {toxinidir}/docs 66 | BUILDDIR = {toxinidir}/docs/_build 67 | docs: BUILD = html 68 | doctests: BUILD = doctest 69 | linkcheck: BUILD = linkcheck 70 | deps = 71 | -r {toxinidir}/docs/requirements.txt 72 | # ^ requirements.txt shared with Read The Docs 73 | commands = 74 | sphinx-build --color -b {env:BUILD} -d "{env:BUILDDIR}/doctrees" "{env:DOCSDIR}" "{env:BUILDDIR}/{env:BUILD}" {posargs} 75 | 76 | 77 | [testenv:publish] 78 | description = 79 | Publish the package you have been developing to a package index server. 80 | By default, it uses testpypi. If you really want to publish your package 81 | to be publicly accessible in PyPI, use the `-- --repository pypi` option. 82 | skip_install = True 83 | changedir = {toxinidir} 84 | passenv = 85 | # See: https://twine.readthedocs.io/en/latest/ 86 | TWINE_USERNAME 87 | TWINE_PASSWORD 88 | TWINE_REPOSITORY 89 | TWINE_REPOSITORY_URL 90 | deps = twine 91 | commands = 92 | python -m twine check dist/* 93 | python -m twine upload {posargs:--repository {env:TWINE_REPOSITORY:testpypi}} dist/* 94 | 95 | [testenv:coverage] 96 | description = Runs test suite and measures test-coverage. 
Fails if coverage is 97 | below 100 prcnt. Run `tox -e coverage -- -n 0` to disable parallelization. 98 | setenv = NUMBA_DISABLE_JIT=1 99 | usedevelop = true 100 | basepython = python3.12 101 | deps = 102 | pytest 103 | coverage[toml] 104 | pytest-cov 105 | numpy 106 | commands = pytest --cov-report term-missing --cov-config=pyproject.toml --cov-fail-under=100 --cov=src/maddg --cov=src/madlib --cov=scripts/ tests -------------------------------------------------------------------------------- /configs/sample_sensor_network.yaml: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | twilight_sun_el: &twilight_sun_el [-90, -18.0] 5 | 6 | basic_weather: &basic_weather 7 | cloud_prob: 0.5 8 | cloud_duration_mean: 10800.0 # seconds 9 | cloud_duration_std: 7200.0 # seconds 10 | 11 | sensor_list: 12 | A1: 13 | id: A1 14 | lat: 0.0 15 | lon: 0.0 16 | alt: 0.0 17 | dra: 1.0 18 | ddec: 1.0 19 | obs_per_collect: 1 20 | obs_time_spacing: 0 21 | collect_gap_mean: 6330.0 22 | collect_gap_std: 0 23 | obs_limits: 24 | el: [15.0, 90] 25 | sun_el: *twilight_sun_el 26 | weather: *basic_weather 27 | A2: 28 | id: A2 29 | lat: 0.0 30 | lon: 0.0 31 | alt: 0.0 32 | dra: 1.0 33 | ddec: 1.0 34 | obs_per_collect: 1 35 | obs_time_spacing: 0 36 | collect_gap_mean: 6300.0 37 | collect_gap_std: 0 38 | obs_limits: 39 | el: [15.0, 90] 40 | sun_el: *twilight_sun_el 41 | weather: *basic_weather 42 | B1: 43 | id: B1 44 | lat: -7.41 45 | lon: 72.45 46 | alt: -62.0 47 | dra: 1.0 48 | ddec: 1.0 49 | obs_per_collect: 1 50 | obs_time_spacing: 0 51 | collect_gap_mean: 6340.0 52 | collect_gap_std: 0 53 | obs_limits: 54 | el: [15.0, 90] 55 | sun_el: *twilight_sun_el 56 | weather: *basic_weather 57 | B2: 58 | id: B2 59 | lat: -7.41 60 | lon: 72.45 61 | alt: -62.0 62 | dra: 10.0 63 | ddec: 10.0 64 | obs_per_collect: 1 65 | obs_time_spacing: 0 66 | collect_gap_mean: 6315.0 67 | collect_gap_std: 0 68 | obs_limits: 69 | el: [15.0, 90] 70 | sun_el: *twilight_sun_el 71 | weather: *basic_weather 72 | B3: 73 | id: B3 74 | lat: -7.41 75 | lon: 72.45 76 | alt: -62.0 77 | dra: 1.0 78 | ddec: 1.0 79 | obs_per_collect: 1 80 | obs_time_spacing: 0 81 | collect_gap_mean: 6350.0 82 | collect_gap_std: 0 83 | obs_limits: 84 | el: [15.0, 90] 85 | sun_el: *twilight_sun_el 86 | weather: *basic_weather 87 | C1: 88 | id: C1 89 | lat: 33.82 90 | lon: 276.79 91 | alt: 1512.0 92 | dra: 1.0 93 | ddec: 1.0 94 | obs_per_collect: 1 95 | obs_time_spacing: 0 96 | collect_gap_mean: 6350.0 97 | collect_gap_std: 0 98 | obs_limits: 99 | el: [15.0, 90] 100 | sun_el: *twilight_sun_el 101 | weather: *basic_weather 102 | C2: 103 | id: C2 104 | lat: 33.82 105 | lon: 278.50 106 | alt: 1512.0 107 | dra: 10.0 108 | ddec: 10.0 109 | obs_per_collect: 1 110 | obs_time_spacing: 0 111 | collect_gap_mean: 6340.0 112 | collect_gap_std: 0 113 | obs_limits: 114 | el: [15.0, 90] 115 | sun_el: *twilight_sun_el 116 | weather: *basic_weather 117 | C3: 118 | id: C3 119 | lat: 33.82 120 | lon: 278.29 121 | alt: 1512.0 122 | dra: 1.0 123 | ddec: 1.0 124 | obs_per_collect: 1 125 | obs_time_spacing: 0 126 | collect_gap_mean: 6340.0 127 | collect_gap_std: 0 128 | obs_limits: 129 | el: [15.0, 90] 130 | sun_el: *twilight_sun_el 131 | weather: *basic_weather 132 | D1: 133 | id: D1 134 | lat: 20.71 135 | lon: 203.74 136 | alt: 3060.0 137 | dra: 1.0 138 | ddec: 1.0 139 | obs_per_collect: 1 140 | obs_time_spacing: 0 141 | collect_gap_mean: 6330.0 142 | collect_gap_std: 0 
143 | obs_limits: 144 | el: [15.0, 90] 145 | sun_el: *twilight_sun_el 146 | weather: *basic_weather 147 | D2: 148 | id: D2 149 | lat: 20.71 150 | lon: 203.74 151 | alt: 3060.0 152 | dra: 10.0 153 | ddec: 10.0 154 | obs_per_collect: 1 155 | obs_time_spacing: 0 156 | collect_gap_mean: 6350.0 157 | collect_gap_std: 0 158 | obs_limits: 159 | el: [15.0, 90] 160 | sun_el: *twilight_sun_el 161 | weather: *basic_weather 162 | D3: 163 | id: D3 164 | lat: 20.71 165 | lon: 203.74 166 | alt: 3060.0 167 | dra: 1.0 168 | ddec: 1.0 169 | obs_per_collect: 1 170 | obs_time_spacing: 0 171 | collect_gap_mean: 6350.0 172 | collect_gap_std: 0 173 | obs_limits: 174 | el: [15.0, 90] 175 | sun_el: *twilight_sun_el 176 | weather: *basic_weather 177 | -------------------------------------------------------------------------------- /docs/reference.rst: -------------------------------------------------------------------------------- 1 | ============================ 2 | Reference API 3 | ============================ 4 | 5 | A thorough documentation of the MaDDG API. 6 | 7 | Internally, MaDDG is divided into two core components: ``madlib`` and ``maddg``. 8 | ``madlib`` contains the individual objects that are used to construct a simulation, 9 | such as satellites, maneuvers, sensors. ``maddg`` provides the framework that combines 10 | these objects into simulations and then deploys those simulations to compute nodes. 11 | 12 | madlib 13 | ====== 14 | 15 | Satellites 16 | ---------- 17 | 18 | Satellites in MaDDG can be created from an initial set of 19 | position and velocity vectors, initial Keplerian elements, or 20 | even just the latitude and longitude for a geostationary 21 | object. 22 | 23 | The :func:`~madlib._satellite.Satellite` class supports 24 | impulsive maneuvers, while the 25 | :func:`~madlib._satellite.ContinuousThrustSatellite` class 26 | can support continuous maneuvers. 27 | 28 | .. currentmodule:: madlib._satellite 29 | 30 | .. autosummary:: 31 | :toctree: generated/ 32 | 33 | Satellite 34 | ContinuousThrustSatellite 35 | 36 | Sensors 37 | ------- 38 | 39 | Currently, MaDDG only supports optical sensors. These sensors 40 | report the angular position of a target satellite (in Right Ascension 41 | and Declination) at scheduled times. Sensors can be ground- 42 | or space-based, and each sensor can be configured with 43 | unique noise profiles and observing conditions. 44 | 45 | .. currentmodule:: madlib._sensor 46 | 47 | .. autosummary:: 48 | :toctree: generated/ 49 | 50 | _Sensor 51 | _OpticalSensor 52 | GroundOpticalSensor 53 | SpaceOpticalSensor 54 | 55 | .. currentmodule:: madlib._sensor_collection 56 | 57 | Multiple sensors can be combined into a network, each providing 58 | its own measurements of the target satellite. 59 | 60 | .. autosummary:: 61 | :toctree: generated/ 62 | 63 | SensorCollection 64 | 65 | Observations 66 | ------------ 67 | 68 | .. currentmodule:: madlib._observation 69 | 70 | Sensors' measurements of the target satellite's position 71 | are called observations. In most cases, we are actually 72 | interested in the target satellite's residuals, which 73 | are the difference between the object's measured position 74 | and its expected position at any given time. 75 | 76 | .. 
autosummary:: 77 | :toctree: generated/ 78 | 79 | Observation 80 | ObservationResidual 81 | ObservationCollection 82 | 83 | Maneuvers 84 | --------- 85 | 86 | Satellites perform different types of maneuvers for a wide 87 | variety of reasons, from small burns to maintain an orbit to 88 | large burns that drastically alter the trajectory. 89 | 90 | Maneuvers in MaDDG are divided into impulsive and continuous. 91 | 92 | .. currentmodule:: madlib._maneuver 93 | 94 | .. autosummary:: 95 | :toctree: generated/ 96 | 97 | ImpulsiveManeuver 98 | ContinuousManeuver 99 | 100 | maddg 101 | ===== 102 | 103 | Residual Calculation 104 | -------------------- 105 | 106 | .. currentmodule:: maddg._residuals 107 | 108 | The :func:`~maddg._residuals.calculate_residuals` method 109 | is a core simulation function. Given a network of sensors, 110 | a target satellite, a simulation duration and a start time, 111 | it will propagate the target and sensors through time and 112 | return a time series of the target's residuals in Right Ascension 113 | and Declination. 114 | 115 | These residuals show the difference between the target's observed 116 | position and its expected position, making them a useful metric 117 | for detecting and characterizing satellite maneuvers. 118 | 119 | .. autosummary:: 120 | :toctree: generated/ 121 | 122 | calculate_residuals 123 | 124 | Launching Simulations 125 | --------------------- 126 | 127 | MaDDG is designed to launch multiple simulations, and it 128 | can launch them in parallel if you have a multi-core CPU or 129 | access to a SLURM cluster. 130 | 131 | .. currentmodule:: maddg._sim_launcher 132 | 133 | The :func:`~maddg._sim_launcher.launcher` method is used to 134 | define a set of simulations. Users can define the satellites 135 | and sensors, assign the compute infrastructure and 136 | configure the simulation outputs. 137 | 138 | .. 
autosummary:: 139 | :toctree: generated/ 140 | 141 | create_task_fn 142 | launcher -------------------------------------------------------------------------------- /tests/inputs/invalid_sensor_3.yaml: -------------------------------------------------------------------------------- 1 | twilight_sun_el: &twilight_sun_el -18.0 2 | 3 | basic_weather: &basic_weather 4 | cloud_prob: 0.5 5 | cloud_duration_mean: 10800.0 # seconds 6 | cloud_duration_std: 7200.0 # seconds 7 | 8 | sensor_list: 9 | A1: 10 | id: A1 11 | lat: 0.0 12 | lon: 0.0 13 | alt: 0.0 14 | dra: 1.0 15 | ddec: 1.0 16 | student_dof: 1 17 | obs_per_collect: 1 18 | obs_time_spacing: 0 19 | collect_gap_mean: 6330.0 20 | collect_gap_std: 0 21 | obs_limits: 22 | el: [15.0, 90] 23 | sun_el: [*twilight_sun_el, 90] 24 | weather: *basic_weather 25 | A2: 26 | id: A2 27 | lat: 0.0 28 | lon: 0.0 29 | alt: 0.0 30 | dra: 1.0 31 | ddec: 1.0 32 | obs_per_collect: 1 33 | obs_time_spacing: 0 34 | collect_gap_mean: 6300.0 35 | collect_gap_std: 0 36 | obs_limits: 37 | el: [15.0, 90] 38 | sun_el: [*twilight_sun_el, 90] 39 | weather: *basic_weather 40 | B1: 41 | id: B1 42 | lat: -7.41 43 | lon: 72.45 44 | alt: -62.0 45 | dra: 1.0 46 | ddec: 1.0 47 | students_dof: 1 48 | obs_per_collect: 1 49 | obs_time_spacing: 0 50 | collect_gap_mean: 6340.0 51 | collect_gap_std: 0 52 | obs_limits: 53 | el: [15.0, 90] 54 | sun_el: [*twilight_sun_el, 90] 55 | weather: *basic_weather 56 | B2: 57 | id: B2 58 | lat: -7.41 59 | lon: 72.45 60 | alt: -62.0 61 | dra: 10.0 62 | ddec: 10.0 63 | students_dof: 1 64 | obs_per_collect: 1 65 | obs_time_spacing: 0 66 | collect_gap_mean: 6315.0 67 | collect_gap_std: 0 68 | obs_limits: 69 | el: [15.0, 90] 70 | sun_el: [*twilight_sun_el, 90] 71 | weather: *basic_weather 72 | B3: 73 | id: B3 74 | lat: -7.41 75 | lon: 72.45 76 | alt: -62.0 77 | dra: 1.0 78 | ddec: 1.0 79 | students_dof: 10 80 | obs_per_collect: 1 81 | obs_time_spacing: 0 82 | collect_gap_mean: 6350.0 83 | collect_gap_std: 0 84 | obs_limits: 85 | el: [15.0, 90] 86 | sun_el: [*twilight_sun_el, 90] 87 | weather: *basic_weather 88 | C1: 89 | id: C1 90 | lat: 33.82 91 | lon: 276.79 92 | alt: 1512.0 93 | dra: 1.0 94 | ddec: 1.0 95 | students_dof: 30.0 96 | obs_per_collect: 1 97 | obs_time_spacing: 0 98 | collect_gap_mean: 6350.0 99 | collect_gap_std: 0 100 | obs_limits: 101 | el: [15.0, 90] 102 | sun_el: [*twilight_sun_el, 90] 103 | weather: *basic_weather 104 | C2: 105 | id: C2 106 | lat: 33.82 107 | lon: 278.50 108 | alt: 1512.0 109 | dra: 10.0 110 | ddec: 10.0 111 | students_dof: 1 112 | obs_per_collect: 1 113 | obs_time_spacing: 0 114 | collect_gap_mean: 6340.0 115 | collect_gap_std: 0 116 | obs_limits: 117 | el: [15.0, 90] 118 | sun_el: [*twilight_sun_el, 90] 119 | weather: *basic_weather 120 | C3: 121 | id: C3 122 | lat: 33.82 123 | lon: 278.29 124 | alt: 1512.0 125 | dra: 1.0 126 | ddec: 1.0 127 | students_dof: 1 128 | obs_per_collect: 1 129 | obs_time_spacing: 0 130 | collect_gap_mean: 6340.0 131 | collect_gap_std: 0 132 | obs_limits: 133 | el: [15.0, 90] 134 | sun_el: [*twilight_sun_el, 90] 135 | weather: *basic_weather 136 | D1: 137 | id: D1 138 | lat: 20.71 139 | lon: 203.74 140 | alt: 3060.0 141 | dra: 1.0 142 | ddec: 1.0 143 | students_dof: 1 144 | obs_per_collect: 1 145 | obs_time_spacing: 0 146 | collect_gap_mean: 6330.0 147 | collect_gap_std: 0 148 | obs_limits: 149 | el: [15.0, 90] 150 | sun_el: [*twilight_sun_el, 90] 151 | weather: *basic_weather 152 | D2: 153 | id: D2 154 | lat: 20.71 155 | lon: 203.74 156 | alt: 3060.0 157 | dra: 10.0 158 | ddec: 10.0 159 | 
students_dof: 1 160 | obs_per_collect: 1 161 | obs_time_spacing: 0 162 | collect_gap_mean: 6350.0 163 | collect_gap_std: 0 164 | obs_limits: 165 | el: [15.0, 90] 166 | sun_el: [*twilight_sun_el, 90] 167 | weather: *basic_weather 168 | D3: 169 | id: D3 170 | lat: 20.71 171 | lon: 203.74 172 | alt: 3060.0 173 | dra: 1.0 174 | ddec: 1.0 175 | students_dof: 1 176 | obs_per_collect: 1 177 | obs_time_spacing: 0 178 | collect_gap_mean: 6350.0 179 | collect_gap_std: 0 180 | obs_limits: 181 | el: [15.0, 90] 182 | sun_el: [*twilight_sun_el, 90] 183 | weather: *basic_weather 184 | -------------------------------------------------------------------------------- /tests/inputs/students_t_sensors.yaml: -------------------------------------------------------------------------------- 1 | twilight_sun_el: &twilight_sun_el -18.0 2 | 3 | basic_weather: &basic_weather 4 | cloud_prob: 0.5 5 | cloud_duration_mean: 10800.0 # seconds 6 | cloud_duration_std: 7200.0 # seconds 7 | 8 | sensor_list: 9 | A1: 10 | id: A1 11 | lat: 0.0 12 | lon: 0.0 13 | alt: 0.0 14 | dra: 1.0 15 | ddec: 1.0 16 | students_dof: 1 17 | obs_per_collect: 1 18 | obs_time_spacing: 0 19 | collect_gap_mean: 6330.0 20 | collect_gap_std: 0 21 | obs_limits: 22 | el: [15.0, 90] 23 | sun_el: [*twilight_sun_el, 90] 24 | weather: *basic_weather 25 | A2: 26 | id: A2 27 | lat: 0.0 28 | lon: 0.0 29 | alt: 0.0 30 | dra: 1.0 31 | ddec: 1.0 32 | obs_per_collect: 1 33 | obs_time_spacing: 0 34 | collect_gap_mean: 6300.0 35 | collect_gap_std: 0 36 | obs_limits: 37 | el: [15.0, 90] 38 | sun_el: [*twilight_sun_el, 90] 39 | weather: *basic_weather 40 | B1: 41 | id: B1 42 | lat: -7.41 43 | lon: 72.45 44 | alt: -62.0 45 | dra: 1.0 46 | ddec: 1.0 47 | students_dof: 1 48 | obs_per_collect: 1 49 | obs_time_spacing: 0 50 | collect_gap_mean: 6340.0 51 | collect_gap_std: 0 52 | obs_limits: 53 | el: [15.0, 90] 54 | sun_el: [*twilight_sun_el, 90] 55 | weather: *basic_weather 56 | B2: 57 | id: B2 58 | lat: -7.41 59 | lon: 72.45 60 | alt: -62.0 61 | dra: 10.0 62 | ddec: 10.0 63 | students_dof: 1 64 | obs_per_collect: 1 65 | obs_time_spacing: 0 66 | collect_gap_mean: 6315.0 67 | collect_gap_std: 0 68 | obs_limits: 69 | el: [15.0, 90] 70 | sun_el: [*twilight_sun_el, 90] 71 | weather: *basic_weather 72 | B3: 73 | id: B3 74 | lat: -7.41 75 | lon: 72.45 76 | alt: -62.0 77 | dra: 1.0 78 | ddec: 1.0 79 | students_dof: 10 80 | obs_per_collect: 1 81 | obs_time_spacing: 0 82 | collect_gap_mean: 6350.0 83 | collect_gap_std: 0 84 | obs_limits: 85 | el: [15.0, 90] 86 | sun_el: [*twilight_sun_el, 90] 87 | weather: *basic_weather 88 | C1: 89 | id: C1 90 | lat: 33.82 91 | lon: 276.79 92 | alt: 1512.0 93 | dra: 1.0 94 | ddec: 1.0 95 | students_dof: 30.0 96 | obs_per_collect: 1 97 | obs_time_spacing: 0 98 | collect_gap_mean: 6350.0 99 | collect_gap_std: 0 100 | obs_limits: 101 | el: [15.0, 90] 102 | sun_el: [*twilight_sun_el, 90] 103 | weather: *basic_weather 104 | C2: 105 | id: C2 106 | lat: 33.82 107 | lon: 278.50 108 | alt: 1512.0 109 | dra: 10.0 110 | ddec: 10.0 111 | students_dof: 1 112 | obs_per_collect: 1 113 | obs_time_spacing: 0 114 | collect_gap_mean: 6340.0 115 | collect_gap_std: 0 116 | obs_limits: 117 | el: [15.0, 90] 118 | sun_el: [*twilight_sun_el, 90] 119 | weather: *basic_weather 120 | C3: 121 | id: C3 122 | lat: 33.82 123 | lon: 278.29 124 | alt: 1512.0 125 | dra: 1.0 126 | ddec: 1.0 127 | students_dof: 1 128 | obs_per_collect: 1 129 | obs_time_spacing: 0 130 | collect_gap_mean: 6340.0 131 | collect_gap_std: 0 132 | obs_limits: 133 | el: [15.0, 90] 134 | sun_el: 
[*twilight_sun_el, 90] 135 | weather: *basic_weather 136 | D1: 137 | id: D1 138 | lat: 20.71 139 | lon: 203.74 140 | alt: 3060.0 141 | dra: 1.0 142 | ddec: 1.0 143 | students_dof: 1 144 | obs_per_collect: 1 145 | obs_time_spacing: 0 146 | collect_gap_mean: 6330.0 147 | collect_gap_std: 0 148 | obs_limits: 149 | el: [15.0, 90] 150 | sun_el: [*twilight_sun_el, 90] 151 | weather: *basic_weather 152 | D2: 153 | id: D2 154 | lat: 20.71 155 | lon: 203.74 156 | alt: 3060.0 157 | dra: 10.0 158 | ddec: 10.0 159 | students_dof: 1 160 | obs_per_collect: 1 161 | obs_time_spacing: 0 162 | collect_gap_mean: 6350.0 163 | collect_gap_std: 0 164 | obs_limits: 165 | el: [15.0, 90] 166 | sun_el: [*twilight_sun_el, 90] 167 | weather: *basic_weather 168 | D3: 169 | id: D3 170 | lat: 20.71 171 | lon: 203.74 172 | alt: 3060.0 173 | dra: 1.0 174 | ddec: 1.0 175 | students_dof: 1 176 | obs_per_collect: 1 177 | obs_time_spacing: 0 178 | collect_gap_mean: 6350.0 179 | collect_gap_std: 0 180 | obs_limits: 181 | el: [15.0, 90] 182 | sun_el: [*twilight_sun_el, 90] 183 | weather: *basic_weather 184 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | # This file is used to configure your project. 2 | # Read more about the various options under: 3 | # https://setuptools.pypa.io/en/latest/userguide/declarative_config.html 4 | # https://setuptools.pypa.io/en/latest/references/keywords.html 5 | 6 | [metadata] 7 | name = MaDDG 8 | description = Maneuver Detection Data Generation 9 | author = see AUTHORS.md 10 | author_email = michael.kotson@ll.mit.edu 11 | license = MIT 12 | license_files = LICENSE.txt 13 | long_description = file: README.md 14 | long_description_content_type = text/markdown; charset=UTF-8; variant=GFM 15 | url = https://github.com/mit-ll/MaDDG 16 | # Add here related links, for example: 17 | project_urls = 18 | Documentation = https://maddg.readthedocs.io/en/latest/ 19 | # Source = https://github.com/pyscaffold/pyscaffold/ 20 | # Changelog = https://pyscaffold.org/en/latest/changelog.html 21 | # Tracker = https://github.com/pyscaffold/pyscaffold/issues 22 | # Conda-Forge = https://anaconda.org/conda-forge/pyscaffold 23 | # Download = https://pypi.org/project/PyScaffold/#files 24 | # Twitter = https://twitter.com/PyScaffold 25 | 26 | # Change if running only on Windows, Mac or Linux (comma-separated) 27 | platforms = any 28 | 29 | # Add here all kinds of additional classifiers as defined under 30 | # https://pypi.org/classifiers/ 31 | classifiers = 32 | Development Status :: 4 - Beta 33 | Programming Language :: Python 34 | 35 | 36 | [options] 37 | zip_safe = False 38 | packages = find_namespace: 39 | include_package_data = True 40 | package_dir = 41 | =src 42 | 43 | # Require a min/specific Python version (comma-separated conditions) 44 | python_requires = >=3.11, <3.13 45 | 46 | # Add here dependencies of your project (line-separated), e.g. requests>=2.2,<3.0. 47 | # Version specifiers like >=2.2,<3.0 avoid problems due to API changes in 48 | # new major versions. This works if the required packages follow Semantic Versioning. 49 | # For more information, check out https://semver.org/. 
50 | install_requires = 51 | AstroForge 52 | requests >= 2.18.4 53 | astropy >= 5.3.1 54 | matplotlib >= 3.7.2 55 | numpy >= 1.26.4, < 2 56 | pandas >= 2.0.3 57 | hydra-core >= 1.3.2 58 | hydra-submitit-launcher >= 1.2.0 59 | hydra-zen >= 0.11.0 60 | submitit >= 1.4.5 61 | pyyaml >= 6.0.1 62 | jsonschema >= 4.19.0 63 | 64 | [options.packages.find] 65 | where = src 66 | exclude = 67 | tests 68 | 69 | [options.extras_require] 70 | # Add here additional requirements for extra features, to install with: 71 | # `pip install MaDDG[PDF]` like: 72 | # PDF = ReportLab; RXP 73 | 74 | # Add here test requirements (semicolon/line-separated) 75 | testing = 76 | setuptools 77 | pytest 78 | pytest-cov 79 | 80 | [options.entry_points] 81 | # Add here console scripts like: 82 | # console_scripts = 83 | # script_name = madlib.module:function 84 | # For example: 85 | # console_scripts = 86 | # fibonacci = madlib.skeleton:run 87 | # And any other entry points, for example: 88 | # pyscaffold.cli = 89 | # awesome = pyscaffoldext.awesome.extension:AwesomeExtension 90 | 91 | [tool:pytest] 92 | # Specify command line options as you would do when invoking pytest directly. 93 | # e.g. --cov-report html (or xml) for html/xml output or --junitxml junit.xml 94 | # in order to write a coverage file that can be read by Jenkins. 95 | # CAUTION: --cov flags may prohibit setting breakpoints while debugging. 96 | # Comment those flags to avoid this pytest issue. 97 | addopts = 98 | --cov madlib --cov-report term-missing 99 | --verbose 100 | norecursedirs = 101 | dist 102 | build 103 | .tox 104 | testpaths = tests 105 | # Use pytest markers to select/deselect specific tests 106 | # markers = 107 | # slow: mark tests as slow (deselect with '-m "not slow"') 108 | # system: mark end-to-end system tests 109 | 110 | [devpi:upload] 111 | # Options for the devpi: PyPI server and packaging tool 112 | # VCS export must be deactivated since we are using setuptools-scm 113 | no_vcs = 1 114 | formats = bdist_wheel 115 | 116 | [flake8] 117 | # Some sane defaults for the code style checker flake8 118 | max_line_length = 88 119 | extend_ignore = E203, W503 120 | # ^ Black-compatible 121 | # E203 and W503 have edge cases handled by black 122 | exclude = 123 | .tox 124 | build 125 | dist 126 | .eggs 127 | docs/conf.py 128 | 129 | [pyscaffold] 130 | # PyScaffold's parameters when the project was created. 131 | # This will be used when updating. Do not change! 132 | version = 4.5 133 | package = madlib 134 | extensions = 135 | markdown 136 | no_skeleton 137 | -------------------------------------------------------------------------------- /tests/test_madlib_observations.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | """ 5 | Test file: test_madlib_observations.py 6 | Description: This file contains unit tests which test "edge cases" in 7 | the madlib._observations module. 
8 | """ 9 | 10 | import pathlib 11 | import sys 12 | 13 | import astroforge as af 14 | import numpy as np 15 | from astropy.time import Time 16 | 17 | # add parent directory of __file__ to sys.path, if isn't already included 18 | if str(pathlib.Path(__file__).parents[1]) not in sys.path: 19 | sys.path.append(str(pathlib.Path(__file__).parents[1])) 20 | 21 | import pytest 22 | 23 | import madlib 24 | from madlib._observation import Observation, combineObsCollections 25 | from madlib._utils import MadlibException 26 | 27 | # --- setup dummy sensor 28 | SST_PARAMS = { 29 | "lat": -21.89567, 30 | "lon": 114.0898731, 31 | "alt": 0.067845, 32 | "dra": 0.3, 33 | "ddec": 0.3, 34 | "obs_per_collect": (3, 5), 35 | "obs_time_spacing": 1.5, 36 | "collect_gap_mean": 7200, 37 | "collect_gap_std": 800, 38 | "obs_limits": None, 39 | "obs_limits": None, 40 | "id": "SST", 41 | } 42 | 43 | seed = 4445 44 | 45 | 46 | def test_Observation_madlib_exception_1(): 47 | """Observations must be at the same time for computing a residual""" 48 | with pytest.raises(MadlibException): 49 | obs1 = Observation(mjd=54321.0) 50 | obs2 = Observation(mjd=43210.0) 51 | return obs1 - obs2 52 | 53 | 54 | def test_Observation_madlib_exception_2(): 55 | """Can only subtract two Observation objects""" 56 | with pytest.raises(MadlibException): 57 | obs1 = Observation(mjd=54321.0) 58 | obs2 = 54321.0 59 | return obs1 - obs2 # type: ignore 60 | 61 | 62 | def test_Observation_special_handling_angles_that_wrap(): 63 | obs1 = Observation(mjd=54321.0) 64 | obs1.ra = 359.0 65 | obs2 = Observation(mjd=54321.0) 66 | obs2.ra = 2.0 67 | residual = obs2 - obs1 68 | assert np.isclose(residual.ra, 3.0) # type: ignore 69 | 70 | 71 | def test_Observation_asarray(): 72 | obs = Observation(mjd=54321.0) 73 | assert isinstance(obs, madlib._observation.Observation) 74 | assert isinstance(obs.asarray(), np.ndarray) 75 | 76 | 77 | def test_ObservationResidual_asarray(): 78 | obs1 = Observation(mjd=54321.0) 79 | obs2 = Observation(mjd=54321.0) 80 | residual = obs2 - obs1 81 | assert isinstance(residual.asarray(), np.ndarray) 82 | 83 | 84 | def test_ObservationCollection_edge_cases(seed=seed): 85 | # --- generate an ObservationCollection 86 | # --- orbital state 87 | np.random.seed(seed) 88 | th = 2 * np.pi * np.random.rand() 89 | x0 = af.constants.Rgeo * np.array([np.cos(th), np.sin(th), 0.0]) 90 | v0 = af.constants.Vgeo * np.array([-np.sin(th), np.cos(th), 0.0]) 91 | a0 = np.zeros((3,)) 92 | t_start = Time("2011-11-11T11:11:11", format="isot", scale="utc") 93 | epoch = t_start.mjd 94 | 95 | # --- define satellites 96 | sat0 = madlib.Satellite(epoch, x0, v0, a0) 97 | 98 | # --- setup sensor and generate ObservationCollection(s) 99 | sensor = madlib.GroundOpticalSensor(**SST_PARAMS) 100 | times = sensor.generate_obs_timing(epoch, epoch + 0.5) # + 0.5 [days] 101 | 102 | ObsColl1 = sensor.observe(sat0, times) 103 | ObsColl2 = sensor.observe(sat0, times) 104 | 105 | ObsColl1_pos_observed_original_size = ObsColl1.pos_observed.size 106 | ObsColl1_pos_truth_original_size = ObsColl1.pos_truth.size 107 | ObsColl1 + ObsColl2 # type: ignore 108 | 109 | collectionList = [ObsColl1, ObsColl2] 110 | combinedObsCollections = combineObsCollections(collectionList) 111 | 112 | # --- check that ObsColl2 was concatenated with ObsColl1 during the addition line above 113 | # check if ObsColl1.pos_observed now contains ObsColl2.pos_observed 114 | assert all( 115 | ObsColl1.pos_observed[-ObsColl2.pos_observed.size :] == ObsColl2.pos_observed 116 | ) 117 | assert 
all(ObsColl1.pos_truth[-ObsColl2.pos_truth.size :] == ObsColl2.pos_truth) 118 | # check if size is correct 119 | assert ( 120 | ObsColl1.pos_observed.size 121 | == ObsColl1_pos_observed_original_size + ObsColl2.pos_observed.size 122 | ) 123 | assert ( 124 | ObsColl1.pos_truth.size 125 | == ObsColl1_pos_truth_original_size + ObsColl2.pos_truth.size 126 | ) 127 | 128 | # --- check _obvervation.combineObsCollections() 129 | assert ( 130 | ObsColl1.pos_observed.size + ObsColl2.pos_observed.size 131 | == combinedObsCollections.pos_observed.size 132 | ) 133 | assert ( 134 | ObsColl1.pos_truth.size + ObsColl2.pos_truth.size 135 | == combinedObsCollections.pos_truth.size 136 | ) 137 | -------------------------------------------------------------------------------- /tests/test_propagate.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | """ 5 | Test file: test_propagate.py 6 | Description: This file contains a unit tests which propagates a satellite, 7 | with and without a manuever, and compares the final lat&lon values to 8 | known values. This unit test in itself provides ~80% code coverage. 9 | """ 10 | 11 | import pathlib 12 | import sys 13 | 14 | import astroforge as af 15 | import numpy as np 16 | from astropy.time import Time 17 | 18 | # add parent directory of __file__ to sys.path, if isn't already included 19 | if str(pathlib.Path(__file__).parents[1]) not in sys.path: 20 | sys.path.append(str(pathlib.Path(__file__).parents[1])) 21 | import madlib 22 | 23 | 24 | def test_sat_impulse_maneuver(seed=4445): 25 | np.random.seed(seed) 26 | # --- orbital state 27 | th = 2 * np.pi * np.random.rand() 28 | x0 = af.constants.Rgeo * np.array([np.cos(th), np.sin(th), 0.0]) 29 | v0 = af.constants.Vgeo * np.array([-np.sin(th), np.cos(th), 0.0]) 30 | a0 = np.zeros((3,)) 31 | t_start = Time("2011-11-11T11:11:11", format="isot", scale="utc") 32 | epoch = t_start.mjd 33 | 34 | # --- maneuver definition 35 | man_time = epoch + 5.0 / 24 36 | man_dv = np.array([0.0, 5.0, 0.0]) / 1000 37 | man = madlib.ImpulsiveManeuver(man_time, man_dv) 38 | 39 | # --- define satellites 40 | sat0 = madlib.Satellite(epoch, x0, v0, a0) 41 | sat1 = madlib.Satellite(epoch, x0, v0, a0, man) 42 | 43 | # --- setup sensor 44 | SST_PARAMS = { 45 | "lat": -21.89567, 46 | "lon": 114.0898731, 47 | "alt": 0.067845, 48 | "dra": 0.3, 49 | "ddec": 0.3, 50 | "obs_per_collect": (3, 5), 51 | "obs_time_spacing": 1.5, 52 | "collect_gap_mean": 7200, 53 | "collect_gap_std": 800, 54 | # "obs_limits": {"el": (20.0, 91.0)}, 55 | "obs_limits": None, 56 | "id": "SST", 57 | } 58 | 59 | sensor = madlib.GroundOpticalSensor(**SST_PARAMS) 60 | times = sensor.generate_obs_timing(epoch, epoch + 2) # + 2 [hours] 61 | 62 | manuv_f = sensor.observe(sat0, times).pos_truth 63 | manuv_t = sensor.observe(sat1, times).pos_truth 64 | manuv_f_dict = { 65 | "ra": np.array([x.ra for x in manuv_f]), 66 | "dec": np.array([x.dec for x in manuv_f]), 67 | } 68 | manuv_t_dict = { 69 | "ra": np.array([x.ra for x in manuv_t]), 70 | "dec": np.array([x.dec for x in manuv_t]), 71 | } 72 | 73 | assert all( 74 | np.isclose( 75 | np.array( 76 | [ 77 | manuv_f_dict["ra"][-1], 78 | manuv_f_dict["dec"][-1], 79 | manuv_t_dict["ra"][-1], 80 | manuv_t_dict["dec"][-1], 81 | ] 82 | ), 83 | np.array( 84 | [ 85 | 28.88567036042452, 86 | 3.295301473939045, 87 | 25.393658788650338, 88 | 3.3090154095997817, 89 | ] 90 | ), 91 | ) 92 | ) 93 | 94 | 95 | def 
test_sat_continuous_maneuver(seed=4445): 96 | np.random.seed(seed) 97 | # --- orbital state 98 | th = 2 * np.pi * np.random.rand() 99 | x0 = af.constants.Rgeo * np.array([np.cos(th), np.sin(th), 0.0]) 100 | v0 = af.constants.Vgeo * np.array([-np.sin(th), np.cos(th), 0.0]) 101 | a0 = np.zeros((3,)) 102 | t_start = Time("2011-11-11T11:11:11", format="isot", scale="utc") 103 | epoch = t_start.mjd 104 | 105 | # --- maneuver definition 106 | # ContinuousThrust Function Definition 107 | def acc_f(t): 108 | return np.array([0.0, 1.0e-7, 0.0]) 109 | 110 | acc_t_range = (epoch, epoch + 3) 111 | # ContinuousManeuver Definition 112 | man = madlib.ContinuousManeuver(acc_f, acc_t_range) 113 | 114 | # --- define satellites 115 | sat0 = madlib.Satellite(epoch, x0, v0, a0) 116 | sat1 = madlib.ContinuousThrustSatellite(epoch, x0, v0, a0, man) 117 | 118 | # --- setup sensor 119 | SST_PARAMS = { 120 | "lat": -21.89567, 121 | "lon": 114.0898731, 122 | "alt": 0.067845, 123 | "dra": 0.3, 124 | "ddec": 0.3, 125 | "obs_per_collect": (3, 5), 126 | "obs_time_spacing": 1.5, 127 | "collect_gap_mean": 7200, 128 | "collect_gap_std": 800, 129 | # "obs_limits": {"el": (20.0, 91.0)}, 130 | "obs_limits": None, 131 | "id": "SST", 132 | } 133 | 134 | sensor = madlib.GroundOpticalSensor(**SST_PARAMS) 135 | times = sensor.generate_obs_timing(epoch, epoch + 2) # + 2 [hours] 136 | 137 | manuv_f = sensor.observe(sat0, times).pos_truth 138 | manuv_t = sensor.observe(sat1, times).pos_truth 139 | manuv_f_dict = { 140 | "ra": np.array([x.ra for x in manuv_f]), 141 | "dec": np.array([x.dec for x in manuv_f]), 142 | } 143 | manuv_t_dict = { 144 | "ra": np.array([x.ra for x in manuv_t]), 145 | "dec": np.array([x.dec for x in manuv_t]), 146 | } 147 | dt = np.array([Time(x.mjd, format="mjd").datetime for x in manuv_f]) 148 | 149 | assert all( 150 | np.isclose( 151 | np.array( 152 | [ 153 | manuv_f_dict["ra"][-1], 154 | manuv_f_dict["dec"][-1], 155 | manuv_t_dict["ra"][-1], 156 | manuv_t_dict["dec"][-1], 157 | ] 158 | ), 159 | np.array( 160 | [ 161 | 28.88567036042452, 162 | 3.295301473939045, 163 | 30.024913619492622, 164 | 3.3128772755122093, 165 | ] 166 | ), 167 | ) 168 | ) 169 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import numpy as np 3 | 4 | from madlib._utils import calc_separation_angle, MadlibException 5 | 6 | 7 | def assert_vector_angle(V1, V2, angle_rad): 8 | sep_rad = calc_separation_angle(V1, V2) 9 | sep_deg = calc_separation_angle(V1, V2, in_deg=True) 10 | 11 | np.testing.assert_almost_equal(sep_rad, angle_rad, decimal=4) 12 | np.testing.assert_almost_equal(sep_deg, angle_rad * 180.0 / np.pi, decimal=4) 13 | 14 | 15 | def assert_multiple_vector_angles(V1, V2, angle_rad_list): 16 | angle_rad_truth = np.array(angle_rad_list) 17 | 18 | sep_rad = calc_separation_angle(V1, V2) 19 | sep_deg = calc_separation_angle(V1, V2, in_deg=True) 20 | 21 | np.testing.assert_allclose(sep_rad, angle_rad_truth, atol=1e-4) 22 | np.testing.assert_allclose(sep_deg, angle_rad_truth * 180.0 / np.pi, atol=1e-4) 23 | 24 | 25 | class TestSeparationAngle: 26 | """Test behavior of calc_separation_angle function""" 27 | 28 | def test_single_separations(self): 29 | """Test the separation between a few individual vectors in radians.""" 30 | N1_1 = np.array( 31 | [ 32 | [1, 0, 0], 33 | ] 34 | ) 35 | N1_2 = np.array( 36 | [ 37 | [0, 1, 0], 38 | ] 39 | ) 40 | N1_3 = np.array( 41 | [ 42 | [-1, 0, 
0], 43 | ] 44 | ) 45 | N1_4 = np.array( 46 | [ 47 | [2, 0, 0], 48 | ] 49 | ) 50 | N1_5 = np.array( 51 | [ 52 | [1, 1, 0], 53 | ] 54 | ) 55 | N1_6 = np.array( 56 | [ 57 | [1, -1, 0], 58 | ] 59 | ) 60 | N1_7 = np.array( 61 | [ 62 | [-1, -1, 0], 63 | ] 64 | ) 65 | N1_8 = np.array( 66 | [ 67 | [1, 2, 3], 68 | ] 69 | ) 70 | 71 | assert_vector_angle(N1_1, N1_1, 0.0) 72 | assert_vector_angle(N1_1, N1_2, np.pi / 2) 73 | assert_vector_angle(N1_2, N1_1, np.pi / 2) 74 | assert_vector_angle(N1_1, N1_3, np.pi) 75 | assert_vector_angle(N1_1, N1_4, 0.0) 76 | assert_vector_angle(N1_1, N1_5, np.pi / 4) 77 | assert_vector_angle(N1_1, N1_6, np.pi / 4) 78 | assert_vector_angle(N1_1, N1_7, 3 * np.pi / 4) 79 | assert_vector_angle(N1_1, N1_8, 1.30024656) 80 | 81 | def test_multiple_vectors(self): 82 | """Test the pairwise separation between arrays of vectors.""" 83 | N2_1 = np.array( 84 | [ 85 | [0.1, 0.2, 0.3], 86 | [0.4, 0.5, 0.6], 87 | [0.7, 0.8, 0.9], 88 | [1.0, 1.1, 1.2], 89 | ] 90 | ) 91 | N2_2 = np.array( 92 | [ 93 | [-1.2, -1.1, -1.0], 94 | [-0.9, -0.8, -0.7], 95 | [-0.6, -0.5, -0.4], 96 | [-0.3, -0.2, -0.1], 97 | ] 98 | ) 99 | 100 | separations = [2.67990488, 2.87801221, 2.87801221, 2.67990488] 101 | 102 | assert_multiple_vector_angles(N2_1, N2_2, separations) 103 | 104 | def test_invalid_shapes(self): 105 | """Test invalid or incompatible vector shapes.""" 106 | 107 | # Case: Neither array is two-dimensional 108 | B1 = np.array([0, 0, 0]) 109 | B2 = np.array([0, 0, 0]) 110 | 111 | with pytest.raises(MadlibException): 112 | calc_separation_angle(B1, B2) 113 | 114 | # Case: Only one of the arrays is two-dimensional 115 | B1 = np.array( 116 | [ 117 | [0, 0, 0], 118 | ] 119 | ) 120 | B2 = np.array([0, 0, 0]) 121 | 122 | with pytest.raises(MadlibException): 123 | calc_separation_angle(B1, B2) 124 | 125 | with pytest.raises(MadlibException): 126 | calc_separation_angle(B2, B1) 127 | 128 | # Case: Neither array has three columns 129 | B1 = np.array( 130 | [ 131 | [0, 0], 132 | [0, 0], 133 | ] 134 | ) 135 | B2 = np.array( 136 | [ 137 | [0, 0, 0, 0], 138 | [0, 0, 0, 0], 139 | ] 140 | ) 141 | 142 | with pytest.raises(MadlibException): 143 | calc_separation_angle(B1, B2) 144 | 145 | # Case: Only one of the arrays has three columns 146 | B1 = np.array( 147 | [ 148 | [0, 0, 0], 149 | [0, 0, 0], 150 | ] 151 | ) 152 | B2 = np.array( 153 | [ 154 | [0, 0, 0, 0], 155 | [0, 0, 0, 0], 156 | ] 157 | ) 158 | 159 | with pytest.raises(MadlibException): 160 | calc_separation_angle(B1, B2) 161 | 162 | B1 = np.array( 163 | [ 164 | [0, 0], 165 | [0, 0], 166 | ] 167 | ) 168 | B2 = np.array( 169 | [ 170 | [0, 0, 0], 171 | [0, 0, 0], 172 | ] 173 | ) 174 | 175 | with pytest.raises(MadlibException): 176 | calc_separation_angle(B1, B2) 177 | 178 | # Case: The arrays have different numbers of rows 179 | B1 = np.array( 180 | [ 181 | [0, 0, 0], 182 | [0, 0, 0], 183 | [0, 0, 0], 184 | ] 185 | ) 186 | B2 = np.array( 187 | [ 188 | [0, 0, 0], 189 | [0, 0, 0], 190 | ] 191 | ) 192 | 193 | with pytest.raises(MadlibException): 194 | calc_separation_angle(B1, B2) 195 | -------------------------------------------------------------------------------- /src/madlib/_sensor_collection.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | from pathlib import Path 5 | from typing import List, Sequence 6 | 7 | import numpy as np 8 | import yaml 9 | from jsonschema import validate 10 | from jsonschema.exceptions import 
ValidationError 11 | from numpy.typing import NDArray 12 | 13 | from madlib._utils import MadlibException, sensor_yaml_schema 14 | 15 | from ._observation import ObservationCollection, combineObsCollections 16 | from ._satellite import Satellite 17 | from ._sensor import GroundOpticalSensor, _Sensor 18 | 19 | 20 | class SensorException(Exception): 21 | """SensorException class""" 22 | 23 | def __init__(self, message=None): 24 | super().__init__(message) 25 | 26 | 27 | class SensorCollection: 28 | """Class containing multiple sensor objects that can generate a 29 | comprehensive observing schedule and collate observations.""" 30 | 31 | def __init__(self, sensorList: Sequence[_Sensor]): 32 | """Initialize SensorCollection class 33 | 34 | Parameters 35 | ---------- 36 | sensorList : List[Sensor] 37 | List of Sensors to include in observation network 38 | """ 39 | self.sensorList = list(sensorList) 40 | self.numSensors = len(sensorList) 41 | 42 | self.obsTimes: List[NDArray[np.float64]] | None = None 43 | 44 | @staticmethod 45 | def paramsFromYAML( 46 | yaml_file: str | Path, 47 | ): 48 | """Parse a YAML into sensor parameters, returned as a list of dicts""" 49 | try: 50 | with open(yaml_file, "r") as f: 51 | sensor_data = yaml.safe_load(f) 52 | validate(sensor_data, sensor_yaml_schema) 53 | except ValidationError as e: 54 | if e.message == "'sensor_list' is a required property": 55 | msg = f"The sensor YAML file {yaml_file} must contain the top-level property 'sensor_list'." 56 | else: 57 | e_path = str(e.path) 58 | section = "/".join(list(e_path)) 59 | msg = ( 60 | f"Error in the sensor YAML file [{yaml_file}] in the entry [{section}]: " 61 | f"{e.message}" 62 | ) 63 | raise MadlibException(msg) from None 64 | 65 | sensor_data = sensor_data["sensor_list"] 66 | 67 | return sensor_data 68 | 69 | @classmethod 70 | def fromYAML(cls, yaml_file: str): 71 | """Instantiate a SensorCollection object from a YAML file 72 | 73 | Parameters 74 | ---------- 75 | yaml_file : str 76 | Path to YAML file defining the sensors in the collection 77 | """ 78 | sensor_data = cls.paramsFromYAML(yaml_file) 79 | sensors = [GroundOpticalSensor(**params) for key, params in sensor_data.items()] 80 | sensor_network = cls(sensors) 81 | 82 | return sensor_network 83 | 84 | def generate_obs_timing(self, start: float, end: float): 85 | """Given a start time and an end time (in MJD), generate an 86 | array of observation times (also in MJD) based on the sensors' 87 | defined parameters. 88 | 89 | Parameters 90 | ---------- 91 | start : float 92 | Earliest possible observation timestamp (MJD) 93 | end : float 94 | Latest possible observation (MJD) 95 | 96 | Raises 97 | ------ 98 | SensorException 99 | The observation schedule has already been generated. 100 | """ 101 | if self.obsTimes is not None: 102 | message = "The observation schedule has already been generated." 103 | raise SensorException(message=message) 104 | 105 | self.obsTimes = [ 106 | sensor.generate_obs_timing(start, end) for sensor in self.sensorList 107 | ] 108 | 109 | def add_sensor(self, sensor: _Sensor): 110 | """Add a new sensor to the existing collection, provided the 111 | sensor timing has not already been generated. 112 | 113 | Parameters 114 | ---------- 115 | sensor : _Sensor 116 | The sensor object to add to the collection 117 | """ 118 | if self.obsTimes is not None: 119 | message = ( 120 | "Cannot add new sensors to a SensorCollection " 121 | "if observation timing has already been generated." 
122 | ) 123 | raise (SensorException(message=message)) 124 | 125 | self.sensorList.append(sensor) 126 | self.numSensors = len(self.sensorList) 127 | 128 | def observe(self, target_satellite: Satellite) -> ObservationCollection: 129 | """Given a madlib.Satellite, generate an ObservationCollection 130 | 131 | Parameters 132 | ---------- 133 | target_satellite : Satellite 134 | madlib.Satellite is a class for propagating a satellite 135 | 136 | Returns 137 | ------- 138 | ObservationCollection 139 | Observations of a satellite from multiple sensors, combined into a single object. 140 | 141 | Raises 142 | ------ 143 | SensorException 144 | The observation schedule has already been generated. 145 | """ 146 | if self.obsTimes is None: 147 | message = "obsTimes is None. Did you forget to generate obs timing first?" 148 | raise SensorException(message=message) 149 | else: 150 | obsCollections: List[ObservationCollection] = [ 151 | sensor.observe(target_satellite, obstimes) 152 | for sensor, obstimes in zip(self.sensorList, self.obsTimes) 153 | ] 154 | observations: ObservationCollection = combineObsCollections(obsCollections) 155 | 156 | return observations 157 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 12 | 13 | [![Project generated with PyScaffold](https://img.shields.io/badge/-PyScaffold-005CA0?logo=pyscaffold)](https://pyscaffold.org/) 14 | 15 | # MaDDG (Maneuver Detection Data Generation) 16 | 17 |

A library for simulating high-fidelity observations of satellite trajectories with configurable maneuvers and custom sensor networks.

18 | 19 | MaDDG provides a simple interface for modeling complex observation scenarios. It allows you to create a satellite in any geocentric orbit, propagate its motion with a robust physical model, and track its position through optical sensors with customizable locations, observing limits, and noise parameters. 20 | 21 | Through its use of [hydra-zen](https://github.com/mit-ll-responsible-ai/hydra-zen) and the [submitit plugin](https://hydra.cc/docs/plugins/submitit_launcher/), MaDDG can easily configure an array of simulation scenarios and distribute them in a SLURM cluster, empowering users to create large-scale, realistic datasets for training reliable maneuver detection and characterization models. 22 | 23 | ## Installation 24 | 25 | MaDDG is available on PyPI: 26 | 27 | ```console 28 | pip install MaDDG 29 | ``` 30 | 31 | To install from source, clone this repository and run the following 32 | command from its top-level directory: 33 | 34 | ```console 35 | pip install -e . 36 | ``` 37 | 38 | If you want to modify the orbit propagation physics behind MaDDG, you 39 | will likely need to edit the [AstroForge](https://github.com/mit-ll/AstroForge) library, as well. AstroForge is 40 | an open-source astrodynamics library and a key requirement of MaDDG. See 41 | the [AstroForge documentation](https://astroforge.readthedocs.io/en/latest/) for installation instructions. 42 | 43 | ## Usage 44 | 45 | For details on how to use the various features of MaDDG, we recommend following the Jupyter notebooks in the `examples/` directory. 46 | 47 | ## Citation 48 | 49 | Please use this DOI number reference, published on [Zenodo](https://zenodo.org), when citing the software: 50 | 51 | [![DOI](https://zenodo.org/badge/921266858.svg)](https://doi.org/10.5281/zenodo.15080638) 52 | 53 | ## Post Processing Scripts 54 | 55 | ### Weather Event (Cloud Dropout) 56 | 57 | Use `dropout.py` to apply pseudo weather-based data dropout to dataset .csv files created with `hz_launcher.py`. 
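For example, a single dropout realization can be generated with a command like the one below (the dataset path is hypothetical; all flags are described in the help output that follows):

```console
$ python scripts/dropout.py --path=outputs/complete.csv --cloud_prob=0.5 --num_runs=1 --save_plots
```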
58 | 59 | ``` 60 | $ python scripts/dropout.py --help 61 | usage: dropout.py [-h] --path PATH [--cloud_prob CLOUD_PROB] [--cloud_duration_mean CLOUD_DURATION_MEAN] [--cloud_duration_std CLOUD_DURATION_STD] [--num_runs NUM_RUNS] 62 | [--save_copy_of_original] [--save_plots] [--submitit] 63 | 64 | Script description 65 | 66 | options: 67 | -h, --help show this help message and exit 68 | --path PATH The path to the input data file (.csv) (default: None) 69 | --cloud_prob CLOUD_PROB 70 | Probability of a cloud event blocking the sky during 71 | any nighttime observable window for each sensor (default: 0.5) 72 | --cloud_duration_mean CLOUD_DURATION_MEAN 73 | Mean duration of a cloud event (seconds) (default: 10800.0) 74 | --cloud_duration_std CLOUD_DURATION_STD 75 | Standard deviation of a cloud event (seconds) (default: 3600.0) 76 | --num_runs NUM_RUNS Number of dropout datasets to generate (default: 1) 77 | --save_copy_of_original 78 | Raise this flag to save copy of the original input data along 79 | side of the modified dataset with dropouts (default: False) 80 | --save_plots Raise this flag to generate and save plots (default: False) 81 | --submitit Raise this flag to use submitit to launch jobs across multiple 82 | nodes in parallel (default: False) 83 | ``` 84 | 85 | Example VS Code launch.json entry: 86 | 87 | ```json 88 | { 89 | "name": "Launch Dropout", 90 | "type": "python", 91 | "request": "launch", 92 | "program": "scripts/dropout.py", 93 | "console": "integratedTerminal", 94 | "cwd": "/path/to/MaDDG", 95 | "args": [ 96 | "--path=/path/to/complete.csv", 97 | "--cloud_prob=1.0", 98 | "--cloud_duration_mean=0,3600,7200,10800,14400,18000,21600,25200,28800,32400,36000,39600,43200,46800,50400,54000,57600", 99 | "--cloud_duration_std=0.0", 100 | "--num_runs=10", 101 | "--save_copy_of_original", 102 | "--submitit", 103 | ] 104 | } 105 | ``` 106 | 107 | ## Disclaimer 108 | 109 | DISTRIBUTION STATEMENT A. Approved for public release. Distribution is unlimited. 110 | 111 | Research was sponsored by the United States Air Force Research Laboratory and the United 112 | States Air Force Artificial Intelligence Accelerator and was accomplished under Cooperative 113 | Agreement Number FA8750-19-2-1000. The views and conclusions contained in this document 114 | are those of the authors and should not be interpreted as representing the official 115 | policies, either expressed or implied, of the United States Air Force or the U.S. 116 | Government. The U.S. Government is authorized to reproduce and distribute reprints 117 | for Government purposes notwithstanding any copyright notation herein. 118 | 119 | © 2024 Massachusetts Institute of Technology.
120 | -------------------------------------------------------------------------------- /src/madlib/_utils.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | import numpy as np 5 | from numpy.typing import NDArray 6 | 7 | 8 | class MadlibException(Exception): 9 | """MadlibException Class""" 10 | 11 | pass 12 | 13 | 14 | sensor_yaml_schema = { 15 | "type": "object", 16 | "properties": { 17 | "sensor_list": { 18 | "type": "object", 19 | "patternProperties": { 20 | ".": { 21 | "type": "object", 22 | "properties": { 23 | "id": {"type": "string"}, 24 | "lat": {"type": "number", "minimum": -90, "maximum": 90}, 25 | "lon": {"type": "number", "minimum": -180, "maximum": 360}, 26 | "alt": {"type": "number"}, 27 | "dra": {"type": "number", "minimum": 0}, 28 | "ddec": {"type": "number", "minimum": 0}, 29 | "students_dof": {"type": "number", "exclusiveMinimum": 0}, 30 | "obs_per_collect": {"type": "integer", "exclusiveMinimum": 0}, 31 | "obs_time_spacing": {"type": "number", "minimum": 0}, 32 | "collect_gap_mean": {"type": "number", "minimum": 0}, 33 | "collect_gap_std": {"type": "number", "minimum": 0}, 34 | "obs_limits": { 35 | "type": ["object", "null"], 36 | "properties": { 37 | "el": { 38 | "type": "array", 39 | "items": { 40 | "type": "number", 41 | "minimum": -90, 42 | "maximum": 90, 43 | }, 44 | "minItems": 2, 45 | "maxItems": 2, 46 | }, 47 | "az": { 48 | "type": "array", 49 | "items": { 50 | "type": "number", 51 | "minimum": -180, 52 | "maximum": 180, 53 | }, 54 | "minItems": 2, 55 | "maxItems": 2, 56 | }, 57 | "sun_el": { 58 | "type": "array", 59 | "items": { 60 | "type": "number", 61 | "minimum": -90, 62 | "maximum": 90, 63 | }, 64 | "minItems": 2, 65 | "maxItems": 2, 66 | }, 67 | "dec": { 68 | "type": "array", 69 | "items": { 70 | "type": "number", 71 | "minimum": -90, 72 | "maximum": 90, 73 | }, 74 | "minItems": 2, 75 | "maxItems": 2, 76 | }, 77 | "range_": { 78 | "type": "array", 79 | "items": { 80 | "type": "number", 81 | "minimum": 0, 82 | }, 83 | "minItems": 2, 84 | "maxItems": 2, 85 | }, 86 | }, 87 | }, 88 | "weather": { 89 | "type": "object", 90 | "properties": { 91 | "cloud_prob": { 92 | "type": "number", 93 | "minimum": 0, 94 | "maximum": 1, 95 | }, 96 | "cloud_duration_mean": {"type": "number", "minimum": 0}, 97 | "cloud_duration_std": {"type": "number", "minimum": 0}, 98 | }, 99 | }, 100 | }, 101 | "required": [ 102 | "lat", 103 | "lon", 104 | "alt", 105 | "dra", 106 | "ddec", 107 | "collect_gap_mean", 108 | ], 109 | "additionalProperties": False, 110 | } 111 | }, 112 | } 113 | }, 114 | "required": ["sensor_list"], 115 | } 116 | 117 | 118 | def calc_separation_angle( 119 | v1: NDArray[np.float64], 120 | v2: NDArray[np.float64], 121 | in_deg: bool = False, 122 | ): 123 | """Returns the angle between vectors v1 and v2, both with 124 | shapes (N, 3). Output is in radians by default, in degrees if 125 | is True.""" 126 | 127 | shape_1 = v1.shape 128 | shape_2 = v2.shape 129 | 130 | if (len(shape_1) != 2) or (len(shape_2) != 2): 131 | raise MadlibException("Input arrays must have shape (N,3).") 132 | 133 | if (shape_1[1] != 3) or (shape_2[1] != 3): 134 | raise MadlibException("Input arrays must have shape (N,3).") 135 | 136 | if shape_1[0] != shape_2[0]: 137 | raise MadlibException( 138 | "Input arrays must have the same number of rows. 
" 139 | f"The first input has {shape_1[0]} rows, and " 140 | f"the second input has {shape_2[0]}" 141 | ) 142 | 143 | norm_1 = np.linalg.norm(v1, axis=1, keepdims=True) 144 | norm_2 = np.linalg.norm(v2, axis=1, keepdims=True) 145 | 146 | v1_u = v1 / norm_1 147 | v2_u = v2 / norm_2 148 | 149 | dot = np.clip(np.sum(v1_u * v2_u, axis=1), -1.0, 1.0) 150 | angles = np.arccos(dot) 151 | 152 | if in_deg: 153 | angles *= 180.0 / np.pi 154 | 155 | return angles 156 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | # File created using '.gitignore Generator' for Visual Studio Code: https://bit.ly/vscode-gig 5 | # Created by https://www.toptal.com/developers/gitignore/api/windows,visualstudiocode,macos,linux,python 6 | # Edit at https://www.toptal.com/developers/gitignore?templates=windows,visualstudiocode,macos,linux,python 7 | 8 | ### Linux ### 9 | *~ 10 | 11 | # temporary files which can be created if a process still has a handle open of a deleted file 12 | .fuse_hidden* 13 | 14 | # KDE directory preferences 15 | .directory 16 | 17 | # Linux trash folder which might appear on any partition or disk 18 | .Trash-* 19 | 20 | # .nfs files are created when an open file is removed but is still being accessed 21 | .nfs* 22 | 23 | ### macOS ### 24 | # General 25 | .DS_Store 26 | .AppleDouble 27 | .LSOverride 28 | 29 | # Icon must end with two \r 30 | Icon 31 | 32 | 33 | # Thumbnails 34 | ._* 35 | 36 | # Files that might appear in the root of a volume 37 | .DocumentRevisions-V100 38 | .fseventsd 39 | .Spotlight-V100 40 | .TemporaryItems 41 | .Trashes 42 | .VolumeIcon.icns 43 | .com.apple.timemachine.donotpresent 44 | 45 | # Directories potentially created on remote AFP share 46 | .AppleDB 47 | .AppleDesktop 48 | Network Trash Folder 49 | Temporary Items 50 | .apdisk 51 | 52 | ### macOS Patch ### 53 | # iCloud generated files 54 | *.icloud 55 | 56 | ### Python ### 57 | # Byte-compiled / optimized / DLL files 58 | __pycache__/ 59 | *.py[cod] 60 | *$py.class 61 | 62 | # C extensions 63 | *.so 64 | 65 | # Distribution / packaging 66 | .Python 67 | build/ 68 | develop-eggs/ 69 | dist/ 70 | downloads/ 71 | eggs/ 72 | .eggs/ 73 | lib/ 74 | lib64/ 75 | parts/ 76 | sdist/ 77 | var/ 78 | wheels/ 79 | share/python-wheels/ 80 | *.egg-info/ 81 | .installed.cfg 82 | *.egg 83 | MANIFEST 84 | 85 | # PyInstaller 86 | # Usually these files are written by a python script from a template 87 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
88 | *.manifest 89 | *.spec 90 | 91 | # Installer logs 92 | pip-log.txt 93 | pip-delete-this-directory.txt 94 | 95 | # Unit test / coverage reports 96 | htmlcov/ 97 | .tox/ 98 | .nox/ 99 | .coverage 100 | .coverage.* 101 | .cache 102 | nosetests.xml 103 | coverage.xml 104 | *.cover 105 | *.py,cover 106 | .hypothesis/ 107 | .pytest_cache/ 108 | cover/ 109 | 110 | # Translations 111 | *.mo 112 | *.pot 113 | 114 | # Django stuff: 115 | *.log 116 | local_settings.py 117 | db.sqlite3 118 | db.sqlite3-journal 119 | 120 | # Flask stuff: 121 | instance/ 122 | .webassets-cache 123 | 124 | # Scrapy stuff: 125 | .scrapy 126 | 127 | # Sphinx documentation 128 | docs/_build/ 129 | 130 | # Build and docs folder/files 131 | build/* 132 | dist/* 133 | sdist/* 134 | docs/api/* 135 | docs/_rst/* 136 | docs/_build/* 137 | docs/generated/* 138 | cover/* 139 | MANIFEST 140 | 141 | # PyBuilder 142 | .pybuilder/ 143 | target/ 144 | 145 | # Jupyter Notebook 146 | .ipynb_checkpoints 147 | 148 | # IPython 149 | profile_default/ 150 | ipython_config.py 151 | 152 | # pyenv 153 | # For a library or package, you might want to ignore these files since the code is 154 | # intended to run in multiple environments; otherwise, check them in: 155 | # .python-version 156 | 157 | # pipenv 158 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 159 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 160 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 161 | # install all needed dependencies. 162 | #Pipfile.lock 163 | 164 | # poetry 165 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 166 | # This is especially recommended for binary packages to ensure reproducibility, and is more 167 | # commonly ignored for libraries. 168 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 169 | #poetry.lock 170 | 171 | # pdm 172 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 173 | #pdm.lock 174 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 175 | # in version control. 176 | # https://pdm.fming.dev/#use-with-ide 177 | .pdm.toml 178 | 179 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 180 | __pypackages__/ 181 | 182 | # Celery stuff 183 | celerybeat-schedule 184 | celerybeat.pid 185 | 186 | # SageMath parsed files 187 | *.sage.py 188 | 189 | # Environments 190 | .env 191 | .venv 192 | env/ 193 | venv/ 194 | ENV/ 195 | env.bak/ 196 | venv.bak/ 197 | 198 | # Spyder project settings 199 | .spyderproject 200 | .spyproject 201 | 202 | # Rope project settings 203 | .ropeproject 204 | 205 | # mkdocs documentation 206 | /site 207 | 208 | # mypy 209 | .mypy_cache/ 210 | .dmypy.json 211 | dmypy.json 212 | 213 | # Pyre type checker 214 | .pyre/ 215 | 216 | # pytype static type analyzer 217 | .pytype/ 218 | 219 | # Cython debug symbols 220 | cython_debug/ 221 | 222 | # PyCharm 223 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 224 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 225 | # and can be added to the global gitignore or merged into this file. For a more nuclear 226 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
227 | #.idea/ 228 | 229 | ### Python Patch ### 230 | # Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration 231 | poetry.toml 232 | 233 | # ruff 234 | .ruff_cache/ 235 | 236 | # LSP config files 237 | pyrightconfig.json 238 | 239 | ### VisualStudioCode ### 240 | .vscode/ 241 | # !.vscode/settings.json 242 | # !.vscode/tasks.json 243 | # !.vscode/launch.json 244 | # !.vscode/extensions.json 245 | # !.vscode/*.code-snippets 246 | 247 | # Local History for Visual Studio Code 248 | .history/ 249 | 250 | # Built Visual Studio Code Extensions 251 | *.vsix 252 | 253 | ### VisualStudioCode Patch ### 254 | # Ignore all local history of files 255 | .history 256 | .ionide 257 | 258 | ### Windows ### 259 | # Windows thumbnail cache files 260 | Thumbs.db 261 | Thumbs.db:encryptable 262 | ehthumbs.db 263 | ehthumbs_vista.db 264 | 265 | # Dump file 266 | *.stackdump 267 | 268 | # Folder config file 269 | [Dd]esktop.ini 270 | 271 | # Recycle Bin used on file shares 272 | $RECYCLE.BIN/ 273 | 274 | # Windows Installer files 275 | *.cab 276 | *.msi 277 | *.msix 278 | *.msm 279 | *.msp 280 | 281 | # Windows shortcuts 282 | *.lnk 283 | 284 | # End of https://www.toptal.com/developers/gitignore/api/windows,visualstudiocode,macos,linux,python 285 | 286 | # Custom rules (everything added below won't be overriden by 'Generate .gitignore File' if you use 'Update' option) 287 | 288 | # Hydra-Zen outputs 289 | multirun/ 290 | 291 | # Other 292 | tmp/ 293 | scratch/ 294 | outputs/ 295 | examples/example_outputs/ 296 | 297 | # pytest and pytest-cov 298 | .coverage 299 | .coveragerc 300 | coverage_re/ 301 | htmlcov/ 302 | lcov.info 303 | tests/outputs 304 | pytest.ini 305 | 306 | # Configuration files that should not be published via git 307 | configs/private* -------------------------------------------------------------------------------- /tests/test_madlib_sensor_collection.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | """ 5 | Test file: test_madlib_sensor.py 6 | Description: This file contains unit tests which test "edge cases" in 7 | the madlib._sensor module. 
8 | """ 9 | 10 | import astroforge as af 11 | import numpy as np 12 | from astropy.time import Time 13 | 14 | import madlib 15 | from madlib._utils import MadlibException 16 | from madlib._sensor_collection import SensorCollection, SensorException 17 | 18 | import pytest 19 | 20 | 21 | seed = 4445 22 | 23 | yaml = "configs/sample_sensor_network.yaml" 24 | 25 | 26 | def test_SensorCollection_init_from_params(): 27 | # Sensors 28 | sensor_params = SensorCollection.paramsFromYAML(yaml) 29 | sensor_objects = [ 30 | madlib.GroundOpticalSensor(**params) for key, params in sensor_params.items() 31 | ] 32 | sensors = SensorCollection(sensor_objects) 33 | 34 | assert isinstance(sensors, madlib._sensor_collection.SensorCollection) 35 | assert sensors.sensorList == sensor_objects 36 | assert sensors.numSensors == len(sensor_objects) 37 | assert sensors.obsTimes == None 38 | 39 | 40 | def test_SensorCollection_init_from_yaml(): 41 | sensors = SensorCollection.fromYAML(yaml) 42 | 43 | assert isinstance(sensors, madlib._sensor_collection.SensorCollection) 44 | assert sensors.numSensors == 11 45 | assert sensors.obsTimes == None 46 | 47 | 48 | def test_SensorCollection_generate_obs_timing(): 49 | sensors = SensorCollection.fromYAML(yaml) 50 | 51 | ### SIMULATION TIMING 52 | t_start_mjd = Time("2011-11-11T11:11:11", format="isot", scale="utc").mjd 53 | t_end_mjd = t_start_mjd + 1 54 | 55 | assert sensors.obsTimes == None 56 | sensors.generate_obs_timing(t_start_mjd, t_end_mjd) 57 | assert sensors.obsTimes != None 58 | 59 | with pytest.raises( 60 | SensorException 61 | ): # The observation schedule has already been generated. 62 | sensors.generate_obs_timing(t_start_mjd, t_end_mjd) 63 | 64 | 65 | def test_add_sensor_post_timing(): 66 | sensors = SensorCollection.fromYAML(yaml) 67 | 68 | ### SIMULATION TIMING 69 | t_start_mjd = Time("2011-11-11T11:11:11", format="isot", scale="utc").mjd 70 | t_end_mjd = t_start_mjd + 1 71 | 72 | SENSOR_PARAMS = { 73 | "lat": -21.89567, 74 | "lon": 114.0898731, 75 | "alt": 0.067845, 76 | "dra": 0.3, 77 | "ddec": 0.3, 78 | "obs_per_collect": 3, 79 | "obs_time_spacing": 1.5, 80 | "collect_gap_mean": 7200, 81 | "collect_gap_std": 800, 82 | "obs_limits": None, 83 | "id": "SENSOR", 84 | } 85 | 86 | assert sensors.numSensors == 11 87 | 88 | new_sensor_1 = madlib.GroundOpticalSensor(**SENSOR_PARAMS) 89 | sensors.add_sensor(new_sensor_1) 90 | assert sensors.numSensors == 12 91 | 92 | sensors.generate_obs_timing(t_start_mjd, t_end_mjd) 93 | 94 | new_sensor_2 = madlib.GroundOpticalSensor(**SENSOR_PARAMS) 95 | 96 | with pytest.raises(SensorException): 97 | sensors.add_sensor(new_sensor_2) 98 | 99 | assert sensors.numSensors == 12 100 | 101 | 102 | def test_SensorCollection_observe(seed=seed): 103 | # --- generate an ObservationCollection 104 | # --- orbital state 105 | np.random.seed(seed) 106 | th = 2 * np.pi * np.random.rand() 107 | x0 = af.constants.Rgeo * np.array([np.cos(th), np.sin(th), 0.0]) 108 | v0 = af.constants.Vgeo * np.array([-np.sin(th), np.cos(th), 0.0]) 109 | a0 = np.zeros((3,)) 110 | t_start = Time("2011-11-11T11:11:11", format="isot", scale="utc") 111 | epoch = t_start.mjd 112 | 113 | # --- define satellites 114 | sat0 = madlib.Satellite(epoch, x0, v0, a0) 115 | 116 | # --- sensors 117 | sensors = SensorCollection.fromYAML(yaml) 118 | 119 | # --- setup sensor and generate ObservationCollection(s) 120 | sensors.generate_obs_timing(epoch, epoch + 1) # + 1 [days] 121 | 122 | observations = sensors.observe(sat0) 123 | assert isinstance(observations, 
madlib._observation.ObservationCollection) 124 | assert isinstance(observations.pos_observed[0], madlib._observation.Observation) 125 | assert isinstance(observations.pos_truth[0], madlib._observation.Observation) 126 | 127 | 128 | def test_SensorCollection_observe_student_dist(seed=seed): 129 | # --- generate an ObservationCollection with different noise distributions 130 | # --- orbital state 131 | np.random.seed(seed) 132 | th = 2 * np.pi * np.random.rand() 133 | x0 = af.constants.Rgeo * np.array([np.cos(th), np.sin(th), 0.0]) 134 | v0 = af.constants.Vgeo * np.array([-np.sin(th), np.cos(th), 0.0]) 135 | a0 = np.zeros((3,)) 136 | t_start = Time("2011-11-11T11:11:11", format="isot", scale="utc") 137 | epoch = t_start.mjd 138 | 139 | # --- define satellites 140 | sat0 = madlib.Satellite(epoch, x0, v0, a0) 141 | 142 | # --- sensors 143 | sensors = SensorCollection.fromYAML("tests/inputs/students_t_sensors.yaml") 144 | 145 | # --- setup sensor and generate ObservationCollection(s) 146 | sensors.generate_obs_timing(epoch, epoch + 1) # + 1 [days] 147 | 148 | observations = sensors.observe(sat0) 149 | assert isinstance(observations, madlib._observation.ObservationCollection) 150 | assert isinstance(observations.pos_observed[0], madlib._observation.Observation) 151 | assert isinstance(observations.pos_truth[0], madlib._observation.Observation) 152 | 153 | 154 | def test_SensorCollection_error(seed=seed): 155 | """Try and fail to make observations without generating obs timing""" 156 | # --- generate an ObservationCollection 157 | # --- orbital state 158 | np.random.seed(seed) 159 | th = 2 * np.pi * np.random.rand() 160 | x0 = af.constants.Rgeo * np.array([np.cos(th), np.sin(th), 0.0]) 161 | v0 = af.constants.Vgeo * np.array([-np.sin(th), np.cos(th), 0.0]) 162 | a0 = np.zeros((3,)) 163 | t_start = Time("2011-11-11T11:11:11", format="isot", scale="utc") 164 | epoch = t_start.mjd 165 | 166 | # --- define satellites 167 | sat0 = madlib.Satellite(epoch, x0, v0, a0) 168 | 169 | # --- sensors 170 | sensors = SensorCollection.fromYAML(yaml) 171 | 172 | failed = False 173 | try: 174 | observations = sensors.observe(sat0) 175 | except SensorException: 176 | failed = True 177 | 178 | assert failed 179 | 180 | 181 | class TestInvalidYAML: 182 | """Group of tests for handling invalid YAML structures""" 183 | 184 | def test_missing_sensor_list(self): 185 | """Every sensor network YAML needs a top-level attribute called sensor_list.""" 186 | failed = False 187 | try: 188 | params = SensorCollection.paramsFromYAML( 189 | "tests/inputs/invalid_sensor_1.yaml" 190 | ) 191 | except MadlibException: 192 | failed = True 193 | 194 | assert failed 195 | 196 | def test_missing_required(self): 197 | """Make sure a YAML is invalid if one of its sensors is missing a required attribute.""" 198 | failed = False 199 | try: 200 | params = SensorCollection.paramsFromYAML( 201 | "tests/inputs/invalid_sensor_2.yaml" 202 | ) 203 | except MadlibException: 204 | failed = True 205 | 206 | assert failed 207 | 208 | def test_unknown_property(self): 209 | """Make sure a YAML is invalid if one of its sensors has an unexpected 210 | (in this case, misspelled) property.""" 211 | failed = False 212 | try: 213 | params = SensorCollection.paramsFromYAML( 214 | "tests/inputs/invalid_sensor_3.yaml" 215 | ) 216 | except MadlibException: 217 | failed = True 218 | 219 | assert failed 220 | -------------------------------------------------------------------------------- /src/madlib/_observation.py: 
-------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | from dataclasses import dataclass 5 | from typing import List, Self 6 | 7 | import numpy as np 8 | from numpy.typing import NDArray 9 | 10 | from ._utils import MadlibException 11 | 12 | 13 | @dataclass(kw_only=True) 14 | class Observation: 15 | """ 16 | Class for holding observables. All angles are in degrees. 17 | 18 | Parameters 19 | ---------- 20 | mjd : float 21 | Timestamp of the observation, described as a MJD in UTC 22 | 23 | ra : float | None 24 | Topocentric right ascension angle, by default None 25 | 26 | dec : float | None 27 | Topocentric declination angle, by default None 28 | 29 | az : float | None 30 | Azimuth angle, by default None 31 | 32 | el : float | None 33 | Elevation angle, by default None 34 | 35 | range_ : float | None 36 | Distance between sensor and target, by default None 37 | 38 | range_rate : float | None 39 | Time rate of change of the distance between the sensor and 40 | target, by default None 41 | 42 | lat : float | None 43 | Geodetic latitude, by default None 44 | 45 | lon : float | None 46 | Geodetic longitude, by default None 47 | 48 | sun_el : float | None 49 | Elevation angle of sun, by default None 50 | 51 | sun_separation : float | None 52 | Separation angle between target and sun, by default None 53 | 54 | sensor_id : str | None 55 | Unique Sensor ID, by default None 56 | """ 57 | 58 | # time (MJD, UTC) 59 | mjd: float 60 | 61 | # standard observables for optics 62 | ra: float | None = None 63 | dec: float | None = None 64 | 65 | # standard observables for radar 66 | az: float | None = None 67 | el: float | None = None 68 | range_: float | None = None 69 | range_rate: float | None = None 70 | 71 | # non-standard observables 72 | lat: float | None = None 73 | lon: float | None = None 74 | 75 | # Position of sun 76 | sun_el: float | None = None 77 | sun_separation: float | None = None 78 | 79 | # Sensor ID for bookkeeping 80 | sensor_id: str | None = None 81 | 82 | _keys = [ 83 | "ra", 84 | "dec", 85 | "az", 86 | "el", 87 | "range_", 88 | "range_rate", 89 | "lat", 90 | "lon", 91 | "sun_el", 92 | "sun_separation", 93 | ] 94 | 95 | def __sub__(self, other: Self) -> "ObservationResidual": 96 | """Subtracts another instance of Observation class. 
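Angles that wrap at 0/360 degrees (``ra``, ``az``, ``lon``) are unwrapped before differencing, so subtracting an observation at 359.0 degrees right ascension from one at 2.0 degrees yields a residual of +3.0 degrees rather than -357.0.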
97 | 98 | Parameters 99 | ---------- 100 | other : Self 101 | The other instance of Observation class to subract 102 | 103 | Returns 104 | ------- 105 | Self 106 | The subtracted result 107 | 108 | Raises 109 | ------ 110 | MadlibException 111 | Can only subtract two Observation objects 112 | MadlibException 113 | Observations must be at the same time for computing a residual 114 | """ 115 | 116 | if not isinstance(other, Observation): 117 | raise MadlibException("Can only subtract two Observation objects") 118 | 119 | d1 = self.__dict__ 120 | d2 = other.__dict__ 121 | diff = { 122 | key: d1[key] - d2[key] 123 | for key in self._keys 124 | if d1[key] is not None and d2[key] is not None 125 | } 126 | 127 | # special handling of angles that wrap at 0/360 128 | for key in ("ra", "az", "lon"): 129 | if d1[key] is not None and d2[key] is not None: 130 | temp = np.unwrap(np.array([d1[key], d2[key]]), period=360) 131 | diff[key] = temp[0] - temp[1] 132 | 133 | # add the timestamp to the dict 134 | if abs(d1["mjd"] - d2["mjd"]) > 1e-9: 135 | raise MadlibException( 136 | "Observations must be at the same time for computing a residual" 137 | ) 138 | 139 | diff["mjd"] = d1["mjd"] 140 | 141 | # Remove the solar elevation 142 | _ = diff.pop("sun_el", None) 143 | 144 | return ObservationResidual(**diff) 145 | 146 | def asarray(self) -> NDArray[np.float64]: 147 | """Convert this observation to a flat 1-D array""" 148 | return np.array( 149 | [val if val is not None else np.NaN for val in self.__dict__.values()] 150 | ) 151 | 152 | 153 | @dataclass(kw_only=True) 154 | class ObservationResidual: 155 | """ 156 | Class for holding the difference between two observables. 157 | 158 | Parameters 159 | ---------- 160 | mjd : float 161 | Timestamp of the observation, described as a MJD in UTC 162 | 163 | ra : float | None 164 | Topocentric right ascension angle difference, by default None 165 | 166 | dec : float | None 167 | Topocentric declination angle difference, by default None 168 | 169 | az : float | None 170 | Azimuth angle difference, by default None 171 | 172 | el : float | None 173 | Elevation angle difference, by default None 174 | 175 | range_ : float | None 176 | Distance between sensor and target difference, by default None 177 | 178 | range_rate : float | None 179 | Time rate of change of the distance between the sensor and target 180 | difference, by default None 181 | 182 | lat : float | None 183 | Geodetic latitude difference, by default None 184 | 185 | lon : float | None 186 | Geodetic longitude difference, by default None 187 | """ 188 | 189 | # time (MJD, UTC) 190 | mjd: float 191 | 192 | # standard observables for optics 193 | ra: float | None = None 194 | dec: float | None = None 195 | 196 | # standard observables for radar 197 | az: float | None = None 198 | el: float | None = None 199 | range_: float | None = None 200 | range_rate: float | None = None 201 | 202 | # non-standard observables 203 | lat: float | None = None 204 | lon: float | None = None 205 | 206 | def asarray(self) -> NDArray[np.float64]: 207 | """Convert this observation to a flat 1-D array""" 208 | return np.array( 209 | [val if val is not None else np.NaN for val in self.__dict__.values()] 210 | ) 211 | 212 | 213 | @dataclass(kw_only=True) 214 | class ObservationCollection: 215 | """Class for holding observed and true positions of satellites. 
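Adding two collections with ``+`` concatenates the right-hand collection's arrays onto this collection in place; to merge several collections into a new object, use :func:`combineObsCollections`.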
216 | 217 | Parameters 218 | ---------- 219 | pos_observed: np.ndarray[Observation, np.dtype[np.float64]] 220 | Realistic observations of a satellite given sensor noise parameters 221 | 222 | pos_truth: np.ndarray[Observation, np.dtype[np.float64]] 223 | True observations of a satellite ignoring all noise sources 224 | 225 | pos_expected: np.ndarray[Observation, np.dtype[np.float64]] 226 | Observations expected if no noise or maneuvers occur 227 | 228 | Raises 229 | ------ 230 | MadlibException 231 | Can only add two ObservationCollection objects 232 | """ 233 | 234 | pos_observed: np.ndarray[Observation, np.dtype[np.float64]] 235 | pos_truth: np.ndarray[Observation, np.dtype[np.float64]] 236 | pos_expected: np.ndarray[Observation, np.dtype[np.float64]] 237 | 238 | def __add__(self, other: "ObservationCollection"): 239 | self.pos_observed = np.concatenate((self.pos_observed, other.pos_observed)) 240 | self.pos_truth = np.concatenate((self.pos_truth, other.pos_truth)) 241 | self.pos_expected = np.concatenate((self.pos_expected, other.pos_expected)) 242 | 243 | def count_valid_observations(self): 244 | return len(self.pos_observed) 245 | 246 | 247 | def combineObsCollections( 248 | collectionList: List[ObservationCollection], 249 | ) -> ObservationCollection: 250 | """Given observations of a satellite from multiple sensors, combine them into a single object. 251 | 252 | Parameters 253 | ---------- 254 | collectionList : List[ObservationCollection] 255 | List of observations from multiple sensors of a single satellite. 256 | 257 | Returns 258 | ------- 259 | ObservationCollection 260 | Combined collection of observations from all sensors. 261 | """ 262 | 263 | pos_observed = np.concatenate([col.pos_observed for col in collectionList]) 264 | pos_truth = np.concatenate([col.pos_truth for col in collectionList]) 265 | pos_expected = np.concatenate([col.pos_expected for col in collectionList]) 266 | 267 | combinedCollection = ObservationCollection( 268 | pos_observed=pos_observed, 269 | pos_truth=pos_truth, 270 | pos_expected=pos_expected, 271 | ) 272 | 273 | return combinedCollection 274 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # This file is execfile()d with the current directory set to its containing dir. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | # 7 | # All configuration values have a default; values that are commented out 8 | # serve to show the default. 9 | 10 | import os 11 | import sys 12 | import shutil 13 | 14 | # -- Path setup -------------------------------------------------------------- 15 | 16 | __location__ = os.path.dirname(__file__) 17 | 18 | # If extensions (or modules to document with autodoc) are in another directory, 19 | # add these directories to sys.path here. If the directory is relative to the 20 | # documentation root, use os.path.abspath to make it absolute, like shown here. 21 | sys.path.insert(0, os.path.join(__location__, "../src")) 22 | 23 | # -- Run sphinx-apidoc ------------------------------------------------------- 24 | # This hack is necessary since RTD does not issue `sphinx-apidoc` before running 25 | # `sphinx-build -b html . _build/html`. 
See Issue: 26 | # https://github.com/readthedocs/readthedocs.org/issues/1139 27 | # DON'T FORGET: Check the box "Install your project inside a virtualenv using 28 | # setup.py install" in the RTD Advanced Settings. 29 | # Additionally it helps us to avoid running apidoc manually 30 | 31 | try: # for Sphinx >= 1.7 32 | from sphinx.ext import apidoc 33 | except ImportError: 34 | from sphinx import apidoc 35 | 36 | output_dir = os.path.join(__location__, "api") 37 | module_dir = os.path.join(__location__, "../src/madlib") 38 | try: 39 | shutil.rmtree(output_dir) 40 | except FileNotFoundError: 41 | pass 42 | 43 | try: 44 | import sphinx 45 | 46 | cmd_line = ( 47 | f"sphinx-apidoc --private --implicit-namespaces -f -o {output_dir} {module_dir}" 48 | ) 49 | 50 | args = cmd_line.split(" ") 51 | if tuple(sphinx.__version__.split(".")) >= ("1", "7"): 52 | # This is a rudimentary parse_version to avoid external dependencies 53 | args = args[1:] 54 | 55 | apidoc.main(args) 56 | except Exception as e: 57 | print("Running `sphinx-apidoc` failed!\n{}".format(e)) 58 | 59 | # -- General configuration --------------------------------------------------- 60 | 61 | # If your documentation needs a minimal Sphinx version, state it here. 62 | # needs_sphinx = '1.0' 63 | 64 | # Add any Sphinx extension module names here, as strings. They can be extensions 65 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 66 | extensions = [ 67 | "sphinx.ext.autodoc", 68 | "sphinx.ext.intersphinx", 69 | "sphinx.ext.todo", 70 | "sphinx.ext.autosummary", 71 | "sphinx.ext.viewcode", 72 | "sphinx.ext.coverage", 73 | "sphinx.ext.doctest", 74 | "sphinx.ext.ifconfig", 75 | "sphinx.ext.mathjax", 76 | "sphinx.ext.napoleon", 77 | "sphinx_design", 78 | "nbsphinx", 79 | ] 80 | 81 | # Add any paths that contain templates here, relative to this directory. 82 | templates_path = ["_templates"] 83 | 84 | 85 | # Enable markdown 86 | extensions.append("myst_parser") 87 | 88 | # Configure MyST-Parser 89 | myst_enable_extensions = [ 90 | "amsmath", 91 | "colon_fence", 92 | "deflist", 93 | "dollarmath", 94 | "html_image", 95 | "replacements", 96 | "smartquotes", 97 | "substitution", 98 | "tasklist", 99 | ] 100 | 101 | # The suffix of source filenames. 102 | source_suffix = [".rst", ".md"] 103 | 104 | # The encoding of source files. 105 | # source_encoding = 'utf-8-sig' 106 | 107 | # The master toctree document. 108 | master_doc = "index" 109 | 110 | # General information about the project. 111 | project = "MaDDG" 112 | copyright = "2024, Massachusetts Institute of Technology" 113 | 114 | # The version info for the project you're documenting, acts as replacement for 115 | # |version| and |release|, also used in various other places throughout the 116 | # built documents. 117 | # 118 | # version: The short X.Y version. 119 | # release: The full version, including alpha/beta/rc tags. 120 | # If you don’t need the separation provided between version and release, 121 | # just set them both to the same value. 122 | try: 123 | from madlib import __version__ as version 124 | 125 | v_split = version.split(".") 126 | version = f"{v_split[0]}.{v_split[1]}" 127 | release = f"{v_split[0]}.{v_split[1]}.{v_split[2]}" 128 | except ImportError: 129 | version = "" 130 | release = "" 131 | 132 | 133 | # The language for content autogenerated by Sphinx. Refer to documentation 134 | # for a list of supported languages. 
135 | # language = None 136 | 137 | # There are two options for replacing |today|: either, you set today to some 138 | # non-false value, then it is used: 139 | # today = '' 140 | # Else, today_fmt is used as the format for a strftime call. 141 | # today_fmt = '%B %d, %Y' 142 | 143 | # List of patterns, relative to source directory, that match files and 144 | # directories to ignore when looking for source files. 145 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", ".venv"] 146 | 147 | # The reST default role (used for this markup: `text`) to use for all documents. 148 | # default_role = None 149 | 150 | # If true, '()' will be appended to :func: etc. cross-reference text. 151 | # add_function_parentheses = True 152 | 153 | # If true, the current module name will be prepended to all description 154 | # unit titles (such as .. function::). 155 | # add_module_names = True 156 | 157 | # If true, sectionauthor and moduleauthor directives will be shown in the 158 | # output. They are ignored by default. 159 | # show_authors = False 160 | 161 | # The name of the Pygments (syntax highlighting) style to use. 162 | pygments_style = "sphinx" 163 | 164 | # A list of ignored prefixes for module index sorting. 165 | # modindex_common_prefix = [] 166 | 167 | # If true, keep warnings as "system message" paragraphs in the built documents. 168 | # keep_warnings = False 169 | 170 | # If this is True, todo emits a warning for each TODO entries. The default is False. 171 | todo_emit_warnings = True 172 | 173 | 174 | # -- Options for HTML output ------------------------------------------------- 175 | 176 | # The theme to use for HTML and HTML Help pages. See the documentation for 177 | # a list of builtin themes. 178 | html_theme = "alabaster" 179 | 180 | # Theme options are theme-specific and customize the look and feel of a theme 181 | # further. For a list of options available for each theme, see the 182 | # documentation. 183 | #html_theme_options = {"sidebar_width": "300px", "page_width": "1200px"} 184 | html_theme_options = { 185 | "collapse_navigation": True, 186 | "navigation_depth": 4, 187 | "pygment_light_style": "default", 188 | "pygment_dark_style": "zenburn", 189 | } 190 | 191 | # Add any paths that contain custom themes here, relative to this directory. 192 | # html_theme_path = [] 193 | 194 | # The name for this set of Sphinx documents. If None, it defaults to 195 | # " v documentation". 196 | # html_title = None 197 | 198 | # A shorter title for the navigation bar. Default is the same as html_title. 199 | # html_short_title = None 200 | 201 | # The name of an image file (relative to this directory) to place at the top 202 | # of the sidebar. 203 | # html_logo = "" 204 | 205 | # The name of an image file (within the static path) to use as favicon of the 206 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 207 | # pixels large. 208 | # html_favicon = None 209 | 210 | # Add any paths that contain custom static files (such as style sheets) here, 211 | # relative to this directory. They are copied after the builtin static files, 212 | # so a file named "default.css" will overwrite the builtin "default.css". 213 | html_static_path = ["_static"] 214 | 215 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 216 | # using the given strftime format. 217 | # html_last_updated_fmt = '%b %d, %Y' 218 | 219 | # If true, SmartyPants will be used to convert quotes and dashes to 220 | # typographically correct entities. 
221 | # html_use_smartypants = True 222 | 223 | # Custom sidebar templates, maps document names to template names. 224 | # html_sidebars = {} 225 | 226 | # Additional templates that should be rendered to pages, maps page names to 227 | # template names. 228 | # html_additional_pages = {} 229 | 230 | # If false, no module index is generated. 231 | # html_domain_indices = True 232 | 233 | # If false, no index is generated. 234 | # html_use_index = True 235 | 236 | # If true, the index is split into individual pages for each letter. 237 | # html_split_index = False 238 | 239 | # If true, links to the reST sources are added to the pages. 240 | # html_show_sourcelink = True 241 | 242 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 243 | # html_show_sphinx = True 244 | 245 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 246 | # html_show_copyright = True 247 | 248 | # If true, an OpenSearch description file will be output, and all pages will 249 | # contain a tag referring to it. The value of this option must be the 250 | # base URL from which the finished HTML is served. 251 | # html_use_opensearch = '' 252 | 253 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 254 | # html_file_suffix = None 255 | 256 | # Output file base name for HTML help builder. 257 | htmlhelp_basename = "MaDDG-doc" 258 | 259 | 260 | # -- Options for LaTeX output ------------------------------------------------ 261 | 262 | latex_elements = { 263 | # The paper size ("letterpaper" or "a4paper"). 264 | # "papersize": "letterpaper", 265 | # The font size ("10pt", "11pt" or "12pt"). 266 | # "pointsize": "10pt", 267 | # Additional stuff for the LaTeX preamble. 268 | # "preamble": "", 269 | } 270 | 271 | # Grouping the document tree into LaTeX files. List of tuples 272 | # (source start file, target name, title, author, documentclass [howto/manual]). 273 | latex_documents = [ 274 | ("index", "user_guide.tex", "MaDDG Documentation", "Ryan Sullenberger", "manual") 275 | ] 276 | 277 | # The name of an image file (relative to this directory) to place at the top of 278 | # the title page. 279 | # latex_logo = "" 280 | 281 | # For "manual" documents, if this is true, then toplevel headings are parts, 282 | # not chapters. 283 | # latex_use_parts = False 284 | 285 | # If true, show page references after internal links. 286 | # latex_show_pagerefs = False 287 | 288 | # If true, show URL addresses after external links. 289 | # latex_show_urls = False 290 | 291 | # Documents to append as an appendix to all manuals. 292 | # latex_appendices = [] 293 | 294 | # If false, no module index is generated. 
295 | # latex_domain_indices = True 296 | 297 | # -- External mapping -------------------------------------------------------- 298 | python_version = ".".join(map(str, sys.version_info[0:2])) 299 | intersphinx_mapping = { 300 | "sphinx": ("https://www.sphinx-doc.org/en/master", None), 301 | "python": ("https://docs.python.org/" + python_version, None), 302 | "matplotlib": ("https://matplotlib.org", None), 303 | "numpy": ("https://numpy.org/doc/stable", None), 304 | "sklearn": ("https://scikit-learn.org/stable", None), 305 | "pandas": ("https://pandas.pydata.org/pandas-docs/stable", None), 306 | "scipy": ("https://docs.scipy.org/doc/scipy/reference", None), 307 | "setuptools": ("https://setuptools.pypa.io/en/stable/", None), 308 | "pyscaffold": ("https://pyscaffold.org/en/stable", None), 309 | } 310 | 311 | print(f"loading configurations for {project} {version} ...", file=sys.stderr) 312 | -------------------------------------------------------------------------------- /src/maddg/_sim_launcher.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | import json 5 | import shutil 6 | from pathlib import Path 7 | from typing import Callable, Tuple 8 | 9 | import numpy as np 10 | import pandas as pd 11 | from hydra_zen import launch, make_config 12 | 13 | from madlib import SensorCollection 14 | from madlib._utils import MadlibException 15 | 16 | 17 | class NotImplementedError(Exception): 18 | """NotImplementedError Exception""" 19 | 20 | pass 21 | 22 | 23 | def create_task_fn(method: Callable) -> Callable: 24 | """Create a task function for hydra. Config (cfg) will be passed in via hydra. 25 | 26 | Parameters 27 | ---------- 28 | method : Callable 29 | _description_ 30 | 31 | Returns 32 | ------- 33 | Callable 34 | _description_ 35 | """ 36 | 37 | def task_fn(cfg): 38 | try: 39 | output = method(**cfg) 40 | 41 | if output is not None: 42 | output.to_csv("output.csv", index=False) 43 | 44 | except MadlibException as e: 45 | with open("error.txt", "w") as f: 46 | f.write(str(e)) 47 | 48 | return task_fn 49 | 50 | 51 | def launcher( 52 | simulator_method: Callable, 53 | mtype: str, 54 | num_sim_pairs: int, 55 | sensor_yaml: str | Path, 56 | outdir: str | Path, 57 | dv_ric_mean_kms: Tuple[float, float, float] = (0.0, 0.0, 0.0), 58 | dv_ric_std_kms: Tuple[float, float, float] = (0.0, 0.1, 1.0), 59 | cont_thrust_duration_days: float | None = None, 60 | cont_thrust_mag: float = 1e-7, 61 | cont_thrust_model: int = 0, 62 | submitit: str = "", 63 | multirun_root: str = "", 64 | rm_multirun_root: bool = False, 65 | start_mjd: float | None = None, 66 | sim_duration_days: float = 3.0, 67 | random_seed: int | None = None, 68 | pred_err: float = 0.0, 69 | sensor_dra: float | None = None, 70 | sensor_ddec: float | None = None, 71 | sims_per_task: int = 1, 72 | ) -> None: 73 | """Hydra job launcher wrapper. 74 | 75 | Parameters 76 | ---------- 77 | simulator_method : Callable 78 | Task function 79 | mtype : str 80 | Maneuver type: 81 | "impulse" = ImpulseManeuver, 82 | "continuous" = ContinuousManeuver, 83 | num_sim_pairs : int 84 | Number of simulations to perform per maneuver type. 
85 | sensor_yaml : str | Path 86 | Path to YAML file defining the sensor network for the simulation 87 | outdir : str | Path 88 | Path to output directory where the concatenated results will be saved in a .csv file (complete.csv) 89 | dv_ric_mean_kms : Tuple[float, float, float], optional 90 | Mean values of normal distributions to use when sampling 91 | the radial, in-track, and cross-track delta-V values, respectively, 92 | of impulsive maneuvers. In units of km/s, by default [0.0, 0.0, 0.0] 93 | dv_ric_std_kms : Tuple[float, float, float], optional 94 | Standard deviations of normal distributions to use when sampling 95 | the radial, in-track, and cross-track delta-V values, respectively, 96 | of impulsive maneuvers. In units of km/s, by default [0.0, 0.1, 1.0] 97 | cont_thrust_duration_days : float | None, optional 98 | Duration in days of the continuous maneuver that begins at simulation start, by default None 99 | (if None, maneuver duration is equal to simulation duration) 100 | cont_thrust_mag : float, optional 101 | Magnitude of the continuous thrust (km/s/s), by default 1e-7 102 | cont_thrust_model : int, optional 103 | Which continuous thrust model to use: 104 | 0 = applies a continuous thrust in the [0,1,0] direction, 105 | 1 = applies a continuous thrust in a random direction, 106 | by default 0 107 | submitit : str, optional 108 | If specified, the path to a config JSON file defining how to launch jobs across multiple GPUs using submitit, by default "" (serial launch only) 109 | multirun_root : str, optional 110 | If specified, the path to a directory where multirun output will be stored, by default "" (./multirun will be used) 111 | rm_multirun_root : bool, optional 112 | Whether or not to delete the hydra multirun directory after finishing the simulation, by default False 113 | start_mjd : float, optional 114 | MJD at which the simulation should begin, by default None (current MJD) 115 | sim_duration_days : float, optional 116 | Duration of the simulation (days), by default 3.0 117 | random_seed : int, optional 118 | Random seed to use for numpy, by default None 119 | pred_err : float, optional 120 | Fractional error on predicted initial orbital state, by default 0.0 121 | sensor_dra : float, optional 122 | Sensor metric accuracy in the right ascension direction (arcsec). 123 | If not set, value is None, and `dra` value in sensor_yaml file 124 | will be used, by default: None 125 | sensor_ddec : float, optional 126 | Sensor metric accuracy in the declination direction (arcsec). 127 | If not set, value is None, and `ddec` value in sensor_yaml file 128 | will be used, by default: None 129 | sims_per_task : int, optional 130 | Number of simulations to perform per task function, by default 1 131 | 132 | Raises 133 | ------ 134 | NotImplementedError 135 | If an mtype was requested that is not yet implemented.
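    Examples
    --------
    A minimal sketch of a typical call. The argument values below are purely
    illustrative; ``simulator_task`` (defined in ``scripts/hz_launcher.py``) is
    one example of a valid ``simulator_method``, assuming it has been imported::

        launcher(
            simulator_method=simulator_task,
            mtype="impulse",
            num_sim_pairs=4,
            sensor_yaml="configs/sample_sensor_network.yaml",
            outdir="outputs",
            sims_per_task=2,
        )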
136 | """ 137 | 138 | error_runs = [] 139 | log_runs = [] 140 | 141 | # Parse the sensor YAML file 142 | sensor_data = SensorCollection.paramsFromYAML(sensor_yaml) 143 | 144 | # Update sensor `dra` if was given as an input argument 145 | if sensor_dra is not None: 146 | for key in sensor_data.keys(): 147 | sensor_data[key]["dra"] = sensor_dra 148 | 149 | # Update sensor `ddec` if was given as an input argument 150 | if sensor_ddec is not None: 151 | for key in sensor_data.keys(): 152 | sensor_data[key]["ddec"] = sensor_ddec 153 | 154 | if cont_thrust_duration_days is None: 155 | cont_thrust_duration_days = sim_duration_days 156 | 157 | Conf = make_config( 158 | seq_id=0, 159 | sensor_params=sensor_data, 160 | maneuver_type=0, 161 | num_sim_pairs=num_sim_pairs, 162 | dv_ric_mean_kms=dv_ric_mean_kms, 163 | dv_ric_std_kms=dv_ric_std_kms, 164 | cont_thrust_duration_days=0, 165 | cont_thrust_mag=1e-7, 166 | cont_thrust_model=0, 167 | start_mjd=start_mjd, 168 | sim_duration_days=sim_duration_days, 169 | random_seed=random_seed, 170 | pred_err=pred_err, 171 | sims_per_task=sims_per_task, 172 | ) 173 | 174 | # useful into to stout 175 | print(f"INFO :: {mtype = }") 176 | print(f"INFO :: {sim_duration_days = }") 177 | print(f"INFO :: {sims_per_task = }") 178 | 179 | # hydra_zen overrides: initialize 180 | overrides = [] 181 | 182 | # hydra_zen overrides: mtype specific cases 183 | if mtype == "impulse": 184 | overrides += [ 185 | f"maneuver_type=0,1", 186 | ] 187 | elif mtype == "continuous": 188 | print(f"INFO :: {cont_thrust_duration_days = }") 189 | print(f"INFO :: {cont_thrust_mag = }") 190 | print(f"INFO :: {cont_thrust_model = }") 191 | overrides += [ 192 | f"maneuver_type=0,2", 193 | f"cont_thrust_duration_days={cont_thrust_duration_days}", 194 | f"cont_thrust_mag={cont_thrust_mag}", 195 | f"cont_thrust_model={cont_thrust_model}", 196 | ] 197 | else: 198 | # all mtypes case? 199 | # maneuver_type=0,1,2 200 | raise NotImplementedError("An mtype was requested that is not yet implemented.") 201 | 202 | # create task function 203 | task_fn = create_task_fn(simulator_method) 204 | 205 | # define seq_id string 206 | # seq_id = ",".join([str(n) for n in range(num_sim_pairs)]) 207 | seq_id = ",".join([str(n) for n in np.arange(0, num_sim_pairs, sims_per_task)]) 208 | 209 | # setup hydra_zen overrides 210 | overrides += [ 211 | f"seq_id={seq_id}", 212 | ] 213 | 214 | # configure submitit if using 215 | if submitit != "": 216 | with open(submitit, "r") as f: 217 | submitit_overrides = json.load(f) 218 | 219 | # Make sure the JSON contents are a list of strings 220 | str_list_check = all(isinstance(n, str) for n in submitit_overrides) 221 | if (type(submitit_overrides) != list) or (not str_list_check): 222 | raise MadlibException( 223 | f"The specified submitit configuration file {submitit} is not formatted properly. " 224 | "The JSON must be a list of strings." 
225 | ) 226 | 227 | overrides += submitit_overrides 228 | 229 | else: 230 | overrides += [ 231 | "hydra.job.chdir=True", 232 | ] 233 | 234 | # configure multirun directory if non-default 235 | if multirun_root != "": 236 | # Append datetime structure to root 237 | multirun_dir = Path(multirun_root) / "${now:%Y-%m-%d}/${now:%H-%M-%S}" 238 | multirun_dir = str(multirun_dir) 239 | overrides += [ 240 | f"hydra.sweep.dir={multirun_dir}", 241 | ] 242 | 243 | # setup outdir 244 | outdir = Path(outdir) 245 | outdir.mkdir(parents=True, exist_ok=True) 246 | 247 | # launch jobs 248 | (jobs,) = launch( 249 | Conf, 250 | task_fn, 251 | multirun=True, 252 | to_dictconfig=True, 253 | overrides=overrides, 254 | version_base="1.3", 255 | ) 256 | 257 | # initialize df_merged dataframe that will contain all output.csv files concatenated 258 | df_merged = pd.DataFrame() 259 | 260 | # get multirun root directory 261 | rundir = Path(jobs[0].working_dir).parent 262 | 263 | # concatenate all output.csv files 264 | jobfiles = sorted(rundir.rglob("output.csv")) 265 | for csv_file in jobfiles: 266 | df_temp = pd.read_csv(csv_file) 267 | df_merged = pd.concat((df_merged, df_temp), ignore_index=True) 268 | 269 | # export merged to disk 270 | df_merged.to_csv(outdir / "complete.csv", index=False) 271 | 272 | # concatenate all error.txt files 273 | errfiles = sorted(rundir.rglob("error.txt")) 274 | for err in errfiles: 275 | with open(err, "r") as f: 276 | text = f.read() 277 | error_runs.append((err, text)) 278 | 279 | # concatenate all zen_launch.log files 280 | logfiles = sorted(rundir.rglob("zen_launch.log")) 281 | for logfile in logfiles: 282 | with open(logfile, "r") as f: 283 | text = f.read() 284 | log_runs.append((logfile, text)) 285 | 286 | # keep track of errors checkpoint-style 287 | with open(outdir / "errors.txt", "w") as f: 288 | for errfile, errtext in error_runs: 289 | f.write(str(errfile) + "\n") 290 | f.write(errtext + "\n\n") 291 | 292 | # keep track of zen_launch logs checkpoint-style 293 | with open(outdir / "logs.txt", "w") as f: 294 | for logfile, logtext in log_runs: 295 | if logtext != "": 296 | f.write(str(logfile) + "\n") 297 | f.write(logtext + "\n\n") 298 | 299 | # export merged to disk 300 | df_merged.to_csv(outdir / "complete.csv", index=False) 301 | 302 | # copy multirun.yaml file to outdir if exists 303 | try: 304 | multirun_yaml = next(rundir.glob("multirun.yaml")) 305 | shutil.copy2(multirun_yaml, outdir / "multirun.yaml") 306 | except (StopIteration, OSError): 307 | pass 308 | 309 | # remove multirun root directory? 
310 | if rm_multirun_root: 311 | # DIRECTORIES WILL ONLY BE REMOVED IF THEIR CONTENTS ARE 312 | # EXCLUSIVELY HYDRA-ZEN MULTIRUN OUTPUTS 313 | for job in jobs: 314 | working_dir = Path(job.working_dir) 315 | hydra_dir = working_dir / ".hydra" 316 | if hydra_dir.exists() and hydra_dir.is_dir(): 317 | shutil.rmtree(hydra_dir) 318 | 319 | (working_dir / "zen_launch.log").unlink(missing_ok=True) 320 | (working_dir / "output.csv").unlink(missing_ok=True) 321 | (working_dir / "error.txt").unlink(missing_ok=True) 322 | 323 | contents = list(working_dir.glob("*")) 324 | if len(contents) == 0: 325 | shutil.rmtree(working_dir) 326 | 327 | if (rundir / ".submitit").exists(): 328 | shutil.rmtree(rundir / ".submitit") 329 | 330 | parent = rundir.parent 331 | grandparent = parent.parent 332 | 333 | (rundir / "multirun.yaml").unlink(missing_ok=True) 334 | contents = list(rundir.glob("*")) 335 | if len(contents) == 0: 336 | shutil.rmtree(rundir) 337 | 338 | parent_contents = list(parent.glob("*")) 339 | if len(parent_contents) == 0: 340 | shutil.rmtree(parent) 341 | 342 | grandparent_contents = list(grandparent.glob("*")) 343 | if len(grandparent_contents) == 0: 344 | shutil.rmtree(grandparent) 345 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Welcome to `MaDDG` contributor's guide. 4 | 5 | This document focuses on getting any potential contributor familiarized with 6 | the development processes, but [other kinds of contributions] are also appreciated. 7 | 8 | If you are new to using [git] or have never collaborated in a project previously, 9 | please have a look at [contribution-guide.org]. Other resources are also 10 | listed in the excellent [guide created by FreeCodeCamp] [^contrib1]. 11 | 12 | Please notice, all users and contributors are expected to be **open, 13 | considerate, reasonable, and respectful**. When in doubt, 14 | [Python Software Foundation's Code of Conduct] is a good reference in terms of 15 | behavior guidelines. 16 | 17 | ## Issue Reports 18 | 19 | If you experience bugs or general issues with `MaDDG`, please have a look 20 | on the [issue tracker]. 21 | If you don't see anything useful there, please feel free to fire an issue report. 22 | 23 | :::{tip} 24 | Please don't forget to include the closed issues in your search. 25 | Sometimes a solution was already reported, and the problem is considered 26 | **solved**. 27 | ::: 28 | 29 | New issue reports should include information about your programming environment 30 | (e.g., operating system, Python version) and steps to reproduce the problem. 31 | Please try also to simplify the reproduction steps to a very minimal example 32 | that still illustrates the problem you are facing. By removing other factors, 33 | you help us to identify the root cause of the issue. 34 | 35 | ## Documentation Improvements 36 | 37 | You can help improve `MaDDG` docs by making them more readable and coherent, or 38 | by adding missing information and correcting mistakes. 39 | 40 | `MaDDG` documentation uses [Sphinx] as its main documentation compiler. 41 | This means that the docs are kept in the same repository as the project code, and 42 | that any documentation update is done in the same way was a code contribution. 43 | 44 | ```{todo} Don't forget to mention which markup language you are using. 45 | 46 | e.g., [reStructuredText] or [CommonMark] with [MyST] extensions. 
47 | ``` 48 | 49 | :::{tip} 50 | Please notice that the [GitHub web interface] provides a quick way to 51 | propose changes in `MaDDG`'s files. While this mechanism can 52 | be tricky for normal code contributions, it works perfectly fine for 53 | contributing to the docs, and can be quite handy. 54 | 55 | If you are interested in trying this method out, please navigate to 56 | the `docs` folder in the source [repository], find which file you 57 | would like to propose changes and click in the little pencil icon at the 58 | top, to open [GitHub's code editor]. Once you finish editing the file, 59 | please write a message in the form at the bottom of the page describing 60 | which changes have you made and what are the motivations behind them and 61 | submit your proposal. 62 | ::: 63 | 64 | When working on documentation changes in your local machine, you can 65 | compile them using [tox] : 66 | 67 | ``` 68 | tox -e docs 69 | ``` 70 | 71 | and use Python's built-in web server for a preview in your web browser 72 | (`http://localhost:8000`): 73 | 74 | ``` 75 | python3 -m http.server --directory 'docs/_build/html' 76 | ``` 77 | 78 | ## Code Contributions 79 | 80 | ```{todo} Please include a reference or explanation about the internals of the project. 81 | 82 | An architecture description, design principles or at least a summary of the 83 | main concepts will make it easy for potential contributors to get started 84 | quickly. 85 | ``` 86 | 87 | ### Submit an issue 88 | 89 | Before you work on any non-trivial code contribution it's best to first create 90 | a report in the [issue tracker] to start a discussion on the subject. 91 | This often provides additional considerations and avoids unnecessary work. 92 | 93 | ### Create an environment 94 | 95 | Before you start coding, we recommend creating an isolated [virtual environment] 96 | to avoid any problems with your installed Python packages. 97 | This can easily be done via either [virtualenv]: 98 | 99 | ``` 100 | virtualenv 101 | source /bin/activate 102 | ``` 103 | 104 | or [Miniconda]: 105 | 106 | ``` 107 | conda create -n MaDDG python=3 six virtualenv pytest pytest-cov 108 | conda activate MaDDG 109 | ``` 110 | 111 | ### Clone the repository 112 | 113 | 1. Create an user account on GitHub if you do not already have one. 114 | 115 | 2. Fork the project [repository]: click on the *Fork* button near the top of the 116 | page. This creates a copy of the code under your account on GitHub. 117 | 118 | 3. Clone this copy to your local disk: 119 | 120 | ``` 121 | git clone git@github.com:YourLogin/MaDDG.git 122 | cd MaDDG 123 | ``` 124 | 125 | 4. You should run: 126 | 127 | ``` 128 | pip install -U pip setuptools -e . 129 | ``` 130 | 131 | to be able to import the package under development in the Python REPL. 132 | 133 | ```{todo} if you are not using pre-commit, please remove the following item: 134 | ``` 135 | 136 | 5. Install [pre-commit]: 137 | 138 | ``` 139 | pip install pre-commit 140 | pre-commit install 141 | ``` 142 | 143 | `MaDDG` comes with a lot of hooks configured to automatically help the 144 | developer to check the code being written. 145 | 146 | ### Implement your changes 147 | 148 | 1. Create a branch to hold your changes: 149 | 150 | ``` 151 | git checkout -b my-feature 152 | ``` 153 | 154 | and start making changes. Never work on the main branch! 155 | 156 | 2. Start your work on this branch. Don't forget to add [docstrings] to new 157 | functions, modules and classes, especially if they are part of public APIs. 158 | 159 | 3. 
Add yourself to the list of contributors in `AUTHORS.md`. 160 | 161 | 4. When you’re done editing, do: 162 | 163 | ``` 164 | git add <MODIFIED FILES> 165 | git commit 166 | ``` 167 | 168 | to record your changes in [git]. 169 | 170 | ```{todo} if you are not using pre-commit, please remove the following item: 171 | ``` 172 | 173 | Please make sure to see the validation messages from [pre-commit] and fix 174 | any issues it reports. 175 | This should automatically use [flake8]/[black] to check/fix the code style 176 | in a way that is compatible with the project. 177 | 178 | :::{important} 179 | Don't forget to add unit tests and documentation in case your 180 | contribution adds an additional feature and is not just a bugfix. 181 | 182 | Moreover, writing a [descriptive commit message] is highly recommended. 183 | In case of doubt, you can check the commit history with: 184 | 185 | ``` 186 | git log --graph --decorate --pretty=oneline --abbrev-commit --all 187 | ``` 188 | 189 | to look for recurring communication patterns. 190 | ::: 191 | 192 | 5. Please check that your changes don't break any unit tests with: 193 | 194 | ``` 195 | tox 196 | ``` 197 | 198 | (after having installed [tox] with `pip install tox` or `pipx`). 199 | 200 | You can also use [tox] to run several other pre-configured tasks in the 201 | repository. Try `tox -av` to see a list of the available checks. 202 | 203 | ### Submit your contribution 204 | 205 | 1. If everything works fine, push your local branch to the remote server with: 206 | 207 | ``` 208 | git push -u origin my-feature 209 | ``` 210 | 211 | 2. Go to the web page of your fork and click "Create pull request" 212 | to send your changes for review. 213 | 214 | Find more detailed information in [creating a PR]. You might also want to open 215 | the PR as a draft first and mark it as ready for review after the feedback 216 | from the continuous integration (CI) system or any required fixes. 217 | 218 | 219 | ### Troubleshooting 220 | 221 | The following tips can be used when facing problems building or testing the 222 | package: 223 | 224 | 1. Make sure to fetch all the tags from the upstream [repository]. 225 | The command `git describe --abbrev=0 --tags` should return the version you 226 | are expecting. If you are trying to run CI scripts in a fork repository, 227 | make sure to push all the tags. 228 | You can also try to remove all the egg files or the complete egg folder, i.e., 229 | `.eggs`, as well as the `*.egg-info` folders in the `src` folder or 230 | potentially in the root of your project. 231 | 232 | 2. Sometimes [tox] misses out when new dependencies are added, especially to 233 | `setup.cfg` and `docs/requirements.txt`. If you find any problems with 234 | missing dependencies when running a command with [tox], try to recreate the 235 | `tox` environment using the `-r` flag. For example, instead of: 236 | 237 | ``` 238 | tox -e docs 239 | ``` 240 | 241 | Try running: 242 | 243 | ``` 244 | tox -r -e docs 245 | ``` 246 | 247 | 3. Make sure to have a reliable [tox] installation that uses the correct 248 | Python version (e.g., 3.7+). When in doubt you can run: 249 | 250 | ``` 251 | tox --version 252 | # OR 253 | which tox 254 | ``` 255 | 256 | If you have trouble and are seeing weird errors upon running [tox], you can 257 | also try to create a dedicated [virtual environment] with a [tox] binary 258 | freshly installed.
For example: 259 | 260 | ``` 261 | virtualenv .venv 262 | source .venv/bin/activate 263 | .venv/bin/pip install tox 264 | .venv/bin/tox -e all 265 | ``` 266 | 267 | 4. [Pytest can drop you] in an interactive session in case an error occurs. 268 | In order to do that you need to pass a `--pdb` option (for example by 269 | running `tox -- -k <NAME OF THE FAILING TEST> --pdb`). 270 | You can also set up breakpoints manually instead of using the `--pdb` option. 271 | 272 | ## Maintainer tasks 273 | 274 | ### Releases 275 | 276 | ```{todo} This section assumes you are using PyPI to publicly release your package. 277 | 278 | If instead you are using a different/private package index, please update 279 | the instructions accordingly. 280 | ``` 281 | 282 | If you are part of the group of maintainers and have correct user permissions 283 | on [PyPI], the following steps can be used to release a new version for 284 | `MaDDG`: 285 | 286 | 1. Make sure all unit tests are successful. 287 | 2. Tag the current commit on the main branch with a release tag, e.g., `v1.2.3`. 288 | 3. Push the new tag to the upstream [repository], 289 | e.g., `git push upstream v1.2.3` 290 | 4. Clean up the `dist` and `build` folders with `tox -e clean` 291 | (or `rm -rf dist build`) 292 | to avoid confusion with old builds and Sphinx docs. 293 | 5. Run `tox -e build` and check that the files in `dist` have 294 | the correct version (no `.dirty` or [git] hash) according to the [git] tag. 295 | Also check the sizes of the distributions; if they are too big (e.g., > 296 | 500KB), unwanted clutter may have been accidentally included. 297 | 6. Run `tox -e publish -- --repository pypi` and check that everything was 298 | uploaded to [PyPI] correctly. 299 | 300 | [^contrib1]: Even though these resources focus on open source projects and 301 | communities, the general ideas behind collaborating with other developers 302 | to collectively create software are general and can be applied to all sorts 303 | of environments, including private companies and proprietary code bases.
304 | 305 | 306 | [black]: https://pypi.org/project/black/ 307 | [commonmark]: https://commonmark.org/ 308 | [contribution-guide.org]: http://www.contribution-guide.org/ 309 | [creating a pr]: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request 310 | [descriptive commit message]: https://chris.beams.io/posts/git-commit 311 | [docstrings]: https://www.sphinx-doc.org/en/master/usage/extensions/napoleon.html 312 | [first-contributions tutorial]: https://github.com/firstcontributions/first-contributions 313 | [flake8]: https://flake8.pycqa.org/en/stable/ 314 | [git]: https://git-scm.com 315 | [github web interface]: https://docs.github.com/en/github/managing-files-in-a-repository/managing-files-on-github/editing-files-in-your-repository 316 | [github's code editor]: https://docs.github.com/en/github/managing-files-in-a-repository/managing-files-on-github/editing-files-in-your-repository 317 | [github's fork and pull request workflow]: https://guides.github.com/activities/forking/ 318 | [guide created by freecodecamp]: https://github.com/freecodecamp/how-to-contribute-to-open-source 319 | [miniconda]: https://docs.conda.io/en/latest/miniconda.html 320 | [myst]: https://myst-parser.readthedocs.io/en/latest/syntax/syntax.html 321 | [other kinds of contributions]: https://opensource.guide/how-to-contribute 322 | [pre-commit]: https://pre-commit.com/ 323 | [pypi]: https://pypi.org/ 324 | [pyscaffold's contributor's guide]: https://pyscaffold.org/en/stable/contributing.html 325 | [pytest can drop you]: https://docs.pytest.org/en/stable/usage.html#dropping-to-pdb-python-debugger-at-the-start-of-a-test 326 | [python software foundation's code of conduct]: https://www.python.org/psf/conduct/ 327 | [restructuredtext]: https://www.sphinx-doc.org/en/master/usage/restructuredtext/ 328 | [sphinx]: https://www.sphinx-doc.org/en/master/ 329 | [tox]: https://tox.readthedocs.io/en/stable/ 330 | [virtual environment]: https://realpython.com/python-virtual-environments-a-primer/ 331 | [virtualenv]: https://virtualenv.pypa.io/en/stable/ 332 | 333 | 334 | ```{todo} Please review and change the following definitions: 335 | ``` 336 | 337 | [repository]: https://github.com//MaDDG 338 | [issue tracker]: https://github.com//MaDDG/issues 339 | -------------------------------------------------------------------------------- /scripts/hz_launcher.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | import argparse 5 | import logging 6 | import pathlib 7 | import shutil 8 | import sys 9 | import time 10 | import warnings 11 | from typing import Tuple 12 | 13 | import numpy as np 14 | import pandas as pd 15 | from astropy.time import Time 16 | from hydra.conf import HydraConf, JobConf 17 | from hydra_zen import store 18 | 19 | import madlib 20 | from maddg._residuals import calculate_residuals 21 | from maddg._sim_launcher import launcher 22 | from madlib._utils import MadlibException 23 | 24 | log_task = logging.getLogger("simulator_task") 25 | log_task.setLevel(logging.WARNING) 26 | 27 | 28 | def parseArgs(): 29 | parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter) 30 | 31 | parser.add_argument( 32 | "num_pairs", 33 | type=int, 34 | help="Number of Maneuver/Non-Maneuver pairs per simulation launch", 35 | ) 36 | 37 | parser.add_argument( 38 | "sensor_yaml", 39 | 
type=str, 40 | help="Path to a YAML file defining the sensor network for the simulation", 41 | ) 42 | 43 | parser.add_argument( 44 | "--start_mjd", 45 | type=float, 46 | default=-1.0, 47 | help="MJD at start of simulation. If <0, start at current system time. (default: -1.0)", 48 | ) 49 | 50 | parser.add_argument( 51 | "--sim_duration_days", 52 | type=float, 53 | default=3.0, 54 | help=("Duration of simulation in whole days\n" " (default: %(default)f)"), 55 | ) 56 | 57 | parser.add_argument( 58 | "--mtype", 59 | type=str, 60 | choices=["impulse", "continuous", "all"], 61 | default="impulse", 62 | help=("Maneuver type for orbit simulations\n" " (default: %(default)s)"), 63 | ) 64 | 65 | parser.add_argument( 66 | "--dv_ric_mean_kms", 67 | type=float, 68 | nargs=3, 69 | default=[0, 0, 0], 70 | help=( 71 | "Mean values for impulsive RIC thrust vector normal distributions, in km/s\n" 72 | " (default: %(default)s)" 73 | ), 74 | ) 75 | 76 | parser.add_argument( 77 | "--dv_ric_std_kms", 78 | type=float, 79 | nargs=3, 80 | default=[0, 0.1, 1], 81 | help=( 82 | "Standard deviations for impulsive RIC thrust vector normal distributions, in km/s\n" 83 | " (default: %(default)s)" 84 | ), 85 | ) 86 | 87 | parser.add_argument( 88 | "--sensor_dra", 89 | type=float, 90 | default=None, 91 | help=( 92 | "Sensor metric accuracy in the right ascension direction (arcsec).\n" 93 | " If not set, value is None, and `dra` value in sensor_yaml file\n" 94 | " will be used. (default: None)" 95 | ), 96 | ) 97 | 98 | parser.add_argument( 99 | "--sensor_ddec", 100 | type=float, 101 | default=None, 102 | help=( 103 | "Sensor metric accuracy in the declination direction (arcsec).\n" 104 | " If not set, value is None, and `ddec` value in sensor_yaml file\n" 105 | " will be used. (default: None)" 106 | ), 107 | ) 108 | 109 | parser.add_argument( 110 | "--cont_thrust_duration_days", 111 | type=float, 112 | help=( 113 | "Duration in days of continuous thrust, which begins at start of simulation\n" 114 | " (default: %(default)s)" 115 | ), 116 | ) 117 | 118 | parser.add_argument( 119 | "--cont_thrust_model", 120 | type=int, 121 | choices=[0, 1], 122 | default=0, 123 | help=( 124 | "Continuous thrust model:\n" 125 | " 0 = continuous thrust in [0,1,0] direction\n" 126 | " 1 = continuous thrust in random direction\n" 127 | " (relevant only for --mtype continuous)\n" 128 | " (default: %(default)s)" 129 | ), 130 | ) 131 | 132 | parser.add_argument( 133 | "--cont_thrust_mag", 134 | type=float, 135 | default=1.0e-7, 136 | help=( 137 | "Magnitude of the constant acceleration (km/s^2) for any continuous constant thrust models\n" 138 | " (relevant only for --mtype continuous, --cont_thrust_model {0, 1})\n" 139 | " (default: %(default)s)" 140 | ), 141 | ) 142 | 143 | parser.add_argument( 144 | "--outdir", 145 | type=str, 146 | default="outputs", 147 | help="Path to output directory", 148 | ) 149 | 150 | parser.add_argument( 151 | "--submitit", 152 | type=str, 153 | default="", 154 | help="Optional JSON config file defining how to launch jobs across multiple GPUs using submitit", 155 | ) 156 | 157 | parser.add_argument( 158 | "--multirun_root", 159 | type=str, 160 | default="", 161 | help="Optional path to directory where multirun results should be saved (./multirun by default)", 162 | ) 163 | 164 | parser.add_argument( 165 | "--pred_err", 166 | type=float, 167 | default=0.0, 168 | help="Fractional error on predicted initial orbital state", 169 | ) 170 | 171 | parser.add_argument( 172 | "--rm_multirun_root", 173 | action="store_true", 174 |
help="Raise this flag to remove the multirun root directory after merging data", 175 | ) 176 | 177 | parser.add_argument( 178 | "--overwrite", 179 | action="store_true", 180 | help="Raise this flag to proceed with overwriting any data in outdir if outdir exists and is not empty", 181 | ) 182 | 183 | parser.add_argument( 184 | "--sims_per_task", 185 | type=int, 186 | default=1, 187 | help=( 188 | "Number of simulations per task function. Must evenly divide into `num_pairs`.\n" 189 | " (default: %(default)i)" 190 | ), 191 | ) 192 | 193 | return parser 194 | 195 | 196 | def simulator_task( 197 | seq_id: int, 198 | sensor_params: dict, 199 | maneuver_type: int, 200 | sim_duration_days: float, 201 | num_sim_pairs: int | None = None, 202 | start_mjd: float | None = None, 203 | dv_ric_mean_kms: Tuple[float, float, float] = (0.0, 0.0, 0.0), 204 | dv_ric_std_kms: Tuple[float, float, float] = (0.0, 0.1, 1.0), 205 | cont_thrust_duration_days: float | None = None, 206 | cont_thrust_mag: float = 1e-7, 207 | cont_thrust_model: int = 0, 208 | pred_err: float = 0.0, 209 | random_seed: int | None = None, 210 | sims_per_task: int = 1, 211 | ) -> pd.DataFrame | None: 212 | """Generates a satellite, propagates it with and without a maneuver. 213 | 214 | Parameters 215 | ---------- 216 | seq_id : int 217 | Unique ID of satellite 218 | sensor_params: dict 219 | Dictionary defining the parameters for each active sensor 220 | maneuver_type : int 221 | Specifies whether ot not the satellite is performing a maneuver: 222 | 0 = no maneuver, 223 | 1 = impulse maneuver, 224 | 2 = continuous maneuver, 225 | sim_duration_days : float 226 | Duration of the simulation (days) 227 | num_sim_pairs : int, optional 228 | Number of simulation to perform per maneuver type. 229 | start_mjd : float, optional 230 | MJD at which the simulation should begin, by default None (current MJD) 231 | dv_ric_mean_kms : Tuple[float, float, float], optional 232 | Mean values of normal distributions to use when sampling 233 | the radial, in-track, and cross-track delta-V values, respectively, 234 | of impulsive maneuvers. In units of km/s, by default (0.0, 0.0, 0.0) 235 | dv_ric_std_kms : Tuple[float, float, float], optional 236 | Standard deviations of normal distributions to use when sampling 237 | the radial, in-track, and cross-track delta-V values, respectively, 238 | of impulsive maneuvers. 
In units of km/s, by default (0.0, 0.1, 1.0) 239 | cont_thrust_duration_days : float | None, optional 240 | Duration in days of the continuous maneuver that begins at simulation start, by default None 241 | (if None, maneuver duration is equal to simulation duration) 242 | Magnitude of the continuous thrust (km/s/s) (default 1e-7) 243 | cont_thrust_model : int, optional 244 | Which continuous thrust model to use: 245 | 0 = applies a continuous thrust in the [0,1,0] direction, 246 | 1 = applies a continuous thrust in a random direction, 247 | (default 0) 248 | pred_err : float, optional 249 | Fractional error on predicted initial orbital state, by default 0.0 250 | random_seed : int, optional 251 | Random seed to use for numpy, by default None 252 | sims_per_task : int, optional 253 | Number of simulations to perform per task function, by default 1 254 | 255 | Returns 256 | ------- 257 | pd.core.frame.DataFrame 258 | Results (residuals) 259 | or, None (if no result) 260 | 261 | Raises 262 | ------ 263 | ValueError 264 | Occurs if supplied `cont_thrust_model` is not a supported option 265 | """ 266 | # Set the random seed for numpy 267 | np.random.seed(seed=random_seed) 268 | 269 | # Offset `seq_id` based off of value of `maneuver_type` so that each sequence ID is unique 270 | if num_sim_pairs is not None: 271 | seq_id = seq_id + num_sim_pairs * maneuver_type 272 | 273 | residual_dfs = [] 274 | for seq_id in np.arange(seq_id, seq_id + sims_per_task): 275 | if cont_thrust_duration_days is None: 276 | cont_thrust_duration_days = sim_duration_days 277 | 278 | # Declarations 279 | sat_observed = None 280 | 281 | sensors = [ 282 | madlib.GroundOpticalSensor(**params) 283 | for key, params in sensor_params.items() 284 | ] 285 | sensor_network = madlib.SensorCollection(sensors) 286 | 287 | # Timing 288 | if start_mjd is None: 289 | epoch = Time.now().mjd + np.random.random() 290 | else: 291 | epoch = start_mjd 292 | 293 | # Satellite Parameters 294 | sat_longitude = 360 * np.random.random() 295 | 296 | # Define Maneuver and setup appropriate Satellite Class 297 | maneuver = None 298 | maneuver_start_mjd = None 299 | maneuver_end_mjd = None 300 | maneuver_r_kms = None 301 | maneuver_i_kms = None 302 | maneuver_c_kms = None 303 | if maneuver_type == 0: 304 | """no maneuver""" 305 | # Setup Satellite 306 | sat_observed = madlib.Satellite.from_GEO_longitude(sat_longitude, epoch) 307 | elif maneuver_type == 1: 308 | """impulse maneuver""" 309 | # Maneuver Definition 310 | man_time = epoch + sim_duration_days * np.random.random() 311 | maneuver_start_mjd = man_time 312 | 313 | mean_rad, mean_in, mean_crs = dv_ric_mean_kms 314 | std_rad, std_in, std_crs = dv_ric_std_kms 315 | dv_rad = mean_rad + std_rad * np.random.randn() 316 | dv_in = mean_in + std_in * np.random.randn() 317 | dv_crs = mean_crs + std_crs * np.random.randn() 318 | 319 | man_dv = np.array([dv_rad, dv_in, dv_crs]) / 1000 320 | maneuver = madlib.ImpulsiveManeuver(man_time, man_dv) 321 | 322 | maneuver_r_kms = dv_rad 323 | maneuver_i_kms = dv_in 324 | maneuver_c_kms = dv_crs 325 | # Setup Satellite 326 | sat_observed = madlib.Satellite.from_GEO_longitude(sat_longitude, epoch) 327 | elif maneuver_type == 2: 328 | """continuous maneuver""" 329 | 330 | if cont_thrust_model == 0: 331 | accel_vec = np.array([0.0, 1.0, 0.0]) * cont_thrust_mag 332 | 333 | def acc_f(t): 334 | return accel_vec 335 | 336 | elif cont_thrust_model == 1: 337 | rand_unit_vec = np.random.randn(3) 338 | rand_unit_vec /= np.linalg.norm(rand_unit_vec) 339 | accel_vec = 
rand_unit_vec * cont_thrust_mag 340 | 341 | def acc_f(t): 342 | return accel_vec 343 | 344 | else: 345 | raise ValueError(f"{cont_thrust_model = } is not a supported option") 346 | 347 | man_time = (epoch, epoch + cont_thrust_duration_days) 348 | maneuver = madlib.ContinuousManeuver(acc_f, man_time) 349 | # Setup Satellite 350 | sat_observed = madlib.ContinuousThrustSatellite.from_GEO_longitude( 351 | sat_longitude, epoch 352 | ) 353 | 354 | maneuver_start_mjd = man_time[0] 355 | maneuver_end_mjd = man_time[1] 356 | maneuver_r_kms, maneuver_i_kms, maneuver_c_kms = accel_vec 357 | 358 | if isinstance( 359 | sat_observed, (madlib.Satellite, madlib.ContinuousThrustSatellite) 360 | ): 361 | # If prediction error has been specified, use it to define a "true" orbit 362 | if np.abs(pred_err) > 1e-8: 363 | r_err = 1 + pred_err * np.random.randn(3) 364 | v_err = 1 + pred_err * np.random.randn(3) 365 | 366 | true_r = sat_observed.x * r_err 367 | true_v = sat_observed.v * v_err 368 | 369 | sat_observed.x_true = true_r 370 | sat_observed.v_true = true_v 371 | 372 | sat_observed.maneuver = maneuver 373 | 374 | # Observe and calculate residuals 375 | residual_df = calculate_residuals( 376 | sensors=sensor_network, 377 | satellite=sat_observed, 378 | sim_duration_days=sim_duration_days, 379 | t_start_mjd=epoch, 380 | ) 381 | 382 | if residual_df is not None: 383 | residual_df["Maneuver"] = maneuver_type 384 | residual_df["Sequence"] = int(seq_id) 385 | residual_df["Maneuver_Start_MJD"] = maneuver_start_mjd 386 | residual_df["Maneuver_End_MJD"] = maneuver_end_mjd 387 | residual_df["Maneuver_DV_Radial_KmS"] = maneuver_r_kms 388 | residual_df["Maneuver_DV_InTrack_KmS"] = maneuver_i_kms 389 | residual_df["Maneuver_DV_CrossTrack_KmS"] = maneuver_c_kms 390 | residual_dfs.append(residual_df) 391 | 392 | if len(residual_dfs) == 0: 393 | return None 394 | else: 395 | return pd.concat(residual_dfs, ignore_index=True) 396 | 397 | 398 | if __name__ == "__main__": 399 | time_start = time.time() 400 | 401 | # parse arguments 402 | parser = parseArgs() 403 | args = parser.parse_args() 404 | 405 | # check if num_pairs is evenly divisible by sims_per_task 406 | if args.num_pairs % args.sims_per_task != 0: 407 | warnings.warn( 408 | "`num_pairs` is not evenly divisible by `sims_per_task`. Number of sequences generated may be different than expected." 409 | ) 410 | 411 | # check outdir for any existing files 412 | try: 413 | outdir_has_files = any(pathlib.Path(args.outdir).iterdir()) 414 | except FileNotFoundError: 415 | outdir_has_files = False 416 | 417 | # exit (abort simulation) if outdir is not empty, unless --overwrite was specified 418 | # otherwise, delete existing folder and proceed 419 | if outdir_has_files and (not args.overwrite): 420 | # abort simulation (do nothing) 421 | sys.exit( 422 | "Outdir exists and is not empty. --overwrite argument was not passed. Aborting simulation." 423 | ) 424 | elif outdir_has_files and args.overwrite: 425 | # delete all files in outdir and proceed 426 | shutil.rmtree(args.outdir) 427 | print( 428 | "INFO :: outdir exists and --overwrite argument was passed. Existing data was deleted." 
429 | ) 430 | 431 | # customize Hydra's configuration 432 | store(HydraConf(job=JobConf(chdir=True))) 433 | store.add_to_hydra_store(overwrite_ok=True) 434 | 435 | simulator_method = simulator_task 436 | num_sim_pairs = args.num_pairs 437 | sensor_yaml = args.sensor_yaml 438 | outdir = args.outdir 439 | dv_ric_mean_kms = ( 440 | args.dv_ric_mean_kms[0], 441 | args.dv_ric_mean_kms[1], 442 | args.dv_ric_mean_kms[2], 443 | ) 444 | dv_ric_std_kms = ( 445 | args.dv_ric_std_kms[0], 446 | args.dv_ric_std_kms[1], 447 | args.dv_ric_std_kms[2], 448 | ) 449 | 450 | sim_duration_days = args.sim_duration_days 451 | 452 | if not args.cont_thrust_duration_days: 453 | cont_thrust_duration_days = sim_duration_days 454 | else: 455 | cont_thrust_duration_days = args.cont_thrust_duration_days 456 | 457 | cont_thrust_mag = args.cont_thrust_mag 458 | cont_thrust_model = args.cont_thrust_model 459 | pred_err = args.pred_err 460 | submitit = args.submitit 461 | multirun_root = args.multirun_root 462 | rm_multirun_root = args.rm_multirun_root 463 | mtype = args.mtype 464 | sensor_dra = args.sensor_dra 465 | sensor_ddec = args.sensor_ddec 466 | 467 | start_mjd = args.start_mjd 468 | if start_mjd < 0: 469 | start_mjd = None 470 | 471 | print("Setting up job launcher...") 472 | 473 | launcher( 474 | simulator_method, 475 | mtype, 476 | num_sim_pairs, 477 | sensor_yaml, 478 | outdir, 479 | dv_ric_mean_kms=dv_ric_mean_kms, 480 | dv_ric_std_kms=dv_ric_std_kms, 481 | cont_thrust_duration_days=cont_thrust_duration_days, 482 | cont_thrust_mag=cont_thrust_mag, 483 | cont_thrust_model=cont_thrust_model, 484 | submitit=submitit, 485 | multirun_root=multirun_root, 486 | rm_multirun_root=rm_multirun_root, 487 | start_mjd=start_mjd, 488 | sim_duration_days=sim_duration_days, 489 | pred_err=pred_err, 490 | sensor_dra=sensor_dra, 491 | sensor_ddec=sensor_ddec, 492 | sims_per_task=args.sims_per_task, 493 | ) 494 | time_stop = time.time() 495 | print(f"INFO :: Elapsed time (sec) = {time_stop - time_start}") 496 | -------------------------------------------------------------------------------- /tests/test_madlib_satellite.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | """ 5 | Test file: test_madlib_satellite.py 6 | Description: This file contains unit tests which test "edge cases" in 7 | the madlib._satellite module. 
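(A quick way to run just these tests, assuming pytest is installed: ``pytest tests/test_madlib_satellite.py``; the project's tox configuration runs the full suite.)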
8 | """ 9 | 10 | import pathlib 11 | import sys 12 | 13 | import astroforge as af 14 | import numpy as np 15 | from astropy.time import Time 16 | 17 | # add parent directory of __file__ to sys.path, if isn't already included 18 | if str(pathlib.Path(__file__).parents[1]) not in sys.path: 19 | sys.path.append(str(pathlib.Path(__file__).parents[1])) 20 | 21 | import madlib 22 | from madlib._utils import MadlibException 23 | from madlib._satellite import Satellite, ContinuousThrustSatellite 24 | from madlib._maneuver import ImpulsiveManeuver 25 | 26 | import pytest 27 | 28 | 29 | seed = 4445 30 | 31 | 32 | def test_Satellite_kwargs_NotImplementedError(): 33 | _epoch = 54321.0 34 | _pos = np.zeros((3,)) 35 | _vel = np.zeros((3,)) 36 | _accel = np.zeros((3,)) 37 | _manuever_info = None 38 | with pytest.raises(NotImplementedError): 39 | Satellite( 40 | _epoch, 41 | _pos, 42 | _vel, 43 | _accel, 44 | _manuever_info, 45 | today="Tuesday", # **kwargs, should raise NotImplementedError 46 | ) 47 | 48 | 49 | def test_Satellite_property_and_setters(): 50 | _epoch = 54321.0 51 | _pos = np.zeros((3,)) 52 | _vel = np.zeros((3,)) 53 | _accel = np.zeros((3,)) 54 | 55 | # --- Satellite w/ ImpulseManeuver 56 | _manuever_info = None 57 | sat = Satellite( 58 | _epoch, 59 | _pos, 60 | _vel, 61 | _accel, 62 | _manuever_info, 63 | ) 64 | 65 | maneuver = ImpulsiveManeuver( 66 | 54321.0 + 1.0, # _time (mjd) 67 | np.ones((3,)), # _dv 68 | ) 69 | 70 | # --- @maneuver.setter 71 | assert sat.maneuver == None 72 | sat.maneuver = maneuver 73 | assert sat.maneuver == maneuver 74 | 75 | # --- @property : epoch 76 | assert sat.epoch == _epoch 77 | 78 | # --- @epoch.setter & @property 79 | assert sat.epoch == _epoch 80 | sat.epoch = _epoch + 1 81 | assert sat.epoch == _epoch + 1 82 | 83 | # --- @x.setter & @property 84 | assert all(sat.x == _pos) 85 | sat.x = _pos + 1 86 | assert all(sat.x == _pos + 1) 87 | 88 | # --- @v.setter & @property 89 | assert all(sat.v == _vel) 90 | sat.v = _vel + 1 91 | assert all(sat.v == _vel + 1) 92 | 93 | # --- @a.setter & @property 94 | assert all(sat.acc == _accel) 95 | sat.acc = _accel + 1 96 | assert all(sat.acc == _accel + 1) 97 | 98 | # --- Satellite.copy() test 99 | sat_copy = sat.copy() 100 | assert all( 101 | [ 102 | sat_copy.does_maneuver == sat.does_maneuver, 103 | sat_copy.maneuver.time == sat.maneuver.time, # type: ignore 104 | all(sat_copy.maneuver.dv == sat.maneuver.dv), # type: ignore 105 | sat_copy.epoch == sat.epoch, 106 | all(sat_copy.x == sat.x), 107 | all(sat_copy.v == sat.v), 108 | all(sat_copy.acc == sat.acc), 109 | ] 110 | ) 111 | 112 | # --- __str__() and __repr__() methods test 113 | sat_str = str(sat) 114 | sat_repr = repr(sat) 115 | assert isinstance(sat_str, str) 116 | assert isinstance(sat_repr, str) 117 | 118 | 119 | def test_Satellite_invalid_instantiations(): 120 | _epoch = 54321.0 121 | _pos = [1, 2, 3] 122 | _vel = np.zeros((3,)) 123 | _accel = np.zeros((3,)) 124 | 125 | with pytest.raises(TypeError): 126 | Satellite( 127 | _epoch, 128 | _pos, # type: ignore 129 | _vel, 130 | _accel, 131 | ) 132 | 133 | _pos = np.zeros((1, 3)) 134 | 135 | with pytest.raises(ValueError): 136 | Satellite( 137 | _epoch, 138 | _pos, 139 | _vel, 140 | _accel, 141 | ) 142 | 143 | 144 | def test_Satellite_propagate_method(seed=seed): 145 | np.random.seed(seed) 146 | # --- orbital state 147 | th = 2 * np.pi * np.random.rand() 148 | x0 = af.constants.Rgeo * np.array([np.cos(th), np.sin(th), 0.0]) 149 | v0 = af.constants.Vgeo * np.array([-np.sin(th), np.cos(th), 0.0]) 150 | a0 = 
np.zeros((3,)) 151 | t_start = Time("2011-11-11T11:11:11", format="isot", scale="utc") 152 | epoch = t_start.mjd 153 | 154 | # --- maneuver definition 155 | man_time = epoch + 5.0 / 24 156 | man_dv = np.array([0.0, 5.0, 0.0]) / 1000 157 | man = madlib.ImpulsiveManeuver(man_time, man_dv) 158 | 159 | # --- define satellites 160 | sat0 = madlib.Satellite(epoch, x0, v0, a0) # maneuver = False 161 | sat1 = madlib.Satellite(epoch, x0, v0, a0, man) # manuever = True 162 | 163 | # --- propagate, times is a scalar 164 | _times = epoch + 1 165 | _X, _V = sat0.propagate(_times) 166 | assert np.allclose(_X, [2.83737953e04, 3.11888127e04, -3.64526039e-01]) 167 | assert np.allclose(_V, [-2.27434029e00, 2.06902511e00, 1.99407148e-04]) 168 | 169 | # --- propagate, raise ValueError: One of the propagation times is the exact same time as the maneuver. 170 | with pytest.raises(ValueError): 171 | sat1.propagate(sat1.maneuver.time) # type: ignore 172 | 173 | 174 | def test_Satellite_from_GEO_longitude(seed=seed): 175 | np.random.seed(seed) 176 | # Satellite 177 | t_start = Time("2011-11-11T11:11:11", format="isot", scale="utc") 178 | epoch = t_start.mjd 179 | sat_longitude = 360 * np.random.random() 180 | sat_observed = madlib.Satellite.from_GEO_longitude(sat_longitude, epoch) 181 | 182 | assert sat_observed.epoch == epoch 183 | assert isinstance(sat_observed, madlib._satellite.Satellite) 184 | 185 | 186 | def test_Satellite_from_keplerian(seed=seed, rtol=1e-3, atol=1e-3): 187 | np.random.seed(seed) 188 | # First, let's create a satellite from Cartesian coordinates we can use for comparison 189 | th = 2 * np.pi * np.random.rand() 190 | x0 = af.constants.Rgeo * np.array([np.cos(th), np.sin(th), 0.0]) 191 | v0 = af.constants.Vgeo * np.array([-np.sin(th), np.cos(th), 0.0]) 192 | a0 = np.zeros((3,)) 193 | t_start = Time("2011-11-11T11:11:11", format="isot", scale="utc") 194 | epoch = t_start.mjd 195 | sat0 = madlib.Satellite(epoch, x0, v0, a0) 196 | assert all(sat0.x == x0) 197 | assert all(sat0.v == v0) 198 | assert all(sat0.acc == a0) 199 | 200 | # Convert Cartesian -> Keplerian elements 201 | keplerian_elements = af.coordinates.cartesian_to_keplerian(sat0.x, sat0.v) 202 | 203 | # Create Satellite from Keplerian elements 204 | sat1 = madlib.Satellite.from_keplerian( 205 | epoch=epoch, 206 | inclination_rad=keplerian_elements["inclination_rad"], 207 | raan_rad=keplerian_elements["raan_rad"], 208 | argp_rad=keplerian_elements["argp_rad"], 209 | ecc=keplerian_elements["eccentricity"], 210 | semi_major_axis_km=keplerian_elements["semi_major_axis_km"], 211 | mean_anomaly_rad=keplerian_elements["mean_anomaly_rad"], 212 | ) 213 | 214 | # Check: sat1 should be the same as sat0 215 | assert np.allclose(sat0.x, sat1.x, rtol=rtol, atol=atol) 216 | assert np.allclose(sat0.v, sat1.v, rtol=rtol, atol=atol) 217 | assert np.allclose(sat0.acc, sat1.acc, rtol=rtol, atol=atol) 218 | 219 | # Let's do it again but with a bunch of randomly created satellite orbits 220 | for i in range(1000): 221 | sat_longitude = 360 * np.random.random() 222 | sat_rand = madlib.Satellite.from_GEO_longitude(sat_longitude, epoch) 223 | keplerian_elements = af.coordinates.cartesian_to_keplerian( 224 | sat_rand.x, sat_rand.v 225 | ) 226 | sat_from_keplerian = madlib.Satellite.from_keplerian( 227 | epoch=epoch, 228 | inclination_rad=keplerian_elements["inclination_rad"], 229 | raan_rad=keplerian_elements["raan_rad"], 230 | argp_rad=keplerian_elements["argp_rad"], 231 | ecc=keplerian_elements["eccentricity"], 232 | 
semi_major_axis_km=keplerian_elements["semi_major_axis_km"], 233 | mean_anomaly_rad=keplerian_elements["mean_anomaly_rad"], 234 | ) 235 | assert np.allclose(sat_rand.x, sat_from_keplerian.x, rtol=rtol, atol=atol) 236 | assert np.allclose(sat_rand.v, sat_from_keplerian.v, rtol=rtol, atol=atol) 237 | assert np.allclose(sat_rand.acc, sat_from_keplerian.acc, rtol=rtol, atol=atol) 238 | 239 | 240 | def test_ContinuousThrustSatellite_edge_cases(seed=seed): 241 | np.random.seed(seed) 242 | # --- orbital state 243 | th = 2 * np.pi * np.random.rand() 244 | x0 = af.constants.Rgeo * np.array([np.cos(th), np.sin(th), 0.0]) 245 | v0 = af.constants.Vgeo * np.array([-np.sin(th), np.cos(th), 0.0]) 246 | a0 = np.zeros((3,)) 247 | t_start = Time("2011-11-11T11:11:11", format="isot", scale="utc") 248 | epoch = t_start.mjd 249 | 250 | # --- ContinuousThrustSatellite w/ ContinuousManeuver 251 | # ContinuousThrust Function Definition 252 | def acc_f(t): 253 | return np.array([0.0, 1.0e-7, 0.0]) 254 | 255 | acc_t_range = (epoch, epoch + 3) 256 | # ContinuousManeuver Definition 257 | man = madlib.ContinuousManeuver(acc_f, acc_t_range) 258 | sat = madlib.ContinuousThrustSatellite( 259 | epoch, 260 | x0, 261 | v0, 262 | a0, 263 | man, 264 | ) 265 | 266 | # --- check case: _time_range input to ContinuousManeuver is reversed 267 | acc_t_range_rev = (acc_t_range[1], acc_t_range[0]) 268 | man_rev_t = madlib.ContinuousManeuver(acc_f, acc_t_range_rev) 269 | assert acc_t_range == man_rev_t._time_range 270 | 271 | # --- propagate, times is a scalar 272 | _times = epoch + 1 273 | _X, _V = sat.propagate(_times) 274 | assert all( 275 | np.isclose(_X, np.array([2.79082377e04, 3.14487181e04, -3.31180976e-01])) 276 | ) 277 | assert all( 278 | np.isclose(_V, np.array([-2.29974384e00, 2.05373079e00, 2.00473790e-04])) 279 | ) 280 | 281 | 282 | def test_ContinuousThrustSatellite_invalid_propagate(seed=seed): 283 | """Trying to apply a continuous thrust on an ordinary satellite should fail.""" 284 | np.random.seed(seed) 285 | # --- orbital state 286 | th = 2 * np.pi * np.random.rand() 287 | x0 = af.constants.Rgeo * np.array([np.cos(th), np.sin(th), 0.0]) 288 | v0 = af.constants.Vgeo * np.array([-np.sin(th), np.cos(th), 0.0]) 289 | a0 = np.zeros((3,)) 290 | t_start = Time("2011-11-11T11:11:11", format="isot", scale="utc") 291 | epoch = t_start.mjd 292 | 293 | # --- ContinuousThrustSatellite w/ ContinuousManeuver 294 | # ContinuousThrust Function Definition 295 | def acc_f(t): 296 | return np.array([0.0, 1.0e-7, 0.0]) 297 | 298 | acc_t_range = (epoch, epoch + 3) 299 | # ContinuousManeuver Definition 300 | man = madlib.ContinuousManeuver(acc_f, acc_t_range) 301 | sat = madlib.Satellite( 302 | epoch, 303 | x0, 304 | v0, 305 | a0, 306 | man, 307 | ) 308 | 309 | failed = False 310 | try: 311 | sat.propagate(epoch + 1) 312 | except MadlibException: 313 | failed = True 314 | 315 | assert failed 316 | 317 | 318 | def test_ContinuousThrustSatellite_no_maneuver(seed=seed): 319 | """A ContinuousThrustSatellite with no maneuver should behave like 320 | a regular Satellite with no maneuver.""" 321 | np.random.seed(seed) 322 | # --- orbital state 323 | th = 2 * np.pi * np.random.rand() 324 | x0 = af.constants.Rgeo * np.array([np.cos(th), np.sin(th), 0.0]) 325 | v0 = af.constants.Vgeo * np.array([-np.sin(th), np.cos(th), 0.0]) 326 | a0 = np.zeros((3,)) 327 | t_start = Time("2011-11-11T11:11:11", format="isot", scale="utc") 328 | epoch = t_start.mjd 329 | 330 | # ContinuousManeuver Definition 331 | sat_c = madlib.ContinuousThrustSatellite(epoch, x0, 
v0, a0, None) 332 | 333 | # Ordinary satellite with no maneuver 334 | sat_n = madlib.Satellite(epoch, x0, v0) 335 | 336 | _times = epoch + 1 337 | Xc, Vc = sat_c.propagate(_times) 338 | Xn, Vn = sat_n.propagate(_times) 339 | assert np.allclose(Xc, Xn) 340 | assert np.allclose(Vc, Vn) 341 | 342 | 343 | class TestTrueOrbits: 344 | 345 | th = 2 * np.pi * np.random.rand() 346 | x0 = af.constants.Rgeo * np.array([np.cos(th), np.sin(th), 0.0]) 347 | v0 = af.constants.Vgeo * np.array([-np.sin(th), np.cos(th), 0.0]) 348 | a0 = np.zeros((3,)) 349 | t_start = Time("2011-11-11T11:11:11", format="isot", scale="utc") 350 | epoch: float = t_start.mjd 351 | 352 | th_true = th + 0.1 353 | x0_true = af.constants.Rgeo * np.array([np.cos(th_true), np.sin(th_true), 0.001]) 354 | v0_true = af.constants.Vgeo * np.array([-np.sin(th_true), np.cos(th_true), 0.001]) 355 | a0_true = a0 + 1e-5 356 | epoch_true = epoch + 0.1 357 | 358 | sat_default = madlib.Satellite(epoch, x0, v0) 359 | 360 | sat_modified = madlib.Satellite( 361 | epoch, 362 | x0, 363 | v0, 364 | a0, 365 | epoch_true=epoch_true, 366 | pos_true=x0_true, 367 | vel_true=v0_true, 368 | acc_true=a0_true, 369 | ) 370 | 371 | def test_define_true_orbit(self): 372 | 373 | assert self.sat_default.epoch == self.sat_default.epoch_true 374 | assert self.sat_default.x_true is not None 375 | assert all(np.isclose(self.sat_default.x, self.sat_default.x_true)) 376 | assert all(np.isclose(self.sat_default.v, self.sat_default.v_true)) 377 | assert all(np.isclose(self.sat_default.acc, self.sat_default.acc_true)) 378 | 379 | assert self.sat_modified.epoch != self.sat_modified.epoch_true 380 | assert self.sat_modified.x_true is not None 381 | assert all(~np.isclose(self.sat_modified.x, self.sat_modified.x_true)) 382 | assert all(~np.isclose(self.sat_modified.v, self.sat_modified.v_true)) 383 | assert all(~np.isclose(self.sat_modified.acc, self.sat_modified.acc_true)) 384 | 385 | def test_propagate_true_orbit(self): 386 | x_default, v_default = self.sat_default.propagate(self.epoch + 0.5) 387 | x_default_true, v_default_true = self.sat_default.propagate( 388 | self.epoch + 0.5, use_true_orbit=True 389 | ) 390 | 391 | x_modified, v_modified = self.sat_modified.propagate(self.epoch + 0.5) 392 | x_modified_true, v_modified_true = self.sat_modified.propagate( 393 | self.epoch + 0.5, use_true_orbit=True 394 | ) 395 | 396 | assert np.allclose(x_default, x_default_true) 397 | assert np.allclose(v_default, v_default_true) 398 | assert ~np.allclose(x_modified, x_modified_true) 399 | assert ~np.allclose(v_modified, v_modified_true) 400 | 401 | def test_propagate_true_orbit_continuous(self): 402 | np.random.seed(0) 403 | # --- orbital state 404 | th = 2 * np.pi * np.random.rand() 405 | x0 = af.constants.Rgeo * np.array([np.cos(th), np.sin(th), 0.0]) 406 | v0 = af.constants.Vgeo * np.array([-np.sin(th), np.cos(th), 0.0]) 407 | a0 = np.zeros((3,)) 408 | t_start = Time("2011-11-11T11:11:11", format="isot", scale="utc") 409 | epoch = t_start.mjd 410 | 411 | # --- ContinuousThrustSatellite w/ ContinuousManeuver 412 | # ContinuousThrust Function Definition 413 | def acc_f(t): 414 | return np.array([0.0, 1.0e-7, 0.0]) 415 | 416 | acc_t_range = (epoch, epoch + 3) 417 | # ContinuousManeuver Definition 418 | man = madlib.ContinuousManeuver(acc_f, acc_t_range) 419 | 420 | th_true = th + 0.1 421 | x0_true = af.constants.Rgeo * np.array( 422 | [np.cos(th_true), np.sin(th_true), 0.001] 423 | ) 424 | v0_true = af.constants.Vgeo * np.array( 425 | [-np.sin(th_true), np.cos(th_true), 0.001] 426 | ) 
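        # Offset the acceleration and epoch as well, so that every element of the
        # "true" state deliberately differs from the estimated state defined above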
427 | a0_true = a0 + 1e-5 428 | epoch_true = epoch + 0.1 429 | 430 | sat = madlib.ContinuousThrustSatellite( 431 | epoch, 432 | x0, 433 | v0, 434 | a0, 435 | man, 436 | epoch_true=epoch_true, 437 | pos_true=x0_true, 438 | vel_true=v0_true, 439 | acc_true=a0_true, 440 | ) 441 | 442 | assert sat.epoch != sat.epoch_true 443 | assert sat.x_true is not None 444 | assert ~np.allclose(sat.x, sat.x_true) 445 | assert ~np.allclose(sat.v, sat.v_true) 446 | assert ~np.allclose(sat.acc, sat.acc_true) 447 | 448 | x, v = sat.propagate(epoch + 1.0) 449 | x_true, v_true = sat.propagate(epoch + 1.0, use_true_orbit=True) 450 | 451 | assert ~np.allclose(x, x_true) 452 | assert ~np.allclose(v, v_true) 453 | 454 | def test_setters(self): 455 | self.sat_default.epoch_true = self.epoch_true 456 | self.sat_default.x_true = self.x0_true 457 | self.sat_default.v_true = self.v0_true 458 | self.sat_default.acc_true = self.a0_true 459 | 460 | assert self.sat_default.epoch != self.sat_default.epoch_true 461 | assert self.sat_default.x_true is not None 462 | assert ~np.allclose(self.sat_default.x, self.sat_default.x_true) 463 | assert ~np.allclose(self.sat_default.v, self.sat_default.v_true) 464 | assert ~np.allclose(self.sat_default.acc, self.sat_default.acc_true) 465 | 466 | 467 | class TestCrossTags: 468 | t_start = Time("2011-11-11T11:11:11", format="isot", scale="utc") 469 | epoch = t_start.mjd 470 | sat = madlib.Satellite.from_GEO_longitude(lon=0.0, epoch=epoch) 471 | 472 | def test_zero_cross_tag(self): 473 | cross_sat = self.sat.create_cross_tag( 474 | cross_mjd=self.epoch, 475 | delta_pos_km=np.zeros(3), 476 | delta_vel_kms=np.zeros(3), 477 | ) 478 | 479 | assert np.isclose(cross_sat.epoch, self.sat.epoch, atol=0.1, rtol=0) 480 | assert np.allclose(cross_sat.x, self.sat.x) 481 | assert np.allclose(cross_sat.v, self.sat.v) 482 | assert np.allclose(cross_sat.acc, self.sat.acc) 483 | 484 | x, v = self.sat.propagate(self.epoch + 1) 485 | x_cross, v_cross = cross_sat.propagate(self.epoch + 1) 486 | 487 | assert np.allclose(x, x_cross) 488 | assert np.allclose(v, v_cross) 489 | 490 | def test_nonzero_cross_tag(self): 491 | cross_sat = self.sat.create_cross_tag( 492 | cross_mjd=self.epoch + 0.5, 493 | delta_pos_km=np.array([10, 10, 10]), 494 | delta_vel_kms=np.array([-5, -5, -5]), 495 | ) 496 | 497 | assert ~np.isclose(cross_sat.epoch, self.sat.epoch, atol=0.1, rtol=0) 498 | assert ~np.allclose(cross_sat.x, self.sat.x) 499 | assert ~np.allclose(cross_sat.v, self.sat.v) 500 | 501 | x, v = self.sat.propagate(np.array([self.epoch + 0.5, self.epoch + 1.0])) 502 | x_cross, v_cross = cross_sat.propagate( 503 | np.array([self.epoch + 0.5, self.epoch + 1.0]) 504 | ) 505 | 506 | assert np.allclose(x[0] + 10, x_cross[0], rtol=0, atol=0.1) 507 | assert np.allclose(v[0] - 5, v_cross[0], rtol=0, atol=0.1) 508 | 509 | assert ~np.allclose(x[1], x_cross[1]) 510 | assert ~np.allclose(v[1], v_cross[1]) 511 | -------------------------------------------------------------------------------- /tests/test_hz_launcher.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2024 Massachusetts Institute of Technology 2 | # SPDX-License-Identifier: MIT 3 | 4 | """ 5 | Test file: test_hz_launcher.py 6 | Description: This file contains unit tests for the `hz_launcher` module 7 | """ 8 | 9 | import pathlib 10 | import sys 11 | import numpy as np 12 | import pytest 13 | 14 | # add parent directory of __file__ to sys.path, if isn't already included 15 | if str(pathlib.Path(__file__).parents[1]) not in 
sys.path: 16 | sys.path.append(str(pathlib.Path(__file__).parents[1])) 17 | 18 | from scripts.hz_launcher import simulator_task, parseArgs 19 | from madlib._sensor_collection import SensorCollection 20 | 21 | sensor_yaml_11 = "configs/sample_sensor_network.yaml" 22 | sensor_params_11 = SensorCollection.paramsFromYAML(sensor_yaml_11) 23 | 24 | sensor_yaml_blind = "tests/inputs/blind_sensor.yaml" 25 | sensor_params_blind = SensorCollection.paramsFromYAML(sensor_yaml_blind) 26 | 27 | 28 | class TestSimulator: 29 | """Test behavior of simulator_task function""" 30 | 31 | def test_single_non_maneuver(self): 32 | """Test the outputs of a simple satellite simulation with no maneuvers.""" 33 | seq_id = 0 34 | maneuver_type = 0 35 | sim_duration_days = 2.0 36 | 37 | results = simulator_task( 38 | seq_id=seq_id, 39 | sensor_params=sensor_params_11, 40 | maneuver_type=maneuver_type, 41 | start_mjd=60196.5, 42 | sim_duration_days=sim_duration_days, 43 | random_seed=0, 44 | ) 45 | 46 | assert results is not None 47 | 48 | max_mjd = max(results["MJD"]) 49 | min_mjd = min(results["MJD"]) 50 | 51 | assert isinstance(max_mjd, float) 52 | assert isinstance(min_mjd, float) 53 | 54 | # The following assertions must be true for the given inputs 55 | assert all(results["Maneuver"] == maneuver_type) 56 | assert all(results["Sequence"] == seq_id) 57 | assert max_mjd - min_mjd < sim_duration_days 58 | assert all(results["Maneuver_Start_MJD"].isna()) 59 | assert all(results["Maneuver_End_MJD"].isna()) 60 | assert all(results["Maneuver_DV_Radial_KmS"].isna()) 61 | assert all(results["Maneuver_DV_InTrack_KmS"].isna()) 62 | assert all(results["Maneuver_DV_CrossTrack_KmS"].isna()) 63 | 64 | def test_single_maneuver(self): 65 | """Test the outputs of a simple satellite simulation with an impulsive maneuver.""" 66 | seq_id = 0 67 | maneuver_type = 1 68 | sim_duration_days = 2.0 69 | 70 | results = simulator_task( 71 | seq_id=seq_id, 72 | sensor_params=sensor_params_11, 73 | maneuver_type=maneuver_type, 74 | start_mjd=60196.5, 75 | sim_duration_days=sim_duration_days, 76 | random_seed=0, 77 | ) 78 | 79 | assert results is not None 80 | 81 | max_mjd = max(results["MJD"]) 82 | min_mjd = min(results["MJD"]) 83 | 84 | assert isinstance(max_mjd, float) 85 | assert isinstance(min_mjd, float) 86 | 87 | activated_sensors = list(set(results["SensorID"])) 88 | 89 | # The following assertions must be true for the given inputs 90 | assert all(results["Maneuver"] == maneuver_type) 91 | assert all(results["Sequence"] == seq_id) 92 | assert max_mjd - min_mjd < sim_duration_days 93 | np.testing.assert_allclose( 94 | results["Maneuver_Start_MJD"], 60197.930379, atol=1e-6 95 | ) 96 | assert all(results["Maneuver_End_MJD"].isna()) 97 | np.testing.assert_allclose(results["Maneuver_DV_Radial_KmS"], 0, atol=1e-6) 98 | assert all(np.abs(results["Maneuver_DV_InTrack_KmS"]) > 1e-6) 99 | assert all(np.abs(results["Maneuver_DV_CrossTrack_KmS"]) > 1e-6) 100 | 101 | # The following assertions must be true for the given seed and default sensors 102 | assert len(results) == 34 103 | assert len(activated_sensors) == 3 104 | 105 | def test_start_now(self): 106 | """Test the no-maneuver case when starting epoch is current time.""" 107 | seq_id = 0 108 | maneuver_type = 0 109 | sim_duration_days = 1.0 110 | 111 | results = simulator_task( 112 | seq_id=seq_id, 113 | sensor_params=sensor_params_11, 114 | maneuver_type=maneuver_type, 115 | start_mjd=None, 116 | sim_duration_days=sim_duration_days, 117 | random_seed=0, 118 | ) 119 | 120 | assert results is not 
None 121 | 122 | max_mjd = max(results["MJD"]) 123 | min_mjd = min(results["MJD"]) 124 | 125 | assert isinstance(max_mjd, float) 126 | assert isinstance(min_mjd, float) 127 | 128 | activated_sensors = list(set(results["SensorID"])) 129 | 130 | # The following assertions must be true for the given inputs 131 | assert all(results["Maneuver"] == maneuver_type) 132 | assert all(results["Sequence"] == seq_id) 133 | assert max_mjd - min_mjd < sim_duration_days 134 | 135 | # The following assertions must be true for the given seed and default sensors 136 | assert len(results) > 0 137 | assert len(activated_sensors) > 0 138 | 139 | def test_fixed_continuous(self): 140 | """Test the outputs of a continuous thrust with a fixed vector.""" 141 | seq_id = 0 142 | maneuver_type = 2 143 | cont_thrust_duration_days = 0.25 144 | cont_thrust_model = 0 145 | cont_thrust_mag = 1e-9 146 | sim_duration_days = 2.0 147 | 148 | results = simulator_task( 149 | seq_id=seq_id, 150 | sensor_params=sensor_params_11, 151 | maneuver_type=maneuver_type, 152 | cont_thrust_duration_days=cont_thrust_duration_days, 153 | cont_thrust_mag=cont_thrust_mag, 154 | cont_thrust_model=cont_thrust_model, 155 | start_mjd=60196.5, 156 | sim_duration_days=sim_duration_days, 157 | random_seed=0, 158 | ) 159 | 160 | assert results is not None 161 | 162 | max_mjd = max(results["MJD"]) 163 | min_mjd = min(results["MJD"]) 164 | 165 | assert isinstance(max_mjd, float) 166 | assert isinstance(min_mjd, float) 167 | 168 | activated_sensors = list(set(results["SensorID"])) 169 | 170 | final_ra = results["RA Arcsec"].iloc[-1] 171 | final_dec = results["DEC Arcsec"].iloc[-1] 172 | 173 | # The following assertions must be true for the given inputs 174 | assert all(results["Maneuver"] == maneuver_type) 175 | assert all(results["Sequence"] == seq_id) 176 | assert max_mjd - min_mjd < sim_duration_days 177 | np.testing.assert_allclose(results["Maneuver_Start_MJD"], 60196.5, atol=1e-6) 178 | np.testing.assert_allclose(results["Maneuver_End_MJD"], 60196.75, atol=1e-6) 179 | np.testing.assert_allclose(results["Maneuver_DV_Radial_KmS"], 0, atol=1e-6) 180 | np.testing.assert_allclose( 181 | results["Maneuver_DV_InTrack_KmS"], cont_thrust_mag, atol=1e-6 182 | ) 183 | np.testing.assert_allclose(results["Maneuver_DV_CrossTrack_KmS"], 0, atol=1e-6) 184 | 185 | def test_random_continuous(self): 186 | """Test the outputs of a continuous thrust with a random vector.""" 187 | seq_id = 0 188 | maneuver_type = 2 189 | cont_thrust_mag = 1e-9 190 | cont_thrust_duration_days = 0.5 191 | cont_thrust_model = 1 192 | sim_duration_days = 2.0 193 | 194 | results = simulator_task( 195 | seq_id=seq_id, 196 | sensor_params=sensor_params_11, 197 | maneuver_type=maneuver_type, 198 | cont_thrust_mag=cont_thrust_mag, 199 | cont_thrust_duration_days=cont_thrust_duration_days, 200 | cont_thrust_model=cont_thrust_model, 201 | start_mjd=60196.5, 202 | sim_duration_days=sim_duration_days, 203 | random_seed=0, 204 | ) 205 | 206 | assert results is not None 207 | 208 | max_mjd = max(results["MJD"]) 209 | min_mjd = min(results["MJD"]) 210 | 211 | assert isinstance(max_mjd, float) 212 | assert isinstance(min_mjd, float) 213 | 214 | # The following assertions must be true for the given inputs 215 | assert all(results["Maneuver"] == maneuver_type) 216 | assert all(results["Sequence"] == seq_id) 217 | assert max_mjd - min_mjd < sim_duration_days 218 | np.testing.assert_allclose(results["Maneuver_Start_MJD"], 60196.5, atol=1e-6) 219 | # By default, continuous mnvr duration should be equal to 
simulation duration 220 | np.testing.assert_allclose(results["Maneuver_End_MJD"], 60197.0, atol=1e-6) 221 | np.testing.assert_allclose( 222 | results["Maneuver_DV_Radial_KmS"], 2.605e-10, atol=1e-12 223 | ) 224 | np.testing.assert_allclose( 225 | results["Maneuver_DV_InTrack_KmS"], 5.454e-10, atol=1e-12 226 | ) 227 | np.testing.assert_allclose( 228 | results["Maneuver_DV_CrossTrack_KmS"], -7.967e-10, atol=1e-12 229 | ) 230 | 231 | def test_invalid_continuous(self): 232 | """Test the outputs of a continuous thrust with an invalid thrust mode.""" 233 | seq_id = 0 234 | maneuver_type = 2 235 | cont_thrust_model = 3 236 | sim_duration_days = 0.5 237 | 238 | failed = False 239 | try: 240 | results = simulator_task( 241 | seq_id=seq_id, 242 | sensor_params=sensor_params_11, 243 | maneuver_type=maneuver_type, 244 | cont_thrust_model=cont_thrust_model, 245 | start_mjd=60196.5, 246 | sim_duration_days=sim_duration_days, 247 | random_seed=0, 248 | ) 249 | except ValueError: 250 | failed = True 251 | 252 | assert failed 253 | 254 | def test_invalid_thrust_mode(self): 255 | """Test that an invalid thrust mode returns None""" 256 | seq_id = 0 257 | maneuver_type = 3 258 | cont_thrust_model = 1 259 | sim_duration_days = 0.5 260 | 261 | results = simulator_task( 262 | seq_id=seq_id, 263 | sensor_params=sensor_params_11, 264 | maneuver_type=maneuver_type, 265 | cont_thrust_model=cont_thrust_model, 266 | start_mjd=60196.5, 267 | sim_duration_days=sim_duration_days, 268 | random_seed=0, 269 | ) 270 | 271 | assert results is None 272 | 273 | def test_no_obs(self): 274 | """Test that a simulation with no observations returns None""" 275 | seq_id = 0 276 | maneuver_type = 0 277 | sim_duration_days = 0.5 278 | 279 | results = simulator_task( 280 | seq_id=seq_id, 281 | sensor_params=sensor_params_blind, 282 | maneuver_type=maneuver_type, 283 | start_mjd=60196.5, 284 | sim_duration_days=sim_duration_days, 285 | random_seed=0, 286 | ) 287 | 288 | assert results is None 289 | 290 | 291 | class TestArgs: 292 | """Test behavior of argument parsing""" 293 | 294 | def test_defaults(self): 295 | """Test default argument values""" 296 | inputs = ["0", "sensor_yaml_path"] 297 | 298 | parser = parseArgs() 299 | args = parser.parse_args(inputs) 300 | 301 | # Count number of input arguments 302 | # (This is to make sure you update the tests when you add an arg) 303 | assert len(args.__dict__) == 19 304 | 305 | assert isinstance(args.num_pairs, int) 306 | assert args.num_pairs == 0 307 | assert isinstance(args.sensor_yaml, str) 308 | assert args.sensor_yaml == "sensor_yaml_path" 309 | assert isinstance(args.start_mjd, float) 310 | np.testing.assert_almost_equal(args.start_mjd, -1, decimal=7) 311 | assert isinstance(args.sim_duration_days, float) 312 | np.testing.assert_almost_equal(args.sim_duration_days, 3.0, decimal=7) 313 | assert isinstance(args.mtype, str) 314 | assert args.mtype == "impulse" 315 | assert args.cont_thrust_duration_days is None 316 | assert isinstance(args.cont_thrust_model, int) 317 | assert args.cont_thrust_model == 0 318 | assert isinstance(args.cont_thrust_mag, float) 319 | np.testing.assert_almost_equal(args.cont_thrust_mag, 1e-7, decimal=9) 320 | assert isinstance(args.outdir, str) 321 | assert args.outdir == "outputs" 322 | assert isinstance(args.submitit, str) 323 | assert args.submitit == "" 324 | assert isinstance(args.multirun_root, str) 325 | assert args.multirun_root == "" 326 | assert isinstance(args.pred_err, float) 327 | np.testing.assert_almost_equal(args.pred_err, 0, decimal=7) 328 | 
assert isinstance(args.rm_multirun_root, bool) 329 | assert not args.rm_multirun_root 330 | assert isinstance(args.overwrite, bool) 331 | assert not args.overwrite 332 | 333 | dv_mean = args.dv_ric_mean_kms 334 | dv_std = args.dv_ric_std_kms 335 | 336 | assert isinstance(dv_mean, list) 337 | assert isinstance(dv_std, list) 338 | assert len(dv_mean) == 3 339 | assert len(dv_std) == 3 340 | np.testing.assert_almost_equal(dv_mean[0], 0, decimal=7) 341 | np.testing.assert_almost_equal(dv_mean[1], 0, decimal=7) 342 | np.testing.assert_almost_equal(dv_mean[2], 0, decimal=7) 343 | np.testing.assert_almost_equal(dv_std[0], 0, decimal=7) 344 | np.testing.assert_almost_equal(dv_std[1], 0.1, decimal=7) 345 | np.testing.assert_almost_equal(dv_std[2], 1, decimal=7) 346 | 347 | def test_inputs(self): 348 | """Test that input values work as expected""" 349 | inputs = [ 350 | "10", 351 | "sensor_yaml_path", 352 | "--start_mjd", 353 | "1000", 354 | "--sim_duration_days", 355 | "5.0", 356 | "--mtype", 357 | "continuous", 358 | "--dv_ric_mean_kms", 359 | "2", 360 | "2", 361 | "2", 362 | "--dv_ric_std_kms", 363 | "5", 364 | "5", 365 | "5", 366 | "--sensor_dra", 367 | "1", 368 | "--sensor_ddec", 369 | "1", 370 | "--cont_thrust_duration_days", 371 | "2.5", 372 | "--cont_thrust_model", 373 | "1", 374 | "--cont_thrust_mag", 375 | "1e-3", 376 | "--outdir", 377 | "test1", 378 | "--submitit", 379 | "test2", 380 | "--multirun_root", 381 | "test3", 382 | "--pred_err", 383 | "1e-3", 384 | "--rm_multirun_root", 385 | "--overwrite", 386 | "--sims_per_task", 387 | "10", 388 | ] 389 | 390 | parser = parseArgs() 391 | args = parser.parse_args(inputs) 392 | 393 | # Count number of input arguments 394 | # (This is to make sure you update the tests when you add an arg) 395 | assert len(args.__dict__) == 19 396 | 397 | assert isinstance(args.num_pairs, int) 398 | assert isinstance(args.sensor_yaml, str) 399 | assert args.sensor_yaml == "sensor_yaml_path" 400 | assert args.num_pairs == 10 401 | assert isinstance(args.start_mjd, float) 402 | np.testing.assert_almost_equal(args.start_mjd, 1000, decimal=7) 403 | assert isinstance(args.sim_duration_days, float) 404 | np.testing.assert_almost_equal(args.sim_duration_days, 5.0, decimal=7) 405 | assert isinstance(args.mtype, str) 406 | assert args.mtype == "continuous" 407 | assert isinstance(args.sensor_dra, float) 408 | np.testing.assert_almost_equal(args.sensor_dra, 1.0, decimal=7) 409 | assert isinstance(args.sensor_ddec, float) 410 | np.testing.assert_almost_equal(args.sensor_ddec, 1.0, decimal=7) 411 | assert isinstance(args.cont_thrust_duration_days, float) 412 | np.testing.assert_almost_equal(args.cont_thrust_duration_days, 2.5) 413 | assert isinstance(args.cont_thrust_model, int) 414 | assert args.cont_thrust_model == 1 415 | assert isinstance(args.cont_thrust_mag, float) 416 | np.testing.assert_almost_equal(args.cont_thrust_mag, 1e-3, decimal=9) 417 | assert isinstance(args.outdir, str) 418 | assert args.outdir == "test1" 419 | assert isinstance(args.submitit, str) 420 | assert args.submitit == "test2" 421 | assert isinstance(args.multirun_root, str) 422 | assert args.multirun_root == "test3" 423 | assert isinstance(args.pred_err, float) 424 | np.testing.assert_almost_equal(args.pred_err, 1e-3, decimal=9) 425 | assert isinstance(args.rm_multirun_root, bool) 426 | assert args.rm_multirun_root 427 | assert isinstance(args.overwrite, bool) 428 | assert args.overwrite 429 | assert isinstance(args.sims_per_task, int) 430 | assert args.sims_per_task == 10 431 | 432 | dv_mean = 
args.dv_ric_mean_kms 433 | dv_std = args.dv_ric_std_kms 434 | 435 | assert isinstance(dv_mean, list) 436 | assert isinstance(dv_std, list) 437 | assert len(dv_mean) == 3 438 | assert len(dv_std) == 3 439 | np.testing.assert_almost_equal(dv_mean[0], 2, decimal=7) 440 | np.testing.assert_almost_equal(dv_mean[1], 2, decimal=7) 441 | np.testing.assert_almost_equal(dv_mean[2], 2, decimal=7) 442 | np.testing.assert_almost_equal(dv_std[0], 5, decimal=7) 443 | np.testing.assert_almost_equal(dv_std[1], 5, decimal=7) 444 | np.testing.assert_almost_equal(dv_std[2], 5, decimal=7) 445 | 446 | class TestRicLengths: 447 | """Make sure the RIC inputs can only have length 3""" 448 | 449 | def test_mean_short(self): 450 | """dv_ric_mean must have 3 inputs""" 451 | inputs = [ 452 | "10", 453 | "sensor_yaml_path", 454 | "--dv_ric_mean_kms", 455 | "2", 456 | "2", 457 | ] 458 | 459 | parser = parseArgs() 460 | 461 | with pytest.raises(SystemExit): 462 | parser.parse_args(inputs) 463 | 464 | def test_mean_long(self): 465 | """dv_ric_mean must have 3 inputs""" 466 | inputs = [ 467 | "10", 468 | "sensor_yaml_path", 469 | "--dv_ric_mean_kms", 470 | "2", 471 | "2", 472 | "2", 473 | "2", 474 | ] 475 | 476 | parser = parseArgs() 477 | 478 | with pytest.raises(SystemExit): 479 | parser.parse_args(inputs) 480 | 481 | def test_std_short(self): 482 | """dv_ric_std must have 3 inputs""" 483 | inputs = [ 484 | "10", 485 | "sensor_yaml_path", 486 | "--dv_ric_std_kms", 487 | "2", 488 | "2", 489 | ] 490 | 491 | parser = parseArgs() 492 | 493 | with pytest.raises(SystemExit): 494 | parser.parse_args(inputs) 495 | 496 | def test_std_long(self): 497 | """dv_ric_std must have 3 inputs""" 498 | inputs = [ 499 | "10", 500 | "sensor_yaml_path", 501 | "--dv_ric_std_kms", 502 | "2", 503 | "2", 504 | "2", 505 | "2", 506 | ] 507 | 508 | parser = parseArgs() 509 | 510 | with pytest.raises(SystemExit): 511 | parser.parse_args(inputs) 512 | --------------------------------------------------------------------------------
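
For reference, a minimal sketch of how `simulator_task` might be driven outside pytest, based only on the calls made in `tests/test_hz_launcher.py`. It assumes the package is importable and that the command is run from the repository root (so the relative YAML path resolves); the returned object behaves like a pandas DataFrame in the tests, and `None` is returned when no sensor ever observes the satellite.

```python
# Usage sketch (not part of the test suite): drives simulator_task the same
# way the unit tests above do. Paths, argument names, and column names are
# taken from tests/test_hz_launcher.py; everything else is illustrative.
from madlib._sensor_collection import SensorCollection
from scripts.hz_launcher import simulator_task

# Load the sample sensor network shipped with the repo (same file the tests use).
sensor_params = SensorCollection.paramsFromYAML("configs/sample_sensor_network.yaml")

# Run one non-maneuvering sequence for two simulated days.
results = simulator_task(
    seq_id=0,
    sensor_params=sensor_params,
    maneuver_type=0,  # 0 = no maneuver, 1 = impulsive, 2 = continuous thrust
    start_mjd=60196.5,
    sim_duration_days=2.0,
    random_seed=0,
)

# simulator_task returns None when the satellite is never observed.
if results is not None:
    print(results[["MJD", "SensorID", "Maneuver", "Sequence"]].head())
```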
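
Similarly, the satellite tests above exercise a small construct-and-propagate API. The sketch below strings together only the calls those tests make (`Satellite.from_GEO_longitude`, `propagate`, `create_cross_tag`), with epochs given as MJD floats as in the tests; it is illustrative rather than a reference implementation.

```python
import numpy as np
from astropy.time import Time

import madlib

# Epoch handling mirrors the tests: astropy Time -> MJD float.
epoch = Time("2011-11-11T11:11:11", format="isot", scale="utc").mjd

# Build a GEO satellite directly from a longitude (as in TestCrossTags).
sat = madlib.Satellite.from_GEO_longitude(lon=0.0, epoch=epoch)

# Propagate one day ahead; propagate returns position and velocity.
x, v = sat.propagate(epoch + 1.0)

# Spawn a cross-tagged neighbor offset in position/velocity at a later epoch.
cross_sat = sat.create_cross_tag(
    cross_mjd=epoch + 0.5,
    delta_pos_km=np.array([10.0, 10.0, 10.0]),
    delta_vel_kms=np.array([-5.0, -5.0, -5.0]),
)
x_cross, v_cross = cross_sat.propagate(epoch + 1.0)
```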