├── __init__.py
├── logger
├── __init__.py
├── listener
│ └── __init__.py
├── utils
│ ├── __init__.py
│ ├── truewinds
│ │ └── __init__.py
│ └── stderr_logging.py
├── transforms
│ ├── README
│ ├── derived_data_transform.py
│ ├── unique_transform.py
│ ├── regex_filter_transform.py
│ ├── regex_replace_transform.py
│ ├── split_transform.py
│ ├── __init__.py
│ ├── to_json_transform.py
│ ├── extract_field_transform.py
│ ├── from_json_transform.py
│ ├── timestamp_transform.py
│ ├── parse_nmea_transform.py
│ ├── count_transform.py
│ └── to_das_record_transform.py
├── readers
│ ├── network_reader.py
│ ├── __init__.py
│ └── redis_reader.py
└── writers
│ ├── network_writer.py
│ ├── __init__.py
│ ├── record_screen_writer.py
│ └── redis_writer.py
├── server
├── __init__.py
├── README.md
└── supervisord
│ ├── supervisor.d
│ └── openrvdas.ini
│ └── supervisord.ini
├── test
├── __init__.py
├── logger
│ ├── __init__.py
│ ├── listener
│ │ └── __init__.py
│ ├── readers
│ │ └── __init__.py
│ ├── utils
│ │ ├── __init__.py
│ │ ├── test_timestamp.py
│ │ ├── test.crt
│ │ ├── test.key
│ │ ├── test_formats.py
│ │ └── test_das_record.py
│ ├── writers
│ │ ├── __init__.py
│ │ ├── test_writer.py
│ │ ├── test_timeout_writer.py
│ │ └── test_websocket_writer.py
│ └── transforms
│ │ ├── __init__.py
│ │ ├── test_transform.py
│ │ ├── test_split_transform.py
│ │ ├── test_extract_field_transform.py
│ │ ├── test_nmea_checksum_transform.py
│ │ ├── test_prefix_transform.py
│ │ ├── test_count_transform.py
│ │ ├── test_regex_filter_transform.py
│ │ ├── test_strip_transform.py
│ │ ├── test_max_min_transform.py
│ │ ├── test_timestamp_transform.py
│ │ ├── test_slice_transform.py
│ │ ├── test_to_das_record.py
│ │ ├── test_from_json_transform.py
│ │ └── test_nmea_transform.py
├── server
│ └── __init__.py
├── database
│ └── __init__.py
├── django_gui
│ └── __init__.py
├── configs
│ ├── sample_logger_manager_writer.yaml
│ ├── parallel_logger.yaml
│ ├── composed_logger.yaml
│ ├── simple_logger.yaml
│ └── sample_configs.yaml
└── NBP1406
│ ├── devices
│ └── HydroDasNBP.yaml
│ └── simulate_NBP1406.yaml
├── utils
├── __init__.py
├── jinja_config_creator
│ ├── test_files
│ │ ├── .gitignore
│ │ └── sample_devices.yaml
│ ├── README.md
│ └── cruise_config_generator.py
├── requirements_mysql.txt
└── requirements.txt
├── database
├── __init__.py
├── influxdb
│ └── settings.py.dist
├── setup_mysql_connector.sh
├── setup_postgresql_connector.sh
├── setup_mongo_connector.sh
├── mongo_settings.py.dist
└── settings.py.dist
├── django_gui
├── __init__.py
├── apps.py
├── static
│ └── django_gui
│ │ ├── favicon.ico
│ │ ├── json-viewer
│ │ ├── README.md
│ │ └── json-viewer.css
│ │ ├── widget.html.js
│ │ └── stderr_log_utils.js
├── README.md
├── templates
│ ├── base.html
│ ├── admin
│ │ └── base_site.html
│ └── django_gui
│ │ ├── base.html
│ │ ├── login.html
│ │ ├── widget.html
│ │ ├── choose_file.html
│ │ └── change_mode.html
├── wsgi.py
└── urls.py
├── local
├── devices
│ ├── AIS.yaml
│ ├── Garmin.yaml
│ ├── Knudsen.yaml
│ ├── Seapath.yaml
│ ├── Trimble.yaml
│ ├── EngineeringNBP.yaml
│ ├── misc_device_types.yaml
│ └── DEPRECATED.md
└── logger_templates
│ ├── udp_logger_template.yaml
│ ├── serial_logger_template.yaml
│ ├── snapshot_logger_template.yaml
│ ├── parse_data_logger_template.yaml
│ ├── true_winds_logger_template.yaml
│ └── DEPRECATED.md
├── display
├── css
│ ├── leaflet
│ │ └── README.md
│ └── map_demo.css
├── js
│ ├── leaflet
│ │ └── README.md
│ ├── widgets
│ │ └── settings.js.dist
│ ├── d3
│ │ ├── LICENSE
│ │ └── README.md
│ └── highcharts
│ │ └── code
│ │ └── modules
│ │ └── solid-gauge.js
├── images
│ ├── demo_html.png
│ └── map_demo_html.png
├── README.md
└── html
│ └── map_demo.html
├── docs
├── images
│ ├── log_in.png
│ ├── supervisor.png
│ ├── dual_writer.png
│ ├── nbp_dashboard.png
│ ├── nbp_display.png
│ ├── nbp_initial.png
│ ├── nbp_mode_off.png
│ ├── nbp_running.png
│ ├── s330_yellow.png
│ ├── composed_reader.png
│ ├── composed_writer.png
│ ├── nbp_change_mode.png
│ ├── network_writer.png
│ ├── general_dataflow.png
│ ├── generic_listener.png
│ ├── grafana_dashboard.png
│ ├── nbp_basic_display.png
│ ├── sample_cruise_off.png
│ ├── sample_cruise_port.png
│ ├── django_gui_messages.png
│ ├── local_supervisor_web.png
│ ├── read_transform_write.png
│ ├── sample_cruise_widget.png
│ ├── display_widgets_small.png
│ ├── logger_manager_diagram.png
│ ├── no_configuration_loaded.png
│ ├── sample_cruise_edit_s330.png
│ ├── sample_cruise_running.png
│ ├── using_a_composed_writer.png
│ ├── django_gui_logger_status.png
│ ├── django_gui_static_widget.png
│ ├── web_based_logger_manager.png
│ ├── console_based_logger_manager.png
│ ├── sample_cruise_edit_s330_small.png
│ └── sample_cruise_select_logger_config.png
├── html
│ └── README.md
├── INSTALL.md
├── security.md
├── README.md
└── secure_websockets.md
├── contrib
├── raspberrypi
│ ├── README.md
│ └── readers
│ │ ├── test_onewire_reader.py
│ │ ├── test_bme280_reader.py
│ │ └── test_bme688_reader.py
├── utils
│ └── JSON_YAML_Creator
│ │ ├── public
│ │ ├── robots.txt
│ │ ├── favicon.ico
│ │ ├── logo192.png
│ │ ├── logo512.png
│ │ ├── manifest.json
│ │ └── index.html
│ │ ├── src
│ │ ├── setupTests.js
│ │ ├── index.js
│ │ ├── App.test.js
│ │ ├── App.js
│ │ ├── App.css
│ │ └── components
│ │ │ ├── index.css
│ │ │ ├── KWArgs.js
│ │ │ └── JSONYAMLOutput.js
│ │ ├── README.md
│ │ └── package.json
├── logger_templates
│ ├── serial_logger_template_new.yaml
│ ├── parse_data_logger_template.yaml
│ ├── udp_logger_template.yaml
│ ├── serial_logger_template.yaml
│ ├── snapshot_logger_template.yaml
│ ├── true_winds_logger_template.yaml
│ └── calibration_logger_template.yaml
├── devices
│ ├── AIS.yaml
│ ├── Knudsen.yaml
│ └── EngineeringNBP.yaml
├── config_templates
│ └── serial_reader_config_template.yaml
├── csiro
│ └── test
│ │ └── Regex_device_catalogue.yaml
└── README.md
├── .flake8
├── manage.py
├── .github
└── ISSUE_TEMPLATE
│ ├── feature_request.md
│ └── bug_report.md
└── .gitignore
/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/logger/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/server/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/test/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/database/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/django_gui/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/test/logger/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/test/server/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/logger/listener/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/logger/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/test/database/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/test/django_gui/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/test/logger/listener/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/test/logger/readers/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/test/logger/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/test/logger/writers/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/logger/utils/truewinds/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/test/logger/transforms/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/local/devices/AIS.yaml:
--------------------------------------------------------------------------------
1 | ../../contrib/devices/AIS.yaml
--------------------------------------------------------------------------------
/local/devices/Garmin.yaml:
--------------------------------------------------------------------------------
1 | ../../contrib/devices/Garmin.yaml
--------------------------------------------------------------------------------
/local/devices/Knudsen.yaml:
--------------------------------------------------------------------------------
1 | ../../contrib/devices/Knudsen.yaml
--------------------------------------------------------------------------------
/local/devices/Seapath.yaml:
--------------------------------------------------------------------------------
1 | ../../contrib/devices/Seapath.yaml
--------------------------------------------------------------------------------
/local/devices/Trimble.yaml:
--------------------------------------------------------------------------------
1 | ../../contrib/devices/Trimble.yaml
--------------------------------------------------------------------------------
/utils/jinja_config_creator/test_files/.gitignore:
--------------------------------------------------------------------------------
1 | sample_cruise.yaml
--------------------------------------------------------------------------------
/local/devices/EngineeringNBP.yaml:
--------------------------------------------------------------------------------
1 | ../../contrib/devices/EngineeringNBP.yaml
--------------------------------------------------------------------------------
/local/devices/misc_device_types.yaml:
--------------------------------------------------------------------------------
1 | ../../contrib/devices/misc_device_types.yaml
--------------------------------------------------------------------------------
/display/css/leaflet/README.md:
--------------------------------------------------------------------------------
1 | Retrieved from https://unpkg.com/leaflet@1.3.4/dist/leaflet.css
2 |
--------------------------------------------------------------------------------
/display/js/leaflet/README.md:
--------------------------------------------------------------------------------
1 | Retrieved from https://unpkg.com/leaflet@1.3.4/dist/leaflet.js
2 |
--------------------------------------------------------------------------------
/local/logger_templates/udp_logger_template.yaml:
--------------------------------------------------------------------------------
1 | ../../contrib/logger_templates/udp_logger_template.yaml
--------------------------------------------------------------------------------
/docs/images/log_in.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/log_in.png
--------------------------------------------------------------------------------
/local/logger_templates/serial_logger_template.yaml:
--------------------------------------------------------------------------------
1 | ../../contrib/logger_templates/serial_logger_template.yaml
--------------------------------------------------------------------------------
/contrib/raspberrypi/README.md:
--------------------------------------------------------------------------------
1 | This directory is for code that supports running OpenRVDAS on a Raspberry Pi.
2 |
--------------------------------------------------------------------------------
/docs/images/supervisor.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/supervisor.png
--------------------------------------------------------------------------------
/local/logger_templates/snapshot_logger_template.yaml:
--------------------------------------------------------------------------------
1 | ../../contrib/logger_templates/snapshot_logger_template.yaml
--------------------------------------------------------------------------------
/display/images/demo_html.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/display/images/demo_html.png
--------------------------------------------------------------------------------
/docs/images/dual_writer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/dual_writer.png
--------------------------------------------------------------------------------
/docs/images/nbp_dashboard.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/nbp_dashboard.png
--------------------------------------------------------------------------------
/docs/images/nbp_display.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/nbp_display.png
--------------------------------------------------------------------------------
/docs/images/nbp_initial.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/nbp_initial.png
--------------------------------------------------------------------------------
/docs/images/nbp_mode_off.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/nbp_mode_off.png
--------------------------------------------------------------------------------
/docs/images/nbp_running.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/nbp_running.png
--------------------------------------------------------------------------------
/docs/images/s330_yellow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/s330_yellow.png
--------------------------------------------------------------------------------
/local/logger_templates/parse_data_logger_template.yaml:
--------------------------------------------------------------------------------
1 | ../../contrib/logger_templates/parse_data_logger_template.yaml
--------------------------------------------------------------------------------
/local/logger_templates/true_winds_logger_template.yaml:
--------------------------------------------------------------------------------
1 | ../../contrib/logger_templates/true_winds_logger_template.yaml
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | exclude = .git,docs,venv,django_gui/migrations
3 | max-line-length=100
4 | # max-complexity = 10
5 |
--------------------------------------------------------------------------------
/docs/images/composed_reader.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/composed_reader.png
--------------------------------------------------------------------------------
/docs/images/composed_writer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/composed_writer.png
--------------------------------------------------------------------------------
/docs/images/nbp_change_mode.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/nbp_change_mode.png
--------------------------------------------------------------------------------
/docs/images/network_writer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/network_writer.png
--------------------------------------------------------------------------------
/contrib/utils/JSON_YAML_Creator/public/robots.txt:
--------------------------------------------------------------------------------
1 | # https://www.robotstxt.org/robotstxt.html
2 | User-agent: *
3 | Disallow:
4 |
--------------------------------------------------------------------------------
/display/images/map_demo_html.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/display/images/map_demo_html.png
--------------------------------------------------------------------------------
/docs/images/general_dataflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/general_dataflow.png
--------------------------------------------------------------------------------
/docs/images/generic_listener.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/generic_listener.png
--------------------------------------------------------------------------------
/docs/images/grafana_dashboard.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/grafana_dashboard.png
--------------------------------------------------------------------------------
/docs/images/nbp_basic_display.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/nbp_basic_display.png
--------------------------------------------------------------------------------
/docs/images/sample_cruise_off.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/sample_cruise_off.png
--------------------------------------------------------------------------------
/docs/images/sample_cruise_port.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/sample_cruise_port.png
--------------------------------------------------------------------------------
/django_gui/apps.py:
--------------------------------------------------------------------------------
1 | from django.apps import AppConfig
2 |
3 |
4 | class DjangoGuiConfig(AppConfig):
5 | name = 'django_gui'
6 |
--------------------------------------------------------------------------------
/docs/images/django_gui_messages.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/django_gui_messages.png
--------------------------------------------------------------------------------
/docs/images/local_supervisor_web.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/local_supervisor_web.png
--------------------------------------------------------------------------------
/docs/images/read_transform_write.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/read_transform_write.png
--------------------------------------------------------------------------------
/docs/images/sample_cruise_widget.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/sample_cruise_widget.png
--------------------------------------------------------------------------------
/docs/images/display_widgets_small.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/display_widgets_small.png
--------------------------------------------------------------------------------
/docs/images/logger_manager_diagram.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/logger_manager_diagram.png
--------------------------------------------------------------------------------
/docs/images/no_configuration_loaded.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/no_configuration_loaded.png
--------------------------------------------------------------------------------
/docs/images/sample_cruise_edit_s330.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/sample_cruise_edit_s330.png
--------------------------------------------------------------------------------
/docs/images/sample_cruise_running.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/sample_cruise_running.png
--------------------------------------------------------------------------------
/docs/images/using_a_composed_writer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/using_a_composed_writer.png
--------------------------------------------------------------------------------
/django_gui/static/django_gui/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/django_gui/static/django_gui/favicon.ico
--------------------------------------------------------------------------------
/docs/images/django_gui_logger_status.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/django_gui_logger_status.png
--------------------------------------------------------------------------------
/docs/images/django_gui_static_widget.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/django_gui_static_widget.png
--------------------------------------------------------------------------------
/docs/images/web_based_logger_manager.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/web_based_logger_manager.png
--------------------------------------------------------------------------------
/docs/images/console_based_logger_manager.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/console_based_logger_manager.png
--------------------------------------------------------------------------------
/docs/images/sample_cruise_edit_s330_small.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/sample_cruise_edit_s330_small.png
--------------------------------------------------------------------------------
/contrib/utils/JSON_YAML_Creator/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/contrib/utils/JSON_YAML_Creator/public/favicon.ico
--------------------------------------------------------------------------------
/contrib/utils/JSON_YAML_Creator/public/logo192.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/contrib/utils/JSON_YAML_Creator/public/logo192.png
--------------------------------------------------------------------------------
/contrib/utils/JSON_YAML_Creator/public/logo512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/contrib/utils/JSON_YAML_Creator/public/logo512.png
--------------------------------------------------------------------------------
/docs/images/sample_cruise_select_logger_config.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OceanDataTools/openrvdas/HEAD/docs/images/sample_cruise_select_logger_config.png
--------------------------------------------------------------------------------
/django_gui/README.md:
--------------------------------------------------------------------------------
1 | # OpenRVDAS Django GUI
2 |
3 | Please see the [OpenRVDAS Django Web Interface document](../docs/django_interface.md) in the OpenRVDAS [docs directory](../docs).
4 |
--------------------------------------------------------------------------------
/local/devices/DEPRECATED.md:
--------------------------------------------------------------------------------
1 | # This directory is deprecated
2 |
3 | Please refer to definitions in contrib/devices/ for device type definitions. Symlinks to those definitions are retained here for backwards compatibility only.
4 |
--------------------------------------------------------------------------------
/logger/transforms/README:
--------------------------------------------------------------------------------
1 | NOTE: Certain bits of code, like ComposedReader, assume that all
2 | transforms are threadsafe and, if they contain any critical sections,
3 | implement thread-based locks to prevent re-entry-based mischief.
4 |
--------------------------------------------------------------------------------
/local/logger_templates/DEPRECATED.md:
--------------------------------------------------------------------------------
1 | # This directory is deprecated
2 |
3 | Please refer to definitions in contrib/logger_templates/ for logger template definitions. Symlinks to those definitions are retained here for backwards compatibility only.
4 |
--------------------------------------------------------------------------------
/contrib/utils/JSON_YAML_Creator/src/setupTests.js:
--------------------------------------------------------------------------------
1 | // jest-dom adds custom jest matchers for asserting on DOM nodes.
2 | // allows you to do things like:
3 | // expect(element).toHaveTextContent(/react/i)
4 | // learn more: https://github.com/testing-library/jest-dom
5 | import '@testing-library/jest-dom/extend-expect';
6 |
--------------------------------------------------------------------------------
/utils/requirements_mysql.txt:
--------------------------------------------------------------------------------
1 | # Python/pip requirements for MySQL functionality in OpenRVDAS. Invoked on
2 | # installation by the script utils/install_openrvdas.sh. See also requirements.txt.
3 |
4 | # Flags
5 | --trusted-host pypi.org
6 | --trusted-host files.pythonhosted.org
7 |
8 | # Packages
9 | mysqlclient
10 | mysql-connector
--------------------------------------------------------------------------------
/contrib/utils/JSON_YAML_Creator/src/index.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import ReactDOM from 'react-dom';
3 | import App from './App';
4 |
5 | // Everything starts here. Calls the App component
6 | ReactDOM.render(
7 |
8 |
9 | ,
10 | document.getElementById('root')
11 | );
12 |
--------------------------------------------------------------------------------
/contrib/utils/JSON_YAML_Creator/src/App.test.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import { render } from '@testing-library/react';
3 | import App from './App';
4 |
5 | test('renders learn react link', () => {
6 | const { getByText } = render( );
7 | const linkElement = getByText(/learn react/i);
8 | expect(linkElement).toBeInTheDocument();
9 | });
10 |
--------------------------------------------------------------------------------
/django_gui/templates/base.html:
--------------------------------------------------------------------------------
1 | {% extends "admin/base.html" %}
2 |
3 | {% block title %}{{ title }} | {{ site_title|default:_('OpenRVDAS site admin') }}{% endblock %}
4 |
5 | {% block branding %}
6 |
7 | {% endblock %}
8 |
9 | {% block nav-global %}{% endblock %}
10 |
--------------------------------------------------------------------------------
/django_gui/templates/admin/base_site.html:
--------------------------------------------------------------------------------
1 | {% extends "admin/base.html" %}
2 |
3 | {% block title %}{{ title }} | {{ site_title|default:_('OpenRVDAS site admin') }}{% endblock %}
4 |
5 | {% block branding %}
6 |
7 | {% endblock %}
8 |
9 | {% block nav-global %}{% endblock %}
10 |
--------------------------------------------------------------------------------
/contrib/utils/JSON_YAML_Creator/README.md:
--------------------------------------------------------------------------------
1 | # Pre-requisites:
2 |
3 | ## Node. Download at https://nodejs.org/en/download/
4 |
5 | ## To run JSON/YAML Creator
6 |
7 | cd to project directory and run:
8 |
9 | ### `npm install`
10 |
11 | then:
12 |
13 | ### `npm start`
14 |
15 | Browser should open on its own, but if not, load in browser:
16 |
17 | ### localhost:3000
18 |
--------------------------------------------------------------------------------
/server/README.md:
--------------------------------------------------------------------------------
1 | # OpenRVDAS Servers
2 |
3 | ## Overview
4 |
5 | Please see the [README.md file in the parent directory](../README.md)
6 | for an introduction to the OpenRVDAS system, and the [Controlling
7 | OpenRVDAS Loggers](../docs/controlling_loggers.md) and [Cached Data
8 | Server](../docs/cached_data_server.md) documents for information on
9 | the servers defined in this directory.
10 |
--------------------------------------------------------------------------------
/contrib/utils/JSON_YAML_Creator/src/App.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import './App.css';
3 | import Menu from './components/Menu';
4 | import './components/index.css';
5 |
6 | // Main component that brings subcomponents
7 | function App() {
8 | return (
9 |
10 |
YAML/JSON Creator
11 |
12 |
13 | );
14 | }
15 |
16 | export default App;
17 |
--------------------------------------------------------------------------------
/django_gui/wsgi.py:
--------------------------------------------------------------------------------
1 | """
2 | WSGI config for openrvdas project.
3 |
4 | It exposes the WSGI callable as a module-level variable named ``application``.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
8 | """
9 |
10 | import os
11 |
12 | from django.core.wsgi import get_wsgi_application
13 |
# Point Django at this project's settings unless already configured.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_gui.settings")

# Module-level WSGI callable that WSGI servers look up by name.
application = get_wsgi_application()
17 |
--------------------------------------------------------------------------------
/utils/jinja_config_creator/test_files/sample_devices.yaml:
--------------------------------------------------------------------------------
1 | # NOTE: This is a sample only
2 |
3 | cruise:
4 | id: cruise_1
5 | start: 'yyyy-mm-dd'
6 | end: 'yyyy-mm-dd'
7 |
8 | defaults:
9 | sourceip: '224.0.36.0'
10 | definition_path: /opt/openrvdas/contrib/devices/devices.yaml
11 | file_pathbase: /home/rvdas #base path for TextFileWriter filename & LogFileWriter filebase kwargs
12 | prefix: shipname
13 |
14 | devices:
15 | winch:
16 | port: 50000
17 |
18 | met:
19 | port: 50012
20 |
--------------------------------------------------------------------------------
/database/influxdb/settings.py.dist:
--------------------------------------------------------------------------------
1 | """
2 | Settings for InfluxDB operations.
3 |
4 | THIS FILE MUST BE COPIED OVER TO database/influxdb/settings.py to be
5 | operational.
6 |
7 | See below for additional database-specific install requirements
8 | """
9 | # flake8: noqa E502
10 |
################################################################################
# InfluxDB settings
INFLUXDB_URL = 'http://localhost:8086'   # base URL of the InfluxDB server
INFLUXDB_ORG = 'openrvdas'               # InfluxDB organization name
INFLUXDB_BUCKET = 'openrvdas'            # bucket to write records into
# Placeholder value -- presumably replaced with a real token when this file
# is copied to database/influxdb/settings.py; confirm against the installer.
INFLUXDB_AUTH_TOKEN = 'DEFAULT_INFLUXDB_AUTH_TOKEN'
INFLUXDB_VERIFY_SSL = False              # don't verify the server's TLS cert
18 |
--------------------------------------------------------------------------------
/manage.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import os
3 | import sys
4 |
if __name__ == "__main__":
    # Point Django at this project's settings unless the caller has
    # already set DJANGO_SETTINGS_MODULE in the environment.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_gui.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a hint about the usual causes (missing install,
        # wrong PYTHONPATH, virtualenv not activated).
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Dispatch to the Django management command named on the command line.
    execute_from_command_line(sys.argv)
16 |
--------------------------------------------------------------------------------
/contrib/utils/JSON_YAML_Creator/public/manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "short_name": "React App",
3 | "name": "Create React App Sample",
4 | "icons": [
5 | {
6 | "src": "favicon.ico",
7 | "sizes": "64x64 32x32 24x24 16x16",
8 | "type": "image/x-icon"
9 | },
10 | {
11 | "src": "logo192.png",
12 | "type": "image/png",
13 | "sizes": "192x192"
14 | },
15 | {
16 | "src": "logo512.png",
17 | "type": "image/png",
18 | "sizes": "512x512"
19 | }
20 | ],
21 | "start_url": ".",
22 | "display": "standalone",
23 | "theme_color": "#000000",
24 | "background_color": "#ffffff"
25 | }
26 |
--------------------------------------------------------------------------------
/logger/readers/network_reader.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import logging
4 | import sys
5 |
6 | from os.path import dirname, realpath
7 | sys.path.append(dirname(dirname(dirname(realpath(__file__)))))
8 | from logger.readers.reader import Reader # noqa: E402
9 |
10 |
class NetworkReader(Reader):
    """Deprecated stub: NetworkReader has been replaced by TCPReader and
    UDPReader. Instantiating it only logs an error, and read() never
    returns a record.
    """

    def __init__(self, network, eol='',
                 encoding='utf-8', encoding_errors='ignore'):
        """All arguments are accepted for backward compatibility but unused."""
        # Initialize the Reader base class so encoding attributes exist,
        # matching the behavior of the parallel NetworkWriter stub.
        super().__init__(encoding=encoding,
                         encoding_errors=encoding_errors)
        logging.error("NetworkReader has been replaced by TCPReader and UDPReader")

    def read(self):
        """Log an error and return None (no data is ever read)."""
        logging.error("not reading anything: NetworkReader has been "
                      "replaced by TCPReader and UDPReader")
19 |
--------------------------------------------------------------------------------
/logger/transforms/derived_data_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import sys
4 | from os.path import dirname, realpath
5 | sys.path.append(dirname(dirname(dirname(realpath(__file__)))))
6 |
7 | from logger.transforms.transform import Transform # noqa: E402
8 |
9 |
10 | ################################################################################
class DerivedDataTransform(Transform):
    """Trivial base class for derived data transforms.

    Historically these transforms had a more complicated invocation
    process intended to make them more efficient; for simplicity they
    are now treated exactly like any other transform.
    """
18 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/django_gui/templates/django_gui/base.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | {% block title %}OpenRVDAS Cruise Management{% endblock %}
7 |
8 |
9 |
10 | {% block content %}{% endblock %}
11 |
12 |
17 |
18 |
19 |
--------------------------------------------------------------------------------
/test/logger/writers/test_writer.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import logging
4 | import sys
5 | import unittest
6 |
7 | sys.path.append('.')
8 | from logger.writers.writer import Writer # noqa: E402
9 |
10 |
class TestWriter(unittest.TestCase):
    """Tests for the Writer base class."""

    ############################
    def test_warn_if_deprecated(self):
        # Passing the deprecated 'input_format' kwarg up to Writer's
        # constructor should produce a warning-level log message.
        class DeprecatedKwargWriter(Writer):
            def __init__(self):
                super().__init__(input_format='foo')

            def write(self, record):
                pass

        root_logger = logging.getLogger()
        with self.assertLogs(root_logger, logging.WARNING):
            DeprecatedKwargWriter()


if __name__ == '__main__':
    unittest.main()
27 |
--------------------------------------------------------------------------------
/django_gui/static/django_gui/json-viewer/README.md:
--------------------------------------------------------------------------------
1 | JSONViewer
2 | =======
3 | ## information
4 | * visualise JSON objects; the root level may contain an array or an object
5 | * no jquery or other libraries
6 |
7 | ## use
8 | * insert js & css files to your pages (see src/ folder)
9 | * create new instance of JSONViewer object
10 | * append instance container to the DOM using "getContainer()" method
11 | * visualise json using "showJSON()" method, which accepts 3 arguments - json file, optional: visualise to max level (-1 unlimited, 0..n), optional: collapse all at level (-1 unlimited, 0..n)
12 |
13 | ## test
14 | * see test/index.html for test
15 |
16 | ## credits
17 | * created by Roman Makudera 2016 (c)
18 | * MIT licence, code is free to use
19 |
--------------------------------------------------------------------------------
/logger/readers/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa F401
2 |
3 | from .cached_data_reader import CachedDataReader
4 | from .composed_reader import ComposedReader
5 | from .database_reader import DatabaseReader
6 | from .logfile_reader import LogfileReader
7 | from .modbus_reader import ModBusTCPReader
8 | from .mqtt_reader import MQTTReader
9 | from .network_reader import NetworkReader
10 | from .polled_serial_reader import PolledSerialReader
11 | from .redis_reader import RedisReader
12 | from .serial_reader import SerialReader
13 | from .tcp_reader import TCPReader
14 | from .text_file_reader import TextFileReader
15 | from .timeout_reader import TimeoutReader
16 | from .udp_reader import UDPReader
17 | from .socket_reader import SocketReader
18 |
--------------------------------------------------------------------------------
/contrib/logger_templates/serial_logger_template_new.yaml:
--------------------------------------------------------------------------------
1 | ###################
2 | logger_templates:
3 | #################
4 | serial_logger_template:
5 | configs:
6 | 'off': {}
7 | net:
8 | config_template: serial_net_config_template
9 | variables:
10 | baud_rate: <>
11 | serial_port: <>
12 | raw_udp_port: <>
13 | udp_destination: <>
14 | net+file:
15 | config_template: serial_net_file_config_template
16 | variables:
17 | baud_rate: <>
18 | serial_port: <>
19 | raw_udp_port: <>
20 | udp_destination: <>
21 |
--------------------------------------------------------------------------------
/utils/requirements.txt:
--------------------------------------------------------------------------------
1 | # Python/pip requirements for OpenRVDAS. Invoked on installation by the
2 | # script utils/install_openrvdas.sh. See also requirements_mysql.txt for
3 | # files required to enable MySQL functionality with OpenRVDAS
4 |
5 | # Flags
6 | --trusted-host pypi.org
7 | --trusted-host files.pythonhosted.org
8 |
9 | # Packages
10 | pyserial
11 | uwsgi
12 | websockets
13 | PyYAML
14 | parse
15 | psutil
16 | setproctitle
17 | influxdb_client
18 | pyModbusTCP
19 |
20 | # For Geofencing
21 | geopandas
22 | shapely
23 |
24 | django
25 | djangorestframework
26 |
27 | # Version-restricted packages
28 | #django==5.0.3
29 | #djangorestframework==3.15.1
30 |
31 | #includes the swagger ui css and javascript
32 | drf-spectacular[sidecar]
33 |
34 |
--------------------------------------------------------------------------------
/docs/html/README.md:
--------------------------------------------------------------------------------
1 | # OpenRVDAS Logger Component HTML Documents
2 | © David Pablo Cohn - david.cohn@gmail.com
3 | 2019-09-03
4 |
5 | This directory contains automatically-generated HTML documentation for OpenRVDAS logger components and servers. It is best viewed by viewing [the directory's index.html page](https://htmlpreview.github.io/?https://github.com/oceandatatools/openrvdas/blob/master/docs/html/index.html).
6 |
7 | The documents in this directory were generated automatically by [pdoc](https://pdoc3.github.io/pdoc/) using the commands:
8 |
9 | ```
10 | pip3 install pdoc3
11 |
12 | # Generate docs for logger components and some server scripts
13 | pdoc3 --force --html -o docs/html logger
14 | pdoc3 --force --html --filter logger_,server_api,cached -o docs/html server
15 | ```
16 |
--------------------------------------------------------------------------------
/test/logger/transforms/test_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import logging
4 | import sys
5 | import unittest
6 |
7 | sys.path.append('.')
8 | from logger.transforms.transform import Transform # noqa: E402
9 |
10 |
############################
class TestTransform(unittest.TestCase):
    """Tests for the Transform base class."""

    ############################
    def test_warn_if_deprecated(self):
        # Passing the deprecated 'input_format' kwarg up to Transform's
        # constructor should produce a warning-level log message.
        class DeprecatedKwargTransform(Transform):
            def __init__(self):
                super().__init__(input_format='foo')

            def transform(self, record):
                return str(record) + '+'

        root_logger = logging.getLogger()
        with self.assertLogs(root_logger, logging.WARNING):
            DeprecatedKwargTransform()


if __name__ == '__main__':
    unittest.main()
28 |
--------------------------------------------------------------------------------
/logger/writers/network_writer.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import logging
4 | import sys
5 |
6 | from os.path import dirname, realpath
7 | sys.path.append(dirname(dirname(dirname(realpath(__file__)))))
8 | from logger.writers.writer import Writer # noqa: E402
9 |
10 |
class NetworkWriter(Writer):
    """Deprecated stub: NetworkWriter has been replaced by TCPWriter and
    UDPWriter. Instantiating it only logs an error, and write() discards
    every record.
    """

    def __init__(self, network, num_retry=2, eol='',
                 encoding='utf-8', encoding_errors='ignore'):
        """All arguments are accepted for backward compatibility but unused."""
        super().__init__(encoding=encoding, encoding_errors=encoding_errors)
        logging.error("NetworkWriter has been replaced by TCPWriter and UDPWriter")

    def write(self, record):
        """Discard the record and log an error."""
        logging.error("not writing anything: NetworkWriter has been replaced "
                      "by TCPWriter and UDPWriter")
21 |
--------------------------------------------------------------------------------
/contrib/utils/JSON_YAML_Creator/src/App.css:
--------------------------------------------------------------------------------
1 | .App {
2 | text-align: center;
3 | background-color: #fff;
4 | min-height: 100vh;
5 | color: black;
6 | }
7 |
8 | .App-logo {
9 | height: 40vmin;
10 | pointer-events: none;
11 | }
12 |
13 | @media (prefers-reduced-motion: no-preference) {
14 | .App-logo {
15 | animation: App-logo-spin infinite 20s linear;
16 | }
17 | }
18 |
19 | .App-header {
20 | background-color: #282c34;
21 | min-height: 100vh;
22 | display: flex;
23 | flex-direction: column;
24 | align-items: center;
25 | justify-content: center;
26 | font-size: calc(10px + 2vmin);
27 | color: white;
28 | }
29 |
30 | .App-link {
31 | color: #61dafb;
32 | }
33 |
34 | @keyframes App-logo-spin {
35 | from {
36 | transform: rotate(0deg);
37 | }
38 | to {
39 | transform: rotate(360deg);
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/test/configs/sample_logger_manager_writer.yaml:
--------------------------------------------------------------------------------
1 | # A sample file for logger/listener/listen.py
2 | ##
3 | # To run, type:
4 | #
5 | # logger/listener/listen.py --config_file test/configs/sample_logger_manager_writer.yaml
6 | #
7 | # Dictionary key/value pairs are used as keyword arguments for creating a
8 | # Listener. The keys "reader", "transform" and "writer" are special-cased to
9 | # indicate that the respective entities should be instantiated (recursively,
10 | # if necessary).
11 |
12 | # One reader. If we had multiple readers, we'd list them sequentially
13 | # in a list, and they would be executed in parallel.
14 |
15 | readers:
16 | class: TextFileReader
17 |
18 | writers:
19 | - class: LoggerManagerWriter
20 | module: logger.writers.logger_manager_writer
21 | kwargs:
22 | database: django
23 | allowed_prefixes:
24 | - 'set_active_mode '
25 | - 'sleep '
26 |
27 |
--------------------------------------------------------------------------------
/contrib/utils/JSON_YAML_Creator/src/components/index.css:
--------------------------------------------------------------------------------
1 | .test {
2 | font-family: 'Roboto', sans-serif;
3 | }
4 |
5 | .addButton {
6 | border-radius: 15px;
7 | }
8 |
9 | .copyButton {
10 | border-radius: 15px;
11 | }
12 |
13 | .blue-container {
14 | width: auto;
15 | background: #00c2ff;
16 | margin-top: 15%;
17 | padding: 4em 0 2em;
18 | border-top-left-radius: 40px;
19 | transform: skewY(-2deg);
20 | }
21 |
22 | .blue-container:before {
23 | content: '';
24 | width: 40px;
25 | height: 40px;
26 | background-color: #00c2ff;
27 | position: absolute;
28 | top: -39px;
29 | right: 0;
30 | z-index: -2;
31 | }
32 |
33 | .blue-container:after {
34 | content: '';
35 | width: 80px;
36 | height: 80px;
37 | background-color: #fff;
38 | top: -80px;
39 | position: absolute;
40 | right: 0;
41 | border-radius: 50%;
42 | z-index: -1;
43 | }
44 |
45 | .blue-container-text {
46 | transform: skewY(2deg);
47 | }
48 |
--------------------------------------------------------------------------------
/display/js/widgets/settings.js.dist:
--------------------------------------------------------------------------------
1 | /*******************************************************************************
2 | Site-specific settings for display widgets.
3 |
4 | THIS FILE MUST BE COPIED OVER TO settings.js (and have the correct server
5 | name and port copied in) FOR WIDGETS TO BE OPERATIONAL.
6 |
7 | *******************************************************************************/
8 |
9 | // Location of data server - unless otherwise specified, assume that the
10 | // data server is on the same host as is serving the page, on path /cds-ws.
11 | // Locally, the path is localhost:8766, but this is served externally by
12 | // Nginx as WEBSOCKET_HOST:SERVER_PORT/cds-ws
13 | var WEBSOCKET_PROTOCOL = 'ws';
14 | var WEBSOCKET_HOST = window.location.hostname || 'localhost';
15 | var WEBSOCKET_PORT = 80;
16 | var WEBSOCKET_PATH = '/cds-ws';
17 | var WEBSOCKET_DATA_SERVER = WEBSOCKET_PROTOCOL + '://' + WEBSOCKET_HOST + ':' + WEBSOCKET_PORT + WEBSOCKET_PATH;
18 |
--------------------------------------------------------------------------------
/logger/writers/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa F401
2 |
3 | from .cached_data_writer import CachedDataWriter
4 | from .composed_writer import ComposedWriter
5 | from .database_writer import DatabaseWriter
6 | from .email_writer import EmailWriter
7 | from .file_writer import FileWriter
8 | from .influxdb_writer import InfluxDBWriter
9 | from .logfile_writer import LogfileWriter
10 | from .logger_manager_writer import LoggerManagerWriter
11 | from .mqtt_writer import MQTTWriter
12 | from .network_writer import NetworkWriter
13 | from .record_screen_writer import RecordScreenWriter
14 | from .redis_writer import RedisWriter
15 | from .regex_logfile_writer import RegexLogfileWriter
16 | from .serial_writer import SerialWriter
17 | from .tcp_writer import TCPWriter
18 | from .text_file_writer import TextFileWriter
19 | from .timeout_writer import TimeoutWriter
20 | from .udp_writer import UDPWriter
21 | from .websocket_writer import WebsocketWriter
22 | from .socket_writer import SocketWriter
23 |
--------------------------------------------------------------------------------
/contrib/logger_templates/parse_data_logger_template.yaml:
--------------------------------------------------------------------------------
1 | ###################
2 | logger_templates:
3 | #################
4 | parse_data_logger_template:
5 | configs:
6 | 'off': {}
7 |
8 | 'on': &parse_data_on
9 | readers:
10 | - class: UDPReader
11 | kwargs:
12 | port: <>
13 | transforms: # Add timestamp and logger label
14 | - class: ParseTransform
15 | kwargs:
16 | metadata_interval: 10
17 | definition_path: <>
18 | writers:
19 | - class: CachedDataWriter
20 | kwargs:
21 | data_server: <>
22 |
23 | on+influx:
24 | <<: *parse_data_on
25 | writers:
26 | - class: CachedDataWriter
27 | kwargs:
28 | data_server: <>
29 | - class: InfluxDBWriter
30 | kwargs:
31 | bucket_name: <>
32 |
--------------------------------------------------------------------------------
/django_gui/templates/django_gui/login.html:
--------------------------------------------------------------------------------
1 | {% extends 'django_gui/base.html' %}
2 | {% load static %}
3 |
4 | {% block content %}
5 | OpenRVDAS
6 |
7 |
8 |
Please Sign In
9 |
23 |
24 |
25 | {% endblock %}
26 |
27 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 | 1. Go to '...'
16 | 2. Click on '....'
17 | 3. Scroll down to '....'
18 | 4. See error
19 |
20 | **Expected behavior**
21 | A clear and concise description of what you expected to happen.
22 |
23 | **Screenshots**
24 | If applicable, add screenshots to help explain your problem.
25 |
26 | **Desktop (please complete the following information):**
27 | - OS: [e.g. iOS]
28 | - Browser [e.g. chrome, safari]
29 | - Version [e.g. 22]
30 |
31 | **Smartphone (please complete the following information):**
32 | - Device: [e.g. iPhone6]
33 | - OS: [e.g. iOS8.1]
34 | - Browser [e.g. stock browser, safari]
35 | - Version [e.g. 22]
36 |
37 | **Additional context**
38 | Add any other context about the problem here.
39 |
--------------------------------------------------------------------------------
/utils/jinja_config_creator/README.md:
--------------------------------------------------------------------------------
1 | # Configuration Generator
2 | Given a Jinja template and a yaml file of devices, generate the full cruise yaml configuration file.
3 |
4 | ## Pre-requisites:
5 |
6 | ### Jinja2
7 | Install with pip
8 |
9 | `pip install Jinja2`
10 |
11 | Documentation available at https://jinja.palletsprojects.com/
12 |
13 | ## To use Config Generator
14 |
15 | Create a Jinja template for your cruise definition file. A sample template can be found in the test_files directory.
16 |
17 | Create a devices.yaml file specifying the variables in your template. A sample file can be found in the test_files directory.
18 |
19 | Note that for each device an optional `transform_type` may be specified. Acceptable transform types include:
20 | - xml
21 | - hexstring
22 | - pyparse
23 | - (unspecified defaults to regex)
24 |
25 | Run the config generator with this command:
26 |
27 | `python cruise_config_generator.py --voyage_details test_files/sample_devices.yaml --template test_files/cruise_template.jinja --config_output test_files/sample_cruise.yaml`
28 |
29 |
--------------------------------------------------------------------------------
/contrib/utils/JSON_YAML_Creator/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "json",
3 | "version": "0.1.0",
4 | "private": true,
5 | "dependencies": {
6 | "@testing-library/jest-dom": "^4.2.4",
7 | "@testing-library/react": "^9.3.2",
8 | "@testing-library/user-event": "^7.1.2",
9 | "js-yaml": "^3.14.0",
10 | "react": "^16.13.1",
11 | "react-bootstrap": "^1.0.1",
12 | "react-copy-to-clipboard": "^5.0.2",
13 | "react-dom": "^16.13.1",
14 | "react-scripts": "3.4.1",
15 | "yaml": "^1.10.0"
16 | },
17 | "scripts": {
18 | "start": "react-scripts start",
19 | "build": "react-scripts build",
20 | "test": "react-scripts test",
21 | "eject": "react-scripts eject"
22 | },
23 | "eslintConfig": {
24 | "extends": "react-app"
25 | },
26 | "browserslist": {
27 | "production": [
28 | ">0.2%",
29 | "not dead",
30 | "not op_mini all"
31 | ],
32 | "development": [
33 | "last 1 chrome version",
34 | "last 1 firefox version",
35 | "last 1 safari version"
36 | ]
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/test/logger/utils/test_timestamp.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import sys
4 | import unittest
5 |
6 | sys.path.append('.')
7 | from logger.utils import timestamp # noqa: E402
8 |
9 |
class TestTimestamp(unittest.TestCase):
    """Exercise the timestamp(), time_str() and date_str() helpers."""

    def test_timestamp(self):
        # Round-tripping "now" through time_str() should come back to
        # (nearly) the same epoch value.
        now_str = timestamp.time_str()
        self.assertAlmostEqual(timestamp.timestamp(now_str),
                               timestamp.timestamp(), places=1)
        # Ten seconds past the Unix epoch.
        self.assertEqual(timestamp.timestamp('1970-01-01T00:00:10.0Z'), 10.0)

    def test_time_str(self):
        epoch_secs = 1507810403.33
        self.assertEqual(timestamp.time_str(epoch_secs),
                         '2017-10-12T12:13:23.330000Z')
        self.assertEqual(timestamp.time_str(epoch_secs, time_format='%H/%M'),
                         '12/13')

    def test_date_str(self):
        epoch_secs = 1507810403.33
        self.assertEqual(timestamp.date_str(epoch_secs), '2017-10-12')
        self.assertEqual(timestamp.date_str(epoch_secs, date_format='%Y+%j'),
                         '2017+285')


if __name__ == '__main__':
    unittest.main()
30 |
--------------------------------------------------------------------------------
/contrib/devices/AIS.yaml:
--------------------------------------------------------------------------------
1 | ################################################################################
2 | # AIS Device Type definition
3 | #
4 | # See README.md in this directory
5 |
6 | ######################################
7 | AIS:
8 | category: "device_type"
9 | # !AIVDM,1,1,,B,15O5G4000oKPfggK2F2RQj7>0@FU,0*04
10 | format:
11 | AIVDM: "!AIVDM,{NumMessages:d},{MessageNum:d},{NextMessage:od},{Channel:ow},{Body:nc},{FillBits:d}*{Checksum:x}"
12 | AIVDO: "!AIVDO,{NumMessages:d},{MessageNum:d},{NextMessage:od},{Channel:ow},{Body:nc},{FillBits:d}*{Checksum:x}"
13 |
14 | fields:
15 | NumMessages:
16 | description: "Total number of sentences needed to transfer message"
17 | units: ""
18 | MessageNum:
19 | description: "Message sentence number"
20 | units: ""
21 | NextMessage:
22 | description: "Sequential identifier to link multiple messages"
23 | units: ""
24 | Channel:
25 | description: "AIS channel"
26 | units: ""
27 | Body:
28 | description: "Encapsulated Binary Coded Data (as per ITU-R M.1371)"
29 | units: ""
30 | FillBits:
31 | description: "Number of fill bits"
32 | units: ""
33 |
--------------------------------------------------------------------------------
/test/configs/parallel_logger.yaml:
--------------------------------------------------------------------------------
1 | # A sample file for logger/listener/listen.py
2 | ##
3 | # To run, type:
4 | #
5 | # logger/listener/listen.py --config_file test/configs/parallel_logger.yaml
6 | #
7 | # Dictionary key/value pairs are used as keyword arguments for creating a
8 | # Listener. The keys "reader", "transform" and "writer" are special-cased to
9 | # indicate that the respective entities should be instantiated (recursively,
10 | # if necessary).
11 |
12 | # We have two readers, enclosed in a list. They will be run in
13 | # parallel. The "interval" kwarg tells the TextFileReader to read one
14 | # line from its file every interval seconds.
15 | readers:
16 | - class: TextFileReader
17 | kwargs: # initialization kwargs
18 | file_spec: LICENSE
19 | interval: 1
20 | - class: TextFileReader
21 | kwargs: # initialization kwargs
22 | file_spec: README.md
23 | interval: 0.5
24 |
25 | # No transforms
26 |
27 | # Only a single writer, not enclosed in a list, to demonstrate/test
28 | # that lists are only needed if we've got multiple components.
29 | writers:
30 | class: TextFileWriter
31 | # no initialization kwargs; no filename, so write to stdout
32 |
33 |
--------------------------------------------------------------------------------
/test/logger/utils/test.crt:
--------------------------------------------------------------------------------
1 | -----BEGIN CERTIFICATE-----
2 | MIIDCjCCAfICCQD/ny7oHu8xkzANBgkqhkiG9w0BAQsFADBHMQswCQYDVQQGEwJV
3 | UzELMAkGA1UECAwCQUwxDTALBgNVBAcMBE5vbmUxDTALBgNVBAoMBE5vbmUxDTAL
4 | BgNVBAMMBHRlc3QwHhcNMjMwNTIxMDIzNjU5WhcNMjQwNTIwMDIzNjU5WjBHMQsw
5 | CQYDVQQGEwJVUzELMAkGA1UECAwCQUwxDTALBgNVBAcMBE5vbmUxDTALBgNVBAoM
6 | BE5vbmUxDTALBgNVBAMMBHRlc3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
7 | AoIBAQC0LznAeNHnbgN7yGuj10VnKMfUBZwlG3ePV8nmuQoGSLyQQh2wG9igDftx
8 | 9uWHvt+G7Bsl0h2GP1xAWI8Hc/l/d2hHqfuVc3x++eTwjOJReEFIYr3hbTtMr07h
9 | t6os9A2jlPgPkpmdsZyuQIXlNXfS4NrGayfy3BUP095aDJTLNlldF8hWd7Q6fNUf
10 | XcTc+PyrsFpneisH5fQYXyRhxFz77ueiq2hWBP80SxIXyBETaYXJroEhx30jDnGK
11 | MZKTGDQ7ddC/dl9tDVY4bfPTldZxV6N8ES6bCG5rm1u6yE36DgGA5Q0VW9sd2G7/
12 | Wa/rFxF7LhX/E5O4F8joQbJRO+gVAgMBAAEwDQYJKoZIhvcNAQELBQADggEBALAV
13 | ijQcYUzAnKkUUure3mpqbL4nm9zjyaaSCsOKQ/eH1vlr7EYshOQqWUS+9vfJ0HEa
14 | emSf3dV132homiMfZhsMMoRXaM8oOhEQ74ycAD7z53NKp7cjw+tvsKVS2hF1cn+O
15 | lydM6y6WX2+88j785KOFYThclm3W6mtbXBV7hIq+c3lD8V50+oH44lsKGvFSMcYg
16 | UpwaxBxzkGG3RjCIpNxhLWoKsuyDFe0aQncv7mLyIiionB5UUusSB+JFv2vcdJcT
17 | /9N5CylcfdH/tJINPdz50QzFia56C8hdDYkPWHNM7XIR6XhBZdRuNEmJLk5DYIpU
18 | SBlW6EH1UutuxhfaEKw=
19 | -----END CERTIFICATE-----
20 |
--------------------------------------------------------------------------------
/logger/transforms/unique_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """Pass the record to the next transform/writer only if the contents of the
3 | record have changed from the previous value.
4 | """
5 |
6 | import sys
7 |
8 | from os.path import dirname, realpath
9 | sys.path.append(dirname(dirname(dirname(realpath(__file__)))))
10 | from logger.transforms.transform import Transform # noqa: E402
11 |
12 |
13 | ################################################################################
class UniqueTransform(Transform):
    """Return the record only if it has changed from the previous value."""

    # Sentinel distinct from every real record, so the very first record
    # -- including an empty string, which the old "" initializer would
    # have wrongly suppressed -- is always passed through.
    _NO_PREV = object()

    def __init__(self):
        """Starts with no previous record."""
        super().__init__()
        self.prev_record = self._NO_PREV

    ############################
    def transform(self, record: str):
        """If same as previous, return None, else record."""
        # See if it's something we can process, and if not, try digesting
        if not self.can_process_record(record):  # inherited from Transform()
            return self.digest_record(record)  # inherited from Transform()

        if record == self.prev_record:
            return None

        self.prev_record = record
        return record
--------------------------------------------------------------------------------
/contrib/config_templates/serial_reader_config_template.yaml:
--------------------------------------------------------------------------------
1 | ###################
2 | config_templates:
3 | #################
4 | serial_net_config_template: &serial_net_config_base
5 | readers:
6 | - class: SerialReader
7 | kwargs:
8 | baudrate: <>
9 | port: <>
10 | transforms:
11 | - class: TimestampTransform
12 | writers:
13 | - class: ComposedWriter
14 | kwargs:
15 | transforms:
16 | - class: PrefixTransform
17 | kwargs:
18 | prefix: <>
19 | writers:
20 | - class: UDPWriter
21 | kwargs:
22 | port: <>
23 | destination: <>
24 |
25 | serial_net_file_config_template:
26 | <<: *serial_net_config_base
27 | writers:
28 | - class: LogfileWriter
29 | kwargs:
30 | filebase: <>/<>/raw/<>_<>
31 | - class: ComposedWriter
32 | kwargs:
33 | transforms:
34 | - class: PrefixTransform
35 | kwargs:
36 | prefix: <>
37 | writers:
38 | - class: UDPWriter
39 | kwargs:
40 | port: <>
41 | destination: <>
42 |
--------------------------------------------------------------------------------
/test/configs/composed_logger.yaml:
--------------------------------------------------------------------------------
1 | # A sample file for logger/listener/listen.py demonstrating the recursive
2 | # definition functionality by using a ComposedReader or ComposedWriter.
3 | #
4 | # To run, type:
5 | #
6 | # logger/listener/listen.py --config_file test/configs/composed_logger.yaml
7 | #
8 | # Dictionary key/value pairs are used as keyword arguments for creating a
9 | # Listener. The keys "reader", "transform" and "writer" are special-cased to
10 | # indicate that the respective entities should be instantiated (recursively,
11 | # if necessary).
12 |
13 | readers:
14 | - class: TextFileReader # multiple readers, so enclose in a list.
15 | kwargs:
16 | file_spec: README.md # initialization keyword args
17 | interval: 0.5
18 | - class: ComposedReader
19 | kwargs:
20 | readers:
21 | class: TextFileReader
22 | kwargs:
23 | file_spec: LICENSE
24 | interval: 1
25 | transforms:
26 | class: PrefixTransform
27 | kwargs:
28 | prefix: 'composed:'
29 |
30 | #transforms: # No transforms, so omit entirely
31 |
32 | # Only a single writer, not enclosed in a list, to demonstrate/test
33 | # that lists are only needed if we've got multiple components.
34 | writers:
35 | class: TextFileWriter
36 | # no kwargs; no filename provided, so will write to stdout
37 |
--------------------------------------------------------------------------------
/test/configs/simple_logger.yaml:
--------------------------------------------------------------------------------
1 | # A sample file for logger/listener/listen.py
2 | ##
3 | # To run, type:
4 | #
5 | # logger/listener/listen.py --config_file test/configs/simple_logger.yaml
6 | #
7 | # Dictionary key/value pairs are used as keyword arguments for creating a
8 | # Listener. The keys "reader", "transform" and "writer" are special-cased to
9 | # indicate that the respective entities should be instantiated (recursively,
10 | # if necessary).
11 |
12 | # One reader. If we had multiple readers, we'd list them sequentially
13 | # in a list, and they would be executed in parallel.
14 |
15 | readers:
16 | class: TextFileReader
17 | kwargs: # initialization kwargs
18 | file_spec: LICENSE
19 |
20 | # Timestamp and label. We have two transforms; they are encapsulated
21 | # in a list and will be executed sequentially.
22 | transforms:
23 | - class: TimestampTransform # TimestampTransform has no kwargs
24 | - class: PrefixTransform
25 | kwargs:
26 | prefix: "license:" # enclose in quotes because the ":" can confuse YAML
27 |
28 | # Only a single writer. If there were multiple writers, they would be
29 | # called in parallel.
30 | writers:
31 | class: TextFileWriter
32 | # No initialization kwargs; no filename, so TextFileWriter
33 | # will write to stdout
34 |
35 | interval: 0.2
36 | check_format: false
37 |
--------------------------------------------------------------------------------
/django_gui/static/django_gui/json-viewer/json-viewer.css:
--------------------------------------------------------------------------------
1 | .json-viewer {
2 | color: #000;
3 | padding-left: 20px;
4 | }
5 |
6 | .json-viewer ul {
7 | list-style-type: none;
8 | margin: 0;
9 | margin: 0 0 0 1px;
10 | border-left: 1px dotted #ccc;
11 | padding-left: 2em;
12 | }
13 |
14 | .json-viewer .hide {
15 | display: none;
16 | }
17 |
18 | .json-viewer .type-string {
19 | color: #0B7500;
20 | }
21 |
22 | .json-viewer .type-date {
23 | color: #CB7500;
24 | }
25 |
26 | .json-viewer .type-boolean {
27 | color: #1A01CC;
28 | font-weight: bold;
29 | }
30 |
31 | .json-viewer .type-number {
32 | color: #1A01CC;
33 | }
34 |
35 | .json-viewer .type-null, .json-viewer .type-undefined {
36 | color: #90a;
37 | }
38 |
39 | .json-viewer a.list-link {
40 | color: #000;
41 | text-decoration: none;
42 | position: relative;
43 | }
44 |
45 | .json-viewer a.list-link:before {
46 | color: #aaa;
47 | content: "\25BC";
48 | position: absolute;
49 | display: inline-block;
50 | width: 1em;
51 | left: -1em;
52 | }
53 |
54 | .json-viewer a.list-link.collapsed:before {
55 | content: "\25B6";
56 | }
57 |
58 | .json-viewer a.list-link.empty:before {
59 | content: "";
60 | }
61 |
62 | .json-viewer .items-ph {
63 | color: #aaa;
64 | padding: 0 1em;
65 | }
66 |
67 | .json-viewer .items-ph:hover {
68 | text-decoration: underline;
69 | }
70 |
--------------------------------------------------------------------------------
/server/supervisord/supervisor.d/openrvdas.ini:
--------------------------------------------------------------------------------
1 | [program:cached_data_server]
2 | command=/usr/local/bin/python3 server/cached_data_server.py --port 8766 --disk_cache /var/tmp/openrvdas/disk_cache --max_records 8640 -v
3 | directory=/opt/openrvdas
4 | autostart=false
5 | autorestart=true
6 | startretries=3
7 | stderr_logfile=/var/log/openrvdas/cached_data_server.err.log
8 | stdout_logfile=/var/log/openrvdas/cached_data_server.out.log
9 | user=pablo
10 |
11 | [program:logger_manager]
12 | command=/usr/local/bin/python3 server/logger_manager.py --database django --no-console --data_server_websocket :8766 -v
13 | directory=/opt/openrvdas
14 | autostart=false
15 | autorestart=true
16 | startretries=3
17 | stderr_logfile=/var/log/openrvdas/logger_manager.err.log
18 | stdout_logfile=/var/log/openrvdas/logger_manager.out.log
19 | user=pablo
20 |
21 | [group:openrvdas]
22 | programs=cached_data_server,logger_manager
23 |
24 | [program:simulate_nbp_serial]
25 | command=/usr/local/bin/python3 logger/utils/simulate_serial.py --config test/NBP1406/simulate_NBP1406.yaml --loop
26 | directory=/opt/openrvdas
27 | autostart=false
28 | autorestart=true
29 | startretries=3
30 | stderr_logfile=/var/log/openrvdas/simulate_serial.err.log
31 | stdout_logfile=/var/log/openrvdas/simulate_serial.out.log
32 | user=pablo
33 |
34 | [group:simulate]
35 | programs=simulate_nbp_serial
36 |
--------------------------------------------------------------------------------
/django_gui/templates/django_gui/widget.html:
--------------------------------------------------------------------------------
1 | {% extends 'django_gui/base.html' %}
2 |
3 | {% block content %}
4 |
5 |
11 |
12 |
13 |
14 | Widget
15 | Widget
16 | {# ######################################################################## #}
17 |
18 | {% if field_list %}
19 |
20 |
21 | waiting for first data...
22 | {% for field_name in field_list %}
23 |
24 | {{ field_name }}
25 | -
26 |
27 | {% endfor %}
28 |
29 |
30 | {% else %}
31 | Call with desired fields in comma-separated
32 | list following url specification. e.g.:
33 |
34 | http://localhost:8000/widget/S330Pitch,S330Roll
35 |
36 | {% endif %}
37 |
38 |
39 | {% endblock content %}
40 |
--------------------------------------------------------------------------------
/contrib/logger_templates/udp_logger_template.yaml:
--------------------------------------------------------------------------------
1 | ###################
2 | logger_templates:
3 | #################
4 | udp_logger_template:
5 | configs:
6 | 'off': {}
7 | net:
8 | readers:
9 | - class: UDPReader
10 | kwargs:
11 | port: <>
12 | transforms:
13 | - class: TimestampTransform
14 | - class: PrefixTransform
15 | kwargs:
16 | prefix: <>
17 | writers:
18 | - class: UDPWriter
19 | kwargs:
20 | port: <>
21 | destination: <>
22 | net+file:
23 | readers:
24 | - class: UDPReader
25 | kwargs:
26 | port: <>
27 | transforms:
28 | - class: TimestampTransform
29 | writers:
30 | - class: LogfileWriter
31 | kwargs:
32 | filebase: <>/<>/raw/<>_<>
33 | - class: ComposedWriter
34 | kwargs:
35 | transforms:
36 | - class: PrefixTransform
37 | kwargs:
38 | prefix: <>
39 | writers:
40 | - class: UDPWriter
41 | kwargs:
42 | port: <>
43 | destination: <>
44 |
--------------------------------------------------------------------------------
/test/NBP1406/devices/HydroDasNBP.yaml:
--------------------------------------------------------------------------------
1 | ################################################################################
2 | # NBP-Specific Hydro-DAS Device Type Definitions
3 | #
4 | # See README.md in this directory
5 |
6 | ######################################
7 | HydroDasNBP:
8 | category: "device_type"
9 | description: "Aggregator of Hydro-DAS data"
10 |
11 | # 12.16954 16.81086 193.1034 3877.242 -1 31 47 35 42
12 | format: "{Voltage:g} {CaseTemp:g} {Fluorometer:g} {Transmissometer:g} {SeawaterValve:d} {Flow1Freq:g} {Flow2Freq:g} {Flow3Freq:g} {Flow4Freq:g}"
13 |
14 | fields:
15 | Voltage:
16 | units: "V"
17 | description: "Power Supply Voltage"
18 | CaseTemp:
19 | units: "degrees C"
20 | description: "Internal Case Temperature"
21 | Fluorometer:
22 | units: "mV"
23 | description: "Fluorometer"
24 | Transmissometer:
25 | units: "mV"
26 | description: "Transmissometer"
27 | SeawaterValve:
28 | units: "-1 or 0"
29 | description: "Seawater Valve"
30 | Flow1Freq:
31 | units: "Hz"
32 | description: "Flow Meter 1 Frequency"
33 | Flow2Freq:
34 | units: "Hz"
35 | description: "Flow Meter 2 Frequency"
36 | Flow3Freq:
37 | units: "Hz"
38 | description: "Flow Meter 3 Frequency"
39 | Flow4Freq:
40 | units: "Hz"
41 | description: "Flow Meter 4 Frequency"
42 |
--------------------------------------------------------------------------------
/logger/transforms/regex_filter_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import re
4 | import sys
5 |
6 | from os.path import dirname, realpath
7 | sys.path.append(dirname(dirname(dirname(realpath(__file__)))))
8 | from logger.transforms.transform import Transform # noqa: E402
9 |
10 |
11 | ################################################################################
class RegexFilterTransform(Transform):
    """Only return records matching the specified regular expression."""
    ############################

    def __init__(self, pattern, flags=0, negate=False):
        """If negate=True, only return records that *don't* match the pattern."""
        self.pattern = re.compile(pattern, flags)
        self.negate = negate

    ############################
    def transform(self, record: str):
        """Return the record if it passes the (possibly negated) pattern test,
        otherwise None."""

        # See if it's something we can process, and if not, try digesting
        if not self.can_process_record(record):  # inherited from Transform()
            return self.digest_record(record)  # inherited from Transform()

        # Pass the record through exactly when "did it match" disagrees with
        # the negate flag: match & not-negated, or no-match & negated.
        matched = self.pattern.search(record) is not None
        if matched != self.negate:
            return record
        return None
38 |
--------------------------------------------------------------------------------
/test/logger/transforms/test_split_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import logging
4 | import sys
5 | import unittest
6 |
7 | sys.path.append('.')
8 | from logger.transforms.split_transform import SplitTransform # noqa: E402
9 |
10 |
class TestSplitTransform(unittest.TestCase):

    def test_default(self):
        # With no constructor args, records are split on newlines.
        result = SplitTransform().transform('1\n2\n3\n4')
        self.assertEqual(result, ['1', '2', '3', '4'])

        # An explicit (multi-character) separator may be supplied instead.
        result = SplitTransform('ab').transform('1ab2ab3ab4ab')
        self.assertEqual(result, ['1', '2', '3', '4'])
21 |
22 |
################################################################################
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('-v', '--verbosity', dest='verbosity',
                        default=0, action='count',
                        help='Increase output verbosity')
    args = parser.parse_args()

    logging.basicConfig(format='%(asctime)-15s %(message)s')

    # Map the -v count onto a log level, capping at the most verbose entry.
    level_by_count = {0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG}
    verbosity = min(args.verbosity, max(level_by_count))
    logging.getLogger().setLevel(level_by_count[verbosity])

    unittest.main(warnings='ignore')
40 |
--------------------------------------------------------------------------------
/contrib/logger_templates/serial_logger_template.yaml:
--------------------------------------------------------------------------------
1 | ###################
2 | logger_templates:
3 | #################
4 | serial_logger_template:
5 | configs:
6 | 'off': {}
7 | net:
8 | readers:
9 | - class: SerialReader
10 | kwargs:
11 | baudrate: <>
12 | port: <>
13 | transforms:
14 | - class: TimestampTransform
15 | - class: PrefixTransform
16 | kwargs:
17 | prefix: <>
18 | writers:
19 | - class: UDPWriter
20 | kwargs:
21 | port: <>
22 | destination: <>
23 | net+file:
24 | readers:
25 | - class: SerialReader
26 | kwargs:
27 | baudrate: <>
28 | port: <>
29 | transforms:
30 | - class: TimestampTransform
31 | writers:
32 | - class: LogfileWriter
33 | kwargs:
34 | filebase: <>/<>/raw/<>_<>
35 | - class: ComposedWriter
36 | kwargs:
37 | transforms:
38 | - class: PrefixTransform
39 | kwargs:
40 | prefix: <>
41 | writers:
42 | - class: UDPWriter
43 | kwargs:
44 | port: <>
45 | destination: <>
46 |
--------------------------------------------------------------------------------
/contrib/raspberrypi/readers/test_onewire_reader.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import sys
4 | import time
5 | import unittest
6 |
7 | from os.path import dirname, realpath
8 | sys.path.append(dirname(dirname(dirname(dirname(realpath(__file__))))))
9 | from contrib.raspberrypi.readers.onewire_reader import OneWireReader # noqa: E402
10 |
11 |
12 | ################################################################################
13 | ################################################################################
class TestOneWireReader(unittest.TestCase):
    ############################
    def test_interval(self):
        # With interval=1, n reads should take at least n seconds in total.
        n_reads = 3
        reader = OneWireReader(interval=1, temp_in_f=True)
        start = time.time()
        for _ in range(n_reads):
            reader.read()
        self.assertGreater(time.time(), start + n_reads)

    ############################
    def test_conversions(self):
        # Read once in Celsius and once in Fahrenheit, then check the values
        # agree under the standard C = (F - 32) * 5/9 conversion.
        c_reader = OneWireReader()
        f_reader = OneWireReader(temp_in_f=True)
        c_values = c_reader.read().split()
        f_values = f_reader.read().split()

        for c_val, f_val in zip(c_values, f_values):
            self.assertAlmostEqual(
                first=float(c_val),
                second=(float(f_val) - 32) * 5 / 9,
                delta=0.5)
37 |
38 |
39 | if __name__ == '__main__':
40 | unittest.main()
41 |
--------------------------------------------------------------------------------
/logger/transforms/regex_replace_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import re
4 | import sys
5 |
6 | from os.path import dirname, realpath
7 | sys.path.append(dirname(dirname(dirname(realpath(__file__)))))
8 | from logger.transforms.transform import Transform # noqa: E402
9 |
10 |
11 | ################################################################################
class RegexReplaceTransform(Transform):
    """Apply regex search-and-replace substitutions to each record."""
    ############################

    def __init__(self, patterns, count=0, flags=0):
        """
        patterns - a dict of {old:new, old:new} regex patterns to be searched
                and replaced. Patterns are applied in dict insertion order
                (guaranteed since Python 3.7); the original docstring's claim
                that order was not guaranteed no longer holds.

        count - maximum substitutions per pattern; 0 means replace all.

        flags - re module flags (e.g. re.IGNORECASE) applied to every pattern.
        """
        self.patterns = patterns
        self.count = count
        self.flags = flags

    ############################
    def transform(self, record: str):
        """Return record with all replacement patterns applied in order."""

        # See if it's something we can process, and if not, try digesting
        if not self.can_process_record(record):  # inherited from Transform()
            return self.digest_record(record)  # inherited from Transform()

        # Apply all patterns in order. count/flags are passed by keyword:
        # positional use is deprecated as of Python 3.13 and was easy to
        # misread as extra pattern arguments.
        result = record
        for old_str, new_str in self.patterns.items():
            result = re.sub(old_str, new_str, result,
                            count=self.count, flags=self.flags)
        return result
38 |
--------------------------------------------------------------------------------
/contrib/csiro/test/Regex_device_catalogue.yaml:
--------------------------------------------------------------------------------
1 | includes:
2 | - contrib/csiro/test/Seapath_regex.yaml
3 |
4 | devices:
5 | seap:
6 | category: "device"
7 | device_type: "Seapath330"
8 | serial_number: "unknown"
9 | description: "Just another device description."
10 |
11 | # Map from device_type field names to names specific for this
12 | # specific device.
13 | fields:
14 | TalkerID: 'TalkerID'
15 | GPSTime: 'GPSTime'
16 | GPSStatus: 'GPSStatus'
17 | FixQuality: 'FixQuality'
18 | NumSats: 'NumSats'
19 | HDOP: 'HDOP'
20 | AntennaHeight: 'AntennaHeight'
21 | GeoidHeight: 'GeoidHeight'
22 | LastDGPSUpdate: 'LastDGPSUpdate'
23 | DGPSStationID: 'DGPSStationID'
24 | CourseTrue: 'CourseTrue'
25 | CourseMag: 'CourseMag'
26 | SpeedOverGround: 'SpeedOverGround'
27 | SpeedKm: 'SpeedKm'
28 | Mode: 'Mode'
29 | GPSDay: 'GPSDay'
30 | GPSMonth: 'GPSMonth'
31 | GPSYear: 'GPSYear'
32 | GPSDate: 'GPSDate'
33 | LocalZoneHours: 'LocalZoneHours'
34 | LocalZoneMinutes: 'LocalZoneMinutes'
35 | GyroCal: 'GyroCal'
36 | GyroOffset: 'GyroOffset'
37 | Roll: 'Roll'
38 | Pitch: 'Pitch'
39 | HeadingTrue: 'HeadingTrue'
40 | Heave: 'Heave'
41 | Latitude: 'Latitude'
42 | NorS: 'NorS'
43 | Longitude: 'Longitude'
44 | EorW: 'EorW'
45 | MagneticVar: 'MagneticVar'
46 | MagneticVarEorW: 'MagneticVarEorW'
--------------------------------------------------------------------------------
/contrib/utils/JSON_YAML_Creator/src/components/KWArgs.js:
--------------------------------------------------------------------------------
1 | import React, { useState } from 'react';
2 | import { Dropdown, Button } from 'react-bootstrap';
3 |
// NOTE(review): the JSX in this copy of the file appears garbled — element
// tags seem to have been stripped during extraction (bare text like "Kwargs"
// and orphaned attribute/handler blocks below). Code is left byte-as-seen;
// comments describe only what is visible. Restore from VCS before editing.
//
// Dropdown + text input for selecting a kwarg name and value for a reader
// class; "Add" reports [props.kwClass, kwargs, value] back to the parent
// via props.kwargCallback.
export default function KWArgs(props) {
  const [kwargs, setKwargs] = useState('');
  const [value, setValue] = useState('');

  // Dropdown item click handler: the clicked item's text becomes the
  // selected kwarg name.
  const kwargsChange = (e) => {
    setKwargs(e.target.innerHTML);
  };

  const divStyle = {
    display: 'flex',
    alignItems: 'center',
  };
  // Text input change handler: track the kwarg's value as the user types.
  const handleTextChange = (e) => {
    setValue(e.target.value);
  };

  return (


Kwargs



          {kwargs.length > 0 ? kwargs : 'Please Select a Reader'}



          {props.items.map((item) => (
            {item}
          ))}



      {
          let arr = [props.kwClass, kwargs, value];
          props.kwargCallback(arr);
        }}
      >
        Add
      {' '}


  );
}
50 |
--------------------------------------------------------------------------------
/display/js/d3/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2010-2017 Mike Bostock
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without modification,
5 | are permitted provided that the following conditions are met:
6 |
7 | * Redistributions of source code must retain the above copyright notice, this
8 | list of conditions and the following disclaimer.
9 |
10 | * Redistributions in binary form must reproduce the above copyright notice,
11 | this list of conditions and the following disclaimer in the documentation
12 | and/or other materials provided with the distribution.
13 |
14 | * Neither the name of the author nor the names of contributors may be used to
15 | endorse or promote products derived from this software without specific prior
16 | written permission.
17 |
18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
19 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
20 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
21 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
22 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
23 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
24 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
25 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
27 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 |
--------------------------------------------------------------------------------
/database/setup_mysql_connector.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Set up appropriate user and test databases and users for mysql_connector.
4 |
5 | if [ ! $# == 2 ]; then
6 | echo Usage:
7 | echo
8 | echo $0 MYSQL_USER_NAME MYSQL_USER_PASSWORD
9 | echo
10 | echo Will create required tables and named MySQL user and give user
11 | echo access to the newly-created tables.
12 | exit
13 | fi
14 |
15 | if [ ! `which mysql` ]; then
16 | echo '####################################################################'
17 | echo NOTE: Before running this script, please install and set up
18 | echo the appropriate MySQL server.
19 | echo '####################################################################'
20 | exit
21 | fi
22 |
23 | USER=$1
24 | PWD=$2
25 |
26 | # Create databases if they don't exist, and give user access. Also
27 | # give user 'test' access to test database.
28 | #mysql -u root -p$ROOT_PWD < /opt/openrvdas/mkdocs.yml <', views.edit_config, name='edit_config'),
34 | path('choose_file/', views.choose_file, name='choose_file'),
35 | path('widget/', views.widget, name='widget'),
36 | path('widget/', views.widget, name='widget'),
37 | path('fields/', views.fields, name='fields'),
38 | ] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
39 |
--------------------------------------------------------------------------------
/logger/transforms/__init__.py:
--------------------------------------------------------------------------------
1 | # flake8: noqa F401
2 |
3 | from .count_transform import CountTransform
4 | from .delta_transform import DeltaTransform
5 | from .derived_data_transform import DerivedDataTransform
6 | from .extract_field_transform import ExtractFieldTransform
7 | from .format_transform import FormatTransform
8 | from .from_json_transform import FromJSONTransform
9 | from .geofence_transform import GeofenceTransform
10 | from .interpolation_transform import InterpolationTransform
11 | from .max_min_transform import MaxMinTransform
12 | from .modify_value_transform import ModifyValueTransform
13 | from .nmea_checksum_transform import NMEAChecksumTransform
14 | from .nmea_transform import NMEATransform
15 | from .parse_nmea_transform import ParseNMEATransform
16 | from .parse_transform import ParseTransform
17 | from .prefix_transform import PrefixTransform
18 | from .qc_filter_transform import QCFilterTransform
19 | from .regex_filter_transform import RegexFilterTransform
20 | from .regex_replace_transform import RegexReplaceTransform
21 | from .select_fields_transform import SelectFieldsTransform
22 | from .slice_transform import SliceTransform
23 | from .split_transform import SplitTransform
24 | from .strip_transform import StripTransform
25 | from .subsample_transform import SubsampleTransform
26 | from .timestamp_transform import TimestampTransform
27 | from .to_das_record_transform import ToDASRecordTransform
28 | from .to_json_transform import ToJSONTransform
29 | from .true_winds_transform import TrueWindsTransform
30 | from .unique_transform import UniqueTransform
31 | from .value_filter_transform import ValueFilterTransform
32 | from .xml_aggregator_transform import XMLAggregatorTransform
33 |
--------------------------------------------------------------------------------
/utils/jinja_config_creator/cruise_config_generator.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # flake8: noqa: W605
3 | """
4 | Barebones script to generate OpenRVDAS config file from minimal input file
5 | using a Jinja template.
6 |
7 | Can be run from the command line as follows:
8 | ```
9 | python cruise_config_generator.py\
10 | --voyage_details tests\cruise_devices.yaml\
11 | --template tests\cruise_template.jinja\
12 | --config_output tests\sample_cruise.yaml
13 | ```
14 |
15 | Author: Hugh Barker
16 | Organisation: CSIRO
17 | Vessel: Investigator
18 | Date: June 2022
19 | """
20 | import yaml
21 | import argparse
22 | from jinja2 import Template
23 |
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Create an OpenRVDAS config file using jinja templates')

    parser.add_argument('--voyage_details', required=True,
                        help='Input voyage configuration yaml -\
                        voyage details and a list of devices')
    parser.add_argument('--template', required=True,
                        help='jinja2 template used to create full configuration')
    parser.add_argument('--config_output', required=True,
                        help='File to output full configuration yaml to')

    args = parser.parse_args()

    # Load the voyage details. NOTE(review): FullLoader can construct a range
    # of Python objects; if voyage files may come from untrusted sources,
    # prefer yaml.safe_load here.
    with open(args.voyage_details, encoding='utf-8') as f:
        data = yaml.load(f, Loader=yaml.FullLoader)

    # trim_blocks/lstrip_blocks keep template control statements from
    # leaving stray whitespace in the rendered YAML.
    with open(args.template, encoding='utf-8') as f:
        template = Template(f.read(), trim_blocks=True, lstrip_blocks=True)

    config = template.render(data)

    # Explicit utf-8 so output does not depend on the platform default.
    with open(args.config_output, 'w', encoding='utf-8') as f:
        f.write(config)
48 |
--------------------------------------------------------------------------------
/logger/transforms/to_json_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import json
4 | import logging
5 | import sys
6 |
7 | from typing import Union
8 | from os.path import dirname, realpath
9 | sys.path.append(dirname(dirname(dirname(realpath(__file__)))))
10 | from logger.utils.das_record import DASRecord # noqa: E402
11 | from logger.transforms.transform import Transform # noqa: E402
12 |
13 |
14 | ################################################################################
15 | #
class ToJSONTransform(Transform):
    """Convert passed DASRecords, lists or dicts to JSON. If pretty == True,
    format the JSON output for easy reading.
    """

    ############################
    def __init__(self, pretty=False):
        """
        pretty - if True, emit multi-line, indented, sorted-key JSON.
        """
        self.pretty = pretty

    ############################
    def transform(self, record: Union[DASRecord, float, int, bool, str, dict, list, set]):
        """Convert record to JSON; return None for unserializable types."""

        # See if it's something we can process, and if not, try digesting
        if not self.can_process_record(record):  # inherited from Transform()
            return self.digest_record(record)  # inherited from Transform()

        # isinstance (rather than exact type comparison) also accepts
        # subclasses, which serialize the same way.
        if isinstance(record, DASRecord):
            return record.as_json(self.pretty)

        if isinstance(record, (float, int, bool, str, dict, list, set)):
            # Fix: json.dumps has no representation for sets and raises
            # TypeError, even though this class advertises set support.
            # Serialize sets as lists (element order is unspecified).
            if isinstance(record, set):
                record = list(record)
            if self.pretty:
                return json.dumps(record, sort_keys=True, indent=4)
            return json.dumps(record)

        logging.warning('ToJSON transform received record format it could not '
                        'serialize: "%s"', type(record))
        return None
45 |
--------------------------------------------------------------------------------
/contrib/devices/Knudsen.yaml:
--------------------------------------------------------------------------------
1 | ######################################
2 | Knudsen3260:
3 | category: "device_type"
4 | description: "Knudsen Chirp 3260"
5 |
6 | format:
7 | - "{LF:g}kHz,{LFDepth:of},{LFValid:od},{HF:g}kHz,{HFDepth:of},{HFValid:od},{SoundSpeed:g},{Latitude:f},{Longitude:f}"
8 | - ",,,{HF:g}kHz,{HFDepth:of},{HFValid:od},{SoundSpeed:g},{Latitude:f},{Longitude:f}"
9 | - "{LF:g}kHz,{LFDepth:of},{LFValid:od},,,,{SoundSpeed:g},{Latitude:f},{Longitude:f}"
10 | - "$PKEL99,{LF:g}kHz,{LFDepth:of},{LFValid:od},{HF:g}kHz,{HFDepth:of},{HFValid:od},{SoundSpeed:g},{Latitude:f},{Longitude:f}"
11 | - "$PKEL99,{LF:g}kHz,{LFDepth:of},{LFValid:od},,,,{SoundSpeed:g},{Latitude:f},{Longitude:f}"
12 | - "$PKEL99,,,,{HF:g}kHz,{HFDepth:of},{HFValid:od},{SoundSpeed:g},{Latitude:f},{Longitude:f}"
13 |
14 | fields:
15 | LF:
16 | units: "kHz"
17 | description: "Frequency of low frequency transducer"
18 | LFDepth:
19 | units: "meters"
      description: "Depth in meters from transducer"
21 | LFValid:
22 | units: "0"
23 | description: "Valid if present (value may always be zero?)"
24 | HF:
25 | units: "kHz"
26 | description: "Frequency of high frequency transducer"
27 | HFDepth:
28 | units: "meters"
      description: "Depth in meters from transducer"
30 | HFValid:
31 | units: "0"
32 | description: "Valid if present (value may always be zero?)"
33 | SoundSpeed:
34 | units: "meters/second"
35 | description: "Sound speed velocity"
36 | Latitude:
37 | units: "degrees north"
38 | description: "Latitude in degrees north"
39 | Longitude:
40 | units: "degrees west"
41 | description: "Longitude in degrees west"
42 |
43 |
--------------------------------------------------------------------------------
/contrib/devices/EngineeringNBP.yaml:
--------------------------------------------------------------------------------
1 | ################################################################################
2 | # NBP-Specific Engineering Device Type Definitions
3 | #
4 | # See README.md in this directory
5 |
6 | ######################################
7 | EngineeringNBP:
8 | category: "device_type"
9 | description: "Aggregator of engineering data"
10 |
11 | # 12.26 19.28 507.5 568.8 234.6 -751.9 0 0 NAN NAN -11.5 -7.5
12 | format: "{Voltage:g} {CaseTemp:g} {Pump1Flow:g} {Pump2Flow:g} {Pump3Flow:g} {SeismicPressure:g} {PIRCaseRes:g} {PIRCaseMv:g} {Unknown1:w} {Unknown2:w} {Freezer1Temp:g} {Freezer2Temp:g}"
13 |
14 | fields:
15 | Voltage:
16 | units: "V"
17 | description: "Power Supply Voltage"
18 | CaseTemp:
19 | units: "degrees C"
20 | description: "Internal Case Temperature"
21 | Pump1Flow:
22 | units: "L/min"
23 | description: "Pump #1 flow rate"
24 | Pump2Flow:
25 | units: "L/min"
26 | description: "Pump #2 flow rate"
27 | Pump3Flow:
28 | units: "L/min"
29 | description: "Pump #3 flow rate"
30 | SeismicPressure:
31 | units: "lbs/sq-in"
32 | description: "Seismic air pressure"
33 | PIRCaseRes:
34 | units: "kOhm"
35 | description: "PIR case resistance (not currently hooked up, data is irrelevant)"
36 | PIRCaseMv:
37 | units: "mV"
38 | description: "PIR case ratiometric output (not currently hooked up, data is irrelevant)"
39 | Freezer1Temp:
40 | units: "degrees C"
41 | description: "Freezer #1 temperature"
42 | Freezer2Temp:
43 | units: "degrees C"
44 | description: "Freezer #2 temperature"
45 | Freezer3Temp:
46 | units: "degrees C"
47 | description: "Freezer #3 temperature"
48 |
--------------------------------------------------------------------------------
/display/css/map_demo.css:
--------------------------------------------------------------------------------
1 | #map-container { position: relative; top:20; width: 100%; height: 800px; }
2 | #map { position: absolute; top:0; bottom:0; right:0; left:0; }
3 |
4 | .custom .leaflet-popup-tip,
5 | .custom .leaflet-popup-content-wrapper {
6 | background: #e93434;
7 | color: #ffffff;
8 | }
9 | .info {
10 | background:#fff;
11 | position:absolute;
12 | width:400px;
13 | top:10px;
14 | right:10px;
15 | border-radius:2px;
16 | }
17 | .info .item {
18 | display:block;
19 | border-bottom:1px solid #eee;
20 | padding:10px;
21 | text-decoration:none;
22 | }
23 | .info .item small { color:#888; }
24 | .info .item:hover,
25 | .info .item.active { background:#f8f8f8; }
26 | .info .item:last-child { border-bottom:none; }
27 |
28 | .leaflet-popup-content {
29 | width:380px;
30 | }
31 |
32 | .tabs {
33 | position:relative;
34 | min-height:200px;
35 | clear:both;
36 | margin:25px 0;
37 | }
38 | .tab {
39 | float:left;
40 | display: none;
41 | }
42 | .tab:first-of-type {
43 | display: inline-block;
44 | }
45 | .tabs-link {
46 | position: relative;
47 | top: -14px;
48 | height: 20px;
49 | left: -40px;
50 | }
51 | .tab-link {
52 | background:#eee;
53 | display: inline-block;
54 | padding:10px;
55 | border:1px solid #ccc;
56 | margin-left:-1px;
57 | position:relative;
58 | list-style-type: none;
59 | left:1px;
60 | top:1px;
61 | cursor:pointer;
62 | }
63 | .tab-link {
64 | background:#f8f8f8;
65 | }
66 | .pointContent {
67 | background:white;
68 | position:absolute;
69 | top:28px;
70 | left:0;
71 | right:0;
72 | bottom:0;
73 | padding:20px;
74 | border:1px solid #ccc;
75 | }
76 | .tab:target {
77 | display: block;
78 | }
79 |
--------------------------------------------------------------------------------
/contrib/raspberrypi/readers/test_bme280_reader.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import sys
4 | import time
5 | import unittest
6 |
7 | from os.path import dirname, realpath
8 | sys.path.append(dirname(dirname(dirname(dirname(realpath(__file__))))))
9 | from contrib.raspberrypi.readers.bme280_reader import BME280Reader # noqa: E402
10 |
11 |
12 | ################################################################################
13 | ################################################################################
class TestBME280Reader(unittest.TestCase):
    """Exercise the BME280Reader against live sensor hardware."""

    ############################
    def test_interval(self):
        """Four reads at a 1-second interval should take ~4 seconds."""
        num_reads = 4
        reader = BME280Reader(interval=1)
        start = time.time()
        for _ in range(num_reads):
            reader.read()
        self.assertAlmostEqual(first=time.time(),
                               second=start + num_reads,
                               delta=0.3)

    ############################
    def test_conversions(self):
        """Imperial-unit readings should match metric ones after conversion."""
        metric_reader = BME280Reader()
        fahrenheit_reader = BME280Reader(temp_in_f=True)
        inches_reader = BME280Reader(pressure_in_inches=True)
        metric = metric_reader.read()
        fahrenheit = fahrenheit_reader.read()
        inches = inches_reader.read()

        # Temperature: (F - 32) * 5/9 == C, within sensor noise
        self.assertAlmostEqual(
            first=float(metric.split()[0]),
            second=(float(fahrenheit.split()[0]) - 32) * 5 / 9,
            delta=0.5)

        # Pressure: inHg * 33.86389 == hPa, within sensor noise
        self.assertAlmostEqual(
            first=float(metric.split()[2]),
            second=float(inches.split()[2]) * 33.86389,
            delta=0.1)
46 |
47 |
# Run the tests when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
50 |
--------------------------------------------------------------------------------
/contrib/raspberrypi/readers/test_bme688_reader.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import sys
4 | import time
5 | import unittest
6 |
7 | from os.path import dirname, realpath
8 | sys.path.append(dirname(dirname(dirname(dirname(realpath(__file__))))))
9 | from contrib.raspberrypi.readers.bme688_reader import BME688Reader # noqa: E402
10 |
11 |
12 | ################################################################################
13 | ################################################################################
class TestBME688Reader(unittest.TestCase):
    """Exercise the BME688Reader against live sensor hardware."""

    ############################
    def test_interval(self):
        """Reading n times at a 1-second interval should take ~n seconds."""
        n_reads = 4
        reader = BME688Reader(interval=1)
        start = time.time()
        for _ in range(n_reads):
            reader.read()
        self.assertAlmostEqual(first=time.time(),
                               second=start + n_reads,
                               delta=0.3)

    ############################
    def test_conversions(self):
        """Fahrenheit and inches-of-mercury outputs should agree with
        the metric readings after unit conversion."""
        metric_reader = BME688Reader()
        fahrenheit_reader = BME688Reader(temp_in_f=True)
        inches_reader = BME688Reader(pressure_in_inches=True)
        metric = metric_reader.read()
        fahrenheit = fahrenheit_reader.read()
        inches = inches_reader.read()

        # Temperature: F converted back to C should match the metric value
        self.assertAlmostEqual(
            first=float(metric.split()[0]),
            second=(float(fahrenheit.split()[0]) - 32) * 5 / 9,
            delta=0.5)

        # Pressure: inHg converted to hPa should match the metric value
        self.assertAlmostEqual(
            first=float(metric.split()[2]),
            second=float(inches.split()[2]) * 33.86389,
            delta=0.1)
46 |
47 |
# Run the tests when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
50 |
--------------------------------------------------------------------------------
/test/logger/transforms/test_nmea_checksum_transform.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
2 |
3 | import logging
4 | import sys
5 | import unittest
6 |
7 | sys.path.append('.')
8 | from logger.transforms.nmea_checksum_transform import NMEAChecksumTransform # noqa: E402
9 |
10 |
class DummyWriter:
    """Minimal stand-in for a Writer. Captures the most recent message
    passed to write() so tests can verify that NMEAChecksumTransform's
    writer option is properly invoked.
    """

    def __init__(self):
        # No message has been received yet
        self.write_message = None

    def write(self, message):
        """Log the message and remember it for later inspection."""
        logging.debug('write called: %s', message)
        self.write_message = message
22 |
23 |
class TestNMEAChecksumTransform(unittest.TestCase):
    """Tests for NMEAChecksumTransform checksum validation."""

    def test_default(self):
        transform = NMEAChecksumTransform()

        good = '$PSXN,20,1,0,0,0*3A'
        bad = '$PSXN,20,1,0,0,0*3C'

        # A record with a valid checksum passes through unchanged
        self.assertEqual(transform.transform(good), good)

        # A bad checksum drops the record and logs a warning
        with self.assertLogs(level=logging.WARNING):
            self.assertIsNone(transform.transform(bad))

        # Non-string input is also dropped with a warning
        with self.assertLogs(level=logging.WARNING):
            self.assertIsNone(transform.transform({1: 3}))

    def test_writer(self):
        writer = DummyWriter()
        transform = NMEAChecksumTransform(error_message='message: ', writer=writer)

        good = '$PSXN,20,1,0,0,0*3A'
        self.assertEqual(transform.transform(good), good)

        bad = '$PSXN,20,1,0,0,0*3C'
        self.assertIsNone(transform.transform(bad))

        # The error message plus the offending record went to the writer
        self.assertEqual(writer.write_message, 'message: ' + bad)
52 |
53 |
# Run the tests when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
56 |
--------------------------------------------------------------------------------
/test/logger/utils/test.key:
--------------------------------------------------------------------------------
1 | -----BEGIN PRIVATE KEY-----
2 | MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQC0LznAeNHnbgN7
3 | yGuj10VnKMfUBZwlG3ePV8nmuQoGSLyQQh2wG9igDftx9uWHvt+G7Bsl0h2GP1xA
4 | WI8Hc/l/d2hHqfuVc3x++eTwjOJReEFIYr3hbTtMr07ht6os9A2jlPgPkpmdsZyu
5 | QIXlNXfS4NrGayfy3BUP095aDJTLNlldF8hWd7Q6fNUfXcTc+PyrsFpneisH5fQY
6 | XyRhxFz77ueiq2hWBP80SxIXyBETaYXJroEhx30jDnGKMZKTGDQ7ddC/dl9tDVY4
7 | bfPTldZxV6N8ES6bCG5rm1u6yE36DgGA5Q0VW9sd2G7/Wa/rFxF7LhX/E5O4F8jo
8 | QbJRO+gVAgMBAAECggEBAIBjrjkRXjAT2fOXyetC9zccfdb78FkYi0befQUkEJqN
9 | tZ7tHY5hKShTDjoUfVUOkuPm1SlL4LHJ8AgBkljzMHK9ONbnrla9pZYspg8ptBHt
10 | KEdV5Ir+dGSd/g0j++RwOk+3VFJ0WXVeG5CmdUHvqWPfyMPdkuegHnnas/NiR5vL
11 | +Hb+qVDzZxuR/ZipPMveRyDaiewJJ3WLwAzNIJSVUjHTAhNU//ChS8LOyQe2V2VQ
12 | BQoXXTH05/fMIfF0fToxOz5ti4ahlZZ0FcM55CRoJCWRD5BREq3sxx4Hf+ojRFsF
13 | fzV8zPesYYhM0Pj8GxYPpz9Kq+9PNusaJLUwfuxNnPkCgYEA7qHb80pC6nbTCid2
14 | lm/ghyvv9898xiQ0EfOwdFfZP9YryykQ2hMlj42cjRHl1onE0YkM3yDyrnGpBzqR
15 | dT2YB9E4ou8fiScYB4zbQz8c14ZM2lbx8Jr0xJ5S4NKJPbBR+890fbfFk8+IvBwN
16 | AQvbWxPNOXJwk+IGwE1xqLJYBgcCgYEAwUxhffkiwH6M+91SUz+Wugd6PmXu1iqU
17 | 9l504KTfxdLKW+lHH4azYWAUwqvUC8ntzLOkaSAZqYKL0Be9PgwJ9ka7plRzO5S/
18 | cI2OJ3Nkep3IXsOaqMNOU4g9L/sTfV4xuGFNoxC+Utu5VKGCXN7u5B5ptrkzCHaT
19 | HMKmQ4zn+gMCgYEA4LKMN4P/imMkyPTr1y+1TdJzfPZk2PYSCa1aw9JMgK2cQwqC
20 | EbXhslB3zuXBc8f6CgtwkVGm8deYf3QIr0q2MvYwzEs5eJ3VJJbfh2yQLekJyLYB
21 | zrs6K0GhbI0SKYIzUFbBq/c1Fb9fUzFelVOi0xnMLoGxu64jCURi96Qm+lUCgYBo
22 | SWs36dgD/sLVh/zIiUAqsaoUTC5lQagIm8F3CbzpH+apW9xRd/0DhjDtLoNCsPkp
23 | PapwRS9TMul3pbcl6JzfUB4hiOJ6vK/ka5rMUgdd2A6ogePJNUXm71QDtPQ46T7t
24 | +SFH3tqwHXeJ33FT5bsBPB9Qw+5ynHaWR7YoL7dgPQKBgQCd3jcLYpfjhJmznfXK
25 | 6AUcsIjDlMNGfEWoDq9jWZdjw735cBkjRSMpGLGiHvaS0VZvlz6BVy3bGkQmhV4A
26 | BRFY5x3DfgeV485qtaMkUjCbSIvOo1/lPtujy+WTTIZLbaf7/is2LAXMfz+JOldP
27 | u0ztbiyvEtc48sqtt/1ifiE63Q==
28 | -----END PRIVATE KEY-----
29 |
--------------------------------------------------------------------------------
/docs/security.md:
--------------------------------------------------------------------------------
1 | # OpenRVDAS Security
2 | © 2019 David Pablo Cohn - DRAFT 2019-09-15
3 |
4 | At present, OpenRVDAS makes very broad security assumptions, most of
5 | which should be tightened up:
6 |
7 | * **Server machine is physically secure**: This assumption seems
8 | relatively safe. It's hard to protect against physical hardware
9 | attacks.
10 |
11 | * **Root and "rvdas" user accounts are secure**: We assume that both
the root account and the user `rvdas` account on the
13 | OpenRVDAS server are secure from malicious actors. A malicious
14 | actor could change code and/or configuration.
15 |
16 | We do not assume that other user accounts are secure from
17 | malicious actors, so in theory the server could be attacked by a
18 | user subjecting the machine to a heavy load, or filling up
19 | available disk.
20 |
21 | * **Network is free of malicious actors**: We assume that the system
22 | is running on a ship's internal network, and thus that the servers
23 | are not going to be subject to DOS attacks or maliciously
24 | malformed requests.
25 |
26 | The Django interface allows any user to view the console and
27 | display pages, but only allows authenticated Django superusers
28 | (typically user `rvdas`) to load configurations and start/stop
29 | loggers.
30 |
31 | At present, the system loads cruise definitions using a browser
32 | file chooser. This allows anyone who has the Django superuser
33 | password to upload and run an arbitrary logger. Because the
34 | TextFileReader and TextFileWriter components read/write text files
35 | (as their names suggest), this would in theory allow a user to
read and/or overwrite any server file that user rvdas has access
37 | to. This is a major security flaw described in Issue #145.
38 |
39 |
--------------------------------------------------------------------------------
/logger/transforms/extract_field_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import logging
4 | import sys
5 |
6 | from os.path import dirname, realpath
7 | sys.path.append(dirname(dirname(dirname(realpath(__file__)))))
8 | from logger.utils.das_record import DASRecord # noqa: E402
9 | from logger.transforms.transform import Transform # noqa: E402
10 |
11 |
12 | ################################################################################
13 | #
class ExtractFieldTransform(Transform):
    """Extract a single named field from a passed DASRecord or dict.

    transform() returns the field's value, or None if the record does
    not carry the field.
    """

    def __init__(self, field_name):
        """
        ```
        field_name   Name of the field to extract from each record.
        ```
        """
        self.field_name = field_name

    ############################
    def transform(self, record):
        """Return the value of self.field_name in the passed DASRecord or
        dict, or None if missing. A list is processed element-wise, with
        a list of results returned.
        """
        if not record:
            return None

        # If we've got a list, hope it's a list of records. Recurse,
        # calling transform() on each of the list elements in order and
        # return the resulting list.
        if isinstance(record, list):
            return [self.transform(single_record) for single_record in record]

        if isinstance(record, DASRecord):
            return record.fields.get(self.field_name)

        if isinstance(record, dict):
            fields = record.get('fields')
            if not fields:
                return None
            return fields.get(self.field_name)

        # We only reach here for record types we don't know how to handle.
        # (The previous message claimed a missing field, which was misleading.)
        logging.warning('ExtractFieldTransform got record of unsupported type '
                        '"%s"; can not look up field "%s" in record "%s"',
                        type(record).__name__, self.field_name, record)
        return None
50 |
--------------------------------------------------------------------------------
/logger/transforms/from_json_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import json
4 | import logging
5 | import sys
6 |
7 | from os.path import dirname, realpath
8 | sys.path.append(dirname(dirname(dirname(realpath(__file__)))))
9 | from logger.utils.das_record import DASRecord # noqa: E402
10 | from logger.transforms.transform import Transform # noqa: E402
11 |
12 |
13 | ################################################################################
14 | #
class FromJSONTransform(Transform):
    """Convert passed JSON text to either a DASRecord or a plain Python
    data structure (dict/list/scalar, whatever the JSON encodes).
    """

    def __init__(self, das_record=False):
        """
        ```
        das_record   If True, assume the parsed JSON is a dict of
                     field:value pairs and embed them in a DASRecord.
        ```
        """
        self.das_record = das_record

    ############################
    def transform(self, record: str):
        """Parse JSON record to Python data struct or DASRecord.
        Returns None (with a logged warning) on unparseable input."""

        # See if it's something we can process, and if not, try digesting
        if not self.can_process_record(record):  # inherited from Transform()
            return self.digest_record(record)  # inherited from Transform()

        try:
            data = json.loads(record)
        except json.JSONDecodeError:  # top-level alias of decoder.JSONDecodeError
            logging.warning('Failed to parse JSON string: "%s"', record)
            return None

        if not self.das_record:
            return data

        # A DASRecord needs field:value pairs, so the data must be a dict
        if not isinstance(data, dict):
            logging.warning('FromJSON asked to create DASRecord from non-dict '
                            'data: "%s"', type(data))
            return None

        return DASRecord(fields=data)
49 |
--------------------------------------------------------------------------------
/test/logger/transforms/test_prefix_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import logging
4 | import sys
5 | import unittest
6 |
7 | sys.path.append('.')
8 | from logger.transforms.prefix_transform import PrefixTransform # noqa: E402
9 |
10 |
class TestPrefixTransform(unittest.TestCase):
    """Tests for PrefixTransform with fixed and per-pattern prefixes."""

    def test_default(self):
        transform = PrefixTransform('prefix')
        self.assertIsNone(transform.transform(None))
        self.assertEqual(transform.transform('foo'), 'prefix foo')

        # A custom separator between prefix and record
        tab_transform = PrefixTransform('prefix', sep='\t')
        self.assertEqual(tab_transform.transform('foo'), 'prefix\tfoo')

    def test_map(self):
        # Records are prefixed according to which pattern they match
        prefix_map = {'p1': 'prefix1', 'p2': 'prefix2'}
        transform = PrefixTransform(prefix_map, quiet=True)
        self.assertIsNone(transform.transform(None))
        self.assertEqual(transform.transform('foop1'), 'prefix1 foop1')
        self.assertEqual(transform.transform('foop2'), 'prefix2 foop2')
        self.assertIsNone(transform.transform('foo'))

        # An empty pattern acts as a catch-all for unmatched records
        catch_all_map = {'p1': 'prefix1', 'p2': 'prefix2', '': 'prefix3'}
        transform = PrefixTransform(catch_all_map, quiet=True)
        self.assertEqual(transform.transform('foop1'), 'prefix1 foop1')
        self.assertEqual(transform.transform('foop2'), 'prefix2 foop2')
        self.assertEqual(transform.transform('foo'), 'prefix3 foo')

        # Without quiet=True, an unmatched record logs a warning
        transform = PrefixTransform({'p1': 'prefix1', 'p2': 'prefix2'})
        with self.assertLogs(logging.getLogger(), logging.WARNING):
            transform.transform('foo')
42 |
43 |
# Run the tests when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
46 |
--------------------------------------------------------------------------------
/contrib/utils/JSON_YAML_Creator/public/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
10 |
11 |
12 |
13 |
14 |
15 |
19 |
20 |
21 |
22 |
26 |
27 | OpenRVDAS JSON/YAML
28 |
29 |
30 | You need to enable JavaScript to run this app.
31 |
32 |
37 |
42 |
47 |
48 |
49 |
--------------------------------------------------------------------------------
/contrib/logger_templates/snapshot_logger_template.yaml:
--------------------------------------------------------------------------------
1 | logger_templates:
2 | #################
3 | snapshot_logger_template:
4 | configs:
5 | 'off': {}
6 |
7 | 'on': &snapshot_on
8 | readers:
9 | - class: CachedDataReader
10 | kwargs:
11 | data_server: <>
12 | subscription:
13 | fields: <>
14 | transforms:
15 | - class: InterpolationTransform
16 | module: logger.transforms.interpolation_transform
17 | kwargs:
18 | interval: <>
19 | window: <>
20 | data_id: <>
21 | metadata_interval: 60 # send metadata every 60 seconds
22 | field_spec: [
23 | # These fields we do a simple boxcar average on
24 | { sources: [MwxAirTemp, RTMPTemp,PortTrueWindSpeed, StbdTrueWindSpeed,
25 | MwxBarometer, KnudDepthHF, KnudDepthLF, Grv1Value],
26 | algorithm: boxcar_average,
27 | window: <>,
28 | result_prefix: Avg # Prefix each field name with this for output
29 | },
30 | # These fields we do a polar average on
31 | { sources: [PortTrueWindDir, StbdTrueWindDir],
32 | algorithm: polar_average,
33 | window: <>,
34 | result_prefix: Avg # Prefix each field name with this for output
35 | }
36 | ]
37 | writers:
38 | - class: CachedDataWriter
39 | kwargs:
40 | data_server: <>
41 |
42 | on+influx:
43 | <<: *snapshot_on
44 | writers:
45 | - class: CachedDataWriter
46 | kwargs:
47 | data_server: <>
48 | - class: InfluxDBWriter
49 | kwargs:
50 | bucket_name: <>
51 |
--------------------------------------------------------------------------------
/test/logger/utils/test_formats.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import sys
4 | import unittest
5 |
6 | sys.path.append('.')
7 | from logger.utils import formats # noqa: E402
8 |
9 |
class TestFormat(unittest.TestCase):
    """Sanity checks for the format hierarchy in logger.utils.formats."""

    def test_can_accept(self):
        json_rec = formats.JSON_Record
        # More general formats accept more specific ones, not vice versa
        self.assertTrue(formats.Bytes.can_accept(json_rec))
        self.assertFalse(formats.JSON.can_accept(formats.Bytes))
        self.assertTrue(formats.JSON.can_accept(json_rec))
        self.assertTrue(json_rec.can_accept(json_rec))
        self.assertFalse(json_rec.can_accept(formats.JSON))

    def test_common(self):
        json_rec = formats.JSON_Record
        # Sibling record formats meet at Bytes; related ones at the ancestor
        self.assertEqual(formats.Python_Record.common(json_rec), formats.Bytes)
        self.assertEqual(formats.Python_Record.common(formats.Python),
                         formats.Python)
        self.assertEqual(json_rec.common(json_rec), json_rec)

    def test_unknown(self):
        json_rec = formats.JSON_Record
        # Unknown neither accepts nor is accepted, and shares no ancestor
        self.assertFalse(formats.Unknown.can_accept(json_rec))
        self.assertFalse(formats.Python_Record.can_accept(formats.Unknown))
        self.assertIsNone(formats.Unknown.common(json_rec))
        self.assertIsNone(json_rec.common(formats.Unknown))

    def test_is_format(self):
        # True only for actual format classes
        for fmt in (formats.Unknown, formats.Bytes, formats.JSON_Record):
            self.assertTrue(formats.is_format(fmt))
        for non_fmt in (formats.is_format, 'a string', None, self):
            self.assertFalse(formats.is_format(non_fmt))
43 |
44 |
# Run the tests when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
47 |
--------------------------------------------------------------------------------
/logger/transforms/timestamp_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import sys
4 |
5 | from os.path import dirname, realpath
6 | sys.path.append(dirname(dirname(dirname(realpath(__file__)))))
7 | from logger.utils import timestamp # noqa: E402
8 | from logger.transforms.transform import Transform # noqa: E402
9 |
10 |
11 | ################################################################################
class TimestampTransform(Transform):
    """Prepend a timestamp to a text record."""
    def __init__(self, time_format=timestamp.TIME_FORMAT,
                 time_zone=timestamp.timezone.utc, sep=' '):
        """If time_format is not specified, the default format from
        logger.utils.timestamp is used; time_zone defaults to UTC."""
        self.time_format = time_format
        self.time_zone = time_zone
        self.sep = sep

    ############################
    def transform(self, record: str, ts=None):
        """Return the record with a timestamp and separator prepended.
        If ts is not supplied, a fresh one is generated now."""

        # Fix a single timestamp up front so that, when we recurse over
        # a list below, every element shares the same one.
        ts = ts or timestamp.time_str(time_format=self.time_format,
                                      time_zone=self.time_zone)

        if self.can_process_record(record):  # inherited from Transform()
            # A processable (string) record: stamp it and we're done.
            return ts + self.sep + record

        # A list gets element-wise treatment with the shared timestamp.
        if isinstance(record, list):
            return [self.transform(single, ts) for single in record]

        # Anything else goes to digest_record() to handle or complain.
        return self.digest_record(record)  # inherited from Transform()
43 |
--------------------------------------------------------------------------------
/test/logger/transforms/test_count_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import logging
4 | import sys
5 | import unittest
6 |
7 | sys.path.append('.')
8 | from logger.transforms.count_transform import CountTransform # noqa: E402
9 | from logger.utils.das_record import DASRecord # noqa: E402
10 |
11 |
class TestCountTransform(unittest.TestCase):
    """Tests for CountTransform's per-field occurrence counting."""

    ############################
    def test_default(self):
        counts = CountTransform()

        # First sighting of each field counts 1
        self.assertDictEqual(
            counts.transform({'f1': 1, 'f2': 1.5}), {'f1:count': 1, 'f2:count': 1})

        # Counts accumulate across calls (assertDictEqual here for
        # consistency with the other dict comparisons in this test)
        self.assertDictEqual(
            counts.transform({'f1': 1}), {'f1:count': 2})

        self.assertDictEqual(
            counts.transform({'f1': 1.1, 'f2': 1.5, 'f3': 'string'}),
            {'f1:count': 3, 'f2:count': 2, 'f3:count': 1})

        # DASRecord input: counts continue and data_id gets '_counts' appended
        record = DASRecord(data_id='foo',
                           message_type='bar',
                           fields={'f1': 1.1, 'f2': 1.0})
        result = counts.transform(record)
        self.assertEqual(result.data_id, 'foo_counts')
        self.assertDictEqual(result.fields, {'f1:count': 4, 'f2:count': 3})
33 |
34 |
35 | ################################################################################
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser()
    # Each -v bumps verbosity: 0 -> WARNING, 1 -> INFO, 2+ -> DEBUG
    parser.add_argument('-v', '--verbosity', dest='verbosity',
                        default=0, action='count',
                        help='Increase output verbosity')
    args = parser.parse_args()

    LOGGING_FORMAT = '%(asctime)-15s %(filename)s:%(lineno)d %(message)s'
    logging.basicConfig(format=LOGGING_FORMAT)

    LOG_LEVELS = {0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG}
    # Clamp verbosity to the highest defined level so extra -v flags are safe
    args.verbosity = min(args.verbosity, max(LOG_LEVELS))
    logging.getLogger().setLevel(LOG_LEVELS[args.verbosity])

    unittest.main(warnings='ignore')
52 |
--------------------------------------------------------------------------------
/test/logger/transforms/test_regex_filter_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import logging
4 | import sys
5 | import unittest
6 |
7 | sys.path.append('.')
8 | from logger.transforms.regex_filter_transform import RegexFilterTransform # noqa: E402
9 |
10 |
class TestRegexFilterTransform(unittest.TestCase):
    """Tests for RegexFilterTransform pattern matching and negation."""

    def test_default(self):
        # Records matching the pattern pass through; others are dropped
        transform = RegexFilterTransform(pattern='^foo')
        self.assertIsNone(transform.transform(None))
        self.assertIsNone(transform.transform('not foo'))
        self.assertEqual(transform.transform('foo bar'), 'foo bar')

        # negate=True inverts the filter
        transform = RegexFilterTransform(pattern='^foo', negate=True)
        self.assertEqual(transform.transform('not foo'), 'not foo')
        self.assertIsNone(transform.transform('foo bar'))

        # Raw strings keep the \d escape intact (no noqa: W605 needed)
        transform = RegexFilterTransform(pattern=r'^\dfoo')
        self.assertIsNone(transform.transform(None))
        self.assertIsNone(transform.transform('not foo'))
        self.assertEqual(transform.transform('9foo bar'), '9foo bar')

        transform = RegexFilterTransform(pattern=r'^\dfoo', negate=True)
        self.assertEqual(transform.transform('not foo'), 'not foo')
        self.assertIsNone(transform.transform('6foo bar'))
31 |
32 |
33 | if __name__ == '__main__':
34 | import argparse
35 | parser = argparse.ArgumentParser()
36 | parser.add_argument('-v', '--verbosity', dest='verbosity',
37 | default=0, action='count',
38 | help='Increase output verbosity')
39 | args = parser.parse_args()
40 |
41 | LOGGING_FORMAT = '%(asctime)-15s %(filename)s:%(lineno)d %(message)s'
42 | logging.basicConfig(format=LOGGING_FORMAT)
43 |
44 | LOG_LEVELS = {0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG}
45 | args.verbosity = min(args.verbosity, max(LOG_LEVELS))
46 | logging.getLogger().setLevel(LOG_LEVELS[args.verbosity])
47 |
48 | unittest.main(warnings='ignore')
49 |
--------------------------------------------------------------------------------
/display/README.md:
--------------------------------------------------------------------------------
1 | # OpenRVDAS Static Widgets
2 |
3 | ## Overview
4 |
5 | Please see the [README.md file in the parent directory](../README.md)
6 | for an introduction to the OpenRVDAS system, and [OpenRVDAS Display Widgets](../docs/display_widgets.md) for a description of code in this directory.
7 |
8 | This directory contains Javascript code and libraries (Highcharts and
9 | Leaflet) that are able to connect to a DataServer via websocket and
10 | display the data they receive on a web page. It is the obligation of
11 | the user to ensure that they have the appropriate licenses and
12 | rights to use the code in the included libraries.
13 |
14 | ## Contributing
15 |
16 | Please contact David Pablo Cohn (*david dot cohn at gmail dot com*) - to discuss
17 | opportunities for participating in code development.
18 |
19 | ## License
20 |
21 | This code is made available under the MIT license:
22 |
23 | Copyright (c) 2017 David Pablo Cohn
24 |
25 | Permission is hereby granted, free of charge, to any person obtaining a copy
26 | of this software and associated documentation files (the "Software"), to deal
27 | in the Software without restriction, including without limitation the rights
28 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
29 | copies of the Software, and to permit persons to whom the Software is
30 | furnished to do so, subject to the following conditions:
31 |
32 | The above copyright notice and this permission notice shall be included in all
33 | copies or substantial portions of the Software.
34 |
35 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
36 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
37 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
38 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
39 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
40 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
41 | SOFTWARE.
42 |
43 | ## Additional Licenses
44 |
45 |
--------------------------------------------------------------------------------
/logger/transforms/parse_nmea_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import sys
4 |
5 | from os.path import dirname, realpath
6 | sys.path.append(dirname(dirname(dirname(realpath(__file__)))))
7 | from logger.utils import nmea_parser # noqa: E402
8 | from logger.transforms.transform import Transform # noqa: E402
9 |
10 |
11 | ################################################################################
class ParseNMEATransform(Transform):
    """Parse a received NMEA-style text record and return the
    corresponding DASRecord.

    NOTE(review): the original docstring here appears garbled by text
    extraction (a quoted placeholder was lost); presumably the expected
    input is a 'data_id timestamp message'-style text record - confirm
    against logger.utils.nmea_parser.
    """

    def __init__(self, json=False,
                 message_path=nmea_parser.DEFAULT_MESSAGE_PATH,
                 sensor_path=nmea_parser.DEFAULT_SENSOR_PATH,
                 sensor_model_path=nmea_parser.DEFAULT_SENSOR_MODEL_PATH,
                 time_format=None):
        """
        ```
        json         Return a JSON-encoded representation of the DASRecord
                     instead of the DASRecord itself.

        message_path, sensor_path, sensor_model_path
                     Wildcarded path matching JSON definitions for sensor
                     messages, sensors and sensor models.

        time_format  Optional timestamp format passed through to NMEAParser.
        ```
        """
        self.json = json  # NOTE: parameter name shadows the stdlib json module
        self.parser = nmea_parser.NMEAParser(message_path, sensor_path,
                                             sensor_model_path,
                                             time_format=time_format)

    ############################
    def transform(self, record: str):
        """Parse record and return DASRecord (or its JSON encoding when
        self.json is set); return None if the parser can't handle it."""

        # See if it's something we can process, and if not, try digesting
        if not self.can_process_record(record):  # inherited from Transform()
            return self.digest_record(record)  # inherited from Transform()

        result = self.parser.parse_record(record)
        if not result:
            return None
        if self.json:
            # Caller asked for JSON text rather than a DASRecord object
            return result.as_json()
        return result
50 |
--------------------------------------------------------------------------------
/test/logger/transforms/test_strip_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import logging
4 | import sys
5 | import unittest
6 |
7 | sys.path.append('.')
8 | from logger.transforms.strip_transform import StripTransform # noqa: E402
9 |
10 |
class TestStripTransform(unittest.TestCase):
    """Tests for StripTransform's character-stripping options."""

    def test_default(self):
        sample = ' abc defg '

        # Default: strip the target chars everywhere in the record
        self.assertEqual(StripTransform().transform(sample), 'abcdefg')
        # Prefix/suffix flags restrict stripping to the record's ends
        self.assertEqual(
            StripTransform(strip_prefix=True).transform(sample), 'abc defg ')
        self.assertEqual(
            StripTransform(strip_suffix=True).transform(sample), ' abc defg')
        self.assertEqual(
            StripTransform(strip_prefix=True, strip_suffix=True).transform(sample),
            'abc defg')

        # An explicit chars argument overrides the default set
        self.assertEqual(StripTransform(chars=' cd').transform(sample), 'abefg')
        self.assertEqual(
            StripTransform(chars=' cad', strip_prefix=True).transform(sample),
            'bc defg ')

        # unprintable=True removes non-printing characters
        control_sample = '\x01\x05abc d\x19'
        self.assertEqual(
            StripTransform(unprintable=True).transform(control_sample), 'abc d')
33 |
34 |
35 | ################################################################################
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser()
    # Each -v bumps verbosity: 0 -> WARNING, 1 -> INFO, 2+ -> DEBUG
    parser.add_argument('-v', '--verbosity', dest='verbosity',
                        default=0, action='count',
                        help='Increase output verbosity')
    args = parser.parse_args()

    LOGGING_FORMAT = '%(asctime)-15s %(message)s'
    logging.basicConfig(format=LOGGING_FORMAT)

    LOG_LEVELS = {0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG}
    # Clamp verbosity to the highest defined level so extra -v flags are safe
    args.verbosity = min(args.verbosity, max(LOG_LEVELS))
    logging.getLogger().setLevel(LOG_LEVELS[args.verbosity])

    unittest.main(warnings='ignore')
52 |
--------------------------------------------------------------------------------
/test/logger/transforms/test_max_min_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import logging
4 | import sys
5 | import unittest
6 |
7 | sys.path.append('.')
8 | from logger.transforms.max_min_transform import MaxMinTransform # noqa: E402
9 | from logger.utils.das_record import DASRecord # noqa: E402
10 |
11 |
class TestMaxMinTransform(unittest.TestCase):
    """Tests for MaxMinTransform: only newly-seen max/min values are emitted."""

    ############################
    def test_default(self):
        max_min = MaxMinTransform()
        sample = {'f1': 1, 'f2': 1.5, 'f3': 'string', 'f4': []}

        # First record: every numeric field sets both a max and a min;
        # non-numeric fields are ignored
        self.assertDictEqual(
            max_min.transform(dict(sample)),
            {'f1:max': 1, 'f1:min': 1, 'f2:max': 1.5, 'f2:min': 1.5})

        # Identical values a second time: nothing new to report
        self.assertEqual(max_min.transform(dict(sample)), None)

        # A new f1 maximum is the only change
        self.assertDictEqual(
            max_min.transform({'f1': 1.1, 'f2': 1.5, 'f3': 'string', 'f4': []}),
            {'f1:max': 1.1})

        # DASRecord input: result carries a '_limits' data_id suffix
        record = DASRecord(data_id='foo',
                           message_type='bar',
                           fields={'f1': 1.1, 'f2': 1.0, 'f3': 'string', 'f4': []})
        result = max_min.transform(record)
        self.assertEqual(result.data_id, 'foo_limits')
        self.assertDictEqual(result.fields, {'f2:min': 1.0})
35 |
36 |
################################################################################
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('-v', '--verbosity', dest='verbosity',
                        default=0, action='count',
                        help='Increase output verbosity')
    args = parser.parse_args()

    logging.basicConfig(
        format='%(asctime)-15s %(filename)s:%(lineno)d %(message)s')

    # Cap requested verbosity at the most detailed level we define.
    LOG_LEVELS = {0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG}
    level = LOG_LEVELS[min(args.verbosity, max(LOG_LEVELS))]
    logging.getLogger().setLevel(level)

    unittest.main(warnings='ignore')
54 |
--------------------------------------------------------------------------------
/logger/writers/record_screen_writer.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import shutil
4 | import sys
5 |
6 | from os.path import dirname, realpath
7 | sys.path.append(dirname(dirname(dirname(realpath(__file__)))))
8 | from logger.utils.das_record import DASRecord # noqa: E402
9 | from logger.writers.writer import Writer # noqa: E402
10 |
11 |
class RecordScreenWriter(Writer):
    """Write DASRecords to terminal screen in some survivable
    format. Mostly intended for debugging."""

    def __init__(self):
        """Takes no arguments; all state accumulates from written records."""
        self.values = {}      # field name -> most recently seen value
        self.timestamps = {}  # field name -> timestamp of that value
        self.latest = 0       # timestamp of most recently written record

    ############################
    def move_cursor(self, x, y):
        """Move the terminal cursor via an ANSI escape sequence.
        NOTE(review): the CSI 'f' sequence takes row;column, and print()
        appends a newline after the escape — confirm both are intended."""
        print('\033[{};{}f'.format(str(x), str(y)))

    ############################
    # receives a DASRecord
    def write(self, record: DASRecord):
        """Fold the record's fields into our cached values and redraw the
        screen: one 'field : value' line per field, in alpha order."""

        # See if it's something we can process, and if not, try digesting
        if not self.can_process_record(record):  # inherited from BaseModule()
            self.digest_record(record)  # inherited from BaseModule()
            return

        # Incorporate the values from the record
        self.latest = record.timestamp
        for field in record.fields:
            self.values[field] = record.fields[field]
            self.timestamps[field] = self.latest

        # Get term size, in case it's been resized
        (cols, rows) = shutil.get_terminal_size()
        self.move_cursor(0, 0)
        # Redraw stuff
        keys = sorted(self.values.keys())
        for i in range(rows):
            # Go through keys in alpha order
            if i < len(keys):
                key = keys[i]
                line = '{} : {}'.format(key, self.values[key])

            # Fill the rest of the screen with blank lines
            else:
                line = ''

            # Pad lines out to screen width to overwrite old output, and
            # truncate lines wider than the screen so they don't wrap and
            # push the rest of the redraw down. (Previously a too-long line
            # produced a negative pad count — a no-op — and wrapped.)
            print(line[:cols].ljust(cols))
60 |
--------------------------------------------------------------------------------
/test/logger/transforms/test_timestamp_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import sys
4 | import unittest
5 |
6 | sys.path.append('.')
7 | from logger.utils import timestamp # noqa: E402
8 | from logger.transforms.timestamp_transform import TimestampTransform # noqa: E402
9 |
10 |
class TestTimestampTransform(unittest.TestCase):
    """Tests for TimestampTransform with default and custom time formats."""

    ############################
    def test_default(self):
        transform = TimestampTransform()

        # None passes through untouched.
        self.assertIsNone(transform.transform(None))

        result = transform.transform('blah')
        time_str, payload = result.split()[0], result.split()[1]

        # The prepended timestamp should be (nearly) the current time...
        self.assertAlmostEqual(timestamp.timestamp(time_str=time_str),
                               timestamp.timestamp(), places=1)
        # ...and the original record should follow it.
        self.assertEqual(payload, 'blah')

    ############################
    def test_list(self):
        transform = TimestampTransform()

        self.assertIsNone(transform.transform(None))

        record = ['foo', 'bar', 'baz']
        result = transform.transform(record)

        # Every element of a list gets the identical timestamp...
        stamps = [entry.split()[0] for entry in result]
        self.assertEqual(len(set(stamps)), 1)

        # ...which should be (nearly) now.
        self.assertAlmostEqual(timestamp.timestamp(time_str=stamps[0]),
                               timestamp.timestamp(), places=1)

        # The original strings follow their timestamps.
        self.assertEqual([entry.split()[1] for entry in result], record)

    ############################
    # Try handing a custom timestamp format (in this case, a date). It
    # bears mentioning that this test will fail if run exactly at
    # midnight...
    def test_custom(self):
        transform = TimestampTransform(time_format=timestamp.DATE_FORMAT)

        self.assertIsNone(transform.transform(None))

        stamped = transform.transform('blah').split()
        self.assertEqual(stamped[0], timestamp.date_str())
        self.assertEqual(stamped[1], 'blah')
59 |
60 |
if __name__ == '__main__':
    # Support the same -v/--verbosity flags as the sibling transform tests,
    # so all test files in this directory behave consistently. Imports are
    # local so the module's import-time behavior is unchanged.
    import argparse
    import logging

    parser = argparse.ArgumentParser()
    parser.add_argument('-v', '--verbosity', dest='verbosity',
                        default=0, action='count',
                        help='Increase output verbosity')
    args = parser.parse_args()

    LOGGING_FORMAT = '%(asctime)-15s %(message)s'
    logging.basicConfig(format=LOGGING_FORMAT)

    LOG_LEVELS = {0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG}
    args.verbosity = min(args.verbosity, max(LOG_LEVELS))
    logging.getLogger().setLevel(LOG_LEVELS[args.verbosity])

    unittest.main(warnings='ignore')
63 |
--------------------------------------------------------------------------------
/contrib/README.md:
--------------------------------------------------------------------------------
1 | # How to Use the 'contrib/' Directory
2 |
3 | __NOTE:__ This directory and the current instructions are maintained for compatibility reasons, but as of 2024-10-01, the preferred way to contribute code to OpenRVDAS is to open pull requests to the [openrvdas_contrib](https://github.com/OceanDataTools/openrvdas_contrib) repository.
4 | ----
5 |
6 | In short, this is where you should put code that you wish to
7 | contribute to the OpenRVDAS project.
8 |
9 | This differs from the 'local/' directory in that local/ is for
10 | definitions, configurations and code specific to a particular ship,
11 | project and/or organization. The contrib/ directory is intended for
12 | code that the author believes may be of use outside their specific
13 | project. Code from contrib/ that proves especially useful may be
14 | incorporated into the core OpenRVDAS structure.
15 |
16 | Below, we propose a directory structure that should prevent file
17 | collisions:
18 |
19 | ```
20 | contrib/
21 | my_project/ - Project/individual/organization, e.g. coriolix, pablo68, etc.
22 | database/ - Mimic the structure of top-level OpenRVDAS with your code
23 | logger/ - " "
24 | utils/ - " "
25 | ```
26 |
Note that Reader/Transform/Writer code placed in the contrib/ directory may be incorporated at runtime by a listener by using the ``module`` declaration in the logger configuration:
28 |
29 | ```
30 | readers:
31 | # Code in contrib/my_project/readers/custom_reader.py
32 | class: MyCustomReader
33 | module: contrib.my_project.readers.custom_reader
34 | kwargs:
35 | host_port: 'host_port:8174'
36 | interval: 5
37 |
38 | writers:
39 | # Code in contrib/my_project/writers/custom_writer.py
40 | class: MyCustomWriter
41 | module: contrib.my_project.writers.custom_writer
42 | kwargs:
43 | base_path: '/var/tmp/log/custom_writer'
44 | ```
45 |
46 | In general, we recommend that any organization or ship using OpenRVDAS
47 | create their own branch or fork of the code to allow them to
48 | selectively merge OpenRVDAS code updates as they see fit. Following
49 | the above structure will simplify the process.
50 |
51 |
--------------------------------------------------------------------------------
/contrib/logger_templates/true_winds_logger_template.yaml:
--------------------------------------------------------------------------------
1 | ###################
2 | logger_templates:
3 | #################
4 | # Derived true winds logger
5 | true_winds_logger_template:
6 | configs:
7 | 'off': {}
8 |
9 | 'on': &true_winds_on
10 | readers:
11 | - class: CachedDataReader
12 | kwargs:
13 | data_server: <>
14 | subscription:
15 | fields:
16 | <>:
17 | seconds: 0
18 | <>:
19 | seconds: 0
20 | <>:
21 | seconds: 0
22 | <>:
23 | seconds: 0
24 | <>:
25 | seconds: 0
26 | transforms:
27 | - class: TrueWindsTransform
28 | kwargs:
29 | data_id: <>
30 | convert_speed_factor: <>
31 | course_field: <>
32 | heading_field: <>
33 | speed_field: <>
34 | wind_dir_field: <>
35 | wind_speed_field: <>
36 |
37 | # Output fields
38 | apparent_dir_name: <>
39 | true_dir_name: <>
40 | true_speed_name: <>
41 | update_on_fields:
42 | - <>
43 | max_field_age:
44 | <>: <>
45 | <>: <>
46 | <>: <>
47 | <>: <>
48 | <>: <>
49 | metadata_interval: 10
50 | writers:
51 | - class: CachedDataWriter
52 | kwargs:
53 | data_server: <>
54 |
55 | on+influx:
56 | <<: *true_winds_on
57 | writers:
58 | - class: CachedDataWriter
59 | kwargs:
60 | data_server: <>
61 | - class: InfluxDBWriter
62 | kwargs:
63 | bucket_name: <>
64 |
--------------------------------------------------------------------------------
/database/setup_mongo_connector.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Set up appropriate user and test databases and users for mongo_connector.
4 |
5 |
6 | # use admin
7 | # db.createUser(
8 | # {
9 | # user: "$USER",
10 | # pwd: "$PWD",
11 | # roles: [ { role: "userAdminAnyDatabase", db: "admin" }, "readWriteAnyDatabase" ]
12 | # }
13 | # )
14 |
15 |
16 | if [ ! $# == 2 ]; then
17 | echo Usage:
18 | echo
19 | echo $0 MONGO_USER_NAME MONGO_USER_PASSWORD
20 | echo
21 | echo Will create required tables and named Mongo user and give user
22 | echo access to the newly-created tables.
23 | exit
24 | fi
25 |
26 | if [ ! `which mongo` ]; then
27 | echo '####################################################################'
28 | echo NOTE: Before running this script, please install and set up
29 | echo the appropriate MongoDB server.
30 | echo '####################################################################'
31 | exit
32 | fi
33 |
34 | USER=$1
35 | PWD=$2
36 |
37 | # Create databases if they don't exist, and give user access. Also
38 | # give user 'test' access to test database.
39 | #mysql -u root -p$ROOT_PWD <
26 |
27 | """
28 | import logging
29 | import sys
30 |
31 | sys.path.append('.')
32 |
33 | DEFAULT_DATABASE = 'data'
34 | DEFAULT_DATABASE_HOST = '127.0.0.1:27017'
35 | DEFAULT_DATABASE_USER = 'rvdas'
36 | DEFAULT_DATABASE_PASSWORD = 'rvdas'
37 |
38 | # Which database connector to use. When selecting a new connector, you may
39 | # need to run the corresponding database/setup_xxx_connector.sh script.
40 |
41 | Connector = None
42 | DATABASE_ENABLED = False
43 | MONGO_ENABLED = False
44 |
45 | try:
46 | # Specify/uncomment the database you're using here
47 |
48 | # To set up MongoDB connectivity, install, configure and start
49 | # Mongo server, and set up appropriate mongo user:
50 | # apt-get install mongodb-org # ubuntu
51 | #
52 | # pip3 install pymongo
53 | #
# # Create 'data' and 'test' databases and mongo user
55 | # database/setup_mongo_connector.sh
56 | #
57 | from database.mongo_connector import MONGO_ENABLED, MongoConnector as Connector
58 | #from database.mongo_record_connector import MONGO_ENABLED, MongoRecordConnector as Connector
59 | if MONGO_ENABLED:
60 | DATABASE_ENABLED = True
61 |
62 | # Put instructions and imports for other databases here
63 |
64 | except ImportError:
65 | pass
66 |
67 | if not DATABASE_ENABLED:
68 | logging.warning('Settings in database/settings.py not configured; database '
69 | 'functionality may not be available.')
70 |
--------------------------------------------------------------------------------
/django_gui/templates/django_gui/choose_file.html:
--------------------------------------------------------------------------------
1 | {% extends 'django_gui/base.html' %}
2 |
3 | {% load static %}
4 |
5 | {% block content %}
6 |
7 | Choose Cruise Definition File
8 | Choose Cruise Definition File
9 |
10 | {% if target_file %}
11 | {# ############################################################ #}
12 | {# If we have a target file, ask them to confirm loading it. #}
13 |
34 |
35 | {% else %}
36 | {# ############################################################ #}
37 | {# Directory listing here... #}
38 |
71 | {% endif %}
72 |
73 | {% endblock %}
74 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | .DS_Store?
3 | #.DS_Store#
4 |
5 | pylintrc
6 |
7 | __pycache__
8 | *.pyc
9 |
10 | *~
11 | *.bak
12 |
13 | db.sqlite3
14 | django_gui/migrations
15 |
16 | # Files created by utils/install_openrvdas.sh
17 | django_gui/openrvdas_nginx.conf
18 | django_gui/openrvdas_uwsgi.ini
19 | django_gui/uwsgi_params
20 |
21 | # Files copied from their respective .dist sources
22 | # by utils/install_openrvdas.sh
23 | django_gui/settings.py
24 | django_gui/settings.py-e
25 | database/settings.py
26 | database/settings.py-e
27 | logger/settings.py
28 | display/js/widgets/settings.js
29 |
30 | # InfluxDB
31 | database/influxdb/LICENSE
32 | database/influxdb/README.md
33 | database/influxdb/influx
34 | database/influxdb/influxd
35 | database/influxdb/settings.py
36 | database/influxdb/settings.py-e
37 |
38 | static/
39 |
40 | # PyCharm
41 | .idea/
42 |
43 | # Visual Studio Code
44 | .vscode
45 |
46 | # virtualenv
47 | venv/
48 | .venv
49 |
50 |
51 | # SSH
52 | *.pem
53 | *.crt
54 | *.key
55 |
56 | # CORIOLIX
57 | local/rcrv/settings.py
58 | local/rcrv/config_template.yaml
59 | local/rcrv/.coriolix_preferences
60 |
61 | # OceanX
62 | local/alucia
63 |
64 | # Stony Brook
65 | local/stonybrook
66 |
67 | local/unols
68 |
69 | local/usap/palmer
70 |
71 | # local for Scripps Institution of Oceanography
72 | #
73 | # SIO is testing this software out and wants the ability to make rapid
74 | # changes for a while. Once stable, we can more define what we
75 | # want to share with better granularity.
76 | local/sio/
77 |
78 | # local for STARC (USCGC Healy)
79 | #
80 | # STARC is testing this software out and wants the ability to make rapid
81 | # changes for a while. Once stable, we can more define what we
82 | # want to share with better granularity.
83 | local/starc/
84 |
85 | # Created by build scripts
86 | .install_influxdb_preferences
87 | .install_openrvdas_preferences
88 | .install_mysql_preferences
89 | .install_waterwall_preferences
90 |
91 | # Symlink for legacy widget paths
92 | widgets
93 |
94 | # For optional doc server
95 | mkdocs.yml
96 |
97 | # Contrib logger creation tool
98 | contrib/utils/JSON_YAML_Creator/node_modules
99 | contrib/utils/JSON_YAML_Creator/package-lock.json
100 | contrib/utils/JSON_YAML_Creator/yarn.lock
101 |
102 | # For sqlite_gui
103 | sqlite_gui
104 | .install_sqlite_gui_preferences
105 | server/openrvdas.sql
106 |
107 | # tmp
108 | tmp
109 |
--------------------------------------------------------------------------------
/display/js/d3/README.md:
--------------------------------------------------------------------------------
1 | # D3: Data-Driven Documents
2 |
3 |
4 |
5 | **D3** (or **D3.js**) is a JavaScript library for visualizing data using web standards. D3 helps you bring data to life using SVG, Canvas and HTML. D3 combines powerful visualization and interaction techniques with a data-driven approach to DOM manipulation, giving you the full capabilities of modern browsers and the freedom to design the right visual interface for your data.
6 |
7 | ## Resources
8 |
9 | * [API Reference](https://github.com/d3/d3/blob/master/API.md)
10 | * [Release Notes](https://github.com/d3/d3/releases)
11 | * [Gallery](https://github.com/d3/d3/wiki/Gallery)
12 | * [Examples](https://observablehq.com/@d3)
13 | * [Wiki](https://github.com/d3/d3/wiki)
14 |
15 | ## Installing
16 |
17 | If you use npm, `npm install d3`. Otherwise, download the [latest release](https://github.com/d3/d3/releases/latest). The released bundle supports anonymous AMD, CommonJS, and vanilla environments. You can load directly from [d3js.org](https://d3js.org), [CDNJS](https://cdnjs.com/libraries/d3), or [unpkg](https://unpkg.com/d3/). For example:
18 |
19 | ```html
20 |
21 | ```
22 |
23 | For the minified version:
24 |
25 | ```html
26 |
27 | ```
28 |
29 | You can also use the standalone D3 microlibraries. For example, [d3-selection](https://github.com/d3/d3-selection):
30 |
31 | ```html
32 |
33 | ```
34 |
35 | D3 is written using [ES2015 modules](http://www.2ality.com/2014/09/es6-modules-final.html). Create a [custom bundle using Rollup](https://bl.ocks.org/mbostock/bb09af4c39c79cffcde4), Webpack, or your preferred bundler. To import D3 into an ES2015 application, either import specific symbols from specific D3 modules:
36 |
37 | ```js
38 | import {scaleLinear} from "d3-scale";
39 | ```
40 |
41 | Or import everything into a namespace (here, `d3`):
42 |
43 | ```js
44 | import * as d3 from "d3";
45 | ```
46 |
47 | In Node:
48 |
49 | ```js
50 | var d3 = require("d3");
51 | ```
52 |
53 | You can also require individual modules and combine them into a `d3` object using [Object.assign](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/assign):
54 |
55 | ```js
56 | var d3 = Object.assign({}, require("d3-format"), require("d3-geo"), require("d3-geo-projection"));
57 | ```
58 |
--------------------------------------------------------------------------------
/test/logger/transforms/test_to_das_record.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import logging
4 | import sys
5 | import time
6 | import unittest
7 |
8 | sys.path.append('.')
9 | from logger.transforms.to_das_record_transform import ToDASRecordTransform # noqa: E402
10 |
11 |
class TestToDASRecordTransform(unittest.TestCase):
    """Tests for ToDASRecordTransform with and without a field_name."""

    ############################
    def test_field_name(self):
        # With a field_name, a scalar record becomes that field's value.
        transform = ToDASRecordTransform(data_id='my_data_id',
                                         field_name='my_field_name')
        result = transform.transform('my_value')
        self.assertAlmostEqual(result.timestamp, time.time(), delta=0.001)
        self.assertEqual(result.data_id, 'my_data_id')
        self.assertDictEqual(result.fields, {'my_field_name': 'my_value'})

        # Lists are transformed element by element.
        result = transform.transform(['this should show up as a field value'])
        self.assertEqual(result[0].fields.get('my_field_name'),
                         'this should show up as a field value')

    ############################
    def test_no_field_name(self):
        # Without a field_name, only dicts can be converted...
        transform = ToDASRecordTransform(data_id='my_data_id')
        result = transform.transform({'f1': 'v1', 'f2': 'v2'})
        self.assertAlmostEqual(result.timestamp, time.time(), delta=0.001)
        self.assertEqual(result.data_id, 'my_data_id')
        self.assertDictEqual(result.fields, {'f1': 'v1', 'f2': 'v2'})

        # ...anything else warns and yields None (or an empty list).
        with self.assertLogs(level='WARNING'):
            self.assertIsNone(transform.transform('this should log a warning'))

        with self.assertLogs(level='WARNING'):
            self.assertEqual(
                transform.transform(['this should log a warning']), [])
40 |
41 |
################################################################################
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('-v', '--verbosity', dest='verbosity',
                        default=0, action='count',
                        help='Increase output verbosity')
    args = parser.parse_args()

    logging.basicConfig(
        format='%(asctime)-15s %(filename)s:%(lineno)d %(message)s')

    # Translate the -v flag count into a log level, capped at DEBUG.
    LOG_LEVELS = {0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG}
    logging.getLogger().setLevel(
        LOG_LEVELS[min(args.verbosity, max(LOG_LEVELS))])

    unittest.main(warnings='ignore')
59 |
--------------------------------------------------------------------------------
/database/settings.py.dist:
--------------------------------------------------------------------------------
1 | """
2 | Settings for database operations.
3 |
4 | THIS FILE MUST BE COPIED OVER TO database/settings.py to be
5 | operational.
6 |
7 | See below for additional database-specific install requirements
8 |
9 | """
10 | # flake8: noqa E502
11 |
12 | import logging
13 | import sys
14 |
15 | sys.path.append('.')
16 |
17 | DEFAULT_DATABASE = 'data'
18 | DEFAULT_DATABASE_HOST = 'localhost'
19 | DEFAULT_DATABASE_USER = 'rvdas'
20 | DEFAULT_DATABASE_PASSWORD = 'rvdas'
21 |
22 | # Which database connector to use. When selecting a new connector, you may
23 | # need to run the corresponding database/setup_xxx_connector.sh script.
24 |
25 | Connector = None
26 | DATABASE_ENABLED = False
27 | MONGO_ENABLED = False
28 | MYSQL_ENABLED = False
29 | POSTGRES_ENABLED = False
30 |
31 | try:
32 | # Specify/uncomment the database you're using here
33 |
34 | # To set up MySQL connectivity, install, configure and start
35 | # MySQL server, and set up appropriate mysql user:
36 | # apt-get install mysql-server libmysqlclient-dev # ubuntu
37 | #
38 | # pip3 install mysqlclient, mysql-connector-python
39 | #
40 | # # Create 'data' and 'test' databases and mysql_user
41 | # database/setup_mysql_connector.sh
42 | #
43 | from database.mysql_connector import MYSQL_ENABLED, MySQLConnector as Connector
44 | #from database.mysql_record_connector import MYSQL_ENABLED, MySQLRecordConnector as Connector
45 | if MYSQL_ENABLED:
46 | DATABASE_ENABLED = True
47 |
48 | # To set up PostgreSQL connectivity, install, configure and start
49 | # PostgreSQL server, and set up appropriate postgres user:
50 | # apt-get install postgresql-12 postgresql-client-12 # ubuntu
51 | #
52 | # pip3 install psycopg2-binary
53 | #
# # Create 'data' and 'test' databases and postgres user
55 | # database/setup_postgres_connector.sh
56 | #
57 | # from database.postgresql_connector import POSTGRES_ENABLED, PostgreSQLConnector as Connector
58 | # from database.postgresql_record_connector import POSTGRES_ENABLED, PostgreSQLRecordConnector as Connector
59 | # if POSTGRES_ENABLED:
60 | # DATABASE_ENABLED = True
61 |
62 | # Put instructions and imports for other databases here
63 |
64 | except ImportError:
65 | pass
66 |
67 | if not DATABASE_ENABLED:
68 | logging.info('Settings in database/settings.py not configured; database '
69 | 'functionality may not be available.')
70 |
--------------------------------------------------------------------------------
/logger/utils/stderr_logging.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import json
4 | import logging
5 | import time
6 | import sys
7 |
8 | from os.path import dirname, realpath
9 | sys.path.append(dirname(dirname(dirname(realpath(__file__)))))
10 | from logger.utils.timestamp import LOGGING_TIME_FORMAT # noqa: E402
11 |
12 | DEFAULT_LOGGING_FORMAT = ' '.join([
13 | '%(asctime)-15sZ',
14 | '%(levelno)s',
15 | '%(levelname)s',
16 | '%(filename)s:%(lineno)d',
17 | '%(message)s',
18 | ])
19 |
20 | STDERR_FORMATTER = logging.Formatter(fmt=DEFAULT_LOGGING_FORMAT,
21 | datefmt=LOGGING_TIME_FORMAT)
22 | STDERR_FORMATTER.converter = time.gmtime
23 |
24 |
class StdErrLoggingHandler(logging.Handler):
    """Write Python logging.* messages to whatever writer we're passed. To
    use, run

    logging.getLogger().addHandler(StdErrLoggingHandler(my_writer))
    """

    def __init__(self, writers, parse_to_json=False):
        """
        writers - either a Writer object or a list of Writer objects

        parse_to_json - if true, expect to receive output as
        a string in DEFAULT_LOGGING_FORMAT, and parse it into a dict of
        the respective values.
        """
        super().__init__()
        self.writers = writers
        self.parse_to_json = parse_to_json

    def emit(self, record):
        """Format the record and hand it to our writer(s)."""
        # Temporarily push the logging level up as high as it can go to
        # effectively disable recursion induced by logging that occurs inside
        # whatever writer we're using.
        log_level = logging.root.getEffectiveLevel()
        logging.root.setLevel(logging.CRITICAL)

        # The try/finally guarantees the level is restored even if a writer
        # raises; without it, one bad write left all logging disabled.
        try:
            message = STDERR_FORMATTER.format(record)

            # If we're supposed to parse string into a dict
            if self.parse_to_json:
                try:
                    (asctime, levelno, levelname, mesg) = \
                        message.split(' ', maxsplit=3)
                    fields = {'asctime': asctime, 'levelno': int(levelno),
                              'levelname': levelname, 'message': mesg}
                except ValueError:
                    # Didn't match the expected format; pass it through whole.
                    fields = {'message': message}
                message = json.dumps(fields)

            # Write message out to each writer
            if isinstance(self.writers, list):
                for writer in self.writers:
                    if writer:
                        writer.write(message)
            else:
                self.writers.write(message)
        finally:
            logging.root.setLevel(log_level)
70 |
--------------------------------------------------------------------------------
/test/configs/sample_configs.yaml:
--------------------------------------------------------------------------------
1 | eng1->net:
2 | name: eng1->net
3 | readers:
4 | class: SerialReader
5 | kwargs: {baudrate: 9600, port: /tmp/tty_eng1}
6 | transforms: {class: TimestampTransform}
7 | writers:
8 | class: ComposedWriter
9 | kwargs:
10 | transforms:
11 | class: PrefixTransform
12 | kwargs: {prefix: eng1}
13 | writers:
14 | class: NetworkWriter
15 | kwargs: {network: ':6224'}
16 | gyr1->net:
17 | name: gyr1->net
18 | readers:
19 | class: SerialReader
20 | kwargs: {baudrate: 9600, port: /tmp/tty_gyr1}
21 | transforms: {class: TimestampTransform}
22 | writers:
23 | class: ComposedWriter
24 | kwargs:
25 | transforms:
26 | class: PrefixTransform
27 | kwargs: {prefix: gyr1}
28 | writers:
29 | class: NetworkWriter
30 | kwargs: {network: ':6224'}
31 | knud->net:
32 | host_id: knud.host
33 | name: knud->net
34 | readers:
35 | class: SerialReader
36 | kwargs: {baudrate: 9600, port: /tmp/tty_knud}
37 | transforms: {class: TimestampTransform}
38 | writers:
39 | class: ComposedWriter
40 | kwargs:
41 | transforms:
42 | class: PrefixTransform
43 | kwargs: {prefix: knud}
44 | writers:
45 | class: NetworkWriter
46 | kwargs: {network: ':6224'}
47 | mwx1->net:
48 | name: mwx1->net
49 | readers:
50 | class: SerialReader
51 | kwargs: {baudrate: 9600, port: /tmp/tty_mwx1}
52 | transforms: {class: TimestampTransform}
53 | writers:
54 | class: ComposedWriter
55 | kwargs:
56 | transforms:
57 | class: PrefixTransform
58 | kwargs: {prefix: mwx1}
59 | writers:
60 | class: NetworkWriter
61 | kwargs: {network: ':6224'}
62 | rtmp->net:
63 | name: rtmp->net
64 | readers:
65 | class: SerialReader
66 | kwargs: {baudrate: 9600, port: /tmp/tty_rtmp}
67 | transforms: {class: TimestampTransform}
68 | writers:
69 | class: ComposedWriter
70 | kwargs:
71 | transforms:
72 | class: PrefixTransform
73 | kwargs: {prefix: rtmp}
74 | writers:
75 | class: NetworkWriter
76 | kwargs: {network: ':6224'}
77 | s330->net:
78 | name: s330->net
79 | readers:
80 | class: SerialReader
81 | kwargs: {baudrate: 9600, port: /tmp/tty_s330}
82 | transforms: {class: TimestampTransform}
83 | writers:
84 | class: ComposedWriter
85 | kwargs:
86 | transforms:
87 | class: PrefixTransform
88 | kwargs: {prefix: s330}
89 | writers:
90 | class: NetworkWriter
91 | kwargs: {network: ':6224'}
92 |
--------------------------------------------------------------------------------
/logger/transforms/count_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import logging
4 | import sys
5 |
6 | from os.path import dirname, realpath
7 |
8 | from typing import Union
9 | sys.path.append(dirname(dirname(dirname(realpath(__file__)))))
10 | from logger.utils.das_record import DASRecord # noqa: E402
11 | from logger.transforms.transform import Transform # noqa: E402
12 |
13 |
################################################################################
#
class CountTransform(Transform):
    """Return number of times the passed fields have been seen as a dict
    (or DASRecord, depending on what was passed in) where the keys are
    'field_name:count' and the values are the number of times the passed
    in fields have been seen. E.g:
    ```
    counts = CountTransform()
    counts.transform({'f1': 1, 'f2': 1.5}) -> {'f1:count':1, 'f2:count':1}
    counts.transform({'f1': 1}) -> {'f1:count':2}
    counts.transform({'f1': 1.1, 'f2': 1.4}) -> {'f1:count':3, 'f2:count':2}
    ```
    """

    def __init__(self):
        """Takes no arguments; counts accumulate across calls to transform().
        """
        self.counts = {}  # field_name -> number of times seen so far

    ############################
    def transform(self, record: Union[DASRecord, dict]):
        """Return counts of the previous times we've seen these field names."""

        # See if it's something we can process, and if not, try digesting
        if not self.can_process_record(record):  # inherited from Transform()
            return self.digest_record(record)  # inherited from Transform()

        if type(record) is DASRecord:
            fields = record.fields
        elif type(record) is dict:
            fields = record
        else:
            logging.warning('Input to CountTransform must be either '
                            'DASRecord or dict. Received type "%s"', type(record))
            return None

        # Bump the running count for each field present in this record.
        new_counts = {}
        for field in fields:
            self.counts[field] = self.counts.get(field, 0) + 1
            new_counts[field + ':count'] = self.counts[field]

        # For DASRecord input, wrap the counts back up in a DASRecord.
        # (The previous code only did this when record.data_id was non-empty,
        # which made the 'counts' fallback unreachable and silently returned
        # a bare dict for id-less records; now DASRecord in -> DASRecord out.)
        if type(record) is DASRecord:
            data_id = record.data_id + '_counts' if record.data_id else 'counts'
            return DASRecord(data_id=data_id,
                             message_type=record.message_type,
                             timestamp=record.timestamp,
                             fields=new_counts)

        return new_counts
69 |
--------------------------------------------------------------------------------
/test/logger/writers/test_timeout_writer.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import logging
4 | import sys
5 | import tempfile
6 | import time
7 | import unittest
8 |
9 | sys.path.append('.')
10 | from logger.writers.text_file_writer import TextFileWriter # noqa: E402
11 | from logger.writers.timeout_writer import TimeoutWriter # noqa: E402
12 |
13 |
def create_file(filename, lines, interval=0, pre_sleep_interval=0):
    """Write each element of lines to filename, one per line, flushing
    after each write. Optionally sleep pre_sleep_interval seconds before
    opening the file and interval seconds before each line."""
    time.sleep(pre_sleep_interval)
    logging.info('creating file "%s"', filename)
    with open(filename, 'w') as out:
        for entry in lines:
            time.sleep(interval)
            out.write(entry + '\n')
            out.flush()
22 |
23 |
class TestTimeoutWriter(unittest.TestCase):
    """Check that TimeoutWriter emits timeout/resume messages on schedule."""

    ############################
    def test_basic(self):
        # Scratch file for the wrapped client writer to write into.
        temp_dir = tempfile.TemporaryDirectory()
        test_file = temp_dir.name + '/test.txt'
        logging.info('creating temporary file "%s"', test_file)

        client_writer = TextFileWriter(filename=test_file)
        timeout_writer = TimeoutWriter(writer=client_writer, timeout=0.5,
                                       message='off', resume_message='on')

        time.sleep(0.75)             # trigger an "off" message
        timeout_writer.write('foo')  # trigger an "on" message
        time.sleep(1.2)              # should trigger just one "off" message

        timeout_writer.quit()

        # The file should now contain exactly off / on / off.
        with open(test_file, 'r') as f:
            lines = f.read().strip().split('\n')
        self.assertEqual(lines, ['off', 'on', 'off'])
50 |
51 |
################################################################################
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('-v', '--verbosity', dest='verbosity',
                        default=0, action='count',
                        help='Increase output verbosity')
    args = parser.parse_args()

    logging.basicConfig(format='%(asctime)-15s %(message)s')

    # Translate the -v flag count into a log level, capped at DEBUG.
    LOG_LEVELS = {0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG}
    logging.getLogger().setLevel(
        LOG_LEVELS[min(args.verbosity, max(LOG_LEVELS))])

    unittest.main(warnings='ignore')
70 |
--------------------------------------------------------------------------------
/test/NBP1406/simulate_NBP1406.yaml:
--------------------------------------------------------------------------------
1 | # Config file for creating/feeding simulated serial ports for
2 | # test/NBP1406/NBP1406_cruise.yaml. Run using
3 | #
4 | # logger/utils/simulate_data.py --config test/NBP1406/simulate_NBP1406.yaml
5 |
6 |
7 | PCOD:
8 | class: Serial
9 | port: /tmp/tty_PCOD
10 | filebase: test/NBP1406/data/PCOD/raw/NBP1406_PCOD
11 |
12 | cwnc:
13 | class: Serial
14 | port: /tmp/tty_cwnc
15 | filebase: test/NBP1406/data/cwnc/raw/NBP1406_cwnc
16 |
17 | gp02:
18 | class: Serial
19 | port: /tmp/tty_gp02
20 | filebase: test/NBP1406/data/gp02/raw/NBP1406_gp02
21 |
22 | gyr1:
23 | class: Serial
24 | port: /tmp/tty_gyr1
25 | filebase: test/NBP1406/data/gyr1/raw/NBP1406_gyr1
26 |
27 | knud:
28 | class: Serial
29 | port: /tmp/tty_knud
30 | filebase: test/NBP1406/data/knud/raw/NBP1406_knud
31 |
32 | mwx1:
33 | class: Serial
34 | port: /tmp/tty_mwx1
35 | filebase: test/NBP1406/data/mwx1/raw/NBP1406_mwx1
36 |
37 | pguv:
38 | class: Serial
39 | port: /tmp/tty_pguv
40 | filebase: test/NBP1406/data/pguv/raw/NBP1406_pguv
41 |
42 | s330:
43 | class: Serial
44 | port: /tmp/tty_s330
45 | filebase: test/NBP1406/data/s330/raw/NBP1406_s330
46 |
47 | svp1:
48 | class: Serial
49 | port: /tmp/tty_svp1
50 | filebase: test/NBP1406/data/svp1/raw/NBP1406_svp1
51 |
52 | tsg2:
53 | class: Serial
54 | port: /tmp/tty_tsg2
55 | filebase: test/NBP1406/data/tsg2/raw/NBP1406_tsg2
56 |
57 | adcp:
58 | class: Serial
59 | port: /tmp/tty_adcp
60 | filebase: test/NBP1406/data/adcp/raw/NBP1406_adcp
61 |
62 | eng1:
63 | class: Serial
64 | port: /tmp/tty_eng1
65 | filebase: test/NBP1406/data/eng1/raw/NBP1406_eng1
66 |
67 | grv1:
68 | class: Serial
69 | port: /tmp/tty_grv1
70 | filebase: test/NBP1406/data/grv1/raw/NBP1406_grv1
71 |
72 | hdas:
73 | class: Serial
74 | port: /tmp/tty_hdas
75 | filebase: test/NBP1406/data/hdas/raw/NBP1406_hdas
76 |
77 | mbdp:
78 | class: Serial
79 | port: /tmp/tty_mbdp
80 | filebase: test/NBP1406/data/mbdp/raw/NBP1406_mbdp
81 |
82 | pco2:
83 | class: Serial
84 | port: /tmp/tty_pco2
85 | filebase: test/NBP1406/data/pco2/raw/NBP1406_pco2
86 |
87 | rtmp:
88 | class: Serial
89 | port: /tmp/tty_rtmp
90 | filebase: test/NBP1406/data/rtmp/raw/NBP1406_rtmp
91 |
92 | seap:
93 | class: Serial
94 | port: /tmp/tty_seap
95 | filebase: test/NBP1406/data/seap/raw/NBP1406_seap
96 |
97 | tsg1:
98 | class: Serial
99 | port: /tmp/tty_tsg1
100 | filebase: test/NBP1406/data/tsg1/raw/NBP1406_tsg1
101 |
102 | twnc:
103 | class: Serial
104 | port: /tmp/tty_twnc
105 | filebase: test/NBP1406/data/twnc/raw/NBP1406_twnc
106 |
--------------------------------------------------------------------------------
/contrib/logger_templates/calibration_logger_template.yaml:
--------------------------------------------------------------------------------
1 | # How to use this template
2 | #
3 | # Put this in your calibrations file, e,g.
4 | # local/your_ship/calibration_files/calibrations-2025-04-15.yaml:
5 | #
6 | # variables:
7 | # # Default calibration factors
8 | #
9 | # # Rtmp calibration
10 | # rtmp_field_name: RTmpValue
11 | # rtmp_output_name: CalibratedRTmpValue
12 | # rtmp_mult_factor: 1.00223
13 | # rtmp_add_factor: 0.0442
14 | #
15 | # # SSpd calibration
16 | # sspd_field_name: SSpd
17 | # sspd_output_name: CalibratedSSpd
18 | # sspd_mult_factor: 0.5443
19 | # sspd_add_factor: 0
20 | #
21 | # Put these loggers in your cruise definition file:
22 | #
23 | # includes:
24 | # local/your_ship/calibration_files/calibrations-2025-04-15.yaml
25 | #
26 | # loggers:
27 | # rtmp_cal: # compute and write out calibrated values for rtmp
28 | # logger_template: calibration_logger_template
29 | # variables:
30 | # field_name: rtmp_field_name
31 | # output_name: rtmp_output_name
32 | # mult_factor: rtmp_mult_factor
33 | # add_factor: rtmp_add_factor
34 | #
35 | # sspd_cal: # compute and write out calibrated values for sspd
36 | # logger_template: calibration_logger_template
37 | # variables:
38 | # field_name: sspd_field_name
39 | # output_name: sspd_output_name
40 | # mult_factor: sspd_mult_factor
41 | # add_factor: sspd_add_factor
42 |
43 | logger_templates:
44 | #################
45 | calibration_logger_template:
46 | configs:
47 | 'off': {}
48 |
49 | # Write calibrated values back to CDS
50 | 'on': &calibration_on
51 | readers:
52 | - class: CachedDataReader
53 | kwargs:
54 | return_das_record: true
55 | data_id: <>
56 | data_server: <>
57 | subscription:
58 | fields: [<>]
59 | transforms:
60 | - class: ModifyValueTransform
61 | kwargs:
62 | #quiet: true
63 | fields:
64 | <>:
65 | mult_factor: <>
66 | add_factor: <>
67 | output_name: <>
68 | delete_original: true
69 | writers:
70 | - class: CachedDataWriter
71 | kwargs:
72 | data_server: <>
73 |
74 | # Write calibrated values back to CDS and InfluxDB
75 | on+influx:
76 | <<: *calibration_on
77 | writers:
78 | - class: CachedDataWriter
79 | kwargs:
80 | data_server: <>
81 | - class: InfluxDBWriter
82 | kwargs:
83 | bucket_name: <>
84 |
--------------------------------------------------------------------------------
/display/html/map_demo.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
25 |
26 |
66 |
67 |
68 |
--------------------------------------------------------------------------------
/test/logger/transforms/test_from_json_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import logging
4 | import sys
5 | import unittest
6 |
7 | sys.path.append('.')
8 | from logger.transforms.from_json_transform import FromJSONTransform # noqa: E402
9 | from logger.transforms.to_json_transform import ToJSONTransform # noqa: E402
10 | from logger.utils.das_record import DASRecord # noqa: E402
11 |
# Fixture: a simple flat dict of string fields used by all the tests below.
sample_dict = {
    'field1': 'value1',
    'field2': 'value2',
    'field3': 'value3',
}

# Expected compact (default) JSON encoding of sample_dict.
to_str = '{"field1": "value1", "field2": "value2", "field3": "value3"}'

# Expected pretty-printed (indented) JSON encoding of sample_dict.
pretty_str = """{
  "field1": "value1",
  "field2": "value2",
  "field3": "value3"
}"""
25 |
26 |
class TestFromJSONTransform(unittest.TestCase):
    """Exercise ToJSONTransform and FromJSONTransform, separately and as
    a round trip."""

    ############################
    def test_both_ways(self):
        # Encoding then decoding should give back the original dict
        round_trip = FromJSONTransform().transform(
            ToJSONTransform().transform(sample_dict))
        self.assertDictEqual(sample_dict, round_trip)

    ############################
    def test_to(self):
        # Default encoding is compact, single-line JSON
        self.assertEqual(ToJSONTransform().transform(sample_dict), to_str)

    ############################
    def test_to_pretty(self):
        # pretty=True should produce indented, multi-line JSON
        self.assertEqual(ToJSONTransform(pretty=True).transform(sample_dict),
                         pretty_str)

    ############################
    def test_from(self):
        # Decoding pretty-printed JSON yields the original dict
        self.assertDictEqual(sample_dict,
                             FromJSONTransform().transform(pretty_str))

    ############################
    def test_das_record_from(self):
        # das_record=True should wrap the decoded dict in a DASRecord
        decoded = FromJSONTransform(das_record=True).transform(pretty_str)
        self.assertEqual(type(decoded), DASRecord)
        self.assertDictEqual(sample_dict, decoded.fields)
60 |
61 |
62 | ################################################################################
63 | if __name__ == '__main__':
64 | import argparse
65 | parser = argparse.ArgumentParser()
66 | parser.add_argument('-v', '--verbosity', dest='verbosity',
67 | default=0, action='count',
68 | help='Increase output verbosity')
69 | args = parser.parse_args()
70 |
71 | LOGGING_FORMAT = '%(asctime)-15s %(filename)s:%(lineno)d %(message)s'
72 | logging.basicConfig(format=LOGGING_FORMAT)
73 |
74 | LOG_LEVELS = {0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG}
75 | args.verbosity = min(args.verbosity, max(LOG_LEVELS))
76 | logging.getLogger().setLevel(LOG_LEVELS[args.verbosity])
77 |
78 | unittest.main(warnings='ignore')
79 |
--------------------------------------------------------------------------------
/docs/README.md:
--------------------------------------------------------------------------------
1 | # OpenRVDAS
2 | © David Pablo Cohn - david.cohn@gmail.com
3 | 2024-05-17
4 |
5 | The Open Research Vessel Data Acquisition System (OpenRVDAS) is a software framework used for building custom data acquisition systems (DAS). OpenRVDAS's target audiences are oceanographic research vessel operators and operators of other science-related platforms that need to record streaming data. OpenRVDAS is capable of reading data records from serial ports and network-aware sensors, optionally modifying those data records, and streaming the results to one or more destinations, including logfiles, network ports, databases, etc.
6 |
7 | OpenRVDAS is designed to be modular and extensible, relying on simple composition of Readers, Transforms and Writers to achieve the needed datalogging functionality.
8 |
9 | The project code repository is at [https://github.com/oceandatatools/openrvdas](https://github.com/oceandatatools/openrvdas).
10 |
11 | ## Where to start?
12 | * [OpenRVDAS Quickstart](quickstart.md) if you want to just grab the code and poke around with basic loggers as quickly as possible.
13 | * [GUI Quickstart](quickstart_gui.md) if you want to play with the web-based interface.
14 |
15 | Other relevant documents are:
16 |
17 | * [The Listener Script - listen.py](listen_py.md) - how to use OpenRVDAS's core utility script
18 | * [Configuration Files](configuration_files.md) - how to define configuration files to simplify running loggers with listen.py
19 | * [OpenRVDAS Components](components.md) - what components exist and what they do
20 | * [Simulating Live Data](simulating_live_data.md) - using the simulate_data.py script to simulate a live system using stored data for development and testing
21 | * [Grafana/InfluxDB-based Displays](grafana_displays.md) - an introduction to using InfluxDB and Grafana for displaying data
22 | * [Parsing](parsing.md) - how to work with the included RecordParser to turn raw text records into structured data fields
23 | * [Security assumptions](security.md) - the (rather naive) security assumptions made about the environment in which OpenRVDAS runs.
24 |
25 | OpenRVDAS is a part of the [Ocean Data Tools project](http://oceandata.tools).
26 |
27 | **DISCLAIMER**: THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF
28 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
29 | WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
30 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
31 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
32 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
33 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE,
34 | INCLUDING INJURY, LOSS OF LIFE, PROPERTY, SANITY OR CREDIBILITY AMONG
35 | YOUR PEERS WHO WILL TELL YOU THAT YOU REALLY SHOULD HAVE KNOWN BETTER.
36 |
--------------------------------------------------------------------------------
/server/supervisord/supervisord.ini:
--------------------------------------------------------------------------------
1 | ; Sample supervisor config file.
2 | ;
3 |
4 | [unix_http_server]
5 | file=/opt/openrvdas/server/supervisord/supervisor.sock ; the path to the socket file
6 | chmod=0770 ; socket file mode (default 0700)
7 | chown=pablo:wheel ; socket file uid:gid owner
8 | ;username=pablo ; default is no username (open server)
9 | ;password=rvdas ; default is no password (open server)
10 |
11 | [inet_http_server] ; inet (TCP) server disabled by default
12 | port=localhost:8002 ; ip_address:port specifier, *:port for all iface
13 | ;username=pablo ; default is no username (open server)
14 | ;password=rvdas ; default is no password (open server)
15 |
16 | [supervisord]
17 | logfile=/var/log/openrvdas/supervisord.log ; main log file; default $CWD/supervisord.log
18 | stderr_logfile_maxbytes=0 ; max main logfile bytes b4 rotation; default 50MB
19 | logfile_maxbytes=50MB ; max main logfile bytes b4 rotation; default 50MB
20 | logfile_backups=10 ; # of main logfile backups; 0 means none, default 10
21 | loglevel=info ; log level; default info; others: debug,warn,trace
22 | pidfile=/usr/local/var/run/supervisord.pid ; supervisord pidfile; default supervisord.pid
23 | nodaemon=true ; start in foreground if true; default false
24 | minfds=1024 ; min. avail startup file descriptors; default 1024
25 | minprocs=200 ; min. avail process descriptors;default 200
26 | umask=022 ; process file creation umask; default 022
27 | user=pablo ; setuid to this UNIX account at startup; recommended if root
28 |
29 | ; The rpcinterface:supervisor section must remain in the config file for
30 | ; RPC (supervisorctl/web interface) to work. Additional interfaces may be
31 | ; added by defining them in separate [rpcinterface:x] sections.
32 |
33 | [rpcinterface:supervisor]
34 | supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
35 |
36 | ; The supervisorctl section configures how supervisorctl will connect to
37 | ; supervisord. configure it match the settings in either the unix_http_server
38 | ; or inet_http_server section.
39 |
40 | [supervisorctl]
41 | serverurl=unix:///opt/openrvdas/server/supervisord/supervisor.sock ; use a unix:// URL for a unix socket
42 | ;serverurl=http://localhost:8002 ; use an http:// url to specify an inet socket; only one serverurl may be active - uncomment this (and comment out the line above) to use the inet server
43 | ;username=chris ; should be same as in [*_http_server] if set
44 | ;password=123 ; should be same as in [*_http_server] if set
45 | ;prompt=mysupervisor ; cmd line prompt (default "supervisor")
46 | ;history_file=~/.sc_history ; use readline history if available
47 |
48 | [include]
49 | files = /opt/openrvdas/server/supervisord/supervisor.d/*.ini
50 |
--------------------------------------------------------------------------------
/contrib/utils/JSON_YAML_Creator/src/components/JSONYAMLOutput.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import YAML from 'yaml';
3 | import './index.css';
4 | import { CopyToClipboard } from 'react-copy-to-clipboard';
5 |
6 | export default function YAMLOutput(props) {
7 | const divStyle = {
8 | display: 'flex',
9 | alignItems: 'center',
10 | };
11 |
12 | const combined = {
13 | readers: props.readers.map((element) => element),
14 | transforms: props.transforms.map((element) => element),
15 | writers: props.writers.map((element) => element),
16 | };
17 |
18 | return (
19 |
20 |
21 |
22 |
23 |
24 | {JSON.stringify(
25 | combined,
26 | function (key, val) {
27 | if (key !== 'kwargClass') return val;
28 | },
29 | 2
30 | )}
31 |
32 |
33 |
34 |
44 | Copy JSON
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 | {YAML.stringify(
54 | JSON.parse(
55 | JSON.stringify(
56 | combined,
57 | function (key, val) {
58 | if (key !== 'kwargClass') return val;
59 | },
60 | 2
61 | )
62 | )
63 | )}
64 |
65 |
66 |
67 |
81 | Copy YAML
82 |
83 |
84 |
85 |
86 |
87 | );
88 | }
89 |
--------------------------------------------------------------------------------
/test/logger/writers/test_websocket_writer.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import logging
4 | import sys
5 | import threading
6 | import time
7 | import unittest
8 |
9 | from os.path import dirname, realpath
10 |
11 | sys.path.append('.')
12 | from logger.writers.websocket_writer import WebsocketWriter # noqa: E402
13 | from logger.readers.websocket_reader import WebsocketReader # noqa: E402
14 |
15 |
class WebsocketWriterTest(unittest.TestCase):
    """Round-trip records through a WebsocketWriter/WebsocketReader pair,
    both in plain and SSL modes."""

    # Method to run in separate thread to run WebsocketWriter
    def _run_writer(self, uri, records, cert_file=None, key_file=None):
        """Open a WebsocketWriter on uri and write each record to it,
        pausing briefly between writes."""
        writer = WebsocketWriter(uri=uri, cert_file=cert_file, key_file=key_file)

        # Wait until at least one client has connected before writing
        while not writer.client_map:
            time.sleep(0.1)
        logging.debug('writer got client')

        for record in records:
            logging.debug(f'writer writing {record}')
            writer.write(record)
            time.sleep(0.1)
        logging.debug('writer thread exiting')

    def _check_round_trip(self, uri, records, cert_file=None, key_file=None):
        """Shared body of the tests below: start a writer thread on uri,
        read the records back with a WebsocketReader, and verify they
        arrive unchanged and in order."""
        writer_thread = threading.Thread(target=self._run_writer,
                                         args=(uri, records, cert_file, key_file),
                                         daemon=True)
        writer_thread.start()

        reader = WebsocketReader(uri=uri)
        for record in records:
            logging.debug(f'reader expecting {record}')
            received = reader.read()
            self.assertEqual(received, record)
            logging.debug(f'reader got {received}')
        logging.debug('reader exiting')

        writer_thread.join()
        logging.debug('writer thread joined')

    def test_read(self):
        """Round-trip records over a plain (ws://) websocket."""
        self._check_round_trip('ws://localhost:8080',
                               ["Hello, world!", "Goodbye, world!"])

    def test_read_ssl(self):
        """Round-trip records over a secure (wss://) websocket, using the
        test certificate/key files stored under test/logger/utils/."""
        cert_file = dirname(dirname(realpath(__file__))) + '/utils/test.crt'
        key_file = dirname(dirname(realpath(__file__))) + '/utils/test.key'
        self._check_round_trip('wss://localhost:8081',
                               ["Hello, world!", "Goodbye, world!"],
                               cert_file=cert_file, key_file=key_file)
71 |
72 |
if __name__ == "__main__":
    # Run the websocket round-trip tests directly (no verbosity flags here)
    unittest.main()
75 |
--------------------------------------------------------------------------------
/logger/transforms/to_das_record_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import logging
4 | import sys
5 |
6 | from typing import Union
7 | from os.path import dirname, realpath
8 | from json import JSONDecodeError
9 |
10 | sys.path.append(dirname(dirname(dirname(realpath(__file__)))))
11 | from logger.utils.das_record import DASRecord # noqa: E402
12 | from logger.transforms.transform import Transform # noqa: E402
13 |
14 |
15 | ################################################################################
16 | #
class ToDASRecordTransform(Transform):
    """Convert passed record to DASRecord. If record is a str, assume a
    JSON-encoded DASRecord. If record is a dict, use its fields as DASRecord
    fields. If initialized with a field_name, expect to be passed strings,
    and use those strings as the corresponding field values.
    """

    def __init__(self, data_id=None, field_name=None):
        """
        data_id     Optional data_id to attach to created DASRecords.
        field_name  If set, treat each string input as the value of this
                    field rather than as a JSON-encoded DASRecord.
        """
        self.data_id = data_id
        self.field_name = field_name

    ############################
    def transform(self, record: Union[str, dict]):
        """Convert record to DASRecord."""

        # Hand lists/unexpected wrappers back to the base class to digest
        if not self.can_process_record(record):  # inherited from Transform()
            return self.digest_record(record)  # inherited from Transform()

        if isinstance(record, str):
            return self._from_str(record)
        if isinstance(record, dict):
            return self._from_dict(record)

        logging.warning('ToDASRecordTransform input should be of type '
                        f'str or dict, but received {type(record)}: {record}')
        return None

    ############################
    def _from_str(self, record):
        """Build a DASRecord from a string input."""
        # With a field_name, the string is that field's value; otherwise
        # it should be a JSON-encoded DASRecord.
        if self.field_name:
            return DASRecord(data_id=self.data_id, fields={self.field_name: record})
        try:
            return DASRecord(json_str=record)
        except JSONDecodeError:
            logging.warning(f'String could not be parsed as JSON DASRecord: {record}')
            return None

    ############################
    def _from_dict(self, record):
        """Build a DASRecord from a dict input."""
        # A dict with a 'fields' sub-dict is already DASRecord-shaped
        fields = record.get('fields')
        if isinstance(fields, dict):
            return DASRecord(data_id=self.data_id or record.get('data_id'),
                             timestamp=record.get('timestamp'),
                             fields=fields)

        # Otherwise, assume the whole dict is a dict of fields
        return DASRecord(data_id=self.data_id, fields=record)
63 |
--------------------------------------------------------------------------------
/django_gui/templates/django_gui/change_mode.html:
--------------------------------------------------------------------------------
1 | {% extends 'django_gui/base.html' %}
2 |
3 | {% load static %}
4 |
5 | {% block content %}
6 |
7 |
20 |
21 |
22 |
23 |
24 | Logging Mode Manager
25 | Logging Mode Manager
26 |
27 |
28 |
29 | {# ################################################################## #}
30 | {# If user authenticated, allow to switch modes. Else just show mode #}
31 |
53 |
54 |
55 |
56 |
57 |
58 |
74 | {% endblock %}
75 |
--------------------------------------------------------------------------------
/django_gui/static/django_gui/widget.html.js:
--------------------------------------------------------------------------------
1 | //////////////////////////////////////////////////////////////////////////////
2 | // Javascript behind and specific to the index.html page.
3 | //
4 | // Typical invocation will look like:
5 | //
6 | //
12 | //
13 | //
14 | //
15 | //
16 | // Note that this also counts on variables WEBSOCKET_SERVER and
17 | // FIELD_LIST being set in the calling page.
18 |
19 | ////////////////////////////////////////////////////////////////////
20 | // Widget-specific functions
21 |
22 | ///////////////////////////
23 | function initial_send_message() {
24 | var fields = {};
25 | var field_list = FIELD_LIST;
26 | for (f_i = 0; f_i < field_list.length; f_i++) {
27 | var field = field_list[f_i];
28 | fields[field] = {'seconds':-1};
29 | }
30 | return {'type':'subscribe', 'fields': fields};
31 | }
32 |
33 | ///////////////////////////////////////////////////////////////
34 | function process_message(message_str) {
35 | var message = JSON.parse(message_str);
36 |
37 | // Figure out what kind of message we got
38 | var message_type = message.type;
39 | var status = message.status;
40 |
41 | switch (message_type) {
42 | case 'data':
43 | var data_dict = message.data;
44 | if (data_dict == undefined) {
45 | console.log('Got data message with no data?!?: ' + message_str);
46 | return;
47 | }
48 | for (var field_name in data_dict) {
49 | var value_list = data_dict[field_name];
50 | var last_pair = value_list[value_list.length-1];
51 | var timestamp = last_pair[0];
52 | var value = last_pair[1];
53 | var td = document.getElementById(field_name + '_value');
54 | td.innerHTML = value
55 |
56 | var ts_td = document.getElementById('timestamp');
57 | ts_td.innerHTML = Date(timestamp * 1000);
58 | }
59 | break;
60 | case 'subscribe':
61 | if (status != 200) {
62 | console.log('Got bad status for subscribe request: ' + message_str);
63 | console.log('Original subscribe request: '
64 | + JSON.stringify(initial_send_message()));
65 | }
66 | break;
67 | case 'ready': // if no data are ready
68 | console.log('no data ready');
69 | break;
70 | default:
71 | console.log('Got unknown message type: ' + message_str);
72 | }
73 | }
74 |
// Sleep function we'll use if there are no data ready: resolves after
// the given number of milliseconds.
const sleep = (milliseconds) =>
  new Promise((resolve) => setTimeout(resolve, milliseconds));
79 |
--------------------------------------------------------------------------------
/django_gui/static/django_gui/stderr_log_utils.js:
--------------------------------------------------------------------------------
1 | //////////////////////////////////////////////////////////////////////////////
2 | // Javascript for fetching log lines whose data_id matches some data_id,
3 | // such as 'stderr:logger:s330', and appending them to a specified target div.
4 | //
5 | // Typical invocation will look like:
6 | //
7 | //
8 | //
9 | //
10 | // Will take lines whose id matches 'stderr:logger:gyr1' and append them
11 | // to a div on the paged whose identity is 'gyr1_stderr'. Etc.
12 |
13 | ////////////////////////////
14 | // Process CDS data message (hopefully) containing log lines and add
15 | // to the div we've been passed. Expects STDERR_DIV_MAP to be defined
16 | // as an associative array of {field_name: div}, where field_name is
17 | // a match for, e.g. stderr.logger.s330, and div is the id of the page's
18 | // div into which matching lines should be placed.
function process_stderr_message(target_div_id, log_line_list) {
  // target_div_id - e.g. 's330_stderr
  // log_line_list - should be [(timestamp, line), (timestamp, line),...],
  //                 where 'line' is the log message to be recorded.
  // Nothing to do if we got no lines
  if (!log_line_list || log_line_list.length == 0) {
    return;
  }
  var new_log_lines = '';

  for (var list_i = 0; list_i < log_line_list.length; list_i++) {
    // Skip duplicate messages
    if (list_i > 0 && log_line_list[list_i] == log_line_list[list_i-1]) {
      continue;
    }
    var [timestamp, log_line] = log_line_list[list_i];

    // Clean up message and add to new_log_lines list.
    // NOTE(review): the bare ' \n' suffix looks like it once carried HTML
    // line-break markup that was stripped from this copy of the file -
    // confirm against the original source.
    log_line = log_line.replace('\n',' ') + ' \n';
    new_log_lines += color_log_line(log_line);
  }

  // Once all log lines have been added, fetch the div where we're
  // going to put them, and add to bottom.
  if (new_log_lines.length > 0) {
    var target_div = document.getElementById(target_div_id);
    if (target_div) {
      target_div.innerHTML += new_log_lines;
      target_div.scrollTop = target_div.scrollHeight; // scroll to bottom
    } else {
      console.log('Couldn\'t find div for ' + target_div_id);
    }
  }
}
52 |
// Add HTML coloring to message depending on log level. The ' 30 WARNING '
// etc. substrings presumably match the numeric-level + name format the
// stderr logger emits - confirm against the log writer.
function color_log_line(message) {
  var color = '';
  if (message.indexOf(' 30 WARNING ') > 0) {
    color = '#e09100';
  } else if (message.indexOf(' 40 ERROR ') > 0) {
    color = 'orange';
  } else if (message.indexOf(' 50 CRITICAL ') > 0) {
    color = 'red';
  }
  if (color !== '') {
    // NOTE(review): 'color' is computed but the wrapping below adds no
    // markup that uses it - the HTML tags appear to have been stripped
    // from this copy of the file; confirm against the original source.
    message = '' + message + ' ';
  }
  return message;
}
68 |
--------------------------------------------------------------------------------
/docs/secure_websockets.md:
--------------------------------------------------------------------------------
1 | # Using Secure Websockets with OpenRVDAS
2 | © 2020 David Pablo Cohn - DRAFT 2020-12-11
3 |
4 | ## Overview
5 |
6 | OpenRVDAS uses websockets to relay logger data and status information to
7 | the [CachedDataServer](cached_data_server.md), which then supports
8 | display of near-realtime updates to the web console and display widgets.
9 | While on-server websocket communication is conducted (insecurely) on port
10 | 8766, in some environments security may dictate restricting off-server access
11 | to that port. For this reason, NGINX is configured to also make the cached
12 | data server available on the default web console port at path `/cds-ws`.
13 | The [OpenRVDAS installation script](../utils/install_openrvdas.sh) allows
14 | configuring NGINX to require secure websockets (`wss://`) for off-server
15 | access along this path.
16 |
17 | If, during installation, the user specifies that secure websockets should
18 | be used, they will be prompted to either specify the locations of the `.crt`
19 | and `.key` certificate files, or will be coached through creation of those
20 | files via a self-signed certificate.
21 |
22 | ## Getting browsers to accept your self-signed certificate
23 |
24 | If the server already has a valid certificate, nothing more needs to be done.
25 | If the user follows the prompts to create a self-signed certificate, most browsers
26 | will balk at accepting them without a little extra work. You will need to first
27 | create a `.pem` file which, in this case, should just be a renamed copy of the
28 | public part of the certificate:
29 |
30 | ```cp my_created_certificate.crt my_created_certificate.pem```
31 |
32 | Copy this `.pem` file to the machine on which you will be running the browser
33 | and import it into your keychain.
34 |
35 | As of 2020-12-11, on a Macintosh, you would do this as follows:
36 |
37 | 1. Open the Keychain Access app and select `File > Import Items...`
38 | 1. Navigate to the `.pem` file you've copied to your machine and import it.
39 | 1. You should now see the imported certificate under the "Certificates"
40 | header in the Keychain Access app. Double-click it and expand the `Trust`
41 | section of the new window.
42 | 1. In the "When using this certificate..." drop-down, select "Always Trust"
43 | and close the window.
44 | 1. The first time you navigate your browser to the server, you will still get
45 | a certificate warning but, if you select the "Advanced" link at the bottom
46 | of the warning (in Chrome, at least), it will give you an option to continue
47 | to the page.
48 |
49 | The method of accepting self-signed certificates will undoubtedly continue to
50 | change and make these instructions obsolete. At the very least, you can ask
51 | Google for the latest concerning your specific browser and OS:
52 |
53 | E.g.: [install certificate in chrome on macos](https://www.google.com/search?q=install+certificate+in+chrome+on+macos)
54 |
--------------------------------------------------------------------------------
/logger/readers/redis_reader.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import logging
4 | import sys
5 |
6 | # Don't barf if they don't have redis installed. Only complain if
7 | # they actually try to use it, below
8 | try:
9 | import redis
10 | REDIS_ENABLED = True
11 | except ModuleNotFoundError:
12 | REDIS_ENABLED = False
13 |
14 | from os.path import dirname, realpath
15 | sys.path.append(dirname(dirname(dirname(realpath(__file__)))))
16 | from logger.readers.reader import Reader # noqa: E402
17 | from logger.utils.formats import Text # noqa: E402
18 |
19 | DEFAULT_HOST = 'localhost'
20 | DEFAULT_PORT = '6379'
21 |
22 |
23 | ################################################################################
################################################################################
class RedisReader(Reader):
    """Read messages from a redis pubsub channel."""

    def __init__(self, channel, password=None):
        """
        Read text records from a Redis pubsub server channel.
        ```
        channel   Redis channel to read from, format channel[@hostname[:port]]
        password  Optional password to authenticate to the Redis server
        ```
        """
        super().__init__(output_format=Text)

        if not REDIS_ENABLED:
            raise ModuleNotFoundError('RedisReader(): Redis is not installed. Please '
                                      'try "pip3 install redis" prior to use.')
        self.channel = channel
        self.hostname = DEFAULT_HOST
        # BUG FIX: DEFAULT_PORT is a string; keep self.port an int in all
        # cases so both the redis client and the %d in the error log below
        # work when no port was parsed out of the channel spec.
        self.port = int(DEFAULT_PORT)

        # Parse an optional @hostname[:port] suffix off the channel name
        if channel.find('@') > 0:
            (self.channel, self.hostname) = channel.split(sep='@', maxsplit=1)
            if self.hostname.find(':') > 0:
                (self.hostname, self.port) = self.hostname.split(sep=':', maxsplit=1)
                self.port = int(self.port)

        # Connect to the specified server and subscribe to channel
        try:
            self.redis = redis.StrictRedis(host=self.hostname, port=self.port,
                                           password=password, decode_responses=True)
            self.pubsub = self.redis.pubsub()
            self.pubsub.subscribe(self.channel)
        except redis.exceptions.ConnectionError as e:
            logging.error('Unable to connect to server at %s:%d',
                          self.hostname, self.port)
            raise e

    ############################
    def read(self):
        """Read/wait for message from pubsub channel. Blocks until a
        'message'-type entry with non-empty data arrives, then returns
        that data."""
        while True:
            message = next(iter(self.pubsub.listen()))
            logging.debug('Got message "%s"', message)
            # listen() also yields 'subscribe' confirmations etc.; only
            # actual published messages carry the data we want.
            if message.get('type') == 'message':
                data = message.get('data')
                if data:
                    return data
79 |
--------------------------------------------------------------------------------
/display/js/highcharts/code/modules/solid-gauge.js:
--------------------------------------------------------------------------------
/*
 Highcharts JS v6.1.1 (2018-06-27)
 Solid angular gauge module

 (c) 2010-2017 Torstein Honsi

 License: www.highcharts.com/license
*/
// NOTE(review): vendored, machine-minified Highcharts release artifact — do
// not hand-edit; upgrade by replacing the file with the matching release from
// highcharts.com. UMD wrapper: assigns to module.exports when `module` exists,
// otherwise applies itself to the global Highcharts. Wraps the renderer's
// "arc" symbol to support rounded ends, then registers the 'solidgauge'
// series type (extending 'gauge') with color-axis-style stop/dataClass
// coloring mixed into the yAxis.
(function(l){"object"===typeof module&&module.exports?module.exports=l:l(Highcharts)})(function(l){(function(e){var l=e.pInt,u=e.pick,m=e.each,r=e.isNumber,w=e.wrap,v;w(e.Renderer.prototype.symbols,"arc",function(a,f,d,c,e,b){a=a(f,d,c,e,b);b.rounded&&(c=((b.r||c)-b.innerR)/2,b=["A",c,c,0,1,1,a[12],a[13]],a.splice.apply(a,[a.length-1,0].concat(["A",c,c,0,1,1,a[1],a[2]])),a.splice.apply(a,[11,3].concat(b)));return a});v={initDataClasses:function(a){var f=this.chart,d,c=0,t=this.options;this.dataClasses=
d=[];m(a.dataClasses,function(b,h){b=e.merge(b);d.push(b);b.color||("category"===t.dataClassColor?(h=f.options.colors,b.color=h[c++],c===h.length&&(c=0)):b.color=e.color(t.minColor).tweenTo(e.color(t.maxColor),h/(a.dataClasses.length-1)))})},initStops:function(a){this.stops=a.stops||[[0,this.options.minColor],[1,this.options.maxColor]];m(this.stops,function(a){a.color=e.color(a[1])})},toColor:function(a,f){var d=this.stops,c,e,b=this.dataClasses,h,g;if(b)for(g=b.length;g--;){if(h=b[g],c=h.from,d=
h.to,(void 0===c||a>=c)&&(void 0===d||a<=d)){e=h.color;f&&(f.dataClass=g);break}}else{this.isLog&&(a=this.val2lin(a));a=1-(this.max-a)/(this.max-this.min);for(g=d.length;g--&&!(a>d[g][0]););c=d[g]||d[g+1];d=d[g+1]||c;a=1-(d[0]-a)/(d[0]-c[0]||1);e=c.color.tweenTo(d.color,a)}return e}};e.seriesType("solidgauge","gauge",{colorByPoint:!0},{translate:function(){var a=this.yAxis;e.extend(a,v);!a.dataClasses&&a.options.dataClasses&&a.initDataClasses(a.options);a.initStops(a.options);e.seriesTypes.gauge.prototype.translate.call(this)},
drawPoints:function(){var a=this,f=a.yAxis,d=f.center,c=a.options,t=a.chart.renderer,b=c.overshoot,h=r(b)?b/180*Math.PI:0,g;r(c.threshold)&&(g=f.startAngleRad+f.translate(c.threshold,null,null,null,!0));this.thresholdAngleRad=u(g,f.startAngleRad);m(a.points,function(b){var g=b.graphic,k=f.startAngleRad+f.translate(b.y,null,null,null,!0),m=l(u(b.options.radius,c.radius,100))*d[2]/200,n=l(u(b.options.innerRadius,c.innerRadius,60))*d[2]/200,p=f.toColor(b.y,b),q=Math.min(f.startAngleRad,f.endAngleRad),
r=Math.max(f.startAngleRad,f.endAngleRad);"none"===p&&(p=b.color||a.color||"none");"none"!==p&&(b.color=p);k=Math.max(q-h,Math.min(r+h,k));!1===c.wrap&&(k=Math.max(q,Math.min(r,k)));q=Math.min(k,a.thresholdAngleRad);k=Math.max(k,a.thresholdAngleRad);k-q>2*Math.PI&&(k=q+2*Math.PI);b.shapeArgs=n={x:d[0],y:d[1],r:m,innerR:n,start:q,end:k,rounded:c.rounded};b.startR=m;g?(b=n.d,g.animate(e.extend({fill:p},n)),b&&(n.d=b)):(b.graphic=t.arc(n).addClass(b.getClassName(),!0).attr({fill:p,"sweep-flag":0}).add(a.group),
"square"!==c.linecap&&b.graphic.attr({"stroke-linecap":"round","stroke-linejoin":"round"}),b.graphic.attr({stroke:c.borderColor||"none","stroke-width":c.borderWidth||0}))})},animate:function(a){a||(this.startAngleRad=this.thresholdAngleRad,e.seriesTypes.pie.prototype.animate.call(this,a))}})})(l)});
15 |
--------------------------------------------------------------------------------
/logger/writers/redis_writer.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import logging
4 | import sys
5 |
6 | # Don't barf if they don't have redis installed. Only complain if
7 | # they actually try to use it, below
8 | try:
9 | import redis
10 | REDIS_ENABLED = True
11 | except ModuleNotFoundError:
12 | REDIS_ENABLED = False
13 |
14 | from os.path import dirname, realpath
15 | sys.path.append(dirname(dirname(dirname(realpath(__file__)))))
16 | from logger.writers.writer import Writer # noqa: E402
17 |
18 | DEFAULT_HOST = 'localhost'
19 | DEFAULT_PORT = '6379'
20 |
21 |
class RedisWriter(Writer):
    """Write text records to a redis server pubsub channel."""

    def __init__(self, channel, password=None):
        """
        Write text records to a Redis pubsub server channel.
        ```
        channel    Redis channel to write to, format channel[@hostname[:port]]

        password   Optional password for authenticating with the Redis server.
        ```
        Raises ModuleNotFoundError if the redis package is not installed, and
        redis.exceptions.ConnectionError if the server can't be reached.
        """
        if not REDIS_ENABLED:
            # Fixed copy-paste error: message used to say 'RedisReader()'
            raise ModuleNotFoundError('RedisWriter(): Redis is not installed. Please '
                                      'try "pip3 install redis" prior to use.')
        self.channel = channel
        self.hostname = DEFAULT_HOST
        self.port = DEFAULT_PORT

        # Parse 'channel[@hostname[:port]]' into its components
        if channel.find('@') > 0:
            (self.channel, self.hostname) = channel.split(sep='@', maxsplit=1)
            if self.hostname.find(':') > 0:
                (self.hostname, self.port) = self.hostname.split(sep=':', maxsplit=1)

        # DEFAULT_PORT is a str, and a port split out of the channel spec is
        # too; always convert so the redis client and our '%d' log format
        # below get an int, even when no explicit port was given.
        self.port = int(self.port)

        # Connect to the specified server; ping() so we fail fast here rather
        # than on the first write if the server is unreachable.
        try:
            self.redis = redis.StrictRedis(host=self.hostname, port=self.port,
                                           password=password, decode_responses=True)
            self.redis.ping()
            self.pubsub = self.redis.pubsub()
        except redis.exceptions.ConnectionError:
            logging.error('Unable to connect to server at %s:%d',
                          self.hostname, self.port)
            raise

    ############################
    def write(self, record):
        """Publish the record to the pubsub channel."""

        # See if it's something we can process, and if not, try digesting
        if not self.can_process_record(record):  # inherited from BaseModule()
            self.digest_record(record)  # inherited from BaseModule()
            return

        try:
            self.redis.publish(self.channel, record)
        except redis.exceptions.ConnectionError:
            logging.error('Unable to connect to server at %s:%d',
                          self.hostname, self.port)
            raise
79 |
--------------------------------------------------------------------------------
/test/logger/transforms/test_nmea_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | # flake8: noqa E501 - ignore long lines
4 |
5 | import json
6 | import logging
7 | import pprint
8 | import sys
9 | import tempfile
10 | import time
11 | import unittest
12 | import warnings
13 |
14 | sys.path.append('.')
15 | from logger.transforms.nmea_transform import NMEATransform
16 |
17 |
18 | class TestNMEATransform(unittest.TestCase):
19 | # set to None to see full diff for long results
20 | maxDiff = None
21 |
22 | test_records = [
23 | ({'data_id': 'ship_nav', 'timestamp': 1708644853.370747,
24 | 'fields': {'Latitude': 4404.67098,
25 | 'Longitude': 11500.0019,
26 | 'CourseTrue': 1.1,
27 | 'SpeedOverGround': 8.1,
28 | 'HeadingTrue': 355.0,
29 | 'SurfaceCourse': 355.0,
30 | 'SurfaceSpeed': 7.861768248937385,
31 | 'DriftCourse': 72.40182954879155,
32 | 'DriftSpeed': 0.8819736152620004,
33 | 'Roll': 0.08,
34 | 'Pitch': 0.65,
35 | 'Heave': -0.03,
36 | 'DepthBelowTransducer': 4279.79,
37 | 'OffsetTransducer': 7.33,
38 | 'PositionSource': 'posmv'}},
39 | ['$GPSTN,posmv*05', '$GPDPT,4279.79,7.33*66'])
40 |
41 | ]
42 |
43 | ############################
44 | def test_nmea_transform(self):
45 | t = NMEATransform(nmea_list=['STNTransform', 'DPTTransform'],
46 | stn_talker_id='GPSTN', id_field='PositionSource',
47 | dpt_talker_id='GPDPT', depth_field='DepthBelowTransducer', offset_field='OffsetTransducer')
48 |
49 | for j, (line, expected) in enumerate(self.test_records):
50 | result = t.transform(line)
51 | logging.info('expected: %s, result: %s', expected, result)
52 | self.assertEqual(expected, result)
53 |
54 | def test_bad_nmea_list(self):
55 | with self.assertLogs(logging.getLogger(), level='INFO') as cm:
56 | t = NMEATransform(nmea_list=['BADTransform'])
57 | # Check that the first log message in the output is the one we want
58 | self.assertIn('BADTransform is not in classes', cm.output[0])
59 |
60 |
################################################################################
if __name__ == '__main__':
    import argparse

    # Repeated -v flags raise the log level: 0 -> WARNING, 1 -> INFO, 2+ -> DEBUG
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('-v', '--verbosity', dest='verbosity',
                            default=0, action='count',
                            help='Increase output verbosity')
    cli_args = arg_parser.parse_args()

    logging.basicConfig(
        format='%(asctime)-15s %(filename)s:%(lineno)d %(message)s')

    level_map = {0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG}
    verbosity = min(cli_args.verbosity, max(level_map))
    logging.getLogger().setLevel(level_map[verbosity])

    unittest.main(warnings='ignore')
--------------------------------------------------------------------------------