├── img ├── dop.png ├── sats.png ├── geo_offset.png ├── sats_used.png ├── clock_pps_offset.png └── grafana_gpsd_dashboard_1.png ├── Pipfile ├── gps_setserial.service ├── gpsd_exporter.service ├── set_serial_gps ├── gpsd.service ├── gpsd_exporter.defaults ├── Dockerfile ├── gps ├── watch_options.py ├── __init__.py ├── packet.py ├── aiogps.py ├── client.py ├── misc.py ├── gps.py └── fake.py ├── docker-compose.build.yml ├── docker-compose.yml ├── Pipfile.lock ├── fix_gps_board.sh ├── LICENSE ├── entrypoint.sh ├── .gitignore ├── generate-release-notes.sh ├── PUBLISHING.md ├── RELEASE_NOTES.md ├── docker-publish-org.yml ├── .github └── workflows │ ├── docker-publish.yml │ └── release-notes.yml ├── README.md └── gpsd_exporter.py /img/dop.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brendanbank/gpsd-prometheus-exporter/HEAD/img/dop.png -------------------------------------------------------------------------------- /img/sats.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brendanbank/gpsd-prometheus-exporter/HEAD/img/sats.png -------------------------------------------------------------------------------- /img/geo_offset.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brendanbank/gpsd-prometheus-exporter/HEAD/img/geo_offset.png -------------------------------------------------------------------------------- /img/sats_used.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brendanbank/gpsd-prometheus-exporter/HEAD/img/sats_used.png -------------------------------------------------------------------------------- /img/clock_pps_offset.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/brendanbank/gpsd-prometheus-exporter/HEAD/img/clock_pps_offset.png -------------------------------------------------------------------------------- /img/grafana_gpsd_dashboard_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/brendanbank/gpsd-prometheus-exporter/HEAD/img/grafana_gpsd_dashboard_1.png -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | prometheus-client = "*" 8 | 9 | [dev-packages] 10 | 11 | [requires] 12 | python_version = "*" 13 | -------------------------------------------------------------------------------- /gps_setserial.service: -------------------------------------------------------------------------------- 1 | # Some U-Blox GPS units need to be forced to 115200 baud. 
2 | 3 | [Unit] 4 | Description=GPS Set Serial service 5 | After=gpsd.service 6 | 7 | [Service] 8 | ExecStart=/usr/local/gpsd-prometheus-exporter/set_serial_gps 9 | RemainAfterExit=true 10 | Type=oneshot 11 | 12 | [Install] 13 | WantedBy=multi-user.target 14 | -------------------------------------------------------------------------------- /gpsd_exporter.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=gpsd monitor exporter 3 | After=network.target 4 | Before=gpsd.service 5 | 6 | 7 | [Service] 8 | Environment="PYTHONUNBUFFERED=1" 9 | Restart=always 10 | EnvironmentFile=-/etc/default/gpsd_exporter.defaults 11 | ExecStart=/usr/local/bin/gpsd_exporter.py $GPSD_MON_OPTIONS 12 | Type=simple 13 | 14 | [Install] 15 | WantedBy=multi-user.target 16 | Alias=gpsd_monitor.service 17 | -------------------------------------------------------------------------------- /set_serial_gps: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | /usr/bin/systemctl stop gpsd.service gpsd.socket 3 | sleep 1 4 | 5 | /usr/bin/ubxtool -v 1 -s 9600 -S 115200 -f /dev/ttyS0 6 | /usr/bin/ubxtool -v 1 -s 115200 -f /dev/ttyS0 -d GLONASS 7 | /usr/bin/ubxtool -v 1 -s 115200 -f /dev/ttyS0 -e GALILEO 8 | /usr/bin/ubxtool -v 1 -s 115200 -f /dev/ttyS0 -p SAVE 9 | 10 | sleep 1 11 | 12 | /usr/bin/systemctl start gpsd.service gpsd.socket 13 | 14 | -------------------------------------------------------------------------------- /gpsd.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=GPS (Global Positioning System) Daemon 3 | Requires=gpsd.socket 4 | # Needed with chrony SOCK refclock 5 | After=chronyd.service 6 | 7 | [Service] 8 | ExecStartPre=-/usr/local/gpsd-prometheus-exporter/set_serial_gps 9 | Type=forking 10 | EnvironmentFile=-/etc/default/gpsd 11 | ExecStart=/usr/sbin/gpsd $GPSD_OPTIONS $OPTIONS $DEVICES 12 | 13 | [Install] 14
| WantedBy=multi-user.target 15 | Also=gpsd.socket 16 | -------------------------------------------------------------------------------- /gpsd_exporter.defaults: -------------------------------------------------------------------------------- 1 | # Devices gpsd should collect to at boot time. 2 | # They need to be read/writeable, either by user gpsd or the group dialout. 3 | 4 | # Other options you want to pass to gpsd 5 | GPSD_MON_OPTIONS="-v --pps-histogram --offset-from-geopoint --geopoint-lon 38.897809878104574 --geopoint-lat -77.03655125936501 --pps-time1 0.0030415" 6 | 7 | # To measure an offset from a fixed geo point: --offset-from-geopoin --geopoint-lat --geopoint-lon 8 | # observe the clock offset from the pps signal --pps-histogram 9 | # If you calibriated you pps signal specify the fudge time1 with --pps-time1 10 | # 11 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.12-slim 2 | ENV DEBIAN_FRONTEND=noninteractive 3 | ENV PYTHONPATH=/app 4 | 5 | LABEL org.opencontainers.image.source https://github.com/brendanbank/gpsd-prometheus-exporter 6 | LABEL org.opencontainers.image.description "Prometheus exporter for the gpsd GPS daemon. Collects metrics from the gpsd server and exposes them for scraping." 
7 | LABEL org.opencontainers.image.licenses BSD-3-Clause 8 | 9 | # Python deps from PyPI 10 | RUN pip install --no-cache-dir prometheus-client 11 | 12 | WORKDIR /app 13 | 14 | # Copy only required runtime files 15 | COPY entrypoint.sh /app/ 16 | COPY gpsd_exporter.py /app/ 17 | COPY gps /app/gps 18 | 19 | RUN chmod +x /app/entrypoint.sh 20 | 21 | ENV GEOPOINT_LON=38.897809878104574 22 | ENV GEOPOINT_LAT=-77.03655125936501 23 | 24 | CMD [ "./entrypoint.sh" ] 25 | 26 | -------------------------------------------------------------------------------- /gps/watch_options.py: -------------------------------------------------------------------------------- 1 | """WATCH options - controls what data is streamed, and how it's converted.""" 2 | WATCH_ENABLE = 0x000001 # enable streaming 3 | WATCH_DISABLE = 0x000002 # disable watching 4 | WATCH_JSON = 0x000010 # JSON output 5 | WATCH_NMEA = 0x000020 # output in NMEA 6 | WATCH_RARE = 0x000040 # output of packets in hex 7 | WATCH_RAW = 0x000080 # output of raw packets 8 | 9 | WATCH_SCALED = 0x000100 # scale output to floats 10 | WATCH_TIMING = 0x000200 # timing information 11 | WATCH_DEVICE = 0x000800 # watch specific device 12 | WATCH_SPLIT24 = 0x001000 # split AIS Type 24s 13 | WATCH_PPS = 0x002000 # enable PPS JSON 14 | 15 | WATCH_NEWSTYLE = 0x010000 # force JSON streaming 16 | WATCH_OLDSTYLE = 0x020000 # force old-style streaming 17 | -------------------------------------------------------------------------------- /docker-compose.build.yml: -------------------------------------------------------------------------------- 1 | services: 2 | 3 | gpsd-exporter: 4 | build: 5 | context: . 
6 | dockerfile: Dockerfile 7 | image: gpsd-prometheus-exporter:stable 8 | container_name: gpsd-exporter 9 | ports: 10 | - "${EXPORTER_PORT:-9015}:9015" 11 | environment: 12 | - GPSD_HOST=${GPSD_HOST:-localhost} 13 | - GPSD_PORT=${GPSD_PORT:-2947} 14 | - GEOPOINT_LON=${GEOPOINT_LON:-38.897809878} 15 | - GEOPOINT_LAT=${GEOPOINT_LAT:--77.036551259} 16 | - PPS_BUCKET_SIZE=${PPS_BUCKET_SIZE:-50000} 17 | - PPS_BUCKET_COUNT=${PPS_BUCKET_COUNT:-40} 18 | - PPS_TIME1=${PPS_TIME1} 19 | - GEO_BUCKET_SIZE=${GEO_BUCKET_SIZE:-0.5} 20 | - GEO_BUCKET_COUNT=${GEO_BUCKET_COUNT:-40} 21 | - EXPORTER_PORT=${EXPORTER_PORT:-9015} 22 | - DEBUG=${DEBUG:-0} 23 | - VERBOSE=${VERBOSE:-1} 24 | restart: unless-stopped 25 | network_mode: host 26 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | 3 | gpsd-exporter: 4 | image: ghcr.io/brendanbank/gpsd-prometheus-exporter:latest 5 | container_name: gpsd-exporter 6 | ports: 7 | - "${EXPORTER_PORT:-9015}:9015" 8 | environment: 9 | - GPSD_HOST=${GPSD_HOST:-host.docker.internal} 10 | - GPSD_PORT=${GPSD_PORT:-2947} 11 | - GEOPOINT_LON=${GEOPOINT_LON:-38.897809878} 12 | - GEOPOINT_LAT=${GEOPOINT_LAT:--77.036551259} 13 | - PPS_BUCKET_SIZE=${PPS_BUCKET_SIZE:-50000} 14 | - PPS_BUCKET_COUNT=${PPS_BUCKET_COUNT:-40} 15 | - PPS_TIME1=${PPS_TIME1} 16 | - GEO_BUCKET_SIZE=${GEO_BUCKET_SIZE:-0.5} 17 | - GEO_BUCKET_COUNT=${GEO_BUCKET_COUNT:-40} 18 | - EXPORTER_PORT=${EXPORTER_PORT:-9015} 19 | - DEBUG=${DEBUG:-0} 20 | - VERBOSE=${VERBOSE:-1} 21 | extra_hosts: 22 | - "host.docker.internal:host-gateway" 23 | restart: unless-stopped 24 | network_mode: host 25 | -------------------------------------------------------------------------------- /Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": 
"c3b48562df5b71cb2c3944bbd5d37d0c628201aa5e035ab5bbc9d5cb24b05fd0" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "*" 9 | }, 10 | "sources": [ 11 | { 12 | "name": "pypi", 13 | "url": "https://pypi.org/simple", 14 | "verify_ssl": true 15 | } 16 | ] 17 | }, 18 | "default": { 19 | "prometheus-client": { 20 | "hashes": [ 21 | "sha256:4585b0d1223148c27a225b10dbec5ae9bc4c81a99a3fa80774fa6209935324e1", 22 | "sha256:c88b1e6ecf6b41cd8fb5731c7ae919bf66df6ec6fafa555cd6c0e16ca169ae92" 23 | ], 24 | "index": "pypi", 25 | "markers": "python_version >= '3.8'", 26 | "version": "==0.19.0" 27 | } 28 | }, 29 | "develop": {} 30 | } 31 | -------------------------------------------------------------------------------- /gps/__init__.py: -------------------------------------------------------------------------------- 1 | # Make core client functions available without prefix. 2 | # This code is generated by scons. Do not hand-hack it! 3 | # 4 | # This file is Copyright 2010 by the GPSD project 5 | # SPDX-License-Identifier: BSD-2-Clause 6 | # 7 | # This code runs compatibly under Python 2 and 3.x for x >= 2. 8 | # Preserve this property! 9 | from __future__ import absolute_import # Ensure Python2 behaves like Python 3 10 | 11 | from .gps import * 12 | from .misc import * 13 | 14 | # Keep in sync with gpsd.h 15 | api_version_major = 3 # bumped on incompatible changes 16 | api_version_minor = 15 # bumped on compatible changes 17 | 18 | # at some point this will need an override method 19 | __iconpath__ = '/tmp/gps/share/gpsd/icons' 20 | 21 | __version__ = '3.25' 22 | 23 | # The 'client' module exposes some C utility functions for Python clients. 24 | # The 'packet' module exposes the packet getter via a Python interface. 
25 | 26 | # vim: set expandtab shiftwidth=4 27 | -------------------------------------------------------------------------------- /fix_gps_board.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Thanks to https://lang-ship.com/reference/Arduino/libraries/RTC_RV-3028-C7_Arduino_Library/class_r_v3028.html#a9cbc9a009d4e5dbfeb29e366140be42b 4 | # And the folks at https://github.com/raspberrypi/linux/issues/2912 5 | 6 | # Raspberry Pi GPS/RTC Expansion Board https://store.uputronics.com/index.php?route=product/product&path=60_64&product_id=81 7 | 8 | function wait_for_EEBusy_done { 9 | busy=$((0x80)) 10 | while (( busy == 0x80 )) 11 | do 12 | status=$( i2cget -y 1 0x52 0x0E ) 13 | busy=$((status & 0x80)) 14 | done 15 | } 16 | 17 | rmmod rtc_rv3028 18 | 19 | wait_for_EEBusy_done 20 | 21 | # disable auto refresh 22 | register=$( i2cget -y 1 0x52 0x0F ) 23 | writeback=$((register | 0x08)) 24 | i2cset -y 1 0x52 0x0F $writeback 25 | 26 | # enable BSM in level switching mode 27 | register=$( i2cget -y 1 0x52 0x37 ) 28 | writeback=$((register | 0x0C)) 29 | i2cset -y 1 0x52 0x37 $writeback 30 | 31 | # update EEPROM 32 | i2cset -y 1 0x52 0x27 0x00 33 | i2cset -y 1 0x52 0x27 0x11 34 | 35 | wait_for_EEBusy_done 36 | 37 | # reenable auto refresh 38 | register=$( i2cget -y 1 0x52 0x0F ) 39 | writeback=$((register & ~0x08)) 40 | i2cset -y 1 0x52 0x0F $writeback 41 | 42 | wait_for_EEBusy_done 43 | 44 | modprobe rtc_rv3028 45 | 46 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2023, Brendan Bank 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. 
Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
30 | -------------------------------------------------------------------------------- /entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | EXPORTER_ARGS="" 4 | 5 | if [ "${DEBUG}" != "0" ] && [ -n "${DEBUG}" ]; then 6 | EXPORTER_ARGS="${EXPORTER_ARGS} -d" 7 | fi 8 | 9 | # Add verbose flag if VERBOSE is set 10 | if [ -n "${VERBOSE}" ]; then 11 | EXPORTER_ARGS="${EXPORTER_ARGS} -v" 12 | fi 13 | 14 | if [ -n "${GPSD_HOST}" ]; then 15 | EXPORTER_ARGS="${EXPORTER_ARGS} --hostname ${GPSD_HOST}" 16 | fi 17 | if [ -n "${GPSD_PORT}" ]; then 18 | EXPORTER_ARGS="${EXPORTER_ARGS} --port ${GPSD_PORT}" 19 | fi 20 | if [ -n "${EXPORTER_PORT}" ]; then 21 | EXPORTER_ARGS="${EXPORTER_ARGS} --exporter-port ${EXPORTER_PORT}" 22 | fi 23 | 24 | if [ -n "${GEOPOINT_LON}" ]; then 25 | EXPORTER_ARGS="${EXPORTER_ARGS} --geopoint-lon ${GEOPOINT_LON}" 26 | fi 27 | if [ -n "${GEOPOINT_LAT}" ]; then 28 | EXPORTER_ARGS="${EXPORTER_ARGS} --geopoint-lat ${GEOPOINT_LAT}" 29 | fi 30 | 31 | if [ -n "${GEO_BUCKET_SIZE}" ]; then 32 | EXPORTER_ARGS="${EXPORTER_ARGS} --geo-bucket-size ${GEO_BUCKET_SIZE}" 33 | fi 34 | if [ -n "${GEO_BUCKET_COUNT}" ]; then 35 | EXPORTER_ARGS="${EXPORTER_ARGS} --geo-bucket-count ${GEO_BUCKET_COUNT}" 36 | fi 37 | 38 | if [ -n "${PPS_BUCKET_SIZE}" ]; then 39 | EXPORTER_ARGS="${EXPORTER_ARGS} --pps-bucket-size ${PPS_BUCKET_SIZE}" 40 | fi 41 | if [ -n "${PPS_BUCKET_COUNT}" ]; then 42 | EXPORTER_ARGS="${EXPORTER_ARGS} --pps-bucket-count ${PPS_BUCKET_COUNT}" 43 | fi 44 | 45 | # Add PPS histogram support if PPS_TIME1 is set 46 | if [ -n "${PPS_TIME1}" ]; then 47 | EXPORTER_ARGS="${EXPORTER_ARGS} --pps-histogram --pps-time1 ${PPS_TIME1}" 48 | fi 49 | 50 | echo ./gpsd_exporter.py --offset-from-geopoint ${EXPORTER_ARGS} 51 | ./gpsd_exporter.py --offset-from-geopoint ${EXPORTER_ARGS} 52 | 53 | -------------------------------------------------------------------------------- /.gitignore: 
-------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | .project 131 | .pydevproject 132 | ntpd_monitor.log 133 | .settings/ 134 | /.DS_Store 135 | .DS_Store 136 | -------------------------------------------------------------------------------- /generate-release-notes.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Generate Release Notes from Commit 4 | # Usage: ./generate-release-notes.sh [tag-name] 5 | 6 | COMMIT_HASH=${1:-"a395c6ffe5d6383d6c6d58e4b77c8c803f366180"} 7 | TAG_NAME=${2:-"v1.0.0"} 8 | 9 | echo "Generating release notes from commit: $COMMIT_HASH" 10 | echo "Tag name: $TAG_NAME" 11 | echo "" 12 | 13 | # Get commits from the specified commit to HEAD 14 | echo "## What's New in $TAG_NAME" > RELEASE_NOTES.md 15 | echo "" >> RELEASE_NOTES.md 16 | echo "### Changes since commit $COMMIT_HASH" >> RELEASE_NOTES.md 17 | echo "" >> RELEASE_NOTES.md 18 | 19 | # Get all commits from the specified commit to HEAD 20 | COMMITS=$(git log --oneline --no-merges $COMMIT_HASH..HEAD) 21 | 22 | if [ -z "$COMMITS" ]; then 23 | echo "No commits found since $COMMIT_HASH" 24 | echo "### No changes since $COMMIT_HASH" >> RELEASE_NOTES.md 25 | else 26 | echo "$COMMITS" | while read -r commit; do 27 | if [ ! 
-z "$commit" ]; then 28 | echo "- $commit" >> RELEASE_NOTES.md 29 | fi 30 | done 31 | fi 32 | 33 | # Add Docker image information 34 | echo "" >> RELEASE_NOTES.md 35 | echo "### Docker Image" >> RELEASE_NOTES.md 36 | echo "" >> RELEASE_NOTES.md 37 | echo "The Docker image is available at:" >> RELEASE_NOTES.md 38 | echo "\`\`\`bash" >> RELEASE_NOTES.md 39 | echo "ghcr.io/brendanbank/gpsd-prometheus-exporter:$TAG_NAME" >> RELEASE_NOTES.md 40 | echo "\`\`\`" >> RELEASE_NOTES.md 41 | echo "" >> RELEASE_NOTES.md 42 | 43 | # Add usage example 44 | echo "### Usage" >> RELEASE_NOTES.md 45 | echo "" >> RELEASE_NOTES.md 46 | echo "\`\`\`bash" >> RELEASE_NOTES.md 47 | echo "docker run -d \\" >> RELEASE_NOTES.md 48 | echo " --name gpsd-exporter \\" >> RELEASE_NOTES.md 49 | echo " -p 9015:9015 \\" >> RELEASE_NOTES.md 50 | echo " -e GPSD_HOST=host.docker.internal \\" >> RELEASE_NOTES.md 51 | echo " ghcr.io/brendanbank/gpsd-prometheus-exporter:$TAG_NAME" >> RELEASE_NOTES.md 52 | echo "\`\`\`" >> RELEASE_NOTES.md 53 | 54 | # Add multi-platform information 55 | echo "" >> RELEASE_NOTES.md 56 | echo "### Multi-Platform Support" >> RELEASE_NOTES.md 57 | echo "" >> RELEASE_NOTES.md 58 | echo "This release includes Docker images for:" >> RELEASE_NOTES.md 59 | echo "- **linux/amd64**: Intel/AMD 64-bit processors" >> RELEASE_NOTES.md 60 | echo "- **linux/arm64/v8**: Apple Silicon, modern ARM64 servers" >> RELEASE_NOTES.md 61 | echo "- **linux/arm/v7**: Raspberry Pi, older ARM devices" >> RELEASE_NOTES.md 62 | echo "" >> RELEASE_NOTES.md 63 | echo "Docker will automatically select the correct image for your platform." 
>> RELEASE_NOTES.md 64 | 65 | echo "" 66 | echo "Release notes generated in RELEASE_NOTES.md" 67 | echo "Content:" 68 | echo "----------------------------------------" 69 | cat RELEASE_NOTES.md 70 | 71 | -------------------------------------------------------------------------------- /PUBLISHING.md: -------------------------------------------------------------------------------- 1 | # Publishing Guide 2 | 3 | This guide explains how to publish the gpsd-prometheus-exporter Docker image to GitHub Container Registry (ghcr.io) using fine-grained personal access tokens and automated CI/CD. 4 | 5 | ## Setup Fine-Grained Personal Access Token 6 | 7 | ### 1. Create Fine-Grained Token 8 | 9 | 1. Go to GitHub → Settings → Developer settings → Personal access tokens → Fine-grained tokens 10 | 2. Click "Generate new token" 11 | 3. Configure the token: 12 | 13 | **Token name**: `gpsd-exporter-publish` 14 | 15 | **Repository access**: 16 | - Select "Only select repositories" 17 | - Choose your repository 18 | 19 | **Permissions**: 20 | - **Repository permissions**: 21 | - `Contents`: Read and write 22 | - `Metadata`: Read-only 23 | - `Pull requests`: Read and write 24 | - `Workflows`: Read and write 25 | 26 | **Token expiration**: Choose appropriate expiration (e.g., 90 days) 27 | 28 | ### 2. Store Token as Repository Secret 29 | 30 | 1. Go to your repository → Settings → Secrets and variables → Actions 31 | 2. Click "New repository secret" 32 | 3. Name: `FINE_GRAINED_TOKEN` 33 | 4. Value: Paste your fine-grained token 34 | 35 | ### 3. Enable Package Publishing 36 | 37 | The workflow uses the built-in `GITHUB_TOKEN` for package publishing, which is automatically provided by GitHub Actions and has the necessary permissions for the repository. 38 | 39 | ## CI/CD Pipeline Features 40 | 41 | The pipeline includes: 42 | 43 | ### 1. **Testing Job** 44 | - Python syntax validation 45 | - Dependency installation 46 | - Entrypoint script testing 47 | 48 | ### 2. 
**Build and Push Job** 49 | - Multi-platform Docker builds 50 | - Automatic tagging based on: 51 | - Git tags (semantic versions) 52 | - Branch names 53 | - Commit SHA 54 | - Caching for faster builds 55 | 56 | ### 3. **Security Scanning** 57 | - Trivy vulnerability scanning 58 | - SARIF report upload to GitHub Security tab 59 | 60 | ## Pipeline Triggers 61 | 62 | The pipeline runs on: 63 | - **Push to main/develop branches**: Build and push latest 64 | - **Pull requests**: Test only (no push) 65 | - **Git tags (v*)**: Build and push versioned releases 66 | - **Manual trigger**: Via GitHub Actions UI 67 | 68 | ## Usage Examples 69 | 70 | ### Manual Publishing 71 | 72 | ```bash 73 | # Login with fine-grained token 74 | echo $FINE_GRAINED_TOKEN | docker login ghcr.io -u YOUR_USERNAME --password-stdin 75 | 76 | # Build and push 77 | docker build -t ghcr.io/YOUR_USERNAME/gpsd-prometheus-exporter:latest . 78 | docker push ghcr.io/YOUR_USERNAME/gpsd-prometheus-exporter:latest 79 | ``` 80 | 81 | ### Automated Publishing 82 | 83 | ```bash 84 | # Create a release 85 | git tag v1.0.0 86 | git push origin v1.0.0 87 | # Pipeline automatically builds and publishes 88 | ``` 89 | 90 | ### Using the Published Image 91 | 92 | Update your `docker-compose.yml`: 93 | 94 | ```yaml 95 | services: 96 | gpsd-exporter: 97 | image: ghcr.io/YOUR_USERNAME/gpsd-prometheus-exporter:latest 98 | # ... rest of configuration 99 | ``` 100 | 101 | ## Security Benefits of Fine-Grained Tokens 102 | 103 | 1. **Minimal permissions**: Only access to specific repository 104 | 2. **Time-limited**: Automatic expiration 105 | 3. **Granular control**: Exact permissions needed 106 | 4. **Audit trail**: Clear logging of token usage 107 | 5. 
**No cross-repo access**: Cannot access other repositories 108 | 109 | ## Troubleshooting 110 | 111 | ### Token Issues 112 | - Ensure token has correct repository permissions 113 | - Check token expiration 114 | - Verify token is stored as `FINE_GRAINED_TOKEN` secret 115 | - Ensure repository has package publishing enabled 116 | 117 | ### Build Issues 118 | - Check GitHub Actions logs for detailed error messages 119 | - Verify Dockerfile syntax 120 | - Ensure all dependencies are available 121 | 122 | ### Registry Issues 123 | - Verify repository is public (or you have access) 124 | - Check package visibility settings 125 | - Ensure image name matches repository name 126 | 127 | ## Best Practices 128 | 129 | 1. **Use semantic versioning** for releases 130 | 2. **Test locally** before pushing 131 | 3. **Monitor security scans** in GitHub Security tab 132 | 4. **Rotate tokens** regularly 133 | 5. **Use specific tags** for production deployments 134 | -------------------------------------------------------------------------------- /RELEASE_NOTES.md: -------------------------------------------------------------------------------- 1 | ---------------------------------------- 2 | ## What's New in 1.0.2 3 | 4 | ### Changes since commit a395c6ffe5d6383d6c6d58e4b77c8c803f366180 5 | 6 | - e18b986 Refactor GitHub Actions workflow for Docker publishing: remove Trivy security scan job and add support for multiple platforms in the build process. 7 | - 9b2cb45 fix CI/CD workflows 8 | - 0b4f5f4 Update docker-compose.yml to add new environment variables for PPS_TIME1, DEBUG, and VERBOSE, enhancing configuration options for the service. 9 | - 676ec42 Enhance GitHub Actions workflow for Docker publishing by adding Docker Buildx setup and building an image for Trivy vulnerability scanning. Update Trivy scan configuration to use the newly built image and skip version checks. 
10 | - ee639ec Update GitHub Actions workflow for Docker publishing: add exit code and severity filters for Trivy scan, and upgrade upload action to v3. 11 | - 7e9d2bf Update README.md to add a closing note wishing users good luck. Test CI/CD 12 | - 70f9be3 auto release notes 13 | - 20b1dc3 dd github actions. 14 | - f827bce Update PUBLISHING.md to remove unnecessary package permissions, add section on enabling package publishing, and include troubleshooting tips for repository settings. 15 | - d2d02b9 Refactor gpsd_exporter.py to use the packaging module for gps version checks, enhancing compatibility and error handling. Add a new PUBLISHING.md file detailing the process for publishing the Docker image to GitHub Container Registry, including CI/CD pipeline features and best practices. 16 | - 1c833b9 Update gpsd_exporter.py to increase default retry delay from 5 to 10 seconds, improve error handling for connection issues, and adjust README.md to clarify gps version requirements, now specifying a minimum of 3.18. 17 | - 915512c raise kb error 18 | - 0e1cdf1 Update Dockerfile to use Python 3.12 for improved compatibility and performance. 19 | - 79e067c Update Dockerfile to install the latest gps package version. Modify gpsd_exporter.py to handle JSON encoding issues in newer Python versions and adjust version constraints for the gps package. Update README.md to reflect the new gps version requirement. 20 | - 3b6d067 Refactor error handling in gpsd_exporter.py to re-raise KeyboardInterrupt for proper shutdown management in the main loop. 21 | - 43a1ba9 Update docker-compose.yml to use specific GitHub username for image. Modify Dockerfile to install gps version 3.25 for compatibility. Enhance gpsd_exporter.py to enforce gps version constraints. Update README.md to reflect the requirement for gps version 3.25. 22 | - 01a69c6 Enhance docker-compose and entrypoint scripts to support new environment variables for verbose output and PPS histogram. 
Updated README.md to reflect changes in environment variable configuration and added details on enhanced features for local builds. 23 | - f878f1e Update docker-compose.build.yml to set default value for DEBUG variable 24 | - e00de2f Update entrypoint.sh to refine DEBUG variable handling for exporter arguments 25 | - 1937923 docker build with local connections to gpds 26 | - cf22890 Enhance gpsd_exporter.py with connection timeout and retry logic. Updated docker-compose.build.yml to include DEBUG environment variable and set network mode to host. 27 | - d585e85 create a seperate docker-compose file to biuld the image locally. 28 | - 791514a Refactor docker-compose.yml to use environment variables for configuration. Updated README.md to include instructions for creating a .env file and using environment variables with docker-compose. 29 | - 710d79c Enhanced error handling in gpsd_exporter.py to manage missing satellite data and connection issues. Added try-except blocks in key functions to log warnings and errors without crashing the application. 30 | 31 | ### Docker Image 32 | 33 | The Docker image is available at: 34 | ```bash 35 | ghcr.io/brendanbank/gpsd-prometheus-exporter:1.0.2 36 | ``` 37 | 38 | ### Usage 39 | 40 | ```bash 41 | docker run -d \ 42 | --name gpsd-exporter \ 43 | -p 9015:9015 \ 44 | -e GPSD_HOST=host.docker.internal \ 45 | ghcr.io/brendanbank/gpsd-prometheus-exporter:1.0.2 46 | ``` 47 | 48 | ### Multi-Platform Support 49 | 50 | This release includes Docker images for: 51 | - **linux/amd64**: Intel/AMD 64-bit processors 52 | - **linux/arm64/v8**: Apple Silicon, modern ARM64 servers 53 | - **linux/arm/v7**: Raspberry Pi, older ARM devices 54 | 55 | Docker will automatically select the correct image for your platform. 
-------------------------------------------------------------------------------- /docker-publish-org.yml: -------------------------------------------------------------------------------- 1 | name: CI/CD Pipeline 2 | 3 | on: 4 | push: 5 | tags: [ 'v*' ] 6 | workflow_dispatch: 7 | 8 | env: 9 | REGISTRY: ghcr.io 10 | IMAGE_NAME: ${{ github.repository }} 11 | 12 | jobs: 13 | test: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - name: Checkout repository 17 | uses: actions/checkout@v4 18 | 19 | - name: Install dependencies (distro packages) 20 | run: | 21 | sudo apt-get update 22 | sudo apt-get install -y python3-gps python3-prometheus-client 23 | 24 | - name: Run basic syntax check 25 | run: | 26 | python -m py_compile gpsd_exporter.py 27 | echo "Syntax check passed" 28 | 29 | - name: CLI help smoke test (system python3 with python3-gps) 30 | run: | 31 | python3 --version 32 | python3 -c "import gps; print('gps module OK:', gps.__file__)" 33 | python3 gpsd_exporter.py --help 34 | 35 | build-and-push: 36 | needs: test 37 | runs-on: ubuntu-latest 38 | if: startsWith(github.ref, 'refs/tags/v') 39 | permissions: 40 | contents: read 41 | id-token: write 42 | packages: write 43 | attestations: write 44 | 45 | steps: 46 | - name: Checkout repository 47 | uses: actions/checkout@v4 48 | 49 | - name: Set up Docker Buildx 50 | uses: docker/setup-buildx-action@v3 51 | 52 | - name: Log in to Container Registry 53 | uses: docker/login-action@v3 54 | with: 55 | registry: ${{ env.REGISTRY }} 56 | username: ${{ github.actor }} 57 | password: ${{ secrets.GITHUB_TOKEN }} 58 | 59 | - name: Extract metadata 60 | id: meta 61 | uses: docker/metadata-action@v5 62 | with: 63 | images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} 64 | flavor: | 65 | latest=true 66 | tags: | 67 | type=semver,pattern={{version}} 68 | type=semver,pattern={{major}}.{{minor}} 69 | 70 | - name: Build and push Docker image 71 | id: build 72 | uses: docker/build-push-action@v5 73 | with: 74 | context: . 
75 | platforms: linux/amd64,linux/arm64/v8,linux/arm/v7 76 | push: true 77 | tags: ${{ steps.meta.outputs.tags }} 78 | labels: ${{ steps.meta.outputs.labels }} 79 | cache-from: type=gha 80 | cache-to: type=gha,mode=max 81 | provenance: true 82 | sbom: true 83 | 84 | - name: Generate build provenance 85 | uses: actions/attest-build-provenance@v1 86 | with: 87 | subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} 88 | subject-digest: ${{ steps.build.outputs.digest }} 89 | push-to-registry: true 90 | 91 | 92 | security-scan: 93 | needs: build-and-push 94 | runs-on: ubuntu-latest 95 | permissions: 96 | contents: read 97 | security-events: write 98 | steps: 99 | - name: Checkout repository 100 | uses: actions/checkout@v4 101 | 102 | - name: Set up Docker Buildx 103 | uses: docker/setup-buildx-action@v3 104 | 105 | - name: Build image for scanning (no push) 106 | uses: docker/build-push-action@v5 107 | with: 108 | context: . 109 | platforms: linux/amd64 110 | push: false 111 | load: true 112 | tags: gpsd-prometheus-exporter:scan 113 | 114 | - name: Run Trivy vulnerability scanner 115 | uses: aquasecurity/trivy-action@0.24.0 116 | with: 117 | image-ref: gpsd-prometheus-exporter:scan 118 | format: sarif 119 | output: trivy-results.sarif 120 | severity: CRITICAL,HIGH,MEDIUM 121 | ignore-unfixed: true 122 | exit-code: '0' 123 | 124 | - name: Upload Trivy SARIF to GitHub Security tab 125 | uses: github/codeql-action/upload-sarif@v3 126 | if: always() 127 | with: 128 | sarif_file: trivy-results.sarif 129 | 130 | cleanup: 131 | runs-on: ubuntu-latest 132 | needs: build-and-push 133 | permissions: 134 | contents: read 135 | packages: write 136 | steps: 137 | - name: Delete untagged versions (keep last 10) 138 | uses: actions/delete-package-versions@v5 139 | with: 140 | package-name: gpsd-prometheus-exporter 141 | package-type: container 142 | min-versions-to-keep: 10 143 | delete-only-untagged-versions: true 144 | 145 | - name: Delete old SHA-tagged versions (keep last 20) 
146 | uses: actions/delete-package-versions@v5 147 | with: 148 | package-name: gpsd-prometheus-exporter 149 | package-type: container 150 | min-versions-to-keep: 20 151 | ignore-versions: '^(latest|v[0-9]+(\.[0-9]+){0,2})$' 152 | -------------------------------------------------------------------------------- /.github/workflows/docker-publish.yml: -------------------------------------------------------------------------------- 1 | name: CI/CD Pipeline 2 | on: 3 | push: 4 | branches: [ develop ] 5 | tags: [ 'v*' ] 6 | pull_request: 7 | branches: [ master ] 8 | workflow_dispatch: 9 | 10 | env: 11 | REGISTRY: ghcr.io 12 | IMAGE_NAME: ${{ github.repository }} 13 | 14 | jobs: 15 | test: 16 | runs-on: ubuntu-latest 17 | steps: 18 | - name: Checkout repository 19 | uses: actions/checkout@v4 20 | 21 | - name: Set up Python 22 | uses: actions/setup-python@v4 23 | with: 24 | python-version: '3.9' 25 | 26 | - name: Install dependencies 27 | run: | 28 | # Speed up apt by removing man-db to avoid slow trigger processing 29 | sudo apt-get remove -y --purge man-db || true 30 | sudo rm -f /var/lib/man-db/auto-update || true 31 | python -m pip install --upgrade pip 32 | pip install prometheus-client 33 | # pip install gps 34 | # Install gpsd python library (may need system packages) 35 | # sudo apt-get update 36 | # sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends python3-gps 37 | 38 | - name: Run basic syntax check 39 | run: | 40 | python -m py_compile gpsd_exporter.py 41 | echo "Syntax check passed" 42 | 43 | - name: CLI help smoke test (system python3 with python3-gps) 44 | run: | 45 | python3 --version 46 | python3 -c "import gps; print('gps module OK:', gps.__file__)" 47 | python3 gpsd_exporter.py --help 48 | 49 | build-and-push: 50 | needs: test 51 | runs-on: ubuntu-latest 52 | permissions: 53 | contents: read 54 | packages: write 55 | id-token: write 56 | attestations: write 57 | 58 | steps: 59 | - name: Checkout repository 60 | uses: 
actions/checkout@v4 61 | 62 | - name: Set up Docker Buildx 63 | uses: docker/setup-buildx-action@v3 64 | 65 | - name: Log in to Container Registry 66 | uses: docker/login-action@v3 67 | with: 68 | registry: ${{ env.REGISTRY }} 69 | username: ${{ github.actor }} 70 | password: ${{ secrets.GITHUB_TOKEN }} 71 | 72 | - name: Extract metadata 73 | id: meta 74 | uses: docker/metadata-action@v5 75 | with: 76 | images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} 77 | tags: | 78 | type=ref,event=branch 79 | type=ref,event=pr 80 | type=semver,pattern={{version}} 81 | type=semver,pattern={{major}}.{{minor}} 82 | type=raw,value=latest,enable=${{ startsWith(github.ref, 'refs/tags/v') }} 83 | 84 | - name: Build and push Docker image 85 | id: build 86 | uses: docker/build-push-action@v5 87 | with: 88 | context: . 89 | # platforms: linux/amd64,linux/arm64 90 | platforms: linux/386,linux/amd64,linux/arm/v6,linux/arm/v7,linux/arm64,linux/ppc64le,linux/s390x 91 | push: true 92 | tags: ${{ steps.meta.outputs.tags }} 93 | labels: ${{ steps.meta.outputs.labels }} 94 | cache-from: type=gha 95 | cache-to: type=gha,mode=max 96 | sbom: false 97 | provenance: false 98 | 99 | outputs: 100 | image-digest: ${{ steps.build.outputs.digest }} 101 | 102 | security-scan: 103 | needs: build-and-push 104 | runs-on: ubuntu-latest 105 | steps: 106 | - name: Checkout repository 107 | uses: actions/checkout@v4 108 | 109 | - name: Run Trivy vulnerability scanner (by digest) 110 | uses: aquasecurity/trivy-action@master 111 | with: 112 | image-ref: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}@${{ needs.build-and-push.outputs.image-digest }} 113 | format: 'sarif' 114 | output: 'trivy-results.sarif' 115 | 116 | - name: Upload Trivy scan results to GitHub Security tab 117 | uses: github/codeql-action/upload-sarif@v3 118 | if: always() 119 | with: 120 | sarif_file: 'trivy-results.sarif' 121 | 122 | # cleanup: 123 | # runs-on: ubuntu-latest 124 | # needs: build-and-push 125 | # permissions: 126 | # contents: read 127 
| # packages: write 128 | # steps: 129 | # - name: Delete untagged versions (keep last 10) 130 | # uses: actions/delete-package-versions@v5 131 | # with: 132 | # package-name: gpsd-prometheus-exporter 133 | # package-type: container 134 | # min-versions-to-keep: 10 135 | # delete-only-untagged-versions: true 136 | 137 | # - name: Delete old SHA-tagged versions (keep last 20) 138 | # uses: actions/delete-package-versions@v5 139 | # with: 140 | # package-name: gpsd-prometheus-exporter 141 | # package-type: container 142 | # min-versions-to-keep: 20 143 | # ignore-versions: '^(latest|v[0-9]+(\.[0-9]+){0,2})$' 144 | -------------------------------------------------------------------------------- /.github/workflows/release-notes.yml: -------------------------------------------------------------------------------- 1 | name: Generate Release Notes 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'v*' 7 | workflow_dispatch: 8 | inputs: 9 | tag: 10 | description: 'Tag to generate release notes for' 11 | required: true 12 | default: 'v1.0.0' 13 | 14 | jobs: 15 | generate-release-notes: 16 | runs-on: ubuntu-latest 17 | steps: 18 | - name: Checkout repository 19 | uses: actions/checkout@v4 20 | with: 21 | fetch-depth: 0 22 | 23 | - name: Get tag name 24 | id: tag 25 | run: | 26 | if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then 27 | echo "tag=${{ github.event.inputs.tag }}" >> $GITHUB_OUTPUT 28 | else 29 | echo "tag=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT 30 | fi 31 | 32 | - name: Generate release notes 33 | id: release_notes 34 | run: | 35 | TAG="${{ steps.tag.outputs.tag }}" 36 | PREVIOUS_TAG=$(git describe --tags --abbrev=0 HEAD~1 2>/dev/null || echo "") 37 | 38 | if [ -z "$PREVIOUS_TAG" ]; then 39 | # First release, compare with initial commit 40 | COMMITS=$(git log --oneline --no-merges --reverse main --not $(git rev-list --max-parents=0 HEAD) | head -20) 41 | echo "### What's New in $TAG" > RELEASE_NOTES.md 42 | echo "" >> RELEASE_NOTES.md 43 | echo "### Initial 
Release" >> RELEASE_NOTES.md 44 | echo "" >> RELEASE_NOTES.md 45 | echo "This is the initial release of gpsd-prometheus-exporter." >> RELEASE_NOTES.md 46 | echo "" >> RELEASE_NOTES.md 47 | echo "### Key Features:" >> RELEASE_NOTES.md 48 | echo "" >> RELEASE_NOTES.md 49 | echo "$COMMITS" | while read -r commit; do 50 | if [ ! -z "$commit" ]; then 51 | echo "- $commit" >> RELEASE_NOTES.md 52 | fi 53 | done 54 | else 55 | # Compare with previous tag 56 | COMMITS=$(git log --oneline --no-merges $PREVIOUS_TAG..HEAD) 57 | echo "## What's New in $TAG" > RELEASE_NOTES.md 58 | echo "" >> RELEASE_NOTES.md 59 | echo "### Changes since $PREVIOUS_TAG" >> RELEASE_NOTES.md 60 | echo "" >> RELEASE_NOTES.md 61 | echo "$COMMITS" | while read -r commit; do 62 | if [ ! -z "$commit" ]; then 63 | echo "- $commit" >> RELEASE_NOTES.md 64 | fi 65 | done 66 | fi 67 | 68 | # Add Docker image information 69 | echo "" >> RELEASE_NOTES.md 70 | echo "### Docker Image" >> RELEASE_NOTES.md 71 | echo "" >> RELEASE_NOTES.md 72 | echo "The Docker image is available at:" >> RELEASE_NOTES.md 73 | echo "\`\`\`bash" >> RELEASE_NOTES.md 74 | echo "ghcr.io/brendanbank/gpsd-prometheus-exporter:latest" >> RELEASE_NOTES.md 75 | echo "\`\`\`" >> RELEASE_NOTES.md 76 | echo "Or use the specific tag:" >> RELEASE_NOTES.md 77 | echo "\`\`\`" >> RELEASE_NOTES.md 78 | echo "ghcr.io/brendanbank/gpsd-prometheus-exporter:$TAG" >> RELEASE_NOTES.md 79 | echo "\`\`\`" >> RELEASE_NOTES.md 80 | echo "" >> RELEASE_NOTES.md 81 | 82 | # Add usage example 83 | echo "### Usage" >> RELEASE_NOTES.md 84 | echo "" >> RELEASE_NOTES.md 85 | echo "\`\`\`bash" >> RELEASE_NOTES.md 86 | echo "docker run -d \\" >> RELEASE_NOTES.md 87 | echo " --name gpsd-exporter \\" >> RELEASE_NOTES.md 88 | echo " --network=host \\" >> RELEASE_NOTES.md 89 | echo " -e GPSD_HOST=localhost \\" >> RELEASE_NOTES.md 90 | echo " -e GPSD_PORT=2947 \\" >> RELEASE_NOTES.md 91 | echo " -e GEOPOINT_LON=38.897809878 \\" >> RELEASE_NOTES.md 92 | echo " -e 
GEOPOINT_LAT=-77.036551259 \\" >> RELEASE_NOTES.md 93 | echo " -e PPS_BUCKET_SIZE=250 \\" >> RELEASE_NOTES.md 94 | echo " -e PPS_BUCKET_COUNT=40 \\" >> RELEASE_NOTES.md 95 | echo " -e PPS_TIME1=0.123 \\" >> RELEASE_NOTES.md 96 | echo " -e GEO_BUCKET_SIZE=0.5 \\" >> RELEASE_NOTES.md 97 | echo " -e GEO_BUCKET_COUNT=40 \\" >> RELEASE_NOTES.md 98 | echo " -e EXPORTER_PORT=9015 \\" >> RELEASE_NOTES.md 99 | echo " -e VERBOSE=1 \\" >> RELEASE_NOTES.md 100 | echo " -e DEBUG=0 \\" >> RELEASE_NOTES.md 101 | echo " ghcr.io/brendanbank/gpsd-prometheus-exporter:latest" >> RELEASE_NOTES.md 102 | echo "\`\`\`" >> RELEASE_NOTES.md 103 | 104 | # Read the generated notes 105 | RELEASE_NOTES=$(cat RELEASE_NOTES.md) 106 | echo "notes<<EOF" >> $GITHUB_OUTPUT 107 | echo "$RELEASE_NOTES" >> $GITHUB_OUTPUT 108 | echo "EOF" >> $GITHUB_OUTPUT 109 | 110 | - name: Create GitHub Release 111 | uses: actions/create-release@v1 112 | env: 113 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 114 | with: 115 | tag_name: ${{ steps.tag.outputs.tag }} 116 | release_name: Release ${{ steps.tag.outputs.tag }} 117 | body: ${{ steps.release_notes.outputs.notes }} 118 | draft: false 119 | prerelease: false 120 | 121 | - name: Upload release notes artifact 122 | uses: actions/upload-artifact@v4 123 | with: 124 | name: release-notes-${{ steps.tag.outputs.tag }} 125 | path: RELEASE_NOTES.md 126 | -------------------------------------------------------------------------------- /gps/packet.py: -------------------------------------------------------------------------------- 1 | # packet.py - recognize GPS packet types 2 | # This code is generated by scons. Do not hand-hack it! 3 | # 4 | # This file is Copyright 2019 by the GPSD project 5 | # SPDX-License-Identifier: BSD-2-Clause 6 | # 7 | # This code runs compatibly under Python 2 and 3.x for x >= 2. 8 | # Preserve this property! 9 | # 10 | # -*- coding: utf-8 -*- 11 | """Python binding of the libgpsd module for recognizing GPS packets.
12 | 13 | The new() function returns a new packet-lexer instance. Lexer instances 14 | have two methods: 15 | get() takes a file descriptor argument and returns a tuple consisting of 16 | the integer packet type and string packet value. On end of file it returns 17 | (-1, ''). 18 | reset() resets the packet-lexer to its initial state. 19 | The module also has a register_report() function that accepts a callback 20 | for debug message reporting. The callback will get two arguments, the error 21 | level of the message and the message itself. 22 | """ 23 | from __future__ import absolute_import, print_function 24 | import ctypes 25 | import ctypes.util 26 | import os 27 | import os.path 28 | import sys 29 | 30 | import gps # For gps.__path__ 31 | import gps.misc 32 | 33 | 34 | # Packet types and Logging levels extracted from gpsd.h 35 | MAX_PACKET_LENGTH = 9216 36 | COMMENT_PACKET = 0 37 | NMEA_PACKET = 1 38 | AIVDM_PACKET = 2 39 | GARMINTXT_PACKET = 3 40 | SIRF_PACKET = 4 41 | ZODIAC_PACKET = 5 42 | TSIP_PACKET = 6 43 | EVERMORE_PACKET = 7 44 | ITALK_PACKET = 8 45 | GARMIN_PACKET = 9 46 | NAVCOM_PACKET = 10 47 | UBX_PACKET = 11 48 | SUPERSTAR2_PACKET = 12 49 | ONCORE_PACKET = 13 50 | GEOSTAR_PACKET = 14 51 | NMEA2000_PACKET = 15 52 | GREIS_PACKET = 16 53 | MAX_GPSPACKET_TYPE = 16 54 | RTCM2_PACKET = 17 55 | RTCM3_PACKET = 18 56 | JSON_PACKET = 19 57 | PACKET_TYPES = 20 58 | SKY_PACKET = 21 59 | LOG_SHOUT = 0 60 | LOG_WARN = 1 61 | LOG_CLIENT = 2 62 | LOG_INF = 3 63 | LOG_PROG = 4 64 | LOG_IO = 5 65 | LOG_DATA = 6 66 | LOG_SPIN = 7 67 | LOG_RAW = 8 68 | LOG_RAW1 = 9 69 | LOG_RAW2 = 10 70 | ISGPS_ERRLEVEL_BASE = LOG_RAW 71 | 72 | 73 | class PacketLibraryNotFoundError(Exception): 74 | """Error loading packet library.""" 75 | pass 76 | 77 | 78 | def importado(): 79 | """ 80 | Load the packet library or throw a PacketLibraryNotFoundError trying. 81 | See below for search order. 
82 | find_library() looks in: LD_LIBRARY_PATH, DYLD_LIBRARY_PATH, 83 | $home/lib, /.usr/local/lib, /usr/lib, /lib 84 | Returns the library handle.""" 85 | 86 | packet_name = 'libgpsdpacket.30.0.0.dylib' 87 | packet_dirs = [] # places to look 88 | lib_dir = '/tmp/gps/lib' 89 | 90 | # First look in the directory containing this 'gps' package, possibly 91 | # following a symlink in the process. 92 | # This is the normal location within the build tree. It is expected 93 | # to fail when running the installed version. 94 | packet_dirs.append(os.path.dirname(os.path.realpath(gps.__path__[0]))) 95 | 96 | # Next look in the library install directory. 97 | # This is the expected location when running the installed version. 98 | packet_dirs.append(os.path.realpath(lib_dir)) 99 | 100 | # Form full paths to candidates so far 101 | packet_paths = [os.path.join(os.path.abspath(x), packet_name) 102 | for x in packet_dirs] 103 | 104 | # Finally try find_library(). 105 | 106 | # find_library() looks for bare library name, using dlopen() 107 | # May, or may not, return a full path. Either way use as is. 108 | # 109 | # linux dlopen() looks in: 110 | # LD_LIBRARY_PATH, 111 | # paths in /etc/ld.so.cache, 112 | # /lib(64) and /usr/lib(64) 113 | # 114 | # macOS dlopen() looks in: 115 | # LD_LIBRARY_PATH, 116 | # DYLD_LIBRARY_PATH, 117 | # current working directory, 118 | # DYLD_FALLBACK_LIBRARY_PATH (default: $HOME/lib:/usr/local/lib:/usr/lib) 119 | # Note that some recent macOS versions have stopped honoring 120 | # *_LIBRARY_PATH, for security reasons. 121 | # 122 | # Linux: 123 | # find_library() does not usually return a full path. 124 | # LoadLibrary() can use a full path, or whatever find_library() returned. 
125 | # 126 | # macOS: 127 | # find_library() returns a full path unless lib in current directory 128 | # find_library() returns no full path if lib in current directory 129 | # But LoadLibrary() always needs a full path 130 | # 131 | packet_path = ctypes.util.find_library('gpsdpacket') 132 | if packet_path: 133 | packet_paths.append(packet_path) 134 | 135 | for packet_path in packet_paths: 136 | try: 137 | if sys.flags.verbose: 138 | print('try_packet_lib: %s' % packet_path, file=sys.stderr) 139 | lib = ctypes.cdll.LoadLibrary(packet_path) 140 | # get the library version from the library 141 | gpsd_version = ctypes.c_char_p.in_dll(lib, "gpsd_version").value 142 | gpsd_version = gps.polystr(gpsd_version) 143 | if '3.25' != gpsd_version: 144 | sys.stderr.write("WARNING: got library version %s, " 145 | "expected %s\n" % 146 | (gpsd_version, '3.25')) 147 | return lib 148 | except OSError: 149 | pass 150 | 151 | raise PacketLibraryNotFoundError("Can't find packet library") 152 | 153 | 154 | _loaded = None 155 | _packet = importado() 156 | 157 | _lexer_size = ctypes.c_size_t.in_dll(_packet, "fvi_size_lexer") 158 | LEXER_SIZE = _lexer_size.value 159 | _buffer_size = ctypes.c_size_t.in_dll(_packet, "fvi_size_buffer").value 160 | 161 | REPORTER = ctypes.CFUNCTYPE(ctypes.c_int, ctypes.c_char_p) 162 | 163 | 164 | class GpsdErrOutT(ctypes.Structure): 165 | '''Used in gps.packet:register_report() to set logging callback.''' 166 | # pylint: disable-msg=R0903 167 | _fields_ = [('debug', ctypes.c_int), 168 | ('report', REPORTER), 169 | ('label', ctypes.c_char_p)] 170 | 171 | 172 | class lexer_t(ctypes.Structure): 173 | '''Used in gps.packet:lexer.get() to pass in data and pull 174 | out length, packet type, packet, and another datum.''' 175 | # pylint: disable-msg=R0903 176 | _fields_ = [ 177 | ('packet_type', ctypes.c_int), 178 | ('state', ctypes.c_uint), 179 | ('length', ctypes.c_size_t), 180 | ('inbuffer', ctypes.c_ubyte * _buffer_size), 181 | ('inbuflen', ctypes.c_size_t), 
182 | ('inbufptr', ctypes.c_char_p), 183 | ('outbuffer', ctypes.c_ubyte * _buffer_size), 184 | ('outbuflen', ctypes.c_size_t), 185 | ('char_counter', ctypes.c_ulong), 186 | ('retry_counter', ctypes.c_ulong), 187 | ('counter', ctypes.c_uint), 188 | ('errout', GpsdErrOutT), 189 | ] 190 | 191 | 192 | def new(): 193 | """new() -> new packet-self object""" 194 | return Lexer() 195 | 196 | 197 | def register_report(reporter): 198 | """register_report(callback) 199 | 200 | callback must be a callable object expecting a string as parameter.""" 201 | global _loaded 202 | if callable(reporter): 203 | _loaded.errout.report = REPORTER(reporter) 204 | 205 | 206 | class Lexer(): 207 | """GPS packet lexer object 208 | 209 | Fetch a single packet from file descriptor 210 | """ 211 | pointer = None 212 | 213 | def __init__(self): 214 | global _loaded 215 | _packet.ffi_Lexer_init.restype = ctypes.POINTER(lexer_t) 216 | self.pointer = _packet.ffi_Lexer_init() 217 | _loaded = self.pointer.contents 218 | 219 | def get(self, file_handle): 220 | """Get a packet from a file descriptor.""" 221 | global _loaded 222 | _packet.packet_get.restype = ctypes.c_int 223 | _packet.packet_get.argtypes = [ctypes.c_int, ctypes.POINTER(lexer_t)] 224 | length = _packet.packet_get(file_handle, self.pointer) 225 | _loaded = self.pointer.contents 226 | packet = '' 227 | for octet in range(_loaded.outbuflen): 228 | packet += chr(_loaded.outbuffer[octet]) 229 | return [length, 230 | _loaded.packet_type, 231 | gps.misc.polybytes(packet), 232 | _loaded.char_counter] 233 | 234 | def reset(self): 235 | """Reset the packet self to ground state.""" 236 | _packet.ffi_Lexer_init.restype = None 237 | _packet.ffi_Lexer_init.argtypes = [ctypes.POINTER(lexer_t)] 238 | _packet.ffi_Lexer_init(self.pointer) 239 | 240 | # vim: set expandtab shiftwidth=4 241 | -------------------------------------------------------------------------------- /gps/aiogps.py: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | 4 | # Copyright 2019 Grand Joldes (grandwork2@yahoo.com). 5 | # 6 | # This file is Copyright 2019 by the GPSD project 7 | # SPDX-License-Identifier: BSD-2-clause 8 | 9 | # This code run compatibly under Python 3.x for x >= 6. 10 | # Codacy D203 and D211 conflict, I choose D203 11 | # Codacy D212 and D213 conflict, I choose D212 12 | 13 | """aiogps.py -- Asyncio Python interface to GPSD. 14 | 15 | This module adds asyncio support to the Python gps interface. It runs on 16 | Python versions >= 3.6 and provides the following benefits: 17 | - easy integration in asyncio applications (all I/O operations done through 18 | non-blocking coroutines, async context manager, async iterator); 19 | - support for cancellation (all operations are cancellable); 20 | - support for timeouts (on both read and connect); 21 | - support for connection keep-alive (using the TCP keep alive mechanism) 22 | - support for automatic re-connection; 23 | - configurable connection parameters; 24 | - configurable exception handling (internally or by application); 25 | - logging support (logger name: 'gps.aiogps'). 26 | 27 | The use of timeouts, keepalive and automatic reconnection make possible easy 28 | handling of GPSD connections over unreliable networks. 
29 | 30 | Examples: 31 | import logging 32 | import gps.aiogps 33 | 34 | # configuring logging 35 | logging.basicConfig() 36 | logging.root.setLevel(logging.INFO) 37 | # Example of setting up logging level for the aiogps logger 38 | logging.getLogger('gps.aiogps').setLevel(logging.ERROR) 39 | 40 | # using default parameters 41 | async with gps.aiogps.aiogps() as gpsd: 42 | async for msg in gpsd: 43 | # Log last message 44 | logging.info(f'Received: {msg}') 45 | # Log updated GPS status 46 | logging.info(f'\nGPS status:\n{gpsd}') 47 | 48 | # using custom parameters 49 | try: 50 | async with gps.aiogps.aiogps( 51 | connection_args = { 52 | 'host': '192.168.10.116', 53 | 'port': 2947 54 | }, 55 | connection_timeout = 5, 56 | reconnect = 0, # do not try to reconnect, raise exceptions 57 | alive_opts = { 58 | 'rx_timeout': 5 59 | } 60 | ) as gpsd: 61 | async for msg in gpsd: 62 | logging.info(msg) 63 | except asyncio.CancelledError: 64 | return 65 | except asyncio.IncompleteReadError: 66 | logging.info('Connection closed by server') 67 | except asyncio.TimeoutError: 68 | logging.error('Timeout waiting for gpsd to respond') 69 | except Exception as exc: 70 | logging.error(f'Error: {exc}') 71 | 72 | """ 73 | 74 | __all__ = ['aiogps', ] 75 | 76 | import asyncio 77 | import logging 78 | import socket 79 | from typing import Optional, Union, Awaitable 80 | 81 | from .client import gpsjson, dictwrapper 82 | from .gps import gps, gpsdata, WATCH_ENABLE, PACKET_SET 83 | from .misc import polystr, polybytes 84 | 85 | 86 | class aiogps(gps): # pylint: disable=R0902 87 | """An asyncio gps client. 88 | 89 | Reimplements all gps IO methods using asyncio coros. Adds connection 90 | management, an asyncio context manager and an asyncio iterator. 91 | 92 | The class uses a logger named 'gps.aiogps' to record events. The logger is 93 | configured with a NullHandler to disable any message logging until the 94 | application configures another handler. 
95 | """ 96 | 97 | def __init__(self, # pylint: disable=W0231 98 | connection_args: Optional[dict] = None, 99 | connection_timeout: Optional[float] = None, 100 | reconnect: Optional[float] = 2, 101 | alive_opts: Optional[dict] = None) -> None: 102 | """Arguments: 103 | connection_args: arguments needed for opening a connection. 104 | These will be passed directly to asyncio.open_connection. 105 | If set to None, a connection to the default gps host and port 106 | will be attempded. 107 | connection_timeout: time to wait for a connection to complete 108 | (seconds). Set to None to disable. 109 | reconnect: configures automatic reconnections: 110 | - 0: reconnection is not attempted in case of an error and the 111 | error is raised to the user; 112 | - number > 0: delay until next reconnection attempt (seconds). 113 | alive_opts: options related to detection of disconnections. 114 | Two mechanisms are supported: TCP keepalive (default, may not 115 | be available on all platforms) and Rx timeout, through the 116 | following options: 117 | - rx_timeout: Rx timeout (seconds). Set to None to disable. 
118 | - SO_KEEPALIVE: socket keepalive and related parameters: 119 | - TCP_KEEPIDLE 120 | - TCP_KEEPINTVL 121 | - TCP_KEEPCNT 122 | """ 123 | # If connection_args are not specified use defaults 124 | self.connection_args = connection_args or { 125 | 'host': self.host, 126 | 'port': self.port 127 | } 128 | self.connection_timeout = connection_timeout 129 | assert 0 <= reconnect 130 | self.reconnect = reconnect 131 | # If alive_opts are not specified use defaults 132 | self.alive_opts = alive_opts or { 133 | 'rx_timeout': None, 134 | 'SO_KEEPALIVE': 1, 135 | 'TCP_KEEPIDLE': 2, 136 | 'TCP_KEEPINTVL': 2, 137 | 'TCP_KEEPCNT': 3 138 | } 139 | # Connection access streams 140 | self.reader: Optional[asyncio.StreamReader] = None 141 | self.writer: Optional[asyncio.StreamWriter] = None 142 | # Set up logging 143 | self.logger = logging.getLogger(__name__) 144 | # Set the Null handler - prevents logging message handling unless the 145 | # application sets up a handler. 146 | self.logger.addHandler(logging.NullHandler()) 147 | # Init gps parents 148 | gpsdata.__init__(self) # pylint: disable=W0233 149 | gpsjson.__init__(self) # pylint: disable=W0233 150 | # Provide the response in both 'str' and 'bytes' form 151 | self.bresponse = b'' 152 | self.response = polystr(self.bresponse) 153 | # Default stream command 154 | self.stream_command = self.generate_stream_command(WATCH_ENABLE) 155 | self.valid = 0 156 | 157 | def __del__(self) -> None: 158 | """Destructor.""" 159 | self.close() 160 | 161 | async def _open_connection(self) -> None: 162 | """Opens connection to GPSD server and configure the TCP socket.""" 163 | self.logger.info( 164 | f"Connecting to gpsd at {self.connection_args['host']}" + 165 | (f":{self.connection_args['port']}" 166 | if self.connection_args['port'] else '')) 167 | self.reader, self.writer = await asyncio.wait_for( 168 | asyncio.open_connection(**self.connection_args), 169 | self.connection_timeout) 170 | # Set socket options 171 | sock = 
self.writer.get_extra_info('socket') 172 | if sock is not None: 173 | if 'SO_KEEPALIVE' in self.alive_opts: 174 | sock.setsockopt(socket.SOL_SOCKET, 175 | socket.SO_KEEPALIVE, 176 | self.alive_opts['SO_KEEPALIVE']) 177 | if hasattr( 178 | sock, 179 | 'TCP_KEEPIDLE') and 'TCP_KEEPIDLE' in self.alive_opts: 180 | sock.setsockopt(socket.IPPROTO_TCP, 181 | socket.TCP_KEEPIDLE, # pylint: disable=E1101 182 | self.alive_opts['TCP_KEEPIDLE']) 183 | if hasattr( 184 | sock, 185 | 'TCP_KEEPINTVL') and 'TCP_KEEPINTVL' in self.alive_opts: 186 | sock.setsockopt(socket.IPPROTO_TCP, 187 | socket.TCP_KEEPINTVL, # pylint: disable=E1101 188 | self.alive_opts['TCP_KEEPINTVL']) 189 | if hasattr( 190 | sock, 191 | 'TCP_KEEPCNT') and 'TCP_KEEPCNT' in self.alive_opts: 192 | sock.setsockopt(socket.IPPROTO_TCP, 193 | socket.TCP_KEEPCNT, 194 | self.alive_opts['TCP_KEEPCNT']) 195 | 196 | def close(self) -> None: 197 | """Closes connection to GPSD server.""" 198 | if self.writer: 199 | try: 200 | self.writer.close() 201 | except Exception: # pylint: disable=W0703 202 | pass 203 | self.writer = None 204 | 205 | def waiting(self) -> bool: # pylint: disable=W0221 206 | """Mask the blocking waiting method from gpscommon.""" 207 | return True 208 | 209 | async def read(self) -> Union[dictwrapper, str]: 210 | """Reads data from GPSD server.""" 211 | while True: 212 | await self.connect() 213 | try: 214 | rx_timeout = self.alive_opts.get('rx_timeout', None) 215 | reader = self.reader.readuntil(separator=b'\n') 216 | self.bresponse = await asyncio.wait_for(reader, 217 | rx_timeout) 218 | self.response = polystr(self.bresponse) 219 | if self.response.startswith( 220 | "{") and self.response.endswith("}\r\n"): 221 | self.unpack(self.response) 222 | self._oldstyle_shim() 223 | self.valid |= PACKET_SET 224 | return self.data 225 | return self.response 226 | except asyncio.CancelledError: 227 | self.close() 228 | raise 229 | except Exception as exc: # pylint: disable=W0703 230 | error = 'timeout' if 
isinstance( 231 | exc, asyncio.TimeoutError) else exc 232 | self.logger.warning( 233 | f'Failed to get message from GPSD: {error}') 234 | self.close() 235 | if self.reconnect: 236 | # Try again later 237 | await asyncio.sleep(self.reconnect) 238 | else: 239 | raise 240 | 241 | async def connect(self) -> None: # pylint: disable=W0221 242 | """Connects to GPSD server and starts streaming data.""" 243 | while not self.writer: 244 | try: 245 | await self._open_connection() 246 | await self.stream() 247 | self.logger.info('Connected to gpsd') 248 | except asyncio.CancelledError: 249 | self.close() 250 | raise 251 | except Exception as exc: # pylint: disable=W0703 252 | error = 'timeout' if isinstance( 253 | exc, asyncio.TimeoutError) else exc 254 | self.logger.error(f'Failed to connect to GPSD: {error}') 255 | self.close() 256 | if self.reconnect: 257 | # Try again later 258 | await asyncio.sleep(self.reconnect) 259 | else: 260 | raise 261 | 262 | async def send(self, commands) -> None: 263 | """Sends commands.""" 264 | bcommands = polybytes(commands + "\n") 265 | if self.writer: 266 | self.writer.write(bcommands) 267 | await self.writer.drain() 268 | 269 | async def stream(self, flags: Optional[int] = 0, 270 | devpath: Optional[str] = None) -> None: 271 | """Creates and sends the stream command.""" 272 | if 0 < flags: 273 | # Update the stream command 274 | self.stream_command = self.generate_stream_command(flags, devpath) 275 | 276 | if self.stream_command: 277 | self.logger.info(f'Sent stream as: {self.stream_command}') 278 | await self.send(self.stream_command) 279 | else: 280 | raise TypeError(f'Invalid streaming command: {flags}') 281 | 282 | async def __aenter__(self) -> 'aiogps': 283 | """Context manager entry.""" 284 | return self 285 | 286 | async def __aexit__(self, exc_type, exc, traceback) -> None: 287 | """Context manager exit: close connection.""" 288 | self.close() 289 | 290 | def __aiter__(self) -> 'aiogps': 291 | """Async iterator interface.""" 292 | 
return self 293 | 294 | async def __anext__(self) -> Union[dictwrapper, str]: 295 | """Returns next message from GPSD.""" 296 | data = await self.read() 297 | return data 298 | 299 | def __next__(self) -> Awaitable: 300 | """Reimplementation of the blocking iterator from gps. 301 | Returns an awaitable which returns the next message from GPSD. 302 | """ 303 | return self.read() 304 | 305 | # vim: set expandtab shiftwidth=4 306 | -------------------------------------------------------------------------------- /gps/client.py: -------------------------------------------------------------------------------- 1 | # This file is Copyright 2019 by the GPSD project 2 | # SPDX-License-Identifier: BSD-2-Clause 3 | # 4 | # This code run compatibly under Python 2 and 3.x for x >= 2. 5 | # Preserve this property! 6 | # Codacy D203 and D211 conflict, I choose D203 7 | # Codacy D212 and D213 conflict, I choose D212 8 | 9 | 10 | """gpsd client functions.""" 11 | 12 | from __future__ import absolute_import, print_function, division 13 | 14 | import json 15 | import select 16 | import socket 17 | import sys 18 | import time 19 | 20 | import gps # for VERB_* 21 | from .misc import polystr, polybytes 22 | from .watch_options import * 23 | 24 | GPSD_PORT = "2947" 25 | 26 | 27 | class gpscommon(object): 28 | 29 | """Isolate socket handling and buffering from protocol interpretation.""" 30 | host = "127.0.0.1" 31 | port = GPSD_PORT 32 | 33 | def __init__(self, 34 | device=None, 35 | host="127.0.0.1", 36 | input_file_name=None, 37 | port=GPSD_PORT, 38 | should_reconnect=False, 39 | verbose=0): 40 | """Init gpscommon.""" 41 | self.device = device 42 | self.input_file_name = input_file_name 43 | self.input_fd = None 44 | self.linebuffer = b'' 45 | self.received = time.time() 46 | self.reconnect = should_reconnect 47 | self.sock = None # in case we blow up in connect 48 | self.stream_command = b'' 49 | self.verbose = verbose 50 | # Provide the response in both 'str' and 'bytes' form 51 | 
self.bresponse = b'' 52 | self.response = polystr(self.bresponse) 53 | 54 | if gps.VERB_PROG <= verbose: 55 | print('gpscommon(device=%s host=%s port=%s\n' 56 | ' input_file_name=%s verbose=%s)' % 57 | (device, host, port, input_file_name, verbose)) 58 | 59 | if input_file_name: 60 | # file input, binary mode, for binary data. 61 | self.input_fd = open(input_file_name, "rb") 62 | 63 | elif host is not None and port is not None: 64 | self.host = host 65 | self.port = port 66 | self.connect(self.host, self.port) 67 | # else? 68 | 69 | def connect(self, host, port): 70 | """Connect to a host on a given port. 71 | 72 | If the hostname ends with a colon (`:') followed by a number, and 73 | there is no port specified, that suffix will be stripped off and the 74 | number interpreted as the port number to use. 75 | """ 76 | if not port and (host.find(':') == host.rfind(':')): 77 | i = host.rfind(':') 78 | if 0 <= i: 79 | host, port = host[:i], host[i + 1:] 80 | try: 81 | port = int(port) 82 | except ValueError: 83 | raise socket.error("nonnumeric port") 84 | # if 0 < self.verbose: 85 | # print 'connect:', (host, port) 86 | self.sock = None 87 | for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM): 88 | af, socktype, proto, _canonname, sa = res 89 | try: 90 | self.sock = socket.socket(af, socktype, proto) 91 | # if 0 < self.debuglevel: print 'connect:', (host, port) 92 | self.sock.connect(sa) 93 | if 0 < self.verbose: 94 | print('connected to tcp://{}:{}'.format(host, port)) 95 | break 96 | # do not use except ConnectionRefusedError 97 | # # Python 2.7 doc does have this exception 98 | except socket.error as e: 99 | if 1 < self.verbose: 100 | msg = str(e) + ' (to {}:{})'.format(host, port) 101 | sys.stderr.write("error: {}\n".format(msg.strip())) 102 | self.close() 103 | raise # propagate error to caller 104 | 105 | def close(self): 106 | """Close the gpsd socket or file.""" 107 | if self.input_fd: 108 | self.input_fd.close() 109 | self.input_fd = None 110 | if 
self.sock: 111 | self.sock.close() 112 | self.sock = None 113 | 114 | def __del__(self): 115 | """Close the gpsd socket.""" 116 | self.close() 117 | 118 | def waiting(self, timeout=0): 119 | """Return True if data is ready for the client.""" 120 | if self.linebuffer or self.input_fd: 121 | # check for input_fd EOF? 122 | return True 123 | if self.sock is None: 124 | return False 125 | 126 | (winput, _woutput, _wexceptions) = select.select( 127 | (self.sock,), (), (), timeout) 128 | return [] != winput 129 | 130 | def read(self): 131 | """Wait for and read data being streamed from the daemon.""" 132 | if not self.input_fd and None is self.sock: 133 | # input_fd.open() was earlier, and read_only, so no stream() 134 | self.connect(self.host, self.port) 135 | if None is self.sock: 136 | return -1 137 | self.stream() 138 | 139 | eol = self.linebuffer.find(b'\n') 140 | if -1 == eol: 141 | # RTCM3 JSON can be over 4.4k long, so go big 142 | if self.input_fd: 143 | frag = self.input_fd.read(8192) 144 | else: 145 | frag = self.sock.recv(8192) 146 | 147 | if not frag: 148 | if 1 < self.verbose: 149 | sys.stderr.write( 150 | "poll: no available data: returning -1.\n") 151 | # Read failed 152 | return -1 153 | 154 | self.linebuffer += frag 155 | 156 | eol = self.linebuffer.find(b'\n') 157 | if -1 == eol: 158 | if 1 < self.verbose: 159 | sys.stderr.write("poll: partial message: returning 0.\n") 160 | # Read succeeded, but only got a fragment 161 | self.response = '' # Don't duplicate last response 162 | self.bresponse = b'' # Don't duplicate last response 163 | return 0 164 | else: 165 | if 1 < self.verbose: 166 | sys.stderr.write("poll: fetching from buffer.\n") 167 | 168 | # We got a line 169 | eol += 1 170 | # Provide the response in both 'str' and 'bytes' form 171 | self.bresponse = self.linebuffer[:eol] 172 | self.response = polystr(self.bresponse) 173 | self.linebuffer = self.linebuffer[eol:] 174 | 175 | # Can happen if daemon terminates while we're reading. 
class json_error(Exception):

    """Error raised when a gpsd response cannot be decoded as JSON."""

    def __init__(self, data, explanation):
        """Record the offending buffer and why it failed to parse.

        data -- the raw text that failed to decode
        explanation -- human-readable reason for the failure
        """
        # Derive from Exception rather than BaseException so generic
        # `except Exception` handlers can see JSON decode failures;
        # PEP 8 recommends Exception for all non-exiting errors.
        # Catching `except json_error` continues to work unchanged.
        Exception.__init__(self)
        self.data = data
        self.explanation = explanation
242 | # This particular logic can fire on SKY or RTCM2 objects. 243 | if hasattr(self.data, "satellites"): 244 | self.data.satellites = [dictwrapper(x) 245 | for x in self.data.satellites] 246 | 247 | def stream(self, flags=0, devpath=None): 248 | """Control streaming reports from the daemon,""" 249 | if 0 < flags: 250 | self.stream_command = self.generate_stream_command(flags, devpath) 251 | else: 252 | self.stream_command = self.enqueued 253 | 254 | if self.stream_command: 255 | if 1 < self.verbose: 256 | sys.stderr.write("send: stream as:" 257 | " {}\n".format(self.stream_command)) 258 | self.send(self.stream_command) 259 | else: 260 | raise TypeError("Invalid streaming command!! : " + str(flags)) 261 | 262 | def generate_stream_command(self, flags=0, devpath=None): 263 | """Generate stream command.""" 264 | if flags & WATCH_OLDSTYLE: 265 | return self.generate_stream_command_old_style(flags) 266 | 267 | return self.generate_stream_command_new_style(flags, devpath) 268 | 269 | @staticmethod 270 | def generate_stream_command_old_style(flags=0): 271 | """Generate stream command, old style.""" 272 | if flags & WATCH_DISABLE: 273 | arg = "w-" 274 | if flags & WATCH_NMEA: 275 | arg += 'r-' 276 | 277 | elif flags & WATCH_ENABLE: 278 | arg = 'w+' 279 | if flags & WATCH_NMEA: 280 | arg += 'r+' 281 | 282 | return arg 283 | 284 | @staticmethod 285 | def generate_stream_command_new_style(flags=0, devpath=None): 286 | """Generate stream command, new style.""" 287 | if (flags & (WATCH_JSON | WATCH_OLDSTYLE | WATCH_NMEA | 288 | WATCH_RAW)) == 0: 289 | flags |= WATCH_JSON 290 | 291 | if flags & WATCH_DISABLE: 292 | arg = '?WATCH={"enable":false' 293 | if flags & WATCH_JSON: 294 | arg += ',"json":false' 295 | if flags & WATCH_NMEA: 296 | arg += ',"nmea":false' 297 | if flags & WATCH_RARE: 298 | arg += ',"raw":1' 299 | if flags & WATCH_RAW: 300 | arg += ',"raw":2' 301 | if flags & WATCH_SCALED: 302 | arg += ',"scaled":false' 303 | if flags & WATCH_TIMING: 304 | arg += 
class dictwrapper(object):

    """Wrapper that yields both class and dictionary behavior.

    The wrapped dict becomes the instance __dict__, so every key is
    reachable both as obj['key'] and as obj.key.
    """

    def __init__(self, ddict):
        """Adopt ddict as this instance's attribute dictionary."""
        self.__dict__ = ddict

    def get(self, k, d=None):
        """Return the value for k, or d if k is absent."""
        return self.__dict__.get(k, d)

    def keys(self):
        """Return the wrapped dict's keys."""
        return self.__dict__.keys()

    def __getitem__(self, key):
        """Emulate dictionary, for new-style interface."""
        return self.__dict__[key]

    def __iter__(self):
        """Iterate over the wrapped dict's keys."""
        return self.__dict__.__iter__()

    def __setitem__(self, key, val):
        """Emulate dictionary, for new-style interface."""
        self.__dict__[key] = val

    def __contains__(self, key):
        """Find key in dictwrapper."""
        return key in self.__dict__

    def __str__(self):
        """dictwrapper to string.

        Previously returned the empty string, which made every wrapped
        JSON object print as nothing in debug/log output; show the
        wrapped contents instead.
        """
        return "<dictwrapper: " + str(self.__dict__) + ">"
    __repr__ = __str__

    def __len__(self):
        """Length of the wrapped dict."""
        return len(self.__dict__)
# vim: set expandtab shiftwidth=4 380 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # gpsd-prometheus-exporter 2 | 3 | A [Prometheus](https://prometheus.io/) exporter for the [gpsd](https://gpsd.gitlab.io/gpsd/) GPS daemon that provides comprehensive GPS monitoring and visualization capabilities. 4 | 5 | ![Grafana Dashboard](https://github.com/brendanbank/gpsd-prometheus-exporter/blob/ce8d05be537ec7fe935bad0c9479cf3e0770b41a/img/grafana_gpsd_dashboard_1.png?raw=true) 6 | 7 | ## Table of Contents 8 | 9 | - [Overview](#overview) 10 | - [Features](#features) 11 | - [GPS Position and Quality Metrics](#gps-position-and-quality-metrics) 12 | - [Per Satellite Data](#per-satellite-data) 13 | - [PPS Time Synchronization](#pps-time-synchronization) 14 | - [Geographic Offset Tracking](#geographic-offset-tracking) 15 | - [Installation](#installation) 16 | - [Docker (Recommended)](#docker-recommended) 17 | - [Quick Start](#quick-start) 18 | - [Docker Compose](#docker-compose) 19 | - [Environment Variables](#environment-variables) 20 | - [Host Network Configuration](#host-network-configuration) 21 | - [Native Installation](#native-installation) 22 | - [Configuration](#configuration) 23 | - [Command Line Options](#command-line-options) 24 | - [Usage Examples](#usage-examples) 25 | - [Basic Docker Setup](#basic-docker-setup) 26 | - [Local Build](#local-build) 27 | - [Custom Configuration](#custom-configuration) 28 | - [Prometheus Integration](#prometheus-integration) 29 | - [Grafana Dashboard](#grafana-dashboard) 30 | 31 | ## Overview 32 | 33 | `gpsd-prometheus-exporter` connects to the TCP port of the GPSD daemon and records relevant GPS statistics, formatting them as Prometheus metrics for visualization in tools like [Grafana](https://grafana.com/). 
34 | 35 | The exporter provides real-time monitoring of: 36 | - GPS position accuracy and quality metrics 37 | - Individual satellite data and signal strength 38 | - PPS (Pulse Per Second) time synchronization accuracy 39 | - Geographic offset tracking from a reference point 40 | 41 | ## Features 42 | 43 | ### GPS Position and Quality Metrics 44 | 45 | Monitor GPS accuracy and quality metrics including DOP (Dilution of Precision) values: 46 | 47 | ![DOP Metrics](https://github.com/brendanbank/gpsd-prometheus-exporter/blob/ce8d05be537ec7fe935bad0c9479cf3e0770b41a/img/dop.png?raw=true) 48 | 49 | ### Per Satellite Data 50 | 51 | Track individual satellite performance and signal quality: 52 | 53 | ![Per Satellite Data](https://github.com/brendanbank/gpsd-prometheus-exporter/blob/ce8d05be537ec7fe935bad0c9479cf3e0770b41a/img/sats.png?raw=true) 54 | 55 | ### PPS Time Synchronization 56 | 57 | Monitor clock offset from PPS (Pulse Per Second) signals for precise time synchronization: 58 | 59 | ![PPS Time Offset](https://github.com/brendanbank/gpsd-prometheus-exporter/blob/ce8d05be537ec7fe935bad0c9479cf3e0770b41a/img/clock_pps_offset.png?raw=true) 60 | 61 | To enable PPS monitoring, start gpsd with a PPS device: 62 | 63 | ```bash 64 | gpsd [serial port path] /dev/pps[0-9] 65 | ``` 66 | 67 | Then add `--pps-histogram` to the exporter runtime arguments. 68 | 69 | ### Geographic Offset Tracking 70 | 71 | Track position offset from a stationary reference point: 72 | 73 | ![Geographic Offset](https://github.com/brendanbank/gpsd-prometheus-exporter/blob/ce8d05be537ec7fe935bad0c9479cf3e0770b41a/img/geo_offset.png?raw=true) 74 | 75 | ## Installation 76 | 77 | ### Docker (Recommended) 78 | 79 | The easiest way to run the exporter is using Docker. 
80 | 81 | #### Quick Start 82 | 83 | ```bash 84 | docker run -d --name gpsd-exporter \ 85 | --network=host \ 86 | -p 9015:9015 \ 87 | -e GPSD_HOST=localhost \ 88 | -e GPSD_PORT=2947 \ 89 | -e GEOPOINT_LON=38.897809878 \ 90 | -e GEOPOINT_LAT=-77.036551259 \ 91 | -e PPS_BUCKET_SIZE=250 \ 92 | -e PPS_BUCKET_COUNT=40 \ 93 | -e GEO_BUCKET_SIZE=0.5 \ 94 | -e GEO_BUCKET_COUNT=40 \ 95 | -e EXPORTER_PORT=9015 \ 96 | -e VERBOSE=1 \ 97 | -e DEBUG=0 \ 98 | ghcr.io/brendanbank/gpsd-prometheus-exporter:latest 99 | ``` 100 | 101 | #### Docker Compose 102 | 103 | Two Docker Compose files are provided: 104 | 105 | **Using Pre-built Image** (`docker-compose.yml`): 106 | ```bash 107 | docker compose up -d 108 | ``` 109 | 110 | **Building Locally** (`docker-compose.build.yml`): 111 | ```bash 112 | docker compose -f docker-compose.build.yml up --build 113 | ``` 114 | 115 | **Example configurations:** 116 | 117 | **Pre-built Image** (`docker-compose.yml`): 118 | ```yaml 119 | services: 120 | gpsd-exporter: 121 | image: ghcr.io/brendanbank/gpsd-prometheus-exporter:latest 122 | container_name: gpsd-exporter 123 | ports: 124 | - "${EXPORTER_PORT:-9015}:9015" 125 | environment: 126 | - GPSD_HOST=${GPSD_HOST:-host.docker.internal} 127 | - GPSD_PORT=${GPSD_PORT:-2947} 128 | - GEOPOINT_LON=${GEOPOINT_LON:-38.897809878} 129 | - GEOPOINT_LAT=${GEOPOINT_LAT:--77.036551259} 130 | - PPS_BUCKET_SIZE=${PPS_BUCKET_SIZE:-250} 131 | - PPS_BUCKET_COUNT=${PPS_BUCKET_COUNT:-40} 132 | - PPS_TIME1=${PPS_TIME1} 133 | - GEO_BUCKET_SIZE=${GEO_BUCKET_SIZE:-0.5} 134 | - GEO_BUCKET_COUNT=${GEO_BUCKET_COUNT:-40} 135 | - EXPORTER_PORT=${EXPORTER_PORT:-9015} 136 | - DEBUG=${DEBUG:-0} 137 | - VERBOSE=${VERBOSE:-1} 138 | extra_hosts: 139 | - "host.docker.internal:host-gateway" 140 | restart: unless-stopped 141 | network_mode: host 142 | ``` 143 | 144 | **Local Build** (`docker-compose.build.yml`): 145 | ```yaml 146 | services: 147 | gpsd-exporter: 148 | build: 149 | context: . 
150 | dockerfile: Dockerfile 151 | image: gpsd-prometheus-exporter:stable 152 | container_name: gpsd-exporter 153 | ports: 154 | - "${EXPORTER_PORT:-9015}:9015" 155 | environment: 156 | - GPSD_HOST=${GPSD_HOST:-localhost} 157 | - GPSD_PORT=${GPSD_PORT:-2947} 158 | - GEOPOINT_LON=${GEOPOINT_LON:-38.897809878} 159 | - GEOPOINT_LAT=${GEOPOINT_LAT:--77.036551259} 160 | - PPS_BUCKET_SIZE=${PPS_BUCKET_SIZE:-250} 161 | - PPS_BUCKET_COUNT=${PPS_BUCKET_COUNT:-40} 162 | - PPS_TIME1=${PPS_TIME1} 163 | - GEO_BUCKET_SIZE=${GEO_BUCKET_SIZE:-0.5} 164 | - GEO_BUCKET_COUNT=${GEO_BUCKET_COUNT:-40} 165 | - EXPORTER_PORT=${EXPORTER_PORT:-9015} 166 | - DEBUG=${DEBUG:-0} 167 | - VERBOSE=${VERBOSE:-1} 168 | restart: unless-stopped 169 | network_mode: host 170 | ``` 171 | 172 | #### Host Network Configuration 173 | 174 | - Linux (supports host networking): 175 | 176 | ```bash 177 | docker run -d --name gpsd-exporter \ 178 | --network=host \ 179 | -e GPSD_HOST=localhost \ 180 | -e GPSD_PORT=2947 \ 181 | ghcr.io/brendanbank/gpsd-prometheus-exporter:latest 182 | 183 | # Access metrics directly on the host 184 | curl 127.0.0.1:9015 185 | ``` 186 | 187 | Docker Compose on Linux can use `network_mode: host` (as shown in the examples above). When using host networking, omit any `ports:` mappings as they are ignored. 
188 | 189 | - macOS (host networking is not supported): 190 | 191 | ```bash 192 | docker run -d --name gpsd-exporter \ 193 | -p 9015:9015 \ 194 | -e GPSD_HOST=host.docker.internal \ 195 | -e GPSD_PORT=2947 \ 196 | ghcr.io/brendanbank/gpsd-prometheus-exporter:latest 197 | 198 | # Access metrics via the published port 199 | curl 127.0.0.1:9015 200 | ``` 201 | 202 | For Docker Compose on macOS, remove `network_mode: host`, keep `ports:` and `extra_hosts`, and set `GPSD_HOST=host.docker.internal`: 203 | 204 | ```yaml 205 | services: 206 | gpsd-exporter: 207 | ports: 208 | - "9015:9015" 209 | extra_hosts: 210 | - "host.docker.internal:host-gateway" 211 | environment: 212 | - GPSD_HOST=host.docker.internal 213 | - GPSD_PORT=2947 214 | ``` 215 | 216 | Remote gpsd (any OS): If your gpsd runs on a remote host (e.g., `ntp0.bgwlan.nl`), host networking is not required. Publish the port and point `GPSD_HOST` to the remote server: 217 | 218 | ```bash 219 | docker run -d --name gpsd-exporter \ 220 | -p 9015:9015 \ 221 | -e GPSD_HOST=ntp0.bgwlan.nl \ 222 | -e GPSD_PORT=2947 \ 223 | ghcr.io/brendanbank/gpsd-prometheus-exporter:latest 224 | ``` 225 | 226 | #### Environment Variables 227 | 228 | The following environment variables are supported for Docker deployments: 229 | 230 | | Variable | Default | Description | 231 | |----------|---------|-------------| 232 | | `GPSD_HOST` | `localhost` | gpsd hostname/IP address | 233 | | `GPSD_PORT` | `2947` | gpsd TCP port | 234 | | `EXPORTER_PORT` | `9015` | Prometheus exporter port | 235 | | `GEOPOINT_LON` | `38.897809878` | Reference longitude for offset calculation | 236 | | `GEOPOINT_LAT` | `-77.036551259` | Reference latitude for offset calculation | 237 | | `PPS_BUCKET_SIZE` | `250` | PPS histogram bucket size in nanoseconds | 238 | | `PPS_TIME1` | (not set) | PPS time1 offset (enables PPS histogram when set) | 239 | | `VERBOSE` | `1` | Enable verbose output (any value = verbose) | 240 | | `DEBUG` | `0` | Debug level (0 = no debug, 1+ 
= debug) | 241 | | `GEO_BUCKET_SIZE` | `0.5` | Geo offset histogram bucket size in meters | 242 | | `GEO_BUCKET_COUNT` | `40` | Geo offset histogram bucket count | 243 | | `PPS_BUCKET_COUNT` | `40` | PPS histogram bucket count | 244 | 245 | Create a `.env` file for configuration: 246 | 247 | ```bash 248 | # Create .env file 249 | cat > .env << EOF 250 | GPSD_HOST=localhost 251 | GPSD_PORT=2947 252 | GEOPOINT_LON=38.897809878 253 | GEOPOINT_LAT=-77.036551259 254 | PPS_BUCKET_SIZE=250 255 | PPS_BUCKET_COUNT=40 256 | PPS_TIME1=0.123 257 | GEO_BUCKET_SIZE=0.5 258 | GEO_BUCKET_COUNT=40 259 | EXPORTER_PORT=9015 260 | VERBOSE=1 261 | DEBUG=0 262 | EOF 263 | ``` 264 | 265 | ### Native Installation 266 | 267 | For systems where Docker is not available: 268 | 269 | #### Prerequisites 270 | 271 | Ensure gpsd, Prometheus, and Grafana are properly running. The exporter requires: 272 | 273 | - Python 3 274 | - [prometheus_client](https://github.com/prometheus/client_python) 275 | - [gps](https://gpsd.gitlab.io/gpsd/) library (version 3.18+) 276 | 277 | #### Installation Steps 278 | 279 | ```bash 280 | # Install dependencies 281 | apt update 282 | apt install python3-prometheus-client python3-gps 283 | 284 | # Clone repository 285 | git clone https://github.com/brendanbank/gpsd-prometheus-exporter.git 286 | cd gpsd-prometheus-exporter 287 | 288 | # Install service files 289 | cp gpsd_exporter.defaults /etc/default 290 | cp gpsd_exporter.service /etc/systemd/system 291 | cp gpsd_exporter.py /usr/local/bin 292 | chmod +x /usr/local/bin/gpsd_exporter.py 293 | 294 | # Enable and start service 295 | systemctl enable gpsd_exporter.service 296 | systemctl start gpsd_exporter.service 297 | ``` 298 | 299 | #### U-Blox GPS Configuration 300 | 301 | Some U-Blox GPS units require forced 115200 baud. See [gps_setserial.service](https://github.com/brendanbank/gpsd-prometheus-exporter/blob/master/gps_setserial.service) for boot-time configuration. 
302 | 303 | ## Configuration 304 | 305 | ### Command Line Options 306 | 307 | ```bash 308 | usage: gpsd_exporter.py [-h] [-v] [-V] [-d] [-p PORT] [-H HOSTNAME] [-E EXPORTER_PORT] [-t TIMEOUT] 309 | [--retry-delay RETRY_DELAY] [--max-retry-delay MAX_RETRY_DELAY] [-S] 310 | [--offset-from-geopoint] [--geopoint-lat GEO_LAT] [--geopoint-lon GEO_LON] 311 | [--geo-bucket-size GEO_BUCKET_SIZE] [--geo-bucket-count GEO_BUCKET_COUNT] 312 | [--pps-histogram] [--pps-bucket-size PPS_BUCKET_SIZE] 313 | [--pps-bucket-count PPS_BUCKET_COUNT] [--pps-time1 PPS_TIME1] 314 | 315 | gpsd_exporter -- Exporter for gpsd output 316 | 317 | options: 318 | -h, --help show this help message and exit 319 | -v, --verbose set verbosity level [default: None] 320 | -V, --version show program's version number and exit 321 | -d, --debug set debug level [default: 0] 322 | -p PORT, --port PORT set gpsd TCP Port number [default: 2947] 323 | -H HOSTNAME, --hostname HOSTNAME 324 | set gpsd TCP Hostname/IP address [default: localhost] 325 | -E EXPORTER_PORT, --exporter-port EXPORTER_PORT 326 | set TCP Port for the exporter server [default: 9015] 327 | -t TIMEOUT, --timeout TIMEOUT 328 | set connection timeout in seconds [default: 10] 329 | --retry-delay RETRY_DELAY 330 | initial retry delay in seconds [default: 10] 331 | --max-retry-delay MAX_RETRY_DELAY 332 | maximum retry delay in seconds [default: 300] 333 | -S, --disable-monitor-satellites 334 | Stops monitoring all satellites individually 335 | --offset-from-geopoint 336 | track offset (x,y offset and distance) from a stationary location. 337 | --geopoint-lat GEO_LAT 338 | Latitude of a fixed stationary location. 339 | --geopoint-lon GEO_LON 340 | Longitude of a fixed stationary location. 341 | --geo-bucket-size GEO_BUCKET_SIZE 342 | Bucket side of Geo histogram [default: 0.5 meter] 343 | --geo-bucket-count GEO_BUCKET_COUNT 344 | Bucket count of Geo histogram [default: 40] 345 | --pps-histogram generate histogram data from pps devices. 
346 | --pps-bucket-size PPS_BUCKET_SIZE 347 | Bucket side of PPS histogram in nanoseconds. [default: 250 ns] 348 | --pps-bucket-count PPS_BUCKET_COUNT 349 | Bucket count of PPS histogram [default: 40] 350 | --pps-time1 PPS_TIME1 351 | Local pps clock (offset) time1 (ntp.conf) [default: 0] 352 | ``` 353 | 354 | ## Usage Examples 355 | 356 | ### Basic Docker Setup 357 | 358 | Test the exporter with a simple Docker run: 359 | 360 | ```bash 361 | curl -s localhost:9015 362 | ``` 363 | 364 | Expected output: 365 | ``` 366 | # HELP gpsd_gdop Geometric (hyperspherical) dilution of precision 367 | # TYPE gpsd_gdop gauge 368 | gpsd_gdop 1.36 369 | # HELP gpsd_hdop Horizontal dilution of precision 370 | # TYPE gpsd_hdop gauge 371 | gpsd_hdop 0.74 372 | # HELP gpsd_lat Latitude in degrees: +/- signifies North/South. 373 | # TYPE gpsd_lat gauge 374 | gpsd_lat 52.4240029 375 | # HELP gpsd_long Longitude in degrees: +/- signifies East/West. 376 | # TYPE gpsd_long gauge 377 | gpsd_long 4.6157675 378 | ... 379 | ``` 380 | 381 | ### Local Build 382 | 383 | Build the Docker image locally with enhanced features: 384 | 385 | ```bash 386 | docker compose -f docker-compose.build.yml up --build 387 | ``` 388 | 389 | ### Prometheus Integration 390 | 391 | Add to your Prometheus configuration (`/etc/prometheus/prometheus.yml`): 392 | 393 | ```yaml 394 | scrape_configs: 395 | - job_name: gpsd 396 | static_configs: 397 | - targets: ['localhost:9015'] 398 | scrape_interval: 15s 399 | ``` 400 | 401 | ### Grafana Dashboard 402 | 403 | Import the provided [Grafana dashboard JSON](https://raw.githubusercontent.com/brendanbank/gpsd-prometheus-exporter/refs/heads/master/gpsd_grafana_dashboard.json) into Grafana for comprehensive GPS monitoring visualization. 404 | 405 | ## License 406 | 407 | Licensed under the BSD-3-Clause License. See [LICENSE](LICENSE) file for details. 408 | 409 | ## Contributing 410 | 411 | Contributions are welcome! Please feel free to submit a Pull Request. 
def monotonic():
    """return monotonic seconds, of unknown epoch.
    Python 2 to 3.7 has time.clock(), deprecates in 3.3+, removed in 3.8
    Python 3.5+ has time.monotonic()
    This always works
    """

    # Prefer the modern monotonic clock when the runtime provides one.
    clock = getattr(time, 'monotonic', None)
    if clock is not None:
        return clock()
    # Fall back for ancient interpreters that lack time.monotonic().
    return time.clock()
The most straightforward encoding to use in this 48 | # context is 'latin-1' (a.k.a.'iso-8859-1'), which directly maps all 256 49 | # 8-bit character values to Unicode page 0. Thus, if we can enforce the use 50 | # of 'latin-1' encoding, we can preserve arbitrary binary data while correctly 51 | # mapping any actual text to the proper characters. 52 | 53 | BINARY_ENCODING = 'latin-1' 54 | 55 | if bytes is str: # In Python 2 these functions can be null transformations 56 | 57 | polystr = str 58 | polybytes = bytes 59 | 60 | def make_std_wrapper(stream): 61 | """Dummy stdio wrapper function.""" 62 | return stream 63 | 64 | def get_bytes_stream(stream): 65 | """Dummy stdio bytes buffer function.""" 66 | return stream 67 | 68 | else: # Otherwise we do something real 69 | 70 | def polystr(o): 71 | """Convert bytes or str to str with proper encoding.""" 72 | 73 | if isinstance(o, str): 74 | return o 75 | if isinstance(o, (bytes, bytearray)): 76 | return str(o, encoding=BINARY_ENCODING) 77 | if isinstance(o, int): 78 | return str(o) 79 | raise ValueError 80 | 81 | def polybytes(o): 82 | """Convert bytes or str to bytes with proper encoding.""" 83 | if isinstance(o, bytes): 84 | return o 85 | if isinstance(o, str): 86 | return bytes(o, encoding=BINARY_ENCODING) 87 | raise ValueError 88 | 89 | def make_std_wrapper(stream): 90 | """Standard input/output wrapper factory function.""" 91 | # This ensures that the encoding of standard output and standard 92 | # error on Python 3 matches the binary encoding we use to turn 93 | # bytes to Unicode in polystr above. 
94 | # 95 | # newline="\n" ensures that Python 3 won't mangle line breaks 96 | # line_buffering=True ensures that interactive command sessions 97 | # work as expected 98 | return io.TextIOWrapper(stream.buffer, encoding=BINARY_ENCODING, 99 | newline="\n", line_buffering=True) 100 | 101 | def get_bytes_stream(stream): 102 | """Standard input/output bytes buffer function""" 103 | return stream.buffer 104 | 105 | # WGS84(G1674) defining parameters 106 | # https://en.wikipedia.org/wiki/Geodetic_datum 107 | # Section #World_Geodetic_System_1984_(WGS_84) 108 | # 109 | # http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf 110 | # 8-Jul-2014: 111 | # ftp://ftp.nga.mil/pub2/gandg/website/wgs84/NGA.STND.0036_1.0.0_WGS84.pdf 112 | WGS84A = 6378137.0 # equatorial radius (semi-major axis), meters 113 | WGS84F = 298.257223563 # flattening 114 | WGS84B = 6356752.314245 # polar radius (semi-minor axis) 115 | # 1st eccentricity squared = (WGS84A ** 2 + WGS84B **^ 2) / (WGS84A **^ 2) 116 | # valid 8-Jul-2014: 117 | WGS84E = 6.694379990141e-3 # 1st eccentricity squared 118 | # 2nd eccentricity squared = ((WGS84A **^ 2 - WGS84B **^ 2) / (WGS84B **^ 2) 119 | # valid 8-Jul-2014: 120 | WGS84E2 = 6.739496742276e-3 # 2nd eccentricy squared 121 | # WGS 84 value of the earth's gravitational constant for GPS user 122 | # GMgpsnav, valid 8-JUl-2014 123 | # Galileo uses μ = 3.986004418 × 1014 m3/s2 124 | # GLONASS uses 3.986004418e14 м3/s2 125 | WGS84GM = 3.9860050e14 # m^3/second^2 126 | # Earth's Angular Velocity, Omega dot e 127 | # valid 8-Jul-2014: 128 | # also Galileo 129 | # GLONASS uses 7.292115x10-5 130 | WGS84AV = 7.2921151467e-5 # rad/sec 131 | 132 | # GLONASS 133 | # ICD_GLONASS_5.1_(2008)_en.pdf 134 | # Table 3.2 Geodesic constants and parametres uniearth ellipsoid ПЗ 90.02 135 | # Earth rotation rate 7,292115x10-5 rad/s 136 | # Gravitational constant 398 600,4418×109 м3/s2 137 | # Gravitational constant of atmosphere( fMa ) 0.35×109 м3/s2 138 | # Speed of light 299 792 458 м/s 
139 | # Semi-major axis 6 378 136 м 140 | # Flattening 1/298,257 84 141 | # Equatorial acceleration of gravity 978 032,84 мGal 142 | # Correction to acceleration of gravity at sea-level due to Atmosphere 143 | # 0,87 мGal 144 | # Second zonal harmonic of the geopotential ( J2 0 ) 1082625,75×10-9 145 | # Fourth zonal harmonic of the geopotential ( J4 0 ) (- 2370,89×10-9) 146 | # Sixth zonal harmonic of the geopotential( J6 0 ) 6,08×10-9 147 | # Eighth zonal harmonic of the geopotential ( J8 0 ) 1,40×10-11 148 | # Normal potential at surface of common terrestrial ellipsoid (U0) 149 | # 62 636 861,4 м2/s2 150 | 151 | # speed of light (m/s), exact 152 | # same as GLONASS 153 | CLIGHT = 299792458.0 154 | # GPS_PI. Exact! The GPS and Galileo say so. 155 | GPS_PI = 3.1415926535898 156 | # GPS F, sec/sqrt(m), == -2*sqrt(WGS*$M)/c^2 157 | GPS_F = -4.442807633e-10 158 | 159 | # GPS L1 Frequency Hz (1575.42 MHz) 160 | GPS_L1_FR = 1575420000 161 | # GPS L1 Wavelength == C / GPS_L1_FR meters 162 | GPS_L1_WL = CLIGHT / GPS_L1_FR 163 | 164 | # GPS L2 Frequency Hz (1227.60 MHz) 165 | GPS_L2_FR = 1227600000 166 | # GPS L2 Wavelength == C / GPS_L2_FR meters 167 | GPS_L2_WL = CLIGHT / GPS_L2_FR 168 | 169 | # GPS L3 (1381.05 MHz) and L4 (1379.9133) unused as of 2020 170 | 171 | # GPS L5 Frequency Hz (1176.45 MHz) 172 | GPS_L5_FR = 1176450000 173 | # GPS L5 Wavelength == C / GPS_L2_FR meters 174 | GPS_L5_WL = CLIGHT / GPS_L5_FR 175 | 176 | RAD_2_DEG = 57.2957795130823208767981548141051703 177 | DEG_2_RAD = 0.0174532925199432957692369076848861271 178 | 179 | 180 | # some multipliers for interpreting GPS output 181 | # Note: A Texas Foot is ( meters * 3937/1200) 182 | # (Texas Natural Resources Code, Subchapter D, Sec 21.071 - 79) 183 | # not the same as an international fooot. 
def Deg2Rad(x):
    """Convert degrees to radians."""
    scale = math.pi / 180
    return x * scale


def Rad2Deg(x):
    """Convert radians to degrees."""
    scale = 180 / math.pi
    return x * scale
# FIXME: needs tests
def ecef2enu(x, y, z, lat, lon, altHAE):
    """East/North/Up (meters) of ECEF point (x, y, z) relative to an
    observer at lat/lon (degrees) and altHAE (meters)."""
    lat_r = math.radians(lat)
    lon_r = math.radians(lon)
    s_lat, c_lat = math.sin(lat_r), math.cos(lat_r)
    s_lon, c_lon = math.sin(lon_r), math.cos(lon_r)

    # prime vertical radius of curvature at the observer
    n = WGS84A / math.sqrt(1 - WGS84E * s_lat ** 2)

    # observer's own ECEF coordinates
    x0 = (altHAE + n) * c_lat * c_lon
    y0 = (altHAE + n) * c_lat * s_lon
    z0 = (altHAE + (1 - WGS84E) * n) * s_lat

    dx, dy, dz = x - x0, y - y0, z - z0

    # rotate the ECEF delta into the local tangent plane
    E = -s_lon * dx + c_lon * dy
    N = -c_lon * s_lat * dx - s_lat * s_lon * dy + c_lat * dz
    U = c_lon * c_lat * dx + c_lat * s_lon * dy + s_lat * dz

    return E, N, U


# FIXME: needs tests.
def enu2aer(E, N, U):
    """Convert ENU (meters) to azimuth, elevation (degrees) and range (meters).

    Azimuth is measured clockwise from true north, normalized to [0, 360).
    """
    horiz = math.hypot(E, N)
    rng = math.hypot(horiz, U)
    az = math.degrees(math.atan2(E, N) % (math.pi * 2))
    el = math.degrees(math.atan2(U, horiz))
    return az, el, rng


# FIXME: needs tests
def ecef2aer(x, y, z, lat, lon, altHAE):
    """Azimuth, elevation (degrees) and range (meters) from the observer at
    lat/lon (degrees), altHAE (meters) to ECEF point (x, y, z) (meters)."""
    return enu2aer(*ecef2enu(x, y, z, lat, lon, altHAE))
def CalcRad(lat):
    """Radius of curvature in meters at specified latitude WGS-84.

    The radius of curvature of an ellipsoidal Earth in the plane of a
    meridian of latitude is:

        R' = a * (1 - e^2) / (1 - e^2 * (sin(lat))^2)^(3/2)

    where a is the equatorial radius (WGS84A) and e^2 is the squared first
    eccentricity of the ellipsoid (WGS84E = 0.00669437999014132).
    """
    sin_lat = math.sin(math.radians(lat))
    numerator = (WGS84A / 1000) * (1.0 - WGS84E)    # work in km as upstream does
    denominator = pow(1.0 - WGS84E * (sin_lat ** 2), 1.5)
    return (numerator / denominator) * 1000.0       # convert back to meters


def EarthDistance(c1, c2):
    """Vincenty's formula (inverse method): distance in METERS between two
    (lat, lon) points, in degrees, on the WGS-84 spheroid.

    Accurate to about 1mm.  Falls back to EarthDistanceSmall() when the
    iteration fails to converge (nearly-antipodal points).
    """
    (lat1, lon1) = c1
    (lat2, lon2) = c2

    # WGS 84 ellipsoid parameters
    a = 6378137  # equatorial radius, meters
    f = 1 / 298.257223563
    b = 6356752.314245  # polar radius, meters; b = (1 - f)a

    MAX_ITERATIONS = 200
    CONVERGENCE_THRESHOLD = 1e-12

    # short-circuit coincident points
    if lat1 == lat2 and lon1 == lon2:
        return 0.0

    # reduced latitudes
    U1 = math.atan((1 - f) * math.tan(math.radians(lat1)))
    U2 = math.atan((1 - f) * math.tan(math.radians(lat2)))
    # sign of L does not affect the (symmetric) distance
    L = math.radians(lon1 - lon2)
    Lambda = L

    sinU1 = math.sin(U1)
    cosU1 = math.cos(U1)
    sinU2 = math.sin(U2)
    cosU2 = math.cos(U2)

    for _ in range(MAX_ITERATIONS):
        sinLambda = math.sin(Lambda)
        cosLambda = math.cos(Lambda)
        sinSigma = math.sqrt((cosU2 * sinLambda) ** 2 +
                             (cosU1 * sinU2 - sinU1 * cosU2 * cosLambda) ** 2)
        if sinSigma == 0:
            return 0.0  # coincident points
        cosSigma = sinU1 * sinU2 + cosU1 * cosU2 * cosLambda
        sigma = math.atan2(sinSigma, cosSigma)
        sinAlpha = cosU1 * cosU2 * sinLambda / sinSigma
        cosSqAlpha = 1 - sinAlpha ** 2
        try:
            cos2SigmaM = cosSigma - 2 * sinU1 * sinU2 / cosSqAlpha
        except ZeroDivisionError:
            # equatorial line: cosSqAlpha == 0
            cos2SigmaM = 0
        C = f / 16 * cosSqAlpha * (4 + f * (4 - 3 * cosSqAlpha))
        LambdaPrev = Lambda
        Lambda = L + (1 - C) * f * sinAlpha * (sigma + C * sinSigma *
                                               (cos2SigmaM + C * cosSigma *
                                                (-1 + 2 * cos2SigmaM ** 2)))
        if CONVERGENCE_THRESHOLD > abs(Lambda - LambdaPrev):
            break  # successful convergence
    else:
        # failure to converge; fall back to EarthDistanceSmall()
        return EarthDistanceSmall(c1, c2)

    uSq = cosSqAlpha * (a ** 2 - b ** 2) / (b ** 2)
    A = 1 + uSq / 16384 * (4096 + uSq * (-768 + uSq * (320 - 175 * uSq)))
    B = uSq / 1024 * (256 + uSq * (-128 + uSq * (74 - 47 * uSq)))
    deltaSigma = B * sinSigma * (cos2SigmaM + B / 4 * (
        cosSigma * (-1 + 2 * cos2SigmaM ** 2) - B / 6 * cos2SigmaM *
        (-3 + 4 * sinSigma ** 2) * (-3 + 4 * cos2SigmaM ** 2)))
    s = b * A * (sigma - deltaSigma)

    # return meters to 6 decimal places
    return round(s, 6)


def EarthDistanceSmall(c1, c2):
    """Distance in meters between two close points specified in degrees.

    This calculation is known as an Equirectangular Projection; it has
    fewer numeric issues for small angles than other methods.  The main
    use here is for when Vincenty's fails to converge.
    """
    (lat1, lon1) = c1
    (lat2, lon2) = c2
    avglat = (lat1 + lat2) / 2
    phi = math.radians(avglat)  # radians of average latitude
    # meters per degree at this latitude, corrected for the WGS84 ellipsoid.
    # Note the wikipedia numbers are NOT ellipsoid corrected:
    # https://en.wikipedia.org/wiki/Decimal_degrees#Precision
    m_per_d = (111132.954 - 559.822 * math.cos(2 * phi) +
               1.175 * math.cos(4 * phi))
    dlat = (lat1 - lat2) * m_per_d
    dlon = (lon1 - lon2) * m_per_d * math.cos(phi)

    dist = math.sqrt(math.pow(dlat, 2) + math.pow(dlon, 2))
    return dist


def MeterOffset(c1, c2):
    """Return (dx, dy) offset in meters of second (lat, lon) arg from first.

    dx is the east/west component, dy the north/south component; the signs
    are negative when c2 is east/north of c1.
    """
    (lat1, lon1) = c1
    (lat2, lon2) = c2
    dx = EarthDistance((lat1, lon1), (lat1, lon2))
    dy = EarthDistance((lat1, lon1), (lat2, lon1))
    if lat1 < lat2:
        dy = -dy
    if lon1 < lon2:
        dx = -dx
    return (dx, dy)


def isotime(s):
    """Convert timestamps in ISO8601 format to and from Unix time.

    int -> ISO8601 string; float -> ISO8601 string with fractional seconds;
    str -> Unix time as float.  Raises TypeError for any other type.
    """
    if isinstance(s, int):
        return time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(s))

    if isinstance(s, float):
        date = int(s)
        msec = s - date
        date = time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(s))
        # NOTE(review): repr(msec) is "0.<digits>", so [3:] drops the first
        # fractional digit; [2:] looks intended.  Kept as-is because callers
        # may depend on the current output -- confirm against upstream gpsd.
        return date + "." + repr(msec)[3:]

    if isinstance(s, STR_CLASS):
        if "Z" == s[-1]:
            s = s[:-1]
        if "." in s:
            (date, msec) = s.split(".")
        else:
            date = s
            msec = "0"
        # Note: no leap-second correction!
        return calendar.timegm(
            time.strptime(date, "%Y-%m-%dT%H:%M:%S")) + float("0." + msec)

    raise TypeError
def posix2gps(posix, leapseconds):
    """Convert POSIX time in seconds, using leapseconds, to gps time.

    Return (gps_time, gps_week, gps_tow)
    """
    # GPS Epoch starts: Jan 1980 00:00:00 UTC, POSIX/Unix time: 315964800
    GPS_EPOCH_POSIX = 315964800
    # seconds in a GPS week
    SECONDS_PER_WEEK = 604800

    gps_time = (posix - GPS_EPOCH_POSIX) + leapseconds
    gps_week, gps_tow = divmod(gps_time, SECONDS_PER_WEEK)
    return (gps_time, gps_week, gps_tow)
DEBUG = 1
TESTRUN = 0
PROFILE = 0
GPSD_PORT = 2947                 # default gpsd TCP port
EXPORTER_PORT = 9015             # default Prometheus exporter port
DEFAULT_HOST = 'localhost'
DEFAULT_TIMEOUT = 10             # default connection timeout in seconds
DEFAULT_RETRY_DELAY = 10         # default initial retry delay in seconds
DEFAULT_MAX_RETRY_DELAY = 300    # maximum retry delay in seconds (5 minutes)
NSEC = 1000000000
USEC = 1000000
MSEC = 1000


class DepencendyError(Exception):
    """Raised when a required dependency is missing or too old."""


# Monkey patch to handle JSON encoding issues with newer Python versions:
# the 'encoding' keyword of json.JSONDecoder was removed in Python 3.9,
# but older gps client code may still pass it.  Discard it transparently.
try:
    _original_decoder_init = json.JSONDecoder.__init__

    def _decoder_init_without_encoding(self, *args, **kwargs):
        kwargs.pop('encoding', None)  # deprecated/removed in Python 3.9+
        return _original_decoder_init(self, *args, **kwargs)

    json.JSONDecoder.__init__ = _decoder_init_without_encoding
except Exception:
    # If patching fails, continue anyway
    pass
class CLIError(Exception):
    '''Generic exception to raise and log different fatal errors.'''

    def __init__(self, msg):
        # Bug fix: the previous code called super(CLIError).__init__(type(self)),
        # which initializes the *unbound super object* and never initializes
        # Exception itself (so e.args stayed empty).  Pass the formatted
        # message to Exception instead; __str__ output is unchanged.
        self.msg = "E: %s" % msg
        super().__init__(self.msg)

    def __str__(self):
        return self.msg

    def __unicode__(self):  # Python 2 relic, kept for backward compatibility
        return self.msg


class ModuleDepencendyError(ModuleNotFoundError):
    """Fatal error for a missing or too-old module dependency."""

    def __init__(self, msg):
        # Same super() fix as CLIError above.
        self.msg = "E: %s" % msg
        super().__init__(self.msg)

    def __str__(self):
        return self.msg

    def __unicode__(self):  # Python 2 relic, kept for backward compatibility
        return self.msg


# Per-satellite measurements queued by add_sat_stats() (from SKY reports)
# and drained by SatCollector.collect() on each Prometheus scrape.
sat_queue = queue.Queue()
143 | 144 | Version: %s 145 | 146 | Version Date: %s 147 | 148 | Usage: 149 | ''' % (program_shortdesc, str(__date__), program_version, program_build_date) 150 | 151 | try: 152 | # Setup argument parser 153 | parser = ArgumentParser(description=program_license, formatter_class=RawDescriptionHelpFormatter) 154 | parser.add_argument("-v", "--verbose", dest="verbose", action="count", 155 | help="set verbosity level [default: %(default)s]") 156 | parser.add_argument('-V', '--version', action='version', version=program_version_message) 157 | parser.add_argument('-d', '--debug', action='count', default=0, dest="debug", 158 | help="set debug level [default: %(default)s]") 159 | 160 | parser.add_argument('-p', '--port', type=int, dest="port", default=GPSD_PORT, 161 | help="set gpsd TCP Port number [default: %(default)s]") 162 | parser.add_argument('-H', '--hostname', dest="hostname", default=DEFAULT_HOST, 163 | help="set gpsd TCP Hostname/IP address [default: %(default)s]") 164 | parser.add_argument('-E', '--exporter-port', type=int, dest="exporter_port", default=EXPORTER_PORT, 165 | help="set TCP Port for the exporter server [default: %(default)s]") 166 | 167 | parser.add_argument('-t', '--timeout', type=int, dest="timeout", default=DEFAULT_TIMEOUT, 168 | help="set connection timeout in seconds [default: %(default)s]") 169 | 170 | parser.add_argument('--retry-delay', type=int, dest="retry_delay", default=DEFAULT_RETRY_DELAY, 171 | help="initial retry delay in seconds [default: %(default)s]") 172 | parser.add_argument('--max-retry-delay', type=int, dest="max_retry_delay", default=DEFAULT_MAX_RETRY_DELAY, 173 | help="maximum retry delay in seconds [default: %(default)s]") 174 | 175 | parser.add_argument('-S', '--disable-monitor-satellites', dest="mon_satellites", 176 | default=True, action="store_false", 177 | help="Stops monitoring all satellites individually") 178 | 179 | parser.add_argument('--offset-from-geopoint', action="store_true", dest="geo_offset", 180 | 
default=False, help="track offset (x,y offset and distance) from a stationary location.") 181 | parser.add_argument('--geopoint-lat', dest="geo_lat",type=float, 182 | default=False, help="Latitude of a fixed stationary location.") 183 | parser.add_argument('--geopoint-lon', dest="geo_lon", type=float, 184 | default=False, help="Longitude of a fixed stationary location.") 185 | 186 | parser.add_argument('--geo-bucket-size', dest="geo_bucket_size", default=0.5, type=float, 187 | help="Bucket side of Geo histogram [default: %(default)s meter] ") 188 | parser.add_argument('--geo-bucket-count', dest="geo_bucket_count", default=40, type=int, 189 | help="Bucket count of Geo histogram [default: %(default)s]") 190 | 191 | ## pps 192 | parser.add_argument('--pps-histogram', action="store_true", dest="pps", default=False, 193 | help="generate histogram data from pps devices.") 194 | parser.add_argument('--pps-bucket-size', dest="pps_bucket_size", default=250, type=int, 195 | help="Bucket side of PPS histogram [default: %(default)s ns] (nano seconds)") 196 | parser.add_argument('--pps-bucket-count', dest="pps_bucket_count", default=40, type=int, 197 | help="Bucket count of PPS histogram [default: %(default)s]") 198 | parser.add_argument('--pps-time1', dest="pps_time1", default=0, type=float, 199 | help="Local pps clock (offset) time1 (ntp.conf) [default: %(default)s]") 200 | 201 | # Process arguments 202 | args = parser.parse_args() 203 | 204 | verbose = args.verbose 205 | debug = args.debug 206 | 207 | if (debug > 0): 208 | logging.basicConfig(format='DEBUG %(funcName)s(%(lineno)s): %(message)s', 209 | stream=sys.stderr, level=logging.DEBUG) 210 | elif (verbose): 211 | logging.basicConfig(format=program_name + ': %(message)s', stream=sys.stderr, level=logging.INFO) 212 | else: 213 | logging.basicConfig(format=program_name + ': %(message)s', stream=sys.stderr, level=logging.WARN) 214 | 215 | log.info('started') 216 | 217 | metrics = init_metrics(args) 218 | 219 | 
start_http_server(args.exporter_port, registry=metrics['registry']) 220 | 221 | retry_count = 0 222 | current_delay = args.retry_delay 223 | 224 | while True: 225 | try: 226 | loop_connection(metrics, args) 227 | # If we get here, connection was successful, reset retry count 228 | retry_count = 0 229 | current_delay = args.retry_delay 230 | 231 | except (KeyboardInterrupt): 232 | print ("Applications closed!") 233 | return(0) 234 | except (StopIteration, ConnectionRefusedError, socket.timeout, ConnectionError, OSError) as e: 235 | retry_count += 1 236 | log.error(f'Connection to gpsd failed (attempt {retry_count}): {e}') 237 | 238 | print(f'WARNING: Connection failed (attempt {retry_count}), retrying in {current_delay}s...') 239 | print(f'Connection error: {e}') 240 | print(f'Error type: {type(e).__name__}') 241 | 242 | time.sleep(current_delay) 243 | 244 | # Exponential backoff with maximum delay 245 | current_delay = min(current_delay * 2, args.max_retry_delay) 246 | 247 | except Exception as e: 248 | log.error(f'Unexpected error in main loop: {e}') 249 | print(f'ERROR: Unexpected error: {e}') 250 | return(1) 251 | 252 | 253 | return 0 254 | except KeyboardInterrupt: 255 | ### handle keyboard interrupt ### 256 | return 0 257 | except Exception as e: 258 | if DEBUG or TESTRUN: 259 | raise(e) 260 | indent = len(program_name) * " " 261 | sys.stderr.write(program_name + ": " + repr(e) + "\n") 262 | sys.stderr.write(indent + " for help use --help") 263 | return 2 264 | 265 | 266 | def init_metrics(args): 267 | 268 | metrics = {} 269 | registry = CollectorRegistry() 270 | 271 | """ register the Satellite collector who takes the """ 272 | registry.register(SatCollector()) 273 | 274 | metrics['SKY'] = { 275 | 'gdop': Gauge('gpsd_gdop', 'Geometric (hyperspherical) dilution of precision', registry=registry), 276 | 'hdop': Gauge('gpsd_hdop', 'Horizontal dilution of precision', registry=registry), 277 | 'pdop': Gauge('gpsd_pdop', 'Position (spherical/3D) dilution of 
precision', registry=registry), 278 | 'tdop': Gauge('gpsd_tdop', 'Time dilution of precision', registry=registry), 279 | 'vdop': Gauge('gpsd_vdop', 'Vertical (altitude) dilution of precision', registry=registry), 280 | 'ydop': Gauge('gpsd_ydop', 'Longitudinal dilution of precision', registry=registry), 281 | 'xdop': Gauge('gpsd_xdop', 'Latitudinal dilution of precision', registry=registry), 282 | 'nSat': Gauge('gpsd_nSat', 'Number of satellite objects in "satellites" array', registry=registry), 283 | 'uSat': Gauge('gpsd_uSat', 'Number of satellites used in navigation solution.', registry=registry), 284 | } 285 | 286 | metrics['TPV'] = { 287 | 'lat': Gauge('gpsd_lat', 'Latitude in degrees: +/- signifies North/South.', registry=registry), 288 | 'lon': Gauge('gpsd_long', 'Longitude in degrees: +/- signifies East/West.', registry=registry), 289 | 'altHAE': Gauge('gpsd_altHAE', 'Altitude, height above allipsoid, in meters. Probably WGS84.', 290 | registry=registry), 291 | 'altMSL': Gauge('gpsd_altMSL', 'MSL Altitude in meters. The geoid used is rarely specified and is often inaccurate.' , registry=registry), 292 | 'mode': Gauge('gpsd_mode', 'NMEA mode: %d, 0=no mode value yet seen, 1=no fix, 2=2D, 3=3D.' , 293 | registry=registry), 294 | 'status': Gauge('gpsd_status', 'GPS fix status: %d, 2=DGPS fix, 3=RTK Fixed point, 4=RTK Floating point, 5=DR fix, 6=GNSSDR' + 295 | 'fix, 7=Time (surveyed) fix, 8=Simulated, 9=P(Y) fix, otherwise not present. ' , 296 | registry=registry), 297 | 'leapseconds': Gauge('gpsd_leapseconds', 'Current leap seconds.' , registry=registry), 298 | 'magvar': Gauge('gpsd_magvar', 'Magnetic variation, degrees.' , registry=registry), 299 | 'ept': Gauge('gpsd_ept', 'Estimated timestamp error in seconds.' , registry=registry), 300 | 'epx': Gauge('gpsd_epx', 'Longitude error estimate in meters.' , registry=registry), 301 | 'epy': Gauge('gpsd_epy', 'Latitude error estimate in meters.' 
, registry=registry), 302 | 'epv': Gauge('gpsd_epv', 'Estimated vertical error in meters.' , registry=registry), 303 | 'eps': Gauge('gpsd_eps', 'Estimated speed error in meters per second.' , registry=registry), 304 | 'epc': Gauge('gpsd_epc', 'Estimated climb error in meters per second.' , registry=registry), 305 | 'geoidSep': Gauge('gpsd_geoidSep', 'Geoid separation is the difference between the WGS84 reference ellipsoid and the geoid (Mean Sea Level) in meters. ' , 306 | registry=registry), 307 | 'eph': Gauge('gpsd_eph', 'Estimated horizontal Position (2D) Error in meters. Also known as Estimated Position Error (epe).' , 308 | registry=registry), 309 | 'sep': Gauge('gpsd_sep', 'Estimated Spherical (3D) Position Error in meters.' , registry=registry), 310 | 'ecefx': Gauge('gpsd_ecefx', 'ECEF X position in meters.' , registry=registry), 311 | 'ecefy': Gauge('gpsd_ecefy', 'ECEF Y position in meters.' , registry=registry), 312 | 'ecefz': Gauge('gpsd_ecefz', 'ECEF Z position in meters.' , registry=registry), 313 | 'ecefvx': Gauge('gpsd_ecefvx', 'ECEF X velocity in meters per second.' , registry=registry), 314 | 'ecefvy': Gauge('gpsd_ecefvy', 'ECEF Y velocity in meters per second.' , registry=registry), 315 | 'ecefvz': Gauge('gpsd_ecefvz', 'ECEF Z velocity in meters per second.' , registry=registry), 316 | 'ecefpAcc': Gauge('gpsd_ecefpAcc', 'ECEF position error in meters. Certainty unknown.' , 317 | registry=registry), 318 | 'velN': Gauge('gpsd_velN', 'North velocity component in meters.' , registry=registry), 319 | 'velE': Gauge('gpsd_velE', 'East velocity component in meters.' , registry=registry), 320 | 'velD': Gauge('gpsd_velD', 'Down velocity component in meters.' , registry=registry), 321 | } 322 | 323 | metrics['USED'] = Gauge('gpsd_sat_used', 'Used in current solution? ', registry=registry) 324 | metrics['SEEN'] = Gauge('gpsd_sat_seen', 'Seen in current solution? 
', registry=registry) 325 | metrics['VERSION'] = Info('gpsd_version', 'Version Details', registry=registry) 326 | metrics['DEVICES'] = Info('gpsd_devices', 'Device Details', ['device'], registry=registry) 327 | metrics['SAT_STATUS'] = {} 328 | 329 | if (args.pps): 330 | PPS_BUCKETS = [] 331 | PPS_BUCKETS.append(float("-inf")) 332 | [ PPS_BUCKETS.append(i * args.pps_bucket_size) for i in range(int(args.pps_bucket_count / -2), int(args.pps_bucket_count / 2) + 1)] 333 | PPS_BUCKETS.append(float("inf")) 334 | metrics['PPS_HIS'] = Histogram('gpsd_pps_histogram', 'PPS Histogram', ['device'], buckets=PPS_BUCKETS, registry=registry) 335 | 336 | if args.geo_offset: 337 | GEO_BUCKETS_OFFSET = [] 338 | [ GEO_BUCKETS_OFFSET.append(i * args.geo_bucket_size) for i in range(1,args.geo_bucket_count)] 339 | GEO_BUCKETS_OFFSET.append(float("inf")) 340 | 341 | GEO_BUCKETS_YX = [] 342 | GEO_BUCKETS_YX.append(float("-inf")) 343 | [ GEO_BUCKETS_YX.append(i * args.geo_bucket_size) 344 | for i in range(int (args.geo_bucket_count / -2),int(args.geo_bucket_count / 2) + 1)] 345 | GEO_BUCKETS_YX.append(float("inf")) 346 | 347 | metrics['GEO_OFFSET'] = Histogram('gpsd_geo_offset_m_histogram', 'Geo offset Histogram (meters)', 348 | buckets=GEO_BUCKETS_OFFSET, registry=registry) 349 | metrics['GEO_OFFSET_Y'] = Histogram('gpsd_geo_bearing_x_histogram', 350 | 'Y offset in meters from static geo point', 351 | buckets=GEO_BUCKETS_YX, registry=registry) 352 | metrics['GEO_OFFSET_X'] = Histogram('gpsd_geo_bearing_y_histogram', 353 | 'X offset in meters from static geo point', 354 | buckets=GEO_BUCKETS_YX, registry=registry) 355 | 356 | 357 | metrics['registry'] = registry 358 | 359 | return(metrics) 360 | 361 | 362 | def getPositionData(gpsd, metrics, args): 363 | try: 364 | # Check if gpsd object is still valid 365 | if not gpsd or not hasattr(gpsd, 'next'): 366 | log.error("GPSD connection object is invalid") 367 | raise ConnectionError("GPSD connection object is invalid") 368 | 369 | nx = 
gpsd.next() 370 | except KeyError as e: 371 | # Handle missing satellite data fields (like 'az', 'el', etc.) 372 | log.warning(f"GPSD reported incomplete satellite data: {e}") 373 | return 374 | except (ConnectionError, OSError, socket.error, BrokenPipeError, ConnectionResetError) as e: 375 | # Handle connection errors - re-raise to trigger retry 376 | log.error(f"Connection error reading from GPSD: {type(e).__name__}: {e}") 377 | raise 378 | except Exception as e: 379 | # Handle other GPSD connection or data parsing errors 380 | log.error(f"Error reading from GPSD: {type(e).__name__}: {e}") 381 | # For any other exception, assume it might be connection-related and re-raise 382 | # This is more aggressive but prevents the endless loop 383 | log.error(f"Re-raising unexpected error as connection error: {type(e).__name__}: {e}") 384 | raise ConnectionError(f"GPSD read error: {type(e).__name__}: {e}") 385 | 386 | # For a list of all supported classes and fields refer to: 387 | # https://gpsd.gitlab.io/gpsd/gpsd_json.html 388 | 389 | if (args.debug > 1): log.debug(f'recieved {nx["class"]}: {nx}') 390 | 391 | if nx['class'] == 'VERSION': 392 | 393 | metrics['VERSION'].info({'release': nx['release'], 394 | 'rev': nx['rev'], 395 | 'proto_major': str(nx['proto_major']), 396 | 'proto_minor': str(nx['proto_minor']), 397 | }) 398 | elif nx['class'] == 'PPS': 399 | # PPSSUMMARY.observe(nx['clock_nsec']) 400 | 401 | if args.pps: 402 | 403 | args.pps_time1 404 | corr = args.pps_time1 * NSEC 405 | value = nx['clock_nsec'] - corr 406 | 407 | if (value > (NSEC/2)): 408 | value = value - NSEC 409 | 410 | log.debug (f"PPS offset {nx['clock_nsec']} -> {value}") 411 | log.debug(nx) 412 | 413 | metrics['PPS_HIS'].labels(nx['device']).observe(value) 414 | 415 | elif nx['class'] == 'DEVICES': 416 | for device in nx['devices']: 417 | log.debug(device) 418 | 419 | metrics['DEVICES'].labels(device['path']).info( 420 | { 421 | 'driver': device['driver'] if 'driver' in device else "Unknown", 
422 | 'subtype': device['subtype'] if 'subtype' in device else "Unknown", 423 | 'subtype1': device['subtype1'] if 'subtype1' in device else "Unknown", 424 | 'activated': device['activated'] if 'activated' in device else "Unknown", 425 | 'flags': str(device['flags']) if 'flags' in device else "Unknown", 426 | 'native': str(device['native']) if 'native' in device else "Unknown", 427 | 'bps': str(device['bps']) if 'bps' in device else "Unknown", 428 | 'parity': str(device['parity']) if 'parity' in device else "Unknown", 429 | 'stopbits': str(device['stopbits']) if 'stopbits' in device else "Unknown", 430 | 'cycle': str(device['cycle']) if 'cycle' in device else "Unknown", 431 | 'mincycle': str(device['mincycle']) if 'mincycle' in device else "Unknown", 432 | }) 433 | 434 | elif nx['class'] == 'SKY': 435 | 436 | """process the list of satellites """ 437 | satellites = nx.get('satellites') 438 | if satellites is None: 439 | log.debug (f'no satellites in SKY') 440 | log.debug (nx) 441 | return 442 | 443 | metrics['SEEN'].set(0) 444 | metrics['USED'].set(0) 445 | 446 | for sat in satellites: 447 | metrics['SEEN'].inc() 448 | if sat['used']: 449 | metrics['USED'].inc() 450 | 451 | if args.mon_satellites: 452 | add_sat_stats(satellites) 453 | 454 | """process the dop metrics """ 455 | for key in metrics['SKY'].keys(): 456 | if (hasattr(nx, key)): 457 | 458 | value = getattr(nx, key, -1) 459 | metrics['SKY'][key].set(getattr(nx, key, -1)) 460 | if (args.debug > 2): log.debug (f'set {key} to {value}') 461 | 462 | 463 | elif nx['class'] == 'TPV': 464 | for key in metrics['TPV'].keys(): 465 | if (hasattr(nx, key)): 466 | value = getattr(nx, key, -1) 467 | metrics['TPV'][key].set(value) 468 | if (args.debug > 2): log.debug (f'set {key} to {value}') 469 | 470 | if args.geo_offset: 471 | if (hasattr(nx, 'lat') and hasattr(nx, 'lon') ): 472 | offset = MeterOffsetSmall((args.geo_lat, args.geo_lon), (nx['lat'], nx['lon'])) 473 | distance = gps.misc.EarthDistanceSmall((nx['lat'], 
def MeterOffsetSmall(c1, c2):
    "Return offset in meters of second arg from first."
    (lat_a, lon_a) = c1
    (lat_b, lon_b) = c2
    # east/west and north/south components via the equirectangular helper
    east = gps.misc.EarthDistanceSmall((lat_a, lon_a), (lat_a, lon_b))
    north = gps.misc.EarthDistanceSmall((lat_a, lon_a), (lat_b, lon_a))
    if lat_a < lat_b:
        north = -north
    if lon_a < lon_b:
        east = -east
    return (east, north)


def drop_privileges(uid_name='nobody', gid_name='nogroup'):
    """If running as root, drop to uid_name/gid_name and tighten the umask."""
    if os.getuid() != 0:
        # not root, nothing to drop
        return

    # resolve target uid/gid from their names
    target_uid = pwd.getpwnam(uid_name).pw_uid
    target_gid = grp.getgrnam(gid_name).gr_gid

    os.setgroups([])        # remove supplementary group privileges
    os.setgid(target_gid)   # group first, while we still can
    os.setuid(target_uid)
    os.umask(0o077)         # ensure a very conservative umask


def loop_connection(metrics, args):
    """Connect to gpsd and pump reports into the metrics until an error.

    Connection failures are re-raised as ConnectionRefusedError so the
    retry loop in main() can back off and reconnect.
    """
    try:
        # applies to the socket gps.gps() opens below
        socket.setdefaulttimeout(args.timeout)

        log.info(f'Attempting to connect to gpsd at {args.hostname}:{args.port} with {args.timeout}s timeout')
        gpsd = gps.gps(host=args.hostname, port=args.port, verbose=1,
                       mode=gps.WATCH_ENABLE | gps.WATCH_NEWSTYLE | gps.WATCH_SCALED)
        drop_privileges()

        if not gpsd:
            log.critical(f'Could not connect to gpsd at {args.hostname}:{args.port}')
            raise ConnectionRefusedError(f'Failed to establish connection to gpsd at {args.hostname}:{args.port}')
    except socket.timeout:
        log.critical(f'Connection to gpsd at {args.hostname}:{args.port} timed out after {args.timeout}s')
        raise ConnectionRefusedError(f'Connection timeout after {args.timeout}s')
    except ConnectionRefusedError:
        # already the right type; let main() handle the retry
        raise
    except Exception as e:
        log.critical(f'Unexpected error connecting to gpsd: {e}')
        raise ConnectionRefusedError(f'Failed to connect to gpsd: {e}')

    while True:
        try:
            getPositionData(gpsd, metrics, args)
        except KeyboardInterrupt:
            log.info("Received keyboard interrupt, shutting down...")
            raise
        except (ConnectionError, OSError, socket.error, BrokenPipeError, ConnectionResetError) as e:
            # re-raise connection errors to trigger retry in main loop
            log.error(f"Connection error in data loop: {type(e).__name__}: {e}")
            raise
        except Exception as e:
            log.error(f"Unexpected error in main loop: {type(e).__name__}: {e}")
            # keep the exporter alive on non-connection errors
            continue
0 is unknown, 1 is OK, and 2 is unhealthy', labels=['PRN', 'svid', 'gnssid', 'used']) 575 | } 576 | 577 | log.debug(f'SatCollector::collect started ') 578 | last_measurement = {} 579 | 580 | while not sat_queue.empty(): 581 | try: 582 | measurement = sat_queue.get() 583 | log.debug(f'measurement:: {measurement}') 584 | 585 | sat = measurement['sat'] 586 | ts = measurement['ts'] 587 | 588 | last_measurement[sat['PRN']] = sat 589 | except KeyError as e: 590 | # Handle missing satellite data fields 591 | log.warning(f"Skipping satellite measurement due to missing field: {e}") 592 | continue 593 | except Exception as e: 594 | # Handle other satellite measurement processing errors 595 | log.error(f"Error processing satellite measurement: {e}") 596 | continue 597 | 598 | 599 | log.debug(f'last_measurement {last_measurement}') 600 | 601 | for sat in last_measurement.keys(): 602 | log.debug(f'sat:: {last_measurement[sat]}') 603 | try: 604 | for key in metrics.keys(): 605 | sat_dict = last_measurement[sat] 606 | if key in sat_dict.keys(): 607 | metrics[key].add_metric([str(sat_dict['PRN']), str(sat_dict['svid']), str(sat_dict['gnssid']), str(sat_dict['used'])], sat_dict[key]) 608 | except KeyError as e: 609 | # Handle missing satellite data fields 610 | log.warning(f"Skipping satellite metrics due to missing field: {e}") 611 | continue 612 | except Exception as e: 613 | # Handle other satellite metrics processing errors 614 | log.error(f"Error processing satellite metrics: {e}") 615 | continue 616 | 617 | 618 | for key in metrics: 619 | yield metrics[key] 620 | 621 | 622 | def add_sat_stats(satellites): 623 | 624 | for sat in satellites: 625 | try: 626 | ts = time.time() 627 | ts_new = int(ts) 628 | 629 | # print (f'ts {ts} ts_new {ts_new}' ) 630 | 631 | sat_queue.put({'sat': sat, 'ts': ts}) 632 | except KeyError as e: 633 | # Handle missing satellite data fields 634 | log.warning(f"Skipping satellite with missing data field: {e}") 635 | continue 636 | except Exception as 
e: 637 | # Handle other satellite data processing errors 638 | log.error(f"Error processing satellite data: {e}") 639 | continue 640 | 641 | """ Keep the queue managable. """ 642 | 643 | q_size = sat_queue.qsize() 644 | log.debug(f'Queue size = {q_size} items.') 645 | while sat_queue.qsize() > 2000: 646 | sat_queue.get() 647 | 648 | q_size_end = sat_queue.qsize() 649 | log.debug(f'Current satellite queue size = {q_size_end}, pruned {q_size - q_size_end}') 650 | 651 | if __name__ == "__main__": 652 | 653 | sys.exit(main()) 654 | 655 | -------------------------------------------------------------------------------- /gps/gps.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # This code is generated by scons. Do not hand-hack it! 4 | """ 5 | gps.py -- Python interface to GPSD. 6 | 7 | This interface has a lot of historical cruft in it related to old 8 | protocol, and was modeled on the C interface. It won't be thrown 9 | away, but it's likely to be deprecated in favor of something more 10 | Pythonic. 11 | 12 | The JSON parts of this (which will be reused by any new interface) 13 | now live in a different module. 14 | """ 15 | 16 | # 17 | # This file is Copyright 2010 by the GPSD project 18 | # SPDX-License-Identifier: BSD-2-Clause 19 | # 20 | 21 | # This code runs compatibly under Python 2 and 3.x for x >= 2. 22 | # Preserve this property! 
from __future__ import absolute_import, print_function, division
import argparse
import binascii  # for binascii.hexlify()
import os  # for file handling in gps_io
import socket  # for socket.error
import stat  # for stat.S_ISBLK()
import sys  # to get script name for nice error/warning messages

from .misc import monotonic, polybytes
from .client import *
from .watch_options import *


# Sometimes used in gps_io if a serial device is specified
try:
    import serial
except ImportError:
    serial = None  # Defer complaining until we know we need it.

# Canonical not-a-number; used throughout as the "unknown" value for
# numeric fix fields.
NaN = float('nan')

# Verbosity levels accepted by gps()/gps_io(); higher is noisier.
VERB_QUIET = 0  # quiet
VERB_NONE = 1  # just output requested data and some info
VERB_DECODE = 2  # decode all messages
VERB_INFO = 3  # more info
VERB_RAW = 4  # raw info
VERB_PROG = 5  # program trace

# GNSS constellation identifiers.  Presumably the u-blox gnssId numbering
# (the IRNSS entry notes the ZED-F9T mapping) -- confirm against gpsd docs.
GNSSID_GPS = 0
GNSSID_SBAS = 1
GNSSID_GAL = 2
GNSSID_BD = 3
GNSSID_IMES = 4
GNSSID_QZSS = 5
GNSSID_GLO = 6
GNSSID_IRNSS = 7  # ZED-F9T mapping


def isfinite(f):
    """Check if f is finite (neither NaN nor +/-Inf)."""
    # Python 2 does not think +Inf or -Inf are NaN
    # Python 2 has no easier way to test for Inf
    return float('-inf') < float(f) < float('inf')


# Bitmask flags OR'ed into gpsdata.valid as report fields arrive.
# Not required but a good idea if these match the list in include/gps.h
ONLINE_SET = (1 << 1)
TIME_SET = (1 << 2)
TIMERR_SET = (1 << 3)
LATLON_SET = (1 << 4)
ALTITUDE_SET = (1 << 5)
SPEED_SET = (1 << 6)
TRACK_SET = (1 << 7)
CLIMB_SET = (1 << 8)
STATUS_SET = (1 << 9)
MODE_SET = (1 << 10)
DOP_SET = (1 << 11)
HERR_SET = (1 << 12)
VERR_SET = (1 << 13)
ATTITUDE_SET = (1 << 14)
SATELLITE_SET = (1 << 15)
SPEEDERR_SET = (1 << 16)
TRACKERR_SET = (1 << 17)
CLIMBERR_SET = (1 << 18)
DEVICE_SET = (1 << 19)
DEVICELIST_SET = (1 << 20)
DEVICEID_SET = (1 << 21)
RTCM2_SET = (1 << 22)
| RTCM3_SET = (1 << 23) 92 | AIS_SET = (1 << 24) 93 | PACKET_SET = (1 << 25) 94 | SUBFRAME_SET = (1 << 26) 95 | GST_SET = (1 << 27) 96 | VERSION_SET = (1 << 28) 97 | POLICY_SET = (1 << 29) 98 | LOGMESSAGE_SET = (1 << 30) 99 | ERROR_SET = (1 << 31) 100 | TOFF_SET = (1 << 32) 101 | PPS_SET = (1 << 33) 102 | NAVDATA_SET = (1 << 34) 103 | OSCILLATOR_SET = (1 << 35) 104 | ECEF_SET = (1 << 36) 105 | VECEF_SET = (1 << 37) 106 | MAGNETIC_TRACK_SET = (1 << 38) 107 | RAW_SET = (1 << 39) 108 | NED_SET = (1 << 40) 109 | VNED_SET = (1 << 41) 110 | LOG_SET = (1 << 42) 111 | IMU_SET = (1 << 43) 112 | EOF_SET = (1 << 44) 113 | SET_HIGH_BIT = 44 114 | UNION_SET = (RTCM2_SET | RTCM3_SET | SUBFRAME_SET | AIS_SET | VERSION_SET | 115 | DEVICELIST_SET | ERROR_SET | GST_SET) 116 | STATUS_UNK = 0 117 | STATUS_GPS = 1 118 | STATUS_DGPS = 2 119 | STATUS_RTK_FIX = 3 120 | STATUS_RTK_FLT = 4 121 | STATUS_DR = 5 122 | STATUS_GNSSDR = 6 123 | STATUS_TIME = 7 124 | STATUS_SIM = 8 125 | STATUS_PPS_FIX = 9 126 | MODE_NO_FIX = 1 127 | MODE_2D = 2 128 | MODE_3D = 3 129 | MAXCHANNELS = 72 # Copied from gps.h, but not required to match 130 | SIGNAL_STRENGTH_UNKNOWN = NaN 131 | 132 | 133 | class gpsfix(object): 134 | """Class to hold one GPS fix.""" 135 | 136 | def __init__(self): 137 | """Init class gpsfix.""" 138 | self.altitude = NaN # Meters DEPRECATED 139 | self.altHAE = NaN # Meters 140 | self.altMSL = NaN # Meters 141 | self.climb = NaN # Meters per second 142 | self.datum = "" 143 | self.dgpsAge = -1 144 | self.dgpsSta = "" 145 | self.depth = NaN 146 | self.device = "" 147 | self.ecefx = NaN 148 | self.ecefy = NaN 149 | self.ecefz = NaN 150 | self.ecefvx = NaN 151 | self.ecefvy = NaN 152 | self.ecefvz = NaN 153 | self.ecefpAcc = NaN 154 | self.ecefvAcc = NaN 155 | self.epc = NaN 156 | self.epd = NaN 157 | self.eph = NaN 158 | self.eps = NaN 159 | self.ept = NaN 160 | self.epv = NaN 161 | self.epx = NaN 162 | self.epy = NaN 163 | self.geoidSep = NaN # Meters 164 | self.latitude = self.longitude 
= 0.0 165 | self.magtrack = NaN 166 | self.magvar = NaN 167 | self.mode = MODE_NO_FIX 168 | self.relN = NaN 169 | self.relE = NaN 170 | self.relD = NaN 171 | self.sep = NaN # a.k.a. epe 172 | self.speed = NaN # Knots 173 | self.status = STATUS_UNK 174 | self.time = NaN 175 | self.track = NaN # Degrees from true north 176 | self.velN = NaN 177 | self.velE = NaN 178 | self.velD = NaN 179 | 180 | 181 | class gps_io(object): 182 | """All the GPS I/O in one place. 183 | 184 | Three types of GPS I/O 185 | 1. read only from a file 186 | 2. read/write through a device 187 | 3. read only from a gpsd instance 188 | """ 189 | 190 | out = b'' 191 | ser = None 192 | input_is_device = False 193 | 194 | def __init__(self, input_file_name=None, read_only=False, 195 | gpsd_host='localhost', gpsd_port=2947, 196 | gpsd_device=None, 197 | input_speed=9600, 198 | verbosity_level=0, 199 | write_requested=True): 200 | """Initialize class. 201 | 202 | Arguments: 203 | input_file_name: Name of a device/file to open - None if connection to 204 | gpsd via network 205 | read_only: request a read only access (will be set automagically when 206 | a file is used for input) 207 | gpsd_host: hostname of host running the gpsd 208 | gpsd_port: port of [hostname] running the gpsd 209 | gpsd_device: Specify a dedicated device for the gpsd - None for auto 210 | input_speed: If input_file_name is a (serial) device this specifies 211 | the speed in baud 212 | verbosity_level: Specify the verbosity level (0..5) 213 | write_requested: Set to true if a write operation shall be executed 214 | (used for internal sanity checking) 215 | """ 216 | 217 | self.gpsd_device = gpsd_device 218 | # Used as an indicator in read if a device, file or network connection 219 | # is used 220 | self.gpsd_host = gpsd_host 221 | self.gpsd_port = gpsd_port 222 | # required by write for packet construction 223 | self.gpsd_device = gpsd_device 224 | # used in read to print meaningfull error 225 | self.input_file_name = 
input_file_name 226 | self.verbosity_level = verbosity_level 227 | self.prog_name = os.path.basename(sys.argv[0]) 228 | Serial = serial 229 | Serial_v3 = Serial and '3' <= Serial.VERSION.split('.')[0] 230 | # buffer to hold read data 231 | self.out = b'' 232 | 233 | if VERB_PROG <= verbosity_level: 234 | print('gps_io(gpsd_device=%s gpsd_host=%s gpsd_port=%s\n' 235 | ' input_file_name=%s input_speed=%s read_only=%s\n' 236 | ' verbosity_level=%s write_requested=%s)' % 237 | (gpsd_device, gpsd_host, gpsd_port, 238 | input_file_name, input_speed, read_only, 239 | verbosity_level, write_requested)) 240 | 241 | # open the input: device, file, or gpsd 242 | if input_file_name is not None: 243 | # check if input file is a file or device 244 | try: 245 | mode = os.stat(input_file_name).st_mode 246 | except OSError: 247 | sys.stderr.write('%s: failed to open input file %s\n' % 248 | (self.prog_name, input_file_name)) 249 | sys.exit(1) 250 | 251 | if stat.S_ISCHR(mode): 252 | # character device, need not be read only 253 | self.input_is_device = True 254 | 255 | # FIXME: test broken 256 | if write_requested: 257 | # check for inconsistend arguments 258 | if read_only: 259 | sys.stderr.write('%s: read-only mode, ' 260 | 'can not send commands\n' % 261 | self.prog_name) 262 | sys.exit(1) 263 | # check if a file instead of device was specified 264 | if self.input_is_device is False: 265 | sys.stderr.write('%s: input is plain file, ' 266 | 'can not send commands\n' % 267 | self.prog_name) 268 | sys.exit(1) 269 | 270 | else: 271 | # try to open local/remote gpsd daemon over tcp 272 | if not self.gpsd_host: 273 | self.gpsd_host = 'localhost' 274 | try: 275 | self.ser = gpscommon(host=self.gpsd_host, 276 | input_file_name=input_file_name, 277 | port=self.gpsd_port, 278 | verbose=self.verbosity_level) 279 | 280 | # alias self.ser.write() to self.write_gpsd() 281 | self.ser.write = self.write_gpsd 282 | 283 | # ask for raw, not rare, data 284 | data_out = b'?WATCH={' 285 | if 
gpsd_device is not None: 286 | # add in the requested device 287 | data_out += (b'"device":"' + 288 | polybytes(gpsd_device) + 289 | b'",') 290 | data_out += b'"enable":true,"raw":2}\r\n' 291 | if VERB_RAW <= verbosity_level: 292 | print("sent: ", data_out) 293 | self.ser.send(data_out) 294 | except socket.error as err: 295 | sys.stderr.write('%s: failed to connect to gpsd %s\n' % 296 | (self.prog_name, err)) 297 | sys.exit(1) 298 | return 299 | 300 | if self.input_is_device: 301 | # configure the serial connections (the parameters refer to 302 | # the device you are connecting to) 303 | 304 | # pyserial Ver 3.0+ changes writeTimeout to write_timeout 305 | # Using the wrong one causes an error 306 | write_timeout_arg = ('write_timeout' 307 | if Serial_v3 else 'writeTimeout') 308 | try: 309 | self.ser = Serial.Serial( 310 | baudrate=input_speed, 311 | # 8N1 is UBX default 312 | bytesize=Serial.EIGHTBITS, 313 | parity=Serial.PARITY_NONE, 314 | port=input_file_name, 315 | stopbits=Serial.STOPBITS_ONE, 316 | # read timeout 317 | timeout=0.05, 318 | **{write_timeout_arg: 0.5} 319 | ) 320 | except AttributeError: 321 | sys.stderr.write('%s: failed to import pyserial\n' % 322 | self.prog_name) 323 | sys.exit(2) 324 | except Serial.serialutil.SerialException: 325 | # this exception happens on bad serial port device name 326 | sys.stderr.write('%s: failed to open serial port "%s"\n' 327 | '%s: Your computer has the serial ports:\n' % 328 | (self.prog_name, input_file_name, 329 | self.prog_name)) 330 | 331 | # print out list of supported ports 332 | # FIXME: bad location for an import 333 | import serial.tools.list_ports as List_Ports 334 | ports = List_Ports.comports() 335 | for port in ports: 336 | sys.stderr.write(" %s: %s\n" % 337 | (port.device, port.description)) 338 | sys.exit(1) 339 | 340 | # flush input buffer, discarding all its contents 341 | # pyserial 3.0+ deprecates flushInput() in favor of 342 | # reset_input_buffer(), but flushInput() is still present. 
343 | self.ser.flushInput() 344 | 345 | elif input_file_name is not None: 346 | # Read from a plain file of UBX messages 347 | try: 348 | self.ser = open(input_file_name, 'rb') 349 | except IOError: 350 | sys.stderr.write('%s: failed to open input %s\n' % 351 | (self.prog_name, input_file_name)) 352 | sys.exit(1) 353 | 354 | def read(self, decode_func, 355 | input_wait=2.0, expect_statement_identifier=None, 356 | raw_fd=None): 357 | """Read from device, until timeout or expected message. 358 | 359 | Arguments: 360 | decode_func: callable function that accepts the raw data which 361 | converts it to a human readable format 362 | expect_statement_identifier: return only the specified package or 363 | 1 if timeout. None (default) if no 364 | filtering is requested 365 | input_wait: read timeout in seconds. Set to 0 to run forever. 366 | Default: 2 seconds 367 | raw: file descriptor like object (has to support the .write method) 368 | to dump raw data. None if not used 369 | """ 370 | 371 | # are we expecting a certain message? 372 | if expect_statement_identifier: 373 | # assume failure, until we see expected message 374 | ret_code = 1 375 | else: 376 | # not expecting anything, so OK if we did not see it. 377 | ret_code = 0 378 | 379 | if 0.01 > input_wait: 380 | # 1e20 is close enough to forever for us 381 | input_wait = 1e20 382 | 383 | try: 384 | if self.gpsd_host is not None: 385 | # gpsd input 386 | start = monotonic() 387 | while (monotonic() - start) < input_wait: 388 | # First priority is to be sure the input buffer is read. 
389 | # This is to prevent input buffer overuns 390 | if 0 < self.ser.waiting(): 391 | # We have serial input waiting, get it 392 | # No timeout possible 393 | # RTCM3 JSON can be over 4.4k long, so go big 394 | new_out = self.ser.sock.recv(8192) 395 | if raw_fd is not None: 396 | # save to raw file 397 | raw_fd.write(polybytes(new_out)) 398 | self.out += new_out 399 | 400 | consumed = decode_func(self.out) 401 | # TODO: the decoder shall return a some current 402 | # statement_identifier # to fill last_statement_identifier 403 | last_statement_identifier = None 404 | # 405 | self.out = self.out[consumed:] 406 | if ((expect_statement_identifier and 407 | (expect_statement_identifier == 408 | last_statement_identifier))): 409 | # Got what we were waiting for. Done? 410 | ret_code = 0 411 | 412 | elif self.input_is_device: 413 | # input is a serial device 414 | start = monotonic() 415 | while (monotonic() - start) < input_wait: 416 | # First priority is to be sure the input buffer is read. 417 | # This is to prevent input buffer overuns 418 | # pyserial 3.0+ deprecates inWaiting() in favor of 419 | # in_waiting, but inWaiting() is still present. 420 | if 0 < self.ser.inWaiting(): 421 | # We have serial input waiting, get it 422 | # 1024 is comfortably large, almost always the 423 | # Read timeout is what causes ser.read() to return 424 | new_out = self.ser.read(1024) 425 | if raw_fd is not None: 426 | # save to raw file 427 | raw_fd.write(polybytes(new_out)) 428 | self.out += new_out 429 | 430 | # TODO: Code duplicated from above - make it better 431 | consumed = decode_func(self.out) 432 | # TODO: the decoder shall return a some current 433 | # statement_identifier to fill last_statement_identifier 434 | last_statement_identifier = None 435 | # 436 | self.out = self.out[consumed:] 437 | if ((expect_statement_identifier and 438 | (expect_statement_identifier == 439 | last_statement_identifier))): 440 | # Got what we were waiting for. Done? 
441 | ret_code = 0 442 | else: 443 | # ordinary file, so all read at once 444 | self.out += self.ser.read() 445 | if raw_fd is not None: 446 | # save to raw file 447 | raw_fd.write(polybytes(self.out)) 448 | while True: 449 | consumed = decode_func(self.out) 450 | self.out = self.out[consumed:] 451 | if 0 >= consumed: 452 | break 453 | 454 | except IOError: 455 | # This happens on a good device name, but gpsd already running. 456 | # or if USB device unplugged 457 | sys.stderr.write('%s: failed to read %s\n' 458 | '%s: Is gpsd already holding the port?\n' 459 | % (self.prog_name, self.input_file_name, 460 | self.prog_name)) 461 | return 1 462 | 463 | if 0 < ret_code: 464 | # did not see the message we were expecting to see 465 | sys.stderr.write('%s: waited %0.2f seconds for, ' 466 | 'but did not get: %%%s%%\n' 467 | % (self.prog_name, input_wait, 468 | expect_statement_identifier)) 469 | return ret_code 470 | 471 | def write_gpsd(self, data): 472 | """write data to gpsd daemon.""" 473 | 474 | # HEXDATA_MAX = 512, from gps.h, The max hex digits can write. 475 | # Input data is binary, converting to hex doubles its size. 476 | # Limit binary data to length 255, so hex data length less than 510. 477 | if 255 < len(data): 478 | sys.stderr.write('%s: trying to send %d bytes, max is 255\n' 479 | % (self.prog_name, len(data))) 480 | return 1 481 | 482 | if self.gpsd_device is not None: 483 | # add in the requested device 484 | data_out = (b'?DEVICE={"path":"' + 485 | polybytes(self.gpsd_device) + b'",') 486 | else: 487 | data_out = b'?DEVICE={' 488 | 489 | # Convert binary data to hex and build the message. 
490 | data_out += b'"hexdata":"' + binascii.hexlify(data) + b'"}\r\n' 491 | if VERB_RAW <= self.verbosity_level: 492 | print("sent: ", data_out) 493 | self.ser.send(data_out) 494 | return 0 495 | 496 | 497 | class gpsdata(object): 498 | """Position, track, velocity and status information returned by a GPS.""" 499 | 500 | class satellite(object): 501 | """Class to hold satellite data.""" 502 | def __init__(self, PRN, elevation, azimuth, ss, used=None): 503 | self.PRN = PRN 504 | self.elevation = elevation 505 | self.azimuth = azimuth 506 | self.ss = ss 507 | self.used = used 508 | 509 | def __repr__(self): 510 | return "PRN: %3d E: %3d Az: %3d Ss: %3d Used: %s" % ( 511 | self.PRN, self.elevation, self.azimuth, self.ss, 512 | "ny"[self.used]) 513 | 514 | def __init__(self): 515 | """Initialize all data members.""" 516 | self.online = 0 # NZ if GPS on, zero if not 517 | 518 | self.valid = 0 519 | self.fix = gpsfix() 520 | 521 | self.status = STATUS_UNK 522 | self.utc = "" 523 | 524 | self.satellites_used = 0 # Satellites used in last fix 525 | self.xdop = self.ydop = self.vdop = self.tdop = 0 526 | self.pdop = self.hdop = self.gdop = 0.0 527 | 528 | self.epe = 0.0 529 | 530 | self.satellites = [] # satellite objects in view 531 | 532 | self.gps_id = None 533 | self.driver_mode = 0 534 | self.baudrate = 0 535 | self.stopbits = 0 536 | self.cycle = 0 537 | self.mincycle = 0 538 | self.device = None 539 | self.devices = [] 540 | 541 | self.version = None 542 | 543 | def __repr__(self): 544 | st = "Time: %s (%s)\n" % (self.utc, self.fix.time) 545 | st += "Lat/Lon: %f %f\n" % (self.fix.latitude, self.fix.longitude) 546 | if not isfinite(self.fix.altHAE): 547 | st += "Altitude HAE: ?\n" 548 | else: 549 | st += "Altitude HAE: %f\n" % (self.fix.altHAE) 550 | if not isfinite(self.fix.speed): 551 | st += "Speed: ?\n" 552 | else: 553 | st += "Speed: %f\n" % (self.fix.speed) 554 | if not isfinite(self.fix.track): 555 | st += "Track: ?\n" 556 | else: 557 | st += "Track: %f\n" % 
(self.fix.track) 558 | # FIXME: what about other status values? 559 | st += "Status: STATUS_%s\n" \ 560 | % ("NO_FIX", "FIX", "DGPS_FIX")[self.status] 561 | st += "Mode: MODE_%s\n" \ 562 | % ("ZERO", "NO_FIX", "2D", "3D")[self.fix.mode] 563 | st += "Quality: %d p=%2.2f h=%2.2f v=%2.2f t=%2.2f g=%2.2f\n" % \ 564 | (self.satellites_used, self.pdop, self.hdop, self.vdop, 565 | self.tdop, self.gdop) 566 | st += "Y: %s satellites in view:\n" % len(self.satellites) 567 | for sat in self.satellites: 568 | st += " %r\n" % sat 569 | return st 570 | 571 | 572 | class gps(gpscommon, gpsdata, gpsjson): 573 | """Client interface to a running gpsd instance. 574 | 575 | Or maybe a gpsd JSON file. 576 | """ 577 | 578 | # module version, would be nice to automate the version 579 | __version__ = "3.25" 580 | 581 | def __init__(self, 582 | device=None, 583 | host="127.0.0.1", 584 | input_file_name=None, 585 | mode=0, 586 | port=GPSD_PORT, 587 | reconnect=False, 588 | verbose=0): 589 | self.activated = None 590 | self.clock_sec = NaN 591 | self.clock_nsec = NaN 592 | self.device = device 593 | self.input_file_name = input_file_name 594 | self.path = '' 595 | self.precision = 0 596 | self.real_sec = NaN 597 | self.real_nsec = NaN 598 | self.serialmode = "8N1" 599 | self.verbose = verbose 600 | if VERB_PROG <= verbose: 601 | print('gps(device=%s host=%s port=%s\n' 602 | ' input_file_name=%s verbose=%s)' % 603 | (device, host, port, input_file_name, 604 | verbose)) 605 | 606 | gpscommon.__init__(self, host=host, port=port, 607 | input_file_name=input_file_name, 608 | should_reconnect=reconnect, 609 | verbose=verbose) 610 | 611 | gpsdata.__init__(self) 612 | gpsjson.__init__(self, verbose=verbose) 613 | if mode: 614 | self.stream(mode) 615 | 616 | def _oldstyle_shim(self): 617 | # The rest is backwards compatibility for the old interface 618 | def default(k, dflt, vbit=0): 619 | """Return default for key.""" 620 | if k not in self.data.keys(): 621 | return dflt 622 | 623 | self.valid |= 
vbit 624 | return self.data[k] 625 | 626 | if "VERSION" == self.data.get("class"): 627 | self.version = self.data 628 | elif "DEVICE" == self.data.get("class"): 629 | self.valid = ONLINE_SET | DEVICE_SET 630 | self.path = self.data["path"] 631 | self.activated = default("activated", None) 632 | driver = default("driver", None, DEVICEID_SET) 633 | subtype = default("subtype", None, DEVICEID_SET) 634 | self.gps_id = driver 635 | if subtype: 636 | self.gps_id += " " + subtype 637 | self.baudrate = default("bps", 0) 638 | self.cycle = default("cycle", NaN) 639 | self.driver_mode = default("native", 0) 640 | self.mincycle = default("mincycle", NaN) 641 | self.serialmode = default("serialmode", "8N1") 642 | elif "TPV" == self.data.get("class"): 643 | self.valid = ONLINE_SET 644 | self.device = default("device", "missing") 645 | self.utc = default("time", None, TIME_SET) 646 | if self.utc is not None: 647 | # self.utc is always iso 8601 string 648 | # just copy to fix.time 649 | self.fix.time = self.utc 650 | self.fix.altitude = default("alt", NaN, ALTITUDE_SET) # DEPRECATED 651 | self.fix.altHAE = default("altHAE", NaN, ALTITUDE_SET) 652 | self.fix.altMSL = default("altMSL", NaN, ALTITUDE_SET) 653 | self.fix.climb = default("climb", NaN, CLIMB_SET) 654 | self.fix.ecefvx = default("ecefvx", NaN, VECEF_SET) 655 | self.fix.ecefx = default("ecefx", NaN, ECEF_SET) 656 | self.fix.epc = default("epc", NaN, CLIMBERR_SET) 657 | self.fix.epd = default("epd", NaN) 658 | self.fix.eps = default("eps", NaN, SPEEDERR_SET) 659 | self.fix.ept = default("ept", NaN, TIMERR_SET) 660 | self.fix.epv = default("epv", NaN, VERR_SET) 661 | self.fix.epx = default("epx", NaN, HERR_SET) 662 | self.fix.epy = default("epy", NaN, HERR_SET) 663 | self.fix.latitude = default("lat", NaN, LATLON_SET) 664 | self.fix.longitude = default("lon", NaN) 665 | self.fix.mode = default("mode", 0, MODE_SET) 666 | self.fix.speed = default("speed", NaN, SPEED_SET) 667 | self.fix.status = default("status", 1) 668 | 
self.fix.track = default("track", NaN, TRACK_SET) 669 | elif "SKY" == self.data.get("class"): 670 | self.device = default("device", "missing") 671 | for attrp in ("g", "h", "p", "t", "v", "x", "y"): 672 | n = attrp + "dop" 673 | setattr(self, n, default(n, NaN, DOP_SET)) 674 | if "satellites" in self.data.keys(): 675 | self.satellites = [] 676 | for sat in self.data['satellites']: 677 | if 'el' not in sat: 678 | sat['el'] = -999 679 | if 'az' not in sat: 680 | sat['az'] = -999 681 | if 'ss' not in sat: 682 | sat['ss'] = -999 683 | self.satellites.append(gps.satellite(PRN=sat['PRN'], 684 | elevation=sat['el'], 685 | azimuth=sat['az'], ss=sat['ss'], 686 | used=sat['used'])) 687 | self.satellites_used = 0 688 | for sat in self.satellites: 689 | if sat.used: 690 | self.satellites_used += 1 691 | self.valid = ONLINE_SET | SATELLITE_SET 692 | elif "PPS" == self.data.get("class"): 693 | self.device = default("device", "missing") 694 | self.real_sec = default("real_sec", NaN) 695 | self.real_nsec = default("real_nsec", NaN) 696 | self.clock_sec = default("clock_sec", NaN) 697 | self.clock_nsec = default("clock_nsec", NaN) 698 | self.precision = default("precision", 0) 699 | # elif "DEVICES" == self.data.get("class"): 700 | # TODO: handle class DEVICES # pylint: disable=fixme 701 | 702 | def __next__(self): 703 | """Python 3 version of next(). 704 | 705 | This is just a shim over read() to enable implicit iteration. 706 | Not intended to be used directly, use read() instead.""" 707 | if -1 == self.read(): 708 | raise StopIteration 709 | if hasattr(self, "data"): 710 | return self.data 711 | 712 | return self.response 713 | 714 | def next(self): 715 | """Python 2 compatibile next(). 716 | 717 | This is just a shim over read() to enable implicit iteration. 718 | Not intended to be used directly, use read() instead.""" 719 | return self.__next__() 720 | 721 | def read(self): 722 | """Read and interpret data from a gpsd daemon. 
723 | 724 | Return: less than zero on error or disconnect 725 | Otherwise zero, 726 | """ 727 | status = gpscommon.read(self) 728 | if 0 >= status: 729 | return status 730 | if self.response.startswith("{") and self.response.endswith("}\r\n"): 731 | self.unpack(self.response) 732 | self._oldstyle_shim() 733 | self.valid |= PACKET_SET 734 | return 0 735 | 736 | def stream(self, flags=0, devpath=None): 737 | """Ask gpsd to stream reports at your client.""" 738 | gpsjson.stream(self, flags, devpath) 739 | 740 | 741 | def is_sbas(prn): 742 | """Is this the NMEA ID of an SBAS satellite?.""" 743 | return 120 <= prn <= 158 744 | 745 | 746 | if __name__ == '__main__': 747 | # FIXME: relative imports break this __main__ 748 | description = 'gps/gps.py module.' 749 | usage = '%(prog)s [OPTIONS] [host [port]]' 750 | epilog = ('BSD terms apply: see the file COPYING in the distribution root' 751 | ' for details.') 752 | 753 | parser = argparse.ArgumentParser( 754 | description=description, 755 | epilog=epilog, 756 | formatter_class=argparse.RawDescriptionHelpFormatter, 757 | usage=usage) 758 | parser.add_argument( 759 | '-?', 760 | action="help", 761 | help='show this help message and exit' 762 | ) 763 | parser.add_argument( 764 | '-v', 765 | '--verbose', 766 | dest='verbose', 767 | default=0, 768 | action='count', 769 | help='Verbose. Repeat for more verbosity. [Default %(default)s]', 770 | ) 771 | parser.add_argument( 772 | '-V', '--version', 773 | action='version', 774 | version="%(prog)s: Version " + gps_version + "\n", 775 | help='Output version to stderr, then exit' 776 | ) 777 | parser.add_argument( 778 | 'arguments', 779 | metavar='[host [port]]', 780 | nargs='*', 781 | help='[host [port]] Host and port to connec to gpsd on.' 
782 | ) 783 | options = parser.parse_args() 784 | 785 | streaming = False 786 | if arguments and 2 < len(arguments): 787 | sys.stderr.write("gps.py: too many positional arguments.") 788 | sys.exit(1) 789 | 790 | opts = {"verbose": options.verb} 791 | if options.arguments: 792 | opts["host"] = options.arguments[0] 793 | if 2 == len(options.arguments): 794 | opts["port"] = options.arguments[1] 795 | 796 | session = gps(**opts) 797 | session.stream(WATCH_ENABLE) 798 | try: 799 | for report in session: 800 | print(report) 801 | except KeyboardInterrupt: 802 | # Avoid garble on ^C 803 | print("") 804 | 805 | # gps.py ends here 806 | # vim: set expandtab shiftwidth=4 807 | -------------------------------------------------------------------------------- /gps/fake.py: -------------------------------------------------------------------------------- 1 | # This code run compatibly under Python 2 and 3.x for x >= 2. 2 | # Preserve this property! 3 | # 4 | # This file is Copyright 2010 by the GPSD project 5 | # SPDX-License-Identifier: BSD-2-Clause 6 | # Codacy D203 and D211 conflict, I choose D203 7 | # Codacy D212 and D213 conflict, I choose D212 8 | 9 | """gpsfake.py -- classes for creating controlled test environment around gpsd. 10 | 11 | The gpsfake(1) regression tester shipped with GPSD is a trivial wrapper 12 | around this code. For a more interesting usage example, see the 13 | valgrind-audit script shipped with the GPSD code. 14 | 15 | To use this code, start by instantiating a TestSession class. Use the 16 | prefix argument if you want to run the daemon under some kind of run-time 17 | monitor like valgrind or gdb. Here are some particularly useful possibilities: 18 | 19 | valgrind --tool=memcheck --gen-suppressions=yes --leak-check=yes 20 | Run under Valgrind, checking for malloc errors and memory leaks. 21 | 22 | xterm -e gdb -tui --args 23 | Run under gdb, controlled from a new xterm. 
24 | 25 | You can use the options argument to pass in daemon options; normally you will 26 | use this to set the debug-logging level. 27 | 28 | On initialization, the test object spawns an instance of gpsd with no 29 | devices or clients attached, connected to a control socket. 30 | 31 | TestSession has methods to attach and detch fake GPSes. The 32 | TestSession class simulates GPS devices for you with objects composed 33 | from a pty and a class instance that cycles sentences into the master side 34 | from some specified logfile; gpsd reads the slave side. A fake GPS is 35 | identified by the string naming its slave device. 36 | 37 | TestSession also has methods to start and end client sessions. Daemon 38 | responses to a client are fed to a hook function which, by default, 39 | discards them. Note that this data is 'bytes' to accommodate possible 40 | binary data in Python 3; use polystr() if you need a str. You can 41 | change the hook to misc.get_bytes_stream(sys.stdout).write to dump 42 | responses to standard output (this is what the gpsfake executable does) 43 | or do something more exotic. A client session is identified by a small 44 | integer that counts the number of client session starts. 45 | 46 | There are a couple of convenience methods. TestSession.wait() does nothing, 47 | allowing a specified number of seconds to elapse. TestSession.send() 48 | ships commands to an open client session. 49 | 50 | TestSession does not currently capture the daemon's log output. It is 51 | run with -N, so the output will go to stderr (along with, for example, 52 | Valgrind notifications). 53 | 54 | Each FakeGPS instance tries to packetize the data from the logfile it 55 | is initialized with. It uses the same packet-getter as the daemon. 56 | Exception: if there is a Delay-Cookie line in a header comment, that 57 | delimiter is used to split up the test load. 58 | 59 | The TestSession code maintains a run queue of FakeGPS and gps.gs 60 | (client- session) objects. 
It repeatedly cycles through the run queue. 61 | For each client session object in the queue, it tries to read data 62 | from gpsd. For each fake GPS, it sends one line or packet of stored 63 | data. When a fake-GPS's go predicate becomes false, the fake GPS is 64 | removed from the run queue. 65 | 66 | There are two ways to use this code. The more deterministic is 67 | non-threaded mode: set up your client sessions and fake GPS devices, 68 | then call the run() method. The run() method will terminate when 69 | there are no more objects in the run queue. Note, you must have 70 | created at least one fake client or fake GPS before calling run(), 71 | otherwise it will terminate immediately. 72 | 73 | To allow for adding and removing clients while the test is running, 74 | run in threaded mode by calling the start() method. This simply calls 75 | the run method in a subthread, with locking of critical regions. 76 | """ 77 | # This code runs compatibly under Python 2 and 3.x for x >= 2. 78 | # Preserve this property! 79 | 80 | # Codacy D203 and D211 conflict, I choose D203 81 | # Codacy D212 and D213 conflict, I choose D212 82 | from __future__ import absolute_import, print_function, division 83 | 84 | import os 85 | import pty 86 | import select 87 | import signal 88 | import socket 89 | import stat 90 | import subprocess 91 | import sys 92 | import termios # fcntl, array, struct 93 | import threading 94 | import time 95 | 96 | import gps 97 | from . import packet as sniffer 98 | 99 | # The magic number below has to be derived from observation. If 100 | # it's too high you'll slow the tests down a lot. If it's too low 101 | # you'll get regression tests timing out. 102 | 103 | # WRITE_PAD: Define a per-line delay on writes so we won't spam the 104 | # buffers in the pty layer or gpsd itself. Values smaller than the 105 | # system timer tick don't make any difference here. Can be set from 106 | # WRITE_PAD in the environment. 
107 | 108 | if sys.platform.startswith("linux"): 109 | WRITE_PAD = 0.0 110 | elif sys.platform.startswith("freebsd"): 111 | # Hal Murray needs 0..005 for FreeBSD 12.1 on RasPi 3B. 112 | WRITE_PAD = 0.005 113 | elif sys.platform.startswith("openbsd"): 114 | WRITE_PAD = 0.001 115 | elif sys.platform.startswith("netbsd5"): 116 | WRITE_PAD = 0.200 117 | elif sys.platform.startswith("netbsd"): 118 | WRITE_PAD = 0.001 119 | elif sys.platform.startswith("darwin"): 120 | WRITE_PAD = 0.001 121 | else: 122 | WRITE_PAD = 0.004 123 | 124 | # Additional delays in slow mode 125 | WRITE_PAD_SLOWDOWN = 0.01 126 | 127 | # If a test takes longer than this, we deem it to have timed out 128 | TEST_TIMEOUT = 60 129 | 130 | 131 | def GetDelay(slow=False): 132 | """Get appropriate per-line delay.""" 133 | delay = float(os.getenv("WRITE_PAD", WRITE_PAD)) 134 | if slow: 135 | delay += WRITE_PAD_SLOWDOWN 136 | return delay 137 | 138 | 139 | class TestError(BaseException): 140 | 141 | """Class TestError.""" 142 | def __init__(self, msg): 143 | """Initialize Class TestError.""" 144 | super(TestError, self).__init__() 145 | self.msg = msg 146 | 147 | 148 | class TestLoadError(TestError): 149 | 150 | """Class TestLoadError, empty.""" 151 | 152 | 153 | class TestLoad(object): 154 | 155 | """Digest a logfile into a list of sentences we can cycle through.""" 156 | 157 | def __init__(self, logfp, predump=False, slow=False, oneshot=False): 158 | """Initialize Class TestLoad.""" 159 | self.sentences = [] # This is the interesting part 160 | if isinstance(logfp, str): 161 | logfp = open(logfp, "rb") 162 | self.name = logfp.name 163 | self.logfp = logfp 164 | self.predump = predump 165 | self.type = None 166 | self.sourcetype = "pty" 167 | self.serial = None 168 | self.delay = GetDelay(slow) 169 | self.delimiter = None 170 | # Stash away a copy in case we need to resplit 171 | text = logfp.read() 172 | logfp = open(logfp.name, 'rb') 173 | # Grab the packets in the normal way 174 | getter = 
sniffer.new() 175 | # gps.packet.register_report(reporter) 176 | type_latch = None 177 | commentlen = 0 178 | while True: 179 | # Note that packet data is bytes rather than str 180 | (plen, ptype, packet, _counter) = getter.get(logfp.fileno()) 181 | if 0 >= plen: 182 | break 183 | 184 | if sniffer.COMMENT_PACKET == ptype: 185 | commentlen += len(packet) 186 | # Some comments are magic 187 | if b"Serial:" in packet: 188 | # "#Serial:' -- Change serial parameters 189 | packet = packet[1:].strip() 190 | try: 191 | (_xx, baud, params) = packet.split() 192 | baud = int(baud) 193 | if params[0] in (b'7', b'8'): 194 | databits = int(params[0]) 195 | else: 196 | raise ValueError 197 | if params[1] in (b'N', b'O', b'E'): 198 | parity = params[1] 199 | else: 200 | raise ValueError 201 | if params[2] in (b'1', b'2'): 202 | stopbits = int(params[2]) 203 | else: 204 | raise ValueError 205 | except (ValueError, IndexError): 206 | raise TestLoadError("bad serial-parameter spec in %s" % 207 | self.name) 208 | self.serial = (baud, databits, parity, stopbits) 209 | elif b"Transport: UDP" in packet: 210 | # "#Transport:' 211 | self.sourcetype = "UDP" 212 | elif b"Transport: TCP" in packet: 213 | # "#Transport:' 214 | self.sourcetype = "TCP" 215 | elif b"Delay-Cookie:" in packet: 216 | # "#Delay-Cookie:' 217 | if packet.startswith(b"#"): 218 | packet = packet[1:] 219 | try: 220 | (_dummy, self.delimiter, delay) = \ 221 | packet.strip().split() 222 | self.delay = float(delay) 223 | except ValueError: 224 | raise TestLoadError("bad Delay-Cookie line in %s" % 225 | self.name) 226 | self.resplit = True 227 | elif b"Date:" in packet: 228 | # "# Date: yyyy-mm-dd' -- preset date 229 | self.sentences.append(packet) 230 | # drop all other comments silently 231 | else: 232 | if type_latch is None: 233 | type_latch = ptype 234 | if self.predump: 235 | print(repr(packet)) 236 | if not packet: 237 | raise TestLoadError("zero-length packet from %s" % 238 | self.name) 239 | 
self.sentences.append(packet) 240 | # Look at the first packet to grok the GPS type 241 | self.textual = (sniffer.NMEA_PACKET == type_latch) 242 | if self.textual: 243 | self.legend = "gpsfake: line %d: " 244 | else: 245 | self.legend = "gpsfake: packet %d" 246 | # Maybe this needs to be split on different delimiters? 247 | if self.delimiter is not None: 248 | self.sentences = text[commentlen:].split(self.delimiter) 249 | # Do we want single-shot operation? 250 | if oneshot: 251 | self.sentences.append(b"# EOF\n") 252 | 253 | 254 | class PacketError(TestError): 255 | 256 | """Class PacketError, empty.""" 257 | 258 | 259 | class FakeGPS(object): 260 | 261 | """Class FakeGPS.""" 262 | def __init__(self, testload, progress=lambda x: None): 263 | """Initialize Class FakeGPS.""" 264 | self.exhausted = 0 265 | self.go_predicate = lambda: True 266 | self.index = 0 267 | self.progress = progress 268 | self.readers = 0 269 | self.testload = testload 270 | self.progress("gpsfake: %s provides %d sentences\n" 271 | % (self.testload.name, len(self.testload.sentences))) 272 | 273 | def write(self, line): 274 | """Throw an error if this superclass is ever instantiated.""" 275 | raise ValueError(line) 276 | 277 | def feed(self): 278 | """Feed a line from the contents of the GPS log to the daemon.""" 279 | line = self.testload.sentences[self.index 280 | % len(self.testload.sentences)] 281 | if b"%Delay:" in line: 282 | # Delay specified number of seconds 283 | delay = line.split()[1] 284 | time.sleep(int(delay)) 285 | # self.write has to be set by the derived class 286 | self.write(line) 287 | time.sleep(self.testload.delay) 288 | self.index += 1 289 | 290 | 291 | class FakePTY(FakeGPS): 292 | 293 | """A FakePTY is a pty with a test log ready to be cycled to it.""" 294 | 295 | def __init__(self, testload, 296 | speed=4800, databits=8, parity='N', stopbits=1, 297 | progress=lambda x: None): 298 | """Initialize Class FakePTY.""" 299 | super(FakePTY, self).__init__(testload, progress) 
300 | # Allow Serial: header to be overridden by explicit speed. 301 | if self.testload.serial: 302 | (speed, databits, parity, stopbits) = self.testload.serial 303 | self.speed = speed 304 | baudrates = { 305 | 0: termios.B0, 306 | 50: termios.B50, 307 | 75: termios.B75, 308 | 110: termios.B110, 309 | 134: termios.B134, 310 | 150: termios.B150, 311 | 200: termios.B200, 312 | 300: termios.B300, 313 | 600: termios.B600, 314 | 1200: termios.B1200, 315 | 1800: termios.B1800, 316 | 2400: termios.B2400, 317 | 4800: termios.B4800, 318 | 9600: termios.B9600, 319 | 19200: termios.B19200, 320 | 38400: termios.B38400, 321 | 57600: termios.B57600, 322 | 115200: termios.B115200, 323 | 230400: termios.B230400, 324 | } 325 | (self.fd, self.slave_fd) = pty.openpty() 326 | self.byname = os.ttyname(self.slave_fd) 327 | os.chmod(self.byname, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | 328 | stat.S_IWGRP | stat.S_IROTH | stat.S_IWOTH) 329 | (iflag, oflag, cflag, lflag, ispeed, ospeed, cc) = termios.tcgetattr( 330 | self.slave_fd) 331 | cc[termios.VMIN] = 1 332 | cflag &= ~(termios.PARENB | termios.PARODD | termios.CRTSCTS) 333 | cflag |= termios.CREAD | termios.CLOCAL 334 | iflag = oflag = lflag = 0 335 | iflag &= ~ (termios.PARMRK | termios.INPCK) 336 | cflag &= ~ (termios.CSIZE | termios.CSTOPB | termios.PARENB | 337 | termios.PARODD) 338 | if 7 == databits: 339 | cflag |= termios.CS7 340 | else: 341 | cflag |= termios.CS8 342 | if 2 == stopbits: 343 | cflag |= termios.CSTOPB 344 | # Warning: attempting to set parity makes Fedora lose its cookies 345 | if 'E' == parity: 346 | iflag |= termios.INPCK 347 | cflag |= termios.PARENB 348 | elif 'O' == parity: 349 | iflag |= termios.INPCK 350 | cflag |= termios.PARENB | termios.PARODD 351 | ispeed = ospeed = baudrates[speed] 352 | try: 353 | termios.tcsetattr(self.slave_fd, termios.TCSANOW, 354 | [iflag, oflag, cflag, lflag, ispeed, ospeed, cc]) 355 | except termios.error: 356 | raise TestLoadError("error attempting to set serial mode to 
%s " 357 | " %s%s%s" 358 | % (speed, databits, parity, stopbits)) 359 | 360 | def read(self): 361 | """Discard control strings written by gpsd.""" 362 | # A tcflush implementation works on Linux but fails on OpenBSD 4. 363 | termios.tcflush(self.fd, termios.TCIFLUSH) 364 | # Alas, the FIONREAD version also works on Linux and fails on OpenBSD. 365 | # try: 366 | # buf = array.array('i', [0]) 367 | # fcntl.ioctl(self.master_fd, termios.FIONREAD, buf, True) 368 | # n = struct.unpack('i', buf)[0] 369 | # os.read(self.master_fd, n) 370 | # except IOError: 371 | # pass 372 | 373 | def write(self, line): 374 | self.progress("gpsfake: %s writes %d=%s\n" 375 | % (self.testload.name, len(line), repr(line))) 376 | os.write(self.fd, line) 377 | 378 | def drain(self): 379 | """Wait for the associated device to drain (e.g. before closing).""" 380 | termios.tcdrain(self.fd) 381 | 382 | 383 | def cleansocket(host, port, socktype=socket.SOCK_STREAM): 384 | """Get a socket that we can re-use cleanly after it's closed.""" 385 | cs = socket.socket(socket.AF_INET, socktype) 386 | # This magic prevents "Address already in use" errors after 387 | # we release the socket. 388 | cs.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) 389 | cs.bind((host, port)) 390 | return cs 391 | 392 | 393 | def freeport(socktype=socket.SOCK_STREAM): 394 | """Get a free port number for the given connection type. 395 | 396 | This lets the OS assign a unique port, and then assumes 397 | that it will become available for reuse once the socket 398 | is closed, and remain so long enough for the real use. 
399 | """ 400 | s = cleansocket("127.0.0.1", 0, socktype) 401 | port = s.getsockname()[1] 402 | s.close() 403 | return port 404 | 405 | 406 | class FakeTCP(FakeGPS): 407 | """A TCP serverlet with a test log ready to be cycled to it.""" 408 | 409 | def __init__(self, testload, 410 | host, port, 411 | progress=lambda x: None): 412 | """Init class FakeTCP.""" 413 | super(FakeTCP, self).__init__(testload, progress) 414 | self.host = host 415 | self.dispatcher = cleansocket(self.host, int(port)) 416 | # Get actual assigned port 417 | self.port = self.dispatcher.getsockname()[1] 418 | self.byname = "tcp://" + host + ":" + str(self.port) 419 | self.dispatcher.listen(5) 420 | self.readables = [self.dispatcher] 421 | 422 | def read(self): 423 | """Handle connection requests and data.""" 424 | readable, _writable, _errored = select.select(self.readables, [], [], 425 | 0) 426 | for s in readable: 427 | if s == self.dispatcher: # Connection request 428 | client_socket, _address = s.accept() 429 | self.readables = [client_socket] 430 | # Depending on timing, gpsd may try to reconnect between the 431 | # end of the log data and the remove_device. With no listener, 432 | # this results in spurious error messages. Keeping the 433 | # listener around avoids this. It will eventually be closed 434 | # by the Python object cleanup. self.dispatcher.close() 435 | else: # Incoming data 436 | data = s.recv(1024) 437 | if not data: 438 | s.close() 439 | self.readables.remove(s) 440 | 441 | def write(self, line): 442 | """Send the next log packet to everybody connected.""" 443 | self.progress("gpsfake: %s writes %d=%s\n" 444 | % (self.testload.name, len(line), repr(line))) 445 | for s in self.readables: 446 | if s != self.dispatcher: 447 | s.send(line) 448 | 449 | def drain(self): 450 | """Wait for the associated device(s) to drain (e.g. 
before closing).""" 451 | for s in self.readables: 452 | if s != self.dispatcher: 453 | s.shutdown(socket.SHUT_RDWR) 454 | 455 | 456 | class FakeUDP(FakeGPS): 457 | """A UDP broadcaster with a test log ready to be cycled to it.""" 458 | 459 | def __init__(self, testload, 460 | ipaddr, port, 461 | progress=lambda x: None): 462 | """Init FakeUDP.""" 463 | super(FakeUDP, self).__init__(testload, progress) 464 | self.byname = "udp://" + ipaddr + ":" + str(port) 465 | self.ipaddr = ipaddr 466 | self.port = port 467 | self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) 468 | 469 | def read(self): 470 | """Discard control strings written by gpsd.""" 471 | return 472 | 473 | def write(self, line): 474 | self.progress("gpsfake: %s writes %d=%s\n" 475 | % (self.testload.name, len(line), repr(line))) 476 | self.sock.sendto(line, (self.ipaddr, int(self.port))) 477 | 478 | def drain(self): 479 | """Wait for the associated device to drain (e.g. before closing).""" 480 | # shutdown() fails on UDP 481 | return # shutdown() fails on UDP 482 | 483 | 484 | class SubprogramError(TestError): 485 | 486 | """Class SubprogramError.""" 487 | def __str__(self): 488 | """Return class SubprogramError msg.""" 489 | return repr(self.msg) 490 | 491 | 492 | class SubprogramInstance(object): 493 | 494 | """Class for generic subprogram.""" 495 | 496 | ERROR = SubprogramError 497 | 498 | def __init__(self): 499 | """Init class SubprogramInstance.""" 500 | self.spawncmd = None 501 | self.process = None 502 | self.returncode = None 503 | self.env = None 504 | 505 | def spawn_sub(self, program, options, background=False, prefix="", 506 | env=None): 507 | """Spawn a subprogram instance.""" 508 | spawncmd = None 509 | 510 | # Look for program in GPSD_HOME env variable 511 | if os.environ.get('GPSD_HOME'): 512 | for path in os.environ['GPSD_HOME'].split(':'): 513 | _spawncmd = "%s/%s" % (path, program) 514 | if os.path.isfile(_spawncmd) and os.access(_spawncmd, os.X_OK): 515 | spawncmd = 
_spawncmd 516 | break 517 | _spawncmd = "%s/gpsd/%s" % (path, program) 518 | if os.path.isfile(_spawncmd) and os.access(_spawncmd, os.X_OK): 519 | spawncmd = _spawncmd 520 | break 521 | 522 | # if we could not find it yet try PATH env variable for it 523 | if not spawncmd: 524 | if '/usr/sbin' not in os.environ['PATH']: 525 | os.environ['PATH'] = os.environ['PATH'] + ":/usr/sbin" 526 | for path in os.environ['PATH'].split(':'): 527 | _spawncmd = "%s/%s" % (path, program) 528 | if os.path.isfile(_spawncmd) and os.access(_spawncmd, os.X_OK): 529 | spawncmd = _spawncmd 530 | break 531 | 532 | if not spawncmd: 533 | raise self.ERROR("Cannot execute %s: executable not found. " 534 | "Set GPSD_HOME env variable" % program) 535 | self.spawncmd = [spawncmd] + options.split() 536 | if prefix: 537 | self.spawncmd = prefix.split() + self.spawncmd 538 | if env: 539 | self.env = os.environ.copy() 540 | self.env.update(env) 541 | self.process = subprocess.Popen(self.spawncmd, env=self.env) 542 | if not background: 543 | self.returncode = status = self.process.wait() 544 | if os.WIFSIGNALED(status) or os.WEXITSTATUS(status): 545 | raise self.ERROR("%s exited with status %d" 546 | % (program, status)) 547 | 548 | def is_alive(self): 549 | """Is the program still alive?""" 550 | if not self.process: 551 | return False 552 | self.returncode = self.process.poll() 553 | if self.returncode is None: 554 | return True 555 | self.process = None 556 | return False 557 | 558 | def kill(self): 559 | """Kill the program instance.""" 560 | while self.is_alive(): 561 | try: # terminate() may fail if already killed 562 | self.process.terminate() 563 | except OSError: 564 | continue 565 | time.sleep(0.01) 566 | 567 | 568 | class DaemonError(SubprogramError): 569 | 570 | """Class DaemonError.""" 571 | 572 | 573 | class DaemonInstance(SubprogramInstance): 574 | """Control a gpsd instance.""" 575 | 576 | ERROR = DaemonError 577 | 578 | def __init__(self, control_socket=None): 579 | """Init class 
DaemonInstance.""" 580 | self.sock = None 581 | super(DaemonInstance, self).__init__() 582 | if control_socket: 583 | self.control_socket = control_socket 584 | else: 585 | tmpdir = os.environ.get('TMPDIR', '/tmp') 586 | self.control_socket = "%s/gpsfake-%d.sock" % (tmpdir, os.getpid()) 587 | 588 | def spawn(self, options, port, background=False, prefix=""): 589 | """Spawn a daemon instance.""" 590 | # The -b option to suppress hanging on probe returns is needed to cope 591 | # with OpenBSD (and possibly other non-Linux systems) that don't 592 | # support anything we can use to implement the FakeGPS.read() method 593 | opts = (" -b -N -S %s -F %s %s" 594 | % (port, self.control_socket, options)) 595 | # Derive a unique SHM key from the port # to avoid collisions. 596 | # Use 'Gp' as the prefix to avoid colliding with 'GPSD'. 597 | shmkey = '0x4770%.04X' % int(port) 598 | env = {'GPSD_SHM_KEY': shmkey} 599 | self.spawn_sub('gpsd', opts, background, prefix, env) 600 | 601 | def wait_ready(self): 602 | """Wait for the daemon to create the control socket.""" 603 | while self.is_alive(): 604 | if os.path.exists(self.control_socket): 605 | return 606 | time.sleep(0.1) 607 | 608 | def __get_control_socket(self): 609 | # Now we know it's running, get a connection to the control socket. 
610 | if not os.path.exists(self.control_socket): 611 | return None 612 | try: 613 | self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM, 0) 614 | self.sock.connect(self.control_socket) 615 | except socket.error: 616 | if self.sock: 617 | self.sock.close() 618 | self.sock = None 619 | return self.sock 620 | 621 | def add_device(self, path): 622 | """Add a device to the daemon's internal search list.""" 623 | if self.__get_control_socket(): 624 | self.sock.sendall(gps.polybytes("+%s\r\n\x00" % path)) 625 | self.sock.recv(12) 626 | self.sock.close() 627 | 628 | def remove_device(self, path): 629 | """Remove a device from the daemon's internal search list.""" 630 | if self.__get_control_socket(): 631 | self.sock.sendall(gps.polybytes("-%s\r\n\x00" % path)) 632 | self.sock.recv(12) 633 | self.sock.close() 634 | 635 | 636 | class TestSessionError(TestError): 637 | """class TestSessionError.""" 638 | 639 | # why does testSessionError() do nothing? " 640 | 641 | 642 | class TestSession(object): 643 | """Manage a session including a daemon with fake GPSes and clients.""" 644 | 645 | def __init__(self, prefix=None, port=None, options=None, verbose=0, 646 | predump=False, udp=False, tcp=False, slow=False, 647 | timeout=None): 648 | """Initialize the test session by launching the daemon.""" 649 | self.prefix = prefix 650 | self.options = options 651 | self.verbose = verbose 652 | self.predump = predump 653 | self.udp = udp 654 | self.tcp = tcp 655 | self.slow = slow 656 | self.daemon = DaemonInstance() 657 | self.fakegpslist = {} 658 | self.client_id = 0 659 | self.readers = 0 660 | self.writers = 0 661 | self.runqueue = [] 662 | self.index = 0 663 | if port: 664 | self.port = port 665 | else: 666 | self.port = freeport() 667 | self.progress = lambda x: None 668 | # for debugging 669 | # self.progress = lambda x: sys.stderr.write("# Hi " + x) 670 | self.reporter = lambda x: None 671 | self.default_predicate = None 672 | self.fd_set = [] 673 | self.threadlock = None 
674 | self.timeout = TEST_TIMEOUT if timeout is None else timeout 675 | 676 | def spawn(self): 677 | """Spawn daemon.""" 678 | for sig in (signal.SIGQUIT, signal.SIGINT, signal.SIGTERM): 679 | signal.signal(sig, lambda unused, dummy: self.cleanup()) 680 | self.daemon.spawn(background=True, prefix=self.prefix, port=self.port, 681 | options=self.options) 682 | self.daemon.wait_ready() 683 | 684 | def set_predicate(self, pred): 685 | """Set a default go predicate for the session.""" 686 | self.default_predicate = pred 687 | 688 | def gps_add(self, logfile, speed=19200, pred=None, oneshot=False): 689 | """Add a simulated GPS being fed by the specified logfile.""" 690 | self.progress("gpsfake: gps_add(%s, %d)\n" % (logfile, speed)) 691 | if logfile not in self.fakegpslist: 692 | testload = TestLoad(logfile, predump=self.predump, slow=self.slow, 693 | oneshot=oneshot) 694 | if "UDP" == testload.sourcetype or self.udp: 695 | newgps = FakeUDP(testload, ipaddr="127.0.0.1", 696 | port=freeport(socket.SOCK_DGRAM), 697 | progress=self.progress) 698 | elif "TCP" == testload.sourcetype or self.tcp: 699 | # Let OS assign the port 700 | newgps = FakeTCP(testload, host="127.0.0.1", port=0, 701 | progress=self.progress) 702 | else: 703 | newgps = FakePTY(testload, speed=speed, 704 | progress=self.progress) 705 | if pred: 706 | newgps.go_predicate = pred 707 | elif self.default_predicate: 708 | newgps.go_predicate = self.default_predicate 709 | self.fakegpslist[newgps.byname] = newgps 710 | self.append(newgps) 711 | newgps.exhausted = 0 712 | self.daemon.add_device(newgps.byname) 713 | return newgps.byname 714 | 715 | def gps_remove(self, name): 716 | """Remove a simulated GPS from the daemon's search list.""" 717 | self.progress("gpsfake: gps_remove(%s)\n" % name) 718 | self.fakegpslist[name].drain() 719 | self.remove(self.fakegpslist[name]) 720 | self.daemon.remove_device(name) 721 | del self.fakegpslist[name] 722 | 723 | def client_add(self, commands): 724 | """Initiate a client 
session and force connection to a fake GPS.""" 725 | self.progress("gpsfake: client_add()\n") 726 | try: 727 | newclient = gps.gps(port=self.port, verbose=self.verbose) 728 | except socket.error: 729 | if not self.daemon.is_alive(): 730 | raise TestSessionError("daemon died") 731 | raise 732 | self.append(newclient) 733 | newclient.id = self.client_id + 1 734 | self.client_id += 1 735 | self.progress("gpsfake: client %d has %s\n" 736 | % (self.client_id, newclient.device)) 737 | if commands: 738 | self.initialize(newclient, commands) 739 | return self.client_id 740 | 741 | def client_remove(self, cid): 742 | """Terminate a client session.""" 743 | self.progress("gpsfake: client_remove(%d)\n" % cid) 744 | for obj in self.runqueue: 745 | if isinstance(obj, gps.gps) and obj.id == cid: 746 | self.remove(obj) 747 | return True 748 | return False 749 | 750 | def wait(self, seconds): 751 | """Wait, doing nothing.""" 752 | self.progress("gpsfake: wait(%d)\n" % seconds) 753 | time.sleep(seconds) 754 | 755 | def gather(self, seconds): 756 | """Wait, doing nothing but watching for sentences.""" 757 | self.progress("gpsfake: gather(%d)\n" % seconds) 758 | time.sleep(seconds) 759 | 760 | def cleanup(self): 761 | """We're done, kill the daemon.""" 762 | self.progress("gpsfake: cleanup()\n") 763 | if self.daemon: 764 | self.daemon.kill() 765 | self.daemon = None 766 | 767 | def run(self): 768 | """Run the tests.""" 769 | try: 770 | self.progress("gpsfake: test loop begins\n") 771 | while self.daemon: 772 | if not self.daemon.is_alive(): 773 | raise TestSessionError("daemon died") 774 | # We have to read anything that gpsd might have tried 775 | # to send to the GPS here -- under OpenBSD the 776 | # TIOCDRAIN will hang, otherwise. 
777 | for device in self.runqueue: 778 | if isinstance(device, FakeGPS): 779 | device.read() 780 | had_output = False 781 | chosen = self.choose() 782 | if isinstance(chosen, FakeGPS): 783 | if (((chosen.exhausted and self.timeout and 784 | (time.time() - chosen.exhausted > self.timeout) and 785 | chosen.byname in self.fakegpslist))): 786 | sys.stderr.write( 787 | "Test timed out: maybe increase WRITE_PAD (= %s)\n" 788 | % GetDelay(self.slow)) 789 | raise SystemExit(1) 790 | 791 | if not chosen.go_predicate(chosen.index, chosen): 792 | if 0 == chosen.exhausted: 793 | chosen.exhausted = time.time() 794 | self.progress("gpsfake: GPS %s ran out of input\n" 795 | % chosen.byname) 796 | else: 797 | chosen.feed() 798 | elif isinstance(chosen, gps.gps): 799 | if chosen.enqueued: 800 | chosen.send(chosen.enqueued) 801 | chosen.enqueued = "" 802 | while chosen.waiting(): 803 | if not self.daemon or not self.daemon.is_alive(): 804 | raise TestSessionError("daemon died") 805 | ret = chosen.read() 806 | if 0 > ret: 807 | raise TestSessionError("daemon output stopped") 808 | # FIXME: test for 0 == ret. 809 | had_output = True 810 | if not chosen.valid & gps.PACKET_SET: 811 | continue 812 | self.reporter(gps.polybytes(chosen.bresponse)) 813 | if (("DEVICE" == chosen.data["class"] and 814 | 0 == chosen.data["activated"] and 815 | chosen.data["path"] in self.fakegpslist)): 816 | self.gps_remove(chosen.data["path"]) 817 | self.progress( 818 | "gpsfake: GPS %s removed (notification)\n" 819 | % chosen.data["path"]) 820 | else: 821 | raise TestSessionError("test object of unknown type") 822 | if not self.writers and not had_output: 823 | self.progress("gpsfake: no writers and no output\n") 824 | break 825 | self.progress("gpsfake: test loop ends\n") 826 | finally: 827 | self.cleanup() 828 | 829 | # All knowledge about locks and threading is below this line, 830 | # except for the bare fact that self.threadlock is set to None 831 | # in the class init method. 
832 | 833 | def append(self, obj): 834 | """Add a producer or consumer to the object list.""" 835 | if self.threadlock: 836 | self.threadlock.acquire() 837 | self.runqueue.append(obj) 838 | if isinstance(obj, FakeGPS): 839 | self.writers += 1 840 | elif isinstance(obj, gps.gps): 841 | self.readers += 1 842 | if self.threadlock: 843 | self.threadlock.release() 844 | 845 | def remove(self, obj): 846 | """Remove a producer or consumer from the object list.""" 847 | if self.threadlock: 848 | self.threadlock.acquire() 849 | self.runqueue.remove(obj) 850 | if isinstance(obj, FakeGPS): 851 | self.writers -= 1 852 | elif isinstance(obj, gps.gps): 853 | self.readers -= 1 854 | self.index = min(len(self.runqueue) - 1, self.index) 855 | if self.threadlock: 856 | self.threadlock.release() 857 | 858 | def choose(self): 859 | """Atomically get the next object scheduled to do something.""" 860 | if self.threadlock: 861 | self.threadlock.acquire() 862 | chosen = self.index 863 | self.index += 1 864 | self.index %= len(self.runqueue) 865 | if self.threadlock: 866 | self.threadlock.release() 867 | return self.runqueue[chosen] 868 | 869 | def initialize(self, client, commands): 870 | """Ship specified commands to client when it goes active.""" 871 | client.enqueued = "" 872 | if not self.threadlock: 873 | client.send(commands) 874 | else: 875 | client.enqueued = commands 876 | 877 | def start(self): 878 | """Start thread.""" 879 | self.threadlock = threading.Lock() 880 | threading.Thread(target=self.run) 881 | 882 | # End 883 | # vim: set expandtab shiftwidth=4 884 | --------------------------------------------------------------------------------