├── .github
│   └── workflows
│       └── build.yaml
├── .gitignore
├── .gitmodules
├── LICENSE
├── Makefile
├── README.md
├── board-simulator
│   ├── README.md
│   ├── manifest.py
│   ├── mpconfigvariant.h
│   └── mpconfigvariant.mk
├── board
│   ├── board.json
│   ├── mpconfigboard.cmake
│   ├── mpconfigboard.h
│   └── sdkconfig.board
├── manifest-unix.py
├── manifest.py
├── modules
│   ├── logging.py
│   ├── main.py
│   └── ribbit
│       ├── __init__.py
│       ├── aggregate.py
│       ├── coap
│       │   └── __init__.py
│       ├── config.py
│       ├── config_test.py
│       ├── golioth
│       │   └── __init__.py
│       ├── heartbeat.py
│       ├── http.py
│       ├── improv
│       │   └── __init__.py
│       ├── network.py
│       ├── sensor-ui
│       │   ├── index.html
│       │   └── logo.png
│       ├── sensors
│       │   ├── __init__.py
│       │   ├── base.py
│       │   ├── battery.py
│       │   ├── board.py
│       │   ├── dps310.py
│       │   ├── gps.py
│       │   ├── gps_test.py
│       │   └── scd30.py
│       ├── time_manager.py
│       └── utils
│           ├── __init__.py
│           ├── asyncio.py
│           ├── i2c.py
│           ├── ota.py
│           └── time.py
└── tools
    ├── generate_static.py
    └── upload-to-golioth.py
/.github/workflows/build.yaml:
--------------------------------------------------------------------------------
1 | on:
2 | pull_request:
3 | push:
4 | workflow_dispatch:
5 |
6 | jobs:
7 | build:
8 | runs-on: ubuntu-latest
9 |
10 | steps:
11 | - name: Checkout repo
12 | uses: actions/checkout@v4
13 | with:
14 | fetch-depth: 0
15 |
16 | - name: Fetch submodules
17 | run: |
18 | git submodule update --init --depth=1
19 | ( cd vendor/micropython/ports/unix ; make submodules )
20 | ( cd vendor/micropython/ports/esp32 ; make submodules )
21 |
22 | - name: Test
23 | run: |
24 | make test
25 |
26 | - name: Build
27 | run: |
28 | rm -rf ./vendor/micropython/mpy-cross/build # Workaround build failure
29 | docker run -t -u "$UID:$GID" -e "HOME=/app" -v "${GITHUB_WORKSPACE}:/app" -w "/app" espressif/idf:v5.1.2 make
30 | shell: bash
31 |
32 | - name: Upload to Golioth (Main)
33 | if: github.ref == 'refs/heads/main'
34 | run: |
35 | python ./tools/upload-to-golioth.py
36 | env:
37 | GOLIOTH_PROJECT: ribbit
38 | # Beta V4 Blueprint
39 | GOLIOTH_BLUEPRINT: 65c3ebd0f4542d968bf23817
40 | GOLIOTH_API_KEY: ${{ secrets.GOLIOTH_API_KEY }}
41 | GOLIOTH_ROLLOUT: true
42 | shell: bash
43 |
44 | - name: Upload to Golioth (Release)
45 | if: startsWith(github.ref, 'refs/tags/v')
46 | run: |
47 | python ./tools/upload-to-golioth.py
48 | env:
49 | GOLIOTH_PROJECT: ribbit
50 | # Production V4 Blueprint
51 | GOLIOTH_BLUEPRINT: 638a8a406a504ec89e7b18ee
52 | GOLIOTH_API_KEY: ${{ secrets.GOLIOTH_API_KEY }}
53 | GOLIOTH_ROLLOUT: false
54 | shell: bash
55 |
56 | - name: Upload artifacts
57 | uses: actions/upload-artifact@v4
58 |
59 | with:
60 | name: firmware
61 | path: |
62 | firmware
63 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /firmware/
2 | /__version__.py
3 | __pycache__/
4 | .vscode/
5 | /modules/ribbit/_static.py
6 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "vendor/micropython"]
2 | path = vendor/micropython
3 | url = https://github.com/Ribbit-Network/micropython
4 | branch = pr/mbedtls
5 | ignore = dirty
6 | [submodule "vendor/microdot"]
7 | path = vendor/microdot
8 | url = https://github.com/miguelgrinberg/microdot.git
9 | ignore = dirty
10 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 Damien Tournoud
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | MP_DIR := ${CURDIR}/vendor/micropython
2 | UNIX_DIR := ${MP_DIR}/ports/unix
3 | PORT_DIR := ${MP_DIR}/ports/esp32
4 | BOARD := ribbit
5 | BUILD_DIR := ${PORT_DIR}/build-${BOARD}
6 |
7 | DEVICE := /dev/ttyACM*
8 |
9 | .PHONY: build
10 | build: ${MP_DIR}/mpy-cross/build/mpy-cross modules/ribbit/_static.py
11 | rm -f ${BUILD_DIR}/frozen_content.c
12 | ln -sfn ${CURDIR}/board ${PORT_DIR}/boards/ribbit
13 | make -C ${PORT_DIR} BOARD=${BOARD} FROZEN_MANIFEST=${CURDIR}/manifest.py
14 | mkdir -p ./firmware
15 | cp ${BUILD_DIR}/bootloader/bootloader.bin ${BUILD_DIR}/partition_table/partition-table.bin ${BUILD_DIR}/ota_data_initial.bin ${BUILD_DIR}/micropython.bin ./firmware
16 |
17 | # Workaround: mpy-cross fails to build with FROZEN_MANIFEST set
18 | # Remove when that is fixed in MicroPython
19 | ${MP_DIR}/mpy-cross/build/mpy-cross:
20 | make -C ${MP_DIR}/mpy-cross
21 |
22 | modules/ribbit/_static.py: modules/ribbit/sensor-ui/*
23 | python3 ./tools/generate_static.py
24 |
25 | ${UNIX_DIR}/build-standard/micropython: ${MP_DIR}/mpy-cross/build/mpy-cross
26 | make -C ${MP_DIR}/ports/unix -j FROZEN_MANIFEST=${CURDIR}/manifest-unix.py
27 |
28 | .PHONY: test
29 | test: ${UNIX_DIR}/build-standard/micropython
30 | cd modules ; ${UNIX_DIR}/build-standard/micropython -m unittest discover -p "*_test.py"
31 |
32 | ${UNIX_DIR}/build-simulator/micropython:
33 | rm -rf ${UNIX_DIR}/variants/simulator
34 | cp -rp ${CURDIR}/board-simulator ${UNIX_DIR}/variants/simulator
35 | make -C ${MP_DIR}/ports/unix -j VARIANT=simulator
36 |
37 | .PHONY: simulator
38 | simulator: ${UNIX_DIR}/build-simulator/micropython modules/ribbit/_static.py
39 | cd modules ; ${UNIX_DIR}/build-simulator/micropython -m main
40 |
41 | .PHONY: flash
42 | flash: build
43 | esptool.py -p ${DEVICE} -b 460800 --before default_reset --after no_reset \
44 | --chip esp32s3 \
45 | write_flash --flash_mode dio --flash_size detect --flash_freq 80m \
46 | 0x0 firmware/bootloader.bin \
47 | 0x8000 firmware/partition-table.bin \
48 | 0xd000 firmware/ota_data_initial.bin \
49 | 0x10000 firmware/micropython.bin
50 |
51 | .PHONY: clean
52 | clean:
53 | rm -rf ${BUILD_DIR} ${UNIX_DIR}/build-standard ${UNIX_DIR}/build-simulator modules/ribbit/_static.py
54 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Ribbit Frog Software
2 |
3 | Ribbit Network is a large network of open-source, low-cost Greenhouse Gas Detection Sensors (measuring CO2 today, and hopefully other gases in the future). These sensor units will be sold by the Ribbit Network and will upload their data to the cloud, creating the world's most complete Greenhouse Gas dataset.
4 |
5 | This repository contains the software for the Frog Sensor.
6 |
7 | ## Current Software
8 |
9 | The current Ribbit Network Frog software is being developed for the [Frog Sensor Version 4](https://github.com/Ribbit-Network/ribbit-network-frog-hardware).
10 |
11 | ## Getting Started / Dependencies
12 |
13 | To get started, you'll need to install a few dependencies first.
14 |
15 | ### ESP IDF
16 |
17 | We currently build and test with ESP-IDF v5.1.2.
18 |
19 | ```shell
20 | mkdir -p ~/esp
21 | cd ~/esp
22 | git clone --recursive https://github.com/espressif/esp-idf.git
23 | cd esp-idf
24 | git checkout v5.1.2
25 | git submodule update --init --recursive
26 | ./install.sh all
27 | ```
28 |
29 | ## Building the Software
30 |
31 | Fetch the submodules:
32 |
33 | ```shell
34 | $ git submodule update --jobs 32 --init --recursive
35 | ```
36 |
37 | Set up ESP-IDF:
38 |
39 | ```shell
40 | $ source ~/esp/esp-idf/export.sh
41 | ```
42 |
43 | Run tests:
44 |
45 | ```shell
46 | $ make test
47 | ```
48 |
49 | Build the firmware:
50 |
51 | ```shell
52 | $ make build
53 | ```
54 |
55 | ## Flash the ESP32
56 |
57 | Connect the ESP32 board to your computer using a USB-C cable plugged into the USB-C port on the board.
58 | 
59 | Press and hold the "boot" button, then press the "reset" button and release both at the same time (shown below). This puts the ESP32 into flashing mode.
60 |
61 | 
62 |
63 | The device should now appear on your machine as a serial port like `/dev/ttyACM0`.
64 |
65 | Now the command below can be run to flash the software to the device.
66 |
67 | ```shell
68 | $ make DEVICE=/dev/ttyACM0 flash
69 | ```
70 |
71 | ## Need Help?
72 |
73 | [If you are not sure where to start or just want to chat, join our developer Discord here](https://discord.gg/vq8PkDb2TC). You can also [start a discussion](https://github.com/Ribbit-Network/ribbit-network-frog-sensor/discussions) right here on GitHub.
74 |
75 | ## View the Data!
76 |
77 | The first prototype sensors are up and running! [Here is some real data from our sensor network!](https://dashboard.ribbitnetwork.org/) (Note: this dashboard is still experimental and may be down occasionally.)
78 |
79 | [See more about the cloud database here.](https://github.com/Ribbit-Network/ribbit-network-dashboard)
80 |
81 | ## Questions?
82 |
83 | [Check out the Frequently Asked Questions section.](https://github.com/Ribbit-Network/ribbit-network-faq) If you don't see your question, let us know either in a Github Discussion or via Discord.
84 |
85 | ## Get Involved
86 |
87 | Are you interested in getting more involved or leading an effort in the Ribbit Network project? We are recruiting additional members to join the Core Team. [See the Open Roles and descriptions here](https://ribbitnetwork.notion.site/Core-Team-Role-Postings-105df298e0634f179f8f063c01708069).
88 |
89 | ## Contributing
90 |
91 | See the [Issues](https://github.com/keenanjohnson/ghg-gas-cloud/issues) section of this project for the work that I've currently scoped out to be done. Reach out to me if you are interested in helping out! The [projects section](https://github.com/Ribbit-Network/ribbit-network-frog-sensor/projects) helps detail the major efforts going on right now.
92 |
93 | We have a [contributing guide](https://github.com/Ribbit-Network/ribbit-network-frog-sensor/blob/main/CONTRIBUTING.md) that details the process for making a contribution.
94 |
95 | [If you are not sure where to start or just want to chat, join our developer Slack here](https://join.slack.com/t/ribbitnetworkgroup/shared_invite/zt-2vxvbo7ld-S36SgfDiev~ZQ2zvp03FOg). You can also [start a discussion](https://github.com/Ribbit-Network/ribbit-network-frog-software/discussions) right here on GitHub.
96 |
97 | ## Background Information
98 |
99 | [See the Wiki for background research.](https://ribbitnetwork.notion.site/Learnings-Low-cost-sensors-for-the-measurement-of-atmospheric-composition-e3d41736c49e41ad81dcdf7e16a6573b) This project is inspired by some awesome research by incredible scientists in academia.
100 |
101 | ## Ribbit Network
102 |
103 | Ribbit Network is a non-profit (501c3) creating the world's largest Greenhouse Gas Emissions dataset that will empower anyone to join in the work on climate and provide informed data for climate action. We're an all volunteer team building everything we do in the open-source community.
104 |
105 | If you would like to sponsor Ribbit Network, you can do so [via this link](https://givebutter.com/ribbitnetwork). The money is used to pay for software fees, purchase R&D hardware, and generally support the mission of Ribbit Network.
106 |
107 | ## Ribbit Network Code of Conduct
108 | By participating in this project, you agree to follow the Ribbit Network Code of Conduct and Anti-Harassment Policy.
109 | Violations can be reported anonymously by filling out this form.
110 |
--------------------------------------------------------------------------------
/board-simulator/README.md:
--------------------------------------------------------------------------------
1 | This is a variant of the "standard" MicroPython unix port, with one
2 | key difference: it enables the `MICROPY_PY_USELECT` option to use
3 | the same polling mechanism as the ESP32 port, instead of the
4 | system file-descriptor-based polling, which does not yet support
5 | SSL sockets properly.
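6 | 
7 | For reference, the simulator is normally built and run through the
8 | `simulator` target of the top-level Makefile, which copies this directory
9 | into the unix port as the `simulator` variant and then starts
10 | `modules/main.py`:
11 | 
12 | ```shell
13 | # From the repository root
14 | make simulator
15 | ```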
--------------------------------------------------------------------------------
/board-simulator/manifest.py:
--------------------------------------------------------------------------------
1 | include("$(PORT_DIR)/variants/manifest.py")
2 |
3 | include("$(MPY_DIR)/extmod/asyncio")
4 |
--------------------------------------------------------------------------------
/board-simulator/mpconfigvariant.h:
--------------------------------------------------------------------------------
1 | /*
2 | * This file is part of the MicroPython project, http://micropython.org/
3 | *
4 | * The MIT License (MIT)
5 | *
6 | * Copyright (c) 2019 Damien P. George
7 | *
8 | * Permission is hereby granted, free of charge, to any person obtaining a copy
9 | * of this software and associated documentation files (the "Software"), to deal
10 | * in the Software without restriction, including without limitation the rights
11 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | * copies of the Software, and to permit persons to whom the Software is
13 | * furnished to do so, subject to the following conditions:
14 | *
15 | * The above copyright notice and this permission notice shall be included in
16 | * all copies or substantial portions of the Software.
17 | *
18 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
21 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
24 | * THE SOFTWARE.
25 | */
26 |
27 | // Set base feature level.
28 | #define MICROPY_CONFIG_ROM_LEVEL (MICROPY_CONFIG_ROM_LEVEL_EXTRA_FEATURES)
29 |
30 | #define MICROPY_PY_USELECT (1)
31 |
32 | // Enable extra Unix features.
33 | #include "../mpconfigvariant_common.h"
34 |
--------------------------------------------------------------------------------
/board-simulator/mpconfigvariant.mk:
--------------------------------------------------------------------------------
1 | # This is the default variant when you `make` the Unix port.
2 |
3 | FROZEN_MANIFEST ?= $(VARIANT_DIR)/manifest.py
4 |
--------------------------------------------------------------------------------
/board/board.json:
--------------------------------------------------------------------------------
1 | {
2 | "deploy": [
3 | "../deploy_s3.md"
4 | ],
5 | "docs": "",
6 | "features": [
7 | "BLE",
8 | "WiFi"
9 | ],
10 | "images": [
11 | "generic_s3.jpg"
12 | ],
13 | "mcu": "esp32s3",
14 | "product": "Ribbit sensor v4",
15 | "thumbnail": "",
16 | "url": "https://www.ribbitnetwork.org/",
17 | "vendor": "Ribbit Network"
18 | }
19 |
--------------------------------------------------------------------------------
/board/mpconfigboard.cmake:
--------------------------------------------------------------------------------
1 | set(IDF_TARGET esp32s3)
2 |
3 | set(SDKCONFIG_DEFAULTS
4 | boards/sdkconfig.base
5 | boards/sdkconfig.usb
6 | boards/sdkconfig.240mhz
7 | boards/sdkconfig.spiram_sx
8 | boards/ribbit/sdkconfig.board
9 | )
10 |
--------------------------------------------------------------------------------
/board/mpconfigboard.h:
--------------------------------------------------------------------------------
1 | #define MICROPY_HW_BOARD_NAME "Ribbit Frog Sensor v4"
2 | #define MICROPY_HW_MCU_NAME "ESP32-S3"
3 |
4 | #define MICROPY_PY_BLUETOOTH (0)
5 | #define MICROPY_PY_MACHINE_DAC (0)
6 |
7 | // Enable UART REPL for modules that have an external USB-UART and don't use native USB.
8 | #define MICROPY_HW_ENABLE_UART_REPL (1)
9 |
10 | #define MICROPY_HW_I2C0_SCL (4)
11 | #define MICROPY_HW_I2C0_SDA (3)
12 |
13 | #define MICROPY_ENABLE_COMPILER (1)
14 |
15 | #define MICROPY_PY_ESPNOW (0)
16 | #define MICROPY_PY_MACHINE_I2S (0)
17 | #define MICROPY_HW_ENABLE_SDCARD (0)
18 |
--------------------------------------------------------------------------------
/board/sdkconfig.board:
--------------------------------------------------------------------------------
1 | CONFIG_FLASHMODE_QIO=y
2 | CONFIG_ESPTOOLPY_FLASHFREQ_80M=y
3 | CONFIG_ESPTOOLPY_FLASHSIZE_DETECT=y
4 | CONFIG_ESPTOOLPY_AFTER_NORESET=y
5 |
6 | CONFIG_SPIRAM_MEMTEST=
7 |
8 | CONFIG_ESPTOOLPY_FLASHSIZE_4MB=y
9 | CONFIG_ESPTOOLPY_FLASHSIZE_8MB=
10 | CONFIG_ESPTOOLPY_FLASHSIZE_16MB=
11 | CONFIG_BOOTLOADER_APP_ROLLBACK_ENABLE=y
12 | CONFIG_PARTITION_TABLE_CUSTOM=y
13 | CONFIG_PARTITION_TABLE_CUSTOM_FILENAME="partitions-4MiB-ota.csv"
14 |
15 | CONFIG_TINYUSB_DESC_MANUFACTURER_STRING="Ribbit Network"
16 | CONFIG_TINYUSB_DESC_PRODUCT_STRING="Ribbit Frog Sensor v4"
17 | CONFIG_TINYUSB_DESC_SERIAL_STRING="ribbit"
18 |
--------------------------------------------------------------------------------
/manifest-unix.py:
--------------------------------------------------------------------------------
1 | include("$(MPY_DIR)/extmod/asyncio")
2 | require("unittest-discover")
3 |
--------------------------------------------------------------------------------
/manifest.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import subprocess
3 | import os
4 |
5 | include("$(MPY_DIR)/extmod/asyncio")
6 | freeze("$(PORT_DIR)/modules")
7 | require("neopixel")
8 | require("ntptime")
9 | freeze("modules")
10 | module("microdot.py", "vendor/microdot/src")
11 | module("microdot_asyncio.py", "vendor/microdot/src")
12 | module("microdot_websocket.py", "vendor/microdot/src")
13 |
14 | version = subprocess.check_output(
15 | [
16 | "git",
17 | "describe",
18 | "--tags", # Necessary because `actions/checkout@v3` doesn't keep the annotated tags for some reason https://github.com/actions/checkout/issues/290
19 | ],
20 | encoding="utf-8",
21 | )
22 | commit_id = subprocess.check_output(
23 | ["git", "rev-parse", "HEAD"],
24 | encoding="utf-8",
25 | )
26 |
27 | if "SOURCE_DATE_EPOCH" in os.environ:
28 | now = datetime.datetime.utcfromtimestamp(float(os.environ["SOURCE_DATE_EPOCH"]))
29 | else:
30 | now = datetime.datetime.utcnow()
31 |
32 | with open("__version__.py", "w", encoding="utf-8") as f:
33 | f.write("version = %r\n" % version.strip())
34 | f.write("commit_id = %r\n" % commit_id.strip())
35 | f.write("build_date = %r\n" % now.isoformat())
36 | f.write("build_year = %d\n" % now.year)
37 |
38 | os.utime("__version__.py", (now.timestamp(), now.timestamp()))
39 |
40 | module("__version__.py")
41 |
--------------------------------------------------------------------------------
/modules/logging.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import time
3 |
4 | from ribbit.utils.time import isotime as _isotime
5 |
6 |
7 | CRITICAL = 50
8 | ERROR = 40
9 | WARNING = 30
10 | INFO = 20
11 | DEBUG = 10
12 |
13 | _level_dict = {
14 | CRITICAL: "CRIT",
15 | ERROR: "ERROR",
16 | WARNING: "WARN",
17 | INFO: "INFO",
18 | DEBUG: "DEBUG",
19 | }
20 |
21 | _stream = sys.stderr
22 |
23 |
24 | class LogRecord:
25 | def __init__(self):
26 | self.__dict__ = {}
27 |
28 | def __getattr__(self, key):
29 | return self.__dict__[key]
30 |
31 |
32 | class Logger:
33 | def __init__(self, name):
34 | self.name = name
35 | self.setLevel(INFO)
36 |
37 | def setLevel(self, level):
38 | self.level = level
39 | self._level_str = _level_dict[level]
40 |
41 | def isEnabledFor(self, level):
42 | return level >= self.level
43 |
44 | def log(self, level, msg, *args):
45 | if self.isEnabledFor(level):
46 | if args:
47 | msg = msg % args
48 | print(
49 | _isotime(time.time()),
50 | ":",
51 | self._level_str,
52 | ":",
53 | self.name,
54 | ":",
55 | msg,
56 | sep="",
57 | file=_stream,
58 | )
59 |
60 | def debug(self, msg, *args):
61 | self.log(DEBUG, msg, *args)
62 |
63 | def info(self, msg, *args):
64 | self.log(INFO, msg, *args)
65 |
66 | def warning(self, msg, *args):
67 | self.log(WARNING, msg, *args)
68 |
69 | def error(self, msg, *args):
70 | self.log(ERROR, msg, *args)
71 |
72 | def critical(self, msg, *args):
73 | self.log(CRITICAL, msg, *args)
74 |
75 | def exc(self, e, msg, *args):
76 | self.log(ERROR, msg, *args)
77 | sys.print_exception(e, _stream)
78 |
79 |
80 | _loggers: dict[str, Logger] = {}
81 |
82 |
83 | def getLogger(name: str = "root") -> Logger:
84 | if name in _loggers:
85 | return _loggers[name]
86 | l = Logger(name)
87 | _loggers[name] = l
88 | return l
89 |
--------------------------------------------------------------------------------
/modules/main.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 |
4 | def _setup_improv(registry):
5 | import binascii
6 |
7 | import machine
8 | import network
9 | import ribbit.config as _config
10 | import ribbit.improv as _improv
11 | import ribbit.network as _network
12 |
13 | async def _improv_set_wifi_settings(ssid, password):
14 | registry.config.set(
15 | {
16 | _network.CONFIG_WIFI_SSID: ssid,
17 | _network.CONFIG_WIFI_PASSWORD: password,
18 | },
19 | )
20 |
21 | await asyncio.sleep(15)
22 |
23 | async def _improv_current_state():
24 | network_state = registry.network.state.value
25 | if network_state.state == network.STAT_GOT_IP:
26 | return _improv.STATE_PROVISIONED, "http://%s/" % (network_state.ip)
27 |
28 | _, ssid, _ = registry.config.get(_network.CONFIG_WIFI_SSID)
29 | _, password, _ = registry.config.get(_network.CONFIG_WIFI_PASSWORD)
30 |
31 | if ssid is not None and password is not None:
32 | return _improv.STATE_PROVISIONING, ""
33 |
34 | return _improv.STATE_READY, ""
35 |
36 | _improv.ImprovHandler(
37 | product_name="Ribbit Frog Sensor",
38 | product_version="4.0",
39 | hardware_name="ESP32-S3",
40 | device_name=binascii.hexlify(machine.unique_id()),
41 | scan_wifi_cb=registry.network.scan,
42 | set_wifi_settings_cb=_improv_set_wifi_settings,
43 | current_state_cb=_improv_current_state,
44 | )
45 |
46 |
47 | async def _main():
48 | global registry
49 |
50 | import sys
51 | import os
52 | import json
53 | import machine
54 | import logging
55 |
56 | in_simulator = sys.platform == "linux"
57 |
58 | if in_simulator:
59 | sys.path.append(os.getcwd() + "/../vendor/microdot/src")
60 | sys.path.append(os.getcwd() + "/..")
61 |
62 | import ribbit.aggregate as _aggregate
63 | import ribbit.config as _config
64 | import ribbit.golioth as _golioth
65 | import ribbit.coap as _coap
66 | import ribbit.http as _http
67 | import ribbit.heartbeat as _heartbeat
68 |
69 | if not in_simulator:
70 | import ribbit.network as _network
71 | import ribbit.sensors.dps310 as _dps310
72 | import ribbit.sensors.battery as _battery
73 | import ribbit.sensors.board as _board
74 | import ribbit.sensors.gps as _gps
75 | import ribbit.sensors.scd30 as _scd30
76 | import ribbit.time_manager as _time
77 | import ribbit.utils.i2c as _i2c
78 | import ribbit.utils.ota as _ota
79 |
80 | class Registry:
81 | pass
82 |
83 | registry = Registry()
84 | registry.in_simulator = in_simulator
85 |
86 | _aggregate.SensorAggregator(registry)
87 | _heartbeat.Heartbeat(in_simulator)
88 |
89 | config_schema = []
90 | if not in_simulator:
91 | config_schema.extend(_network.CONFIG_KEYS)
92 | config_schema.extend(_golioth.CONFIG_KEYS)
93 |
94 | sensor_types = {
95 | "gps": _gps.GPS,
96 | "dps310": _dps310.DPS310,
97 | "scd30": _scd30.SCD30,
98 | "battery": _battery.Battery,
99 | "board": _board.Board,
100 | "memory": _board.Memory,
101 | }
102 |
103 | default_sensors = [
104 | {
105 | "type": "battery",
106 | "id": "battery",
107 | },
108 | {
109 | "type": "board",
110 | "id": "board",
111 | },
112 | {
113 | "type": "memory",
114 | "id": "memory",
115 | },
116 | ]
117 |
118 | if not in_simulator:
119 | default_sensors.extend(
120 | [
121 | {
122 | "type": "gps",
123 | "id": "gps",
124 | "address": _gps.DEFAULT_ADDR,
125 | },
126 | {
127 | "type": "dps310",
128 | "id": "dps310",
129 | "address": _dps310.DEFAULT_ADDR,
130 | },
131 | {
132 | "type": "scd30",
133 | "id": "scd30",
134 | "address": _scd30.DEFAULT_ADDR,
135 | },
136 | ]
137 | )
138 |
139 | config_schema.append(
140 | _config.Array(
141 | name="sensors",
142 | item=_config.TypedObject(
143 | type_key="type",
144 | types={cls.config for cls in sensor_types.values()},
145 | ),
146 | default=default_sensors,
147 | ),
148 | )
149 |
150 | registry.config = _config.ConfigRegistry(config_schema, in_simulator=in_simulator)
151 |
152 | if not in_simulator:
153 | registry.network = _network.NetworkManager(registry.config)
154 |
155 | registry.time_manager = _time.TimeManager(registry)
156 |
157 | registry.ota_manager = _ota.OTAManager(in_simulator=in_simulator)
158 |
159 | registry.golioth = _golioth.Golioth(
160 | registry.config,
161 | ota_manager=registry.ota_manager,
162 | in_simulator=in_simulator,
163 | )
164 |
165 | registry.sensors = {}
166 |
167 | class Output:
168 | def __init__(self):
169 | self._logger = logging.getLogger("output")
170 |
171 | async def write(self, data):
172 | coap = registry.golioth._coap
173 | if coap is None or not coap.connected:
174 | return
175 |
176 | if isinstance(data, dict):
177 | data = [data]
178 |
179 | for item in data:
180 | try:
181 | typ = item.pop("@type")
182 | data = json.dumps(item)
183 | except Exception:
184 | pass
185 |
186 | registry.sensors_output = Output()
187 |
188 | if not in_simulator:
189 | registry.i2c_bus = _i2c.LockableI2CBus(
190 | 0, scl=machine.Pin(4), sda=machine.Pin(3), freq=50000
191 | )
192 |
193 | # Turn on the I2C power:
194 | machine.Pin(7, mode=machine.Pin.OUT, value=1, hold=True)
195 |
196 | _, sensors, _ = registry.config.get("sensors")
197 |
198 | for sensor in sensors:
199 | sensor = sensor.copy()
200 | sensor_type = sensor.pop("type")
201 | registry.sensors[sensor_type] = sensor_types[sensor_type](
202 | registry,
203 | **sensor,
204 | )
205 |
206 | for sensor in registry.sensors.values():
207 | asyncio.create_task(sensor.loop())
208 |
209 | if not in_simulator:
210 | _setup_improv(registry)
211 |
212 | registry.ota_manager.successful_boot()
213 |
214 | app = _http.build_app(registry)
215 | asyncio.create_task(
216 | app.start_server(
217 | port=80 if not in_simulator else 8082,
218 | )
219 | )
220 |
221 |
222 | if __name__ == "__main__":
223 | asyncio.create_task(_main())
224 | asyncio.get_event_loop().run_forever()
225 |
--------------------------------------------------------------------------------
/modules/ribbit/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ribbit-Network/ribbit-network-frog-software/63bb0857405d0a6702b407708456b434818a4b4e/modules/ribbit/__init__.py
--------------------------------------------------------------------------------
/modules/ribbit/aggregate.py:
--------------------------------------------------------------------------------
1 | from ribbit.utils.time import isotime
2 | import time
3 | import logging
4 | import uasyncio as asyncio
5 | import collections
6 | import json
7 |
8 | import ribbit.coap as _coap
9 |
10 |
11 | class SensorAggregator:
12 | def __init__(self, registry):
13 | self._logger = logging.getLogger(__name__)
14 | self._registry = registry
15 |
16 | asyncio.create_task(self._loop())
17 |
18 | async def _loop(self):
19 | while True:
20 | # Send a data point every 5 seconds
21 | await asyncio.sleep_ms(5000)
22 |
23 | ret = collections.OrderedDict()
24 | for sensor_id, sensor in self._registry.sensors.items():
25 | ret[sensor_id] = sensor.export()
26 |
27 | ret["time_manager"] = self._registry.time_manager.export()
28 |
29 | self._logger.info("Aggregated Data: %s", json.dumps(ret))
30 | try:
31 | coap = self._registry.golioth._coap
32 | await coap.post(
33 | ".s/" + "ribbitnetwork.datapoint",
34 | json.dumps(ret),
35 | format=_coap.CONTENT_FORMAT_APPLICATION_JSON,
36 | )
37 | except Exception:
38 | pass
--------------------------------------------------------------------------------
/modules/ribbit/coap/__init__.py:
--------------------------------------------------------------------------------
1 | import binascii
2 | import logging
3 | import socket
4 | import random
5 | import os
6 | import ssl
7 | import asyncio
8 | import asyncio.core as _asyncio_core
9 |
10 | from micropython import const
11 |
12 |
13 | _HEADER_SIZE = const(4)
14 | _OPTION_HEADER_SIZE = const(1)
15 | _PAYLOAD_MARKER = const(0xFF)
16 | _MAX_OPTION_NUM = const(10)
17 | _BUF_MAX_SIZE = const(1500)
18 | _DEFAULT_PORT = const(5683)
19 |
20 | _DEBUG = const(0)
21 |
22 | VERSION_UNSUPPORTED = const(0)
23 | VERSION_1 = const(1)
24 |
25 | TYPE_CON = const(0)
26 | TYPE_NONCON = const(1)
27 | TYPE_ACK = const(2)
28 | TYPE_RESET = const(3)
29 |
30 | METHOD_EMPTY_MESSAGE = const(0)
31 | METHOD_GET = const(1)
32 | METHOD_POST = const(2)
33 | METHOD_PUT = const(3)
34 | METHOD_DELETE = const(4)
35 |
36 | RESPONSE_CODE_CREATED = const(0x41)
37 | RESPONSE_CODE_DELETED = const(0x42)
38 | RESPONSE_CODE_VALID = const(0x43)
39 | RESPONSE_CODE_CHANGED = const(0x44)
40 | RESPONSE_CODE_CONTENT = const(0x45)
41 | RESPONSE_CODE_BAD_REQUEST = const(0x80)
42 | RESPONSE_CODE_UNAUTHORIZED = const(0x81)
43 | RESPONSE_CODE_BAD_OPTION = const(0x82)
44 | RESPONSE_CODE_FORBIDDEN = const(0x83)
45 | RESPONSE_CODE_NOT_FOUND = const(0x84)
46 | RESPONSE_CODE_METHOD_NOT_ALLOWD = const(0x85)
47 | RESPONSE_CODE_NOT_ACCEPTABLE = const(0x86)
48 | RESPONSE_CODE_PRECONDITION_FAILED = const(0x8C)
49 | RESPONSE_CODE_REQUEST_ENTITY_TOO_LARGE = const(0x8D)
50 | RESPONSE_CODE_UNSUPPORTED_CONTENT_FORMAT = const(0x8F)
51 | RESPONSE_CODE_INTERNAL_SERVER_ERROR = const(0xA0)
52 | RESPONSE_CODE_NOT_IMPLEMENTED = const(0xA1)
53 | RESPONSE_CODE_BAD_GATEWAY = const(0xA2)
54 | RESPONSE_CODE_SERVICE_UNAVALIABLE = const(0xA3)
55 | RESPONSE_CODE_GATEWAY_TIMEOUT = const(0xA4)
56 | RESPONSE_CODE_PROXYING_NOT_SUPPORTED = const(0xA5)
57 |
58 | OPTION_IF_MATCH = const(1)
59 | OPTION_URI_HOST = const(3)
60 | OPTION_E_TAG = const(4)
61 | OPTION_IF_NONE_MATCH = const(5)
62 | OPTION_OBSERVE = const(6)
63 | OPTION_URI_PORT = const(7)
64 | OPTION_LOCATION_PATH = const(8)
65 | OPTION_URI_PATH = const(11)
66 | OPTION_CONTENT_FORMAT = const(12)
67 | OPTION_MAX_AGE = const(14)
68 | OPTION_URI_QUERY = const(15)
69 | OPTION_ACCEPT = const(17)
70 | OPTION_LOCATION_QUERY = const(20)
71 | OPTION_BLOCK2 = const(23)
72 | OPTION_BLOCK1 = const(27)
73 | OPTION_PROXY_URI = const(35)
74 | OPTION_PROXY_SCHEME = const(39)
75 |
76 | CONTENT_FORMAT_NONE = const(-1)
77 | CONTENT_FORMAT_TEXT_PLAIN = const(0x00)
78 | CONTENT_FORMAT_APPLICATION_LINK_FORMAT = const(0x28)
79 | CONTENT_FORMAT_APPLICATION_XML = const(0x29)
80 | CONTENT_FORMAT_APPLICATION_OCTET_STREAM = const(0x2A)
81 | CONTENT_FORMAT_APPLICATION_EXI = const(0x2F)
82 | CONTENT_FORMAT_APPLICATION_JSON = const(0x32)
83 | CONTENT_FORMAT_APPLICATION_CBOR = const(0x3C)
84 |
85 |
86 | class COAPException(Exception):
87 | pass
88 |
89 |
90 | class COAPDisconnectedError(COAPException):
91 | pass
92 |
93 |
94 | class COAPRequestTimeoutError(COAPException):
95 | pass
96 |
97 |
98 | class COAPInvalidPacketError(COAPException):
99 | pass
100 |
101 |
102 | class _Semaphore(asyncio.Lock):
103 | def __init__(self, value=1):
104 | super().__init__()
105 | self._value = value
106 |
107 | async def acquire(self):
108 | if self._value > 0:
109 | self._value -= 1
110 | if self._value == 0:
111 | await super().acquire()
112 |
113 | async def release(self):
114 | self._value += 1
115 | if self._value == 1:
116 | await super().release()
117 |
118 |
119 | class _WaitGroup:
120 | def __init__(self):
121 | self._in_flight = 0
122 | self._done_ev = asyncio.Event()
123 |
124 | def _done(self, tsk, err):
125 | self._in_flight -= 1
126 | if self._in_flight == 0:
127 | self._done_ev.set()
128 |
129 | def create_task(self, tsk):
130 | self._in_flight += 1
131 | tsk = asyncio.create_task(tsk)
132 | tsk.state = self._done
133 |
134 | async def wait(self):
135 | if self._in_flight == 0:
136 | return
137 |
138 | self._done_ev = ev = asyncio.Event()
139 | return await ev.wait()
140 |
141 |
142 | class _DTLSocket:
143 | def __init__(self, s):
144 | self.s = s
145 |
146 | def close(self):
147 | self.s.close()
148 |
149 | def read(self, n=-1):
150 | while True:
151 | yield _asyncio_core._io_queue.queue_read(self.s)
152 | r = self.s.recv(n)
153 | if r is not None:
154 | return r
155 |
156 | def readinto(self, buf):
157 | yield _asyncio_core._io_queue.queue_read(self.s)
158 | return self.s.recv_into(buf)
159 |
160 | def write(self, buf):
161 | while True:
162 | yield _asyncio_core._io_queue.queue_write(self.s)
163 | r = self.s.send(buf)
164 | if r is not None:
165 | return r
166 |
167 |
168 | class CoapOption:
169 | def __init__(self, number=-1, buffer=None):
170 | self.number = number
171 | byteBuf = bytearray()
172 | if buffer is not None:
173 | byteBuf.extend(buffer)
174 | self.buffer = byteBuf
175 |
176 | def __str__(self):
177 |         return "<CoapOption number=%d len=%d value=%s>" % (
178 | self.number,
179 | len(self.buffer),
180 | bytes(self.buffer),
181 | )
182 |
183 |
184 | class CoapPacket:
185 | def __init__(self):
186 | self.version = VERSION_UNSUPPORTED
187 | self.type = TYPE_CON # uint8_t
188 | self.method = METHOD_GET # uint8_t
189 | self.token = None
190 | self.payload = bytearray()
191 | self.message_id = 0
192 | self.content_format = CONTENT_FORMAT_NONE
193 | self.query = bytearray() # uint8_t*
194 | self.options = []
195 |
196 |     def __str__(self):
197 |         return (
198 |             "<CoapPacket type=%d message_id=%d token=%s method=%d.%02d payload_len=%d options=[%s]>"
199 |             % (
200 |                 self.type,
201 |                 self.message_id,
202 |                 self.token,
203 |                 self.method >> 5,
204 |                 self.method & 0x1F,
205 |                 len(self.payload) if self.payload is not None else 0,
206 |                 ", ".join(str(option) for option in self.options),
207 |             )
208 |         )
209 |
210 | def add_option(self, number, opt_payload):
211 | if len(self.options) >= _MAX_OPTION_NUM:
212 | raise ValueError("too many options")
213 |
214 | if self.options and self.options[0].number > number:
215 | raise ValueError("options must be sorted")
216 |
217 | self.options.append(CoapOption(number, opt_payload))
218 |
219 | def set_uri_host(self, address):
220 | self.add_option(OPTION_URI_HOST, address)
221 |
222 | def set_uri_path(self, url):
223 | for subPath in url.split("/"):
224 | self.add_option(OPTION_URI_PATH, subPath)
225 |
226 |
227 | def _parse_option(packet, runningDelta, buffer):
228 | option = CoapOption()
229 |
230 | delta = (buffer[0] & 0xF0) >> 4
231 | length = buffer[0] & 0x0F
232 | buffer = buffer[1:]
233 |
234 | if delta == 15 or length == 15:
235 | raise COAPInvalidPacketError()
236 |
237 | if delta == 13:
238 | if not buffer:
239 | raise COAPInvalidPacketError()
240 | delta = buffer[0] + 13
241 | buffer = buffer[1:]
242 | elif delta == 14:
243 | if len(buffer) < 2:
244 | raise COAPInvalidPacketError()
245 | delta = ((buffer[0] << 8) | buffer[1]) + 269
246 | buffer = buffer[2:]
247 |
248 | option.number = delta + runningDelta
249 |
250 | if length == 13:
251 | if not buffer:
252 | raise COAPInvalidPacketError()
253 | length = buffer[0] + 13
254 | buffer = buffer[1:]
255 | elif length == 14:
256 | if len(buffer) < 2:
257 | raise COAPInvalidPacketError()
258 | length = ((buffer[0] << 8) | buffer[1]) + 269
259 | buffer = buffer[2:]
260 |
261 | if len(buffer) < length:
262 | raise COAPInvalidPacketError()
263 |
264 | option.buffer = buffer[:length]
265 | buffer = buffer[length:]
266 | packet.options.append(option)
267 |
268 | return runningDelta + delta, buffer
269 |
270 |
271 | def _parse_packet(buffer, packet):
272 | packet.version = (buffer[0] & 0xC0) >> 6
273 | if packet.version != VERSION_1:
274 | raise ValueError("invalid version")
275 | packet.type = (buffer[0] & 0x30) >> 4
276 | packet.method = buffer[1]
277 | packet.message_id = 0xFF00 & (buffer[2] << 8)
278 | packet.message_id |= 0x00FF & buffer[3]
279 |
280 | token_len = buffer[0] & 0x0F
281 | if token_len == 0:
282 | packet.token = None
283 | elif token_len == 4:
284 | packet.token = (
285 | (buffer[4] << 24) | (buffer[5] << 16) | (buffer[6] << 8) | buffer[7]
286 | )
287 | else:
288 | raise COAPInvalidPacketError()
289 |
290 | buffer = buffer[4 + token_len :]
291 |
292 | if buffer:
293 | delta = 0
294 | while buffer and buffer[0] != 0xFF:
295 | delta, buffer = _parse_option(packet, delta, buffer)
296 |
297 | if buffer and buffer[0] == 0xFF:
298 | packet.payload = buffer[1:]
299 | else:
300 | packet.payload = None
301 |
302 | return True
303 |
304 |
305 | def _write_packet_header_info(buffer, packet):
306 | # make coap packet base header
307 | buffer.append(VERSION_1 << 6)
308 | buffer[0] |= (packet.type & 0x03) << 4
309 | # max: 8 bytes of tokens, if token length is greater, it is ignored
310 | token_len = 0
311 | if packet.token is not None:
312 | token_len = 4
313 |
314 | buffer[0] |= token_len & 0x0F
315 | buffer.append(packet.method)
316 | buffer.append(packet.message_id >> 8)
317 | buffer.append(packet.message_id & 0xFF)
318 |
319 | if packet.token is not None:
320 | buffer.append((packet.token >> 24) & 0xFF)
321 | buffer.append((packet.token >> 16) & 0xFF)
322 | buffer.append((packet.token >> 8) & 0xFF)
323 | buffer.append(packet.token & 0xFF)
324 |
325 |
326 | def _coap_option_delta(v):
327 | if v < 13:
328 | return 0xFF & v
329 | if v <= 0xFF + 13:
330 | return 13
331 | return 14
332 |
333 |
334 | def _write_packet_options(buffer, packet):
335 | running_delta = 0
336 | # make option header
337 | for opt in packet.options:
338 | buffer_len = len(opt.buffer)
339 |
340 | if len(buffer) + 5 + buffer_len >= _BUF_MAX_SIZE:
341 | raise ValueError("option buffer too big")
342 |
343 | delta = opt.number - running_delta
344 | delta_encoded = _coap_option_delta(delta)
345 | buffer_len_encoded = _coap_option_delta(buffer_len)
346 |
347 | buffer.append(0xFF & ((delta_encoded << 4) | buffer_len_encoded))
348 | if delta_encoded == 13:
349 | buffer.append(delta - 13)
350 | elif delta_encoded == 14:
351 | buffer.append((delta - 269) >> 8)
352 | buffer.append(0xFF & (delta - 269))
353 |
354 | if buffer_len_encoded == 13:
355 | buffer.append(buffer_len - 13)
356 | elif buffer_len_encoded == 14:
357 | buffer.append(buffer_len >> 8)
358 | buffer.append(0xFF & (buffer_len - 269))
359 |
360 | buffer.extend(opt.buffer)
361 | running_delta = opt.number
362 |
363 |
364 | def _write_packet_payload(buffer, packet):
365 | # make payload
366 | if (packet.payload is not None) and (len(packet.payload)):
367 | if (len(buffer) + 1 + len(packet.payload)) >= _BUF_MAX_SIZE:
368 | return 0
369 | buffer.append(0xFF)
370 | buffer.extend(packet.payload)
371 |
372 |
373 | class Coap:
374 | def __init__(
375 | self,
376 | host,
377 | port=_DEFAULT_PORT,
378 | ssl=False,
379 | ssl_options=None,
380 | ack_timeout_ms=2_000,
381 | ack_random_factor=1.5,
382 | max_retransmit=4,
383 | ping_interval_ms=60_000,
384 | ):
385 | self._logger = logging.getLogger(__name__)
386 | if _DEBUG:
387 | self._logger.setLevel(logging.DEBUG)
388 | self._callbacks = {}
389 | self._response_callback = None
390 | self._host = host
391 | self._port = port
392 | self._ssl = ssl
393 | self._ssl_opts = ssl_options or {}
394 |
395 | self._addr = None
396 |
397 | self._sock = None
398 | self.connected = False
399 | self._connection_epoch = 0
400 | self._next_message_id = 0
401 | self._read_loop_task = None
402 | self._ping_loop_task = None
403 | self._in_flight_requests = {}
404 | self._force_reconnect_event = asyncio.Event()
405 |
406 | # Protocol parameters:
407 | self._ack_timeout_min_ms = ack_timeout_ms
408 | self._ack_timeout_max_ms = int(self._ack_timeout_min_ms * ack_random_factor)
409 | self._max_retransmit = max_retransmit
410 |
411 | self._ping_interval_ms = ping_interval_ms
412 |
413 | self._on_connect_tasks = []
414 |
415 | self.lock = asyncio.Lock()
416 |
417 | def _get_message_id(self):
418 | message_id = self._next_message_id
419 | if message_id < 0xFFFF:
420 | self._next_message_id += 1
421 | else:
422 | self._next_message_id = 0
423 | return message_id
424 |
425 | def on_connect(self, cb):
426 | self._on_connect_tasks.append(cb)
427 |
428 | async def connect(self):
429 | self._logger.info("Connecting to CoAP server")
430 | self._force_reconnect_event.clear()
431 |
432 | rnd = os.urandom(2)
433 | self._connection_epoch = (rnd[0] << 8) | rnd[1]
434 | self._next_message_id = 0
435 |
436 | if self._addr is None:
437 | # TODO: this is blocking
438 | self._addr = socket.getaddrinfo(self._host, self._port)[0][-1]
439 |
440 | sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
441 | try:
442 | sock.setblocking(False)
443 | sock.bind(socket.getaddrinfo("0.0.0.0", 0)[0][-1])
444 | sock.connect(self._addr)
445 |
446 | if self._ssl is not False:
447 | ctx = self._ssl
448 | if ctx is True:
449 | ctx = ssl.SSLContext(
450 | ssl.PROTOCOL_DTLS_CLIENT
451 | )
452 |
453 | sock = ctx.wrap_socket(
454 | sock,
455 | do_handshake_on_connect=True,
456 | server_hostname=self._host,
457 | )
458 |
459 | self._sock = _DTLSocket(sock)
460 |
461 | self._read_loop_task = asyncio.create_task(self._read_loop())
462 |
463 | await self.ping()
464 |
465 | self._logger.info("Connected to CoAP server")
466 |
467 | self._ping_loop_task = asyncio.create_task(self._ping_loop())
468 |
469 | for task in self._on_connect_tasks:
470 | await task(self)
471 |
472 | self.connected = True
473 |
474 | except Exception:
475 | sock.close()
476 | self.connected = False
477 | raise
478 |
479 | def _force_reconnect(self, reason):
480 | if not self._force_reconnect_event.is_set():
481 | self._force_reconnect_event.set()
482 | self._logger.warning("Force reconnection, reason: %s", reason)
483 |
484 | async def disconnect(self):
485 | if self.connected:
486 | self._logger.info("Disconnecting from CoAP server")
487 | self.connected = False
488 |
489 | if self._read_loop_task is not None:
490 | self._read_loop_task.cancel()
491 | if self._ping_loop_task is not None:
492 | self._ping_loop_task.cancel()
493 |
494 | for ev in self._in_flight_requests.values():
495 | ev.disconnected = True
496 | ev.set()
497 |
498 | self._in_flight_requests = {}
499 | self._sock.close()
500 | self._sock = None
501 |
502 | async def connect_loop(self):
503 | while True:
504 | try:
505 | await self.connect()
506 | except Exception as exc:
507 | self._logger.error("Error trying to connect: %s", str(exc))
508 | await asyncio.sleep_ms(10_000)
509 | try:
510 | await self.disconnect()
511 | except Exception:
512 | pass
513 | continue
514 |
515 | await self._force_reconnect_event.wait()
516 |
517 | try:
518 | await self.disconnect()
519 | except Exception:
520 | pass
521 |
522 | async def send_packet(self, packet):
523 | if packet.message_id is None:
524 | packet.message_id = self._get_message_id()
525 |
526 | if packet.content_format != CONTENT_FORMAT_NONE:
527 | optionBuffer = bytearray(2)
528 | optionBuffer[0] = (packet.content_format & 0xFF00) >> 8
529 | optionBuffer[1] = packet.content_format & 0x00FF
530 | packet.add_option(OPTION_CONTENT_FORMAT, optionBuffer)
531 |
532 | if packet.query is not None and len(packet.query) > 0:
533 | packet.add_option(OPTION_URI_QUERY, packet.query)
534 |
535 | buffer = bytearray()
536 | _write_packet_header_info(buffer, packet)
537 | _write_packet_options(buffer, packet)
538 | _write_packet_payload(buffer, packet)
539 |
540 | if _DEBUG:
541 | self._logger.debug(">>>>>> %s", packet)
542 |
543 | try:
544 | await self._sock.write(buffer)
545 | except Exception:
546 | self._force_reconnect("error writing packet")
547 | raise
548 |
549 | async def _send_ack(self, message_id):
550 | packet = CoapPacket()
551 | packet.type = TYPE_ACK
552 | packet.method = METHOD_EMPTY_MESSAGE
553 | packet.message_id = message_id
554 | return await self.send_packet(packet)
555 |
556 | async def ping(self):
557 | packet = CoapPacket()
558 | packet.type = TYPE_CON
559 | packet.method = METHOD_EMPTY_MESSAGE
560 | return await self.request(packet)
561 |
562 | async def _ping_loop(self):
563 | while True:
564 | await asyncio.sleep_ms(self._ping_interval_ms)
565 | try:
566 | await self.ping()
567 | except Exception:
568 | self._force_reconnect("error sending ping request")
569 | return
570 |
571 | async def request(self, packet, observe_cb=None):
572 | async with self.lock:
573 | is_ping = packet.method == METHOD_EMPTY_MESSAGE
574 | packet.message_id = self._get_message_id()
575 | if packet.token is None and not is_ping:
576 | packet.token = (self._connection_epoch << 16) | packet.message_id
577 |
578 | ev = asyncio.Event()
579 | ev.acked = False
580 | ev.disconnected = False
581 | ev.only_ack = is_ping
582 | ev.observe_cb = observe_cb
583 | self._in_flight_requests[packet.message_id] = ev
584 | self._in_flight_requests[packet.token] = ev
585 |
586 | retransmit_delay_ms = random.randint(
587 | self._ack_timeout_min_ms, self._ack_timeout_max_ms
588 | )
589 | retransmissions = 0
590 |
591 | epoch = self._connection_epoch
592 | try:
593 | while not ev.acked:
594 | await self.send_packet(packet)
595 |
596 | try:
597 | await asyncio.wait_for_ms(ev.wait(), retransmit_delay_ms)
598 | break
599 | except asyncio.TimeoutError:
600 | if self._connection_epoch != epoch:
601 | raise COAPDisconnectedError()
602 |
603 | if retransmissions == self._max_retransmit:
604 | self._force_reconnect("reached max retransmissions")
605 | raise COAPRequestTimeoutError()
606 | retransmissions += 1
607 | retransmit_delay_ms *= 2
608 |
609 | if not ev.is_set():
610 | await ev.wait()
611 | if ev.disconnected:
612 | raise COAPDisconnectedError()
613 | return ev.response
614 |
615 | finally:
616 | if self._connection_epoch == epoch:
617 | if observe_cb is None:
618 | self._in_flight_requests.pop(packet.message_id, None)
619 | self._in_flight_requests.pop(packet.token, None)
620 |
621 | async def get(self, path, accept=CONTENT_FORMAT_TEXT_PLAIN):
622 | packet = CoapPacket()
623 | packet.type = TYPE_CON
624 | packet.method = METHOD_GET
625 | packet.set_uri_path(path)
626 | packet.add_option(OPTION_ACCEPT, encode_uint_option(accept))
627 | return await self.request(packet)
628 |
629 | async def observe(self, path, observe_cb, accept=CONTENT_FORMAT_TEXT_PLAIN):
630 | packet = CoapPacket()
631 | packet.type = TYPE_CON
632 | packet.method = METHOD_GET
633 | packet.add_option(OPTION_OBSERVE, b"")
634 | packet.set_uri_path(path)
635 | packet.add_option(OPTION_ACCEPT, encode_uint_option(accept))
636 | return await self.request(packet, observe_cb=observe_cb)
637 |
638 | def get_streaming(self, path):
639 | return BlockReader(self, path)
640 |
641 | async def post(self, path, data, format=CONTENT_FORMAT_TEXT_PLAIN):
642 | packet = CoapPacket()
643 | packet.type = TYPE_CON
644 | packet.method = METHOD_POST
645 | packet.set_uri_path(path)
646 | packet.add_option(OPTION_CONTENT_FORMAT, encode_uint_option(format))
647 | packet.payload = data
648 | return await self.request(packet)
649 |
650 | async def put(self, path, format=CONTENT_FORMAT_TEXT_PLAIN):
651 | packet = CoapPacket()
652 | packet.type = TYPE_CON
653 | packet.method = METHOD_PUT
654 | packet.set_uri_path(path)
655 | if format:
656 | packet.add_option(OPTION_CONTENT_FORMAT, encode_uint_option(format))
657 | return await self.request(packet)
658 |
659 | async def delete(self, path):
660 | packet = CoapPacket()
661 | packet.type = TYPE_CON
662 | packet.method = METHOD_DELETE
663 | packet.set_uri_path(path)
664 | return await self.request(packet)
665 |
666 | def _read_bytes_from_socket(self, numOfBytes):
667 | try:
668 | return self.sock.recvfrom(numOfBytes)
669 | except Exception:
670 | return (None, None)
671 |
672 | async def _read_loop(self):
673 | while True:
674 | try:
675 | buffer = await self._sock.read(_BUF_MAX_SIZE)
676 | if buffer is None:
677 | continue
678 |
679 | buffer = memoryview(buffer)
680 |
681 | packet = CoapPacket()
682 | _parse_packet(buffer, packet)
683 |
684 | except Exception:
685 | self._force_reconnect("error reading packet")
686 | return
687 |
688 | if _DEBUG:
689 | self._logger.debug("<<<<<< %s", packet)
690 |
691 | if packet.type == TYPE_CON:
692 | await self._send_ack(packet.message_id)
693 |
694 | if packet.type == TYPE_ACK and packet.method == METHOD_EMPTY_MESSAGE:
695 | # Separate response (rfc7252 #5.2.2)
696 | request_ev = self._in_flight_requests.get(packet.message_id, None)
697 | if request_ev is not None:
698 | request_ev.acked = True
699 | if request_ev.only_ack:
700 | request_ev.response = None
701 | request_ev.set()
702 | continue
703 |
704 | request_id = packet.token
705 | if request_id is None:
706 | request_id = packet.message_id
707 | request_ev = self._in_flight_requests.get(request_id, None)
708 | if request_ev is not None:
709 | request_ev.acked = True
710 | request_ev.response = packet
711 | if request_ev.observe_cb is not None:
712 | asyncio.create_task(request_ev.observe_cb(self, packet))
713 | request_ev.set()
714 |
715 |
716 | def encode_uint_option(v):
717 | l = 0
718 | vv = v
719 | while vv:
720 | l += 1
721 | vv >>= 8
722 |
723 | if l == 0:
724 | return b""
725 |
726 | buf = bytearray(l)
727 | while l > 0:
728 | l -= 1
729 | buf[l] = v & 0xFF
730 | v >>= 8
731 | return buf
732 |
733 |
734 | def decode_uint_option(v):
735 | ret = 0
736 | for c in v:
737 | ret = (ret << 8) | c
738 | return ret
739 |
740 |
741 | class BlockReader:
742 | def __init__(self, client, path):
743 | self._client = client
744 | self._path = path
745 | self._token = None
746 | self._block_num = 0
747 |
748 | async def readinto(self, buf):
749 | packet = CoapPacket()
750 | packet.token = self._token
751 | packet.type = TYPE_CON
752 | packet.method = METHOD_GET
753 | packet.set_uri_path(self._path)
754 | block_option_payload = encode_uint_option((self._block_num << 4) | 6)
755 | packet.add_option(
756 | OPTION_BLOCK2,
757 | block_option_payload,
758 | )
759 |
760 | num_retries = 0
761 | while True:
762 | response = await self._client.request(packet)
763 |
764 | options = [
765 | option for option in response.options if option.number == OPTION_BLOCK2
766 | ]
767 | if (
768 | len(options) != 1
769 | or (decode_uint_option(options[0].buffer) >> 4) != self._block_num
770 | ):
771 |                 # The server has sent an OPTION_BLOCK2 packet with an
772 |                 # unexpected header. This is possibly due to a very slow connection on
773 |                 # the device causing the server to expire the update package.
774 |                 # In this case, try requesting the packet once more before
775 |                 # throwing an exception, in case the server did time out.
776 | # See https://github.com/Ribbit-Network/ribbit-network-frog-software/issues/33
777 | if num_retries == 1:
778 | raise RuntimeError("unexpected block option in server response")
779 | num_retries += 1
780 | continue
781 |
782 | break
783 |
784 | if len(buf) < len(response.payload):
785 | raise ValueError("buffer too small")
786 |
787 | buf[: len(response.payload)] = response.payload
788 | self._block_num += 1
789 | self._token = packet.token
790 | return len(response.payload)
791 |
--------------------------------------------------------------------------------
/modules/ribbit/config.py:
--------------------------------------------------------------------------------
1 | import os
2 | import collections
3 | import errno
4 | import asyncio
5 | import ujson as json
6 | from micropython import const
7 | import logging
8 |
9 | from .utils.asyncio import Watcher
10 |
11 |
12 | class Invalid(Exception):
13 | pass
14 |
15 |
16 | required = object()
17 |
18 |
19 | class Key:
20 | def __init__(self, name=None, default=None, protected=False):
21 | self.name = name
22 | self.default = default
23 | self.protected = protected
24 |
25 | def validate(self, value):
26 | pass
27 |
28 | def hydrate(self, value):
29 | if value is None:
30 | return self.default
31 | return value
32 |
33 |
34 | class String(Key):
35 | type_name = "string"
36 |
37 | def validate(self, value):
38 | return isinstance(value, str)
39 |
40 |
41 | class Integer(Key):
42 | type_name = "integer"
43 |
44 | def validate(self, value):
45 | return isinstance(value, int)
46 |
47 |
48 | class Float(Key):
49 | type_name = "float"
50 |
51 | def validate(self, value):
52 | return isinstance(value, float)
53 |
54 |
55 | class Boolean(Key):
56 | type_name = "boolean"
57 |
58 | def validate(self, value):
59 | return isinstance(value, bool)
60 |
61 |
62 | class Object(Key):
63 | type_name = "object"
64 |
65 | def __init__(self, keys, name=None, default=None, protected=False):
66 | super().__init__(name, default, protected)
67 | self.keys = {}
68 | self.required = set()
69 | for key in keys:
70 | self.keys[key.name] = key
71 | if key.default is required:
72 | self.required.add(key.name)
73 |
74 | def validate(self, value):
75 | if not isinstance(value, dict):
76 | return False
77 |
78 | for k, v in value.items():
79 | try:
80 | key = self.keys[k]
81 | except KeyError:
82 | return False
83 |
84 | if not key.validate(v):
85 | return False
86 |
87 | for k in self.required:
88 | if k not in value:
89 | return False
90 |
91 | return True
92 |
93 | def hydrate(self, value):
94 | value = value.copy()
95 | for key in self.keys.values():
96 | if key.name not in value:
97 | value[key.name] = key.default
98 | return value
99 |
100 |
101 | class TypedObject(Key):
102 | def __init__(self, type_key, types, name=None, default=None, protected=False):
103 | super().__init__(name, default, protected)
104 | self.type_key = type_key
105 | self.types = {}
106 | for typ in types:
107 | self.types[typ.name] = typ
108 |
109 | def validate(self, value):
110 | if not isinstance(value, dict):
111 | return False
112 |
113 | value = value.copy()
114 |
115 | try:
116 | typ = value.pop(self.type_key)
117 | except KeyError:
118 | return False
119 |
120 | try:
121 | subtyp = self.types[typ]
122 | except KeyError:
123 | return False
124 |
125 | return subtyp.validate(value)
126 |
127 | def hydrate(self, value):
128 | subtyp = self.types[value[self.type_key]]
129 | return subtyp.hydrate(value)
130 |
131 |
132 | class Array(Key):
133 | type_name = "array"
134 |
135 | def __init__(self, item, name=None, default=None, protected=False):
136 | super().__init__(name, default, protected)
137 | self.item = item
138 |
139 | def validate(self, value):
140 | if not isinstance(value, list):
141 | return False
142 |
143 | for item in value:
144 | if not self.item.validate(item):
145 | return False
146 |
147 | return True
148 |
149 | def hydrate(self, value):
150 | if value is None:
151 | return value
152 |
153 | return [self.item.hydrate(item) for item in value]
154 |
155 |
156 | # Domain of the config keys that are not set anywhere
157 | # else and use their default values.
158 | DOMAIN_DEFAULT = const(-1)
159 |
160 | # Domain of the config keys that are set locally
161 | # on this specific node.
162 | #
163 | # Note: this is the index in the ConfigRegistry._config list.
164 | DOMAIN_LOCAL = const(0)
165 |
166 | # Domain of the config keys that are set on the cloud.
167 | #
168 | # Note: this is the index in the ConfigRegistry._config list.
169 | DOMAIN_REMOTE = const(1)
170 |
171 | # Domain of the config keys that are locally overridden on this
172 | # specific node.
173 | #
174 | # Note: this is the index in the ConfigRegistry._config list.
175 | DOMAIN_LOCAL_OVERRIDE = const(2)
176 |
177 | _STORED_DOMAINS = [DOMAIN_LOCAL, DOMAIN_REMOTE, DOMAIN_LOCAL_OVERRIDE]
178 | _PRIORITY_ORDER = list(reversed(_STORED_DOMAINS))
179 |
180 | # Mapping of domain constants to paths
181 | DOMAIN_NAMES = {
182 | DOMAIN_DEFAULT: "default",
183 | DOMAIN_LOCAL: "local",
184 | DOMAIN_REMOTE: "remote",
185 | DOMAIN_LOCAL_OVERRIDE: "override",
186 | }
187 |
188 | DOMAIN_PATHS = {
189 | DOMAIN_LOCAL: "/config/000-local.json",
190 | DOMAIN_REMOTE: "/config/001-remote.json",
191 | DOMAIN_LOCAL_OVERRIDE: "/config/002-local-override",
192 | }
193 |
194 |
195 | class ConfigRegistry:
196 | def __init__(self, keys, stored=True, in_simulator=False):
197 | self._logger = logging.getLogger(__name__)
198 |
199 | self._watchers = {}
200 |
201 | self._keys = collections.OrderedDict()
202 | for key in keys:
203 | self._keys[key.name] = key
204 | key.default = key.hydrate(key.default)
205 |
206 | self._stored = stored
207 | if in_simulator:
208 | prefix = os.getcwd() + "/data"
209 | self._domain_paths = {k: prefix + v for k, v in DOMAIN_PATHS.items()}
210 | else:
211 | prefix = ""
212 | self._domain_paths = DOMAIN_PATHS
213 |
214 | if stored:
215 | try:
216 | os.mkdir(prefix + "/config")
217 | except OSError as exc:
218 | if exc.errno != errno.EEXIST:
219 | raise
220 |
221 | self._config = [self._load_config(domain) for domain in _STORED_DOMAINS]
222 |
223 | def _load_config(self, domain):
224 | if not self._stored:
225 | return {}
226 |
227 | filepath = self._domain_paths[domain]
228 | try:
229 | with open(filepath, "r", encoding="utf-8") as f:
230 | self._logger.info("Loading config from %s", filepath)
231 | data = json.load(f)
232 |                 for k in list(data.keys()):  # copy: we may delete while iterating
233 |                     if not self.is_valid_key(k):
234 |                         del data[k]
235 | return data
236 |
237 | except OSError as exc:
238 | if exc.errno == errno.ENOENT:
239 | return {}
240 |
241 | self._logger.exc(exc, "Exception reading config %s", filepath)
242 |
243 | except Exception as exc:
244 | self._logger.exc(exc, "Exception reading config %s", filepath)
245 |
246 | self._logger.warning("Config %s was corrupted, deleting", filepath)
247 |
248 | try:
249 | os.remove(filepath)
250 | except Exception:
251 | pass
252 |
253 | return {}
254 |
255 | def _save_config(self, domain, values):
256 | if not self._stored:
257 | return {}
258 |
259 | filepath = self._domain_paths[domain]
260 | try:
261 | with open(filepath, "w", encoding="utf-8") as f:
262 | json.dump(values, f)
263 | except Exception as exc:
264 | self._logger.exc(exc, "Failed to save config %s", filepath)
265 |
266 | def is_valid_key(self, key):
267 | return key in self._keys
268 |
269 | def keys(self):
270 | return list(self._keys.keys())
271 |
272 | def get(self, key):
273 | key_info = self._keys[key]
274 |
275 | for domain in _PRIORITY_ORDER:
276 | value = self._config[domain].get(key, None)
277 | if value is not None:
278 | return (domain, key_info.hydrate(value), key_info)
279 |
280 | return (DOMAIN_DEFAULT, key_info.default, key_info)
281 |
282 | def watch(self, *keys):
283 | w = Watcher(None, self._unwatch)
284 | w.keys = keys
285 |
286 | values = tuple(self.get(k)[1] for k in keys)
287 |
288 | for k in keys:
289 | self._watchers.setdefault(k, set()).add(w)
290 |
291 | w.notify(values)
292 | return w
293 |
294 | def _unwatch(self, w):
295 | for k in w.keys:
296 | watcher_set = self._watchers.get(k, None)
297 | if watcher_set is not None:
298 | watcher_set.discard(w)
299 | if not watcher_set:
300 | self._watchers.pop(k)
301 |
302 | def _set(self, domain, config):
303 | assert domain in _STORED_DOMAINS
304 |
305 | new_keys = {}
306 | for k, v in config.items():
307 | key_info = self._keys.get(k, None)
308 | if key_info is None:
309 | continue
310 |
311 | if v is not None and not key_info.validate(v):
312 | raise ValueError("invalid value", k, v)
313 |
314 | new_keys[k] = v
315 |
316 | affected_watchers = set()
317 | domain_config = self._config[domain]
318 | for k, v in new_keys.items():
319 | if v is not None:
320 | domain_config[k] = v
321 | else:
322 | domain_config.pop(k, None)
323 |
324 | affected_watchers.update(self._watchers.get(k, []))
325 |
326 | self._save_config(domain, domain_config)
327 |
328 | for w in affected_watchers:
329 | values = tuple(self.get(k)[1] for k in w.keys)
330 | w.notify(values)
331 |
332 | def set(self, config):
333 | return self._set(DOMAIN_LOCAL, config)
334 |
335 | def set_remote(self, config):
336 | return self._set(DOMAIN_REMOTE, config)
337 |
338 | def set_override(self, config):
339 | return self._set(DOMAIN_LOCAL_OVERRIDE, config)
340 |
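341 | # Hedged usage sketch: watching configuration keys from an asyncio task.
342 | # The key names below are illustrative.
343 | #
344 | #   with registry.watch("wifi.ssid", "wifi.password") as w:
345 | #       while True:
346 | #           ssid, password = w.get()
347 | #           ...  # react to the current values
348 | #           await w.wait()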
--------------------------------------------------------------------------------
/modules/ribbit/config_test.py:
--------------------------------------------------------------------------------
1 | def test_config():
2 | import ribbit.config as _config
3 |
4 | c = _config.ConfigRegistry(
5 | keys=[
6 | _config.String(name="foo"),
7 | _config.String(name="bar"),
8 | ],
9 | stored=False,
10 | )
11 |
12 | with c.watch("foo", "bar") as cfg_watcher:
13 | foo, bar = cfg_watcher.get()
14 | assert foo is None
15 | assert bar is None
16 |
17 | c.set({"foo": "test"})
18 |
19 | assert cfg_watcher.changed
20 | foo, bar = cfg_watcher.get()
21 | assert foo == "test"
22 | assert bar is None
23 |
24 | assert not c._watchers
25 |
26 |
27 | def test_config_override():
28 | import ribbit.config as _config
29 |
30 | c = _config.ConfigRegistry(
31 | keys=[
32 | _config.Integer(name="bar"),
33 | ],
34 | stored=False,
35 | )
36 |
37 | c.set({"bar": 1})
38 | domain, value, _ = c.get("bar")
39 | assert domain == _config.DOMAIN_LOCAL
40 | assert value == 1
41 |
42 | c.set_remote({"bar": 2})
43 | domain, value, _ = c.get("bar")
44 | assert domain == _config.DOMAIN_REMOTE
45 | assert value == 2
46 |
47 | c.set_remote({"bar": None})
48 | domain, value, _ = c.get("bar")
49 | assert domain == _config.DOMAIN_LOCAL
50 | assert value == 1
51 |
52 | c.set_override({"bar": 4})
53 | domain, value, _ = c.get("bar")
54 | assert domain == _config.DOMAIN_LOCAL_OVERRIDE
55 | assert value == 4
56 |
57 |
58 | def test_config_array():
59 | import ribbit.config as _config
60 |
61 | c = _config.ConfigRegistry(
62 | keys=[
63 | _config.Array(
64 | name="bar",
65 | item=_config.Object(
66 | keys=[
67 | _config.String(name="foo1"),
68 | _config.String(name="foo2"),
69 | ],
70 | ),
71 | ),
72 | ],
73 | stored=False,
74 | )
75 |
76 | c.set({"bar": []})
77 | domain, value, _ = c.get("bar")
78 | assert domain == _config.DOMAIN_LOCAL
79 | assert value == []
80 |
81 | raised_exc = None
82 | try:
83 | c.set({"bar": ["foo"]})
84 | except Exception as exc:
85 | raised_exc = exc
86 |
87 | assert isinstance(raised_exc, ValueError)
88 |
89 | c.set({"bar": [{"foo1": "value1"}]})
90 | domain, value, _ = c.get("bar")
91 | assert domain == _config.DOMAIN_LOCAL
92 | assert value == [{"foo1": "value1", "foo2": None}]
93 |
--------------------------------------------------------------------------------
/modules/ribbit/golioth/__init__.py:
--------------------------------------------------------------------------------
1 | import json
2 | import logging
3 | import time
4 | from micropython import const
5 | import asyncio
6 |
7 | import ribbit.config as _config
8 | import ribbit.coap as _coap
9 | import ribbit.utils.ota as _ota
10 |
11 |
12 | # gRPC return codes
13 | _RPC_OK = const(0)
14 | _RPC_CANCELED = const(1)
15 | _RPC_UNKNOWN = const(2)
16 | _RPC_INVALID_ARGUMENT = const(3)
17 | _RPC_DEADLINE_EXCEEDED = const(4)
18 | _RPC_NOT_FOUND = const(5)
19 | _RPC_ALREADYEXISTS = const(6)
20 | _RPC_PERMISSION_DENIED = const(7)
21 | _RPC_RESOURCE_EXHAUSTED = const(8)
22 | _RPC_FAILED_PRECONDITION = const(9)
23 | _RPC_ABORTED = const(10)
24 | _RPC_OUT_OF_RANGE = const(11)
25 | _RPC_UNIMPLEMENTED = const(12)
26 | _RPC_INTERNAL = const(13)
27 | _RPC_UNAVAILABLE = const(14)
28 | _RPC_DATA_LOSS = const(15)
29 | _RPC_UNAUTHENTICATED = const(16)
30 |
31 |
32 | CONFIG_GOLIOTH_ENABLED = const("golioth.enabled")
33 | CONFIG_GOLIOTH_HOST = const("golioth.host")
34 | CONFIG_GOLIOTH_PORT = const("golioth.port")
35 | CONFIG_GOLIOTH_USER = const("golioth.user")
36 | CONFIG_GOLIOTH_PASSWORD = const("golioth.password")
37 | CONFIG_GOLIOTH_OTA_ENABLED = const("golioth.ota.enabled")
38 |
39 | _CONFIG_KEYS = [
40 | CONFIG_GOLIOTH_ENABLED,
41 | CONFIG_GOLIOTH_HOST,
42 | CONFIG_GOLIOTH_PORT,
43 | CONFIG_GOLIOTH_USER,
44 | CONFIG_GOLIOTH_PASSWORD,
45 | CONFIG_GOLIOTH_OTA_ENABLED,
46 | ]
47 |
48 | CONFIG_KEYS = [
49 | _config.Boolean(name=CONFIG_GOLIOTH_ENABLED, default=True),
50 | _config.String(
51 | name=CONFIG_GOLIOTH_HOST,
52 | default="coap.golioth.io",
53 | ),
54 | _config.Integer(name=CONFIG_GOLIOTH_PORT, default=5684),
55 | _config.String(name=CONFIG_GOLIOTH_USER, default=None),
56 | _config.String(name=CONFIG_GOLIOTH_PASSWORD, default=None, protected=True),
57 | _config.Boolean(name=CONFIG_GOLIOTH_OTA_ENABLED, default=True),
58 | ]
59 |
60 |
61 | class Golioth:
62 | def __init__(self, config, ota_manager, commands=None, in_simulator=False):
63 | self._logger = logging.getLogger(__name__)
64 | self._config = config
65 | self._commands = commands or {}
66 | self._coap = None
67 | self._ota_manager = ota_manager
68 | self._in_simulator = in_simulator
69 | self._ota_enabled = False
70 |
71 | self.register_rpc("ping", self._pong_rpc)
72 |
73 | asyncio.create_task(self._loop())
74 |
75 | async def _loop(self):
76 | with self._config.watch(*_CONFIG_KEYS) as cfg_watcher:
77 | while True:
78 | enabled, host, port, user, password, self._ota_enabled = cfg_watcher.get()
79 |
80 | enabled = enabled and (user is not None and password is not None)
81 |
82 | if self._coap is not None:
83 | self._logger.info("Stopping Golioth integration")
84 | self._coap.close()
85 | self._coap = None
86 |
87 | if enabled:
88 | self._logger.info("Starting Golioth integration")
89 |
90 | import ssl
91 | ctx = ssl.SSLContext(
92 | ssl.PROTOCOL_DTLS_CLIENT
93 | )
94 | ctx.set_ciphers(["TLS-PSK-WITH-AES-128-CBC-SHA256"])
95 | ctx.set_psk(user, password)
96 |
97 | self._coap = _coap.Coap(
98 | host=host,
99 | port=port,
100 | ssl=ctx,
101 | )
102 | self._coap.on_connect(self._on_connect)
103 | asyncio.create_task(self._coap.connect_loop())
104 |
105 | await cfg_watcher.wait()
106 |
107 | async def _on_connect(self, client):
108 | await self._send_firmware_report(client)
109 | await client.observe(
110 | ".c", self._on_golioth_config, accept=_coap.CONTENT_FORMAT_APPLICATION_JSON
111 | )
112 | await client.observe(
113 | ".rpc", self._on_golioth_rpc, accept=_coap.CONTENT_FORMAT_APPLICATION_JSON
114 | )
115 | if self._ota_enabled and not self._in_simulator:
116 | await client.observe(
117 | ".u/desired",
118 | self._on_golioth_firmware,
119 | accept=_coap.CONTENT_FORMAT_APPLICATION_JSON,
120 | )
121 |
122 | async def _on_golioth_config(self, client, packet):
123 | req = json.loads(packet.payload)
124 | self._logger.info("Config payload received: %s", req)
125 |
126 | config = {}
127 | for k, v in req["settings"].items():
128 | k = k.replace("_", ".").lower()
129 | config[k] = v
130 |
131 | self._config.set_remote(config)
132 |
133 | await client.post(
134 | ".c/status",
135 | json.dumps(
136 | {
137 | "version": req["version"],
138 | "error_code": 0,
139 | }
140 | ),
141 | format=_coap.CONTENT_FORMAT_APPLICATION_JSON,
142 | )
143 |
144 | def register_rpc(self, method, handler):
145 | self._commands[method] = handler
146 |
147 | async def _pong_rpc(self, *args):
148 | return "pong"
149 |
150 | def _reply_rpc(self, client, req, code, detail=None):
151 | res = {
152 | "id": req["id"],
153 | "statusCode": code,
154 | }
155 | if detail is not None:
156 | res["detail"] = detail
157 |
158 | return client.post(
159 | ".rpc/status",
160 | json.dumps(res),
161 | format=_coap.CONTENT_FORMAT_APPLICATION_JSON,
162 | )
163 |
164 | async def _on_golioth_rpc(self, client, packet):
165 | req = json.loads(packet.payload)
166 | if not isinstance(req, dict):
167 | return
168 |
169 | status = _RPC_OK
170 | details = None
171 |
172 | command = self._commands.get(req["method"], None)
173 | if command is not None:
174 | try:
175 | details = await command(*req["params"])
176 | if details is not None:
177 | details = str(details)
178 | except Exception as exc:
179 | details = str(exc)
180 | status = _RPC_INTERNAL
181 | else:
182 | status = _RPC_UNIMPLEMENTED
183 |
184 | await self._reply_rpc(client, req, status, details)
185 |
186 | async def _send_firmware_report(
187 | self, client, package="main", state=0, reason=0, target_version=None
188 | ):
189 | import __version__
190 |
191 | req = {
192 | "state": state,
193 | "reason": reason,
194 | "package": package,
195 | "version": __version__.version,
196 | }
197 |
198 | if target_version is not None:
199 | req["target"] = target_version
200 |
201 | await client.post(
202 | ".u/c/" + package,
203 | json.dumps(req),
204 | format=_coap.CONTENT_FORMAT_APPLICATION_JSON,
205 | )
206 |
207 | async def _update_firmware(self, client, component):
208 | self._logger.info("Starting firmware update")
209 |
210 | await self._send_firmware_report(
211 | client,
212 | state=1,
213 | target_version=component["version"],
214 | )
215 |
216 | self._logger.info("Component %s", component)
217 |
218 | reader = client.get_streaming(component["uri"][1:])
219 |
220 | self._logger.info("Receiving firmware package")
221 |
222 | await self._ota_manager.do_ota_update(
223 | _ota.OTAUpdate(
224 | reader=reader,
225 | sha256_hash=component["hash"],
226 | size=component["size"],
227 | )
228 | )
229 |
230 | await self._send_firmware_report(
231 | client,
232 | state=2,
233 | target_version=component["version"],
234 | )
235 |
236 | import machine
237 |
238 | machine.reset()
239 |
240 | async def _on_golioth_firmware(self, client, packet):
241 | import __version__
242 |
243 | req = json.loads(packet.payload)
244 | self._logger.info("Firmware payload received: %s", req)
245 |
246 | if req.get("components", None) is None:
247 | return
248 |
249 | for component in req["components"]:
250 | if component["package"] == "main":
251 | if component["version"] != __version__.version:
252 | asyncio.create_task(self._update_firmware(client, component))
253 |
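254 | # Hedged usage sketch (object names are illustrative):
255 | #
256 | #   golioth = Golioth(config, ota_manager)
257 | #
258 | #   async def _reboot_rpc(*params):
259 | #       import machine
260 | #       machine.reset()
261 | #
262 | #   golioth.register_rpc("reboot", _reboot_rpc)
263 | #
264 | # Settings pushed from the Golioth console arrive with names such as
265 | # "GOLIOTH_OTA_ENABLED"; _on_golioth_config lowercases them and replaces "_"
266 | # with "." so they line up with the local config keys ("golioth.ota.enabled").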
--------------------------------------------------------------------------------
/modules/ribbit/heartbeat.py:
--------------------------------------------------------------------------------
1 | import time
2 | import logging
3 | import asyncio
4 |
5 |
6 | class Heartbeat:
7 | def __init__(self, in_simulator):
8 | self._in_simulator = in_simulator
9 | self._logger = logging.getLogger(__name__)
10 |
11 | if not self._in_simulator:
12 | self._setup_pixel()
13 | asyncio.create_task(self._loop())
14 |
15 | def _setup_pixel(self):
16 | import neopixel
17 | import machine
18 |
19 | machine.Pin(21, machine.Pin.OUT, value=1)
20 | neo_ctrl = machine.Pin(33, machine.Pin.OUT)
21 | self._pixel = neopixel.NeoPixel(neo_ctrl, 1)
22 |
23 | async def _loop(self):
24 | interval = 200
25 | warn_interval = 300
26 |
27 | on = True
28 |
29 | while True:
30 | if not self._in_simulator:
31 | px = self._pixel
32 | if on:
33 | px[0] = (4, 2, 0)
34 | else:
35 | px[0] = (0, 0, 0)
36 | on = not on
37 | px.write()
38 |
39 | start = time.ticks_ms()
40 | await asyncio.sleep_ms(interval)
41 | duration = time.ticks_diff(time.ticks_ms(), start)
42 |
43 | if duration > warn_interval:
44 | self._logger.warning(
45 | "Event loop blocked for %d ms", duration - interval
46 | )
47 |
--------------------------------------------------------------------------------
/modules/ribbit/http.py:
--------------------------------------------------------------------------------
1 | import json
2 | import asyncio
3 | import collections
4 |
5 | from microdot_asyncio import Microdot, Request, Response, HTTPException
6 |
7 | from ._static import assets
8 | from .config import DOMAIN_LOCAL, DOMAIN_NAMES
9 | from ribbit.utils.time import isotime
10 |
11 |
12 | Request.max_content_length = 1 << 30
13 |
14 |
15 | def build_app(registry):
16 | app = Microdot()
17 | app.registry = registry
18 |
19 | @app.errorhandler(404)
20 | async def static(request):
21 | filename = request.path
22 |
23 | try:
24 | data = assets[filename]
25 | except KeyError:
26 | filename = filename.rstrip("/") + "/index.html"
27 | try:
28 | data = assets[filename]
29 | except KeyError:
30 | data = assets["/index.html"]
31 |
32 | headers = {}
33 |
34 | ext = filename.split(".")[-1]
35 | if ext in Response.types_map:
36 | headers["Content-Type"] = Response.types_map[ext]
37 | else:
38 | headers["Content-Type"] = "application/octet-stream"
39 |
40 | if filename.startswith("/assets/"):
41 | headers["Cache-Control"] = "public, max-age=604800, immutable"
42 |
43 | return data, 200, headers
44 |
45 | @app.route("/api/sensors")
46 | async def sensor_status(request):
47 | ret = collections.OrderedDict()
48 | for sensor_id, sensor in registry.sensors.items():
49 | ret[sensor_id] = sensor.export()
50 |
51 | return json.dumps(ret), 200, {"Content-Type": "application/json"}
52 |
53 | @app.route("/api/config")
54 | def config_get(request):
55 | ret = collections.OrderedDict()
56 | for k in registry.config.keys():
57 | domain, value, key_info = registry.config.get(k)
58 | ret[k] = out = collections.OrderedDict()
59 | out["type"] = key_info.type_name
60 | if key_info.protected:
61 | out["protected"] = key_info.protected
62 | out["domain"] = DOMAIN_NAMES[domain]
63 | if not key_info.protected:
64 | out["value"] = value
65 |
66 | return json.dumps(ret), 200, {"Content-Type": "application/json"}
67 |
68 | @app.patch("/api/config")
69 | async def config_set(req):
70 | values = req.json
71 |
72 | if not isinstance(values, dict):
73 | raise HTTPException(400)
74 |
75 | try:
76 | registry.config.set(values)
77 | return "{}", 201, {"Content-Type": "application/json"}
78 |
79 | except ValueError as exc:
80 | return (
81 | json.dumps({"error": str(exc)}),
82 | 400,
83 | {"Content-Type": "application/json"},
84 | )
85 |
86 | return app
87 |
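88 | # Hedged usage sketch, assuming Microdot's asyncio server API; the registry
89 | # object is expected to expose .sensors and .config as used above:
90 | #
91 | #   app = build_app(registry)
92 | #   asyncio.create_task(app.start_server(port=80))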
--------------------------------------------------------------------------------
/modules/ribbit/improv/__init__.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import sys
3 | import micropython
4 | from micropython import const
5 | import logging
6 |
7 |
8 | _header = const(b"IMPROV")
9 | _version = const(b"1")
10 |
11 | STATE_READY = const(0x02)
12 | STATE_PROVISIONING = const(0x03)
13 | STATE_PROVISIONED = const(0x04)
14 |
15 | _ERROR_NO_ERROR = const(0x00)
16 | _ERROR_INVALID_PACKET = const(0x01)
17 | _ERROR_UNKNOWN_COMMAND = const(0x02)
18 | _ERROR_UNABLE_TO_CONNECT = const(0x03)
19 | _ERROR_UNKNOWN = const(0xFF)
20 |
21 | _PACKET_CURRENT_STATE = const(0x01)
22 | _PACKET_ERROR_STATE = const(0x02)
23 | _PACKET_RPC_COMMAND = const(0x03)
24 | _PACKET_RPC_RESULT = const(0x04)
25 |
26 | _RPC_SEND_SETTINGS = const(0x01)
27 | _RPC_REQUEST_CURRENT_STATE = const(0x02)
28 | _RPC_REQUEST_DEVICE_INFO = const(0x03)
29 | _RPC_REQUEST_SCAN_NETWORKS = const(0x04)
30 |
31 |
32 | def _decode_string(buf):
33 | length = buf[0]
34 | return buf[1 + length :], buf[1 : 1 + length]
35 |
36 |
37 | class _PacketBuilder:
38 | def __init__(self):
39 | self._buffer = memoryview(bytearray(256))
40 | self._buffer[0:6] = b"IMPROV"
41 | self._buffer[6] = 0x01
42 | self._idx = 0
43 | self._is_command = False
44 |
45 | def _append(self, c):
46 | self._buffer[self._idx] = c
47 | self._idx += 1
48 |
49 | def _append_string(self, s):
50 | if isinstance(s, str):
51 | s = s.encode("utf-8")
52 | self._buffer[self._idx] = len(s)
53 | self._idx += 1
54 | self._buffer[self._idx : self._idx + len(s)] = s
55 | self._idx += len(s)
56 |
57 | def _init_packet(self, typ, command=None):
58 | self._buffer[7] = typ
59 | self._idx = 9
60 | if command:
61 | self._buffer[self._idx] = command
62 | self._idx += 2
63 | self._is_command = True
64 | else:
65 | self._is_command = False
66 |
67 | def _finalize_packet(self):
68 | self._buffer[8] = self._idx - 9 # Length
69 |
70 | if self._is_command:
71 | self._buffer[10] = self._idx - 11
72 |
73 | checksum = 0
74 | for c in self._buffer[: self._idx]:
75 | checksum = (checksum + c) & 0xFF
76 |
77 | self._buffer[self._idx] = checksum
78 | self._idx += 1
79 | return self._buffer[: self._idx]
80 |
81 |
82 | class ImprovHandler:
83 | def __init__(
84 | self,
85 | product_name,
86 | product_version,
87 | hardware_name,
88 | device_name,
89 | scan_wifi_cb,
90 | set_wifi_settings_cb,
91 | current_state_cb,
92 | logger=None,
93 | ):
94 | if logger is None:
95 | logger = logging.getLogger(__name__)
96 | self._logger = logger
97 |
98 | micropython.kbd_intr(-1)
99 |
100 | self._input = asyncio.StreamReader(sys.stdin.buffer)
101 | self._output = asyncio.StreamWriter(sys.stdout.buffer)
102 |
103 | self._builder = _PacketBuilder()
104 |
105 | self._product_name = product_name
106 | self._product_version = product_version
107 | self._hardware_name = hardware_name
108 | self._device_name = device_name
109 |
110 | self._scan_wifi_cb = scan_wifi_cb
111 | self._set_wifi_settings_cb = set_wifi_settings_cb
112 | self._current_state_cb = current_state_cb
113 |
114 | asyncio.create_task(self._improv_loop())
115 |
116 | async def _improv_loop(self):
117 | while True:
118 | try:
119 | await self._improv_loop_inner()
120 | except Exception as exc:
121 | self._logger.exc(exc, "Error in IMPROV loop")
122 | await asyncio.sleep_ms(1000)
123 |
124 | async def _improv_loop_inner(self):
125 | input_buf = memoryview(bytearray(1))
126 | buf = memoryview(bytearray(256))
127 | state = 0
128 | idx = 0
129 | calculated_checksum = 0
130 |
131 | while True:
132 | await self._input.readinto(input_buf)
133 | c = input_buf[0]
134 |
135 | if state == 10:
136 | state = 0
137 | if calculated_checksum != c:
138 | self._logger.info("Failed checksum!")
139 | continue
140 |
141 | await self._process_packet(packet_type, buf[:idx])
142 | continue
143 |
144 | if state != 0:
145 | calculated_checksum = (calculated_checksum + c) & 0xFF
146 |
147 | if state == 0 and c == 0x03: # CTRL+C
148 | raise KeyboardInterrupt()
149 |
150 | if 0 <= state < len(_header):
151 | if c != _header[state]:
152 | state = 0
153 | continue
154 | if state == 0:
155 | calculated_checksum = c
156 | state += 1
157 |
158 | elif state == 6: # version
159 | if c != 1:
160 | state = 0
161 | continue
162 | state += 1
163 |
164 | elif state == 7: # type
165 | packet_type = c
166 | state += 1
167 |
168 | elif state == 8: # length
169 | packet_length = c
170 | idx = 0
171 | state += 1
172 | if idx == packet_length:
173 | state += 1
174 |
175 | elif state == 9: # packet data
176 | buf[idx] = c
177 | idx += 1
178 | if idx == packet_length:
179 | state += 1
180 |
181 | async def _send_packet(self):
182 | self._output.write(self._builder._finalize_packet())
183 | await self._output.drain()
184 |
185 | async def _reply_current_state(self, command):
186 | self._builder._init_packet(_PACKET_CURRENT_STATE)
187 | state, url = await self._current_state_cb()
188 | self._builder._append(state)
189 | await self._send_packet()
190 |
191 | self._builder._init_packet(_PACKET_RPC_RESULT, command)
192 | if url:
193 | self._builder._append_string(url)
194 | await self._send_packet()
195 |
196 | async def _process_packet(self, packet_type, buf):
197 | if packet_type == _PACKET_RPC_COMMAND:
198 | if len(buf) < 2:
199 | self._builder._init_packet(_PACKET_ERROR_STATE)
200 | self._builder._append(_ERROR_INVALID_PACKET)
201 | await self._send_packet()
202 | return
203 |
204 | command = buf[0]
205 | # buf[1] is the length, which we are ignoring
206 | buf = buf[2:]
207 |
208 | if command == _RPC_SEND_SETTINGS:
209 | buf, ssid = _decode_string(buf)
210 | buf, password = _decode_string(buf)
211 |
212 | try:
213 | await self._set_wifi_settings_cb(
214 | bytes(ssid).decode("utf-8"), bytes(password).decode("utf-8")
215 | )
216 | except Exception as exc:
217 | self._logger.exc(exc, "Exception setting wifi")
218 | self._builder._init_packet(_PACKET_ERROR_STATE)
219 | self._builder._append(_ERROR_UNKNOWN)
220 | await self._send_packet()
221 | return
222 |
223 | await self._reply_current_state(command)
224 |
225 | elif command == _RPC_REQUEST_CURRENT_STATE:
226 | await self._reply_current_state(command)
227 |
228 | elif command == _RPC_REQUEST_DEVICE_INFO:
229 | self._builder._init_packet(_PACKET_RPC_RESULT, command)
230 | self._builder._append_string(self._product_name)
231 | self._builder._append_string(self._product_version)
232 | self._builder._append_string(self._hardware_name)
233 | self._builder._append_string(self._device_name)
234 | await self._send_packet()
235 |
236 | elif command == _RPC_REQUEST_SCAN_NETWORKS:
237 | seen = set()
238 | for net in await self._scan_wifi_cb():
239 | if net[0] in seen:
240 | continue
241 | seen.add(net[0])
242 |
243 | self._builder._init_packet(_PACKET_RPC_RESULT, command)
244 | self._builder._append_string(net[0])
245 | self._builder._append_string(str(net[3]).encode("ascii"))
246 |                     self._builder._append_string(b"YES" if net[4] != 0 else b"NO")
247 | await self._send_packet()
248 |
249 | self._builder._init_packet(_PACKET_RPC_RESULT, command)
250 | await self._send_packet()
251 |
252 | else:
253 |                 self._builder._init_packet(_PACKET_ERROR_STATE)
254 |                 self._builder._append(_ERROR_UNKNOWN_COMMAND)
255 |                 await self._send_packet()
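256 |
257 | # Serial packet layout handled above (Improv Wi-Fi serial protocol):
258 | #   bytes 0-5   "IMPROV"       magic header
259 | #   byte  6     0x01           protocol version
260 | #   byte  7     packet type    (_PACKET_* constants)
261 | #   byte  8     payload length
262 | #   bytes 9..   payload        (for RPC packets: command, command length, arguments)
263 | #   last byte   checksum       (sum of all preceding bytes, modulo 256)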
--------------------------------------------------------------------------------
/modules/ribbit/network.py:
--------------------------------------------------------------------------------
1 | import collections
2 | import logging
3 | import network
4 | import uasyncio as asyncio
5 | from micropython import const
6 |
7 | import ribbit.config as _config
8 | from ribbit.utils.asyncio import WatchableValue
9 |
10 |
11 | CONFIG_WIFI_SSID = const("wifi.ssid")
12 | CONFIG_WIFI_PASSWORD = const("wifi.password")
13 |
14 |
15 | CONFIG_KEYS = [
16 | _config.String(name=CONFIG_WIFI_SSID),
17 | _config.String(name=CONFIG_WIFI_PASSWORD, protected=True),
18 | ]
19 |
20 |
21 | State = collections.namedtuple(
22 | "State", ["state", "connected", "ip", "netmask", "gateway", "dns"]
23 | )
24 |
25 | _state_disconnected = State(
26 | state=network.STAT_IDLE,
27 | connected=False,
28 | ip=None,
29 | netmask=None,
30 | gateway=None,
31 | dns=None,
32 | )
33 |
34 | _state_connecting = State(
35 | state=network.STAT_CONNECTING,
36 | connected=False,
37 | ip=None,
38 | netmask=None,
39 | gateway=None,
40 | dns=None,
41 | )
42 |
43 |
44 | class _ConnectionRequest:
45 | def __init__(self, network_manager, timeout_ms=None):
46 | self._network_manager = network_manager
47 | self._timeout_ms = timeout_ms
48 |
49 | async def __aenter__(self):
50 | self._network_manager._connected_refs += 1
51 |         if self._network_manager._connected_refs == 1:  # first reference
52 | self._network_manager._connected_ref_event.set()
53 |
54 | if self._timeout_ms is not None:
55 | await asyncio.wait_for_ms(
56 | self._network_manager.connected.wait(), self._timeout_ms
57 | )
58 | else:
59 | await self._network_manager.connected.wait()
60 |
61 | async def __aexit__(self, exc_type, exc, tb):
62 | self._network_manager._connected_refs -= 1
63 | if self._network_manager._connected_refs == 0:
64 | self._network_manager._connected_ref_event.set()
65 |
66 |
67 | class NetworkManager:
68 | def __init__(
69 | self,
70 | config,
71 | always_on=True,
72 | poll_interval_connected_ms=5000,
73 | poll_interval_connecting_ms=500,
74 | ):
75 | self._config = config
76 | self._iface = network.WLAN(network.STA_IF)
77 | self._iface.active(False)
78 | self._logger = logging.getLogger(__name__)
79 |
80 | self._reconnect_event = asyncio.Event()
81 | self.state = WatchableValue(_state_disconnected)
82 | self.connected = asyncio.Event()
83 |
84 | self._network_loop_task = asyncio.create_task(self._network_loop())
85 | self._poll_interval_connected_ms = poll_interval_connected_ms
86 | self._poll_interval_connecting_ms = poll_interval_connecting_ms
87 |
88 | self._on_connect_tasks = []
89 |
90 | self._connected_refs = 0
91 | self._connected_ref_event = asyncio.Event()
92 |
93 | if always_on:
94 | self._connected_refs += 1
95 |
96 | def connection(self, timeout_ms=None):
97 | """Returns a context manager that ensures that the network is connected"""
98 | return _ConnectionRequest(self, timeout_ms=timeout_ms)
99 |
100 | def force_reconnect(self, reason="unknown reason"):
101 | if not self._reconnect_event.is_set():
102 | self._logger.info("Forcing a reconnection: %s", reason)
103 | self._reconnect_event.set()
104 |
105 | def on_connect_task(self, cb):
106 | self._on_connect_tasks.append(cb)
107 |
108 | async def scan(self):
109 | # Force cancel the network loop, as most chips do not support
110 | # scanning while connecting / being connected.
111 | self._network_loop_task.cancel()
112 | try:
113 | await self._network_loop_task
114 | except asyncio.CancelledError:
115 | pass
116 |
117 | try:
118 | iface = self._iface
119 | iface.active(False)
120 | iface.active(True)
121 | return iface.scan()
122 |
123 | finally:
124 | iface.active(False)
125 | self._network_loop_task = asyncio.create_task(self._network_loop())
126 |
127 | async def _network_loop(self):
128 | while True:
129 | try:
130 | await self._network_loop_inner()
131 | except Exception as exc:
132 | self._logger.exc(exc, "Network loop crashed")
133 | await asyncio.sleep_ms(1000)
134 |
135 | async def _network_loop_inner(self):
136 | with self._config.watch(CONFIG_WIFI_SSID, CONFIG_WIFI_PASSWORD) as cfg_watcher:
137 | iface = self._iface
138 | connection_started = False
139 |
140 | while True:
141 | force_reconnect = cfg_watcher.changed or self._reconnect_event.is_set()
142 | ssid, password = cfg_watcher.get()
143 | has_config = ssid is not None and password is not None
144 | should_connect = has_config and (self._connected_refs > 0)
145 |
146 | status = iface.status()
147 | if status == network.STAT_GOT_IP:
148 | config = iface.ifconfig()
149 | self.state.set(
150 | State(
151 | state=status,
152 | connected=True,
153 | ip=config[0],
154 | netmask=config[1],
155 | gateway=config[2],
156 | dns=config[3],
157 | )
158 | )
159 | if not self.connected.is_set():
160 | self.connected.set()
161 | for task in self._on_connect_tasks:
162 | await task(self.state.value)
163 |
164 | else:
165 | self.state.set(_state_disconnected)
166 | self.connected.clear()
167 |
168 | if force_reconnect or (connection_started and not should_connect):
169 | self._logger.info("Deactivating wifi")
170 | self.state.set(_state_disconnected)
171 | iface.active(False)
172 | connection_started = False
173 |
174 | if not connection_started and should_connect:
175 | self._reconnect_event.clear()
176 |
177 | self._logger.info("Activating wifi")
178 | self.state.set(_state_connecting)
179 | iface.active(False)
180 | iface.active(True)
181 | iface.connect(ssid, password)
182 | connection_started = True
183 |
184 | if not has_config:
185 | await cfg_watcher.wait()
186 | continue
187 |
188 | poll_interval = (
189 | self._poll_interval_connecting_ms
190 | if self.state.value.state != network.STAT_GOT_IP
191 | else self._poll_interval_connected_ms
192 | )
193 |
194 | try:
195 | await asyncio.wait_for_ms(cfg_watcher.wait(), poll_interval)
196 | except asyncio.TimeoutError:
197 | pass
198 |
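199 | # Hedged usage sketch: hold a connection reference for the duration of a
200 | # network operation (do_network_things is an illustrative placeholder):
201 | #
202 | #   async with network_manager.connection(timeout_ms=30000):
203 | #       await do_network_things()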
--------------------------------------------------------------------------------
/modules/ribbit/sensor-ui/index.html:
--------------------------------------------------------------------------------
(The HTML/CSS/JS content of this file did not survive extraction into this dump.
Recoverable structure: a single-page status UI titled "Frog Sensor Status" that shows
the logo (logo.png), a "Frog Sensor Status" section, and an "Other Info" section.)
--------------------------------------------------------------------------------
/modules/ribbit/sensor-ui/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ribbit-Network/ribbit-network-frog-software/63bb0857405d0a6702b407708456b434818a4b4e/modules/ribbit/sensor-ui/logo.png
--------------------------------------------------------------------------------
/modules/ribbit/sensors/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ribbit-Network/ribbit-network-frog-software/63bb0857405d0a6702b407708456b434818a4b4e/modules/ribbit/sensors/__init__.py
--------------------------------------------------------------------------------
/modules/ribbit/sensors/base.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | import asyncio
4 |
5 |
6 | class BaseSensor:
7 | def __init__(self, registry, id):
8 | self._output = registry.sensors_output
9 | self._sensor_id = id
10 | self._logger = logging.getLogger("sensor." + self.config.name)
11 |
12 | def export(self):
13 | return {}
14 |
15 |
16 | class PollingSensor(BaseSensor):
17 | def __init__(self, registry, id, interval):
18 | super().__init__(registry, id)
19 | self._interval_ms = int(interval * 1000)
20 |
21 | async def loop(self):
22 | while True:
23 | try:
24 | await self.read_once()
25 | await self._output.write(self.export())
26 | except Exception as exc:
27 | self._logger.exc(exc, "Exception in polling loop")
28 |
29 | await asyncio.sleep_ms(self._interval_ms)
30 |
31 | async def read_once(self):
32 | pass
33 |
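34 | # Hedged sketch of a minimal PollingSensor subclass (names are illustrative):
35 | #
36 | #   class Uptime(PollingSensor):
37 | #       config = ...  # a ribbit.config.Object describing the sensor settings
38 | #
39 | #       async def read_once(self):
40 | #           self.uptime_ms = time.ticks_ms()
41 | #
42 | #       def export(self):
43 | #           return {"uptime_ms": self.uptime_ms}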
--------------------------------------------------------------------------------
/modules/ribbit/sensors/battery.py:
--------------------------------------------------------------------------------
1 | # Based on work by Scopel Emanuele (@scopelemanuele)
2 | # See: https://github.com/scopelemanuele/Micropython-LC709203F/tree/master
3 |
4 | import time
5 | import asyncio
6 |
7 | from struct import unpack
8 |
9 | import ribbit.config as _config
10 | from ribbit.utils.time import isotime
11 |
12 | from micropython import const
13 |
14 | from . import base as _base
15 |
16 | CRC_TABLE = bytearray([
17 | 0x00, 0x07, 0x0E, 0x09, 0x1C, 0x1B, 0x12, 0x15,
18 | 0x38, 0x3F, 0x36, 0x31, 0x24, 0x23, 0x2A, 0x2D,
19 | 0x70, 0x77, 0x7E, 0x79, 0x6C, 0x6B, 0x62, 0x65,
20 | 0x48, 0x4F, 0x46, 0x41, 0x54, 0x53, 0x5A, 0x5D,
21 | 0xE0, 0xE7, 0xEE, 0xE9, 0xFC, 0xFB, 0xF2, 0xF5,
22 | 0xD8, 0xDF, 0xD6, 0xD1, 0xC4, 0xC3, 0xCA, 0xCD,
23 | 0x90, 0x97, 0x9E, 0x99, 0x8C, 0x8B, 0x82, 0x85,
24 | 0xA8, 0xAF, 0xA6, 0xA1, 0xB4, 0xB3, 0xBA, 0xBD,
25 | 0xC7, 0xC0, 0xC9, 0xCE, 0xDB, 0xDC, 0xD5, 0xD2,
26 | 0xFF, 0xF8, 0xF1, 0xF6, 0xE3, 0xE4, 0xED, 0xEA,
27 | 0xB7, 0xB0, 0xB9, 0xBE, 0xAB, 0xAC, 0xA5, 0xA2,
28 | 0x8F, 0x88, 0x81, 0x86, 0x93, 0x94, 0x9D, 0x9A,
29 | 0x27, 0x20, 0x29, 0x2E, 0x3B, 0x3C, 0x35, 0x32,
30 | 0x1F, 0x18, 0x11, 0x16, 0x03, 0x04, 0x0D, 0x0A,
31 | 0x57, 0x50, 0x59, 0x5E, 0x4B, 0x4C, 0x45, 0x42,
32 | 0x6F, 0x68, 0x61, 0x66, 0x73, 0x74, 0x7D, 0x7A,
33 | 0x89, 0x8E, 0x87, 0x80, 0x95, 0x92, 0x9B, 0x9C,
34 | 0xB1, 0xB6, 0xBF, 0xB8, 0xAD, 0xAA, 0xA3, 0xA4,
35 | 0xF9, 0xFE, 0xF7, 0xF0, 0xE5, 0xE2, 0xEB, 0xEC,
36 | 0xC1, 0xC6, 0xCF, 0xC8, 0xDD, 0xDA, 0xD3, 0xD4,
37 | 0x69, 0x6E, 0x67, 0x60, 0x75, 0x72, 0x7B, 0x7C,
38 | 0x51, 0x56, 0x5F, 0x58, 0x4D, 0x4A, 0x43, 0x44,
39 | 0x19, 0x1E, 0x17, 0x10, 0x05, 0x02, 0x0B, 0x0C,
40 | 0x21, 0x26, 0x2F, 0x28, 0x3D, 0x3A, 0x33, 0x34,
41 | 0x4E, 0x49, 0x40, 0x47, 0x52, 0x55, 0x5C, 0x5B,
42 | 0x76, 0x71, 0x78, 0x7F, 0x6A, 0x6D, 0x64, 0x63,
43 | 0x3E, 0x39, 0x30, 0x37, 0x22, 0x25, 0x2C, 0x2B,
44 | 0x06, 0x01, 0x08, 0x0F, 0x1A, 0x1D, 0x14, 0x13,
45 | 0xAE, 0xA9, 0xA0, 0xA7, 0xB2, 0xB5, 0xBC, 0xBB,
46 | 0x96, 0x91, 0x98, 0x9F, 0x8A, 0x8D, 0x84, 0x83,
47 | 0xDE, 0xD9, 0xD0, 0xD7, 0xC2, 0xC5, 0xCC, 0xCB,
48 | 0xE6, 0xE1, 0xE8, 0xEF, 0xFA, 0xFD, 0xF4, 0xF3
49 | ])
50 |
51 | # A sensor that reads the battery voltage and reports it.
52 | # Inherits from PollingSensor, which means it will be polled at regular intervals.
53 | # The battery sensor on the board is the LC709203F sensor.
54 | # That sensor can be found on the main i2c bus at address 0x0B.
55 |
56 | class Battery(_base.PollingSensor):
57 | # The configuration schema for the battery sensor.
58 | config = _config.Object(
59 | name="battery",
60 | keys=[
61 | _config.Integer(name="interval", default=60),
62 | ],
63 | )
64 |
65 | REG_VCELL = const(0x09)
66 | REG_POWER = const(0x15)
67 | ADDRESS = const(0x0B)
68 |
69 | # The constructor for the battery sensor.
70 | # The registry is passed in, which contains the i2c bus.
71 | # The id is the id of the sensor, and the interval is the interval at which the sensor should be polled.
72 | def __init__(self, registry, id, interval=60):
73 | # Call the constructor of the base class.
74 | super().__init__(registry, id, interval)
75 |
76 | # The i2c bus is stored in the registry, so we can access it here.
77 | self._i2c_bus = registry.i2c_bus
78 |
79 | self.voltage = None
80 |
81 | async def _crc8(self, data):
82 | crc = 0
83 | for d in data:
84 | crc = CRC_TABLE[crc ^ d]
85 | return crc
86 |
87 | async def _read_register(self, reg):
88 | # Read the register from the sensor.
89 | async with self._i2c_bus.lock:
90 | data = int.from_bytes(self._i2c_bus.readfrom_mem(self.ADDRESS, reg, 2), 'little') & 0xFFFF
91 | await asyncio.sleep_ms(10)
92 | return data
93 |
94 | async def _write_register(self, reg, data):
95 | # Create data array to hold bytes including crc
96 | data_array = bytearray(5)
97 | data_array[0] = 0x16
98 | data_array[1] = reg
99 | data_array[2] = (data & 0x00FF)
100 | data_array[3] = ((data & 0xFF00) >> 8)
101 |         data_array[4] = await self._crc8(data_array[:4])
102 |
103 | # Write the register to the sensor.
104 | async with self._i2c_bus.lock:
105 |             self._i2c_bus.writeto_mem(self.ADDRESS, reg, data_array[2:])  # low byte, high byte, CRC
106 | await asyncio.sleep_ms(10)
107 |
108 | async def wakeup(self):
109 | await self._write_register(self.REG_POWER, 0x0001)
110 |
111 | async def sleep(self):
112 | await self._write_register(self.REG_POWER, 0x0002)
113 |
114 | # The read_once method is called every time the sensor is polled.
115 | async def read_once(self):
116 | # Wake up the sensor
117 | await self.wakeup()
118 |
119 | # Wait for 0.5 seconds for the sensor to wake up
120 | await asyncio.sleep_ms(500)
121 |
122 | # Read the two bytes of voltage from the sensor.
123 | mv_data = await self._read_register(self.REG_VCELL)
124 | self.voltage = mv_data / 1000
125 |
126 | # Put the sensor to sleep
127 | await self.sleep()
128 |
129 | # The export method is called to get the data from the sensor.
130 | def export(self):
131 | # Return the voltage as a dictionary.
132 | return {
133 | "t": isotime(time.time()),
134 | "voltage": self.voltage,
135 | }
--------------------------------------------------------------------------------
/modules/ribbit/sensors/board.py:
--------------------------------------------------------------------------------
1 | import binascii
2 | import sys
3 | import time
4 | import gc
5 |
6 | import ribbit.config as _config
7 | from ribbit.utils.time import isotime
8 | from . import base as _base
9 |
10 |
11 | class Board(_base.PollingSensor):
12 | config = _config.Object(
13 | name="board",
14 | keys=[
15 | _config.Integer(name="interval", default=24 * 3600),
16 | ],
17 | )
18 |
19 | def __init__(self, registry, id, interval=24 * 3600):
20 | super().__init__(registry, id, interval)
21 |
22 | def export(self):
23 | import __version__
24 |
25 | return {
26 | "t": isotime(time.time()),
27 |
28 | "board": sys.implementation._machine,
29 | "version": __version__.version,
30 | }
31 |
32 |
33 | class Memory(_base.PollingSensor):
34 | config = _config.Object(
35 | name="memory",
36 | keys=[
37 | _config.Integer(name="interval", default=60),
38 | ],
39 | )
40 |
41 | def __init__(self, registry, id, interval=60):
42 | super().__init__(registry, id, interval)
43 |
44 | self.allocated = None
45 | self.free = None
46 |
47 | async def read_once(self):
48 | gc.collect()
49 | self.allocated, self.free = gc.mem_alloc(), gc.mem_free()
50 |
51 | def export(self):
52 | return {
53 | "t": isotime(time.time()),
54 |
55 | "allocated": self.allocated,
56 | "free": self.free,
57 | }
58 |
--------------------------------------------------------------------------------
/modules/ribbit/sensors/dps310.py:
--------------------------------------------------------------------------------
1 | import time
2 | import asyncio
3 | from micropython import const
4 |
5 | import ribbit.config as _config
6 | from ribbit.utils.time import isotime
7 |
8 | from . import base as _base
9 |
10 | DEFAULT_ADDR = const(0x77)
11 |
12 | _scale_factors = [
13 | 524288,
14 | 1572864,
15 | 3670016,
16 | 7864320,
17 | 253952,
18 | 516096,
19 | 1040384,
20 | 2088960,
21 | ]
22 |
23 |
24 | def _two_complement(val, bits):
25 | if val >> (bits - 1):
26 | val -= 1 << bits
27 | return val
28 |
29 |
30 | class DPS310(_base.PollingSensor):
31 | config = _config.Object(
32 | name="dps310",
33 | keys=[
34 | _config.String(name="id"),
35 | _config.Integer(name="address"),
36 | _config.Integer(name="interval", default=60),
37 | _config.Integer(name="pressure_oversampling", default=6),
38 | _config.Integer(name="temperature_oversampling", default=6),
39 | ],
40 | )
41 |
42 | def __init__(
43 | self,
44 | registry,
45 | id,
46 | address,
47 | interval=60,
48 | pressure_oversampling=6,
49 | temperature_oversampling=6,
50 | ):
51 | super().__init__(registry, id, interval)
52 |
53 | self._i2c_bus = registry.i2c_bus
54 | self._i2c_addr = address
55 |
56 | self._buf = memoryview(bytearray(16))
57 |
58 | self._pressure_oversampling = pressure_oversampling
59 | self._pressure_scale = _scale_factors[self._pressure_oversampling]
60 |
61 | self._temperature_oversampling = temperature_oversampling
62 | self._temperature_scale = _scale_factors[self._temperature_oversampling]
63 |
64 | self._pressure_cfg = self._pressure_oversampling
65 | self._temperature_cfg = (1 << 7) | self._temperature_oversampling
66 | self._cfg_reg = 0
67 | if self._pressure_oversampling > 3:
68 | self._cfg_reg |= 1 << 2
69 |         if self._temperature_oversampling > 3:
70 | self._cfg_reg |= 1 << 3
71 |
72 | self._initialized = False
73 | self._c0 = None
74 | self._c1 = None
75 | self._c00 = None
76 | self._c10 = None
77 | self._c01 = None
78 | self._c11 = None
79 | self._c20 = None
80 | self._c21 = None
81 | self._c30 = None
82 |
83 | self.last_update = None
84 | self.temperature = None
85 | self.pressure = None
86 |
87 | async def _read_coefficients(self):
88 | async with self._i2c_bus.lock:
89 | buf = self._i2c_bus.readfrom_mem(self._i2c_addr, 0x10, 18)
90 |
91 | self._c0 = _two_complement((buf[0] << 4) | (buf[1] >> 4), 12)
92 | self._c1 = _two_complement(((buf[1] & 0x0F) << 8) | buf[2], 12)
93 | self._c00 = _two_complement((buf[3] << 12) | (buf[4] << 4) | (buf[5] >> 4), 20)
94 | self._c10 = _two_complement(
95 | ((buf[5] & 0x0F) << 16) | (buf[6] << 8) | buf[7], 20
96 | )
97 | self._c01 = _two_complement((buf[8] << 8) | buf[9], 16)
98 | self._c11 = _two_complement((buf[10] << 8) | buf[11], 16)
99 | self._c20 = _two_complement((buf[12] << 8) | buf[13], 16)
100 | self._c21 = _two_complement((buf[14] << 8) | buf[15], 16)
101 | self._c30 = _two_complement((buf[16] << 8) | buf[17], 16)
102 |
103 | async def _read_register(self, addr, size):
104 | buf = self._buf[:size]
105 | async with self._i2c_bus.lock:
106 | self._i2c_bus.readfrom_mem_into(
107 | self._i2c_addr,
108 | addr,
109 | buf,
110 | )
111 | await asyncio.sleep_ms(10)
112 | return buf
113 |
114 | async def _write_register(self, addr, value):
115 | self._buf[0] = value
116 | async with self._i2c_bus.lock:
117 | self._i2c_bus.writeto_mem(
118 | self._i2c_addr,
119 | addr,
120 | self._buf[:1],
121 | )
122 | await asyncio.sleep_ms(10)
123 |
124 | async def _read_raw_measurement(self, addr):
125 | buf = await self._read_register(addr, 3)
126 |         return _two_complement((buf[0] << 16) | (buf[1] << 8) | buf[2], 24)
127 |
128 | async def _wait_status(self, bit):
129 | while True:
130 | status = ((await self._read_register(0x08, 1))[0] >> bit) & 0x01
131 | if status:
132 | break
133 | await asyncio.sleep_ms(10)
134 |
135 | async def initialize(self):
136 | await self._write_register(0x0C, 0b1001) # Generate a soft reset
137 | await asyncio.sleep_ms(10)
138 |
139 | await self._write_register(0x28, 1 << 7)
140 |
141 | buf = await self._read_register(0x0D, 1)
142 | rev_id = buf[0] >> 4
143 | prod_id = buf[0] & 0x0F
144 | self._logger.info(
145 | "Reading pressure from DPS310 (rev_id=%d, prod_id=%d)", rev_id, prod_id
146 | )
147 | await self._wait_status(7)
148 |
149 | self._logger.info("Reading coefficients")
150 | await self._read_coefficients()
151 |
152 | self._logger.info("Setting configuration")
153 | await self._write_register(0x06, self._pressure_cfg)
154 | await self._write_register(0x07, self._temperature_cfg)
155 | await self._write_register(0x09, self._cfg_reg)
156 | await self._wait_status(6)
157 |
158 | self._initialized = True
159 |
160 | async def read_once(self):
161 | if not self._initialized:
162 | await self.initialize()
163 |
164 | await self._write_register(0x08, 0x02)
165 | await self._wait_status(5)
166 |
167 | raw_temperature = (
168 | await self._read_raw_measurement(0x03) / self._temperature_scale
169 | )
170 | self.temperature = 0.5 * self._c0 + raw_temperature * self._c1
171 |
172 | await self._write_register(0x08, 0x01)
173 | await self._wait_status(4)
174 |
175 | raw_pressure = await self._read_raw_measurement(0x00) / self._pressure_scale
176 | self.pressure = (
177 | self._c00
178 | + raw_pressure
179 | * (self._c10 + raw_pressure * (self._c20 + raw_pressure * self._c30))
180 | + raw_temperature
181 | * (self._c01 + raw_pressure * (self._c11 + raw_pressure * self._c21))
182 | ) / 100
183 |
184 | self.last_update = time.time()
185 |
186 | def export(self):
187 | return {
188 | "t": isotime(self.last_update),
189 |
190 | "temperature": self.temperature,
191 | "pressure": self.pressure,
192 | }
193 |
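194 | # The compensation math above follows the DPS310 datasheet:
195 | #   T = 0.5*c0 + c1*Traw_sc
196 | #   P = c00 + Praw_sc*(c10 + Praw_sc*(c20 + Praw_sc*c30))
197 | #          + Traw_sc*(c01 + Praw_sc*(c11 + Praw_sc*c21))
198 | # where Traw_sc and Praw_sc are the raw readings divided by the oversampling
199 | # scale factors, and the pressure result is divided by 100 to report hPa.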
--------------------------------------------------------------------------------
/modules/ribbit/sensors/gps.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 | import asyncio
4 | from micropython import const
5 | import ribbit.config as _config
6 | import ribbit.time_manager as _time
7 | from ribbit.utils.time import isotime
8 |
9 | from . import base as _base
10 |
11 | _MAX_NMEA_PACKET_LEN = const(80)
12 |
13 | _STATE_PACKET_START = const(0)
14 | _STATE_PACKET_DATA = const(1)
15 | _STATE_PACKET_CHECKSUM = const(2)
16 | _STATE_PACKET_CHECKSUM2 = const(3)
17 | _STATE_PACKET_END = const(4)
18 |
19 | DEFAULT_ADDR = const(0x10)
20 |
21 |
22 | def _append_checksum(packet):
23 | checksum = 0
24 | for c in packet[1:]:
25 | checksum ^= c
26 | return b"%s*%02x\r\n" % (
27 | packet,
28 | checksum,
29 | )
30 |
31 | def _obfuscate_gps_coordinate(coordinate):
32 | """
33 | Returns the coordinate, rounded to 2 digits of precision.
34 |
35 | Github Issue: #30
36 | https://github.com/Ribbit-Network/ribbit-network-frog-software/issues/30
37 | """
38 | gps_digits_precision = 2
39 |
40 | obfuscated = round(coordinate, gps_digits_precision)
41 | return obfuscated
42 |
43 | class GPS(_base.BaseSensor):
44 | config = _config.Object(
45 | name="gps",
46 | keys=[
47 | _config.String(name="id"),
48 | _config.Integer(name="address"),
49 | _config.Integer(name="interval", default=60),
50 | ],
51 | )
52 |
53 | def __init__(self, registry, id, address, interval=60):
54 | super().__init__(registry, id)
55 | self._i2c_bus = registry.i2c_bus
56 | self._i2c_addr = address
57 | self._report_interval = interval
58 | self._time_manager = registry.time_manager
59 |
60 | self.last_update = None
61 | self.last_fix = None
62 | self.latitude = None
63 | self.longitude = None
64 | self.altitude = None
65 | self.geoid_height = None
66 | self.has_fix = False
67 | self.satellites = 0
68 | self._first_fix = False
69 | self._last_time_update = None
70 |
71 | self._stop_event = asyncio.Event()
72 |
73 | async def loop(self):
74 | while True:
75 | try:
76 | await self._read_loop_inner()
77 | except Exception as exc:
78 | self._logger.exc(exc, "Error in GPS loop")
79 | await asyncio.sleep_ms(1000)
80 |
81 | async def _read_loop_inner(self):
82 | async with self._i2c_bus.lock:
83 | # Reduce the noise by only enabling the GNGGA sequence:
84 | self._i2c_bus.writeto(
85 | self._i2c_addr,
86 | _append_checksum(b"$PMTK314,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0"),
87 | )
88 |             # Only return a fix every report interval
89 | self._i2c_bus.writeto(
90 | self._i2c_addr,
91 | _append_checksum(
92 | b"$PMTK300,%d,0,0,0,0" % int(self._report_interval * 1000)
93 | ),
94 | )
95 |
96 | buf = bytearray(255)
97 |
98 | pkt = bytearray(_MAX_NMEA_PACKET_LEN)
99 | pkt_mv = memoryview(pkt)
100 | pkt_len = 0
101 | checksum = 0
102 | expected_checksum = 0
103 | state = _STATE_PACKET_START
104 | poll_interval = (self._report_interval * 1000) // 2
105 | poll_interval += 1000
106 |
107 | previous_update = None
108 |
109 | while True:
110 | async with self._i2c_bus.lock:
111 | self._i2c_bus.readfrom_into(self._i2c_addr, buf)
112 |
113 | seen_data = False
114 | for c in buf:
115 | if c == 0x0A: # \n
116 | continue
117 |
118 | seen_data = True
119 |
120 | if state == _STATE_PACKET_START:
121 | if c == 0x24: # $
122 | pkt_len = 0
123 | checksum = 0
124 | expected_checksum = 0
125 | state = _STATE_PACKET_DATA
126 |
127 | elif state == _STATE_PACKET_DATA:
128 | if c == 0x2A: # *
129 | state = _STATE_PACKET_CHECKSUM
130 | else:
131 | pkt[pkt_len] = c
132 | pkt_len += 1
133 | checksum ^= c
134 | if (
135 | pkt_len == _MAX_NMEA_PACKET_LEN
136 | ): # Overlong packet, start over
137 | state = _STATE_PACKET_START
138 | continue
139 |
140 | elif (
141 | state == _STATE_PACKET_CHECKSUM or state == _STATE_PACKET_CHECKSUM2
142 | ):
143 | val = 0
144 | if 48 <= c <= 57: # 0-9
145 | val = c - 48
146 | elif 65 <= c <= 90: # A-Z
147 | val = 10 + c - 65
148 | else: # Malformed checksum byte (not in 0-9A-Z), start over
149 | state = _STATE_PACKET_START
150 | continue
151 |
152 | expected_checksum = (expected_checksum << 4) + val
153 | state += 1
154 |
155 | elif state == _STATE_PACKET_END:
156 |                     if c == 0x0D and checksum == expected_checksum and pkt_len >= 5:
157 | self._parse_packet(pkt_mv[0:pkt_len])
158 |
159 | state = _STATE_PACKET_START
160 |
161 | if not seen_data and previous_update != self.last_update:
162 | previous_update = self.last_update
163 | await self._output.write(self.export())
164 |
165 | try:
166 | await asyncio.wait_for_ms(
167 | self._stop_event.wait(),
168 | 5 if seen_data else poll_interval,
169 | )
170 | return
171 | except asyncio.TimeoutError:
172 | pass
173 |
174 | def _parse_packet(self, pkt):
175 | if pkt[0:6] == b"GNGGA,":
176 | parts = bytes(pkt[6:]).split(b",")
177 | if len(parts) != 14:
178 | return
179 |
180 | self.has_fix = parts[5] != b"0"
181 | self.satellites = int(parts[6])
182 | self.last_update = time.time()
183 |
184 | if self.has_fix:
185 | latitude_raw = parts[1]
186 | if latitude_raw != b"":
187 | if latitude_raw[4:5] != b".":
188 | return
189 | latitude = float(latitude_raw[:2]) + float(latitude_raw[2:]) / 60
190 | if parts[2] == b"S":
191 | latitude = -latitude
192 | else:
193 | latitude = None
194 |
195 | longitude_raw = parts[3]
196 | if longitude_raw != b"":
197 | if longitude_raw[5:6] != b".":
198 | return
199 | longitude = float(longitude_raw[:3]) + float(longitude_raw[3:]) / 60
200 | if parts[4] == b"W":
201 | longitude = -longitude
202 | else:
203 | longitude = None
204 |
205 | self.last_fix = self.last_update
206 |
207 |             # Lat and Long are obfuscated here before storage to
208 | # ensure that precise coordinates never make it to any
209 | # logs or data storage.
210 | self.latitude = _obfuscate_gps_coordinate(latitude)
211 | self.longitude = _obfuscate_gps_coordinate(longitude)
212 |
213 | altitude_raw = parts[8]
214 | if altitude_raw != b"":
215 | self.altitude = float(altitude_raw)
216 |
217 | geoid_height_raw = parts[10]
218 | if geoid_height_raw != b"":
219 | self.geoid_height = float(geoid_height_raw)
220 |
221 | if not self._first_fix:
222 | self._logger.info(
223 | "Got GPS fix: latitude=%f longitude=%f satellites=%d",
224 | self.latitude,
225 | self.longitude,
226 | self.satellites,
227 | )
228 | self._first_fix = True
229 |
230 | elif pkt[0:6] == b"GNZDA,":
231 | if not self.has_fix:
232 | # The GPS could return bogus date/time before it has a fix.
233 | # To be on the safe side, only consider ZDA packets emitted while
234 | # the GPS has a fix.
235 | return
236 |
237 | parts = bytes(pkt[6:]).split(b",")
238 | if len(parts) != 6:
239 | return
240 |
241 | timepart = parts[0]
242 | hour = int(timepart[0:2])
243 | minute = int(timepart[2:4])
244 | second = int(timepart[4:6])
245 | day = int(parts[1])
246 | month = int(parts[2])
247 | year = int(parts[3])
248 |
249 | if self._time_manager is not None:
250 | t = time.mktime((year, month, day, hour, minute, second, 0, 0))
251 | self._time_manager.set_time(_time.TIMESOURCE_GPS, t)
252 |
253 | async def read_once(self):
254 | pass
255 |
256 | def export(self):
257 | return {
258 | "t": isotime(self.last_update),
259 |
260 | "has_fix": self.has_fix,
261 |
262 | "latitude": self.latitude,
263 | "longitude": self.longitude,
264 | "altitude": self.altitude,
265 |
266 | "geoid_height": self.geoid_height,
267 | "satellites_count": self.satellites,
268 | }
269 |
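270 | # GNGGA field layout used by _parse_packet above (indices into `parts`, the
271 | # comma-separated fields after "GNGGA,"):
272 | #   0: UTC time    1-2: latitude, N/S    3-4: longitude, E/W    5: fix quality
273 | #   6: satellites in use    8: altitude (m)    10: geoid height (m)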
--------------------------------------------------------------------------------
/modules/ribbit/sensors/gps_test.py:
--------------------------------------------------------------------------------
1 | def test_checksum():
2 | from ribbit.sensors.gps import _append_checksum
3 |
4 | assert (
5 | _append_checksum(b"$PMTK314,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0")
6 | == b"$PMTK314,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0*29\r\n"
7 | )
8 |
9 |
10 | def test_location_obfuscation():
11 | from ribbit.sensors.gps import _obfuscate_gps_coordinate
12 |
13 | assert (
14 | _obfuscate_gps_coordinate(47.6350688) == 47.64
15 | )
16 |
17 | assert (
18 | _obfuscate_gps_coordinate(-122.3208268) == -122.32
19 | )
20 |
21 | assert (
22 | _obfuscate_gps_coordinate(0.0000000001) == 0.00
23 | )
24 |
--------------------------------------------------------------------------------
/modules/ribbit/sensors/scd30.py:
--------------------------------------------------------------------------------
1 | import time
2 | import asyncio
3 | from ustruct import unpack
4 |
5 | from micropython import const
6 |
7 | import ribbit.config as _config
8 | from ribbit.utils.time import isotime
9 |
10 | from . import base as _base
11 |
12 | DEFAULT_ADDR = const(0x61)
13 |
14 | _CMD_CONTINUOUS_MEASUREMENT = const(0x0010)
15 | _CMD_SET_MEASUREMENT_INTERVAL = const(0x4600)
16 | _CMD_GET_DATA_READY = const(0x0202)
17 | _CMD_READ_MEASUREMENT = const(0x0300)
18 | _CMD_AUTOMATIC_SELF_CALIBRATION = const(0x5306)
19 | _CMD_SET_FORCED_RECALIBRATION_FACTOR = const(0x5204)
20 | _CMD_SET_TEMPERATURE_OFFSET = const(0x5403)
21 | _CMD_SET_ALTITUDE_COMPENSATION = const(0x5102)
22 | _CMD_SOFT_RESET = const(0xD304)
23 |
24 |
25 | _READ_DELAY_MS = const(10)
26 |
27 | crc8_31 = b"\x00\x31\x62\x53\xc4\xf5\xa6\x97\xb9\x88\xdb\xea\x7d\x4c\x1f\x2e\x43\x72\x21\x10\x87\xb6\xe5\xd4\xfa\xcb\x98\xa9\x3e\x0f\x5c\x6d\x86\xb7\xe4\xd5\x42\x73\x20\x11\x3f\x0e\x5d\x6c\xfb\xca\x99\xa8\xc5\xf4\xa7\x96\x01\x30\x63\x52\x7c\x4d\x1e\x2f\xb8\x89\xda\xeb\x3d\x0c\x5f\x6e\xf9\xc8\x9b\xaa\x84\xb5\xe6\xd7\x40\x71\x22\x13\x7e\x4f\x1c\x2d\xba\x8b\xd8\xe9\xc7\xf6\xa5\x94\x03\x32\x61\x50\xbb\x8a\xd9\xe8\x7f\x4e\x1d\x2c\x02\x33\x60\x51\xc6\xf7\xa4\x95\xf8\xc9\x9a\xab\x3c\x0d\x5e\x6f\x41\x70\x23\x12\x85\xb4\xe7\xd6\x7a\x4b\x18\x29\xbe\x8f\xdc\xed\xc3\xf2\xa1\x90\x07\x36\x65\x54\x39\x08\x5b\x6a\xfd\xcc\x9f\xae\x80\xb1\xe2\xd3\x44\x75\x26\x17\xfc\xcd\x9e\xaf\x38\x09\x5a\x6b\x45\x74\x27\x16\x81\xb0\xe3\xd2\xbf\x8e\xdd\xec\x7b\x4a\x19\x28\x06\x37\x64\x55\xc2\xf3\xa0\x91\x47\x76\x25\x14\x83\xb2\xe1\xd0\xfe\xcf\x9c\xad\x3a\x0b\x58\x69\x04\x35\x66\x57\xc0\xf1\xa2\x93\xbd\x8c\xdf\xee\x79\x48\x1b\x2a\xc1\xf0\xa3\x92\x05\x34\x67\x56\x78\x49\x1a\x2b\xbc\x8d\xde\xef\x82\xb3\xe0\xd1\x46\x77\x24\x15\x3b\x0a\x59\x68\xff\xce\x9d\xac"
28 |
29 |
30 | class CRCError(Exception):
31 | pass
32 |
33 |
34 | def _crc8(a, b):  # CRC-8 over two bytes, polynomial 0x31, init 0xFF (Sensirion)
35 | crc = 0xFF
36 | crc = crc8_31[crc ^ a]
37 | crc = crc8_31[crc ^ b]
38 | return crc
39 |
40 |
41 | def _decode16(buf):
42 | """Decode a buffer containing three bytes [MSB, LSB, CRC] from the sensor and return an int"""
43 | if _crc8(buf[0], buf[1]) != buf[2]:
44 | raise CRCError()
45 |
46 | return (buf[0] << 8) | buf[1]
47 |
48 |
49 | def _decode_float(buf):
50 | """Decode a buffer containing two sets of three bytes from the sensor and return a float"""
51 | if _crc8(buf[0], buf[1]) != buf[2]:
52 | raise CRCError()
53 | if _crc8(buf[3], buf[4]) != buf[5]:
54 | raise CRCError()
55 | buf[2] = buf[3]
56 | buf[3] = buf[4]
57 | return unpack(">f", buf)[0]
58 |
59 |
60 | def _encode16(buf, data):
61 | """Encode an 16 bit int into a set of three bytes [MSB, LSB, CRC]"""
62 | buf[0] = data >> 8
63 | buf[1] = data & 0xFF
64 | buf[2] = _crc8(buf[0], buf[1])
65 |
66 |
67 | class SCD30(_base.PollingSensor):
68 | config = _config.Object(
69 | name="scd30",
70 | keys=[
71 | _config.String(name="id"),
72 | _config.Integer(name="address"),
73 | _config.Integer(name="interval", default=60),
74 | ],
75 | )
76 |
77 | def __init__(self, registry, id, address, interval=60):
78 | super().__init__(registry, id, interval)
79 |
80 | self._i2c_bus = registry.i2c_bus
81 | self._i2c_addr = address
82 |
83 | self._req_buf = memoryview(bytearray(5))
84 | self._resp_buf = memoryview(bytearray(18))
85 |
86 | if not 2 <= interval <= 1800:
87 | raise ValueError("measurement interval out of range")
88 | self._mesurement_interval = int(interval)
89 | self._mesurement_interval_ms = int(interval) * 1000
90 |
91 | self._initialized = False
92 |
93 | self._pressure_reference = 0
94 | self._pressure_updated = True
95 |
96 | self._temperature_reference = 0
97 | self._temperature_updated = False
98 |
99 | self.last_update = None
100 | self.co2 = None
101 | self.temperature = None
102 | self._temperature_offset = None
103 | self.humidity = None
104 |
105 | async def _read_register_into(self, addr, buf):
106 | async with self._i2c_bus.lock:
107 | req = self._req_buf[:2]
108 | req[0] = addr >> 8
109 | req[1] = addr & 0xFF
110 | self._i2c_bus.writeto(self._i2c_addr, req)
111 | await asyncio.sleep_ms(_READ_DELAY_MS)
112 |
113 | self._i2c_bus.readfrom_into(self._i2c_addr, buf)
114 | await asyncio.sleep_ms(_READ_DELAY_MS)
115 |
116 | async def _read_register(self, addr):
117 | buf = self._resp_buf[:3]
118 | await self._read_register_into(addr, buf)
119 | return _decode16(buf)
120 |
121 | async def _send_command(self, addr, value=None):
122 | if value is not None:
123 | buf = self._req_buf[:5]
124 | _encode16(buf[2:5], value)
125 | else:
126 | buf = self._req_buf[:2]
127 |
128 | buf[0] = addr >> 8
129 | buf[1] = addr & 0xFF
130 | async with self._i2c_bus.lock:
131 | self._i2c_bus.writeto(self._i2c_addr, buf)
132 | await asyncio.sleep_ms(_READ_DELAY_MS)
133 |
134 | def set_pressure(self, pressure):
135 | self._pressure_reference = pressure
136 | self._pressure_updated = True
137 |
138 | def set_temperature(self, temperature):
139 | self._temperature_reference = int(temperature * 100)
140 | self._temperature_updated = True
141 |
142 | async def _wait_measurement(self):
143 | count = 0
144 | while True:
145 | status = await self._read_register(_CMD_GET_DATA_READY)
146 | if status:
147 | return
148 | count += 1
149 | await asyncio.sleep_ms(100)
150 |
151 | async def initialize(self):
152 |         await self._send_command(_CMD_SET_MEASUREMENT_INTERVAL, self._measurement_interval)
153 |
154 | self._temperature_offset = await self._read_register(
155 | _CMD_SET_TEMPERATURE_OFFSET
156 | )
157 | self._logger.info(
158 | "Current temperature offset: %.2f °C", self._temperature_offset / 100
159 | )
160 |
161 | await self._send_command(_CMD_AUTOMATIC_SELF_CALIBRATION, 1)
162 | self._initialized = True
163 |
164 | async def read_once(self):
165 | if not self._initialized:
166 | await self.initialize()
167 |
168 | if self._pressure_updated:
169 | if self._pressure_reference != 0:
170 | self._logger.info(
171 | "Submitting pressure data to the sensor (%d hPa)",
172 | self._pressure_reference,
173 | )
174 | await self._send_command(
175 | _CMD_CONTINUOUS_MEASUREMENT,
176 | int(self._pressure_reference),
177 | )
178 | self._pressure_updated = False
179 |
180 | if self._temperature_updated and self.temperature is not None:
181 | offset = (
182 | int(self.temperature * 100)
183 | - self._temperature_reference
184 | + self._temperature_offset
185 | )
186 | if offset < 0:
187 | offset = 0
188 | self._logger.info(
189 | "Submitting temperature offset to the sensor (%.2f °C)",
190 | offset / 100,
191 | )
192 | await self._send_command(_CMD_SET_TEMPERATURE_OFFSET, offset)
193 | self._temperature_offset = offset
194 | self._temperature_updated = False
195 |
196 | await self._wait_measurement()
197 |
198 | buf = self._resp_buf[:18]
199 | await self._read_register_into(_CMD_READ_MEASUREMENT, buf)
200 |
201 | co2 = _decode_float(buf[0:6])
202 | temperature = _decode_float(buf[6:12])
203 | humidity = _decode_float(buf[12:18])
204 |
205 | self.last_update = time.time()
206 | self.co2 = co2
207 | self.temperature = temperature
208 | self.humidity = humidity
209 |
210 | def export(self):
211 | return {
212 | "t": isotime(self.last_update),
213 |
214 | "temperature": self.temperature,
215 | "temperature_offset": (
216 | self._temperature_offset / 100
217 | if self._temperature_offset is not None
218 | else None
219 | ),
220 | "co2": self.co2,
221 | "humidity": self.humidity,
222 | }
223 |
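Note: the crc8_31 constant above is the lookup table for the Sensirion CRC-8 used by the SCD30 (polynomial 0x31, initial value 0xFF, MSB-first, no reflection). A minimal sketch that regenerates and checks the table; make_crc8_table is an illustrative name, not part of the driver:

    def make_crc8_table(poly=0x31):
        # Each entry is the CRC-8 of one byte, processed MSB-first with the given polynomial.
        table = bytearray(256)
        for i in range(256):
            crc = i
            for _ in range(8):
                crc = ((crc << 1) ^ poly) & 0xFF if crc & 0x80 else (crc << 1) & 0xFF
            table[i] = crc
        return bytes(table)

    assert make_crc8_table() == crc8_31
    assert _crc8(0xBE, 0xEF) == 0x92  # check word from the Sensirion datasheets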
--------------------------------------------------------------------------------
/modules/ribbit/time_manager.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import time
3 |
4 | import machine
5 | import asyncio
6 | from micropython import const
7 | from ribbit.utils.time import isotime as _isotime
8 |
9 | TIMESOURCE_UNKNOWN = const(0)
10 | TIMESOURCE_NTP = const(1)
11 | TIMESOURCE_GPS = const(2)
12 | TIMESOURCE_SIMULATOR = const(3)
13 |
14 | SOURCE_NAMES = {
15 | TIMESOURCE_UNKNOWN: "unknown",
16 | TIMESOURCE_NTP: "ntp",
17 | TIMESOURCE_GPS: "gps",
18 | TIMESOURCE_SIMULATOR: "simulator",
19 | }
20 |
21 |
22 | class TimeManager:
23 | def __init__(self, registry, update_interval_per_source=None):
24 | self._logger = logging.getLogger(__name__)
25 |
26 | import __version__
27 |
28 | self._minimum_year = __version__.build_year
29 |
30 | if update_interval_per_source is not None:
31 | self._update_intervals = update_interval_per_source
32 | else:
33 | self._update_intervals = {
34 | TIMESOURCE_NTP: 24 * 3600,
35 | TIMESOURCE_GPS: 3600,
36 | }
37 |
38 | self.has_valid_time = self.is_valid_time(time.time())
39 | self.last_time_update = None
40 | self.last_time_source = TIMESOURCE_UNKNOWN
41 | self.boot_time = None
42 |
43 | self._in_simulator = registry.in_simulator
44 |
45 | if not self._in_simulator:
46 | registry.network.on_connect_task(self._on_network_connect)
47 | else:
48 | # In the simulator, send one fake time update
49 | self.set_time(TIMESOURCE_SIMULATOR, time.time())
50 |
51 | def is_valid_time(self, t):
52 | return time.gmtime(t)[0] >= self._minimum_year
53 |
54 | def needs_time_update(self, source):
55 | if self.last_time_source is None or source > self.last_time_source:
56 | return True # A better source is available
57 |
58 |         elapsed = time.time() - self.last_time_update
59 |
60 |         return elapsed >= self._update_intervals[source]
61 |
62 | def set_time(self, source, t):
63 | if not self.needs_time_update(source):
64 | return
65 |
66 | if not self._in_simulator:
67 | tm = time.gmtime(t)
68 | machine.RTC().datetime((tm[0], tm[1], tm[2], tm[6] + 1, tm[3], tm[4], tm[5], 0))
69 |
70 | self._logger.info(
71 | "Setting time to %s (source: %s)", _isotime(t), SOURCE_NAMES[source]
72 | )
73 |
74 | self.last_time_source = source
75 | self.last_time_update = t
76 | self.has_valid_time = True
77 | self.boot_time = t
78 |
79 | async def _on_network_connect(self, _state):
80 | if self.needs_time_update(TIMESOURCE_NTP):
81 | try:
82 | import ntptime
83 | except ImportError:
84 | return
85 |
86 | self._logger.info("Fetching current time via NTP")
87 | t = None
88 | for _ in range(5):
89 | try:
90 | t = ntptime.time()
91 | break
92 | except OSError:
93 | await asyncio.sleep_ms(100)
94 | continue
95 |
96 | if t is not None and self.is_valid_time(t):
97 | self.set_time(TIMESOURCE_NTP, t)
98 |
99 | def export(self):
100 | return {
101 | "t": _isotime(self.last_time_update),
102 |
103 | "source": SOURCE_NAMES[self.last_time_source],
104 | "has_valid_time": self.has_valid_time,
105 | "boot_time": _isotime(self.boot_time),
106 | }
107 |
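Note: a minimal sketch of how a time source reports in; on_gps_fix and gps_timestamp are illustrative names, and set_time already applies the priority and interval policy through needs_time_update:

    def on_gps_fix(time_manager, gps_timestamp):
        # GPS outranks NTP (TIMESOURCE_GPS > TIMESOURCE_NTP), so the first fix is
        # accepted immediately; later fixes are applied at most once per hour.
        time_manager.set_time(TIMESOURCE_GPS, gps_timestamp)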
--------------------------------------------------------------------------------
/modules/ribbit/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Ribbit-Network/ribbit-network-frog-software/63bb0857405d0a6702b407708456b434818a4b4e/modules/ribbit/utils/__init__.py
--------------------------------------------------------------------------------
/modules/ribbit/utils/asyncio.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 |
4 | class WatchableValue:
5 | def __init__(self, value):
6 | self.value = value
7 | self._watchers = set()
8 |
9 | def watch(self):
10 | w = Watcher(self.value, self._release_watcher)
11 | self._watchers.add(w)
12 | return w
13 |
14 | def set(self, value):
15 | if value != self.value:
16 | self.value = value
17 | for w in self._watchers:
18 | w.notify(value)
19 |
20 | def _release_watcher(self, w):
21 | self._watchers.discard(w)
22 |
23 |
24 | class Watcher:
25 | def __init__(self, value, release_cb=None):
26 | self._value = value
27 | self._release_cb = release_cb
28 | self._changed = asyncio.Event()
29 | self.generation = 0
30 |
31 | def __enter__(self):
32 | return self
33 |
34 | def __exit__(self, exc_type, exc, tb):
35 | self.release()
36 |
37 | def release(self):
38 | if self._release_cb is not None:
39 | self._release_cb(self)
40 |
41 | def peek(self):
42 | return self._value
43 |
44 | def get(self):
45 | self._changed.clear()
46 | return self._value
47 |
48 | def notify(self, value):
49 | self._value = value
50 | self.generation += 1
51 | self._changed.set()
52 |
53 | @property
54 | def changed(self):
55 | return self._changed.is_set()
56 |
57 | def wait(self):
58 | return self._changed.wait()
59 |
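Note: a minimal usage sketch for WatchableValue and Watcher inside an asyncio task; connection_state and log_state_changes are illustrative names:

    connection_state = WatchableValue("disconnected")

    async def log_state_changes():
        # The context manager releases the watcher when the task exits.
        with connection_state.watch() as watcher:
            while True:
                await watcher.wait()   # blocks until set() changes the value
                state = watcher.get()  # reads the value and clears the event
                print("state changed to", state)

    # Elsewhere: connection_state.set("connected") wakes every pending watcher.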
--------------------------------------------------------------------------------
/modules/ribbit/utils/i2c.py:
--------------------------------------------------------------------------------
1 | import machine
2 | import asyncio
3 |
4 |
5 | class LockableI2CBus:
6 | def __init__(self, id=None, scl=None, sda=None, freq=None):
7 | if id is not None:
8 | i2c = machine.I2C(id, scl=scl, sda=sda, freq=freq)
9 | else:
10 | i2c = machine.SoftI2C(scl, sda, freq=freq)
11 |
12 | self._i2c = i2c
13 | self.lock = asyncio.Lock()
14 |
15 | def __getattr__(self, name):
16 | return getattr(self._i2c, name)
17 |
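Note: a minimal sketch of how the sensor drivers share this bus; the pin numbers and the probe coroutine are illustrative:

    import machine

    bus = LockableI2CBus(0, scl=machine.Pin(4), sda=machine.Pin(5), freq=100000)

    async def probe():
        # Hold the asyncio lock for the whole transaction, as the drivers do;
        # scan() is forwarded to the underlying machine.I2C through __getattr__.
        async with bus.lock:
            return bus.scan()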
--------------------------------------------------------------------------------
/modules/ribbit/utils/ota.py:
--------------------------------------------------------------------------------
1 | import hashlib
2 | import logging
3 | from binascii import hexlify
4 |
5 |
6 | class OTAUpdate:
7 | def __init__(self, reader, sha256_hash, size):
8 | self.reader = reader
9 | self.sha256_hash = sha256_hash
10 | self.size = size
11 |
12 |
13 | class OTAManager:
14 | def __init__(self, in_simulator=False):
15 | self._logger = logging.getLogger(__name__)
16 | self._in_simulator = in_simulator
17 |
18 | def successful_boot(self):
19 | if self._in_simulator:
20 | self._logger.info("Running in simulator: skipping successful boot")
21 | return
22 |
23 | import esp32
24 | esp32.Partition.mark_app_valid_cancel_rollback()
25 |
26 | async def do_ota_update(self, u):
27 | if self._in_simulator:
28 | self._logger.info("Running in simulator: skipping update")
29 | return
30 |
31 | import esp32
32 |
33 | self._logger.info("Starting OTA update")
34 | partition = esp32.Partition(esp32.Partition.RUNNING).get_next_update()
35 | h = hashlib.sha256()
36 |
37 | block_count = partition.ioctl(4, None)
38 | block_size = partition.ioctl(5, None)
39 |
40 | self._logger.info("Block size is %d, update size is %d", block_size, u.size)
41 |
42 | if block_size * block_count < u.size:
43 |             raise Exception(
44 |                 "Update is too large: update is %d bytes, "
45 |                 "but the partition only has %d bytes"
46 |                 % (u.size, block_size * block_count)
47 |             )
48 |
49 | multiplier = 4
50 | buf = memoryview(bytearray(block_size * multiplier))
51 | block_id = 0
52 | total_read = 0
53 | while total_read < u.size:
54 | if block_id % 10 == 0:
55 | self._logger.info(
56 | "Processing block %d (%.2f %%)", block_id, 100 * total_read / u.size
57 | )
58 |
59 | dest_buf = buf[: u.size - total_read]
60 |
61 | n = 0
62 | while n < len(dest_buf):
63 | sz = await u.reader.readinto(dest_buf[n:])
64 | if sz == 0:
65 | break
66 | n += sz
67 |
68 | if n != len(dest_buf):
69 | raise Exception("unexpected EOF")
70 |
71 | total_read += n
72 |
73 | h.update(buf[:n])
74 |
75 | # For the last block, zero out the rest of the buffer
76 | while n < len(buf):
77 | buf[n] = 0
78 | n += 1
79 |
80 | partition.ioctl(6, block_id)
81 | partition.writeblocks(block_id, buf)
82 | block_id += multiplier
83 |
84 | partition.ioctl(
85 | 3, None
86 | ) # Sync the device, probably a no-op but it doesn't hurt
87 |
88 | self._logger.info("Finished flashing")
89 |
90 |         digest = hexlify(h.digest()).decode("ascii")
91 |         if digest != u.sha256_hash:
92 |             raise Exception("Wrong hash: got %s, expected %s" % (digest, u.sha256_hash))
93 |
94 | partition.set_boot()
95 |
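Note: a minimal sketch of the reader contract do_ota_update expects (an async readinto() that returns the number of bytes read, 0 at EOF); _BytesReader and flash are illustrative names:

    class _BytesReader:
        def __init__(self, data):
            self._data = memoryview(data)
            self._pos = 0

        async def readinto(self, buf):
            # Copy as many bytes as fit, advancing the cursor; 0 means EOF.
            n = min(len(buf), len(self._data) - self._pos)
            buf[:n] = self._data[self._pos : self._pos + n]
            self._pos += n
            return n

    async def flash(image, sha256_hex):
        ota = OTAManager()
        await ota.do_ota_update(OTAUpdate(_BytesReader(image), sha256_hex, len(image)))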
--------------------------------------------------------------------------------
/modules/ribbit/utils/time.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 |
4 | def isotime(t):
5 | if t is None:
6 | return None
7 |
8 | parts = time.gmtime(t)
9 |
10 | return "%04d-%02d-%02dT%02d:%02d:%02dZ" % (
11 | parts[0],
12 | parts[1],
13 | parts[2],
14 | parts[3],
15 | parts[4],
16 | parts[5],
17 | )
18 |
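Note: isotime formats whatever epoch time.gmtime uses, so the same timestamp renders differently across ports. For example, isotime(0) is "2000-01-01T00:00:00Z" on the ESP32 port (MicroPython's embedded epoch) but "1970-01-01T00:00:00Z" under CPython or the unix port, and isotime(None) returns None.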
--------------------------------------------------------------------------------
/tools/generate_static.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | BASE_PATH = "modules/ribbit/sensor-ui/"
4 |
5 | with open("modules/ribbit/_static.py", "w") as o:
6 | o.write("assets = {\n")
7 | first = True
8 |
9 | for dirpath, dirs, files in os.walk(BASE_PATH):
10 | assert dirpath.startswith(BASE_PATH)
11 | relative_dirpath = "/" + dirpath[len(BASE_PATH):]
12 | for filename in files:
13 | relative_filepath = os.path.join(relative_dirpath, filename)
14 | filepath = os.path.join(dirpath, filename)
15 |
16 | if not first:
17 | o.write("\n")
18 | else:
19 | first = False
20 |
21 | with open(filepath, "rb") as f:
22 | o.write(" %r: %r,\n" % (relative_filepath, f.read()))
23 |
24 | o.write("}\n")
25 |
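Note: the generated modules/ribbit/_static.py maps URL paths to raw file bytes; with the two files under sensor-ui/ it looks roughly like this (contents truncated here):

    assets = {
        '/index.html': b'<!DOCTYPE html>...',

        '/logo.png': b'\x89PNG\r\n\x1a\n...',
    }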
--------------------------------------------------------------------------------
/tools/upload-to-golioth.py:
--------------------------------------------------------------------------------
1 | import base64
2 | import json
3 | import os
4 |
5 | import requests
6 |
7 |
8 | def load_version():
9 | with open("__version__.py") as f:
10 | data = f.read()
11 |
12 | ret = {}
13 | exec(data, None, ret)
14 | return ret
15 |
16 |
17 | version_data = load_version()
18 |
19 | session = requests.Session()
20 | session.headers["x-api-key"] = os.environ["GOLIOTH_API_KEY"]
21 |
22 | project = os.environ["GOLIOTH_PROJECT"]
23 | blueprint = os.environ["GOLIOTH_BLUEPRINT"]
24 | rollout = os.environ.get("GOLIOTH_ROLLOUT", "false")
25 |
26 |
27 | req = {
28 | "blueprintId": blueprint,
29 | "package": "main",
30 | "projectId": project,
31 | "version": version_data["version"],
32 | }
33 |
34 | with open("firmware/micropython.bin", "rb") as f:
35 | req["content"] = base64.b64encode(f.read()).decode("ascii")
36 |
37 | r = session.post(
38 | "https://api.golioth.io/v1/artifacts",
39 | data=json.dumps(req),
40 | headers={
41 | "Content-Type": "application/json",
42 | },
43 | )
44 | r.raise_for_status()
45 | artifact = r.json()
46 |
47 | r = session.post(
48 | "https://api.golioth.io/v1/projects/%s/releases" % (project,),
49 | data=json.dumps(
50 | {
51 | "blueprintId": blueprint,
52 | "artifactIds": [
53 | artifact["data"]["id"],
54 | ],
55 | }
56 | ),
57 | headers={
58 | "Content-Type": "application/json",
59 | },
60 | )
61 | r.raise_for_status()
62 |
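Note: GOLIOTH_ROLLOUT is read into rollout above but never sent with the release, so the workflow's rollout flag currently has no effect. If the intent is to roll out immediately when GOLIOTH_ROLLOUT=true, the release body would need to carry that flag; a sketch, assuming the Golioth release API accepts a boolean rollout field:

    release_body = {
        "blueprintId": blueprint,
        "artifactIds": [artifact["data"]["id"]],
        "rollout": rollout.lower() == "true",  # assumed field name; confirm against the Golioth API docs
    }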
--------------------------------------------------------------------------------